From 7f1ca10f10ba3231d1ac95f92fb869d25b119c85 Mon Sep 17 00:00:00 2001
From: datomo
Date: Sun, 12 Feb 2023 13:13:38 +0100
Subject: [PATCH 001/436] added boilerplate for rowtype refactoring

---
 .../org/polypheny/db/PolyImplementation.java  |  21 ++
 .../algebra/AlgStructuredTypeFlattener.java   |  10 +-
 .../db/algebra/core/AlgFactories.java         |   3 +-
 .../polypheny/db/algebra/core/Project.java    |   4 +
 .../algebra/core/document/DocumentScan.java   |   3 +-
 .../algebra/core/document/DocumentValues.java |  15 +-
 .../document/LogicalDocumentValues.java       |  30 +--
 .../db/algebra/type/AlgDataTypeFactory.java   |   3 +
 .../db/algebra/type/AlgDocumentType.java      | 231 ++++++++++++++++++
 .../polypheny/db/algebra/type/StructKind.java |   5 +
 .../org/polypheny/db/tools/AlgBuilder.java    | 223 +++++++++++++----
 .../java/org/polypheny/db/type/PolyType.java  |  19 +-
 .../db/routing/routers/BaseRouter.java        |   9 +-
 .../db/routing/routers/DmlRouterImpl.java     |   2 +-
 .../java/org/polypheny/db/mql/DmlTest.java    |  14 ++
 .../db/adapter/mongodb/MongoRules.java        |  55 ++++-
 .../polypheny/db/webui/crud/LanguageCrud.java |  36 ++-
 .../polypheny/db/webui/models/DbColumn.java   |  17 +-
 .../polypheny/db/webui/models/FieldDef.java   |  32 +++
 .../org/polypheny/db/webui/models/Result.java |  10 +-
 20 files changed, 609 insertions(+), 133 deletions(-)
 create mode 100644 core/src/main/java/org/polypheny/db/algebra/type/AlgDocumentType.java
 create mode 100644 webui/src/main/java/org/polypheny/db/webui/models/FieldDef.java

diff --git a/core/src/main/java/org/polypheny/db/PolyImplementation.java b/core/src/main/java/org/polypheny/db/PolyImplementation.java
index 89e50fd1f8..136385c0d4 100644
--- a/core/src/main/java/org/polypheny/db/PolyImplementation.java
+++ b/core/src/main/java/org/polypheny/db/PolyImplementation.java
@@ -20,10 +20,13 @@
 import com.google.common.collect.ImmutableList;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.HashMap;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
+import java.util.stream.Collectors;
+import java.util.stream.StreamSupport;
 import javax.annotation.Nullable;
 import lombok.Getter;
 import lombok.experimental.Accessors;
@@ -362,4 +365,22 @@ public static void addMonitoringInformation( Statement statement, String kind, i
         }
     }
 
+
+    public List<List<Object>> getDocRows( Statement statement, boolean noLimit ) {
+        bindable = null;
+        if ( !Kind.DDL.contains( kind ) ) {
+            bindable = preparedResult.getBindable( CursorFactory.ARRAY );
+        }
+
+        Iterator<Object> iterator = createIterator( getBindable(), statement, true );
+
+        final Iterable<Object> iterable = () -> iterator;
+
+        return StreamSupport
+                .stream( iterable.spliterator(), false )
+                .map( d -> Arrays.asList( (Object[]) d ) )
+                .collect( Collectors.toList() );
+
+    }
+
 }
diff --git a/core/src/main/java/org/polypheny/db/algebra/AlgStructuredTypeFlattener.java b/core/src/main/java/org/polypheny/db/algebra/AlgStructuredTypeFlattener.java
index 3c5fe648e1..7468371a3e 100644
--- a/core/src/main/java/org/polypheny/db/algebra/AlgStructuredTypeFlattener.java
+++ b/core/src/main/java/org/polypheny/db/algebra/AlgStructuredTypeFlattener.java
@@ -97,6 +97,7 @@
 import org.polypheny.db.algebra.stream.LogicalDelta;
 import org.polypheny.db.algebra.type.AlgDataType;
 import org.polypheny.db.algebra.type.AlgDataTypeField;
+import org.polypheny.db.algebra.type.StructKind;
 import org.polypheny.db.languages.OperatorRegistry;
 import org.polypheny.db.nodes.Operator;
 import org.polypheny.db.plan.AlgOptCluster;
@@ -216,10 +217,11 @@ public AlgNode rewrite( AlgNode root ) {
         AlgNode flattened = 
getNewForOldRel( root ); flattenedRootType = flattened.getRowType(); - // If requested, add an additional projection which puts everything back into structured form for return to the client. + // If requested, add another projection which puts everything back into structured form for return to the client. restructured = false; List structuringExps = null; - if ( restructure ) { + + if ( restructure && root.getRowType().getStructKind() != StructKind.SEMI ) { iRestructureInput = 0; structuringExps = restructureFields( root.getRowType() ); } @@ -772,7 +774,9 @@ private void flattenProjections( if ( !prefix.equals( "" ) ) { fieldName = prefix + "$" + fieldName; } - flattenProjection( shuttle, exp, fieldName, flattenedExps ); + if ( exp.getType().getStructKind() != StructKind.SEMI ) { + flattenProjection( shuttle, exp, fieldName, flattenedExps ); + } } } diff --git a/core/src/main/java/org/polypheny/db/algebra/core/AlgFactories.java b/core/src/main/java/org/polypheny/db/algebra/core/AlgFactories.java index 3d2f35304a..f09ee96bbb 100644 --- a/core/src/main/java/org/polypheny/db/algebra/core/AlgFactories.java +++ b/core/src/main/java/org/polypheny/db/algebra/core/AlgFactories.java @@ -538,8 +538,7 @@ public AlgNode createDocuments( AlgDataType rowType ) { return LogicalDocumentValues.create( cluster, - ImmutableList.copyOf( tuples ), - rowType ); + ImmutableList.copyOf( tuples ) ); } } diff --git a/core/src/main/java/org/polypheny/db/algebra/core/Project.java b/core/src/main/java/org/polypheny/db/algebra/core/Project.java index 023dc16dca..a1043a5893 100644 --- a/core/src/main/java/org/polypheny/db/algebra/core/Project.java +++ b/core/src/main/java/org/polypheny/db/algebra/core/Project.java @@ -59,6 +59,7 @@ import org.polypheny.db.rex.RexNode; import org.polypheny.db.rex.RexShuttle; import org.polypheny.db.rex.RexUtil; +import org.polypheny.db.schema.ModelTrait; import org.polypheny.db.util.Litmus; import org.polypheny.db.util.Pair; import org.polypheny.db.util.Permutation; @@ -152,6 +153,9 @@ public final List> getNamedProjects() { @Override public boolean isValid( Litmus litmus, Context context ) { + if ( !traitSet.contains( ModelTrait.RELATIONAL ) ) { + return true; // for non-structured we have no guarantees + } if ( !super.isValid( litmus, context ) ) { return litmus.fail( null ); } diff --git a/core/src/main/java/org/polypheny/db/algebra/core/document/DocumentScan.java b/core/src/main/java/org/polypheny/db/algebra/core/document/DocumentScan.java index b7560fdb88..fc3f58b738 100644 --- a/core/src/main/java/org/polypheny/db/algebra/core/document/DocumentScan.java +++ b/core/src/main/java/org/polypheny/db/algebra/core/document/DocumentScan.java @@ -23,6 +23,7 @@ import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.algebra.type.AlgDataTypeFieldImpl; +import org.polypheny.db.algebra.type.AlgDocumentType; import org.polypheny.db.algebra.type.AlgRecordType; import org.polypheny.db.catalog.Catalog.NamespaceType; import org.polypheny.db.plan.AlgOptCluster; @@ -48,7 +49,7 @@ public DocumentScan( AlgOptCluster cluster, AlgTraitSet traitSet, AlgOptTable co AlgDataType docType = cluster.getTypeFactory().createPolyType( PolyType.DOCUMENT ); // todo dl: change after RowType refactor if ( this.collection.getTable().getSchemaType() == NamespaceType.DOCUMENT ) { - this.rowType = new AlgRecordType( List.of( new AlgDataTypeFieldImpl( "d", 0, docType ) ) ); + this.rowType = new AlgDocumentType(); } else { List list = 
collection.getRowType().getFieldList().stream() .map( f -> new AlgDataTypeFieldImpl( f.getName(), f.getIndex(), cluster.getTypeFactory().createPolyType( PolyType.ANY ) ) ) diff --git a/core/src/main/java/org/polypheny/db/algebra/core/document/DocumentValues.java b/core/src/main/java/org/polypheny/db/algebra/core/document/DocumentValues.java index fe03b40927..bdd7381163 100644 --- a/core/src/main/java/org/polypheny/db/algebra/core/document/DocumentValues.java +++ b/core/src/main/java/org/polypheny/db/algebra/core/document/DocumentValues.java @@ -29,9 +29,7 @@ import org.polypheny.db.algebra.AbstractAlgNode; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.logical.relational.LogicalValues; -import org.polypheny.db.algebra.type.AlgDataType; -import org.polypheny.db.algebra.type.AlgDataTypeFieldImpl; -import org.polypheny.db.algebra.type.AlgRecordType; +import org.polypheny.db.algebra.type.AlgDocumentType; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgOptTable; import org.polypheny.db.plan.AlgTraitSet; @@ -39,7 +37,6 @@ import org.polypheny.db.rex.RexBuilder; import org.polypheny.db.rex.RexLiteral; import org.polypheny.db.schema.ModelTrait; -import org.polypheny.db.type.PolyType; public abstract class DocumentValues extends AbstractAlgNode implements DocumentAlg { @@ -52,9 +49,9 @@ public abstract class DocumentValues extends AbstractAlgNode implements Document * Creates a {@link DocumentValues}. * {@link org.polypheny.db.schema.ModelTrait#DOCUMENT} node, which contains values. */ - public DocumentValues( AlgOptCluster cluster, AlgTraitSet traitSet, AlgDataType rowType, ImmutableList documentTuples ) { + public DocumentValues( AlgOptCluster cluster, AlgTraitSet traitSet, ImmutableList documentTuples ) { super( cluster, traitSet ); - this.rowType = rowType; + this.rowType = new AlgDocumentType(); this.documentTuples = validate( documentTuples ); } @@ -129,11 +126,7 @@ public LogicalValues getRelationalEquivalent() { AlgTraitSet out = traitSet.replace( ModelTrait.RELATIONAL ); AlgOptCluster cluster = AlgOptCluster.create( getCluster().getPlanner(), getCluster().getRexBuilder() ); - AlgRecordType rowType = new AlgRecordType( List.of( - new AlgDataTypeFieldImpl( "_id_", 0, cluster.getTypeFactory().createPolyType( PolyType.VARCHAR, 24 ) ), - new AlgDataTypeFieldImpl( "_data_", 1, cluster.getTypeFactory().createPolyType( PolyType.VARCHAR, 2024 ) ) ) ); - - return new LogicalValues( cluster, out, rowType, relationalize( documentTuples, cluster.getRexBuilder() ) ); + return new LogicalValues( cluster, out, ((AlgDocumentType) rowType).asRelational(), relationalize( documentTuples, cluster.getRexBuilder() ) ); } diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/document/LogicalDocumentValues.java b/core/src/main/java/org/polypheny/db/algebra/logical/document/LogicalDocumentValues.java index bb5bf83111..524d4cbebe 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/document/LogicalDocumentValues.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/document/LogicalDocumentValues.java @@ -30,23 +30,17 @@ import org.polypheny.db.algebra.logical.relational.LogicalValues; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeField; -import org.polypheny.db.algebra.type.AlgDataTypeFieldImpl; -import org.polypheny.db.algebra.type.AlgDataTypeSystem; -import org.polypheny.db.algebra.type.AlgRecordType; import org.polypheny.db.catalog.Catalog.NamespaceType; import 
org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.plan.Convention; import org.polypheny.db.rex.RexLiteral; import org.polypheny.db.type.PolyType; -import org.polypheny.db.type.PolyTypeFactoryImpl; import org.polypheny.db.util.BsonUtil; public class LogicalDocumentValues extends DocumentValues implements RelationalTransformable { - private final static PolyTypeFactoryImpl typeFactory = new PolyTypeFactoryImpl( AlgDataTypeSystem.DEFAULT ); - /** * Java representation of multiple documents, which can be retrieved in the original BSON format form @@ -72,32 +66,22 @@ public class LogicalDocumentValues extends DocumentValues implements RelationalT * * * @param cluster the cluster, which holds the information regarding the ongoing operation - * @param defaultRowType, substitution rowType, which is "_id", "_data" and possible fixed columns if they exist * @param traitSet the used traitSet * @param tuples the documents in their native BSON format */ - public LogicalDocumentValues( AlgOptCluster cluster, AlgDataType defaultRowType, AlgTraitSet traitSet, ImmutableList tuples ) { - super( cluster, traitSet, defaultRowType, tuples ); - } - - - public static AlgNode create( AlgOptCluster cluster, ImmutableList values ) { - List fields = new ArrayList<>(); - fields.add( new AlgDataTypeFieldImpl( "d", 0, typeFactory.createPolyType( PolyType.DOCUMENT ) ) );//typeFactory.createMapType( typeFactory.createPolyType( PolyType.VARCHAR, 2024 ), typeFactory.createPolyType( PolyType.ANY ) ) ) ); - AlgDataType defaultRowType = new AlgRecordType( fields ); - - return create( cluster, values, defaultRowType ); + public LogicalDocumentValues( AlgOptCluster cluster, AlgTraitSet traitSet, ImmutableList tuples ) { + super( cluster, traitSet, tuples ); } - public static AlgNode create( AlgOptCluster cluster, ImmutableList tuples, AlgDataType defaultRowType ) { + public static AlgNode create( AlgOptCluster cluster, ImmutableList tuples ) { final AlgTraitSet traitSet = cluster.traitSetOf( Convention.NONE ); - return new LogicalDocumentValues( cluster, defaultRowType, traitSet, tuples ); + return new LogicalDocumentValues( cluster, traitSet, tuples ); } public static AlgNode create( LogicalValues input ) { - return create( input.getCluster(), bsonify( input.getTuples(), input.getRowType() ), input.getRowType() ); + return create( input.getCluster(), bsonify( input.getTuples(), input.getRowType() ) ); } @@ -149,7 +133,7 @@ public static LogicalDocumentValues createOneRow( AlgOptCluster cluster ) { .add( "ZERO", null, PolyType.INTEGER ) .nullable( false ) .build(); - return new LogicalDocumentValues( cluster, rowType, cluster.traitSet(), ImmutableList.builder().build() ); + return new LogicalDocumentValues( cluster, cluster.traitSet(), ImmutableList.builder().build() ); } @@ -163,7 +147,7 @@ public NamespaceType getModel() { public AlgNode copy( AlgTraitSet traitSet, List inputs ) { assert traitSet.containsIfApplicable( Convention.NONE ); assert inputs.isEmpty(); - return new LogicalDocumentValues( getCluster(), rowType, traitSet, documentTuples ); + return new LogicalDocumentValues( getCluster(), traitSet, documentTuples ); } diff --git a/core/src/main/java/org/polypheny/db/algebra/type/AlgDataTypeFactory.java b/core/src/main/java/org/polypheny/db/algebra/type/AlgDataTypeFactory.java index 39556b6cfa..7021ee70ab 100644 --- a/core/src/main/java/org/polypheny/db/algebra/type/AlgDataTypeFactory.java +++ b/core/src/main/java/org/polypheny/db/algebra/type/AlgDataTypeFactory.java 
@@ -41,6 +41,7 @@ import java.util.Objects; import org.polypheny.db.nodes.IntervalQualifier; import org.polypheny.db.type.PolyType; +import org.polypheny.db.type.PolyTypeFactoryImpl; import org.polypheny.db.util.Collation; import org.polypheny.db.util.Glossary; import org.polypheny.db.util.ValidatorUtil; @@ -57,6 +58,8 @@ */ public interface AlgDataTypeFactory { + AlgDataTypeFactory DEFAULT = new PolyTypeFactoryImpl( AlgDataTypeSystem.DEFAULT ); + /** * Returns the type system. * diff --git a/core/src/main/java/org/polypheny/db/algebra/type/AlgDocumentType.java b/core/src/main/java/org/polypheny/db/algebra/type/AlgDocumentType.java new file mode 100644 index 0000000000..7504869de8 --- /dev/null +++ b/core/src/main/java/org/polypheny/db/algebra/type/AlgDocumentType.java @@ -0,0 +1,231 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.algebra.type; + +import com.google.common.collect.ImmutableList; +import java.io.Serializable; +import java.nio.charset.Charset; +import java.util.List; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import lombok.Getter; +import lombok.Setter; +import org.polypheny.db.nodes.IntervalQualifier; +import org.polypheny.db.type.PolyType; +import org.polypheny.db.util.Collation; + +public class AlgDocumentType implements Serializable, AlgDataType, AlgDataTypeFamily, AlgDataTypeField { + + @Getter + private final StructKind structKind; + private final ImmutableList fixedFields; + + @Getter + private final String name; + + @Getter + @Setter + private String physicalName = null; + + + public AlgDocumentType( @Nullable String name, @Nonnull List fixedFields ) { + this.name = name; + this.structKind = fixedFields.isEmpty() ? 
StructKind.NONE : StructKind.SEMI; + assert fixedFields != null; + this.fixedFields = ImmutableList.copyOf( fixedFields ); + } + + + public AlgDocumentType() { + this( null, List.of( new AlgDocumentType( "_id_", List.of() ) ) ); + } + + + public AlgDataType asRelational() { + return new AlgRecordType( List.of( new AlgDataTypeFieldImpl( "_data_", 1, AlgDataTypeFactory.DEFAULT.createPolyType( PolyType.VARCHAR, 2024 ) ) ) ); + } + + + @Override + public boolean isStruct() { + return false; + } + + + @Override + public List getFieldList() { + return List.of( this ); + } + + + @Override + public List getFieldNames() { + if ( name == null ) { + return List.of( "$d" ); + } + return List.of( name ); + } + + + @Override + public int getFieldCount() { + return 1; + } + + + @Override + public AlgDataTypeField getField( String fieldName, boolean caseSensitive, boolean elideRecord ) { + throw new RuntimeException( "getField on DocumentType" ); + + } + + + @Override + public boolean isNullable() { + return false; + } + + + @Override + public AlgDataType getComponentType() { + return null; + } + + + @Override + public AlgDataType getKeyType() { + return null; + } + + + @Override + public AlgDataType getValueType() { + return null; + } + + + @Override + public Charset getCharset() { + return null; + } + + + @Override + public Collation getCollation() { + return null; + } + + + @Override + public IntervalQualifier getIntervalQualifier() { + return null; + } + + + @Override + public int getPrecision() { + return 0; + } + + + @Override + public int getRawPrecision() { + return 0; + } + + + @Override + public int getScale() { + return 0; + } + + + @Override + public PolyType getPolyType() { + return null; + } + + + @Override + public String getFullTypeString() { + return null; + } + + + @Override + public AlgDataTypeFamily getFamily() { + return null; + } + + + @Override + public AlgDataTypePrecedenceList getPrecedenceList() { + return null; + } + + + @Override + public AlgDataTypeComparability getComparability() { + return AlgDataTypeComparability.ALL; + } + + + @Override + public boolean isDynamicStruct() { + return false; + } + + + @Override + public int getIndex() { + return 0; + } + + + @Override + public AlgDataType getType() { + return this; + } + + + @Override + public boolean isDynamicStar() { + return false; + } + + + @Override + public String getKey() { + if ( name == null ) { + return "$d"; + } + return name; + } + + + @Override + public AlgDataType getValue() { + return this; + } + + + @Override + public AlgDataType setValue( AlgDataType value ) { + throw new RuntimeException( "Error while setting field on AlgDocumentType" ); + } + +} diff --git a/core/src/main/java/org/polypheny/db/algebra/type/StructKind.java b/core/src/main/java/org/polypheny/db/algebra/type/StructKind.java index eef1cbe2c6..bd5071fae1 100644 --- a/core/src/main/java/org/polypheny/db/algebra/type/StructKind.java +++ b/core/src/main/java/org/polypheny/db/algebra/type/StructKind.java @@ -66,6 +66,11 @@ public enum StructKind { */ NONE, + /** + * This type might have some defined structure but also undefined parts. + */ + SEMI, + /** * This is a traditional structured type, where each field must be referenced explicitly. 
* diff --git a/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java b/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java index 87a5bf4ec8..83c0f00abd 100644 --- a/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java +++ b/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java @@ -34,17 +34,59 @@ package org.polypheny.db.tools; +import static org.polypheny.db.util.Static.RESOURCE; + import com.google.common.base.Preconditions; -import com.google.common.collect.*; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; +import com.google.common.collect.Iterables; +import com.google.common.collect.Lists; +import java.math.BigDecimal; +import java.util.AbstractList; +import java.util.ArrayDeque; +import java.util.ArrayList; +import java.util.Collections; +import java.util.Deque; +import java.util.HashSet; +import java.util.LinkedList; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Objects; +import java.util.Set; +import java.util.SortedSet; +import java.util.TreeSet; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; import lombok.Getter; import org.apache.calcite.linq4j.Ord; import org.apache.calcite.linq4j.function.Experimental; import org.bson.BsonValue; -import org.polypheny.db.algebra.*; +import org.polypheny.db.algebra.AlgCollation; +import org.polypheny.db.algebra.AlgCollations; +import org.polypheny.db.algebra.AlgDistribution; +import org.polypheny.db.algebra.AlgFieldCollation; +import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.constant.Kind; import org.polypheny.db.algebra.constant.SemiJoinType; -import org.polypheny.db.algebra.core.*; +import org.polypheny.db.algebra.core.Aggregate; +import org.polypheny.db.algebra.core.AggregateCall; +import org.polypheny.db.algebra.core.AlgFactories; import org.polypheny.db.algebra.core.AlgFactories.ScanFactory; +import org.polypheny.db.algebra.core.CorrelationId; +import org.polypheny.db.algebra.core.Filter; +import org.polypheny.db.algebra.core.Intersect; +import org.polypheny.db.algebra.core.Join; +import org.polypheny.db.algebra.core.JoinAlgType; +import org.polypheny.db.algebra.core.Match; +import org.polypheny.db.algebra.core.Minus; +import org.polypheny.db.algebra.core.Project; +import org.polypheny.db.algebra.core.Scan; +import org.polypheny.db.algebra.core.SemiJoin; +import org.polypheny.db.algebra.core.Sort; +import org.polypheny.db.algebra.core.Union; +import org.polypheny.db.algebra.core.Values; import org.polypheny.db.algebra.fun.AggFunction; import org.polypheny.db.algebra.logical.document.LogicalDocumentProject; import org.polypheny.db.algebra.logical.document.LogicalDocumentScan; @@ -60,11 +102,27 @@ import org.polypheny.db.algebra.type.AlgDataTypeFactory; import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.algebra.type.AlgDataTypeFieldImpl; +import org.polypheny.db.algebra.type.StructKind; import org.polypheny.db.languages.OperatorRegistry; import org.polypheny.db.languages.QueryLanguage; import org.polypheny.db.nodes.Operator; -import org.polypheny.db.plan.*; -import org.polypheny.db.rex.*; +import org.polypheny.db.plan.AlgOptCluster; +import org.polypheny.db.plan.AlgOptPredicateList; +import org.polypheny.db.plan.AlgOptSchema; +import org.polypheny.db.plan.AlgOptTable; +import org.polypheny.db.plan.AlgOptUtil; +import org.polypheny.db.plan.Context; +import org.polypheny.db.plan.Contexts; +import 
org.polypheny.db.rex.RexBuilder; +import org.polypheny.db.rex.RexCall; +import org.polypheny.db.rex.RexCorrelVariable; +import org.polypheny.db.rex.RexExecutor; +import org.polypheny.db.rex.RexInputRef; +import org.polypheny.db.rex.RexLiteral; +import org.polypheny.db.rex.RexNode; +import org.polypheny.db.rex.RexShuttle; +import org.polypheny.db.rex.RexSimplify; +import org.polypheny.db.rex.RexUtil; import org.polypheny.db.runtime.Hook; import org.polypheny.db.runtime.PolyCollections.PolyDictionary; import org.polypheny.db.schema.ModelTrait; @@ -72,17 +130,21 @@ import org.polypheny.db.schema.graph.PolyNode; import org.polypheny.db.transaction.Statement; import org.polypheny.db.type.PolyType; -import org.polypheny.db.util.*; +import org.polypheny.db.util.DateString; +import org.polypheny.db.util.Holder; +import org.polypheny.db.util.ImmutableBitSet; +import org.polypheny.db.util.ImmutableIntList; +import org.polypheny.db.util.ImmutableNullableList; +import org.polypheny.db.util.Litmus; +import org.polypheny.db.util.NlsString; +import org.polypheny.db.util.Pair; +import org.polypheny.db.util.TimeString; +import org.polypheny.db.util.TimestampString; +import org.polypheny.db.util.Util; +import org.polypheny.db.util.ValidatorUtil; import org.polypheny.db.util.mapping.Mapping; import org.polypheny.db.util.mapping.Mappings; -import javax.annotation.Nonnull; -import java.math.BigDecimal; -import java.util.*; -import java.util.stream.Collectors; - -import static org.polypheny.db.util.Static.RESOURCE; - /** * Builder for relational expressions. @@ -300,7 +362,7 @@ public AlgBuilder push( AlgNode node ) { */ public void replaceTop( AlgNode node ) { final Frame frame = stack.pop(); - stack.push( new Frame( node, frame.fields ) ); + stack.push( new Frame( node, frame.structured, null ) ); } @@ -312,7 +374,7 @@ public void replaceTop( AlgNode node, int amount ) { for ( int i = 0; i < amount - 1; i++ ) { stack.pop(); } - stack.push( new Frame( node, frame.fields ) ); + stack.push( new Frame( node, frame.structured, null ) ); } @@ -455,7 +517,7 @@ public RexInputRef field( String fieldName ) { */ public RexInputRef field( int inputCount, int inputOrdinal, String fieldName ) { final Frame frame = peek_( inputCount, inputOrdinal ); - final List fieldNames = Pair.left( frame.fields() ); + final List fieldNames = Pair.left( frame.relFields() ); int i = fieldNames.indexOf( fieldName ); if ( i >= 0 ) { return field( inputCount, inputOrdinal, i ); @@ -504,7 +566,7 @@ private RexNode field( int inputCount, int inputOrdinal, int fieldOrdinal, boole final AlgDataTypeField field = rowType.getFieldList().get( fieldOrdinal ); final int offset = inputOffset( inputCount, inputOrdinal ); final RexInputRef ref = cluster.getRexBuilder().makeInputRef( field.getType(), offset + fieldOrdinal ); - final AlgDataTypeField aliasField = frame.fields().get( fieldOrdinal ); + final AlgDataTypeField aliasField = frame.relFields().get( fieldOrdinal ); if ( !alias || field.getName().equals( aliasField.getName() ) ) { return ref; } else { @@ -531,7 +593,7 @@ public RexNode field( int inputCount, String alias, String fieldName ) { final List fields = new ArrayList<>(); for ( int inputOrdinal = 0; inputOrdinal < inputCount; ++inputOrdinal ) { final Frame frame = peek_( inputOrdinal ); - for ( Ord p : Ord.zip( frame.fields ) ) { + for ( Ord p : Ord.zip( frame.structured ) ) { // If alias and field name match, reference that field. 
if ( p.e.left.contains( alias ) && p.e.right.getName().equals( fieldName ) ) { return field( inputCount, inputCount - 1 - inputOrdinal, p.i ); @@ -1362,7 +1424,7 @@ public AlgBuilder filter( Iterable predicates ) { if ( !simplifiedPredicates.isAlwaysTrue() ) { final Frame frame = stack.pop(); final AlgNode filter = filterFactory.createFilter( frame.alg, simplifiedPredicates ); - stack.push( new Frame( filter, frame.fields ) ); + stack.push( new Frame( filter, frame.structured, null ) ); } return this; } @@ -1464,21 +1526,21 @@ public AlgBuilder project( Iterable nodes, Iterable f // Carefully build a list of fields, so that table aliases from the input can be seen for fields that are based on a RexInputRef. final Frame frame1 = stack.pop(); - final List fields = new ArrayList<>(); + final List relFields = new ArrayList<>(); for ( AlgDataTypeField f : project.getInput().getRowType().getFieldList() ) { - fields.add( new Field( ImmutableSet.of(), f ) ); + relFields.add( new RelField( ImmutableSet.of(), f ) ); } - for ( Pair pair : Pair.zip( project.getProjects(), frame1.fields ) ) { + for ( Pair pair : Pair.zip( project.getProjects(), frame1.structured ) ) { switch ( pair.left.getKind() ) { case INPUT_REF: final int i = ((RexInputRef) pair.left).getIndex(); - final Field field = fields.get( i ); + final RelField relField = relFields.get( i ); final ImmutableSet aliases = pair.right.left; - fields.set( i, new Field( aliases, field.right ) ); + relFields.set( i, new RelField( aliases, relField.right ) ); break; } } - stack.push( new Frame( project.getInput(), ImmutableList.copyOf( fields ) ) ); + stack.push( new Frame( project.getInput(), ImmutableList.copyOf( relFields ) ) ); return project( newNodes, fieldNameList, force ); } @@ -1505,7 +1567,7 @@ public AlgBuilder project( Iterable nodes, Iterable f for ( int i = 0; i < fieldNameList.size(); ++i ) { final RexNode node = nodeList.get( i ); String name = fieldNameList.get( i ); - Field field; + RelField relField; if ( name == null || uniqueNameList.contains( name ) ) { int j = 0; if ( name == null ) { @@ -1521,14 +1583,14 @@ public AlgBuilder project( Iterable nodes, Iterable f case INPUT_REF: // preserve alg aliases for INPUT_REF fields final int index = ((RexInputRef) node).getIndex(); - field = new Field( frame.fields.get( index ).left, fieldType ); + relField = new RelField( frame.structured.get( index ).left, fieldType ); break; default: - field = new Field( ImmutableSet.of(), fieldType ); + relField = new RelField( ImmutableSet.of(), fieldType ); break; } uniqueNameList.add( name ); - fields.add( field ); + fields.add( relField ); } if ( !force && RexUtil.isIdentity( nodeList, inputRowType ) ) { if ( fieldNameList.equals( inputRowType.getFieldNames() ) ) { @@ -1600,7 +1662,7 @@ public AlgBuilder projectNamed( Iterable nodes, Iterable exprList, RexNode expr, int i ) { switch ( expr.getKind() ) { case INPUT_REF: final RexInputRef ref = (RexInputRef) expr; - return stack.peek().fields.get( ref.getIndex() ).getValue().getName(); + return stack.peek().structured.get( ref.getIndex() ).getValue().getName(); case CAST: return inferAlias( exprList, ((RexCall) expr).getOperands().get( 0 ), -1 ); case AS: @@ -1808,12 +1870,12 @@ public AlgBuilder aggregate( GroupKey groupKey, Iterable aggCalls ) { final Kind kind = node.getKind(); switch ( kind ) { case INPUT_REF: - fields.add( frame.fields.get( ((RexInputRef) node).getIndex() ) ); + fields.add( frame.unstructured.get( ((RexInputRef) node).getIndex() ) ); break; default: String name = 
aggregateFields.get( i ).getName(); AlgDataTypeField fieldType = new AlgDataTypeFieldImpl( name, i, node.getType() ); - fields.add( new Field( ImmutableSet.of(), fieldType ) ); + fields.add( new RelField( ImmutableSet.of(), fieldType ) ); break; } i++; @@ -1823,7 +1885,7 @@ public AlgBuilder aggregate( GroupKey groupKey, Iterable aggCalls ) { for ( int j = 0; j < groupSet.cardinality(); ++j ) { final AlgDataTypeField field = aggregateFields.get( i ); final AlgDataTypeField fieldType = new AlgDataTypeFieldImpl( field.getName(), i, field.getType() ); - fields.add( new Field( ImmutableSet.of(), fieldType ) ); + fields.add( new RelField( ImmutableSet.of(), fieldType ) ); i++; } } @@ -1831,7 +1893,7 @@ public AlgBuilder aggregate( GroupKey groupKey, Iterable aggCalls ) { for ( int j = 0; j < aggregateCalls.size(); ++j ) { final AggregateCall call = aggregateCalls.get( j ); final AlgDataTypeField fieldType = new AlgDataTypeFieldImpl( aggregateFields.get( i + j ).getName(), i + j, call.getType() ); - fields.add( new Field( ImmutableSet.of(), fieldType ) ); + fields.add( new RelField( ImmutableSet.of(), fieldType ) ); } stack.push( new Frame( aggregate, fields.build() ) ); return this; @@ -1978,8 +2040,8 @@ public AlgBuilder join( JoinAlgType joinType, RexNode condition, Set fields = ImmutableList.builder(); - fields.addAll( left.fields ); - fields.addAll( right.fields ); + fields.addAll( left.structured ); + fields.addAll( right.structured ); stack.push( new Frame( join, fields.build() ) ); filter( postCondition ); return this; @@ -2032,8 +2094,8 @@ public AlgBuilder semiJoin( RexNode... conditions ) { */ public AlgBuilder as( final String alias ) { final Frame pair = stack.pop(); - List newFields = Util.transform( pair.fields, field -> field.addAlias( alias ) ); - stack.push( new Frame( pair.alg, ImmutableList.copyOf( newFields ) ) ); + List newRelFields = Util.transform( pair.structured, relField -> relField.addAlias( alias ) ); + stack.push( new Frame( pair.alg, ImmutableList.copyOf( newRelFields ) ) ); return this; } @@ -2802,27 +2864,56 @@ List registerExpressions( Iterable nodes ) { private static class Frame { final AlgNode alg; - final ImmutableList fields; + final ImmutableList structured; + final ImmutableList unstructured; private Frame( AlgNode alg, ImmutableList fields ) { + + List structured = new ArrayList<>(); + List unstructured = new ArrayList<>(); + for ( Field field : fields ) { + if ( field.isStructured() ) { + structured.add( (RelField) field ); + } else { + unstructured.add( (DocField) field ); + } + } + this.alg = alg; + this.structured = ImmutableList.copyOf( structured ); + this.unstructured = ImmutableList.copyOf( unstructured ); + } + + + private Frame( AlgNode alg, ImmutableList structured, ImmutableList unstructured ) { this.alg = alg; - this.fields = fields; + this.structured = structured; + this.unstructured = unstructured; } private Frame( AlgNode alg ) { String tableAlias = deriveAlias( alg ); - ImmutableList.Builder builder = ImmutableList.builder(); + ImmutableSet aliases = tableAlias == null ? 
ImmutableSet.of() : ImmutableSet.of( tableAlias ); + if ( alg.getRowType().getStructKind() == StructKind.SEMI ) { + ImmutableList.Builder builder = ImmutableList.builder(); + this.alg = alg; + builder.add( new DocField( aliases ) ); + this.structured = null; + this.unstructured = builder.build(); + return; + } + ImmutableList.Builder builder = ImmutableList.builder(); for ( AlgDataTypeField field : alg.getRowType().getFieldList() ) { - builder.add( new Field( aliases, field ) ); + builder.add( new RelField( aliases, field ) ); } this.alg = alg; - this.fields = builder.build(); + this.structured = builder.build(); + this.unstructured = null; } @@ -2837,29 +2928,61 @@ private static String deriveAlias( AlgNode alg ) { } - List fields() { - return Pair.right( fields ); + List relFields() { + return Pair.right( structured ); } + + } + + + private interface Field { + + boolean isStructured(); + } /** * A field that belongs to a stack {@link Frame}. */ - private static class Field extends Pair, AlgDataTypeField> { + private static class RelField extends Pair, AlgDataTypeField> implements Field { - Field( ImmutableSet left, AlgDataTypeField right ) { + RelField( ImmutableSet left, AlgDataTypeField right ) { super( left, right ); } - Field addAlias( String alias ) { + RelField addAlias( String alias ) { if ( left.contains( alias ) ) { return this; } final ImmutableSet aliasList = ImmutableSet.builder().addAll( left ).add( alias ).build(); - return new Field( aliasList, right ); + return new RelField( aliasList, right ); + } + + + @Override + public boolean isStructured() { + return true; + } + + } + + + private static class DocField implements Field { + + private final ImmutableSet aliases; + + + DocField( ImmutableSet aliases ) { + this.aliases = aliases; + } + + + @Override + public boolean isStructured() { + return false; } } diff --git a/core/src/main/java/org/polypheny/db/type/PolyType.java b/core/src/main/java/org/polypheny/db/type/PolyType.java index c6c53d0aa3..9e9c49bad2 100644 --- a/core/src/main/java/org/polypheny/db/type/PolyType.java +++ b/core/src/main/java/org/polypheny/db/type/PolyType.java @@ -33,15 +33,22 @@ package org.polypheny.db.type; -import com.google.common.collect.*; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; +import com.google.common.collect.Iterables; +import com.google.common.collect.Sets; import com.google.gson.JsonObject; import com.google.gson.JsonSerializer; -import org.apache.calcite.avatica.util.TimeUnit; -import org.polypheny.db.util.Util; - import java.math.BigDecimal; import java.sql.Types; -import java.util.*; +import java.util.Arrays; +import java.util.Calendar; +import java.util.List; +import java.util.Map; +import java.util.Set; +import org.apache.calcite.avatica.util.TimeUnit; +import org.polypheny.db.util.Util; /** @@ -280,7 +287,7 @@ public enum PolyType { DOCUMENT( PrecScale.NO_NO, false, - Types.OTHER, + Types.STRUCT, PolyTypeFamily.DOCUMENT ), GRAPH( diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java b/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java index 6325554016..d66fdb2ef1 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java @@ -149,8 +149,8 @@ protected static List addDocumentNodes( AlgDataType rowType, RexBuilder OperatorName.MQL_QUERY_VALUE ), List.of( RexInputRef.of( 0, rowType ), - 
rexBuilder.makeArray( rexBuilder.getTypeFactory().createArrayType( - rexBuilder.getTypeFactory().createPolyType( PolyType.VARCHAR, 255 ), 1 ), + rexBuilder.makeArray( + rexBuilder.getTypeFactory().createArrayType( rexBuilder.getTypeFactory().createPolyType( PolyType.VARCHAR, 255 ), 1 ), List.of( rexBuilder.makeLiteral( "_id" ) ) ) ) ), (forceVarchar ? rexBuilder.makeCall( data, @@ -604,8 +604,9 @@ private RoutedAlgBuilder handleTransformerDocScan( DocumentScan alg, Statement s private RoutedAlgBuilder handleDocumentOnRelational( DocumentScan node, Integer adapterId, Statement statement, RoutedAlgBuilder builder ) { List columns = catalog.getColumns( node.getCollection().getTable().getTableId() ); AlgTraitSet out = node.getTraitSet().replace( ModelTrait.RELATIONAL ); - builder.scan( getSubstitutionTable( statement, node.getCollection().getTable().getTableId(), columns.get( 0 ).id, adapterId ) ); - builder.project( node.getCluster().getRexBuilder().makeInputRef( node.getRowType(), 1 ) ); + PreparingTable subTable = getSubstitutionTable( statement, node.getCollection().getTable().getTableId(), columns.get( 0 ).id, adapterId ); + builder.scan( subTable ); + builder.project( node.getCluster().getRexBuilder().makeInputRef( subTable.getRowType().getFieldList().get( 1 ).getType(), 1 ) ); builder.push( new LogicalTransformer( builder.getCluster(), List.of( builder.build() ), null, out.replace( ModelTrait.DOCUMENT ), ModelTrait.RELATIONAL, ModelTrait.DOCUMENT, node.getRowType(), false ) ); return builder; } diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java b/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java index ae1033d17b..c3e1ce556c 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java @@ -921,7 +921,7 @@ private AlgNode createDocumentTransform( AlgNode query, RexBuilder rexBuilder ) names.add( "_data_" ); List updates = addDocumentNodes( query.getRowType(), rexBuilder, false ); - return LogicalProject.create( query, updates, names ); + return LogicalDocumentProject.create( query, updates, names ); } diff --git a/dbms/src/test/java/org/polypheny/db/mql/DmlTest.java b/dbms/src/test/java/org/polypheny/db/mql/DmlTest.java index caee389141..b82b1d3841 100644 --- a/dbms/src/test/java/org/polypheny/db/mql/DmlTest.java +++ b/dbms/src/test/java/org/polypheny/db/mql/DmlTest.java @@ -41,6 +41,20 @@ @Category({ AdapterTestSuite.class, CassandraExcluded.class }) public class DmlTest extends MqlTestTemplate { + @Test + public void emptyTest() { + String name = "test"; + execute( "db.createCollection(\"" + name + "\")" ); + + Result result = find( "{}", "{}" ); + + assertTrue( + MongoConnection.checkResultSet( + result, + ImmutableList.of(), true ) ); + + } + @Test public void insertTest() { diff --git a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoRules.java b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoRules.java index 88fb07b075..f47c953552 100644 --- a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoRules.java +++ b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoRules.java @@ -35,6 +35,15 @@ import com.google.common.collect.ImmutableList; import com.mongodb.client.gridfs.GridFSBucket; +import java.util.AbstractList; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; 
+import java.util.List; +import java.util.Map; +import java.util.function.Predicate; +import java.util.stream.Collectors; import lombok.Getter; import org.bson.BsonArray; import org.bson.BsonDocument; @@ -46,10 +55,20 @@ import org.polypheny.db.adapter.mongodb.MongoAlg.Implementor; import org.polypheny.db.adapter.mongodb.MongoPlugin.MongoStore; import org.polypheny.db.adapter.mongodb.bson.BsonDynamic; -import org.polypheny.db.algebra.*; +import org.polypheny.db.algebra.AbstractAlgNode; +import org.polypheny.db.algebra.AlgCollations; +import org.polypheny.db.algebra.AlgNode; +import org.polypheny.db.algebra.AlgShuttleImpl; +import org.polypheny.db.algebra.InvalidAlgException; +import org.polypheny.db.algebra.SingleAlg; import org.polypheny.db.algebra.constant.Kind; import org.polypheny.db.algebra.convert.ConverterRule; -import org.polypheny.db.algebra.core.*; +import org.polypheny.db.algebra.core.AggregateCall; +import org.polypheny.db.algebra.core.AlgFactories; +import org.polypheny.db.algebra.core.Modify; +import org.polypheny.db.algebra.core.Scan; +import org.polypheny.db.algebra.core.Sort; +import org.polypheny.db.algebra.core.Values; import org.polypheny.db.algebra.core.document.DocumentModify; import org.polypheny.db.algebra.core.document.DocumentSort; import org.polypheny.db.algebra.core.document.DocumentValues; @@ -68,24 +87,37 @@ import org.polypheny.db.catalog.entity.CatalogTable; import org.polypheny.db.languages.OperatorRegistry; import org.polypheny.db.nodes.Operator; -import org.polypheny.db.plan.*; +import org.polypheny.db.plan.AlgOptCluster; +import org.polypheny.db.plan.AlgOptCost; +import org.polypheny.db.plan.AlgOptPlanner; +import org.polypheny.db.plan.AlgOptRule; +import org.polypheny.db.plan.AlgOptTable; +import org.polypheny.db.plan.AlgTrait; +import org.polypheny.db.plan.AlgTraitSet; +import org.polypheny.db.plan.Convention; import org.polypheny.db.plan.volcano.AlgSubset; import org.polypheny.db.prepare.Prepare.CatalogReader; -import org.polypheny.db.rex.*; +import org.polypheny.db.rex.RexCall; +import org.polypheny.db.rex.RexDynamicParam; +import org.polypheny.db.rex.RexFieldAccess; +import org.polypheny.db.rex.RexInputRef; +import org.polypheny.db.rex.RexLiteral; +import org.polypheny.db.rex.RexNode; +import org.polypheny.db.rex.RexVisitorImpl; import org.polypheny.db.schema.ModifiableTable; import org.polypheny.db.schema.Table; import org.polypheny.db.schema.document.DocumentRules; import org.polypheny.db.sql.language.fun.SqlDatetimePlusOperator; import org.polypheny.db.sql.language.fun.SqlDatetimeSubtractionOperator; import org.polypheny.db.type.PolyType; -import org.polypheny.db.util.*; +import org.polypheny.db.util.BsonUtil; +import org.polypheny.db.util.Pair; +import org.polypheny.db.util.UnsupportedRexCallVisitor; +import org.polypheny.db.util.Util; +import org.polypheny.db.util.ValidatorUtil; import org.polypheny.db.util.trace.PolyphenyDbTrace; import org.slf4j.Logger; -import java.util.*; -import java.util.function.Predicate; -import java.util.stream.Collectors; - /** * Rules and relational operators for {@link MongoAlg#CONVENTION MONGO} calling convention. 
@@ -808,7 +840,6 @@ public AlgNode convert( AlgNode alg ) {
             DocumentValues documentValues = (DocumentValues) alg;
             return new MongoDocuments(
                     alg.getCluster(),
-                    alg.getRowType(),
                     documentValues.getDocumentTuples(),
                     alg.getTraitSet().replace( out )
             );
@@ -822,8 +853,8 @@ public AlgNode convert( AlgNode alg ) {
     public static class MongoDocuments extends DocumentValues implements MongoAlg {
 
 
-        public MongoDocuments( AlgOptCluster cluster, AlgDataType defaultRowType, ImmutableList<BsonValue> documentTuples, AlgTraitSet traitSet ) {
-            super( cluster, traitSet, defaultRowType, documentTuples );
+        public MongoDocuments( AlgOptCluster cluster, ImmutableList<BsonValue> documentTuples, AlgTraitSet traitSet ) {
+            super( cluster, traitSet, documentTuples );
         }
 
 
diff --git a/webui/src/main/java/org/polypheny/db/webui/crud/LanguageCrud.java b/webui/src/main/java/org/polypheny/db/webui/crud/LanguageCrud.java
index fde6f84be0..7e969738f7 100644
--- a/webui/src/main/java/org/polypheny/db/webui/crud/LanguageCrud.java
+++ b/webui/src/main/java/org/polypheny/db/webui/crud/LanguageCrud.java
@@ -62,6 +62,7 @@
 import org.polypheny.db.transaction.TransactionManager;
 import org.polypheny.db.webui.Crud;
 import org.polypheny.db.webui.models.DbColumn;
+import org.polypheny.db.webui.models.FieldDef;
 import org.polypheny.db.webui.models.Index;
 import org.polypheny.db.webui.models.Placement;
 import org.polypheny.db.webui.models.Placement.DocumentStore;
@@ -178,25 +179,29 @@ public static void attachError( Transaction transaction, List<Result> results, S
 
 
     @NotNull
-    public static Result getResult( QueryLanguage language, Statement statement, QueryRequest request, String query, PolyImplementation result, Transaction transaction, final boolean noLimit ) {
+    public static Result getResult( QueryLanguage language, Statement statement, QueryRequest request, String query, PolyImplementation implementation, Transaction transaction, final boolean noLimit ) {
         Catalog catalog = Catalog.getInstance();
 
-        List<List<Object>> rows = result.getRows( statement, noLimit ? -1 : language == QueryLanguage.from( "cypher" ) ? RuntimeConfig.UI_NODE_AMOUNT.getInteger() : RuntimeConfig.UI_PAGE_SIZE.getInteger() );
+        if ( language == QueryLanguage.from( "mql" ) ) {
+            return getDocResult( statement, request, query, implementation, transaction, noLimit );
+        }
+
+        List<List<Object>> rows = implementation.getRows( statement, noLimit ? -1 : language == QueryLanguage.from( "cypher" ) ? RuntimeConfig.UI_NODE_AMOUNT.getInteger() : RuntimeConfig.UI_PAGE_SIZE.getInteger() );
 
-        boolean hasMoreRows = result.hasMoreRows();
+        boolean hasMoreRows = implementation.hasMoreRows();
 
         CatalogTable catalogTable = null;
         if ( request.tableId != null ) {
             String[] t = request.tableId.split( "\\." );
             try {
-                catalogTable = Catalog.getInstance().getTable( statement.getPrepareContext().getDefaultSchemaName(), t[0], t[1] );
+                catalogTable = catalog.getTable( statement.getPrepareContext().getDefaultSchemaName(), t[0], t[1] );
             } catch ( UnknownTableException | UnknownDatabaseException | UnknownSchemaException e ) {
                 log.error( "Caught exception", e );
             }
         }
 
         ArrayList<DbColumn> header = new ArrayList<>();
-        for ( AlgDataTypeField metaData : result.rowType.getFieldList() ) {
+        for ( AlgDataTypeField metaData : implementation.rowType.getFieldList() ) {
             String columnName = metaData.getName();
 
             String filter = "";
@@ -238,7 +243,7 @@ public static Result getResult( QueryLanguage language, Statement statement, Que
         ArrayList<String[]> data = Crud.computeResultData( rows, header, statement.getTransaction() );
 
         return new Result( header.toArray( new DbColumn[0] ), data.toArray( new String[0][] ) )
-                .setNamespaceType( result.getNamespaceType() )
+                .setNamespaceType( implementation.getNamespaceType() )
                 .setNamespaceName( request.database )
                 .setLanguage( language )
                 .setAffectedRows( data.size() )
@@ -248,6 +253,25 @@ public static Result getResult( QueryLanguage language, Statement statement, Que
     }
 
 
+    private static Result getDocResult( Statement statement, QueryRequest request, String query, PolyImplementation implementation, Transaction transaction, boolean noLimit ) {
+
+        List<List<Object>> data = implementation.getDocRows( statement, noLimit );
+
+        List<FieldDef> header = new ArrayList<>();
+
+        header.add( new FieldDef() );
+
+        return new Result( header.toArray( new FieldDef[0] ), data.toArray( new String[0][] ) )
+                .setNamespaceType( implementation.getNamespaceType() )
+                .setNamespaceName( request.database )
+                .setLanguage( QueryLanguage.from( "mql" ) )
+                .setAffectedRows( data.size() )
+                .setHasMoreRows( implementation.hasMoreRows() )
+                .setXid( transaction.getXid().toString() )
+                .setGeneratedQuery( query );
+    }
+
+
     /**
      * Creates a new document collection
      */
diff --git a/webui/src/main/java/org/polypheny/db/webui/models/DbColumn.java b/webui/src/main/java/org/polypheny/db/webui/models/DbColumn.java
index 4ea0771ab3..7e51354c91 100644
--- a/webui/src/main/java/org/polypheny/db/webui/models/DbColumn.java
+++ b/webui/src/main/java/org/polypheny/db/webui/models/DbColumn.java
@@ -22,7 +22,6 @@
 import com.google.gson.stream.JsonToken;
 import com.google.gson.stream.JsonWriter;
 import java.io.IOException;
-import lombok.Setter;
 import lombok.experimental.Accessors;
 
 
@@ -30,15 +29,7 @@
  * Information about a column of a table for the header of a table in the UI
 */
 @Accessors(chain = true)
-public class DbColumn {
-
-    public String name;
-    @Setter
-    public String physicalName;
-
-    // for both
-    public String dataType; //varchar/int/etc
-    public String collectionsType;
+public class DbColumn extends FieldDef {
 
     // for the Data-Table in the UI
     public SortState sort;
@@ -173,6 +164,12 @@ private DbColumn( JsonReader in ) throws IOException {
     }
 
 
+    @Override
+    public DbColumn setPhysicalName( String physicalName ) {
+        return (DbColumn) super.setPhysicalName( physicalName );
+    }
+
+
     private Integer handleInteger( JsonReader in ) throws IOException {
         if ( in.peek() == JsonToken.NULL ) {
             in.nextNull();
diff --git a/webui/src/main/java/org/polypheny/db/webui/models/FieldDef.java b/webui/src/main/java/org/polypheny/db/webui/models/FieldDef.java
new file mode 100644
index 0000000000..dab03db0f9
--- /dev/null
+++ b/webui/src/main/java/org/polypheny/db/webui/models/FieldDef.java
@@ -0,0 +1,32 @@
+/*
+ * Copyright 2019-2023 The Polypheny Project
+ *
+ * Licensed 
under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.webui.models; + +import lombok.Setter; +import lombok.experimental.Accessors; + +@Accessors(chain = true) +public class FieldDef { + + public String name; + @Setter + public String physicalName; + // for both + public String dataType; //varchar/int/etc + public String collectionsType; + +} diff --git a/webui/src/main/java/org/polypheny/db/webui/models/Result.java b/webui/src/main/java/org/polypheny/db/webui/models/Result.java index 157b14939c..8cd6b38a71 100644 --- a/webui/src/main/java/org/polypheny/db/webui/models/Result.java +++ b/webui/src/main/java/org/polypheny/db/webui/models/Result.java @@ -47,7 +47,7 @@ public class Result { */ @Getter @Setter - private DbColumn[] header; + private FieldDef[] header; /** * The rows containing the fetched data */ @@ -167,7 +167,7 @@ public class Result { * @param header columns of the result * @param data data of the result */ - public Result( final DbColumn[] header, final String[][] data ) { + public Result( final FieldDef[] header, final String[][] data ) { this.header = header; this.data = data; } @@ -487,8 +487,10 @@ private void handleDbColumns( JsonWriter out, Result result ) throws IOException } out.beginArray(); TypeAdapter dbSerializer = DbColumn.getSerializer(); - for ( DbColumn column : result.getHeader() ) { - dbSerializer.write( out, column ); + for ( FieldDef column : result.getHeader() ) { + if ( column instanceof DbColumn ) { + dbSerializer.write( out, (DbColumn) column ); + } } out.endArray(); } From 38a1aebfcf9f688c97fab33ebd8a2b252cb39cc3 Mon Sep 17 00:00:00 2001 From: datomo Date: Sun, 12 Feb 2023 13:58:28 +0100 Subject: [PATCH 002/436] boilerplating for catalog refactor --- plugins/poly-catalog/build.gradle | 75 + plugins/poly-catalog/gradle.properties | 27 + .../polypheny/db/catalog/ModelCatalog.java | 27 + .../org/polypheny/db/catalog/PolyCatalog.java | 1338 +++++++++++++++++ .../catalog/document/CatalogCollection.java | 21 + .../catalog/document/CatalogDocDatabase.java | 21 + .../db/catalog/document/DocumentCatalog.java | 46 + .../db/catalog/graph/CatalogGraph.java | 21 + .../db/catalog/graph/GraphCatalog.java | 45 + .../db/catalog/relational/CatalogColumn.java | 21 + .../db/catalog/relational/CatalogSchema.java | 22 + .../db/catalog/relational/CatalogTable.java | 21 + .../catalog/relational/RelationalCatalog.java | 49 + settings.gradle | 1 + 14 files changed, 1735 insertions(+) create mode 100644 plugins/poly-catalog/build.gradle create mode 100644 plugins/poly-catalog/gradle.properties create mode 100644 plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/ModelCatalog.java create mode 100644 plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java create mode 100644 plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/document/CatalogCollection.java create mode 100644 plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/document/CatalogDocDatabase.java create mode 100644 
plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/document/DocumentCatalog.java create mode 100644 plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/graph/CatalogGraph.java create mode 100644 plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/graph/GraphCatalog.java create mode 100644 plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/relational/CatalogColumn.java create mode 100644 plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/relational/CatalogSchema.java create mode 100644 plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/relational/CatalogTable.java create mode 100644 plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/relational/RelationalCatalog.java diff --git a/plugins/poly-catalog/build.gradle b/plugins/poly-catalog/build.gradle new file mode 100644 index 0000000000..1aa465eb1b --- /dev/null +++ b/plugins/poly-catalog/build.gradle @@ -0,0 +1,75 @@ +group "org.polypheny" + + +dependencies { + compileOnly project(":core") + + + api group: "org.apache.commons", name: "commons-lang3", version: commons_lang3_version // Apache 2.0 + + ////// SLF4J + implementation group: 'org.pf4j', name: 'pf4j', version: pf4jVersion // Apache 2.0 + + // --- Test Compile --- + testImplementation project(path: ":core", configuration: "tests") + testImplementation project(path: ":core") + testImplementation group: "junit", name: "junit", version: junit_version +} + + +sourceSets { + main { + java { + srcDirs = ["src/main/java"] + outputDir = file(project.buildDir.absolutePath + "/classes") + } + resources { + srcDirs = ["src/main/resources"] + } + output.resourcesDir = file(project.buildDir.absolutePath + "/classes") + } + test { + java { + srcDirs = ["src/test/java"] + outputDir = file(project.buildDir.absolutePath + "/test-classes") + } + resources { + srcDirs = ["src/test/resources"] + } + output.resourcesDir = file(project.buildDir.absolutePath + "/test-classes") + } +} + +compileJava { + dependsOn(":core:processResources") +} + +delombok { + dependsOn(":core:processResources") +} + + +/** + * JARs + */ +jar { + manifest { + attributes "Manifest-Version": "1.0" + attributes "Copyright": "The Polypheny Project (polypheny.org)" + attributes "Version": "$project.version" + } +} +java { + withJavadocJar() + withSourcesJar() +} + +licensee { + allow('Apache-2.0') + allow('MIT') + + allowUrl('https://www.eclipse.org/licenses/edl-v10.html') // EDL 1.0 + + // Category B + allow('EPL-1.0') +} diff --git a/plugins/poly-catalog/gradle.properties b/plugins/poly-catalog/gradle.properties new file mode 100644 index 0000000000..90302a3805 --- /dev/null +++ b/plugins/poly-catalog/gradle.properties @@ -0,0 +1,27 @@ +# +# Copyright 2019-2023 The Polypheny Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +pluginVersion = 0.0.1 + +pluginId = mapdb-catalog +pluginClass = org.polypheny.db.catalog.MapDBCatalogPlugin +pluginProvider = The Polypheny Project +pluginDependencies = +pluginUrlPath = +pluginCategories = catalog +pluginPolyDependencies = +pluginIsSystemComponent = true +pluginIsUiVisible = true \ No newline at end of file diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/ModelCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/ModelCatalog.java new file mode 100644 index 0000000000..a4d1ce4220 --- /dev/null +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/ModelCatalog.java @@ -0,0 +1,27 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.catalog; + +public interface ModelCatalog { + + void commit(); + + void rollback(); + + boolean hasUncommitedChanges(); + +} diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java new file mode 100644 index 0000000000..c4d47ac36a --- /dev/null +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java @@ -0,0 +1,1338 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.db.catalog; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.atomic.AtomicLong; +import lombok.extern.slf4j.Slf4j; +import org.polypheny.db.adapter.DataStore; +import org.polypheny.db.algebra.AlgCollation; +import org.polypheny.db.algebra.AlgNode; +import org.polypheny.db.algebra.type.AlgDataType; +import org.polypheny.db.catalog.document.DocumentCatalog; +import org.polypheny.db.catalog.entity.CatalogAdapter; +import org.polypheny.db.catalog.entity.CatalogAdapter.AdapterType; +import org.polypheny.db.catalog.entity.CatalogCollection; +import org.polypheny.db.catalog.entity.CatalogCollectionMapping; +import org.polypheny.db.catalog.entity.CatalogCollectionPlacement; +import org.polypheny.db.catalog.entity.CatalogColumn; +import org.polypheny.db.catalog.entity.CatalogColumnPlacement; +import org.polypheny.db.catalog.entity.CatalogConstraint; +import org.polypheny.db.catalog.entity.CatalogDataPlacement; +import org.polypheny.db.catalog.entity.CatalogDatabase; +import org.polypheny.db.catalog.entity.CatalogForeignKey; +import org.polypheny.db.catalog.entity.CatalogGraphDatabase; +import org.polypheny.db.catalog.entity.CatalogGraphMapping; +import org.polypheny.db.catalog.entity.CatalogGraphPlacement; +import org.polypheny.db.catalog.entity.CatalogIndex; +import org.polypheny.db.catalog.entity.CatalogKey; +import org.polypheny.db.catalog.entity.CatalogPartition; +import org.polypheny.db.catalog.entity.CatalogPartitionGroup; +import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; +import org.polypheny.db.catalog.entity.CatalogPrimaryKey; +import org.polypheny.db.catalog.entity.CatalogQueryInterface; +import org.polypheny.db.catalog.entity.CatalogSchema; +import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.entity.CatalogUser; +import org.polypheny.db.catalog.entity.CatalogView; +import org.polypheny.db.catalog.entity.MaterializedCriteria; +import org.polypheny.db.catalog.exceptions.GenericCatalogException; +import org.polypheny.db.catalog.exceptions.NoTablePrimaryKeyException; +import org.polypheny.db.catalog.exceptions.UnknownAdapterException; +import org.polypheny.db.catalog.exceptions.UnknownColumnException; +import org.polypheny.db.catalog.exceptions.UnknownConstraintException; +import org.polypheny.db.catalog.exceptions.UnknownDatabaseException; +import org.polypheny.db.catalog.exceptions.UnknownForeignKeyException; +import org.polypheny.db.catalog.exceptions.UnknownIndexException; +import org.polypheny.db.catalog.exceptions.UnknownQueryInterfaceException; +import org.polypheny.db.catalog.exceptions.UnknownSchemaException; +import org.polypheny.db.catalog.exceptions.UnknownTableException; +import org.polypheny.db.catalog.exceptions.UnknownUserException; +import org.polypheny.db.catalog.graph.GraphCatalog; +import org.polypheny.db.catalog.relational.RelationalCatalog; +import org.polypheny.db.languages.QueryLanguage; +import org.polypheny.db.partition.properties.PartitionProperty; +import org.polypheny.db.transaction.Transaction; +import org.polypheny.db.type.PolyType; + + +/** + * Central catalog, which distributes the operations to the corresponding model catalogs. 
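+ * <p>
+ * Every namespace is created in all three model catalogs under one shared id,
+ * which is handed out once by the namespace id builder; this way each model
+ * can reach the others' entities through substitutions.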
+ * Object are as follows: + * Namespace -> Schema (Relational), Graph (Graph), Database (Document) + * Entity -> Table (Relational), does not exist (Graph), Collection (Document) + * Field -> Column (Relational), does not exist (Graph), Field (Document) + */ +@Slf4j +public class PolyCatalog extends Catalog { + + private final RelationalCatalog relational; + private final GraphCatalog graphs; + private final DocumentCatalog documents; + + private final ImmutableList catalogs; + + private final Map users = new HashMap<>(); + + private final AtomicLong namespaceIdBuilder = new AtomicLong( 0 ); + + + public PolyCatalog() { + this.documents = new DocumentCatalog(); + this.graphs = new GraphCatalog(); + this.relational = new RelationalCatalog(); + + catalogs = ImmutableList.of( this.relational, this.graphs, this.documents ); + } + + + @Override + public void commit() throws NoTablePrimaryKeyException { + log.debug( "commit" ); + catalogs.stream().filter( ModelCatalog::hasUncommitedChanges ).forEach( ModelCatalog::commit ); + } + + + @Override + public void rollback() { + log.debug( "rollback" ); + catalogs.stream().filter( ModelCatalog::hasUncommitedChanges ).forEach( ModelCatalog::rollback ); + } + + + @Override + public Map getAlgTypeInfo() { + return null; + } + + + @Override + public Map getNodeInfo() { + return null; + } + + + @Override + public void restoreInterfacesIfNecessary() { + + } + + + @Override + public void validateColumns() { + + } + + + @Override + public void restoreColumnPlacements( Transaction transaction ) { + + } + + + @Override + public void restoreViews( Transaction transaction ) { + + } + + + @Override + public int addUser( String name, String password ) { + return 0; + } + + + @Override + public long addDatabase( String name, int ownerId, String ownerName, long defaultSchemaId, String defaultSchemaName ) { + return 0; + } + + + @Override + public void deleteDatabase( long databaseId ) { + + } + + + @Override + public List getDatabases( Pattern pattern ) { + return null; + } + + + @Override + public CatalogDatabase getDatabase( String databaseName ) throws UnknownDatabaseException { + return null; + } + + + @Override + public CatalogDatabase getDatabase( long databaseId ) { + return null; + } + + + @Override + public List getSchemas( Pattern databaseNamePattern, Pattern schemaNamePattern ) { + return null; + } + + + @Override + public List getSchemas( long databaseId, Pattern schemaNamePattern ) { + return null; + } + + + @Override + public CatalogSchema getSchema( long schemaId ) { + return null; + } + + + @Override + public CatalogSchema getSchema( String databaseName, String schemaName ) throws UnknownSchemaException, UnknownDatabaseException { + return null; + } + + + @Override + public CatalogSchema getSchema( long databaseId, String schemaName ) throws UnknownSchemaException { + return null; + } + + + // todo rename "create" + @Override + public long addNamespace( String name, long databaseId, int ownerId, NamespaceType namespaceType ) { + long id = namespaceIdBuilder.getAndIncrement(); + documents.createDatabase( id, name, databaseId, namespaceType ); + graphs.createGraph( id, name, databaseId, namespaceType ); + relational.createSchema( id, name, databaseId, namespaceType ); + + return id; + } + + + @Override + public boolean checkIfExistsSchema( long databaseId, String schemaName ) { + return false; + } + + + @Override + public void renameSchema( long schemaId, String name ) { + + } + + + @Override + public void setSchemaOwner( long schemaId, long ownerId ) { 
+ + } + + + @Override + public long addGraph( long databaseId, String name, List stores, boolean modifiable, boolean ifNotExists, boolean replace ) { + return 0; + } + + + @Override + public void addGraphLogistics( long id, List stores, boolean onlyPlacement ) throws GenericCatalogException, UnknownTableException, UnknownColumnException { + + } + + + @Override + public void deleteGraph( long id ) { + + } + + + @Override + public CatalogGraphDatabase getGraph( long id ) { + return null; + } + + + @Override + public List getGraphs( long databaseId, Pattern graphName ) { + return null; + } + + + @Override + public void addGraphAlias( long graphId, String alias, boolean ifNotExists ) { + + } + + + @Override + public void removeGraphAlias( long graphId, String alias, boolean ifExists ) { + + } + + + @Override + public CatalogGraphMapping getGraphMapping( long graphId ) { + return null; + } + + + @Override + public void deleteSchema( long schemaId ) { + + } + + + @Override + public List getTables( long schemaId, Pattern tableNamePattern ) { + return null; + } + + + @Override + public List getTables( long databaseId, Pattern schemaNamePattern, Pattern tableNamePattern ) { + return null; + } + + + @Override + public CatalogTable getTable( String databaseName, String schemaName, String tableName ) throws UnknownTableException, UnknownDatabaseException, UnknownSchemaException { + return null; + } + + + @Override + public List getTables( Pattern databaseNamePattern, Pattern schemaNamePattern, Pattern tableNamePattern ) { + return null; + } + + + @Override + public CatalogTable getTable( long tableId ) { + return null; + } + + + @Override + public CatalogTable getTable( long schemaId, String tableName ) throws UnknownTableException { + return null; + } + + + @Override + public CatalogTable getTable( long databaseId, String schemaName, String tableName ) throws UnknownTableException { + return null; + } + + + @Override + public CatalogTable getTableFromPartition( long partitionId ) { + return null; + } + + + @Override + public long addTable( String name, long namespaceId, int ownerId, EntityType entityType, boolean modifiable ) { + return 0; + } + + + @Override + public long addView( String name, long namespaceId, int ownerId, EntityType entityType, boolean modifiable, AlgNode definition, AlgCollation algCollation, Map> underlyingTables, AlgDataType fieldList, String query, QueryLanguage language ) { + return 0; + } + + + @Override + public long addMaterializedView( String name, long namespaceId, int ownerId, EntityType entityType, boolean modifiable, AlgNode definition, AlgCollation algCollation, Map> underlyingTables, AlgDataType fieldList, MaterializedCriteria materializedCriteria, String query, QueryLanguage language, boolean ordered ) throws GenericCatalogException { + return 0; + } + + + @Override + public boolean checkIfExistsEntity( long namespaceId, String entityName ) { + return false; + } + + + @Override + public boolean checkIfExistsEntity( long tableId ) { + return false; + } + + + @Override + public void renameTable( long tableId, String name ) { + + } + + + @Override + public void deleteTable( long tableId ) { + + } + + + @Override + public void setTableOwner( long tableId, int ownerId ) { + + } + + + @Override + public void setPrimaryKey( long tableId, Long keyId ) { + + } + + + @Override + public void addColumnPlacement( int adapterId, long columnId, PlacementType placementType, String physicalSchemaName, String physicalTableName, String physicalColumnName ) { + + } + + + @Override + 
public void deleteColumnPlacement( int adapterId, long columnId, boolean columnOnly ) { + + } + + + @Override + public CatalogColumnPlacement getColumnPlacement( int adapterId, long columnId ) { + return null; + } + + + @Override + public boolean checkIfExistsColumnPlacement( int adapterId, long columnId ) { + return false; + } + + + @Override + public List getColumnPlacement( long columnId ) { + return null; + } + + + @Override + public List getColumnPlacementsOnAdapterPerTable( int adapterId, long tableId ) { + return null; + } + + + @Override + public List getColumnPlacementsOnAdapterSortedByPhysicalPosition( int adapterId, long tableId ) { + return null; + } + + + @Override + public List getColumnPlacementsOnAdapter( int adapterId ) { + return null; + } + + + @Override + public List getColumnPlacementsByColumn( long columnId ) { + return null; + } + + + @Override + public ImmutableMap> getColumnPlacementsByAdapter( long tableId ) { + return null; + } + + + @Override + public ImmutableMap> getPartitionPlacementsByAdapter( long tableId ) { + return null; + } + + + @Override + public ImmutableMap> getPartitionGroupsByAdapter( long tableId ) { + return null; + } + + + @Override + public long getPartitionGroupByPartition( long partitionId ) { + return 0; + } + + + @Override + public List getKeys() { + return null; + } + + + @Override + public List getTableKeys( long tableId ) { + return null; + } + + + @Override + public List getColumnPlacementsOnAdapterAndSchema( int adapterId, long schemaId ) { + return null; + } + + + @Override + public void updateColumnPlacementType( int adapterId, long columnId, PlacementType placementType ) { + + } + + + @Override + public void updateColumnPlacementPhysicalPosition( int adapterId, long columnId, long position ) { + + } + + + @Override + public void updateColumnPlacementPhysicalPosition( int adapterId, long columnId ) { + + } + + + @Override + public void updateColumnPlacementPhysicalNames( int adapterId, long columnId, String physicalSchemaName, String physicalColumnName, boolean updatePhysicalColumnPosition ) { + + } + + + @Override + public List getColumns( long tableId ) { + return null; + } + + + @Override + public List getColumns( Pattern databaseNamePattern, Pattern schemaNamePattern, Pattern tableNamePattern, Pattern columnNamePattern ) { + return null; + } + + + @Override + public CatalogColumn getColumn( long columnId ) { + return null; + } + + + @Override + public CatalogColumn getColumn( long tableId, String columnName ) throws UnknownColumnException { + return null; + } + + + @Override + public CatalogColumn getColumn( String databaseName, String schemaName, String tableName, String columnName ) throws UnknownColumnException, UnknownSchemaException, UnknownDatabaseException, UnknownTableException { + return null; + } + + + @Override + public long addColumn( String name, long tableId, int position, PolyType type, PolyType collectionsType, Integer length, Integer scale, Integer dimension, Integer cardinality, boolean nullable, Collation collation ) { + return 0; + } + + + @Override + public void renameColumn( long columnId, String name ) { + + } + + + @Override + public void setColumnPosition( long columnId, int position ) { + + } + + + @Override + public void setColumnType( long columnId, PolyType type, PolyType collectionsType, Integer length, Integer precision, Integer dimension, Integer cardinality ) throws GenericCatalogException { + + } + + + @Override + public void setNullable( long columnId, boolean nullable ) throws 
GenericCatalogException { + + } + + + @Override + public void setCollation( long columnId, Collation collation ) { + + } + + + @Override + public boolean checkIfExistsColumn( long tableId, String columnName ) { + return false; + } + + + @Override + public void deleteColumn( long columnId ) { + + } + + + @Override + public void setDefaultValue( long columnId, PolyType type, String defaultValue ) { + + } + + + @Override + public void deleteDefaultValue( long columnId ) { + + } + + + @Override + public CatalogPrimaryKey getPrimaryKey( long key ) { + return null; + } + + + @Override + public boolean isPrimaryKey( long keyId ) { + return false; + } + + + @Override + public boolean isForeignKey( long keyId ) { + return false; + } + + + @Override + public boolean isIndex( long keyId ) { + return false; + } + + + @Override + public boolean isConstraint( long keyId ) { + return false; + } + + + @Override + public void addPrimaryKey( long tableId, List columnIds ) throws GenericCatalogException { + + } + + + @Override + public List getForeignKeys( long tableId ) { + return null; + } + + + @Override + public List getExportedKeys( long tableId ) { + return null; + } + + + @Override + public List getConstraints( long tableId ) { + return null; + } + + + @Override + public List getIndexes( CatalogKey key ) { + return null; + } + + + @Override + public List getForeignKeys( CatalogKey key ) { + return null; + } + + + @Override + public List getConstraints( CatalogKey key ) { + return null; + } + + + @Override + public CatalogConstraint getConstraint( long tableId, String constraintName ) throws UnknownConstraintException { + return null; + } + + + @Override + public CatalogForeignKey getForeignKey( long tableId, String foreignKeyName ) throws UnknownForeignKeyException { + return null; + } + + + @Override + public void addForeignKey( long tableId, List columnIds, long referencesTableId, List referencesIds, String constraintName, ForeignKeyOption onUpdate, ForeignKeyOption onDelete ) throws GenericCatalogException { + + } + + + @Override + public void addUniqueConstraint( long tableId, String constraintName, List columnIds ) throws GenericCatalogException { + + } + + + @Override + public List getIndexes( long tableId, boolean onlyUnique ) { + return null; + } + + + @Override + public CatalogIndex getIndex( long tableId, String indexName ) throws UnknownIndexException { + return null; + } + + + @Override + public boolean checkIfExistsIndex( long tableId, String indexName ) { + return false; + } + + + @Override + public CatalogIndex getIndex( long indexId ) { + return null; + } + + + @Override + public List getIndexes() { + return null; + } + + + @Override + public long addIndex( long tableId, List columnIds, boolean unique, String method, String methodDisplayName, int location, IndexType type, String indexName ) throws GenericCatalogException { + return 0; + } + + + @Override + public void setIndexPhysicalName( long indexId, String physicalName ) { + + } + + + @Override + public void deleteIndex( long indexId ) { + + } + + + @Override + public void deletePrimaryKey( long tableId ) throws GenericCatalogException { + + } + + + @Override + public void deleteForeignKey( long foreignKeyId ) throws GenericCatalogException { + + } + + + @Override + public void deleteConstraint( long constraintId ) throws GenericCatalogException { + + } + + + @Override + public CatalogUser getUser( String userName ) throws UnknownUserException { + return null; + } + + + @Override + public CatalogUser getUser( int userId ) { + 
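+        // provisional stub: user management is not yet handled by PolyCatalog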
return null; + } + + + @Override + public List getAdapters() { + return null; + } + + + @Override + public CatalogAdapter getAdapter( String uniqueName ) throws UnknownAdapterException { + return null; + } + + + @Override + public CatalogAdapter getAdapter( int adapterId ) { + return null; + } + + + @Override + public boolean checkIfExistsAdapter( int adapterId ) { + return false; + } + + + @Override + public int addAdapter( String uniqueName, String clazz, AdapterType type, Map settings ) { + return 0; + } + + + @Override + public void updateAdapterSettings( int adapterId, Map newSettings ) { + + } + + + @Override + public void deleteAdapter( int adapterId ) { + + } + + + @Override + public List getQueryInterfaces() { + return null; + } + + + @Override + public CatalogQueryInterface getQueryInterface( String uniqueName ) throws UnknownQueryInterfaceException { + return null; + } + + + @Override + public CatalogQueryInterface getQueryInterface( int ifaceId ) { + return null; + } + + + @Override + public int addQueryInterface( String uniqueName, String clazz, Map settings ) { + return 0; + } + + + @Override + public void deleteQueryInterface( int ifaceId ) { + + } + + + @Override + public long addPartitionGroup( long tableId, String partitionGroupName, long schemaId, PartitionType partitionType, long numberOfInternalPartitions, List effectivePartitionGroupQualifier, boolean isUnbound ) throws GenericCatalogException { + return 0; + } + + + @Override + public void deletePartitionGroup( long tableId, long schemaId, long partitionGroupId ) { + + } + + + @Override + public CatalogPartitionGroup getPartitionGroup( long partitionGroupId ) { + return null; + } + + + @Override + public long addPartition( long tableId, long schemaId, long partitionGroupId, List effectivePartitionGroupQualifier, boolean isUnbound ) throws GenericCatalogException { + return 0; + } + + + @Override + public void deletePartition( long tableId, long schemaId, long partitionId ) { + + } + + + @Override + public CatalogPartition getPartition( long partitionId ) { + return null; + } + + + @Override + public List getPartitionsByTable( long tableId ) { + return null; + } + + + @Override + public void partitionTable( long tableId, PartitionType partitionType, long partitionColumnId, int numPartitionGroups, List partitionGroupIds, PartitionProperty partitionProperty ) { + + } + + + @Override + public void mergeTable( long tableId ) { + + } + + + @Override + public void updateTablePartitionProperties( long tableId, PartitionProperty partitionProperty ) { + + } + + + @Override + public List getPartitionGroups( long tableId ) { + return null; + } + + + @Override + public List getPartitionGroups( Pattern databaseNamePattern, Pattern schemaNamePattern, Pattern tableNamePattern ) { + return null; + } + + + @Override + public void updatePartitionGroup( long partitionGroupId, List partitionIds ) { + + } + + + @Override + public void addPartitionToGroup( long partitionGroupId, Long partitionId ) { + + } + + + @Override + public void removePartitionFromGroup( long partitionGroupId, Long partitionId ) { + + } + + + @Override + public void updatePartition( long partitionId, Long partitionGroupId ) { + + } + + + @Override + public List getPartitions( long partitionGroupId ) { + return null; + } + + + @Override + public List getPartitions( Pattern databaseNamePattern, Pattern schemaNamePattern, Pattern tableNamePattern ) { + return null; + } + + + @Override + public List getPartitionGroupNames( long tableId ) { + return null; + } + + + 
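+    // Illustrative usage sketch (not part of the committed code, names as
+    // defined above): a single addNamespace() call registers the namespace in
+    // the relational, graph and document catalogs under one shared id.
+    //
+    //   PolyCatalog catalog = new PolyCatalog();
+    //   long id = catalog.addNamespace( "test", 0, 0, NamespaceType.RELATIONAL );
+    //   catalog.commit(); // commits only catalogs with uncommitted changes
+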
@Override + public List getColumnPlacementsByPartitionGroup( long tableId, long partitionGroupId, long columnId ) { + return null; + } + + + @Override + public List getAdaptersByPartitionGroup( long tableId, long partitionGroupId ) { + return null; + } + + + @Override + public List getPartitionGroupsOnDataPlacement( int adapterId, long tableId ) { + return null; + } + + + @Override + public List getPartitionsOnDataPlacement( int adapterId, long tableId ) { + return null; + } + + + @Override + public List getPartitionGroupsIndexOnDataPlacement( int adapterId, long tableId ) { + return null; + } + + + @Override + public CatalogDataPlacement getDataPlacement( int adapterId, long tableId ) { + return null; + } + + + @Override + public List getDataPlacements( long tableId ) { + return null; + } + + + @Override + public List getAllFullDataPlacements( long tableId ) { + return null; + } + + + @Override + public List getAllColumnFullDataPlacements( long tableId ) { + return null; + } + + + @Override + public List getAllPartitionFullDataPlacements( long tableId ) { + return null; + } + + + @Override + public List getDataPlacementsByRole( long tableId, DataPlacementRole role ) { + return null; + } + + + @Override + public List getPartitionPlacementsByRole( long tableId, DataPlacementRole role ) { + return null; + } + + + @Override + public List getPartitionPlacementsByIdAndRole( long tableId, long partitionId, DataPlacementRole role ) { + return null; + } + + + @Override + public boolean validateDataPlacementsConstraints( long tableId, long adapterId, List columnIdsToBeRemoved, List partitionsIdsToBeRemoved ) { + return false; + } + + + @Override + public void flagTableForDeletion( long tableId, boolean flag ) { + + } + + + @Override + public boolean isTableFlaggedForDeletion( long tableId ) { + return false; + } + + + @Override + public void addPartitionPlacement( int adapterId, long tableId, long partitionId, PlacementType placementType, String physicalSchemaName, String physicalTableName, DataPlacementRole role ) { + + } + + + @Override + public void addDataPlacement( int adapterId, long tableId ) { + + } + + + @Override + public CatalogDataPlacement addDataPlacementIfNotExists( int adapterId, long tableId ) { + return null; + } + + + @Override + protected void modifyDataPlacement( int adapterId, long tableId, CatalogDataPlacement catalogDataPlacement ) { + + } + + + @Override + public long addGraphPlacement( int adapterId, long graphId ) { + return 0; + } + + + @Override + public List getGraphPlacements( int adapterId ) { + return null; + } + + + @Override + public void deleteGraphPlacement( int adapterId, long graphId ) { + + } + + + @Override + public void updateGraphPlacementPhysicalNames( long graphId, int adapterId, String physicalGraphName ) { + + } + + + @Override + public CatalogGraphPlacement getGraphPlacement( long graphId, int adapterId ) { + return null; + } + + + @Override + public void removeDataPlacement( int adapterId, long tableId ) { + + } + + + @Override + protected void addSingleDataPlacementToTable( Integer adapterId, long tableId ) { + + } + + + @Override + protected void removeSingleDataPlacementFromTable( Integer adapterId, long tableId ) { + + } + + + @Override + public void updateDataPlacementsOnTable( long tableId, List newDataPlacements ) { + + } + + + @Override + protected void addColumnsToDataPlacement( int adapterId, long tableId, List columnIds ) { + + } + + + @Override + protected void removeColumnsFromDataPlacement( int adapterId, long tableId, List columnIds ) 
{ + + } + + + @Override + protected void addPartitionsToDataPlacement( int adapterId, long tableId, List partitionIds ) { + + } + + + @Override + protected void removePartitionsFromDataPlacement( int adapterId, long tableId, List partitionIds ) { + + } + + + @Override + public void updateDataPlacement( int adapterId, long tableId, List columnIds, List partitionIds ) { + + } + + + @Override + public void updatePartitionPlacementPhysicalNames( int adapterId, long partitionId, String physicalSchemaName, String physicalTableName ) { + + } + + + @Override + public void deletePartitionPlacement( int adapterId, long partitionId ) { + + } + + + @Override + public CatalogPartitionPlacement getPartitionPlacement( int adapterId, long partitionId ) { + return null; + } + + + @Override + public List getPartitionPlacementsByAdapter( int adapterId ) { + return null; + } + + + @Override + public List getPartitionPlacementsByTableOnAdapter( int adapterId, long tableId ) { + return null; + } + + + @Override + public List getAllPartitionPlacementsByTable( long tableId ) { + return null; + } + + + @Override + public List getPartitionPlacements( long partitionId ) { + return null; + } + + + @Override + public List getTablesForPeriodicProcessing() { + return null; + } + + + @Override + public void addTableToPeriodicProcessing( long tableId ) { + + } + + + @Override + public void removeTableFromPeriodicProcessing( long tableId ) { + + } + + + @Override + public boolean checkIfExistsPartitionPlacement( int adapterId, long partitionId ) { + return false; + } + + + @Override + public void deleteViewDependencies( CatalogView catalogView ) { + + } + + + @Override + public void updateMaterializedViewRefreshTime( long materializedViewId ) { + + } + + + @Override + public CatalogCollection getCollection( long collectionId ) { + return null; + } + + + @Override + public List getCollections( long namespaceId, Pattern namePattern ) { + return null; + } + + + @Override + public long addCollection( Long id, String name, long schemaId, int currentUserId, EntityType entity, boolean modifiable ) { + return 0; + } + + + @Override + public long addCollectionPlacement( int adapterId, long collectionId, PlacementType placementType ) { + return 0; + } + + + @Override + public CatalogCollectionMapping getCollectionMapping( long id ) { + return null; + } + + + @Override + public long addCollectionLogistics( long schemaId, String name, List stores, boolean onlyPlacement ) throws GenericCatalogException { + return 0; + } + + + @Override + public List getCollectionPlacementsByAdapter( int adapterId ) { + return null; + } + + + @Override + public CatalogCollectionPlacement getCollectionPlacement( long collectionId, int adapterId ) { + return null; + } + + + @Override + public void updateCollectionPartitionPhysicalNames( long collectionId, int adapterId, String physicalNamespaceName, String namespaceName, String physicalCollectionName ) { + + } + + + @Override + public void deleteCollection( long id ) { + + } + + + @Override + public void dropCollectionPlacement( long id, int adapterId ) { + + } + + + @Override + public void close() { + + } + + + @Override + public void clear() { + + } + +} diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/document/CatalogCollection.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/document/CatalogCollection.java new file mode 100644 index 0000000000..5496f86b35 --- /dev/null +++ 
b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/document/CatalogCollection.java @@ -0,0 +1,21 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.catalog.document; + +public class CatalogCollection { + +} diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/document/CatalogDocDatabase.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/document/CatalogDocDatabase.java new file mode 100644 index 0000000000..aee0a55a93 --- /dev/null +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/document/CatalogDocDatabase.java @@ -0,0 +1,21 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.catalog.document; + +public class CatalogDocDatabase { + +} diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/document/DocumentCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/document/DocumentCatalog.java new file mode 100644 index 0000000000..78bc7fb047 --- /dev/null +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/document/DocumentCatalog.java @@ -0,0 +1,46 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.db.catalog.document; + +import java.util.HashMap; +import java.util.Map; +import org.polypheny.db.catalog.ModelCatalog; + +public class DocumentCatalog implements ModelCatalog { + + Map databases = new HashMap<>(); + Map collections = new HashMap<>(); + + + @Override + public void commit() { + + } + + + @Override + public void rollback() { + + } + + + @Override + public boolean hasUncommitedChanges() { + return false; + } + +} diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/graph/CatalogGraph.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/graph/CatalogGraph.java new file mode 100644 index 0000000000..bfd3a88c56 --- /dev/null +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/graph/CatalogGraph.java @@ -0,0 +1,21 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.catalog.graph; + +public class CatalogGraph { + +} diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/graph/GraphCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/graph/GraphCatalog.java new file mode 100644 index 0000000000..919ae20a9e --- /dev/null +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/graph/GraphCatalog.java @@ -0,0 +1,45 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.catalog.graph; + +import java.util.ArrayList; +import java.util.List; +import org.polypheny.db.catalog.ModelCatalog; + +public class GraphCatalog implements ModelCatalog { + + public List graphs = new ArrayList<>(); + + + @Override + public void commit() { + + } + + + @Override + public void rollback() { + + } + + + @Override + public boolean hasUncommitedChanges() { + return false; + } + +} diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/relational/CatalogColumn.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/relational/CatalogColumn.java new file mode 100644 index 0000000000..bdb4b17352 --- /dev/null +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/relational/CatalogColumn.java @@ -0,0 +1,21 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.catalog.relational; + +public class CatalogColumn { + +} diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/relational/CatalogSchema.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/relational/CatalogSchema.java new file mode 100644 index 0000000000..6eb0a84cd0 --- /dev/null +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/relational/CatalogSchema.java @@ -0,0 +1,22 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.catalog.relational; + +public class CatalogSchema { + + +} diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/relational/CatalogTable.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/relational/CatalogTable.java new file mode 100644 index 0000000000..a18670dad5 --- /dev/null +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/relational/CatalogTable.java @@ -0,0 +1,21 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.catalog.relational; + +public class CatalogTable { + +} diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/relational/RelationalCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/relational/RelationalCatalog.java new file mode 100644 index 0000000000..55d14fdc13 --- /dev/null +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/relational/RelationalCatalog.java @@ -0,0 +1,49 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.catalog.relational; + +import java.util.HashMap; +import java.util.Map; +import org.polypheny.db.catalog.ModelCatalog; + +public class RelationalCatalog implements ModelCatalog { + + private Map schemas = new HashMap<>(); + + private Map tables = new HashMap<>(); + + private Map columns = new HashMap<>(); + + + @Override + public void commit() { + + } + + + @Override + public void rollback() { + + } + + + @Override + public boolean hasUncommitedChanges() { + return false; + } + +} diff --git a/settings.gradle b/settings.gradle index e86ba4affb..4035a25c69 100644 --- a/settings.gradle +++ b/settings.gradle @@ -13,6 +13,7 @@ include 'plugins' // catalog include 'plugins:mapdb-catalog' +include 'plugins:poly-catalog' include 'plugins:mapdb-monitoring' From 4284798214537467d80c6f607315d586630a4ea7 Mon Sep 17 00:00:00 2001 From: datomo Date: Sun, 12 Feb 2023 23:37:55 +0100 Subject: [PATCH 003/436] added provisional mapping logic for catalog --- .../org/polypheny/db/catalog/PolyCatalog.java | 1325 ++--------------- .../db/catalog/document/DocumentCatalog.java | 16 +- .../db/catalog/graph/GraphCatalog.java | 11 +- .../mappings/CatalogDocumentMapping.java | 34 + .../catalog/mappings/CatalogGraphMapping.java | 36 + .../catalog/mappings/CatalogModelMapping.java | 23 + .../mappings/CatalogRelationalMapping.java | 34 + .../catalog/relational/RelationalCatalog.java | 20 +- 8 files changed, 269 insertions(+), 1230 deletions(-) create mode 100644 plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/mappings/CatalogDocumentMapping.java create mode 100644 plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/mappings/CatalogGraphMapping.java create mode 100644 plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/mappings/CatalogModelMapping.java create mode 100644 plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/mappings/CatalogRelationalMapping.java diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java index c4d47ac36a..536da230b1 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java @@ -17,61 +17,21 @@ package org.polypheny.db.catalog; import com.google.common.collect.ImmutableList; -import com.google.common.collect.ImmutableMap; import java.util.HashMap; -import java.util.List; import java.util.Map; import java.util.concurrent.atomic.AtomicLong; import lombok.extern.slf4j.Slf4j; -import org.polypheny.db.adapter.DataStore; -import org.polypheny.db.algebra.AlgCollation; -import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.type.AlgDataType; +import org.polypheny.db.catalog.Catalog.NamespaceType; import org.polypheny.db.catalog.document.DocumentCatalog; -import org.polypheny.db.catalog.entity.CatalogAdapter; -import org.polypheny.db.catalog.entity.CatalogAdapter.AdapterType; -import org.polypheny.db.catalog.entity.CatalogCollection; -import 
org.polypheny.db.catalog.entity.CatalogCollectionMapping; -import org.polypheny.db.catalog.entity.CatalogCollectionPlacement; -import org.polypheny.db.catalog.entity.CatalogColumn; -import org.polypheny.db.catalog.entity.CatalogColumnPlacement; -import org.polypheny.db.catalog.entity.CatalogConstraint; -import org.polypheny.db.catalog.entity.CatalogDataPlacement; -import org.polypheny.db.catalog.entity.CatalogDatabase; -import org.polypheny.db.catalog.entity.CatalogForeignKey; -import org.polypheny.db.catalog.entity.CatalogGraphDatabase; -import org.polypheny.db.catalog.entity.CatalogGraphMapping; -import org.polypheny.db.catalog.entity.CatalogGraphPlacement; -import org.polypheny.db.catalog.entity.CatalogIndex; -import org.polypheny.db.catalog.entity.CatalogKey; -import org.polypheny.db.catalog.entity.CatalogPartition; -import org.polypheny.db.catalog.entity.CatalogPartitionGroup; -import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; -import org.polypheny.db.catalog.entity.CatalogPrimaryKey; -import org.polypheny.db.catalog.entity.CatalogQueryInterface; -import org.polypheny.db.catalog.entity.CatalogSchema; -import org.polypheny.db.catalog.entity.CatalogTable; import org.polypheny.db.catalog.entity.CatalogUser; -import org.polypheny.db.catalog.entity.CatalogView; -import org.polypheny.db.catalog.entity.MaterializedCriteria; -import org.polypheny.db.catalog.exceptions.GenericCatalogException; import org.polypheny.db.catalog.exceptions.NoTablePrimaryKeyException; -import org.polypheny.db.catalog.exceptions.UnknownAdapterException; -import org.polypheny.db.catalog.exceptions.UnknownColumnException; -import org.polypheny.db.catalog.exceptions.UnknownConstraintException; -import org.polypheny.db.catalog.exceptions.UnknownDatabaseException; -import org.polypheny.db.catalog.exceptions.UnknownForeignKeyException; -import org.polypheny.db.catalog.exceptions.UnknownIndexException; -import org.polypheny.db.catalog.exceptions.UnknownQueryInterfaceException; -import org.polypheny.db.catalog.exceptions.UnknownSchemaException; -import org.polypheny.db.catalog.exceptions.UnknownTableException; -import org.polypheny.db.catalog.exceptions.UnknownUserException; import org.polypheny.db.catalog.graph.GraphCatalog; +import org.polypheny.db.catalog.mappings.CatalogDocumentMapping; +import org.polypheny.db.catalog.mappings.CatalogGraphMapping; +import org.polypheny.db.catalog.mappings.CatalogModelMapping; +import org.polypheny.db.catalog.mappings.CatalogRelationalMapping; import org.polypheny.db.catalog.relational.RelationalCatalog; -import org.polypheny.db.languages.QueryLanguage; -import org.polypheny.db.partition.properties.PartitionProperty; -import org.polypheny.db.transaction.Transaction; -import org.polypheny.db.type.PolyType; /** @@ -82,1257 +42,168 @@ * Field -> Column (Relational), does not exist (Graph), Field (Document) */ @Slf4j -public class PolyCatalog extends Catalog { +public class PolyCatalog { private final RelationalCatalog relational; - private final GraphCatalog graphs; - private final DocumentCatalog documents; + private final GraphCatalog graph; + private final DocumentCatalog document; private final ImmutableList catalogs; private final Map users = new HashMap<>(); + private final Map mappings = new HashMap<>(); + private final AtomicLong namespaceIdBuilder = new AtomicLong( 0 ); + private final AtomicLong entityIdBuilder = new AtomicLong( 0 ); + + private final AtomicLong fieldIdBuilder = new AtomicLong( 0 ); public PolyCatalog() { - this.documents = new DocumentCatalog(); 
- this.graphs = new GraphCatalog(); + this.document = new DocumentCatalog(); + this.graph = new GraphCatalog(); this.relational = new RelationalCatalog(); - catalogs = ImmutableList.of( this.relational, this.graphs, this.documents ); + catalogs = ImmutableList.of( this.relational, this.graph, this.document ); } - @Override public void commit() throws NoTablePrimaryKeyException { log.debug( "commit" ); catalogs.stream().filter( ModelCatalog::hasUncommitedChanges ).forEach( ModelCatalog::commit ); } - @Override public void rollback() { log.debug( "rollback" ); catalogs.stream().filter( ModelCatalog::hasUncommitedChanges ).forEach( ModelCatalog::rollback ); } - @Override - public Map getAlgTypeInfo() { - return null; - } - - - @Override - public Map getNodeInfo() { - return null; - } - - - @Override - public void restoreInterfacesIfNecessary() { - - } - - - @Override - public void validateColumns() { - - } - - - @Override - public void restoreColumnPlacements( Transaction transaction ) { - - } - - - @Override - public void restoreViews( Transaction transaction ) { - - } - - - @Override - public int addUser( String name, String password ) { - return 0; - } - - - @Override - public long addDatabase( String name, int ownerId, String ownerName, long defaultSchemaId, String defaultSchemaName ) { - return 0; - } - - - @Override - public void deleteDatabase( long databaseId ) { - - } - - - @Override - public List getDatabases( Pattern pattern ) { - return null; - } - - - @Override - public CatalogDatabase getDatabase( String databaseName ) throws UnknownDatabaseException { - return null; - } - - - @Override - public CatalogDatabase getDatabase( long databaseId ) { - return null; - } - - - @Override - public List getSchemas( Pattern databaseNamePattern, Pattern schemaNamePattern ) { - return null; - } - - - @Override - public List getSchemas( long databaseId, Pattern schemaNamePattern ) { - return null; - } - - - @Override - public CatalogSchema getSchema( long schemaId ) { - return null; - } - - - @Override - public CatalogSchema getSchema( String databaseName, String schemaName ) throws UnknownSchemaException, UnknownDatabaseException { - return null; - } - - - @Override - public CatalogSchema getSchema( long databaseId, String schemaName ) throws UnknownSchemaException { - return null; - } - - - // todo rename "create" - @Override public long addNamespace( String name, long databaseId, int ownerId, NamespaceType namespaceType ) { long id = namespaceIdBuilder.getAndIncrement(); - documents.createDatabase( id, name, databaseId, namespaceType ); - graphs.createGraph( id, name, databaseId, namespaceType ); - relational.createSchema( id, name, databaseId, namespaceType ); - - return id; - } - - - @Override - public boolean checkIfExistsSchema( long databaseId, String schemaName ) { - return false; - } - - - @Override - public void renameSchema( long schemaId, String name ) { - - } - - - @Override - public void setSchemaOwner( long schemaId, long ownerId ) { - - } - - - @Override - public long addGraph( long databaseId, String name, List stores, boolean modifiable, boolean ifNotExists, boolean replace ) { - return 0; - } - - - @Override - public void addGraphLogistics( long id, List stores, boolean onlyPlacement ) throws GenericCatalogException, UnknownTableException, UnknownColumnException { - - } - - - @Override - public void deleteGraph( long id ) { - - } - - - @Override - public CatalogGraphDatabase getGraph( long id ) { - return null; - } - - - @Override - public List getGraphs( long databaseId, 
Pattern graphName ) { - return null; - } - - - @Override - public void addGraphAlias( long graphId, String alias, boolean ifNotExists ) { - - } - - - @Override - public void removeGraphAlias( long graphId, String alias, boolean ifExists ) { - - } - - - @Override - public CatalogGraphMapping getGraphMapping( long graphId ) { - return null; - } - - - @Override - public void deleteSchema( long schemaId ) { - - } - - - @Override - public List getTables( long schemaId, Pattern tableNamePattern ) { - return null; - } - - - @Override - public List getTables( long databaseId, Pattern schemaNamePattern, Pattern tableNamePattern ) { - return null; - } - - - @Override - public CatalogTable getTable( String databaseName, String schemaName, String tableName ) throws UnknownTableException, UnknownDatabaseException, UnknownSchemaException { - return null; - } - - - @Override - public List getTables( Pattern databaseNamePattern, Pattern schemaNamePattern, Pattern tableNamePattern ) { - return null; - } - - - @Override - public CatalogTable getTable( long tableId ) { - return null; - } - - - @Override - public CatalogTable getTable( long schemaId, String tableName ) throws UnknownTableException { - return null; - } - - - @Override - public CatalogTable getTable( long databaseId, String schemaName, String tableName ) throws UnknownTableException { - return null; - } - - - @Override - public CatalogTable getTableFromPartition( long partitionId ) { - return null; - } + CatalogModelMapping mapping = null; + switch ( namespaceType ) { + case RELATIONAL: + mapping = addRelationalNamespace( id, name, databaseId, namespaceType ); + break; + case DOCUMENT: + mapping = addDocumentNamespace( id, name, databaseId, namespaceType ); + break; + case GRAPH: + mapping = addGraphNamespace( id, name, databaseId, namespaceType ); + break; + } + + mappings.put( id, mapping ); - @Override - public long addTable( String name, long namespaceId, int ownerId, EntityType entityType, boolean modifiable ) { - return 0; - } - - - @Override - public long addView( String name, long namespaceId, int ownerId, EntityType entityType, boolean modifiable, AlgNode definition, AlgCollation algCollation, Map> underlyingTables, AlgDataType fieldList, String query, QueryLanguage language ) { - return 0; - } - - - @Override - public long addMaterializedView( String name, long namespaceId, int ownerId, EntityType entityType, boolean modifiable, AlgNode definition, AlgCollation algCollation, Map> underlyingTables, AlgDataType fieldList, MaterializedCriteria materializedCriteria, String query, QueryLanguage language, boolean ordered ) throws GenericCatalogException { - return 0; - } - - - @Override - public boolean checkIfExistsEntity( long namespaceId, String entityName ) { - return false; - } - - - @Override - public boolean checkIfExistsEntity( long tableId ) { - return false; - } - - - @Override - public void renameTable( long tableId, String name ) { - - } - - - @Override - public void deleteTable( long tableId ) { - - } - - - @Override - public void setTableOwner( long tableId, int ownerId ) { - - } - - - @Override - public void setPrimaryKey( long tableId, Long keyId ) { - - } - - - @Override - public void addColumnPlacement( int adapterId, long columnId, PlacementType placementType, String physicalSchemaName, String physicalTableName, String physicalColumnName ) { - - } - - - @Override - public void deleteColumnPlacement( int adapterId, long columnId, boolean columnOnly ) { - - } - - - @Override - public CatalogColumnPlacement 
getColumnPlacement( int adapterId, long columnId ) { - return null; - } - - - @Override - public boolean checkIfExistsColumnPlacement( int adapterId, long columnId ) { - return false; - } - - - @Override - public List getColumnPlacement( long columnId ) { - return null; - } - - - @Override - public List getColumnPlacementsOnAdapterPerTable( int adapterId, long tableId ) { - return null; - } - - - @Override - public List getColumnPlacementsOnAdapterSortedByPhysicalPosition( int adapterId, long tableId ) { - return null; - } - - - @Override - public List getColumnPlacementsOnAdapter( int adapterId ) { - return null; - } - - - @Override - public List getColumnPlacementsByColumn( long columnId ) { - return null; - } - - - @Override - public ImmutableMap> getColumnPlacementsByAdapter( long tableId ) { - return null; - } - - - @Override - public ImmutableMap> getPartitionPlacementsByAdapter( long tableId ) { - return null; - } - - - @Override - public ImmutableMap> getPartitionGroupsByAdapter( long tableId ) { - return null; - } - - - @Override - public long getPartitionGroupByPartition( long partitionId ) { - return 0; - } - - - @Override - public List getKeys() { - return null; - } - - - @Override - public List getTableKeys( long tableId ) { - return null; - } - - - @Override - public List getColumnPlacementsOnAdapterAndSchema( int adapterId, long schemaId ) { - return null; - } - - - @Override - public void updateColumnPlacementType( int adapterId, long columnId, PlacementType placementType ) { - - } - - - @Override - public void updateColumnPlacementPhysicalPosition( int adapterId, long columnId, long position ) { - - } - - - @Override - public void updateColumnPlacementPhysicalPosition( int adapterId, long columnId ) { - - } - - - @Override - public void updateColumnPlacementPhysicalNames( int adapterId, long columnId, String physicalSchemaName, String physicalColumnName, boolean updatePhysicalColumnPosition ) { - - } - - - @Override - public List getColumns( long tableId ) { - return null; - } - - - @Override - public List getColumns( Pattern databaseNamePattern, Pattern schemaNamePattern, Pattern tableNamePattern, Pattern columnNamePattern ) { - return null; - } - - - @Override - public CatalogColumn getColumn( long columnId ) { - return null; - } - - - @Override - public CatalogColumn getColumn( long tableId, String columnName ) throws UnknownColumnException { - return null; - } - - - @Override - public CatalogColumn getColumn( String databaseName, String schemaName, String tableName, String columnName ) throws UnknownColumnException, UnknownSchemaException, UnknownDatabaseException, UnknownTableException { - return null; - } - - - @Override - public long addColumn( String name, long tableId, int position, PolyType type, PolyType collectionsType, Integer length, Integer scale, Integer dimension, Integer cardinality, boolean nullable, Collation collation ) { - return 0; + return id; } - @Override - public void renameColumn( long columnId, String name ) { + private CatalogModelMapping addGraphNamespace( long id, String name, long databaseId, NamespaceType namespaceType ) { + // add to model catalog + graph.addGraph( id, name, databaseId, namespaceType ); - } + // add substitutions for other models + long nodeId = entityIdBuilder.getAndIncrement(); + long nPropertiesId = entityIdBuilder.getAndIncrement(); + long edgeId = entityIdBuilder.getAndIncrement(); + long ePropertiesId = entityIdBuilder.getAndIncrement(); + // add relational + relational.addSchema( id, name, databaseId, namespaceType 
); + relational.addTable( nodeId, "_nodes_", id ); + relational.addTable( nPropertiesId, "_nProperties_", id ); + relational.addTable( edgeId, "_edges_", id ); + relational.addTable( ePropertiesId, "_eProperties_", id ); - @Override - public void setColumnPosition( long columnId, int position ) { + // add document + document.addDatabase( id, name, databaseId, namespaceType ); + document.addCollection( nodeId, "_nodes_", id ); + document.addCollection( nPropertiesId, "_nProperties_", id ); + document.addCollection( edgeId, "_edges_", id ); + document.addCollection( ePropertiesId, "_eProperties_", id ); + return new CatalogGraphMapping( id, nodeId, nPropertiesId, edgeId, ePropertiesId ); } - @Override - public void setColumnType( long columnId, PolyType type, PolyType collectionsType, Integer length, Integer precision, Integer dimension, Integer cardinality ) throws GenericCatalogException { - - } - + private CatalogModelMapping addDocumentNamespace( long id, String name, long databaseId, NamespaceType namespaceType ) { + // add to model catalog + document.addDatabase( id, name, databaseId, namespaceType ); - @Override - public void setNullable( long columnId, boolean nullable ) throws GenericCatalogException { + // add substitutions to other models + relational.addSchema( id, name, databaseId, namespaceType ); + graph.addGraph( id, name, databaseId, namespaceType ); + return new CatalogDocumentMapping( id ); } - @Override - public void setCollation( long columnId, Collation collation ) { - - } + private CatalogModelMapping addRelationalNamespace( long id, String name, long databaseId, NamespaceType namespaceType ) { + // add to model catalog + relational.addSchema( id, name, databaseId, namespaceType ); + // add substitutions to other models + document.addDatabase( id, name, databaseId, namespaceType ); + graph.addGraph( id, name, databaseId, namespaceType ); - @Override - public boolean checkIfExistsColumn( long tableId, String columnName ) { - return false; + return new CatalogRelationalMapping( id ); } - @Override - public void deleteColumn( long columnId ) { - - } - + public long addEntity( String name, long namespaceId, NamespaceType type, int ownerId ) { + long id = entityIdBuilder.getAndIncrement(); - @Override - public void setDefaultValue( long columnId, PolyType type, String defaultValue ) { + switch ( type ) { + case RELATIONAL: + addRelationalEntity( id, name, namespaceId ); + break; + case DOCUMENT: + addDocumentEntity( id, name, namespaceId ); + break; + case GRAPH: + // do nothing + break; + } + return id; } - @Override - public void deleteDefaultValue( long columnId ) { - + private void addDocumentEntity( long id, String name, long namespaceId ) { + document.addCollection( id, name, namespaceId ); } - @Override - public CatalogPrimaryKey getPrimaryKey( long key ) { - return null; + private void addRelationalEntity( long id, String name, long namespaceId ) { + relational.addTable( id, name, namespaceId ); } - @Override - public boolean isPrimaryKey( long keyId ) { - return false; - } + public long addField( String name, long entityId, AlgDataType type, NamespaceType namespaceType ) { + long id = fieldIdBuilder.getAndIncrement(); + switch ( namespaceType ) { + case RELATIONAL: + addColumn( id, name, entityId, type ); + break; + case DOCUMENT: + case GRAPH: + // not available for models + break; + } - @Override - public boolean isForeignKey( long keyId ) { - return false; + return id; } - @Override - public boolean isIndex( long keyId ) { - return false; + private void 
addColumn( long id, String name, long entityId, AlgDataType type ) { + relational.addColumn( id, name, entityId, type ); } - @Override - public boolean isConstraint( long keyId ) { - return false; - } - - - @Override - public void addPrimaryKey( long tableId, List columnIds ) throws GenericCatalogException { - - } - - - @Override - public List getForeignKeys( long tableId ) { - return null; - } - - - @Override - public List getExportedKeys( long tableId ) { - return null; - } - - - @Override - public List getConstraints( long tableId ) { - return null; - } - - - @Override - public List getIndexes( CatalogKey key ) { - return null; - } - - - @Override - public List getForeignKeys( CatalogKey key ) { - return null; - } - - - @Override - public List getConstraints( CatalogKey key ) { - return null; - } - - - @Override - public CatalogConstraint getConstraint( long tableId, String constraintName ) throws UnknownConstraintException { - return null; - } - - - @Override - public CatalogForeignKey getForeignKey( long tableId, String foreignKeyName ) throws UnknownForeignKeyException { - return null; - } - - - @Override - public void addForeignKey( long tableId, List columnIds, long referencesTableId, List referencesIds, String constraintName, ForeignKeyOption onUpdate, ForeignKeyOption onDelete ) throws GenericCatalogException { - - } - - - @Override - public void addUniqueConstraint( long tableId, String constraintName, List columnIds ) throws GenericCatalogException { - - } - - - @Override - public List getIndexes( long tableId, boolean onlyUnique ) { - return null; - } - - - @Override - public CatalogIndex getIndex( long tableId, String indexName ) throws UnknownIndexException { - return null; - } - - - @Override - public boolean checkIfExistsIndex( long tableId, String indexName ) { - return false; - } - - - @Override - public CatalogIndex getIndex( long indexId ) { - return null; - } - - - @Override - public List getIndexes() { - return null; - } - - - @Override - public long addIndex( long tableId, List columnIds, boolean unique, String method, String methodDisplayName, int location, IndexType type, String indexName ) throws GenericCatalogException { - return 0; - } - - - @Override - public void setIndexPhysicalName( long indexId, String physicalName ) { - - } - - - @Override - public void deleteIndex( long indexId ) { - - } - - - @Override - public void deletePrimaryKey( long tableId ) throws GenericCatalogException { - - } - - - @Override - public void deleteForeignKey( long foreignKeyId ) throws GenericCatalogException { - - } - - - @Override - public void deleteConstraint( long constraintId ) throws GenericCatalogException { - - } - - - @Override - public CatalogUser getUser( String userName ) throws UnknownUserException { - return null; - } - - - @Override - public CatalogUser getUser( int userId ) { - return null; - } - - - @Override - public List getAdapters() { - return null; - } - - - @Override - public CatalogAdapter getAdapter( String uniqueName ) throws UnknownAdapterException { - return null; - } - - - @Override - public CatalogAdapter getAdapter( int adapterId ) { - return null; - } - - - @Override - public boolean checkIfExistsAdapter( int adapterId ) { - return false; - } - - - @Override - public int addAdapter( String uniqueName, String clazz, AdapterType type, Map settings ) { - return 0; - } - - - @Override - public void updateAdapterSettings( int adapterId, Map newSettings ) { - - } - - - @Override - public void deleteAdapter( int adapterId ) { - - } - - - @Override - 
public List getQueryInterfaces() { - return null; - } - - - @Override - public CatalogQueryInterface getQueryInterface( String uniqueName ) throws UnknownQueryInterfaceException { - return null; - } - - - @Override - public CatalogQueryInterface getQueryInterface( int ifaceId ) { - return null; - } - - - @Override - public int addQueryInterface( String uniqueName, String clazz, Map settings ) { - return 0; - } - - - @Override - public void deleteQueryInterface( int ifaceId ) { - - } - - - @Override - public long addPartitionGroup( long tableId, String partitionGroupName, long schemaId, PartitionType partitionType, long numberOfInternalPartitions, List effectivePartitionGroupQualifier, boolean isUnbound ) throws GenericCatalogException { - return 0; - } - - - @Override - public void deletePartitionGroup( long tableId, long schemaId, long partitionGroupId ) { - - } - - - @Override - public CatalogPartitionGroup getPartitionGroup( long partitionGroupId ) { - return null; - } - - - @Override - public long addPartition( long tableId, long schemaId, long partitionGroupId, List effectivePartitionGroupQualifier, boolean isUnbound ) throws GenericCatalogException { - return 0; - } - - - @Override - public void deletePartition( long tableId, long schemaId, long partitionId ) { - - } - - - @Override - public CatalogPartition getPartition( long partitionId ) { - return null; - } - - - @Override - public List getPartitionsByTable( long tableId ) { - return null; - } - - - @Override - public void partitionTable( long tableId, PartitionType partitionType, long partitionColumnId, int numPartitionGroups, List partitionGroupIds, PartitionProperty partitionProperty ) { - - } - - - @Override - public void mergeTable( long tableId ) { - - } - - - @Override - public void updateTablePartitionProperties( long tableId, PartitionProperty partitionProperty ) { - - } - - - @Override - public List getPartitionGroups( long tableId ) { - return null; - } - - - @Override - public List getPartitionGroups( Pattern databaseNamePattern, Pattern schemaNamePattern, Pattern tableNamePattern ) { - return null; - } - - - @Override - public void updatePartitionGroup( long partitionGroupId, List partitionIds ) { - - } - - - @Override - public void addPartitionToGroup( long partitionGroupId, Long partitionId ) { - - } - - - @Override - public void removePartitionFromGroup( long partitionGroupId, Long partitionId ) { - - } - - - @Override - public void updatePartition( long partitionId, Long partitionGroupId ) { - - } - - - @Override - public List getPartitions( long partitionGroupId ) { - return null; - } - - - @Override - public List getPartitions( Pattern databaseNamePattern, Pattern schemaNamePattern, Pattern tableNamePattern ) { - return null; - } - - - @Override - public List getPartitionGroupNames( long tableId ) { - return null; - } - - - @Override - public List getColumnPlacementsByPartitionGroup( long tableId, long partitionGroupId, long columnId ) { - return null; - } - - - @Override - public List getAdaptersByPartitionGroup( long tableId, long partitionGroupId ) { - return null; - } - - - @Override - public List getPartitionGroupsOnDataPlacement( int adapterId, long tableId ) { - return null; - } - - - @Override - public List getPartitionsOnDataPlacement( int adapterId, long tableId ) { - return null; - } - - - @Override - public List getPartitionGroupsIndexOnDataPlacement( int adapterId, long tableId ) { - return null; - } - - - @Override - public CatalogDataPlacement getDataPlacement( int adapterId, long tableId ) { - 
return null; - } - - - @Override - public List getDataPlacements( long tableId ) { - return null; - } - - - @Override - public List getAllFullDataPlacements( long tableId ) { - return null; - } - - - @Override - public List getAllColumnFullDataPlacements( long tableId ) { - return null; - } - - - @Override - public List getAllPartitionFullDataPlacements( long tableId ) { - return null; - } - - - @Override - public List getDataPlacementsByRole( long tableId, DataPlacementRole role ) { - return null; - } - - - @Override - public List getPartitionPlacementsByRole( long tableId, DataPlacementRole role ) { - return null; - } - - - @Override - public List getPartitionPlacementsByIdAndRole( long tableId, long partitionId, DataPlacementRole role ) { - return null; - } - - - @Override - public boolean validateDataPlacementsConstraints( long tableId, long adapterId, List columnIdsToBeRemoved, List partitionsIdsToBeRemoved ) { - return false; - } - - - @Override - public void flagTableForDeletion( long tableId, boolean flag ) { - - } - - - @Override - public boolean isTableFlaggedForDeletion( long tableId ) { - return false; - } - - - @Override - public void addPartitionPlacement( int adapterId, long tableId, long partitionId, PlacementType placementType, String physicalSchemaName, String physicalTableName, DataPlacementRole role ) { - - } - - - @Override - public void addDataPlacement( int adapterId, long tableId ) { - - } - - - @Override - public CatalogDataPlacement addDataPlacementIfNotExists( int adapterId, long tableId ) { - return null; - } - - - @Override - protected void modifyDataPlacement( int adapterId, long tableId, CatalogDataPlacement catalogDataPlacement ) { - - } - - - @Override - public long addGraphPlacement( int adapterId, long graphId ) { - return 0; - } - - - @Override - public List getGraphPlacements( int adapterId ) { - return null; - } - - - @Override - public void deleteGraphPlacement( int adapterId, long graphId ) { - - } - - - @Override - public void updateGraphPlacementPhysicalNames( long graphId, int adapterId, String physicalGraphName ) { - - } - - - @Override - public CatalogGraphPlacement getGraphPlacement( long graphId, int adapterId ) { - return null; - } - - - @Override - public void removeDataPlacement( int adapterId, long tableId ) { - - } - - - @Override - protected void addSingleDataPlacementToTable( Integer adapterId, long tableId ) { - - } - - - @Override - protected void removeSingleDataPlacementFromTable( Integer adapterId, long tableId ) { - - } - - - @Override - public void updateDataPlacementsOnTable( long tableId, List newDataPlacements ) { - - } - - - @Override - protected void addColumnsToDataPlacement( int adapterId, long tableId, List columnIds ) { - - } - - - @Override - protected void removeColumnsFromDataPlacement( int adapterId, long tableId, List columnIds ) { - - } - - - @Override - protected void addPartitionsToDataPlacement( int adapterId, long tableId, List partitionIds ) { - - } - - - @Override - protected void removePartitionsFromDataPlacement( int adapterId, long tableId, List partitionIds ) { - - } - - - @Override - public void updateDataPlacement( int adapterId, long tableId, List columnIds, List partitionIds ) { - - } - - - @Override - public void updatePartitionPlacementPhysicalNames( int adapterId, long partitionId, String physicalSchemaName, String physicalTableName ) { - - } - - - @Override - public void deletePartitionPlacement( int adapterId, long partitionId ) { - - } - - - @Override - public CatalogPartitionPlacement 
getPartitionPlacement( int adapterId, long partitionId ) { - return null; - } - - - @Override - public List getPartitionPlacementsByAdapter( int adapterId ) { - return null; - } - - - @Override - public List getPartitionPlacementsByTableOnAdapter( int adapterId, long tableId ) { - return null; - } - - - @Override - public List getAllPartitionPlacementsByTable( long tableId ) { - return null; - } - - - @Override - public List getPartitionPlacements( long partitionId ) { - return null; - } - - - @Override - public List getTablesForPeriodicProcessing() { - return null; - } - - - @Override - public void addTableToPeriodicProcessing( long tableId ) { - - } - - - @Override - public void removeTableFromPeriodicProcessing( long tableId ) { - - } - - - @Override - public boolean checkIfExistsPartitionPlacement( int adapterId, long partitionId ) { - return false; - } - - - @Override - public void deleteViewDependencies( CatalogView catalogView ) { - - } - - - @Override - public void updateMaterializedViewRefreshTime( long materializedViewId ) { - - } - - - @Override - public CatalogCollection getCollection( long collectionId ) { - return null; - } - - - @Override - public List getCollections( long namespaceId, Pattern namePattern ) { - return null; - } - - - @Override - public long addCollection( Long id, String name, long schemaId, int currentUserId, EntityType entity, boolean modifiable ) { - return 0; - } - - - @Override - public long addCollectionPlacement( int adapterId, long collectionId, PlacementType placementType ) { - return 0; - } - - - @Override - public CatalogCollectionMapping getCollectionMapping( long id ) { - return null; - } - - - @Override - public long addCollectionLogistics( long schemaId, String name, List stores, boolean onlyPlacement ) throws GenericCatalogException { - return 0; - } - - - @Override - public List getCollectionPlacementsByAdapter( int adapterId ) { - return null; - } - - - @Override - public CatalogCollectionPlacement getCollectionPlacement( long collectionId, int adapterId ) { - return null; - } - - - @Override - public void updateCollectionPartitionPhysicalNames( long collectionId, int adapterId, String physicalNamespaceName, String namespaceName, String physicalCollectionName ) { - - } - - - @Override - public void deleteCollection( long id ) { - - } - - - @Override - public void dropCollectionPlacement( long id, int adapterId ) { - - } - - - @Override - public void close() { - - } - - - @Override - public void clear() { - - } - } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/document/DocumentCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/document/DocumentCatalog.java index 78bc7fb047..675542412b 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/document/DocumentCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/document/DocumentCatalog.java @@ -18,6 +18,7 @@ import java.util.HashMap; import java.util.Map; +import org.polypheny.db.catalog.Catalog.NamespaceType; import org.polypheny.db.catalog.ModelCatalog; public class DocumentCatalog implements ModelCatalog { @@ -26,15 +27,19 @@ public class DocumentCatalog implements ModelCatalog { Map collections = new HashMap<>(); + private boolean openChanges = false; + + @Override public void commit() { - + openChanges = false; } @Override public void rollback() { + openChanges = false; } @@ -43,4 +48,13 @@ public boolean hasUncommitedChanges() { return false; } + + public void addDatabase( long id, String name, 
long databaseId, NamespaceType namespaceType ) { + } + + + public void addCollection( long id, String name, long namespaceId ) { + + } + } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/graph/GraphCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/graph/GraphCatalog.java index 919ae20a9e..690c2d3a88 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/graph/GraphCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/graph/GraphCatalog.java @@ -18,28 +18,37 @@ import java.util.ArrayList; import java.util.List; +import org.polypheny.db.catalog.Catalog.NamespaceType; import org.polypheny.db.catalog.ModelCatalog; public class GraphCatalog implements ModelCatalog { public List graphs = new ArrayList<>(); + private boolean openChanges = false; + @Override public void commit() { + openChanges = false; } @Override public void rollback() { + openChanges = false; } @Override public boolean hasUncommitedChanges() { - return false; + return openChanges; + } + + + public void addGraph( long id, String name, long databaseId, NamespaceType namespaceType ) { } } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/mappings/CatalogDocumentMapping.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/mappings/CatalogDocumentMapping.java new file mode 100644 index 0000000000..a3dbf8fe55 --- /dev/null +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/mappings/CatalogDocumentMapping.java @@ -0,0 +1,34 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.catalog.mappings; + +public class CatalogDocumentMapping extends CatalogModelMapping { + + private final long id; + + + public CatalogDocumentMapping( long id ) { + this.id = id; + } + + + @Override + public String getGraphLabel() { + return "_collection_" + id; + } + +} diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/mappings/CatalogGraphMapping.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/mappings/CatalogGraphMapping.java new file mode 100644 index 0000000000..2b551fdf0d --- /dev/null +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/mappings/CatalogGraphMapping.java @@ -0,0 +1,36 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.db.catalog.mappings; + +import lombok.AllArgsConstructor; + +@AllArgsConstructor +public class CatalogGraphMapping extends CatalogModelMapping { + + private final long id; + private final long nodesId; + private final long nodesPropertiesId; + private final long edgesId; + private final long edgesPropertiesId; + + + @Override + public String getGraphLabel() { + return null; + } + +} diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/mappings/CatalogModelMapping.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/mappings/CatalogModelMapping.java new file mode 100644 index 0000000000..ee1e30eece --- /dev/null +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/mappings/CatalogModelMapping.java @@ -0,0 +1,23 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.catalog.mappings; + +public abstract class CatalogModelMapping { + + public abstract String getGraphLabel(); + +} diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/mappings/CatalogRelationalMapping.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/mappings/CatalogRelationalMapping.java new file mode 100644 index 0000000000..c8302523aa --- /dev/null +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/mappings/CatalogRelationalMapping.java @@ -0,0 +1,34 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.db.catalog.mappings; + +public class CatalogRelationalMapping extends CatalogModelMapping { + + private final long id; + + + public CatalogRelationalMapping( long id ) { + this.id = id; + } + + + @Override + public String getGraphLabel() { + return "_table_" + id; + } + +} diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/relational/RelationalCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/relational/RelationalCatalog.java index 55d14fdc13..e139c49bad 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/relational/RelationalCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/relational/RelationalCatalog.java @@ -18,6 +18,8 @@ import java.util.HashMap; import java.util.Map; +import org.polypheny.db.algebra.type.AlgDataType; +import org.polypheny.db.catalog.Catalog.NamespaceType; import org.polypheny.db.catalog.ModelCatalog; public class RelationalCatalog implements ModelCatalog { @@ -28,22 +30,38 @@ public class RelationalCatalog implements ModelCatalog { private Map columns = new HashMap<>(); + private boolean openChanges = false; + @Override public void commit() { + openChanges = false; } @Override public void rollback() { + openChanges = false; } @Override public boolean hasUncommitedChanges() { - return false; + return openChanges; + } + + + public void addSchema( long id, String name, long databaseId, NamespaceType namespaceType ) { + } + + + public void addTable( long id, String name, long namespaceId ) { + } + + + public void addColumn( long id, String name, long entityId, AlgDataType type ) { } } From 2159437e7512cd6b2273591413c7b890974dff51 Mon Sep 17 00:00:00 2001 From: datomo Date: Mon, 13 Feb 2023 15:10:45 +0100 Subject: [PATCH 004/436] boilerplate code for substitution entities --- .../java/org/polypheny/db/catalog/PolyCatalog.java | 11 +++++++++++ .../db/catalog/document/DocumentCatalog.java | 4 ++++ .../org/polypheny/db/catalog/graph/GraphCatalog.java | 4 ++++ .../db/catalog/relational/RelationalCatalog.java | 4 ++++ 4 files changed, 23 insertions(+) diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java index 536da230b1..5e74de0802 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java @@ -175,12 +175,23 @@ public long addEntity( String name, long namespaceId, NamespaceType type, int ow private void addDocumentEntity( long id, String name, long namespaceId ) { + // add target data model entity document.addCollection( id, name, namespaceId ); + + // add substitution entity + relational.addSubstitutionTable( id, name, namespaceId, NamespaceType.DOCUMENT ); + graph.addSubstitutionGraph( id, name, namespaceId, NamespaceType.DOCUMENT ); } private void addRelationalEntity( long id, String name, long namespaceId ) { + // add target data model entity relational.addTable( id, name, namespaceId ); + + // add substitution entity + graph.addSubstitutionGraph( id, name, namespaceId, NamespaceType.RELATIONAL ); + document.addSubstitutionCollection( id, name, namespaceId, NamespaceType.RELATIONAL ); + } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/document/DocumentCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/document/DocumentCatalog.java index 675542412b..ead14d4581 100644 --- 
a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/document/DocumentCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/document/DocumentCatalog.java @@ -57,4 +57,8 @@ public void addCollection( long id, String name, long namespaceId ) { } + + public void addSubstitutionCollection( long id, String name, long namespaceId, NamespaceType relational ) { + } + } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/graph/GraphCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/graph/GraphCatalog.java index 690c2d3a88..06d0ace94c 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/graph/GraphCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/graph/GraphCatalog.java @@ -51,4 +51,8 @@ public boolean hasUncommitedChanges() { public void addGraph( long id, String name, long databaseId, NamespaceType namespaceType ) { } + + public void addSubstitutionGraph( long id, String name, long namespaceId, NamespaceType document ) { + } + } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/relational/RelationalCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/relational/RelationalCatalog.java index e139c49bad..8a9a2c5302 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/relational/RelationalCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/relational/RelationalCatalog.java @@ -64,4 +64,8 @@ public void addTable( long id, String name, long namespaceId ) { public void addColumn( long id, String name, long entityId, AlgDataType type ) { } + + public void addSubstitutionTable( long id, String name, long namespaceId, NamespaceType document ) { + } + } From aab3b271c98dd197b3dab3ba2372336d402b0b28 Mon Sep 17 00:00:00 2001 From: datomo Date: Wed, 15 Feb 2023 16:01:07 +0100 Subject: [PATCH 005/436] added serialization logic --- plugins/poly-catalog/build.gradle | 2 +- .../polypheny/db/catalog/ModelCatalog.java | 3 +- .../org/polypheny/db/catalog/PolyCatalog.java | 122 +++++++++++++++++- .../db/catalog/SerializableCatalog.java | 35 +++++ .../db/catalog/document/DocumentCatalog.java | 10 +- .../db/catalog/graph/GraphCatalog.java | 11 +- .../catalog/relational/RelationalCatalog.java | 11 +- 7 files changed, 183 insertions(+), 11 deletions(-) create mode 100644 plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/SerializableCatalog.java diff --git a/plugins/poly-catalog/build.gradle b/plugins/poly-catalog/build.gradle index 1aa465eb1b..e6d3a073e5 100644 --- a/plugins/poly-catalog/build.gradle +++ b/plugins/poly-catalog/build.gradle @@ -4,7 +4,7 @@ group "org.polypheny" dependencies { compileOnly project(":core") - + api group: 'io.activej', name: 'activej-serializer', version: '5.4.3' api group: "org.apache.commons", name: "commons-lang3", version: commons_lang3_version // Apache 2.0 ////// SLF4J diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/ModelCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/ModelCatalog.java index a4d1ce4220..856ae1307c 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/ModelCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/ModelCatalog.java @@ -22,6 +22,7 @@ public interface ModelCatalog { void rollback(); - boolean hasUncommitedChanges(); + boolean hasUncommittedChanges(); + } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java 
b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java index 5e74de0802..8bf8d6e13b 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java @@ -17,11 +17,18 @@ package org.polypheny.db.catalog; import com.google.common.collect.ImmutableList; +import io.activej.serializer.BinarySerializer; +import io.activej.serializer.SerializerBuilder; import java.util.HashMap; +import java.util.List; import java.util.Map; import java.util.concurrent.atomic.AtomicLong; +import lombok.Getter; import lombok.extern.slf4j.Slf4j; +import org.polypheny.db.algebra.constant.FunctionCategory; +import org.polypheny.db.algebra.constant.Syntax; import org.polypheny.db.algebra.type.AlgDataType; +import org.polypheny.db.algebra.type.AlgDataTypeFactory; import org.polypheny.db.catalog.Catalog.NamespaceType; import org.polypheny.db.catalog.document.DocumentCatalog; import org.polypheny.db.catalog.entity.CatalogUser; @@ -32,6 +39,17 @@ import org.polypheny.db.catalog.mappings.CatalogModelMapping; import org.polypheny.db.catalog.mappings.CatalogRelationalMapping; import org.polypheny.db.catalog.relational.RelationalCatalog; +import org.polypheny.db.nodes.Identifier; +import org.polypheny.db.nodes.Operator; +import org.polypheny.db.plan.AlgOptPlanner; +import org.polypheny.db.plan.AlgOptTable; +import org.polypheny.db.prepare.Prepare; +import org.polypheny.db.prepare.Prepare.CatalogReader; +import org.polypheny.db.prepare.Prepare.PreparingTable; +import org.polypheny.db.schema.PolyphenyDbSchema; +import org.polypheny.db.schema.graph.Graph; +import org.polypheny.db.util.Moniker; +import org.polypheny.db.util.NameMatcher; /** @@ -42,7 +60,10 @@ * Field -> Column (Relational), does not exist (Graph), Field (Document) */ @Slf4j -public class PolyCatalog { +public class PolyCatalog implements SerializableCatalog, Prepare.CatalogReader { + + @Getter + BinarySerializer serializer = SerializerBuilder.create().build( PolyCatalog.class ); private final RelationalCatalog relational; private final GraphCatalog graph; @@ -71,13 +92,13 @@ public PolyCatalog() { public void commit() throws NoTablePrimaryKeyException { log.debug( "commit" ); - catalogs.stream().filter( ModelCatalog::hasUncommitedChanges ).forEach( ModelCatalog::commit ); + catalogs.stream().filter( ModelCatalog::hasUncommittedChanges ).forEach( ModelCatalog::commit ); } public void rollback() { log.debug( "rollback" ); - catalogs.stream().filter( ModelCatalog::hasUncommitedChanges ).forEach( ModelCatalog::rollback ); + catalogs.stream().filter( ModelCatalog::hasUncommittedChanges ).forEach( ModelCatalog::rollback ); } @@ -217,4 +238,99 @@ private void addColumn( long id, String name, long entityId, AlgDataType type ) } + @Override + public void lookupOperatorOverloads( Identifier opName, FunctionCategory category, Syntax syntax, List operatorList ) { + + } + + + @Override + public List getOperatorList() { + return null; + } + + + @Override + public AlgDataType getNamedType( Identifier typeName ) { + return null; + } + + + @Override + public List getAllSchemaObjectNames( List names ) { + return null; + } + + + @Override + public List> getSchemaPaths() { + return null; + } + + + @Override + public NameMatcher nameMatcher() { + return null; + } + + + @Override + public AlgDataType createTypeFromProjection( AlgDataType type, List columnNameList ) { + return null; + } + + + @Override + public PolyphenyDbSchema getRootSchema() { + return 
null; + } + + + @Override + public AlgDataTypeFactory getTypeFactory() { + return null; + } + + + @Override + public void registerRules( AlgOptPlanner planner ) throws Exception { + + } + + + @Override + public PreparingTable getTableForMember( List names ) { + return null; + } + + + @Override + public CatalogReader withSchemaPath( List schemaPath ) { + return null; + } + + + @Override + public PreparingTable getTable( List names ) { + return null; + } + + + @Override + public AlgOptTable getCollection( List names ) { + return null; + } + + + @Override + public Graph getGraph( String name ) { + return null; + } + + + @Override + public C unwrap( Class aClass ) { + return null; + } + } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/SerializableCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/SerializableCatalog.java new file mode 100644 index 0000000000..24c19f9e81 --- /dev/null +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/SerializableCatalog.java @@ -0,0 +1,35 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.catalog; + +import io.activej.serializer.BinarySerializer; + +public interface SerializableCatalog { + + BinarySerializer getSerializer(); + + default byte[] serialize( Class clazz ) { + byte[] buffer = new byte[200]; + getSerializer().encode( buffer, 0, this ); + return buffer; + } + + default SerializableCatalog deserialize( byte[] serialized, Class clazz ) { + return getSerializer().decode( serialized, 0 ); + } + +} diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/document/DocumentCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/document/DocumentCatalog.java index ead14d4581..27bad69987 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/document/DocumentCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/document/DocumentCatalog.java @@ -16,16 +16,22 @@ package org.polypheny.db.catalog.document; +import io.activej.serializer.BinarySerializer; +import io.activej.serializer.SerializerBuilder; import java.util.HashMap; import java.util.Map; +import lombok.Getter; import org.polypheny.db.catalog.Catalog.NamespaceType; import org.polypheny.db.catalog.ModelCatalog; +import org.polypheny.db.catalog.SerializableCatalog; -public class DocumentCatalog implements ModelCatalog { +public class DocumentCatalog implements ModelCatalog, SerializableCatalog { Map databases = new HashMap<>(); Map collections = new HashMap<>(); + @Getter + BinarySerializer serializer = SerializerBuilder.create().build( DocumentCatalog.class ); private boolean openChanges = false; @@ -44,7 +50,7 @@ public void rollback() { @Override - public boolean hasUncommitedChanges() { + public boolean hasUncommittedChanges() { return false; } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/graph/GraphCatalog.java 
b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/graph/GraphCatalog.java index 06d0ace94c..419149307f 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/graph/GraphCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/graph/GraphCatalog.java @@ -16,15 +16,22 @@ package org.polypheny.db.catalog.graph; +import io.activej.serializer.BinarySerializer; +import io.activej.serializer.SerializerBuilder; import java.util.ArrayList; import java.util.List; +import lombok.Getter; import org.polypheny.db.catalog.Catalog.NamespaceType; import org.polypheny.db.catalog.ModelCatalog; +import org.polypheny.db.catalog.SerializableCatalog; -public class GraphCatalog implements ModelCatalog { +public class GraphCatalog implements ModelCatalog, SerializableCatalog { public List graphs = new ArrayList<>(); + @Getter + BinarySerializer serializer = SerializerBuilder.create().build( GraphCatalog.class ); + private boolean openChanges = false; @@ -43,7 +50,7 @@ public void rollback() { @Override - public boolean hasUncommitedChanges() { + public boolean hasUncommittedChanges() { return openChanges; } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/relational/RelationalCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/relational/RelationalCatalog.java index 8a9a2c5302..848cb655ce 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/relational/RelationalCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/relational/RelationalCatalog.java @@ -16,13 +16,20 @@ package org.polypheny.db.catalog.relational; +import io.activej.serializer.BinarySerializer; +import io.activej.serializer.SerializerBuilder; import java.util.HashMap; import java.util.Map; +import lombok.Getter; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.catalog.Catalog.NamespaceType; import org.polypheny.db.catalog.ModelCatalog; +import org.polypheny.db.catalog.SerializableCatalog; -public class RelationalCatalog implements ModelCatalog { +public class RelationalCatalog implements ModelCatalog, SerializableCatalog { + + @Getter + BinarySerializer serializer = SerializerBuilder.create().build( RelationalCatalog.class ); private Map schemas = new HashMap<>(); @@ -48,7 +55,7 @@ public void rollback() { @Override - public boolean hasUncommitedChanges() { + public boolean hasUncommittedChanges() { return openChanges; } From f40efaec77a003e14d317ddec60b818c031e68c1 Mon Sep 17 00:00:00 2001 From: datomo Date: Wed, 15 Feb 2023 23:58:58 +0100 Subject: [PATCH 006/436] added catalog plugin and some testing --- plugins/poly-catalog/build.gradle | 2 +- plugins/poly-catalog/gradle.properties | 4 +- .../polypheny/db/catalog/CatalogPlugin.java | 49 ++++++ .../org/polypheny/db/catalog/PolyCatalog.java | 140 +++--------------- .../db/catalog/SerializableCatalog.java | 10 +- .../db/catalog/document/DocumentCatalog.java | 3 +- .../db/catalog/graph/GraphCatalog.java | 3 +- .../db/catalog/relational/CatalogSchema.java | 9 ++ .../catalog/relational/RelationalCatalog.java | 4 +- settings.gradle | 4 +- 10 files changed, 96 insertions(+), 132 deletions(-) create mode 100644 plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/CatalogPlugin.java diff --git a/plugins/poly-catalog/build.gradle b/plugins/poly-catalog/build.gradle index e6d3a073e5..91b85d6fcd 100644 --- a/plugins/poly-catalog/build.gradle +++ b/plugins/poly-catalog/build.gradle @@ -4,7 +4,7 @@ group "org.polypheny" dependencies { 
compileOnly project(":core") - api group: 'io.activej', name: 'activej-serializer', version: '5.4.3' + api group: 'io.activej', name: 'activej-serializer', version: '5.5-rc3' api group: "org.apache.commons", name: "commons-lang3", version: commons_lang3_version // Apache 2.0 ////// SLF4J diff --git a/plugins/poly-catalog/gradle.properties b/plugins/poly-catalog/gradle.properties index 90302a3805..7925c5b92b 100644 --- a/plugins/poly-catalog/gradle.properties +++ b/plugins/poly-catalog/gradle.properties @@ -16,8 +16,8 @@ pluginVersion = 0.0.1 -pluginId = mapdb-catalog -pluginClass = org.polypheny.db.catalog.MapDBCatalogPlugin +pluginId = poly-catalog +pluginClass = org.polypheny.db.catalog.CatalogPlugin pluginProvider = The Polypheny Project pluginDependencies = pluginUrlPath = diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/CatalogPlugin.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/CatalogPlugin.java new file mode 100644 index 0000000000..1f3a07959a --- /dev/null +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/CatalogPlugin.java @@ -0,0 +1,49 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.catalog; + +import org.pf4j.Plugin; +import org.pf4j.PluginWrapper; +import org.polypheny.db.catalog.Catalog.NamespaceType; + +public class CatalogPlugin extends Plugin { + + /** + * Constructor to be used by the plugin manager for plugin instantiation. + * Your plugins have to provide a constructor with this exact signature to + * be successfully loaded by the manager.
+ * + * @param wrapper + */ + public CatalogPlugin( PluginWrapper wrapper ) { + super( wrapper ); + } + + + @Override + public void start() { + PolyCatalog catalog = new PolyCatalog(); + catalog.addNamespace( "test", 0, 0, NamespaceType.RELATIONAL ); + catalog.addNamespace( "test2", 0, 0, NamespaceType.RELATIONAL ); + + byte[] buffer = catalog.serialize( PolyCatalog.class ); + + PolyCatalog catalog1 = catalog.deserialize( buffer, PolyCatalog.class ); + + } + +} diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java index 8bf8d6e13b..98f5e7dc77 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java @@ -18,17 +18,14 @@ import com.google.common.collect.ImmutableList; import io.activej.serializer.BinarySerializer; -import io.activej.serializer.SerializerBuilder; +import io.activej.serializer.annotations.Deserialize; +import io.activej.serializer.annotations.Serialize; import java.util.HashMap; -import java.util.List; import java.util.Map; import java.util.concurrent.atomic.AtomicLong; import lombok.Getter; import lombok.extern.slf4j.Slf4j; -import org.polypheny.db.algebra.constant.FunctionCategory; -import org.polypheny.db.algebra.constant.Syntax; import org.polypheny.db.algebra.type.AlgDataType; -import org.polypheny.db.algebra.type.AlgDataTypeFactory; import org.polypheny.db.catalog.Catalog.NamespaceType; import org.polypheny.db.catalog.document.DocumentCatalog; import org.polypheny.db.catalog.entity.CatalogUser; @@ -39,17 +36,6 @@ import org.polypheny.db.catalog.mappings.CatalogModelMapping; import org.polypheny.db.catalog.mappings.CatalogRelationalMapping; import org.polypheny.db.catalog.relational.RelationalCatalog; -import org.polypheny.db.nodes.Identifier; -import org.polypheny.db.nodes.Operator; -import org.polypheny.db.plan.AlgOptPlanner; -import org.polypheny.db.plan.AlgOptTable; -import org.polypheny.db.prepare.Prepare; -import org.polypheny.db.prepare.Prepare.CatalogReader; -import org.polypheny.db.prepare.Prepare.PreparingTable; -import org.polypheny.db.schema.PolyphenyDbSchema; -import org.polypheny.db.schema.graph.Graph; -import org.polypheny.db.util.Moniker; -import org.polypheny.db.util.NameMatcher; /** @@ -60,14 +46,17 @@ * Field -> Column (Relational), does not exist (Graph), Field (Document) */ @Slf4j -public class PolyCatalog implements SerializableCatalog, Prepare.CatalogReader { +public class PolyCatalog implements SerializableCatalog { @Getter - BinarySerializer serializer = SerializerBuilder.create().build( PolyCatalog.class ); + BinarySerializer serializer = SerializableCatalog.builder.get().build( PolyCatalog.class ); - private final RelationalCatalog relational; - private final GraphCatalog graph; - private final DocumentCatalog document; + @Serialize + public final RelationalCatalog relational; + @Serialize + public final GraphCatalog graph; + @Serialize + public final DocumentCatalog document; private final ImmutableList catalogs; @@ -82,9 +71,17 @@ public class PolyCatalog implements SerializableCatalog, Prepare.CatalogReader { public PolyCatalog() { - this.document = new DocumentCatalog(); - this.graph = new GraphCatalog(); - this.relational = new RelationalCatalog(); + this( new DocumentCatalog(), new GraphCatalog(), new RelationalCatalog() ); + } + + + private PolyCatalog( + @Deserialize("document") DocumentCatalog document, + 
@Deserialize("graph") GraphCatalog graph, + @Deserialize("relational") RelationalCatalog relational ) { + this.document = document; + this.graph = graph; + this.relational = relational; catalogs = ImmutableList.of( this.relational, this.graph, this.document ); } @@ -238,99 +235,4 @@ private void addColumn( long id, String name, long entityId, AlgDataType type ) } - @Override - public void lookupOperatorOverloads( Identifier opName, FunctionCategory category, Syntax syntax, List operatorList ) { - - } - - - @Override - public List getOperatorList() { - return null; - } - - - @Override - public AlgDataType getNamedType( Identifier typeName ) { - return null; - } - - - @Override - public List getAllSchemaObjectNames( List names ) { - return null; - } - - - @Override - public List> getSchemaPaths() { - return null; - } - - - @Override - public NameMatcher nameMatcher() { - return null; - } - - - @Override - public AlgDataType createTypeFromProjection( AlgDataType type, List columnNameList ) { - return null; - } - - - @Override - public PolyphenyDbSchema getRootSchema() { - return null; - } - - - @Override - public AlgDataTypeFactory getTypeFactory() { - return null; - } - - - @Override - public void registerRules( AlgOptPlanner planner ) throws Exception { - - } - - - @Override - public PreparingTable getTableForMember( List names ) { - return null; - } - - - @Override - public CatalogReader withSchemaPath( List schemaPath ) { - return null; - } - - - @Override - public PreparingTable getTable( List names ) { - return null; - } - - - @Override - public AlgOptTable getCollection( List names ) { - return null; - } - - - @Override - public Graph getGraph( String name ) { - return null; - } - - - @Override - public C unwrap( Class aClass ) { - return null; - } - } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/SerializableCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/SerializableCatalog.java index 24c19f9e81..da12a7d78f 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/SerializableCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/SerializableCatalog.java @@ -16,10 +16,16 @@ package org.polypheny.db.catalog; +import io.activej.codegen.DefiningClassLoader; import io.activej.serializer.BinarySerializer; +import io.activej.serializer.SerializerBuilder; +import java.util.function.Supplier; +import org.polypheny.db.plugins.PolyPluginManager; public interface SerializableCatalog { + Supplier builder = () -> SerializerBuilder.create( DefiningClassLoader.create( PolyPluginManager.getMainClassLoader() ) ); + BinarySerializer getSerializer(); default byte[] serialize( Class clazz ) { @@ -28,8 +34,8 @@ default byte[] serialize( Class clazz ) { return buffer; } - default SerializableCatalog deserialize( byte[] serialized, Class clazz ) { - return getSerializer().decode( serialized, 0 ); + default T deserialize( byte[] serialized, Class clazz ) { + return clazz.cast( getSerializer().decode( serialized, 0 ) ); } } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/document/DocumentCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/document/DocumentCatalog.java index 27bad69987..1c9d8f7007 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/document/DocumentCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/document/DocumentCatalog.java @@ -17,7 +17,6 @@ package org.polypheny.db.catalog.document; import 
io.activej.serializer.BinarySerializer; -import io.activej.serializer.SerializerBuilder; import java.util.HashMap; import java.util.Map; import lombok.Getter; @@ -31,7 +30,7 @@ public class DocumentCatalog implements ModelCatalog, SerializableCatalog { Map collections = new HashMap<>(); @Getter - BinarySerializer serializer = SerializerBuilder.create().build( DocumentCatalog.class ); + BinarySerializer serializer = SerializableCatalog.builder.get().build( DocumentCatalog.class ); private boolean openChanges = false; diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/graph/GraphCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/graph/GraphCatalog.java index 419149307f..884250172c 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/graph/GraphCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/graph/GraphCatalog.java @@ -17,7 +17,6 @@ package org.polypheny.db.catalog.graph; import io.activej.serializer.BinarySerializer; -import io.activej.serializer.SerializerBuilder; import java.util.ArrayList; import java.util.List; import lombok.Getter; @@ -30,7 +29,7 @@ public class GraphCatalog implements ModelCatalog, SerializableCatalog { public List graphs = new ArrayList<>(); @Getter - BinarySerializer serializer = SerializerBuilder.create().build( GraphCatalog.class ); + BinarySerializer serializer = SerializableCatalog.builder.get().build( GraphCatalog.class ); private boolean openChanges = false; diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/relational/CatalogSchema.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/relational/CatalogSchema.java index 6eb0a84cd0..b0031c2b4d 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/relational/CatalogSchema.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/relational/CatalogSchema.java @@ -16,7 +16,16 @@ package org.polypheny.db.catalog.relational; +import lombok.AllArgsConstructor; + +@AllArgsConstructor public class CatalogSchema { + private final long id; + + private final String name; + private final long databaseId; + + } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/relational/RelationalCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/relational/RelationalCatalog.java index 848cb655ce..9df32dfb5d 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/relational/RelationalCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/relational/RelationalCatalog.java @@ -17,7 +17,6 @@ package org.polypheny.db.catalog.relational; import io.activej.serializer.BinarySerializer; -import io.activej.serializer.SerializerBuilder; import java.util.HashMap; import java.util.Map; import lombok.Getter; @@ -29,7 +28,7 @@ public class RelationalCatalog implements ModelCatalog, SerializableCatalog { @Getter - BinarySerializer serializer = SerializerBuilder.create().build( RelationalCatalog.class ); + BinarySerializer serializer = SerializableCatalog.builder.get().build( RelationalCatalog.class ); private Map schemas = new HashMap<>(); @@ -61,6 +60,7 @@ public boolean hasUncommittedChanges() { public void addSchema( long id, String name, long databaseId, NamespaceType namespaceType ) { + schemas.put( id, new CatalogSchema( id, name, databaseId ) ); } diff --git a/settings.gradle b/settings.gradle index 4035a25c69..bbbb3aa437 100644 --- a/settings.gradle +++ b/settings.gradle @@ -36,8 +36,8 @@ include 
'plugins:http-interface' include 'plugins:hsqldb-adapter' include 'plugins:neo4j-adapter' include 'plugins:cottontail-adapter' -include 'plugins:ethereum-adapter' -include 'plugins:cassandra-adapter' +//include 'plugins:ethereum-adapter' +//include 'plugins:cassandra-adapter' include 'plugins:csv-adapter' include 'plugins:mysql-adapter' include 'plugins:postgres-adapter' From f4ec529dadd1eeacbc229ada542d56f5f1b793c4 Mon Sep 17 00:00:00 2001 From: datomo Date: Thu, 16 Feb 2023 16:42:29 +0100 Subject: [PATCH 007/436] fixed serialization of new catalog, removed unused code --- .../validate/ValidatorCatalogReader.java | 6 - .../org/polypheny/db/plan/AlgOptSchema.java | 11 - .../db/plan/volcano/VolcanoPlanner.java | 8 +- .../polypheny/db/prepare/AlgOptTableImpl.java | 5 +- .../db/prepare/PolyphenyDbCatalogReader.java | 90 +------- .../org/polypheny/db/prepare/Prepare.java | 43 +--- .../db/schema/AbstractPolyphenyDbSchema.java | 72 ++----- .../db/schema/PolyphenyDbSchema.java | 15 -- .../java/org/polypheny/db/schema/Wrapper.java | 17 +- .../org/polypheny/db/util/ValidatorUtil.java | 4 +- .../db/catalog/MockCatalogReader.java | 60 ++++-- dbms/build.gradle | 1 + plugins/poly-catalog/build.gradle | 3 +- .../polypheny/db/catalog/CatalogDatabase.java | 43 ++++ .../polypheny/db/catalog/CatalogPlugin.java | 7 +- .../org/polypheny/db/catalog/IdBuilder.java | 70 ++++++ .../org/polypheny/db/catalog/PolyCatalog.java | 157 +++++++++++--- .../db/catalog/entities/CatalogUser.java | 38 ++++ .../mappings/CatalogDocumentMapping.java | 10 +- .../catalog/mappings/CatalogGraphMapping.java | 40 +++- .../catalog/mappings/CatalogModelMapping.java | 5 +- .../mappings/CatalogRelationalMapping.java | 10 +- .../db/catalog/relational/CatalogSchema.java | 22 +- .../catalog/relational/RelationalCatalog.java | 28 ++- .../db/sql/language/ddl/SqlDropObject.java | 36 ---- .../language/util/ListSqlOperatorTable.java | 9 +- .../language/validate/AbstractNamespace.java | 2 +- .../language/validate/DelegatingScope.java | 2 +- .../validate/IdentifierNamespace.java | 2 +- .../db/sql/language/validate/ListScope.java | 4 +- .../validate/MatchRecognizeScope.java | 2 +- .../sql/language/validate/OrderByScope.java | 4 +- .../language/validate/SqlValidatorImpl.java | 40 ++-- .../language/validate/SqlValidatorUtil.java | 8 +- .../db/sql/sql2alg/SqlToAlgConverter.java | 203 +++++++++++++++--- .../db/sql/language/SqlToAlgTestBase.java | 13 +- 36 files changed, 674 insertions(+), 416 deletions(-) create mode 100644 plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/CatalogDatabase.java create mode 100644 plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/IdBuilder.java create mode 100644 plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/entities/CatalogUser.java diff --git a/core/src/main/java/org/polypheny/db/nodes/validate/ValidatorCatalogReader.java b/core/src/main/java/org/polypheny/db/nodes/validate/ValidatorCatalogReader.java index 8c59c40aed..3ef6531285 100644 --- a/core/src/main/java/org/polypheny/db/nodes/validate/ValidatorCatalogReader.java +++ b/core/src/main/java/org/polypheny/db/nodes/validate/ValidatorCatalogReader.java @@ -23,7 +23,6 @@ import org.polypheny.db.schema.PolyphenyDbSchema; import org.polypheny.db.schema.Wrapper; import org.polypheny.db.util.Moniker; -import org.polypheny.db.util.NameMatcher; /** @@ -75,11 +74,6 @@ public interface ValidatorCatalogReader extends Wrapper { */ List> getSchemaPaths(); - /** - * Returns an implementation of {@link NameMatcher} that matches the case-sensitivity 
policy. - */ - NameMatcher nameMatcher(); - AlgDataType createTypeFromProjection( AlgDataType type, List columnNameList ); /** diff --git a/core/src/main/java/org/polypheny/db/plan/AlgOptSchema.java b/core/src/main/java/org/polypheny/db/plan/AlgOptSchema.java index 1082a4657f..347b22aedf 100644 --- a/core/src/main/java/org/polypheny/db/plan/AlgOptSchema.java +++ b/core/src/main/java/org/polypheny/db/plan/AlgOptSchema.java @@ -35,7 +35,6 @@ import java.util.List; -import org.polypheny.db.algebra.type.AlgDataTypeFactory; /** @@ -54,14 +53,4 @@ public interface AlgOptSchema { */ AlgOptTable getTableForMember( List names ); - /** - * Returns the {@link AlgDataTypeFactory type factory} used to generate types for this schema. - */ - AlgDataTypeFactory getTypeFactory(); - - /** - * Registers all of the rules supported by this schema. Only called by {@link AlgOptPlanner#registerSchema}. - */ - void registerRules( AlgOptPlanner planner ) throws Exception; - } diff --git a/core/src/main/java/org/polypheny/db/plan/volcano/VolcanoPlanner.java b/core/src/main/java/org/polypheny/db/plan/volcano/VolcanoPlanner.java index e6abfe2f19..af8b8c8f9a 100644 --- a/core/src/main/java/org/polypheny/db/plan/volcano/VolcanoPlanner.java +++ b/core/src/main/java/org/polypheny/db/plan/volcano/VolcanoPlanner.java @@ -859,13 +859,7 @@ public void registerAbstractRelationalRules() { @Override public void registerSchema( AlgOptSchema schema ) { - if ( registeredSchemas.add( schema ) ) { - try { - schema.registerRules( this ); - } catch ( Exception e ) { - throw new AssertionError( "While registering schema " + schema, e ); - } - } + registeredSchemas.add( schema ); } diff --git a/core/src/main/java/org/polypheny/db/prepare/AlgOptTableImpl.java b/core/src/main/java/org/polypheny/db/prepare/AlgOptTableImpl.java index 80077f8b17..2540e7fe29 100644 --- a/core/src/main/java/org/polypheny/db/prepare/AlgOptTableImpl.java +++ b/core/src/main/java/org/polypheny/db/prepare/AlgOptTableImpl.java @@ -55,6 +55,7 @@ import org.polypheny.db.algebra.logical.relational.LogicalScan; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; +import org.polypheny.db.algebra.type.AlgDataTypeFactoryImpl; import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.algebra.type.AlgProtoDataType; import org.polypheny.db.algebra.type.AlgRecordType; @@ -225,7 +226,7 @@ public Expression getExpression( Class clazz ) { @Override protected AlgOptTable extend( Table extendedTable ) { - final AlgDataType extendedRowType = extendedTable.getRowType( getRelOptSchema().getTypeFactory() ); + final AlgDataType extendedRowType = extendedTable.getRowType( AlgDataTypeFactory.DEFAULT ); return new AlgOptTableImpl( getRelOptSchema(), extendedRowType, @@ -453,7 +454,7 @@ public static AlgDataType realRowType( AlgOptTable table ) { if ( !strategies.contains( ColumnStrategy.VIRTUAL ) ) { return rowType; } - final AlgDataTypeFactory.Builder builder = table.getRelOptSchema().getTypeFactory().builder(); + final AlgDataTypeFactory.Builder builder = AlgDataTypeFactoryImpl.DEFAULT.builder(); for ( AlgDataTypeField field : rowType.getFieldList() ) { if ( strategies.get( field.getIndex() ) != ColumnStrategy.VIRTUAL ) { builder.add( field ); diff --git a/core/src/main/java/org/polypheny/db/prepare/PolyphenyDbCatalogReader.java b/core/src/main/java/org/polypheny/db/prepare/PolyphenyDbCatalogReader.java index a4f5369b74..a011c20945 100644 --- 
a/core/src/main/java/org/polypheny/db/prepare/PolyphenyDbCatalogReader.java +++ b/core/src/main/java/org/polypheny/db/prepare/PolyphenyDbCatalogReader.java @@ -34,10 +34,7 @@ package org.polypheny.db.prepare; import com.google.common.collect.ImmutableList; -import com.google.common.collect.Iterables; import java.util.ArrayList; -import java.util.Collection; -import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.NavigableSet; @@ -48,26 +45,20 @@ import org.polypheny.db.algebra.operators.OperatorTable; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; -import org.polypheny.db.config.RuntimeConfig; import org.polypheny.db.nodes.Identifier; import org.polypheny.db.nodes.Operator; -import org.polypheny.db.plan.AlgOptPlanner; import org.polypheny.db.plan.AlgOptTable; -import org.polypheny.db.schema.Function; import org.polypheny.db.schema.PolyphenyDbSchema; import org.polypheny.db.schema.Table; import org.polypheny.db.schema.Wrapper; import org.polypheny.db.schema.graph.Graph; import org.polypheny.db.util.Moniker; import org.polypheny.db.util.MonikerImpl; -import org.polypheny.db.util.NameMatcher; -import org.polypheny.db.util.NameMatchers; -import org.polypheny.db.util.Util; import org.polypheny.db.util.ValidatorUtil; /** - * Implementation of {@link org.polypheny.db.prepare.Prepare.CatalogReader} and also {@link OperatorTable} based on + * Implementation of {@link Prepare.CatalogReader} and also {@link OperatorTable} based on * tables and functions defined schemas. */ public class PolyphenyDbCatalogReader implements Prepare.CatalogReader { @@ -75,34 +66,15 @@ public class PolyphenyDbCatalogReader implements Prepare.CatalogReader { protected final PolyphenyDbSchema rootSchema; protected final AlgDataTypeFactory typeFactory; private final List> schemaPaths; - protected final NameMatcher nameMatcher; public PolyphenyDbCatalogReader( PolyphenyDbSchema rootSchema, List defaultSchema, AlgDataTypeFactory typeFactory ) { - this( - rootSchema, - NameMatchers.withCaseSensitive( RuntimeConfig.RELATIONAL_CASE_SENSITIVE.getBoolean() ), - ImmutableList.of( Objects.requireNonNull( defaultSchema ), ImmutableList.of() ), - typeFactory ); - } - - - protected PolyphenyDbCatalogReader( PolyphenyDbSchema rootSchema, NameMatcher nameMatcher, List> schemaPaths, AlgDataTypeFactory typeFactory ) { this.rootSchema = Objects.requireNonNull( rootSchema ); - this.nameMatcher = nameMatcher; - this.schemaPaths = - Util.immutableCopy( Util.isDistinct( schemaPaths ) - ? schemaPaths - : new LinkedHashSet<>( schemaPaths ) ); + this.schemaPaths = ImmutableList.of( Objects.requireNonNull( defaultSchema ), ImmutableList.of() ); this.typeFactory = typeFactory; } - @Override - public PolyphenyDbCatalogReader withSchemaPath( List schemaPath ) { - return new PolyphenyDbCatalogReader( rootSchema, nameMatcher, ImmutableList.of( schemaPath, ImmutableList.of() ), typeFactory ); - } - @Override public Prepare.PreparingTable getTable( final List names ) { @@ -141,35 +113,6 @@ public Graph getGraph( final String name ) { } - public Collection getFunctionsFrom( List names ) { - final List functions2 = new ArrayList<>(); - final List> schemaNameList = new ArrayList<>(); - if ( names.size() > 1 ) { - // Name qualified: ignore path. But we do look in "/catalog" and "/", the last 2 items in the path. 
- if ( schemaPaths.size() > 1 ) { - schemaNameList.addAll( Util.skip( schemaPaths ) ); - } else { - schemaNameList.addAll( schemaPaths ); - } - } else { - for ( List schemaPath : schemaPaths ) { - PolyphenyDbSchema schema = ValidatorUtil.getSchema( rootSchema, schemaPath, nameMatcher ); - if ( schema != null ) { - schemaNameList.addAll( schema.getPath() ); - } - } - } - for ( List schemaNames : schemaNameList ) { - PolyphenyDbSchema schema = ValidatorUtil.getSchema( rootSchema, Iterables.concat( schemaNames, Util.skipLast( names ) ), nameMatcher ); - if ( schema != null ) { - final String name = Util.last( names ); - functions2.addAll( schema.getFunctions( name, true ) ); - } - } - return functions2; - } - - @Override public AlgDataType getNamedType( Identifier typeName ) { PolyphenyDbSchema.TypeEntry typeEntry = ValidatorUtil.getTypeEntry( getRootSchema(), typeName ); @@ -183,7 +126,7 @@ public AlgDataType getNamedType( Identifier typeName ) { @Override public List getAllSchemaObjectNames( List names ) { - final PolyphenyDbSchema schema = ValidatorUtil.getSchema( rootSchema, names, nameMatcher ); + final PolyphenyDbSchema schema = ValidatorUtil.getSchema( rootSchema, names, Wrapper.nameMatcher ); if ( schema == null ) { return ImmutableList.of(); } @@ -235,7 +178,7 @@ public Prepare.PreparingTable getTableForMember( List names ) { @Override public AlgDataType createTypeFromProjection( final AlgDataType type, final List columnNameList ) { - return ValidatorUtil.createTypeFromProjection( type, columnNameList, typeFactory, nameMatcher.isCaseSensitive() ); + return ValidatorUtil.createTypeFromProjection( type, columnNameList, typeFactory, Wrapper.nameMatcher.isCaseSensitive() ); } @@ -257,30 +200,5 @@ public PolyphenyDbSchema getRootSchema() { } - @Override - public AlgDataTypeFactory getTypeFactory() { - return typeFactory; - } - - - @Override - public void registerRules( AlgOptPlanner planner ) { - } - - - @Override - public NameMatcher nameMatcher() { - return nameMatcher; - } - - - @Override - public C unwrap( Class aClass ) { - if ( aClass.isInstance( this ) ) { - return aClass.cast( this ); - } - return null; - } - } diff --git a/core/src/main/java/org/polypheny/db/prepare/Prepare.java b/core/src/main/java/org/polypheny/db/prepare/Prepare.java index 9c95408fce..abf73b7c04 100644 --- a/core/src/main/java/org/polypheny/db/prepare/Prepare.java +++ b/core/src/main/java/org/polypheny/db/prepare/Prepare.java @@ -242,11 +242,6 @@ public interface CatalogReader extends AlgOptSchema, ValidatorCatalogReader, Ope @Override PreparingTable getTableForMember( List names ); - /** - * Returns a catalog reader the same as this one but with a possibly different schema path. - */ - CatalogReader withSchemaPath( List schemaPath ); - @Override PreparingTable getTable( List names ); @@ -351,12 +346,6 @@ public boolean isDml() { } - @Override - public LogicalModify.Operation getTableModOp() { - return null; - } - - @Override public List> getFieldOrigins() { return Collections.singletonList( Collections.nCopies( 4, null ) ); @@ -381,12 +370,6 @@ public interface PreparedResult { */ boolean isDml(); - /** - * Returns the table modification operation corresponding to this statement if it is a table modification statement; - * otherwise null. - */ - LogicalModify.Operation getTableModOp(); - /** * Returns a list describing, for each result field, the origin of the field as a 4-element list * of (database, schema, table, column). 
@@ -414,7 +397,7 @@ public interface PreparedResult { */ public abstract static class PreparedResultImpl implements PreparedResult, Typed { - protected final AlgNode rootRel; + protected final AlgNode rootAlg; protected final AlgDataType parameterRowType; protected final AlgDataType rowType; protected final boolean isDml; @@ -428,14 +411,14 @@ public PreparedResultImpl( AlgDataType parameterRowType, List> fieldOrigins, List collations, - AlgNode rootRel, + AlgNode rootAlg, LogicalModify.Operation tableModOp, boolean isDml ) { this.rowType = Objects.requireNonNull( rowType ); this.parameterRowType = Objects.requireNonNull( parameterRowType ); this.fieldOrigins = Objects.requireNonNull( fieldOrigins ); this.collations = ImmutableList.copyOf( collations ); - this.rootRel = Objects.requireNonNull( rootRel ); + this.rootAlg = Objects.requireNonNull( rootAlg ); this.tableModOp = tableModOp; this.isDml = isDml; } @@ -447,12 +430,6 @@ public boolean isDml() { } - @Override - public LogicalModify.Operation getTableModOp() { - return tableModOp; - } - - @Override public List> getFieldOrigins() { return fieldOrigins; @@ -465,23 +442,9 @@ public AlgDataType getParameterRowType() { } - /** - * Returns the physical row type of this prepared statement. May not be identical to the row type returned by the - * validator; for example, the field names may have been made unique. - */ - public AlgDataType getPhysicalRowType() { - return rowType; - } - - @Override public abstract Type getElementType(); - - public AlgNode getRootRel() { - return rootRel; - } - } } diff --git a/core/src/main/java/org/polypheny/db/schema/AbstractPolyphenyDbSchema.java b/core/src/main/java/org/polypheny/db/schema/AbstractPolyphenyDbSchema.java index e4d61e0bda..d8d1fdb4cb 100644 --- a/core/src/main/java/org/polypheny/db/schema/AbstractPolyphenyDbSchema.java +++ b/core/src/main/java/org/polypheny/db/schema/AbstractPolyphenyDbSchema.java @@ -38,12 +38,24 @@ import com.google.common.collect.ImmutableSortedMap; import com.google.common.collect.ImmutableSortedSet; import com.google.common.collect.Lists; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import java.util.Map; +import java.util.NavigableMap; +import java.util.NavigableSet; +import java.util.Objects; +import java.util.Set; +import java.util.stream.Collectors; import lombok.Getter; import lombok.Setter; -import org.apache.calcite.linq4j.function.Experimental; import org.apache.calcite.linq4j.tree.Expression; -import org.polypheny.db.algebra.type.*; +import org.polypheny.db.algebra.type.AlgDataTypeFactory; import org.polypheny.db.algebra.type.AlgDataTypeFactory.Builder; +import org.polypheny.db.algebra.type.AlgDataTypeFieldImpl; +import org.polypheny.db.algebra.type.AlgDataTypeImpl; +import org.polypheny.db.algebra.type.AlgDataTypeSystem; +import org.polypheny.db.algebra.type.AlgProtoDataType; import org.polypheny.db.catalog.Catalog.NamespaceType; import org.polypheny.db.type.PolyType; import org.polypheny.db.type.PolyTypeFactoryImpl; @@ -52,9 +64,6 @@ import org.polypheny.db.util.NameSet; import org.polypheny.db.util.Pair; -import java.util.*; -import java.util.stream.Collectors; - /** * Schema. @@ -449,26 +458,6 @@ public final NavigableSet getFunctionNames() { } - /** - * Returns tables derived from explicit and implicit functions that take zero parameters. 
- */
-    @Override
-    public final NavigableMap<String, Table> getTablesBasedOnNullaryFunctions() {
-        ImmutableSortedMap.Builder<String, Table> builder = new ImmutableSortedMap.Builder<>( NameSet.COMPARATOR );
-        for ( Map.Entry<String, FunctionEntry> entry : nullaryFunctionMap.map().entrySet() ) {
-            final Function function = entry.getValue().getFunction();
-            if ( function instanceof TableMacro ) {
-                assert function.getParameters().isEmpty();
-                final Table table = ((TableMacro) function).apply( ImmutableList.of() );
-                builder.put( entry.getKey(), table );
-            }
-        }
-        // add tables derived from implicit functions
-        addImplicitTablesBasedOnNullaryFunctionsToBuilder( builder );
-        return builder.build();
-    }
-
-
     /**
      * Returns a table derived from explicit and implicit functions that take zero parameters.
      */
@@ -486,39 +475,6 @@ public final TableEntry getTableBasedOnNullaryFunction( String tableName, boolea
     }
 
-    @Override
-    @Experimental
-    public boolean removeSubSchema( String name ) {
-        return subSchemaMap.remove( name ) != null;
-    }
-
-
-    @Override
-    @Experimental
-    public boolean removeTable( String name ) {
-        return tableMap.remove( name ) != null;
-    }
-
-
-    @Override
-    @Experimental
-    public boolean removeFunction( String name ) {
-        final FunctionEntry remove = nullaryFunctionMap.remove( name );
-        if ( remove == null ) {
-            return false;
-        }
-        functionMap.remove( name, remove );
-        return true;
-    }
-
-
-    @Override
-    @Experimental
-    public boolean removeType( String name ) {
-        return typeMap.remove( name ) != null;
-    }
-
-
     /**
      * Implementation of {@link SchemaPlus} based on a {@link AbstractPolyphenyDbSchema}.
      */
diff --git a/core/src/main/java/org/polypheny/db/schema/PolyphenyDbSchema.java b/core/src/main/java/org/polypheny/db/schema/PolyphenyDbSchema.java
index 0ba4b5763d..aef72d2d36 100644
--- a/core/src/main/java/org/polypheny/db/schema/PolyphenyDbSchema.java
+++ b/core/src/main/java/org/polypheny/db/schema/PolyphenyDbSchema.java
@@ -21,7 +21,6 @@ import java.util.NavigableMap;
 import java.util.NavigableSet;
 import java.util.Objects;
-import org.apache.calcite.linq4j.function.Experimental;
 import org.apache.calcite.linq4j.tree.Expression;
 import org.apache.calcite.linq4j.tree.Expressions;
 import org.polypheny.db.adapter.DataContext;
@@ -83,22 +82,8 @@ static PolyphenyDbSchema from( SchemaPlus plus ) {
     NavigableSet<String> getFunctionNames();
 
-    NavigableMap<String, Table> getTablesBasedOnNullaryFunctions();
-
     TableEntry getTableBasedOnNullaryFunction( String tableName, boolean caseSensitive );
 
-    @Experimental
-    boolean removeSubSchema( String name );
-
-    @Experimental
-    boolean removeTable( String name );
-
-    @Experimental
-    boolean removeFunction( String name );
-
-    @Experimental
-    boolean removeType( String name );
-
     NameMap<TableEntry> getTableMap();
 
diff --git a/core/src/main/java/org/polypheny/db/schema/Wrapper.java b/core/src/main/java/org/polypheny/db/schema/Wrapper.java
index da803f2e5b..c2e6df31ff 100644
--- a/core/src/main/java/org/polypheny/db/schema/Wrapper.java
+++ b/core/src/main/java/org/polypheny/db/schema/Wrapper.java
@@ -34,14 +34,23 @@
 package org.polypheny.db.schema;
 
+import org.polypheny.db.config.RuntimeConfig;
+import org.polypheny.db.util.NameMatcher;
+import org.polypheny.db.util.NameMatchers;
+
 
 /**
  * Mix-in interface that allows you to find sub-objects.
  */
 public interface Wrapper {
 
-    /**
-     * Finds an instance of an interface implemented by this object, or returns null if this object does not support that interface.
- */
-    <C> C unwrap( Class<C> aClass );
+    NameMatcher nameMatcher = NameMatchers.withCaseSensitive( RuntimeConfig.RELATIONAL_CASE_SENSITIVE.getBoolean() );
+
+    default <C> C unwrap( Class<C> aClass ) {
+        if ( aClass.isInstance( this ) ) {
+            return aClass.cast( this );
+        }
+        return null;
+    }
+
 }
diff --git a/core/src/main/java/org/polypheny/db/util/ValidatorUtil.java b/core/src/main/java/org/polypheny/db/util/ValidatorUtil.java
index 19fb10f567..bfd6abcc68 100644
--- a/core/src/main/java/org/polypheny/db/util/ValidatorUtil.java
+++ b/core/src/main/java/org/polypheny/db/util/ValidatorUtil.java
@@ -303,11 +303,11 @@ public static TableEntry getTableEntry( CatalogReader catalogReader, List
 Column (Relational), does not exist (Graph), Field (Document)
  */
 @Slf4j
-public class PolyCatalog implements SerializableCatalog {
+public class PolyCatalog implements SerializableCatalog, CatalogReader {
 
     @Getter
-    BinarySerializer<PolyCatalog> serializer = SerializableCatalog.builder.get().build( PolyCatalog.class );
+    public final BinarySerializer<PolyCatalog> serializer = SerializableCatalog.builder.get().build( PolyCatalog.class );
 
     @Serialize
     public final RelationalCatalog relational;
@@ -59,30 +70,38 @@ public class PolyCatalog implements SerializableCatalog {
     public final DocumentCatalog document;
 
     private final ImmutableList<ModelCatalog> catalogs;
 
+    @Serialize
+    public final Map<Long, CatalogUser> users;
 
-    private final Map<Long, CatalogUser> users = new HashMap<>();
-
-    private final Map<Long, CatalogModelMapping> mappings = new HashMap<>();
+    @Serialize
+    public final Map<Long, CatalogDatabase> databases;
 
-    private final AtomicLong namespaceIdBuilder = new AtomicLong( 0 );
-    private final AtomicLong entityIdBuilder = new AtomicLong( 0 );
+    @Serialize
+    public final Map<Long, CatalogModelMapping> mappings;
 
-    private final AtomicLong fieldIdBuilder = new AtomicLong( 0 );
+    private final IdBuilder idBuilder = new IdBuilder();
 
 
     public PolyCatalog() {
-        this( new DocumentCatalog(), new GraphCatalog(), new RelationalCatalog() );
+        this( new DocumentCatalog(), new GraphCatalog(), new RelationalCatalog(), new ConcurrentHashMap<>(), new ConcurrentHashMap<>(), new ConcurrentHashMap<>() );
     }
 
 
-    private PolyCatalog(
+    public PolyCatalog(
             @Deserialize("document") DocumentCatalog document,
             @Deserialize("graph") GraphCatalog graph,
-            @Deserialize("relational") RelationalCatalog relational ) {
+            @Deserialize("relational") RelationalCatalog relational,
+            @Deserialize("users") Map<Long, CatalogUser> users,
+            @Deserialize("databases") Map<Long, CatalogDatabase> databases,
+            @Deserialize("mappings") Map<Long, CatalogModelMapping> mappings ) {
         this.document = document;
         this.graph = graph;
         this.relational = relational;
+        this.users = users;
+        this.databases = databases;
+        this.mappings = mappings;
+
         catalogs = ImmutableList.of( this.relational, this.graph, this.document );
     }
@@ -99,19 +118,36 @@ public void rollback() {
     }
 
-    public long addNamespace( String name, long databaseId, int ownerId, NamespaceType namespaceType ) {
-        long id = namespaceIdBuilder.getAndIncrement();
+    public long addUser( @NonNull String name ) {
+        long id = idBuilder.getNewUserId();
+
+        users.put( id, new CatalogUser( id, name ) );
+
+        return id;
+    }
+
+
+    public long addDatabase( String name, long ownerId ) {
+        long id = idBuilder.getNewDatabaseId();
+
+        databases.put( id, new CatalogDatabase( id, name, ownerId ) );
+        return id;
+    }
+
+
+    public long addNamespace( String name, long databaseId, long ownerId, NamespaceType namespaceType ) {
+        long id = idBuilder.getNewNamespaceId();
         CatalogModelMapping mapping = null;
 
         switch ( namespaceType ) {
             case RELATIONAL:
-                mapping = addRelationalNamespace( id, name, databaseId, namespaceType );
+                mapping = addRelationalNamespace( id, name, databaseId, namespaceType, ownerId );
                 break;
             case DOCUMENT:
-                mapping = addDocumentNamespace( id, name, databaseId, namespaceType );
+                mapping = addDocumentNamespace( id, name, databaseId, namespaceType, ownerId );
                 break;
             case GRAPH:
-                mapping = addGraphNamespace( id, name, databaseId, namespaceType );
+                mapping = addGraphNamespace( id, name, databaseId, namespaceType, ownerId );
                 break;
         }
@@ -121,15 +157,15 @@ public long addNamespace( String name, long databaseId, int ownerId, NamespaceTy
     }
 
-    private CatalogModelMapping addGraphNamespace( long id, String name, long databaseId, NamespaceType namespaceType ) {
+    private CatalogModelMapping addGraphNamespace( long id, String name, long databaseId, NamespaceType namespaceType, long ownerId ) {
         // add to model catalog
         graph.addGraph( id, name, databaseId, namespaceType );
 
         // add substitutions for other models
-        long nodeId = entityIdBuilder.getAndIncrement();
-        long nPropertiesId = entityIdBuilder.getAndIncrement();
-        long edgeId = entityIdBuilder.getAndIncrement();
-        long ePropertiesId = entityIdBuilder.getAndIncrement();
+        long nodeId = idBuilder.getNewEntityId();
+        long nPropertiesId = idBuilder.getNewEntityId();
+        long edgeId = idBuilder.getNewEntityId();
+        long ePropertiesId = idBuilder.getNewEntityId();
 
         // add relational
         relational.addSchema( id, name, databaseId, namespaceType );
@@ -149,7 +185,7 @@ private CatalogModelMapping addGraphNamespace( long id, String name, long databa
     }
 
-    private CatalogModelMapping addDocumentNamespace( long id, String name, long databaseId, NamespaceType namespaceType ) {
+    private CatalogModelMapping addDocumentNamespace( long id, String name, long databaseId, NamespaceType namespaceType, long ownerId ) {
         // add to model catalog
         document.addDatabase( id, name, databaseId, namespaceType );
@@ -161,7 +197,7 @@ private CatalogModelMapping addDocumentNamespace( long id, String name, long dat
     }
 
-    private CatalogModelMapping addRelationalNamespace( long id, String name, long databaseId, NamespaceType namespaceType ) {
+    private CatalogModelMapping addRelationalNamespace( long id, String name, long databaseId, NamespaceType namespaceType, long ownerId ) {
         // add to model catalog
         relational.addSchema( id, name, databaseId, namespaceType );
@@ -174,7 +210,7 @@
     public long addEntity( String name, long namespaceId, NamespaceType type, int ownerId ) {
-        long id = entityIdBuilder.getAndIncrement();
+        long id = idBuilder.getNewEntityId();
 
         switch ( type ) {
             case RELATIONAL:
@@ -214,7 +250,7 @@ private void addRelationalEntity( long id, String name, long namespaceId ) {
 
     public long addField( String name, long entityId, AlgDataType type, NamespaceType namespaceType ) {
-        long id = fieldIdBuilder.getAndIncrement();
+        long id = idBuilder.getNewFieldId();
 
         switch ( namespaceType ) {
             case RELATIONAL:
@@ -235,4 +271,69 @@ private void addColumn( long id, String name, long entityId, AlgDataType type )
     }
 
+
+    @Override
+    public void lookupOperatorOverloads( Identifier opName, FunctionCategory category, Syntax syntax, List<Operator> operatorList ) {
+
+    }
+
+
+    @Override
+    public List<Operator> getOperatorList() {
+        return null;
+    }
+
+
+    @Override
+    public AlgDataType getNamedType( Identifier typeName ) {
+        return null;
+    }
+
+
+    @Override
+    public List<Moniker> getAllSchemaObjectNames( List<String> names ) {
+        return null;
+    }
+
+
+    @Override
+    public List<List<String>> getSchemaPaths() {
+        return null;
+    }
+
+
+    @Override
+    public AlgDataType createTypeFromProjection( AlgDataType type, List<String>
columnNameList ) { + return null; + } + + + @Override + public PolyphenyDbSchema getRootSchema() { + return null; + } + + + @Override + public PreparingTable getTableForMember( List names ) { + return null; + } + + + @Override + public PreparingTable getTable( List names ) { + return null; + } + + + @Override + public AlgOptTable getCollection( List names ) { + return null; + } + + + @Override + public Graph getGraph( String name ) { + return null; + } + } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/entities/CatalogUser.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/entities/CatalogUser.java new file mode 100644 index 0000000000..30902a6024 --- /dev/null +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/entities/CatalogUser.java @@ -0,0 +1,38 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.catalog.entities; + +import io.activej.serializer.annotations.Deserialize; +import io.activej.serializer.annotations.Serialize; + +public class CatalogUser { + + @Serialize + public final String name; + + @Serialize + public final long id; + + + public CatalogUser( + @Deserialize("id") long id, + @Deserialize("name") String name ) { + this.id = id; + this.name = name; + } + +} diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/mappings/CatalogDocumentMapping.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/mappings/CatalogDocumentMapping.java index a3dbf8fe55..4c0e18307d 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/mappings/CatalogDocumentMapping.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/mappings/CatalogDocumentMapping.java @@ -16,12 +16,16 @@ package org.polypheny.db.catalog.mappings; -public class CatalogDocumentMapping extends CatalogModelMapping { +import io.activej.serializer.annotations.Deserialize; +import io.activej.serializer.annotations.Serialize; - private final long id; +public class CatalogDocumentMapping implements CatalogModelMapping { + @Serialize + public final long id; - public CatalogDocumentMapping( long id ) { + + public CatalogDocumentMapping( @Deserialize("id") long id ) { this.id = id; } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/mappings/CatalogGraphMapping.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/mappings/CatalogGraphMapping.java index 2b551fdf0d..71f96b863c 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/mappings/CatalogGraphMapping.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/mappings/CatalogGraphMapping.java @@ -16,16 +16,40 @@ package org.polypheny.db.catalog.mappings; -import lombok.AllArgsConstructor; -@AllArgsConstructor -public class CatalogGraphMapping extends CatalogModelMapping { +import io.activej.serializer.annotations.Deserialize; +import io.activej.serializer.annotations.Serialize; - private final long id; - 
private final long nodesId; - private final long nodesPropertiesId; - private final long edgesId; - private final long edgesPropertiesId; +public class CatalogGraphMapping implements CatalogModelMapping { + + @Serialize + public final long id; + + @Serialize + public final long nodesId; + + @Serialize + public final long nodesPropertiesId; + + @Serialize + public final long edgesId; + + @Serialize + public final long edgesPropertiesId; + + + public CatalogGraphMapping( + @Deserialize("id") long id, + @Deserialize("nodesId") long nodesId, + @Deserialize("nodesPropertiesId") long nodesPropertiesId, + @Deserialize("edgesId") long edgesId, + @Deserialize("edgesPropertiesId") long edgesPropertiesId ) { + this.id = id; + this.nodesId = nodesId; + this.nodesPropertiesId = nodesPropertiesId; + this.edgesId = edgesId; + this.edgesPropertiesId = edgesPropertiesId; + } @Override diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/mappings/CatalogModelMapping.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/mappings/CatalogModelMapping.java index ee1e30eece..ee3cb9a9dd 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/mappings/CatalogModelMapping.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/mappings/CatalogModelMapping.java @@ -16,8 +16,9 @@ package org.polypheny.db.catalog.mappings; -public abstract class CatalogModelMapping { +public interface CatalogModelMapping { + + String getGraphLabel(); - public abstract String getGraphLabel(); } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/mappings/CatalogRelationalMapping.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/mappings/CatalogRelationalMapping.java index c8302523aa..73dc6ed7e7 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/mappings/CatalogRelationalMapping.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/mappings/CatalogRelationalMapping.java @@ -16,12 +16,16 @@ package org.polypheny.db.catalog.mappings; -public class CatalogRelationalMapping extends CatalogModelMapping { +import io.activej.serializer.annotations.Deserialize; +import io.activej.serializer.annotations.Serialize; - private final long id; +public class CatalogRelationalMapping implements CatalogModelMapping { + @Serialize + public final long id; - public CatalogRelationalMapping( long id ) { + + public CatalogRelationalMapping( @Deserialize("id") long id ) { this.id = id; } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/relational/CatalogSchema.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/relational/CatalogSchema.java index b0031c2b4d..819128d941 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/relational/CatalogSchema.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/relational/CatalogSchema.java @@ -16,16 +16,28 @@ package org.polypheny.db.catalog.relational; -import lombok.AllArgsConstructor; +import io.activej.serializer.annotations.Deserialize; +import io.activej.serializer.annotations.Serialize; -@AllArgsConstructor public class CatalogSchema { + @Serialize + public final long id; - private final long id; + @Serialize + public final String name; - private final String name; - private final long databaseId; + @Serialize + public final long databaseId; + public CatalogSchema( + @Deserialize("id") long id, + @Deserialize("name") String name, + @Deserialize("databaseId") long databaseId ) { + this.id = id; + 
this.name = name;
+        this.databaseId = databaseId;
+    }
+
 }
diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/relational/RelationalCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/relational/RelationalCatalog.java
index 9df32dfb5d..fe129e36a1 100644
--- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/relational/RelationalCatalog.java
+++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/relational/RelationalCatalog.java
@@ -17,6 +17,8 @@
 package org.polypheny.db.catalog.relational;
 
 import io.activej.serializer.BinarySerializer;
+import io.activej.serializer.annotations.Deserialize;
+import io.activej.serializer.annotations.Serialize;
 import java.util.HashMap;
 import java.util.Map;
 import lombok.Getter;
@@ -28,17 +30,35 @@ public class RelationalCatalog implements ModelCatalog, SerializableCatalog {
 
     @Getter
-    BinarySerializer<RelationalCatalog> serializer = SerializableCatalog.builder.get().build( RelationalCatalog.class );
+    public final BinarySerializer<RelationalCatalog> serializer = SerializableCatalog.builder.get().build( RelationalCatalog.class );
 
-    private Map<Long, CatalogSchema> schemas = new HashMap<>();
+    @Serialize
+    public final Map<Long, CatalogSchema> schemas;
 
-    private Map<Long, CatalogTable> tables = new HashMap<>();
+    @Serialize
+    public final Map<Long, CatalogTable> tables;
 
-    private Map<Long, CatalogColumn> columns = new HashMap<>();
+    @Serialize
+    public final Map<Long, CatalogColumn> columns;
 
     private boolean openChanges = false;
 
 
+    public RelationalCatalog(
+            @Deserialize("schemas") Map<Long, CatalogSchema> schemas,
+            @Deserialize("tables") Map<Long, CatalogTable> tables,
+            @Deserialize("columns") Map<Long, CatalogColumn> columns ) {
+        this.schemas = schemas;
+        this.tables = tables;
+        this.columns = columns;
+    }
+
+
+    public RelationalCatalog() {
+        this( new HashMap<>(), new HashMap<>(), new HashMap<>() );
+    }
+
+
     @Override
     public void commit() {
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlDropObject.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlDropObject.java
index 6e252f17c7..36d497dbaa 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlDropObject.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlDropObject.java
@@ -66,40 +66,4 @@ public void unparse( SqlWriter writer, int leftPrec, int rightPrec ) {
         }
         name.unparse( writer, leftPrec, rightPrec );
     }
-
-
-    /*
-    public void execute( Context context ) {
-        final List<String> path = context.getDefaultSchemaPath();
-        PolyphenyDbSchema schema = context.getRootSchema();
-        for ( String p : path ) {
-            schema = schema.getSubSchema( p, true );
-        }
-        final boolean existed;
-        switch ( getKind() ) {
-            case DROP_TABLE:
-            case DROP_MATERIALIZED_VIEW:
-                existed = schema.removeTable( name.getSimple() );
-                if ( !existed && !ifExists ) {
-                    throw SqlUtil.newContextException( name.getParserPosition(), RESOURCE.tableNotFound( name.getSimple() ) );
-                }
-                break;
-            case DROP_VIEW:
-                // Not quite right: removes any other functions with the same name
-                existed = schema.removeFunction( name.getSimple() );
-                if ( !existed && !ifExists ) {
-                    throw SqlUtil.newContextException( name.getParserPosition(), RESOURCE.viewNotFound( name.getSimple() ) );
-                }
-                break;
-            case DROP_TYPE:
-                existed = schema.removeType( name.getSimple() );
-                if ( !existed && !ifExists ) {
-                    throw SqlUtil.newContextException( name.getParserPosition(), RESOURCE.typeNotFound( name.getSimple() ) );
-                }
-                break;
-            case OTHER_DDL:
-            default:
-                throw new AssertionError( getKind() );
-        }
-    }*/
 }
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/util/ListSqlOperatorTable.java
b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/util/ListSqlOperatorTable.java index 2a1f9b565d..d4eae8d867 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/util/ListSqlOperatorTable.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/util/ListSqlOperatorTable.java @@ -19,7 +19,7 @@ import java.util.ArrayList; import java.util.List; -import java.util.stream.Collectors; +import lombok.Getter; import org.polypheny.db.algebra.constant.FunctionCategory; import org.polypheny.db.algebra.constant.Syntax; import org.polypheny.db.algebra.operators.OperatorTable; @@ -34,6 +34,7 @@ */ public class ListSqlOperatorTable implements OperatorTable { + @Getter private final List operatorList; @@ -77,10 +78,4 @@ protected static FunctionCategory category( SqlOperator operator ) { } } - - @Override - public List getOperatorList() { - return operatorList.stream().map( o -> o ).collect( Collectors.toList() ); - } - } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/AbstractNamespace.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/AbstractNamespace.java index 7e6d0d2e5c..cedabb514d 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/AbstractNamespace.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/AbstractNamespace.java @@ -161,7 +161,7 @@ public SqlValidatorNamespace lookupChild( String name ) { @Override public boolean fieldExists( String name ) { final AlgDataType rowType = getRowType(); - return validator.catalogReader.nameMatcher().field( rowType, name ) != null; + return validator.catalogReader.nameMatcher.field( rowType, name ) != null; } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/DelegatingScope.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/DelegatingScope.java index 6dd7a076b3..dcbc42ed10 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/DelegatingScope.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/DelegatingScope.java @@ -231,7 +231,7 @@ public SqlQualified fullyQualify( SqlIdentifier identifier ) { } final SqlIdentifier previous = identifier; - final NameMatcher nameMatcher = validator.catalogReader.nameMatcher(); + final NameMatcher nameMatcher = validator.catalogReader.nameMatcher; String columnName; final String tableName; final SqlValidatorNamespace namespace; diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/IdentifierNamespace.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/IdentifierNamespace.java index ab9fbb36a5..f9a9298726 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/IdentifierNamespace.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/IdentifierNamespace.java @@ -94,7 +94,7 @@ protected static Pair split( SqlNode node ) { private SqlValidatorNamespace resolveImpl( SqlIdentifier id ) { - final NameMatcher nameMatcher = validator.catalogReader.nameMatcher(); + final NameMatcher nameMatcher = validator.catalogReader.nameMatcher; final SqlValidatorScope.ResolvedImpl resolved = new SqlValidatorScope.ResolvedImpl(); final List names = SqlIdentifier.toStar( id.names ); try { diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/ListScope.java 
b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/ListScope.java index d3e537b820..45028b96bc 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/ListScope.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/ListScope.java @@ -134,7 +134,7 @@ public void findAliases( Collection result ) { @Override public Pair findQualifyingTableName( final String columnName, SqlNode ctx ) { - final NameMatcher nameMatcher = validator.catalogReader.nameMatcher(); + final NameMatcher nameMatcher = validator.catalogReader.nameMatcher; final Map map = findQualifyingTableNames( columnName, ctx, nameMatcher ); switch ( map.size() ) { case 0: @@ -215,7 +215,7 @@ public void resolve( List names, NameMatcher nameMatcher, boolean deep, @Override public AlgDataType resolveColumn( String columnName, SqlNode ctx ) { - final NameMatcher nameMatcher = validator.catalogReader.nameMatcher(); + final NameMatcher nameMatcher = validator.catalogReader.nameMatcher; int found = 0; AlgDataType type = null; for ( ScopeChild child : children ) { diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/MatchRecognizeScope.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/MatchRecognizeScope.java index bba482c9cd..599753fdef 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/MatchRecognizeScope.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/MatchRecognizeScope.java @@ -47,7 +47,7 @@ public class MatchRecognizeScope extends ListScope { public MatchRecognizeScope( SqlValidatorScope parent, SqlMatchRecognize matchRecognize ) { super( parent ); this.matchRecognize = matchRecognize; - patternVars = validator.getCatalogReader().nameMatcher().createSet(); + patternVars = validator.getCatalogReader().nameMatcher.createSet(); patternVars.add( STAR ); } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/OrderByScope.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/OrderByScope.java index d7b4bf6ec4..27191f69d6 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/OrderByScope.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/OrderByScope.java @@ -77,7 +77,7 @@ public SqlQualified fullyQualify( SqlIdentifier identifier ) { final SqlValidatorNamespace selectNs = validator.getSqlNamespace( select ); final AlgDataType rowType = selectNs.getRowType(); - final NameMatcher nameMatcher = validator.catalogReader.nameMatcher(); + final NameMatcher nameMatcher = validator.catalogReader.nameMatcher; final AlgDataTypeField field = nameMatcher.field( rowType, name ); final int aliasCount = aliasCount( nameMatcher, name ); if ( aliasCount > 1 ) { @@ -112,7 +112,7 @@ private int aliasCount( NameMatcher nameMatcher, String name ) { public AlgDataType resolveColumn( String name, SqlNode ctx ) { final SqlValidatorNamespace selectNs = validator.getSqlNamespace( select ); final AlgDataType rowType = selectNs.getRowType(); - final NameMatcher nameMatcher = validator.catalogReader.nameMatcher(); + final NameMatcher nameMatcher = validator.catalogReader.nameMatcher; final AlgDataTypeField field = nameMatcher.field( rowType, name ); if ( field != null ) { return field.getType(); diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorImpl.java 
b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorImpl.java index 7b63e09d8f..aa359f75c3 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorImpl.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorImpl.java @@ -368,7 +368,7 @@ public SqlNodeList expandStar( SqlNodeList selectList, SqlSelect select, boolean select, Util.first( originalType, unknownType ), list, - catalogReader.nameMatcher().createSet(), + catalogReader.nameMatcher.createSet(), types, includeSystemVars ); } @@ -545,7 +545,7 @@ private boolean expandStar( List selectItems, Set aliases, List default: final SqlIdentifier prefixId = identifier.skipLast( 1 ); final SqlValidatorScope.ResolvedImpl resolved = new SqlValidatorScope.ResolvedImpl(); - final NameMatcher nameMatcher = scope.validator.catalogReader.nameMatcher(); + final NameMatcher nameMatcher = scope.validator.catalogReader.nameMatcher; scope.resolve( prefixId.names, nameMatcher, true, resolved ); if ( resolved.count() == 0 ) { // e.g. "select s.t.* from e" or "select r.* from e" @@ -774,7 +774,7 @@ public final void lookupNameCompletionHints( SqlValidatorScope scope, List naturalColumnNames = SqlValidatorUtil.deriveNaturalJoinColumnList( nameMatcher, leftRowType, rightRowType ); // Check compatibility of the chosen columns. @@ -3143,7 +3143,7 @@ private AlgDataType validateUsingCol( SqlIdentifier id, SqlNode leftOrRight ) { String name = id.names.get( 0 ); final SqlValidatorNamespace namespace = getSqlNamespace( leftOrRight ); final AlgDataType rowType = namespace.getRowType(); - final NameMatcher nameMatcher = catalogReader.nameMatcher(); + final NameMatcher nameMatcher = catalogReader.nameMatcher; final AlgDataTypeField field = nameMatcher.field( rowType, name ); if ( field != null ) { if ( nameMatcher.frequency( rowType.getFieldNames(), name ) > 1 ) { @@ -3192,7 +3192,7 @@ protected void validateSelect( SqlSelect select, AlgDataType targetRowType ) { // Make sure that items in FROM clause have distinct aliases. final SelectScope fromScope = (SelectScope) getFromScope( select ); List names = fromScope.getChildNames(); - if ( !catalogReader.nameMatcher().isCaseSensitive() ) { + if ( !catalogReader.nameMatcher.isCaseSensitive() ) { names = names.stream().map( s -> s.toUpperCase( Locale.ROOT ) ).collect( Collectors.toList() ); } final int duplicateAliasOrdinal = Util.firstDuplicate( names ); @@ -3424,7 +3424,7 @@ private Table findTable( String alias ) { } for ( ScopeChild child : tableScope.children ) { - if ( catalogReader.nameMatcher().matches( child.name, alias ) ) { + if ( catalogReader.nameMatcher.matches( child.name, alias ) ) { names = ((SqlIdentifier) child.namespace.getNode()).names; break; } @@ -3432,7 +3432,7 @@ private Table findTable( String alias ) { if ( names == null || names.size() == 0 ) { return null; } else if ( names.size() == 1 ) { - return findTable( catalogReader.getRootSchema(), names.get( 0 ), catalogReader.nameMatcher().isCaseSensitive() ); + return findTable( catalogReader.getRootSchema(), names.get( 0 ), catalogReader.nameMatcher.isCaseSensitive() ); } PolyphenyDbSchema.TableEntry entry = ValidatorUtil.getTableEntry( (CatalogReader) catalogReader, names ); @@ -3687,7 +3687,7 @@ public void validateWithItem( SqlWithItem withItem ) { public void validateSequenceValue( SqlValidatorScope scope, SqlIdentifier id ) { // Resolve identifier as a table. 
final SqlValidatorScope.ResolvedImpl resolved = new SqlValidatorScope.ResolvedImpl(); - scope.resolveTable( id.names, catalogReader.nameMatcher(), SqlValidatorScope.Path.EMPTY, resolved ); + scope.resolveTable( id.names, catalogReader.nameMatcher, SqlValidatorScope.Path.EMPTY, resolved ); if ( resolved.count() != 1 ) { throw newValidationError( id, RESOURCE.tableNameNotFound( id.toString() ) ); } @@ -4658,7 +4658,7 @@ public void setOriginal( SqlNode expr, SqlNode original ) { SqlValidatorNamespace lookupFieldNamespace( AlgDataType rowType, String name ) { - final NameMatcher nameMatcher = catalogReader.nameMatcher(); + final NameMatcher nameMatcher = catalogReader.nameMatcher; final AlgDataTypeField field = nameMatcher.field( rowType, name ); if ( field == null ) { return null; @@ -4892,7 +4892,7 @@ private SqlNode navigationInMeasure( SqlNode node, boolean allRows ) { private void validateDefinitions( SqlMatchRecognize mr, MatchRecognizeScope scope ) { - final Set aliases = catalogReader.nameMatcher().createSet(); + final Set aliases = catalogReader.nameMatcher.createSet(); for ( SqlNode item : mr.getPatternDefList().getSqlList() ) { final String alias = alias( item ); if ( !aliases.add( alias ) ) { @@ -5429,7 +5429,7 @@ public AlgDataType visit( Identifier id ) { // REVIEW jvs: The name resolution rules used here are supposed to match SQL:2003 Part 2 Section 6.6 (identifier chain), but we don't currently have enough // information to get everything right. In particular, routine parameters are currently looked up via resolve; we could do a better job if they were looked up via resolveColumn. - final NameMatcher nameMatcher = catalogReader.nameMatcher(); + final NameMatcher nameMatcher = catalogReader.nameMatcher; final SqlValidatorScope.ResolvedImpl resolved = new SqlValidatorScope.ResolvedImpl(); scope.resolve( id.getNames().subList( 0, i ), nameMatcher, false, resolved ); if ( resolved.count() == 1 ) { @@ -5468,7 +5468,7 @@ public AlgDataType visit( Identifier id ) { name = "*"; field = null; } else { - final NameMatcher nameMatcher = catalogReader.nameMatcher(); + final NameMatcher nameMatcher = catalogReader.nameMatcher; field = nameMatcher.field( type, name ); } if ( field == null ) { @@ -5647,7 +5647,7 @@ public SqlNode visit( Identifier id ) { String alias = id.getSimple(); final SqlValidatorNamespace selectNs = getSqlNamespace( select ); final AlgDataType rowType = selectNs.getRowTypeSansSystemColumns(); - final NameMatcher nameMatcher = catalogReader.nameMatcher(); + final NameMatcher nameMatcher = catalogReader.nameMatcher; AlgDataTypeField field = nameMatcher.field( rowType, alias ); if ( field != null ) { return nthSelectItem( field.getIndex(), id.getPos() ); @@ -5696,7 +5696,7 @@ public SqlNode visit( Identifier id ) { : validator.getConformance().isGroupByAlias()) ) { String name = id.getSimple(); SqlNode expr = null; - final NameMatcher nameMatcher = validator.catalogReader.nameMatcher(); + final NameMatcher nameMatcher = validator.catalogReader.nameMatcher; int n = 0; for ( Node s : select.getSqlSelectList() ) { final String alias = SqlValidatorUtil.getAlias( (SqlNode) s, -1 ); @@ -6173,7 +6173,7 @@ private class Permute { private AlgDataTypeField field( String name ) { - return catalogReader.nameMatcher().field( rowType, name ); + return catalogReader.nameMatcher.field( rowType, name ); } @@ -6184,7 +6184,7 @@ private List usingNames( SqlJoin join ) { switch ( join.getConditionType() ) { case USING: final ImmutableList.Builder list = ImmutableList.builder(); - final Set 
names = catalogReader.nameMatcher().createSet(); + final Set names = catalogReader.nameMatcher.createSet(); for ( Node node : (SqlNodeList) join.getCondition() ) { final String name = ((SqlIdentifier) node).getSimple(); if ( names.add( name ) ) { @@ -6196,7 +6196,7 @@ private List usingNames( SqlJoin join ) { if ( join.isNatural() ) { final AlgDataType t0 = getValidatedNodeType( join.getLeft() ); final AlgDataType t1 = getValidatedNodeType( join.getRight() ); - return SqlValidatorUtil.deriveNaturalJoinColumnList( catalogReader.nameMatcher(), t0, t1 ); + return SqlValidatorUtil.deriveNaturalJoinColumnList( catalogReader.nameMatcher, t0, t1 ); } } return null; diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorUtil.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorUtil.java index 1fac080f17..75cd97a235 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorUtil.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorUtil.java @@ -99,7 +99,7 @@ public static AlgOptTable getAlgOptTable( SqlValidatorNamespace namespace, Prepa if ( resolvedNamespace.isWrapperFor( TableNamespace.class ) ) { final TableNamespace tableNamespace = resolvedNamespace.unwrap( TableNamespace.class ); final ValidatorTable validatorTable = tableNamespace.getTable(); - final AlgDataTypeFactory typeFactory = catalogReader.getTypeFactory(); + final AlgDataTypeFactory typeFactory = AlgDataTypeFactory.DEFAULT; final List extendedFields = dmlNamespace.extendList == null ? ImmutableList.of() @@ -284,7 +284,7 @@ public static AlgDataTypeField getTargetField( AlgDataType rowType, AlgDataTypeF final Table t = table == null ? 
null : table.unwrap( Table.class ); if ( !(t instanceof CustomColumnResolvingTable) ) { - final NameMatcher nameMatcher = catalogReader.nameMatcher(); + final NameMatcher nameMatcher = catalogReader.nameMatcher; AlgDataTypeField typeField = nameMatcher.field( rowType, id.getSimple() ); if ( typeField == null && isDocument ) { @@ -312,7 +312,7 @@ public static AlgDataTypeField getTargetField( AlgDataType rowType, AlgDataTypeF */ public static SqlValidatorNamespace lookup( SqlValidatorScope scope, List names ) { assert names.size() > 0; - final NameMatcher nameMatcher = scope.getValidator().getCatalogReader().nameMatcher(); + final NameMatcher nameMatcher = scope.getValidator().getCatalogReader().nameMatcher; final SqlValidatorScope.ResolvedImpl resolved = new SqlValidatorScope.ResolvedImpl(); scope.resolve( ImmutableList.of( names.get( 0 ) ), nameMatcher, false, resolved ); assert resolved.count() == 1; @@ -505,7 +505,7 @@ private static ImmutableBitSet analyzeGroupExpr( SqlValidatorScope scope, GroupA String originalRelName = expr.names.get( 0 ); String originalFieldName = expr.names.get( 1 ); - final NameMatcher nameMatcher = scope.getValidator().getCatalogReader().nameMatcher(); + final NameMatcher nameMatcher = scope.getValidator().getCatalogReader().nameMatcher; final SqlValidatorScope.ResolvedImpl resolved = new SqlValidatorScope.ResolvedImpl(); scope.resolve( ImmutableList.of( originalRelName ), nameMatcher, false, resolved ); diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/sql2alg/SqlToAlgConverter.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/sql2alg/SqlToAlgConverter.java index 832b11f4f0..1ed8c2d9f7 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/sql2alg/SqlToAlgConverter.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/sql2alg/SqlToAlgConverter.java @@ -40,15 +40,76 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; +import java.lang.reflect.Type; +import java.math.BigDecimal; +import java.util.AbstractList; +import java.util.ArrayDeque; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.Deque; +import java.util.EnumSet; +import java.util.HashMap; +import java.util.HashSet; +import java.util.LinkedHashSet; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Set; +import java.util.TreeSet; +import java.util.function.Supplier; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; import org.apache.calcite.avatica.util.Spaces; import org.apache.calcite.linq4j.Ord; -import org.polypheny.db.algebra.*; +import org.polypheny.db.algebra.AlgCollation; +import org.polypheny.db.algebra.AlgCollationTraitDef; +import org.polypheny.db.algebra.AlgCollations; +import org.polypheny.db.algebra.AlgDecorrelator; +import org.polypheny.db.algebra.AlgFieldCollation; import org.polypheny.db.algebra.AlgFieldCollation.Direction; -import org.polypheny.db.algebra.constant.*; -import org.polypheny.db.algebra.core.*; +import org.polypheny.db.algebra.AlgFieldTrimmer; +import org.polypheny.db.algebra.AlgNode; +import org.polypheny.db.algebra.AlgRoot; +import org.polypheny.db.algebra.AlgStructuredTypeFlattener; +import org.polypheny.db.algebra.SingleAlg; +import org.polypheny.db.algebra.constant.ExplainFormat; +import org.polypheny.db.algebra.constant.ExplainLevel; +import org.polypheny.db.algebra.constant.JoinConditionType; +import 
org.polypheny.db.algebra.constant.JoinType; +import org.polypheny.db.algebra.constant.Kind; +import org.polypheny.db.algebra.constant.Monotonicity; +import org.polypheny.db.algebra.constant.SemiJoinType; +import org.polypheny.db.algebra.core.AggregateCall; +import org.polypheny.db.algebra.core.AlgFactories; import org.polypheny.db.algebra.core.AlgFactories.FilterFactory; +import org.polypheny.db.algebra.core.Collect; +import org.polypheny.db.algebra.core.CorrelationId; +import org.polypheny.db.algebra.core.Filter; +import org.polypheny.db.algebra.core.Join; +import org.polypheny.db.algebra.core.JoinAlgType; +import org.polypheny.db.algebra.core.JoinInfo; +import org.polypheny.db.algebra.core.Project; +import org.polypheny.db.algebra.core.Sample; +import org.polypheny.db.algebra.core.Sort; +import org.polypheny.db.algebra.core.Uncollect; +import org.polypheny.db.algebra.core.Values; import org.polypheny.db.algebra.fun.AggFunction; -import org.polypheny.db.algebra.logical.relational.*; +import org.polypheny.db.algebra.logical.relational.LogicalAggregate; +import org.polypheny.db.algebra.logical.relational.LogicalCorrelate; +import org.polypheny.db.algebra.logical.relational.LogicalFilter; +import org.polypheny.db.algebra.logical.relational.LogicalIntersect; +import org.polypheny.db.algebra.logical.relational.LogicalJoin; +import org.polypheny.db.algebra.logical.relational.LogicalMatch; +import org.polypheny.db.algebra.logical.relational.LogicalMinus; +import org.polypheny.db.algebra.logical.relational.LogicalModify; +import org.polypheny.db.algebra.logical.relational.LogicalProject; +import org.polypheny.db.algebra.logical.relational.LogicalRelViewScan; +import org.polypheny.db.algebra.logical.relational.LogicalScan; +import org.polypheny.db.algebra.logical.relational.LogicalSort; +import org.polypheny.db.algebra.logical.relational.LogicalTableFunctionScan; +import org.polypheny.db.algebra.logical.relational.LogicalUnion; +import org.polypheny.db.algebra.logical.relational.LogicalValues; import org.polypheny.db.algebra.metadata.AlgColumnMapping; import org.polypheny.db.algebra.metadata.AlgMetadataQuery; import org.polypheny.db.algebra.metadata.JaninoRelMetadataProvider; @@ -63,33 +124,121 @@ import org.polypheny.db.languages.NodeToAlgConverter; import org.polypheny.db.languages.OperatorRegistry; import org.polypheny.db.languages.ParserPos; -import org.polypheny.db.nodes.*; +import org.polypheny.db.nodes.BasicNodeVisitor; +import org.polypheny.db.nodes.Call; +import org.polypheny.db.nodes.DataTypeSpec; +import org.polypheny.db.nodes.DynamicParam; +import org.polypheny.db.nodes.Identifier; +import org.polypheny.db.nodes.IntervalQualifier; +import org.polypheny.db.nodes.Literal; +import org.polypheny.db.nodes.Node; +import org.polypheny.db.nodes.NodeList; +import org.polypheny.db.nodes.NodeVisitor; +import org.polypheny.db.nodes.Operator; import org.polypheny.db.nodes.validate.ValidatorTable; -import org.polypheny.db.plan.*; +import org.polypheny.db.plan.AlgOptCluster; +import org.polypheny.db.plan.AlgOptSamplingParameters; +import org.polypheny.db.plan.AlgOptTable; import org.polypheny.db.plan.AlgOptTable.ToAlgContext; +import org.polypheny.db.plan.AlgOptUtil; +import org.polypheny.db.plan.AlgTraitSet; +import org.polypheny.db.plan.Convention; import org.polypheny.db.prepare.AlgOptTableImpl; import org.polypheny.db.prepare.Prepare.CatalogReader; -import org.polypheny.db.rex.*; -import org.polypheny.db.schema.*; -import org.polypheny.db.sql.language.*; -import 
org.polypheny.db.sql.language.fun.*; -import org.polypheny.db.sql.language.validate.*; +import org.polypheny.db.rex.RexBuilder; +import org.polypheny.db.rex.RexCall; +import org.polypheny.db.rex.RexCallBinding; +import org.polypheny.db.rex.RexCorrelVariable; +import org.polypheny.db.rex.RexDynamicParam; +import org.polypheny.db.rex.RexFieldAccess; +import org.polypheny.db.rex.RexFieldCollation; +import org.polypheny.db.rex.RexInputRef; +import org.polypheny.db.rex.RexLiteral; +import org.polypheny.db.rex.RexNode; +import org.polypheny.db.rex.RexPatternFieldRef; +import org.polypheny.db.rex.RexRangeRef; +import org.polypheny.db.rex.RexShuttle; +import org.polypheny.db.rex.RexSubQuery; +import org.polypheny.db.rex.RexUtil; +import org.polypheny.db.rex.RexWindowBound; +import org.polypheny.db.schema.ColumnStrategy; +import org.polypheny.db.schema.LogicalRelView; +import org.polypheny.db.schema.ModifiableTable; +import org.polypheny.db.schema.Table; +import org.polypheny.db.schema.TranslatableTable; +import org.polypheny.db.schema.Wrapper; +import org.polypheny.db.sql.language.SqlAggFunction; +import org.polypheny.db.sql.language.SqlBasicCall; +import org.polypheny.db.sql.language.SqlCall; +import org.polypheny.db.sql.language.SqlCallBinding; +import org.polypheny.db.sql.language.SqlDelete; +import org.polypheny.db.sql.language.SqlDynamicParam; +import org.polypheny.db.sql.language.SqlFunction; +import org.polypheny.db.sql.language.SqlIdentifier; +import org.polypheny.db.sql.language.SqlInsert; +import org.polypheny.db.sql.language.SqlIntervalQualifier; +import org.polypheny.db.sql.language.SqlJoin; +import org.polypheny.db.sql.language.SqlLiteral; +import org.polypheny.db.sql.language.SqlMatchRecognize; +import org.polypheny.db.sql.language.SqlMerge; +import org.polypheny.db.sql.language.SqlNode; +import org.polypheny.db.sql.language.SqlNodeList; +import org.polypheny.db.sql.language.SqlNumericLiteral; +import org.polypheny.db.sql.language.SqlOperator; +import org.polypheny.db.sql.language.SqlOrderBy; +import org.polypheny.db.sql.language.SqlSampleSpec; +import org.polypheny.db.sql.language.SqlSelect; +import org.polypheny.db.sql.language.SqlSelectKeyword; +import org.polypheny.db.sql.language.SqlSetOperator; +import org.polypheny.db.sql.language.SqlUnnestOperator; +import org.polypheny.db.sql.language.SqlUpdate; +import org.polypheny.db.sql.language.SqlUtil; +import org.polypheny.db.sql.language.SqlValuesOperator; +import org.polypheny.db.sql.language.SqlWindow; +import org.polypheny.db.sql.language.SqlWith; +import org.polypheny.db.sql.language.SqlWithItem; +import org.polypheny.db.sql.language.fun.SqlCase; +import org.polypheny.db.sql.language.fun.SqlCountAggFunction; +import org.polypheny.db.sql.language.fun.SqlInOperator; +import org.polypheny.db.sql.language.fun.SqlQuantifyOperator; +import org.polypheny.db.sql.language.fun.SqlRowOperator; +import org.polypheny.db.sql.language.fun.SqlStdOperatorTable; +import org.polypheny.db.sql.language.validate.AggregatingSelectScope; +import org.polypheny.db.sql.language.validate.CollectNamespace; +import org.polypheny.db.sql.language.validate.DelegatingScope; +import org.polypheny.db.sql.language.validate.ListScope; +import org.polypheny.db.sql.language.validate.MatchRecognizeScope; +import org.polypheny.db.sql.language.validate.ParameterScope; +import org.polypheny.db.sql.language.validate.SelectScope; +import org.polypheny.db.sql.language.validate.SqlQualified; +import org.polypheny.db.sql.language.validate.SqlUserDefinedTableFunction; +import 
org.polypheny.db.sql.language.validate.SqlUserDefinedTableMacro; +import org.polypheny.db.sql.language.validate.SqlValidator; +import org.polypheny.db.sql.language.validate.SqlValidatorImpl; +import org.polypheny.db.sql.language.validate.SqlValidatorNamespace; +import org.polypheny.db.sql.language.validate.SqlValidatorScope; +import org.polypheny.db.sql.language.validate.SqlValidatorUtil; import org.polypheny.db.tools.AlgBuilder; import org.polypheny.db.type.PolyType; import org.polypheny.db.type.PolyTypeUtil; import org.polypheny.db.type.inference.PolyReturnTypeInference; import org.polypheny.db.type.inference.TableFunctionReturnTypeInference; -import org.polypheny.db.util.*; +import org.polypheny.db.util.CoreUtil; +import org.polypheny.db.util.ImmutableBitSet; +import org.polypheny.db.util.ImmutableIntList; +import org.polypheny.db.util.InitializerContext; +import org.polypheny.db.util.InitializerExpressionFactory; +import org.polypheny.db.util.Litmus; +import org.polypheny.db.util.NameMatcher; +import org.polypheny.db.util.NlsString; +import org.polypheny.db.util.NullInitializerExpressionFactory; +import org.polypheny.db.util.NumberUtil; +import org.polypheny.db.util.Pair; +import org.polypheny.db.util.Util; +import org.polypheny.db.util.ValidatorUtil; import org.polypheny.db.util.trace.PolyphenyDbTrace; import org.slf4j.Logger; -import javax.annotation.Nonnull; -import java.lang.reflect.Type; -import java.math.BigDecimal; -import java.util.*; -import java.util.function.Supplier; -import java.util.stream.Collectors; - /** * Converts a SQL parse tree (consisting of {@link SqlNode} objects) into an algebra expression (consisting of {@link AlgNode} objects). @@ -261,7 +410,7 @@ private void checkConvertedType( Node query, AlgNode result ) { Pair.right( validatedFields ), ValidatorUtil.uniquify( Pair.left( validatedFields ), - catalogReader.nameMatcher().isCaseSensitive() ) ); + catalogReader.nameMatcher.isCaseSensitive() ) ); /*int diff = validatedFields.size() - result.getRowType().getFieldList().size(); if ( diff > 0 ) { for ( int i = 0; i < diff; i++ ) { @@ -1717,7 +1866,7 @@ protected void convertFrom( Blackboard bb, SqlNode from ) { if ( isNatural ) { final AlgDataType leftRowType = leftNamespace.getRowType(); final AlgDataType rightRowType = rightNamespace.getRowType(); - final List columnList = SqlValidatorUtil.deriveNaturalJoinColumnList( catalogReader.nameMatcher(), leftRowType, rightRowType ); + final List columnList = SqlValidatorUtil.deriveNaturalJoinColumnList( catalogReader.nameMatcher, leftRowType, rightRowType ); conditionExp = convertUsing( leftNamespace, rightNamespace, columnList ); } else { conditionExp = @@ -2102,7 +2251,7 @@ private CorrelationUse getCorrelationUse( Blackboard bb, final AlgNode r0 ) { String originalRelName = lookup.getOriginalRelName(); String originalFieldName = fieldAccess.getField().getName(); - final NameMatcher nameMatcher = bb.getValidator().getCatalogReader().nameMatcher(); + final NameMatcher nameMatcher = bb.getValidator().getCatalogReader().nameMatcher; final SqlValidatorScope.ResolvedImpl resolved = new SqlValidatorScope.ResolvedImpl(); lookup.bb.scope.resolve( ImmutableList.of( originalRelName ), nameMatcher, false, resolved ); assert resolved.count() == 1; @@ -2193,7 +2342,7 @@ private boolean isSubQueryNonCorrelated( AlgNode subq, Blackboard bb ) { DeferredLookup lookup = mapCorrelToDeferred.get( correlName ); String originalRelName = lookup.getOriginalRelName(); - final NameMatcher nameMatcher = 
lookup.bb.scope.getValidator().getCatalogReader().nameMatcher(); + final NameMatcher nameMatcher = lookup.bb.scope.getValidator().getCatalogReader().nameMatcher; final SqlValidatorScope.ResolvedImpl resolved = new SqlValidatorScope.ResolvedImpl(); lookup.bb.scope.resolve( ImmutableList.of( originalRelName ), nameMatcher, false, resolved ); @@ -2261,7 +2410,7 @@ private RexNode convertJoinCondition( Blackboard bb, SqlValidatorNamespace leftN */ private @Nonnull RexNode convertUsing( SqlValidatorNamespace leftNamespace, SqlValidatorNamespace rightNamespace, List nameList ) { - final NameMatcher nameMatcher = catalogReader.nameMatcher(); + final NameMatcher nameMatcher = catalogReader.nameMatcher; final List list = new ArrayList<>(); for ( String name : nameList ) { List operands = new ArrayList<>(); @@ -2838,7 +2987,7 @@ protected AlgNode convertColumnList( final SqlInsert call, AlgNode source ) { // Walk the name list and place the associated value in the expression list according to the ordinal value returned from the table construct, leaving nulls in the list for columns // that are not referenced. - final NameMatcher nameMatcher = catalogReader.nameMatcher(); + final NameMatcher nameMatcher = catalogReader.nameMatcher; for ( Pair p : Pair.zip( targetColumnNames, columnExprs ) ) { AlgDataTypeField field = nameMatcher.field( targetRowType, p.left ); @@ -3356,7 +3505,7 @@ private void convertSelectList( Blackboard bb, SqlSelect select, List o fieldNames.add( deriveAlias( expr, aliases, i ) ); } - fieldNames = ValidatorUtil.uniquify( fieldNames, catalogReader.nameMatcher().isCaseSensitive() ); + fieldNames = ValidatorUtil.uniquify( fieldNames, catalogReader.nameMatcher.isCaseSensitive() ); algBuilder.push( bb.root ).projectNamed( exprs, fieldNames, true ); bb.setRoot( algBuilder.build(), false ); @@ -3706,7 +3855,7 @@ Pair> lookupExp( SqlQualified qualified ) { } return Pair.of( node, null ); } - final NameMatcher nameMatcher = scope.getValidator().getCatalogReader().nameMatcher(); + final NameMatcher nameMatcher = scope.getValidator().getCatalogReader().nameMatcher; final SqlValidatorScope.ResolvedImpl resolved = new SqlValidatorScope.ResolvedImpl(); scope.resolve( qualified.prefix(), nameMatcher, false, resolved ); if ( !(resolved.count() == 1) ) { diff --git a/plugins/sql-language/src/test/java/org/polypheny/db/sql/language/SqlToAlgTestBase.java b/plugins/sql-language/src/test/java/org/polypheny/db/sql/language/SqlToAlgTestBase.java index 7894e0a544..6548715920 100644 --- a/plugins/sql-language/src/test/java/org/polypheny/db/sql/language/SqlToAlgTestBase.java +++ b/plugins/sql-language/src/test/java/org/polypheny/db/sql/language/SqlToAlgTestBase.java @@ -346,17 +346,6 @@ protected MockColumnSet createColumnSet( ValidatorTable table, List name } - @Override - public AlgDataTypeFactory getTypeFactory() { - return typeFactory; - } - - - @Override - public void registerRules( AlgOptPlanner planner ) throws Exception { - } - - /** * Mock column set. 
*/ @@ -456,7 +445,7 @@ public Expression getExpression( Class clazz ) { @Override public AlgOptTable extend( List extendedFields ) { - final AlgDataType extendedRowType = getRelOptSchema().getTypeFactory().builder() + final AlgDataType extendedRowType = AlgDataTypeFactory.DEFAULT.builder() .addAll( rowType.getFieldList() ) .addAll( extendedFields ) .build(); From e8a6ab147b72dcfe0e1308f6fa21dd2d65faf89a Mon Sep 17 00:00:00 2001 From: datomo Date: Thu, 16 Feb 2023 23:13:31 +0100 Subject: [PATCH 008/436] testing of new catalog layout --- .../db/algebra/operators/OperatorTable.java | 2 +- .../polypheny/db/catalog/CatalogPlugin.java | 4 ++- .../org/polypheny/db/catalog/PolyCatalog.java | 2 +- .../db/catalog/SerializableCatalog.java | 2 +- .../catalog/document/CatalogDocDatabase.java | 32 +++++++++++++++++++ .../db/catalog/document/DocumentCatalog.java | 29 ++++++++++++++--- .../db/catalog/graph/CatalogGraph.java | 28 ++++++++++++++++ .../db/catalog/graph/GraphCatalog.java | 24 +++++++++++--- .../db/catalog/relational/CatalogSchema.java | 25 ++++++++++++--- .../db/catalog/relational/CatalogTable.java | 24 ++++++++++++++ .../catalog/relational/RelationalCatalog.java | 4 ++- 11 files changed, 157 insertions(+), 19 deletions(-) diff --git a/core/src/main/java/org/polypheny/db/algebra/operators/OperatorTable.java b/core/src/main/java/org/polypheny/db/algebra/operators/OperatorTable.java index 2e3eb3daeb..7fe2d2c143 100644 --- a/core/src/main/java/org/polypheny/db/algebra/operators/OperatorTable.java +++ b/core/src/main/java/org/polypheny/db/algebra/operators/OperatorTable.java @@ -44,7 +44,7 @@ public interface OperatorTable { * * @return list of SqlOperator objects */ - List getOperatorList(); + List getOperatorList(); } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/CatalogPlugin.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/CatalogPlugin.java index 315902e4cb..681914b1b6 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/CatalogPlugin.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/CatalogPlugin.java @@ -41,7 +41,9 @@ public void start() { long database = catalog.addDatabase( "APP", user ); catalog.addNamespace( "test", database, user, NamespaceType.RELATIONAL ); - catalog.addNamespace( "test2", database, user, NamespaceType.RELATIONAL ); + long namespaceId = catalog.addNamespace( "test2", database, user, NamespaceType.RELATIONAL ); + + catalog.addEntity( "testTable", namespaceId, NamespaceType.RELATIONAL, user ); byte[] buffer = catalog.serialize( PolyCatalog.class ); diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java index 0d2eef6095..cd89d8d4e5 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java @@ -209,7 +209,7 @@ private CatalogModelMapping addRelationalNamespace( long id, String name, long d } - public long addEntity( String name, long namespaceId, NamespaceType type, int ownerId ) { + public long addEntity( String name, long namespaceId, NamespaceType type, long ownerId ) { long id = idBuilder.getNewEntityId(); switch ( type ) { diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/SerializableCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/SerializableCatalog.java index da12a7d78f..d1ac380d58 100644 --- 
a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/SerializableCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/SerializableCatalog.java @@ -29,7 +29,7 @@ public interface SerializableCatalog { BinarySerializer getSerializer(); default byte[] serialize( Class clazz ) { - byte[] buffer = new byte[200]; + byte[] buffer = new byte[1000]; getSerializer().encode( buffer, 0, this ); return buffer; } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/document/CatalogDocDatabase.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/document/CatalogDocDatabase.java index aee0a55a93..d28bddd8ab 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/document/CatalogDocDatabase.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/document/CatalogDocDatabase.java @@ -16,6 +16,38 @@ package org.polypheny.db.catalog.document; +import com.google.common.collect.ImmutableMap; +import io.activej.serializer.annotations.Deserialize; +import io.activej.serializer.annotations.Serialize; +import java.util.Map; +import org.polypheny.db.catalog.Catalog.NamespaceType; + public class CatalogDocDatabase { + @Serialize + public final long id; + @Serialize + public final String name; + @Serialize + public final long databaseId; + @Serialize + public final NamespaceType namespaceType; + + @Serialize + public final ImmutableMap collections; + + + public CatalogDocDatabase( + @Deserialize("id") long id, + @Deserialize("name") String name, + @Deserialize("databaseId") long databaseId, + @Deserialize("namespaceType") NamespaceType namespaceType, + @Deserialize("collections") Map collections ) { + this.id = id; + this.name = name; + this.databaseId = databaseId; + this.namespaceType = namespaceType; + this.collections = ImmutableMap.copyOf( collections ); + } + } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/document/DocumentCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/document/DocumentCatalog.java index 1c9d8f7007..f2beeb2752 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/document/DocumentCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/document/DocumentCatalog.java @@ -17,8 +17,10 @@ package org.polypheny.db.catalog.document; import io.activej.serializer.BinarySerializer; -import java.util.HashMap; +import io.activej.serializer.annotations.Deserialize; +import io.activej.serializer.annotations.Serialize; import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; import lombok.Getter; import org.polypheny.db.catalog.Catalog.NamespaceType; import org.polypheny.db.catalog.ModelCatalog; @@ -26,11 +28,27 @@ public class DocumentCatalog implements ModelCatalog, SerializableCatalog { - Map databases = new HashMap<>(); - Map collections = new HashMap<>(); - @Getter - BinarySerializer serializer = SerializableCatalog.builder.get().build( DocumentCatalog.class ); + public final BinarySerializer serializer = SerializableCatalog.builder.get().build( DocumentCatalog.class ); + + @Serialize + public final Map databases; + @Serialize + public final Map collections; + + + public DocumentCatalog() { + this( new ConcurrentHashMap<>(), new ConcurrentHashMap<>() ); + } + + + public DocumentCatalog( + @Deserialize("databases") Map databases, + @Deserialize("collections") Map collections ) { + this.databases = databases; + this.collections = collections; + } + private boolean openChanges = false; @@ -55,6 +73,7 @@ 
public boolean hasUncommittedChanges() { public void addDatabase( long id, String name, long databaseId, NamespaceType namespaceType ) { + databases.put( id, new CatalogDocDatabase( id, name, databaseId, namespaceType, collections ) ); } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/graph/CatalogGraph.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/graph/CatalogGraph.java index bfd3a88c56..2b692f63ea 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/graph/CatalogGraph.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/graph/CatalogGraph.java @@ -16,6 +16,34 @@ package org.polypheny.db.catalog.graph; +import io.activej.serializer.annotations.Deserialize; +import io.activej.serializer.annotations.Serialize; +import org.polypheny.db.catalog.Catalog.NamespaceType; + public class CatalogGraph { + @Serialize + public final long id; + + @Serialize + public final String name; + + @Serialize + public final long databaseId; + + @Serialize + public final NamespaceType namespaceType; + + + public CatalogGraph( + @Deserialize("id") long id, + @Deserialize("name") String name, + @Deserialize("databaseId") long databaseId, + @Deserialize("namespaceType") NamespaceType namespaceType ) { + this.id = id; + this.name = name; + this.databaseId = databaseId; + this.namespaceType = namespaceType; + } + } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/graph/GraphCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/graph/GraphCatalog.java index 884250172c..05ab80b7cc 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/graph/GraphCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/graph/GraphCatalog.java @@ -17,8 +17,10 @@ package org.polypheny.db.catalog.graph; import io.activej.serializer.BinarySerializer; -import java.util.ArrayList; -import java.util.List; +import io.activej.serializer.annotations.Deserialize; +import io.activej.serializer.annotations.Serialize; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; import lombok.Getter; import org.polypheny.db.catalog.Catalog.NamespaceType; import org.polypheny.db.catalog.ModelCatalog; @@ -26,14 +28,25 @@ public class GraphCatalog implements ModelCatalog, SerializableCatalog { - public List graphs = new ArrayList<>(); - @Getter - BinarySerializer serializer = SerializableCatalog.builder.get().build( GraphCatalog.class ); + public final BinarySerializer serializer = SerializableCatalog.builder.get().build( GraphCatalog.class ); + + @Serialize + public Map graphs; private boolean openChanges = false; + public GraphCatalog() { + this( new ConcurrentHashMap<>() ); + } + + + public GraphCatalog( @Deserialize("graphs") Map graphs ) { + this.graphs = graphs; + } + + @Override public void commit() { @@ -55,6 +68,7 @@ public boolean hasUncommittedChanges() { public void addGraph( long id, String name, long databaseId, NamespaceType namespaceType ) { + graphs.put( id, new CatalogGraph( id, name, databaseId, namespaceType ) ); } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/relational/CatalogSchema.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/relational/CatalogSchema.java index 819128d941..931b1a60aa 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/relational/CatalogSchema.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/relational/CatalogSchema.java @@ -16,28 +16,45 @@ package 
org.polypheny.db.catalog.relational; +import com.google.common.collect.ImmutableMap; import io.activej.serializer.annotations.Deserialize; import io.activej.serializer.annotations.Serialize; +import java.util.HashMap; +import java.util.Map; +import lombok.Value; +@Value public class CatalogSchema { @Serialize - public final long id; + public long id; @Serialize - public final String name; + public String name; @Serialize - public final long databaseId; + public long databaseId; + + @Serialize + public ImmutableMap tables; public CatalogSchema( @Deserialize("id") long id, @Deserialize("name") String name, - @Deserialize("databaseId") long databaseId ) { + @Deserialize("databaseId") long databaseId, + @Deserialize("tables") Map tables ) { this.id = id; this.name = name; this.databaseId = databaseId; + this.tables = ImmutableMap.copyOf( tables ); + } + + + public CatalogSchema addTable( CatalogTable catalogTable ) { + Map newTables = new HashMap<>( tables ); + newTables.put( catalogTable.id, catalogTable ); + return new CatalogSchema( id, name, databaseId, newTables ); } } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/relational/CatalogTable.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/relational/CatalogTable.java index a18670dad5..2429b1b3df 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/relational/CatalogTable.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/relational/CatalogTable.java @@ -16,6 +16,30 @@ package org.polypheny.db.catalog.relational; +import io.activej.serializer.annotations.Deserialize; +import io.activej.serializer.annotations.Serialize; +import lombok.Value; + +@Value public class CatalogTable { + @Serialize + public long id; + + @Serialize + public String name; + + @Serialize + public long namespaceId; + + + public CatalogTable( + @Deserialize("id") long id, + @Deserialize("name") String name, + @Deserialize("namespaceId") long namespaceId ) { + this.id = id; + this.name = name; + this.namespaceId = namespaceId; + } + } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/relational/RelationalCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/relational/RelationalCatalog.java index fe129e36a1..23f67e71b5 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/relational/RelationalCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/relational/RelationalCatalog.java @@ -80,15 +80,17 @@ public boolean hasUncommittedChanges() { public void addSchema( long id, String name, long databaseId, NamespaceType namespaceType ) { - schemas.put( id, new CatalogSchema( id, name, databaseId ) ); + schemas.put( id, new CatalogSchema( id, name, databaseId, tables ) ); } public void addTable( long id, String name, long namespaceId ) { + schemas.put( id, schemas.get( namespaceId ).addTable( new CatalogTable( id, name, namespaceId ) ) ); } public void addColumn( long id, String name, long entityId, AlgDataType type ) { + } From c3d132120ce2023d1a803cffebd5e86af3d431d1 Mon Sep 17 00:00:00 2001 From: datomo Date: Fri, 17 Feb 2023 09:32:21 +0100 Subject: [PATCH 009/436] removed and move unused classes --- .../db/webui/UiTestingConfigPage.java | 7 - .../polypheny/db/config/ConfigServerTest.java | 5 +- .../db/algebra/core/AlgFactories.java | 12 - .../polypheny/db/algebra/core/Uncollect.java | 4 +- .../algebra/core/document/DocumentScan.java | 4 +- .../algebra/core/document/DocumentValues.java | 6 +- 
.../db/algebra/metadata/BuiltInMetadata.java | 7 - .../db/algebra/type/AlgCrossType.java | 12 + .../db/algebra/type/AlgDataType.java | 29 +- .../algebra/type/AlgDataTypeFactoryImpl.java | 12 + .../db/algebra/type/AlgDataTypeImpl.java | 16 +- ...AlgDocumentType.java => DocumentType.java} | 21 +- .../db/algebra/type/DynamicRecordType.java | 1 + .../db/catalog/SchemaTypeVisitor.java | 76 --- .../db/catalog/entity/CatalogAdapter.java | 9 - .../catalog/entity/CatalogQueryInterface.java | 8 - .../db/catalog/entity/CatalogUser.java | 9 - .../db/ddl/exception/NoColumnsException.java | 21 - .../polypheny/db/interpreter/Interpreter.java | 127 ----- .../polypheny/db/plan/AlgOptConnection.java | 50 -- .../db/processing/JsonRelProcessor.java | 26 - .../java/org/polypheny/db/rex/RexBuilder.java | 2 +- .../java/org/polypheny/db/schema/Member.java | 92 ---- .../polypheny/db/tools/RoutedAlgBuilder.java | 13 +- .../polypheny/db/type/AbstractPolyType.java | 12 + .../db/type/PolyTypeFactoryImpl.java | 14 +- .../db/type/checker/OperandsTypeChecking.java | 26 - .../db/util/BarfingInvocationHandler.java | 1 - .../db/util/DelegatingInvocationHandler.java | 98 ---- .../db/util/background/BackgroundTask.java | 22 - .../polypheny/db/util/mapping/Mapping.java | 1 - .../polypheny/db/util/mapping/Mappings.java | 8 - .../db/catalog/MockCatalogReader.java | 157 ------ .../polypheny/db/schema/BookstoreSchema.java | 176 ------- .../db/routing/routers/IcarusRouter.java | 11 - .../java/org/polypheny/db/test/CsvTest.java | 76 +-- .../db/cypher/CypherProcessorImpl.java | 42 -- .../db/adapter/elasticsearch}/Closer.java | 21 +- .../EmbeddedElasticsearchPolicy.java | 1 - .../polypheny/db/http/model/QueryRequest.java | 41 -- .../polypheny/db/http/model/UIRequest.java | 79 --- .../polypheny/db/adapter/jdbc/JdbcUtils.java | 66 +-- .../db/sql/language/SqlFactoryProvider.java | 28 -- .../db/sql/language/SqlUnnestOperator.java | 4 +- .../db/sql/language/fun/SqlItemOperator.java | 5 +- .../java/org/polypheny/db/sql/Smalls.java | 456 ------------------ .../org/polypheny/db/webui/models/Debug.java | 50 -- 47 files changed, 126 insertions(+), 1838 deletions(-) rename core/src/main/java/org/polypheny/db/algebra/type/{AlgDocumentType.java => DocumentType.java} (88%) delete mode 100644 core/src/main/java/org/polypheny/db/catalog/SchemaTypeVisitor.java delete mode 100644 core/src/main/java/org/polypheny/db/ddl/exception/NoColumnsException.java delete mode 100644 core/src/main/java/org/polypheny/db/plan/AlgOptConnection.java delete mode 100644 core/src/main/java/org/polypheny/db/processing/JsonRelProcessor.java delete mode 100644 core/src/main/java/org/polypheny/db/schema/Member.java delete mode 100644 core/src/main/java/org/polypheny/db/type/checker/OperandsTypeChecking.java delete mode 100644 core/src/main/java/org/polypheny/db/util/DelegatingInvocationHandler.java delete mode 100644 core/src/test/java/org/polypheny/db/schema/BookstoreSchema.java rename {core/src/main/java/org/polypheny/db/util => plugins/elasticsearch-adapter/src/test/java/org/polypheny/db/adapter/elasticsearch}/Closer.java (61%) delete mode 100644 plugins/http-interface/src/main/java/org/polypheny/db/http/model/QueryRequest.java delete mode 100644 plugins/http-interface/src/main/java/org/polypheny/db/http/model/UIRequest.java delete mode 100644 plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlFactoryProvider.java delete mode 100644 webui/src/main/java/org/polypheny/db/webui/models/Debug.java diff --git 
a/config/src/main/java/org/polypheny/db/webui/UiTestingConfigPage.java b/config/src/main/java/org/polypheny/db/webui/UiTestingConfigPage.java index 0da3a32c33..597fe6d298 100644 --- a/config/src/main/java/org/polypheny/db/webui/UiTestingConfigPage.java +++ b/config/src/main/java/org/polypheny/db/webui/UiTestingConfigPage.java @@ -62,13 +62,6 @@ private static class BarImplementation extends TestClass { } - private static class FooBarImplementation extends TestClass { - - int d; - - } - - static { ConfigManager cm = ConfigManager.getInstance(); diff --git a/config/src/test/java/org/polypheny/db/config/ConfigServerTest.java b/config/src/test/java/org/polypheny/db/config/ConfigServerTest.java index 40782c99af..1b75743172 100644 --- a/config/src/test/java/org/polypheny/db/config/ConfigServerTest.java +++ b/config/src/test/java/org/polypheny/db/config/ConfigServerTest.java @@ -152,10 +152,9 @@ private static class FooImplementation extends TestClass { int b; } private static class BarImplementation extends TestClass { + int c; - } - private static class FooBarImplementation extends TestClass { - int d; + } private enum TestEnum { diff --git a/core/src/main/java/org/polypheny/db/algebra/core/AlgFactories.java b/core/src/main/java/org/polypheny/db/algebra/core/AlgFactories.java index f09ee96bbb..06f73d190e 100644 --- a/core/src/main/java/org/polypheny/db/algebra/core/AlgFactories.java +++ b/core/src/main/java/org/polypheny/db/algebra/core/AlgFactories.java @@ -79,7 +79,6 @@ import org.polypheny.db.schema.TranslatableTable; import org.polypheny.db.tools.AlgBuilder; import org.polypheny.db.tools.AlgBuilderFactory; -import org.polypheny.db.type.PolyType; import org.polypheny.db.util.ImmutableBitSet; @@ -322,17 +321,6 @@ public AlgNode createAggregate( } - public interface TransformerFactory { - - AlgNode createTransformer( - AlgNode input, - AlgDataType rowType, - List unsupportedTypes, - PolyType substituteType ); - - } - - /** * Can create a {@link LogicalFilter} of the appropriate type for this rule's calling convention. 
*/ diff --git a/core/src/main/java/org/polypheny/db/algebra/core/Uncollect.java b/core/src/main/java/org/polypheny/db/algebra/core/Uncollect.java index b11052ff76..4e9c8d5661 100644 --- a/core/src/main/java/org/polypheny/db/algebra/core/Uncollect.java +++ b/core/src/main/java/org/polypheny/db/algebra/core/Uncollect.java @@ -151,8 +151,8 @@ public static AlgDataType deriveUncollectRowType( AlgNode alg, boolean withOrdin for ( AlgDataTypeField field : fields ) { if ( field.getType() instanceof MapPolyType ) { - builder.add( UnnestOperator.MAP_KEY_COLUMN_NAME, null, field.getType().getKeyType() ); - builder.add( UnnestOperator.MAP_VALUE_COLUMN_NAME, null, field.getType().getValueType() ); + builder.add( UnnestOperator.MAP_KEY_COLUMN_NAME, null, field.getType().unwrap( MapPolyType.class ).getKeyType() ); + builder.add( UnnestOperator.MAP_VALUE_COLUMN_NAME, null, field.getType().unwrap( MapPolyType.class ).getValueType() ); } else { AlgDataType ret = field.getType().getComponentType(); assert null != ret; diff --git a/core/src/main/java/org/polypheny/db/algebra/core/document/DocumentScan.java b/core/src/main/java/org/polypheny/db/algebra/core/document/DocumentScan.java index fc3f58b738..c4be1cf954 100644 --- a/core/src/main/java/org/polypheny/db/algebra/core/document/DocumentScan.java +++ b/core/src/main/java/org/polypheny/db/algebra/core/document/DocumentScan.java @@ -23,8 +23,8 @@ import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.algebra.type.AlgDataTypeFieldImpl; -import org.polypheny.db.algebra.type.AlgDocumentType; import org.polypheny.db.algebra.type.AlgRecordType; +import org.polypheny.db.algebra.type.DocumentType; import org.polypheny.db.catalog.Catalog.NamespaceType; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgOptTable; @@ -49,7 +49,7 @@ public DocumentScan( AlgOptCluster cluster, AlgTraitSet traitSet, AlgOptTable co AlgDataType docType = cluster.getTypeFactory().createPolyType( PolyType.DOCUMENT ); // todo dl: change after RowType refactor if ( this.collection.getTable().getSchemaType() == NamespaceType.DOCUMENT ) { - this.rowType = new AlgDocumentType(); + this.rowType = new DocumentType(); } else { List list = collection.getRowType().getFieldList().stream() .map( f -> new AlgDataTypeFieldImpl( f.getName(), f.getIndex(), cluster.getTypeFactory().createPolyType( PolyType.ANY ) ) ) diff --git a/core/src/main/java/org/polypheny/db/algebra/core/document/DocumentValues.java b/core/src/main/java/org/polypheny/db/algebra/core/document/DocumentValues.java index bdd7381163..816bb18f21 100644 --- a/core/src/main/java/org/polypheny/db/algebra/core/document/DocumentValues.java +++ b/core/src/main/java/org/polypheny/db/algebra/core/document/DocumentValues.java @@ -29,7 +29,7 @@ import org.polypheny.db.algebra.AbstractAlgNode; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.logical.relational.LogicalValues; -import org.polypheny.db.algebra.type.AlgDocumentType; +import org.polypheny.db.algebra.type.DocumentType; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgOptTable; import org.polypheny.db.plan.AlgTraitSet; @@ -51,7 +51,7 @@ public abstract class DocumentValues extends AbstractAlgNode implements Document */ public DocumentValues( AlgOptCluster cluster, AlgTraitSet traitSet, ImmutableList documentTuples ) { super( cluster, traitSet ); - this.rowType = new AlgDocumentType(); + this.rowType = new DocumentType(); this.documentTuples = 
validate( documentTuples ); } @@ -126,7 +126,7 @@ public LogicalValues getRelationalEquivalent() { AlgTraitSet out = traitSet.replace( ModelTrait.RELATIONAL ); AlgOptCluster cluster = AlgOptCluster.create( getCluster().getPlanner(), getCluster().getRexBuilder() ); - return new LogicalValues( cluster, out, ((AlgDocumentType) rowType).asRelational(), relationalize( documentTuples, cluster.getRexBuilder() ) ); + return new LogicalValues( cluster, out, ((DocumentType) rowType).asRelational(), relationalize( documentTuples, cluster.getRexBuilder() ) ); } diff --git a/core/src/main/java/org/polypheny/db/algebra/metadata/BuiltInMetadata.java b/core/src/main/java/org/polypheny/db/algebra/metadata/BuiltInMetadata.java index bdbf8b6731..f60f4ae381 100644 --- a/core/src/main/java/org/polypheny/db/algebra/metadata/BuiltInMetadata.java +++ b/core/src/main/java/org/polypheny/db/algebra/metadata/BuiltInMetadata.java @@ -755,12 +755,5 @@ interface Handler extends MetadataHandler { } - /** - * The built-in forms of metadata. - */ - interface All extends Selectivity, UniqueKeys, RowCount, DistinctRowCount, PercentageOriginalRows, ColumnUniqueness, ColumnOrigin, Predicates, Collation, Distribution, Size, Parallelism, Memory, AllPredicates, ExpressionLineage, TableReferences, NodeTypes { - - } - } diff --git a/core/src/main/java/org/polypheny/db/algebra/type/AlgCrossType.java b/core/src/main/java/org/polypheny/db/algebra/type/AlgCrossType.java index 59a440f349..71fcb183c6 100644 --- a/core/src/main/java/org/polypheny/db/algebra/type/AlgCrossType.java +++ b/core/src/main/java/org/polypheny/db/algebra/type/AlgCrossType.java @@ -92,5 +92,17 @@ protected void generateTypeString( StringBuilder sb, boolean withDetail ) { sb.append( ")" ); } + + public AlgDataType getKeyType() { + // this is not a map type + return null; + } + + + public AlgDataType getValueType() { + // this is not a map type + return null; + } + } diff --git a/core/src/main/java/org/polypheny/db/algebra/type/AlgDataType.java b/core/src/main/java/org/polypheny/db/algebra/type/AlgDataType.java index 64f1ce8936..72004d3d89 100644 --- a/core/src/main/java/org/polypheny/db/algebra/type/AlgDataType.java +++ b/core/src/main/java/org/polypheny/db/algebra/type/AlgDataType.java @@ -42,7 +42,7 @@ /** - * RelDataType represents the type of a scalar expression or entire row returned from a relational expression. + * RelDataType represents the type of scalar expression or entire row returned from a relational expression. *

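The AlgDataType diff in this commit removes the map-specific getKeyType()/getValueType() accessors in favor of a generic unwrap default method, so call sites (see the Uncollect hunk above and the RexBuilder hunk later in this commit) become field.getType().unwrap( MapPolyType.class ).getKeyType(). A minimal self-contained sketch of that pattern follows; DataType, MapType and UnwrapDemo are hypothetical stand-ins rather than Polypheny classes, and the sketch assumes the check clazz.isInstance( this ), whereas the hunk as printed tests this.getClass().isInstance( clazz ), which matches only the Class object itself.

interface DataType {

    // Return this type viewed as the requested subtype, or null if it does not match.
    default <T> T unwrap( Class<T> clazz ) {
        if ( clazz.isInstance( this ) ) {   // assumption: instance check against 'this'
            return clazz.cast( this );
        }
        return null;
    }

}

final class MapType implements DataType {

    // Placeholder accessors standing in for MapPolyType's key/value types.
    DataType getKeyType() { return null; }

    DataType getValueType() { return null; }

}

public class UnwrapDemo {

    public static void main( String[] args ) {
        DataType map = new MapType();
        System.out.println( map.unwrap( MapType.class ) != null );  // true: it is a MapType
        DataType plain = new DataType() { };                        // some non-map type
        System.out.println( plain.unwrap( MapType.class ) );        // null: no match
    }

}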
* This is a somewhat "fat" interface which unions the attributes of many different type classes into one. Inelegant, * but since our type system was defined before the advent of Java generics, it avoids a lot of typecasting. @@ -73,6 +73,11 @@ public interface AlgDataType { */ List getFieldNames(); + + List getIds(); + + Long getId(); + /** * Returns the number of fields in a struct type. * @@ -118,20 +123,6 @@ public interface AlgDataType { */ AlgDataType getComponentType(); - /** - * Gets the key type if this type is a map, otherwise null. - * - * @return canonical type descriptor for key - */ - AlgDataType getKeyType(); - - /** - * Gets the value type if this type is a map, otherwise null. - * - * @return canonical type descriptor for value - */ - AlgDataType getValueType(); - /** * Gets this type's character set, or null if this type cannot carry a character set or has no character set defined. * @@ -227,5 +218,13 @@ public interface AlgDataType { */ boolean isDynamicStruct(); + + default T unwrap( Class clazz ) { + if ( this.getClass().isInstance( clazz ) ) { + return clazz.cast( this ); + } + return null; + } + } diff --git a/core/src/main/java/org/polypheny/db/algebra/type/AlgDataTypeFactoryImpl.java b/core/src/main/java/org/polypheny/db/algebra/type/AlgDataTypeFactoryImpl.java index b92eb4cff7..da1fe1c876 100644 --- a/core/src/main/java/org/polypheny/db/algebra/type/AlgDataTypeFactoryImpl.java +++ b/core/src/main/java/org/polypheny/db/algebra/type/AlgDataTypeFactoryImpl.java @@ -677,6 +677,18 @@ public PolyType getPolyType() { return Objects.requireNonNullElse( typeName, PolyType.OTHER ); } + + public AlgDataType getKeyType() { + // this is not a map type + return null; + } + + + public AlgDataType getValueType() { + // this is not a map type + return null; + } + } diff --git a/core/src/main/java/org/polypheny/db/algebra/type/AlgDataTypeImpl.java b/core/src/main/java/org/polypheny/db/algebra/type/AlgDataTypeImpl.java index 0a183c1d8b..6593a9bc04 100644 --- a/core/src/main/java/org/polypheny/db/algebra/type/AlgDataTypeImpl.java +++ b/core/src/main/java/org/polypheny/db/algebra/type/AlgDataTypeImpl.java @@ -60,7 +60,7 @@ public abstract class AlgDataTypeImpl implements AlgDataType, AlgDataTypeFamily /** - * Creates a RelDataTypeImpl. + * Creates a AlgDataTypeImpl. 
* * @param fieldList List of fields */ @@ -190,20 +190,6 @@ public AlgDataType getComponentType() { } - @Override - public AlgDataType getKeyType() { - // this is not a map type - return null; - } - - - @Override - public AlgDataType getValueType() { - // this is not a map type - return null; - } - - @Override public boolean isStruct() { return fieldList != null; diff --git a/core/src/main/java/org/polypheny/db/algebra/type/AlgDocumentType.java b/core/src/main/java/org/polypheny/db/algebra/type/DocumentType.java similarity index 88% rename from core/src/main/java/org/polypheny/db/algebra/type/AlgDocumentType.java rename to core/src/main/java/org/polypheny/db/algebra/type/DocumentType.java index 7504869de8..bf64671e36 100644 --- a/core/src/main/java/org/polypheny/db/algebra/type/AlgDocumentType.java +++ b/core/src/main/java/org/polypheny/db/algebra/type/DocumentType.java @@ -28,7 +28,7 @@ import org.polypheny.db.type.PolyType; import org.polypheny.db.util.Collation; -public class AlgDocumentType implements Serializable, AlgDataType, AlgDataTypeFamily, AlgDataTypeField { +public class DocumentType implements Serializable, AlgDataType, AlgDataTypeFamily, AlgDataTypeField { @Getter private final StructKind structKind; @@ -42,16 +42,15 @@ public class AlgDocumentType implements Serializable, AlgDataType, AlgDataTypeFa private String physicalName = null; - public AlgDocumentType( @Nullable String name, @Nonnull List fixedFields ) { + public DocumentType( @Nullable String name, @Nonnull List fixedFields ) { this.name = name; this.structKind = fixedFields.isEmpty() ? StructKind.NONE : StructKind.SEMI; - assert fixedFields != null; this.fixedFields = ImmutableList.copyOf( fixedFields ); } - public AlgDocumentType() { - this( null, List.of( new AlgDocumentType( "_id_", List.of() ) ) ); + public DocumentType() { + this( null, List.of( new DocumentType( "_id_", List.of() ) ) ); } @@ -106,18 +105,6 @@ public AlgDataType getComponentType() { } - @Override - public AlgDataType getKeyType() { - return null; - } - - - @Override - public AlgDataType getValueType() { - return null; - } - - @Override public Charset getCharset() { return null; diff --git a/core/src/main/java/org/polypheny/db/algebra/type/DynamicRecordType.java b/core/src/main/java/org/polypheny/db/algebra/type/DynamicRecordType.java index 9202a1fc05..592570d605 100644 --- a/core/src/main/java/org/polypheny/db/algebra/type/DynamicRecordType.java +++ b/core/src/main/java/org/polypheny/db/algebra/type/DynamicRecordType.java @@ -56,5 +56,6 @@ public static boolean isDynamicStarColName( String name ) { return name.startsWith( DYNAMIC_STAR_PREFIX ); } + } diff --git a/core/src/main/java/org/polypheny/db/catalog/SchemaTypeVisitor.java b/core/src/main/java/org/polypheny/db/catalog/SchemaTypeVisitor.java deleted file mode 100644 index 97b9916cde..0000000000 --- a/core/src/main/java/org/polypheny/db/catalog/SchemaTypeVisitor.java +++ /dev/null @@ -1,76 +0,0 @@ -/* - * Copyright 2019-2022 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.catalog; - -import java.util.ArrayList; -import java.util.List; -import org.polypheny.db.algebra.AlgNode; -import org.polypheny.db.algebra.AlgShuttleImpl; -import org.polypheny.db.algebra.core.Scan; -import org.polypheny.db.catalog.Catalog.NamespaceType; -import org.polypheny.db.catalog.entity.CatalogSchema; -import org.polypheny.db.catalog.exceptions.UnknownDatabaseException; -import org.polypheny.db.catalog.exceptions.UnknownSchemaException; -import org.polypheny.db.schema.LogicalTable; -import org.polypheny.db.schema.Table; - -public class SchemaTypeVisitor extends AlgShuttleImpl { - - private final List namespaceTypes = new ArrayList<>(); - - - public NamespaceType getSchemaTypes() { - if ( namespaceTypes.stream().allMatch( s -> s == NamespaceType.RELATIONAL ) ) { - return NamespaceType.RELATIONAL; - } else if ( namespaceTypes.stream().allMatch( s -> s == NamespaceType.DOCUMENT ) ) { - return NamespaceType.DOCUMENT; - } else { - //mixed - return null; - } - } - - - @Override - public AlgNode visit( Scan scan ) { - try { - List names = scan.getTable().getQualifiedName(); - CatalogSchema schema; - if ( names.size() == 3 ) { - schema = Catalog.getInstance().getSchema( names.get( 0 ), names.get( 1 ) ); - } else if ( names.size() == 2 ) { - if ( names.get( 0 ).contains( "_" ) ) { - schema = Catalog.getInstance().getSchema( Catalog.defaultDatabaseId, names.get( 0 ).split( "_" )[names.size() - 1] ); - } else { - schema = Catalog.getInstance().getSchema( Catalog.defaultDatabaseId, names.get( 0 ) ); - } - } else { - Table logicalTable = scan.getTable().getTable(); - if ( logicalTable instanceof LogicalTable ) { - schema = Catalog.getInstance().getSchema( Catalog.defaultDatabaseId, ((LogicalTable) logicalTable).getLogicalSchemaName() ); - } else { - throw new RuntimeException( "The used table did not use a full name." ); - } - } - namespaceTypes.add( schema.getNamespaceType() ); - } catch ( UnknownSchemaException | UnknownDatabaseException e ) { - throw new RuntimeException( "The was an error on retrieval of the data model." 
); - } - return super.visit( scan ); - } - -} diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogAdapter.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogAdapter.java index 8cbd7897f7..5e0967ffdd 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogAdapter.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogAdapter.java @@ -23,7 +23,6 @@ import java.util.Map; import lombok.EqualsAndHashCode; import lombok.NonNull; -import lombok.RequiredArgsConstructor; import org.polypheny.db.adapter.Adapter.AdapterProperties; import org.polypheny.db.catalog.Adapter; import org.polypheny.db.catalog.Catalog.NamespaceType; @@ -88,12 +87,4 @@ public Serializable[] getParameterArray() { return new Serializable[]{ uniqueName }; } - - @RequiredArgsConstructor - public static class PrimitiveCatalogAdapter { - - public final String name; - - } - } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogQueryInterface.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogQueryInterface.java index 19255b8bd7..7a4bfc62e9 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogQueryInterface.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogQueryInterface.java @@ -22,7 +22,6 @@ import java.util.Map; import lombok.EqualsAndHashCode; import lombok.NonNull; -import lombok.RequiredArgsConstructor; @EqualsAndHashCode @@ -51,11 +50,4 @@ public Serializable[] getParameterArray() { } - @RequiredArgsConstructor - public static class PrimitiveCatalogQueryInterface { - - public final String name; - - } - } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogUser.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogUser.java index db9a680bbc..311b8a633b 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogUser.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogUser.java @@ -19,7 +19,6 @@ import java.io.Serializable; import lombok.EqualsAndHashCode; -import lombok.RequiredArgsConstructor; @EqualsAndHashCode @@ -54,12 +53,4 @@ public int compareTo( CatalogUser o ) { return -1; } - - @RequiredArgsConstructor - public static class PrimitiveCatalogUser { - - public final String name; - - } - } diff --git a/core/src/main/java/org/polypheny/db/ddl/exception/NoColumnsException.java b/core/src/main/java/org/polypheny/db/ddl/exception/NoColumnsException.java deleted file mode 100644 index 022bdddb22..0000000000 --- a/core/src/main/java/org/polypheny/db/ddl/exception/NoColumnsException.java +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright 2019-2022 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.polypheny.db.ddl.exception; - -public class NoColumnsException extends Exception { - -} diff --git a/core/src/main/java/org/polypheny/db/interpreter/Interpreter.java b/core/src/main/java/org/polypheny/db/interpreter/Interpreter.java index 5ed8aab70c..434db25dde 100644 --- a/core/src/main/java/org/polypheny/db/interpreter/Interpreter.java +++ b/core/src/main/java/org/polypheny/db/interpreter/Interpreter.java @@ -40,7 +40,6 @@ import com.google.common.collect.LinkedHashMultimap; import com.google.common.collect.Lists; import com.google.common.collect.Multimap; -import java.math.BigDecimal; import java.util.ArrayDeque; import java.util.ArrayList; import java.util.Collection; @@ -48,7 +47,6 @@ import java.util.Iterator; import java.util.LinkedHashMap; import java.util.List; -import java.util.Locale; import java.util.Map; import java.util.NoSuchElementException; import java.util.Objects; @@ -62,7 +60,6 @@ import org.polypheny.db.adapter.DataContext; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.AlgVisitor; -import org.polypheny.db.algebra.operators.OperatorName; import org.polypheny.db.algebra.rules.CalcSplitRule; import org.polypheny.db.algebra.rules.FilterScanRule; import org.polypheny.db.algebra.rules.ProjectScanRule; @@ -73,9 +70,6 @@ import org.polypheny.db.plan.hep.HepPlanner; import org.polypheny.db.plan.hep.HepProgram; import org.polypheny.db.plan.hep.HepProgramBuilder; -import org.polypheny.db.rex.RexCall; -import org.polypheny.db.rex.RexInputRef; -import org.polypheny.db.rex.RexLiteral; import org.polypheny.db.rex.RexNode; import org.polypheny.db.util.Pair; import org.polypheny.db.util.ReflectUtil; @@ -164,127 +158,6 @@ private void start() { public void close() { } - - /** - * Not used. - */ - private class FooCompiler implements ScalarCompiler { - - @Override - public Scalar compile( List nodes, AlgDataType inputRowType, DataContext dataContext ) { - final RexNode node = nodes.get( 0 ); - if ( node instanceof RexCall ) { - final RexCall call = (RexCall) node; - final Scalar argScalar = compile( call.getOperands(), inputRowType, dataContext ); - return new Scalar() { - final Object[] args = new Object[call.getOperands().size()]; - - - @Override - public void execute( final Context context, Object[] results ) { - results[0] = execute( context ); - } - - - @Override - public Object execute( Context context ) { - Comparable o0; - Comparable o1; - switch ( call.getKind() ) { - case LESS_THAN: - case LESS_THAN_OR_EQUAL: - case GREATER_THAN: - case GREATER_THAN_OR_EQUAL: - case EQUALS: - case NOT_EQUALS: - argScalar.execute( context, args ); - o0 = (Comparable) args[0]; - if ( o0 == null ) { - return null; - } - o1 = (Comparable) args[1]; - if ( o1 == null ) { - return null; - } - if ( o0 instanceof BigDecimal ) { - if ( o1 instanceof Double || o1 instanceof Float ) { - o1 = new BigDecimal( ((Number) o1).doubleValue() ); - } else { - o1 = new BigDecimal( ((Number) o1).longValue() ); - } - } - if ( o1 instanceof BigDecimal ) { - if ( o0 instanceof Double || o0 instanceof Float ) { - o0 = new BigDecimal( ((Number) o0).doubleValue() ); - } else { - o0 = new BigDecimal( ((Number) o0).longValue() ); - } - } - final int c = o0.compareTo( o1 ); - switch ( call.getKind() ) { - case LESS_THAN: - return c < 0; - case LESS_THAN_OR_EQUAL: - return c <= 0; - case GREATER_THAN: - return c > 0; - case GREATER_THAN_OR_EQUAL: - return c >= 0; - case EQUALS: - return c == 0; - case NOT_EQUALS: - return c != 0; - default: - throw new AssertionError( "unknown 
expression " + call ); - } - default: - if ( call.getOperator().getOperatorName() == OperatorName.UPPER ) { - argScalar.execute( context, args ); - String s0 = (String) args[0]; - if ( s0 == null ) { - return null; - } - return s0.toUpperCase( Locale.ROOT ); - } - if ( call.getOperator().getOperatorName() == OperatorName.SUBSTRING ) { - argScalar.execute( context, args ); - String s0 = (String) args[0]; - Number i1 = (Number) args[1]; - Number i2 = (Number) args[2]; - if ( s0 == null || i1 == null || i2 == null ) { - return null; - } - return s0.substring( i1.intValue() - 1, i1.intValue() - 1 + i2.intValue() ); - } - throw new AssertionError( "unknown expression " + call ); - } - } - }; - } - return new Scalar() { - @Override - public void execute( Context context, Object[] results ) { - results[0] = execute( context ); - } - - - @Override - public Object execute( Context context ) { - switch ( node.getKind() ) { - case LITERAL: - return ((RexLiteral) node).getValueAs( Comparable.class ); - case INPUT_REF: - return context.values[((RexInputRef) node).getIndex()]; - default: - throw new RuntimeException( "unknown expression type " + node ); - } - } - }; - } - - } - - /** * Information about a node registered in the data flow graph. */ diff --git a/core/src/main/java/org/polypheny/db/plan/AlgOptConnection.java b/core/src/main/java/org/polypheny/db/plan/AlgOptConnection.java deleted file mode 100644 index 9588bba9c6..0000000000 --- a/core/src/main/java/org/polypheny/db/plan/AlgOptConnection.java +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Copyright 2019-2021 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * This file incorporates code covered by the following terms: - * - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to you under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.plan; - - -/** - * The planner's view of a connection to a database. - * - * A connection contains a {@link AlgOptSchema}, via which the query planner can access {@link AlgOptTable} objects.
- */ -public interface AlgOptConnection { - - /** - * Returns the schema underlying this connection. - */ - AlgOptSchema getAlgOptSchema(); - -} - diff --git a/core/src/main/java/org/polypheny/db/processing/JsonRelProcessor.java b/core/src/main/java/org/polypheny/db/processing/JsonRelProcessor.java deleted file mode 100644 index 1f338bcae5..0000000000 --- a/core/src/main/java/org/polypheny/db/processing/JsonRelProcessor.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Copyright 2019-2021 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.processing; - -import org.polypheny.db.algebra.AlgNode; -import org.polypheny.db.transaction.Statement; - -public interface JsonRelProcessor { - - AlgNode parseJsonRel( Statement statement, String json ); - -} diff --git a/core/src/main/java/org/polypheny/db/rex/RexBuilder.java b/core/src/main/java/org/polypheny/db/rex/RexBuilder.java index 37fca611c4..ee3562f957 100644 --- a/core/src/main/java/org/polypheny/db/rex/RexBuilder.java +++ b/core/src/main/java/org/polypheny/db/rex/RexBuilder.java @@ -1332,7 +1332,7 @@ private RexLiteral makeMap( Map value, AlgDataType type, boolean final Map map = value .entrySet() .stream() - .collect( Collectors.toMap( e -> makeLiteral( e.getKey(), mapType.getKeyType(), allowCast ), e -> makeLiteral( e.getValue(), mapType.getValueType(), allowCast ) ) ); + .collect( Collectors.toMap( e -> makeLiteral( e.getKey(), mapType.unwrap( MapPolyType.class ).getKeyType(), allowCast ), e -> makeLiteral( e.getValue(), mapType.unwrap( MapPolyType.class ).getValueType(), allowCast ) ) ); return makeMap( type, map ); } diff --git a/core/src/main/java/org/polypheny/db/schema/Member.java b/core/src/main/java/org/polypheny/db/schema/Member.java deleted file mode 100644 index 48eeccfccb..0000000000 --- a/core/src/main/java/org/polypheny/db/schema/Member.java +++ /dev/null @@ -1,92 +0,0 @@ -/* - * Copyright 2019-2022 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * This file incorporates code covered by the following terms: - * - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to you under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.schema; - - -import java.util.List; -import org.apache.calcite.linq4j.Queryable; -import org.polypheny.db.algebra.type.AlgDataType; - - -/** - * A named expression in a schema. - * - *

<h2>Examples of members</h2>
- * - * Several kinds of members crop up in real life. They all implement the {@code Member} interface, but tend to be treated - * differently by the back-end system if not by Polypheny-DB. - * - * A member that has zero arguments and a type that is a collection of records is referred to as a relation. - * In schemas backed by a relational database, tables and views will appear as relations. - * - * A member that has one or more arguments and a type that is a collection of records is referred to as a - * parameterized relation. Some relational databases support these; for example, Oracle calls them "table functions". - * - * Members may be also more typical of programming-language functions: they take zero or more arguments, and return a result - * of arbitrary type. - * - * From the above definitions, you can see that a member is a special kind of function. This makes sense, because even though - * it has no arguments, it is "evaluated" each time it is used in a query. - */ -public interface Member { - - /** - * The name of this function. - */ - String getName(); - - /** - * Returns the parameters of this member. - * - * @return Parameters; never null - */ - List getParameters(); - - /** - * Returns the type of this function's result. - * - * @return Type of result; never null - */ - AlgDataType getType(); - - /** - * Evaluates this member to yield a result. The result is a {@link org.apache.calcite.linq4j.Queryable}. - * - * @param schemaInstance Object that is an instance of the containing {@link Schema} - * @param arguments List of arguments to the call; must match {@link #getParameters() parameters} in number and type - * @return An instance of this schema object, as a Queryable - */ - Queryable evaluate( Object schemaInstance, List arguments ); - -} diff --git a/core/src/main/java/org/polypheny/db/tools/RoutedAlgBuilder.java b/core/src/main/java/org/polypheny/db/tools/RoutedAlgBuilder.java index f93b1c8f01..cde4d2dbb5 100644 --- a/core/src/main/java/org/polypheny/db/tools/RoutedAlgBuilder.java +++ b/core/src/main/java/org/polypheny/db/tools/RoutedAlgBuilder.java @@ -22,7 +22,6 @@ import java.util.List; import java.util.Map; import java.util.stream.Collectors; -import lombok.AllArgsConstructor; import lombok.Getter; import org.bson.BsonValue; import org.polypheny.db.algebra.AlgNode; @@ -110,14 +109,4 @@ private List> map( List catalogCols } - @AllArgsConstructor - @Getter - public static class SelectedAdapterInfo { - - public final String uniqueName; - public final String physicalSchemaName; - public final String physicalTableName; - - } - -} + } diff --git a/core/src/main/java/org/polypheny/db/type/AbstractPolyType.java b/core/src/main/java/org/polypheny/db/type/AbstractPolyType.java index 2a8180f70e..9162042401 100644 --- a/core/src/main/java/org/polypheny/db/type/AbstractPolyType.java +++ b/core/src/main/java/org/polypheny/db/type/AbstractPolyType.java @@ -97,5 +97,17 @@ public AlgDataTypePrecedenceList getPrecedenceList() { return super.getPrecedenceList(); } + + public AlgDataType getKeyType() { + // this is not a map type + return null; + } + + + public AlgDataType getValueType() { + // this is not a map type + return null; + } + } diff --git a/core/src/main/java/org/polypheny/db/type/PolyTypeFactoryImpl.java b/core/src/main/java/org/polypheny/db/type/PolyTypeFactoryImpl.java index fa28b716a6..6bab1308d5 100644 --- a/core/src/main/java/org/polypheny/db/type/PolyTypeFactoryImpl.java +++ b/core/src/main/java/org/polypheny/db/type/PolyTypeFactoryImpl.java @@ -34,14 +34,18 @@ 
package org.polypheny.db.type; -import org.polypheny.db.algebra.type.*; +import java.nio.charset.Charset; +import java.util.List; +import org.polypheny.db.algebra.type.AlgDataType; +import org.polypheny.db.algebra.type.AlgDataTypeFactory; +import org.polypheny.db.algebra.type.AlgDataTypeFactoryImpl; +import org.polypheny.db.algebra.type.AlgDataTypeFamily; +import org.polypheny.db.algebra.type.AlgDataTypeField; +import org.polypheny.db.algebra.type.AlgDataTypeSystem; import org.polypheny.db.nodes.IntervalQualifier; import org.polypheny.db.util.Collation; import org.polypheny.db.util.Util; -import java.nio.charset.Charset; -import java.util.List; - /** * SqlTypeFactoryImpl provides a default implementation of {@link AlgDataTypeFactory} which supports SQL types. @@ -503,7 +507,7 @@ private AlgDataType copyArrayType( AlgDataType type, boolean nullable ) { private AlgDataType copyMapType( AlgDataType type, boolean nullable ) { - MapPolyType mt = (MapPolyType) type; + MapPolyType mt = type.unwrap( MapPolyType.class ); AlgDataType keyType = copyType( mt.getKeyType() ); AlgDataType valueType = copyType( mt.getValueType() ); return new MapPolyType( keyType, valueType, nullable ); diff --git a/core/src/main/java/org/polypheny/db/type/checker/OperandsTypeChecking.java b/core/src/main/java/org/polypheny/db/type/checker/OperandsTypeChecking.java deleted file mode 100644 index 9fed58191b..0000000000 --- a/core/src/main/java/org/polypheny/db/type/checker/OperandsTypeChecking.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Copyright 2019-2020 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.type.checker; - - -/** - * Strategies to check for allowed operand types of an operator call. - */ -public abstract class OperandsTypeChecking { - -} - diff --git a/core/src/main/java/org/polypheny/db/util/BarfingInvocationHandler.java b/core/src/main/java/org/polypheny/db/util/BarfingInvocationHandler.java index ea49fb6f1d..f36c171826 100644 --- a/core/src/main/java/org/polypheny/db/util/BarfingInvocationHandler.java +++ b/core/src/main/java/org/polypheny/db/util/BarfingInvocationHandler.java @@ -44,7 +44,6 @@ * * It is useful when you are prototyping code. You can rapidly create a prototype class which implements the important methods in an interface, then implement other methods as they are called. * - * @see DelegatingInvocationHandler */ public class BarfingInvocationHandler implements InvocationHandler { diff --git a/core/src/main/java/org/polypheny/db/util/DelegatingInvocationHandler.java b/core/src/main/java/org/polypheny/db/util/DelegatingInvocationHandler.java deleted file mode 100644 index dfa902c7eb..0000000000 --- a/core/src/main/java/org/polypheny/db/util/DelegatingInvocationHandler.java +++ /dev/null @@ -1,98 +0,0 @@ -/* - * Copyright 2019-2020 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * This file incorporates code covered by the following terms: - * - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to you under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.util; - - -import java.lang.reflect.InvocationHandler; -import java.lang.reflect.InvocationTargetException; -import java.lang.reflect.Method; - - -/** - * A class derived from DelegatingInvocationHandler handles a method call by looking for a method in itself with identical parameters. If - * no such method is found, it forwards the call to a fallback object, which must implement all of the interfaces which this proxy implements. - * - * It is useful in creating a wrapper class around an interface which may change over time. - * - * Example: - * - *
- * <blockquote>
- * <pre>import java.sql.Connection;
- * Connection connection = ...;
- * Connection tracingConnection = (Connection) Proxy.newProxyInstance(
- *     null,
- *     new Class[] {Connection.class},
- *     new DelegatingInvocationHandler() {
- *         protected Object getTarget() {
- *             return connection;
- *         }
- *         Statement createStatement() {
- *             System.out.println("statement created");
- *             return connection.createStatement();
- *         }
- *     });</pre>
- * </blockquote>
- */ -public abstract class DelegatingInvocationHandler implements InvocationHandler { - - @Override - public Object invoke( Object proxy, Method method, Object[] args ) throws Throwable { - Class clazz = getClass(); - Method matchingMethod; - try { - matchingMethod = clazz.getMethod( method.getName(), method.getParameterTypes() ); - } catch ( NoSuchMethodException | SecurityException e ) { - matchingMethod = null; - } - try { - if ( matchingMethod != null ) { - // Invoke the method in the derived class. - return matchingMethod.invoke( this, args ); - } else { - // Invoke the method on the proxy. - return method.invoke( getTarget(), args ); - } - } catch ( InvocationTargetException e ) { - throw e.getTargetException(); - } - } - - - /** - * Returns the object to forward method calls to, should the derived class not implement the method. Generally, this object will be a member of the - * derived class, supplied as a parameter to its constructor. - */ - protected abstract Object getTarget(); -} - diff --git a/core/src/main/java/org/polypheny/db/util/background/BackgroundTask.java b/core/src/main/java/org/polypheny/db/util/background/BackgroundTask.java index c04ac999e8..e5a4c48d12 100644 --- a/core/src/main/java/org/polypheny/db/util/background/BackgroundTask.java +++ b/core/src/main/java/org/polypheny/db/util/background/BackgroundTask.java @@ -1,10 +1,6 @@ package org.polypheny.db.util.background; -import com.google.gson.TypeAdapter; -import com.google.gson.stream.JsonReader; -import com.google.gson.stream.JsonWriter; -import java.io.IOException; import lombok.Getter; @@ -48,24 +44,6 @@ enum TaskSchedulingType { this.delayType = delayType; } - - public static class TaskSchedulingTypeAdapter extends TypeAdapter { - - @Override - public void write( JsonWriter out, TaskSchedulingType value ) throws IOException { - out.beginObject(); - out.value( value.name() ); - out.endObject(); - } - - - @Override - public TaskSchedulingType read( JsonReader in ) throws IOException { - return TaskSchedulingType.valueOf( in.nextString() ); - } - - } - } diff --git a/core/src/main/java/org/polypheny/db/util/mapping/Mapping.java b/core/src/main/java/org/polypheny/db/util/mapping/Mapping.java index a403e516e7..14ec72d1c2 100644 --- a/core/src/main/java/org/polypheny/db/util/mapping/Mapping.java +++ b/core/src/main/java/org/polypheny/db/util/mapping/Mapping.java @@ -44,7 +44,6 @@ * For instance: * *
 * <ul>
- * <li>If a target has more than one source, then the method {@link #getSource(int)} will throw {@link Mappings.TooManyElementsException}.
 * <li>If a source has no targets, then the method {@link #getTarget} will throw {@link Mappings.NoElementException}.
 * </ul>
*/ diff --git a/core/src/main/java/org/polypheny/db/util/mapping/Mappings.java b/core/src/main/java/org/polypheny/db/util/mapping/Mappings.java index 255743c2dc..e23decdfb1 100644 --- a/core/src/main/java/org/polypheny/db/util/mapping/Mappings.java +++ b/core/src/main/java/org/polypheny/db/util/mapping/Mappings.java @@ -1050,14 +1050,6 @@ public void remove() { } - /** - * Thrown when a mapping is expected to return one element but returns several. - */ - public static class TooManyElementsException extends RuntimeException { - - } - - /** * Thrown when a mapping is expected to return one element but returns none. */ diff --git a/core/src/test/java/org/polypheny/db/catalog/MockCatalogReader.java b/core/src/test/java/org/polypheny/db/catalog/MockCatalogReader.java index a40c8ceb3e..a3b7c69d67 100644 --- a/core/src/test/java/org/polypheny/db/catalog/MockCatalogReader.java +++ b/core/src/test/java/org/polypheny/db/catalog/MockCatalogReader.java @@ -37,7 +37,6 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterables; import java.lang.reflect.Type; -import java.nio.charset.Charset; import java.util.ArrayList; import java.util.Collection; import java.util.HashSet; @@ -59,18 +58,14 @@ import org.polypheny.db.algebra.constant.Monotonicity; import org.polypheny.db.algebra.logical.relational.LogicalScan; import org.polypheny.db.algebra.type.AlgDataType; -import org.polypheny.db.algebra.type.AlgDataTypeComparability; import org.polypheny.db.algebra.type.AlgDataTypeFactory; -import org.polypheny.db.algebra.type.AlgDataTypeFamily; import org.polypheny.db.algebra.type.AlgDataTypeField; -import org.polypheny.db.algebra.type.AlgDataTypePrecedenceList; import org.polypheny.db.algebra.type.AlgProtoDataType; import org.polypheny.db.algebra.type.AlgRecordType; import org.polypheny.db.algebra.type.DynamicRecordTypeImpl; import org.polypheny.db.algebra.type.StructKind; import org.polypheny.db.catalog.Catalog.NamespaceType; import org.polypheny.db.nodes.Call; -import org.polypheny.db.nodes.IntervalQualifier; import org.polypheny.db.nodes.Node; import org.polypheny.db.plan.AlgOptSchema; import org.polypheny.db.plan.AlgOptTable; @@ -89,9 +84,7 @@ import org.polypheny.db.schema.Wrapper; import org.polypheny.db.schema.impl.AbstractSchema; import org.polypheny.db.test.JdbcTest; -import org.polypheny.db.type.PolyType; import org.polypheny.db.util.AccessType; -import org.polypheny.db.util.Collation; import org.polypheny.db.util.ImmutableBitSet; import org.polypheny.db.util.InitializerExpressionFactory; import org.polypheny.db.util.NameMatchers; @@ -629,156 +622,6 @@ public AlgNode toAlg( ToAlgContext context, AlgTraitSet traitSet ) { } - /** - * Struct type based on another struct type. 
- */ - private static class DelegateStructType implements AlgDataType { - - private AlgDataType delegate; - private StructKind structKind; - - - DelegateStructType( AlgDataType delegate, StructKind structKind ) { - assert delegate.isStruct(); - this.delegate = delegate; - this.structKind = structKind; - } - - - @Override - public boolean isStruct() { - return delegate.isStruct(); - } - - - @Override - public boolean isDynamicStruct() { - return delegate.isDynamicStruct(); - } - - - @Override - public List getFieldList() { - return delegate.getFieldList(); - } - - - @Override - public List getFieldNames() { - return delegate.getFieldNames(); - } - - - @Override - public int getFieldCount() { - return delegate.getFieldCount(); - } - - - @Override - public StructKind getStructKind() { - return structKind; - } - - - @Override - public AlgDataTypeField getField( String fieldName, boolean caseSensitive, boolean elideRecord ) { - return delegate.getField( fieldName, caseSensitive, elideRecord ); - } - - - @Override - public boolean isNullable() { - return delegate.isNullable(); - } - - - @Override - public AlgDataType getComponentType() { - return delegate.getComponentType(); - } - - - @Override - public AlgDataType getKeyType() { - return delegate.getKeyType(); - } - - - @Override - public AlgDataType getValueType() { - return delegate.getValueType(); - } - - - @Override - public Charset getCharset() { - return delegate.getCharset(); - } - - - @Override - public Collation getCollation() { - return delegate.getCollation(); - } - - - @Override - public IntervalQualifier getIntervalQualifier() { - return delegate.getIntervalQualifier(); - } - - - @Override - public int getPrecision() { - return delegate.getPrecision(); - } - - - @Override - public int getRawPrecision() { - return delegate.getRawPrecision(); - } - - - @Override - public int getScale() { - return delegate.getScale(); - } - - - @Override - public PolyType getPolyType() { - return delegate.getPolyType(); - } - - - @Override - public String getFullTypeString() { - return delegate.getFullTypeString(); - } - - - @Override - public AlgDataTypeFamily getFamily() { - return delegate.getFamily(); - } - - - @Override - public AlgDataTypePrecedenceList getPrecedenceList() { - return delegate.getPrecedenceList(); - } - - - @Override - public AlgDataTypeComparability getComparability() { - return delegate.getComparability(); - } - - } - - /** * Wrapper around a {@link MockTable}, giving it a {@link Table} interface. You can get the {@code MockTable} by calling {@link #unwrap(Class)}. */ diff --git a/core/src/test/java/org/polypheny/db/schema/BookstoreSchema.java b/core/src/test/java/org/polypheny/db/schema/BookstoreSchema.java deleted file mode 100644 index a93410b02e..0000000000 --- a/core/src/test/java/org/polypheny/db/schema/BookstoreSchema.java +++ /dev/null @@ -1,176 +0,0 @@ -/* - * Copyright 2019-2020 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - * This file incorporates code covered by the following terms: - * - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to you under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.schema; - - -import java.math.BigDecimal; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; - - -/** - * A Schema representing a bookstore. - * - * It contains a single table with various levels/types of nesting, and is used mainly for testing parts of code that rely on nested structures. - * - * New authors can be added but attention should be made to update appropriately tests that might fail. - * - * The Schema is meant to be used with {@link org.polypheny.db.adapter.java.ReflectiveSchema} thus all fields, and methods, should be public. - */ -public final class BookstoreSchema { - - public final Author[] authors = { - new Author( 1, - "Victor Hugo", - new Place( - new Coordinate( BigDecimal.valueOf( 47.24 ), BigDecimal.valueOf( 6.02 ) ), - "Besançon", - "France" ), - Collections.singletonList( - new Book( "Les Misérables", - 1862, - Collections.singletonList( new Page( 1, "Contents" ) ) ) ) ), - new Author( 2, - "Nikos Kazantzakis", - new Place( - new Coordinate( BigDecimal.valueOf( 35.3387 ), BigDecimal.valueOf( 25.1442 ) ), - "Heraklion", - "Greece" ), - Arrays.asList( - new Book( - "Zorba the Greek", - 1946, - Arrays.asList( new Page( 1, "Contents" ), new Page( 2, "Acknowledgements" ) ) ), - new Book( - "The Last Temptation of Christ", - 1955, - Collections.singletonList( new Page( 1, "Contents" ) ) ) ) ), - new Author( 3, - "Homer", - new Place( null, "Ionia", "Greece" ), - Collections.emptyList() ) - }; - - - /** - * - */ - public static class Author { - - public final int aid; - public final String name; - public final Place birthPlace; - @org.polypheny.db.adapter.java.Array(component = Book.class) - public final List books; - - - public Author( int aid, String name, Place birthPlace, List books ) { - this.aid = aid; - this.name = name; - this.birthPlace = birthPlace; - this.books = books; - } - } - - - /** - * - */ - public static class Place { - - public final Coordinate coords; - public final String city; - public final String country; - - - public Place( Coordinate coords, String city, String country ) { - this.coords = coords; - this.city = city; - this.country = country; - } - - } - - - /** - * - */ - public static class Coordinate { - - public final BigDecimal latitude; - public final BigDecimal longtitude; - - - public Coordinate( BigDecimal latitude, BigDecimal longtitude ) { - this.latitude = latitude; - this.longtitude = longtitude; - } - } - - - /** - * - */ - public static class Book { - - public final String title; - public final int publishYear; - @org.polypheny.db.adapter.java.Array(component = Page.class) - public final List pages; - - - 
public Book( String title, int publishYear, List pages ) { - this.title = title; - this.publishYear = publishYear; - this.pages = pages; - } - } - - - /** - * - */ - public static class Page { - - public final int pageNo; - public final String contentType; - - - public Page( int pageNo, String contentType ) { - this.pageNo = pageNo; - this.contentType = contentType; - } - } -} diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/IcarusRouter.java b/dbms/src/main/java/org/polypheny/db/routing/routers/IcarusRouter.java index c6c00b6938..2b280888c1 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/IcarusRouter.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/IcarusRouter.java @@ -31,8 +31,6 @@ import org.polypheny.db.catalog.entity.CatalogTable; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.routing.LogicalQueryInformation; -import org.polypheny.db.routing.Router; -import org.polypheny.db.routing.factories.RouterFactory; import org.polypheny.db.schema.LogicalTable; import org.polypheny.db.tools.RoutedAlgBuilder; import org.polypheny.db.transaction.Statement; @@ -134,13 +132,4 @@ protected List handleNonePartitioning( AlgNode node, CatalogTa } - public static class IcarusRouterFactory extends RouterFactory { - - @Override - public Router createInstance() { - return new IcarusRouter(); - } - - } - } diff --git a/plugins/csv-adapter/src/test/java/org/polypheny/db/test/CsvTest.java b/plugins/csv-adapter/src/test/java/org/polypheny/db/test/CsvTest.java index 86f4734caa..1b89038f7e 100644 --- a/plugins/csv-adapter/src/test/java/org/polypheny/db/test/CsvTest.java +++ b/plugins/csv-adapter/src/test/java/org/polypheny/db/test/CsvTest.java @@ -65,6 +65,29 @@ import org.polypheny.db.util.Sources; import org.polypheny.db.util.Util; +import java.io.PrintStream; +import java.io.PrintWriter; +import java.sql.Connection; +import java.sql.DriverManager; +import java.sql.ResultSet; +import java.sql.ResultSetMetaData; +import java.sql.SQLException; +import java.sql.Statement; +import java.sql.Timestamp; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.Properties; +import java.util.concurrent.Callable; +import java.util.function.Consumer; +import org.junit.Assert; +import org.junit.Ignore; +import org.junit.Test; +import org.polypheny.db.sql.sql2alg.SqlToAlgConverter; +import org.polypheny.db.util.Sources; +import org.polypheny.db.util.Util; + /** * Unit test of the Polypheny-DB CSV adapter. @@ -884,59 +907,6 @@ private Void output( ResultSet resultSet ) { } - /** - * Receives commands on a queue and executes them on its own thread. Call {@link #close} to terminate. - * - * @param Result value of commands - */ - private static class Worker implements Runnable, AutoCloseable { - - /** - * Queue of commands. - */ - final BlockingQueue> queue = new ArrayBlockingQueue<>( 5 ); - /** - * The poison pill command. - */ - final Callable end = () -> null; - /** - * Value returned by the most recent command. - */ - private E v; - /** - * Exception thrown by a command or queue wait. 
- */ - private Exception e; - - - @Override - public void run() { - try { - for ( ; ; ) { - final Callable c = queue.take(); - if ( c == end ) { - return; - } - this.v = c.call(); - } - } catch ( Exception e ) { - this.e = e; - } - } - - - @Override - public void close() { - try { - queue.put( end ); - } catch ( InterruptedException e ) { - // ignore - } - } - - } - - /** * Fluent API to perform test actions. */ diff --git a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/CypherProcessorImpl.java b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/CypherProcessorImpl.java index 3b8f9eab90..784b6f14d3 100644 --- a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/CypherProcessorImpl.java +++ b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/CypherProcessorImpl.java @@ -16,10 +16,7 @@ package org.polypheny.db.cypher; -import java.util.ArrayList; import java.util.List; -import java.util.stream.Collectors; -import lombok.Getter; import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.time.StopWatch; import org.polypheny.db.algebra.AlgDecorrelator; @@ -28,13 +25,9 @@ import org.polypheny.db.algebra.constant.ExplainLevel; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.cypher.CypherNode.CypherKind; -import org.polypheny.db.cypher.CypherNode.CypherVisitor; -import org.polypheny.db.cypher.clause.CypherCreate; import org.polypheny.db.cypher.cypher2alg.CypherToAlgConverter; import org.polypheny.db.cypher.parser.CypherParser; import org.polypheny.db.cypher.parser.CypherParser.CypherParserConfig; -import org.polypheny.db.cypher.pattern.CypherEveryPathPattern; import org.polypheny.db.ddl.DdlManager; import org.polypheny.db.languages.NodeParseException; import org.polypheny.db.languages.QueryParameters; @@ -181,39 +174,4 @@ public boolean needsDdlGeneration( Node query, QueryParameters parameters ) { return false; } - - @Getter - public static class LabelExtractor extends CypherVisitor { - - final List nodeLabels = new ArrayList<>(); - final List relationshipLabels = new ArrayList<>(); - - - @Override - public void visit( CypherCreate create ) { - - List paths = create - .getPatterns() - .stream() - .filter( p -> p.getCypherKind() == CypherKind.PATH ) - .map( p -> (CypherEveryPathPattern) p ) - .collect( Collectors.toList() ); - - relationshipLabels.addAll( paths - .stream() - .map( CypherEveryPathPattern::getEdges ) - .flatMap( rs -> rs.stream().flatMap( r -> r.getLabels().stream() ) ) - .collect( Collectors.toSet() ) ); - - nodeLabels.addAll( paths - .stream() - .map( CypherEveryPathPattern::getNodes ) - .flatMap( rs -> rs.stream().flatMap( n -> n.getLabels().stream() ) ) - .collect( Collectors.toSet() ) ); - - super.visit( create ); - } - - } - } diff --git a/core/src/main/java/org/polypheny/db/util/Closer.java b/plugins/elasticsearch-adapter/src/test/java/org/polypheny/db/adapter/elasticsearch/Closer.java similarity index 61% rename from core/src/main/java/org/polypheny/db/util/Closer.java rename to plugins/elasticsearch-adapter/src/test/java/org/polypheny/db/adapter/elasticsearch/Closer.java index 47bacb65e4..20ac48dd92 100644 --- a/core/src/main/java/org/polypheny/db/util/Closer.java +++ b/plugins/elasticsearch-adapter/src/test/java/org/polypheny/db/adapter/elasticsearch/Closer.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2020 The Polypheny Project + * Copyright 2019-2023 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may 
not use this file except in compliance with the License. @@ -12,26 +12,9 @@ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. - * - * This file incorporates code covered by the following terms: - * - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to you under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. */ -package org.polypheny.db.util; +package org.polypheny.db.adapter.elasticsearch; import java.io.IOException; diff --git a/plugins/elasticsearch-adapter/src/test/java/org/polypheny/db/adapter/elasticsearch/EmbeddedElasticsearchPolicy.java b/plugins/elasticsearch-adapter/src/test/java/org/polypheny/db/adapter/elasticsearch/EmbeddedElasticsearchPolicy.java index efdaaf52de..fdb22a85fb 100644 --- a/plugins/elasticsearch-adapter/src/test/java/org/polypheny/db/adapter/elasticsearch/EmbeddedElasticsearchPolicy.java +++ b/plugins/elasticsearch-adapter/src/test/java/org/polypheny/db/adapter/elasticsearch/EmbeddedElasticsearchPolicy.java @@ -50,7 +50,6 @@ import org.elasticsearch.client.RestClient; import org.elasticsearch.common.transport.TransportAddress; import org.junit.rules.ExternalResource; -import org.polypheny.db.util.Closer; /** diff --git a/plugins/http-interface/src/main/java/org/polypheny/db/http/model/QueryRequest.java b/plugins/http-interface/src/main/java/org/polypheny/db/http/model/QueryRequest.java deleted file mode 100644 index 9644e272a9..0000000000 --- a/plugins/http-interface/src/main/java/org/polypheny/db/http/model/QueryRequest.java +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Copyright 2019-2023 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.polypheny.db.http.model; - -public class QueryRequest extends UIRequest { - - /** - * A query from the SQL console - */ - public String query; - - /** - * TRUE if information about the query execution should be added to the Query Analyzer (InformationManager) - */ - public boolean analyze; - - /** - * This flag defines which language was used for this query - */ - public String language; - - /** - * This flag defines the default database to use - */ - public String database; - -} diff --git a/plugins/http-interface/src/main/java/org/polypheny/db/http/model/UIRequest.java b/plugins/http-interface/src/main/java/org/polypheny/db/http/model/UIRequest.java deleted file mode 100644 index 2ea231937f..0000000000 --- a/plugins/http-interface/src/main/java/org/polypheny/db/http/model/UIRequest.java +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Copyright 2019-2023 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.http.model; - - -import java.util.Map; - -/** - * Required to parse a request coming from the UI using Gson - */ -public class UIRequest { - - /** - * ExpressionType of a request, e.g. QueryRequest or RelAlgRequest - */ - public String requestType; - - /** - * The name of the table the data should be fetched from - */ - public String tableId; - - /** - * Information about the pagination, - * what current page should be loaded - */ - public int currentPage; - - /** - * Data that should be inserted - */ - public Map data; - - /** - * For each column: If it should be filtered empty string if it should not be filtered - */ - public Map filter; - - /** - * For each column: If and how it should be sorted - */ - public Map sortState; - - /** - * Request to fetch a result without a limit. Default false. - */ - public boolean noLimit; - - - public String getSchemaName() { - if ( tableId != null ) { - return tableId.split( "\\." )[0]; - } - return null; - } - - - public String getTableName() { - if ( tableId != null ) { - return tableId.split( "\\." 
)[1]; - } - return null; - } - -} diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcUtils.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcUtils.java index 19c38922e0..62d4a46794 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcUtils.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcUtils.java @@ -33,31 +33,39 @@ package org.polypheny.db.adapter.jdbc; -import com.google.common.cache.CacheBuilder; -import com.google.common.cache.CacheLoader; -import com.google.common.cache.LoadingCache; import com.google.common.collect.ImmutableList; import com.google.common.primitives.Ints; +import java.sql.Connection; +import java.sql.DatabaseMetaData; +import java.sql.Date; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Time; +import java.sql.Timestamp; +import java.sql.Types; +import java.util.Arrays; +import java.util.HashMap; +import java.util.IdentityHashMap; +import java.util.List; +import java.util.Map; +import java.util.TimeZone; +import javax.sql.DataSource; import org.apache.calcite.avatica.ColumnMetaData; import org.apache.calcite.avatica.util.DateTimeUtils; import org.apache.calcite.linq4j.function.Function0; import org.apache.calcite.linq4j.function.Function1; -import org.apache.commons.dbcp2.BasicDataSource; import org.polypheny.db.adapter.jdbc.connection.ConnectionFactory; -import org.polypheny.db.information.*; +import org.polypheny.db.information.Information; +import org.polypheny.db.information.InformationGraph; import org.polypheny.db.information.InformationGraph.GraphData; import org.polypheny.db.information.InformationGraph.GraphType; +import org.polypheny.db.information.InformationGroup; +import org.polypheny.db.information.InformationPage; +import org.polypheny.db.information.InformationTable; import org.polypheny.db.sql.language.SqlDialect; import org.polypheny.db.sql.language.SqlDialectFactory; -import org.polypheny.db.util.ImmutableNullableList; import org.polypheny.db.util.Pair; -import javax.annotation.Nonnull; -import javax.sql.DataSource; -import java.sql.Date; -import java.sql.*; -import java.util.*; - /** * Utilities for the JDBC provider. @@ -225,40 +233,6 @@ private static Date shift( Date v ) { } - /** - * Ensures that if two data sources have the same definition, they will use the same object. - * - * This in turn makes it easier to cache {@link SqlDialect} objects. Otherwise, each time we see a new data source, - * we have to open a connection to find out what database product and version it is. - */ - public static class DataSourcePool { - - public static final DataSourcePool INSTANCE = new DataSourcePool(); - - private final LoadingCache, BasicDataSource> cache = CacheBuilder.newBuilder().softValues().build( CacheLoader.from( DataSourcePool::dataSource ) ); - - - private static @Nonnull - BasicDataSource dataSource( @Nonnull List key ) { - BasicDataSource dataSource = new BasicDataSource(); - dataSource.setUrl( key.get( 0 ) ); - dataSource.setUsername( key.get( 1 ) ); - dataSource.setPassword( key.get( 2 ) ); - dataSource.setDriverClassName( key.get( 3 ) ); - return dataSource; - } - - - public DataSource get( String url, String driverClassName, String username, String password ) { - // Get data source objects from a cache, so that we don't have to sniff out what kind of - // database they are quite as often. 
- final List key = ImmutableNullableList.of( url, username, password, driverClassName ); - return cache.getUnchecked( key ); - } - - } - - /** * Builds and adds an new information group, observing the connection pool, to the provided information objects * diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlFactoryProvider.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlFactoryProvider.java deleted file mode 100644 index 7f05092308..0000000000 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlFactoryProvider.java +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Copyright 2019-2023 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.sql.language; - -import org.polypheny.db.languages.ParserFactory; -import org.polypheny.db.languages.sql.parser.impl.SqlParserImpl; - -public interface SqlFactoryProvider { - - static ParserFactory getSqlParserFactory() { - return SqlParserImpl.FACTORY; - } - -} diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlUnnestOperator.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlUnnestOperator.java index 5d528dc8b6..22acc461f3 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlUnnestOperator.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlUnnestOperator.java @@ -80,8 +80,8 @@ public AlgDataType inferReturnType( OperatorBinding opBinding ) { assert type instanceof ArrayType || type instanceof MultisetPolyType || type instanceof MapPolyType; if ( type instanceof MapPolyType ) { - builder.add( UnnestOperator.MAP_KEY_COLUMN_NAME, null, type.getKeyType() ); - builder.add( UnnestOperator.MAP_VALUE_COLUMN_NAME, null, type.getValueType() ); + builder.add( UnnestOperator.MAP_KEY_COLUMN_NAME, null, type.unwrap( MapPolyType.class ).getKeyType() ); + builder.add( UnnestOperator.MAP_VALUE_COLUMN_NAME, null, type.unwrap( MapPolyType.class ).getValueType() ); } else { if ( type.getComponentType().isStruct() ) { builder.addAll( type.getComponentType().getFieldList() ); diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/fun/SqlItemOperator.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/fun/SqlItemOperator.java index 0822fd8af0..5e4fdf49e2 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/fun/SqlItemOperator.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/fun/SqlItemOperator.java @@ -31,6 +31,7 @@ import org.polypheny.db.sql.language.SqlSpecialOperator; import org.polypheny.db.sql.language.SqlWriter; import org.polypheny.db.type.ArrayType; +import org.polypheny.db.type.MapPolyType; import org.polypheny.db.type.OperandCountRange; import org.polypheny.db.type.PolyOperandCountRanges; import org.polypheny.db.type.PolyType; @@ -126,7 +127,7 @@ private PolySingleOperandTypeChecker getChecker( AlgDataType operandType ) { case ARRAY: return 
OperandTypes.family( PolyTypeFamily.INTEGER ); case MAP: - return OperandTypes.family( operandType.getKeyType().getPolyType().getFamily() ); + return OperandTypes.family( operandType.unwrap( MapPolyType.class ).getKeyType().getPolyType().getFamily() ); case ANY: case DYNAMIC_STAR: return OperandTypes.or( @@ -169,7 +170,7 @@ public AlgDataType inferReturnType( OperatorBinding opBinding ) { return typeFactory.createTypeWithNullability( operandType.getComponentType(), true ); } case MAP: - return typeFactory.createTypeWithNullability( operandType.getValueType(), true ); + return typeFactory.createTypeWithNullability( operandType.unwrap( MapPolyType.class ).getValueType(), true ); case ANY: case DYNAMIC_STAR: return typeFactory.createTypeWithNullability( typeFactory.createPolyType( PolyType.ANY ), true ); diff --git a/plugins/sql-language/src/test/java/org/polypheny/db/sql/Smalls.java b/plugins/sql-language/src/test/java/org/polypheny/db/sql/Smalls.java index f534ab28a9..6612745a11 100644 --- a/plugins/sql-language/src/test/java/org/polypheny/db/sql/Smalls.java +++ b/plugins/sql-language/src/test/java/org/polypheny/db/sql/Smalls.java @@ -19,10 +19,6 @@ import com.google.common.collect.ImmutableList; import java.lang.reflect.Method; -import java.math.BigDecimal; -import java.sql.Date; -import java.sql.Time; -import java.sql.Timestamp; import java.util.AbstractList; import java.util.List; import java.util.Locale; @@ -43,7 +39,6 @@ import org.polypheny.db.algebra.type.AlgDataTypeFactory; import org.polypheny.db.nodes.Call; import org.polypheny.db.nodes.Node; -import org.polypheny.db.runtime.functions.Functions; import org.polypheny.db.schema.QueryableTable; import org.polypheny.db.schema.ScannableTable; import org.polypheny.db.schema.Schema; @@ -589,190 +584,6 @@ public static int eval( int x, int y ) { } - /** - * Example of a UDF class that needs to be instantiated but cannot be. - */ - public abstract static class AwkwardFunction { - - private AwkwardFunction() { - } - - - public int eval( int x ) { - return 0; - } - - } - - - /** - * UDF class that has multiple methods, some overloaded. - */ - public static class MultipleFunction { - - private MultipleFunction() { - } - - - // Three overloads - public static String fun1( String x ) { - return x.toLowerCase( Locale.ROOT ); - } - - - public static int fun1( int x ) { - return x * 2; - } - - - public static int fun1( int x, int y ) { - return x + y; - } - - - // Another method - public static int fun2( int x ) { - return x * 3; - } - - - // Non-static method cannot be used because constructor is private - public int nonStatic( int x ) { - return x * 3; - } - - } - - - /** - * UDF class that provides user-defined functions for each data type. - */ - @Deterministic - public static class AllTypesFunction { - - private AllTypesFunction() { - } - - // We use SqlFunctions.toLong(Date) ratter than Date.getTime(), and SqlFunctions.internalToTimestamp(long) rather than new Date(long), - // because the contract of JDBC (also used by UDFs) is to represent date-time values in the LOCAL time zone. - - - public static long dateFun( java.sql.Date v ) { - return v == null ? -1L : Functions.toLong( v ); - } - - - public static long timestampFun( java.sql.Timestamp v ) { - return v == null ? -1L : Functions.toLong( v ); - } - - - public static long timeFun( java.sql.Time v ) { - return v == null ? -1L : Functions.toLong( v ); - } - - - /** - * Overloaded, in a challenging way, with {@link #toDateFun(Long)}. 
- */ - public static java.sql.Date toDateFun( int v ) { - return Functions.internalToDate( v ); - } - - - public static java.sql.Date toDateFun( Long v ) { - return v == null ? null : Functions.internalToDate( v.intValue() ); - } - - - public static java.sql.Timestamp toTimestampFun( Long v ) { - return Functions.internalToTimestamp( v ); - } - - - public static java.sql.Time toTimeFun( Long v ) { - return v == null ? null : Functions.internalToTime( v.intValue() ); - } - - - /** - * for Overloaded user-defined functions that have Double and BigDecimal arguments will goes wrong - */ - public static double toDouble( BigDecimal var ) { - return var == null ? null : var.doubleValue(); - } - - - public static double toDouble( Double var ) { - return var == null ? 0.0d : var; - } - - - public static double toDouble( Float var ) { - return var == null ? 0.0d : Double.parseDouble( var.toString() ); - } - - - public static List arrayAppendFun( List v, Integer i ) { - if ( v == null || i == null ) { - return null; - } else { - v.add( i ); - return v; - } - } - - - /** - * Overloaded functions with DATE, TIMESTAMP and TIME arguments. - */ - public static long toLong( Date date ) { - return date == null ? 0 : Functions.toLong( date ); - } - - - public static long toLong( Timestamp timestamp ) { - return timestamp == null ? 0 : Functions.toLong( timestamp ); - } - - - public static long toLong( Time time ) { - return time == null ? 0 : Functions.toLong( time ); - } - - } - - - /** - * Example of a user-defined aggregate function (UDAF). - */ - public static class MySumFunction { - - public MySumFunction() { - } - - - public long init() { - return 0L; - } - - - public long add( long accumulator, int v ) { - return accumulator + v; - } - - - public long merge( long accumulator0, long accumulator1 ) { - return accumulator0 + accumulator1; - } - - - public long result( long accumulator ) { - return accumulator; - } - - } - /** * A generic interface for defining user defined aggregate functions @@ -794,245 +605,6 @@ private interface MyGenericAggFunction { } - /** - * Example of a user-defined aggregate function that implements a generic interface. - */ - public static class MySum3 implements MyGenericAggFunction { - - @Override - public Integer init() { - return 0; - } - - - @Override - public Integer add( Integer accumulator, Integer val ) { - return accumulator + val; - } - - - @Override - public Integer merge( Integer accumulator1, Integer accumulator2 ) { - return accumulator1 + accumulator2; - } - - - @Override - public Integer result( Integer accumulator ) { - return accumulator; - } - - } - - - /** - * Example of a user-defined aggregate function (UDAF), whose methods are static. - */ - public static class MyStaticSumFunction { - - public static long init() { - return 0L; - } - - - public static long add( long accumulator, int v ) { - return accumulator + v; - } - - - public static long merge( long accumulator0, long accumulator1 ) { - return accumulator0 + accumulator1; - } - - - public static long result( long accumulator ) { - return accumulator; - } - - } - - - /** - * Example of a user-defined aggregate function (UDAF). 
- */ - public static class MyTwoParamsSumFunctionFilter1 { - - public MyTwoParamsSumFunctionFilter1() { - } - - - public int init() { - return 0; - } - - - public int add( int accumulator, int v1, int v2 ) { - if ( v1 > v2 ) { - return accumulator + v1; - } - return accumulator; - } - - - public int merge( int accumulator0, int accumulator1 ) { - return accumulator0 + accumulator1; - } - - - public int result( int accumulator ) { - return accumulator; - } - - } - - - /** - * Example of a user-defined aggregate function (UDAF). - */ - public static class MyTwoParamsSumFunctionFilter2 { - - public MyTwoParamsSumFunctionFilter2() { - } - - - public long init() { - return 0L; - } - - - public long add( long accumulator, int v1, String v2 ) { - if ( v2.equals( "Eric" ) ) { - return accumulator + v1; - } - return accumulator; - } - - - public long merge( long accumulator0, long accumulator1 ) { - return accumulator0 + accumulator1; - } - - - public long result( long accumulator ) { - return accumulator; - } - - } - - - /** - * Example of a user-defined aggregate function (UDAF), whose methods are static. - */ - public static class MyThreeParamsSumFunctionWithFilter1 { - - public static long init() { - return 0L; - } - - - public static long add( long accumulator, int v1, String v2, String v3 ) { - if ( v2.equals( v3 ) ) { - return accumulator + v1; - } - return accumulator; - } - - - public static long merge( long accumulator0, long accumulator1 ) { - return accumulator0 + accumulator1; - } - - - public static long result( long accumulator ) { - return accumulator; - } - - } - - - /** - * Example of a user-defined aggregate function (UDAF), whose methods are static. olny for validate to get exact function by Polypheny-DB - */ - public static class MyThreeParamsSumFunctionWithFilter2 { - - public static long init() { - return 0L; - } - - - public static long add( long accumulator, int v1, int v2, int v3 ) { - if ( v3 > 250 ) { - return accumulator + v1 + v2; - } - return accumulator; - } - - - public static long merge( long accumulator0, long accumulator1 ) { - return accumulator0 + accumulator1; - } - - - public static long result( long accumulator ) { - return accumulator; - } - - } - - - /** - * User-defined function. - */ - public static class SumFunctionBadIAdd { - - public long init() { - return 0L; - } - - - public long add( short accumulator, int v ) { - return accumulator + v; - } - - } - - - /** - * A table function that returns a {@link QueryableTable}. - */ - public static class SimpleTableFunction { - - public QueryableTable eval( Integer s ) { - return generateStrings( s ); - } - - } - - - /** - * A table function that returns a {@link QueryableTable}. - */ - public static class MyTableFunction { - - public QueryableTable eval( String s ) { - return oneThreePlus( s ); - } - - } - - - /** - * A table function that returns a {@link QueryableTable} via a static method. - */ - public static class TestStaticTableFunction { - - public static QueryableTable eval( String s ) { - return oneThreePlus( s ); - } - - } - - /** * The real MazeTable may be found in example/function. This is a cut-down version to support a test. 
*/ @@ -1051,15 +623,6 @@ public static ScannableTable generate( int width, int height, int seed ) { } - public static ScannableTable generate2( @Parameter(name = "WIDTH") int width, @Parameter(name = "HEIGHT") int height, @Parameter(name = "SEED", optional = true) Integer seed ) { - return new MazeTable( String.format( Locale.ROOT, "generate2(w=%d, h=%d, s=%d)", width, height, seed ) ); - } - - - public static ScannableTable generate3( @Parameter(name = "FOO") String foo ) { - return new MazeTable( String.format( Locale.ROOT, "generate3(foo=%s)", foo ) ); - } - @Override public AlgDataType getRowType( AlgDataTypeFactory typeFactory ) { @@ -1078,25 +641,6 @@ public Enumerable scan( DataContext root ) { } - /** - * Schema containing a {@code prod} table with a lot of columns. - */ - public static class WideSaleSchema { - - @Override - public String toString() { - return "WideSaleSchema"; - } - - - @SuppressWarnings("unused") - public final WideProductSale[] prod = { - new WideProductSale( 100, 10 ) - }; - - } - - /** * Table with a lot of columns. */ diff --git a/webui/src/main/java/org/polypheny/db/webui/models/Debug.java b/webui/src/main/java/org/polypheny/db/webui/models/Debug.java deleted file mode 100644 index 0ad187efbc..0000000000 --- a/webui/src/main/java/org/polypheny/db/webui/models/Debug.java +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Copyright 2019-2020 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.webui.models; - - -/** - * Infos about a query, e.g. 
number of affected rows - */ -public class Debug { - - private int affectedRows; - private String generatedQuery; - - - public Debug setAffectedRows( final int affectedRows ) { - this.affectedRows = affectedRows; - return this; - } - - - public Debug setGeneratedQuery( final String query ) { - this.generatedQuery = query; - return this; - } - - - public Debug update( final Debug debug ) { - if ( debug.affectedRows != 0 ) { - this.affectedRows = debug.affectedRows; - } - if ( debug.generatedQuery != null ) { - this.generatedQuery = debug.generatedQuery; - } - return this; - } -} From 33135140bf44b727e0719ec017560791babed391 Mon Sep 17 00:00:00 2001 From: datomo Date: Sat, 18 Feb 2023 00:49:59 +0100 Subject: [PATCH 010/436] restructuring logical into separate catalogs --- .../db/algebra/type/AlgDataType.java | 4 - .../org/polypheny/db/tools/AlgBuilder.java | 2 +- .../polypheny/db/catalog/CatalogPlugin.java | 10 +- .../org/polypheny/db/catalog/NCatalog.java | 51 ++++++ .../org/polypheny/db/catalog/PolyCatalog.java | 169 +++--------------- .../AllocationCatalog.java} | 22 ++- .../catalog/document/CatalogDocDatabase.java | 53 ------ .../{ => entities}/CatalogDatabase.java | 9 +- .../document/CatalogCollection.java | 2 +- .../document/DocumentCatalog.java | 41 ++--- .../{ => logical}/graph/CatalogGraph.java | 2 +- .../{ => logical}/graph/GraphCatalog.java | 41 +++-- .../relational/CatalogColumn.java} | 14 +- .../logical/relational/CatalogTable.java | 78 ++++++++ .../logical/relational/RelationalCatalog.java | 107 +++++++++++ .../mappings/CatalogDocumentMapping.java | 38 ---- .../catalog/mappings/CatalogGraphMapping.java | 60 ------- .../catalog/mappings/CatalogModelMapping.java | 24 --- .../mappings/CatalogRelationalMapping.java | 38 ---- .../db/catalog/relational/CatalogColumn.java | 21 --- .../db/catalog/relational/CatalogSchema.java | 60 ------- .../catalog/relational/RelationalCatalog.java | 100 ----------- 22 files changed, 338 insertions(+), 608 deletions(-) create mode 100644 plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/NCatalog.java rename plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/{ModelCatalog.java => alocation/AllocationCatalog.java} (65%) delete mode 100644 plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/document/CatalogDocDatabase.java rename plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/{ => entities}/CatalogDatabase.java (81%) rename plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/{ => logical}/document/CatalogCollection.java (92%) rename plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/{ => logical}/document/DocumentCatalog.java (58%) rename plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/{ => logical}/graph/CatalogGraph.java (96%) rename plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/{ => logical}/graph/GraphCatalog.java (60%) rename plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/{relational/CatalogTable.java => logical/relational/CatalogColumn.java} (81%) create mode 100644 plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/relational/CatalogTable.java create mode 100644 plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/relational/RelationalCatalog.java delete mode 100644 plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/mappings/CatalogDocumentMapping.java delete mode 100644 plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/mappings/CatalogGraphMapping.java delete mode 100644 
plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/mappings/CatalogModelMapping.java delete mode 100644 plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/mappings/CatalogRelationalMapping.java delete mode 100644 plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/relational/CatalogColumn.java delete mode 100644 plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/relational/CatalogSchema.java delete mode 100644 plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/relational/RelationalCatalog.java diff --git a/core/src/main/java/org/polypheny/db/algebra/type/AlgDataType.java b/core/src/main/java/org/polypheny/db/algebra/type/AlgDataType.java index 72004d3d89..7a586f33bd 100644 --- a/core/src/main/java/org/polypheny/db/algebra/type/AlgDataType.java +++ b/core/src/main/java/org/polypheny/db/algebra/type/AlgDataType.java @@ -74,10 +74,6 @@ public interface AlgDataType { List getFieldNames(); - List getIds(); - - Long getId(); - /** * Returns the number of fields in a struct type. * diff --git a/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java b/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java index 83c0f00abd..16b927be24 100644 --- a/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java +++ b/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java @@ -1870,7 +1870,7 @@ public AlgBuilder aggregate( GroupKey groupKey, Iterable aggCalls ) { final Kind kind = node.getKind(); switch ( kind ) { case INPUT_REF: - fields.add( frame.unstructured.get( ((RexInputRef) node).getIndex() ) ); + fields.add( frame.structured.get( ((RexInputRef) node).getIndex() ) ); break; default: String name = aggregateFields.get( i ).getName(); diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/CatalogPlugin.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/CatalogPlugin.java index 681914b1b6..235f6dd8ac 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/CatalogPlugin.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/CatalogPlugin.java @@ -38,17 +38,19 @@ public CatalogPlugin( PluginWrapper wrapper ) { public void start() { PolyCatalog catalog = new PolyCatalog(); long user = catalog.addUser( "admin" ); - long database = catalog.addDatabase( "APP", user ); - catalog.addNamespace( "test", database, user, NamespaceType.RELATIONAL ); - long namespaceId = catalog.addNamespace( "test2", database, user, NamespaceType.RELATIONAL ); + catalog.addNamespace( "test", NamespaceType.RELATIONAL ); + long namespaceId = catalog.addNamespace( "test2", NamespaceType.RELATIONAL ); - catalog.addEntity( "testTable", namespaceId, NamespaceType.RELATIONAL, user ); + long tableId = catalog.addTable( "testTable", namespaceId ); + catalog.addColumn( "testColumn", namespaceId, tableId, null ); byte[] buffer = catalog.serialize( PolyCatalog.class ); PolyCatalog catalog1 = catalog.deserialize( buffer, PolyCatalog.class ); + catalog1.addColumn( "testColumn2", namespaceId, tableId, null ); + } } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/NCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/NCatalog.java new file mode 100644 index 0000000000..30b57ae1ed --- /dev/null +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/NCatalog.java @@ -0,0 +1,51 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.catalog; + +import org.polypheny.db.catalog.logical.document.DocumentCatalog; +import org.polypheny.db.catalog.logical.graph.GraphCatalog; +import org.polypheny.db.catalog.logical.relational.RelationalCatalog; + +public interface NCatalog { + + void commit(); + + void rollback(); + + boolean hasUncommittedChanges(); + + default RelationalCatalog asRelational() { + return unwrap( RelationalCatalog.class ); + } + + default DocumentCatalog asDocument() { + return unwrap( DocumentCatalog.class ); + } + + default GraphCatalog asGraph() { + return unwrap( GraphCatalog.class ); + } + + default T unwrap( Class clazz ) { + if ( !this.getClass().isAssignableFrom( clazz ) ) { + throw new RuntimeException( String.format( "Error on retrieval the %s catalog.", clazz.getSimpleName() ) ); + } + return clazz.cast( this ); + } + + +} diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java index cd89d8d4e5..d6b7ec1d5c 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java @@ -16,7 +16,6 @@ package org.polypheny.db.catalog; -import com.google.common.collect.ImmutableList; import io.activej.serializer.BinarySerializer; import io.activej.serializer.annotations.Deserialize; import io.activej.serializer.annotations.Serialize; @@ -30,15 +29,11 @@ import org.polypheny.db.algebra.constant.Syntax; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.catalog.Catalog.NamespaceType; -import org.polypheny.db.catalog.document.DocumentCatalog; import org.polypheny.db.catalog.entities.CatalogUser; import org.polypheny.db.catalog.exceptions.NoTablePrimaryKeyException; -import org.polypheny.db.catalog.graph.GraphCatalog; -import org.polypheny.db.catalog.mappings.CatalogDocumentMapping; -import org.polypheny.db.catalog.mappings.CatalogGraphMapping; -import org.polypheny.db.catalog.mappings.CatalogModelMapping; -import org.polypheny.db.catalog.mappings.CatalogRelationalMapping; -import org.polypheny.db.catalog.relational.RelationalCatalog; +import org.polypheny.db.catalog.logical.document.DocumentCatalog; +import org.polypheny.db.catalog.logical.graph.GraphCatalog; +import org.polypheny.db.catalog.logical.relational.RelationalCatalog; import org.polypheny.db.nodes.Identifier; import org.polypheny.db.nodes.Operator; import org.polypheny.db.plan.AlgOptTable; @@ -63,58 +58,45 @@ public class PolyCatalog implements SerializableCatalog, CatalogReader { public final BinarySerializer serializer = SerializableCatalog.builder.get().build( PolyCatalog.class ); @Serialize - public final RelationalCatalog relational; - @Serialize - public final GraphCatalog graph; - @Serialize - public final DocumentCatalog document; + public final Map relationals; - private final ImmutableList catalogs; @Serialize - public final Map users; + public final Map documents; @Serialize - public final Map databases; + public final Map graphs; @Serialize - public 
final Map mappings; + public final Map users; private final IdBuilder idBuilder = new IdBuilder(); public PolyCatalog() { - this( new DocumentCatalog(), new GraphCatalog(), new RelationalCatalog(), new ConcurrentHashMap<>(), new ConcurrentHashMap<>(), new ConcurrentHashMap<>() ); + this( new ConcurrentHashMap<>(), new ConcurrentHashMap<>(), new ConcurrentHashMap<>(), new ConcurrentHashMap<>() ); } public PolyCatalog( - @Deserialize("document") DocumentCatalog document, - @Deserialize("graph") GraphCatalog graph, - @Deserialize("relational") RelationalCatalog relational, @Deserialize("users") Map users, - @Deserialize("databases") Map databases, - @Deserialize("mappings") Map mappings ) { - this.document = document; - this.graph = graph; - this.relational = relational; + @Deserialize("relationals") Map relationals, + @Deserialize("documents") Map documents, + @Deserialize("graphs") Map graphs ) { this.users = users; - this.databases = databases; - this.mappings = mappings; - - catalogs = ImmutableList.of( this.relational, this.graph, this.document ); + this.relationals = relationals; + this.documents = documents; + this.graphs = graphs; } public void commit() throws NoTablePrimaryKeyException { log.debug( "commit" ); - catalogs.stream().filter( ModelCatalog::hasUncommittedChanges ).forEach( ModelCatalog::commit ); } public void rollback() { log.debug( "rollback" ); - catalogs.stream().filter( ModelCatalog::hasUncommittedChanges ).forEach( ModelCatalog::rollback ); } @@ -127,150 +109,43 @@ public long addUser( @NonNull String name ) { } - public long addDatabase( String name, long ownerId ) { - long id = idBuilder.getNewDatabaseId(); - - databases.put( id, new CatalogDatabase( id, name, ownerId ) ); - return id; - } - - - public long addNamespace( String name, long databaseId, long ownerId, NamespaceType namespaceType ) { + public long addNamespace( String name, NamespaceType namespaceType ) { long id = idBuilder.getNewNamespaceId(); - CatalogModelMapping mapping = null; switch ( namespaceType ) { case RELATIONAL: - mapping = addRelationalNamespace( id, name, databaseId, namespaceType, ownerId ); + relationals.put( id, new RelationalCatalog( id, name ) ); break; case DOCUMENT: - mapping = addDocumentNamespace( id, name, databaseId, namespaceType, ownerId ); + documents.put( id, new DocumentCatalog( id, name ) ); break; case GRAPH: - mapping = addGraphNamespace( id, name, databaseId, namespaceType, ownerId ); + graphs.put( id, new GraphCatalog( id, name ) ); break; } - mappings.put( id, mapping ); - return id; } - private CatalogModelMapping addGraphNamespace( long id, String name, long databaseId, NamespaceType namespaceType, long ownerId ) { - // add to model catalog - graph.addGraph( id, name, databaseId, namespaceType ); - - // add substitutions for other models - long nodeId = idBuilder.getNewEntityId(); - long nPropertiesId = idBuilder.getNewEntityId(); - long edgeId = idBuilder.getNewEntityId(); - long ePropertiesId = idBuilder.getNewEntityId(); - - // add relational - relational.addSchema( id, name, databaseId, namespaceType ); - relational.addTable( nodeId, "_nodes_", id ); - relational.addTable( nPropertiesId, "_nProperties_", id ); - relational.addTable( edgeId, "_edges_", id ); - relational.addTable( ePropertiesId, "_eProperties_", id ); - - // add document - document.addDatabase( id, name, databaseId, namespaceType ); - document.addCollection( nodeId, "_nodes_", id ); - document.addCollection( nPropertiesId, "_nProperties_", id ); - document.addCollection( edgeId, "_edges_", id 
); - document.addCollection( ePropertiesId, "_eProperties_", id ); - - return new CatalogGraphMapping( id, nodeId, nPropertiesId, edgeId, ePropertiesId ); - } - - - private CatalogModelMapping addDocumentNamespace( long id, String name, long databaseId, NamespaceType namespaceType, long ownerId ) { - // add to model catalog - document.addDatabase( id, name, databaseId, namespaceType ); - - // add substitutions to other models - relational.addSchema( id, name, databaseId, namespaceType ); - graph.addGraph( id, name, databaseId, namespaceType ); - - return new CatalogDocumentMapping( id ); - } - - - private CatalogModelMapping addRelationalNamespace( long id, String name, long databaseId, NamespaceType namespaceType, long ownerId ) { - // add to model catalog - relational.addSchema( id, name, databaseId, namespaceType ); - - // add substitutions to other models - document.addDatabase( id, name, databaseId, namespaceType ); - graph.addGraph( id, name, databaseId, namespaceType ); - - return new CatalogRelationalMapping( id ); - } - - - public long addEntity( String name, long namespaceId, NamespaceType type, long ownerId ) { + public long addTable( String name, long namespaceId ) { long id = idBuilder.getNewEntityId(); - switch ( type ) { - case RELATIONAL: - addRelationalEntity( id, name, namespaceId ); - break; - case DOCUMENT: - addDocumentEntity( id, name, namespaceId ); - break; - case GRAPH: - // do nothing - break; - } + relationals.get( namespaceId ).asRelational().addTable( id, name ); return id; } - private void addDocumentEntity( long id, String name, long namespaceId ) { - // add target data model entity - document.addCollection( id, name, namespaceId ); - - // add substitution entity - relational.addSubstitutionTable( id, name, namespaceId, NamespaceType.DOCUMENT ); - graph.addSubstitutionGraph( id, name, namespaceId, NamespaceType.DOCUMENT ); - } - - - private void addRelationalEntity( long id, String name, long namespaceId ) { - // add target data model entity - relational.addTable( id, name, namespaceId ); - - // add substitution entity - graph.addSubstitutionGraph( id, name, namespaceId, NamespaceType.RELATIONAL ); - document.addSubstitutionCollection( id, name, namespaceId, NamespaceType.RELATIONAL ); - - } - - - public long addField( String name, long entityId, AlgDataType type, NamespaceType namespaceType ) { + public long addColumn( String name, long namespaceId, long entityId, AlgDataType type ) { long id = idBuilder.getNewFieldId(); - switch ( namespaceType ) { - case RELATIONAL: - addColumn( id, name, entityId, type ); - break; - case DOCUMENT: - case GRAPH: - // not available for models - break; - } + relationals.get( namespaceId ).asRelational().addColumn( id, name, entityId ); return id; } - private void addColumn( long id, String name, long entityId, AlgDataType type ) { - relational.addColumn( id, name, entityId, type ); - } - - @Override public void lookupOperatorOverloads( Identifier opName, FunctionCategory category, Syntax syntax, List operatorList ) { diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/ModelCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/alocation/AllocationCatalog.java similarity index 65% rename from plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/ModelCatalog.java rename to plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/alocation/AllocationCatalog.java index 856ae1307c..66bc15414b 100644 --- 
a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/ModelCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/alocation/AllocationCatalog.java @@ -14,15 +14,27 @@ * limitations under the License. */ -package org.polypheny.db.catalog; +package org.polypheny.db.catalog.alocation; -public interface ModelCatalog { +import org.polypheny.db.catalog.NCatalog; - void commit(); +public class AllocationCatalog implements NCatalog { - void rollback(); + @Override + public void commit() { - boolean hasUncommittedChanges(); + } + @Override + public void rollback() { + + } + + + @Override + public boolean hasUncommittedChanges() { + return false; + } + } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/document/CatalogDocDatabase.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/document/CatalogDocDatabase.java deleted file mode 100644 index d28bddd8ab..0000000000 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/document/CatalogDocDatabase.java +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Copyright 2019-2023 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.catalog.document; - -import com.google.common.collect.ImmutableMap; -import io.activej.serializer.annotations.Deserialize; -import io.activej.serializer.annotations.Serialize; -import java.util.Map; -import org.polypheny.db.catalog.Catalog.NamespaceType; - -public class CatalogDocDatabase { - - @Serialize - public final long id; - @Serialize - public final String name; - @Serialize - public final long databaseId; - @Serialize - public final NamespaceType namespaceType; - - @Serialize - public final ImmutableMap collections; - - - public CatalogDocDatabase( - @Deserialize("id") long id, - @Deserialize("name") String name, - @Deserialize("databaseId") long databaseId, - @Deserialize("namespaceType") NamespaceType namespaceType, - @Deserialize("collections") Map collections ) { - this.id = id; - this.name = name; - this.databaseId = databaseId; - this.namespaceType = namespaceType; - this.collections = ImmutableMap.copyOf( collections ); - } - -} diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/CatalogDatabase.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/entities/CatalogDatabase.java similarity index 81% rename from plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/CatalogDatabase.java rename to plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/entities/CatalogDatabase.java index 378cb1d7bf..e07c656b44 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/CatalogDatabase.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/entities/CatalogDatabase.java @@ -14,7 +14,7 @@ * limitations under the License. 
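With the mappings gone, PolyCatalog above resolves every namespace to a single NCatalog and routes DDL calls through the matching map; note that relationals.get( namespaceId ) already yields a RelationalCatalog, so the asRelational() call acts as a type guard rather than a conversion. A short usage sketch under the signatures from this patch, with illustrative names and ids:

    PolyCatalog catalog = new PolyCatalog();

    long relNs = catalog.addNamespace( "sales", NamespaceType.RELATIONAL );
    long docNs = catalog.addNamespace( "events", NamespaceType.DOCUMENT );

    // Routed to relationals.get( relNs ).asRelational().addTable( ... )
    long orders = catalog.addTable( "orders", relNs );
    catalog.addColumn( "order_id", relNs, orders, null );   // the AlgDataType is not stored yet

Passing docNs to addTable would throw a NullPointerException, since relationals has no entry for a document namespace; callers are expected to route by namespace type.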
*/ -package org.polypheny.db.catalog; +package org.polypheny.db.catalog.entities; import io.activej.serializer.annotations.Deserialize; import io.activej.serializer.annotations.Serialize; @@ -27,17 +27,12 @@ public class CatalogDatabase { @Serialize public final String name; - @Serialize - public final long ownerId; - public CatalogDatabase( @Deserialize("id") long id, - @Deserialize("name") String name, - @Deserialize("ownerId") long ownerId ) { + @Deserialize("name") String name ) { this.id = id; this.name = name; - this.ownerId = ownerId; } } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/document/CatalogCollection.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/document/CatalogCollection.java similarity index 92% rename from plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/document/CatalogCollection.java rename to plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/document/CatalogCollection.java index 5496f86b35..60a5d202c9 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/document/CatalogCollection.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/document/CatalogCollection.java @@ -14,7 +14,7 @@ * limitations under the License. */ -package org.polypheny.db.catalog.document; +package org.polypheny.db.catalog.logical.document; public class CatalogCollection { diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/document/DocumentCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/document/DocumentCatalog.java similarity index 58% rename from plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/document/DocumentCatalog.java rename to plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/document/DocumentCatalog.java index f2beeb2752..65c3d19540 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/document/DocumentCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/document/DocumentCatalog.java @@ -14,7 +14,7 @@ * limitations under the License. 
*/ -package org.polypheny.db.catalog.document; +package org.polypheny.db.catalog.logical.document; import io.activej.serializer.BinarySerializer; import io.activej.serializer.annotations.Deserialize; @@ -22,35 +22,44 @@ import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import lombok.Getter; -import org.polypheny.db.catalog.Catalog.NamespaceType; -import org.polypheny.db.catalog.ModelCatalog; +import lombok.Value; +import lombok.experimental.NonFinal; +import org.polypheny.db.catalog.NCatalog; import org.polypheny.db.catalog.SerializableCatalog; -public class DocumentCatalog implements ModelCatalog, SerializableCatalog { +@Value +public class DocumentCatalog implements NCatalog, SerializableCatalog { @Getter - public final BinarySerializer serializer = SerializableCatalog.builder.get().build( DocumentCatalog.class ); + public BinarySerializer serializer = SerializableCatalog.builder.get().build( DocumentCatalog.class ); @Serialize - public final Map databases; + public Map collections; + + @Serialize + public String name; + @Serialize - public final Map collections; + public long id; - public DocumentCatalog() { - this( new ConcurrentHashMap<>(), new ConcurrentHashMap<>() ); + public DocumentCatalog( long id, String name ) { + this( id, name, new ConcurrentHashMap<>() ); } public DocumentCatalog( - @Deserialize("databases") Map databases, + @Deserialize("id") long id, + @Deserialize("name") String name, @Deserialize("collections") Map collections ) { - this.databases = databases; this.collections = collections; + this.id = id; + this.name = name; } - private boolean openChanges = false; + @NonFinal + boolean openChanges = false; @Override @@ -72,17 +81,9 @@ public boolean hasUncommittedChanges() { } - public void addDatabase( long id, String name, long databaseId, NamespaceType namespaceType ) { - databases.put( id, new CatalogDocDatabase( id, name, databaseId, namespaceType, collections ) ); - } - - public void addCollection( long id, String name, long namespaceId ) { } - public void addSubstitutionCollection( long id, String name, long namespaceId, NamespaceType relational ) { - } - } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/graph/CatalogGraph.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/graph/CatalogGraph.java similarity index 96% rename from plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/graph/CatalogGraph.java rename to plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/graph/CatalogGraph.java index 2b692f63ea..456047bce3 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/graph/CatalogGraph.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/graph/CatalogGraph.java @@ -14,7 +14,7 @@ * limitations under the License. 
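DocumentCatalog above shows the pattern every logical catalog now follows: a Lombok @Value class whose fields are final, with @NonFinal carving out the one mutable flag that commit() and rollback() reset. A condensed sketch of the pattern with the serializer plumbing omitted; ExampleCatalog is illustrative only:

    import lombok.Value;
    import lombok.experimental.NonFinal;
    import org.polypheny.db.catalog.NCatalog;

    @Value
    public class ExampleCatalog implements NCatalog {

        long id;        // immutable after construction
        String name;

        @NonFinal
        boolean openChanges = false;   // the single mutable bit of state

        @Override
        public void commit() {
            openChanges = false;
        }

        @Override
        public void rollback() {
            openChanges = false;
        }

        @Override
        public boolean hasUncommittedChanges() {
            return openChanges;
        }
    }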
*/ -package org.polypheny.db.catalog.graph; +package org.polypheny.db.catalog.logical.graph; import io.activej.serializer.annotations.Deserialize; import io.activej.serializer.annotations.Serialize; diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/graph/GraphCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/graph/GraphCatalog.java similarity index 60% rename from plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/graph/GraphCatalog.java rename to plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/graph/GraphCatalog.java index 05ab80b7cc..84200c7979 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/graph/GraphCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/graph/GraphCatalog.java @@ -14,7 +14,7 @@ * limitations under the License. */ -package org.polypheny.db.catalog.graph; +package org.polypheny.db.catalog.logical.graph; import io.activej.serializer.BinarySerializer; import io.activej.serializer.annotations.Deserialize; @@ -22,28 +22,43 @@ import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import lombok.Getter; -import org.polypheny.db.catalog.Catalog.NamespaceType; -import org.polypheny.db.catalog.ModelCatalog; +import lombok.Value; +import lombok.experimental.NonFinal; +import org.polypheny.db.catalog.NCatalog; import org.polypheny.db.catalog.SerializableCatalog; -public class GraphCatalog implements ModelCatalog, SerializableCatalog { +@Value +public class GraphCatalog implements NCatalog, SerializableCatalog { @Getter - public final BinarySerializer serializer = SerializableCatalog.builder.get().build( GraphCatalog.class ); + public BinarySerializer serializer = SerializableCatalog.builder.get().build( GraphCatalog.class ); + + @Serialize + public long id; + + @Serialize + public String name; @Serialize public Map graphs; - private boolean openChanges = false; + @NonFinal + boolean openChanges = false; - public GraphCatalog() { - this( new ConcurrentHashMap<>() ); + public GraphCatalog( long id, String name ) { + this( id, name, new ConcurrentHashMap<>() ); } - public GraphCatalog( @Deserialize("graphs") Map graphs ) { + public GraphCatalog( + @Deserialize("id") long id, + @Deserialize("name") String name, + @Deserialize("graphs") Map graphs ) { this.graphs = graphs; + + this.id = id; + this.name = name; } @@ -67,12 +82,4 @@ public boolean hasUncommittedChanges() { } - public void addGraph( long id, String name, long databaseId, NamespaceType namespaceType ) { - graphs.put( id, new CatalogGraph( id, name, databaseId, namespaceType ) ); - } - - - public void addSubstitutionGraph( long id, String name, long namespaceId, NamespaceType document ) { - } - } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/relational/CatalogTable.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/relational/CatalogColumn.java similarity index 81% rename from plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/relational/CatalogTable.java rename to plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/relational/CatalogColumn.java index 2429b1b3df..2f418049b5 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/relational/CatalogTable.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/relational/CatalogColumn.java @@ -14,32 +14,32 @@ * limitations under the License. 
*/ -package org.polypheny.db.catalog.relational; +package org.polypheny.db.catalog.logical.relational; import io.activej.serializer.annotations.Deserialize; import io.activej.serializer.annotations.Serialize; import lombok.Value; @Value -public class CatalogTable { +public class CatalogColumn { @Serialize public long id; @Serialize public String name; - @Serialize - public long namespaceId; + public long tableId; - public CatalogTable( + public CatalogColumn( @Deserialize("id") long id, @Deserialize("name") String name, - @Deserialize("namespaceId") long namespaceId ) { + @Deserialize("tableId") long tableId ) { this.id = id; this.name = name; - this.namespaceId = namespaceId; + this.tableId = tableId; + } } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/relational/CatalogTable.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/relational/CatalogTable.java new file mode 100644 index 0000000000..3d69e3a8b0 --- /dev/null +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/relational/CatalogTable.java @@ -0,0 +1,78 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.catalog.logical.relational; + +import com.google.common.collect.ImmutableMap; +import io.activej.serializer.annotations.Deserialize; +import io.activej.serializer.annotations.Serialize; +import java.util.HashMap; +import java.util.Map; +import lombok.Value; +import lombok.With; + +@Value +public class CatalogTable { + + @Serialize + @With + public long id; + + @Serialize + @With + public String name; + + @Serialize + @With + public long namespaceId; + + + @Serialize + @With + public ImmutableMap columns; + + + public CatalogTable( long id, String name, long namespaceId ) { + this( id, name, namespaceId, new HashMap<>() ); + } + + + public CatalogTable( + @Deserialize("id") long id, + @Deserialize("name") String name, + @Deserialize("namespaceId") long namespaceId, + @Deserialize("columns") Map columns ) { + this.id = id; + this.name = name; + this.namespaceId = namespaceId; + this.columns = ImmutableMap.copyOf( columns ); + } + + + public CatalogTable withAddedColumn( long id, String name ) { + Map columns = new HashMap<>( this.columns ); + columns.put( id, new CatalogColumn( id, name, this.id ) ); + return withColumns( ImmutableMap.copyOf( columns ) ); + } + + + public CatalogTable withDeletedColumn( long id ) { + Map columns = new HashMap<>( this.columns ); + columns.remove( id ); + return withColumns( ImmutableMap.copyOf( columns ) ); + } + +} diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/relational/RelationalCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/relational/RelationalCatalog.java new file mode 100644 index 0000000000..6ce61682ce --- /dev/null +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/relational/RelationalCatalog.java @@ -0,0 +1,107 @@ +/* + * 
Copyright 2019-2023 The Polypheny Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.polypheny.db.catalog.logical.relational;
+
+import io.activej.serializer.BinarySerializer;
+import io.activej.serializer.annotations.Deserialize;
+import io.activej.serializer.annotations.Serialize;
+import java.util.HashMap;
+import java.util.Map;
+import lombok.Getter;
+import lombok.Value;
+import lombok.experimental.NonFinal;
+import org.polypheny.db.catalog.NCatalog;
+import org.polypheny.db.catalog.SerializableCatalog;
+
+@Value
+public class RelationalCatalog implements NCatalog, SerializableCatalog {
+
+    @Getter
+    public BinarySerializer<RelationalCatalog> serializer = SerializableCatalog.builder.get().build( RelationalCatalog.class );
+
+    @Serialize
+    public Map<Long, CatalogTable> tables;
+
+    @Serialize
+    public long id;
+
+    @Serialize
+    public String name;
+
+    @NonFinal
+    boolean openChanges = false;
+
+
+    public RelationalCatalog(
+            @Deserialize("id") long id,
+            @Deserialize("name") String name,
+            @Deserialize("tables") Map<Long, CatalogTable> tables ) {
+        this.id = id;
+        this.name = name;
+
+        this.tables = new HashMap<>( tables );
+    }
+
+
+    public RelationalCatalog( long id, String name ) {
+        this( id, name, new HashMap<>() );
+    }
+
+
+    @Override
+    public void commit() {
+
+        openChanges = false;
+    }
+
+
+    @Override
+    public void rollback() {
+
+        openChanges = false;
+    }
+
+
+    public void change() {
+        openChanges = true;
+    }
+
+
+    @Override
+    public boolean hasUncommittedChanges() {
+        return openChanges;
+    }
+
+
+    public void addTable( long id, String name ) {
+        tables.put( id, new CatalogTable( id, name, this.id ) );
+        change();
+    }
+
+
+    public void addColumn( long id, String name, long entityId ) {
+        tables.put( entityId, tables.get( entityId ).withAddedColumn( id, name ) );
+        change();
+    }
+
+
+    public void deleteColumn( long id, long entityId ) {
+        tables.put( entityId, tables.get( entityId ).withDeletedColumn( id ) );
+        change();
+    }
+
+}
diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/mappings/CatalogDocumentMapping.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/mappings/CatalogDocumentMapping.java
deleted file mode 100644
index 4c0e18307d..0000000000
--- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/mappings/CatalogDocumentMapping.java
+++ /dev/null
@@ -1,38 +0,0 @@
-/*
- * Copyright 2019-2023 The Polypheny Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
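Since CatalogTable is immutable, RelationalCatalog never edits a table in place: addColumn and deleteColumn swap the stored value for a copy built by withAddedColumn or withDeletedColumn. A sketch of the resulting copy-on-write semantics, using the Lombok-generated getters and assuming only the classes from this patch:

    RelationalCatalog catalog = new RelationalCatalog( 0, "public" );
    catalog.addTable( 1, "orders" );

    CatalogTable before = catalog.getTables().get( 1L );
    catalog.addColumn( 10, "order_id", 1 );
    CatalogTable after = catalog.getTables().get( 1L );

    // Old snapshots stay valid: the previous instance was replaced, never mutated.
    assert before.getColumns().isEmpty();
    assert after.getColumns().containsKey( 10L );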
- */ - -package org.polypheny.db.catalog.mappings; - -import io.activej.serializer.annotations.Deserialize; -import io.activej.serializer.annotations.Serialize; - -public class CatalogDocumentMapping implements CatalogModelMapping { - - @Serialize - public final long id; - - - public CatalogDocumentMapping( @Deserialize("id") long id ) { - this.id = id; - } - - - @Override - public String getGraphLabel() { - return "_collection_" + id; - } - -} diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/mappings/CatalogGraphMapping.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/mappings/CatalogGraphMapping.java deleted file mode 100644 index 71f96b863c..0000000000 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/mappings/CatalogGraphMapping.java +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Copyright 2019-2023 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.catalog.mappings; - - -import io.activej.serializer.annotations.Deserialize; -import io.activej.serializer.annotations.Serialize; - -public class CatalogGraphMapping implements CatalogModelMapping { - - @Serialize - public final long id; - - @Serialize - public final long nodesId; - - @Serialize - public final long nodesPropertiesId; - - @Serialize - public final long edgesId; - - @Serialize - public final long edgesPropertiesId; - - - public CatalogGraphMapping( - @Deserialize("id") long id, - @Deserialize("nodesId") long nodesId, - @Deserialize("nodesPropertiesId") long nodesPropertiesId, - @Deserialize("edgesId") long edgesId, - @Deserialize("edgesPropertiesId") long edgesPropertiesId ) { - this.id = id; - this.nodesId = nodesId; - this.nodesPropertiesId = nodesPropertiesId; - this.edgesId = edgesId; - this.edgesPropertiesId = edgesPropertiesId; - } - - - @Override - public String getGraphLabel() { - return null; - } - -} diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/mappings/CatalogModelMapping.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/mappings/CatalogModelMapping.java deleted file mode 100644 index ee3cb9a9dd..0000000000 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/mappings/CatalogModelMapping.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Copyright 2019-2023 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.polypheny.db.catalog.mappings; - -public interface CatalogModelMapping { - - String getGraphLabel(); - - -} diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/mappings/CatalogRelationalMapping.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/mappings/CatalogRelationalMapping.java deleted file mode 100644 index 73dc6ed7e7..0000000000 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/mappings/CatalogRelationalMapping.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Copyright 2019-2023 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.catalog.mappings; - -import io.activej.serializer.annotations.Deserialize; -import io.activej.serializer.annotations.Serialize; - -public class CatalogRelationalMapping implements CatalogModelMapping { - - @Serialize - public final long id; - - - public CatalogRelationalMapping( @Deserialize("id") long id ) { - this.id = id; - } - - - @Override - public String getGraphLabel() { - return "_table_" + id; - } - -} diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/relational/CatalogColumn.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/relational/CatalogColumn.java deleted file mode 100644 index bdb4b17352..0000000000 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/relational/CatalogColumn.java +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright 2019-2023 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.catalog.relational; - -public class CatalogColumn { - -} diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/relational/CatalogSchema.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/relational/CatalogSchema.java deleted file mode 100644 index 931b1a60aa..0000000000 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/relational/CatalogSchema.java +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Copyright 2019-2023 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.catalog.relational; - -import com.google.common.collect.ImmutableMap; -import io.activej.serializer.annotations.Deserialize; -import io.activej.serializer.annotations.Serialize; -import java.util.HashMap; -import java.util.Map; -import lombok.Value; - -@Value -public class CatalogSchema { - - @Serialize - public long id; - - @Serialize - public String name; - - @Serialize - public long databaseId; - - @Serialize - public ImmutableMap tables; - - - public CatalogSchema( - @Deserialize("id") long id, - @Deserialize("name") String name, - @Deserialize("databaseId") long databaseId, - @Deserialize("tables") Map tables ) { - this.id = id; - this.name = name; - this.databaseId = databaseId; - this.tables = ImmutableMap.copyOf( tables ); - } - - - public CatalogSchema addTable( CatalogTable catalogTable ) { - Map newTables = new HashMap<>( tables ); - newTables.put( catalogTable.id, catalogTable ); - return new CatalogSchema( id, name, databaseId, newTables ); - } - -} diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/relational/RelationalCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/relational/RelationalCatalog.java deleted file mode 100644 index 23f67e71b5..0000000000 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/relational/RelationalCatalog.java +++ /dev/null @@ -1,100 +0,0 @@ -/* - * Copyright 2019-2023 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.polypheny.db.catalog.relational; - -import io.activej.serializer.BinarySerializer; -import io.activej.serializer.annotations.Deserialize; -import io.activej.serializer.annotations.Serialize; -import java.util.HashMap; -import java.util.Map; -import lombok.Getter; -import org.polypheny.db.algebra.type.AlgDataType; -import org.polypheny.db.catalog.Catalog.NamespaceType; -import org.polypheny.db.catalog.ModelCatalog; -import org.polypheny.db.catalog.SerializableCatalog; - -public class RelationalCatalog implements ModelCatalog, SerializableCatalog { - - @Getter - public final BinarySerializer serializer = SerializableCatalog.builder.get().build( RelationalCatalog.class ); - - @Serialize - public final Map schemas; - - @Serialize - public final Map tables; - - @Serialize - public final Map columns; - - private boolean openChanges = false; - - - public RelationalCatalog( - @Deserialize("schemas") Map schemas, - @Deserialize("tables") Map tables, - @Deserialize("columns") Map columns ) { - this.schemas = schemas; - this.tables = tables; - this.columns = columns; - } - - - public RelationalCatalog() { - this( new HashMap<>(), new HashMap<>(), new HashMap<>() ); - } - - - @Override - public void commit() { - - openChanges = false; - } - - - @Override - public void rollback() { - - openChanges = false; - } - - - @Override - public boolean hasUncommittedChanges() { - return openChanges; - } - - - public void addSchema( long id, String name, long databaseId, NamespaceType namespaceType ) { - schemas.put( id, new CatalogSchema( id, name, databaseId, tables ) ); - } - - - public void addTable( long id, String name, long namespaceId ) { - schemas.put( id, schemas.get( namespaceId ).addTable( new CatalogTable( id, name, namespaceId ) ) ); - } - - - public void addColumn( long id, String name, long entityId, AlgDataType type ) { - - } - - - public void addSubstitutionTable( long id, String name, long namespaceId, NamespaceType document ) { - } - -} From a6537ddebf32b1ce7565e34adba612671d45b6e3 Mon Sep 17 00:00:00 2001 From: datomo Date: Sat, 18 Feb 2023 23:51:54 +0100 Subject: [PATCH 011/436] added logical readers/peek --- .../org/polypheny/db/catalog/IdBuilder.java | 20 ++- .../org/polypheny/db/catalog/PolyCatalog.java | 10 +- ...alizableCatalog.java => Serializable.java} | 11 +- .../catalog/allocation/AllocationCatalog.java | 99 +++++++++++++++ .../allocation/HorizontalPartition.java | 40 ++++++ .../catalog/allocation/VerticalPartition.java | 40 ++++++ .../logical/document/CatalogCollection.java | 8 ++ .../logical/document/CatalogDatabase.java | 37 ++++++ .../logical/document/DocumentCatalog.java | 11 +- .../catalog/logical/graph/CatalogGraph.java | 14 +- .../catalog/logical/graph/GraphCatalog.java | 26 ++-- .../relational/CatalogSchema.java} | 27 ++-- .../logical/relational/RelationalCatalog.java | 12 +- .../polypheny/db/catalog/readers/Peek.java | 21 +++ .../readers/allocation/AllocationPeek.java | 23 ++++ .../readers/logical/LogicalDocumentPeek.java | 87 +++++++++++++ .../readers/logical/LogicalGraphPeek.java | 63 +++++++++ .../catalog/readers/logical/LogicalPeek.java | 24 ++++ .../logical/LogicalRelationalPeek.java | 120 ++++++++++++++++++ .../readers/physical/DocumentOnlyPeek.java | 21 +++ .../readers/physical/GraphOnlyPeek.java | 21 +++ .../readers/physical/PhysicalPeek.java | 23 ++++ .../readers/physical/RelationalOnlyPeek.java | 21 +++ 23 files changed, 716 insertions(+), 63 deletions(-) rename 
plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/{SerializableCatalog.java => Serializable.java} (80%) create mode 100644 plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/AllocationCatalog.java create mode 100644 plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/HorizontalPartition.java create mode 100644 plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/VerticalPartition.java create mode 100644 plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/document/CatalogDatabase.java rename plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/{alocation/AllocationCatalog.java => logical/relational/CatalogSchema.java} (65%) create mode 100644 plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/Peek.java create mode 100644 plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/allocation/AllocationPeek.java create mode 100644 plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/logical/LogicalDocumentPeek.java create mode 100644 plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/logical/LogicalGraphPeek.java create mode 100644 plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/logical/LogicalPeek.java create mode 100644 plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/logical/LogicalRelationalPeek.java create mode 100644 plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/physical/DocumentOnlyPeek.java create mode 100644 plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/physical/GraphOnlyPeek.java create mode 100644 plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/physical/PhysicalPeek.java create mode 100644 plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/physical/RelationalOnlyPeek.java diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/IdBuilder.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/IdBuilder.java index 5d4818bea3..5e4f764d7b 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/IdBuilder.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/IdBuilder.java @@ -27,19 +27,25 @@ public class IdBuilder { public final AtomicLong userId; + public final AtomicLong verticalId; + + public final AtomicLong horizontalId; + public IdBuilder() { - this( new AtomicLong( 0 ), new AtomicLong( 0 ), new AtomicLong( 0 ), new AtomicLong( 0 ), new AtomicLong( 0 ) ); + this( new AtomicLong( 0 ), new AtomicLong( 0 ), new AtomicLong( 0 ), new AtomicLong( 0 ), new AtomicLong( 0 ), new AtomicLong( 0 ), new AtomicLong( 0 ) ); } - public IdBuilder( AtomicLong databaseId, AtomicLong namespaceId, AtomicLong entityId, AtomicLong fieldId, AtomicLong userId ) { + public IdBuilder( AtomicLong databaseId, AtomicLong namespaceId, AtomicLong entityId, AtomicLong fieldId, AtomicLong userId, AtomicLong verticalId, AtomicLong horizontalId ) { this.databaseId = databaseId; this.namespaceId = namespaceId; this.entityId = entityId; this.fieldId = fieldId; this.userId = userId; + this.verticalId = verticalId; + this.horizontalId = horizontalId; } @@ -67,4 +73,14 @@ public long getNewUserId() { return userId.getAndIncrement(); } + + public long getNewVerticalId() { + return verticalId.getAndIncrement(); + } + + + public long getNewHorizontalId() { + return horizontalId.getAndIncrement(); + } + } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java 
b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java index d6b7ec1d5c..efbc0a6b8f 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java @@ -52,10 +52,10 @@ * Field -> Column (Relational), does not exist (Graph), Field (Document) */ @Slf4j -public class PolyCatalog implements SerializableCatalog, CatalogReader { +public class PolyCatalog implements Serializable, CatalogReader { @Getter - public final BinarySerializer serializer = SerializableCatalog.builder.get().build( PolyCatalog.class ); + public final BinarySerializer serializer = Serializable.builder.get().build( PolyCatalog.class ); @Serialize public final Map relationals; @@ -211,4 +211,10 @@ public Graph getGraph( String name ) { return null; } + + @Override + public PolyCatalog copy() { + return deserialize( serialize(), PolyCatalog.class ); + } + } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/SerializableCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/Serializable.java similarity index 80% rename from plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/SerializableCatalog.java rename to plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/Serializable.java index d1ac380d58..d282ddee7b 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/SerializableCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/Serializable.java @@ -22,20 +22,23 @@ import java.util.function.Supplier; import org.polypheny.db.plugins.PolyPluginManager; -public interface SerializableCatalog { +public interface Serializable { Supplier builder = () -> SerializerBuilder.create( DefiningClassLoader.create( PolyPluginManager.getMainClassLoader() ) ); - BinarySerializer getSerializer(); + BinarySerializer getSerializer(); - default byte[] serialize( Class clazz ) { + default byte[] serialize() { byte[] buffer = new byte[1000]; getSerializer().encode( buffer, 0, this ); return buffer; } - default T deserialize( byte[] serialized, Class clazz ) { + default T deserialize( byte[] serialized, Class clazz ) { return clazz.cast( getSerializer().decode( serialized, 0 ) ); } + + Serializable copy(); + } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/AllocationCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/AllocationCatalog.java new file mode 100644 index 0000000000..445dfb6d4f --- /dev/null +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/AllocationCatalog.java @@ -0,0 +1,99 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
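The widened Serializable interface gives every catalog a copy() that is implemented the same way throughout: encode into a byte buffer, decode it again, and return the detached result, which makes copies deep by construction. A sketch of the behavior, assuming PolyCatalog as defined above:

    PolyCatalog original = new PolyCatalog();
    long ns = original.addNamespace( "test", NamespaceType.RELATIONAL );

    // copy() is deserialize( serialize(), PolyCatalog.class ): a deep, detached clone.
    PolyCatalog snapshot = original.copy();

    // Mutating the original leaves the snapshot untouched.
    original.addTable( "t", ns );

Note the fixed 1000-byte buffer allocated in serialize(); a sufficiently large catalog would overflow it, so this is scaffolding rather than a production-ready encoding.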
+ */ + +package org.polypheny.db.catalog.allocation; + +import io.activej.serializer.BinarySerializer; +import io.activej.serializer.annotations.Deserialize; +import io.activej.serializer.annotations.Serialize; +import java.util.HashMap; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; +import lombok.Getter; +import org.polypheny.db.catalog.IdBuilder; +import org.polypheny.db.catalog.NCatalog; +import org.polypheny.db.catalog.Serializable; + +public class AllocationCatalog implements NCatalog, Serializable { + + + @Getter + public BinarySerializer serializer = Serializable.builder.get().build( AllocationCatalog.class ); + + + @Serialize + public final Map horizontals; // "rows" 1,1,1;2,2,2 -> 1,1,1 + 2,2,2 + + @Serialize + public final Map verticals; // "split-placements" a,b,c -> a,b + (a,)c + + + public final IdBuilder idBuilder = new IdBuilder(); + + + public AllocationCatalog() { + this( new HashMap<>(), new HashMap<>() ); + } + + + public AllocationCatalog( + @Deserialize("horizontals") Map horizontals, + @Deserialize("verticals") Map verticals ) { + this.horizontals = new ConcurrentHashMap<>( horizontals ); + this.verticals = new ConcurrentHashMap<>( verticals ); + } + + + @Override + public void commit() { + + } + + + @Override + public void rollback() { + + } + + + @Override + public boolean hasUncommittedChanges() { + return false; + } + + + @Override + public AllocationCatalog copy() { + return deserialize( serialize(), AllocationCatalog.class ); + } + + + public long addVerticalPlacement( long logicalId ) { + long id = idBuilder.getNewVerticalId(); + verticals.put( id, new VerticalPartition( id, logicalId ) ); + return id; + } + + + public long addHorizontalPlacement( long logicalId ) { + long id = idBuilder.getNewHorizontalId(); + + horizontals.put( id, new HorizontalPartition( id, logicalId ) ); + + return id; + } + +} diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/HorizontalPartition.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/HorizontalPartition.java new file mode 100644 index 0000000000..454e17fe71 --- /dev/null +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/HorizontalPartition.java @@ -0,0 +1,40 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
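The new AllocationCatalog tracks where logical entities physically live: verticals records column-wise splits of an entity, horizontals row-wise ones, and each partition stores only the id of the logical element it covers. A minimal usage sketch; logicalTableId stands in for an id issued by a logical catalog:

    AllocationCatalog allocation = new AllocationCatalog();
    long logicalTableId = 42L;   // assumed to exist in some logical catalog

    long verticalId = allocation.addVerticalPlacement( logicalTableId );
    long horizontalId = allocation.addHorizontalPlacement( logicalTableId );

    VerticalPartition vertical = allocation.verticals.get( verticalId );
    assert vertical.getLogicalId() == logicalTableId;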
+ */ + +package org.polypheny.db.catalog.allocation; + +import io.activej.serializer.annotations.Deserialize; +import io.activej.serializer.annotations.Serialize; +import lombok.Value; + +@Value +public class HorizontalPartition { + + @Serialize + public long id; + + @Serialize + public long logicalId; + + + public HorizontalPartition( + @Deserialize("id") long id, + @Deserialize("logicalId") long logicalId ) { + this.id = id; + this.logicalId = logicalId; + } + +} diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/VerticalPartition.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/VerticalPartition.java new file mode 100644 index 0000000000..467b8a3488 --- /dev/null +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/VerticalPartition.java @@ -0,0 +1,40 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.catalog.allocation; + +import io.activej.serializer.annotations.Deserialize; +import io.activej.serializer.annotations.Serialize; +import lombok.Value; + +@Value +public class VerticalPartition { + + @Serialize + public long id; + + @Serialize + public long logicalId; + + + public VerticalPartition( + @Deserialize("id") long id, + @Deserialize("logicalId") long logicalId ) { + this.id = id; + this.logicalId = logicalId; + } + +} diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/document/CatalogCollection.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/document/CatalogCollection.java index 60a5d202c9..b046be5b8c 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/document/CatalogCollection.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/document/CatalogCollection.java @@ -16,6 +16,14 @@ package org.polypheny.db.catalog.logical.document; +import lombok.AllArgsConstructor; +import lombok.Value; + +@Value +@AllArgsConstructor public class CatalogCollection { + public long id; + public String name; + } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/document/CatalogDatabase.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/document/CatalogDatabase.java new file mode 100644 index 0000000000..3402e11703 --- /dev/null +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/document/CatalogDatabase.java @@ -0,0 +1,37 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.catalog.logical.document; + +import com.google.common.collect.ImmutableMap; +import java.util.Map; +import lombok.Value; + +@Value +public class CatalogDatabase { + + public String name; + public long id; + public ImmutableMap collections; + + + public CatalogDatabase( long id, String name, Map collections ) { + this.id = id; + this.name = name; + this.collections = ImmutableMap.copyOf( collections ); + } + +} diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/document/DocumentCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/document/DocumentCatalog.java index 65c3d19540..d678aa1fe4 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/document/DocumentCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/document/DocumentCatalog.java @@ -25,13 +25,13 @@ import lombok.Value; import lombok.experimental.NonFinal; import org.polypheny.db.catalog.NCatalog; -import org.polypheny.db.catalog.SerializableCatalog; +import org.polypheny.db.catalog.Serializable; @Value -public class DocumentCatalog implements NCatalog, SerializableCatalog { +public class DocumentCatalog implements NCatalog, Serializable { @Getter - public BinarySerializer serializer = SerializableCatalog.builder.get().build( DocumentCatalog.class ); + public BinarySerializer serializer = Serializable.builder.get().build( DocumentCatalog.class ); @Serialize public Map collections; @@ -86,4 +86,9 @@ public void addCollection( long id, String name, long namespaceId ) { } + @Override + public DocumentCatalog copy() { + return deserialize( serialize(), DocumentCatalog.class ); + } + } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/graph/CatalogGraph.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/graph/CatalogGraph.java index 456047bce3..516dec90e1 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/graph/CatalogGraph.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/graph/CatalogGraph.java @@ -18,7 +18,6 @@ import io.activej.serializer.annotations.Deserialize; import io.activej.serializer.annotations.Serialize; -import org.polypheny.db.catalog.Catalog.NamespaceType; public class CatalogGraph { @@ -28,22 +27,11 @@ public class CatalogGraph { @Serialize public final String name; - @Serialize - public final long databaseId; - - @Serialize - public final NamespaceType namespaceType; - - public CatalogGraph( @Deserialize("id") long id, - @Deserialize("name") String name, - @Deserialize("databaseId") long databaseId, - @Deserialize("namespaceType") NamespaceType namespaceType ) { + @Deserialize("name") String name ) { this.id = id; this.name = name; - this.databaseId = databaseId; - this.namespaceType = namespaceType; } } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/graph/GraphCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/graph/GraphCatalog.java index 84200c7979..29e34ead93 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/graph/GraphCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/graph/GraphCatalog.java @@ -19,19 +19,17 @@ import io.activej.serializer.BinarySerializer; import io.activej.serializer.annotations.Deserialize; 
import io.activej.serializer.annotations.Serialize; -import java.util.Map; -import java.util.concurrent.ConcurrentHashMap; import lombok.Getter; import lombok.Value; import lombok.experimental.NonFinal; import org.polypheny.db.catalog.NCatalog; -import org.polypheny.db.catalog.SerializableCatalog; +import org.polypheny.db.catalog.Serializable; @Value -public class GraphCatalog implements NCatalog, SerializableCatalog { +public class GraphCatalog implements NCatalog, Serializable { @Getter - public BinarySerializer serializer = SerializableCatalog.builder.get().build( GraphCatalog.class ); + public BinarySerializer serializer = Serializable.builder.get().build( GraphCatalog.class ); @Serialize public long id; @@ -39,23 +37,13 @@ public class GraphCatalog implements NCatalog, SerializableCatalog { @Serialize public String name; - @Serialize - public Map graphs; - @NonFinal boolean openChanges = false; - public GraphCatalog( long id, String name ) { - this( id, name, new ConcurrentHashMap<>() ); - } - - public GraphCatalog( @Deserialize("id") long id, - @Deserialize("name") String name, - @Deserialize("graphs") Map graphs ) { - this.graphs = graphs; + @Deserialize("name") String name ) { this.id = id; this.name = name; @@ -64,7 +52,6 @@ public GraphCatalog( @Override public void commit() { - openChanges = false; } @@ -82,4 +69,9 @@ public boolean hasUncommittedChanges() { } + @Override + public GraphCatalog copy() { + return deserialize( serialize(), GraphCatalog.class ); + } + } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/alocation/AllocationCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/relational/CatalogSchema.java similarity index 65% rename from plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/alocation/AllocationCatalog.java rename to plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/relational/CatalogSchema.java index 66bc15414b..16d51b9043 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/alocation/AllocationCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/relational/CatalogSchema.java @@ -14,27 +14,16 @@ * limitations under the License. 
*/ -package org.polypheny.db.catalog.alocation; +package org.polypheny.db.catalog.logical.relational; -import org.polypheny.db.catalog.NCatalog; +import lombok.AllArgsConstructor; +import lombok.Value; -public class AllocationCatalog implements NCatalog { +@AllArgsConstructor +@Value +public class CatalogSchema { - @Override - public void commit() { - - } - - - @Override - public void rollback() { - - } - - - @Override - public boolean hasUncommittedChanges() { - return false; - } + public long id; + public String name; } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/relational/RelationalCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/relational/RelationalCatalog.java index 6ce61682ce..e1179b79ae 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/relational/RelationalCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/relational/RelationalCatalog.java @@ -25,13 +25,13 @@ import lombok.Value; import lombok.experimental.NonFinal; import org.polypheny.db.catalog.NCatalog; -import org.polypheny.db.catalog.SerializableCatalog; +import org.polypheny.db.catalog.Serializable; @Value -public class RelationalCatalog implements NCatalog, SerializableCatalog { +public class RelationalCatalog implements NCatalog, Serializable { @Getter - public BinarySerializer serializer = SerializableCatalog.builder.get().build( RelationalCatalog.class ); + public BinarySerializer serializer = Serializable.builder.get().build( RelationalCatalog.class ); @Serialize public Map tables; @@ -104,4 +104,10 @@ public void deleteColumn( long id, long entityId ) { change(); } + + @Override + public RelationalCatalog copy() { + return deserialize( serialize(), RelationalCatalog.class ); + } + } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/Peek.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/Peek.java new file mode 100644 index 0000000000..35e2c90463 --- /dev/null +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/Peek.java @@ -0,0 +1,21 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.catalog.readers; + +public interface Peek { + +} diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/allocation/AllocationPeek.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/allocation/AllocationPeek.java new file mode 100644 index 0000000000..b6cdca4c43 --- /dev/null +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/allocation/AllocationPeek.java @@ -0,0 +1,23 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.catalog.readers.allocation; + +import org.polypheny.db.catalog.readers.Peek; + +public class AllocationPeek implements Peek { + +} diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/logical/LogicalDocumentPeek.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/logical/LogicalDocumentPeek.java new file mode 100644 index 0000000000..d201028f5a --- /dev/null +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/logical/LogicalDocumentPeek.java @@ -0,0 +1,87 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.catalog.readers.logical; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; +import lombok.Value; +import org.polypheny.db.catalog.logical.document.CatalogCollection; +import org.polypheny.db.catalog.logical.document.CatalogDatabase; +import org.polypheny.db.catalog.logical.document.DocumentCatalog; + +@Value +public class LogicalDocumentPeek implements LogicalPeek { + + ImmutableList catalogs; + public ImmutableList databases; + public ImmutableMap databaseIds; + public ImmutableMap databaseNames; + public ImmutableList collections; + public ImmutableMap collectionIds; + public ImmutableMap collectionNames; + + + public LogicalDocumentPeek( final List catalogs ) { + this.catalogs = ImmutableList.copyOf( catalogs.stream().map( DocumentCatalog::copy ).collect( Collectors.toList() ) ); + + this.databases = ImmutableList.copyOf( buildDatabases() ); + this.databaseIds = ImmutableMap.copyOf( buildDatabaseIds() ); + this.databaseNames = ImmutableMap.copyOf( buildDatabaseNames() ); + + this.collections = ImmutableList.copyOf( buildCollections() ); + this.collectionIds = ImmutableMap.copyOf( buildCollectionIds() ); + this.collectionNames = ImmutableMap.copyOf( buildCollectionNames() ); + } + + + private Map buildCollectionNames() { + return this.collections.stream().collect( Collectors.toMap( c -> c.name, c -> c ) ); + } + + + private Map buildCollectionIds() { + return this.collections.stream().collect( Collectors.toMap( c -> c.id, c -> c ) ); + } + + + private List buildCollections() { + return this.databases.stream().flatMap( d -> d.collections.values().stream() ).collect( Collectors.toList() ); + } + + /////////////////////////// + ///// Database //////////// + /////////////////////////// + + + private Map buildDatabaseNames() { + return 
this.databases.stream().collect( Collectors.toMap( d -> d.name, d -> d ) ); + } + + + private Map buildDatabaseIds() { + return this.databases.stream().collect( Collectors.toMap( d -> d.id, d -> d ) ); + } + + + private List buildDatabases() { + return catalogs.stream().map( c -> new CatalogDatabase( c.id, c.name, c.collections ) ).collect( Collectors.toList() ); + } + +} diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/logical/LogicalGraphPeek.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/logical/LogicalGraphPeek.java new file mode 100644 index 0000000000..23f0da21b1 --- /dev/null +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/logical/LogicalGraphPeek.java @@ -0,0 +1,63 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.catalog.readers.logical; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; +import lombok.Value; +import org.polypheny.db.catalog.logical.graph.CatalogGraph; +import org.polypheny.db.catalog.logical.graph.GraphCatalog; + +@Value +public class LogicalGraphPeek implements LogicalPeek { + + ImmutableList catalogs; + public ImmutableList graphs; + + public ImmutableMap graphIds; + public ImmutableMap graphNames; + + + public LogicalGraphPeek( final List catalogs ) { + this.catalogs = ImmutableList.copyOf( catalogs.stream().map( GraphCatalog::copy ).collect( Collectors.toList() ) ); + + this.graphs = ImmutableList.copyOf( buildGraphs() ); + this.graphIds = ImmutableMap.copyOf( buildGraphIds() ); + this.graphNames = ImmutableMap.copyOf( buildGraphNames() ); + + } + + + private List buildGraphs() { + return catalogs.stream().map( c -> new CatalogGraph( c.id, c.name ) ).collect( Collectors.toList() ); + } + + + private Map buildGraphIds() { + return graphs.stream().collect( Collectors.toMap( g -> g.id, g -> g ) ); + } + + + private Map buildGraphNames() { + return graphs.stream().collect( Collectors.toMap( g -> g.name, g -> g ) ); + } + + +} diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/logical/LogicalPeek.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/logical/LogicalPeek.java new file mode 100644 index 0000000000..df16eb0150 --- /dev/null +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/logical/LogicalPeek.java @@ -0,0 +1,24 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.catalog.readers.logical; + +import org.polypheny.db.catalog.readers.Peek; + +public interface LogicalPeek extends Peek { + + +} diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/logical/LogicalRelationalPeek.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/logical/LogicalRelationalPeek.java new file mode 100644 index 0000000000..4eb96c0ca6 --- /dev/null +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/logical/LogicalRelationalPeek.java @@ -0,0 +1,120 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.catalog.readers.logical; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; +import lombok.Value; +import org.polypheny.db.catalog.logical.relational.CatalogColumn; +import org.polypheny.db.catalog.logical.relational.CatalogSchema; +import org.polypheny.db.catalog.logical.relational.CatalogTable; +import org.polypheny.db.catalog.logical.relational.RelationalCatalog; +import org.polypheny.db.catalog.readers.physical.PhysicalPeek; + +@Value +public class LogicalRelationalPeek implements PhysicalPeek { + + ImmutableList catalogs; + + public ImmutableList schemas; + public ImmutableMap schemaIds; + public ImmutableMap schemaNames; + public ImmutableList tables; + public ImmutableMap tableIds; + public ImmutableMap tableNames; + public ImmutableList columns; + public ImmutableMap columnIds; + public ImmutableMap columnNames; + + + public LogicalRelationalPeek( List catalogs ) { + this.catalogs = ImmutableList.copyOf( catalogs.stream().map( RelationalCatalog::copy ).collect( Collectors.toList() ) ); + + this.schemas = ImmutableList.copyOf( buildSchemas() ); + this.schemaIds = ImmutableMap.copyOf( buildSchemaIds() ); + this.schemaNames = ImmutableMap.copyOf( buildSchemaNames() ); + + this.tables = ImmutableList.copyOf( buildTables() ); + this.tableIds = ImmutableMap.copyOf( buildTableIds() ); + this.tableNames = ImmutableMap.copyOf( buildTableNames() ); + + this.columns = ImmutableList.copyOf( buildColumns() ); + this.columnIds = ImmutableMap.copyOf( buildColumnIds() ); + this.columnNames = ImmutableMap.copyOf( buildColumnNames() ); + } + + /////////////////////////// + ///// Columns ///////////// + /////////////////////////// + + + private List buildColumns() { + return tables.stream().flatMap( t -> t.columns.values().stream() ).collect( 
Collectors.toList() ); + } + + + private Map buildColumnIds() { + return columns.stream().collect( Collectors.toMap( c -> c.id, c -> c ) ); + } + + + private Map buildColumnNames() { + return columns.stream().collect( Collectors.toMap( c -> c.name, c -> c ) ); + } + + /////////////////////////// + ///// Tables ////////////// + /////////////////////////// + + + private List buildTables() { + return catalogs.stream().flatMap( c -> c.tables.values().stream() ).collect( Collectors.toList() ); + } + + + private Map buildTableIds() { + return tables.stream().collect( Collectors.toMap( c -> c.id, c -> c ) ); + } + + + private Map buildTableNames() { + return tables.stream().collect( Collectors.toMap( c -> c.name, c -> c ) ); + } + + /////////////////////////// + ///// Schema ////////////// + /////////////////////////// + + + private List buildSchemas() { + return catalogs.stream().map( c -> new CatalogSchema( c.id, c.name ) ).collect( Collectors.toList() ); + } + + + private Map buildSchemaIds() { + return schemas.stream().collect( Collectors.toMap( c -> c.id, c -> c ) ); + } + + + private Map buildSchemaNames() { + return schemas.stream().collect( Collectors.toMap( c -> c.name, c -> c ) ); + } + +} diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/physical/DocumentOnlyPeek.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/physical/DocumentOnlyPeek.java new file mode 100644 index 0000000000..0d7463e078 --- /dev/null +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/physical/DocumentOnlyPeek.java @@ -0,0 +1,21 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.catalog.readers.physical; + +public class DocumentOnlyPeek implements PhysicalPeek { + +} diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/physical/GraphOnlyPeek.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/physical/GraphOnlyPeek.java new file mode 100644 index 0000000000..d6ead0bdad --- /dev/null +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/physical/GraphOnlyPeek.java @@ -0,0 +1,21 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.db.catalog.readers.physical; + +public class GraphOnlyPeek implements PhysicalPeek { + +} diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/physical/PhysicalPeek.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/physical/PhysicalPeek.java new file mode 100644 index 0000000000..42ea61be12 --- /dev/null +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/physical/PhysicalPeek.java @@ -0,0 +1,23 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.catalog.readers.physical; + +import org.polypheny.db.catalog.readers.Peek; + +public interface PhysicalPeek extends Peek { + +} diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/physical/RelationalOnlyPeek.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/physical/RelationalOnlyPeek.java new file mode 100644 index 0000000000..f69047ee52 --- /dev/null +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/physical/RelationalOnlyPeek.java @@ -0,0 +1,21 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.db.catalog.readers.physical; + +public class RelationalOnlyPeek implements PhysicalPeek { + +} From a8709bf2ffe99a4d7e4e7c4da55a4d7b8db2f851 Mon Sep 17 00:00:00 2001 From: datomo Date: Mon, 20 Feb 2023 00:08:30 +0100 Subject: [PATCH 012/436] added prototype for logicalPeek --- .../polypheny/db/catalog/CatalogPlugin.java | 5 +- .../org/polypheny/db/catalog/NCatalog.java | 4 + .../org/polypheny/db/catalog/PolyCatalog.java | 40 +++++---- .../catalog/allocation/AllocationCatalog.java | 7 ++ .../logical/document/CatalogCollection.java | 15 +++- .../logical/document/DocumentCatalog.java | 7 ++ .../catalog/logical/graph/GraphCatalog.java | 7 ++ .../logical/relational/RelationalCatalog.java | 7 ++ .../readers/logical/LogicalDocumentPeek.java | 7 ++ .../readers/logical/LogicalFullPeek.java | 81 +++++++++++++++++++ .../readers/logical/LogicalGraphPeek.java | 6 ++ .../catalog/readers/logical/LogicalPeek.java | 3 + .../logical/LogicalRelationalPeek.java | 10 ++- 13 files changed, 172 insertions(+), 27 deletions(-) create mode 100644 plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/logical/LogicalFullPeek.java diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/CatalogPlugin.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/CatalogPlugin.java index 235f6dd8ac..5df57311cb 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/CatalogPlugin.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/CatalogPlugin.java @@ -37,19 +37,20 @@ public CatalogPlugin( PluginWrapper wrapper ) { @Override public void start() { PolyCatalog catalog = new PolyCatalog(); - long user = catalog.addUser( "admin" ); catalog.addNamespace( "test", NamespaceType.RELATIONAL ); long namespaceId = catalog.addNamespace( "test2", NamespaceType.RELATIONAL ); long tableId = catalog.addTable( "testTable", namespaceId ); catalog.addColumn( "testColumn", namespaceId, tableId, null ); + catalog.commit(); - byte[] buffer = catalog.serialize( PolyCatalog.class ); + byte[] buffer = catalog.serialize(); PolyCatalog catalog1 = catalog.deserialize( buffer, PolyCatalog.class ); catalog1.addColumn( "testColumn2", namespaceId, tableId, null ); + catalog1.rollback(); } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/NCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/NCatalog.java index 30b57ae1ed..d33d670973 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/NCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/NCatalog.java @@ -16,10 +16,12 @@ package org.polypheny.db.catalog; +import io.activej.serializer.annotations.SerializeClass; import org.polypheny.db.catalog.logical.document.DocumentCatalog; import org.polypheny.db.catalog.logical.graph.GraphCatalog; import org.polypheny.db.catalog.logical.relational.RelationalCatalog; +@SerializeClass(subclasses = { GraphCatalog.class, RelationalCatalog.class, DocumentCatalog.class }) // required for deserialization public interface NCatalog { void commit(); @@ -28,6 +30,8 @@ public interface NCatalog { boolean hasUncommittedChanges(); + Catalog.NamespaceType getType(); + default RelationalCatalog asRelational() { return unwrap( RelationalCatalog.class ); } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java index efbc0a6b8f..926d40b810 100644 --- 
a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java @@ -30,10 +30,10 @@ import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.catalog.Catalog.NamespaceType; import org.polypheny.db.catalog.entities.CatalogUser; -import org.polypheny.db.catalog.exceptions.NoTablePrimaryKeyException; import org.polypheny.db.catalog.logical.document.DocumentCatalog; import org.polypheny.db.catalog.logical.graph.GraphCatalog; import org.polypheny.db.catalog.logical.relational.RelationalCatalog; +import org.polypheny.db.catalog.readers.logical.LogicalFullPeek; import org.polypheny.db.nodes.Identifier; import org.polypheny.db.nodes.Operator; import org.polypheny.db.plan.AlgOptTable; @@ -58,40 +58,38 @@ public class PolyCatalog implements Serializable, CatalogReader { public final BinarySerializer serializer = Serializable.builder.get().build( PolyCatalog.class ); @Serialize - public final Map relationals; - - @Serialize - public final Map documents; - - @Serialize - public final Map graphs; + public final Map catalogs; @Serialize public final Map users; private final IdBuilder idBuilder = new IdBuilder(); + private LogicalFullPeek logicalPeek; public PolyCatalog() { - this( new ConcurrentHashMap<>(), new ConcurrentHashMap<>(), new ConcurrentHashMap<>(), new ConcurrentHashMap<>() ); + this( new ConcurrentHashMap<>(), new ConcurrentHashMap<>() ); } public PolyCatalog( @Deserialize("users") Map users, - @Deserialize("relationals") Map relationals, - @Deserialize("documents") Map documents, - @Deserialize("graphs") Map graphs ) { + @Deserialize("catalogs") Map catalogs ) { this.users = users; - this.relationals = relationals; - this.documents = documents; - this.graphs = graphs; + this.catalogs = catalogs; + updatePeeks(); + } + + + private void updatePeeks() { + this.logicalPeek = new LogicalFullPeek( catalogs ); } - public void commit() throws NoTablePrimaryKeyException { + public void commit() { log.debug( "commit" ); + updatePeeks(); } @@ -114,13 +112,13 @@ public long addNamespace( String name, NamespaceType namespaceType ) { switch ( namespaceType ) { case RELATIONAL: - relationals.put( id, new RelationalCatalog( id, name ) ); + catalogs.put( id, new RelationalCatalog( id, name ) ); break; case DOCUMENT: - documents.put( id, new DocumentCatalog( id, name ) ); + catalogs.put( id, new DocumentCatalog( id, name ) ); break; case GRAPH: - graphs.put( id, new GraphCatalog( id, name ) ); + catalogs.put( id, new GraphCatalog( id, name ) ); break; } @@ -131,7 +129,7 @@ public long addNamespace( String name, NamespaceType namespaceType ) { public long addTable( String name, long namespaceId ) { long id = idBuilder.getNewEntityId(); - relationals.get( namespaceId ).asRelational().addTable( id, name ); + catalogs.get( namespaceId ).asRelational().addTable( id, name ); return id; } @@ -140,7 +138,7 @@ public long addTable( String name, long namespaceId ) { public long addColumn( String name, long namespaceId, long entityId, AlgDataType type ) { long id = idBuilder.getNewFieldId(); - relationals.get( namespaceId ).asRelational().addColumn( id, name, entityId ); + catalogs.get( namespaceId ).asRelational().addColumn( id, name, entityId ); return id; } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/AllocationCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/AllocationCatalog.java index 445dfb6d4f..4511d95914 100644 --- 
a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/AllocationCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/AllocationCatalog.java @@ -23,6 +23,7 @@ import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import lombok.Getter; +import org.polypheny.db.catalog.Catalog.NamespaceType; import org.polypheny.db.catalog.IdBuilder; import org.polypheny.db.catalog.NCatalog; import org.polypheny.db.catalog.Serializable; @@ -75,6 +76,12 @@ public boolean hasUncommittedChanges() { } + @Override + public NamespaceType getType() { + return null; + } + + @Override public AllocationCatalog copy() { return deserialize( serialize(), AllocationCatalog.class ); diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/document/CatalogCollection.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/document/CatalogCollection.java index b046be5b8c..0b057f53de 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/document/CatalogCollection.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/document/CatalogCollection.java @@ -16,14 +16,25 @@ package org.polypheny.db.catalog.logical.document; -import lombok.AllArgsConstructor; +import io.activej.serializer.annotations.Deserialize; +import io.activej.serializer.annotations.Serialize; import lombok.Value; @Value -@AllArgsConstructor public class CatalogCollection { + @Serialize public long id; + + @Serialize public String name; + + public CatalogCollection( + @Deserialize("id") long id, + @Deserialize("name") String name ) { + this.id = id; + this.name = name; + } + } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/document/DocumentCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/document/DocumentCatalog.java index d678aa1fe4..67f90735d7 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/document/DocumentCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/document/DocumentCatalog.java @@ -24,6 +24,7 @@ import lombok.Getter; import lombok.Value; import lombok.experimental.NonFinal; +import org.polypheny.db.catalog.Catalog.NamespaceType; import org.polypheny.db.catalog.NCatalog; import org.polypheny.db.catalog.Serializable; @@ -81,6 +82,12 @@ public boolean hasUncommittedChanges() { } + @Override + public NamespaceType getType() { + return NamespaceType.DOCUMENT; + } + + public void addCollection( long id, String name, long namespaceId ) { } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/graph/GraphCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/graph/GraphCatalog.java index 29e34ead93..5c8926e3b6 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/graph/GraphCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/graph/GraphCatalog.java @@ -22,6 +22,7 @@ import lombok.Getter; import lombok.Value; import lombok.experimental.NonFinal; +import org.polypheny.db.catalog.Catalog.NamespaceType; import org.polypheny.db.catalog.NCatalog; import org.polypheny.db.catalog.Serializable; @@ -69,6 +70,12 @@ public boolean hasUncommittedChanges() { } + @Override + public NamespaceType getType() { + return NamespaceType.GRAPH; + } + + @Override public GraphCatalog copy() { return deserialize( serialize(), GraphCatalog.class ); diff --git 
a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/relational/RelationalCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/relational/RelationalCatalog.java index e1179b79ae..04627ec839 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/relational/RelationalCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/relational/RelationalCatalog.java @@ -24,6 +24,7 @@ import lombok.Getter; import lombok.Value; import lombok.experimental.NonFinal; +import org.polypheny.db.catalog.Catalog.NamespaceType; import org.polypheny.db.catalog.NCatalog; import org.polypheny.db.catalog.Serializable; @@ -87,6 +88,12 @@ public boolean hasUncommittedChanges() { } + @Override + public NamespaceType getType() { + return NamespaceType.RELATIONAL; + } + + public void addTable( long id, String name ) { tables.put( id, new CatalogTable( id, name, this.id ) ); change(); diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/logical/LogicalDocumentPeek.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/logical/LogicalDocumentPeek.java index d201028f5a..6c93cec326 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/logical/LogicalDocumentPeek.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/logical/LogicalDocumentPeek.java @@ -22,6 +22,7 @@ import java.util.Map; import java.util.stream.Collectors; import lombok.Value; +import org.polypheny.db.catalog.Catalog.NamespaceType; import org.polypheny.db.catalog.logical.document.CatalogCollection; import org.polypheny.db.catalog.logical.document.CatalogDatabase; import org.polypheny.db.catalog.logical.document.DocumentCatalog; @@ -84,4 +85,10 @@ private List buildDatabases() { return catalogs.stream().map( c -> new CatalogDatabase( c.id, c.name, c.collections ) ).collect( Collectors.toList() ); } + + @Override + public NamespaceType getType() { + return NamespaceType.DOCUMENT; + } + } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/logical/LogicalFullPeek.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/logical/LogicalFullPeek.java new file mode 100644 index 0000000000..a84f26a666 --- /dev/null +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/logical/LogicalFullPeek.java @@ -0,0 +1,81 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.db.catalog.readers.logical; + +import com.google.common.collect.ImmutableMap; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; +import org.polypheny.db.catalog.Catalog.NamespaceType; +import org.polypheny.db.catalog.NCatalog; +import org.polypheny.db.catalog.logical.document.DocumentCatalog; +import org.polypheny.db.catalog.logical.graph.GraphCatalog; +import org.polypheny.db.catalog.logical.relational.RelationalCatalog; + +public class LogicalFullPeek implements LogicalPeek { + + private final ImmutableMap catalogs; + private final LogicalRelationalPeek relationalPeek; + private final LogicalGraphPeek graphPeek; + private final LogicalDocumentPeek documentPeek; + private final ImmutableMap ids; + private final ImmutableMap names; + + + public LogicalFullPeek( Map catalogs ) { + this.catalogs = ImmutableMap.copyOf( catalogs ); + + List relational = catalogs.values().stream().filter( c -> c.getType() == NamespaceType.RELATIONAL ).map( NCatalog::asRelational ).collect( Collectors.toList() ); + List graph = catalogs.values().stream().filter( c -> c.getType() == NamespaceType.GRAPH ).map( NCatalog::asGraph ).collect( Collectors.toList() ); + List document = catalogs.values().stream().filter( c -> c.getType() == NamespaceType.DOCUMENT ).map( NCatalog::asDocument ).collect( Collectors.toList() ); + + this.relationalPeek = new LogicalRelationalPeek( relational ); + this.graphPeek = new LogicalGraphPeek( graph ); + this.documentPeek = new LogicalDocumentPeek( document ); + + this.ids = buildIds(); + this.names = buildNames(); + } + + + private ImmutableMap buildIds() { + Map ids = new HashMap<>(); + this.relationalPeek.schemaIds.keySet().forEach( id -> ids.put( id, relationalPeek ) ); + this.graphPeek.graphIds.keySet().forEach( id -> ids.put( id, graphPeek ) ); + this.documentPeek.databaseIds.keySet().forEach( id -> ids.put( id, documentPeek ) ); + + return ImmutableMap.copyOf( ids ); + } + + + private ImmutableMap buildNames() { + Map names = new HashMap<>(); + this.relationalPeek.schemaNames.keySet().forEach( name -> names.put( name, relationalPeek ) ); + this.graphPeek.graphNames.keySet().forEach( name -> names.put( name, graphPeek ) ); + this.documentPeek.databaseNames.keySet().forEach( name -> names.put( name, documentPeek ) ); + + return ImmutableMap.copyOf( names ); + } + + + @Override + public NamespaceType getType() { + return null; + } + +} diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/logical/LogicalGraphPeek.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/logical/LogicalGraphPeek.java index 23f0da21b1..cc8298bc8e 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/logical/LogicalGraphPeek.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/logical/LogicalGraphPeek.java @@ -22,6 +22,7 @@ import java.util.Map; import java.util.stream.Collectors; import lombok.Value; +import org.polypheny.db.catalog.Catalog.NamespaceType; import org.polypheny.db.catalog.logical.graph.CatalogGraph; import org.polypheny.db.catalog.logical.graph.GraphCatalog; @@ -60,4 +61,9 @@ private Map buildGraphNames() { } + @Override + public NamespaceType getType() { + return NamespaceType.GRAPH; + } + } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/logical/LogicalPeek.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/logical/LogicalPeek.java index 
df16eb0150..7d4eb20b79 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/logical/LogicalPeek.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/logical/LogicalPeek.java @@ -16,9 +16,12 @@ package org.polypheny.db.catalog.readers.logical; +import org.polypheny.db.catalog.Catalog.NamespaceType; import org.polypheny.db.catalog.readers.Peek; public interface LogicalPeek extends Peek { + NamespaceType getType(); + } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/logical/LogicalRelationalPeek.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/logical/LogicalRelationalPeek.java index 4eb96c0ca6..a7f0a0cc80 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/logical/LogicalRelationalPeek.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/logical/LogicalRelationalPeek.java @@ -22,14 +22,14 @@ import java.util.Map; import java.util.stream.Collectors; import lombok.Value; +import org.polypheny.db.catalog.Catalog.NamespaceType; import org.polypheny.db.catalog.logical.relational.CatalogColumn; import org.polypheny.db.catalog.logical.relational.CatalogSchema; import org.polypheny.db.catalog.logical.relational.CatalogTable; import org.polypheny.db.catalog.logical.relational.RelationalCatalog; -import org.polypheny.db.catalog.readers.physical.PhysicalPeek; @Value -public class LogicalRelationalPeek implements PhysicalPeek { +public class LogicalRelationalPeek implements LogicalPeek { ImmutableList catalogs; @@ -117,4 +117,10 @@ private Map buildSchemaNames() { return schemas.stream().collect( Collectors.toMap( c -> c.name, c -> c ) ); } + + @Override + public NamespaceType getType() { + return NamespaceType.RELATIONAL; + } + } From 3ec508c586042ceb28c1a1859704582e263cb3df Mon Sep 17 00:00:00 2001 From: datomo Date: Tue, 21 Feb 2023 00:32:26 +0100 Subject: [PATCH 013/436] adding catalog table to AlgOptTable --- .../db/algebra/stream/StreamRules.java | 7 +- .../db/plan/AlgOptAbstractTable.java | 2 +- .../org/polypheny/db/plan/AlgOptTable.java | 5 +- .../polypheny/db/prepare/AlgOptTableImpl.java | 224 ++---------------- .../db/prepare/PolyphenyDbCatalogReader.java | 9 +- .../db/prepare/QueryableAlgBuilder.java | 4 + .../db/catalog/MockCatalogReader.java | 9 +- .../languages/mql2alg/MqlToAlgConverter.java | 3 + .../PhysicalPeek.java => Expressible.java} | 8 +- .../org/polypheny/db/catalog/PolyCatalog.java | 12 +- .../Peek.java => snapshot/Snapshot.java} | 4 +- .../allocation/AllocationSnapshot.java | 23 ++ .../logical/LogicalDocumentSnapshot.java} | 6 +- .../logical/LogicalFullSnapshot.java} | 30 +-- .../logical/LogicalGraphSnapshot.java} | 6 +- .../logical/LogicalRelationalSnapshot.java} | 6 +- .../logical/LogicalSnapshot.java} | 6 +- .../physical/DocumentOnlySnapshot.java} | 4 +- .../physical/GraphOnlySnapshot.java} | 4 +- .../physical/PhysicalSnapshot.java} | 6 +- .../physical/RelationalOnlySnapshot.java} | 4 +- .../db/sql/language/validate/EmptyScope.java | 5 +- .../db/sql/sql2alg/SqlToAlgConverter.java | 5 +- .../db/sql/language/SqlToAlgTestBase.java | 4 +- .../db/sql/volcano/TraitPropagationTest.java | 7 + 25 files changed, 145 insertions(+), 258 deletions(-) rename plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/{readers/physical/PhysicalPeek.java => Expressible.java} (80%) rename plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/{readers/Peek.java => snapshot/Snapshot.java} (89%) create mode 100644 
plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/allocation/AllocationSnapshot.java
 rename plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/{readers/logical/LogicalDocumentPeek.java => snapshot/logical/LogicalDocumentSnapshot.java} (94%)
 rename plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/{readers/logical/LogicalFullPeek.java => snapshot/logical/LogicalFullSnapshot.java} (73%)
 rename plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/{readers/logical/LogicalGraphPeek.java => snapshot/logical/LogicalGraphSnapshot.java} (91%)
 rename plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/{readers/logical/LogicalRelationalPeek.java => snapshot/logical/LogicalRelationalSnapshot.java} (95%)
 rename plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/{readers/logical/LogicalPeek.java => snapshot/logical/LogicalSnapshot.java} (81%)
 rename plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/{readers/physical/GraphOnlyPeek.java => snapshot/physical/DocumentOnlySnapshot.java} (84%)
 rename plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/{readers/physical/RelationalOnlyPeek.java => snapshot/physical/GraphOnlySnapshot.java} (84%)
 rename plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/{readers/allocation/AllocationPeek.java => snapshot/physical/PhysicalSnapshot.java} (79%)
 rename plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/{readers/physical/DocumentOnlyPeek.java => snapshot/physical/RelationalOnlySnapshot.java} (83%)
diff --git a/core/src/main/java/org/polypheny/db/algebra/stream/StreamRules.java b/core/src/main/java/org/polypheny/db/algebra/stream/StreamRules.java
index ffd1fc99be..3ccbfd4b39 100644
--- a/core/src/main/java/org/polypheny/db/algebra/stream/StreamRules.java
+++ b/core/src/main/java/org/polypheny/db/algebra/stream/StreamRules.java
@@ -54,6 +54,8 @@ import org.polypheny.db.algebra.logical.relational.LogicalScan;
 import org.polypheny.db.algebra.logical.relational.LogicalSort;
 import org.polypheny.db.algebra.logical.relational.LogicalUnion;
+import org.polypheny.db.catalog.Catalog;
+import org.polypheny.db.catalog.entity.CatalogTable;
 import org.polypheny.db.plan.AlgOptCluster;
 import org.polypheny.db.plan.AlgOptRule;
 import org.polypheny.db.plan.AlgOptRuleCall;
@@ -271,9 +273,12 @@ public void onMatch( AlgOptRuleCall call ) {
 final StreamableTable streamableTable = algOptTable.unwrap( StreamableTable.class );
 if ( streamableTable != null ) {
 final Table table1 = streamableTable.stream();
+ final CatalogTable catalogTable = Catalog.getInstance().getTable( table1.getTableId() );
 final AlgOptTable algOptTable2 = AlgOptTableImpl.create(
 algOptTable.getRelOptSchema(),
- algOptTable.getRowType(), table1,
+ algOptTable.getRowType(),
+ table1,
+ catalogTable,
 ImmutableList.<String>builder()
 .addAll( algOptTable.getQualifiedName() )
 .add( "(STREAM)" ).build() );
diff --git a/core/src/main/java/org/polypheny/db/plan/AlgOptAbstractTable.java b/core/src/main/java/org/polypheny/db/plan/AlgOptAbstractTable.java
index 1dfb2ae326..66bb64e1cb 100644
--- a/core/src/main/java/org/polypheny/db/plan/AlgOptAbstractTable.java
+++ b/core/src/main/java/org/polypheny/db/plan/AlgOptAbstractTable.java
@@ -139,7 +139,7 @@ public AlgNode toAlg( ToAlgContext context, AlgTraitSet traitSet ) {
 @Override
- public Expression getExpression( Class clazz ) {
+ public Expression getExpression( Class<?> clazz ) {
 throw new UnsupportedOperationException();
 }
diff --git a/core/src/main/java/org/polypheny/db/plan/AlgOptTable.java b/core/src/main/java/org/polypheny/db/plan/AlgOptTable.java
index 1fec3e212f..5b981cdacf 100644
--- a/core/src/main/java/org/polypheny/db/plan/AlgOptTable.java
+++ b/core/src/main/java/org/polypheny/db/plan/AlgOptTable.java
@@ -44,6 +44,7 @@ import org.polypheny.db.algebra.metadata.AlgMetadataQuery;
 import org.polypheny.db.algebra.type.AlgDataType;
 import org.polypheny.db.algebra.type.AlgDataTypeField;
+import org.polypheny.db.catalog.entity.CatalogTable;
 import org.polypheny.db.schema.ColumnStrategy;
 import org.polypheny.db.schema.Table;
 import org.polypheny.db.schema.Wrapper;
@@ -62,6 +63,8 @@ public interface AlgOptTable extends Wrapper {
 */
 List<String> getQualifiedName();
+ CatalogTable getCatalogTable();
+
 /**
 * Returns an estimate of the number of rows in the table.
 */
@@ -119,7 +122,7 @@ public interface AlgOptTable extends Wrapper {
 *
 * @param clazz The desired collection class; for example {@code Queryable}.
 */
- Expression getExpression( Class clazz );
+ Expression getExpression( Class<?> clazz );
 /**
 * Returns a table with the given extra fields.
diff --git a/core/src/main/java/org/polypheny/db/prepare/AlgOptTableImpl.java b/core/src/main/java/org/polypheny/db/prepare/AlgOptTableImpl.java
index 2540e7fe29..29e5e446cf 100644
--- a/core/src/main/java/org/polypheny/db/prepare/AlgOptTableImpl.java
+++ b/core/src/main/java/org/polypheny/db/prepare/AlgOptTableImpl.java
@@ -36,11 +36,10 @@
 import com.google.common.collect.ImmutableList;
 import java.util.AbstractList;
-import java.util.Collection;
 import java.util.List;
 import java.util.Objects;
-import java.util.Set;
 import java.util.function.Function;
+import javax.annotation.Nullable;
 import lombok.Getter;
 import org.apache.calcite.linq4j.tree.Expression;
 import org.polypheny.db.adapter.enumerable.EnumerableScan;
@@ -57,9 +56,8 @@
 import org.polypheny.db.algebra.type.AlgDataTypeFactory;
 import org.polypheny.db.algebra.type.AlgDataTypeFactoryImpl;
 import org.polypheny.db.algebra.type.AlgDataTypeField;
-import org.polypheny.db.algebra.type.AlgProtoDataType;
 import org.polypheny.db.algebra.type.AlgRecordType;
-import org.polypheny.db.catalog.Catalog.NamespaceType;
+import org.polypheny.db.catalog.entity.CatalogTable;
 import org.polypheny.db.plan.AlgOptCluster;
 import org.polypheny.db.plan.AlgOptSchema;
 import org.polypheny.db.plan.AlgOptTable;
@@ -68,14 +66,11 @@
 import org.polypheny.db.schema.ColumnStrategy;
 import org.polypheny.db.schema.FilterableTable;
 import org.polypheny.db.schema.ModifiableTable;
-import org.polypheny.db.schema.Path;
 import org.polypheny.db.schema.PolyphenyDbSchema;
 import org.polypheny.db.schema.ProjectableFilterableTable;
 import org.polypheny.db.schema.QueryableTable;
 import org.polypheny.db.schema.ScannableTable;
-import org.polypheny.db.schema.Schema;
 import org.polypheny.db.schema.SchemaPlus;
-import org.polypheny.db.schema.SchemaVersion;
 import org.polypheny.db.schema.Schemas;
 import org.polypheny.db.schema.StreamableTable;
 import org.polypheny.db.schema.Table;
@@ -85,7 +80,6 @@
 import org.polypheny.db.util.ImmutableBitSet;
 import org.polypheny.db.util.InitializerExpressionFactory;
 import org.polypheny.db.util.NullInitializerExpressionFactory;
-import org.polypheny.db.util.Pair;
 import org.polypheny.db.util.Util;
@@ -97,8 +91,14 @@ public class AlgOptTableImpl extends Prepare.AbstractPreparingTable {
 private final transient AlgOptSchema schema;
 private final AlgDataType rowType;
 @Getter
+ @Nullable
 private final Table table;
- private final transient Function<Class, Expression> expressionFunction;
+
+ @Getter
+ @Nullable
+ private final CatalogTable catalogTable;
+ @Nullable
+ private final transient Function<Class<?>, Expression> expressionFunction;
 private final ImmutableList<String> names;
 /**
@@ -114,35 +114,25 @@ private AlgOptTableImpl(
 AlgDataType rowType,
 List<String> names,
 Table table,
- Function<Class, Expression> expressionFunction,
+ CatalogTable catalogTable,
+ Function<Class<?>, Expression> expressionFunction,
 Double rowCount ) {
 this.schema = schema;
 this.rowType = Objects.requireNonNull( rowType );
 this.names = ImmutableList.copyOf( names );
 this.table = table; // may be null
+ this.catalogTable = catalogTable;
 this.expressionFunction = expressionFunction; // may be null
 this.rowCount = rowCount; // may be null
 }
 public static AlgOptTableImpl create( AlgOptSchema schema, AlgDataType rowType, List<String> names, Expression expression ) {
- return new AlgOptTableImpl( schema, rowType, names, null, c -> expression, null );
- }
-
-
- public static AlgOptTableImpl create( AlgOptSchema schema, AlgDataType rowType, Table table, Path path ) {
- final SchemaPlus schemaPlus = MySchemaPlus.create( path );
- return new AlgOptTableImpl(
- schema,
- rowType,
- Pair.left( path ),
- table,
- getClassExpressionFunction( schemaPlus, Util.last( path ).left, table ),
- table.getStatistic().getRowCount() );
+ return new AlgOptTableImpl( schema, rowType, names, null, null, c -> expression, null );
 }
- public static AlgOptTableImpl create( AlgOptSchema schema, AlgDataType rowType, final PolyphenyDbSchema.TableEntry tableEntry, Double count ) {
+ public static AlgOptTableImpl create( AlgOptSchema schema, AlgDataType rowType, final PolyphenyDbSchema.TableEntry tableEntry, CatalogTable catalogTable, Double count ) {
 final Table table = tableEntry.getTable();
 Double rowCount;
 if ( count == null ) {
@@ -151,7 +141,7 @@ public static AlgOptTableImpl create( AlgOptSchema schema, AlgDataType rowType,
 rowCount = count;
 }
- return new AlgOptTableImpl( schema, rowType, tableEntry.path(), table, getClassExpressionFunction( tableEntry, table ), rowCount );
+ return new AlgOptTableImpl( schema, rowType, tableEntry.path(), table, catalogTable, getClassExpressionFunction( tableEntry, table ), rowCount );
 }
@@ -159,16 +149,16 @@ public static AlgOptTableImpl create( AlgOptSchema schema, AlgDataType rowType,
 * Creates a copy of this RelOptTable. The new RelOptTable will have newRowType.
 */
 public AlgOptTableImpl copy( AlgDataType newRowType ) {
- return new AlgOptTableImpl( this.schema, newRowType, this.names, this.table, this.expressionFunction, this.rowCount );
+ return new AlgOptTableImpl( this.schema, newRowType, this.names, this.table, this.catalogTable, this.expressionFunction, this.rowCount );
 }
- private static Function<Class, Expression> getClassExpressionFunction( PolyphenyDbSchema.TableEntry tableEntry, Table table ) {
+ private static Function<Class<?>, Expression> getClassExpressionFunction( PolyphenyDbSchema.TableEntry tableEntry, Table table ) {
 return getClassExpressionFunction( tableEntry.schema.plus(), tableEntry.name, table );
 }
- private static Function<Class, Expression> getClassExpressionFunction( final SchemaPlus schema, final String tableName, final Table table ) {
+ private static Function<Class<?>, Expression> getClassExpressionFunction( final SchemaPlus schema, final String tableName, final Table table ) {
 if ( table instanceof QueryableTable ) {
 final QueryableTable queryableTable = (QueryableTable) table;
 return clazz -> queryableTable.getExpression( schema, tableName, clazz );
@@ -186,11 +176,11 @@ private static Function<Class, Expression> getClassExpressionFunction( final Sch
 }
- public static AlgOptTableImpl create( AlgOptSchema schema, AlgDataType rowType, Table table, ImmutableList<String> names ) {
+ public static AlgOptTableImpl create( AlgOptSchema schema, AlgDataType rowType, Table table, CatalogTable catalogTable, ImmutableList<String> names ) {
 assert table instanceof TranslatableTable || table instanceof ScannableTable || table instanceof ModifiableTable;
- return new AlgOptTableImpl( schema, rowType, names, table, null, null );
+ return new AlgOptTableImpl( schema, rowType, names, table, catalogTable, null, null );
 }
@@ -216,7 +206,7 @@ public <T> T unwrap( Class<T> clazz ) {
 @Override
- public Expression getExpression( Class clazz ) {
+ public Expression getExpression( Class<?> clazz ) {
 if ( expressionFunction == null ) {
 return null;
 }
@@ -232,6 +222,7 @@ protected AlgOptTable extend( Table extendedTable ) {
 extendedRowType,
 getQualifiedName(),
 extendedTable,
+ null,
 expressionFunction,
 getRowCount() );
 }
@@ -292,7 +283,7 @@ public AlgNode toAlg( ToAlgContext context, AlgTraitSet traitSet ) {
 }
 }
 final AlgOptTable algOptTable =
- new AlgOptTableImpl( this.schema, b.build(), this.names, this.table, this.expressionFunction, this.rowCount ) {
+ new AlgOptTableImpl( this.schema, b.build(), this.names, this.table, this.catalogTable, this.expressionFunction, this.rowCount ) {
 @Override
 public <T> T unwrap( Class<T> clazz ) {
 if ( clazz.isAssignableFrom( InitializerExpressionFactory.class ) ) {
@@ -412,7 +403,7 @@ public static List<ColumnStrategy> columnStrategies( final AlgOptTable table ) {
 final InitializerExpressionFactory ief = Util.first( table.unwrap( InitializerExpressionFactory.class ), NullInitializerExpressionFactory.INSTANCE );
- return new AbstractList<ColumnStrategy>() {
+ return new AbstractList<>() {
 @Override
 public int size() {
 return fieldCount;
@@ -463,172 +454,5 @@ public static AlgDataType realRowType( AlgOptTable table ) {
 return builder.build();
 }
-
-
- /**
- * Implementation of {@link SchemaPlus} that wraps a regular schema and knows its name and parent.
- *
- * It is read-only, and functionality is limited in other ways, it but allows table expressions to be generated.
- */
- private static class MySchemaPlus implements SchemaPlus {
-
- private final SchemaPlus parent;
- private final String name;
- private final Schema schema;
-
-
- MySchemaPlus( SchemaPlus parent, String name, Schema schema ) {
- this.parent = parent;
- this.name = name;
- this.schema = schema;
- }
-
-
- public static MySchemaPlus create( Path path ) {
- final Pair<String, Schema> pair = Util.last( path );
- final SchemaPlus parent;
- if ( path.size() == 1 ) {
- parent = null;
- } else {
- parent = create( path.parent() );
- }
- return new MySchemaPlus( parent, pair.left, pair.right );
- }
-
-
- @Override
- public PolyphenyDbSchema polyphenyDbSchema() {
- return null;
- }
-
-
- @Override
- public SchemaPlus getParentSchema() {
- return parent;
- }
-
-
- @Override
- public String getName() {
- return name;
- }
-
-
- @Override
- public SchemaPlus getSubSchema( String name ) {
- final Schema subSchema = schema.getSubSchema( name );
- return subSchema == null ? null : new MySchemaPlus( this, name, subSchema );
- }
-
-
- @Override
- public SchemaPlus add( String name, Schema schema, NamespaceType namespaceType ) {
- throw new UnsupportedOperationException();
- }
-
-
- @Override
- public void add( String name, Table table ) {
- throw new UnsupportedOperationException();
- }
-
-
- @Override
- public void add( String name, org.polypheny.db.schema.Function function ) {
- throw new UnsupportedOperationException();
- }
-
-
- @Override
- public void add( String name, AlgProtoDataType type ) {
- throw new UnsupportedOperationException();
- }
-
-
- @Override
- public boolean isMutable() {
- return schema.isMutable();
- }
-
-
- @Override
- public <T> T unwrap( Class<T> clazz ) {
- return null;
- }
-
-
- @Override
- public void setPath( ImmutableList<ImmutableList<String>> path ) {
- throw new UnsupportedOperationException();
- }
-
-
- @Override
- public void setCacheEnabled( boolean cache ) {
- throw new UnsupportedOperationException();
- }
-
-
- @Override
- public boolean isCacheEnabled() {
- return false;
- }
-
-
- @Override
- public Table getTable( String name ) {
- return schema.getTable( name );
- }
-
-
- @Override
- public Set<String> getTableNames() {
- return schema.getTableNames();
- }
-
-
- @Override
- public AlgProtoDataType getType( String name ) {
- return schema.getType( name );
- }
-
-
- @Override
- public Set<String> getTypeNames() {
- return schema.getTypeNames();
- }
-
-
- @Override
- public Collection<org.polypheny.db.schema.Function> getFunctions( String name ) {
- return schema.getFunctions( name );
- }
-
-
- @Override
- public Set<String> getFunctionNames() {
- return schema.getFunctionNames();
- }
-
-
- @Override
- public Set<String> getSubSchemaNames() {
- return schema.getSubSchemaNames();
- }
-
-
- @Override
- public Expression getExpression( SchemaPlus parentSchema, String name ) {
- return schema.getExpression( parentSchema, name );
- }
-
-
- @Override
- public Schema snapshot( SchemaVersion version ) {
- throw new UnsupportedOperationException();
- }
-
- }
- }
diff --git a/core/src/main/java/org/polypheny/db/prepare/PolyphenyDbCatalogReader.java b/core/src/main/java/org/polypheny/db/prepare/PolyphenyDbCatalogReader.java
index a011c20945..1ea77c9a7d 100644
--- a/core/src/main/java/org/polypheny/db/prepare/PolyphenyDbCatalogReader.java
+++ b/core/src/main/java/org/polypheny/db/prepare/PolyphenyDbCatalogReader.java
@@ -45,6 +45,8 @@ import org.polypheny.db.algebra.operators.OperatorTable;
 import org.polypheny.db.algebra.type.AlgDataType;
 import org.polypheny.db.algebra.type.AlgDataTypeFactory;
+import org.polypheny.db.catalog.Catalog;
+import 
org.polypheny.db.catalog.entity.CatalogTable; import org.polypheny.db.nodes.Identifier; import org.polypheny.db.nodes.Operator; import org.polypheny.db.plan.AlgOptTable; @@ -75,20 +77,20 @@ public PolyphenyDbCatalogReader( PolyphenyDbSchema rootSchema, List defa } - @Override public Prepare.PreparingTable getTable( final List names ) { // First look in the default schema, if any. If not found, look in the root schema. PolyphenyDbSchema.TableEntry entry = ValidatorUtil.getTableEntry( this, names ); if ( entry != null ) { final Table table = entry.getTable(); + CatalogTable catalogTable = Catalog.getInstance().getTable( table.getTableId() ); if ( table instanceof Wrapper ) { final Prepare.PreparingTable algOptTable = ((Wrapper) table).unwrap( Prepare.PreparingTable.class ); if ( algOptTable != null ) { return algOptTable; } } - return AlgOptTableImpl.create( this, table.getRowType( typeFactory ), entry, null ); + return AlgOptTableImpl.create( this, table.getRowType( typeFactory ), entry, catalogTable, null ); } return null; } @@ -100,7 +102,8 @@ public AlgOptTable getCollection( final List names ) { PolyphenyDbSchema.TableEntry entry = ValidatorUtil.getTableEntry( this, names ); if ( entry != null ) { final Table table = entry.getTable(); - return AlgOptTableImpl.create( this, table.getRowType( typeFactory ), entry, null ); + CatalogTable catalogTable = Catalog.getInstance().getTable( table.getTableId() ); + return AlgOptTableImpl.create( this, table.getRowType( typeFactory ), entry, catalogTable, null ); } return null; } diff --git a/core/src/main/java/org/polypheny/db/prepare/QueryableAlgBuilder.java b/core/src/main/java/org/polypheny/db/prepare/QueryableAlgBuilder.java index 5b02bc040f..126a6d234a 100644 --- a/core/src/main/java/org/polypheny/db/prepare/QueryableAlgBuilder.java +++ b/core/src/main/java/org/polypheny/db/prepare/QueryableAlgBuilder.java @@ -64,6 +64,8 @@ import org.polypheny.db.algebra.logical.relational.LogicalFilter; import org.polypheny.db.algebra.logical.relational.LogicalProject; import org.polypheny.db.algebra.logical.relational.LogicalScan; +import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.entity.CatalogTable; import org.polypheny.db.rex.RexNode; import org.polypheny.db.schema.PolyphenyDbSchema; import org.polypheny.db.schema.QueryableTable; @@ -106,6 +108,7 @@ AlgNode toAlg( Queryable queryable ) { if ( queryable instanceof AbstractTableQueryable ) { final AbstractTableQueryable tableQueryable = (AbstractTableQueryable) queryable; final QueryableTable table = tableQueryable.table; + final CatalogTable catalogTable = Catalog.getInstance().getTable( table.getTableId() ); final PolyphenyDbSchema.TableEntry tableEntry = PolyphenyDbSchema .from( tableQueryable.schema ) @@ -114,6 +117,7 @@ AlgNode toAlg( Queryable queryable ) { null, table.getRowType( translator.typeFactory ), tableEntry, + catalogTable, null ); if ( table instanceof TranslatableTable ) { return ((TranslatableTable) table).toAlg( translator.toAlgContext(), algOptTable, translator.cluster.traitSet() ); diff --git a/core/src/test/java/org/polypheny/db/catalog/MockCatalogReader.java b/core/src/test/java/org/polypheny/db/catalog/MockCatalogReader.java index a3b7c69d67..f7666e854a 100644 --- a/core/src/test/java/org/polypheny/db/catalog/MockCatalogReader.java +++ b/core/src/test/java/org/polypheny/db/catalog/MockCatalogReader.java @@ -65,6 +65,7 @@ import org.polypheny.db.algebra.type.DynamicRecordTypeImpl; import org.polypheny.db.algebra.type.StructKind; import 
org.polypheny.db.catalog.Catalog.NamespaceType; +import org.polypheny.db.catalog.entity.CatalogTable; import org.polypheny.db.nodes.Call; import org.polypheny.db.nodes.Node; import org.polypheny.db.plan.AlgOptSchema; @@ -523,6 +524,12 @@ public List getQualifiedName() { } + @Override + public CatalogTable getCatalogTable() { + return null; + } + + @Override public Monotonicity getMonotonicity( String columnName ) { return monotonicColumnSet.contains( columnName ) @@ -538,7 +545,7 @@ public AccessType getAllowedAccess() { @Override - public Expression getExpression( Class clazz ) { + public Expression getExpression( Class clazz ) { throw new UnsupportedOperationException(); } diff --git a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql2alg/MqlToAlgConverter.java b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql2alg/MqlToAlgConverter.java index 50a0fb262b..b477ce83b5 100644 --- a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql2alg/MqlToAlgConverter.java +++ b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql2alg/MqlToAlgConverter.java @@ -70,6 +70,7 @@ import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.Catalog.NamespaceType; import org.polypheny.db.catalog.Catalog.Pattern; +import org.polypheny.db.catalog.entity.CatalogTable; import org.polypheny.db.languages.OperatorRegistry; import org.polypheny.db.languages.QueryLanguage; import org.polypheny.db.languages.QueryParameters; @@ -343,6 +344,7 @@ private AlgOptTable getEntity( MqlCollectionStatement query, String dbSchemaName //fieldInfo.add( new AlgDataTypeFieldImpl( "_id", 0, typeFactory.createPolyType( PolyType.VARCHAR, 24 ) ) ); fieldInfo.add( new AlgDataTypeFieldImpl( "d", 0, typeFactory.createPolyType( PolyType.DOCUMENT ) ) ); AlgDataType rowType = fieldInfo.build(); + CatalogTable catalogTable = Catalog.getInstance().getTable( table.getTable().getTableId() ); return AlgOptTableImpl.create( table.getRelOptSchema(), @@ -350,6 +352,7 @@ private AlgOptTable getEntity( MqlCollectionStatement query, String dbSchemaName new TableEntryImpl( catalogReader.getRootSchema(), names.get( names.size() - 1 ), new LogicalTable( Catalog.getInstance().getSchemas( Catalog.defaultDatabaseId, new Pattern( dbSchemaName ) ).get( 0 ).id, names.get( 0 ), names.get( names.size() - 1 ), List.of(), List.of(), AlgDataTypeImpl.proto( rowType ), NamespaceType.GRAPH ) ), + catalogTable, 1.0 ); } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/physical/PhysicalPeek.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/Expressible.java similarity index 80% rename from plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/physical/PhysicalPeek.java rename to plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/Expressible.java index 42ea61be12..0190493a9c 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/physical/PhysicalPeek.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/Expressible.java @@ -14,10 +14,12 @@ * limitations under the License. 
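A note on the recurring hunk in this patch: each call site resolves the logical CatalogTable once, via the physical table's id, and hands it to AlgOptTableImpl.create as a new argument. A minimal sketch of the shared pattern, assuming the Polypheny types named in the hunks; the helper method itself is illustrative and not part of the patch (a later patch in this series renames AlgOptTable to AlgOptEntity):

    import org.polypheny.db.algebra.type.AlgDataTypeFactory;
    import org.polypheny.db.catalog.Catalog;
    import org.polypheny.db.catalog.entity.CatalogTable;
    import org.polypheny.db.plan.AlgOptSchema;
    import org.polypheny.db.plan.AlgOptTable;
    import org.polypheny.db.prepare.AlgOptTableImpl;
    import org.polypheny.db.schema.PolyphenyDbSchema;
    import org.polypheny.db.schema.Table;

    // Illustrative helper mirroring the lookup added in each hunk above: the
    // logical CatalogTable is resolved by table id and passed as the new
    // fourth argument of AlgOptTableImpl.create.
    static AlgOptTable createWithCatalogTable( AlgOptSchema schema, Table table, PolyphenyDbSchema.TableEntry entry, AlgDataTypeFactory typeFactory ) {
        CatalogTable catalogTable = Catalog.getInstance().getTable( table.getTableId() );
        return AlgOptTableImpl.create( schema, table.getRowType( typeFactory ), entry, catalogTable, null );
    }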
*/ -package org.polypheny.db.catalog.readers.physical; +package org.polypheny.db.catalog; -import org.polypheny.db.catalog.readers.Peek; +import org.apache.calcite.linq4j.tree.Expression; -public interface PhysicalPeek extends Peek { +public interface Expressible { + + Expression asExpression(); } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java index 926d40b810..9d3c2e064b 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java @@ -33,7 +33,7 @@ import org.polypheny.db.catalog.logical.document.DocumentCatalog; import org.polypheny.db.catalog.logical.graph.GraphCatalog; import org.polypheny.db.catalog.logical.relational.RelationalCatalog; -import org.polypheny.db.catalog.readers.logical.LogicalFullPeek; +import org.polypheny.db.catalog.snapshot.logical.LogicalFullSnapshot; import org.polypheny.db.nodes.Identifier; import org.polypheny.db.nodes.Operator; import org.polypheny.db.plan.AlgOptTable; @@ -64,7 +64,7 @@ public class PolyCatalog implements Serializable, CatalogReader { public final Map users; private final IdBuilder idBuilder = new IdBuilder(); - private LogicalFullPeek logicalPeek; + private LogicalFullSnapshot logicalFullSnapshot; public PolyCatalog() { @@ -78,18 +78,18 @@ public PolyCatalog( this.users = users; this.catalogs = catalogs; - updatePeeks(); + updateSnapshot(); } - private void updatePeeks() { - this.logicalPeek = new LogicalFullPeek( catalogs ); + private void updateSnapshot() { + this.logicalFullSnapshot = new LogicalFullSnapshot( catalogs ); } public void commit() { log.debug( "commit" ); - updatePeeks(); + updateSnapshot(); } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/Peek.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/Snapshot.java similarity index 89% rename from plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/Peek.java rename to plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/Snapshot.java index 35e2c90463..9f57bfdaf4 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/Peek.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/Snapshot.java @@ -14,8 +14,8 @@ * limitations under the License. */ -package org.polypheny.db.catalog.readers; +package org.polypheny.db.catalog.snapshot; -public interface Peek { +public interface Snapshot { } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/allocation/AllocationSnapshot.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/allocation/AllocationSnapshot.java new file mode 100644 index 0000000000..e125fad59e --- /dev/null +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/allocation/AllocationSnapshot.java @@ -0,0 +1,23 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.catalog.snapshot.allocation; + +import org.polypheny.db.catalog.snapshot.Snapshot; + +public class AllocationSnapshot implements Snapshot { + +} diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/logical/LogicalDocumentPeek.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/logical/LogicalDocumentSnapshot.java similarity index 94% rename from plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/logical/LogicalDocumentPeek.java rename to plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/logical/LogicalDocumentSnapshot.java index 6c93cec326..1979093c2a 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/logical/LogicalDocumentPeek.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/logical/LogicalDocumentSnapshot.java @@ -14,7 +14,7 @@ * limitations under the License. */ -package org.polypheny.db.catalog.readers.logical; +package org.polypheny.db.catalog.snapshot.logical; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; @@ -28,7 +28,7 @@ import org.polypheny.db.catalog.logical.document.DocumentCatalog; @Value -public class LogicalDocumentPeek implements LogicalPeek { +public class LogicalDocumentSnapshot implements LogicalSnapshot { ImmutableList catalogs; public ImmutableList databases; @@ -39,7 +39,7 @@ public class LogicalDocumentPeek implements LogicalPeek { public ImmutableMap collectionNames; - public LogicalDocumentPeek( final List catalogs ) { + public LogicalDocumentSnapshot( final List catalogs ) { this.catalogs = ImmutableList.copyOf( catalogs.stream().map( DocumentCatalog::copy ).collect( Collectors.toList() ) ); this.databases = ImmutableList.copyOf( buildDatabases() ); diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/logical/LogicalFullPeek.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/logical/LogicalFullSnapshot.java similarity index 73% rename from plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/logical/LogicalFullPeek.java rename to plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/logical/LogicalFullSnapshot.java index a84f26a666..f44ddd261e 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/logical/LogicalFullPeek.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/logical/LogicalFullSnapshot.java @@ -14,7 +14,7 @@ * limitations under the License. 
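For context on the Expressible interface introduced above (renamed from PhysicalPeek): it exposes a linq4j Expression so catalog objects can participate in generated code. A minimal sketch of an implementor; the class and its constant-expression body are hypothetical, only the interface comes from the patch:

    import org.apache.calcite.linq4j.tree.Expression;
    import org.apache.calcite.linq4j.tree.Expressions;
    import org.polypheny.db.catalog.Expressible;

    // Hypothetical implementor: exposes itself as a constant expression.
    class ExampleCatalogObject implements Expressible {

        @Override
        public Expression asExpression() {
            return Expressions.constant( this );
        }
    }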
*/ -package org.polypheny.db.catalog.readers.logical; +package org.polypheny.db.catalog.snapshot.logical; import com.google.common.collect.ImmutableMap; import java.util.HashMap; @@ -27,34 +27,34 @@ import org.polypheny.db.catalog.logical.graph.GraphCatalog; import org.polypheny.db.catalog.logical.relational.RelationalCatalog; -public class LogicalFullPeek implements LogicalPeek { +public class LogicalFullSnapshot implements LogicalSnapshot { private final ImmutableMap catalogs; - private final LogicalRelationalPeek relationalPeek; - private final LogicalGraphPeek graphPeek; - private final LogicalDocumentPeek documentPeek; - private final ImmutableMap ids; - private final ImmutableMap names; + private final LogicalRelationalSnapshot relationalPeek; + private final LogicalGraphSnapshot graphPeek; + private final LogicalDocumentSnapshot documentPeek; + private final ImmutableMap ids; + private final ImmutableMap names; - public LogicalFullPeek( Map catalogs ) { + public LogicalFullSnapshot( Map catalogs ) { this.catalogs = ImmutableMap.copyOf( catalogs ); List relational = catalogs.values().stream().filter( c -> c.getType() == NamespaceType.RELATIONAL ).map( NCatalog::asRelational ).collect( Collectors.toList() ); List graph = catalogs.values().stream().filter( c -> c.getType() == NamespaceType.GRAPH ).map( NCatalog::asGraph ).collect( Collectors.toList() ); List document = catalogs.values().stream().filter( c -> c.getType() == NamespaceType.DOCUMENT ).map( NCatalog::asDocument ).collect( Collectors.toList() ); - this.relationalPeek = new LogicalRelationalPeek( relational ); - this.graphPeek = new LogicalGraphPeek( graph ); - this.documentPeek = new LogicalDocumentPeek( document ); + this.relationalPeek = new LogicalRelationalSnapshot( relational ); + this.graphPeek = new LogicalGraphSnapshot( graph ); + this.documentPeek = new LogicalDocumentSnapshot( document ); this.ids = buildIds(); this.names = buildNames(); } - private ImmutableMap buildIds() { - Map ids = new HashMap<>(); + private ImmutableMap buildIds() { + Map ids = new HashMap<>(); this.relationalPeek.schemaIds.keySet().forEach( id -> ids.put( id, relationalPeek ) ); this.graphPeek.graphIds.keySet().forEach( id -> ids.put( id, graphPeek ) ); this.documentPeek.databaseIds.keySet().forEach( id -> ids.put( id, documentPeek ) ); @@ -63,8 +63,8 @@ private ImmutableMap buildIds() { } - private ImmutableMap buildNames() { - Map names = new HashMap<>(); + private ImmutableMap buildNames() { + Map names = new HashMap<>(); this.relationalPeek.schemaNames.keySet().forEach( name -> names.put( name, relationalPeek ) ); this.graphPeek.graphNames.keySet().forEach( name -> names.put( name, graphPeek ) ); this.documentPeek.databaseNames.keySet().forEach( name -> names.put( name, documentPeek ) ); diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/logical/LogicalGraphPeek.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/logical/LogicalGraphSnapshot.java similarity index 91% rename from plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/logical/LogicalGraphPeek.java rename to plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/logical/LogicalGraphSnapshot.java index cc8298bc8e..bd90a7997a 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/logical/LogicalGraphPeek.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/logical/LogicalGraphSnapshot.java @@ -14,7 +14,7 @@ * limitations under the License. 
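The buildIds/buildNames hunks above only swap the Peek types for the Snapshot types; the indexing idea is unchanged: every namespace id and name maps to the model-specific snapshot that owns it. A condensed sketch, with the type parameters (stripped in the listing above) inferred from the surrounding code, and the result presumably frozen via ImmutableMap.copyOf:

    import com.google.common.collect.ImmutableMap;
    import java.util.HashMap;
    import java.util.Map;

    // Each sub-snapshot registers the namespace ids it owns, so lookups by id
    // route to the right data model. Field names follow the patch, which
    // renames the types but keeps the old *Peek field names.
    private ImmutableMap<Long, LogicalSnapshot> buildIds() {
        Map<Long, LogicalSnapshot> ids = new HashMap<>();
        relationalPeek.schemaIds.keySet().forEach( id -> ids.put( id, relationalPeek ) );
        graphPeek.graphIds.keySet().forEach( id -> ids.put( id, graphPeek ) );
        documentPeek.databaseIds.keySet().forEach( id -> ids.put( id, documentPeek ) );
        return ImmutableMap.copyOf( ids );
    }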
*/ -package org.polypheny.db.catalog.readers.logical; +package org.polypheny.db.catalog.snapshot.logical; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; @@ -27,7 +27,7 @@ import org.polypheny.db.catalog.logical.graph.GraphCatalog; @Value -public class LogicalGraphPeek implements LogicalPeek { +public class LogicalGraphSnapshot implements LogicalSnapshot { ImmutableList catalogs; public ImmutableList graphs; @@ -36,7 +36,7 @@ public class LogicalGraphPeek implements LogicalPeek { public ImmutableMap graphNames; - public LogicalGraphPeek( final List catalogs ) { + public LogicalGraphSnapshot( final List catalogs ) { this.catalogs = ImmutableList.copyOf( catalogs.stream().map( GraphCatalog::copy ).collect( Collectors.toList() ) ); this.graphs = ImmutableList.copyOf( buildGraphs() ); diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/logical/LogicalRelationalPeek.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/logical/LogicalRelationalSnapshot.java similarity index 95% rename from plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/logical/LogicalRelationalPeek.java rename to plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/logical/LogicalRelationalSnapshot.java index a7f0a0cc80..ea083643d4 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/logical/LogicalRelationalPeek.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/logical/LogicalRelationalSnapshot.java @@ -14,7 +14,7 @@ * limitations under the License. */ -package org.polypheny.db.catalog.readers.logical; +package org.polypheny.db.catalog.snapshot.logical; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; @@ -29,7 +29,7 @@ import org.polypheny.db.catalog.logical.relational.RelationalCatalog; @Value -public class LogicalRelationalPeek implements LogicalPeek { +public class LogicalRelationalSnapshot implements LogicalSnapshot { ImmutableList catalogs; @@ -44,7 +44,7 @@ public class LogicalRelationalPeek implements LogicalPeek { public ImmutableMap columnNames; - public LogicalRelationalPeek( List catalogs ) { + public LogicalRelationalSnapshot( List catalogs ) { this.catalogs = ImmutableList.copyOf( catalogs.stream().map( RelationalCatalog::copy ).collect( Collectors.toList() ) ); this.schemas = ImmutableList.copyOf( buildSchemas() ); diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/logical/LogicalPeek.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/logical/LogicalSnapshot.java similarity index 81% rename from plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/logical/LogicalPeek.java rename to plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/logical/LogicalSnapshot.java index 7d4eb20b79..322fc7378b 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/logical/LogicalPeek.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/logical/LogicalSnapshot.java @@ -14,12 +14,12 @@ * limitations under the License. 
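The snapshot constructors above share one idea: a snapshot is built from defensive copies, so later catalog mutations cannot leak into a published snapshot. A minimal generic sketch of that pattern; the helper is illustrative, the patch inlines it per catalog type:

    import com.google.common.collect.ImmutableList;
    import java.util.List;
    import java.util.function.UnaryOperator;
    import java.util.stream.Collectors;

    // Copy every catalog, then freeze the list; used as, e.g.,
    // frozenCopy( graphCatalogs, GraphCatalog::copy ).
    static <C> ImmutableList<C> frozenCopy( List<C> catalogs, UnaryOperator<C> copy ) {
        return ImmutableList.copyOf( catalogs.stream().map( copy ).collect( Collectors.toList() ) );
    }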
*/ -package org.polypheny.db.catalog.readers.logical; +package org.polypheny.db.catalog.snapshot.logical; import org.polypheny.db.catalog.Catalog.NamespaceType; -import org.polypheny.db.catalog.readers.Peek; +import org.polypheny.db.catalog.snapshot.Snapshot; -public interface LogicalPeek extends Peek { +public interface LogicalSnapshot extends Snapshot { NamespaceType getType(); diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/physical/GraphOnlyPeek.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/physical/DocumentOnlySnapshot.java similarity index 84% rename from plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/physical/GraphOnlyPeek.java rename to plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/physical/DocumentOnlySnapshot.java index d6ead0bdad..c69cd59403 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/physical/GraphOnlyPeek.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/physical/DocumentOnlySnapshot.java @@ -14,8 +14,8 @@ * limitations under the License. */ -package org.polypheny.db.catalog.readers.physical; +package org.polypheny.db.catalog.snapshot.physical; -public class GraphOnlyPeek implements PhysicalPeek { +public class DocumentOnlySnapshot implements PhysicalSnapshot { } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/physical/RelationalOnlyPeek.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/physical/GraphOnlySnapshot.java similarity index 84% rename from plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/physical/RelationalOnlyPeek.java rename to plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/physical/GraphOnlySnapshot.java index f69047ee52..c784f83336 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/physical/RelationalOnlyPeek.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/physical/GraphOnlySnapshot.java @@ -14,8 +14,8 @@ * limitations under the License. */ -package org.polypheny.db.catalog.readers.physical; +package org.polypheny.db.catalog.snapshot.physical; -public class RelationalOnlyPeek implements PhysicalPeek { +public class GraphOnlySnapshot implements PhysicalSnapshot { } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/allocation/AllocationPeek.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/physical/PhysicalSnapshot.java similarity index 79% rename from plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/allocation/AllocationPeek.java rename to plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/physical/PhysicalSnapshot.java index b6cdca4c43..3e5f8fe752 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/allocation/AllocationPeek.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/physical/PhysicalSnapshot.java @@ -14,10 +14,10 @@ * limitations under the License. 
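Taken together, the renames above and below leave a small marker-interface hierarchy. A sketch of the resulting shape, collapsed into one listing (each type lives in its own file in the patch):

    import org.polypheny.db.catalog.Catalog.NamespaceType;

    interface Snapshot { }

    interface LogicalSnapshot extends Snapshot {
        NamespaceType getType();
    }

    interface PhysicalSnapshot extends Snapshot { }

    class AllocationSnapshot implements Snapshot { }

    class RelationalOnlySnapshot implements PhysicalSnapshot { }
    class GraphOnlySnapshot implements PhysicalSnapshot { }
    class DocumentOnlySnapshot implements PhysicalSnapshot { }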
*/ -package org.polypheny.db.catalog.readers.allocation; +package org.polypheny.db.catalog.snapshot.physical; -import org.polypheny.db.catalog.readers.Peek; +import org.polypheny.db.catalog.snapshot.Snapshot; -public class AllocationPeek implements Peek { +public interface PhysicalSnapshot extends Snapshot { } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/physical/DocumentOnlyPeek.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/physical/RelationalOnlySnapshot.java similarity index 83% rename from plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/physical/DocumentOnlyPeek.java rename to plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/physical/RelationalOnlySnapshot.java index 0d7463e078..99cdceaa61 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/readers/physical/DocumentOnlyPeek.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/physical/RelationalOnlySnapshot.java @@ -14,8 +14,8 @@ * limitations under the License. */ -package org.polypheny.db.catalog.readers.physical; +package org.polypheny.db.catalog.snapshot.physical; -public class DocumentOnlyPeek implements PhysicalPeek { +public class RelationalOnlySnapshot implements PhysicalSnapshot { } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/EmptyScope.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/EmptyScope.java index e03f4ae443..da1425f323 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/EmptyScope.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/EmptyScope.java @@ -26,6 +26,8 @@ import org.polypheny.db.algebra.constant.Monotonicity; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.StructKind; +import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.entity.CatalogTable; import org.polypheny.db.nodes.validate.ValidatorTable; import org.polypheny.db.plan.AlgOptSchema; import org.polypheny.db.prepare.AlgOptTableImpl; @@ -147,6 +149,7 @@ private void resolve_( final PolyphenyDbSchema rootSchema, List names, L path = path.plus( null, -1, entry.name, StructKind.NONE ); remainingNames = Util.skip( remainingNames ); final Table table = entry.getTable(); + final CatalogTable catalogTable = Catalog.getInstance().getTable( table.getTableId() ); ValidatorTable table2 = null; if ( table instanceof Wrapper ) { table2 = ((Wrapper) table).unwrap( Prepare.PreparingTable.class ); @@ -154,7 +157,7 @@ private void resolve_( final PolyphenyDbSchema rootSchema, List names, L if ( table2 == null ) { final AlgOptSchema algOptSchema = validator.catalogReader.unwrap( AlgOptSchema.class ); final AlgDataType rowType = table.getRowType( validator.typeFactory ); - table2 = AlgOptTableImpl.create( algOptSchema, rowType, entry, null ); + table2 = AlgOptTableImpl.create( algOptSchema, rowType, entry, catalogTable, null ); } namespace = new TableNamespace( validator, table2 ); resolved.found( namespace, false, null, path, remainingNames ); diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/sql2alg/SqlToAlgConverter.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/sql2alg/SqlToAlgConverter.java index 1ed8c2d9f7..8a96c002d5 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/sql2alg/SqlToAlgConverter.java +++ 
b/plugins/sql-language/src/main/java/org/polypheny/db/sql/sql2alg/SqlToAlgConverter.java @@ -120,7 +120,9 @@ import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; import org.polypheny.db.algebra.type.AlgDataTypeField; +import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.Catalog.NamespaceType; +import org.polypheny.db.catalog.entity.CatalogTable; import org.polypheny.db.languages.NodeToAlgConverter; import org.polypheny.db.languages.OperatorRegistry; import org.polypheny.db.languages.ParserPos; @@ -2159,8 +2161,9 @@ protected void convertCollectionTable( Blackboard bb, SqlCall call ) { if ( operator instanceof SqlUserDefinedTableMacro ) { final SqlUserDefinedTableMacro udf = (SqlUserDefinedTableMacro) operator; final TranslatableTable table = udf.getTable( typeFactory, callBinding.sqlOperands() ); + final CatalogTable catalogTable = Catalog.getInstance().getTable( table.getTableId() ); final AlgDataType rowType = table.getRowType( typeFactory ); - AlgOptTable algOptTable = AlgOptTableImpl.create( null, rowType, table, udf.getNameAsId().names ); + AlgOptTable algOptTable = AlgOptTableImpl.create( null, rowType, table, catalogTable, udf.getNameAsId().names ); AlgNode converted = toAlg( algOptTable ); bb.setRoot( converted, true ); return; diff --git a/plugins/sql-language/src/test/java/org/polypheny/db/sql/language/SqlToAlgTestBase.java b/plugins/sql-language/src/test/java/org/polypheny/db/sql/language/SqlToAlgTestBase.java index 6548715920..bbb9dadf3b 100644 --- a/plugins/sql-language/src/test/java/org/polypheny/db/sql/language/SqlToAlgTestBase.java +++ b/plugins/sql-language/src/test/java/org/polypheny/db/sql/language/SqlToAlgTestBase.java @@ -438,7 +438,7 @@ public List getColumnStrategies() { @Override - public Expression getExpression( Class clazz ) { + public Expression getExpression( Class clazz ) { return null; } @@ -480,7 +480,7 @@ public T unwrap( Class clazz ) { @Override - public Expression getExpression( Class clazz ) { + public Expression getExpression( Class clazz ) { return parent.getExpression( clazz ); } diff --git a/plugins/sql-language/src/test/java/org/polypheny/db/sql/volcano/TraitPropagationTest.java b/plugins/sql-language/src/test/java/org/polypheny/db/sql/volcano/TraitPropagationTest.java index e90e00c80f..974a3b7a4a 100644 --- a/plugins/sql-language/src/test/java/org/polypheny/db/sql/volcano/TraitPropagationTest.java +++ b/plugins/sql-language/src/test/java/org/polypheny/db/sql/volcano/TraitPropagationTest.java @@ -67,6 +67,7 @@ import org.polypheny.db.algebra.rules.SortRemoveRule; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; +import org.polypheny.db.catalog.entity.CatalogTable; import org.polypheny.db.config.RuntimeConfig; import org.polypheny.db.languages.OperatorRegistry; import org.polypheny.db.languages.PolyphenyDbServerStatement; @@ -157,6 +158,12 @@ public Statistic getStatistic() { }; final AlgOptAbstractTable t1 = new AlgOptAbstractTable( algOptSchema, "t1", table.getRowType( typeFactory ) ) { + @Override + public CatalogTable getCatalogTable() { + return null; + } + + @Override public T unwrap( Class clazz ) { return clazz.isInstance( table ) From 1a1ce8389a07e68fe83a0634e24d3184e0cef47b Mon Sep 17 00:00:00 2001 From: datomo Date: Tue, 21 Feb 2023 13:48:19 +0100 Subject: [PATCH 014/436] transition from Table to CatalogEntity --- .../db/adapter/enumerable/EnumerableScan.java | 12 +- .../enumerable/EnumerableScanRule.java | 
16 +-- .../enumerable/EnumerableTableModify.java | 4 +- .../polypheny/db/algebra/AbstractAlgNode.java | 4 +- .../polypheny/db/algebra/AlgFieldTrimmer.java | 4 +- .../org/polypheny/db/algebra/AlgInput.java | 4 +- .../org/polypheny/db/algebra/AlgNode.java | 4 +- .../algebra/AlgStructuredTypeFlattener.java | 2 +- .../algebra/UnsupportedFromInsertShuttle.java | 5 +- .../db/algebra/core/AlgFactories.java | 28 ++-- .../org/polypheny/db/algebra/core/Modify.java | 6 +- .../db/algebra/core/PrimaryKeyCheck.java | 6 +- .../org/polypheny/db/algebra/core/Scan.java | 10 +- .../db/algebra/core/document/DocumentAlg.java | 4 +- .../algebra/core/document/DocumentModify.java | 6 +- .../algebra/core/document/DocumentScan.java | 10 +- .../algebra/core/document/DocumentValues.java | 4 +- .../relational/RelationalTransformable.java | 6 +- .../db/algebra/externalize/AlgJsonReader.java | 4 +- .../common/LogicalConstraintEnforcer.java | 44 ++----- .../document/LogicalDocumentModify.java | 8 +- .../logical/document/LogicalDocumentScan.java | 12 +- .../db/algebra/logical/lpg/LogicalGraph.java | 6 +- .../algebra/logical/lpg/LogicalLpgModify.java | 4 +- .../algebra/logical/lpg/LogicalLpgScan.java | 14 +- .../algebra/logical/lpg/LogicalLpgValues.java | 4 +- .../logical/relational/LogicalModify.java | 6 +- .../{LogicalScan.java => LogicalRelScan.java} | 18 +-- .../relational/LogicalRelViewScan.java | 20 +-- .../db/algebra/metadata/AlgColumnOrigin.java | 8 +- .../db/algebra/metadata/AlgMdCollation.java | 4 +- .../algebra/metadata/AlgMdColumnOrigins.java | 4 +- .../algebra/metadata/AlgMdDistribution.java | 4 +- .../db/algebra/metadata/AlgMetadataQuery.java | 4 +- .../metadata/JaninoRelMetadataProvider.java | 4 +- .../algebra/mutable/MutableTableModify.java | 8 +- .../db/algebra/rules/FilterScanRule.java | 4 +- .../algebra/rules/LoptOptimizeJoinRule.java | 16 +-- .../algebra/rules/LoptSemiJoinOptimizer.java | 16 +-- .../db/algebra/rules/ProjectScanRule.java | 6 +- .../polypheny/db/algebra/rules/ScanRule.java | 10 +- .../db/algebra/stream/StreamRules.java | 24 ++-- .../db/catalog/entity/CatalogCollection.java | 6 +- .../db/catalog/entity/CatalogEntity.java | 37 ++++++ .../catalog/entity/CatalogGraphDatabase.java | 5 +- .../db/catalog/entity/CatalogTable.java | 7 +- .../polypheny/db/interpreter/Bindables.java | 24 ++-- .../polypheny/db/interpreter/ScanNode.java | 24 ++-- ...ctTable.java => AlgOptAbstractEntity.java} | 16 +-- .../{AlgOptTable.java => AlgOptEntity.java} | 16 +-- .../org/polypheny/db/plan/AlgOptSchema.java | 6 +- .../db/plan/AlgOptSchemaWithSampling.java | 4 +- .../org/polypheny/db/plan/AlgOptUtil.java | 6 +- .../db/plan/SubstitutionVisitor.java | 8 +- ...ptTableImpl.java => AlgOptEntityImpl.java} | 65 +++++----- .../db/prepare/LixToAlgTranslator.java | 12 +- .../db/prepare/PolyphenyDbCatalogReader.java | 15 ++- .../org/polypheny/db/prepare/Prepare.java | 24 ++-- .../db/prepare/QueryableAlgBuilder.java | 6 +- .../db/processing/DeepCopyShuttle.java | 4 +- .../processing/LogicalAlgAnalyzeShuttle.java | 82 +++--------- .../polypheny/db/rex/RexTableInputRef.java | 10 +- .../polypheny/db/schema/ColumnStrategy.java | 4 +- .../org/polypheny/db/schema/LogicalTable.java | 8 +- .../db/schema/ModifiableCollection.java | 4 +- .../polypheny/db/schema/ModifiableTable.java | 4 +- .../db/schema/TranslatableGraph.java | 2 +- .../db/schema/TranslatableTable.java | 6 +- .../org/polypheny/db/tools/AlgBuilder.java | 18 +-- .../db/util/InitializerExpressionFactory.java | 8 +- .../NullInitializerExpressionFactory.java | 6 +- 
.../db/view/MaterializedViewManager.java | 14 +- .../org/polypheny/db/view/ViewManager.java | 19 +-- .../polypheny/db/catalog/CountingFactory.java | 6 +- .../EmpInitializerExpressionFactory.java | 6 +- .../db/catalog/MockCatalogReader.java | 80 ++++++------ .../db/catalog/MockCatalogReaderDocument.java | 2 +- .../db/catalog/MockCatalogReaderDynamic.java | 6 +- .../db/catalog/MockCatalogReaderExtended.java | 2 +- .../db/catalog/MockCatalogReaderSimple.java | 41 +++--- .../java/org/polypheny/db/test/JdbcTest.java | 4 +- .../org/polypheny/db/ddl/DdlManagerImpl.java | 14 +- .../db/processing/AbstractQueryProcessor.java | 106 ++++++++------- .../processing/ConstraintEnforceAttacher.java | 10 +- .../db/processing/DataMigratorImpl.java | 8 +- .../db/routing/routers/AbstractDqlRouter.java | 15 ++- .../db/routing/routers/BaseRouter.java | 44 +++---- .../db/routing/routers/CachedPlanRouter.java | 15 +-- .../db/routing/routers/DmlRouterImpl.java | 122 ++++++++---------- .../db/transaction/EntityAccessMap.java | 22 ++-- .../statistics/StatisticsManagerImpl.java | 12 +- .../adapter/cottontail/CottontailTable.java | 10 +- .../cottontail/algebra/CottontailAlg.java | 4 +- .../cottontail/algebra/CottontailScan.java | 4 +- .../algebra/CottontailTableModify.java | 4 +- .../org/polypheny/db/adapter/csv/CsvScan.java | 16 +-- .../db/adapter/csv/CsvTranslatableTable.java | 15 +-- .../db/adapter/druid/DruidQuery.java | 14 +- .../db/adapter/druid/DruidTable.java | 19 +-- .../elasticsearch/ElasticsearchRel.java | 4 +- .../elasticsearch/ElasticsearchScan.java | 4 +- .../elasticsearch/ElasticsearchTable.java | 29 +++-- .../adapter/file/FileTranslatableTable.java | 10 +- .../db/adapter/file/algebra/FileScan.java | 4 +- .../adapter/file/algebra/FileTableModify.java | 4 +- .../db/adapter/geode/algebra/GeodeAlg.java | 4 +- .../db/adapter/geode/algebra/GeodeScan.java | 4 +- .../db/adapter/geode/algebra/GeodeTable.java | 15 +-- .../polypheny/db/adapter/html/HtmlScan.java | 4 +- .../polypheny/db/adapter/html/HtmlTable.java | 13 +- .../polypheny/db/adapter/jdbc/JdbcRules.java | 54 ++++++-- .../polypheny/db/adapter/jdbc/JdbcScan.java | 11 +- .../polypheny/db/adapter/jdbc/JdbcTable.java | 26 ++-- .../adapter/jdbc/rel2sql/SqlImplementor.java | 8 +- .../db/adapter/jdbc/rel2sql/PlannerTest.java | 4 +- .../db/adapter/mongodb/MongoAlg.java | 17 ++- .../db/adapter/mongodb/MongoEntity.java | 12 +- .../db/adapter/mongodb/MongoRules.java | 4 +- .../db/adapter/mongodb/MongoScan.java | 12 +- .../languages/mql2alg/MqlToAlgConverter.java | 27 ++-- .../polypheny/db/adapter/neo4j/NeoEntity.java | 10 +- .../polypheny/db/adapter/neo4j/NeoGraph.java | 2 +- .../neo4j/NeoRelationalImplementor.java | 6 +- .../neo4j/rules/relational/NeoModify.java | 4 +- .../neo4j/rules/relational/NeoScan.java | 4 +- .../db/adapter/pig/PigAggregate.java | 4 +- .../db/adapter/pig/PigAlgFactories.java | 4 +- .../polypheny/db/adapter/pig/PigFilter.java | 4 +- .../org/polypheny/db/adapter/pig/PigJoin.java | 4 +- .../polypheny/db/adapter/pig/PigProject.java | 4 +- .../polypheny/db/adapter/pig/PigRules.java | 8 +- .../org/polypheny/db/adapter/pig/PigScan.java | 4 +- .../polypheny/db/adapter/pig/PigTable.java | 8 +- .../org/polypheny/db/catalog/PolyCatalog.java | 10 +- .../java/org/polypheny/db/restapi/Rest.java | 8 +- .../language/validate/DelegatingScope.java | 6 +- .../db/sql/language/validate/EmptyScope.java | 8 +- .../language/validate/SqlValidatorImpl.java | 20 +-- .../language/validate/SqlValidatorUtil.java | 14 +- .../sql/language/validate/TableNamespace.java | 8 
+- .../db/sql/sql2alg/SqlToAlgConverter.java | 43 +++--- .../org/polypheny/db/sql/AlgWriterTest.java | 6 +- .../org/polypheny/db/sql/FrameworksTest.java | 8 +- .../db/sql/language/SqlToAlgTestBase.java | 30 ++--- .../db/sql/volcano/TraitPropagationTest.java | 4 +- 145 files changed, 975 insertions(+), 984 deletions(-) rename core/src/main/java/org/polypheny/db/algebra/logical/relational/{LogicalScan.java => LogicalRelScan.java} (88%) create mode 100644 core/src/main/java/org/polypheny/db/catalog/entity/CatalogEntity.java rename core/src/main/java/org/polypheny/db/plan/{AlgOptAbstractTable.java => AlgOptAbstractEntity.java} (87%) rename core/src/main/java/org/polypheny/db/plan/{AlgOptTable.java => AlgOptEntity.java} (93%) rename core/src/main/java/org/polypheny/db/prepare/{AlgOptTableImpl.java => AlgOptEntityImpl.java} (85%) diff --git a/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableScan.java b/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableScan.java index ee3ae874fc..b35398511e 100644 --- a/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableScan.java +++ b/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableScan.java @@ -59,8 +59,8 @@ import org.polypheny.db.interpreter.Row; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgOptCost; +import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptPlanner; -import org.polypheny.db.plan.AlgOptTable; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.plan.volcano.VolcanoCost; import org.polypheny.db.schema.FilterableTable; @@ -85,7 +85,7 @@ public class EnumerableScan extends Scan implements EnumerableAlg { * * Use {@link #create} unless you know what you are doing. */ - public EnumerableScan( AlgOptCluster cluster, AlgTraitSet traitSet, AlgOptTable table, Class elementType ) { + public EnumerableScan( AlgOptCluster cluster, AlgTraitSet traitSet, AlgOptEntity table, Class elementType ) { super( cluster, traitSet, table ); assert getConvention() instanceof EnumerableConvention; this.elementType = elementType; @@ -95,8 +95,8 @@ public EnumerableScan( AlgOptCluster cluster, AlgTraitSet traitSet, AlgOptTable /** * Creates an EnumerableScan. */ - public static EnumerableScan create( AlgOptCluster cluster, AlgOptTable algOptTable ) { - final Table table = algOptTable.unwrap( Table.class ); + public static EnumerableScan create( AlgOptCluster cluster, AlgOptEntity algOptEntity ) { + final Table table = algOptEntity.unwrap( Table.class ); Class elementType = EnumerableScan.deduceElementType( table ); final AlgTraitSet traitSet = cluster.traitSetOf( EnumerableConvention.INSTANCE ) @@ -106,7 +106,7 @@ public static EnumerableScan create( AlgOptCluster cluster, AlgOptTable algOptTa } return ImmutableList.of(); } ); - return new EnumerableScan( cluster, traitSet, algOptTable, elementType ); + return new EnumerableScan( cluster, traitSet, algOptEntity, elementType ); } @@ -153,7 +153,7 @@ public static Class deduceElementType( Table table ) { } - public static JavaRowFormat deduceFormat( AlgOptTable table ) { + public static JavaRowFormat deduceFormat( AlgOptEntity table ) { final Class elementType = deduceElementType( table.unwrap( Table.class ) ); return elementType == Object[].class ? 
JavaRowFormat.ARRAY diff --git a/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableScanRule.java b/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableScanRule.java index f62f0216ef..033e94611e 100644 --- a/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableScanRule.java +++ b/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableScanRule.java @@ -38,9 +38,9 @@ import org.apache.calcite.linq4j.tree.Expression; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.convert.ConverterRule; -import org.polypheny.db.algebra.logical.relational.LogicalScan; +import org.polypheny.db.algebra.logical.relational.LogicalRelScan; import org.polypheny.db.algebra.logical.relational.LogicalTableFunctionScan; -import org.polypheny.db.plan.AlgOptTable; +import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.Convention; import org.polypheny.db.schema.Table; import org.polypheny.db.tools.AlgBuilderFactory; @@ -57,23 +57,23 @@ public class EnumerableScanRule extends ConverterRule { * @param algBuilderFactory Builder for relational expressions */ public EnumerableScanRule( AlgBuilderFactory algBuilderFactory ) { - super( LogicalScan.class, (Predicate) r -> true, Convention.NONE, EnumerableConvention.INSTANCE, algBuilderFactory, "EnumerableScanRule" ); + super( LogicalRelScan.class, (Predicate) r -> true, Convention.NONE, EnumerableConvention.INSTANCE, algBuilderFactory, "EnumerableScanRule" ); } @Override public AlgNode convert( AlgNode alg ) { - LogicalScan scan = (LogicalScan) alg; - final AlgOptTable algOptTable = scan.getTable(); - final Table table = algOptTable.unwrap( Table.class ); + LogicalRelScan scan = (LogicalRelScan) alg; + final AlgOptEntity algOptEntity = scan.getTable(); + final Table table = algOptEntity.unwrap( Table.class ); if ( !EnumerableScan.canHandle( table ) ) { return null; } - final Expression expression = algOptTable.getExpression( Object.class ); + final Expression expression = algOptEntity.getExpression( Object.class ); if ( expression == null ) { return null; } - return EnumerableScan.create( scan.getCluster(), algOptTable ); + return EnumerableScan.create( scan.getCluster(), algOptEntity ); } } diff --git a/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableTableModify.java b/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableTableModify.java index 5fc69b064d..87a81f846f 100644 --- a/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableTableModify.java +++ b/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableTableModify.java @@ -50,8 +50,8 @@ import org.polypheny.db.algebra.metadata.AlgMetadataQuery; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgOptCost; +import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptPlanner; -import org.polypheny.db.plan.AlgOptTable; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.prepare.Prepare; import org.polypheny.db.rex.RexNode; @@ -74,7 +74,7 @@ public AlgOptCost computeSelfCost( AlgOptPlanner planner, AlgMetadataQuery mq ) } - public EnumerableTableModify( AlgOptCluster cluster, AlgTraitSet traits, AlgOptTable table, Prepare.CatalogReader catalogReader, AlgNode child, Operation operation, List updateColumnList, List sourceExpressionList, boolean flattened ) { + public EnumerableTableModify( AlgOptCluster cluster, AlgTraitSet traits, AlgOptEntity table, Prepare.CatalogReader catalogReader, AlgNode child, Operation operation, List 
updateColumnList, List sourceExpressionList, boolean flattened ) { super( cluster, traits, table, catalogReader, child, operation, updateColumnList, sourceExpressionList, flattened ); assert child.getConvention() instanceof EnumerableConvention; assert getConvention() instanceof EnumerableConvention; diff --git a/core/src/main/java/org/polypheny/db/algebra/AbstractAlgNode.java b/core/src/main/java/org/polypheny/db/algebra/AbstractAlgNode.java index 8d52ead917..bb9c1c81aa 100644 --- a/core/src/main/java/org/polypheny/db/algebra/AbstractAlgNode.java +++ b/core/src/main/java/org/polypheny/db/algebra/AbstractAlgNode.java @@ -54,8 +54,8 @@ import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgOptCost; +import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptPlanner; -import org.polypheny.db.plan.AlgOptTable; import org.polypheny.db.plan.AlgOptUtil; import org.polypheny.db.plan.AlgTrait; import org.polypheny.db.plan.AlgTraitSet; @@ -387,7 +387,7 @@ public final String getDescription() { @Override - public AlgOptTable getTable() { + public AlgOptEntity getTable() { return null; } diff --git a/core/src/main/java/org/polypheny/db/algebra/AlgFieldTrimmer.java b/core/src/main/java/org/polypheny/db/algebra/AlgFieldTrimmer.java index e1d4b236ea..fd35d5732f 100644 --- a/core/src/main/java/org/polypheny/db/algebra/AlgFieldTrimmer.java +++ b/core/src/main/java/org/polypheny/db/algebra/AlgFieldTrimmer.java @@ -62,7 +62,7 @@ import org.polypheny.db.algebra.logical.relational.LogicalJoin; import org.polypheny.db.algebra.logical.relational.LogicalModify; import org.polypheny.db.algebra.logical.relational.LogicalProject; -import org.polypheny.db.algebra.logical.relational.LogicalScan; +import org.polypheny.db.algebra.logical.relational.LogicalRelScan; import org.polypheny.db.algebra.logical.relational.LogicalTableFunctionScan; import org.polypheny.db.algebra.logical.relational.LogicalValues; import org.polypheny.db.algebra.metadata.AlgMetadataQuery; @@ -889,7 +889,7 @@ protected Mapping createMapping( ImmutableBitSet fieldsUsed, int fieldCount ) { /** - * Variant of {@link #trimFields(AlgNode, ImmutableBitSet, Set)} for {@link LogicalScan}. + * Variant of {@link #trimFields(AlgNode, ImmutableBitSet, Set)} for {@link LogicalRelScan}. */ public TrimResult trimFields( final Scan tableAccessRel, ImmutableBitSet fieldsUsed, Set extraFields ) { final int fieldCount = tableAccessRel.getRowType().getFieldCount(); diff --git a/core/src/main/java/org/polypheny/db/algebra/AlgInput.java b/core/src/main/java/org/polypheny/db/algebra/AlgInput.java index 2c22de1fc4..d190993d7b 100644 --- a/core/src/main/java/org/polypheny/db/algebra/AlgInput.java +++ b/core/src/main/java/org/polypheny/db/algebra/AlgInput.java @@ -37,7 +37,7 @@ import java.util.List; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.plan.AlgOptCluster; -import org.polypheny.db.plan.AlgOptTable; +import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.util.ImmutableBitSet; @@ -51,7 +51,7 @@ public interface AlgInput { AlgTraitSet getTraitSet(); - AlgOptTable getTable( String table ); + AlgOptEntity getTable( String table ); /** * Returns the input relational expression. Throws if there is not precisely one input. 
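The AlgInput and AlgNode hunks here replace AlgOptTable with AlgOptEntity across the core interfaces; call sites then read catalog metadata directly off the entity instead of unwrapping the schema Table first, as the UnsupportedFromInsertShuttle hunk below shows. A minimal sketch of the new access path; the helper method is illustrative, the accessors come from the hunks:

    import org.polypheny.db.algebra.AlgNode;
    import org.polypheny.db.plan.AlgOptEntity;

    // Before: node.getTable().getTable().getTableId()  (unwrap the schema Table)
    // After:  the catalog entity hangs directly off the AlgOptEntity.
    static long entityIdOf( AlgNode node ) {
        AlgOptEntity entity = node.getTable();  // may be null for non-scan nodes
        return entity.getCatalogEntity().id;
    }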
diff --git a/core/src/main/java/org/polypheny/db/algebra/AlgNode.java b/core/src/main/java/org/polypheny/db/algebra/AlgNode.java index debd63c841..c902c57739 100644 --- a/core/src/main/java/org/polypheny/db/algebra/AlgNode.java +++ b/core/src/main/java/org/polypheny/db/algebra/AlgNode.java @@ -46,9 +46,9 @@ import org.polypheny.db.catalog.Catalog.NamespaceType; import org.polypheny.db.plan.AlgImplementor; import org.polypheny.db.plan.AlgOptCost; +import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptNode; import org.polypheny.db.plan.AlgOptPlanner; -import org.polypheny.db.plan.AlgOptTable; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.plan.Convention; import org.polypheny.db.rex.RexNode; @@ -259,7 +259,7 @@ public interface AlgNode extends AlgOptNode, Cloneable { * * @return If this relational expression represents an access to a table, returns that table, otherwise returns null */ - AlgOptTable getTable(); + AlgOptEntity getTable(); /** * Returns the name of this relational expression's class, sans package name, for use in explain. For example, for a diff --git a/core/src/main/java/org/polypheny/db/algebra/AlgStructuredTypeFlattener.java b/core/src/main/java/org/polypheny/db/algebra/AlgStructuredTypeFlattener.java index 7468371a3e..b7912e7489 100644 --- a/core/src/main/java/org/polypheny/db/algebra/AlgStructuredTypeFlattener.java +++ b/core/src/main/java/org/polypheny/db/algebra/AlgStructuredTypeFlattener.java @@ -101,7 +101,7 @@ import org.polypheny.db.languages.OperatorRegistry; import org.polypheny.db.nodes.Operator; import org.polypheny.db.plan.AlgOptCluster; -import org.polypheny.db.plan.AlgOptTable.ToAlgContext; +import org.polypheny.db.plan.AlgOptEntity.ToAlgContext; import org.polypheny.db.rex.RexBuilder; import org.polypheny.db.rex.RexCall; import org.polypheny.db.rex.RexCorrelVariable; diff --git a/core/src/main/java/org/polypheny/db/algebra/UnsupportedFromInsertShuttle.java b/core/src/main/java/org/polypheny/db/algebra/UnsupportedFromInsertShuttle.java index 2146b44c8e..b3f37152ba 100644 --- a/core/src/main/java/org/polypheny/db/algebra/UnsupportedFromInsertShuttle.java +++ b/core/src/main/java/org/polypheny/db/algebra/UnsupportedFromInsertShuttle.java @@ -19,6 +19,7 @@ import java.util.Objects; import org.polypheny.db.algebra.core.Modify; import org.polypheny.db.algebra.core.Scan; +import org.polypheny.db.catalog.entity.CatalogObject; import org.polypheny.db.plan.volcano.AlgSubset; public class UnsupportedFromInsertShuttle extends AlgShuttleImpl { @@ -33,7 +34,7 @@ private UnsupportedFromInsertShuttle( Long tableId ) { public static boolean contains( Modify modify ) { - Long id = modify.getTable().getTable().getTableId(); + long id = modify.getTable().getCatalogEntity().id; UnsupportedFromInsertShuttle shuttle = new UnsupportedFromInsertShuttle( id ); modify.accept( shuttle ); return shuttle.containsOtherTableId; @@ -42,7 +43,7 @@ public static boolean contains( Modify modify ) { @Override public AlgNode visit( Scan scan ) { - if ( !Objects.equals( scan.getTable().getTable().getTableId(), tableId ) ) { + if ( !Objects.equals( scan.getTable().getCatalogEntity().id, tableId ) ) { containsOtherTableId = true; } return super.visit( scan ); diff --git a/core/src/main/java/org/polypheny/db/algebra/core/AlgFactories.java b/core/src/main/java/org/polypheny/db/algebra/core/AlgFactories.java index 06f73d190e..e32d341a47 100644 --- a/core/src/main/java/org/polypheny/db/algebra/core/AlgFactories.java +++ 
b/core/src/main/java/org/polypheny/db/algebra/core/AlgFactories.java @@ -59,23 +59,20 @@ import org.polypheny.db.algebra.logical.relational.LogicalMatch; import org.polypheny.db.algebra.logical.relational.LogicalMinus; import org.polypheny.db.algebra.logical.relational.LogicalProject; +import org.polypheny.db.algebra.logical.relational.LogicalRelScan; import org.polypheny.db.algebra.logical.relational.LogicalRelViewScan; -import org.polypheny.db.algebra.logical.relational.LogicalScan; import org.polypheny.db.algebra.logical.relational.LogicalSort; import org.polypheny.db.algebra.logical.relational.LogicalSortExchange; import org.polypheny.db.algebra.logical.relational.LogicalUnion; import org.polypheny.db.algebra.logical.relational.LogicalValues; import org.polypheny.db.algebra.type.AlgDataType; -import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.Catalog.EntityType; -import org.polypheny.db.catalog.entity.CatalogTable; import org.polypheny.db.plan.AlgOptCluster; -import org.polypheny.db.plan.AlgOptTable; -import org.polypheny.db.plan.AlgOptTable.ToAlgContext; +import org.polypheny.db.plan.AlgOptEntity; +import org.polypheny.db.plan.AlgOptEntity.ToAlgContext; import org.polypheny.db.plan.Contexts; import org.polypheny.db.rex.RexLiteral; import org.polypheny.db.rex.RexNode; -import org.polypheny.db.schema.LogicalTable; import org.polypheny.db.schema.TranslatableTable; import org.polypheny.db.tools.AlgBuilder; import org.polypheny.db.tools.AlgBuilderFactory; @@ -540,31 +537,24 @@ public interface ScanFactory { /** * Creates a {@link Scan}. */ - AlgNode createScan( AlgOptCluster cluster, AlgOptTable table ); + AlgNode createScan( AlgOptCluster cluster, AlgOptEntity table ); } /** - * Implementation of {@link ScanFactory} that returns a {@link LogicalScan}. + * Implementation of {@link ScanFactory} that returns a {@link LogicalRelScan}. 
*/ private static class ScanFactoryImpl implements ScanFactory { @Override - public AlgNode createScan( AlgOptCluster cluster, AlgOptTable table ) { + public AlgNode createScan( AlgOptCluster cluster, AlgOptEntity table ) { // Check if RelOptTable contains a View, in this case a LogicalViewScan needs to be created - if ( (table.getTable()) instanceof LogicalTable ) { - Catalog catalog = Catalog.getInstance(); - long idLogical = table.getTable().getTableId(); - CatalogTable catalogTable = catalog.getTable( idLogical ); - if ( catalogTable.entityType == EntityType.VIEW ) { - return LogicalRelViewScan.create( cluster, table ); - } else { - return LogicalScan.create( cluster, table ); - } + if ( table.getCatalogEntity().entityType == EntityType.VIEW ) { + return LogicalRelViewScan.create( cluster, table ); } else { - return LogicalScan.create( cluster, table ); + return LogicalRelScan.create( cluster, table ); } } diff --git a/core/src/main/java/org/polypheny/db/algebra/core/Modify.java b/core/src/main/java/org/polypheny/db/algebra/core/Modify.java index 686e8583bb..5f1b3679b1 100644 --- a/core/src/main/java/org/polypheny/db/algebra/core/Modify.java +++ b/core/src/main/java/org/polypheny/db/algebra/core/Modify.java @@ -48,8 +48,8 @@ import org.polypheny.db.algebra.type.AlgDataTypeFactory; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgOptCost; +import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptPlanner; -import org.polypheny.db.plan.AlgOptTable; import org.polypheny.db.plan.AlgOptUtil; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.prepare.Prepare; @@ -88,7 +88,7 @@ public enum Operation { * The table definition. */ @Getter - protected final AlgOptTable table; + protected final AlgOptEntity table; @Getter private final Operation operation; @Getter @@ -118,7 +118,7 @@ public enum Operation { * @param sourceExpressionList List of value expressions to be set (e.g. exp1, exp2); null if not UPDATE * @param flattened Whether set flattens the input row type */ - protected Modify( AlgOptCluster cluster, AlgTraitSet traitSet, AlgOptTable table, Prepare.CatalogReader catalogReader, AlgNode input, Operation operation, List updateColumnList, List sourceExpressionList, boolean flattened ) { + protected Modify( AlgOptCluster cluster, AlgTraitSet traitSet, AlgOptEntity table, Prepare.CatalogReader catalogReader, AlgNode input, Operation operation, List updateColumnList, List sourceExpressionList, boolean flattened ) { super( cluster, traitSet, input ); this.table = table; this.catalogReader = catalogReader; diff --git a/core/src/main/java/org/polypheny/db/algebra/core/PrimaryKeyCheck.java b/core/src/main/java/org/polypheny/db/algebra/core/PrimaryKeyCheck.java index c49974a1ae..40efdba5f7 100644 --- a/core/src/main/java/org/polypheny/db/algebra/core/PrimaryKeyCheck.java +++ b/core/src/main/java/org/polypheny/db/algebra/core/PrimaryKeyCheck.java @@ -19,20 +19,20 @@ import org.polypheny.db.algebra.AbstractAlgNode; import org.polypheny.db.algebra.AlgNode; -import org.polypheny.db.plan.AlgOptTable; +import org.polypheny.db.plan.AlgOptEntity; public abstract class PrimaryKeyCheck extends AbstractAlgNode { protected AlgNode dbSource; protected AlgNode values; - protected AlgOptTable table; + protected AlgOptEntity table; /** * Creates an AbstractRelNode. 
*/ - public PrimaryKeyCheck( AlgNode dbSource, AlgNode values, AlgOptTable table ) { + public PrimaryKeyCheck( AlgNode dbSource, AlgNode values, AlgOptEntity table ) { super( dbSource.getCluster(), dbSource.getTraitSet() ); this.dbSource = dbSource; this.values = values; diff --git a/core/src/main/java/org/polypheny/db/algebra/core/Scan.java b/core/src/main/java/org/polypheny/db/algebra/core/Scan.java index 37ba5bdee0..d40c1d36ca 100644 --- a/core/src/main/java/org/polypheny/db/algebra/core/Scan.java +++ b/core/src/main/java/org/polypheny/db/algebra/core/Scan.java @@ -48,7 +48,7 @@ import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgOptCost; import org.polypheny.db.plan.AlgOptPlanner; -import org.polypheny.db.plan.AlgOptTable; +import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.rex.RexBuilder; import org.polypheny.db.rex.RexNode; @@ -65,10 +65,10 @@ public abstract class Scan extends AbstractAlgNode { /** * The table definition. */ - protected final AlgOptTable table; + protected final AlgOptEntity table; - protected Scan( AlgOptCluster cluster, AlgTraitSet traitSet, AlgOptTable table ) { + protected Scan( AlgOptCluster cluster, AlgTraitSet traitSet, AlgOptEntity table ) { super( cluster, traitSet ); this.table = table; if ( table.getRelOptSchema() != null ) { @@ -92,7 +92,7 @@ public double estimateRowCount( AlgMetadataQuery mq ) { @Override - public AlgOptTable getTable() { + public AlgOptEntity getTable() { return table; } @@ -115,7 +115,7 @@ public AlgDataType deriveRowType() { /** * Returns an identity projection for the given table. */ - public static ImmutableIntList identity( AlgOptTable table ) { + public static ImmutableIntList identity( AlgOptEntity table ) { return ImmutableIntList.identity( table.getRowType().getFieldCount() ); } diff --git a/core/src/main/java/org/polypheny/db/algebra/core/document/DocumentAlg.java b/core/src/main/java/org/polypheny/db/algebra/core/document/DocumentAlg.java index 87f01f8f15..fb9d24dbb2 100644 --- a/core/src/main/java/org/polypheny/db/algebra/core/document/DocumentAlg.java +++ b/core/src/main/java/org/polypheny/db/algebra/core/document/DocumentAlg.java @@ -17,7 +17,7 @@ package org.polypheny.db.algebra.core.document; import org.polypheny.db.algebra.AlgNode; -import org.polypheny.db.plan.AlgOptTable; +import org.polypheny.db.plan.AlgOptEntity; /** @@ -27,7 +27,7 @@ public interface DocumentAlg { DocType getDocType(); - default AlgOptTable getCollection() { + default AlgOptEntity getCollection() { assert this instanceof AlgNode; return ((AlgNode) this).getTable(); } diff --git a/core/src/main/java/org/polypheny/db/algebra/core/document/DocumentModify.java b/core/src/main/java/org/polypheny/db/algebra/core/document/DocumentModify.java index cd58248433..2544d91074 100644 --- a/core/src/main/java/org/polypheny/db/algebra/core/document/DocumentModify.java +++ b/core/src/main/java/org/polypheny/db/algebra/core/document/DocumentModify.java @@ -23,7 +23,7 @@ import org.polypheny.db.algebra.constant.Kind; import org.polypheny.db.algebra.core.Modify.Operation; import org.polypheny.db.algebra.type.AlgDataType; -import org.polypheny.db.plan.AlgOptTable; +import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptUtil; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.prepare.Prepare.CatalogReader; @@ -34,7 +34,7 @@ public abstract class DocumentModify extends SingleAlg implements DocumentAlg { public final Operation operation; @Getter - 
private final AlgOptTable collection; + private final AlgOptEntity collection; @Getter private final List keys; @Getter @@ -47,7 +47,7 @@ public abstract class DocumentModify extends SingleAlg implements DocumentAlg { * Creates a {@link DocumentModify}. * {@link org.polypheny.db.schema.ModelTrait#DOCUMENT} node, which modifies a collection. */ - protected DocumentModify( AlgTraitSet traits, AlgOptTable collection, CatalogReader catalogReader, AlgNode input, Operation operation, List keys, List updates ) { + protected DocumentModify( AlgTraitSet traits, AlgOptEntity collection, CatalogReader catalogReader, AlgNode input, Operation operation, List keys, List updates ) { super( input.getCluster(), input.getTraitSet(), input ); this.operation = operation; this.collection = collection; diff --git a/core/src/main/java/org/polypheny/db/algebra/core/document/DocumentScan.java b/core/src/main/java/org/polypheny/db/algebra/core/document/DocumentScan.java index c4be1cf954..fe76fd3bc9 100644 --- a/core/src/main/java/org/polypheny/db/algebra/core/document/DocumentScan.java +++ b/core/src/main/java/org/polypheny/db/algebra/core/document/DocumentScan.java @@ -27,7 +27,7 @@ import org.polypheny.db.algebra.type.DocumentType; import org.polypheny.db.catalog.Catalog.NamespaceType; import org.polypheny.db.plan.AlgOptCluster; -import org.polypheny.db.plan.AlgOptTable; +import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.type.PolyType; @@ -35,20 +35,20 @@ public abstract class DocumentScan extends AbstractAlgNode implements DocumentAlg { @Getter - private final AlgOptTable collection; + private final AlgOptEntity collection; /** * Creates a {@link DocumentScan}. * {@link org.polypheny.db.schema.ModelTrait#DOCUMENT} node, which scans the content of a collection. 
*/ - public DocumentScan( AlgOptCluster cluster, AlgTraitSet traitSet, AlgOptTable collection ) { + public DocumentScan( AlgOptCluster cluster, AlgTraitSet traitSet, AlgOptEntity collection ) { super( cluster, traitSet ); this.collection = collection; AlgDataType docType = cluster.getTypeFactory().createPolyType( PolyType.DOCUMENT ); // todo dl: change after RowType refactor - if ( this.collection.getTable().getSchemaType() == NamespaceType.DOCUMENT ) { + if ( this.collection.getCatalogEntity().namespaceType == NamespaceType.DOCUMENT ) { this.rowType = new DocumentType(); } else { List list = collection.getRowType().getFieldList().stream() @@ -61,7 +61,7 @@ public DocumentScan( AlgOptCluster cluster, AlgTraitSet traitSet, AlgOptTable co @Override public String algCompareString() { - return "$" + getClass().getSimpleName() + "$" + collection.getTable().getTableId() + "$"; + return "$" + getClass().getSimpleName() + "$" + collection.getCatalogEntity().id + "$"; } diff --git a/core/src/main/java/org/polypheny/db/algebra/core/document/DocumentValues.java b/core/src/main/java/org/polypheny/db/algebra/core/document/DocumentValues.java index 816bb18f21..6069458275 100644 --- a/core/src/main/java/org/polypheny/db/algebra/core/document/DocumentValues.java +++ b/core/src/main/java/org/polypheny/db/algebra/core/document/DocumentValues.java @@ -31,7 +31,7 @@ import org.polypheny.db.algebra.logical.relational.LogicalValues; import org.polypheny.db.algebra.type.DocumentType; import org.polypheny.db.plan.AlgOptCluster; -import org.polypheny.db.plan.AlgOptTable; +import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.prepare.Prepare.CatalogReader; import org.polypheny.db.rex.RexBuilder; @@ -130,7 +130,7 @@ public LogicalValues getRelationalEquivalent() { } - public List getRelationalEquivalent( List values, List entities, CatalogReader catalogReader ) { + public List getRelationalEquivalent( List values, List entities, CatalogReader catalogReader ) { return List.of( getRelationalEquivalent() ); } diff --git a/core/src/main/java/org/polypheny/db/algebra/core/relational/RelationalTransformable.java b/core/src/main/java/org/polypheny/db/algebra/core/relational/RelationalTransformable.java index 17787158d8..f1fbeed88e 100644 --- a/core/src/main/java/org/polypheny/db/algebra/core/relational/RelationalTransformable.java +++ b/core/src/main/java/org/polypheny/db/algebra/core/relational/RelationalTransformable.java @@ -20,7 +20,7 @@ import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.core.Modify; import org.polypheny.db.algebra.core.Modify.Operation; -import org.polypheny.db.plan.AlgOptTable; +import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.prepare.Prepare.CatalogReader; import org.polypheny.db.schema.ModifiableTable; @@ -35,10 +35,10 @@ default CatalogReader getCatalogReader() { } - List getRelationalEquivalent( List values, List entities, CatalogReader catalogReader ); + List getRelationalEquivalent( List values, List entities, CatalogReader catalogReader ); - static Modify getModify( AlgOptTable table, CatalogReader catalogReader, AlgNode alg, Operation operation ) { + static Modify getModify( AlgOptEntity table, CatalogReader catalogReader, AlgNode alg, Operation operation ) { return table.unwrap( ModifiableTable.class ).toModificationAlg( alg.getCluster(), table, catalogReader, alg, operation, null, null, true ); } diff --git a/core/src/main/java/org/polypheny/db/algebra/externalize/AlgJsonReader.java 
b/core/src/main/java/org/polypheny/db/algebra/externalize/AlgJsonReader.java index eb06b66908..5c95b55ce4 100644 --- a/core/src/main/java/org/polypheny/db/algebra/externalize/AlgJsonReader.java +++ b/core/src/main/java/org/polypheny/db/algebra/externalize/AlgJsonReader.java @@ -54,8 +54,8 @@ import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.plan.AlgOptCluster; +import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptSchema; -import org.polypheny.db.plan.AlgOptTable; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.plan.Convention; import org.polypheny.db.schema.Schema; @@ -125,7 +125,7 @@ public AlgTraitSet getTraitSet() { @Override - public AlgOptTable getTable( String table ) { + public AlgOptEntity getTable( String table ) { final List list; if ( jsonAlg.get( table ) instanceof String ) { String str = (String) jsonAlg.get( table ); diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalConstraintEnforcer.java b/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalConstraintEnforcer.java index 4ef4beb0ef..4255cbd5c0 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalConstraintEnforcer.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalConstraintEnforcer.java @@ -35,7 +35,7 @@ import org.polypheny.db.algebra.exceptions.ConstraintViolationException; import org.polypheny.db.algebra.logical.relational.LogicalFilter; import org.polypheny.db.algebra.logical.relational.LogicalModify; -import org.polypheny.db.algebra.logical.relational.LogicalScan; +import org.polypheny.db.algebra.logical.relational.LogicalRelScan; import org.polypheny.db.algebra.operators.OperatorName; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.Catalog.ConstraintType; @@ -45,18 +45,15 @@ import org.polypheny.db.catalog.entity.CatalogPrimaryKey; import org.polypheny.db.catalog.entity.CatalogSchema; import org.polypheny.db.catalog.entity.CatalogTable; -import org.polypheny.db.catalog.exceptions.UnknownSchemaException; -import org.polypheny.db.catalog.exceptions.UnknownTableException; import org.polypheny.db.config.RuntimeConfig; import org.polypheny.db.languages.OperatorRegistry; import org.polypheny.db.plan.AlgOptCluster; +import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptSchema; -import org.polypheny.db.plan.AlgOptTable; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.rex.RexBuilder; import org.polypheny.db.rex.RexInputRef; import org.polypheny.db.rex.RexNode; -import org.polypheny.db.schema.LogicalTable; import org.polypheny.db.tools.AlgBuilder; import org.polypheny.db.transaction.Statement; @@ -135,7 +132,7 @@ private static EnforcementInformation getControl( AlgNode node, Statement statem //builder.scan( table.getSchemaName(), table.name ); for ( CatalogConstraint constraint : constraints ) { builder.clear(); - final AlgNode scan = LogicalScan.create( modify.getCluster(), modify.getTable() ); + final AlgNode scan = LogicalRelScan.create( modify.getCluster(), modify.getTable() ); builder.push( scan ); // Enforce uniqueness between the already existing values and the new values List keys = constraint.key @@ -167,10 +164,10 @@ private static EnforcementInformation getControl( AlgNode node, Statement statem for ( final CatalogForeignKey foreignKey : Stream.concat( foreignKeys.stream(), exportedKeys.stream() ).collect( Collectors.toList() ) ) { builder.clear(); 
final AlgOptSchema algOptSchema = modify.getCatalogReader(); - final AlgOptTable scanOptTable = algOptSchema.getTableForMember( Collections.singletonList( foreignKey.getTableName() ) ); - final AlgOptTable refOptTable = algOptSchema.getTableForMember( Collections.singletonList( foreignKey.getReferencedKeyTableName() ) ); - final AlgNode scan = LogicalScan.create( modify.getCluster(), scanOptTable ); - final LogicalScan ref = LogicalScan.create( modify.getCluster(), refOptTable ); + final AlgOptEntity scanOptTable = algOptSchema.getTableForMember( Collections.singletonList( foreignKey.getTableName() ) ); + final AlgOptEntity refOptTable = algOptSchema.getTableForMember( Collections.singletonList( foreignKey.getReferencedKeyTableName() ) ); + final AlgNode scan = LogicalRelScan.create( modify.getCluster(), scanOptTable ); + final LogicalRelScan ref = LogicalRelScan.create( modify.getCluster(), refOptTable ); builder.push( scan ); builder.project( foreignKey.getColumnNames().stream().map( builder::field ).collect( Collectors.toList() ) ); @@ -418,32 +415,11 @@ public AlgNode accept( AlgShuttle shuttle ) { public static CatalogTable getCatalogTable( Modify modify ) { - Catalog catalog = Catalog.getInstance(); - String schemaName; - if ( modify.getTable().getTable() instanceof LogicalTable ) { - schemaName = ((LogicalTable) modify.getTable().getTable()).getLogicalSchemaName(); - } else if ( modify.getTable().getQualifiedName().size() == 2 ) { - schemaName = modify.getTable().getQualifiedName().get( 0 ); - } else if ( modify.getTable().getQualifiedName().size() == 3 ) { - schemaName = modify.getTable().getQualifiedName().get( 1 ); - } else { - throw new RuntimeException( "The schema was not provided correctly!" ); - } - final CatalogSchema schema; - try { - schema = catalog.getSchema( Catalog.defaultDatabaseId, schemaName ); - } catch ( UnknownSchemaException e ) { - throw new RuntimeException( "The schema was not provided correctly!" ); - } - - try { - String tableName = getEntityName( modify, schema ); - return catalog.getTable( schema.id, tableName ); - - } catch ( UnknownTableException e ) { - log.error( "Caught exception", e ); + if ( modify.getTable() == null ) { throw new RuntimeException( "The table was not found in the catalog!" ); } + + return (CatalogTable) modify.getTable().getCatalogEntity(); } diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/document/LogicalDocumentModify.java b/core/src/main/java/org/polypheny/db/algebra/logical/document/LogicalDocumentModify.java index 1b9badc3e0..c6a773e76a 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/document/LogicalDocumentModify.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/document/LogicalDocumentModify.java @@ -22,7 +22,7 @@ import org.polypheny.db.algebra.core.Modify.Operation; import org.polypheny.db.algebra.core.document.DocumentModify; import org.polypheny.db.algebra.core.relational.RelationalTransformable; -import org.polypheny.db.plan.AlgOptTable; +import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.prepare.Prepare.CatalogReader; import org.polypheny.db.rex.RexNode; @@ -33,12 +33,12 @@ public class LogicalDocumentModify extends DocumentModify implements RelationalT /** * Subclass of {@link DocumentModify} not targeted at any particular engine or calling convention. 
*/ - public LogicalDocumentModify( AlgTraitSet traits, AlgOptTable table, CatalogReader catalogReader, AlgNode input, Operation operation, List keys, List updates ) { + public LogicalDocumentModify( AlgTraitSet traits, AlgOptEntity table, CatalogReader catalogReader, AlgNode input, Operation operation, List keys, List updates ) { super( traits, table, catalogReader, input, operation, keys, updates ); } - public static LogicalDocumentModify create( AlgOptTable table, AlgNode input, CatalogReader catalogReader, Operation operation, List keys, List updates ) { + public static LogicalDocumentModify create( AlgOptEntity table, AlgNode input, CatalogReader catalogReader, Operation operation, List keys, List updates ) { return new LogicalDocumentModify( input.getTraitSet(), table, catalogReader, input, operation, keys, updates ); } @@ -50,7 +50,7 @@ public AlgNode copy( AlgTraitSet traitSet, List inputs ) { @Override - public List getRelationalEquivalent( List values, List entities, CatalogReader catalogReader ) { + public List getRelationalEquivalent( List values, List entities, CatalogReader catalogReader ) { return List.of( RelationalTransformable.getModify( entities.get( 0 ), catalogReader, values.get( 0 ), operation ) ); } diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/document/LogicalDocumentScan.java b/core/src/main/java/org/polypheny/db/algebra/logical/document/LogicalDocumentScan.java index e36ebea353..e058252473 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/document/LogicalDocumentScan.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/document/LogicalDocumentScan.java @@ -21,10 +21,10 @@ import org.polypheny.db.algebra.AlgShuttle; import org.polypheny.db.algebra.core.document.DocumentScan; import org.polypheny.db.algebra.core.relational.RelationalTransformable; -import org.polypheny.db.algebra.logical.relational.LogicalScan; +import org.polypheny.db.algebra.logical.relational.LogicalRelScan; import org.polypheny.db.plan.AlgOptCluster; +import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptRule; -import org.polypheny.db.plan.AlgOptTable; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.prepare.Prepare.CatalogReader; import org.polypheny.db.schema.ModelTrait; @@ -35,19 +35,19 @@ public class LogicalDocumentScan extends DocumentScan implements RelationalTrans /** * Subclass of {@link DocumentScan} not targeted at any particular engine or calling convention. 
*/ - public LogicalDocumentScan( AlgOptCluster cluster, AlgTraitSet traitSet, AlgOptTable document ) { + public LogicalDocumentScan( AlgOptCluster cluster, AlgTraitSet traitSet, AlgOptEntity document ) { super( cluster, traitSet.replace( ModelTrait.DOCUMENT ), document ); } - public static AlgNode create( AlgOptCluster cluster, AlgOptTable collection ) { + public static AlgNode create( AlgOptCluster cluster, AlgOptEntity collection ) { return new LogicalDocumentScan( cluster, cluster.traitSet(), collection ); } @Override - public List getRelationalEquivalent( List values, List entities, CatalogReader catalogReader ) { - return List.of( AlgOptRule.convert( LogicalScan.create( getCluster(), entities.get( 0 ) ), ModelTrait.RELATIONAL ) ); + public List getRelationalEquivalent( List values, List entities, CatalogReader catalogReader ) { + return List.of( AlgOptRule.convert( LogicalRelScan.create( getCluster(), entities.get( 0 ) ), ModelTrait.RELATIONAL ) ); } diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalGraph.java b/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalGraph.java index 2b6d8109d8..06f7209926 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalGraph.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalGraph.java @@ -26,8 +26,8 @@ import org.polypheny.db.algebra.core.relational.RelationalTransformable; import org.polypheny.db.algebra.type.AlgDataTypeImpl; import org.polypheny.db.algebra.type.AlgProtoDataType; -import org.polypheny.db.plan.AlgOptTable; -import org.polypheny.db.plan.AlgOptTable.ToAlgContext; +import org.polypheny.db.plan.AlgOptEntity; +import org.polypheny.db.plan.AlgOptEntity.ToAlgContext; import org.polypheny.db.prepare.Prepare.CatalogReader; import org.polypheny.db.schema.Function; import org.polypheny.db.schema.Schema; @@ -55,7 +55,7 @@ public LogicalGraph( long id ) { @Override - public List getRelationalEquivalent( List values, List entities, CatalogReader catalogReader ) { + public List getRelationalEquivalent( List values, List entities, CatalogReader catalogReader ) { return null; } diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgModify.java b/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgModify.java index 013c769170..20d1632973 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgModify.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgModify.java @@ -26,7 +26,7 @@ import org.polypheny.db.algebra.core.lpg.LpgModify; import org.polypheny.db.algebra.core.relational.RelationalTransformable; import org.polypheny.db.plan.AlgOptCluster; -import org.polypheny.db.plan.AlgOptTable; +import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptUtil; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.prepare.Prepare.CatalogReader; @@ -52,7 +52,7 @@ public AlgNode copy( AlgTraitSet traitSet, List inputs ) { @Override - public List getRelationalEquivalent( List inputs, List entities, CatalogReader catalogReader ) { + public List getRelationalEquivalent( List inputs, List entities, CatalogReader catalogReader ) { List modifies = new ArrayList<>(); // modify of nodes diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgScan.java b/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgScan.java index 5dc07e0106..ee6df1db93 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgScan.java +++ 
b/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgScan.java @@ -25,12 +25,12 @@ import org.polypheny.db.algebra.core.lpg.LpgScan; import org.polypheny.db.algebra.core.relational.RelationalTransformable; import org.polypheny.db.algebra.logical.relational.LogicalJoin; -import org.polypheny.db.algebra.logical.relational.LogicalScan; +import org.polypheny.db.algebra.logical.relational.LogicalRelScan; import org.polypheny.db.algebra.operators.OperatorName; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.languages.OperatorRegistry; import org.polypheny.db.plan.AlgOptCluster; -import org.polypheny.db.plan.AlgOptTable; +import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.prepare.Prepare.CatalogReader; import org.polypheny.db.rex.RexBuilder; @@ -51,11 +51,11 @@ public LogicalLpgScan( AlgOptCluster cluster, AlgTraitSet traitSet, Translatable @Override - public List getRelationalEquivalent( List inputs, List entities, CatalogReader catalogReader ) { + public List getRelationalEquivalent( List inputs, List entities, CatalogReader catalogReader ) { assert !entities.isEmpty(); AlgTraitSet out = getTraitSet().replace( ModelTrait.RELATIONAL ); - LogicalScan nodes = new LogicalScan( getCluster(), out, entities.get( 0 ) ); - LogicalScan nodesProperty = new LogicalScan( getCluster(), out, entities.get( 1 ) ); + LogicalRelScan nodes = new LogicalRelScan( getCluster(), out, entities.get( 0 ) ); + LogicalRelScan nodesProperty = new LogicalRelScan( getCluster(), out, entities.get( 1 ) ); RexBuilder builder = getCluster().getRexBuilder(); @@ -70,8 +70,8 @@ public List getRelationalEquivalent( List inputs, List getRelationalEquivalent( List values, List entities, CatalogReader catalogReader ) { + public List getRelationalEquivalent( List values, List entities, CatalogReader catalogReader ) { AlgTraitSet out = traitSet.replace( ModelTrait.RELATIONAL ); AlgOptCluster cluster = AlgOptCluster.create( getCluster().getPlanner(), getCluster().getRexBuilder() ); diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalModify.java b/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalModify.java index ad4dd41fd6..ebfba416a8 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalModify.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalModify.java @@ -24,7 +24,7 @@ import org.polypheny.db.algebra.AlgShuttle; import org.polypheny.db.algebra.core.Modify; import org.polypheny.db.plan.AlgOptCluster; -import org.polypheny.db.plan.AlgOptTable; +import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.plan.Convention; import org.polypheny.db.prepare.Prepare.CatalogReader; @@ -48,7 +48,7 @@ public final class LogicalModify extends Modify { * * Use {@link #create} unless you know what you're doing. 
*/ - public LogicalModify( AlgOptCluster cluster, AlgTraitSet traitSet, AlgOptTable table, CatalogReader schema, AlgNode input, Operation operation, List updateColumnList, List sourceExpressionList, boolean flattened ) { + public LogicalModify( AlgOptCluster cluster, AlgTraitSet traitSet, AlgOptEntity table, CatalogReader schema, AlgNode input, Operation operation, List updateColumnList, List sourceExpressionList, boolean flattened ) { super( cluster, traitSet.replace( ModelTrait.RELATIONAL ), table, schema, input, operation, updateColumnList, sourceExpressionList, flattened ); } @@ -56,7 +56,7 @@ public LogicalModify( AlgOptCluster cluster, AlgTraitSet traitSet, AlgOptTable t /** * Creates a LogicalModify. */ - public static LogicalModify create( AlgOptTable table, CatalogReader schema, AlgNode input, Operation operation, List updateColumnList, List sourceExpressionList, boolean flattened ) { + public static LogicalModify create( AlgOptEntity table, CatalogReader schema, AlgNode input, Operation operation, List updateColumnList, List sourceExpressionList, boolean flattened ) { final AlgOptCluster cluster = input.getCluster(); final AlgTraitSet traitSet = cluster.traitSetOf( Convention.NONE ); return new LogicalModify( cluster, traitSet, table, schema, input, operation, updateColumnList, sourceExpressionList, flattened ); diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalScan.java b/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelScan.java similarity index 88% rename from core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalScan.java rename to core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelScan.java index 55db1dac39..daa822c034 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalScan.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelScan.java @@ -41,7 +41,7 @@ import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.core.Scan; import org.polypheny.db.plan.AlgOptCluster; -import org.polypheny.db.plan.AlgOptTable; +import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.plan.Convention; import org.polypheny.db.schema.ModelTrait; @@ -49,7 +49,7 @@ /** - * A LogicalScan reads all the rows from a {@link AlgOptTable}. + * A LogicalRelScan reads all the rows from a {@link AlgOptEntity}. * * If the table is a net.sf.saffron.ext.JdbcTable, then this is literally possible. But for other kinds of tables, * there may be many ways to read the data from the table. For some kinds of table, it may not even be possible to read all of * * can. It is the optimizer's responsibility to find these ways, by applying transformation rules. */ -public final class LogicalScan extends Scan { +public final class LogicalRelScan extends Scan { /** * * Use {@link #create} unless you know what you're doing. */ - public LogicalScan( AlgOptCluster cluster, AlgTraitSet traitSet, AlgOptTable table ) { + public LogicalRelScan( AlgOptCluster cluster, AlgTraitSet traitSet, AlgOptEntity table ) { super( cluster, traitSet, table ); } /** * Creates a LogicalScan by parsing serialized output.
*/ - public LogicalScan( AlgInput input ) { + public LogicalRelScan( AlgInput input ) { super( input ); } @@ -102,10 +102,10 @@ public AlgNode copy( AlgTraitSet traitSet, List inputs ) { * Creates a LogicalScan. * * @param cluster Cluster - * @param algOptTable Table + * @param algOptEntity Table */ - public static LogicalScan create( AlgOptCluster cluster, final AlgOptTable algOptTable ) { - final Table table = algOptTable.unwrap( Table.class ); + public static LogicalRelScan create( AlgOptCluster cluster, final AlgOptEntity algOptEntity ) { + final Table table = algOptEntity.unwrap( Table.class ); final AlgTraitSet traitSet = cluster.traitSetOf( Convention.NONE ) @@ -119,7 +119,7 @@ public static LogicalScan create( AlgOptCluster cluster, final AlgOptTable algOp return ImmutableList.of(); } ); - return new LogicalScan( cluster, traitSet, algOptTable ); + return new LogicalRelScan( cluster, traitSet, algOptEntity ); } } diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelViewScan.java b/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelViewScan.java index 8996224df3..ba0998cdcb 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelViewScan.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelViewScan.java @@ -25,16 +25,13 @@ import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.core.Scan; import org.polypheny.db.algebra.type.AlgDataType; -import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.entity.CatalogTable; import org.polypheny.db.catalog.entity.CatalogView; import org.polypheny.db.plan.AlgOptCluster; -import org.polypheny.db.plan.AlgOptTable; +import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.plan.Convention; import org.polypheny.db.rex.RexBuilder; import org.polypheny.db.rex.RexNode; -import org.polypheny.db.schema.LogicalTable; import org.polypheny.db.schema.Table; @@ -46,15 +43,15 @@ public class LogicalRelViewScan extends Scan { private final AlgCollation algCollation; - public LogicalRelViewScan( AlgOptCluster cluster, AlgTraitSet traitSet, AlgOptTable table, AlgNode algNode, AlgCollation algCollation ) { + public LogicalRelViewScan( AlgOptCluster cluster, AlgTraitSet traitSet, AlgOptEntity table, AlgNode algNode, AlgCollation algCollation ) { super( cluster, traitSet, table ); this.algNode = algNode; this.algCollation = algCollation; } - public static AlgNode create( AlgOptCluster cluster, final AlgOptTable algOptTable ) { - final Table table = algOptTable.unwrap( Table.class ); + public static AlgNode create( AlgOptCluster cluster, final AlgOptEntity algOptEntity ) { + final Table table = algOptEntity.unwrap( Table.class ); final AlgTraitSet traitSet = cluster.traitSetOf( Convention.NONE ) @@ -67,13 +64,10 @@ public static AlgNode create( AlgOptCluster cluster, final AlgOptTable algOptTab return ImmutableList.of(); } ); - Catalog catalog = Catalog.getInstance(); + CatalogView catalogView = (CatalogView) algOptEntity.getCatalogEntity(); + AlgCollation algCollation = catalogView.getAlgCollation(); - long idLogical = ((LogicalTable) algOptTable.getTable()).getTableId(); - CatalogTable catalogTable = catalog.getTable( idLogical ); - AlgCollation algCollation = ((CatalogView) catalogTable).getAlgCollation(); - - return new LogicalRelViewScan( cluster, traitSet, algOptTable, ((CatalogView) catalogTable).prepareView( cluster ), algCollation ); + return new 
LogicalRelViewScan( cluster, traitSet, algOptEntity, catalogView.prepareView( cluster ), algCollation ); } diff --git a/core/src/main/java/org/polypheny/db/algebra/metadata/AlgColumnOrigin.java b/core/src/main/java/org/polypheny/db/algebra/metadata/AlgColumnOrigin.java index 492b74b658..35ef9b5a9f 100644 --- a/core/src/main/java/org/polypheny/db/algebra/metadata/AlgColumnOrigin.java +++ b/core/src/main/java/org/polypheny/db/algebra/metadata/AlgColumnOrigin.java @@ -34,7 +34,7 @@ package org.polypheny.db.algebra.metadata; -import org.polypheny.db.plan.AlgOptTable; +import org.polypheny.db.plan.AlgOptEntity; /** @@ -42,14 +42,14 @@ */ public class AlgColumnOrigin { - private final AlgOptTable originTable; + private final AlgOptEntity originTable; private final int iOriginColumn; private final boolean isDerived; - public AlgColumnOrigin( AlgOptTable originTable, int iOriginColumn, boolean isDerived ) { + public AlgColumnOrigin( AlgOptEntity originTable, int iOriginColumn, boolean isDerived ) { this.originTable = originTable; this.iOriginColumn = iOriginColumn; this.isDerived = isDerived; @@ -59,7 +59,7 @@ public AlgColumnOrigin( AlgOptTable originTable, int iOriginColumn, boolean isDe /** * @return table of origin */ - public AlgOptTable getOriginTable() { + public AlgOptEntity getOriginTable() { return originTable; } diff --git a/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdCollation.java b/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdCollation.java index 7c729cfd87..96993add7e 100644 --- a/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdCollation.java +++ b/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdCollation.java @@ -73,7 +73,7 @@ import org.polypheny.db.algebra.core.Values; import org.polypheny.db.algebra.core.Window; import org.polypheny.db.algebra.type.AlgDataType; -import org.polypheny.db.plan.AlgOptTable; +import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.hep.HepAlgVertex; import org.polypheny.db.plan.volcano.AlgSubset; import org.polypheny.db.rex.RexCall; @@ -204,7 +204,7 @@ public ImmutableList collations( AlgSubset alg, AlgMetadataQuery m /** * Helper method to determine a {@link Scan}'s collation. */ - public static List table( AlgOptTable table ) { + public static List table( AlgOptEntity table ) { return table.getCollationList(); } diff --git a/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdColumnOrigins.java b/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdColumnOrigins.java index 351ce2ddc2..25a68fc745 100644 --- a/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdColumnOrigins.java +++ b/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdColumnOrigins.java @@ -47,7 +47,7 @@ import org.polypheny.db.algebra.core.SetOp; import org.polypheny.db.algebra.core.Sort; import org.polypheny.db.algebra.core.TableFunctionScan; -import org.polypheny.db.plan.AlgOptTable; +import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.rex.RexInputRef; import org.polypheny.db.rex.RexNode; import org.polypheny.db.rex.RexVisitor; @@ -222,7 +222,7 @@ public Set getColumnOrigins( AlgNode alg, AlgMetadataQuery mq, final Set set = new HashSet<>(); - AlgOptTable table = alg.getTable(); + AlgOptEntity table = alg.getTable(); if ( table == null ) { // Somebody is making column values up out of thin air, like a VALUES clause, so we return an empty set. 
return set; diff --git a/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdDistribution.java b/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdDistribution.java index 1c974f4609..81a620298b 100644 --- a/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdDistribution.java +++ b/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdDistribution.java @@ -49,7 +49,7 @@ import org.polypheny.db.algebra.core.Sort; import org.polypheny.db.algebra.core.Values; import org.polypheny.db.algebra.type.AlgDataType; -import org.polypheny.db.plan.AlgOptTable; +import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.hep.HepAlgVertex; import org.polypheny.db.rex.RexLiteral; import org.polypheny.db.rex.RexNode; @@ -130,7 +130,7 @@ public AlgDistribution distribution( HepAlgVertex alg, AlgMetadataQuery mq ) { /** * Helper method to determine a {@link Scan}'s distribution. */ - public static AlgDistribution table( AlgOptTable table ) { + public static AlgDistribution table( AlgOptEntity table ) { return table.getDistribution(); } diff --git a/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMetadataQuery.java b/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMetadataQuery.java index 6b47a08c22..765307b9c5 100644 --- a/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMetadataQuery.java +++ b/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMetadataQuery.java @@ -49,8 +49,8 @@ import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.constant.ExplainLevel; import org.polypheny.db.plan.AlgOptCost; +import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptPredicateList; -import org.polypheny.db.plan.AlgOptTable; import org.polypheny.db.rex.RexNode; import org.polypheny.db.rex.RexTableInputRef.AlgTableRef; import org.polypheny.db.util.ImmutableBitSet; @@ -392,7 +392,7 @@ public Set getTableReferences( AlgNode alg ) { * @param alg the AlgNode * @return the table, if the {@link AlgNode} is a simple table; otherwise null */ - public AlgOptTable getTableOrigin( AlgNode alg ) { + public AlgOptEntity getTableOrigin( AlgNode alg ) { // Determine the simple origin of the first column in the AlgNode. If it's simple, then that means that the underlying table is also simple, even if the column itself is derived.
if ( alg.getRowType().getFieldCount() == 0 ) { return null; diff --git a/core/src/main/java/org/polypheny/db/algebra/metadata/JaninoRelMetadataProvider.java b/core/src/main/java/org/polypheny/db/algebra/metadata/JaninoRelMetadataProvider.java index 6968401005..151aa3d3ef 100644 --- a/core/src/main/java/org/polypheny/db/algebra/metadata/JaninoRelMetadataProvider.java +++ b/core/src/main/java/org/polypheny/db/algebra/metadata/JaninoRelMetadataProvider.java @@ -109,7 +109,7 @@ import org.polypheny.db.algebra.logical.relational.LogicalMinus; import org.polypheny.db.algebra.logical.relational.LogicalModify; import org.polypheny.db.algebra.logical.relational.LogicalProject; -import org.polypheny.db.algebra.logical.relational.LogicalScan; +import org.polypheny.db.algebra.logical.relational.LogicalRelScan; import org.polypheny.db.algebra.logical.relational.LogicalSort; import org.polypheny.db.algebra.logical.relational.LogicalTableFunctionScan; import org.polypheny.db.algebra.logical.relational.LogicalUnion; @@ -173,7 +173,7 @@ public class JaninoRelMetadataProvider implements AlgMetadataProvider { LogicalSort.class, LogicalTableFunctionScan.class, LogicalModify.class, - LogicalScan.class, + LogicalRelScan.class, LogicalUnion.class, LogicalValues.class, LogicalWindow.class, diff --git a/core/src/main/java/org/polypheny/db/algebra/mutable/MutableTableModify.java b/core/src/main/java/org/polypheny/db/algebra/mutable/MutableTableModify.java index e552c8202a..28ff74558b 100644 --- a/core/src/main/java/org/polypheny/db/algebra/mutable/MutableTableModify.java +++ b/core/src/main/java/org/polypheny/db/algebra/mutable/MutableTableModify.java @@ -39,7 +39,7 @@ import org.polypheny.db.algebra.core.Modify; import org.polypheny.db.algebra.core.Modify.Operation; import org.polypheny.db.algebra.type.AlgDataType; -import org.polypheny.db.plan.AlgOptTable; +import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.prepare.Prepare; import org.polypheny.db.rex.RexNode; @@ -50,14 +50,14 @@ public class MutableTableModify extends MutableSingleAlg { public final Prepare.CatalogReader catalogReader; - public final AlgOptTable table; + public final AlgOptEntity table; public final Operation operation; public final List updateColumnList; public final List sourceExpressionList; public final boolean flattened; - private MutableTableModify( AlgDataType rowType, MutableAlg input, AlgOptTable table, Prepare.CatalogReader catalogReader, Operation operation, List updateColumnList, List sourceExpressionList, boolean flattened ) { + private MutableTableModify( AlgDataType rowType, MutableAlg input, AlgOptEntity table, Prepare.CatalogReader catalogReader, Operation operation, List updateColumnList, List sourceExpressionList, boolean flattened ) { super( MutableAlgType.TABLE_MODIFY, rowType, input ); this.table = table; this.catalogReader = catalogReader; @@ -80,7 +80,7 @@ private MutableTableModify( AlgDataType rowType, MutableAlg input, AlgOptTable t * @param sourceExpressionList List of value expressions to be set (e.g. 
exp1, exp2); null if not UPDATE * @param flattened Whether set flattens the input row type */ - public static MutableTableModify of( AlgDataType rowType, MutableAlg input, AlgOptTable table, Prepare.CatalogReader catalogReader, Operation operation, List updateColumnList, List sourceExpressionList, boolean flattened ) { + public static MutableTableModify of( AlgDataType rowType, MutableAlg input, AlgOptEntity table, Prepare.CatalogReader catalogReader, Operation operation, List updateColumnList, List sourceExpressionList, boolean flattened ) { return new MutableTableModify( rowType, input, table, catalogReader, operation, updateColumnList, sourceExpressionList, flattened ); } diff --git a/core/src/main/java/org/polypheny/db/algebra/rules/FilterScanRule.java b/core/src/main/java/org/polypheny/db/algebra/rules/FilterScanRule.java index 49c0b30a99..cb3ce663a3 100644 --- a/core/src/main/java/org/polypheny/db/algebra/rules/FilterScanRule.java +++ b/core/src/main/java/org/polypheny/db/algebra/rules/FilterScanRule.java @@ -40,10 +40,10 @@ import org.polypheny.db.algebra.core.Filter; import org.polypheny.db.algebra.core.Scan; import org.polypheny.db.interpreter.Bindables.BindableScan; +import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptRule; import org.polypheny.db.plan.AlgOptRuleCall; import org.polypheny.db.plan.AlgOptRuleOperand; -import org.polypheny.db.plan.AlgOptTable; import org.polypheny.db.rex.RexNode; import org.polypheny.db.rex.RexUtil; import org.polypheny.db.schema.FilterableTable; @@ -117,7 +117,7 @@ protected FilterScanRule( AlgOptRuleOperand operand, AlgBuilderFactory algBuilde public static boolean test( Scan scan ) { // We can only push filters into a FilterableTable or ProjectableFilterableTable. - final AlgOptTable table = scan.getTable(); + final AlgOptEntity table = scan.getTable(); return table.unwrap( FilterableTable.class ) != null || table.unwrap( ProjectableFilterableTable.class ) != null; } diff --git a/core/src/main/java/org/polypheny/db/algebra/rules/LoptOptimizeJoinRule.java b/core/src/main/java/org/polypheny/db/algebra/rules/LoptOptimizeJoinRule.java index b83dea12c5..c7533f0b6b 100644 --- a/core/src/main/java/org/polypheny/db/algebra/rules/LoptOptimizeJoinRule.java +++ b/core/src/main/java/org/polypheny/db/algebra/rules/LoptOptimizeJoinRule.java @@ -59,9 +59,9 @@ import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.languages.OperatorRegistry; import org.polypheny.db.plan.AlgOptCost; +import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptRule; import org.polypheny.db.plan.AlgOptRuleCall; -import org.polypheny.db.plan.AlgOptTable; import org.polypheny.db.plan.AlgOptUtil; import org.polypheny.db.rex.RexBuilder; import org.polypheny.db.rex.RexCall; @@ -252,10 +252,10 @@ private void setJoinKey( ImmutableBitSet.Builder joinKeys, ImmutableBitSet.Build */ private void findRemovableSelfJoins( AlgMetadataQuery mq, LoptMultiJoin multiJoin ) { // Candidates for self-joins must be simple factors - Map simpleFactors = getSimpleFactors( mq, multiJoin ); + Map simpleFactors = getSimpleFactors( mq, multiJoin ); // See if a simple factor is repeated and therefore potentially is part of a self-join. Restrict each factor to at most one self-join. 
- final List repeatedTables = new ArrayList<>(); + final List repeatedTables = new ArrayList<>(); final TreeSet sortedFactors = new TreeSet<>(); sortedFactors.addAll( simpleFactors.keySet() ); final Map selfJoinPairs = new HashMap<>(); @@ -298,8 +298,8 @@ private void findRemovableSelfJoins( AlgMetadataQuery mq, LoptMultiJoin multiJoi * @param multiJoin join factors being optimized * @return map consisting of the simple factors and the tables they correspond */ - private Map getSimpleFactors( AlgMetadataQuery mq, LoptMultiJoin multiJoin ) { - final Map returnList = new HashMap<>(); + private Map getSimpleFactors( AlgMetadataQuery mq, LoptMultiJoin multiJoin ) { + final Map returnList = new HashMap<>(); // Loop through all join factors and locate the ones where each column referenced from the factor is not derived and originates from the same underlying table. Also, discard factors that // are null-generating or will be removed because of semijoins. @@ -311,7 +311,7 @@ private Map getSimpleFactors( AlgMetadataQuery mq, LoptMul continue; } final AlgNode alg = multiJoin.getJoinFactor( factIdx ); - final AlgOptTable table = mq.getTableOrigin( alg ); + final AlgOptEntity table = mq.getTableOrigin( alg ); if ( table != null ) { returnList.put( factIdx, table ); } @@ -1509,11 +1509,11 @@ public static boolean isRemovableSelfJoin( Join joinRel ) { // Make sure the join is between the same simple factor final AlgMetadataQuery mq = joinRel.getCluster().getMetadataQuery(); - final AlgOptTable leftTable = mq.getTableOrigin( left ); + final AlgOptEntity leftTable = mq.getTableOrigin( left ); if ( leftTable == null ) { return false; } - final AlgOptTable rightTable = mq.getTableOrigin( right ); + final AlgOptEntity rightTable = mq.getTableOrigin( right ); if ( rightTable == null ) { return false; } diff --git a/core/src/main/java/org/polypheny/db/algebra/rules/LoptSemiJoinOptimizer.java b/core/src/main/java/org/polypheny/db/algebra/rules/LoptSemiJoinOptimizer.java index d43ec96ac4..b77abe086d 100644 --- a/core/src/main/java/org/polypheny/db/algebra/rules/LoptSemiJoinOptimizer.java +++ b/core/src/main/java/org/polypheny/db/algebra/rules/LoptSemiJoinOptimizer.java @@ -53,7 +53,7 @@ import org.polypheny.db.algebra.operators.OperatorName; import org.polypheny.db.languages.OperatorRegistry; import org.polypheny.db.plan.AlgOptCost; -import org.polypheny.db.plan.AlgOptTable; +import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptUtil; import org.polypheny.db.rex.RexBuilder; import org.polypheny.db.rex.RexCall; @@ -236,7 +236,7 @@ private SemiJoin findSemiJoinIndexByCost( LoptMultiJoin multiJoin, List // Make sure all the fact table keys originate from the same table and are simple column references final List actualLeftKeys = new ArrayList<>(); - LcsTable factTable = validateKeys( factRel, leftKeys, rightKeys, actualLeftKeys ); + LcsEntity factTable = validateKeys( factRel, leftKeys, rightKeys, actualLeftKeys ); if ( factTable == null ) { return null; } @@ -337,9 +337,9 @@ private RexNode adjustSemiJoinCondition( LoptMultiJoin multiJoin, int leftAdjust * @param actualLeftKeys the remaining valid fact table semijoin keys * @return the underlying fact table if the semijoin keys are valid; otherwise null */ - private LcsTable validateKeys( AlgNode factRel, List leftKeys, List rightKeys, List actualLeftKeys ) { + private LcsEntity validateKeys( AlgNode factRel, List leftKeys, List rightKeys, List actualLeftKeys ) { int keyIdx = 0; - AlgOptTable theTable = null; + AlgOptEntity theTable = 
null; ListIterator keyIter = leftKeys.listIterator(); while ( keyIter.hasNext() ) { boolean removeKey = false; @@ -349,9 +349,9 @@ private LcsTable validateKeys( AlgNode factRel, List leftKeys, List leftKeys, Listbuilder() - .addAll( algOptTable.getQualifiedName() ) + .addAll( algOptEntity.getQualifiedName() ) .add( "(STREAM)" ).build() ); - final LogicalScan newScan = LogicalScan.create( cluster, algOptTable2 ); + final LogicalRelScan newScan = LogicalRelScan.create( cluster, algOptEntity2 ); call.transformTo( newScan ); } } @@ -311,8 +311,8 @@ public DeltaScanToEmptyRule( AlgBuilderFactory algBuilderFactory ) { public void onMatch( AlgOptRuleCall call ) { final Delta delta = call.alg( 0 ); final Scan scan = call.alg( 1 ); - final AlgOptTable algOptTable = scan.getTable(); - final StreamableTable streamableTable = algOptTable.unwrap( StreamableTable.class ); + final AlgOptEntity algOptEntity = scan.getTable(); + final StreamableTable streamableTable = algOptEntity.unwrap( StreamableTable.class ); final AlgBuilder builder = call.builder(); if ( streamableTable == null ) { call.transformTo( builder.values( delta.getRowType() ).build() ); diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogCollection.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogCollection.java index 19b65f4d47..a42ebd6bbc 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogCollection.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogCollection.java @@ -22,16 +22,19 @@ import java.util.Collection; import java.util.List; import java.util.stream.Collectors; +import lombok.Getter; import lombok.NonNull; import lombok.SneakyThrows; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.Catalog.EntityType; +import org.polypheny.db.catalog.Catalog.NamespaceType; -public class CatalogCollection implements CatalogObject { +public class CatalogCollection extends CatalogEntity implements CatalogObject { private static final long serialVersionUID = -6490762948368178584L; + @Getter public final long id; public final ImmutableList placements; public final String name; @@ -42,6 +45,7 @@ public class CatalogCollection implements CatalogObject { public CatalogCollection( long databaseId, long namespaceId, long id, String name, @NonNull Collection placements, EntityType type, String physicalName ) { + super( id, EntityType.ENTITY, NamespaceType.DOCUMENT ); this.id = id; this.databaseId = databaseId; this.namespaceId = namespaceId; diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogEntity.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogEntity.java new file mode 100644 index 0000000000..4f50873c53 --- /dev/null +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogEntity.java @@ -0,0 +1,37 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.db.catalog.entity; + +import java.io.Serializable; +import org.polypheny.db.catalog.Catalog.EntityType; +import org.polypheny.db.catalog.Catalog.NamespaceType; +import org.polypheny.db.schema.Wrapper; + +public abstract class CatalogEntity implements Wrapper, Serializable { + + public final long id; + public final EntityType entityType; + public final NamespaceType namespaceType; + + + protected CatalogEntity( long id, EntityType type, NamespaceType namespaceType ) { + this.id = id; + this.entityType = type; + this.namespaceType = namespaceType; + } + +} diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogGraphDatabase.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogGraphDatabase.java index f5b7ec556d..4482615c0c 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogGraphDatabase.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogGraphDatabase.java @@ -25,10 +25,12 @@ import java.util.stream.Collectors; import lombok.EqualsAndHashCode; import lombok.NonNull; +import org.polypheny.db.catalog.Catalog.EntityType; +import org.polypheny.db.catalog.Catalog.NamespaceType; @EqualsAndHashCode -public class CatalogGraphDatabase implements CatalogObject, Comparable { +public class CatalogGraphDatabase extends CatalogEntity implements CatalogObject, Comparable { private static final long serialVersionUID = 7343856827901459672L; @@ -43,6 +45,7 @@ public class CatalogGraphDatabase implements CatalogObject, Comparable placements, boolean caseSensitive ) { + super( id, EntityType.ENTITY, NamespaceType.GRAPH ); this.id = id; this.name = name; this.ownerId = ownerId; diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogTable.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogTable.java index b15509935b..219e77e427 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogTable.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogTable.java @@ -32,11 +32,12 @@ import org.polypheny.db.partition.properties.PartitionProperty; -@EqualsAndHashCode -public class CatalogTable implements CatalogObject, Comparable { +@EqualsAndHashCode(callSuper = false) +public class CatalogTable extends CatalogEntity implements CatalogObject, Comparable { private static final long serialVersionUID = 4653390333258552102L; + @Getter public final long id; public final String name; public final ImmutableList fieldIds; @@ -67,6 +68,7 @@ public CatalogTable( @NonNull final ImmutableList dataPlacements, boolean modifiable, PartitionProperty partitionProperty ) { + super( id, type, NamespaceType.RELATIONAL ); this.id = id; this.name = name; this.fieldIds = fieldIds; @@ -101,6 +103,7 @@ public CatalogTable( boolean modifiable, PartitionProperty partitionProperty, ImmutableList connectedViews ) { + super( id, type, NamespaceType.RELATIONAL ); this.id = id; this.name = name; this.fieldIds = fieldIds; diff --git a/core/src/main/java/org/polypheny/db/interpreter/Bindables.java b/core/src/main/java/org/polypheny/db/interpreter/Bindables.java index 22dce15f31..5436cff2de 100644 --- a/core/src/main/java/org/polypheny/db/interpreter/Bindables.java +++ b/core/src/main/java/org/polypheny/db/interpreter/Bindables.java @@ -68,7 +68,7 @@ import org.polypheny.db.algebra.logical.relational.LogicalFilter; import org.polypheny.db.algebra.logical.relational.LogicalJoin; import org.polypheny.db.algebra.logical.relational.LogicalProject; -import org.polypheny.db.algebra.logical.relational.LogicalScan; 
+import org.polypheny.db.algebra.logical.relational.LogicalRelScan; import org.polypheny.db.algebra.logical.relational.LogicalUnion; import org.polypheny.db.algebra.logical.relational.LogicalValues; import org.polypheny.db.algebra.logical.relational.LogicalWindow; @@ -79,10 +79,10 @@ import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgOptCost; +import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptPlanner; import org.polypheny.db.plan.AlgOptRule; import org.polypheny.db.plan.AlgOptRuleCall; -import org.polypheny.db.plan.AlgOptTable; import org.polypheny.db.plan.AlgOptUtil; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.plan.Convention; @@ -162,14 +162,14 @@ public static class BindableScanRule extends AlgOptRule { * @param algBuilderFactory Builder for relational expressions */ public BindableScanRule( AlgBuilderFactory algBuilderFactory ) { - super( operand( LogicalScan.class, none() ), algBuilderFactory, null ); + super( operand( LogicalRelScan.class, none() ), algBuilderFactory, null ); } @Override public void onMatch( AlgOptRuleCall call ) { - final LogicalScan scan = call.alg( 0 ); - final AlgOptTable table = scan.getTable(); + final LogicalRelScan scan = call.alg( 0 ); + final AlgOptEntity table = scan.getTable(); if ( BindableScan.canHandle( table ) ) { call.transformTo( BindableScan.create( scan.getCluster(), table ) ); } @@ -192,7 +192,7 @@ public static class BindableScan extends Scan implements BindableAlg { * * Use {@link #create} unless you know what you are doing. */ - BindableScan( AlgOptCluster cluster, AlgTraitSet traitSet, AlgOptTable table, ImmutableList filters, ImmutableIntList projects ) { + BindableScan( AlgOptCluster cluster, AlgTraitSet traitSet, AlgOptEntity table, ImmutableList filters, ImmutableIntList projects ) { super( cluster, traitSet, table ); this.filters = Objects.requireNonNull( filters ); this.projects = Objects.requireNonNull( projects ); @@ -203,16 +203,16 @@ public static class BindableScan extends Scan implements BindableAlg { /** * Creates a BindableScan. */ - public static BindableScan create( AlgOptCluster cluster, AlgOptTable algOptTable ) { - return create( cluster, algOptTable, ImmutableList.of(), identity( algOptTable ) ); + public static BindableScan create( AlgOptCluster cluster, AlgOptEntity algOptEntity ) { + return create( cluster, algOptEntity, ImmutableList.of(), identity( algOptEntity ) ); } /** * Creates a BindableScan. 
*/ - public static BindableScan create( AlgOptCluster cluster, AlgOptTable algOptTable, List filters, List projects ) { - final Table table = algOptTable.unwrap( Table.class ); + public static BindableScan create( AlgOptCluster cluster, AlgOptEntity algOptEntity, List filters, List projects ) { + final Table table = algOptEntity.unwrap( Table.class ); final AlgTraitSet traitSet = cluster.traitSetOf( BindableConvention.INSTANCE ) .replace( table.getSchemaType().getModelTrait() ) @@ -222,7 +222,7 @@ public static BindableScan create( AlgOptCluster cluster, AlgOptTable algOptTabl } return ImmutableList.of(); } ); - return new BindableScan( cluster, traitSet, algOptTable, ImmutableList.copyOf( filters ), ImmutableIntList.copyOf( projects ) ); + return new BindableScan( cluster, traitSet, algOptEntity, ImmutableList.copyOf( filters ), ImmutableIntList.copyOf( projects ) ); } @@ -274,7 +274,7 @@ public String algCompareString() { } - public static boolean canHandle( AlgOptTable table ) { + public static boolean canHandle( AlgOptEntity table ) { return table.unwrap( ScannableTable.class ) != null || table.unwrap( FilterableTable.class ) != null || table.unwrap( ProjectableFilterableTable.class ) != null; diff --git a/core/src/main/java/org/polypheny/db/interpreter/ScanNode.java b/core/src/main/java/org/polypheny/db/interpreter/ScanNode.java index eb72eaad1b..d33b18bdbd 100644 --- a/core/src/main/java/org/polypheny/db/interpreter/ScanNode.java +++ b/core/src/main/java/org/polypheny/db/interpreter/ScanNode.java @@ -52,7 +52,7 @@ import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; import org.polypheny.db.algebra.type.AlgDataTypeField; -import org.polypheny.db.plan.AlgOptTable; +import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptUtil; import org.polypheny.db.rex.RexNode; import org.polypheny.db.rex.RexUtil; @@ -92,29 +92,29 @@ public void run() { * Tries various table SPIs, and negotiates with the table which filters and projects it can implement. Adds to the Enumerable implementations of any filters and projects that cannot be implemented by the table. 
*/ static ScanNode create( Compiler compiler, Scan alg, ImmutableList filters, ImmutableIntList projects ) { - final AlgOptTable algOptTable = alg.getTable(); - final ProjectableFilterableTable pfTable = algOptTable.unwrap( ProjectableFilterableTable.class ); + final AlgOptEntity algOptEntity = alg.getTable(); + final ProjectableFilterableTable pfTable = algOptEntity.unwrap( ProjectableFilterableTable.class ); if ( pfTable != null ) { return createProjectableFilterable( compiler, alg, filters, projects, pfTable ); } - final FilterableTable filterableTable = algOptTable.unwrap( FilterableTable.class ); + final FilterableTable filterableTable = algOptEntity.unwrap( FilterableTable.class ); if ( filterableTable != null ) { return createFilterable( compiler, alg, filters, projects, filterableTable ); } - final ScannableTable scannableTable = algOptTable.unwrap( ScannableTable.class ); + final ScannableTable scannableTable = algOptEntity.unwrap( ScannableTable.class ); if ( scannableTable != null ) { return createScannable( compiler, alg, filters, projects, scannableTable ); } //noinspection unchecked - final Enumerable enumerable = algOptTable.unwrap( Enumerable.class ); + final Enumerable enumerable = algOptEntity.unwrap( Enumerable.class ); if ( enumerable != null ) { return createEnumerable( compiler, alg, enumerable, null, filters, projects ); } - final QueryableTable queryableTable = algOptTable.unwrap( QueryableTable.class ); + final QueryableTable queryableTable = algOptEntity.unwrap( QueryableTable.class ); if ( queryableTable != null ) { return createQueryable( compiler, alg, filters, projects, queryableTable ); } - throw new AssertionError( "cannot convert table " + algOptTable + " to enumerable" ); + throw new AssertionError( "cannot convert table " + algOptEntity + " to enumerable" ); } @@ -126,16 +126,16 @@ private static ScanNode createScannable( Compiler compiler, Scan alg, ImmutableL private static ScanNode createQueryable( Compiler compiler, Scan alg, ImmutableList filters, ImmutableIntList projects, QueryableTable queryableTable ) { final DataContext root = compiler.getDataContext(); - final AlgOptTable algOptTable = alg.getTable(); + final AlgOptEntity algOptEntity = alg.getTable(); final Type elementType = queryableTable.getElementType(); SchemaPlus schema = root.getRootSchema(); - for ( String name : Util.skipLast( algOptTable.getQualifiedName() ) ) { + for ( String name : Util.skipLast( algOptEntity.getQualifiedName() ) ) { schema = schema.getSubSchema( name ); } final Enumerable rowEnumerable; if ( elementType instanceof Class ) { //noinspection unchecked - final Queryable queryable = Schemas.queryable( root, (Class) elementType, algOptTable.getQualifiedName() ); + final Queryable queryable = Schemas.queryable( root, (Class) elementType, algOptEntity.getQualifiedName() ); ImmutableList.Builder fieldBuilder = ImmutableList.builder(); Class type = (Class) elementType; for ( Field field : type.getFields() ) { @@ -157,7 +157,7 @@ private static ScanNode createQueryable( Compiler compiler, Scan alg, ImmutableL return new Row( values ); } ); } else { - rowEnumerable = Schemas.queryable( root, Row.class, algOptTable.getQualifiedName() ); + rowEnumerable = Schemas.queryable( root, Row.class, algOptEntity.getQualifiedName() ); } return createEnumerable( compiler, alg, rowEnumerable, null, filters, projects ); } diff --git a/core/src/main/java/org/polypheny/db/plan/AlgOptAbstractTable.java b/core/src/main/java/org/polypheny/db/plan/AlgOptAbstractEntity.java similarity index 87% 
rename from core/src/main/java/org/polypheny/db/plan/AlgOptAbstractTable.java rename to core/src/main/java/org/polypheny/db/plan/AlgOptAbstractEntity.java index 66bb64e1cb..179cd6be37 100644 --- a/core/src/main/java/org/polypheny/db/plan/AlgOptAbstractTable.java +++ b/core/src/main/java/org/polypheny/db/plan/AlgOptAbstractEntity.java @@ -43,25 +43,25 @@ import org.polypheny.db.algebra.AlgDistributions; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.AlgReferentialConstraint; -import org.polypheny.db.algebra.logical.relational.LogicalScan; +import org.polypheny.db.algebra.logical.relational.LogicalRelScan; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeField; -import org.polypheny.db.prepare.AlgOptTableImpl; +import org.polypheny.db.prepare.AlgOptEntityImpl; import org.polypheny.db.schema.ColumnStrategy; import org.polypheny.db.util.ImmutableBitSet; /** - * Partial implementation of {@link AlgOptTable}. + * Partial implementation of {@link AlgOptEntity}. */ -public abstract class AlgOptAbstractTable implements AlgOptTable { +public abstract class AlgOptAbstractEntity implements AlgOptEntity { protected final AlgOptSchema schema; protected final AlgDataType rowType; protected final String name; - protected AlgOptAbstractTable( AlgOptSchema schema, String name, AlgDataType rowType ) { + protected AlgOptAbstractEntity( AlgOptSchema schema, String name, AlgDataType rowType ) { this.schema = schema; this.name = name; this.rowType = rowType; @@ -134,7 +134,7 @@ public List getReferentialConstraints() { @Override public AlgNode toAlg( ToAlgContext context, AlgTraitSet traitSet ) { - return LogicalScan.create( context.getCluster(), this ); + return LogicalRelScan.create( context.getCluster(), this ); } @@ -145,14 +145,14 @@ public Expression getExpression( Class clazz ) { @Override - public AlgOptTable extend( List extendedFields ) { + public AlgOptEntity extend( List extendedFields ) { throw new UnsupportedOperationException(); } @Override public List getColumnStrategies() { - return AlgOptTableImpl.columnStrategies( this ); + return AlgOptEntityImpl.columnStrategies( this ); } } diff --git a/core/src/main/java/org/polypheny/db/plan/AlgOptTable.java b/core/src/main/java/org/polypheny/db/plan/AlgOptEntity.java similarity index 93% rename from core/src/main/java/org/polypheny/db/plan/AlgOptTable.java rename to core/src/main/java/org/polypheny/db/plan/AlgOptEntity.java index 5b981cdacf..362032033a 100644 --- a/core/src/main/java/org/polypheny/db/plan/AlgOptTable.java +++ b/core/src/main/java/org/polypheny/db/plan/AlgOptEntity.java @@ -40,11 +40,11 @@ import org.polypheny.db.algebra.AlgDistribution; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.AlgReferentialConstraint; -import org.polypheny.db.algebra.logical.relational.LogicalScan; +import org.polypheny.db.algebra.logical.relational.LogicalRelScan; import org.polypheny.db.algebra.metadata.AlgMetadataQuery; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeField; -import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.schema.ColumnStrategy; import org.polypheny.db.schema.Table; import org.polypheny.db.schema.Wrapper; @@ -54,7 +54,7 @@ /** * Represents a relational dataset in a {@link AlgOptSchema}. It has methods to describe and implement itself. 
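 *
 * <p>A minimal usage sketch, assuming a schema that can resolve the illustrative name "emps" (the row-count accessor is an assumption here, not shown in this patch):
 * <pre>
 * AlgOptEntity emps = algOptSchema.getTableForMember( ImmutableList.of( "emps" ) );
 * double rows = emps.getRowCount();                  // an estimate, not an exact count
 * AlgNode scan = emps.toAlg( context, traitSet );    // typically a LogicalRelScan
 * </pre>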
*/ -public interface AlgOptTable extends Wrapper { +public interface AlgOptEntity extends Wrapper { /** * Obtains an identifier for this table. The identifier must be unique with respect to the Connection producing this table. @@ -63,8 +63,6 @@ public interface AlgOptTable extends Wrapper { */ List getQualifiedName(); - CatalogTable getCatalogTable(); - /** * Returns an estimate of the number of rows in the table. */ @@ -84,7 +82,7 @@ public interface AlgOptTable extends Wrapper { * Converts this table into a {@link AlgNode relational expression}. * * The {@link AlgOptPlanner planner} calls this method to convert a table into an initial - * relational expression, generally something abstract, such as a {@link LogicalScan}, then optimizes this + * relational expression, generally something abstract, such as a {@link LogicalRelScan}, then optimizes this * expression by applying {@link AlgOptRule rules} to transform it into more efficient access methods for this table. */ AlgNode toAlg( ToAlgContext context, AlgTraitSet traitSet ); @@ -130,7 +128,7 @@ public interface AlgOptTable extends Wrapper { * The extended table includes the fields of this base table plus the extended fields that do not have the same name as * a field in the base table. */ - AlgOptTable extend( List extendedFields ); + AlgOptEntity extend( List extendedFields ); /** * Returns a list describing how each column is populated. The list has the same number of entries as there are fields, @@ -138,11 +136,13 @@ public interface AlgOptTable extends Wrapper { */ List getColumnStrategies(); - + @Deprecated default Table getTable() { return null; } + CatalogEntity getCatalogEntity(); + /** * Contains the context needed to convert a table into a relational expression. diff --git a/core/src/main/java/org/polypheny/db/plan/AlgOptSchema.java b/core/src/main/java/org/polypheny/db/plan/AlgOptSchema.java index 347b22aedf..68ec723cd7 100644 --- a/core/src/main/java/org/polypheny/db/plan/AlgOptSchema.java +++ b/core/src/main/java/org/polypheny/db/plan/AlgOptSchema.java @@ -38,12 +38,12 @@ /** - * A RelOptSchema is a set of {@link AlgOptTable} objects. + * A RelOptSchema is a set of {@link AlgOptEntity} objects. */ public interface AlgOptSchema { /** - * Retrieves a {@link AlgOptTable} based upon a member access. + * Retrieves a {@link AlgOptEntity} based upon a member access. * * For example, the Saffron expression salesSchema.emps would be resolved using a call to salesSchema.getTableForMember(new String[]{"emps" }). * @@ -51,6 +51,6 @@ public interface AlgOptSchema { * * @param names Qualified name */ - AlgOptTable getTableForMember( List names ); + AlgOptEntity getTableForMember( List names ); } diff --git a/core/src/main/java/org/polypheny/db/plan/AlgOptSchemaWithSampling.java b/core/src/main/java/org/polypheny/db/plan/AlgOptSchemaWithSampling.java index 8751383520..90ee0674e4 100644 --- a/core/src/main/java/org/polypheny/db/plan/AlgOptSchemaWithSampling.java +++ b/core/src/main/java/org/polypheny/db/plan/AlgOptSchemaWithSampling.java @@ -45,14 +45,14 @@ public interface AlgOptSchemaWithSampling extends AlgOptSchema { /** - * Retrieves a {@link AlgOptTable} based upon a member access, using a sample dataset if it exists. + * Retrieves a {@link AlgOptEntity} based upon a member access, using a sample dataset if it exists. 
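 *
 * <p>A hedged call sketch (names are illustrative):
 * <pre>
 * boolean[] usedDataset = { false };
 * AlgOptEntity entity = schema.getTableForMember( names, "sample", usedDataset );
 * // usedDataset[0] is set to true only if the sample dataset was substituted
 * </pre>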
* * @param names Compound name of table * @param datasetName Name of sample dataset to substitute, if it exists; null to not look for a sample * @param usedDataset Output parameter which is set to true if a sample dataset is found; may be null * @return Table, or null if not found */ - AlgOptTable getTableForMember( List names, String datasetName, boolean[] usedDataset ); + AlgOptEntity getTableForMember( List names, String datasetName, boolean[] usedDataset ); } diff --git a/core/src/main/java/org/polypheny/db/plan/AlgOptUtil.java b/core/src/main/java/org/polypheny/db/plan/AlgOptUtil.java index f5dd509efe..d29edebbba 100644 --- a/core/src/main/java/org/polypheny/db/plan/AlgOptUtil.java +++ b/core/src/main/java/org/polypheny/db/plan/AlgOptUtil.java @@ -187,7 +187,7 @@ public static boolean isOrder( AlgNode alg ) { /** * Returns a set of tables used by this expression or its children */ - public static Set findTables( AlgNode alg ) { + public static Set findTables( AlgNode alg ) { return new LinkedHashSet<>( findAllTables( alg ) ); } @@ -195,9 +195,9 @@ public static Set findTables( AlgNode alg ) { /** * Returns a list of all tables used by this expression or its children */ - public static List findAllTables( AlgNode alg ) { + public static List findAllTables( AlgNode alg ) { final Multimap, AlgNode> nodes = AlgMetadataQuery.instance().getNodeTypes( alg ); - final List usedTables = new ArrayList<>(); + final List usedTables = new ArrayList<>(); for ( Entry, Collection> e : nodes.asMap().entrySet() ) { if ( Scan.class.isAssignableFrom( e.getKey() ) ) { for ( AlgNode node : e.getValue() ) { diff --git a/core/src/main/java/org/polypheny/db/plan/SubstitutionVisitor.java b/core/src/main/java/org/polypheny/db/plan/SubstitutionVisitor.java index d68dd792b7..a19c6dde1c 100644 --- a/core/src/main/java/org/polypheny/db/plan/SubstitutionVisitor.java +++ b/core/src/main/java/org/polypheny/db/plan/SubstitutionVisitor.java @@ -63,7 +63,7 @@ import org.polypheny.db.algebra.logical.relational.LogicalFilter; import org.polypheny.db.algebra.logical.relational.LogicalJoin; import org.polypheny.db.algebra.logical.relational.LogicalProject; -import org.polypheny.db.algebra.logical.relational.LogicalScan; +import org.polypheny.db.algebra.logical.relational.LogicalRelScan; import org.polypheny.db.algebra.logical.relational.LogicalUnion; import org.polypheny.db.algebra.mutable.Holder; import org.polypheny.db.algebra.mutable.MutableAggregate; @@ -121,7 +121,7 @@ * Uses a bottom-up matching algorithm. Nodes do not need to be identical. At each level, returns the residue. * * The inputs must only include the core relational operators: - * {@link LogicalScan}, + * {@link LogicalRelScan}, * {@link LogicalFilter}, * {@link LogicalProject}, * {@link LogicalJoin}, @@ -956,7 +956,7 @@ protected static Operand target( int ordinal ) { /** * Implementation of {@link UnifyRule} that matches if the query is already equal to the target. * - * Matches scans to the same table, because these will be {@link MutableScan}s with the same {@link LogicalScan} instance. + * Matches scans to the same table, because these will be {@link MutableScan}s with the same {@link LogicalRelScan} instance. */ private static class TrivialRule extends AbstractUnifyRule { @@ -980,7 +980,7 @@ public UnifyResult apply( UnifyRuleCall call ) { /** - * Implementation of {@link UnifyRule} that matches {@link LogicalScan}. + * Implementation of {@link UnifyRule} that matches {@link LogicalRelScan}. 
*/ private static class ScanToProjectUnifyRule extends AbstractUnifyRule { diff --git a/core/src/main/java/org/polypheny/db/prepare/AlgOptTableImpl.java b/core/src/main/java/org/polypheny/db/prepare/AlgOptEntityImpl.java similarity index 85% rename from core/src/main/java/org/polypheny/db/prepare/AlgOptTableImpl.java rename to core/src/main/java/org/polypheny/db/prepare/AlgOptEntityImpl.java index 29e5e446cf..0502be745d 100644 --- a/core/src/main/java/org/polypheny/db/prepare/AlgOptTableImpl.java +++ b/core/src/main/java/org/polypheny/db/prepare/AlgOptEntityImpl.java @@ -51,17 +51,18 @@ import org.polypheny.db.algebra.AlgReferentialConstraint; import org.polypheny.db.algebra.constant.Modality; import org.polypheny.db.algebra.constant.Monotonicity; -import org.polypheny.db.algebra.logical.relational.LogicalScan; +import org.polypheny.db.algebra.logical.relational.LogicalRelScan; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; import org.polypheny.db.algebra.type.AlgDataTypeFactoryImpl; import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.algebra.type.AlgRecordType; -import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.plan.AlgOptCluster; +import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptSchema; -import org.polypheny.db.plan.AlgOptTable; import org.polypheny.db.plan.AlgTraitSet; +import org.polypheny.db.prepare.Prepare.AbstractPreparingEntity; import org.polypheny.db.runtime.Hook; import org.polypheny.db.schema.ColumnStrategy; import org.polypheny.db.schema.FilterableTable; @@ -84,9 +85,9 @@ /** - * Implementation of {@link AlgOptTable}. + * Implementation of {@link AlgOptEntity}. 
*/ -public class AlgOptTableImpl extends Prepare.AbstractPreparingTable { +public class AlgOptEntityImpl extends AbstractPreparingEntity { private final transient AlgOptSchema schema; private final AlgDataType rowType; @@ -96,7 +97,7 @@ public class AlgOptTableImpl extends Prepare.AbstractPreparingTable { @Getter @Nullable - private final CatalogTable catalogTable; + private final CatalogEntity catalogEntity; @Nullable private final transient Function, Expression> expressionFunction; private final ImmutableList names; @@ -109,30 +110,30 @@ public class AlgOptTableImpl extends Prepare.AbstractPreparingTable { private final Double rowCount; - private AlgOptTableImpl( + private AlgOptEntityImpl( AlgOptSchema schema, AlgDataType rowType, List names, Table table, - CatalogTable catalogTable, + CatalogEntity catalogEntity, Function, Expression> expressionFunction, Double rowCount ) { this.schema = schema; this.rowType = Objects.requireNonNull( rowType ); this.names = ImmutableList.copyOf( names ); this.table = table; // may be null - this.catalogTable = catalogTable; + this.catalogEntity = catalogEntity; this.expressionFunction = expressionFunction; // may be null this.rowCount = rowCount; // may be null } - public static AlgOptTableImpl create( AlgOptSchema schema, AlgDataType rowType, List names, Expression expression ) { - return new AlgOptTableImpl( schema, rowType, names, null, null, c -> expression, null ); + public static AlgOptEntityImpl create( AlgOptSchema schema, AlgDataType rowType, List names, Expression expression ) { + return new AlgOptEntityImpl( schema, rowType, names, null, null, c -> expression, null ); } - public static AlgOptTableImpl create( AlgOptSchema schema, AlgDataType rowType, final PolyphenyDbSchema.TableEntry tableEntry, CatalogTable catalogTable, Double count ) { + public static AlgOptEntityImpl create( AlgOptSchema schema, AlgDataType rowType, final PolyphenyDbSchema.TableEntry tableEntry, CatalogEntity catalogEntity, Double count ) { final Table table = tableEntry.getTable(); Double rowCount; if ( count == null ) { @@ -141,15 +142,15 @@ public static AlgOptTableImpl create( AlgOptSchema schema, AlgDataType rowType, rowCount = count; } - return new AlgOptTableImpl( schema, rowType, tableEntry.path(), table, catalogTable, getClassExpressionFunction( tableEntry, table ), rowCount ); + return new AlgOptEntityImpl( schema, rowType, tableEntry.path(), table, catalogEntity, getClassExpressionFunction( tableEntry, table ), rowCount ); } /** * Creates a copy of this RelOptTable. The new RelOptTable will have newRowType. 
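 * The copy shares the schema, qualified name, underlying table, and row-count estimate of the original; only the row type differs.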
*/ - public AlgOptTableImpl copy( AlgDataType newRowType ) { - return new AlgOptTableImpl( this.schema, newRowType, this.names, this.table, this.catalogTable, this.expressionFunction, this.rowCount ); + public AlgOptEntityImpl copy( AlgDataType newRowType ) { + return new AlgOptEntityImpl( this.schema, newRowType, this.names, this.table, this.catalogEntity, this.expressionFunction, this.rowCount ); } @@ -176,11 +177,11 @@ private static Function, Expression> getClassExpressionFunction( final } - public static AlgOptTableImpl create( AlgOptSchema schema, AlgDataType rowType, Table table, CatalogTable catalogTable, ImmutableList names ) { + public static AlgOptEntityImpl create( AlgOptSchema schema, AlgDataType rowType, Table table, CatalogEntity catalogEntity, ImmutableList names ) { assert table instanceof TranslatableTable || table instanceof ScannableTable || table instanceof ModifiableTable; - return new AlgOptTableImpl( schema, rowType, names, table, catalogTable, null, null ); + return new AlgOptEntityImpl( schema, rowType, names, table, catalogEntity, null, null ); } @@ -215,9 +216,9 @@ public Expression getExpression( Class clazz ) { @Override - protected AlgOptTable extend( Table extendedTable ) { + protected AlgOptEntity extend( Table extendedTable ) { final AlgDataType extendedRowType = extendedTable.getRowType( AlgDataTypeFactory.DEFAULT ); - return new AlgOptTableImpl( + return new AlgOptEntityImpl( getRelOptSchema(), extendedRowType, getQualifiedName(), @@ -230,9 +231,9 @@ protected AlgOptTable extend( Table extendedTable ) { @Override public boolean equals( Object obj ) { - return obj instanceof AlgOptTableImpl - && this.rowType.equals( ((AlgOptTableImpl) obj).getRowType() ) - && this.table == ((AlgOptTableImpl) obj).table; + return obj instanceof AlgOptEntityImpl + && this.rowType.equals( ((AlgOptEntityImpl) obj).getRowType() ) + && this.table == ((AlgOptEntityImpl) obj).table; } @@ -269,8 +270,8 @@ public AlgNode toAlg( ToAlgContext context, AlgTraitSet traitSet ) { // immutable RelRecordType using the same field list. if ( this.getRowType().isDynamicStruct() ) { final AlgDataType staticRowType = new AlgRecordType( getRowType().getFieldList() ); - final AlgOptTable algOptTable = this.copy( staticRowType ); - return algOptTable.toAlg( context, traitSet ); + final AlgOptEntity algOptEntity = this.copy( staticRowType ); + return algOptEntity.toAlg( context, traitSet ); } // If there are any virtual columns, create a copy of this table without those virtual columns. 
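 // The copy constructed below keeps only the stored (non-virtual) fields in its row type and answers
 // unwrap( InitializerExpressionFactory.class ) with a null-producing factory, so the resulting scan
 // reads exactly the physically stored columns.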
@@ -282,8 +283,8 @@ public AlgNode toAlg( ToAlgContext context, AlgTraitSet traitSet ) { b.add( field.getName(), null, field.getType() ); } } - final AlgOptTable algOptTable = - new AlgOptTableImpl( this.schema, b.build(), this.names, this.table, this.catalogTable, this.expressionFunction, this.rowCount ) { + final AlgOptEntity algOptEntity = + new AlgOptEntityImpl( this.schema, b.build(), this.names, this.table, this.catalogEntity, this.expressionFunction, this.rowCount ) { @Override public T unwrap( Class clazz ) { if ( clazz.isAssignableFrom( InitializerExpressionFactory.class ) ) { @@ -292,7 +293,7 @@ public T unwrap( Class clazz ) { return super.unwrap( clazz ); } }; - return algOptTable.toAlg( context, traitSet ); + return algOptEntity.toAlg( context, traitSet ); } if ( table instanceof TranslatableTable ) { @@ -300,7 +301,7 @@ public T unwrap( Class clazz ) { } final AlgOptCluster cluster = context.getCluster(); if ( Hook.ENABLE_BINDABLE.get( false ) ) { - return LogicalScan.create( cluster, this ); + return LogicalRelScan.create( cluster, this ); } if ( PolyphenyDbPrepareImpl.ENABLE_ENUMERABLE && table instanceof QueryableTable ) { return EnumerableScan.create( cluster, this ); @@ -308,7 +309,7 @@ public T unwrap( Class clazz ) { if ( table instanceof ScannableTable || table instanceof FilterableTable || table instanceof ProjectableFilterableTable ) { - return LogicalScan.create( cluster, this ); + return LogicalRelScan.create( cluster, this ); } if ( PolyphenyDbPrepareImpl.ENABLE_ENUMERABLE ) { return EnumerableScan.create( cluster, this ); @@ -398,7 +399,7 @@ public AccessType getAllowedAccess() { /** * Helper for {@link #getColumnStrategies()}. */ - public static List columnStrategies( final AlgOptTable table ) { + public static List columnStrategies( final AlgOptEntity table ) { final int fieldCount = table.getRowType().getFieldCount(); final InitializerExpressionFactory ief = Util.first( table.unwrap( InitializerExpressionFactory.class ), @@ -422,7 +423,7 @@ public ColumnStrategy get( int index ) { * Converts the ordinal of a field into the ordinal of a stored field. * That is, it subtracts the number of virtual fields that come before it. */ - public static int realOrdinal( final AlgOptTable table, int i ) { + public static int realOrdinal( final AlgOptEntity table, int i ) { List strategies = table.getColumnStrategies(); int n = 0; for ( int j = 0; j < i; j++ ) { @@ -439,7 +440,7 @@ public static int realOrdinal( final AlgOptTable table, int i ) { * Returns the row type of a table after any {@link ColumnStrategy#VIRTUAL} columns have been removed. This is the type * of the records that are actually stored. 
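 *
 * <p>A worked example with a hypothetical row type: given fields (A, B, C) where B is
 * {@link ColumnStrategy#VIRTUAL}, this method returns the type (A, C), and
 * realOrdinal( table, 2 ) returns 1, because one virtual field precedes C.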
*/ - public static AlgDataType realRowType( AlgOptTable table ) { + public static AlgDataType realRowType( AlgOptEntity table ) { final AlgDataType rowType = table.getRowType(); final List strategies = columnStrategies( table ); if ( !strategies.contains( ColumnStrategy.VIRTUAL ) ) { diff --git a/core/src/main/java/org/polypheny/db/prepare/LixToAlgTranslator.java b/core/src/main/java/org/polypheny/db/prepare/LixToAlgTranslator.java index 091a6128d3..1fca0c05ef 100644 --- a/core/src/main/java/org/polypheny/db/prepare/LixToAlgTranslator.java +++ b/core/src/main/java/org/polypheny/db/prepare/LixToAlgTranslator.java @@ -50,9 +50,9 @@ import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.logical.relational.LogicalFilter; import org.polypheny.db.algebra.logical.relational.LogicalProject; -import org.polypheny.db.algebra.logical.relational.LogicalScan; +import org.polypheny.db.algebra.logical.relational.LogicalRelScan; import org.polypheny.db.plan.AlgOptCluster; -import org.polypheny.db.plan.AlgOptTable.ToAlgContext; +import org.polypheny.db.plan.AlgOptEntity.ToAlgContext; import org.polypheny.db.rex.RexBuilder; import org.polypheny.db.rex.RexNode; import org.polypheny.db.util.BuiltInMethod; @@ -109,18 +109,18 @@ public AlgNode translate( Expression expression ) { toRex( (FunctionExpression) call.expressions.get( 0 ), input ) ); case AS_QUERYABLE: - return LogicalScan.create( + return LogicalRelScan.create( cluster, - AlgOptTableImpl.create( + AlgOptEntityImpl.create( null, typeFactory.createJavaType( Types.toClass( Types.getElementType( call.targetExpression.getType() ) ) ), ImmutableList.of(), call.targetExpression ) ); case SCHEMA_GET_TABLE: - return LogicalScan.create( + return LogicalRelScan.create( cluster, - AlgOptTableImpl.create( + AlgOptEntityImpl.create( null, typeFactory.createJavaType( (Class) ((ConstantExpression) call.expressions.get( 1 )).value ), ImmutableList.of(), diff --git a/core/src/main/java/org/polypheny/db/prepare/PolyphenyDbCatalogReader.java b/core/src/main/java/org/polypheny/db/prepare/PolyphenyDbCatalogReader.java index 1ea77c9a7d..02a0aee464 100644 --- a/core/src/main/java/org/polypheny/db/prepare/PolyphenyDbCatalogReader.java +++ b/core/src/main/java/org/polypheny/db/prepare/PolyphenyDbCatalogReader.java @@ -49,7 +49,8 @@ import org.polypheny.db.catalog.entity.CatalogTable; import org.polypheny.db.nodes.Identifier; import org.polypheny.db.nodes.Operator; -import org.polypheny.db.plan.AlgOptTable; +import org.polypheny.db.plan.AlgOptEntity; +import org.polypheny.db.prepare.Prepare.PreparingEntity; import org.polypheny.db.schema.PolyphenyDbSchema; import org.polypheny.db.schema.Table; import org.polypheny.db.schema.Wrapper; @@ -78,32 +79,32 @@ public PolyphenyDbCatalogReader( PolyphenyDbSchema rootSchema, List defa @Override - public Prepare.PreparingTable getTable( final List names ) { + public PreparingEntity getTable( final List names ) { // First look in the default schema, if any. If not found, look in the root schema. 
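 // A null entry means the name resolved in neither schema, in which case this method returns null.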
PolyphenyDbSchema.TableEntry entry = ValidatorUtil.getTableEntry( this, names ); if ( entry != null ) { final Table table = entry.getTable(); CatalogTable catalogTable = Catalog.getInstance().getTable( table.getTableId() ); if ( table instanceof Wrapper ) { - final Prepare.PreparingTable algOptTable = ((Wrapper) table).unwrap( Prepare.PreparingTable.class ); + final PreparingEntity algOptTable = ((Wrapper) table).unwrap( PreparingEntity.class ); if ( algOptTable != null ) { return algOptTable; } } - return AlgOptTableImpl.create( this, table.getRowType( typeFactory ), entry, catalogTable, null ); + return AlgOptEntityImpl.create( this, table.getRowType( typeFactory ), entry, catalogTable, null ); } return null; } @Override - public AlgOptTable getCollection( final List names ) { + public AlgOptEntity getCollection( final List names ) { // First look in the default schema, if any. If not found, look in the root schema. PolyphenyDbSchema.TableEntry entry = ValidatorUtil.getTableEntry( this, names ); if ( entry != null ) { final Table table = entry.getTable(); CatalogTable catalogTable = Catalog.getInstance().getTable( table.getTableId() ); - return AlgOptTableImpl.create( this, table.getRowType( typeFactory ), entry, catalogTable, null ); + return AlgOptEntityImpl.create( this, table.getRowType( typeFactory ), entry, catalogTable, null ); } return null; } @@ -174,7 +175,7 @@ public List> getSchemaPaths() { @Override - public Prepare.PreparingTable getTableForMember( List names ) { + public PreparingEntity getTableForMember( List names ) { return getTable( names ); } diff --git a/core/src/main/java/org/polypheny/db/prepare/Prepare.java b/core/src/main/java/org/polypheny/db/prepare/Prepare.java index abf73b7c04..d49a201039 100644 --- a/core/src/main/java/org/polypheny/db/prepare/Prepare.java +++ b/core/src/main/java/org/polypheny/db/prepare/Prepare.java @@ -59,10 +59,10 @@ import org.polypheny.db.nodes.validate.ValidatorCatalogReader; import org.polypheny.db.nodes.validate.ValidatorTable; import org.polypheny.db.plan.AlgOptCluster; +import org.polypheny.db.plan.AlgOptEntity; +import org.polypheny.db.plan.AlgOptEntity.ToAlgContext; import org.polypheny.db.plan.AlgOptPlanner; import org.polypheny.db.plan.AlgOptSchema; -import org.polypheny.db.plan.AlgOptTable; -import org.polypheny.db.plan.AlgOptTable.ToAlgContext; import org.polypheny.db.plan.AlgOptUtil; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.plan.Convention; @@ -240,12 +240,12 @@ protected LogicalModify.Operation mapTableModOp( boolean isDml, Kind Kind ) { public interface CatalogReader extends AlgOptSchema, ValidatorCatalogReader, OperatorTable { @Override - PreparingTable getTableForMember( List names ); + PreparingEntity getTableForMember( List names ); @Override - PreparingTable getTable( List names ); + PreparingEntity getTable( List names ); - AlgOptTable getCollection( List names ); + AlgOptEntity getCollection( List names ); Graph getGraph( String name ); @@ -257,18 +257,18 @@ public interface CatalogReader extends AlgOptSchema, ValidatorCatalogReader, Ope /** * Definition of a table, for the purposes of the validator and planner. */ - public interface PreparingTable extends AlgOptTable, ValidatorTable { + public interface PreparingEntity extends AlgOptEntity, ValidatorTable { } /** - * Abstract implementation of {@link PreparingTable}. + * Abstract implementation of {@link PreparingEntity}. 
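 *
 * <p>A worked example of the extension contract (field names illustrative): extending a base table
 * typed (A, B) with fields (B, C) yields an entity typed (A, B, C); the duplicate B from the
 * extension list is dropped.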
*/ - public abstract static class AbstractPreparingTable implements PreparingTable { + public abstract static class AbstractPreparingEntity implements PreparingEntity { @Override - public final AlgOptTable extend( List extendedFields ) { + public final AlgOptEntity extend( List extendedFields ) { final Table table = unwrap( Table.class ); // Get the set of extended columns that do not have the same name as a column in the base table. @@ -285,14 +285,14 @@ public final AlgOptTable extend( List extendedFields ) { /** - * Implementation-specific code to instantiate a new {@link AlgOptTable} based on a {@link Table} that has been extended. + * Implementation-specific code to instantiate a new {@link AlgOptEntity} based on a {@link Table} that has been extended. */ - protected abstract AlgOptTable extend( Table extendedTable ); + protected abstract AlgOptEntity extend( Table extendedTable ); @Override public List getColumnStrategies() { - return AlgOptTableImpl.columnStrategies( AbstractPreparingTable.this ); + return AlgOptEntityImpl.columnStrategies( AbstractPreparingEntity.this ); } } diff --git a/core/src/main/java/org/polypheny/db/prepare/QueryableAlgBuilder.java b/core/src/main/java/org/polypheny/db/prepare/QueryableAlgBuilder.java index 126a6d234a..d6c570fd37 100644 --- a/core/src/main/java/org/polypheny/db/prepare/QueryableAlgBuilder.java +++ b/core/src/main/java/org/polypheny/db/prepare/QueryableAlgBuilder.java @@ -63,7 +63,7 @@ import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.logical.relational.LogicalFilter; import org.polypheny.db.algebra.logical.relational.LogicalProject; -import org.polypheny.db.algebra.logical.relational.LogicalScan; +import org.polypheny.db.algebra.logical.relational.LogicalRelScan; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogTable; import org.polypheny.db.rex.RexNode; @@ -113,7 +113,7 @@ AlgNode toAlg( Queryable queryable ) { PolyphenyDbSchema .from( tableQueryable.schema ) .add( tableQueryable.tableName, tableQueryable.table ); - final AlgOptTableImpl algOptTable = AlgOptTableImpl.create( + final AlgOptEntityImpl algOptTable = AlgOptEntityImpl.create( null, table.getRowType( translator.typeFactory ), tableEntry, @@ -122,7 +122,7 @@ AlgNode toAlg( Queryable queryable ) { if ( table instanceof TranslatableTable ) { return ((TranslatableTable) table).toAlg( translator.toAlgContext(), algOptTable, translator.cluster.traitSet() ); } else { - return LogicalScan.create( translator.cluster, algOptTable ); + return LogicalRelScan.create( translator.cluster, algOptTable ); } } return translator.translate( queryable.getExpression() ); diff --git a/core/src/main/java/org/polypheny/db/processing/DeepCopyShuttle.java b/core/src/main/java/org/polypheny/db/processing/DeepCopyShuttle.java index 55da52480c..d75ecf4583 100644 --- a/core/src/main/java/org/polypheny/db/processing/DeepCopyShuttle.java +++ b/core/src/main/java/org/polypheny/db/processing/DeepCopyShuttle.java @@ -33,7 +33,7 @@ import org.polypheny.db.algebra.logical.relational.LogicalMatch; import org.polypheny.db.algebra.logical.relational.LogicalMinus; import org.polypheny.db.algebra.logical.relational.LogicalProject; -import org.polypheny.db.algebra.logical.relational.LogicalScan; +import org.polypheny.db.algebra.logical.relational.LogicalRelScan; import org.polypheny.db.algebra.logical.relational.LogicalSort; import org.polypheny.db.algebra.logical.relational.LogicalUnion; import org.polypheny.db.algebra.logical.relational.LogicalValues; @@ -52,7 
+52,7 @@ private AlgTraitSet copy( final AlgTraitSet other ) { @Override public AlgNode visit( Scan scan ) { final AlgNode node = super.visit( scan ); - return new LogicalScan( node.getCluster(), copy( node.getTraitSet() ), node.getTable() ); + return new LogicalRelScan( node.getCluster(), copy( node.getTraitSet() ), node.getTable() ); } diff --git a/core/src/main/java/org/polypheny/db/processing/LogicalAlgAnalyzeShuttle.java b/core/src/main/java/org/polypheny/db/processing/LogicalAlgAnalyzeShuttle.java index 1ba3b3de60..305b65d618 100644 --- a/core/src/main/java/org/polypheny/db/processing/LogicalAlgAnalyzeShuttle.java +++ b/core/src/main/java/org/polypheny/db/processing/LogicalAlgAnalyzeShuttle.java @@ -38,7 +38,6 @@ import org.polypheny.db.algebra.logical.document.LogicalDocumentScan; import org.polypheny.db.algebra.logical.document.LogicalDocumentSort; import org.polypheny.db.algebra.logical.document.LogicalDocumentTransformer; -import org.polypheny.db.algebra.logical.lpg.LogicalGraph; import org.polypheny.db.algebra.logical.lpg.LogicalLpgAggregate; import org.polypheny.db.algebra.logical.lpg.LogicalLpgFilter; import org.polypheny.db.algebra.logical.lpg.LogicalLpgMatch; @@ -58,15 +57,11 @@ import org.polypheny.db.algebra.logical.relational.LogicalMinus; import org.polypheny.db.algebra.logical.relational.LogicalModify; import org.polypheny.db.algebra.logical.relational.LogicalProject; -import org.polypheny.db.algebra.logical.relational.LogicalScan; +import org.polypheny.db.algebra.logical.relational.LogicalRelScan; import org.polypheny.db.algebra.logical.relational.LogicalSort; import org.polypheny.db.algebra.logical.relational.LogicalUnion; -import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.entity.CatalogGraphDatabase; import org.polypheny.db.catalog.entity.CatalogTable; -import org.polypheny.db.prepare.AlgOptTableImpl; -import org.polypheny.db.schema.LogicalTable; -import org.polypheny.db.schema.Table; +import org.polypheny.db.prepare.AlgOptEntityImpl; import org.polypheny.db.schema.graph.Graph; import org.polypheny.db.transaction.Statement; @@ -257,7 +252,7 @@ public AlgNode visit( LogicalDocumentProject project ) { @Override public AlgNode visit( LogicalDocumentScan scan ) { - hashBasis.add( "LogicalDocumentScan#" + scan.getCollection().getTable().getTableId() ); + hashBasis.add( "LogicalDocumentScan#" + scan.getCollection().getCatalogEntity().id ); return super.visit( scan ); } @@ -330,7 +325,7 @@ public AlgNode visit( LogicalCorrelate correlate ) { @Override public AlgNode visit( LogicalJoin join ) { - if ( join.getLeft() instanceof LogicalScan && join.getRight() instanceof LogicalScan ) { + if ( join.getLeft() instanceof LogicalRelScan && join.getRight() instanceof LogicalRelScan ) { hashBasis.add( "LogicalJoin#" + join.getLeft().getTable().getQualifiedName() + "#" + join.getRight().getTable().getQualifiedName() ); } @@ -397,41 +392,39 @@ public AlgNode visit( AlgNode other ) { private void getAvailableColumns( AlgNode scan ) { this.entities.addAll( scan.getTable().getQualifiedName() ); - final Table table = scan.getTable().getTable(); - LogicalTable logicalTable = (table instanceof LogicalTable) ? (LogicalTable) table : null; - if ( logicalTable != null ) { - final List ids = logicalTable.getColumnIds(); - final List names = logicalTable.getLogicalColumnNames(); - final String baseName = logicalTable.getLogicalSchemaName() + "." 
+ logicalTable.getLogicalTableName() + "."; + final CatalogTable table = (CatalogTable) scan.getTable().getCatalogEntity(); + if ( table != null ) { + final List ids = table.fieldIds; + final List names = table.getColumnNames(); + final String baseName = table.getNamespaceName() + "." + table.name + "."; for ( int i = 0; i < ids.size(); i++ ) { this.availableColumns.putIfAbsent( ids.get( i ), baseName + names.get( i ) ); - this.availableColumnsWithTable.putIfAbsent( ids.get( i ), logicalTable.getTableId() ); + this.availableColumnsWithTable.putIfAbsent( ids.get( i ), table.id ); } } } private void getPartitioningInfo( LogicalFilter filter ) { - AlgOptTableImpl table = (AlgOptTableImpl) filter.getInput().getTable(); + AlgOptEntityImpl table = (AlgOptEntityImpl) filter.getInput().getTable(); if ( table == null ) { return; } - final Table logicalTable = table.getTable(); - if ( !(logicalTable instanceof LogicalTable) ) { - return; - } - CatalogTable catalogTable = Catalog.getInstance().getTable( logicalTable.getTableId() ); + handleIfPartitioned( filter, (CatalogTable) table.getCatalogEntity() ); + } + + private void handleIfPartitioned( AlgNode node, CatalogTable catalogTable ) { // Only if table is partitioned if ( catalogTable.partitionProperty.isPartitioned ) { WhereClauseVisitor whereClauseVisitor = new WhereClauseVisitor( statement, catalogTable.fieldIds.indexOf( catalogTable.partitionProperty.partitionColumnId ) ); - filter.accept( whereClauseVisitor ); + node.accept( whereClauseVisitor ); - int scanId = filter.getInput().getId(); + int scanId = node.getInput( 0 ).getId(); if ( !partitionValueFilterPerScan.containsKey( scanId ) ) { partitionValueFilterPerScan.put( scanId, new HashSet<>() ); @@ -449,53 +442,18 @@ private void getPartitioningInfo( LogicalFilter filter ) { private void getPartitioningInfo( LogicalDocumentFilter filter ) { - AlgOptTableImpl table = (AlgOptTableImpl) filter.getInput().getTable(); + AlgOptEntityImpl table = (AlgOptEntityImpl) filter.getInput().getTable(); if ( table == null ) { return; } - final Table logicalTable = table.getTable(); - if ( !(logicalTable instanceof LogicalTable) ) { - return; - } - CatalogTable catalogTable = Catalog.getInstance().getTable( logicalTable.getTableId() ); - - // Only if table is partitioned - if ( catalogTable.partitionProperty.isPartitioned ) { - WhereClauseVisitor whereClauseVisitor = new WhereClauseVisitor( - statement, - catalogTable.fieldIds.indexOf( catalogTable.partitionProperty.partitionColumnId ) ); - filter.accept( whereClauseVisitor ); - - int scanId = filter.getInput().getId(); - - if ( !partitionValueFilterPerScan.containsKey( scanId ) ) { - partitionValueFilterPerScan.put( scanId, new HashSet<>() ); - } - - if ( whereClauseVisitor.valueIdentified ) { - if ( !whereClauseVisitor.getValues().isEmpty() && !whereClauseVisitor.isUnsupportedFilter() ) { - partitionValueFilterPerScan.get( scanId ).addAll( whereClauseVisitor.getValues().stream() - .map( Object::toString ) - .collect( Collectors.toSet() ) ); - } - } - } + handleIfPartitioned( filter, (CatalogTable) table.getCatalogEntity() ); } private void getPartitioningInfo( LogicalLpgFilter filter ) { Graph graph = ((LpgAlg) filter.getInput()).getGraph(); - if ( graph == null ) { - return; - } - - if ( !(graph instanceof LogicalGraph) ) { - return; - } - CatalogGraphDatabase catalogEntity = Catalog.getInstance().getGraph( graph.getId() ); - - // Only if table is partitioned + // todo might add } } diff --git 
a/core/src/main/java/org/polypheny/db/rex/RexTableInputRef.java b/core/src/main/java/org/polypheny/db/rex/RexTableInputRef.java index 65dfb954b8..8f0ca74cf6 100644 --- a/core/src/main/java/org/polypheny/db/rex/RexTableInputRef.java +++ b/core/src/main/java/org/polypheny/db/rex/RexTableInputRef.java @@ -39,7 +39,7 @@ import org.polypheny.db.algebra.metadata.BuiltInMetadata.AllPredicates; import org.polypheny.db.algebra.metadata.BuiltInMetadata.ExpressionLineage; import org.polypheny.db.algebra.type.AlgDataType; -import org.polypheny.db.plan.AlgOptTable; +import org.polypheny.db.plan.AlgOptEntity; /** @@ -129,12 +129,12 @@ public Kind getKind() { */ public static class AlgTableRef implements Comparable { - private final AlgOptTable table; + private final AlgOptEntity table; private final int entityNumber; private final String digest; - private AlgTableRef( AlgOptTable table, int entityNumber ) { + private AlgTableRef( AlgOptEntity table, int entityNumber ) { this.table = table; this.entityNumber = entityNumber; this.digest = table.getQualifiedName() + ".#" + entityNumber; @@ -156,7 +156,7 @@ public int hashCode() { } - public AlgOptTable getTable() { + public AlgOptEntity getTable() { return table; } @@ -177,7 +177,7 @@ public String toString() { } - public static AlgTableRef of( AlgOptTable table, int entityNumber ) { + public static AlgTableRef of( AlgOptEntity table, int entityNumber ) { return new AlgTableRef( table, entityNumber ); } diff --git a/core/src/main/java/org/polypheny/db/schema/ColumnStrategy.java b/core/src/main/java/org/polypheny/db/schema/ColumnStrategy.java index 16c3b5bafc..e400aa5f4d 100644 --- a/core/src/main/java/org/polypheny/db/schema/ColumnStrategy.java +++ b/core/src/main/java/org/polypheny/db/schema/ColumnStrategy.java @@ -34,7 +34,7 @@ package org.polypheny.db.schema; -import org.polypheny.db.plan.AlgOptTable; +import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.util.InitializerExpressionFactory; @@ -42,7 +42,7 @@ * Describes how a column gets populated. 
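 *
 * <p>For example (assumed column definitions, for illustration only): a plain nullable column
 * reports {@code NULLABLE}, a column with a DEFAULT clause reports {@code DEFAULT}, and a generated
 * column that is computed on read rather than stored reports {@code VIRTUAL}.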
* * @see InitializerExpressionFactory#generationStrategy - * @see AlgOptTable#getColumnStrategies() + * @see AlgOptEntity#getColumnStrategies() */ public enum ColumnStrategy { diff --git a/core/src/main/java/org/polypheny/db/schema/LogicalTable.java b/core/src/main/java/org/polypheny/db/schema/LogicalTable.java index 8b5d724f32..ae05fec3a0 100644 --- a/core/src/main/java/org/polypheny/db/schema/LogicalTable.java +++ b/core/src/main/java/org/polypheny/db/schema/LogicalTable.java @@ -34,8 +34,8 @@ import org.polypheny.db.algebra.type.AlgProtoDataType; import org.polypheny.db.catalog.Catalog.NamespaceType; import org.polypheny.db.plan.AlgOptCluster; -import org.polypheny.db.plan.AlgOptTable; -import org.polypheny.db.plan.AlgOptTable.ToAlgContext; +import org.polypheny.db.plan.AlgOptEntity; +import org.polypheny.db.plan.AlgOptEntity.ToAlgContext; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.plan.Convention; import org.polypheny.db.prepare.Prepare.CatalogReader; @@ -90,7 +90,7 @@ public String toString() { @Override public Modify toModificationAlg( AlgOptCluster cluster, - AlgOptTable table, + AlgOptEntity table, CatalogReader catalogReader, AlgNode input, Operation operation, @@ -135,7 +135,7 @@ public Enumerable scan( DataContext root ) { @Override - public AlgNode toAlg( ToAlgContext context, AlgOptTable algOptTable, AlgTraitSet traitSet ) { + public AlgNode toAlg( ToAlgContext context, AlgOptEntity algOptEntity, AlgTraitSet traitSet ) { throw new RuntimeException( "toAlg() is not implemented for Logical Tables!" ); } diff --git a/core/src/main/java/org/polypheny/db/schema/ModifiableCollection.java b/core/src/main/java/org/polypheny/db/schema/ModifiableCollection.java index 2a22d6fed7..e3a20b6b0f 100644 --- a/core/src/main/java/org/polypheny/db/schema/ModifiableCollection.java +++ b/core/src/main/java/org/polypheny/db/schema/ModifiableCollection.java @@ -21,7 +21,7 @@ import org.polypheny.db.algebra.core.Modify.Operation; import org.polypheny.db.algebra.core.document.DocumentModify; import org.polypheny.db.plan.AlgOptCluster; -import org.polypheny.db.plan.AlgOptTable; +import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.prepare.Prepare.CatalogReader; import org.polypheny.db.rex.RexNode; @@ -30,7 +30,7 @@ public interface ModifiableCollection extends QueryableTable { DocumentModify toModificationAlg( AlgOptCluster cluster, - AlgOptTable table, + AlgOptEntity table, CatalogReader catalogReader, AlgNode child, Operation operation, diff --git a/core/src/main/java/org/polypheny/db/schema/ModifiableTable.java b/core/src/main/java/org/polypheny/db/schema/ModifiableTable.java index 0c8114d88a..077871b7c8 100644 --- a/core/src/main/java/org/polypheny/db/schema/ModifiableTable.java +++ b/core/src/main/java/org/polypheny/db/schema/ModifiableTable.java @@ -39,7 +39,7 @@ import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.core.Modify; import org.polypheny.db.plan.AlgOptCluster; -import org.polypheny.db.plan.AlgOptTable; +import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.prepare.Prepare.CatalogReader; import org.polypheny.db.rex.RexNode; @@ -60,7 +60,7 @@ public interface ModifiableTable extends QueryableTable { */ Modify toModificationAlg( AlgOptCluster cluster, - AlgOptTable table, + AlgOptEntity table, CatalogReader catalogReader, AlgNode child, Modify.Operation operation, diff --git a/core/src/main/java/org/polypheny/db/schema/TranslatableGraph.java b/core/src/main/java/org/polypheny/db/schema/TranslatableGraph.java index 
db5280e9e0..1b221bb7eb 100644 --- a/core/src/main/java/org/polypheny/db/schema/TranslatableGraph.java +++ b/core/src/main/java/org/polypheny/db/schema/TranslatableGraph.java @@ -17,7 +17,7 @@ package org.polypheny.db.schema; import org.polypheny.db.algebra.AlgNode; -import org.polypheny.db.plan.AlgOptTable.ToAlgContext; +import org.polypheny.db.plan.AlgOptEntity.ToAlgContext; import org.polypheny.db.schema.graph.Graph; public interface TranslatableGraph extends Graph { diff --git a/core/src/main/java/org/polypheny/db/schema/TranslatableTable.java b/core/src/main/java/org/polypheny/db/schema/TranslatableTable.java index bc92f99a3a..06bb6f2054 100644 --- a/core/src/main/java/org/polypheny/db/schema/TranslatableTable.java +++ b/core/src/main/java/org/polypheny/db/schema/TranslatableTable.java @@ -36,8 +36,8 @@ import org.polypheny.db.adapter.enumerable.EnumerableScan; import org.polypheny.db.algebra.AlgNode; -import org.polypheny.db.plan.AlgOptTable; -import org.polypheny.db.plan.AlgOptTable.ToAlgContext; +import org.polypheny.db.plan.AlgOptEntity; +import org.polypheny.db.plan.AlgOptEntity.ToAlgContext; import org.polypheny.db.plan.AlgTraitSet; @@ -53,6 +53,6 @@ public interface TranslatableTable extends Table { /** * Converts this table into a {@link AlgNode relational expression}. */ - AlgNode toAlg( ToAlgContext context, AlgOptTable algOptTable, AlgTraitSet traitSet ); + AlgNode toAlg( ToAlgContext context, AlgOptEntity algOptEntity, AlgTraitSet traitSet ); } diff --git a/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java b/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java index 16b927be24..e6af238eb2 100644 --- a/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java +++ b/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java @@ -107,9 +107,9 @@ import org.polypheny.db.languages.QueryLanguage; import org.polypheny.db.nodes.Operator; import org.polypheny.db.plan.AlgOptCluster; +import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptPredicateList; import org.polypheny.db.plan.AlgOptSchema; -import org.polypheny.db.plan.AlgOptTable; import org.polypheny.db.plan.AlgOptUtil; import org.polypheny.db.plan.Context; import org.polypheny.db.plan.Contexts; @@ -1326,21 +1326,21 @@ public RexNode patternExclude( RexNode node ) { */ public AlgBuilder scan( Iterable tableNames ) { final List names = ImmutableList.copyOf( tableNames ); - final AlgOptTable algOptTable = algOptSchema.getTableForMember( names ); - if ( algOptTable == null ) { + final AlgOptEntity algOptEntity = algOptSchema.getTableForMember( names ); + if ( algOptEntity == null ) { throw RESOURCE.tableNotFound( String.join( ".", names ) ).ex(); } - final AlgNode scan = scanFactory.createScan( cluster, algOptTable ); + final AlgNode scan = scanFactory.createScan( cluster, algOptEntity ); push( scan ); - rename( algOptTable.getRowType().getFieldNames() ); + rename( algOptEntity.getRowType().getFieldNames() ); return this; } - public AlgBuilder scan( @Nonnull AlgOptTable algOptTable ) { - final AlgNode scan = scanFactory.createScan( cluster, algOptTable ); + public AlgBuilder scan( @Nonnull AlgOptEntity algOptEntity ) { + final AlgNode scan = scanFactory.createScan( cluster, algOptEntity ); push( scan ); - rename( algOptTable.getRowType().getFieldNames() ); + rename( algOptEntity.getRowType().getFieldNames() ); return this; } @@ -1359,7 +1359,7 @@ public AlgBuilder scan( String... 
tableNames ) { } - public AlgBuilder documentScan( AlgOptTable collection ) { + public AlgBuilder documentScan( AlgOptEntity collection ) { stack.add( new Frame( new LogicalDocumentScan( cluster, cluster.traitSet().replace( ModelTrait.DOCUMENT ), collection ) ) ); return this; } diff --git a/core/src/main/java/org/polypheny/db/util/InitializerExpressionFactory.java b/core/src/main/java/org/polypheny/db/util/InitializerExpressionFactory.java index a08a06a951..296f380343 100644 --- a/core/src/main/java/org/polypheny/db/util/InitializerExpressionFactory.java +++ b/core/src/main/java/org/polypheny/db/util/InitializerExpressionFactory.java @@ -20,7 +20,7 @@ import java.util.List; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.nodes.Operator; -import org.polypheny.db.plan.AlgOptTable; +import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.rex.RexNode; import org.polypheny.db.schema.ColumnStrategy; @@ -36,9 +36,9 @@ public interface InitializerExpressionFactory { * @param table the table containing the column * @param iColumn the 0-based offset of the column in the table * @return generation strategy, never null - * @see AlgOptTable#getColumnStrategies() + * @see AlgOptEntity#getColumnStrategies() */ - ColumnStrategy generationStrategy( AlgOptTable table, int iColumn ); + ColumnStrategy generationStrategy( AlgOptEntity table, int iColumn ); /** * Creates an expression which evaluates to the default value for a particular column. @@ -48,7 +48,7 @@ public interface InitializerExpressionFactory { * @param context Context for creating the expression * @return default value expression */ - RexNode newColumnDefaultValue( AlgOptTable table, int iColumn, InitializerContext context ); + RexNode newColumnDefaultValue( AlgOptEntity table, int iColumn, InitializerContext context ); /** * Creates an expression which evaluates to the initializer expression for a particular attribute of a structured type. diff --git a/core/src/main/java/org/polypheny/db/util/NullInitializerExpressionFactory.java b/core/src/main/java/org/polypheny/db/util/NullInitializerExpressionFactory.java index b9ca6ac06d..2626a1d0c7 100644 --- a/core/src/main/java/org/polypheny/db/util/NullInitializerExpressionFactory.java +++ b/core/src/main/java/org/polypheny/db/util/NullInitializerExpressionFactory.java @@ -20,7 +20,7 @@ import java.util.List; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.nodes.Operator; -import org.polypheny.db.plan.AlgOptTable; +import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.rex.RexNode; import org.polypheny.db.schema.ColumnStrategy; @@ -38,7 +38,7 @@ public NullInitializerExpressionFactory() { @Override - public ColumnStrategy generationStrategy( AlgOptTable table, int iColumn ) { + public ColumnStrategy generationStrategy( AlgOptEntity table, int iColumn ) { return table.getRowType().getFieldList().get( iColumn ).getType().isNullable() ? 
ColumnStrategy.NULLABLE : ColumnStrategy.NOT_NULLABLE; @@ -46,7 +46,7 @@ public ColumnStrategy generationStrategy( AlgOptTable table, int iColumn ) { @Override - public RexNode newColumnDefaultValue( AlgOptTable table, int iColumn, InitializerContext context ) { + public RexNode newColumnDefaultValue( AlgOptEntity table, int iColumn, InitializerContext context ) { return context.getRexBuilder().constantNull(); } diff --git a/core/src/main/java/org/polypheny/db/view/MaterializedViewManager.java b/core/src/main/java/org/polypheny/db/view/MaterializedViewManager.java index 6fb0287b79..ea7d1a3a8c 100644 --- a/core/src/main/java/org/polypheny/db/view/MaterializedViewManager.java +++ b/core/src/main/java/org/polypheny/db/view/MaterializedViewManager.java @@ -28,8 +28,8 @@ import org.polypheny.db.algebra.logical.relational.LogicalModify; import org.polypheny.db.catalog.entity.CatalogColumn; import org.polypheny.db.catalog.entity.CatalogMaterializedView; +import org.polypheny.db.catalog.entity.CatalogTable; import org.polypheny.db.catalog.entity.MaterializedCriteria; -import org.polypheny.db.schema.LogicalTable; import org.polypheny.db.transaction.PolyXid; import org.polypheny.db.transaction.Transaction; @@ -91,14 +91,10 @@ public static class TableUpdateVisitor extends AlgShuttleImpl { @Override public AlgNode visit( LogicalModify modify ) { if ( modify.getOperation() != Operation.MERGE ) { - if ( (modify.getTable().getTable() instanceof LogicalTable) ) { - List qualifiedName = modify.getTable().getQualifiedName(); - if ( qualifiedName.size() < 2 ) { - names.add( ((LogicalTable) modify.getTable().getTable()).getLogicalSchemaName() ); - names.add( ((LogicalTable) modify.getTable().getTable()).getLogicalTableName() ); - } else { - names.addAll( qualifiedName ); - } + if ( (modify.getTable().getCatalogEntity() != null) ) { + CatalogTable table = modify.getTable().getCatalogEntity().unwrap( CatalogTable.class ); + names.add( table.getNamespaceName() ); + names.add( table.name ); } } return super.visit( modify ); diff --git a/core/src/main/java/org/polypheny/db/view/ViewManager.java b/core/src/main/java/org/polypheny/db/view/ViewManager.java index 03ff88eab6..bbd8a9d8d0 100644 --- a/core/src/main/java/org/polypheny/db/view/ViewManager.java +++ b/core/src/main/java/org/polypheny/db/view/ViewManager.java @@ -40,20 +40,18 @@ import org.polypheny.db.algebra.logical.relational.LogicalMinus; import org.polypheny.db.algebra.logical.relational.LogicalModify; import org.polypheny.db.algebra.logical.relational.LogicalProject; +import org.polypheny.db.algebra.logical.relational.LogicalRelScan; import org.polypheny.db.algebra.logical.relational.LogicalRelViewScan; -import org.polypheny.db.algebra.logical.relational.LogicalScan; import org.polypheny.db.algebra.logical.relational.LogicalSort; import org.polypheny.db.algebra.logical.relational.LogicalUnion; import org.polypheny.db.algebra.logical.relational.LogicalValues; import org.polypheny.db.algebra.type.AlgDataType; -import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.Catalog.EntityType; import org.polypheny.db.catalog.entity.CatalogMaterializedView; import org.polypheny.db.catalog.entity.CatalogTable; -import org.polypheny.db.prepare.AlgOptTableImpl; +import org.polypheny.db.prepare.AlgOptEntityImpl; import org.polypheny.db.rex.RexBuilder; import org.polypheny.db.rex.RexNode; -import org.polypheny.db.schema.LogicalTable; public class ViewManager { @@ -253,14 +251,11 @@ private void handleNodeType( AlgNode other ) { public AlgNode 
checkNode( AlgNode other ) { if ( other instanceof LogicalRelViewScan ) { return expandViewNode( other ); - } else if ( doesSubstituteOrderBy && other instanceof LogicalScan ) { - if ( other.getTable() instanceof AlgOptTableImpl ) { - if ( other.getTable().getTable() instanceof LogicalTable ) { - long tableId = ((LogicalTable) ((AlgOptTableImpl) other.getTable()).getTable()).getTableId(); - CatalogTable catalogtable = Catalog.getInstance().getTable( tableId ); - if ( catalogtable.entityType == EntityType.MATERIALIZED_VIEW && ((CatalogMaterializedView) catalogtable).isOrdered() ) { - return orderMaterialized( other ); - } + } else if ( doesSubstituteOrderBy && other instanceof LogicalRelScan ) { + if ( other.getTable() instanceof AlgOptEntityImpl ) { + CatalogTable catalogTable = other.getTable().getCatalogEntity().unwrap( CatalogTable.class ); + if ( catalogTable.entityType == EntityType.MATERIALIZED_VIEW && ((CatalogMaterializedView) catalogTable).isOrdered() ) { + return orderMaterialized( other ); } } } diff --git a/core/src/test/java/org/polypheny/db/catalog/CountingFactory.java b/core/src/test/java/org/polypheny/db/catalog/CountingFactory.java index 851b90a107..932c475d32 100644 --- a/core/src/test/java/org/polypheny/db/catalog/CountingFactory.java +++ b/core/src/test/java/org/polypheny/db/catalog/CountingFactory.java @@ -24,7 +24,7 @@ import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.nodes.Operator; -import org.polypheny.db.plan.AlgOptTable; +import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.rex.RexBuilder; import org.polypheny.db.rex.RexNode; import org.polypheny.db.schema.ColumnStrategy; @@ -51,7 +51,7 @@ public class CountingFactory extends NullInitializerExpressionFactory { @Override - public ColumnStrategy generationStrategy( AlgOptTable table, int iColumn ) { + public ColumnStrategy generationStrategy( AlgOptEntity table, int iColumn ) { final AlgDataTypeField field = table.getRowType().getFieldList().get( iColumn ); if ( defaultColumns.contains( field.getName() ) ) { return ColumnStrategy.DEFAULT; @@ -61,7 +61,7 @@ public ColumnStrategy generationStrategy( AlgOptTable table, int iColumn ) { @Override - public RexNode newColumnDefaultValue( AlgOptTable table, int iColumn, InitializerContext context ) { + public RexNode newColumnDefaultValue( AlgOptEntity table, int iColumn, InitializerContext context ) { THREAD_CALL_COUNT.get().incrementAndGet(); final AlgDataTypeField field = table.getRowType().getFieldList().get( iColumn ); if ( defaultColumns.contains( field.getName() ) ) { diff --git a/core/src/test/java/org/polypheny/db/catalog/EmpInitializerExpressionFactory.java b/core/src/test/java/org/polypheny/db/catalog/EmpInitializerExpressionFactory.java index b15698e4db..6195184676 100644 --- a/core/src/test/java/org/polypheny/db/catalog/EmpInitializerExpressionFactory.java +++ b/core/src/test/java/org/polypheny/db/catalog/EmpInitializerExpressionFactory.java @@ -19,7 +19,7 @@ import java.math.BigDecimal; import org.polypheny.db.algebra.type.AlgDataTypeFactory; -import org.polypheny.db.plan.AlgOptTable; +import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.rex.RexBuilder; import org.polypheny.db.rex.RexNode; import org.polypheny.db.schema.ColumnStrategy; @@ -34,7 +34,7 @@ class EmpInitializerExpressionFactory extends NullInitializerExpressionFactory { @Override - public ColumnStrategy generationStrategy( AlgOptTable table, int iColumn ) { + public ColumnStrategy 
generationStrategy( AlgOptEntity table, int iColumn ) { switch ( iColumn ) { case 0: case 1: @@ -47,7 +47,7 @@ public ColumnStrategy generationStrategy( AlgOptTable table, int iColumn ) { @Override - public RexNode newColumnDefaultValue( AlgOptTable table, int iColumn, InitializerContext context ) { + public RexNode newColumnDefaultValue( AlgOptEntity table, int iColumn, InitializerContext context ) { final RexBuilder rexBuilder = context.getRexBuilder(); final AlgDataTypeFactory typeFactory = rexBuilder.getTypeFactory(); switch ( iColumn ) { diff --git a/core/src/test/java/org/polypheny/db/catalog/MockCatalogReader.java b/core/src/test/java/org/polypheny/db/catalog/MockCatalogReader.java index f7666e854a..ec6cf0adb4 100644 --- a/core/src/test/java/org/polypheny/db/catalog/MockCatalogReader.java +++ b/core/src/test/java/org/polypheny/db/catalog/MockCatalogReader.java @@ -56,7 +56,7 @@ import org.polypheny.db.algebra.constant.Kind; import org.polypheny.db.algebra.constant.Modality; import org.polypheny.db.algebra.constant.Monotonicity; -import org.polypheny.db.algebra.logical.relational.LogicalScan; +import org.polypheny.db.algebra.logical.relational.LogicalRelScan; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; import org.polypheny.db.algebra.type.AlgDataTypeField; @@ -65,14 +65,15 @@ import org.polypheny.db.algebra.type.DynamicRecordTypeImpl; import org.polypheny.db.algebra.type.StructKind; import org.polypheny.db.catalog.Catalog.NamespaceType; -import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.nodes.Call; import org.polypheny.db.nodes.Node; +import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptSchema; -import org.polypheny.db.plan.AlgOptTable; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.prepare.PolyphenyDbCatalogReader; -import org.polypheny.db.prepare.Prepare; +import org.polypheny.db.prepare.Prepare.AbstractPreparingEntity; +import org.polypheny.db.prepare.Prepare.PreparingEntity; import org.polypheny.db.schema.AbstractPolyphenyDbSchema; import org.polypheny.db.schema.CustomColumnResolvingTable; import org.polypheny.db.schema.ExtensibleTable; @@ -130,7 +131,7 @@ public MockCatalogReader( AlgDataTypeFactory typeFactory, boolean caseSensitive protected void registerTablesWithRollUp( MockSchema schema, Fixture f ) { // Register "EMP_R" table. Contains a rolled up column. - final MockTable empRolledTable = MockTable.create( this, schema, "EMP_R", false, 14 ); + final MockEntity empRolledTable = MockEntity.create( this, schema, "EMP_R", false, 14 ); empRolledTable.addColumn( "EMPNO", f.intType, true ); empRolledTable.addColumn( "DEPTNO", f.intType ); empRolledTable.addColumn( "SLACKER", f.booleanType ); @@ -140,7 +141,7 @@ protected void registerTablesWithRollUp( MockSchema schema, Fixture f ) { // Register the "DEPT_R" table. Doesn't contain a rolled up column, // but is useful for testing join - MockTable deptSlackingTable = MockTable.create( this, schema, "DEPT_R", false, 4 ); + MockEntity deptSlackingTable = MockEntity.create( this, schema, "DEPT_R", false, 4 ); deptSlackingTable.addColumn( "DEPTNO", f.intType, true ); deptSlackingTable.addColumn( "SLACKINGMIN", f.intType ); registerTable( deptSlackingTable ); @@ -151,7 +152,7 @@ protected void registerTablesWithRollUp( MockSchema schema, Fixture f ) { // Register "EMP_R" table which contains a rolled up column in NEST schema. 
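 // (Same columns as the top-level EMP_R registration above, but addressed through the nested schema path.)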
ImmutableList tablePath = ImmutableList.of( schema.getCatalogName(), schema.name, nestedSchema.name, "EMP_R" ); - final MockTable nestedEmpRolledTable = MockTable.create( this, tablePath, false, 14 ); + final MockEntity nestedEmpRolledTable = MockEntity.create( this, tablePath, false, 14 ); nestedEmpRolledTable.addColumn( "EMPNO", f.intType, true ); nestedEmpRolledTable.addColumn( "DEPTNO", f.intType ); nestedEmpRolledTable.addColumn( "SLACKER", f.booleanType ); @@ -169,7 +170,7 @@ protected void registerType( final List names, final AlgProtoDataType al } - protected void registerTable( final MockTable table ) { + protected void registerTable( final MockEntity table ) { table.onRegister( typeFactory ); final WrapperTable wrapperTable = new WrapperTable( table ); if ( table.stream ) { @@ -206,7 +207,7 @@ private void registerNestedSchema( MockSchema parentSchema, MockSchema schema ) } - private static List deduceMonotonicity( Prepare.PreparingTable table ) { + private static List deduceMonotonicity( PreparingEntity table ) { final List collationList = new ArrayList<>(); // Deduce which fields the table is sorted on. @@ -269,9 +270,9 @@ public String getName() { /** * Mock implementation of - * {@link Prepare.PreparingTable}. + * {@link PreparingEntity}. */ - public static class MockTable extends Prepare.AbstractPreparingTable { + public static class MockEntity extends AbstractPreparingEntity { protected final MockCatalogReader catalogReader; protected final boolean stream; @@ -289,7 +290,7 @@ public static class MockTable extends Prepare.AbstractPreparingTable { protected final Set rolledUpColumns = new HashSet<>(); - public MockTable( + public MockEntity( MockCatalogReader catalogReader, String catalogName, String schemaName, String name, boolean stream, double rowCount, ColumnResolver resolver, InitializerExpressionFactory initializerFactory ) { this( catalogReader, ImmutableList.of( catalogName, schemaName, name ), stream, rowCount, resolver, initializerFactory ); @@ -301,7 +302,7 @@ public void registerRolledUpColumn( String columnName ) { } - private MockTable( MockCatalogReader catalogReader, List names, boolean stream, double rowCount, ColumnResolver resolver, InitializerExpressionFactory initializerFactory ) { + private MockEntity( MockCatalogReader catalogReader, List names, boolean stream, double rowCount, ColumnResolver resolver, InitializerExpressionFactory initializerFactory ) { this.catalogReader = catalogReader; this.stream = stream; this.rowCount = rowCount; @@ -314,7 +315,7 @@ private MockTable( MockCatalogReader catalogReader, List names, boolean /** * Copy constructor. 
*/ - protected MockTable( + protected MockEntity( MockCatalogReader catalogReader, boolean stream, double rowCount, List> columnList, List keyList, AlgDataType rowType, List collationList, List names, Set monotonicColumnSet, StructKind kind, ColumnResolver resolver, InitializerExpressionFactory initializerFactory ) { this.catalogReader = catalogReader; @@ -344,7 +345,7 @@ protected ModifiableTable( String tableName ) { @Override public AlgDataType getRowType( AlgDataTypeFactory typeFactory ) { - return typeFactory.createStructType( MockTable.this.getRowType().getFieldList() ); + return typeFactory.createStructType( MockEntity.this.getRowType().getFieldList() ); } @@ -376,8 +377,8 @@ public Expression getExpression( SchemaPlus schema, String tableName, Class claz public C unwrap( Class aClass ) { if ( aClass.isInstance( initializerFactory ) ) { return aClass.cast( initializerFactory ); - } else if ( aClass.isInstance( MockTable.this ) ) { - return aClass.cast( MockTable.this ); + } else if ( aClass.isInstance( MockEntity.this ) ) { + return aClass.cast( MockEntity.this ); } return super.unwrap( aClass ); } @@ -404,8 +405,8 @@ public int getExtendedColumnOffset() { @Override - protected AlgOptTable extend( final Table extendedTable ) { - return new MockTable( catalogReader, names, stream, rowCount, resolver, initializerFactory ) { + protected AlgOptEntity extend( final Table extendedTable ) { + return new MockEntity( catalogReader, names, stream, rowCount, resolver, initializerFactory ) { @Override public AlgDataType getRowType() { return extendedTable.getRowType( catalogReader.typeFactory ); @@ -414,17 +415,17 @@ public AlgDataType getRowType() { } - public static MockTable create( MockCatalogReader catalogReader, MockSchema schema, String name, boolean stream, double rowCount ) { + public static MockEntity create( MockCatalogReader catalogReader, MockSchema schema, String name, boolean stream, double rowCount ) { return create( catalogReader, schema, name, stream, rowCount, null ); } - public static MockTable create( MockCatalogReader catalogReader, List names, boolean stream, double rowCount ) { - return new MockTable( catalogReader, names, stream, rowCount, null, NullInitializerExpressionFactory.INSTANCE ); + public static MockEntity create( MockCatalogReader catalogReader, List names, boolean stream, double rowCount ) { + return new MockEntity( catalogReader, names, stream, rowCount, null, NullInitializerExpressionFactory.INSTANCE ); } - public static MockTable create( + public static MockEntity create( MockCatalogReader catalogReader, MockSchema schema, String name, boolean stream, double rowCount, ColumnResolver resolver ) { @@ -433,8 +434,8 @@ public static MockTable create( } - public static MockTable create( MockCatalogReader catalogReader, MockSchema schema, String name, boolean stream, double rowCount, ColumnResolver resolver, InitializerExpressionFactory initializerExpressionFactory ) { - MockTable table = new MockTable( catalogReader, schema.getCatalogName(), schema.name, name, stream, rowCount, resolver, initializerExpressionFactory ); + public static MockEntity create( MockCatalogReader catalogReader, MockSchema schema, String name, boolean stream, double rowCount, ColumnResolver resolver, InitializerExpressionFactory initializerExpressionFactory ) { + MockEntity table = new MockEntity( catalogReader, schema.getCatalogName(), schema.name, name, stream, rowCount, resolver, initializerExpressionFactory ); schema.addTable( name ); return table; } @@ -472,7 +473,7 @@ public 
AlgOptSchema getRelOptSchema() { @Override public AlgNode toAlg( ToAlgContext context, AlgTraitSet traitSet ) { - return LogicalScan.create( context.getCluster(), this ); + return LogicalRelScan.create( context.getCluster(), this ); } @@ -524,11 +525,6 @@ public List<String> getQualifiedName() { } - @Override - public CatalogTable getCatalogTable() { - return null; - } - @Override public Monotonicity getMonotonicity( String columnName ) { @@ -550,6 +546,12 @@ public Expression getExpression( Class clazz ) { } + @Override + public CatalogEntity getCatalogEntity() { + return null; + } + + public void addColumn( String name, AlgDataType type ) { addColumn( name, type, false ); } @@ -600,11 +602,11 @@ public List<Pair<AlgDataTypeField, List<String>>> resolveColumn( AlgDataType row /** - * Mock implementation of {@link Prepare.PreparingTable} with dynamic record type. + * Mock implementation of {@link PreparingEntity} with dynamic record type. */ - public static class MockDynamicTable extends MockTable { + public static class MockDynamicEntity extends MockEntity { - public MockDynamicTable( MockCatalogReader catalogReader, String catalogName, String schemaName, String name, boolean stream, double rowCount ) { + public MockDynamicEntity( MockCatalogReader catalogReader, String catalogName, String schemaName, String name, boolean stream, double rowCount ) { super( catalogReader, catalogName, schemaName, name, stream, rowCount, null, NullInitializerExpressionFactory.INSTANCE ); } @@ -630,14 +632,14 @@ public AlgNode toAlg( ToAlgContext context, AlgTraitSet traitSet ) { /** - * Wrapper around a {@link MockTable}, giving it a {@link Table} interface. You can get the {@code MockTable} by calling {@link #unwrap(Class)}. + * Wrapper around a {@link MockEntity}, giving it a {@link Table} interface. You can get the {@code MockEntity} by calling {@link #unwrap(Class)}. */ private static class WrapperTable implements Table, Wrapper { - private final MockTable table; + private final MockEntity table; - WrapperTable( MockTable table ) { + WrapperTable( MockEntity table ) { this.table = table; } @@ -721,11 +723,11 @@ public Schema.TableType getJdbcTableType() { /** - * Wrapper around a {@link MockTable}, giving it a {@link StreamableTable} interface. + * Wrapper around a {@link MockEntity}, giving it a {@link StreamableTable} interface. */ private static class StreamableWrapperTable extends WrapperTable implements StreamableTable { - StreamableWrapperTable( MockTable table ) { + StreamableWrapperTable( MockEntity table ) { super( table ); } diff --git a/core/src/test/java/org/polypheny/db/catalog/MockCatalogReaderDocument.java b/core/src/test/java/org/polypheny/db/catalog/MockCatalogReaderDocument.java index 889c4ea379..b3f95160d0 100644 --- a/core/src/test/java/org/polypheny/db/catalog/MockCatalogReaderDocument.java +++ b/core/src/test/java/org/polypheny/db/catalog/MockCatalogReaderDocument.java @@ -44,7 +44,7 @@ public MockCatalogReader init() { registerSchema( salesSchema ); // Register "EMP" table.
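// (For the document model the same mock machinery is reused: a single entity whose
// one column "d" carries the whole document. A condensed, illustrative form of the
// hunk below, using `typeFactory` as elsewhere in these readers; not applied code:
MockEntity docEntity = MockEntity.create( this, salesSchema, "secrets", false, 14, null );
docEntity.addColumn( "d", typeFactory.createPolyType( PolyType.DOCUMENT ) );
registerTable( docEntity );
// The patched registration itself:)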
- final MockTable empTable = MockTable.create( this, salesSchema, "secrets", false, 14, null ); + final MockEntity empTable = MockEntity.create( this, salesSchema, "secrets", false, 14, null ); empTable.addColumn( "d", typeFactory.createPolyType( PolyType.DOCUMENT ) ); registerTable( empTable ); diff --git a/core/src/test/java/org/polypheny/db/catalog/MockCatalogReaderDynamic.java b/core/src/test/java/org/polypheny/db/catalog/MockCatalogReaderDynamic.java index fa9bb39bcd..9f483c567b 100644 --- a/core/src/test/java/org/polypheny/db/catalog/MockCatalogReaderDynamic.java +++ b/core/src/test/java/org/polypheny/db/catalog/MockCatalogReaderDynamic.java @@ -48,17 +48,17 @@ public MockCatalogReader init() { MockSchema schema = new MockSchema( "SALES" ); registerSchema( schema ); - MockTable nationTable = new MockDynamicTable( this, schema.getCatalogName(), schema.getName(), "NATION", false, 100 ); + MockEntity nationTable = new MockDynamicEntity( this, schema.getCatalogName(), schema.getName(), "NATION", false, 100 ); registerTable( nationTable ); - MockTable customerTable = new MockDynamicTable( this, schema.getCatalogName(), schema.getName(), "CUSTOMER", false, 100 ); + MockEntity customerTable = new MockDynamicEntity( this, schema.getCatalogName(), schema.getName(), "CUSTOMER", false, 100 ); registerTable( customerTable ); // CREATE TABLE "REGION" - static table with known schema. final AlgDataType intType = typeFactory.createPolyType( PolyType.INTEGER ); final AlgDataType varcharType = typeFactory.createPolyType( PolyType.VARCHAR ); - MockTable regionTable = MockTable.create( this, schema, "REGION", false, 100 ); + MockEntity regionTable = MockEntity.create( this, schema, "REGION", false, 100 ); regionTable.addColumn( "R_REGIONKEY", intType ); regionTable.addColumn( "R_NAME", varcharType ); regionTable.addColumn( "R_COMMENT", varcharType ); diff --git a/core/src/test/java/org/polypheny/db/catalog/MockCatalogReaderExtended.java b/core/src/test/java/org/polypheny/db/catalog/MockCatalogReaderExtended.java index 0cdef4c2cc..5c607c6ede 100644 --- a/core/src/test/java/org/polypheny/db/catalog/MockCatalogReaderExtended.java +++ b/core/src/test/java/org/polypheny/db/catalog/MockCatalogReaderExtended.java @@ -57,7 +57,7 @@ public MockCatalogReader init() { final List extendedColumns = new ArrayList<>( columnsExtended ); extendedColumns.add( new CompoundNameColumn( "F2", "C2", f.varchar20Type ) ); final CompoundNameColumnResolver structExtendedTableResolver = new CompoundNameColumnResolver( extendedColumns, "F0" ); - final MockTable structExtendedTypeTable = MockTable.create( + final MockEntity structExtendedTypeTable = MockEntity.create( this, structTypeSchema, "T_EXTEND", diff --git a/core/src/test/java/org/polypheny/db/catalog/MockCatalogReaderSimple.java b/core/src/test/java/org/polypheny/db/catalog/MockCatalogReaderSimple.java index e4259fd0c3..3153fd6b08 100644 --- a/core/src/test/java/org/polypheny/db/catalog/MockCatalogReaderSimple.java +++ b/core/src/test/java/org/polypheny/db/catalog/MockCatalogReaderSimple.java @@ -35,6 +35,8 @@ import com.google.common.collect.ImmutableList; +import java.util.Arrays; +import java.util.List; import lombok.Getter; import org.polypheny.db.algebra.constant.FunctionCategory; import org.polypheny.db.algebra.constant.Syntax; @@ -46,9 +48,6 @@ import org.polypheny.db.type.PolyType; import org.polypheny.db.util.InitializerExpressionFactory; -import java.util.Arrays; -import java.util.List; - /** * Simple catalog reader for testing. 
@@ -93,7 +92,7 @@ public MockCatalogReader init() { registerType( ImmutableList.of( salesSchema.getCatalogName(), salesSchema.getName(), "customBigInt" ), typeFactory -> typeFactory.createPolyType( PolyType.BIGINT ) ); // Register "EMP" table. - final MockTable empTable = MockTable.create( this, salesSchema, "EMP", false, 14, null, countingInitializerExpressionFactory ); + final MockEntity empTable = MockEntity.create( this, salesSchema, "EMP", false, 14, null, countingInitializerExpressionFactory ); empTable.addColumn( "EMPNO", fixture.intType, true ); empTable.addColumn( "ENAME", fixture.varchar20Type ); empTable.addColumn( "JOB", fixture.varchar10Type ); @@ -106,7 +105,7 @@ public MockCatalogReader init() { registerTable( empTable ); // Register "EMPNULLABLES" table with nullable columns. - final MockTable empNullablesTable = MockTable.create( this, salesSchema, "EMPNULLABLES", false, 14 ); + final MockEntity empNullablesTable = MockEntity.create( this, salesSchema, "EMPNULLABLES", false, 14 ); empNullablesTable.addColumn( "EMPNO", fixture.intType, true ); empNullablesTable.addColumn( "ENAME", fixture.varchar20Type ); empNullablesTable.addColumn( "JOB", fixture.varchar10TypeNull ); @@ -119,7 +118,7 @@ public MockCatalogReader init() { registerTable( empNullablesTable ); // Register "EMPDEFAULTS" table with default values for some columns. - final MockTable empDefaultsTable = MockTable.create( this, salesSchema, "EMPDEFAULTS", false, 14, null, new EmpInitializerExpressionFactory() ); + final MockEntity empDefaultsTable = MockEntity.create( this, salesSchema, "EMPDEFAULTS", false, 14, null, new EmpInitializerExpressionFactory() ); empDefaultsTable.addColumn( "EMPNO", fixture.intType, true ); empDefaultsTable.addColumn( "ENAME", fixture.varchar20Type ); empDefaultsTable.addColumn( "JOB", fixture.varchar10TypeNull ); @@ -132,7 +131,7 @@ public MockCatalogReader init() { registerTable( empDefaultsTable ); // Register "EMP_B" table. As "EMP", birth with a "BIRTHDATE" column. - final MockTable empBTable = MockTable.create( this, salesSchema, "EMP_B", false, 14 ); + final MockEntity empBTable = MockEntity.create( this, salesSchema, "EMP_B", false, 14 ); empBTable.addColumn( "EMPNO", fixture.intType, true ); empBTable.addColumn( "ENAME", fixture.varchar20Type ); empBTable.addColumn( "JOB", fixture.varchar10Type ); @@ -146,13 +145,13 @@ public MockCatalogReader init() { registerTable( empBTable ); // Register "DEPT" table. - MockTable deptTable = MockTable.create( this, salesSchema, "DEPT", false, 4 ); + MockEntity deptTable = MockEntity.create( this, salesSchema, "DEPT", false, 4 ); deptTable.addColumn( "DEPTNO", fixture.intType, true ); deptTable.addColumn( "NAME", fixture.varchar10Type ); registerTable( deptTable ); // Register "DEPT_NESTED" table. - MockTable deptNestedTable = MockTable.create( this, salesSchema, "DEPT_NESTED", false, 4 ); + MockEntity deptNestedTable = MockEntity.create( this, salesSchema, "DEPT_NESTED", false, 4 ); deptNestedTable.addColumn( "DEPTNO", fixture.intType, true ); deptNestedTable.addColumn( "NAME", fixture.varchar10Type ); deptNestedTable.addColumn( "SKILL", fixture.skillRecordType ); @@ -160,7 +159,7 @@ public MockCatalogReader init() { registerTable( deptNestedTable ); // Register "BONUS" table. 
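// (Aside on "EMPDEFAULTS" above: it is registered with EmpInitializerExpressionFactory,
// whose callbacks this patch retypes from AlgOptTable to AlgOptEntity. A hedged sketch
// of a lookup; `empDefaultsTable` and `ctx` stand in for a registered entity and an
// InitializerContext and are not names from the patch:
InitializerExpressionFactory defaults = new EmpInitializerExpressionFactory();
ColumnStrategy strategy = defaults.generationStrategy( empDefaultsTable, 3 );  // strategy depends on the column index
RexNode filledIn = defaults.newColumnDefaultValue( empDefaultsTable, 3, ctx ); // default literal for that column
// The "BONUS" registration continues below.)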
- MockTable bonusTable = MockTable.create( this, salesSchema, "BONUS", false, 0 ); + MockEntity bonusTable = MockEntity.create( this, salesSchema, "BONUS", false, 0 ); bonusTable.addColumn( "ENAME", fixture.varchar20Type ); bonusTable.addColumn( "JOB", fixture.varchar10Type ); bonusTable.addColumn( "SAL", fixture.intType ); @@ -168,14 +167,14 @@ public MockCatalogReader init() { registerTable( bonusTable ); // Register "SALGRADE" table. - MockTable salgradeTable = MockTable.create( this, salesSchema, "SALGRADE", false, 5 ); + MockEntity salgradeTable = MockEntity.create( this, salesSchema, "SALGRADE", false, 5 ); salgradeTable.addColumn( "GRADE", fixture.intType, true ); salgradeTable.addColumn( "LOSAL", fixture.intType ); salgradeTable.addColumn( "HISAL", fixture.intType ); registerTable( salgradeTable ); // Register "EMP_ADDRESS" table - MockTable contactAddressTable = MockTable.create( this, salesSchema, "EMP_ADDRESS", false, 26 ); + MockEntity contactAddressTable = MockEntity.create( this, salesSchema, "EMP_ADDRESS", false, 26 ); contactAddressTable.addColumn( "EMPNO", fixture.intType, true ); contactAddressTable.addColumn( "HOME_ADDRESS", addressType ); contactAddressTable.addColumn( "MAILING_ADDRESS", addressType ); @@ -186,7 +185,7 @@ public MockCatalogReader init() { registerSchema( customerSchema ); // Register "CONTACT" table. - MockTable contactTable = MockTable.create( this, customerSchema, "CONTACT", false, 1000 ); + MockEntity contactTable = MockEntity.create( this, customerSchema, "CONTACT", false, 1000 ); contactTable.addColumn( "CONTACTNO", fixture.intType ); contactTable.addColumn( "FNAME", fixture.varchar10Type ); contactTable.addColumn( "LNAME", fixture.varchar10Type ); @@ -195,7 +194,7 @@ public MockCatalogReader init() { registerTable( contactTable ); // Register "CONTACT_PEEK" table. The - MockTable contactPeekTable = MockTable.create( this, customerSchema, "CONTACT_PEEK", false, 1000 ); + MockEntity contactPeekTable = MockEntity.create( this, customerSchema, "CONTACT_PEEK", false, 1000 ); contactPeekTable.addColumn( "CONTACTNO", fixture.intType ); contactPeekTable.addColumn( "FNAME", fixture.varchar10Type ); contactPeekTable.addColumn( "LNAME", fixture.varchar10Type ); @@ -205,14 +204,14 @@ public MockCatalogReader init() { registerTable( contactPeekTable ); // Register "ACCOUNT" table. - MockTable accountTable = MockTable.create( this, customerSchema, "ACCOUNT", false, 457 ); + MockEntity accountTable = MockEntity.create( this, customerSchema, "ACCOUNT", false, 457 ); accountTable.addColumn( "ACCTNO", fixture.intType ); accountTable.addColumn( "TYPE", fixture.varchar20Type ); accountTable.addColumn( "BALANCE", fixture.intType ); registerTable( accountTable ); // Register "ORDERS" stream. - MockTable ordersStream = MockTable.create( this, salesSchema, "ORDERS", true, Double.POSITIVE_INFINITY ); + MockEntity ordersStream = MockEntity.create( this, salesSchema, "ORDERS", true, Double.POSITIVE_INFINITY ); ordersStream.addColumn( "ROWTIME", fixture.timestampType ); ordersStream.addMonotonic( "ROWTIME" ); ordersStream.addColumn( "PRODUCTID", fixture.intType ); @@ -220,21 +219,21 @@ public MockCatalogReader init() { registerTable( ordersStream ); // Register "SHIPMENTS" stream. "ROWTIME" is not column 0, just to mix things up. 
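// (Streams register like tables but flag monotonic columns; deduceMonotonicity()
// earlier in MockCatalogReader turns those flags into collations. A condensed
// sketch of the "ORDERS" pattern just shown, illustrative only:
MockEntity orders = MockEntity.create( this, salesSchema, "ORDERS", true, Double.POSITIVE_INFINITY );
orders.addColumn( "ROWTIME", fixture.timestampType );
orders.addMonotonic( "ROWTIME" );   // planner may assume ROWTIME is non-decreasing
registerTable( orders );
// The "SHIPMENTS" variant follows, with ROWTIME deliberately not at index 0.)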
- MockTable shipmentsStream = MockTable.create( this, salesSchema, "SHIPMENTS", true, Double.POSITIVE_INFINITY ); + MockEntity shipmentsStream = MockEntity.create( this, salesSchema, "SHIPMENTS", true, Double.POSITIVE_INFINITY ); shipmentsStream.addColumn( "ORDERID", fixture.intType ); shipmentsStream.addColumn( "ROWTIME", fixture.timestampType ); shipmentsStream.addMonotonic( "ROWTIME" ); registerTable( shipmentsStream ); // Register "PRODUCTS" table. - MockTable productsTable = MockTable.create( this, salesSchema, "PRODUCTS", false, 200D ); + MockEntity productsTable = MockEntity.create( this, salesSchema, "PRODUCTS", false, 200D ); productsTable.addColumn( "PRODUCTID", fixture.intType ); productsTable.addColumn( "NAME", fixture.varchar20Type ); productsTable.addColumn( "SUPPLIERID", fixture.intType ); registerTable( productsTable ); // Register "SUPPLIERS" table. - MockTable suppliersTable = MockTable.create( this, salesSchema, "SUPPLIERS", false, 10D ); + MockEntity suppliersTable = MockEntity.create( this, salesSchema, "SUPPLIERS", false, 10D ); suppliersTable.addColumn( "SUPPLIERID", fixture.intType ); suppliersTable.addColumn( "NAME", fixture.varchar20Type ); suppliersTable.addColumn( "CITY", fixture.intType ); @@ -253,7 +252,7 @@ public MockCatalogReader init() { new CompoundNameColumn( "F1", "C2", fixture.intType ), new CompoundNameColumn( "F2", "C3", fixture.intType ) ); final CompoundNameColumnResolver structTypeTableResolver = new CompoundNameColumnResolver( columns, "F0" ); - final MockTable structTypeTable = MockTable.create( this, structTypeSchema, "T", false, 100, structTypeTableResolver ); + final MockEntity structTypeTable = MockEntity.create( this, structTypeSchema, "T", false, 100, structTypeTableResolver ); for ( CompoundNameColumn column : columns ) { structTypeTable.addColumn( column.getName(), column.type ); } @@ -269,7 +268,7 @@ public MockCatalogReader init() { new CompoundNameColumn( "F0", "C1", fixture.intTypeNull ), new CompoundNameColumn( "F1", "C2", fixture.intType ), new CompoundNameColumn( "F2", "C3", fixture.intTypeNull ) ); - final MockTable structNullableTypeTable = MockTable.create( this, structTypeSchema, "T_NULLABLES", false, 100, structTypeTableResolver ); + final MockEntity structNullableTypeTable = MockEntity.create( this, structTypeSchema, "T_NULLABLES", false, 100, structTypeTableResolver ); for ( CompoundNameColumn column : columnsNullable ) { structNullableTypeTable.addColumn( column.getName(), column.type ); } diff --git a/core/src/test/java/org/polypheny/db/test/JdbcTest.java b/core/src/test/java/org/polypheny/db/test/JdbcTest.java index 842da8c6e6..bb22d891ca 100644 --- a/core/src/test/java/org/polypheny/db/test/JdbcTest.java +++ b/core/src/test/java/org/polypheny/db/test/JdbcTest.java @@ -22,7 +22,7 @@ import org.polypheny.db.algebra.core.Modify; import org.polypheny.db.algebra.logical.relational.LogicalModify; import org.polypheny.db.plan.AlgOptCluster; -import org.polypheny.db.plan.AlgOptTable; +import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.prepare.Prepare; import org.polypheny.db.rex.RexNode; import org.polypheny.db.schema.ModifiableTable; @@ -47,7 +47,7 @@ protected AbstractModifiableTable( String tableName ) { @Override public Modify toModificationAlg( AlgOptCluster cluster, - AlgOptTable table, + AlgOptEntity table, Prepare.CatalogReader catalogReader, AlgNode child, Modify.Operation operation, diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java 
b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java index 338d484a69..5f1b68418d 100644 --- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java @@ -45,8 +45,8 @@ import org.polypheny.db.algebra.BiAlg; import org.polypheny.db.algebra.SingleAlg; import org.polypheny.db.algebra.constant.Kind; +import org.polypheny.db.algebra.logical.relational.LogicalRelScan; import org.polypheny.db.algebra.logical.relational.LogicalRelViewScan; -import org.polypheny.db.algebra.logical.relational.LogicalScan; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.catalog.Catalog; @@ -131,7 +131,6 @@ import org.polypheny.db.routing.RoutingManager; import org.polypheny.db.runtime.PolyphenyDbContextException; import org.polypheny.db.runtime.PolyphenyDbException; -import org.polypheny.db.schema.LogicalTable; import org.polypheny.db.schema.PolySchemaBuilder; import org.polypheny.db.transaction.Statement; import org.polypheny.db.transaction.TransactionException; @@ -2133,12 +2132,12 @@ private List getColumnInformation( List projectedColum private Map> findUnderlyingTablesOfView( AlgNode algNode, Map> underlyingTables, AlgDataType fieldList ) { - if ( algNode instanceof LogicalScan ) { + if ( algNode instanceof LogicalRelScan ) { List underlyingColumns = getUnderlyingColumns( algNode, fieldList ); - underlyingTables.put( algNode.getTable().getTable().getTableId(), underlyingColumns ); + underlyingTables.put( algNode.getTable().getCatalogEntity().id, underlyingColumns ); } else if ( algNode instanceof LogicalRelViewScan ) { List underlyingColumns = getUnderlyingColumns( algNode, fieldList ); - underlyingTables.put( algNode.getTable().getTable().getTableId(), underlyingColumns ); + underlyingTables.put( algNode.getTable().getCatalogEntity().id, underlyingColumns ); } if ( algNode instanceof BiAlg ) { findUnderlyingTablesOfView( ((BiAlg) algNode).getLeft(), underlyingTables, fieldList ); @@ -2151,8 +2150,9 @@ private Map> findUnderlyingTablesOfView( AlgNode algNode, Map getUnderlyingColumns( AlgNode algNode, AlgDataType fieldList ) { - List columnIds = ((LogicalTable) algNode.getTable().getTable()).getColumnIds(); - List logicalColumnNames = ((LogicalTable) algNode.getTable().getTable()).getLogicalColumnNames(); + CatalogTable table = algNode.getTable().getCatalogEntity().unwrap( CatalogTable.class ); + List columnIds = table.fieldIds; + List logicalColumnNames = table.getColumnNames(); List underlyingColumns = new ArrayList<>(); for ( int i = 0; i < columnIds.size(); i++ ) { for ( AlgDataTypeField algDataTypeField : fieldList.getFieldList() ) { diff --git a/dbms/src/main/java/org/polypheny/db/processing/AbstractQueryProcessor.java b/dbms/src/main/java/org/polypheny/db/processing/AbstractQueryProcessor.java index 34c90afaa7..4dad07b088 100644 --- a/dbms/src/main/java/org/polypheny/db/processing/AbstractQueryProcessor.java +++ b/dbms/src/main/java/org/polypheny/db/processing/AbstractQueryProcessor.java @@ -73,7 +73,7 @@ import org.polypheny.db.algebra.logical.lpg.LogicalLpgModify; import org.polypheny.db.algebra.logical.relational.LogicalModify; import org.polypheny.db.algebra.logical.relational.LogicalProject; -import org.polypheny.db.algebra.logical.relational.LogicalScan; +import org.polypheny.db.algebra.logical.relational.LogicalRelScan; import org.polypheny.db.algebra.logical.relational.LogicalValues; import org.polypheny.db.algebra.type.AlgDataType; import 
org.polypheny.db.algebra.type.AlgDataTypeField; @@ -100,7 +100,7 @@ import org.polypheny.db.plan.AlgOptUtil; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.plan.Convention; -import org.polypheny.db.prepare.AlgOptTableImpl; +import org.polypheny.db.prepare.AlgOptEntityImpl; import org.polypheny.db.prepare.Prepare.CatalogReader; import org.polypheny.db.prepare.Prepare.PreparedResult; import org.polypheny.db.prepare.Prepare.PreparedResultImpl; @@ -129,7 +129,6 @@ import org.polypheny.db.routing.dto.ProposedRoutingPlanImpl; import org.polypheny.db.runtime.Bindable; import org.polypheny.db.runtime.Typed; -import org.polypheny.db.schema.LogicalTable; import org.polypheny.db.schema.ModelTrait; import org.polypheny.db.schema.ModelTraitDef; import org.polypheny.db.tools.AlgBuilder; @@ -915,9 +914,9 @@ public AlgNode visit( AlgNode node ) { @Override public AlgNode visit( LogicalProject project ) { - if ( project.getInput() instanceof LogicalScan ) { + if ( project.getInput() instanceof LogicalRelScan ) { // Figure out the original column names required for index lookup - final LogicalScan scan = (LogicalScan) project.getInput(); + final LogicalRelScan scan = (LogicalRelScan) project.getInput(); final String table = scan.getTable().getQualifiedName().get( scan.getTable().getQualifiedName().size() - 1 ); final List columns = new ArrayList<>( project.getChildExps().size() ); final List ctypes = new ArrayList<>( project.getChildExps().size() ); @@ -1324,7 +1323,7 @@ private LogicalQueryInformation analyzeQueryAndPrepareMonitoring( Statement stat */ private Map> getAccessedPartitionsPerScan( AlgNode alg, Map> aggregatedPartitionValues ) { Map> accessedPartitionList = new HashMap<>(); // tableId -> partitionIds - if ( !(alg instanceof LogicalScan) ) { + if ( !(alg instanceof LogicalRelScan) ) { for ( int i = 0; i < alg.getInputs().size(); i++ ) { Map> result = getAccessedPartitionsPerScan( alg.getInput( i ), aggregatedPartitionValues ); if ( !result.isEmpty() ) { @@ -1336,70 +1335,69 @@ private Map> getAccessedPartitionsPerScan( AlgNode alg, Map< } else { boolean fallback = false; if ( alg.getTable() != null ) { - AlgOptTableImpl table = (AlgOptTableImpl) alg.getTable(); - if ( table.getTable() instanceof LogicalTable ) { - LogicalTable logicalTable = ((LogicalTable) table.getTable()); - int scanId = alg.getId(); - - if ( logicalTable.getTableId() == -1 ) { - // todo dl: remove after RowType refactor - return accessedPartitionList; - } + AlgOptEntityImpl table = (AlgOptEntityImpl) alg.getTable(); + + int scanId = alg.getId(); + + if ( table.getCatalogEntity() == null ) { + // todo dl: remove after RowType refactor + return accessedPartitionList; + } - // Get placements of this table - CatalogTable catalogTable = Catalog.getInstance().getTable( logicalTable.getTableId() ); + // Get placements of this table + CatalogTable catalogTable = table.getCatalogEntity().unwrap( CatalogTable.class ); - if ( aggregatedPartitionValues.containsKey( scanId ) ) { - if ( aggregatedPartitionValues.get( scanId ) != null ) { - if ( !aggregatedPartitionValues.get( scanId ).isEmpty() ) { - List partitionValues = new ArrayList<>( aggregatedPartitionValues.get( scanId ) ); + if ( aggregatedPartitionValues.containsKey( scanId ) ) { + if ( aggregatedPartitionValues.get( scanId ) != null ) { + if ( !aggregatedPartitionValues.get( scanId ).isEmpty() ) { + List partitionValues = new ArrayList<>( aggregatedPartitionValues.get( scanId ) ); + if ( log.isDebugEnabled() ) { + log.debug( + "TableID: {} is partitioned on 
column: {} - {}", + catalogTable.id, + catalogTable.partitionProperty.partitionColumnId, + Catalog.getInstance().getColumn( catalogTable.partitionProperty.partitionColumnId ).name ); + } + List identifiedPartitions = new ArrayList<>(); + for ( String partitionValue : partitionValues ) { if ( log.isDebugEnabled() ) { - log.debug( - "TableID: {} is partitioned on column: {} - {}", - logicalTable.getTableId(), - catalogTable.partitionProperty.partitionColumnId, - Catalog.getInstance().getColumn( catalogTable.partitionProperty.partitionColumnId ).name ); + log.debug( "Extracted PartitionValue: {}", partitionValue ); } - List identifiedPartitions = new ArrayList<>(); - for ( String partitionValue : partitionValues ) { - if ( log.isDebugEnabled() ) { - log.debug( "Extracted PartitionValue: {}", partitionValue ); - } - long identifiedPartition = PartitionManagerFactory.getInstance() - .getPartitionManager( catalogTable.partitionProperty.partitionType ) - .getTargetPartitionId( catalogTable, partitionValue ); + long identifiedPartition = PartitionManagerFactory.getInstance() + .getPartitionManager( catalogTable.partitionProperty.partitionType ) + .getTargetPartitionId( catalogTable, partitionValue ); - identifiedPartitions.add( identifiedPartition ); - if ( log.isDebugEnabled() ) { - log.debug( "Identified PartitionId: {} for value: {}", identifiedPartition, partitionValue ); - } + identifiedPartitions.add( identifiedPartition ); + if ( log.isDebugEnabled() ) { + log.debug( "Identified PartitionId: {} for value: {}", identifiedPartition, partitionValue ); } - - accessedPartitionList.merge( - scanId, - identifiedPartitions, - ( l1, l2 ) -> Stream.concat( l1.stream(), l2.stream() ).collect( Collectors.toList() ) ); - scanPerTable.putIfAbsent( scanId, catalogTable.id ); - // Fallback all partitionIds are needed - } else { - fallback = true; } + + accessedPartitionList.merge( + scanId, + identifiedPartitions, + ( l1, l2 ) -> Stream.concat( l1.stream(), l2.stream() ).collect( Collectors.toList() ) ); + scanPerTable.putIfAbsent( scanId, catalogTable.id ); + // Fallback all partitionIds are needed } else { fallback = true; } } else { fallback = true; } + } else { + fallback = true; + } - if ( fallback ) { - accessedPartitionList.merge( - scanId, - catalogTable.partitionProperty.partitionIds, - ( l1, l2 ) -> Stream.concat( l1.stream(), l2.stream() ).collect( Collectors.toList() ) ); - scanPerTable.putIfAbsent( scanId, catalogTable.id ); - } + if ( fallback ) { + accessedPartitionList.merge( + scanId, + catalogTable.partitionProperty.partitionIds, + ( l1, l2 ) -> Stream.concat( l1.stream(), l2.stream() ).collect( Collectors.toList() ) ); + scanPerTable.putIfAbsent( scanId, catalogTable.id ); } + } } return accessedPartitionList; diff --git a/dbms/src/main/java/org/polypheny/db/processing/ConstraintEnforceAttacher.java b/dbms/src/main/java/org/polypheny/db/processing/ConstraintEnforceAttacher.java index 9b725d6bd8..874913e869 100644 --- a/dbms/src/main/java/org/polypheny/db/processing/ConstraintEnforceAttacher.java +++ b/dbms/src/main/java/org/polypheny/db/processing/ConstraintEnforceAttacher.java @@ -48,7 +48,7 @@ import org.polypheny.db.algebra.logical.common.LogicalConstraintEnforcer.ModifyExtractor; import org.polypheny.db.algebra.logical.relational.LogicalFilter; import org.polypheny.db.algebra.logical.relational.LogicalProject; -import org.polypheny.db.algebra.logical.relational.LogicalScan; +import org.polypheny.db.algebra.logical.relational.LogicalRelScan; import 
org.polypheny.db.algebra.logical.relational.LogicalValues; import org.polypheny.db.algebra.operators.OperatorName; import org.polypheny.db.catalog.Catalog; @@ -76,8 +76,8 @@ import org.polypheny.db.information.InformationPage; import org.polypheny.db.information.InformationQueryPlan; import org.polypheny.db.languages.OperatorRegistry; +import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptSchema; -import org.polypheny.db.plan.AlgOptTable; import org.polypheny.db.plan.AlgOptUtil; import org.polypheny.db.rex.RexBuilder; import org.polypheny.db.rex.RexDynamicParam; @@ -239,7 +239,7 @@ public static AlgRoot enforceConstraintBeforeQuery( AlgRoot logicalRoot, Stateme continue; } // Enforce uniqueness between the already existing values and the new values - final AlgNode scan = LogicalScan.create( root.getCluster(), root.getTable() ); + final AlgNode scan = LogicalRelScan.create( root.getCluster(), root.getTable() ); RexNode joinCondition = rexBuilder.makeLiteral( true ); // // TODO: Here we get issues with batch queries @@ -342,8 +342,8 @@ public static AlgRoot enforceConstraintBeforeQuery( AlgRoot logicalRoot, Stateme final RexBuilder rexBuilder = root.getCluster().getRexBuilder(); for ( final CatalogForeignKey foreignKey : foreignKeys ) { final AlgOptSchema algOptSchema = root.getCatalogReader(); - final AlgOptTable algOptTable = algOptSchema.getTableForMember( Collections.singletonList( foreignKey.getReferencedKeyTableName() ) ); - final LogicalScan scan = LogicalScan.create( root.getCluster(), algOptTable ); + final AlgOptEntity algOptEntity = algOptSchema.getTableForMember( Collections.singletonList( foreignKey.getReferencedKeyTableName() ) ); + final LogicalRelScan scan = LogicalRelScan.create( root.getCluster(), algOptEntity ); RexNode joinCondition = rexBuilder.makeLiteral( true ); builder.push( input ); builder.project( foreignKey.getColumnNames().stream().map( builder::field ).collect( Collectors.toList() ) ); diff --git a/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java b/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java index 10fbe8baba..a155eed106 100644 --- a/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java +++ b/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java @@ -59,7 +59,7 @@ import org.polypheny.db.partition.PartitionManager; import org.polypheny.db.partition.PartitionManagerFactory; import org.polypheny.db.plan.AlgOptCluster; -import org.polypheny.db.plan.AlgOptTable; +import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.rex.RexBuilder; import org.polypheny.db.rex.RexDynamicParam; import org.polypheny.db.rex.RexNode; @@ -322,7 +322,7 @@ public AlgRoot buildDeleteStatement( Statement statement, List buildSelect( AlgNode node, List( placements.values() ).get( 0 ).get( 0 ); // todo dl: remove after RowType refactor - if ( statement.getTransaction().getCatalogReader().getTable( List.of( placement.getLogicalSchemaName(), placement.getLogicalTableName() ) ).getTable().getSchemaType() == NamespaceType.DOCUMENT ) { + if ( catalog.getTable( placement.tableId ).namespaceType == NamespaceType.DOCUMENT ) { AlgDataType rowType = new AlgRecordType( List.of( new AlgDataTypeFieldImpl( "d", 0, cluster.getTypeFactory().createPolyType( PolyType.DOCUMENT ) ) ) ); builder.push( new LogicalTransformer( node.getCluster(), @@ -479,7 +479,7 @@ private AlgNode handleGraphOnDocument( LogicalLpgScan alg, CatalogSchema namespa List> scans = collections.stream() .map( t -> { RoutedAlgBuilder 
algBuilder = RoutedAlgBuilder.create( statement, alg.getCluster() ); - AlgOptTable collection = statement.getTransaction().getCatalogReader().getCollection( List.of( t.getNamespaceName(), t.name ) ); + AlgOptEntity collection = statement.getTransaction().getCatalogReader().getCollection( List.of( t.getNamespaceName(), t.name ) ); AlgNode scan = algBuilder.documentScan( collection ).build(); routeDocument( algBuilder, (AlgNode & DocumentAlg) scan, statement ); return Pair.of( t.name, algBuilder.build() ); @@ -496,11 +496,11 @@ private AlgNode handleGraphOnDocument( LogicalLpgScan alg, CatalogSchema namespa public AlgNode getRelationalScan( LogicalLpgScan alg, int adapterId, Statement statement ) { CatalogGraphMapping mapping = Catalog.getInstance().getGraphMapping( alg.getGraph().getId() ); - PreparingTable nodesTable = getSubstitutionTable( statement, mapping.nodesId, mapping.idNodeId, adapterId ); - PreparingTable nodePropertiesTable = getSubstitutionTable( statement, mapping.nodesPropertyId, mapping.idNodesPropertyId, adapterId ); - PreparingTable edgesTable = getSubstitutionTable( statement, mapping.edgesId, mapping.idEdgeId, adapterId ); + PreparingEntity nodesTable = getSubstitutionTable( statement, mapping.nodesId, mapping.idNodeId, adapterId ); + PreparingEntity nodePropertiesTable = getSubstitutionTable( statement, mapping.nodesPropertyId, mapping.idNodesPropertyId, adapterId ); + PreparingEntity edgesTable = getSubstitutionTable( statement, mapping.edgesId, mapping.idEdgeId, adapterId ); - PreparingTable edgePropertiesTable = getSubstitutionTable( statement, mapping.edgesPropertyId, mapping.idEdgesPropertyId, adapterId ); + PreparingEntity edgePropertiesTable = getSubstitutionTable( statement, mapping.edgesPropertyId, mapping.idEdgesPropertyId, adapterId ); AlgNode node = buildSubstitutionJoin( alg, nodesTable, nodePropertiesTable ); @@ -511,7 +511,7 @@ public AlgNode getRelationalScan( LogicalLpgScan alg, int adapterId, Statement s } - protected PreparingTable getSubstitutionTable( Statement statement, long tableId, long columnId, int adapterId ) { + protected PreparingEntity getSubstitutionTable( Statement statement, long tableId, long columnId, int adapterId ) { CatalogTable nodes = Catalog.getInstance().getTable( tableId ); CatalogColumnPlacement placement = Catalog.getInstance().getColumnPlacement( adapterId, columnId ); List qualifiedTableName = ImmutableList.of( @@ -526,10 +526,10 @@ protected PreparingTable getSubstitutionTable( Statement statement, long tableId } - protected AlgNode buildSubstitutionJoin( AlgNode alg, PreparingTable nodesTable, PreparingTable nodePropertiesTable ) { + protected AlgNode buildSubstitutionJoin( AlgNode alg, PreparingEntity nodesTable, PreparingEntity nodePropertiesTable ) { AlgTraitSet out = alg.getTraitSet().replace( ModelTrait.RELATIONAL ); - LogicalScan nodes = new LogicalScan( alg.getCluster(), out, nodesTable ); - LogicalScan nodesProperty = new LogicalScan( alg.getCluster(), out, nodePropertiesTable ); + LogicalRelScan nodes = new LogicalRelScan( alg.getCluster(), out, nodesTable ); + LogicalRelScan nodesProperty = new LogicalRelScan( alg.getCluster(), out, nodePropertiesTable ); RexBuilder builder = alg.getCluster().getRexBuilder(); @@ -546,15 +546,15 @@ protected RoutedAlgBuilder handleDocumentScan( DocumentScan alg, Statement state Catalog catalog = Catalog.getInstance(); PolyphenyDbCatalogReader reader = statement.getTransaction().getCatalogReader(); - if ( alg.getCollection().getTable().getSchemaType() != NamespaceType.DOCUMENT ) 
{ - if ( alg.getCollection().getTable().getSchemaType() == NamespaceType.GRAPH ) { + if ( alg.getCollection().getCatalogEntity().namespaceType != NamespaceType.DOCUMENT ) { + if ( alg.getCollection().getCatalogEntity().namespaceType == NamespaceType.GRAPH ) { return handleDocumentOnGraph( alg, statement, builder ); } return handleTransformerDocScan( alg, statement, builder ); } - CatalogCollection collection = catalog.getCollection( alg.getCollection().getTable().getTableId() ); + CatalogCollection collection = alg.getCollection().getCatalogEntity().unwrap( CatalogCollection.class ); List scans = new ArrayList<>(); @@ -565,7 +565,7 @@ protected RoutedAlgBuilder handleDocumentScan( DocumentScan alg, Statement state for ( Integer placementId : placements ) { CatalogAdapter adapter = catalog.getAdapter( placementId ); - NamespaceType sourceModel = alg.getCollection().getTable().getSchemaType(); + NamespaceType sourceModel = collection.namespaceType; if ( !adapter.getSupportedNamespaces().contains( sourceModel ) ) { // document on relational @@ -575,7 +575,7 @@ protected RoutedAlgBuilder handleDocumentScan( DocumentScan alg, Statement state CatalogCollectionPlacement placement = catalog.getCollectionPlacement( collection.id, placementId ); String namespaceName = PolySchemaBuilder.buildAdapterSchemaName( adapter.uniqueName, collection.getNamespaceName(), placement.physicalNamespaceName ); String collectionName = collection.name + "_" + placement.id; - AlgOptTable collectionTable = reader.getCollection( List.of( namespaceName, collectionName ) ); + AlgOptEntity collectionTable = reader.getCollection( List.of( namespaceName, collectionName ) ); // we might previously have pushed the non-native transformer builder.clear(); return builder.push( LogicalDocumentScan.create( alg.getCluster(), collectionTable ) ); @@ -591,7 +591,7 @@ protected RoutedAlgBuilder handleDocumentScan( DocumentScan alg, Statement state private RoutedAlgBuilder handleTransformerDocScan( DocumentScan alg, Statement statement, RoutedAlgBuilder builder ) { - AlgNode scan = buildJoinedScan( statement, alg.getCluster(), selectPlacement( catalog.getTable( alg.getCollection().getTable().getTableId() ) ) ); + AlgNode scan = buildJoinedScan( statement, alg.getCluster(), selectPlacement( catalog.getTable( alg.getCollection().getCatalogEntity().id ) ) ); builder.push( scan ); AlgTraitSet out = alg.getTraitSet().replace( ModelTrait.RELATIONAL ); @@ -602,9 +602,9 @@ private RoutedAlgBuilder handleTransformerDocScan( DocumentScan alg, Statement s @NotNull private RoutedAlgBuilder handleDocumentOnRelational( DocumentScan node, Integer adapterId, Statement statement, RoutedAlgBuilder builder ) { - List columns = catalog.getColumns( node.getCollection().getTable().getTableId() ); + List columns = catalog.getColumns( node.getCollection().getCatalogEntity().id ); AlgTraitSet out = node.getTraitSet().replace( ModelTrait.RELATIONAL ); - PreparingTable subTable = getSubstitutionTable( statement, node.getCollection().getTable().getTableId(), columns.get( 0 ).id, adapterId ); + PreparingEntity subTable = getSubstitutionTable( statement, node.getCollection().getCatalogEntity().id, columns.get( 0 ).id, adapterId ); builder.scan( subTable ); builder.project( node.getCluster().getRexBuilder().makeInputRef( subTable.getRowType().getFieldList().get( 1 ).getType(), 1 ) ); builder.push( new LogicalTransformer( builder.getCluster(), List.of( builder.build() ), null, out.replace( ModelTrait.DOCUMENT ), ModelTrait.RELATIONAL, ModelTrait.DOCUMENT, 
node.getRowType(), false ) ); @@ -614,7 +614,7 @@ private RoutedAlgBuilder handleDocumentOnRelational( DocumentScan node, Integer private RoutedAlgBuilder handleDocumentOnGraph( DocumentScan alg, Statement statement, RoutedAlgBuilder builder ) { AlgTraitSet out = alg.getTraitSet().replace( ModelTrait.GRAPH ); - builder.lpgScan( alg.getCollection().getTable().getTableId() ); + builder.lpgScan( alg.getCollection().getCatalogEntity().id ); List names = alg.getCollection().getQualifiedName(); builder.lpgMatch( List.of( builder.lpgNodeMatch( List.of( names.get( names.size() - 1 ) ) ) ), List.of( "n" ) ); AlgNode unrouted = builder.build(); diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/CachedPlanRouter.java b/dbms/src/main/java/org/polypheny/db/routing/routers/CachedPlanRouter.java index 2369ceee0f..6c19b380d2 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/CachedPlanRouter.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/CachedPlanRouter.java @@ -24,16 +24,15 @@ import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.AlgRoot; import org.polypheny.db.algebra.core.document.DocumentScan; -import org.polypheny.db.algebra.logical.relational.LogicalScan; +import org.polypheny.db.algebra.logical.relational.LogicalRelScan; import org.polypheny.db.algebra.logical.relational.LogicalValues; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; import org.polypheny.db.catalog.entity.CatalogTable; import org.polypheny.db.plan.AlgOptCluster; -import org.polypheny.db.prepare.AlgOptTableImpl; +import org.polypheny.db.prepare.AlgOptEntityImpl; import org.polypheny.db.routing.LogicalQueryInformation; import org.polypheny.db.routing.dto.CachedProposedRoutingPlan; -import org.polypheny.db.schema.LogicalTable; import org.polypheny.db.tools.RoutedAlgBuilder; import org.polypheny.db.transaction.Statement; @@ -62,13 +61,9 @@ private RoutedAlgBuilder buildCachedSelect( AlgNode node, RoutedAlgBuilder build return super.handleDocumentScan( (DocumentScan) node, statement, builder, null ); } - if ( node instanceof LogicalScan && node.getTable() != null ) { - AlgOptTableImpl table = (AlgOptTableImpl) node.getTable(); - if ( !(table.getTable() instanceof LogicalTable) ) { - throw new RuntimeException( "Unexpected table. Only logical tables expected here!" 
); - } - LogicalTable logicalTable = ((LogicalTable) table.getTable()); - CatalogTable catalogTable = catalog.getTable( logicalTable.getTableId() ); + if ( node instanceof LogicalRelScan && node.getTable() != null ) { + AlgOptEntityImpl table = (AlgOptEntityImpl) node.getTable(); + CatalogTable catalogTable = table.getCatalogEntity().unwrap( CatalogTable.class ); List partitionIds = catalogTable.partitionProperty.partitionIds; Map> placement = new HashMap<>(); diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java b/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java index c3e1ce556c..f779f5bc91 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java @@ -69,7 +69,7 @@ import org.polypheny.db.algebra.logical.relational.LogicalModify; import org.polypheny.db.algebra.logical.relational.LogicalModifyCollect; import org.polypheny.db.algebra.logical.relational.LogicalProject; -import org.polypheny.db.algebra.logical.relational.LogicalScan; +import org.polypheny.db.algebra.logical.relational.LogicalRelScan; import org.polypheny.db.algebra.logical.relational.LogicalValues; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; @@ -94,11 +94,11 @@ import org.polypheny.db.partition.PartitionManager; import org.polypheny.db.partition.PartitionManagerFactory; import org.polypheny.db.plan.AlgOptCluster; -import org.polypheny.db.plan.AlgOptTable; -import org.polypheny.db.prepare.AlgOptTableImpl; +import org.polypheny.db.plan.AlgOptEntity; +import org.polypheny.db.prepare.AlgOptEntityImpl; import org.polypheny.db.prepare.PolyphenyDbCatalogReader; import org.polypheny.db.prepare.Prepare.CatalogReader; -import org.polypheny.db.prepare.Prepare.PreparingTable; +import org.polypheny.db.prepare.Prepare.PreparingEntity; import org.polypheny.db.processing.WhereClauseVisitor; import org.polypheny.db.rex.RexBuilder; import org.polypheny.db.rex.RexCall; @@ -109,7 +109,6 @@ import org.polypheny.db.routing.DmlRouter; import org.polypheny.db.routing.LogicalQueryInformation; import org.polypheny.db.routing.RoutingManager; -import org.polypheny.db.schema.LogicalTable; import org.polypheny.db.schema.ModelTrait; import org.polypheny.db.schema.ModifiableCollection; import org.polypheny.db.schema.ModifiableTable; @@ -136,15 +135,10 @@ public AlgNode routeDml( LogicalModify modify, Statement statement ) { throw new RuntimeException( "Unexpected operator!" ); } - AlgOptTableImpl table = (AlgOptTableImpl) modify.getTable(); + AlgOptEntityImpl table = (AlgOptEntityImpl) modify.getTable(); - if ( !(table.getTable() instanceof LogicalTable) ) { - throw new RuntimeException( "Unexpected table. Only logical tables expected here!" 
); - } - - LogicalTable t = ((LogicalTable) table.getTable()); // Get placements of this table - CatalogTable catalogTable = catalog.getTable( t.getTableId() ); + CatalogTable catalogTable = table.getCatalogEntity().unwrap( CatalogTable.class ); // Make sure that this table can be modified if ( !catalogTable.modifiable ) { @@ -417,8 +411,8 @@ else if ( identifiedPartitionForSetValue != -1 ) { catalogTable.getNamespaceName(), pkPlacement.physicalSchemaName ), - t.getLogicalTableName() + "_" + currentPartitionId ); - AlgOptTable physical = catalogReader.getTableForMember( qualifiedTableName ); + catalogTable.name + "_" + currentPartitionId ); + AlgOptEntity physical = catalogReader.getTableForMember( qualifiedTableName ); ModifiableTable modifiableTable = physical.unwrap( ModifiableTable.class ); // Build DML @@ -508,8 +502,8 @@ else if ( identifiedPartitionForSetValue != -1 ) { catalogTable.getNamespaceName(), pkPlacement.physicalSchemaName ), - t.getLogicalTableName() + "_" + entry.getKey() ); - AlgOptTable physical = catalogReader.getTableForMember( qualifiedTableName ); + catalogTable.name + "_" + entry.getKey() ); + AlgOptEntity physical = catalogReader.getTableForMember( qualifiedTableName ); ModifiableTable modifiableTable = physical.unwrap( ModifiableTable.class ); // Build DML @@ -597,8 +591,8 @@ else if ( identifiedPartitionForSetValue != -1 ) { catalogTable.getNamespaceName(), pkPlacement.physicalSchemaName ), - t.getLogicalTableName() + "_" + partitionId ); - AlgOptTable physical = catalogReader.getTableForMember( qualifiedTableName ); + catalogTable.name + "_" + partitionId ); + AlgOptEntity physical = catalogReader.getTableForMember( qualifiedTableName ); // Build DML Modify adjustedModify; @@ -726,7 +720,7 @@ public AlgNode handleBatchIterator( AlgNode alg, Statement statement, LogicalQue public AlgNode routeDocumentDml( LogicalDocumentModify alg, Statement statement, LogicalQueryInformation queryInformation, Integer adapterId ) { PolyphenyDbCatalogReader reader = statement.getTransaction().getCatalogReader(); - CatalogCollection collection = Catalog.getInstance().getCollection( alg.getCollection().getTable().getTableId() ); + CatalogCollection collection = alg.getTable().getCatalogEntity().unwrap( CatalogCollection.class ); List modifies = new ArrayList<>(); @@ -742,7 +736,7 @@ public AlgNode routeDocumentDml( LogicalDocumentModify alg, Statement statement, String collectionName = collection.name + "_" + placement.id; - AlgOptTable document = reader.getCollection( List.of( namespaceName, collectionName ) ); + AlgOptEntity document = reader.getCollection( List.of( namespaceName, collectionName ) ); if ( !adapter.getSupportedNamespaces().contains( NamespaceType.DOCUMENT ) ) { // move "slower" updates in front modifies.add( 0, attachRelationalModify( alg, statement, placementId, queryInformation ) ); @@ -862,9 +856,9 @@ private AlgNode buildGraphDml( AlgNode node, Statement statement, int adapterId private AlgNode attachRelationalModify( LogicalDocumentModify alg, Statement statement, int adapterId, LogicalQueryInformation queryInformation ) { - CatalogCollectionMapping mapping = Catalog.getInstance().getCollectionMapping( alg.getCollection().getTable().getTableId() ); + CatalogCollectionMapping mapping = Catalog.getInstance().getCollectionMapping( alg.getCollection().getCatalogEntity().id ); - PreparingTable collectionTable = getSubstitutionTable( statement, mapping.collectionId, mapping.idId, adapterId ); + PreparingEntity collectionTable = getSubstitutionTable( statement, 
mapping.collectionId, mapping.idId, adapterId ); switch ( alg.operation ) { case INSERT: @@ -881,7 +875,7 @@ private AlgNode attachRelationalModify( LogicalDocumentModify alg, Statement sta } - private List attachRelationalDoc( LogicalDocumentModify alg, Statement statement, PreparingTable collectionTable, LogicalQueryInformation queryInformation, int adapterId ) { + private List attachRelationalDoc( LogicalDocumentModify alg, Statement statement, PreparingEntity collectionTable, LogicalQueryInformation queryInformation, int adapterId ) { RoutedAlgBuilder builder = attachDocUpdate( alg.getInput(), statement, collectionTable, RoutedAlgBuilder.create( statement, alg.getCluster() ), queryInformation, adapterId ); RexBuilder rexBuilder = alg.getCluster().getRexBuilder(); AlgBuilder algBuilder = AlgBuilder.create( statement ); @@ -925,7 +919,7 @@ private AlgNode createDocumentTransform( AlgNode query, RexBuilder rexBuilder ) } - private RoutedAlgBuilder attachDocUpdate( AlgNode alg, Statement statement, PreparingTable collectionTable, RoutedAlgBuilder builder, LogicalQueryInformation information, int adapterId ) { + private RoutedAlgBuilder attachDocUpdate( AlgNode alg, Statement statement, PreparingEntity collectionTable, RoutedAlgBuilder builder, LogicalQueryInformation information, int adapterId ) { switch ( ((DocumentAlg) alg).getDocType() ) { case SCAN: @@ -962,7 +956,7 @@ private RoutedAlgBuilder attachDocUpdate( AlgNode alg, Statement statement, Prep } - private List attachRelationalDocInsert( LogicalDocumentModify alg, Statement statement, PreparingTable collectionTable, LogicalQueryInformation queryInformation, int adapterId ) { + private List attachRelationalDocInsert( LogicalDocumentModify alg, Statement statement, PreparingEntity collectionTable, LogicalQueryInformation queryInformation, int adapterId ) { if ( alg.getInput() instanceof DocumentValues ) { // simple value insert AlgNode values = ((LogicalDocumentValues) alg.getInput()).getRelationalEquivalent( List.of(), List.of( collectionTable ), statement.getTransaction().getCatalogReader() ).get( 0 ); @@ -976,10 +970,10 @@ private List attachRelationalDocInsert( LogicalDocumentModify alg, Stat private AlgNode attachRelationalModify( LogicalLpgModify alg, int adapterId, Statement statement ) { CatalogGraphMapping mapping = Catalog.getInstance().getGraphMapping( alg.getGraph().getId() ); - PreparingTable nodesTable = getSubstitutionTable( statement, mapping.nodesId, mapping.idNodeId, adapterId ); - PreparingTable nodePropertiesTable = getSubstitutionTable( statement, mapping.nodesPropertyId, mapping.idNodesPropertyId, adapterId ); - PreparingTable edgesTable = getSubstitutionTable( statement, mapping.edgesId, mapping.idEdgeId, adapterId ); - PreparingTable edgePropertiesTable = getSubstitutionTable( statement, mapping.edgesPropertyId, mapping.idEdgesPropertyId, adapterId ); + PreparingEntity nodesTable = getSubstitutionTable( statement, mapping.nodesId, mapping.idNodeId, adapterId ); + PreparingEntity nodePropertiesTable = getSubstitutionTable( statement, mapping.nodesPropertyId, mapping.idNodesPropertyId, adapterId ); + PreparingEntity edgesTable = getSubstitutionTable( statement, mapping.edgesId, mapping.idEdgeId, adapterId ); + PreparingEntity edgePropertiesTable = getSubstitutionTable( statement, mapping.edgesPropertyId, mapping.idEdgesPropertyId, adapterId ); List inputs = new ArrayList<>(); switch ( alg.operation ) { @@ -1025,7 +1019,7 @@ private AlgNode attachRelationalModify( LogicalLpgModify alg, int adapterId, Sta } - 
private AlgNode attachRelationalGraphUpdate( LogicalLpgModify alg, Statement statement, PreparingTable nodesTable, PreparingTable nodePropertiesTable, PreparingTable edgesTable, PreparingTable edgePropertiesTable, int adapterId ) { + private AlgNode attachRelationalGraphUpdate( LogicalLpgModify alg, Statement statement, PreparingEntity nodesTable, PreparingEntity nodePropertiesTable, PreparingEntity edgesTable, PreparingEntity edgePropertiesTable, int adapterId ) { AlgNode project = new LogicalLpgProject( alg.getCluster(), alg.getTraitSet(), buildGraphDml( alg.getInput(), statement, adapterId ), alg.operations, alg.ids ); List inputs = new ArrayList<>(); @@ -1049,7 +1043,7 @@ private AlgNode attachRelationalGraphUpdate( LogicalLpgModify alg, Statement sta } - private AlgNode attachRelationalGraphDelete( LogicalLpgModify alg, Statement statement, PreparingTable nodesTable, PreparingTable nodePropertiesTable, PreparingTable edgesTable, PreparingTable edgePropertiesTable, int adapterId ) { + private AlgNode attachRelationalGraphDelete( LogicalLpgModify alg, Statement statement, PreparingEntity nodesTable, PreparingEntity nodePropertiesTable, PreparingEntity edgesTable, PreparingEntity edgePropertiesTable, int adapterId ) { AlgNode project = new LogicalLpgProject( alg.getCluster(), alg.getTraitSet(), buildGraphDml( alg.getInput(), statement, adapterId ), alg.operations, alg.ids ); List inputs = new ArrayList<>(); @@ -1071,7 +1065,7 @@ private AlgNode attachRelationalGraphDelete( LogicalLpgModify alg, Statement sta } - private List attachPreparedGraphNodeModifyDelete( AlgOptCluster cluster, PreparingTable nodesTable, PreparingTable nodePropertiesTable, Statement statement ) { + private List attachPreparedGraphNodeModifyDelete( AlgOptCluster cluster, PreparingEntity nodesTable, PreparingEntity nodePropertiesTable, Statement statement ) { AlgBuilder algBuilder = AlgBuilder.create( statement ); RexBuilder rexBuilder = algBuilder.getRexBuilder(); AlgDataTypeFactory typeFactory = rexBuilder.getTypeFactory(); @@ -1102,7 +1096,7 @@ private List attachPreparedGraphNodeModifyDelete( AlgOptCluster cluster } - private AlgNode attachRelationalRelatedInsert( LogicalLpgModify alg, Statement statement, PreparingTable nodesTable, PreparingTable nodePropertiesTable, PreparingTable edgesTable, PreparingTable edgePropertiesTable, int adapterId ) { + private AlgNode attachRelationalRelatedInsert( LogicalLpgModify alg, Statement statement, PreparingEntity nodesTable, PreparingEntity nodePropertiesTable, PreparingEntity edgesTable, PreparingEntity edgePropertiesTable, int adapterId ) { AlgNode project = buildGraphDml( alg.getInput(), statement, adapterId ); List inputs = new ArrayList<>(); @@ -1123,7 +1117,7 @@ private AlgNode attachRelationalRelatedInsert( LogicalLpgModify alg, Statement s } - private List attachPreparedGraphNodeModifyInsert( AlgOptCluster cluster, PreparingTable nodesTable, PreparingTable nodePropertiesTable, Statement statement ) { + private List attachPreparedGraphNodeModifyInsert( AlgOptCluster cluster, PreparingEntity nodesTable, PreparingEntity nodePropertiesTable, Statement statement ) { AlgBuilder algBuilder = AlgBuilder.create( statement ); RexBuilder rexBuilder = algBuilder.getRexBuilder(); AlgDataTypeFactory typeFactory = rexBuilder.getTypeFactory(); @@ -1152,7 +1146,7 @@ private List attachPreparedGraphNodeModifyInsert( AlgOptCluster cluster } - private List attachPreparedGraphEdgeModifyDelete( AlgOptCluster cluster, PreparingTable edgesTable, PreparingTable edgePropertiesTable, 
Statement statement ) { + private List attachPreparedGraphEdgeModifyDelete( AlgOptCluster cluster, PreparingEntity edgesTable, PreparingEntity edgePropertiesTable, Statement statement ) { AlgBuilder algBuilder = AlgBuilder.create( statement ); RexBuilder rexBuilder = algBuilder.getRexBuilder(); AlgDataTypeFactory typeFactory = rexBuilder.getTypeFactory(); @@ -1180,7 +1174,7 @@ private List attachPreparedGraphEdgeModifyDelete( AlgOptCluster cluster } - private List attachPreparedGraphEdgeModifyInsert( AlgOptCluster cluster, PreparingTable edgesTable, PreparingTable edgePropertiesTable, Statement statement ) { + private List attachPreparedGraphEdgeModifyInsert( AlgOptCluster cluster, PreparingEntity edgesTable, PreparingEntity edgePropertiesTable, Statement statement ) { AlgBuilder algBuilder = AlgBuilder.create( statement ); RexBuilder rexBuilder = algBuilder.getRexBuilder(); AlgDataTypeFactory typeFactory = rexBuilder.getTypeFactory(); @@ -1217,7 +1211,7 @@ private AlgNode switchContext( AlgNode node ) { } - private Modify getModify( AlgOptTable table, AlgNode input, Statement statement, Operation operation, List updateList, List sourceList ) { + private Modify getModify( AlgOptEntity table, AlgNode input, Statement statement, Operation operation, List updateList, List sourceList ) { return table.unwrap( ModifiableTable.class ).toModificationAlg( input.getCluster(), table, statement.getTransaction().getCatalogReader(), input, operation, updateList, sourceList, true ); } @@ -1255,36 +1249,32 @@ private AlgBuilder buildDml( partitionPlacement.physicalTableName, partitionPlacement.partitionId, catalogTable.getNamespaceType() ); - LogicalScan scan = (LogicalScan) builder.build(); + LogicalRelScan scan = (LogicalRelScan) builder.build(); builder.push( scan.copy( scan.getTraitSet().replace( ModelTrait.DOCUMENT ), scan.getInputs() ) ); return builder; - } else if ( node instanceof LogicalScan && node.getTable() != null ) { - AlgOptTableImpl table = (AlgOptTableImpl) node.getTable(); - - if ( table.getTable() instanceof LogicalTable ) { - CatalogTable fromTable = catalogTable; - // Special handling for INSERT INTO foo SELECT * FROM foo2 - if ( table.getTable().getTableId() != catalogTable.id ) { - return handleSelectFromOtherTable( builder, catalogTable, statement, table ); - } + } else if ( node instanceof LogicalRelScan && node.getTable() != null ) { + AlgOptEntityImpl table = (AlgOptEntityImpl) node.getTable(); - builder = super.handleScan( - builder, - statement, - placements.get( 0 ).tableId, - placements.get( 0 ).adapterUniqueName, - fromTable.getNamespaceName(), - fromTable.name, - placements.get( 0 ).physicalSchemaName, - partitionPlacement.physicalTableName, - partitionPlacement.partitionId, - fromTable.getNamespaceType() ); + // Special handling for INSERT INTO foo SELECT * FROM foo2 + if ( table.getCatalogEntity().id != catalogTable.id ) { + return handleSelectFromOtherTable( builder, catalogTable, statement, table ); + } + + builder = super.handleScan( + builder, + statement, + placements.get( 0 ).tableId, + placements.get( 0 ).adapterUniqueName, + catalogTable.getNamespaceName(), + catalogTable.name, + placements.get( 0 ).physicalSchemaName, + partitionPlacement.physicalTableName, + partitionPlacement.partitionId, + catalogTable.getNamespaceType() ); + + return builder; - return builder; - } else { - throw new RuntimeException( "Unexpected table. Only logical tables expected here!" 
); - } } else if ( node instanceof Values ) { if ( node.getModel() == NamespaceType.DOCUMENT ) { return handleDocuments( (LogicalDocumentValues) node, builder ); @@ -1352,12 +1342,10 @@ private AlgBuilder buildDml( } - private AlgBuilder handleSelectFromOtherTable( RoutedAlgBuilder builder, CatalogTable catalogTable, Statement statement, AlgOptTableImpl table ) { + private AlgBuilder handleSelectFromOtherTable( RoutedAlgBuilder builder, CatalogTable catalogTable, Statement statement, AlgOptEntityImpl table ) { CatalogTable fromTable; // Select from other table - fromTable = catalog.getTable( table.getTable().getTableId() ); - // PartitionManagerFactory partitionManagerFactory = PartitionManagerFactory.getInstance(); - //PartitionManager partitionManager = partitionManagerFactory.getPartitionManager( fromTable.partitionProperty.partitionType ); + fromTable = table.getCatalogEntity().unwrap( CatalogTable.class ); if ( fromTable.partitionProperty.isPartitioned ) { throw new UnsupportedOperationException( "DMLs from other partitioned tables is not supported" ); @@ -1372,7 +1360,7 @@ private AlgBuilder handleSelectFromOtherTable( RoutedAlgBuilder builder, Catalog for ( CatalogColumnPlacement pkPlacement : pkPlacements ) { catalog.getColumnPlacementsOnAdapterPerTable( pkPlacement.adapterId, catalogTable.id ); - fromTable = catalog.getTable( table.getTable().getTableId() ); + fromTable = table.getCatalogEntity().unwrap( CatalogTable.class ); CatalogPartitionPlacement partition = catalog.getPartitionPlacement( pkPlacement.adapterId, fromTable.partitionProperty.partitionIds.get( 0 ) ); diff --git a/dbms/src/main/java/org/polypheny/db/transaction/EntityAccessMap.java b/dbms/src/main/java/org/polypheny/db/transaction/EntityAccessMap.java index 4b77432cc2..c4ddae971a 100644 --- a/dbms/src/main/java/org/polypheny/db/transaction/EntityAccessMap.java +++ b/dbms/src/main/java/org/polypheny/db/transaction/EntityAccessMap.java @@ -39,10 +39,11 @@ import org.polypheny.db.algebra.core.lpg.LpgAlg; import org.polypheny.db.algebra.core.lpg.LpgModify; import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.entity.CatalogTable; import org.polypheny.db.config.RuntimeConfig; -import org.polypheny.db.plan.AlgOptTable; +import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptUtil; -import org.polypheny.db.prepare.AlgOptTableImpl; +import org.polypheny.db.prepare.AlgOptEntityImpl; import org.polypheny.db.schema.LogicalTable; import org.polypheny.db.transaction.EntityAccessMap.EntityIdentifier.NamespaceLevel; import org.polypheny.db.transaction.Lock.LockMode; @@ -221,14 +222,11 @@ public Mode getEntityAccessMode( @NonNull EntityAccessMap.EntityIdentifier entit * @param table table of interest * @return qualified name */ - public EntityIdentifier getQualifiedName( AlgOptTable table, long partitionId ) { - if ( !(table instanceof AlgOptTableImpl) ) { + public EntityIdentifier getQualifiedName( AlgOptEntity table, long partitionId ) { + if ( !(table instanceof AlgOptEntityImpl) ) { throw new RuntimeException( "Unexpected table type: " + table.getClass() ); } - if ( !(table.getTable() instanceof LogicalTable) ) { - throw new RuntimeException( "Unexpected table type: " + table.getTable().getClass() ); - } - return new EntityIdentifier( table.getTable().getTableId(), partitionId, NamespaceLevel.ENTITY_LEVEL ); + return new EntityIdentifier( table.getCatalogEntity().id, partitionId, NamespaceLevel.ENTITY_LEVEL ); } @@ -240,7 +238,7 @@ private class TableRelVisitor extends AlgVisitor { 
@Override public void visit( AlgNode p, int ordinal, AlgNode parent ) { super.visit( p, ordinal, parent ); - AlgOptTable table = p.getTable(); + AlgOptEntity table = p.getTable(); if ( table == null ) { if ( p instanceof LpgAlg ) { attachGraph( (AlgNode & LpgAlg) p ); @@ -271,8 +269,8 @@ public void visit( AlgNode p, int ordinal, AlgNode parent ) { List relevantPartitions; if ( accessedPartitions.containsKey( p.getId() ) ) { relevantPartitions = accessedPartitions.get( p.getId() ); - } else if ( table.getTable().getTableId() != -1 ) { - relevantPartitions = Catalog.getInstance().getTable( table.getTable().getTableId() ).partitionProperty.partitionIds; + } else if ( table.getCatalogEntity() != null ) { + relevantPartitions = table.getCatalogEntity().unwrap( CatalogTable.class ).partitionProperty.partitionIds; } else { relevantPartitions = List.of(); } @@ -301,7 +299,7 @@ private void attachDocument( T p ) { newAccess = Mode.READ_ACCESS; } // as documents are using the same id space as tables this will work - EntityIdentifier key = new EntityIdentifier( p.getCollection().getTable().getTableId(), 0, NamespaceLevel.ENTITY_LEVEL ); + EntityIdentifier key = new EntityIdentifier( p.getCollection().getCatalogEntity().id, 0, NamespaceLevel.ENTITY_LEVEL ); accessMap.put( key, newAccess ); } diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticsManagerImpl.java b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticsManagerImpl.java index 6f34bf4d8d..fddf7bc2ad 100644 --- a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticsManagerImpl.java +++ b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticsManagerImpl.java @@ -48,7 +48,7 @@ import org.polypheny.db.algebra.fun.AggFunction; import org.polypheny.db.algebra.logical.relational.LogicalAggregate; import org.polypheny.db.algebra.logical.relational.LogicalProject; -import org.polypheny.db.algebra.logical.relational.LogicalScan; +import org.polypheny.db.algebra.logical.relational.LogicalRelScan; import org.polypheny.db.algebra.logical.relational.LogicalSort; import org.polypheny.db.algebra.operators.OperatorName; import org.polypheny.db.algebra.type.AlgDataType; @@ -72,7 +72,7 @@ import org.polypheny.db.information.InformationTable; import org.polypheny.db.languages.OperatorRegistry; import org.polypheny.db.plan.AlgOptCluster; -import org.polypheny.db.plan.AlgOptTable; +import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.prepare.PolyphenyDbCatalogReader; import org.polypheny.db.prepare.Prepare.CatalogReader; import org.polypheny.db.rex.RexBuilder; @@ -538,7 +538,7 @@ private AlgNode getQueryNode( QueryResult queryResult, NodeType nodeType ) { final AlgOptCluster cluster = AlgOptCluster.create( statement.getQueryProcessor().getPlanner(), rexBuilder ); AlgNode queryNode; - LogicalScan tableScan = getLogicalScan( queryResult.getSchema(), queryResult.getTable(), reader, cluster ); + LogicalRelScan tableScan = getLogicalScan( queryResult.getSchema(), queryResult.getTable(), reader, cluster ); switch ( nodeType ) { case MIN: case MAX: @@ -563,9 +563,9 @@ private AlgNode getQueryNode( QueryResult queryResult, NodeType nodeType ) { /** * Gets a tableScan for a given table. 
*/ - private LogicalScan getLogicalScan( String schema, String table, CatalogReader reader, AlgOptCluster cluster ) { - AlgOptTable relOptTable = reader.getTable( Arrays.asList( schema, table ) ); - return LogicalScan.create( cluster, relOptTable ); + private LogicalRelScan getLogicalScan( String schema, String table, CatalogReader reader, AlgOptCluster cluster ) { + AlgOptEntity relOptTable = reader.getTable( Arrays.asList( schema, table ) ); + return LogicalRelScan.create( cluster, relOptTable ); } diff --git a/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/CottontailTable.java b/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/CottontailTable.java index fa5945f37b..b2525c36d9 100644 --- a/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/CottontailTable.java +++ b/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/CottontailTable.java @@ -34,8 +34,8 @@ import org.polypheny.db.algebra.type.AlgDataTypeFactory; import org.polypheny.db.algebra.type.AlgProtoDataType; import org.polypheny.db.plan.AlgOptCluster; -import org.polypheny.db.plan.AlgOptTable; -import org.polypheny.db.plan.AlgOptTable.ToAlgContext; +import org.polypheny.db.plan.AlgOptEntity; +import org.polypheny.db.plan.AlgOptEntity.ToAlgContext; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.plan.Convention; import org.polypheny.db.prepare.Prepare.CatalogReader; @@ -122,7 +122,7 @@ public Collection getModifiableCollection() { @Override public Modify toModificationAlg( AlgOptCluster cluster, - AlgOptTable table, + AlgOptEntity table, CatalogReader catalogReader, AlgNode input, Operation operation, @@ -166,8 +166,8 @@ public Enumerable scan( DataContext root ) { @Override - public AlgNode toAlg( ToAlgContext context, AlgOptTable algOptTable, AlgTraitSet traitSet ) { - return new CottontailScan( context.getCluster(), algOptTable, this, traitSet, this.cottontailSchema.getConvention() ); + public AlgNode toAlg( ToAlgContext context, AlgOptEntity algOptEntity, AlgTraitSet traitSet ) { + return new CottontailScan( context.getCluster(), algOptEntity, this, traitSet, this.cottontailSchema.getConvention() ); } diff --git a/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/algebra/CottontailAlg.java b/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/algebra/CottontailAlg.java index 8db063deef..0e1fd76e81 100644 --- a/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/algebra/CottontailAlg.java +++ b/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/algebra/CottontailAlg.java @@ -21,7 +21,7 @@ import org.apache.calcite.linq4j.tree.ParameterExpression; import org.polypheny.db.adapter.cottontail.CottontailTable; import org.polypheny.db.algebra.AlgNode; -import org.polypheny.db.plan.AlgOptTable; +import org.polypheny.db.plan.AlgOptEntity; public interface CottontailAlg extends AlgNode { @@ -49,7 +49,7 @@ class CottontailImplementContext { public String tableName; - public AlgOptTable table; + public AlgOptEntity table; public CottontailTable cottontailTable; public Expression filterBuilder; diff --git a/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/algebra/CottontailScan.java b/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/algebra/CottontailScan.java index ee66b1cbf7..d6458adc7e 100644 --- 
a/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/algebra/CottontailScan.java +++ b/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/algebra/CottontailScan.java @@ -26,8 +26,8 @@ import org.polypheny.db.algebra.metadata.AlgMetadataQuery; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgOptCost; +import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptPlanner; -import org.polypheny.db.plan.AlgOptTable; import org.polypheny.db.plan.AlgTraitSet; @@ -36,7 +36,7 @@ public class CottontailScan extends Scan implements CottontailAlg { protected final CottontailTable cottontailTable; - public CottontailScan( AlgOptCluster cluster, AlgOptTable table, CottontailTable cottontailTable, AlgTraitSet traitSet, CottontailConvention cottontailConvention ) { + public CottontailScan( AlgOptCluster cluster, AlgOptEntity table, CottontailTable cottontailTable, AlgTraitSet traitSet, CottontailConvention cottontailConvention ) { super( cluster, traitSet.replace( cottontailConvention ), table ); this.cottontailTable = cottontailTable; } diff --git a/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/algebra/CottontailTableModify.java b/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/algebra/CottontailTableModify.java index 8d1650275e..28fe9dc15d 100644 --- a/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/algebra/CottontailTableModify.java +++ b/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/algebra/CottontailTableModify.java @@ -37,8 +37,8 @@ import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgOptCost; +import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptPlanner; -import org.polypheny.db.plan.AlgOptTable; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.prepare.Prepare.CatalogReader; import org.polypheny.db.rex.RexCall; @@ -76,7 +76,7 @@ public class CottontailTableModify extends Modify implements CottontailAlg { public CottontailTableModify( AlgOptCluster cluster, AlgTraitSet traitSet, - AlgOptTable table, + AlgOptEntity table, CatalogReader catalogReader, AlgNode input, Operation operation, diff --git a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvScan.java b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvScan.java index 7001483ee8..00a1d4c577 100644 --- a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvScan.java +++ b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvScan.java @@ -37,22 +37,22 @@ import org.apache.calcite.linq4j.tree.Blocks; import org.apache.calcite.linq4j.tree.Expressions; import org.apache.calcite.linq4j.tree.Primitive; -import org.polypheny.db.adapter.enumerable.EnumerableAlg; -import org.polypheny.db.adapter.enumerable.EnumerableAlgImplementor; -import org.polypheny.db.adapter.enumerable.EnumerableConvention; -import org.polypheny.db.adapter.enumerable.PhysType; -import org.polypheny.db.adapter.enumerable.PhysTypeImpl; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.AlgWriter; -import org.polypheny.db.algebra.core.Scan; +import org.polypheny.db.algebra.core.common.Scan; +import org.polypheny.db.algebra.enumerable.EnumerableAlg; +import org.polypheny.db.algebra.enumerable.EnumerableAlgImplementor; +import 
org.polypheny.db.algebra.enumerable.EnumerableConvention; +import org.polypheny.db.algebra.enumerable.PhysType; +import org.polypheny.db.algebra.enumerable.PhysTypeImpl; import org.polypheny.db.algebra.metadata.AlgMetadataQuery; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgOptCost; +import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptPlanner; -import org.polypheny.db.plan.AlgOptTable; import org.polypheny.db.plan.AlgTraitSet; @@ -67,7 +67,7 @@ public class CsvScan extends Scan implements EnumerableAlg { final int[] fields; - protected CsvScan( AlgOptCluster cluster, AlgOptTable table, CsvTranslatableTable csvTable, int[] fields ) { + protected CsvScan( AlgOptCluster cluster, AlgOptEntity table, CsvTranslatableTable csvTable, int[] fields ) { super( cluster, cluster.traitSetOf( EnumerableConvention.INSTANCE ), table ); this.csvTable = csvTable; this.fields = fields; diff --git a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvTranslatableTable.java b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvTranslatableTable.java index 8357617d52..0b4fd3cd22 100644 --- a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvTranslatableTable.java +++ b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvTranslatableTable.java @@ -33,6 +33,9 @@ package org.polypheny.db.adapter.csv; +import java.lang.reflect.Type; +import java.util.List; +import java.util.concurrent.atomic.AtomicBoolean; import org.apache.calcite.linq4j.AbstractEnumerable; import org.apache.calcite.linq4j.Enumerable; import org.apache.calcite.linq4j.Enumerator; @@ -41,8 +44,8 @@ import org.polypheny.db.adapter.DataContext; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.type.AlgProtoDataType; -import org.polypheny.db.plan.AlgOptTable; -import org.polypheny.db.plan.AlgOptTable.ToAlgContext; +import org.polypheny.db.plan.AlgOptEntity; +import org.polypheny.db.plan.AlgOptEntity.ToAlgContext; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.schema.QueryableTable; import org.polypheny.db.schema.SchemaPlus; @@ -50,10 +53,6 @@ import org.polypheny.db.schema.TranslatableTable; import org.polypheny.db.util.Source; -import java.lang.reflect.Type; -import java.util.List; -import java.util.concurrent.atomic.AtomicBoolean; - /** * Table based on a CSV file. @@ -109,9 +108,9 @@ public Queryable asQueryable( DataContext dataContext, SchemaPlus schema, @Override - public AlgNode toAlg( ToAlgContext context, AlgOptTable algOptTable, AlgTraitSet traitSet ) { + public AlgNode toAlg( ToAlgContext context, AlgOptEntity algOptEntity, AlgTraitSet traitSet ) { // Request all fields. 
- return new CsvScan( context.getCluster(), algOptTable, this, fields ); + return new CsvScan( context.getCluster(), algOptEntity, this, fields ); } } diff --git a/plugins/druid-adapter/src/main/java/org/polypheny/db/adapter/druid/DruidQuery.java b/plugins/druid-adapter/src/main/java/org/polypheny/db/adapter/druid/DruidQuery.java index feb828433a..890e7f9051 100644 --- a/plugins/druid-adapter/src/main/java/org/polypheny/db/adapter/druid/DruidQuery.java +++ b/plugins/druid-adapter/src/main/java/org/polypheny/db/adapter/druid/DruidQuery.java @@ -90,9 +90,9 @@ import org.polypheny.db.nodes.Operator; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgOptCost; +import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptPlanner; import org.polypheny.db.plan.AlgOptRule; -import org.polypheny.db.plan.AlgOptTable; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.rex.RexCall; import org.polypheny.db.rex.RexInputRef; @@ -166,7 +166,7 @@ public class DruidQuery extends AbstractAlgNode implements BindableAlg { .build(); protected QuerySpec querySpec; - final AlgOptTable table; + final AlgOptEntity table; final DruidTable druidTable; final ImmutableList intervals; final ImmutableList algs; @@ -193,7 +193,7 @@ public class DruidQuery extends AbstractAlgNode implements BindableAlg { * @param algs Internal relational expressions * @param converterOperatorMap mapping of Polypheny-DB Sql Operator to Druid Expression API. */ - protected DruidQuery( AlgOptCluster cluster, AlgTraitSet traitSet, AlgOptTable table, DruidTable druidTable, List intervals, List algs, Map converterOperatorMap ) { + protected DruidQuery( AlgOptCluster cluster, AlgTraitSet traitSet, AlgOptEntity table, DruidTable druidTable, List intervals, List algs, Map converterOperatorMap ) { super( cluster, traitSet ); this.table = table; this.druidTable = druidTable; @@ -215,7 +215,7 @@ static boolean isValidSignature( String signature ) { /** * Creates a DruidQuery. */ - public static DruidQuery create( AlgOptCluster cluster, AlgTraitSet traitSet, AlgOptTable table, DruidTable druidTable, List algs ) { + public static DruidQuery create( AlgOptCluster cluster, AlgTraitSet traitSet, AlgOptEntity table, DruidTable druidTable, List algs ) { final ImmutableMap.Builder mapBuilder = ImmutableMap.builder(); for ( DruidSqlOperatorConverter converter : DEFAULT_OPERATORS_LIST ) { mapBuilder.put( converter.polyphenyDbOperator(), converter ); @@ -227,7 +227,7 @@ public static DruidQuery create( AlgOptCluster cluster, AlgTraitSet traitSet, Al /** * Creates a DruidQuery. */ - public static DruidQuery create( AlgOptCluster cluster, AlgTraitSet traitSet, AlgOptTable table, DruidTable druidTable, List algs, Map converterOperatorMap ) { + public static DruidQuery create( AlgOptCluster cluster, AlgTraitSet traitSet, AlgOptEntity table, DruidTable druidTable, List algs, Map converterOperatorMap ) { return create( cluster, traitSet, table, druidTable, druidTable.intervals, algs, converterOperatorMap ); } @@ -235,7 +235,7 @@ public static DruidQuery create( AlgOptCluster cluster, AlgTraitSet traitSet, Al /** * Creates a DruidQuery. 
*/ - private static DruidQuery create( AlgOptCluster cluster, AlgTraitSet traitSet, AlgOptTable table, DruidTable druidTable, List intervals, List algs, Map converterOperatorMap ) { + private static DruidQuery create( AlgOptCluster cluster, AlgTraitSet traitSet, AlgOptEntity table, DruidTable druidTable, List intervals, List algs, Map converterOperatorMap ) { return new DruidQuery( cluster, traitSet, table, druidTable, intervals, algs, converterOperatorMap ); } @@ -525,7 +525,7 @@ public AlgNode getTopNode() { @Override - public AlgOptTable getTable() { + public AlgOptEntity getTable() { return table; } diff --git a/plugins/druid-adapter/src/main/java/org/polypheny/db/adapter/druid/DruidTable.java b/plugins/druid-adapter/src/main/java/org/polypheny/db/adapter/druid/DruidTable.java index 406693e717..e160eeaa8a 100644 --- a/plugins/druid-adapter/src/main/java/org/polypheny/db/adapter/druid/DruidTable.java +++ b/plugins/druid-adapter/src/main/java/org/polypheny/db/adapter/druid/DruidTable.java @@ -38,13 +38,18 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Set; import org.joda.time.DateTime; import org.joda.time.Interval; import org.joda.time.chrono.ISOChronology; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.constant.Kind; import org.polypheny.db.algebra.core.AggregateCall; -import org.polypheny.db.algebra.logical.relational.LogicalScan; +import org.polypheny.db.algebra.logical.relational.LogicalRelScan; import org.polypheny.db.algebra.operators.OperatorName; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; @@ -55,8 +60,8 @@ import org.polypheny.db.nodes.Call; import org.polypheny.db.nodes.Node; import org.polypheny.db.plan.AlgOptCluster; -import org.polypheny.db.plan.AlgOptTable; -import org.polypheny.db.plan.AlgOptTable.ToAlgContext; +import org.polypheny.db.plan.AlgOptEntity; +import org.polypheny.db.plan.AlgOptEntity.ToAlgContext; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.schema.ModelTraitDef; import org.polypheny.db.schema.Table; @@ -67,8 +72,6 @@ import org.polypheny.db.sql.language.SqlSelectKeyword; import org.polypheny.db.type.PolyType; -import java.util.*; - /** * Table mapped onto a Druid table. 
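Note: the DmlRouterImpl and EntityAccessMap hunks above replace the old two-step lookup (read a table id off the schema object, then resolve it through the catalog) with a direct table.getCatalogEntity().unwrap( CatalogTable.class ) call. Below is a minimal, self-contained sketch of that unwrap idiom; CatalogEntity, CatalogTable, and UnwrapDemo are simplified stand-ins for illustration, not the real classes from org.polypheny.db.catalog.entity.

import java.util.List;

// Stand-in for the generic catalog entity; illustrative only.
abstract class CatalogEntity {

    final long id;


    CatalogEntity( long id ) {
        this.id = id;
    }


    // Narrows the generic entity to a concrete catalog class, or returns null
    // when the entity is of another kind (graph, collection, ...).
    <T> T unwrap( Class<T> clazz ) {
        return clazz.isInstance( this ) ? clazz.cast( this ) : null;
    }

}

// Stand-in for the relational catalog entry carrying partition information.
class CatalogTable extends CatalogEntity {

    final List<Long> partitionIds;


    CatalogTable( long id, List<Long> partitionIds ) {
        super( id );
        this.partitionIds = partitionIds;
    }

}

class UnwrapDemo {

    public static void main( String[] args ) {
        CatalogEntity entity = new CatalogTable( 42L, List.of( 0L, 1L ) );
        // No separate catalog round-trip: the entity itself yields the table.
        CatalogTable table = entity.unwrap( CatalogTable.class );
        System.out.println( table.id + " -> " + table.partitionIds );
    }

}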
@@ -238,11 +241,11 @@ public AlgDataType getRowType( AlgDataTypeFactory typeFactory ) { @Override - public AlgNode toAlg( ToAlgContext context, AlgOptTable algOptTable, AlgTraitSet traitSet ) { + public AlgNode toAlg( ToAlgContext context, AlgOptEntity algOptEntity, AlgTraitSet traitSet ) { final AlgOptCluster cluster = context.getCluster(); // ViewScan needed for Views - final LogicalScan scan = LogicalScan.create( cluster, algOptTable ); - return DruidQuery.create( cluster, cluster.traitSetOf( BindableConvention.INSTANCE ).replace( traitSet.getTrait( ModelTraitDef.INSTANCE ) ), algOptTable, this, ImmutableList.of( scan ) ); + final LogicalRelScan scan = LogicalRelScan.create( cluster, algOptEntity ); + return DruidQuery.create( cluster, cluster.traitSetOf( BindableConvention.INSTANCE ).replace( traitSet.getTrait( ModelTraitDef.INSTANCE ) ), algOptEntity, this, ImmutableList.of( scan ) ); } diff --git a/plugins/elasticsearch-adapter/src/main/java/org/polypheny/db/adapter/elasticsearch/ElasticsearchRel.java b/plugins/elasticsearch-adapter/src/main/java/org/polypheny/db/adapter/elasticsearch/ElasticsearchRel.java index 24811e433f..f95615f13e 100644 --- a/plugins/elasticsearch-adapter/src/main/java/org/polypheny/db/adapter/elasticsearch/ElasticsearchRel.java +++ b/plugins/elasticsearch-adapter/src/main/java/org/polypheny/db/adapter/elasticsearch/ElasticsearchRel.java @@ -41,7 +41,7 @@ import java.util.Objects; import org.polypheny.db.algebra.AlgFieldCollation; import org.polypheny.db.algebra.AlgNode; -import org.polypheny.db.plan.AlgOptTable; +import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.Convention; import org.polypheny.db.util.Pair; @@ -107,7 +107,7 @@ class Implementor { */ Long fetch; - AlgOptTable table; + AlgOptEntity table; ElasticsearchTable elasticsearchTable; diff --git a/plugins/elasticsearch-adapter/src/main/java/org/polypheny/db/adapter/elasticsearch/ElasticsearchScan.java b/plugins/elasticsearch-adapter/src/main/java/org/polypheny/db/adapter/elasticsearch/ElasticsearchScan.java index 2a667a8ab1..682a41603e 100644 --- a/plugins/elasticsearch-adapter/src/main/java/org/polypheny/db/adapter/elasticsearch/ElasticsearchScan.java +++ b/plugins/elasticsearch-adapter/src/main/java/org/polypheny/db/adapter/elasticsearch/ElasticsearchScan.java @@ -43,9 +43,9 @@ import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgOptCost; +import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptPlanner; import org.polypheny.db.plan.AlgOptRule; -import org.polypheny.db.plan.AlgOptTable; import org.polypheny.db.plan.AlgTraitSet; @@ -70,7 +70,7 @@ public class ElasticsearchScan extends Scan implements ElasticsearchRel { * @param elasticsearchTable Elasticsearch table * @param projectRowType Fields and types to project; null to project raw row */ - ElasticsearchScan( AlgOptCluster cluster, AlgTraitSet traitSet, AlgOptTable table, ElasticsearchTable elasticsearchTable, AlgDataType projectRowType ) { + ElasticsearchScan( AlgOptCluster cluster, AlgTraitSet traitSet, AlgOptEntity table, ElasticsearchTable elasticsearchTable, AlgDataType projectRowType ) { super( cluster, traitSet, table ); this.elasticsearchTable = Objects.requireNonNull( elasticsearchTable, "elasticsearchTable" ); this.projectRowType = projectRowType; diff --git a/plugins/elasticsearch-adapter/src/main/java/org/polypheny/db/adapter/elasticsearch/ElasticsearchTable.java 
b/plugins/elasticsearch-adapter/src/main/java/org/polypheny/db/adapter/elasticsearch/ElasticsearchTable.java index 10299cce37..9e48221a50 100644 --- a/plugins/elasticsearch-adapter/src/main/java/org/polypheny/db/adapter/elasticsearch/ElasticsearchTable.java +++ b/plugins/elasticsearch-adapter/src/main/java/org/polypheny/db/adapter/elasticsearch/ElasticsearchTable.java @@ -39,6 +39,20 @@ import com.fasterxml.jackson.databind.node.ArrayNode; import com.fasterxml.jackson.databind.node.ObjectNode; import com.google.common.collect.ImmutableMap; +import java.io.IOException; +import java.io.UncheckedIOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.LinkedHashSet; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Set; +import java.util.function.Consumer; +import java.util.function.Predicate; +import java.util.stream.Collectors; import org.apache.calcite.linq4j.Enumerable; import org.apache.calcite.linq4j.Enumerator; import org.apache.calcite.linq4j.Linq4j; @@ -51,8 +65,8 @@ import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; import org.polypheny.db.plan.AlgOptCluster; -import org.polypheny.db.plan.AlgOptTable; -import org.polypheny.db.plan.AlgOptTable.ToAlgContext; +import org.polypheny.db.plan.AlgOptEntity; +import org.polypheny.db.plan.AlgOptEntity.ToAlgContext; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.schema.ModelTraitDef; import org.polypheny.db.schema.SchemaPlus; @@ -60,13 +74,6 @@ import org.polypheny.db.schema.impl.AbstractTableQueryable; import org.polypheny.db.type.PolyType; -import java.io.IOException; -import java.io.UncheckedIOException; -import java.util.*; -import java.util.function.Consumer; -import java.util.function.Predicate; -import java.util.stream.Collectors; - /** * Table based on an Elasticsearch type. 
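Note: every adapter's toAlg entry point changes identically, as in the Cottontail, CSV, and Druid hunks above and the Elasticsearch hunk below: only the parameter type is renamed from AlgOptTable to AlgOptEntity, while the translation logic stays as it was. A compile-checkable sketch of that shape follows; the interfaces below are invented stand-ins for the real ones in org.polypheny.db.plan.

// Invented stand-ins; illustrative only.
interface AlgNode { }

interface AlgTraitSet { }

interface AlgOptCluster { }

interface AlgOptEntity { } // renamed from AlgOptTable

interface ToAlgContext {

    AlgOptCluster getCluster();

}

// An adapter table merely threads the renamed parameter through to its scan node.
class ExampleTable {

    public AlgNode toAlg( ToAlgContext context, AlgOptEntity algOptEntity, AlgTraitSet traitSet ) {
        return new ExampleScan( context.getCluster(), algOptEntity, traitSet );
    }

}

class ExampleScan implements AlgNode {

    ExampleScan( AlgOptCluster cluster, AlgOptEntity entity, AlgTraitSet traits ) {
        // A real scan would register its convention and keep these references.
    }

}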
@@ -328,9 +335,9 @@ public Queryable asQueryable( DataContext dataContext, SchemaPlus schema, @Override - public AlgNode toAlg( ToAlgContext context, AlgOptTable algOptTable, AlgTraitSet traitSet ) { + public AlgNode toAlg( ToAlgContext context, AlgOptEntity algOptEntity, AlgTraitSet traitSet ) { final AlgOptCluster cluster = context.getCluster(); - return new ElasticsearchScan( cluster, cluster.traitSetOf( ElasticsearchRel.CONVENTION ).replace( traitSet.getTrait( ModelTraitDef.INSTANCE ) ), algOptTable, this, null ); + return new ElasticsearchScan( cluster, cluster.traitSetOf( ElasticsearchRel.CONVENTION ).replace( traitSet.getTrait( ModelTraitDef.INSTANCE ) ), algOptEntity, this, null ); } diff --git a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/FileTranslatableTable.java b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/FileTranslatableTable.java index 1c012893b2..8424ff6078 100644 --- a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/FileTranslatableTable.java +++ b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/FileTranslatableTable.java @@ -37,8 +37,8 @@ import org.polypheny.db.algebra.type.AlgDataTypeFactory; import org.polypheny.db.algebra.type.AlgProtoDataType; import org.polypheny.db.plan.AlgOptCluster; -import org.polypheny.db.plan.AlgOptTable; -import org.polypheny.db.plan.AlgOptTable.ToAlgContext; +import org.polypheny.db.plan.AlgOptEntity; +import org.polypheny.db.plan.AlgOptEntity.ToAlgContext; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.plan.Convention; import org.polypheny.db.prepare.Prepare.CatalogReader; @@ -105,9 +105,9 @@ public FileTranslatableTable( @Override - public AlgNode toAlg( ToAlgContext context, AlgOptTable algOptTable, AlgTraitSet traitSet ) { + public AlgNode toAlg( ToAlgContext context, AlgOptEntity algOptEntity, AlgTraitSet traitSet ) { fileSchema.getConvention().register( context.getCluster().getPlanner() ); - return new FileScan( context.getCluster(), algOptTable, this ); + return new FileScan( context.getCluster(), algOptEntity, this ); } @@ -127,7 +127,7 @@ public Collection getModifiableCollection() { @Override public Modify toModificationAlg( AlgOptCluster cluster, - AlgOptTable table, + AlgOptEntity table, CatalogReader catalogReader, AlgNode child, Operation operation, diff --git a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/algebra/FileScan.java b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/algebra/FileScan.java index a946ef7547..c912d8fd06 100644 --- a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/algebra/FileScan.java +++ b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/algebra/FileScan.java @@ -27,8 +27,8 @@ import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgOptCost; +import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptPlanner; -import org.polypheny.db.plan.AlgOptTable; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.schema.ModelTrait; @@ -38,7 +38,7 @@ public class FileScan extends Scan implements FileAlg { private final FileTranslatableTable fileTable; - public FileScan( AlgOptCluster cluster, AlgOptTable table, FileTranslatableTable fileTable ) { + public FileScan( AlgOptCluster cluster, AlgOptEntity table, FileTranslatableTable fileTable ) { //convention was: EnumerableConvention.INSTANCE super( cluster, cluster.traitSetOf( 
fileTable.getFileSchema().getConvention() ).replace( ModelTrait.RELATIONAL ), table ); this.fileTable = fileTable; diff --git a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/algebra/FileTableModify.java b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/algebra/FileTableModify.java index 91b000bdb4..926115435c 100644 --- a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/algebra/FileTableModify.java +++ b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/algebra/FileTableModify.java @@ -28,8 +28,8 @@ import org.polypheny.db.algebra.metadata.AlgMetadataQuery; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgOptCost; +import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptPlanner; -import org.polypheny.db.plan.AlgOptTable; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.prepare.Prepare.CatalogReader; import org.polypheny.db.rex.RexCall; @@ -41,7 +41,7 @@ public class FileTableModify extends Modify implements FileAlg { - public FileTableModify( AlgOptCluster cluster, AlgTraitSet traits, AlgOptTable table, CatalogReader catalogReader, AlgNode child, Operation operation, List updateColumnList, List sourceExpressionList, boolean flattened ) { + public FileTableModify( AlgOptCluster cluster, AlgTraitSet traits, AlgOptEntity table, CatalogReader catalogReader, AlgNode child, Operation operation, List updateColumnList, List sourceExpressionList, boolean flattened ) { super( cluster, traits, table, catalogReader, child, operation, updateColumnList, sourceExpressionList, flattened ); } diff --git a/plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/algebra/GeodeAlg.java b/plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/algebra/GeodeAlg.java index c914f53397..3a97af7801 100644 --- a/plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/algebra/GeodeAlg.java +++ b/plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/algebra/GeodeAlg.java @@ -39,7 +39,7 @@ import java.util.List; import java.util.Map; import org.polypheny.db.algebra.AlgNode; -import org.polypheny.db.plan.AlgOptTable; +import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.Convention; @@ -74,7 +74,7 @@ class GeodeImplementContext { final Map oqlAggregateFunctions = new LinkedHashMap<>(); Long limitValue; - AlgOptTable table; + AlgOptEntity table; GeodeTable geodeTable; diff --git a/plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/algebra/GeodeScan.java b/plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/algebra/GeodeScan.java index 4943d4e2b3..cb590e0845 100644 --- a/plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/algebra/GeodeScan.java +++ b/plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/algebra/GeodeScan.java @@ -39,9 +39,9 @@ import org.polypheny.db.algebra.core.Scan; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.plan.AlgOptCluster; +import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptPlanner; import org.polypheny.db.plan.AlgOptRule; -import org.polypheny.db.plan.AlgOptTable; import org.polypheny.db.plan.AlgTraitSet; @@ -63,7 +63,7 @@ public class GeodeScan extends Scan implements GeodeAlg { * @param geodeTable Geode table * @param projectRowType Fields and types to project; null to project raw row */ - GeodeScan( AlgOptCluster cluster, AlgTraitSet traitSet, AlgOptTable table, GeodeTable geodeTable, 
AlgDataType projectRowType ) { + GeodeScan( AlgOptCluster cluster, AlgTraitSet traitSet, AlgOptEntity table, GeodeTable geodeTable, AlgDataType projectRowType ) { super( cluster, traitSet, table ); this.geodeTable = geodeTable; this.projectRowType = projectRowType; diff --git a/plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/algebra/GeodeTable.java b/plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/algebra/GeodeTable.java index 116aeb77f0..9fab17f7e8 100644 --- a/plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/algebra/GeodeTable.java +++ b/plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/algebra/GeodeTable.java @@ -37,6 +37,9 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableList.Builder; import com.google.common.collect.ImmutableMap; +import java.util.List; +import java.util.Locale; +import java.util.Map; import lombok.extern.slf4j.Slf4j; import org.apache.calcite.linq4j.AbstractEnumerable; import org.apache.calcite.linq4j.Enumerable; @@ -56,8 +59,8 @@ import org.polypheny.db.algebra.type.AlgDataTypeImpl; import org.polypheny.db.algebra.type.AlgProtoDataType; import org.polypheny.db.plan.AlgOptCluster; -import org.polypheny.db.plan.AlgOptTable; -import org.polypheny.db.plan.AlgOptTable.ToAlgContext; +import org.polypheny.db.plan.AlgOptEntity; +import org.polypheny.db.plan.AlgOptEntity.ToAlgContext; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.runtime.Hook; import org.polypheny.db.schema.SchemaPlus; @@ -66,10 +69,6 @@ import org.polypheny.db.type.PolyType; import org.polypheny.db.util.Util; -import java.util.List; -import java.util.Locale; -import java.util.Map; - /** * Table based on a Geode Region @@ -222,9 +221,9 @@ public Queryable asQueryable( DataContext dataContext, SchemaPlus schema, @Override - public AlgNode toAlg( ToAlgContext context, AlgOptTable algOptTable, AlgTraitSet traitSet ) { + public AlgNode toAlg( ToAlgContext context, AlgOptEntity algOptEntity, AlgTraitSet traitSet ) { final AlgOptCluster cluster = context.getCluster(); - return new GeodeScan( cluster, cluster.traitSetOf( GeodeAlg.CONVENTION ), algOptTable, this, null ); + return new GeodeScan( cluster, cluster.traitSetOf( GeodeAlg.CONVENTION ), algOptEntity, this, null ); } diff --git a/plugins/html-adapter/src/main/java/org/polypheny/db/adapter/html/HtmlScan.java b/plugins/html-adapter/src/main/java/org/polypheny/db/adapter/html/HtmlScan.java index 972e711fb4..08fc3b79b6 100644 --- a/plugins/html-adapter/src/main/java/org/polypheny/db/adapter/html/HtmlScan.java +++ b/plugins/html-adapter/src/main/java/org/polypheny/db/adapter/html/HtmlScan.java @@ -50,7 +50,7 @@ import org.polypheny.db.algebra.type.AlgDataTypeFactory; import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.plan.AlgOptCluster; -import org.polypheny.db.plan.AlgOptTable; +import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgTraitSet; @@ -67,7 +67,7 @@ class HtmlScan extends Scan implements EnumerableAlg { private final int[] fields; - protected HtmlScan( AlgOptCluster cluster, AlgOptTable table, HtmlTable webTable, int[] fields ) { + protected HtmlScan( AlgOptCluster cluster, AlgOptEntity table, HtmlTable webTable, int[] fields ) { super( cluster, cluster.traitSetOf( EnumerableConvention.INSTANCE ), table ); this.webTable = webTable; this.fields = fields; diff --git a/plugins/html-adapter/src/main/java/org/polypheny/db/adapter/html/HtmlTable.java 
b/plugins/html-adapter/src/main/java/org/polypheny/db/adapter/html/HtmlTable.java index 79a99f74c1..4dfa9cf4ad 100644 --- a/plugins/html-adapter/src/main/java/org/polypheny/db/adapter/html/HtmlTable.java +++ b/plugins/html-adapter/src/main/java/org/polypheny/db/adapter/html/HtmlTable.java @@ -34,6 +34,8 @@ package org.polypheny.db.adapter.html; +import java.util.List; +import java.util.Map; import org.apache.calcite.linq4j.AbstractEnumerable; import org.apache.calcite.linq4j.Enumerable; import org.apache.calcite.linq4j.Enumerator; @@ -47,8 +49,8 @@ import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; import org.polypheny.db.algebra.type.AlgProtoDataType; -import org.polypheny.db.plan.AlgOptTable; -import org.polypheny.db.plan.AlgOptTable.ToAlgContext; +import org.polypheny.db.plan.AlgOptEntity; +import org.polypheny.db.plan.AlgOptEntity.ToAlgContext; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.schema.SchemaPlus; import org.polypheny.db.schema.Statistic; @@ -57,9 +59,6 @@ import org.polypheny.db.schema.impl.AbstractTableQueryable; import org.polypheny.db.util.Source; -import java.util.List; -import java.util.Map; - /** * Table implementation wrapping a URL / HTML table. @@ -149,8 +148,8 @@ public Enumerator enumerator() { @Override - public AlgNode toAlg( ToAlgContext context, AlgOptTable algOptTable, AlgTraitSet traitSet ) { - return new EnumerableScan( context.getCluster(), context.getCluster().traitSetOf( EnumerableConvention.INSTANCE ), algOptTable, (Class) getElementType() ); + public AlgNode toAlg( ToAlgContext context, AlgOptEntity algOptEntity, AlgTraitSet traitSet ) { + return new EnumerableScan( context.getCluster(), context.getCluster().traitSetOf( EnumerableConvention.INSTANCE ), algOptEntity, (Class) getElementType() ); } } diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcRules.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcRules.java index 32488a1254..8dcdce62e2 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcRules.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcRules.java @@ -34,22 +34,61 @@ package org.polypheny.db.adapter.jdbc; import com.google.common.collect.ImmutableList; +import java.util.ArrayList; +import java.util.List; +import java.util.Set; +import java.util.function.Predicate; import org.apache.calcite.linq4j.Queryable; import org.apache.calcite.linq4j.tree.Expression; import org.polypheny.db.adapter.jdbc.rel2sql.SqlImplementor; import org.polypheny.db.adapter.jdbc.rel2sql.SqlImplementor.Result; -import org.polypheny.db.algebra.*; +import org.polypheny.db.algebra.AbstractAlgNode; +import org.polypheny.db.algebra.AlgCollation; +import org.polypheny.db.algebra.AlgNode; +import org.polypheny.db.algebra.AlgWriter; +import org.polypheny.db.algebra.InvalidAlgException; +import org.polypheny.db.algebra.SingleAlg; import org.polypheny.db.algebra.convert.ConverterRule; -import org.polypheny.db.algebra.core.*; +import org.polypheny.db.algebra.core.Aggregate; +import org.polypheny.db.algebra.core.AggregateCall; +import org.polypheny.db.algebra.core.AlgFactories; +import org.polypheny.db.algebra.core.Calc; +import org.polypheny.db.algebra.core.CorrelationId; +import org.polypheny.db.algebra.core.Filter; +import org.polypheny.db.algebra.core.Intersect; +import org.polypheny.db.algebra.core.Join; +import 
org.polypheny.db.algebra.core.JoinAlgType; +import org.polypheny.db.algebra.core.Minus; +import org.polypheny.db.algebra.core.Modify; +import org.polypheny.db.algebra.core.Project; +import org.polypheny.db.algebra.core.SemiJoin; +import org.polypheny.db.algebra.core.Sort; +import org.polypheny.db.algebra.core.Union; +import org.polypheny.db.algebra.core.Values; import org.polypheny.db.algebra.metadata.AlgMdUtil; import org.polypheny.db.algebra.metadata.AlgMetadataQuery; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.nodes.Function; import org.polypheny.db.nodes.Operator; -import org.polypheny.db.plan.*; +import org.polypheny.db.plan.AlgOptCluster; +import org.polypheny.db.plan.AlgOptCost; +import org.polypheny.db.plan.AlgOptEntity; +import org.polypheny.db.plan.AlgOptPlanner; +import org.polypheny.db.plan.AlgOptRule; +import org.polypheny.db.plan.AlgOptRuleCall; +import org.polypheny.db.plan.AlgTrait; +import org.polypheny.db.plan.AlgTraitSet; +import org.polypheny.db.plan.Convention; import org.polypheny.db.prepare.Prepare; -import org.polypheny.db.rex.*; +import org.polypheny.db.rex.RexCall; +import org.polypheny.db.rex.RexInputRef; +import org.polypheny.db.rex.RexLiteral; +import org.polypheny.db.rex.RexMultisetUtil; +import org.polypheny.db.rex.RexNode; +import org.polypheny.db.rex.RexOver; +import org.polypheny.db.rex.RexProgram; +import org.polypheny.db.rex.RexVisitorImpl; import org.polypheny.db.schema.ModelTrait; import org.polypheny.db.schema.ModifiableTable; import org.polypheny.db.schema.document.DocumentRules; @@ -64,11 +103,6 @@ import org.polypheny.db.util.trace.PolyphenyDbTrace; import org.slf4j.Logger; -import java.util.ArrayList; -import java.util.List; -import java.util.Set; -import java.util.function.Predicate; - /** * Rules and relational operators for {@link JdbcConvention} calling convention. @@ -1017,7 +1051,7 @@ public static class JdbcTableModify extends Modify implements JdbcAlg { public JdbcTableModify( AlgOptCluster cluster, AlgTraitSet traitSet, - AlgOptTable table, + AlgOptEntity table, Prepare.CatalogReader catalogReader, AlgNode input, Operation operation, diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcScan.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcScan.java index e571fe6b60..5119b68ee7 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcScan.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcScan.java @@ -34,15 +34,18 @@ package org.polypheny.db.adapter.jdbc; import com.google.common.collect.ImmutableList; +import java.util.List; import org.polypheny.db.adapter.jdbc.rel2sql.SqlImplementor.Result; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.core.Scan; import org.polypheny.db.algebra.metadata.AlgMetadataQuery; -import org.polypheny.db.plan.*; +import org.polypheny.db.plan.AlgOptCluster; +import org.polypheny.db.plan.AlgOptCost; +import org.polypheny.db.plan.AlgOptEntity; +import org.polypheny.db.plan.AlgOptPlanner; +import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.schema.ModelTrait; -import java.util.List; - /** * Relational expression representing a scan of a table in a JDBC data source. 
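Note: the scan constructors follow one template across adapters, visible in the JdbcScan hunk below and the Cottontail, CSV, Geode, and Mongo scans above: hand the Scan base class a trait set bound to the adapter's convention together with the (now AlgOptEntity-typed) table. A toy rendering of that template under invented names; TraitSet.replace here only mimics the traitSet.replace( convention ) calls in the hunks.

// Hypothetical, heavily simplified hierarchy for illustration.
interface AlgOptEntity { }

interface Convention { }

class TraitSet {

    final Convention convention;


    TraitSet( Convention convention ) {
        this.convention = convention;
    }


    // Returns a copy bound to the given convention, mimicking traitSet.replace( c ).
    TraitSet replace( Convention c ) {
        return new TraitSet( c );
    }

}

abstract class Scan {

    protected final TraitSet traits;
    protected final AlgOptEntity table;


    protected Scan( TraitSet traits, AlgOptEntity table ) {
        this.traits = traits;
        this.table = table;
    }

}

class ExampleAdapterScan extends Scan {

    ExampleAdapterScan( TraitSet traitSet, Convention convention, AlgOptEntity table ) {
        super( traitSet.replace( convention ), table );
    }

}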
@@ -58,7 +61,7 @@ public AlgOptCost computeSelfCost( AlgOptPlanner planner, AlgMetadataQuery mq ) } - public JdbcScan( AlgOptCluster cluster, AlgOptTable table, JdbcTable jdbcTable, JdbcConvention jdbcConvention ) { + public JdbcScan( AlgOptCluster cluster, AlgOptEntity table, JdbcTable jdbcTable, JdbcConvention jdbcConvention ) { super( cluster, cluster.traitSetOf( jdbcConvention ).replace( ModelTrait.RELATIONAL ), table ); this.jdbcTable = jdbcTable; assert jdbcTable != null; diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcTable.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcTable.java index eedf29d771..0572f53844 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcTable.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcTable.java @@ -35,6 +35,10 @@ import com.google.common.collect.Lists; +import java.util.Arrays; +import java.util.Collection; +import java.util.List; +import java.util.Objects; import org.apache.calcite.avatica.ColumnMetaData; import org.apache.calcite.linq4j.Enumerable; import org.apache.calcite.linq4j.Enumerator; @@ -54,8 +58,8 @@ import org.polypheny.db.languages.OperatorRegistry; import org.polypheny.db.languages.ParserPos; import org.polypheny.db.plan.AlgOptCluster; -import org.polypheny.db.plan.AlgOptTable; -import org.polypheny.db.plan.AlgOptTable.ToAlgContext; +import org.polypheny.db.plan.AlgOptEntity; +import org.polypheny.db.plan.AlgOptEntity.ToAlgContext; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.plan.Convention; import org.polypheny.db.prepare.Prepare.CatalogReader; @@ -66,17 +70,17 @@ import org.polypheny.db.schema.SchemaPlus; import org.polypheny.db.schema.TranslatableTable; import org.polypheny.db.schema.impl.AbstractTableQueryable; -import org.polypheny.db.sql.language.*; +import org.polypheny.db.sql.language.SqlBasicCall; +import org.polypheny.db.sql.language.SqlIdentifier; +import org.polypheny.db.sql.language.SqlNode; +import org.polypheny.db.sql.language.SqlNodeList; +import org.polypheny.db.sql.language.SqlOperator; +import org.polypheny.db.sql.language.SqlSelect; import org.polypheny.db.sql.language.pretty.SqlPrettyWriter; import org.polypheny.db.sql.language.util.SqlString; import org.polypheny.db.util.Pair; import org.polypheny.db.util.Util; -import java.util.Arrays; -import java.util.Collection; -import java.util.List; -import java.util.Objects; - /** * Queryable that gets its data from a table within a JDBC connection. 
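Note: getModify in DmlRouterImpl above funnels every relational DML through the same dispatch: unwrap the entity to a ModifiableTable and ask it for a Modify node; the JdbcTable hunk below adapts one such toModificationAlg implementation to the AlgOptEntity parameter. A reduced sketch of that dispatch; all types and the exact parameter list are simplified stand-ins, not the real signatures.

import java.util.List;

// Simplified stand-ins for org.polypheny.db.plan / org.polypheny.db.schema types.
interface AlgNode { }

enum Operation { INSERT, UPDATE, DELETE }

interface AlgOptEntity {

    <T> T unwrap( Class<T> clazz );

}

interface ModifiableTable {

    // The storage-specific table turns a child plan plus an operation into an
    // adapter-specific Modify node.
    AlgNode toModificationAlg( AlgOptEntity table, AlgNode input, Operation operation,
            List<String> updateColumns, List<AlgNode> sourceExpressions );

}

class ModifyDispatch {

    static AlgNode getModify( AlgOptEntity table, AlgNode input, Operation operation,
            List<String> updateColumns, List<AlgNode> sources ) {
        return table.unwrap( ModifiableTable.class )
                .toModificationAlg( table, input, operation, updateColumns, sources );
    }

}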
@@ -211,8 +215,8 @@ public SqlNodeList getNodeList() { @Override - public AlgNode toAlg( ToAlgContext context, AlgOptTable algOptTable, AlgTraitSet traitSet ) { - return new JdbcScan( context.getCluster(), algOptTable, this, jdbcSchema.getConvention() ); + public AlgNode toAlg( ToAlgContext context, AlgOptEntity algOptEntity, AlgTraitSet traitSet ) { + return new JdbcScan( context.getCluster(), algOptEntity, this, jdbcSchema.getConvention() ); } @@ -242,7 +246,7 @@ public Collection getModifiableCollection() { @Override public Modify toModificationAlg( AlgOptCluster cluster, - AlgOptTable table, + AlgOptEntity table, CatalogReader catalogReader, AlgNode input, Operation operation, diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/rel2sql/SqlImplementor.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/rel2sql/SqlImplementor.java index 384bcfa1ea..c7e10be94c 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/rel2sql/SqlImplementor.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/rel2sql/SqlImplementor.java @@ -58,7 +58,7 @@ import org.polypheny.db.languages.ParserPos; import org.polypheny.db.nodes.Node; import org.polypheny.db.nodes.Operator; -import org.polypheny.db.prepare.AlgOptTableImpl; +import org.polypheny.db.prepare.AlgOptEntityImpl; import org.polypheny.db.rex.*; import org.polypheny.db.sql.language.*; import org.polypheny.db.sql.language.SqlDialect.IntervalParameterStrategy; @@ -146,13 +146,13 @@ public Result setOpToSql( SqlSetOperator operator, AlgNode alg ) { final Result result = visitChild( input.i, input.e ); if ( node == null ) { if ( input.getValue() instanceof JdbcScan ) { - node = result.asSelect( ((JdbcTable) ((AlgOptTableImpl) input.getValue().getTable()).getTable()).getNodeList() ); + node = result.asSelect( ((JdbcTable) ((AlgOptEntityImpl) input.getValue().getTable()).getTable()).getNodeList() ); } else { node = result.asSelect(); } } else { if ( input.getValue() instanceof JdbcScan ) { - node = (SqlNode) operator.createCall( POS, node, result.asSelect( ((JdbcTable) ((AlgOptTableImpl) input.getValue().getTable()).getTable()).getNodeList() ) ); + node = (SqlNode) operator.createCall( POS, node, result.asSelect( ((JdbcTable) ((AlgOptEntityImpl) input.getValue().getTable()).getTable()).getNodeList() ) ); } else { node = (SqlNode) operator.createCall( POS, node, result.asSelect() ); } @@ -1128,7 +1128,7 @@ && hasNestedAggregations( (LogicalAggregate) alg ) ) { select = subSelect(); } else { if ( explicitColumnNames && alg.getInputs().size() == 1 && alg.getInput( 0 ) instanceof JdbcScan ) { - select = asSelect( ((JdbcTable) ((AlgOptTableImpl) alg.getInput( 0 ).getTable()).getTable()).getNodeList() ); + select = asSelect( ((JdbcTable) ((AlgOptEntityImpl) alg.getInput( 0 ).getTable()).getTable()).getNodeList() ); } else { select = asSelect(); } diff --git a/plugins/jdbc-adapter-framework/src/test/java/org/polypheny/db/adapter/jdbc/rel2sql/PlannerTest.java b/plugins/jdbc-adapter-framework/src/test/java/org/polypheny/db/adapter/jdbc/rel2sql/PlannerTest.java index 8c22e755a6..cb5fa52495 100644 --- a/plugins/jdbc-adapter-framework/src/test/java/org/polypheny/db/adapter/jdbc/rel2sql/PlannerTest.java +++ b/plugins/jdbc-adapter-framework/src/test/java/org/polypheny/db/adapter/jdbc/rel2sql/PlannerTest.java @@ -43,9 +43,9 @@ import org.polypheny.db.languages.Parser.ParserConfig; import org.polypheny.db.nodes.Node; import 
org.polypheny.db.plan.AlgOptCluster; +import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptPlanner; import org.polypheny.db.plan.AlgOptRule; -import org.polypheny.db.plan.AlgOptTable; import org.polypheny.db.plan.AlgOptUtil; import org.polypheny.db.plan.AlgTraitDef; import org.polypheny.db.plan.AlgTraitSet; @@ -181,7 +181,7 @@ public AlgNode convert( AlgNode alg ) { */ private static class MockJdbcScan extends Scan implements JdbcAlg { - MockJdbcScan( AlgOptCluster cluster, AlgOptTable table, JdbcConvention jdbcConvention ) { + MockJdbcScan( AlgOptCluster cluster, AlgOptEntity table, JdbcConvention jdbcConvention ) { super( cluster, cluster.traitSetOf( jdbcConvention ), table ); } diff --git a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoAlg.java b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoAlg.java index 22c84b531c..3d05eceac8 100644 --- a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoAlg.java +++ b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoAlg.java @@ -35,6 +35,12 @@ import com.mongodb.client.gridfs.GridFSBucket; +import java.io.Serializable; +import java.util.ArrayList; +import java.util.List; +import java.util.Set; +import java.util.TreeSet; +import java.util.stream.Collectors; import lombok.Getter; import lombok.Setter; import org.bson.BsonArray; @@ -48,17 +54,10 @@ import org.polypheny.db.algebra.core.Scan; import org.polypheny.db.algebra.logical.relational.LogicalProject; import org.polypheny.db.algebra.type.AlgRecordType; -import org.polypheny.db.plan.AlgOptTable; +import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.Convention; import org.polypheny.db.util.Pair; -import java.io.Serializable; -import java.util.ArrayList; -import java.util.List; -import java.util.Set; -import java.util.TreeSet; -import java.util.stream.Collectors; - /** * Relational expression that uses Mongo calling convention. 
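Note: MongoAlg, like CottontailAlg and GeodeAlg earlier in the patch, threads translation state through a shared Implementor whose table field is retyped to AlgOptEntity in the hunk below. A toy version of that accumulate-while-visiting pattern; every name here is invented for illustration.

import java.util.ArrayList;
import java.util.List;

interface AlgOptEntity { }

// Invented mini-implementor: nodes append their translated fragments while the
// planner walks the tree; the scan at the leaf binds the target entity.
class Implementor {

    AlgOptEntity table; // the field this patch retypes from AlgOptTable
    final List<String> fragments = new ArrayList<>();


    void add( String fragment ) {
        fragments.add( fragment );
    }

}

class ExampleScanNode {

    private final AlgOptEntity entity;


    ExampleScanNode( AlgOptEntity entity ) {
        this.entity = entity;
    }


    void implement( Implementor implementor ) {
        implementor.table = entity;
        implementor.add( "scan" );
    }

}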
@@ -92,7 +91,7 @@ class Implementor extends AlgShuttleImpl implements Serializable { public boolean onlyOne = false; public boolean isDocumentUpdate = false; - AlgOptTable table; + AlgOptEntity table; @Setter @Getter public boolean hasProject = false; diff --git a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoEntity.java b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoEntity.java index f01964b833..a9719dd6c5 100644 --- a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoEntity.java +++ b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoEntity.java @@ -86,8 +86,8 @@ import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; import org.polypheny.db.catalog.entity.CatalogTable; import org.polypheny.db.plan.AlgOptCluster; -import org.polypheny.db.plan.AlgOptTable; -import org.polypheny.db.plan.AlgOptTable.ToAlgContext; +import org.polypheny.db.plan.AlgOptEntity; +import org.polypheny.db.plan.AlgOptEntity.ToAlgContext; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.plan.Convention; import org.polypheny.db.prepare.Prepare.CatalogReader; @@ -176,9 +176,9 @@ public Queryable asQueryable( DataContext dataContext, SchemaPlus schema, @Override - public AlgNode toAlg( ToAlgContext context, AlgOptTable algOptTable, AlgTraitSet traitSet ) { + public AlgNode toAlg( ToAlgContext context, AlgOptEntity algOptEntity, AlgTraitSet traitSet ) { final AlgOptCluster cluster = context.getCluster(); - return new MongoScan( cluster, traitSet.replace( MongoAlg.CONVENTION ), algOptTable, this, null ); + return new MongoScan( cluster, traitSet.replace( MongoAlg.CONVENTION ), algOptEntity, this, null ); } @@ -327,7 +327,7 @@ public Collection getModifiableCollection() { @Override public Modify toModificationAlg( AlgOptCluster cluster, - AlgOptTable table, + AlgOptEntity table, CatalogReader catalogReader, AlgNode child, Operation operation, @@ -351,7 +351,7 @@ public Modify toModificationAlg( @Override public DocumentModify toModificationAlg( AlgOptCluster cluster, - AlgOptTable table, + AlgOptEntity table, CatalogReader catalogReader, AlgNode child, Operation operation, diff --git a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoRules.java b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoRules.java index f47c953552..003744083c 100644 --- a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoRules.java +++ b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoRules.java @@ -89,9 +89,9 @@ import org.polypheny.db.nodes.Operator; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgOptCost; +import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptPlanner; import org.polypheny.db.plan.AlgOptRule; -import org.polypheny.db.plan.AlgOptTable; import org.polypheny.db.plan.AlgTrait; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.plan.Convention; @@ -985,7 +985,7 @@ private static class MongoEntityModify extends Modify implements MongoAlg { protected MongoEntityModify( AlgOptCluster cluster, AlgTraitSet traitSet, - AlgOptTable table, + AlgOptEntity table, CatalogReader catalogReader, AlgNode input, Operation operation, diff --git a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoScan.java b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoScan.java index 
e2600343ee..b636ac93ff 100644
--- a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoScan.java
+++ b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoScan.java
@@ -34,14 +34,18 @@
 package org.polypheny.db.adapter.mongodb;

+import java.util.List;
 import org.polypheny.db.algebra.AlgNode;
 import org.polypheny.db.algebra.core.Scan;
 import org.polypheny.db.algebra.metadata.AlgMetadataQuery;
 import org.polypheny.db.algebra.type.AlgDataType;
 import org.polypheny.db.algebra.type.AlgRecordType;
-import org.polypheny.db.plan.*;
-
-import java.util.List;
+import org.polypheny.db.plan.AlgOptCluster;
+import org.polypheny.db.plan.AlgOptCost;
+import org.polypheny.db.plan.AlgOptEntity;
+import org.polypheny.db.plan.AlgOptPlanner;
+import org.polypheny.db.plan.AlgOptRule;
+import org.polypheny.db.plan.AlgTraitSet;

 /**
@@ -64,7 +68,7 @@ public class MongoScan extends Scan implements MongoAlg {
      * @param mongoEntity MongoDB table
      * @param projectRowType Fields and types to project; null to project raw row
      */
-    protected MongoScan( AlgOptCluster cluster, AlgTraitSet traitSet, AlgOptTable table, MongoEntity mongoEntity, AlgDataType projectRowType ) {
+    protected MongoScan( AlgOptCluster cluster, AlgTraitSet traitSet, AlgOptEntity table, MongoEntity mongoEntity, AlgDataType projectRowType ) {
         super( cluster, traitSet, table );
         this.mongoEntity = mongoEntity;
         this.projectRowType = projectRowType;
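The MongoScan constructor now takes an AlgOptEntity, matching the MongoEntity.toAlg() change earlier in this patch. For illustration only, the shape of that hand-off as the hunks show it — entity, convention, and scan wired together:

    // Illustrative restatement of MongoEntity.toAlg() from this patch: the
    // entity passes the planner handle (algOptEntity) plus itself into the scan.
    @Override
    public AlgNode toAlg( ToAlgContext context, AlgOptEntity algOptEntity, AlgTraitSet traitSet ) {
        final AlgOptCluster cluster = context.getCluster();
        return new MongoScan( cluster, traitSet.replace( MongoAlg.CONVENTION ), algOptEntity, this, null );
    }

The null projectRowType argument means the scan projects the raw row, per the javadoc above.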
AlgDataTypeFieldImpl( "_id", 0, typeFactory.createPolyType( PolyType.VARCHAR, 24 ) ) ); fieldInfo.add( new AlgDataTypeFieldImpl( "d", 0, typeFactory.createPolyType( PolyType.DOCUMENT ) ) ); AlgDataType rowType = fieldInfo.build(); CatalogTable catalogTable = Catalog.getInstance().getTable( table.getTable().getTableId() ); - return AlgOptTableImpl.create( + return AlgOptEntityImpl.create( table.getRelOptSchema(), rowType, new TableEntryImpl( @@ -363,7 +362,7 @@ private AlgOptTable getEntity( MqlCollectionStatement query, String dbSchemaName /** * Starts converting a db.collection.update(); */ - private AlgNode convertUpdate( MqlUpdate query, AlgOptTable table, AlgNode node ) { + private AlgNode convertUpdate( MqlUpdate query, AlgOptEntity table, AlgNode node ) { if ( !query.getQuery().isEmpty() ) { node = convertQuery( query, table.getRowType(), node ); if ( query.isOnlyOne() ) { @@ -386,7 +385,7 @@ private AlgNode convertUpdate( MqlUpdate query, AlgOptTable table, AlgNode node * this method is implemented like the reduced update pipeline, * but in fact could be combined and therefore optimized a lot more */ - private AlgNode translateUpdate( MqlUpdate query, AlgDataType rowType, AlgNode node, AlgOptTable table ) { + private AlgNode translateUpdate( MqlUpdate query, AlgDataType rowType, AlgNode node, AlgOptEntity table ) { Map updates = new HashMap<>(); Map>> mergedUpdates = new HashMap<>(); mergedUpdates.put( UpdateOperation.REMOVE, new ArrayList<>() ); @@ -540,7 +539,7 @@ private void combineUpdate( Map>> me * @param table the active table * @return the unified UPDATE AlgNode */ - private AlgNode finalizeUpdates( String key, Map>> mergedUpdates, AlgDataType rowType, AlgNode node, AlgOptTable table ) { + private AlgNode finalizeUpdates( String key, Map>> mergedUpdates, AlgDataType rowType, AlgNode node, AlgOptEntity table ) { RexNode updateChain = getIdentifier( key, rowType ); // replace List> replaceNodes = mergedUpdates.get( UpdateOperation.REPLACE ); @@ -721,7 +720,7 @@ private Map translateCurrentDate( BsonDocument value, AlgDataTy /** * Starts translating an update pipeline */ - private AlgNode convertReducedPipeline( MqlUpdate query, AlgDataType rowType, AlgNode node, AlgOptTable table ) { + private AlgNode convertReducedPipeline( MqlUpdate query, AlgDataType rowType, AlgNode node, AlgOptEntity table ) { Map updates = new HashMap<>(); Map>> mergedUpdates = new HashMap<>(); mergedUpdates.put( UpdateOperation.REMOVE, new ArrayList<>() ); @@ -765,7 +764,7 @@ private AlgNode convertReducedPipeline( MqlUpdate query, AlgDataType rowType, Al /** * Translates a delete operation from its MqlNode format to the {@link AlgNode} form */ - private AlgNode convertDelete( MqlDelete query, AlgOptTable table, AlgNode node ) { + private AlgNode convertDelete( MqlDelete query, AlgOptEntity table, AlgNode node ) { if ( !query.getQuery().isEmpty() ) { node = convertQuery( query, table.getRowType(), node ); } @@ -789,7 +788,7 @@ private AlgNode convertDelete( MqlDelete query, AlgOptTable table, AlgNode node * @param table the table/collection into which the values are inserted * @return the modified AlgNode */ - private AlgNode convertInsert( MqlInsert query, AlgOptTable table ) { + private AlgNode convertInsert( MqlInsert query, AlgOptEntity table ) { return LogicalDocumentModify.create( table, convertMultipleValues( query.getValues(), table.getRowType() ), diff --git a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/NeoEntity.java 
b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/NeoEntity.java index e94d7c4b61..aff22b21fe 100644 --- a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/NeoEntity.java +++ b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/NeoEntity.java @@ -44,8 +44,8 @@ import org.polypheny.db.algebra.type.AlgDataTypeFactory; import org.polypheny.db.algebra.type.AlgProtoDataType; import org.polypheny.db.plan.AlgOptCluster; -import org.polypheny.db.plan.AlgOptTable; -import org.polypheny.db.plan.AlgOptTable.ToAlgContext; +import org.polypheny.db.plan.AlgOptEntity; +import org.polypheny.db.plan.AlgOptEntity.ToAlgContext; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.plan.Convention; import org.polypheny.db.prepare.Prepare.CatalogReader; @@ -95,9 +95,9 @@ public AlgDataType getRowType( AlgDataTypeFactory typeFactory ) { @Override - public AlgNode toAlg( ToAlgContext context, AlgOptTable algOptTable, AlgTraitSet traitSet ) { + public AlgNode toAlg( ToAlgContext context, AlgOptEntity algOptEntity, AlgTraitSet traitSet ) { final AlgOptCluster cluster = context.getCluster(); - return new NeoScan( cluster, traitSet.replace( NeoConvention.INSTANCE ), algOptTable, this ); + return new NeoScan( cluster, traitSet.replace( NeoConvention.INSTANCE ), algOptEntity, this ); } @@ -119,7 +119,7 @@ public Collection getModifiableCollection() { @Override public Modify toModificationAlg( AlgOptCluster cluster, - AlgOptTable table, + AlgOptEntity table, CatalogReader catalogReader, AlgNode child, Operation operation, diff --git a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/NeoGraph.java b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/NeoGraph.java index 70a468abe7..a840000c78 100644 --- a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/NeoGraph.java +++ b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/NeoGraph.java @@ -47,7 +47,7 @@ import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; import org.polypheny.db.plan.AlgOptCluster; -import org.polypheny.db.plan.AlgOptTable.ToAlgContext; +import org.polypheny.db.plan.AlgOptEntity.ToAlgContext; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.plan.Convention; import org.polypheny.db.prepare.PolyphenyDbCatalogReader; diff --git a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/NeoRelationalImplementor.java b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/NeoRelationalImplementor.java index a259976b62..91fdbced41 100644 --- a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/NeoRelationalImplementor.java +++ b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/NeoRelationalImplementor.java @@ -57,7 +57,7 @@ import org.polypheny.db.algebra.constant.Kind; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeField; -import org.polypheny.db.plan.AlgOptTable; +import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.rex.RexDynamicParam; import org.polypheny.db.rex.RexLiteral; import org.polypheny.db.rex.RexNode; @@ -83,7 +83,7 @@ public class NeoRelationalImplementor extends AlgShuttleImpl { private boolean isDml; @Getter - private AlgOptTable table; + private AlgOptEntity table; @Getter private NeoEntity entity; @@ -109,7 +109,7 @@ public void addAll( List statements ) { } - public void setTable( AlgOptTable table ) { + public void 
setTable( AlgOptEntity table ) { this.table = table; this.entity = (NeoEntity) table.getTable(); } diff --git a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/rules/relational/NeoModify.java b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/rules/relational/NeoModify.java index 89cfb73945..16793a3353 100644 --- a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/rules/relational/NeoModify.java +++ b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/rules/relational/NeoModify.java @@ -22,7 +22,7 @@ import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.core.Modify; import org.polypheny.db.plan.AlgOptCluster; -import org.polypheny.db.plan.AlgOptTable; +import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.prepare.Prepare.CatalogReader; import org.polypheny.db.rex.RexNode; @@ -47,7 +47,7 @@ public class NeoModify extends Modify implements NeoRelAlg { * @param sourceExpressionList List of value expressions to be set (e.g. exp1, exp2); null if not UPDATE * @param flattened Whether set flattens the input row type */ - public NeoModify( AlgOptCluster cluster, AlgTraitSet traitSet, AlgOptTable table, CatalogReader catalogReader, AlgNode input, Operation operation, List updateColumnList, List sourceExpressionList, boolean flattened ) { + public NeoModify( AlgOptCluster cluster, AlgTraitSet traitSet, AlgOptEntity table, CatalogReader catalogReader, AlgNode input, Operation operation, List updateColumnList, List sourceExpressionList, boolean flattened ) { super( cluster, traitSet, table, catalogReader, input, operation, updateColumnList, sourceExpressionList, flattened ); } diff --git a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/rules/relational/NeoScan.java b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/rules/relational/NeoScan.java index f58c37d418..d82853e003 100644 --- a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/rules/relational/NeoScan.java +++ b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/rules/relational/NeoScan.java @@ -34,7 +34,7 @@ import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.core.Scan; import org.polypheny.db.plan.AlgOptCluster; -import org.polypheny.db.plan.AlgOptTable; +import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgTraitSet; public class NeoScan extends Scan implements NeoRelAlg { @@ -43,7 +43,7 @@ public class NeoScan extends Scan implements NeoRelAlg { private final NeoEntity neoEntity; - public NeoScan( AlgOptCluster cluster, AlgTraitSet traitSet, AlgOptTable table, NeoEntity neoEntity ) { + public NeoScan( AlgOptCluster cluster, AlgTraitSet traitSet, AlgOptEntity table, NeoEntity neoEntity ) { super( cluster, traitSet, table ); this.neoEntity = neoEntity; } diff --git a/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigAggregate.java b/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigAggregate.java index 6f1c4db8e0..17556c53d1 100644 --- a/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigAggregate.java +++ b/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigAggregate.java @@ -44,7 +44,7 @@ import org.polypheny.db.algebra.core.AggregateCall; import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.plan.AlgOptCluster; -import org.polypheny.db.plan.AlgOptTable; +import org.polypheny.db.plan.AlgOptEntity; 
import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.util.ImmutableBitSet; @@ -96,7 +96,7 @@ private String getPigAggregateStatement( Implementor implementor ) { * Override this method so it looks down the tree to find the table this node is acting on. */ @Override - public AlgOptTable getTable() { + public AlgOptEntity getTable() { return getInput().getTable(); } diff --git a/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigAlgFactories.java b/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigAlgFactories.java index 389ffbd106..530dca3ad9 100644 --- a/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigAlgFactories.java +++ b/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigAlgFactories.java @@ -44,7 +44,7 @@ import org.polypheny.db.algebra.core.CorrelationId; import org.polypheny.db.algebra.core.JoinAlgType; import org.polypheny.db.plan.AlgOptCluster; -import org.polypheny.db.plan.AlgOptTable; +import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.Context; import org.polypheny.db.plan.Contexts; import org.polypheny.db.rex.RexNode; @@ -73,7 +73,7 @@ public static class PigScanFactory implements ScanFactory { @Override - public AlgNode createScan( AlgOptCluster cluster, AlgOptTable table ) { + public AlgNode createScan( AlgOptCluster cluster, AlgOptEntity table ) { return new PigScan( cluster, cluster.traitSetOf( PigAlg.CONVENTION ), table ); } diff --git a/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigFilter.java b/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigFilter.java index 27943a51ae..ce335eb42d 100644 --- a/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigFilter.java +++ b/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigFilter.java @@ -43,7 +43,7 @@ import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.core.Filter; import org.polypheny.db.plan.AlgOptCluster; -import org.polypheny.db.plan.AlgOptTable; +import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptUtil; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.rex.RexCall; @@ -83,7 +83,7 @@ public void implement( Implementor implementor ) { * Override this method so it looks down the tree to find the table this node is acting on. */ @Override - public AlgOptTable getTable() { + public AlgOptEntity getTable() { return getInput().getTable(); } diff --git a/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigJoin.java b/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigJoin.java index 0c3a7fde49..1d50debc17 100644 --- a/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigJoin.java +++ b/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigJoin.java @@ -42,7 +42,7 @@ import org.polypheny.db.algebra.core.Join; import org.polypheny.db.algebra.core.JoinAlgType; import org.polypheny.db.plan.AlgOptCluster; -import org.polypheny.db.plan.AlgOptTable; +import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptUtil; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.rex.RexCall; @@ -82,7 +82,7 @@ public void implement( Implementor implementor ) { * The Pig alias of the joined relation will have the same name as one from the left side of the join. 
     */
    @Override
-    public AlgOptTable getTable() {
+    public AlgOptEntity getTable() {
        return getLeft().getTable();
    }

diff --git a/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigProject.java b/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigProject.java
index 9814aa3c0e..35a4bdc323 100644
--- a/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigProject.java
+++ b/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigProject.java
@@ -39,7 +39,7 @@
 import org.polypheny.db.algebra.core.Project;
 import org.polypheny.db.algebra.type.AlgDataType;
 import org.polypheny.db.plan.AlgOptCluster;
-import org.polypheny.db.plan.AlgOptTable;
+import org.polypheny.db.plan.AlgOptEntity;
 import org.polypheny.db.plan.AlgTraitSet;
 import org.polypheny.db.rex.RexNode;
@@ -74,7 +74,7 @@ public void implement( Implementor implementor ) {
      * Override this method so it looks down the tree to find the table this node is acting on.
      */
     @Override
-    public AlgOptTable getTable() {
+    public AlgOptEntity getTable() {
         return getInput().getTable();
     }
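PigAggregate, PigFilter, PigJoin, and PigProject above all override getTable() with the same one-liner. A sketch of the pattern in isolation — the class SingleInputPigAlg is hypothetical, used here only to show why the override exists:

    // Hypothetical base class, not part of this patch: single-input Pig
    // operators own no entity themselves, so getTable() walks down the tree
    // until it reaches the PigScan that actually carries the AlgOptEntity.
    abstract class SingleInputPigAlg extends SingleAlg implements PigAlg {
        @Override
        public AlgOptEntity getTable() {
            return getInput().getTable();
        }
    }

PigJoin is the one variant: it delegates to getLeft(), since the Pig alias of the joined relation takes its name from the left input.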
diff --git a/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigRules.java b/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigRules.java
index 837152c3fc..69c31cc524 100644
--- a/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigRules.java
+++ b/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigRules.java
@@ -42,7 +42,7 @@
 import org.polypheny.db.algebra.logical.relational.LogicalFilter;
 import org.polypheny.db.algebra.logical.relational.LogicalJoin;
 import org.polypheny.db.algebra.logical.relational.LogicalProject;
-import org.polypheny.db.algebra.logical.relational.LogicalScan;
+import org.polypheny.db.algebra.logical.relational.LogicalRelScan;
 import org.polypheny.db.plan.AlgOptRule;
 import org.polypheny.db.plan.AlgTraitSet;
 import org.polypheny.db.plan.Convention;
@@ -90,7 +90,7 @@ public AlgNode convert( AlgNode alg ) {

     /**
-     * Rule to convert a {@link LogicalScan} to a {@link PigScan}.
+     * Rule to convert a {@link LogicalRelScan} to a {@link PigScan}.
     */
    private static class PigScanRule extends ConverterRule {
@@ -98,13 +98,13 @@ private static class PigScanRule extends ConverterRule {

        private PigScanRule() {
-            super( LogicalScan.class, Convention.NONE, PigAlg.CONVENTION, "PigScanRule" );
+            super( LogicalRelScan.class, Convention.NONE, PigAlg.CONVENTION, "PigScanRule" );
        }

        @Override
        public AlgNode convert( AlgNode alg ) {
-            final LogicalScan scan = (LogicalScan) alg;
+            final LogicalRelScan scan = (LogicalRelScan) alg;
            final AlgTraitSet traitSet = scan.getTraitSet().replace( PigAlg.CONVENTION );
            return new PigScan( alg.getCluster(), traitSet, scan.getTable() );
        }
diff --git a/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigScan.java b/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigScan.java
index 21da4fb4ad..45de8ab15c 100644
--- a/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigScan.java
+++ b/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigScan.java
@@ -42,9 +42,9 @@
 import org.polypheny.db.algebra.rules.AggregateExpandDistinctAggregatesRule;
 import org.polypheny.db.algebra.type.AlgDataTypeField;
 import org.polypheny.db.plan.AlgOptCluster;
+import org.polypheny.db.plan.AlgOptEntity;
 import org.polypheny.db.plan.AlgOptPlanner;
 import org.polypheny.db.plan.AlgOptRule;
-import org.polypheny.db.plan.AlgOptTable;
 import org.polypheny.db.plan.AlgTraitSet;
 import org.polypheny.db.schema.PolyphenyDbSchema;
@@ -57,7 +57,7 @@ public class PigScan extends Scan implements PigAlg {
     /**
      * Creates a PigScan.
      */
-    public PigScan( AlgOptCluster cluster, AlgTraitSet traitSet, AlgOptTable table ) {
+    public PigScan( AlgOptCluster cluster, AlgTraitSet traitSet, AlgOptEntity table ) {
         super( cluster, traitSet, table );
         assert getConvention() == CONVENTION;
     }
diff --git a/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigTable.java b/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigTable.java
index de0fa45c7d..5f867bf04d 100644
--- a/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigTable.java
+++ b/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigTable.java
@@ -39,8 +39,8 @@
 import org.polypheny.db.algebra.type.AlgDataType;
 import org.polypheny.db.algebra.type.AlgDataTypeFactory;
 import org.polypheny.db.plan.AlgOptCluster;
-import org.polypheny.db.plan.AlgOptTable;
-import org.polypheny.db.plan.AlgOptTable.ToAlgContext;
+import org.polypheny.db.plan.AlgOptEntity;
+import org.polypheny.db.plan.AlgOptEntity.ToAlgContext;
 import org.polypheny.db.plan.AlgTraitSet;
 import org.polypheny.db.schema.TranslatableTable;
 import org.polypheny.db.schema.impl.AbstractTable;
@@ -89,9 +89,9 @@ public String getFilePath() {

     @Override
-    public AlgNode toAlg( ToAlgContext context, AlgOptTable algOptTable, AlgTraitSet traitSet ) {
+    public AlgNode toAlg( ToAlgContext context, AlgOptEntity algOptEntity, AlgTraitSet traitSet ) {
         final AlgOptCluster cluster = context.getCluster();
-        return new PigScan( cluster, cluster.traitSetOf( PigAlg.CONVENTION ), algOptTable );
+        return new PigScan( cluster, cluster.traitSetOf( PigAlg.CONVENTION ), algOptEntity );
     }

 }
diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java
index 9d3c2e064b..1d6a26d681 100644
--- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java
+++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java
@@ -36,9 +36,9 @@
import org.polypheny.db.catalog.snapshot.logical.LogicalFullSnapshot; import org.polypheny.db.nodes.Identifier; import org.polypheny.db.nodes.Operator; -import org.polypheny.db.plan.AlgOptTable; +import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.prepare.Prepare.CatalogReader; -import org.polypheny.db.prepare.Prepare.PreparingTable; +import org.polypheny.db.prepare.Prepare.PreparingEntity; import org.polypheny.db.schema.PolyphenyDbSchema; import org.polypheny.db.schema.graph.Graph; import org.polypheny.db.util.Moniker; @@ -187,19 +187,19 @@ public PolyphenyDbSchema getRootSchema() { @Override - public PreparingTable getTableForMember( List names ) { + public PreparingEntity getTableForMember( List names ) { return null; } @Override - public PreparingTable getTable( List names ) { + public PreparingEntity getTable( List names ) { return null; } @Override - public AlgOptTable getCollection( List names ) { + public AlgOptEntity getCollection( List names ) { return null; } diff --git a/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/Rest.java b/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/Rest.java index 6c8a129196..96cf751c64 100644 --- a/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/Rest.java +++ b/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/Rest.java @@ -55,7 +55,7 @@ import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgOptPlanner; import org.polypheny.db.prepare.PolyphenyDbCatalogReader; -import org.polypheny.db.prepare.Prepare.PreparingTable; +import org.polypheny.db.prepare.Prepare.PreparingEntity; import org.polypheny.db.restapi.RequestParser.Filters; import org.polypheny.db.restapi.exception.RestException; import org.polypheny.db.restapi.models.requests.ResourceDeleteRequest; @@ -156,7 +156,7 @@ String processPatchResource( final ResourcePatchRequest resourcePatchRequest, fi RexBuilder rexBuilder = new RexBuilder( typeFactory ); PolyphenyDbCatalogReader catalogReader = statement.getTransaction().getCatalogReader(); - PreparingTable table = catalogReader.getTable( Arrays.asList( resourcePatchRequest.tables.get( 0 ).getNamespaceName(), resourcePatchRequest.tables.get( 0 ).name ) ); + PreparingEntity table = catalogReader.getTable( Arrays.asList( resourcePatchRequest.tables.get( 0 ).getNamespaceName(), resourcePatchRequest.tables.get( 0 ).name ) ); // Table Scans algBuilder = this.tableScans( algBuilder, rexBuilder, resourcePatchRequest.tables ); @@ -215,7 +215,7 @@ String processDeleteResource( final ResourceDeleteRequest resourceDeleteRequest, RexBuilder rexBuilder = new RexBuilder( typeFactory ); PolyphenyDbCatalogReader catalogReader = statement.getTransaction().getCatalogReader(); - PreparingTable table = catalogReader.getTable( Arrays.asList( resourceDeleteRequest.tables.get( 0 ).getNamespaceName(), resourceDeleteRequest.tables.get( 0 ).name ) ); + PreparingEntity table = catalogReader.getTable( Arrays.asList( resourceDeleteRequest.tables.get( 0 ).getNamespaceName(), resourceDeleteRequest.tables.get( 0 ).name ) ); // Table Scans algBuilder = this.tableScans( algBuilder, rexBuilder, resourceDeleteRequest.tables ); @@ -268,7 +268,7 @@ String processPostResource( final ResourcePostRequest insertValueRequest, final RexBuilder rexBuilder = new RexBuilder( typeFactory ); PolyphenyDbCatalogReader catalogReader = statement.getTransaction().getCatalogReader(); - PreparingTable table = catalogReader.getTable( Arrays.asList( insertValueRequest.tables.get( 0 ).getNamespaceName(), 
insertValueRequest.tables.get( 0 ).name ) ); + PreparingEntity table = catalogReader.getTable( Arrays.asList( insertValueRequest.tables.get( 0 ).getNamespaceName(), insertValueRequest.tables.get( 0 ).name ) ); // Values AlgDataType tableRowType = table.getRowType(); diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/DelegatingScope.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/DelegatingScope.java index dcbc42ed10..1d30737759 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/DelegatingScope.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/DelegatingScope.java @@ -32,7 +32,7 @@ import org.polypheny.db.algebra.type.StructKind; import org.polypheny.db.languages.ParserPos; import org.polypheny.db.nodes.validate.ValidatorTable; -import org.polypheny.db.prepare.Prepare; +import org.polypheny.db.prepare.Prepare.PreparingEntity; import org.polypheny.db.schema.CustomColumnResolvingTable; import org.polypheny.db.schema.Table; import org.polypheny.db.sql.language.SqlCall; @@ -101,8 +101,8 @@ void resolveInNamespace( SqlValidatorNamespace ns, boolean nullable, List>> entries = ((CustomColumnResolvingTable) t).resolveColumn( rowType, validator.getTypeFactory(), names ); for ( Pair> entry : entries ) { diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/EmptyScope.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/EmptyScope.java index da1425f323..e469f6c5af 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/EmptyScope.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/EmptyScope.java @@ -30,8 +30,8 @@ import org.polypheny.db.catalog.entity.CatalogTable; import org.polypheny.db.nodes.validate.ValidatorTable; import org.polypheny.db.plan.AlgOptSchema; -import org.polypheny.db.prepare.AlgOptTableImpl; -import org.polypheny.db.prepare.Prepare; +import org.polypheny.db.prepare.AlgOptEntityImpl; +import org.polypheny.db.prepare.Prepare.PreparingEntity; import org.polypheny.db.schema.PolyphenyDbSchema; import org.polypheny.db.schema.Table; import org.polypheny.db.schema.Wrapper; @@ -152,12 +152,12 @@ private void resolve_( final PolyphenyDbSchema rootSchema, List names, L final CatalogTable catalogTable = Catalog.getInstance().getTable( table.getTableId() ); ValidatorTable table2 = null; if ( table instanceof Wrapper ) { - table2 = ((Wrapper) table).unwrap( Prepare.PreparingTable.class ); + table2 = ((Wrapper) table).unwrap( PreparingEntity.class ); } if ( table2 == null ) { final AlgOptSchema algOptSchema = validator.catalogReader.unwrap( AlgOptSchema.class ); final AlgDataType rowType = table.getRowType( validator.typeFactory ); - table2 = AlgOptTableImpl.create( algOptSchema, rowType, entry, catalogTable, null ); + table2 = AlgOptEntityImpl.create( algOptSchema, rowType, entry, catalogTable, null ); } namespace = new TableNamespace( validator, table2 ); resolved.found( namespace, false, null, path, remainingNames ); diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorImpl.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorImpl.java index aa359f75c3..dfe85a0e98 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorImpl.java +++ 
b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorImpl.java @@ -88,7 +88,7 @@ import org.polypheny.db.nodes.validate.ValidatorNamespace; import org.polypheny.db.nodes.validate.ValidatorScope; import org.polypheny.db.nodes.validate.ValidatorTable; -import org.polypheny.db.plan.AlgOptTable; +import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.prepare.Prepare; import org.polypheny.db.prepare.Prepare.CatalogReader; import org.polypheny.db.rex.RexBuilder; @@ -4059,7 +4059,7 @@ protected AlgDataType createTargetRowType( ValidatorTable table, SqlNodeList tar } } final Set assignedFields = new HashSet<>(); - final AlgOptTable algOptTable = table instanceof AlgOptTable ? ((AlgOptTable) table) : null; + final AlgOptEntity algOptEntity = table instanceof AlgOptEntity ? ((AlgOptEntity) table) : null; for ( Node node : targetColumnList ) { SqlIdentifier id = (SqlIdentifier) node; AlgDataTypeField targetField = @@ -4068,7 +4068,7 @@ protected AlgDataType createTargetRowType( ValidatorTable table, SqlNodeList tar typeFactory, id, catalogReader, - algOptTable, + algOptEntity, allowDynamic ); if ( targetField == null ) { @@ -4090,16 +4090,16 @@ protected AlgDataType createTargetRowType( ValidatorTable table, SqlNodeList tar public void validateInsert( SqlInsert insert ) { final SqlValidatorNamespace targetNamespace = getSqlNamespace( insert ); validateNamespace( targetNamespace, unknownType ); - final AlgOptTable algOptTable = + final AlgOptEntity algOptEntity = SqlValidatorUtil.getAlgOptTable( targetNamespace, catalogReader.unwrap( Prepare.CatalogReader.class ), null, null ); final ValidatorTable table = - algOptTable == null + algOptEntity == null ? targetNamespace.getTable() - : algOptTable.unwrap( ValidatorTable.class ); + : algOptEntity.unwrap( ValidatorTable.class ); boolean allowDynamic = false; if ( insert.getSchemaType() == NamespaceType.DOCUMENT ) { @@ -4164,7 +4164,7 @@ public RexNode convertExpression( Node e ) { throw new UnsupportedOperationException(); } }; - final List strategies = table.unwrap( AlgOptTable.class ).getColumnStrategies(); + final List strategies = table.unwrap( AlgOptEntity.class ).getColumnStrategies(); for ( final AlgDataTypeField field : table.getRowType().getFieldList() ) { final AlgDataTypeField targetField = logicalTargetRowType.getField( field.getName(), true, false ); switch ( strategies.get( field.getIndex() ) ) { @@ -4374,16 +4374,16 @@ public void validateDelete( SqlDelete call ) { public void validateUpdate( SqlUpdate call ) { final SqlValidatorNamespace targetNamespace = getSqlNamespace( call ); validateNamespace( targetNamespace, unknownType ); - final AlgOptTable algOptTable = + final AlgOptEntity algOptEntity = SqlValidatorUtil.getAlgOptTable( targetNamespace, catalogReader.unwrap( Prepare.CatalogReader.class ), null, null ); final ValidatorTable table = - algOptTable == null + algOptEntity == null ? 
targetNamespace.getTable() - : algOptTable.unwrap( ValidatorTable.class ); + : algOptEntity.unwrap( ValidatorTable.class ); final AlgDataType targetRowType = createTargetRowType( diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorUtil.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorUtil.java index 75cd97a235..e944add688 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorUtil.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorUtil.java @@ -45,7 +45,7 @@ import org.polypheny.db.nodes.validate.ValidatorCatalogReader; import org.polypheny.db.nodes.validate.ValidatorTable; import org.polypheny.db.plan.AlgOptSchemaWithSampling; -import org.polypheny.db.plan.AlgOptTable; +import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.prepare.Prepare; import org.polypheny.db.schema.AbstractPolyphenyDbSchema; import org.polypheny.db.schema.CustomColumnResolvingTable; @@ -81,7 +81,7 @@ private SqlValidatorUtil() { /** - * Converts a {@link SqlValidatorScope} into a {@link AlgOptTable}. This is only possible if the scope represents an identifier, such as "sales.emp". + * Converts a {@link SqlValidatorScope} into a {@link AlgOptEntity}. This is only possible if the scope represents an identifier, such as "sales.emp". * Otherwise, returns null. * * @param namespace Namespace @@ -89,7 +89,7 @@ private SqlValidatorUtil() { * @param datasetName Name of sample dataset to substitute, or null to use the regular table * @param usedDataset Output parameter which is set to true if a sample dataset is found; may be null */ - public static AlgOptTable getAlgOptTable( SqlValidatorNamespace namespace, Prepare.CatalogReader catalogReader, String datasetName, boolean[] usedDataset ) { + public static AlgOptEntity getAlgOptTable( SqlValidatorNamespace namespace, Prepare.CatalogReader catalogReader, String datasetName, boolean[] usedDataset ) { if ( namespace.isWrapperFor( TableNamespace.class ) ) { final TableNamespace tableNamespace = namespace.unwrap( TableNamespace.class ); return getAlgOptTable( tableNamespace, catalogReader, datasetName, usedDataset, tableNamespace.extendedFields ); @@ -111,9 +111,9 @@ public static AlgOptTable getAlgOptTable( SqlValidatorNamespace namespace, Prepa } - private static AlgOptTable getAlgOptTable( TableNamespace tableNamespace, Prepare.CatalogReader catalogReader, String datasetName, boolean[] usedDataset, List extendedFields ) { + private static AlgOptEntity getAlgOptTable( TableNamespace tableNamespace, Prepare.CatalogReader catalogReader, String datasetName, boolean[] usedDataset, List extendedFields ) { final List names = tableNamespace.getTable().getQualifiedName(); - AlgOptTable table; + AlgOptEntity table; if ( datasetName != null && catalogReader instanceof AlgOptSchemaWithSampling ) { final AlgOptSchemaWithSampling reader = (AlgOptSchemaWithSampling) catalogReader; table = reader.getTableForMember( names, datasetName, usedDataset ); @@ -267,7 +267,7 @@ public static SqlValidatorWithHints newValidator( OperatorTable opTab, Validator } - public static AlgDataTypeField getTargetField( AlgDataType rowType, AlgDataTypeFactory typeFactory, SqlIdentifier id, ValidatorCatalogReader catalogReader, AlgOptTable table ) { + public static AlgDataTypeField getTargetField( AlgDataType rowType, AlgDataTypeFactory typeFactory, SqlIdentifier id, ValidatorCatalogReader catalogReader, AlgOptEntity table ) 
{ return getTargetField( rowType, typeFactory, id, catalogReader, table, false ); } @@ -280,7 +280,7 @@ public static AlgDataTypeField getTargetField( AlgDataType rowType, AlgDataTypeF * @param table the target table or null if it is not a RelOptTable instance * @return the target field or null if the name cannot be resolved */ - public static AlgDataTypeField getTargetField( AlgDataType rowType, AlgDataTypeFactory typeFactory, SqlIdentifier id, ValidatorCatalogReader catalogReader, AlgOptTable table, boolean isDocument ) { + public static AlgDataTypeField getTargetField( AlgDataType rowType, AlgDataTypeFactory typeFactory, SqlIdentifier id, ValidatorCatalogReader catalogReader, AlgOptEntity table, boolean isDocument ) { final Table t = table == null ? null : table.unwrap( Table.class ); if ( !(t instanceof CustomColumnResolvingTable) ) { diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/TableNamespace.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/TableNamespace.java index 455ea1ea03..201884de74 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/TableNamespace.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/TableNamespace.java @@ -26,7 +26,7 @@ import org.polypheny.db.algebra.type.AlgDataTypeFactory.Builder; import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.nodes.validate.ValidatorTable; -import org.polypheny.db.plan.AlgOptTable; +import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.schema.ExtensibleTable; import org.polypheny.db.schema.Table; import org.polypheny.db.sql.language.SqlIdentifier; @@ -106,10 +106,10 @@ public TableNamespace extend( SqlNodeList extendList ) { builder.addAll( SqlValidatorUtil.getExtendedColumns( validator.getTypeFactory(), getTable(), extendList ) ); final List extendedFields = builder.build(); final Table schemaTable = table.unwrap( Table.class ); - if ( schemaTable != null && table instanceof AlgOptTable && schemaTable instanceof ExtensibleTable ) { + if ( schemaTable != null && table instanceof AlgOptEntity && schemaTable instanceof ExtensibleTable ) { checkExtendedColumnTypes( extendList ); - final AlgOptTable algOptTable = ((AlgOptTable) table).extend( extendedFields ); - final ValidatorTable validatorTable = algOptTable.unwrap( ValidatorTable.class ); + final AlgOptEntity algOptEntity = ((AlgOptEntity) table).extend( extendedFields ); + final ValidatorTable validatorTable = algOptEntity.unwrap( ValidatorTable.class ); return new TableNamespace( validator, validatorTable, ImmutableList.of() ); } return new TableNamespace( validator, table, extendedFields ); diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/sql2alg/SqlToAlgConverter.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/sql2alg/SqlToAlgConverter.java index 8a96c002d5..b846cbd369 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/sql2alg/SqlToAlgConverter.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/sql2alg/SqlToAlgConverter.java @@ -105,7 +105,7 @@ import org.polypheny.db.algebra.logical.relational.LogicalModify; import org.polypheny.db.algebra.logical.relational.LogicalProject; import org.polypheny.db.algebra.logical.relational.LogicalRelViewScan; -import org.polypheny.db.algebra.logical.relational.LogicalScan; +import org.polypheny.db.algebra.logical.relational.LogicalRelScan; import 
org.polypheny.db.algebra.logical.relational.LogicalSort; import org.polypheny.db.algebra.logical.relational.LogicalTableFunctionScan; import org.polypheny.db.algebra.logical.relational.LogicalUnion; @@ -121,6 +121,7 @@ import org.polypheny.db.algebra.type.AlgDataTypeFactory; import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.Catalog.EntityType; import org.polypheny.db.catalog.Catalog.NamespaceType; import org.polypheny.db.catalog.entity.CatalogTable; import org.polypheny.db.languages.NodeToAlgConverter; @@ -140,12 +141,12 @@ import org.polypheny.db.nodes.validate.ValidatorTable; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgOptSamplingParameters; -import org.polypheny.db.plan.AlgOptTable; -import org.polypheny.db.plan.AlgOptTable.ToAlgContext; +import org.polypheny.db.plan.AlgOptEntity; +import org.polypheny.db.plan.AlgOptEntity.ToAlgContext; import org.polypheny.db.plan.AlgOptUtil; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.plan.Convention; -import org.polypheny.db.prepare.AlgOptTableImpl; +import org.polypheny.db.prepare.AlgOptEntityImpl; import org.polypheny.db.prepare.Prepare.CatalogReader; import org.polypheny.db.rex.RexBuilder; import org.polypheny.db.rex.RexCall; @@ -2119,7 +2120,7 @@ private void convertIdentifier( Blackboard bb, SqlIdentifier id, SqlNodeList ext } final String datasetName = datasetStack.isEmpty() ? null : datasetStack.peek(); final boolean[] usedDataset = { false }; - AlgOptTable table = SqlValidatorUtil.getAlgOptTable( fromNamespace, catalogReader, datasetName, usedDataset ); + AlgOptEntity table = SqlValidatorUtil.getAlgOptTable( fromNamespace, catalogReader, datasetName, usedDataset ); if ( extendedColumns != null && extendedColumns.size() > 0 ) { assert table != null; final ValidatorTable validatorTable = table.unwrap( ValidatorTable.class ); @@ -2129,10 +2130,10 @@ private void convertIdentifier( Blackboard bb, SqlIdentifier id, SqlNodeList ext final AlgNode tableRel; if ( config.isConvertTableAccess() ) { tableRel = toAlg( table ); - } else if ( table instanceof AlgOptTableImpl && (((AlgOptTableImpl) table).getTable()) instanceof LogicalRelView ) { + } else if ( table instanceof AlgOptEntityImpl && table.getCatalogEntity() != null && table.getCatalogEntity().entityType == EntityType.VIEW ) { tableRel = LogicalRelViewScan.create( cluster, table ); } else { - tableRel = LogicalScan.create( cluster, table ); + tableRel = LogicalRelScan.create( cluster, table ); } bb.setRoot( tableRel, true ); if ( usedDataset[0] ) { @@ -2163,8 +2164,8 @@ protected void convertCollectionTable( Blackboard bb, SqlCall call ) { final TranslatableTable table = udf.getTable( typeFactory, callBinding.sqlOperands() ); final CatalogTable catalogTable = Catalog.getInstance().getTable( table.getTableId() ); final AlgDataType rowType = table.getRowType( typeFactory ); - AlgOptTable algOptTable = AlgOptTableImpl.create( null, rowType, table, catalogTable, udf.getNameAsId().names ); - AlgNode converted = toAlg( algOptTable ); + AlgOptEntity algOptEntity = AlgOptEntityImpl.create( null, rowType, table, catalogTable, udf.getNameAsId().names ); + AlgNode converted = toAlg( algOptEntity ); bb.setRoot( converted, true ); return; } @@ -2863,7 +2864,7 @@ private boolean all( SqlCall call ) { protected AlgNode convertInsert( SqlInsert call ) { - AlgOptTable targetTable = getTargetTable( call ); + AlgOptEntity targetTable = getTargetTable( call ); final AlgDataType 
targetRowType = validator.getValidatedNodeType( call ); assert targetRowType != null; @@ -2877,7 +2878,7 @@ protected AlgNode convertInsert( SqlInsert call ) { /** * Creates a relational expression to modify a table or modifiable view. */ - private AlgNode createModify( AlgOptTable targetTable, AlgNode source ) { + private AlgNode createModify( AlgOptEntity targetTable, AlgNode source ) { final ModifiableTable modifiableTable = targetTable.unwrap( ModifiableTable.class ); if ( modifiableTable != null && modifiableTable == targetTable.unwrap( Table.class ) ) { return modifiableTable.toModificationAlg( @@ -2906,7 +2907,7 @@ private ToAlgContext createToRelContext() { } - public AlgNode toAlg( final AlgOptTable table ) { + public AlgNode toAlg( final AlgOptEntity table ) { final AlgNode scan = table.toAlg( createToRelContext(), cluster.traitSet() ); final InitializerExpressionFactory ief = @@ -2930,7 +2931,7 @@ public AlgNode toAlg( final AlgOptTable table ) { ++virtualCount; break; default: - list.add( rexBuilder.makeInputRef( scan, AlgOptTableImpl.realOrdinal( table, f.getIndex() ) ) ); + list.add( rexBuilder.makeInputRef( scan, AlgOptEntityImpl.realOrdinal( table, f.getIndex() ) ) ); } } if ( virtualCount > 0 ) { @@ -2942,7 +2943,7 @@ public AlgNode toAlg( final AlgOptTable table ) { } - protected AlgOptTable getTargetTable( SqlNode call ) { + protected AlgOptEntity getTargetTable( SqlNode call ) { final SqlValidatorNamespace targetNs = validator.getSqlNamespace( call ); if ( targetNs.isWrapperFor( SqlValidatorImpl.DmlNamespace.class ) ) { final SqlValidatorImpl.DmlNamespace dmlNamespace = targetNs.unwrap( SqlValidatorImpl.DmlNamespace.class ); @@ -2973,8 +2974,8 @@ protected AlgNode convertColumnList( final SqlInsert call, AlgNode source ) { final List columnExprs = new ArrayList<>(); collectInsertTargets( call, sourceRef, targetColumnNames, columnExprs ); - final AlgOptTable targetTable = getTargetTable( call ); - final AlgDataType targetRowType = AlgOptTableImpl.realRowType( targetTable ); + final AlgOptEntity targetTable = getTargetTable( call ); + final AlgDataType targetRowType = AlgOptEntityImpl.realRowType( targetTable ); final List targetFields = targetRowType.getFieldList(); boolean isDocument = call.getSchemaType() == NamespaceType.DOCUMENT; @@ -3024,7 +3025,7 @@ protected AlgNode convertColumnList( final SqlInsert call, AlgNode source ) { /** * Creates a blackboard for translating the expressions of generated columns in an INSERT statement. 
*/ - private Blackboard createInsertBlackboard( AlgOptTable targetTable, RexNode sourceRef, List targetColumnNames ) { + private Blackboard createInsertBlackboard( AlgOptEntity targetTable, RexNode sourceRef, List targetColumnNames ) { final Map nameToNodeMap = new HashMap<>(); int j = 0; @@ -3083,7 +3084,7 @@ private RexNode castNullLiteralIfNeeded( RexNode node, AlgDataType type ) { * @param columnExprs List of expressions, to be populated */ protected void collectInsertTargets( SqlInsert call, final RexNode sourceRef, final List targetColumnNames, List columnExprs ) { - final AlgOptTable targetTable = getTargetTable( call ); + final AlgOptEntity targetTable = getTargetTable( call ); final AlgDataType tableRowType = targetTable.getRowType(); SqlNodeList targetColumnList = call.getTargetColumnList(); if ( targetColumnList == null ) { @@ -3156,7 +3157,7 @@ protected void collectInsertTargets( SqlInsert call, final RexNode sourceRef, fi private AlgNode convertDelete( SqlDelete call ) { - AlgOptTable targetTable = getTargetTable( call ); + AlgOptEntity targetTable = getTargetTable( call ); AlgNode sourceRel = convertSelect( call.getSourceSelect(), false ); return LogicalModify.create( targetTable, @@ -3179,7 +3180,7 @@ private AlgNode convertUpdate( SqlUpdate call ) { rexNodeSourceExpressionListBuilder.add( rn ); } - AlgOptTable targetTable = getTargetTable( call ); + AlgOptEntity targetTable = getTargetTable( call ); // convert update column list from SqlIdentifier to String final List targetColumnNameList = new ArrayList<>(); @@ -3205,7 +3206,7 @@ private AlgNode convertUpdate( SqlUpdate call ) { private AlgNode convertMerge( SqlMerge call ) { - AlgOptTable targetTable = getTargetTable( call ); + AlgOptEntity targetTable = getTargetTable( call ); // convert update column list from SqlIdentifier to String final List targetColumnNameList = new ArrayList<>(); diff --git a/plugins/sql-language/src/test/java/org/polypheny/db/sql/AlgWriterTest.java b/plugins/sql-language/src/test/java/org/polypheny/db/sql/AlgWriterTest.java index 17811b9ab9..b4f8a76fb8 100644 --- a/plugins/sql-language/src/test/java/org/polypheny/db/sql/AlgWriterTest.java +++ b/plugins/sql-language/src/test/java/org/polypheny/db/sql/AlgWriterTest.java @@ -37,7 +37,7 @@ import org.polypheny.db.algebra.externalize.AlgJsonWriter; import org.polypheny.db.algebra.logical.relational.LogicalAggregate; import org.polypheny.db.algebra.logical.relational.LogicalFilter; -import org.polypheny.db.algebra.logical.relational.LogicalScan; +import org.polypheny.db.algebra.logical.relational.LogicalRelScan; import org.polypheny.db.algebra.operators.OperatorName; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.catalog.Catalog.NamespaceType; @@ -104,8 +104,8 @@ public void testWriter() { String s = Frameworks.withPlanner( ( cluster, algOptSchema, rootSchema ) -> { rootSchema.add( "hr", new ReflectiveSchema( new HrSchema() ), NamespaceType.RELATIONAL ); - LogicalScan scan = - LogicalScan.create( + LogicalRelScan scan = + LogicalRelScan.create( cluster, algOptSchema.getTableForMember( Arrays.asList( "hr", "emps" ) ) ); final RexBuilder rexBuilder = cluster.getRexBuilder(); diff --git a/plugins/sql-language/src/test/java/org/polypheny/db/sql/FrameworksTest.java b/plugins/sql-language/src/test/java/org/polypheny/db/sql/FrameworksTest.java index 37e5865763..f724be7c6e 100644 --- a/plugins/sql-language/src/test/java/org/polypheny/db/sql/FrameworksTest.java +++ 
b/plugins/sql-language/src/test/java/org/polypheny/db/sql/FrameworksTest.java @@ -59,11 +59,11 @@ import org.polypheny.db.languages.Parser; import org.polypheny.db.languages.Parser.ParserConfig; import org.polypheny.db.nodes.Node; -import org.polypheny.db.plan.AlgOptAbstractTable; +import org.polypheny.db.plan.AlgOptAbstractEntity; import org.polypheny.db.plan.AlgOptCluster; +import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptPlanner; import org.polypheny.db.plan.AlgOptSchema; -import org.polypheny.db.plan.AlgOptTable; import org.polypheny.db.plan.AlgOptUtil; import org.polypheny.db.plan.AlgTraitDef; import org.polypheny.db.plan.AlgTraitSet; @@ -125,7 +125,7 @@ public AlgDataType getRowType( AlgDataTypeFactory typeFactory ) { }; // "SELECT * FROM myTable" - final AlgOptAbstractTable algOptTable = new AlgOptAbstractTable( algOptSchema, "myTable", table.getRowType( typeFactory ) ) { + final AlgOptAbstractEntity algOptTable = new AlgOptAbstractEntity( algOptSchema, "myTable", table.getRowType( typeFactory ) ) { }; final EnumerableScan tableRel = EnumerableScan.create( cluster, algOptTable ); @@ -413,7 +413,7 @@ public Collection getModifiableCollection() { @Override - public Modify toModificationAlg( AlgOptCluster cluster, AlgOptTable table, Prepare.CatalogReader catalogReader, AlgNode child, Modify.Operation operation, List updateColumnList, List sourceExpressionList, boolean flattened ) { + public Modify toModificationAlg( AlgOptCluster cluster, AlgOptEntity table, Prepare.CatalogReader catalogReader, AlgNode child, Modify.Operation operation, List updateColumnList, List sourceExpressionList, boolean flattened ) { return LogicalModify.create( table, catalogReader, child, operation, updateColumnList, sourceExpressionList, flattened ); } diff --git a/plugins/sql-language/src/test/java/org/polypheny/db/sql/language/SqlToAlgTestBase.java b/plugins/sql-language/src/test/java/org/polypheny/db/sql/language/SqlToAlgTestBase.java index bbb9dadf3b..98128cb624 100644 --- a/plugins/sql-language/src/test/java/org/polypheny/db/sql/language/SqlToAlgTestBase.java +++ b/plugins/sql-language/src/test/java/org/polypheny/db/sql/language/SqlToAlgTestBase.java @@ -39,7 +39,7 @@ import org.polypheny.db.algebra.constant.ConformanceEnum; import org.polypheny.db.algebra.constant.Monotonicity; import org.polypheny.db.algebra.core.AlgFactories; -import org.polypheny.db.algebra.logical.relational.LogicalScan; +import org.polypheny.db.algebra.logical.relational.LogicalRelScan; import org.polypheny.db.algebra.operators.OperatorTable; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; @@ -59,7 +59,7 @@ import org.polypheny.db.plan.AlgOptPlanner; import org.polypheny.db.plan.AlgOptSchema; import org.polypheny.db.plan.AlgOptSchemaWithSampling; -import org.polypheny.db.plan.AlgOptTable; +import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptUtil; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.plan.Context; @@ -278,7 +278,7 @@ public MockRelOptSchema( ValidatorCatalogReader catalogReader, AlgDataTypeFactor @Override - public AlgOptTable getTableForMember( List names ) { + public AlgOptEntity getTableForMember( List names ) { final ValidatorTable table = catalogReader.getTable( names ); final AlgDataType rowType = table.getRowType(); final List collationList = deduceMonotonicity( table ); @@ -318,12 +318,12 @@ private List deduceMonotonicity( ValidatorTable table ) { @Override - public AlgOptTable 
getTableForMember( List names, final String datasetName, boolean[] usedDataset ) { - final AlgOptTable table = getTableForMember( names ); + public AlgOptEntity getTableForMember( List names, final String datasetName, boolean[] usedDataset ) { + final AlgOptEntity table = getTableForMember( names ); // If they're asking for a sample, just for test purposes, assume there's a table called ":". - AlgOptTable datasetTable = - new DelegatingRelOptTable( table ) { + AlgOptEntity datasetTable = + new DelegatingRelOptEntity( table ) { @Override public List getQualifiedName() { final List list = new ArrayList<>( super.getQualifiedName() ); @@ -349,7 +349,7 @@ protected MockColumnSet createColumnSet( ValidatorTable table, List name /** * Mock column set. */ - protected class MockColumnSet implements AlgOptTable { + protected class MockColumnSet implements AlgOptEntity { private final List names; private final AlgDataType rowType; @@ -403,7 +403,7 @@ public AlgOptSchema getRelOptSchema() { @Override public AlgNode toAlg( ToAlgContext context, AlgTraitSet traitSet ) { - return LogicalScan.create( context.getCluster(), this ); + return LogicalRelScan.create( context.getCluster(), this ); } @@ -444,7 +444,7 @@ public Expression getExpression( Class clazz ) { @Override - public AlgOptTable extend( List extendedFields ) { + public AlgOptEntity extend( List extendedFields ) { final AlgDataType extendedRowType = AlgDataTypeFactory.DEFAULT.builder() .addAll( rowType.getFieldList() ) .addAll( extendedFields ) @@ -460,12 +460,12 @@ public AlgOptTable extend( List extendedFields ) { /** * Table that delegates to a given table. */ - private static class DelegatingRelOptTable implements AlgOptTable { + private static class DelegatingRelOptEntity implements AlgOptEntity { - private final AlgOptTable parent; + private final AlgOptEntity parent; - DelegatingRelOptTable( AlgOptTable parent ) { + DelegatingRelOptEntity( AlgOptEntity parent ) { this.parent = parent; } @@ -486,7 +486,7 @@ public Expression getExpression( Class clazz ) { @Override - public AlgOptTable extend( List extendedFields ) { + public AlgOptEntity extend( List extendedFields ) { return parent.extend( extendedFields ); } @@ -517,7 +517,7 @@ public AlgOptSchema getRelOptSchema() { @Override public AlgNode toAlg( ToAlgContext context, AlgTraitSet traitSet ) { - return LogicalScan.create( context.getCluster(), this ); + return LogicalRelScan.create( context.getCluster(), this ); } diff --git a/plugins/sql-language/src/test/java/org/polypheny/db/sql/volcano/TraitPropagationTest.java b/plugins/sql-language/src/test/java/org/polypheny/db/sql/volcano/TraitPropagationTest.java index 974a3b7a4a..da056ac0d7 100644 --- a/plugins/sql-language/src/test/java/org/polypheny/db/sql/volcano/TraitPropagationTest.java +++ b/plugins/sql-language/src/test/java/org/polypheny/db/sql/volcano/TraitPropagationTest.java @@ -71,7 +71,7 @@ import org.polypheny.db.config.RuntimeConfig; import org.polypheny.db.languages.OperatorRegistry; import org.polypheny.db.languages.PolyphenyDbServerStatement; -import org.polypheny.db.plan.AlgOptAbstractTable; +import org.polypheny.db.plan.AlgOptAbstractEntity; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgOptCost; import org.polypheny.db.plan.AlgOptPlanner; @@ -157,7 +157,7 @@ public Statistic getStatistic() { } }; - final AlgOptAbstractTable t1 = new AlgOptAbstractTable( algOptSchema, "t1", table.getRowType( typeFactory ) ) { + final AlgOptAbstractEntity t1 = new AlgOptAbstractEntity( algOptSchema, "t1", 
table.getRowType( typeFactory ) ) { @Override public CatalogTable getCatalogTable() { return null; From 9b97f3dea0527696ff2bf7276dbffffb1685d14a Mon Sep 17 00:00:00 2001 From: datomo Date: Tue, 21 Feb 2023 16:42:02 +0100 Subject: [PATCH 015/436] ids everywhere --- .gitignore | 2 +- .../org/polypheny/db/adapter/Adapter.java | 14 +- .../enumerable/EnumerableInterpreter.java | 8 +- .../db/adapter/enumerable/EnumerableScan.java | 46 +- .../enumerable/EnumerableScanRule.java | 6 +- .../EnumerableTableFunctionScan.java | 4 +- .../enumerable/EnumerableTableModify.java | 8 +- .../enumerable/EnumerableTableModifyRule.java | 4 +- ...able.java => AbstractQueryableEntity.java} | 11 +- .../db/adapter/java/ReflectiveSchema.java | 60 +- .../db/algebra/core/AlgFactories.java | 6 +- .../relational/RelationalTransformable.java | 4 +- .../db/algebra/externalize/AlgJsonReader.java | 6 +- .../db/algebra/logical/lpg/LogicalGraph.java | 18 +- .../logical/relational/LogicalRelScan.java | 8 +- .../relational/LogicalRelViewScan.java | 8 +- .../db/algebra/rules/FilterScanRule.java | 10 +- .../db/algebra/rules/ProjectScanRule.java | 8 +- .../db/algebra/stream/StreamRules.java | 20 +- .../polypheny/db/interpreter/Bindables.java | 26 +- .../polypheny/db/interpreter/ScanNode.java | 26 +- .../org/polypheny/db/plan/AlgOptEntity.java | 4 +- .../db/prepare/AlgOptEntityImpl.java | 120 +- .../db/prepare/PolyphenyDbCatalogReader.java | 22 +- .../org/polypheny/db/prepare/Prepare.java | 18 +- .../db/prepare/QueryableAlgBuilder.java | 12 +- .../db/schema/AbstractPolyphenyDbSchema.java | 89 +- ....java => CustomColumnResolvingEntity.java} | 4 +- .../db/schema/{Table.java => Entity.java} | 19 +- ...nsibleTable.java => ExtensibleEntity.java} | 10 +- ...erableTable.java => FilterableEntity.java} | 4 +- .../org/polypheny/db/schema/Function.java | 2 +- .../db/schema/LogicalCollection.java | 2 +- .../{LogicalTable.java => LogicalEntity.java} | 9 +- .../polypheny/db/schema/LogicalRelView.java | 2 +- .../polypheny/db/schema/LogicalSchema.java | 24 +- .../db/schema/ModifiableCollection.java | 2 +- ...fiableTable.java => ModifiableEntity.java} | 5 +- .../db/schema/{Schema.java => Namespace.java} | 264 +- .../java/org/polypheny/db/schema/Path.java | 4 +- .../db/schema/PolyphenyDbSchema.java | 29 +- ....java => ProjectableFilterableEntity.java} | 6 +- ...eryableTable.java => QueryableEntity.java} | 4 +- ...annableTable.java => ScannableEntity.java} | 2 +- .../org/polypheny/db/schema/SchemaPlus.java | 13 +- .../polypheny/db/schema/SchemaVersion.java | 2 +- .../java/org/polypheny/db/schema/Schemas.java | 76 +- .../db/schema/SimplePolyphenyDbSchema.java | 56 +- .../org/polypheny/db/schema/Statistic.java | 2 +- ...amableTable.java => StreamableEntity.java} | 4 +- .../org/polypheny/db/schema/TableFactory.java | 4 +- .../org/polypheny/db/schema/TableMacro.java | 4 +- .../org/polypheny/db/schema/TableType.java | 241 ++ ...ableTable.java => TranslatableEntity.java} | 4 +- ...AbstractTable.java => AbstractEntity.java} | 21 +- ...ractSchema.java => AbstractNamespace.java} | 36 +- .../schema/impl/AbstractTableQueryable.java | 12 +- ...ngSchema.java => DelegatingNamespace.java} | 56 +- .../db/schema/impl/TableFunctionImpl.java | 26 +- .../db/schema/impl/TableMacroImpl.java | 8 +- .../org/polypheny/db/util/BuiltInMethod.java | 109 +- .../org/polypheny/db/util/ValidatorUtil.java | 4 +- .../db/runtime/PolyphenyDbResource.properties | 130 +- .../db/catalog/MockCatalogReader.java | 87 +- .../db/catalog/MockCatalogReaderDocument.java | 2 +- 
.../db/catalog/MockCatalogReaderDynamic.java | 2 +- .../db/catalog/MockCatalogReaderExtended.java | 2 +- .../db/catalog/MockCatalogReaderSimple.java | 6 +- .../org/polypheny/db/plan/RelOptUtilTest.java | 2 +- .../db/schemas/HrClusteredSchema.java | 29 +- .../java/org/polypheny/db/test/JdbcTest.java | 10 +- ...ableTest.java => ScannableEntityTest.java} | 30 +- .../org/polypheny/db/test/HepPlannerTest.xml | 68 +- .../org/polypheny/db/test/RelOptRulesTest.xml | 2216 ++++++++--------- .../db/test/SqlToRelConverterTest.xml | 976 ++++---- core/src/test/resources/sql/agg.iq | 86 +- core/src/test/resources/sql/blank.iq | 26 +- core/src/test/resources/sql/join.iq | 24 +- core/src/test/resources/sql/lateral.iq | 10 +- core/src/test/resources/sql/misc.iq | 84 +- core/src/test/resources/sql/schema.iq | 64 +- core/src/test/resources/sql/sequence.iq | 4 +- core/src/test/resources/sql/some.iq | 4 +- core/src/test/resources/sql/sort.iq | 12 +- core/src/test/resources/sql/sub-query.iq | 170 +- core/src/test/resources/sql/table.iq | 36 +- core/src/test/resources/sql/table_as.iq | 56 +- core/src/test/resources/sql/type.iq | 14 +- core/src/test/resources/sql/view.iq | 6 +- .../db/processing/DataMigratorImpl.java | 8 +- .../db/routing/routers/AbstractDqlRouter.java | 10 +- .../db/routing/routers/DmlRouterImpl.java | 14 +- .../routers/FullPlacementQueryRouter.java | 6 +- .../db/routing/routers/IcarusRouter.java | 6 +- .../db/routing/routers/SimpleRouter.java | 6 +- .../db/schema/PolySchemaBuilder.java | 73 +- .../db/transaction/EntityAccessMap.java | 6 +- .../org/polypheny/db/misc/AlgBuilderTest.java | 25 +- .../statistics/StatisticsManagerImpl.java | 3 +- ...ontailTable.java => CottontailEntity.java} | 21 +- .../adapter/cottontail/CottontailPlugin.java | 14 +- .../adapter/cottontail/CottontailSchema.java | 25 +- .../cottontail/algebra/CottontailAlg.java | 4 +- .../cottontail/algebra/CottontailScan.java | 6 +- .../algebra/CottontailTableModify.java | 6 +- .../CottontailTableModificationRule.java | 12 +- .../db/adapter/csv/CsvFilterableTable.java | 11 +- .../db/adapter/csv/CsvScannableTable.java | 11 +- .../polypheny/db/adapter/csv/CsvSchema.java | 34 +- .../polypheny/db/adapter/csv/CsvSource.java | 12 +- .../polypheny/db/adapter/csv/CsvTable.java | 12 +- .../db/adapter/csv/CsvTranslatableTable.java | 6 +- .../db/adapter/druid/DruidConnectionImpl.java | 2 +- .../db/adapter/druid/DruidDateTimeUtils.java | 6 +- .../{DruidTable.java => DruidEntity.java} | 24 +- .../db/adapter/druid/DruidExpressions.java | 2 +- .../db/adapter/druid/DruidQuery.java | 18 +- .../db/adapter/druid/DruidSchema.java | 20 +- .../db/adapter/druid/DruidTableFactory.java | 10 +- .../druid/ExtractionDimensionSpec.java | 2 +- .../adapter/druid/DruidQueryFilterTest.java | 4 +- ...rchTable.java => ElasticsearchEntity.java} | 16 +- .../elasticsearch/ElasticsearchMethod.java | 2 +- .../elasticsearch/ElasticsearchRel.java | 2 +- .../elasticsearch/ElasticsearchScan.java | 4 +- .../elasticsearch/ElasticsearchSchema.java | 29 +- .../ElasticsearchToEnumerableConverter.java | 2 +- .../polypheny/db/adapter/file/FileAlg.java | 4 +- .../polypheny/db/adapter/file/FilePlugin.java | 14 +- .../db/adapter/file/FileStoreSchema.java | 19 +- ...Table.java => FileTranslatableEntity.java} | 17 +- .../db/adapter/file/algebra/FileRules.java | 10 +- .../db/adapter/file/algebra/FileScan.java | 6 +- .../adapter/file/algebra/FileTableModify.java | 4 +- .../polypheny/db/adapter/file/source/Qfs.java | 12 +- .../db/adapter/file/source/QfsSchema.java | 21 +- 
.../db/adapter/geode/algebra/GeodeAlg.java | 2 +- .../{GeodeTable.java => GeodeEntity.java} | 16 +- .../db/adapter/geode/algebra/GeodeScan.java | 4 +- .../db/adapter/geode/algebra/GeodeSchema.java | 25 +- .../algebra/GeodeToEnumerableConverter.java | 6 +- ...e.java => GeodeSimpleScannableEntity.java} | 8 +- .../geode/simple/GeodeSimpleSchema.java | 21 +- .../db/hsqldb/stores/HsqldbStore.java | 8 +- .../html/{HtmlTable.java => HtmlEntity.java} | 12 +- .../polypheny/db/adapter/html/HtmlScan.java | 6 +- .../polypheny/db/adapter/html/HtmlSchema.java | 28 +- .../html/{JsonTable.java => JsonEntity.java} | 8 +- .../src/test/resources/sales/DEPTS.html | 28 +- .../src/test/resources/sales/EMPS.html | 32 +- .../src/test/resources/tableNoTH.html | 4 +- .../src/test/resources/tableNoTheadTbody.html | 4 +- .../src/test/resources/tableOK.html | 4 +- .../src/test/resources/tableX2.html | 8 +- .../jdbc/{JdbcTable.java => JdbcEntity.java} | 21 +- .../db/adapter/jdbc/JdbcImplementor.java | 7 +- .../polypheny/db/adapter/jdbc/JdbcRules.java | 10 +- .../polypheny/db/adapter/jdbc/JdbcScan.java | 4 +- .../polypheny/db/adapter/jdbc/JdbcSchema.java | 61 +- .../adapter/jdbc/rel2sql/SqlImplementor.java | 74 +- .../jdbc/sources/AbstractJdbcSource.java | 4 +- .../jdbc/stores/AbstractJdbcStore.java | 4 +- .../jdbc/rel2sql/AlgToSqlConverterTest.java | 36 +- .../db/adapter/jdbc/rel2sql/PlannerTest.java | 2 +- .../rel2sql/RelToSqlConverterStructsTest.java | 38 +- .../monetdb/sources/MonetdbSource.java | 8 +- .../adapter/monetdb/stores/MonetdbStore.java | 8 +- .../db/adapter/mongodb/MongoEntity.java | 13 +- .../db/adapter/mongodb/MongoPlugin.java | 18 +- .../db/adapter/mongodb/MongoRowType.java | 2 +- .../db/adapter/mongodb/MongoRules.java | 14 +- .../db/adapter/mongodb/MongoSchema.java | 31 +- .../mongodb/MongoAdapterTest.java.disabled | 28 +- .../languages/mql2alg/MqlToAlgConverter.java | 6 +- .../db/adapter/jdbc/MysqlSourcePlugin.java | 8 +- .../db/adapter/neo4j/Neo4jPlugin.java | 13 +- .../polypheny/db/adapter/neo4j/NeoEntity.java | 21 +- .../polypheny/db/adapter/neo4j/NeoGraph.java | 16 +- .../{NeoNamespace.java => NeoSchema.java} | 18 +- .../neo4j/rules/relational/NeoScan.java | 2 +- .../pig/{PigTable.java => PigEntity.java} | 8 +- .../org/polypheny/db/adapter/pig/PigScan.java | 6 +- .../polypheny/db/adapter/pig/PigSchema.java | 18 +- .../db/test/PigAlgBuilderStyleTest.java | 10 +- .../polypheny/db/test/PigRelBuilderTest.java | 2 +- .../postgres/source/PostgresqlSource.java | 8 +- .../postgres/store/PostgresqlStore.java | 8 +- .../sql-language/src/main/codegen/config.fmpp | 2 +- .../src/main/codegen/includes/parserImpls.ftl | 70 +- .../language/validate/DelegatingScope.java | 10 +- .../db/sql/language/validate/EmptyScope.java | 14 +- .../validate/SqlUserDefinedTableMacro.java | 4 +- .../language/validate/SqlValidatorImpl.java | 34 +- .../language/validate/SqlValidatorUtil.java | 18 +- .../sql/language/validate/TableNamespace.java | 12 +- .../db/sql/sql2alg/SqlToAlgConverter.java | 25 +- .../org/polypheny/db/sql/AlgWriterTest.java | 4 +- .../org/polypheny/db/sql/FrameworksTest.java | 36 +- .../org/polypheny/db/sql/InterpreterTest.java | 30 +- .../org/polypheny/db/sql/PlannerTest.java | 70 +- .../java/org/polypheny/db/sql/Smalls.java | 57 +- .../polypheny/db/sql/SortRemoveRuleTest.java | 2 +- .../validate/LexCaseSensitiveTest.java | 2 +- ...st.java => NamespaceToJsonMapperTest.java} | 2 +- .../db/sql/volcano/TraitPropagationTest.java | 12 +- .../org/polypheny/db/sql/AlgOptRulesTest.xml | 2216 ++++++++--------- 
.../org/polypheny/db/sql/HepPlannerTest.xml | 68 +- .../db/sql/language/SqlToAlgConverterTest.xml | 976 ++++---- .../src/test/resources/sql/agg.iq | 86 +- .../src/test/resources/sql/blank.iq | 26 +- .../src/test/resources/sql/join.iq | 24 +- .../src/test/resources/sql/lateral.iq | 10 +- .../src/test/resources/sql/misc.iq | 84 +- .../src/test/resources/sql/schema.iq | 64 +- .../src/test/resources/sql/sequence.iq | 4 +- .../src/test/resources/sql/some.iq | 4 +- .../src/test/resources/sql/sort.iq | 12 +- .../src/test/resources/sql/sub-query.iq | 170 +- .../src/test/resources/sql/table.iq | 36 +- .../src/test/resources/sql/table_as.iq | 56 +- .../src/test/resources/sql/type.iq | 14 +- .../src/test/resources/sql/view.iq | 6 +- .../polypheny/db/webui/QueryPlanBuilder.java | 20 - 223 files changed, 5832 insertions(+), 5574 deletions(-) rename core/src/main/java/org/polypheny/db/adapter/java/{AbstractQueryableTable.java => AbstractQueryableEntity.java} (85%) rename core/src/main/java/org/polypheny/db/schema/{CustomColumnResolvingTable.java => CustomColumnResolvingEntity.java} (95%) rename core/src/main/java/org/polypheny/db/schema/{Table.java => Entity.java} (94%) rename core/src/main/java/org/polypheny/db/schema/{ExtensibleTable.java => ExtensibleEntity.java} (91%) rename core/src/main/java/org/polypheny/db/schema/{FilterableTable.java => FilterableEntity.java} (96%) rename core/src/main/java/org/polypheny/db/schema/{LogicalTable.java => LogicalEntity.java} (94%) rename core/src/main/java/org/polypheny/db/schema/{ModifiableTable.java => ModifiableEntity.java} (94%) rename core/src/main/java/org/polypheny/db/schema/{Schema.java => Namespace.java} (53%) rename core/src/main/java/org/polypheny/db/schema/{ProjectableFilterableTable.java => ProjectableFilterableEntity.java} (96%) rename core/src/main/java/org/polypheny/db/schema/{QueryableTable.java => QueryableEntity.java} (94%) rename core/src/main/java/org/polypheny/db/schema/{ScannableTable.java => ScannableEntity.java} (97%) rename core/src/main/java/org/polypheny/db/schema/{StreamableTable.java => StreamableEntity.java} (96%) create mode 100644 core/src/main/java/org/polypheny/db/schema/TableType.java rename core/src/main/java/org/polypheny/db/schema/{TranslatableTable.java => TranslatableEntity.java} (93%) rename core/src/main/java/org/polypheny/db/schema/impl/{AbstractTable.java => AbstractEntity.java} (84%) rename core/src/main/java/org/polypheny/db/schema/impl/{AbstractSchema.java => AbstractNamespace.java} (84%) rename core/src/main/java/org/polypheny/db/schema/impl/{DelegatingSchema.java => DelegatingNamespace.java} (65%) rename core/src/test/java/org/polypheny/db/test/{ScannableTableTest.java => ScannableEntityTest.java} (95%) rename plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/{CottontailTable.java => CottontailEntity.java} (91%) rename plugins/druid-adapter/src/main/java/org/polypheny/db/adapter/druid/{DruidTable.java => DruidEntity.java} (87%) rename plugins/elasticsearch-adapter/src/main/java/org/polypheny/db/adapter/elasticsearch/{ElasticsearchTable.java => ElasticsearchEntity.java} (97%) rename plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/{FileTranslatableTable.java => FileTranslatableEntity.java} (91%) rename plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/algebra/{GeodeTable.java => GeodeEntity.java} (96%) rename plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/simple/{GeodeSimpleScannableTable.java => GeodeSimpleScannableEntity.java} (91%) 
rename plugins/html-adapter/src/main/java/org/polypheny/db/adapter/html/{HtmlTable.java => HtmlEntity.java} (90%) rename plugins/html-adapter/src/main/java/org/polypheny/db/adapter/html/{JsonTable.java => JsonEntity.java} (90%) rename plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/{JdbcTable.java => JdbcEntity.java} (94%) rename plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/{NeoNamespace.java => NeoSchema.java} (84%) rename plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/{PigTable.java => PigEntity.java} (93%) rename plugins/sql-language/src/test/java/org/polypheny/db/sql/map/{SchemaToJsonMapperTest.java => NamespaceToJsonMapperTest.java} (98%) diff --git a/.gitignore b/.gitignore index 65c1bb1248..a66e3d34f2 100644 --- a/.gitignore +++ b/.gitignore @@ -221,7 +221,7 @@ nbdist/ !/gradle/wrapper/gradle-wrapper.jar /geode-adapter/vf.gf.server.status.cmd -/core/custom-schema-model.json +/core/custom-namespace-model.json /testTestCsv/** !/libs/avatica-1.16.0-POLYPHENYDB-shaded.jar !/libs/avatica-server-1.16.0-POLYPHENYDB.jar diff --git a/core/src/main/java/org/polypheny/db/adapter/Adapter.java b/core/src/main/java/org/polypheny/db/adapter/Adapter.java index d5b97e93fb..6344a42652 100644 --- a/core/src/main/java/org/polypheny/db/adapter/Adapter.java +++ b/core/src/main/java/org/polypheny/db/adapter/Adapter.java @@ -65,9 +65,9 @@ import org.polypheny.db.information.InformationPage; import org.polypheny.db.information.InformationTable; import org.polypheny.db.prepare.Context; -import org.polypheny.db.schema.Schema; +import org.polypheny.db.schema.Entity; +import org.polypheny.db.schema.Namespace; import org.polypheny.db.schema.SchemaPlus; -import org.polypheny.db.schema.Table; import org.polypheny.db.transaction.PolyXid; @Getter @@ -320,11 +320,11 @@ public Adapter( int adapterId, String uniqueName, Map settings ) } - public abstract void createNewSchema( SchemaPlus rootSchema, String name ); + public abstract void createNewSchema( SchemaPlus rootSchema, String name, Long id ); - public abstract Table createTableSchema( CatalogTable combinedTable, List columnPlacementsOnStore, CatalogPartitionPlacement partitionPlacement ); + public abstract Entity createTableSchema( CatalogTable combinedTable, List columnPlacementsOnStore, CatalogPartitionPlacement partitionPlacement ); - public abstract Schema getCurrentSchema(); + public abstract Namespace getCurrentSchema(); public void createGraphNamespace( SchemaPlus rootSchema, String name, long id ) { @@ -332,12 +332,12 @@ public void createGraphNamespace( SchemaPlus rootSchema, String name, long id ) } - public Table createDocumentSchema( CatalogCollection catalogEntity, CatalogCollectionPlacement partitionPlacement ) { + public Entity createDocumentSchema( CatalogCollection catalogEntity, CatalogCollectionPlacement partitionPlacement ) { throw new UnsupportedOperationException( "It is not supported to create a document with this adapter." ); } - public Schema getCurrentGraphNamespace() { + public Namespace getCurrentGraphNamespace() { throw new UnsupportedOperationException( "It is not supported to create a graph with this adapter." 
); } diff --git a/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableInterpreter.java b/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableInterpreter.java index a408a51b74..1d396c8def 100644 --- a/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableInterpreter.java +++ b/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableInterpreter.java @@ -47,16 +47,16 @@ import org.polypheny.db.plan.AlgOptCost; import org.polypheny.db.plan.AlgOptPlanner; import org.polypheny.db.plan.AlgTraitSet; -import org.polypheny.db.schema.FilterableTable; -import org.polypheny.db.schema.ProjectableFilterableTable; +import org.polypheny.db.schema.FilterableEntity; +import org.polypheny.db.schema.ProjectableFilterableEntity; import org.polypheny.db.util.BuiltInMethod; /** * Relational expression that executes its children using an interpreter. * - * Although quite a few kinds of {@link AlgNode} can be interpreted, this is only created by default for {@link FilterableTable} and - * {@link ProjectableFilterableTable}. + * Although quite a few kinds of {@link AlgNode} can be interpreted, this is only created by default for {@link FilterableEntity} and + * {@link ProjectableFilterableEntity}. */ public class EnumerableInterpreter extends SingleAlg implements EnumerableAlg { diff --git a/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableScan.java b/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableScan.java index b35398511e..22c9911f42 100644 --- a/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableScan.java +++ b/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableScan.java @@ -63,12 +63,12 @@ import org.polypheny.db.plan.AlgOptPlanner; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.plan.volcano.VolcanoCost; -import org.polypheny.db.schema.FilterableTable; -import org.polypheny.db.schema.ProjectableFilterableTable; -import org.polypheny.db.schema.QueryableTable; -import org.polypheny.db.schema.ScannableTable; -import org.polypheny.db.schema.StreamableTable; -import org.polypheny.db.schema.Table; +import org.polypheny.db.schema.Entity; +import org.polypheny.db.schema.FilterableEntity; +import org.polypheny.db.schema.ProjectableFilterableEntity; +import org.polypheny.db.schema.QueryableEntity; +import org.polypheny.db.schema.ScannableEntity; +import org.polypheny.db.schema.StreamableEntity; import org.polypheny.db.util.BuiltInMethod; @@ -96,13 +96,13 @@ public EnumerableScan( AlgOptCluster cluster, AlgTraitSet traitSet, AlgOptEntity * Creates an EnumerableScan. */ public static EnumerableScan create( AlgOptCluster cluster, AlgOptEntity algOptEntity ) { - final Table table = algOptEntity.unwrap( Table.class ); - Class elementType = EnumerableScan.deduceElementType( table ); + final Entity entity = algOptEntity.unwrap( Entity.class ); + Class elementType = EnumerableScan.deduceElementType( entity ); final AlgTraitSet traitSet = cluster.traitSetOf( EnumerableConvention.INSTANCE ) .replaceIfs( AlgCollationTraitDef.INSTANCE, () -> { - if ( table != null ) { - return table.getStatistic().getCollations(); + if ( entity != null ) { + return entity.getStatistic().getCollations(); } return ImmutableList.of(); } ); @@ -127,25 +127,25 @@ public int hashCode() { /** * Returns whether EnumerableScan can generate code to handle a particular variant of the Table SPI. 
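 *
 * As a sketch (the entity classes named here are hypothetical): an entity implementing {@link ScannableEntity} or {@link QueryableEntity} can be scanned by generated code, while one that only implements {@link FilterableEntity} cannot, since it may reject filters at run time:
 * <pre>
 * EnumerableScan.canHandle( new MyScannableEntity() );  // true: ScannableEntity
 * EnumerableScan.canHandle( new MyFilterOnlyEntity() ); // false: handled by EnumerableInterpreter instead
 * </pre>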
*/ - public static boolean canHandle( Table table ) { + public static boolean canHandle( Entity entity ) { // FilterableTable and ProjectableFilterableTable cannot be handled in enumerable convention because they might reject filters and those filters would need to be handled dynamically. - return table instanceof QueryableTable || table instanceof ScannableTable; + return entity instanceof QueryableEntity || entity instanceof ScannableEntity; } - public static Class deduceElementType( Table table ) { - if ( table instanceof QueryableTable ) { - final QueryableTable queryableTable = (QueryableTable) table; + public static Class deduceElementType( Entity entity ) { + if ( entity instanceof QueryableEntity ) { + final QueryableEntity queryableTable = (QueryableEntity) entity; final Type type = queryableTable.getElementType(); if ( type instanceof Class ) { return (Class) type; } else { return Object[].class; } - } else if ( table instanceof ScannableTable - || table instanceof FilterableTable - || table instanceof ProjectableFilterableTable - || table instanceof StreamableTable ) { + } else if ( entity instanceof ScannableEntity + || entity instanceof FilterableEntity + || entity instanceof ProjectableFilterableEntity + || entity instanceof StreamableEntity ) { return Object[].class; } else { return Object.class; @@ -154,7 +154,7 @@ public static Class deduceElementType( Table table ) { public static JavaRowFormat deduceFormat( AlgOptEntity table ) { - final Class elementType = deduceElementType( table.unwrap( Table.class ) ); + final Class elementType = deduceElementType( table.unwrap( Entity.class ) ); return elementType == Object[].class ? JavaRowFormat.ARRAY : JavaRowFormat.CUSTOM; @@ -190,9 +190,9 @@ private Expression toRows( PhysType physType, Expression expression ) { if ( physType.getFormat() == JavaRowFormat.SCALAR && Object[].class.isAssignableFrom( elementType ) && getRowType().getFieldCount() == 1 - && (table.unwrap( ScannableTable.class ) != null - || table.unwrap( FilterableTable.class ) != null - || table.unwrap( ProjectableFilterableTable.class ) != null) ) { + && (table.unwrap( ScannableEntity.class ) != null + || table.unwrap( FilterableEntity.class ) != null + || table.unwrap( ProjectableFilterableEntity.class ) != null) ) { return Expressions.call( BuiltInMethod.SLICE0.method, expression ); } JavaRowFormat oldFormat = format(); diff --git a/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableScanRule.java b/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableScanRule.java index 033e94611e..8c9fa61a00 100644 --- a/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableScanRule.java +++ b/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableScanRule.java @@ -42,7 +42,7 @@ import org.polypheny.db.algebra.logical.relational.LogicalTableFunctionScan; import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.Convention; -import org.polypheny.db.schema.Table; +import org.polypheny.db.schema.Entity; import org.polypheny.db.tools.AlgBuilderFactory; @@ -65,8 +65,8 @@ public EnumerableScanRule( AlgBuilderFactory algBuilderFactory ) { public AlgNode convert( AlgNode alg ) { LogicalRelScan scan = (LogicalRelScan) alg; final AlgOptEntity algOptEntity = scan.getTable(); - final Table table = algOptEntity.unwrap( Table.class ); - if ( !EnumerableScan.canHandle( table ) ) { + final Entity entity = algOptEntity.unwrap( Entity.class ); + if ( !EnumerableScan.canHandle( entity ) ) { return null; } final Expression expression = 
algOptEntity.getExpression( Object.class ); diff --git a/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableTableFunctionScan.java b/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableTableFunctionScan.java index f6bb526184..e7e5d33f70 100644 --- a/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableTableFunctionScan.java +++ b/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableTableFunctionScan.java @@ -51,7 +51,7 @@ import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.rex.RexCall; import org.polypheny.db.rex.RexNode; -import org.polypheny.db.schema.QueryableTable; +import org.polypheny.db.schema.QueryableEntity; import org.polypheny.db.schema.impl.TableFunctionImpl; @@ -107,7 +107,7 @@ private boolean isQueryable() { } final TableFunctionImpl tableFunction = (TableFunctionImpl) udtf.getFunction(); final Method method = tableFunction.method; - return QueryableTable.class.isAssignableFrom( method.getReturnType() ); + return QueryableEntity.class.isAssignableFrom( method.getReturnType() ); } } diff --git a/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableTableModify.java b/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableTableModify.java index 87a81f846f..2c865b529e 100644 --- a/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableTableModify.java +++ b/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableTableModify.java @@ -55,7 +55,7 @@ import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.prepare.Prepare; import org.polypheny.db.rex.RexNode; -import org.polypheny.db.schema.ModifiableTable; +import org.polypheny.db.schema.ModifiableEntity; import org.polypheny.db.util.BuiltInMethod; @@ -78,7 +78,7 @@ public EnumerableTableModify( AlgOptCluster cluster, AlgTraitSet traits, AlgOptE super( cluster, traits, table, catalogReader, child, operation, updateColumnList, sourceExpressionList, flattened ); assert child.getConvention() instanceof EnumerableConvention; assert getConvention() instanceof EnumerableConvention; - final ModifiableTable modifiableTable = table.unwrap( ModifiableTable.class ); + final ModifiableEntity modifiableTable = table.unwrap( ModifiableEntity.class ); if ( modifiableTable == null ) { throw new AssertionError(); // TODO: user error in validator } @@ -97,9 +97,9 @@ public Result implement( EnumerableAlgImplementor implementor, Prefer pref ) { final Result result = implementor.visitChild( this, 0, (EnumerableAlg) getInput(), pref ); Expression childExp = builder.append( "child", result.block ); final ParameterExpression collectionParameter = Expressions.parameter( Collection.class, builder.newName( "collection" ) ); - final Expression expression = table.getExpression( ModifiableTable.class ); + final Expression expression = table.getExpression( ModifiableEntity.class ); assert expression != null; // TODO: user error in validator - assert ModifiableTable.class.isAssignableFrom( Types.toClass( expression.getType() ) ) : expression.getType(); + assert ModifiableEntity.class.isAssignableFrom( Types.toClass( expression.getType() ) ) : expression.getType(); builder.add( Expressions.declare( Modifier.FINAL, diff --git a/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableTableModifyRule.java b/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableTableModifyRule.java index 2d54bb5d96..283ffca3e6 100644 --- a/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableTableModifyRule.java +++ 
b/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableTableModifyRule.java @@ -40,7 +40,7 @@ import org.polypheny.db.algebra.logical.relational.LogicalModify; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.plan.Convention; -import org.polypheny.db.schema.ModifiableTable; +import org.polypheny.db.schema.ModifiableEntity; import org.polypheny.db.tools.AlgBuilderFactory; @@ -69,7 +69,7 @@ public AlgNode convert( AlgNode alg ) { // return EnumerableRules.ENUMERABLE_TABLE_MODIFY_TO_STREAMER_RULE.convert( alg ); return null; } - final ModifiableTable modifiableTable = modify.getTable().unwrap( ModifiableTable.class ); + final ModifiableEntity modifiableTable = modify.getTable().unwrap( ModifiableEntity.class ); if ( modifiableTable == null ) { return null; } diff --git a/core/src/main/java/org/polypheny/db/adapter/java/AbstractQueryableTable.java b/core/src/main/java/org/polypheny/db/adapter/java/AbstractQueryableEntity.java similarity index 85% rename from core/src/main/java/org/polypheny/db/adapter/java/AbstractQueryableTable.java rename to core/src/main/java/org/polypheny/db/adapter/java/AbstractQueryableEntity.java index e703e04ad1..2c038a1e1c 100644 --- a/core/src/main/java/org/polypheny/db/adapter/java/AbstractQueryableTable.java +++ b/core/src/main/java/org/polypheny/db/adapter/java/AbstractQueryableEntity.java @@ -36,21 +36,22 @@ import java.lang.reflect.Type; import org.apache.calcite.linq4j.tree.Expression; -import org.polypheny.db.schema.QueryableTable; +import org.polypheny.db.schema.Entity; +import org.polypheny.db.schema.QueryableEntity; import org.polypheny.db.schema.SchemaPlus; import org.polypheny.db.schema.Schemas; -import org.polypheny.db.schema.impl.AbstractTable; +import org.polypheny.db.schema.impl.AbstractEntity; /** - * Abstract base class for implementing {@link org.polypheny.db.schema.Table}. + * Abstract base class for implementing {@link Entity}. 
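 *
 * A sketch of a subclass (the name is invented; the remaining {@link QueryableEntity} methods are omitted, hence the class stays abstract):
 * <pre>
 * abstract class RowArrayEntity extends AbstractQueryableEntity {
 *     RowArrayEntity() {
 *         super( Object[].class ); // element type is kept by this base class
 *     }
 * }
 * </pre>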
*/ -public abstract class AbstractQueryableTable extends AbstractTable implements QueryableTable { +public abstract class AbstractQueryableEntity extends AbstractEntity implements QueryableEntity { protected final Type elementType; - protected AbstractQueryableTable( Type elementType ) { + protected AbstractQueryableEntity( Type elementType ) { super(); this.elementType = elementType; } diff --git a/core/src/main/java/org/polypheny/db/adapter/java/ReflectiveSchema.java b/core/src/main/java/org/polypheny/db/adapter/java/ReflectiveSchema.java index e659c9bbe1..1cbecbe8e3 100644 --- a/core/src/main/java/org/polypheny/db/adapter/java/ReflectiveSchema.java +++ b/core/src/main/java/org/polypheny/db/adapter/java/ReflectiveSchema.java @@ -59,17 +59,18 @@ import org.polypheny.db.algebra.AlgReferentialConstraint; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; +import org.polypheny.db.schema.Entity; import org.polypheny.db.schema.Function; -import org.polypheny.db.schema.ScannableTable; -import org.polypheny.db.schema.Schema; +import org.polypheny.db.schema.Namespace; +import org.polypheny.db.schema.Namespace.Schema; +import org.polypheny.db.schema.ScannableEntity; import org.polypheny.db.schema.SchemaPlus; import org.polypheny.db.schema.Schemas; import org.polypheny.db.schema.Statistic; import org.polypheny.db.schema.Statistics; -import org.polypheny.db.schema.Table; import org.polypheny.db.schema.TableMacro; -import org.polypheny.db.schema.TranslatableTable; -import org.polypheny.db.schema.impl.AbstractSchema; +import org.polypheny.db.schema.TranslatableEntity; +import org.polypheny.db.schema.impl.AbstractNamespace; import org.polypheny.db.schema.impl.AbstractTableQueryable; import org.polypheny.db.schema.impl.ReflectiveFunctionBase; import org.polypheny.db.util.BuiltInMethod; @@ -77,13 +78,13 @@ /** - * Implementation of {@link Schema} that exposes the public fields and methods in a Java object. + * Implementation of {@link Namespace} that exposes the public fields and methods in a Java object. */ -public class ReflectiveSchema extends AbstractSchema { +public class ReflectiveSchema extends AbstractNamespace implements Schema { private final Class clazz; private Object target; - private Map tableMap; + private Map tableMap; private Multimap functionMap; @@ -91,9 +92,10 @@ public class ReflectiveSchema extends AbstractSchema { * Creates a ReflectiveSchema. 
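 *
 * A sketch of the intended use (the POJO below is hypothetical): every public array or enumerable field of the target becomes a table of this namespace.
 * <pre>
 * public static class Hr {
 *     public final Employee[] emps = { new Employee( 100, "Bill" ) };
 * }
 * ReflectiveSchema hr = new ReflectiveSchema( new Hr(), 42L ); // 42L: the newly required namespace id
 * </pre>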
* * @param target Object whose fields will be sub-objects of the schema + * @param id */ - public ReflectiveSchema( Object target ) { - super(); + public ReflectiveSchema( Object target, long id ) { + super( id ); this.clazz = target.getClass(); this.target = target; } @@ -116,7 +118,7 @@ public Object getTarget() { @Override - public Map getTableMap() { + public Map getTableMap() { if ( tableMap == null ) { tableMap = createTableMap(); } @@ -124,17 +126,17 @@ public Map getTableMap() { } - private Map createTableMap() { - final ImmutableMap.Builder builder = ImmutableMap.builder(); + private Map createTableMap() { + final ImmutableMap.Builder builder = ImmutableMap.builder(); for ( Field field : clazz.getFields() ) { final String fieldName = field.getName(); - final Table table = fieldRelation( field ); - if ( table == null ) { + final Entity entity = fieldRelation( field ); + if ( entity == null ) { continue; } - builder.put( fieldName, table ); + builder.put( fieldName, entity ); } - Map tableMap = builder.build(); + Map tableMap = builder.build(); // Unique-Key - Foreign-Key for ( Field field : clazz.getFields() ) { if ( AlgReferentialConstraint.class.isAssignableFrom( field.getType() ) ) { @@ -144,7 +146,7 @@ private Map createTableMap() { } catch ( IllegalAccessException e ) { throw new RuntimeException( "Error while accessing field " + field, e ); } - FieldTable table = (FieldTable) tableMap.get( Util.last( rc.getSourceQualifiedName() ) ); + FieldEntity table = (FieldEntity) tableMap.get( Util.last( rc.getSourceQualifiedName() ) ); assert table != null; table.statistic = Statistics.of( ImmutableList.copyOf( Iterables.concat( table.getStatistic().getReferentialConstraints(), Collections.singleton( rc ) ) ) ); } @@ -169,7 +171,7 @@ private Multimap createFunctionMap() { if ( method.getDeclaringClass() == Object.class || methodName.equals( "toString" ) ) { continue; } - if ( TranslatableTable.class.isAssignableFrom( method.getReturnType() ) ) { + if ( TranslatableEntity.class.isAssignableFrom( method.getReturnType() ) ) { final TableMacro tableMacro = new MethodTableMacro( this, method ); builder.put( methodName, tableMacro ); } @@ -193,7 +195,7 @@ Expression getTargetExpression( SchemaPlus parentSchema, String name ) { /** * Returns a table based on a particular field of this schema. If the field is not of the right type to be a relation, returns null. */ - private Table fieldRelation( final Field field ) { + private Entity fieldRelation( final Field field ) { final Type elementType = getElementType( field.getType() ); if ( elementType == null ) { return null; @@ -205,7 +207,7 @@ private Table fieldRelation( final Field field ) { throw new RuntimeException( "Error while accessing field " + field, e ); } @SuppressWarnings("unchecked") final Enumerable enumerable = toEnumerable( o ); - return new FieldTable<>( field, elementType, enumerable ); + return new FieldEntity<>( field, elementType, enumerable ); } @@ -241,13 +243,13 @@ private static Enumerable toEnumerable( final Object o ) { /** * Table that is implemented by reading from a Java object. 
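 *
 * For example (sketch, with a hypothetical {@code dataContext}): since it implements {@link ScannableEntity}, {@code Enumerable<Object[]> rows = entity.scan( dataContext );} simply replays the rows of the backing Java object.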
*/ - private static class ReflectiveTable extends AbstractQueryableTable implements Table, ScannableTable { + private static class ReflectiveEntity extends AbstractQueryableEntity implements Entity, ScannableEntity { private final Type elementType; private final Enumerable enumerable; - ReflectiveTable( Type elementType, Enumerable enumerable ) { + ReflectiveEntity( Type elementType, Enumerable enumerable ) { super( elementType ); this.elementType = elementType; this.enumerable = enumerable; @@ -303,7 +305,7 @@ private static class MethodTableMacro extends ReflectiveFunctionBase implements MethodTableMacro( ReflectiveSchema schema, Method method ) { super( method ); this.schema = schema; - assert TranslatableTable.class.isAssignableFrom( method.getReturnType() ) : "Method should return TranslatableTable so the macro can be expanded"; + assert TranslatableEntity.class.isAssignableFrom( method.getReturnType() ) : "Method should return TranslatableEntity so the macro can be expanded"; } @Override public String toString() { @@ -313,10 +315,10 @@ public String toString() { - public TranslatableTable apply( final List arguments ) { + public TranslatableEntity apply( final List arguments ) { try { final Object o = method.invoke( schema.getTarget(), arguments.toArray() ); - return (TranslatableTable) o; + return (TranslatableEntity) o; } catch ( IllegalAccessException | InvocationTargetException e ) { throw new RuntimeException( e ); } @@ -330,18 +332,18 @@ public TranslatableTable apply( final List arguments ) { * * @param element type */ - private static class FieldTable extends ReflectiveTable { + private static class FieldEntity extends ReflectiveEntity { private final Field field; private Statistic statistic; - FieldTable( Field field, Type elementType, Enumerable enumerable ) { + FieldEntity( Field field, Type elementType, Enumerable enumerable ) { this( field, elementType, enumerable, Statistics.UNKNOWN ); } - FieldTable( Field field, Type elementType, Enumerable enumerable, Statistic statistic ) { + FieldEntity( Field field, Type elementType, Enumerable enumerable, Statistic statistic ) { super( elementType, enumerable ); this.field = field; this.statistic = statistic; diff --git a/core/src/main/java/org/polypheny/db/algebra/core/AlgFactories.java b/core/src/main/java/org/polypheny/db/algebra/core/AlgFactories.java index e32d341a47..ce6f0681aa 100644 --- a/core/src/main/java/org/polypheny/db/algebra/core/AlgFactories.java +++ b/core/src/main/java/org/polypheny/db/algebra/core/AlgFactories.java @@ -73,7 +73,7 @@ import org.polypheny.db.plan.Contexts; import org.polypheny.db.rex.RexLiteral; import org.polypheny.db.rex.RexNode; -import org.polypheny.db.schema.TranslatableTable; +import org.polypheny.db.schema.TranslatableEntity; import org.polypheny.db.tools.AlgBuilder; import org.polypheny.db.tools.AlgBuilderFactory; import org.polypheny.db.util.ImmutableBitSet; @@ -562,7 +562,7 @@ public AlgNode createScan( AlgOptCluster cluster, AlgOptEntity table ) { /** - * Creates a {@link ScanFactory} that can expand {@link TranslatableTable} instances. + * Creates a {@link ScanFactory} that can expand {@link TranslatableEntity} instances. 
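 *
 * A sketch of how a factory is wrapped (the base factory below merely mirrors the default relational scan):
 * <pre>
 * ScanFactory base = ( cluster, table ) -> LogicalRelScan.create( cluster, table );
 * ScanFactory expanding = AlgFactories.expandingScanFactory( base );
 * // expanding.createScan(...) now inlines TranslatableEntity implementations via toAlg()
 * </pre>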
* * @param scanFactory Factory for non-translatable tables * @return Table scan factory @@ -570,7 +570,7 @@ public AlgNode createScan( AlgOptCluster cluster, AlgOptEntity table ) { @Nonnull public static ScanFactory expandingScanFactory( @Nonnull ScanFactory scanFactory ) { return ( cluster, table ) -> { - final TranslatableTable translatableTable = table.unwrap( TranslatableTable.class ); + final TranslatableEntity translatableTable = table.unwrap( TranslatableEntity.class ); if ( translatableTable != null ) { final ToAlgContext toAlgContext = () -> cluster; return translatableTable.toAlg( toAlgContext, table, cluster.traitSet() ); diff --git a/core/src/main/java/org/polypheny/db/algebra/core/relational/RelationalTransformable.java b/core/src/main/java/org/polypheny/db/algebra/core/relational/RelationalTransformable.java index f1fbeed88e..10b463887b 100644 --- a/core/src/main/java/org/polypheny/db/algebra/core/relational/RelationalTransformable.java +++ b/core/src/main/java/org/polypheny/db/algebra/core/relational/RelationalTransformable.java @@ -22,7 +22,7 @@ import org.polypheny.db.algebra.core.Modify.Operation; import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.prepare.Prepare.CatalogReader; -import org.polypheny.db.schema.ModifiableTable; +import org.polypheny.db.schema.ModifiableEntity; /** @@ -39,7 +39,7 @@ default CatalogReader getCatalogReader() { static Modify getModify( AlgOptEntity table, CatalogReader catalogReader, AlgNode alg, Operation operation ) { - return table.unwrap( ModifiableTable.class ).toModificationAlg( alg.getCluster(), table, catalogReader, alg, operation, null, null, true ); + return table.unwrap( ModifiableEntity.class ).toModificationAlg( alg.getCluster(), table, catalogReader, alg, operation, null, null, true ); } } diff --git a/core/src/main/java/org/polypheny/db/algebra/externalize/AlgJsonReader.java b/core/src/main/java/org/polypheny/db/algebra/externalize/AlgJsonReader.java index 5c95b55ce4..b3925add6f 100644 --- a/core/src/main/java/org/polypheny/db/algebra/externalize/AlgJsonReader.java +++ b/core/src/main/java/org/polypheny/db/algebra/externalize/AlgJsonReader.java @@ -58,7 +58,7 @@ import org.polypheny.db.plan.AlgOptSchema; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.plan.Convention; -import org.polypheny.db.schema.Schema; +import org.polypheny.db.schema.Namespace; import org.polypheny.db.util.ImmutableBitSet; import org.polypheny.db.util.Util; @@ -80,10 +80,10 @@ public class AlgJsonReader { private AlgNode lastAlg; - public AlgJsonReader( AlgOptCluster cluster, AlgOptSchema algOptSchema, Schema schema ) { + public AlgJsonReader( AlgOptCluster cluster, AlgOptSchema algOptSchema, Namespace namespace ) { this.cluster = cluster; this.algOptSchema = algOptSchema; - Util.discard( schema ); + Util.discard( namespace ); } diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalGraph.java b/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalGraph.java index 06f7209926..e20bd78e38 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalGraph.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalGraph.java @@ -29,19 +29,19 @@ import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptEntity.ToAlgContext; import org.polypheny.db.prepare.Prepare.CatalogReader; +import org.polypheny.db.schema.Entity; import org.polypheny.db.schema.Function; -import org.polypheny.db.schema.Schema; +import org.polypheny.db.schema.Namespace; import 
org.polypheny.db.schema.SchemaPlus; import org.polypheny.db.schema.SchemaVersion; import org.polypheny.db.schema.Schemas; -import org.polypheny.db.schema.Table; import org.polypheny.db.schema.TranslatableGraph; import org.polypheny.db.schema.graph.Graph; import org.polypheny.db.type.PolyType; @Getter -public class LogicalGraph implements RelationalTransformable, Schema, Graph, TranslatableGraph { +public class LogicalGraph implements RelationalTransformable, Namespace, Graph, TranslatableGraph { private final long id; @@ -61,13 +61,13 @@ public List getRelationalEquivalent( List values, List getTableNames() { + public Set getEntityNames() { return Set.of(); } @@ -97,13 +97,13 @@ public Set getFunctionNames() { @Override - public Schema getSubSchema( String name ) { + public Namespace getSubNamespace( String name ) { return null; } @Override - public Set getSubSchemaNames() { + public Set getSubNamespaceNames() { return Set.of(); } @@ -121,7 +121,7 @@ public boolean isMutable() { @Override - public Schema snapshot( SchemaVersion version ) { + public Namespace snapshot( SchemaVersion version ) { return new LogicalGraph( id ); } @@ -133,7 +133,7 @@ public C unwrap( Class aClass ) { @Override - public AlgNode toAlg( ToAlgContext context, Graph graph ) { + public AlgNode toAlg( ToAlgContext context, org.polypheny.db.schema.graph.Graph graph ) { throw new RuntimeException( "toAlg() is not implemented for Logical Graphs!" ); } diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelScan.java b/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelScan.java index daa822c034..62647d8dc2 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelScan.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelScan.java @@ -44,8 +44,8 @@ import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.plan.Convention; +import org.polypheny.db.schema.Entity; import org.polypheny.db.schema.ModelTrait; -import org.polypheny.db.schema.Table; /** @@ -105,7 +105,7 @@ public AlgNode copy( AlgTraitSet traitSet, List inputs ) { * @param algOptEntity Table */ public static LogicalRelScan create( AlgOptCluster cluster, final AlgOptEntity algOptEntity ) { - final Table table = algOptEntity.unwrap( Table.class ); + final Entity entity = algOptEntity.unwrap( Entity.class ); final AlgTraitSet traitSet = cluster.traitSetOf( Convention.NONE ) @@ -113,8 +113,8 @@ public static LogicalRelScan create( AlgOptCluster cluster, final AlgOptEntity a .replaceIfs( AlgCollationTraitDef.INSTANCE, () -> { - if ( table != null ) { - return table.getStatistic().getCollations(); + if ( entity != null ) { + return entity.getStatistic().getCollations(); } return ImmutableList.of(); } ); diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelViewScan.java b/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelViewScan.java index ba0998cdcb..589370ae28 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelViewScan.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelViewScan.java @@ -32,7 +32,7 @@ import org.polypheny.db.plan.Convention; import org.polypheny.db.rex.RexBuilder; import org.polypheny.db.rex.RexNode; -import org.polypheny.db.schema.Table; +import org.polypheny.db.schema.Entity; public class LogicalRelViewScan extends Scan { @@ -51,15 +51,15 @@ public LogicalRelViewScan( 
AlgOptCluster cluster, AlgTraitSet traitSet, AlgOptEn public static AlgNode create( AlgOptCluster cluster, final AlgOptEntity algOptEntity ) { - final Table table = algOptEntity.unwrap( Table.class ); + final Entity entity = algOptEntity.unwrap( Entity.class ); final AlgTraitSet traitSet = cluster.traitSetOf( Convention.NONE ) .replaceIfs( AlgCollationTraitDef.INSTANCE, () -> { - if ( table != null ) { - return table.getStatistic().getCollations(); + if ( entity != null ) { + return entity.getStatistic().getCollations(); } return ImmutableList.of(); } ); diff --git a/core/src/main/java/org/polypheny/db/algebra/rules/FilterScanRule.java b/core/src/main/java/org/polypheny/db/algebra/rules/FilterScanRule.java index cb3ce663a3..b9f7c3b299 100644 --- a/core/src/main/java/org/polypheny/db/algebra/rules/FilterScanRule.java +++ b/core/src/main/java/org/polypheny/db/algebra/rules/FilterScanRule.java @@ -46,8 +46,8 @@ import org.polypheny.db.plan.AlgOptRuleOperand; import org.polypheny.db.rex.RexNode; import org.polypheny.db.rex.RexUtil; -import org.polypheny.db.schema.FilterableTable; -import org.polypheny.db.schema.ProjectableFilterableTable; +import org.polypheny.db.schema.FilterableEntity; +import org.polypheny.db.schema.ProjectableFilterableEntity; import org.polypheny.db.tools.AlgBuilderFactory; import org.polypheny.db.util.ImmutableIntList; import org.polypheny.db.util.mapping.Mapping; @@ -58,8 +58,8 @@ * Planner rule that converts * a {@link Filter} * on a {@link Scan} - * of a {@link FilterableTable} - * or a {@link ProjectableFilterableTable} + * of a {@link FilterableEntity} + * or a {@link ProjectableFilterableEntity} * to a {@link BindableScan}. * * The {@link #INTERPRETER} variant allows an intervening {@link org.polypheny.db.adapter.enumerable.EnumerableInterpreter}. @@ -118,7 +118,7 @@ protected FilterScanRule( AlgOptRuleOperand operand, AlgBuilderFactory algBuilde public static boolean test( Scan scan ) { // We can only push filters into a FilterableTable or ProjectableFilterableTable. final AlgOptEntity table = scan.getTable(); - return table.unwrap( FilterableTable.class ) != null || table.unwrap( ProjectableFilterableTable.class ) != null; + return table.unwrap( FilterableEntity.class ) != null || table.unwrap( ProjectableFilterableEntity.class ) != null; } diff --git a/core/src/main/java/org/polypheny/db/algebra/rules/ProjectScanRule.java b/core/src/main/java/org/polypheny/db/algebra/rules/ProjectScanRule.java index 322f1184db..21c1cde9ae 100644 --- a/core/src/main/java/org/polypheny/db/algebra/rules/ProjectScanRule.java +++ b/core/src/main/java/org/polypheny/db/algebra/rules/ProjectScanRule.java @@ -46,7 +46,7 @@ import org.polypheny.db.plan.AlgOptRuleCall; import org.polypheny.db.plan.AlgOptRuleOperand; import org.polypheny.db.rex.RexNode; -import org.polypheny.db.schema.ProjectableFilterableTable; +import org.polypheny.db.schema.ProjectableFilterableEntity; import org.polypheny.db.tools.AlgBuilderFactory; import org.polypheny.db.util.ImmutableIntList; import org.polypheny.db.util.mapping.Mapping; @@ -57,7 +57,7 @@ /** * Planner rule that converts a {@link Project} * on a {@link Scan} - * of a {@link ProjectableFilterableTable} + * of a {@link ProjectableFilterableEntity} * to a {@link BindableScan}. * * The {@link #INTERPRETER} variant allows an intervening {@link EnumerableInterpreter}. 
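Both rules only fire when the scan's entity unwraps to one of the renamed interfaces. As a sketch, a minimal push-down target could look like the class below; the class name is invented, the import paths are approximate, ProjectableFilterableEntity is assumed to keep the scan( root, filters, projects ) contract of the former ProjectableFilterableTable, and AbstractEntity is assumed to supply the remaining Entity defaults as its Calcite ancestor AbstractTable did:

import java.util.List;
import org.apache.calcite.linq4j.Enumerable;
import org.apache.calcite.linq4j.Linq4j;
import org.polypheny.db.adapter.DataContext;
import org.polypheny.db.algebra.type.AlgDataType;
import org.polypheny.db.algebra.type.AlgDataTypeFactory;
import org.polypheny.db.rex.RexNode;
import org.polypheny.db.schema.ProjectableFilterableEntity;
import org.polypheny.db.schema.impl.AbstractEntity;

// Hypothetical in-memory entity: consumes no filters, honors the requested projection.
public class InMemoryEntity extends AbstractEntity implements ProjectableFilterableEntity {

    private final AlgDataType rowType;
    private final Object[][] rows;

    public InMemoryEntity( AlgDataType rowType, Object[][] rows ) {
        this.rowType = rowType;
        this.rows = rows;
    }

    @Override
    public AlgDataType getRowType( AlgDataTypeFactory typeFactory ) {
        return rowType;
    }

    @Override
    public Enumerable<Object[]> scan( DataContext root, List<RexNode> filters, int[] projects ) {
        // 'filters' is mutable: every predicate left in the list is evaluated by the
        // caller (e.g. on top of a BindableScan), so consuming none is still correct.
        if ( projects == null ) {
            return Linq4j.asEnumerable( rows ); // a null projection means "all columns"
        }
        return Linq4j.asEnumerable( rows ).select( row -> {
            Object[] projected = new Object[projects.length];
            for ( int i = 0; i < projects.length; i++ ) {
                projected[i] = row[projects[i]];
            }
            return projected;
        } );
    }

}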
@@ -111,13 +111,13 @@ public ProjectScanRule( AlgOptRuleOperand operand, AlgBuilderFactory algBuilderF protected static boolean test( Scan scan ) { // We can only push projects into a ProjectableFilterableTable. final AlgOptEntity table = scan.getTable(); - return table.unwrap( ProjectableFilterableTable.class ) != null; + return table.unwrap( ProjectableFilterableEntity.class ) != null; } protected void apply( AlgOptRuleCall call, Project project, Scan scan ) { final AlgOptEntity table = scan.getTable(); - assert table.unwrap( ProjectableFilterableTable.class ) != null; + assert table.unwrap( ProjectableFilterableEntity.class ) != null; final TargetMapping mapping = project.getMapping(); if ( mapping == null || Mappings.isIdentity( mapping ) ) { diff --git a/core/src/main/java/org/polypheny/db/algebra/stream/StreamRules.java b/core/src/main/java/org/polypheny/db/algebra/stream/StreamRules.java index cc627ee19f..4af15d5d8f 100644 --- a/core/src/main/java/org/polypheny/db/algebra/stream/StreamRules.java +++ b/core/src/main/java/org/polypheny/db/algebra/stream/StreamRules.java @@ -57,12 +57,12 @@ import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogTable; import org.polypheny.db.plan.AlgOptCluster; +import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptRule; import org.polypheny.db.plan.AlgOptRuleCall; -import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.prepare.AlgOptEntityImpl; -import org.polypheny.db.schema.StreamableTable; -import org.polypheny.db.schema.Table; +import org.polypheny.db.schema.Entity; +import org.polypheny.db.schema.StreamableEntity; import org.polypheny.db.tools.AlgBuilder; import org.polypheny.db.tools.AlgBuilderFactory; import org.polypheny.db.util.Util; @@ -246,7 +246,7 @@ public void onMatch( AlgOptRuleCall call ) { /** - * Planner rule that pushes a {@link Delta} into a {@link Scan} of a {@link StreamableTable}. + * Planner rule that pushes a {@link Delta} into a {@link Scan} of a {@link StreamableEntity}. * * Very likely, the stream was only represented as a table for uniformity with the other relations in the system. The Delta disappears and the stream can be implemented directly. */ @@ -270,14 +270,14 @@ public void onMatch( AlgOptRuleCall call ) { final Scan scan = call.alg( 1 ); final AlgOptCluster cluster = delta.getCluster(); final AlgOptEntity algOptEntity = scan.getTable(); - final StreamableTable streamableTable = algOptEntity.unwrap( StreamableTable.class ); + final StreamableEntity streamableTable = algOptEntity.unwrap( StreamableEntity.class ); if ( streamableTable != null ) { - final Table table1 = streamableTable.stream(); - final CatalogTable catalogTable = Catalog.getInstance().getTable( table1.getTableId() ); + final Entity entity1 = streamableTable.stream(); + final CatalogTable catalogTable = Catalog.getInstance().getTable( entity1.getId() ); final AlgOptEntity algOptEntity2 = AlgOptEntityImpl.create( algOptEntity.getRelOptSchema(), algOptEntity.getRowType(), - table1, + entity1, catalogTable, ImmutableList.builder() .addAll( algOptEntity.getQualifiedName() ) @@ -291,7 +291,7 @@ public void onMatch( AlgOptRuleCall call ) { /** - * Planner rule that converts {@link Delta} over a {@link Scan} of a table other than {@link StreamableTable} to an empty {@link Values}. + * Planner rule that converts {@link Delta} over a {@link Scan} of a table other than {@link StreamableEntity} to an empty {@link Values}. 
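 *
 * For example (sketch): a {@code Delta} over a scan of a plain relational entity is replaced by {@code builder.values( delta.getRowType() ).build()}, an empty {@link Values} with the delta's row type, because a non-streaming entity never emits changes.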
*/ public static class DeltaScanToEmptyRule extends AlgOptRule { @@ -312,7 +312,7 @@ public void onMatch( AlgOptRuleCall call ) { final Delta delta = call.alg( 0 ); final Scan scan = call.alg( 1 ); final AlgOptEntity algOptEntity = scan.getTable(); - final StreamableTable streamableTable = algOptEntity.unwrap( StreamableTable.class ); + final StreamableEntity streamableTable = algOptEntity.unwrap( StreamableEntity.class ); final AlgBuilder builder = call.builder(); if ( streamableTable == null ) { call.transformTo( builder.values( delta.getRowType() ).build() ); diff --git a/core/src/main/java/org/polypheny/db/interpreter/Bindables.java b/core/src/main/java/org/polypheny/db/interpreter/Bindables.java index 5436cff2de..0e4753f2a8 100644 --- a/core/src/main/java/org/polypheny/db/interpreter/Bindables.java +++ b/core/src/main/java/org/polypheny/db/interpreter/Bindables.java @@ -88,10 +88,10 @@ import org.polypheny.db.plan.Convention; import org.polypheny.db.rex.RexLiteral; import org.polypheny.db.rex.RexNode; -import org.polypheny.db.schema.FilterableTable; -import org.polypheny.db.schema.ProjectableFilterableTable; -import org.polypheny.db.schema.ScannableTable; -import org.polypheny.db.schema.Table; +import org.polypheny.db.schema.Entity; +import org.polypheny.db.schema.FilterableEntity; +import org.polypheny.db.schema.ProjectableFilterableEntity; +import org.polypheny.db.schema.ScannableEntity; import org.polypheny.db.tools.AlgBuilderFactory; import org.polypheny.db.util.ImmutableBitSet; import org.polypheny.db.util.ImmutableIntList; @@ -179,7 +179,7 @@ public void onMatch( AlgOptRuleCall call ) { /** - * Scan of a table that implements {@link ScannableTable} and therefore can be converted into an {@link Enumerable}. + * Scan of a table that implements {@link ScannableEntity} and therefore can be converted into an {@link Enumerable}. */ public static class BindableScan extends Scan implements BindableAlg { @@ -212,13 +212,13 @@ public static BindableScan create( AlgOptCluster cluster, AlgOptEntity algOptEnt * Creates a BindableScan. 
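 *
 * A sketch of a call (the filter and projection arguments are illustrative): {@code BindableScan.create( cluster, algOptEntity, ImmutableList.of(), ImmutableIntList.of( 0 ) )} scans only column 0 and leaves no residual filters.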
*/ public static BindableScan create( AlgOptCluster cluster, AlgOptEntity algOptEntity, List filters, List projects ) { - final Table table = algOptEntity.unwrap( Table.class ); + final Entity entity = algOptEntity.unwrap( Entity.class ); final AlgTraitSet traitSet = cluster.traitSetOf( BindableConvention.INSTANCE ) - .replace( table.getSchemaType().getModelTrait() ) + .replace( entity.getSchemaType().getModelTrait() ) .replaceIfs( AlgCollationTraitDef.INSTANCE, () -> { - if ( table != null ) { - return table.getStatistic().getCollations(); + if ( entity != null ) { + return entity.getStatistic().getCollations(); } return ImmutableList.of(); } ); @@ -275,16 +275,16 @@ public String algCompareString() { public static boolean canHandle( AlgOptEntity table ) { - return table.unwrap( ScannableTable.class ) != null - || table.unwrap( FilterableTable.class ) != null - || table.unwrap( ProjectableFilterableTable.class ) != null; + return table.unwrap( ScannableEntity.class ) != null + || table.unwrap( FilterableEntity.class ) != null + || table.unwrap( ProjectableFilterableEntity.class ) != null; } @Override public Enumerable bind( DataContext dataContext ) { // TODO: filterable and projectable - return table.unwrap( ScannableTable.class ).scan( dataContext ); + return table.unwrap( ScannableEntity.class ).scan( dataContext ); } diff --git a/core/src/main/java/org/polypheny/db/interpreter/ScanNode.java b/core/src/main/java/org/polypheny/db/interpreter/ScanNode.java index d33b18bdbd..7d27e209b7 100644 --- a/core/src/main/java/org/polypheny/db/interpreter/ScanNode.java +++ b/core/src/main/java/org/polypheny/db/interpreter/ScanNode.java @@ -57,10 +57,10 @@ import org.polypheny.db.rex.RexNode; import org.polypheny.db.rex.RexUtil; import org.polypheny.db.runtime.Enumerables; -import org.polypheny.db.schema.FilterableTable; -import org.polypheny.db.schema.ProjectableFilterableTable; -import org.polypheny.db.schema.QueryableTable; -import org.polypheny.db.schema.ScannableTable; +import org.polypheny.db.schema.FilterableEntity; +import org.polypheny.db.schema.ProjectableFilterableEntity; +import org.polypheny.db.schema.QueryableEntity; +import org.polypheny.db.schema.ScannableEntity; import org.polypheny.db.schema.SchemaPlus; import org.polypheny.db.schema.Schemas; import org.polypheny.db.util.ImmutableBitSet; @@ -93,15 +93,15 @@ public void run() { */ static ScanNode create( Compiler compiler, Scan alg, ImmutableList filters, ImmutableIntList projects ) { final AlgOptEntity algOptEntity = alg.getTable(); - final ProjectableFilterableTable pfTable = algOptEntity.unwrap( ProjectableFilterableTable.class ); + final ProjectableFilterableEntity pfTable = algOptEntity.unwrap( ProjectableFilterableEntity.class ); if ( pfTable != null ) { return createProjectableFilterable( compiler, alg, filters, projects, pfTable ); } - final FilterableTable filterableTable = algOptEntity.unwrap( FilterableTable.class ); + final FilterableEntity filterableTable = algOptEntity.unwrap( FilterableEntity.class ); if ( filterableTable != null ) { return createFilterable( compiler, alg, filters, projects, filterableTable ); } - final ScannableTable scannableTable = algOptEntity.unwrap( ScannableTable.class ); + final ScannableEntity scannableTable = algOptEntity.unwrap( ScannableEntity.class ); if ( scannableTable != null ) { return createScannable( compiler, alg, filters, projects, scannableTable ); } @@ -110,7 +110,7 @@ static ScanNode create( Compiler compiler, Scan alg, ImmutableList filt if ( enumerable != null ) { return 
createEnumerable( compiler, alg, enumerable, null, filters, projects ); } - final QueryableTable queryableTable = algOptEntity.unwrap( QueryableTable.class ); + final QueryableEntity queryableTable = algOptEntity.unwrap( QueryableEntity.class ); if ( queryableTable != null ) { return createQueryable( compiler, alg, filters, projects, queryableTable ); } @@ -118,19 +118,19 @@ static ScanNode create( Compiler compiler, Scan alg, ImmutableList filt } - private static ScanNode createScannable( Compiler compiler, Scan alg, ImmutableList filters, ImmutableIntList projects, ScannableTable scannableTable ) { + private static ScanNode createScannable( Compiler compiler, Scan alg, ImmutableList filters, ImmutableIntList projects, ScannableEntity scannableTable ) { final Enumerable rowEnumerable = Enumerables.toRow( scannableTable.scan( compiler.getDataContext() ) ); return createEnumerable( compiler, alg, rowEnumerable, null, filters, projects ); } - private static ScanNode createQueryable( Compiler compiler, Scan alg, ImmutableList filters, ImmutableIntList projects, QueryableTable queryableTable ) { + private static ScanNode createQueryable( Compiler compiler, Scan alg, ImmutableList filters, ImmutableIntList projects, QueryableEntity queryableTable ) { final DataContext root = compiler.getDataContext(); final AlgOptEntity algOptEntity = alg.getTable(); final Type elementType = queryableTable.getElementType(); SchemaPlus schema = root.getRootSchema(); for ( String name : Util.skipLast( algOptEntity.getQualifiedName() ) ) { - schema = schema.getSubSchema( name ); + schema = schema.getSubNamespace( name ); } final Enumerable rowEnumerable; if ( elementType instanceof Class ) { @@ -163,7 +163,7 @@ private static ScanNode createQueryable( Compiler compiler, Scan alg, ImmutableL } - private static ScanNode createFilterable( Compiler compiler, Scan alg, ImmutableList filters, ImmutableIntList projects, FilterableTable filterableTable ) { + private static ScanNode createFilterable( Compiler compiler, Scan alg, ImmutableList filters, ImmutableIntList projects, FilterableEntity filterableTable ) { final DataContext root = compiler.getDataContext(); final List mutableFilters = Lists.newArrayList( filters ); final Enumerable enumerable = filterableTable.scan( root, mutableFilters ); @@ -177,7 +177,7 @@ private static ScanNode createFilterable( Compiler compiler, Scan alg, Immutable } - private static ScanNode createProjectableFilterable( Compiler compiler, Scan alg, ImmutableList filters, ImmutableIntList projects, ProjectableFilterableTable pfTable ) { + private static ScanNode createProjectableFilterable( Compiler compiler, Scan alg, ImmutableList filters, ImmutableIntList projects, ProjectableFilterableEntity pfTable ) { final DataContext root = compiler.getDataContext(); final ImmutableIntList originalProjects = projects; for ( ; ; ) { diff --git a/core/src/main/java/org/polypheny/db/plan/AlgOptEntity.java b/core/src/main/java/org/polypheny/db/plan/AlgOptEntity.java index 362032033a..c370d43063 100644 --- a/core/src/main/java/org/polypheny/db/plan/AlgOptEntity.java +++ b/core/src/main/java/org/polypheny/db/plan/AlgOptEntity.java @@ -46,7 +46,7 @@ import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.schema.ColumnStrategy; -import org.polypheny.db.schema.Table; +import org.polypheny.db.schema.Entity; import org.polypheny.db.schema.Wrapper; import org.polypheny.db.util.ImmutableBitSet; @@ -137,7 +137,7 @@ public interface 
AlgOptEntity extends Wrapper { List getColumnStrategies(); @Deprecated - default Table getTable() { + default Entity getTable() { return null; } diff --git a/core/src/main/java/org/polypheny/db/prepare/AlgOptEntityImpl.java b/core/src/main/java/org/polypheny/db/prepare/AlgOptEntityImpl.java index 0502be745d..fbb1f56402 100644 --- a/core/src/main/java/org/polypheny/db/prepare/AlgOptEntityImpl.java +++ b/core/src/main/java/org/polypheny/db/prepare/AlgOptEntityImpl.java @@ -65,17 +65,17 @@ import org.polypheny.db.prepare.Prepare.AbstractPreparingEntity; import org.polypheny.db.runtime.Hook; import org.polypheny.db.schema.ColumnStrategy; -import org.polypheny.db.schema.FilterableTable; -import org.polypheny.db.schema.ModifiableTable; +import org.polypheny.db.schema.Entity; +import org.polypheny.db.schema.FilterableEntity; +import org.polypheny.db.schema.ModifiableEntity; import org.polypheny.db.schema.PolyphenyDbSchema; -import org.polypheny.db.schema.ProjectableFilterableTable; -import org.polypheny.db.schema.QueryableTable; -import org.polypheny.db.schema.ScannableTable; +import org.polypheny.db.schema.ProjectableFilterableEntity; +import org.polypheny.db.schema.QueryableEntity; +import org.polypheny.db.schema.ScannableEntity; import org.polypheny.db.schema.SchemaPlus; import org.polypheny.db.schema.Schemas; -import org.polypheny.db.schema.StreamableTable; -import org.polypheny.db.schema.Table; -import org.polypheny.db.schema.TranslatableTable; +import org.polypheny.db.schema.StreamableEntity; +import org.polypheny.db.schema.TranslatableEntity; import org.polypheny.db.schema.Wrapper; import org.polypheny.db.util.AccessType; import org.polypheny.db.util.ImmutableBitSet; @@ -93,7 +93,7 @@ public class AlgOptEntityImpl extends AbstractPreparingEntity { private final AlgDataType rowType; @Getter @Nullable - private final Table table; + private final Entity entity; @Getter @Nullable @@ -114,14 +114,14 @@ private AlgOptEntityImpl( AlgOptSchema schema, AlgDataType rowType, List names, - Table table, + Entity entity, CatalogEntity catalogEntity, Function, Expression> expressionFunction, Double rowCount ) { this.schema = schema; this.rowType = Objects.requireNonNull( rowType ); this.names = ImmutableList.copyOf( names ); - this.table = table; // may be null + this.entity = entity; // may be null this.catalogEntity = catalogEntity; this.expressionFunction = expressionFunction; // may be null this.rowCount = rowCount; // may be null @@ -134,15 +134,15 @@ public static AlgOptEntityImpl create( AlgOptSchema schema, AlgDataType rowType, public static AlgOptEntityImpl create( AlgOptSchema schema, AlgDataType rowType, final PolyphenyDbSchema.TableEntry tableEntry, CatalogEntity catalogEntity, Double count ) { - final Table table = tableEntry.getTable(); + final Entity entity = tableEntry.getTable(); Double rowCount; if ( count == null ) { - rowCount = table.getStatistic().getRowCount(); + rowCount = entity.getStatistic().getRowCount(); } else { rowCount = count; } - return new AlgOptEntityImpl( schema, rowType, tableEntry.path(), table, catalogEntity, getClassExpressionFunction( tableEntry, table ), rowCount ); + return new AlgOptEntityImpl( schema, rowType, tableEntry.path(), entity, catalogEntity, getClassExpressionFunction( tableEntry, entity ), rowCount ); } @@ -150,25 +150,25 @@ public static AlgOptEntityImpl create( AlgOptSchema schema, AlgDataType rowType, * Creates a copy of this RelOptTable. The new RelOptTable will have newRowType. 
*/ public AlgOptEntityImpl copy( AlgDataType newRowType ) { - return new AlgOptEntityImpl( this.schema, newRowType, this.names, this.table, this.catalogEntity, this.expressionFunction, this.rowCount ); + return new AlgOptEntityImpl( this.schema, newRowType, this.names, this.entity, this.catalogEntity, this.expressionFunction, this.rowCount ); } - private static Function, Expression> getClassExpressionFunction( PolyphenyDbSchema.TableEntry tableEntry, Table table ) { - return getClassExpressionFunction( tableEntry.schema.plus(), tableEntry.name, table ); + private static Function, Expression> getClassExpressionFunction( PolyphenyDbSchema.TableEntry tableEntry, Entity entity ) { + return getClassExpressionFunction( tableEntry.schema.plus(), tableEntry.name, entity ); } - private static Function, Expression> getClassExpressionFunction( final SchemaPlus schema, final String tableName, final Table table ) { - if ( table instanceof QueryableTable ) { - final QueryableTable queryableTable = (QueryableTable) table; + private static Function, Expression> getClassExpressionFunction( final SchemaPlus schema, final String tableName, final Entity entity ) { + if ( entity instanceof QueryableEntity ) { + final QueryableEntity queryableTable = (QueryableEntity) entity; return clazz -> queryableTable.getExpression( schema, tableName, clazz ); - } else if ( table instanceof ScannableTable - || table instanceof FilterableTable - || table instanceof ProjectableFilterableTable ) { - return clazz -> Schemas.tableExpression( schema, Object[].class, tableName, table.getClass() ); - } else if ( table instanceof StreamableTable ) { - return getClassExpressionFunction( schema, tableName, ((StreamableTable) table).stream() ); + } else if ( entity instanceof ScannableEntity + || entity instanceof FilterableEntity + || entity instanceof ProjectableFilterableEntity ) { + return clazz -> Schemas.tableExpression( schema, Object[].class, tableName, entity.getClass() ); + } else if ( entity instanceof StreamableEntity ) { + return getClassExpressionFunction( schema, tableName, ((StreamableEntity) entity).stream() ); } else { return input -> { throw new UnsupportedOperationException(); @@ -177,11 +177,11 @@ private static Function, Expression> getClassExpressionFunction( final } - public static AlgOptEntityImpl create( AlgOptSchema schema, AlgDataType rowType, Table table, CatalogEntity catalogEntity, ImmutableList names ) { - assert table instanceof TranslatableTable - || table instanceof ScannableTable - || table instanceof ModifiableTable; - return new AlgOptEntityImpl( schema, rowType, names, table, catalogEntity, null, null ); + public static AlgOptEntityImpl create( AlgOptSchema schema, AlgDataType rowType, Entity entity, CatalogEntity catalogEntity, ImmutableList names ) { + assert entity instanceof TranslatableEntity + || entity instanceof ScannableEntity + || entity instanceof ModifiableEntity; + return new AlgOptEntityImpl( schema, rowType, names, entity, catalogEntity, null, null ); } @@ -190,11 +190,11 @@ public T unwrap( Class clazz ) { if ( clazz.isInstance( this ) ) { return clazz.cast( this ); } - if ( clazz.isInstance( table ) ) { - return clazz.cast( table ); + if ( clazz.isInstance( entity ) ) { + return clazz.cast( entity ); } - if ( table instanceof Wrapper ) { - final T t = ((Wrapper) table).unwrap( clazz ); + if ( entity instanceof Wrapper ) { + final T t = ((Wrapper) entity).unwrap( clazz ); if ( t != null ) { return t; } @@ -216,13 +216,13 @@ public Expression getExpression( Class clazz ) { 
@Override - protected AlgOptEntity extend( Table extendedTable ) { - final AlgDataType extendedRowType = extendedTable.getRowType( AlgDataTypeFactory.DEFAULT ); + protected AlgOptEntity extend( Entity extendedEntity ) { + final AlgDataType extendedRowType = extendedEntity.getRowType( AlgDataTypeFactory.DEFAULT ); return new AlgOptEntityImpl( getRelOptSchema(), extendedRowType, getQualifiedName(), - extendedTable, + extendedEntity, null, expressionFunction, getRowCount() ); @@ -233,13 +233,13 @@ protected AlgOptEntity extend( Table extendedTable ) { public boolean equals( Object obj ) { return obj instanceof AlgOptEntityImpl && this.rowType.equals( ((AlgOptEntityImpl) obj).getRowType() ) - && this.table == ((AlgOptEntityImpl) obj).table; + && this.entity == ((AlgOptEntityImpl) obj).entity; } @Override public int hashCode() { - return (this.table == null) ? super.hashCode() : this.table.hashCode(); + return (this.entity == null) ? super.hashCode() : this.entity.hashCode(); } @@ -248,8 +248,8 @@ public double getRowCount() { if ( rowCount != null ) { return rowCount; } - if ( table != null ) { - final Double rowCount = table.getStatistic().getRowCount(); + if ( entity != null ) { + final Double rowCount = entity.getStatistic().getRowCount(); if ( rowCount != null ) { return rowCount; } @@ -284,7 +284,7 @@ public AlgNode toAlg( ToAlgContext context, AlgTraitSet traitSet ) { } } final AlgOptEntity algOptEntity = - new AlgOptEntityImpl( this.schema, b.build(), this.names, this.table, this.catalogEntity, this.expressionFunction, this.rowCount ) { + new AlgOptEntityImpl( this.schema, b.build(), this.names, this.entity, this.catalogEntity, this.expressionFunction, this.rowCount ) { @Override public T unwrap( Class clazz ) { if ( clazz.isAssignableFrom( InitializerExpressionFactory.class ) ) { @@ -296,19 +296,19 @@ public T unwrap( Class clazz ) { return algOptEntity.toAlg( context, traitSet ); } - if ( table instanceof TranslatableTable ) { - return ((TranslatableTable) table).toAlg( context, this, traitSet ); + if ( entity instanceof TranslatableEntity ) { + return ((TranslatableEntity) entity).toAlg( context, this, traitSet ); } final AlgOptCluster cluster = context.getCluster(); if ( Hook.ENABLE_BINDABLE.get( false ) ) { return LogicalRelScan.create( cluster, this ); } - if ( PolyphenyDbPrepareImpl.ENABLE_ENUMERABLE && table instanceof QueryableTable ) { + if ( PolyphenyDbPrepareImpl.ENABLE_ENUMERABLE && entity instanceof QueryableEntity ) { return EnumerableScan.create( cluster, this ); } - if ( table instanceof ScannableTable - || table instanceof FilterableTable - || table instanceof ProjectableFilterableTable ) { + if ( entity instanceof ScannableEntity + || entity instanceof FilterableEntity + || entity instanceof ProjectableFilterableEntity ) { return LogicalRelScan.create( cluster, this ); } if ( PolyphenyDbPrepareImpl.ENABLE_ENUMERABLE ) { @@ -320,8 +320,8 @@ public T unwrap( Class clazz ) { @Override public List getCollationList() { - if ( table != null ) { - return table.getStatistic().getCollations(); + if ( entity != null ) { + return entity.getStatistic().getCollations(); } return ImmutableList.of(); } @@ -329,8 +329,8 @@ public List getCollationList() { @Override public AlgDistribution getDistribution() { - if ( table != null ) { - return table.getStatistic().getDistribution(); + if ( entity != null ) { + return entity.getStatistic().getDistribution(); } return AlgDistributionTraitDef.INSTANCE.getDefault(); } @@ -338,8 +338,8 @@ public AlgDistribution getDistribution() { @Override 
public boolean isKey( ImmutableBitSet columns ) { - if ( table != null ) { - return table.getStatistic().isKey( columns ); + if ( entity != null ) { + return entity.getStatistic().isKey( columns ); } return false; } @@ -347,8 +347,8 @@ public boolean isKey( ImmutableBitSet columns ) { @Override public List getReferentialConstraints() { - if ( table != null ) { - return table.getStatistic().getReferentialConstraints(); + if ( entity != null ) { + return entity.getStatistic().getReferentialConstraints(); } return ImmutableList.of(); } @@ -364,9 +364,9 @@ public AlgDataType getRowType() { public boolean supportsModality( Modality modality ) { switch ( modality ) { case STREAM: - return table instanceof StreamableTable; + return entity instanceof StreamableEntity; default: - return !(table instanceof StreamableTable); + return !(entity instanceof StreamableEntity); } } @@ -379,7 +379,7 @@ public List getQualifiedName() { @Override public Monotonicity getMonotonicity( String columnName ) { - for ( AlgCollation collation : table.getStatistic().getCollations() ) { + for ( AlgCollation collation : entity.getStatistic().getCollations() ) { final AlgFieldCollation fieldCollation = collation.getFieldCollations().get( 0 ); final int fieldIndex = fieldCollation.getFieldIndex(); if ( fieldIndex < rowType.getFieldCount() && rowType.getFieldNames().get( fieldIndex ).equals( columnName ) ) { diff --git a/core/src/main/java/org/polypheny/db/prepare/PolyphenyDbCatalogReader.java b/core/src/main/java/org/polypheny/db/prepare/PolyphenyDbCatalogReader.java index 02a0aee464..256cd16b24 100644 --- a/core/src/main/java/org/polypheny/db/prepare/PolyphenyDbCatalogReader.java +++ b/core/src/main/java/org/polypheny/db/prepare/PolyphenyDbCatalogReader.java @@ -51,8 +51,8 @@ import org.polypheny.db.nodes.Operator; import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.prepare.Prepare.PreparingEntity; +import org.polypheny.db.schema.Entity; import org.polypheny.db.schema.PolyphenyDbSchema; -import org.polypheny.db.schema.Table; import org.polypheny.db.schema.Wrapper; import org.polypheny.db.schema.graph.Graph; import org.polypheny.db.util.Moniker; @@ -83,15 +83,15 @@ public PreparingEntity getTable( final List names ) { // First look in the default schema, if any. If not found, look in the root schema. PolyphenyDbSchema.TableEntry entry = ValidatorUtil.getTableEntry( this, names ); if ( entry != null ) { - final Table table = entry.getTable(); - CatalogTable catalogTable = Catalog.getInstance().getTable( table.getTableId() ); - if ( table instanceof Wrapper ) { - final PreparingEntity algOptTable = ((Wrapper) table).unwrap( PreparingEntity.class ); + final Entity entity = entry.getTable(); + CatalogTable catalogTable = Catalog.getInstance().getTable( entity.getId() ); + if ( entity instanceof Wrapper ) { + final PreparingEntity algOptTable = ((Wrapper) entity).unwrap( PreparingEntity.class ); if ( algOptTable != null ) { return algOptTable; } } - return AlgOptEntityImpl.create( this, table.getRowType( typeFactory ), entry, catalogTable, null ); + return AlgOptEntityImpl.create( this, entity.getRowType( typeFactory ), entry, catalogTable, null ); } return null; } @@ -102,9 +102,9 @@ public AlgOptEntity getCollection( final List names ) { // First look in the default schema, if any. If not found, look in the root schema. 
PolyphenyDbSchema.TableEntry entry = ValidatorUtil.getTableEntry( this, names ); if ( entry != null ) { - final Table table = entry.getTable(); - CatalogTable catalogTable = Catalog.getInstance().getTable( table.getTableId() ); - return AlgOptEntityImpl.create( this, table.getRowType( typeFactory ), entry, catalogTable, null ); + final Entity entity = entry.getTable(); + CatalogTable catalogTable = Catalog.getInstance().getTable( entity.getId() ); + return AlgOptEntityImpl.create( this, entity.getRowType( typeFactory ), entry, catalogTable, null ); } return null; } @@ -112,8 +112,8 @@ public AlgOptEntity getCollection( final List names ) { @Override public Graph getGraph( final String name ) { - PolyphenyDbSchema schema = rootSchema.getSubSchema( name, true ); - return schema == null ? null : (Graph) schema.getSchema(); + PolyphenyDbSchema schema = rootSchema.getSubNamespace( name, true ); + return schema == null ? null : (Graph) schema.getNamespace(); } diff --git a/core/src/main/java/org/polypheny/db/prepare/Prepare.java b/core/src/main/java/org/polypheny/db/prepare/Prepare.java index d49a201039..76db022daa 100644 --- a/core/src/main/java/org/polypheny/db/prepare/Prepare.java +++ b/core/src/main/java/org/polypheny/db/prepare/Prepare.java @@ -71,8 +71,8 @@ import org.polypheny.db.runtime.Hook; import org.polypheny.db.runtime.Typed; import org.polypheny.db.schema.ColumnStrategy; -import org.polypheny.db.schema.ExtensibleTable; -import org.polypheny.db.schema.Table; +import org.polypheny.db.schema.Entity; +import org.polypheny.db.schema.ExtensibleEntity; import org.polypheny.db.schema.graph.Graph; import org.polypheny.db.tools.Program; import org.polypheny.db.tools.Programs; @@ -269,25 +269,25 @@ public abstract static class AbstractPreparingEntity implements PreparingEntity @Override public final AlgOptEntity extend( List extendedFields ) { - final Table table = unwrap( Table.class ); + final Entity entity = unwrap( Entity.class ); // Get the set of extended columns that do not have the same name as a column in the base table. final List baseColumns = getRowType().getFieldList(); final List dedupedFields = AlgOptUtil.deduplicateColumns( baseColumns, extendedFields ); final List dedupedExtendedFields = dedupedFields.subList( baseColumns.size(), dedupedFields.size() ); - if ( table instanceof ExtensibleTable ) { - final Table extendedTable = ((ExtensibleTable) table).extend( dedupedExtendedFields ); - return extend( extendedTable ); + if ( entity instanceof ExtensibleEntity ) { + final Entity extendedEntity = ((ExtensibleEntity) entity).extend( dedupedExtendedFields ); + return extend( extendedEntity ); } - throw new RuntimeException( "Cannot extend " + table ); + throw new RuntimeException( "Cannot extend " + entity ); } /** - * Implementation-specific code to instantiate a new {@link AlgOptEntity} based on a {@link Table} that has been extended. + * Implementation-specific code to instantiate a new {@link AlgOptEntity} based on an {@link Entity} that has been extended.
*/ - protected abstract AlgOptEntity extend( Table extendedTable ); + protected abstract AlgOptEntity extend( Entity extendedEntity ); @Override diff --git a/core/src/main/java/org/polypheny/db/prepare/QueryableAlgBuilder.java b/core/src/main/java/org/polypheny/db/prepare/QueryableAlgBuilder.java index d6c570fd37..f1d2c369b5 100644 --- a/core/src/main/java/org/polypheny/db/prepare/QueryableAlgBuilder.java +++ b/core/src/main/java/org/polypheny/db/prepare/QueryableAlgBuilder.java @@ -68,8 +68,8 @@ import org.polypheny.db.catalog.entity.CatalogTable; import org.polypheny.db.rex.RexNode; import org.polypheny.db.schema.PolyphenyDbSchema; -import org.polypheny.db.schema.QueryableTable; -import org.polypheny.db.schema.TranslatableTable; +import org.polypheny.db.schema.QueryableEntity; +import org.polypheny.db.schema.TranslatableEntity; import org.polypheny.db.schema.impl.AbstractTableQueryable; @@ -107,8 +107,8 @@ AlgNode toAlg( Queryable queryable ) { } if ( queryable instanceof AbstractTableQueryable ) { final AbstractTableQueryable tableQueryable = (AbstractTableQueryable) queryable; - final QueryableTable table = tableQueryable.table; - final CatalogTable catalogTable = Catalog.getInstance().getTable( table.getTableId() ); + final QueryableEntity table = tableQueryable.table; + final CatalogTable catalogTable = Catalog.getInstance().getTable( table.getId() ); final PolyphenyDbSchema.TableEntry tableEntry = PolyphenyDbSchema .from( tableQueryable.schema ) @@ -119,8 +119,8 @@ AlgNode toAlg( Queryable queryable ) { tableEntry, catalogTable, null ); - if ( table instanceof TranslatableTable ) { - return ((TranslatableTable) table).toAlg( translator.toAlgContext(), algOptTable, translator.cluster.traitSet() ); + if ( table instanceof TranslatableEntity ) { + return ((TranslatableEntity) table).toAlg( translator.toAlgContext(), algOptTable, translator.cluster.traitSet() ); } else { return LogicalRelScan.create( translator.cluster, algOptTable ); } diff --git a/core/src/main/java/org/polypheny/db/schema/AbstractPolyphenyDbSchema.java b/core/src/main/java/org/polypheny/db/schema/AbstractPolyphenyDbSchema.java index d8d1fdb4cb..ec98e6fe32 100644 --- a/core/src/main/java/org/polypheny/db/schema/AbstractPolyphenyDbSchema.java +++ b/core/src/main/java/org/polypheny/db/schema/AbstractPolyphenyDbSchema.java @@ -78,14 +78,14 @@ public abstract class AbstractPolyphenyDbSchema implements PolyphenyDbSchema { private final boolean caseSensitive; @Getter @Setter - public Schema schema; + public Namespace namespace; @Getter public final String name; @Getter public final NamespaceType namespaceType; /** - * Tables explicitly defined in this schema. Does not include tables in {@link #schema}. + * Tables explicitly defined in this schema. Does not include tables in {@link #namespace}. 
*/ @Getter protected final NameMap tableMap; @@ -99,7 +99,7 @@ public abstract class AbstractPolyphenyDbSchema implements PolyphenyDbSchema { protected AbstractPolyphenyDbSchema( AbstractPolyphenyDbSchema parent, - Schema schema, + Namespace namespace, String name, NamespaceType type, boolean caseSensitive, @@ -111,7 +111,7 @@ protected AbstractPolyphenyDbSchema( NameMap nullaryFunctionMap, List> path ) { this.parent = parent; - this.schema = schema; + this.namespace = namespace; this.name = name; this.namespaceType = type; if ( tableMap == null ) { @@ -150,32 +150,32 @@ protected AbstractPolyphenyDbSchema( * @param name Schema name */ public static PolyphenyDbSchema createRootSchema( String name ) { - final Schema schema = new RootSchema(); - return new SimplePolyphenyDbSchema( null, schema, name, NamespaceType.getDefault(), false ); + final Namespace namespace = new RootSchema(); + return new SimplePolyphenyDbSchema( null, namespace, name, NamespaceType.getDefault(), false ); } /** - * Returns a sub-schema with a given name that is defined implicitly (that is, by the underlying {@link Schema} object, - * not explicitly by a call to {@link #add(String, Schema, NamespaceType)}), or null. + * Returns a sub-schema with a given name that is defined implicitly (that is, by the underlying {@link Namespace} object, + * not explicitly by a call to {@link #add(String, Namespace, NamespaceType)}), or null. */ protected abstract PolyphenyDbSchema getImplicitSubSchema( String schemaName, boolean caseSensitive ); /** - * Returns a table with a given name that is defined implicitly (that is, by the underlying {@link Schema} object, - * not explicitly by a call to {@link #add(String, Table)}), or null. + * Returns a table with a given name that is defined implicitly (that is, by the underlying {@link Namespace} object, + * not explicitly by a call to {@link #add(String, Entity)}), or null. */ protected abstract TableEntry getImplicitTable( String tableName ); /** - * Returns a type with a given name that is defined implicitly (that is, by the underlying {@link Schema} object, + * Returns a type with a given name that is defined implicitly (that is, by the underlying {@link Namespace} object, * not explicitly by a call to {@link #add(String, AlgProtoDataType)}), or null. */ protected abstract TypeEntry getImplicitType( String name, boolean caseSensitive ); /** * Returns table function with a given name and zero arguments that is defined implicitly (that is, by the underlying - * {@link Schema} object, not explicitly by a call to {@link #add(String, Function)}), or null. + * {@link Namespace} object, not explicitly by a call to {@link #add(String, Function)}), or null. */ protected abstract TableEntry getImplicitTableBasedOnNullaryFunction( String tableName, boolean caseSensitive ); @@ -207,7 +207,7 @@ public static PolyphenyDbSchema createRootSchema( String name ) { /** * Adds implicit table functions to a builder. */ - protected abstract void addImplicitTablesBasedOnNullaryFunctionsToBuilder( ImmutableSortedMap.Builder builder ); + protected abstract void addImplicitTablesBasedOnNullaryFunctionsToBuilder( ImmutableSortedMap.Builder builder ); /** * Returns a snapshot representation of this PolyphenyDbSchema. @@ -220,8 +220,8 @@ public static PolyphenyDbSchema createRootSchema( String name ) { /** * Creates a TableEntryImpl with no SQLs. 
*/ - protected TableEntryImpl tableEntry( String name, Table table ) { - return new TableEntryImpl( this, name, table ); + protected TableEntryImpl tableEntry( String name, Entity entity ) { + return new TableEntryImpl( this, name, entity ); } @@ -237,8 +237,8 @@ protected TypeEntryImpl typeEntry( String name, AlgProtoDataType algProtoDataTyp * Defines a table within this schema. */ @Override - public TableEntry add( String tableName, Table table ) { - final TableEntryImpl entry = new TableEntryImpl( this, tableName, table ); + public TableEntry add( String tableName, Entity entity ) { + final TableEntryImpl entry = new TableEntryImpl( this, tableName, entity ); tableMap.put( tableName, entry ); return entry; } @@ -306,12 +306,12 @@ public List path( String name ) { @Override - public final PolyphenyDbSchema getSubSchema( String schemaName, boolean caseSensitive ) { + public final PolyphenyDbSchema getSubNamespace( String namespaceName, boolean caseSensitive ) { // Check explicit schemas. - for ( Map.Entry entry : subSchemaMap.range( schemaName, caseSensitive ).entrySet() ) { + for ( Map.Entry entry : subSchemaMap.range( namespaceName, caseSensitive ).entrySet() ) { return entry.getValue(); } - return getImplicitSubSchema( schemaName, caseSensitive ); + return getImplicitSubSchema( namespaceName, caseSensitive ); } @@ -338,7 +338,7 @@ public final TableEntry getTable( String originalTableName ) { fieldInfo.add( new AlgDataTypeFieldImpl( "properties", 1, typeFactory.createPolyType( PolyType.VARCHAR, 2064 ) ) ); fieldInfo.add( new AlgDataTypeFieldImpl( "labels", 2, typeFactory.createArrayType( typeFactory.createPolyType( PolyType.VARCHAR, 255 ), -1 ) ) ); - return new TableEntryImpl( this, tableName, new LogicalTable( -1, name, tableName, List.of(), List.of(), AlgDataTypeImpl.proto( fieldInfo.build() ), NamespaceType.GRAPH ) ); + return new TableEntryImpl( this, tableName, new LogicalEntity( -1, name, tableName, List.of(), List.of(), AlgDataTypeImpl.proto( fieldInfo.build() ), NamespaceType.GRAPH ) ); } else if ( namespaceType == NamespaceType.DOCUMENT ) { for ( Map.Entry entry : tableMap.map().entrySet().stream().filter( t -> t.getKey().split( "_" )[0].equalsIgnoreCase( tableName.split( "_" )[0] ) ).collect( Collectors.toList() ) ) { return entry.getValue(); @@ -350,7 +350,7 @@ public final TableEntry getTable( String originalTableName ) { @Override public SchemaPlus plus() { - return new SchemaPlusImpl(); + return new SchemaPlusImpl( getNamespace().getId() ); } @@ -374,8 +374,8 @@ public List> getPath() { /** - * Returns a collection of sub-schemas, both explicit (defined using {@link #add(String, Schema, NamespaceType)}) - * and implicit (defined using {@link Schema#getSubSchemaNames()} and {@link Schema#getSubSchema(String)}). + * Returns a collection of sub-schemas, both explicit (defined using {@link #add(String, Namespace, NamespaceType)}) + * and implicit (defined using {@link Namespace#getSubNamespaceNames()} and {@link Namespace#getSubNamespace(String)}). 
*/ @Override public final NavigableMap getSubSchemaMap() { @@ -467,8 +467,8 @@ public final TableEntry getTableBasedOnNullaryFunction( String tableName, boolea final Function function = entry.getValue().getFunction(); if ( function instanceof TableMacro ) { assert function.getParameters().isEmpty(); - final Table table = ((TableMacro) function).apply( ImmutableList.of() ); - return tableEntry( tableName, table ); + final Entity entity = ((TableMacro) function).apply( ImmutableList.of() ); + return tableEntry( tableName, entity ); } } return getImplicitTableBasedOnNullaryFunction( tableName, caseSensitive ); @@ -480,6 +480,15 @@ public final TableEntry getTableBasedOnNullaryFunction( String tableName, boolea */ private class SchemaPlusImpl implements SchemaPlus { + @Getter + private final long id; + + + public SchemaPlusImpl( long id ) { + this.id = id; + } + + @Override public AbstractPolyphenyDbSchema polyphenyDbSchema() { return AbstractPolyphenyDbSchema.this; @@ -500,7 +509,7 @@ public String getName() { @Override public boolean isMutable() { - return schema.isMutable(); + return namespace.isMutable(); } @@ -517,26 +526,26 @@ public boolean isCacheEnabled() { @Override - public Schema snapshot( SchemaVersion version ) { + public Namespace snapshot( SchemaVersion version ) { throw new UnsupportedOperationException(); } @Override public Expression getExpression( SchemaPlus parentSchema, String name ) { - return schema.getExpression( parentSchema, name ); + return namespace.getExpression( parentSchema, name ); } @Override - public Table getTable( String name ) { + public Entity getEntity( String name ) { final TableEntry entry = AbstractPolyphenyDbSchema.this.getTable( name ); return entry == null ? null : entry.getTable(); } @Override - public NavigableSet getTableNames() { + public NavigableSet getEntityNames() { return AbstractPolyphenyDbSchema.this.getTableNames(); } @@ -567,21 +576,21 @@ public NavigableSet getFunctionNames() { @Override - public SchemaPlus getSubSchema( String name ) { - final PolyphenyDbSchema subSchema = AbstractPolyphenyDbSchema.this.getSubSchema( name, true ); + public SchemaPlus getSubNamespace( String name ) { + final PolyphenyDbSchema subSchema = AbstractPolyphenyDbSchema.this.getSubNamespace( name, true ); return subSchema == null ? 
null : subSchema.plus(); } @Override - public Set getSubSchemaNames() { + public Set getSubNamespaceNames() { return AbstractPolyphenyDbSchema.this.getSubSchemaMap().keySet(); } @Override - public SchemaPlus add( String name, Schema schema, NamespaceType namespaceType ) { - final PolyphenyDbSchema polyphenyDbSchema = AbstractPolyphenyDbSchema.this.add( name, schema, namespaceType ); + public SchemaPlus add( String name, Namespace namespace, NamespaceType namespaceType ) { + final PolyphenyDbSchema polyphenyDbSchema = AbstractPolyphenyDbSchema.this.add( name, namespace, namespaceType ); return polyphenyDbSchema.plus(); } @@ -594,8 +603,8 @@ public T unwrap( Class clazz ) { if ( clazz.isInstance( AbstractPolyphenyDbSchema.this ) ) { return clazz.cast( AbstractPolyphenyDbSchema.this ); } - if ( clazz.isInstance( AbstractPolyphenyDbSchema.this.schema ) ) { - return clazz.cast( AbstractPolyphenyDbSchema.this.schema ); + if ( clazz.isInstance( AbstractPolyphenyDbSchema.this.namespace ) ) { + return clazz.cast( AbstractPolyphenyDbSchema.this.namespace ); } throw new ClassCastException( "not a " + clazz ); } @@ -608,8 +617,8 @@ public void setPath( ImmutableList> path ) { @Override - public void add( String name, Table table ) { - AbstractPolyphenyDbSchema.this.add( name, table ); + public void add( String name, Entity entity ) { + AbstractPolyphenyDbSchema.this.add( name, entity ); } diff --git a/core/src/main/java/org/polypheny/db/schema/CustomColumnResolvingTable.java b/core/src/main/java/org/polypheny/db/schema/CustomColumnResolvingEntity.java similarity index 95% rename from core/src/main/java/org/polypheny/db/schema/CustomColumnResolvingTable.java rename to core/src/main/java/org/polypheny/db/schema/CustomColumnResolvingEntity.java index d57349ed80..c3ee83ec52 100644 --- a/core/src/main/java/org/polypheny/db/schema/CustomColumnResolvingTable.java +++ b/core/src/main/java/org/polypheny/db/schema/CustomColumnResolvingEntity.java @@ -42,14 +42,14 @@ /** - * Extension to {@link Table} that specifies a custom way to resolve column names. + * Extension to {@link Entity} that specifies a custom way to resolve column names. * * It is optional for a Table to implement this interface. If Table does not implement this interface, column resolving will * be performed in the default way. * * NOTE: This class is experimental and subject to change/removal without notice. */ -public interface CustomColumnResolvingTable extends Table { +public interface CustomColumnResolvingEntity extends Entity { /** * Resolve a column based on the name components. One or more of the input name components can be resolved to one field in diff --git a/core/src/main/java/org/polypheny/db/schema/Table.java b/core/src/main/java/org/polypheny/db/schema/Entity.java similarity index 94% rename from core/src/main/java/org/polypheny/db/schema/Table.java rename to core/src/main/java/org/polypheny/db/schema/Entity.java index 840f655aca..6b03a54586 100644 --- a/core/src/main/java/org/polypheny/db/schema/Table.java +++ b/core/src/main/java/org/polypheny/db/schema/Entity.java @@ -45,8 +45,8 @@ * Table. * * The typical way for a table to be created is when Polypheny-DB interrogates a user-defined schema in order to validate - * names appearing in a SQL query. Polypheny-DB finds the schema by calling {@link Schema#getSubSchema(String)} on the - * connection's root schema, then gets a table by calling {@link Schema#getTable(String)}. + * names appearing in a SQL query.
Polypheny-DB finds the schema by calling {@link Namespace#getSubNamespace(String)} on the + * connection's root schema, then gets a table by calling {@link Namespace#getEntity(String)}. * * Note that a table does not know its name. It is in fact possible for a table to be used more than once, perhaps under * multiple names or under multiple schemas. (Compare with the i-node concept in the UNIX filesystem.) * * @see TableMacro */ -public interface Table { +public interface Entity { /** * Returns this table's row type. @@ -83,12 +83,12 @@ default AlgDataTypeFactory getTypeFactory() { /** * Returns the tableId of this table. */ - Long getTableId(); + Long getId(); /** * Type of table. */ - Schema.TableType getJdbcTableType(); + TableType getJdbcTableType(); /** * Determines whether the given {@code column} has been rolled up. @@ -111,5 +111,14 @@ default NamespaceType getSchemaType() { return NamespaceType.RELATIONAL; } + interface Table { + + } + + + interface Collection { + + } + } diff --git a/core/src/main/java/org/polypheny/db/schema/ExtensibleTable.java b/core/src/main/java/org/polypheny/db/schema/ExtensibleEntity.java similarity index 91% rename from core/src/main/java/org/polypheny/db/schema/ExtensibleTable.java rename to core/src/main/java/org/polypheny/db/schema/ExtensibleEntity.java index 005dda5861..40a57a2a12 100644 --- a/core/src/main/java/org/polypheny/db/schema/ExtensibleTable.java +++ b/core/src/main/java/org/polypheny/db/schema/ExtensibleEntity.java @@ -46,17 +46,17 @@ * defined in the schema. Calling the {@link #extend} method creates a temporarily extended table schema. * * If the table implements extended interfaces such as - * {@link ScannableTable}, - * {@link org.polypheny.db.schema.FilterableTable} or - * {@link ProjectableFilterableTable}, you may wish + * {@link ScannableEntity}, + * {@link FilterableEntity} or + * {@link ProjectableFilterableEntity}, you may wish * to make the table returned from {@link #extend} implement these interfaces as well. */ -public interface ExtensibleTable extends Table { +public interface ExtensibleEntity extends Entity { /** * Returns a table that has the row type of this table plus the given fields. */ - Table extend( List fields ); + Entity extend( List fields ); /** * Returns the starting offset of the first extended column, which may differ from the field count when the table stores diff --git a/core/src/main/java/org/polypheny/db/schema/FilterableTable.java b/core/src/main/java/org/polypheny/db/schema/FilterableEntity.java similarity index 96% rename from core/src/main/java/org/polypheny/db/schema/FilterableTable.java rename to core/src/main/java/org/polypheny/db/schema/FilterableEntity.java index 413dc1e753..c6c673d965 100644 --- a/core/src/main/java/org/polypheny/db/schema/FilterableTable.java +++ b/core/src/main/java/org/polypheny/db/schema/FilterableEntity.java @@ -43,9 +43,9 @@ /** * Table that can be scanned, optionally applying supplied filter expressions, without creating an intermediate expression. * - * @see ScannableTable + * @see ScannableEntity */ -public interface FilterableTable extends Table { +public interface FilterableEntity extends Entity { /** * Returns an enumerator over the rows in this Table. Each row is represented as an array of its column values.
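The capability interfaces above keep their pre-rename contracts and only change names, so call sites migrate mechanically from the Table-suffixed to the Entity-suffixed variants. A minimal sketch of the resulting dispatch, following the unwrap pattern of Bindables.canHandle and ScanNode.create above (the class EntityScans and method scanEntity are hypothetical, and the Enumerable element type is assumed to stay Object[] as in the pre-rename interfaces):

import java.util.List;
import org.apache.calcite.linq4j.Enumerable;
import org.polypheny.db.adapter.DataContext;
import org.polypheny.db.plan.AlgOptEntity;
import org.polypheny.db.rex.RexNode;
import org.polypheny.db.schema.FilterableEntity;
import org.polypheny.db.schema.ScannableEntity;

// Hypothetical helper; mirrors the unwrap-based dispatch in Bindables and ScanNode.
class EntityScans {

    static Enumerable<Object[]> scanEntity( AlgOptEntity entity, DataContext ctx, List<RexNode> filters ) {
        // Prefer an entity that can evaluate filters itself; it removes the
        // filters it handles from the mutable list.
        final FilterableEntity filterable = entity.unwrap( FilterableEntity.class );
        if ( filterable != null ) {
            return filterable.scan( ctx, filters );
        }
        // Otherwise fall back to a plain scan; remaining filters must be applied by the caller.
        final ScannableEntity scannable = entity.unwrap( ScannableEntity.class );
        if ( scannable != null ) {
            return scannable.scan( ctx );
        }
        throw new UnsupportedOperationException( "Entity does not support direct scans" );
    }

}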
diff --git a/core/src/main/java/org/polypheny/db/schema/Function.java b/core/src/main/java/org/polypheny/db/schema/Function.java index 31fc40783e..32103d6ad7 100644 --- a/core/src/main/java/org/polypheny/db/schema/Function.java +++ b/core/src/main/java/org/polypheny/db/schema/Function.java @@ -43,7 +43,7 @@ * The application may occur at compile time (for a macro) or at run time (for a regular function). The result may be a * relation, and so might any of the parameters. * - * Functions are registered in a {@link Schema}, and may be queried by name ({@link Schema#getFunctions(String)}) then + * Functions are registered in a {@link Namespace}, and may be queried by name ({@link Namespace#getFunctions(String)}) then * overloads resolved based on parameter types. * * @see TableMacro diff --git a/core/src/main/java/org/polypheny/db/schema/LogicalCollection.java b/core/src/main/java/org/polypheny/db/schema/LogicalCollection.java index 5ddf3425b6..7ed1c30937 100644 --- a/core/src/main/java/org/polypheny/db/schema/LogicalCollection.java +++ b/core/src/main/java/org/polypheny/db/schema/LogicalCollection.java @@ -21,7 +21,7 @@ import org.polypheny.db.catalog.Catalog.NamespaceType; -public class LogicalCollection extends LogicalTable { +public class LogicalCollection extends LogicalEntity { protected LogicalCollection( long tableId, String logicalSchemaName, String logicalTableName, AlgProtoDataType protoRowType ) { super( tableId, logicalSchemaName, logicalTableName, List.of( 0L ), List.of( "d" ), protoRowType, NamespaceType.DOCUMENT ); diff --git a/core/src/main/java/org/polypheny/db/schema/LogicalTable.java b/core/src/main/java/org/polypheny/db/schema/LogicalEntity.java similarity index 94% rename from core/src/main/java/org/polypheny/db/schema/LogicalTable.java rename to core/src/main/java/org/polypheny/db/schema/LogicalEntity.java index ae05fec3a0..138a0d5086 100644 --- a/core/src/main/java/org/polypheny/db/schema/LogicalTable.java +++ b/core/src/main/java/org/polypheny/db/schema/LogicalEntity.java @@ -16,7 +16,6 @@ package org.polypheny.db.schema; -import java.util.Collection; import java.util.List; import java.util.Set; import java.util.TreeSet; @@ -24,7 +23,7 @@ import org.apache.calcite.linq4j.Enumerable; import org.apache.calcite.linq4j.Queryable; import org.polypheny.db.adapter.DataContext; -import org.polypheny.db.adapter.java.AbstractQueryableTable; +import org.polypheny.db.adapter.java.AbstractQueryableEntity; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.core.Modify; import org.polypheny.db.algebra.core.Modify.Operation; @@ -42,7 +41,7 @@ import org.polypheny.db.rex.RexNode; -public class LogicalTable extends AbstractQueryableTable implements TranslatableTable, ScannableTable, ModifiableTable { +public class LogicalEntity extends AbstractQueryableEntity implements TranslatableEntity, ScannableEntity, ModifiableEntity { private AlgProtoDataType protoRowType; @@ -63,7 +62,7 @@ public class LogicalTable extends AbstractQueryableTable implements Translatable private final List logicalColumnNames; - public LogicalTable( + public LogicalEntity( long tableId, String logicalSchemaName, String logicalTableName, @@ -72,7 +71,7 @@ public LogicalTable( AlgProtoDataType protoRowType, NamespaceType schemaType ) { super( Object[].class ); - this.tableId = tableId; + this.id = tableId; this.logicalSchemaName = logicalSchemaName; this.logicalTableName = logicalTableName; this.columnIds = columnIds; diff --git a/core/src/main/java/org/polypheny/db/schema/LogicalRelView.java 
b/core/src/main/java/org/polypheny/db/schema/LogicalRelView.java index 624d8889cc..414a56256a 100644 --- a/core/src/main/java/org/polypheny/db/schema/LogicalRelView.java +++ b/core/src/main/java/org/polypheny/db/schema/LogicalRelView.java @@ -21,7 +21,7 @@ import org.polypheny.db.catalog.Catalog.NamespaceType; -public class LogicalRelView extends LogicalTable { +public class LogicalRelView extends LogicalEntity { protected LogicalRelView( long tableId, diff --git a/core/src/main/java/org/polypheny/db/schema/LogicalSchema.java b/core/src/main/java/org/polypheny/db/schema/LogicalSchema.java index 2f9e67eac3..1b8225a824 100644 --- a/core/src/main/java/org/polypheny/db/schema/LogicalSchema.java +++ b/core/src/main/java/org/polypheny/db/schema/LogicalSchema.java @@ -23,33 +23,37 @@ import lombok.Getter; import org.apache.calcite.linq4j.tree.Expression; import org.polypheny.db.algebra.type.AlgProtoDataType; +import org.polypheny.db.schema.Namespace.Schema; -public class LogicalSchema implements Schema { +public class LogicalSchema implements Namespace, Schema { private final String schemaName; @Getter - private final Map tableMap; + private final Map tableMap; - private final Map collectionMap; + private final Map collectionMap; + @Getter + private final long id; - public LogicalSchema( String schemaName, Map tableMap, Map collectionMap ) { + public LogicalSchema( long id, String schemaName, Map tableMap, Map collectionMap ) { this.schemaName = schemaName; this.tableMap = tableMap; this.collectionMap = collectionMap; + this.id = id; } @Override - public Table getTable( String name ) { + public Entity getEntity( String name ) { return tableMap.get( name ); } @Override - public Set getTableNames() { + public Set getEntityNames() { return tableMap.keySet(); } @@ -79,13 +83,13 @@ public Set getFunctionNames() { @Override - public Schema getSubSchema( String name ) { + public Namespace getSubNamespace( String name ) { return null; } @Override - public Set getSubSchemaNames() { + public Set getSubNamespaceNames() { return ImmutableSet.of(); } @@ -103,8 +107,8 @@ public boolean isMutable() { @Override - public Schema snapshot( SchemaVersion version ) { - return new LogicalSchema( schemaName, tableMap, collectionMap ); + public Namespace snapshot( SchemaVersion version ) { + return new LogicalSchema( id, schemaName, tableMap, collectionMap ); } } diff --git a/core/src/main/java/org/polypheny/db/schema/ModifiableCollection.java b/core/src/main/java/org/polypheny/db/schema/ModifiableCollection.java index e3a20b6b0f..399a9a0aa9 100644 --- a/core/src/main/java/org/polypheny/db/schema/ModifiableCollection.java +++ b/core/src/main/java/org/polypheny/db/schema/ModifiableCollection.java @@ -26,7 +26,7 @@ import org.polypheny.db.rex.RexNode; -public interface ModifiableCollection extends QueryableTable { +public interface ModifiableCollection extends QueryableEntity { DocumentModify toModificationAlg( AlgOptCluster cluster, diff --git a/core/src/main/java/org/polypheny/db/schema/ModifiableTable.java b/core/src/main/java/org/polypheny/db/schema/ModifiableEntity.java similarity index 94% rename from core/src/main/java/org/polypheny/db/schema/ModifiableTable.java rename to core/src/main/java/org/polypheny/db/schema/ModifiableEntity.java index 077871b7c8..0f365b4325 100644 --- a/core/src/main/java/org/polypheny/db/schema/ModifiableTable.java +++ b/core/src/main/java/org/polypheny/db/schema/ModifiableEntity.java @@ -34,7 +34,6 @@ package org.polypheny.db.schema; -import java.util.Collection; import java.util.List; 
import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.core.Modify; @@ -47,13 +46,13 @@ /** * A table that can be modified. */ -public interface ModifiableTable extends QueryableTable { +public interface ModifiableEntity extends QueryableEntity { /** * Returns the modifiable collection. * Modifying the collection will change the table's contents. */ - Collection getModifiableCollection(); + Collection getModifiableCollection(); /** * Creates a relational expression that modifies this table. diff --git a/core/src/main/java/org/polypheny/db/schema/Schema.java b/core/src/main/java/org/polypheny/db/schema/Namespace.java similarity index 53% rename from core/src/main/java/org/polypheny/db/schema/Schema.java rename to core/src/main/java/org/polypheny/db/schema/Namespace.java index c6830e7458..a801d08dd9 100644 --- a/core/src/main/java/org/polypheny/db/schema/Schema.java +++ b/core/src/main/java/org/polypheny/db/schema/Namespace.java @@ -62,9 +62,26 @@ * is valid if "sales" is a registered schema and "emps" is a member with zero parameters and a result type of * Collection(Record(int: "empno", String: "name")). * - * A schema may be nested within another schema; see {@link Schema#getSubSchema(String)}. + * A schema may be nested within another schema; see {@link Namespace#getSubNamespace(String)}. */ -public interface Schema { +public interface Namespace { + + long getId(); + + /** + * Returns a sub-schema with a given name, or null. + * + * @param name Sub-schema name + * @return Sub-schema with a given name, or null + */ + Namespace getSubNamespace( String name ); + + /** + * Returns the names of this schema's child schemas. + * + * @return Names of this schema's child schemas + */ + Set getSubNamespaceNames(); /** * Returns a table with a given name, or null if not found. @@ -72,15 +89,14 @@ public interface Schema { * @param name Table name * @return Table, or null */ - Table getTable( String name ); + Entity getEntity( String name ); /** * Returns the names of the tables in this schema. * * @return Names of the tables in this schema */ - Set getTableNames(); - + Set getEntityNames(); /** * Returns a type with a given name, or null if not found. @@ -112,21 +128,6 @@ public interface Schema { */ Set getFunctionNames(); - /** - * Returns a sub-schema with a given name, or null. - * - * @param name Sub-schema name - * @return Sub-schema with a given name, or null - */ - Schema getSubSchema( String name ); - - /** - * Returns the names of this schema's child schemas. - * - * @return Names of this schema's child schemas - */ - Set getSubSchemaNames(); - /** * Returns the expression by which this schema can be referenced in generated code. * @@ -138,7 +139,7 @@ public interface Schema { /** * Returns whether the user is allowed to create new tables, functions and sub-schemas in this schema, in addition to - * those returned automatically by methods such as {@link #getTable(String)}. + * those returned automatically by methods such as {@link Schema#getEntity(String)}. * * Even if this method returns true, the maps are not modified. Polypheny-DB stores the defined objects in a wrapper object. * @@ -153,230 +154,21 @@ public interface Schema { * @param version The current schema version * @return the schema snapshot. */ - Schema snapshot( SchemaVersion version ); + Namespace snapshot( SchemaVersion version ); - /** - * Table type. - */ - enum TableType { - /** - * A regular table. - * - * Used by DB2, MySQL, PostgreSQL and others. 
- */ - TABLE, - - /** - * A relation whose contents are calculated by evaluating a SQL expression. - * - * Used by DB2, PostgreSQL and others. - */ - VIEW, - - /** - * Foreign table. - * - * Used by PostgreSQL. - */ - FOREIGN_TABLE, - - /** - * Index table. - * - * Used by Apache Phoenix, PostgreSQL. - */ - INDEX, - - /** - * Join table. - * - * Used by Apache Phoenix. - */ - JOIN, - - /** - * Sequence table. - * - * Used by Apache Phoenix, Oracle, PostgreSQL and others. - * In Phoenix, must have a single BIGINT column called "$seq". - */ - SEQUENCE, - - /** - * Stream. - */ - STREAM, - - /** - * Type. - * - * Used by PostgreSQL. - */ - TYPE, - - /** - * A table maintained by the system. Data dictionary tables, such as the "TABLES" and "COLUMNS" table in the "metamodel" schema, examples of system tables. - * - * Specified by the JDBC standard and used by DB2, MySQL, Oracle, PostgreSQL and others. - */ - SYSTEM_TABLE, - - /** - * System view. - * - * Used by PostgreSQL, MySQL. - */ - SYSTEM_VIEW, - - /** - * System index. - * - * Used by PostgreSQL. - */ - SYSTEM_INDEX, - - /** - * System TOAST index. - * - * Used by PostgreSQL. - */ - SYSTEM_TOAST_INDEX, - - /** - * System TOAST table. - * - * Used by PostgreSQL. - */ - SYSTEM_TOAST_TABLE, + interface Graph { - /** - * Temporary index. - * - * Used by PostgreSQL. - */ - TEMPORARY_INDEX, - - /** - * Temporary sequence. - * - * Used by PostgreSQL. - */ - TEMPORARY_SEQUENCE, - - /** - * Temporary table. - * - * Used by PostgreSQL. - */ - TEMPORARY_TABLE, - - /** - * Temporary view. - * - * Used by PostgreSQL. - */ - TEMPORARY_VIEW, - - /** - * A table that is only visible to one connection. - * - * Specified by the JDBC standard and used by PostgreSQL, MySQL. - */ - LOCAL_TEMPORARY, - - /** - * A synonym. - * - * Used by DB2, Oracle. - */ - SYNONYM, - - /** - * An alias. - * - * Specified by the JDBC standard. - */ - ALIAS, - - /** - * A global temporary table. - * - * Specified by the JDBC standard. - */ - GLOBAL_TEMPORARY, - - /** - * An accel-only table. - * - * Used by DB2. - */ - ACCEL_ONLY_TABLE, - - /** - * An auxiliary table. - * - * Used by DB2. - */ - AUXILIARY_TABLE, - - /** - * A global temporary table. - * - * Used by DB2. - */ - GLOBAL_TEMPORARY_TABLE, - - /** - * A hierarchy table. - * - * Used by DB2. - */ - HIERARCHY_TABLE, - - /** - * An inoperative view. - * - * Used by DB2. - */ - INOPERATIVE_VIEW, + } - /** - * A nickname. - * - * Used by DB2. - */ - NICKNAME, - /** - * A typed table. - * - * Used by DB2. - */ - TYPED_TABLE, + interface Schema { - /** - * A typed view. - * - * Used by DB2. - */ - TYPED_VIEW, - /** - * Table type not known to Polypheny-DB. - * - * If you get one of these, please fix the problem by adding an enum value. - */ - OTHER; + } - /** - * The name used in JDBC. For example "SYSTEM TABLE" rather than "SYSTEM_TABLE". - */ - public final String jdbcName; + interface Database { - TableType() { - this.jdbcName = name().replace( '_', ' ' ); - } } } diff --git a/core/src/main/java/org/polypheny/db/schema/Path.java b/core/src/main/java/org/polypheny/db/schema/Path.java index c4d093c213..31cad96c99 100644 --- a/core/src/main/java/org/polypheny/db/schema/Path.java +++ b/core/src/main/java/org/polypheny/db/schema/Path.java @@ -48,7 +48,7 @@ *
* A direct child "foo" of the root schema has two elements [(root, ""), (child, "foo")].
* */ -public interface Path extends List>, RandomAccess { +public interface Path extends List>, RandomAccess { /** * Returns the parent path, or null if the path is empty. @@ -63,6 +63,6 @@ public interface Path extends List>, RandomAccess { /** * Returns the schemas of this path. */ - List schemas(); + List schemas(); } diff --git a/core/src/main/java/org/polypheny/db/schema/PolyphenyDbSchema.java b/core/src/main/java/org/polypheny/db/schema/PolyphenyDbSchema.java index aef72d2d36..45da54ec2b 100644 --- a/core/src/main/java/org/polypheny/db/schema/PolyphenyDbSchema.java +++ b/core/src/main/java/org/polypheny/db/schema/PolyphenyDbSchema.java @@ -26,7 +26,8 @@ import org.polypheny.db.adapter.DataContext; import org.polypheny.db.algebra.type.AlgProtoDataType; import org.polypheny.db.catalog.Catalog.NamespaceType; -import org.polypheny.db.schema.impl.AbstractSchema; +import org.polypheny.db.schema.Namespace.Schema; +import org.polypheny.db.schema.impl.AbstractNamespace; import org.polypheny.db.util.BuiltInMethod; import org.polypheny.db.util.NameMap; @@ -39,7 +40,7 @@ static PolyphenyDbSchema from( SchemaPlus plus ) { void setCache( boolean cache ); - TableEntry add( String tableName, Table table ); + TableEntry add( String tableName, Entity entity ); TypeEntry add( String name, AlgProtoDataType type ); @@ -49,12 +50,12 @@ static PolyphenyDbSchema from( SchemaPlus plus ) { List path( String name ); - PolyphenyDbSchema getSubSchema( String schemaName, boolean caseSensitive ); + PolyphenyDbSchema getSubNamespace( String namespaceName, boolean caseSensitive ); /** * Adds a child schema of this schema. */ - PolyphenyDbSchema add( String name, Schema schema, NamespaceType type ); + PolyphenyDbSchema add( String name, Namespace namespace, NamespaceType type ); TableEntry getTable( String tableName ); @@ -62,9 +63,9 @@ static PolyphenyDbSchema from( SchemaPlus plus ) { PolyphenyDbSchema getParent(); - Schema getSchema(); + Namespace getNamespace(); - void setSchema( Schema schema ); + void setNamespace( Namespace namespace ); SchemaPlus plus(); @@ -129,7 +130,7 @@ public TableEntry( PolyphenyDbSchema schema, String name ) { } - public abstract Table getTable(); + public abstract Entity getTable(); } @@ -169,21 +170,21 @@ public FunctionEntry( PolyphenyDbSchema schema, String name ) { */ class TableEntryImpl extends TableEntry { - private final Table table; + private final Entity entity; /** * Creates a TableEntryImpl. */ - public TableEntryImpl( PolyphenyDbSchema schema, String name, Table table ) { + public TableEntryImpl( PolyphenyDbSchema schema, String name, Entity entity ) { super( schema, name ); - this.table = Objects.requireNonNull( table ); + this.entity = Objects.requireNonNull( entity ); } @Override - public Table getTable() { - return table; + public Entity getTable() { - return entity; } } @@ -242,10 +243,10 @@ public Function getFunction() { /** * Schema that has no parents.
*/ - class RootSchema extends AbstractSchema { + class RootSchema extends AbstractNamespace implements Schema { RootSchema() { - super(); + super( -1L ); } diff --git a/core/src/main/java/org/polypheny/db/schema/ProjectableFilterableTable.java b/core/src/main/java/org/polypheny/db/schema/ProjectableFilterableEntity.java similarity index 96% rename from core/src/main/java/org/polypheny/db/schema/ProjectableFilterableTable.java rename to core/src/main/java/org/polypheny/db/schema/ProjectableFilterableEntity.java index 7c2acdbb07..70de568ce9 100644 --- a/core/src/main/java/org/polypheny/db/schema/ProjectableFilterableTable.java +++ b/core/src/main/java/org/polypheny/db/schema/ProjectableFilterableEntity.java @@ -46,10 +46,10 @@ * * If you wish to write a table that can apply projects but not filters, simply decline all filters. * - * @see ScannableTable - * @see FilterableTable + * @see ScannableEntity + * @see FilterableEntity */ -public interface ProjectableFilterableTable extends Table { +public interface ProjectableFilterableEntity extends Entity { /** * Returns an enumerable over the rows in this Table. diff --git a/core/src/main/java/org/polypheny/db/schema/QueryableTable.java b/core/src/main/java/org/polypheny/db/schema/QueryableEntity.java similarity index 94% rename from core/src/main/java/org/polypheny/db/schema/QueryableTable.java rename to core/src/main/java/org/polypheny/db/schema/QueryableEntity.java index 272e15a77a..6f3c3e3d58 100644 --- a/core/src/main/java/org/polypheny/db/schema/QueryableTable.java +++ b/core/src/main/java/org/polypheny/db/schema/QueryableEntity.java @@ -41,9 +41,9 @@ /** - * Extension to {@link Table} that can translate itself to a {@link Queryable}. + * Extension to {@link Entity} that can translate itself to a {@link Queryable}. */ -public interface QueryableTable extends Table { +public interface QueryableEntity extends Entity { /** * Converts this table into a {@link Queryable}. diff --git a/core/src/main/java/org/polypheny/db/schema/ScannableTable.java b/core/src/main/java/org/polypheny/db/schema/ScannableEntity.java similarity index 97% rename from core/src/main/java/org/polypheny/db/schema/ScannableTable.java rename to core/src/main/java/org/polypheny/db/schema/ScannableEntity.java index 801cb1cdda..eaf094399a 100644 --- a/core/src/main/java/org/polypheny/db/schema/ScannableTable.java +++ b/core/src/main/java/org/polypheny/db/schema/ScannableEntity.java @@ -41,7 +41,7 @@ /** * Table that can be scanned without creating an intermediate relational expression. */ -public interface ScannableTable extends Table { +public interface ScannableEntity extends Entity { /** * Returns an enumerator over the rows in this Table. Each row is represented as an array of its column values. diff --git a/core/src/main/java/org/polypheny/db/schema/SchemaPlus.java b/core/src/main/java/org/polypheny/db/schema/SchemaPlus.java index 9b29dfb59d..b25861942e 100644 --- a/core/src/main/java/org/polypheny/db/schema/SchemaPlus.java +++ b/core/src/main/java/org/polypheny/db/schema/SchemaPlus.java @@ -36,12 +36,13 @@ import com.google.common.collect.ImmutableList; import org.polypheny.db.algebra.type.AlgProtoDataType; import org.polypheny.db.catalog.Catalog.NamespaceType; +import org.polypheny.db.schema.Namespace.Schema; /** - * Extension to the {@link Schema} interface. + * Extension to the {@link Namespace} interface. 
* - * Given a user-defined schema that implements the {@link Schema} interface, Polypheny-DB creates a wrapper that implements + * Given a user-defined schema that implements the {@link Namespace} interface, Polypheny-DB creates a wrapper that implements * the {@code SchemaPlus} interface. This provides extra functionality, such as access to tables that have been added explicitly. * * A user-defined schema does not need to implement this interface, but by the time a schema is passed to a method in a @@ -51,7 +52,7 @@ * given by the system. The purpose of SchemaPlus is to expose to user code, in a read only manner, some of the extra * information about schemas that Polypheny-DB builds up when a schema is registered. */ -public interface SchemaPlus extends Schema { +public interface SchemaPlus extends Namespace, Schema { PolyphenyDbSchema polyphenyDbSchema(); @@ -71,17 +72,17 @@ public interface SchemaPlus extends Schema { // override with stricter return @Override - SchemaPlus getSubSchema( String name ); + SchemaPlus getSubNamespace( String name ); /** * Adds a schema as a sub-schema of this schema, and returns the wrapped object. */ - SchemaPlus add( String name, Schema schema, NamespaceType namespaceType ); + SchemaPlus add( String name, Namespace namespace, NamespaceType namespaceType ); /** * Adds a table to this schema. */ - void add( String name, Table table ); + void add( String name, Entity entity ); /** * Adds a function to this schema. diff --git a/core/src/main/java/org/polypheny/db/schema/SchemaVersion.java b/core/src/main/java/org/polypheny/db/schema/SchemaVersion.java index eb91cd02e6..a6c993bc0b 100644 --- a/core/src/main/java/org/polypheny/db/schema/SchemaVersion.java +++ b/core/src/main/java/org/polypheny/db/schema/SchemaVersion.java @@ -44,7 +44,7 @@ * * Implementation classes of this interface must also override equals(Object), hashCode() and toString(). * - * @see Schema#snapshot(SchemaVersion) + * @see Namespace#snapshot(SchemaVersion) */ public interface SchemaVersion { diff --git a/core/src/main/java/org/polypheny/db/schema/Schemas.java b/core/src/main/java/org/polypheny/db/schema/Schemas.java index 29f478707c..c57047a5f4 100644 --- a/core/src/main/java/org/polypheny/db/schema/Schemas.java +++ b/core/src/main/java/org/polypheny/db/schema/Schemas.java @@ -37,6 +37,15 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; +import java.lang.reflect.Type; +import java.util.AbstractList; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Objects; import org.apache.calcite.linq4j.Enumerable; import org.apache.calcite.linq4j.QueryProvider; import org.apache.calcite.linq4j.Queryable; @@ -62,9 +71,6 @@ import org.polypheny.db.util.BuiltInMethod; import org.polypheny.db.util.Pair; -import java.lang.reflect.Type; -import java.util.*; - /** * Utility functions for schemas. 
@@ -136,7 +142,7 @@ public static Expression subSchemaExpression( SchemaPlus schema, String name, Cl Expressions.constant( name ) ); //CHECKSTYLE: IGNORE 2 //noinspection unchecked - if ( false && type != null && !type.isAssignableFrom( Schema.class ) ) { + if ( false && type != null && !type.isAssignableFrom( Namespace.class ) ) { return unwrap( call, type ); } return call; @@ -156,27 +162,27 @@ public static Expression unwrap( Expression call, Class type ) { */ public static Expression tableExpression( SchemaPlus schema, Type elementType, String tableName, Class clazz ) { final MethodCallExpression expression; - if ( Table.class.isAssignableFrom( clazz ) ) { + if ( Entity.class.isAssignableFrom( clazz ) ) { expression = Expressions.call( expression( schema ), BuiltInMethod.SCHEMA_GET_TABLE.method, Expressions.constant( tableName ) ); - if ( ScannableTable.class.isAssignableFrom( clazz ) ) { + if ( ScannableEntity.class.isAssignableFrom( clazz ) ) { return Expressions.call( BuiltInMethod.SCHEMAS_ENUMERABLE_SCANNABLE.method, - Expressions.convert_( expression, ScannableTable.class ), + Expressions.convert_( expression, ScannableEntity.class ), DataContext.ROOT ); } - if ( FilterableTable.class.isAssignableFrom( clazz ) ) { + if ( FilterableEntity.class.isAssignableFrom( clazz ) ) { return Expressions.call( BuiltInMethod.SCHEMAS_ENUMERABLE_FILTERABLE.method, - Expressions.convert_( expression, FilterableTable.class ), + Expressions.convert_( expression, FilterableEntity.class ), DataContext.ROOT ); } - if ( ProjectableFilterableTable.class.isAssignableFrom( clazz ) ) { + if ( ProjectableFilterableEntity.class.isAssignableFrom( clazz ) ) { return Expressions.call( BuiltInMethod.SCHEMAS_ENUMERABLE_PROJECTABLE_FILTERABLE.method, - Expressions.convert_( expression, ProjectableFilterableTable.class ), + Expressions.convert_( expression, ProjectableFilterableEntity.class ), DataContext.ROOT ); } } else { @@ -212,7 +218,7 @@ public static Queryable queryable( DataContext root, Class clazz, Iter for ( Iterator iterator = names.iterator(); ; ) { String name = iterator.next(); if ( iterator.hasNext() ) { - schema = schema.getSubSchema( name ); + schema = schema.getSubNamespace( name ); } else { return queryable( root, schema, clazz, name ); } @@ -224,13 +230,13 @@ public static Queryable queryable( DataContext root, Class clazz, Iter * Returns a {@link Queryable}, given a schema and table name. */ public static Queryable queryable( DataContext root, SchemaPlus schema, Class clazz, String tableName ) { - QueryableTable table = (QueryableTable) schema.getTable( tableName ); + QueryableEntity table = (QueryableEntity) schema.getEntity( tableName ); return table.asQueryable( root, schema, tableName ); } public static Queryable graph( DataContext root, SchemaPlus schema ) { - QueryableGraph graph = (QueryableGraph) schema.polyphenyDbSchema().getSchema(); + QueryableGraph graph = (QueryableGraph) schema.polyphenyDbSchema().getNamespace(); return graph.asQueryable( root, graph ); } @@ -238,7 +244,7 @@ public static Queryable graph( DataContext root, SchemaPlus schema ) { /** * Returns an {@link org.apache.calcite.linq4j.Enumerable} over the rows of a given table, representing each row as an object array. 
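The compile-time dispatch in tableExpression() has a direct runtime counterpart in the enumerable() overloads below; a sketch of the same capability check done imperatively, with the fallback exception purely illustrative.

import org.apache.calcite.linq4j.Enumerable;
import org.polypheny.db.adapter.DataContext;
import org.polypheny.db.schema.Entity;
import org.polypheny.db.schema.FilterableEntity;
import org.polypheny.db.schema.ScannableEntity;
import org.polypheny.db.schema.Schemas;

final class CapabilityDispatchSketch {

    static Enumerable<Object[]> rows( Entity entity, DataContext root ) {
        if ( entity instanceof ScannableEntity ) {
            return Schemas.enumerable( (ScannableEntity) entity, root );
        }
        if ( entity instanceof FilterableEntity ) {
            // Scans with an empty filter list, i.e. nothing is pushed down.
            return Schemas.enumerable( (FilterableEntity) entity, root );
        }
        throw new IllegalArgumentException( "entity cannot be enumerated directly" );
    }
}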
*/ - public static Enumerable enumerable( final ScannableTable table, final DataContext root ) { + public static Enumerable enumerable( final ScannableEntity table, final DataContext root ) { return table.scan( root ); } @@ -246,7 +252,7 @@ public static Enumerable enumerable( final ScannableTable table, final /** * Returns an {@link org.apache.calcite.linq4j.Enumerable} over the rows of a given table, not applying any filters, representing each row as an object array. */ - public static Enumerable enumerable( final FilterableTable table, final DataContext root ) { + public static Enumerable enumerable( final FilterableEntity table, final DataContext root ) { return table.scan( root, ImmutableList.of() ); } @@ -254,7 +260,7 @@ public static Enumerable enumerable( final FilterableTable table, fina /** * Returns an {@link org.apache.calcite.linq4j.Enumerable} over the rows of a given table, not applying any filters and projecting all columns, representing each row as an object array. */ - public static Enumerable enumerable( final ProjectableFilterableTable table, final DataContext root ) { + public static Enumerable enumerable( final ProjectableFilterableEntity table, final DataContext root ) { return table.scan( root, ImmutableList.of(), identity( table.getRowType( root.getTypeFactory() ).getFieldCount() ) ); } @@ -269,17 +275,17 @@ private static int[] identity( int count ) { /** - * Returns an {@link org.apache.calcite.linq4j.Enumerable} over object arrays, given a fully-qualified table name which leads to a {@link ScannableTable}. + * Returns an {@link org.apache.calcite.linq4j.Enumerable} over object arrays, given a fully-qualified table name which leads to a {@link ScannableEntity}. */ - public static Table table( DataContext root, String... names ) { + public static Entity table( DataContext root, String... names ) { SchemaPlus schema = root.getRootSchema(); final List nameList = Arrays.asList( names ); for ( Iterator iterator = nameList.iterator(); ; ) { String name = iterator.next(); if ( iterator.hasNext() ) { - schema = schema.getSubSchema( name ); + schema = schema.getSubNamespace( name ); } else { - return schema.getTable( name ); + return schema.getEntity( name ); } } } @@ -418,8 +424,8 @@ public int getCurrentUserId() { /** * Returns an implementation of {@link AlgProtoDataType} that asks a given table for its row type with a given type factory. 
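The method reference below type-checks because AlgProtoDataType is, as in Calcite, a functional interface over the type factory; a sketch assuming it kept the Function1-style apply( AlgDataTypeFactory ) method.

import org.polypheny.db.algebra.type.AlgDataType;
import org.polypheny.db.algebra.type.AlgDataTypeFactory;
import org.polypheny.db.algebra.type.AlgProtoDataType;
import org.polypheny.db.schema.Entity;
import org.polypheny.db.schema.Schemas;

final class ProtoSketch {

    // The prototype re-derives the row type on demand, so it stays valid
    // for whichever type factory the caller supplies.
    static AlgDataType materialize( Entity entity, AlgDataTypeFactory factory ) {
        AlgProtoDataType proto = Schemas.proto( entity );
        return proto.apply( factory );
    }
}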
*/ - public static AlgProtoDataType proto( final Table table ) { - return table::getRowType; + public static AlgProtoDataType proto( final Entity entity ) { + return entity::getRowType; } @@ -441,7 +447,7 @@ public static PolyphenyDbSchema subSchema( PolyphenyDbSchema schema, Iterable names ) { - final ImmutableList.Builder> builder = ImmutableList.builder(); - Schema schema = rootSchema.plus(); + final ImmutableList.Builder> builder = ImmutableList.builder(); + Namespace namespace = rootSchema.plus(); final Iterator iterator = names.iterator(); if ( !iterator.hasNext() ) { return PathImpl.EMPTY; @@ -474,16 +480,16 @@ public static Path path( PolyphenyDbSchema rootSchema, Iterable names ) } for ( ; ; ) { final String name = iterator.next(); - builder.add( Pair.of( name, schema ) ); + builder.add( Pair.of( name, namespace ) ); if ( !iterator.hasNext() ) { return path( builder.build() ); } - schema = schema.getSubSchema( name ); + namespace = namespace.getSubNamespace( name ); } } - public static PathImpl path( ImmutableList> build ) { + public static PathImpl path( ImmutableList> build ) { return new PathImpl( build ); } @@ -492,7 +498,7 @@ public static PathImpl path( ImmutableList> build ) { * Returns the path to get to a schema from its root. */ public static Path path( SchemaPlus schema ) { - List> list = new ArrayList<>(); + List> list = new ArrayList<>(); for ( SchemaPlus s = schema; s != null; s = s.getParentSchema() ) { list.add( Pair.of( s.getName(), s ) ); } @@ -593,14 +599,14 @@ public void setParameterTypes( Map types ) { /** * Implementation of {@link Path}. */ - private static class PathImpl extends AbstractList> implements Path { + private static class PathImpl extends AbstractList> implements Path { - private final ImmutableList> pairs; + private final ImmutableList> pairs; private static final PathImpl EMPTY = new PathImpl( ImmutableList.of() ); - PathImpl( ImmutableList> pairs ) { + PathImpl( ImmutableList> pairs ) { this.pairs = pairs; } @@ -620,7 +626,7 @@ public int hashCode() { @Override - public Pair get( int index ) { + public Pair get( int index ) { return pairs.get( index ); } @@ -658,7 +664,7 @@ public int size() { @Override - public List schemas() { + public List schemas() { return Pair.right( pairs ); } diff --git a/core/src/main/java/org/polypheny/db/schema/SimplePolyphenyDbSchema.java b/core/src/main/java/org/polypheny/db/schema/SimplePolyphenyDbSchema.java index a4a7295e98..49e3b46ef5 100644 --- a/core/src/main/java/org/polypheny/db/schema/SimplePolyphenyDbSchema.java +++ b/core/src/main/java/org/polypheny/db/schema/SimplePolyphenyDbSchema.java @@ -37,12 +37,12 @@ class SimplePolyphenyDbSchema extends AbstractPolyphenyDbSchema { /** * Creates a SimplePolyphenyDbSchema. * - * Use {@link AbstractPolyphenyDbSchema#createRootSchema(String)} or {@link #add(String, Schema, NamespaceType)}. + * Use {@link AbstractPolyphenyDbSchema#createRootSchema(String)} or {@link #add(String, Namespace, NamespaceType)}. 
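A Path pairs every step of a lookup with the Namespace reached at that step; a short sketch of walking a two-step path from a root schema, assuming Path lives next to Schemas in org.polypheny.db.schema, with the step names illustrative.

import java.util.Arrays;
import java.util.List;
import org.polypheny.db.schema.Namespace;
import org.polypheny.db.schema.Path;
import org.polypheny.db.schema.PolyphenyDbSchema;
import org.polypheny.db.schema.Schemas;

final class PathSketch {

    static List<Namespace> namespacesAlong( PolyphenyDbSchema root ) {
        // Builds root -> "public" -> "emps"; each element is a (name, Namespace)
        // pair, and schemas() projects the right-hand sides.
        Path path = Schemas.path( root, Arrays.asList( "public", "emps" ) );
        return path.schemas();
    }
}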
*/ - SimplePolyphenyDbSchema( AbstractPolyphenyDbSchema parent, Schema schema, String name, NamespaceType namespaceType, boolean caseSensitive ) { + SimplePolyphenyDbSchema( AbstractPolyphenyDbSchema parent, Namespace namespace, String name, NamespaceType namespaceType, boolean caseSensitive ) { this( parent, - schema, + namespace, name, namespaceType, caseSensitive, @@ -58,7 +58,7 @@ class SimplePolyphenyDbSchema extends AbstractPolyphenyDbSchema { private SimplePolyphenyDbSchema( AbstractPolyphenyDbSchema parent, - Schema schema, + Namespace namespace, String name, NamespaceType namespaceType, boolean caseSensitive, @@ -69,7 +69,7 @@ private SimplePolyphenyDbSchema( NameSet functionNames, NameMap nullaryFunctionMap, List> path ) { - super( parent, schema, name, namespaceType, caseSensitive, subSchemaMap, tableMap, typeMap, functionMap, functionNames, nullaryFunctionMap, path ); + super( parent, namespace, name, namespaceType, caseSensitive, subSchemaMap, tableMap, typeMap, functionMap, functionNames, nullaryFunctionMap, path ); } @@ -80,8 +80,8 @@ public void setCache( boolean cache ) { @Override - public PolyphenyDbSchema add( String name, Schema schema, NamespaceType namespaceType ) { - final PolyphenyDbSchema polyphenyDbSchema = new SimplePolyphenyDbSchema( this, schema, name, namespaceType, false ); + public PolyphenyDbSchema add( String name, Namespace namespace, NamespaceType namespaceType ) { + final PolyphenyDbSchema polyphenyDbSchema = new SimplePolyphenyDbSchema( this, namespace, name, namespaceType, false ); subSchemaMap.put( name, polyphenyDbSchema ); return polyphenyDbSchema; } @@ -90,7 +90,7 @@ public PolyphenyDbSchema add( String name, Schema schema, NamespaceType namespac @Override protected AbstractPolyphenyDbSchema getImplicitSubSchema( String schemaName, boolean caseSensitive ) { // Check implicit schemas. - Schema s = schema.getSubSchema( schemaName ); + Namespace s = namespace.getSubNamespace( schemaName ); if ( s != null ) { return new SimplePolyphenyDbSchema( this, s, schemaName, namespaceType, false ); } @@ -101,9 +101,9 @@ protected AbstractPolyphenyDbSchema getImplicitSubSchema( String schemaName, boo @Override protected TableEntry getImplicitTable( String tableName ) { // Check implicit tables. - Table table = schema.getTable( tableName ); - if ( table != null ) { - return tableEntry( tableName, table ); + Entity entity = namespace.getEntity( tableName ); + if ( entity != null ) { + return tableEntry( tableName, entity ); } return null; } @@ -112,7 +112,7 @@ protected TableEntry getImplicitTable( String tableName ) { @Override protected TypeEntry getImplicitType( String name, boolean caseSensitive ) { // Check implicit types. - AlgProtoDataType type = schema.getType( name ); + AlgProtoDataType type = namespace.getType( name ); if ( type != null ) { return typeEntry( name, type ); } @@ -123,12 +123,12 @@ protected TypeEntry getImplicitType( String name, boolean caseSensitive ) { @Override protected void addImplicitSubSchemaToBuilder( ImmutableSortedMap.Builder builder ) { ImmutableSortedMap explicitSubSchemas = builder.build(); - for ( String schemaName : schema.getSubSchemaNames() ) { + for ( String schemaName : namespace.getSubNamespaceNames() ) { if ( explicitSubSchemas.containsKey( schemaName ) ) { // explicit subschema wins. 
continue; } - Schema s = schema.getSubSchema( schemaName ); + Namespace s = namespace.getSubNamespace( schemaName ); if ( s != null ) { PolyphenyDbSchema polyphenyDbSchema = new SimplePolyphenyDbSchema( this, s, schemaName, namespaceType, false ); builder.put( schemaName, polyphenyDbSchema ); @@ -139,13 +139,13 @@ protected void addImplicitSubSchemaToBuilder( ImmutableSortedMap.Builder builder ) { - builder.addAll( schema.getTableNames() ); + builder.addAll( namespace.getEntityNames() ); } @Override protected void addImplicitFunctionsToBuilder( ImmutableList.Builder builder, String name, boolean caseSensitive ) { - Collection functions = schema.getFunctions( name ); + Collection functions = namespace.getFunctions( name ); if ( functions != null ) { builder.addAll( functions ); } @@ -154,29 +154,29 @@ protected void addImplicitFunctionsToBuilder( ImmutableList.Builder bu @Override protected void addImplicitFuncNamesToBuilder( ImmutableSortedSet.Builder builder ) { - builder.addAll( schema.getFunctionNames() ); + builder.addAll( namespace.getFunctionNames() ); } @Override protected void addImplicitTypeNamesToBuilder( ImmutableSortedSet.Builder builder ) { - builder.addAll( schema.getTypeNames() ); + builder.addAll( namespace.getTypeNames() ); } @Override - protected void addImplicitTablesBasedOnNullaryFunctionsToBuilder( ImmutableSortedMap.Builder builder ) { - ImmutableSortedMap explicitTables = builder.build(); + protected void addImplicitTablesBasedOnNullaryFunctionsToBuilder( ImmutableSortedMap.Builder builder ) { + ImmutableSortedMap explicitTables = builder.build(); - for ( String s : schema.getFunctionNames() ) { + for ( String s : namespace.getFunctionNames() ) { // explicit table wins. if ( explicitTables.containsKey( s ) ) { continue; } - for ( Function function : schema.getFunctions( s ) ) { + for ( Function function : namespace.getFunctions( s ) ) { if ( function instanceof TableMacro && function.getParameters().isEmpty() ) { - final Table table = ((TableMacro) function).apply( ImmutableList.of() ); - builder.put( s, table ); + final Entity entity = ((TableMacro) function).apply( ImmutableList.of() ); + builder.put( s, entity ); } } } @@ -185,12 +185,12 @@ protected void addImplicitTablesBasedOnNullaryFunctionsToBuilder( ImmutableSorte @Override protected TableEntry getImplicitTableBasedOnNullaryFunction( String tableName, boolean caseSensitive ) { - Collection functions = schema.getFunctions( tableName ); + Collection functions = namespace.getFunctions( tableName ); if ( functions != null ) { for ( Function function : functions ) { if ( function instanceof TableMacro && function.getParameters().isEmpty() ) { - final Table table = ((TableMacro) function).apply( ImmutableList.of() ); - return tableEntry( tableName, table ); + final Entity entity = ((TableMacro) function).apply( ImmutableList.of() ); + return tableEntry( tableName, entity ); } } } @@ -202,7 +202,7 @@ protected TableEntry getImplicitTableBasedOnNullaryFunction( String tableName, b protected PolyphenyDbSchema snapshot( AbstractPolyphenyDbSchema parent, SchemaVersion version ) { AbstractPolyphenyDbSchema snapshot = new SimplePolyphenyDbSchema( parent, - schema.snapshot( version ), + namespace.snapshot( version ), name, namespaceType, isCaseSensitive(), diff --git a/core/src/main/java/org/polypheny/db/schema/Statistic.java b/core/src/main/java/org/polypheny/db/schema/Statistic.java index 966d55eff1..62d02205f5 100644 --- a/core/src/main/java/org/polypheny/db/schema/Statistic.java +++ 
b/core/src/main/java/org/polypheny/db/schema/Statistic.java @@ -42,7 +42,7 @@ /** - * Statistics about a {@link Table}. + * Statistics about an {@link Entity}. * * Each of the methods may return {@code null} meaning "not known". * diff --git a/core/src/main/java/org/polypheny/db/schema/StreamableTable.java b/core/src/main/java/org/polypheny/db/schema/StreamableEntity.java similarity index 96% rename from core/src/main/java/org/polypheny/db/schema/StreamableTable.java rename to core/src/main/java/org/polypheny/db/schema/StreamableEntity.java index 1784188f09..86a437b601 100644 --- a/core/src/main/java/org/polypheny/db/schema/StreamableTable.java +++ b/core/src/main/java/org/polypheny/db/schema/StreamableEntity.java @@ -42,12 +42,12 @@ * * @see Delta */ -public interface StreamableTable extends Table { +public interface StreamableEntity extends Entity { /** * Returns an enumerator over the rows in this Table. Each row is represented as an array of its column values. */ - Table stream(); + Entity stream(); } diff --git a/core/src/main/java/org/polypheny/db/schema/TableFactory.java b/core/src/main/java/org/polypheny/db/schema/TableFactory.java index f40487c79f..03bc0745c5 100644 --- a/core/src/main/java/org/polypheny/db/schema/TableFactory.java +++ b/core/src/main/java/org/polypheny/db/schema/TableFactory.java @@ -39,7 +39,7 @@ /** - * Factory for {@link Table} objects. + * Factory for {@link Entity} objects. * * A table factory allows you to include custom tables in a model file. * For example, here is a model that contains a custom table that generates a range of integers. @@ -87,7 +87,7 @@ * * @param Sub-type of table created by this factory */ -public interface TableFactory { +public interface TableFactory { /** * Creates a Table. diff --git a/core/src/main/java/org/polypheny/db/schema/TableMacro.java b/core/src/main/java/org/polypheny/db/schema/TableMacro.java index 2b37caf3f3..52ca96187b 100644 --- a/core/src/main/java/org/polypheny/db/schema/TableMacro.java +++ b/core/src/main/java/org/polypheny/db/schema/TableMacro.java @@ -38,7 +38,7 @@ /** - * Function that returns a {@link Table}. + * Function that returns an {@link Entity}. * * As the name "macro" implies, this is invoked at "compile time", that is, during query preparation. Compile-time expansion * of table expressions allows for some very powerful query-optimizations. @@ -51,7 +51,7 @@ public interface TableMacro extends Function { * @param arguments Arguments * @return Table */ - TranslatableTable apply( List arguments ); + TranslatableEntity apply( List arguments ); } diff --git a/core/src/main/java/org/polypheny/db/schema/TableType.java b/core/src/main/java/org/polypheny/db/schema/TableType.java new file mode 100644 index 0000000000..d021f91eb4 --- /dev/null +++ b/core/src/main/java/org/polypheny/db/schema/TableType.java @@ -0,0 +1,241 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.schema; + +/** + * Table type.
+ */ +public enum TableType { + /** + * A regular table. + * + * Used by DB2, MySQL, PostgreSQL and others. + */ + TABLE, + + /** + * A relation whose contents are calculated by evaluating a SQL expression. + * + * Used by DB2, PostgreSQL and others. + */ + VIEW, + + /** + * Foreign table. + * + * Used by PostgreSQL. + */ + FOREIGN_TABLE, + + /** + * Index table. + * + * Used by Apache Phoenix, PostgreSQL. + */ + INDEX, + + /** + * Join table. + * + * Used by Apache Phoenix. + */ + JOIN, + + /** + * Sequence table. + * + * Used by Apache Phoenix, Oracle, PostgreSQL and others. + * In Phoenix, must have a single BIGINT column called "$seq". + */ + SEQUENCE, + + /** + * Stream. + */ + STREAM, + + /** + * Type. + * + * Used by PostgreSQL. + */ + TYPE, + + /** + * A table maintained by the system. Data dictionary tables, such as the "TABLES" and "COLUMNS" tables in the "metamodel" schema, are examples of system tables. + * + * Specified by the JDBC standard and used by DB2, MySQL, Oracle, PostgreSQL and others. + */ + SYSTEM_TABLE, + + /** + * System view. + * + * Used by PostgreSQL, MySQL. + */ + SYSTEM_VIEW, + + /** + * System index. + * + * Used by PostgreSQL. + */ + SYSTEM_INDEX, + + /** + * System TOAST index. + * + * Used by PostgreSQL. + */ + SYSTEM_TOAST_INDEX, + + /** + * System TOAST table. + * + * Used by PostgreSQL. + */ + SYSTEM_TOAST_TABLE, + + /** + * Temporary index. + * + * Used by PostgreSQL. + */ + TEMPORARY_INDEX, + + /** + * Temporary sequence. + * + * Used by PostgreSQL. + */ + TEMPORARY_SEQUENCE, + + /** + * Temporary table. + * + * Used by PostgreSQL. + */ + TEMPORARY_TABLE, + + /** + * Temporary view. + * + * Used by PostgreSQL. + */ + TEMPORARY_VIEW, + + /** + * A table that is only visible to one connection. + * + * Specified by the JDBC standard and used by PostgreSQL, MySQL. + */ + LOCAL_TEMPORARY, + + /** + * A synonym. + * + * Used by DB2, Oracle. + */ + SYNONYM, + + /** + * An alias. + * + * Specified by the JDBC standard. + */ + ALIAS, + + /** + * A global temporary table. + * + * Specified by the JDBC standard. + */ + GLOBAL_TEMPORARY, + + /** + * An accel-only table. + * + * Used by DB2. + */ + ACCEL_ONLY_TABLE, + + /** + * An auxiliary table. + * + * Used by DB2. + */ + AUXILIARY_TABLE, + + /** + * A global temporary table. + * + * Used by DB2. + */ + GLOBAL_TEMPORARY_TABLE, + + /** + * A hierarchy table. + * + * Used by DB2. + */ + HIERARCHY_TABLE, + + /** + * An inoperative view. + * + * Used by DB2. + */ + INOPERATIVE_VIEW, + + /** + * A nickname. + * + * Used by DB2. + */ + NICKNAME, + + /** + * A typed table. + * + * Used by DB2. + */ + TYPED_TABLE, + + /** + * A typed view. + * + * Used by DB2. + */ + TYPED_VIEW, + + /** + * Table type not known to Polypheny-DB. + * + * If you get one of these, please fix the problem by adding an enum value. + */ + OTHER; + + /** + * The name used in JDBC. For example "SYSTEM TABLE" rather than "SYSTEM_TABLE".
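The derivation in the constructor that follows is purely mechanical, underscores become spaces, so no constant has to spell out its JDBC name; a two-line illustration, runnable once the enum is on the classpath:

import org.polypheny.db.schema.TableType;

final class JdbcNameSketch {

    public static void main( String[] args ) {
        System.out.println( TableType.SYSTEM_TABLE.jdbcName );    // SYSTEM TABLE
        System.out.println( TableType.LOCAL_TEMPORARY.jdbcName ); // LOCAL TEMPORARY
    }
}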
+ */ + public final String jdbcName; + + + TableType() { + this.jdbcName = name().replace( '_', ' ' ); + } +} diff --git a/core/src/main/java/org/polypheny/db/schema/TranslatableTable.java b/core/src/main/java/org/polypheny/db/schema/TranslatableEntity.java similarity index 93% rename from core/src/main/java/org/polypheny/db/schema/TranslatableTable.java rename to core/src/main/java/org/polypheny/db/schema/TranslatableEntity.java index 06bb6f2054..6e8128e075 100644 --- a/core/src/main/java/org/polypheny/db/schema/TranslatableTable.java +++ b/core/src/main/java/org/polypheny/db/schema/TranslatableEntity.java @@ -42,13 +42,13 @@ /** - * Extension to {@link Table} that specifies how it is to be translated to a {@link AlgNode relational expression}. + * Extension to {@link Entity} that specifies how it is to be translated to a {@link AlgNode relational expression}. * * It is optional for a Table to implement this interface. If Table does not implement this interface, it will be converted * to a {@link EnumerableScan}. Generally a Table will implement this interface to create a particular subclass of AlgNode, * and also register rules that act on that particular subclass of AlgNode. */ -public interface TranslatableTable extends Table { +public interface TranslatableEntity extends Entity { /** * Converts this table into a {@link AlgNode relational expression}. diff --git a/core/src/main/java/org/polypheny/db/schema/impl/AbstractTable.java b/core/src/main/java/org/polypheny/db/schema/impl/AbstractEntity.java similarity index 84% rename from core/src/main/java/org/polypheny/db/schema/impl/AbstractTable.java rename to core/src/main/java/org/polypheny/db/schema/impl/AbstractEntity.java index 262e0503f8..f8a4a01df5 100644 --- a/core/src/main/java/org/polypheny/db/schema/impl/AbstractTable.java +++ b/core/src/main/java/org/polypheny/db/schema/impl/AbstractEntity.java @@ -39,37 +39,36 @@ import org.polypheny.db.StatisticsManager; import org.polypheny.db.nodes.Call; import org.polypheny.db.nodes.Node; -import org.polypheny.db.schema.Schema; -import org.polypheny.db.schema.Schema.TableType; +import org.polypheny.db.schema.Entity; import org.polypheny.db.schema.Statistic; import org.polypheny.db.schema.Statistics; -import org.polypheny.db.schema.Table; +import org.polypheny.db.schema.TableType; import org.polypheny.db.schema.Wrapper; /** - * Abstract base class for implementing {@link Table}. + * Abstract base class for implementing {@link Entity}. * - * Sub-classes should override {@link #isRolledUp} and {@link Table#rolledUpColumnValidInsideAgg(String, Call, Node)} if + * Sub-classes should override {@link #isRolledUp} and {@link Entity#rolledUpColumnValidInsideAgg(String, Call, Node)} if * their table can potentially contain rolled up values. This information is used by the validator to check for illegal uses * of these columns. 
*/ @Getter -public abstract class AbstractTable implements Table, Wrapper { +public abstract class AbstractEntity implements Entity, Wrapper { @Getter - protected Long tableId; + protected Long id; protected Statistic statistic = Statistics.UNKNOWN; - protected AbstractTable() { + protected AbstractEntity() { } @Override public TableType getJdbcTableType() { - return Schema.TableType.TABLE; + return TableType.TABLE; } @@ -96,10 +95,10 @@ public boolean rolledUpColumnValidInsideAgg( String column, Call call, Node pare @Override public Statistic getStatistic() { - if ( tableId == null ) { + if ( id == null ) { return Statistics.UNKNOWN; } - Integer rowCount = StatisticsManager.getInstance().rowCountPerTable( tableId ); + Integer rowCount = StatisticsManager.getInstance().rowCountPerTable( id ); if ( rowCount == null ) { return Statistics.UNKNOWN; diff --git a/core/src/main/java/org/polypheny/db/schema/impl/AbstractSchema.java b/core/src/main/java/org/polypheny/db/schema/impl/AbstractNamespace.java similarity index 84% rename from core/src/main/java/org/polypheny/db/schema/impl/AbstractSchema.java rename to core/src/main/java/org/polypheny/db/schema/impl/AbstractNamespace.java index d8e5a47bca..659db3a713 100644 --- a/core/src/main/java/org/polypheny/db/schema/impl/AbstractSchema.java +++ b/core/src/main/java/org/polypheny/db/schema/impl/AbstractNamespace.java @@ -40,18 +40,19 @@ import java.util.Collection; import java.util.Map; import java.util.Set; +import lombok.Getter; import org.apache.calcite.linq4j.tree.Expression; import org.polypheny.db.algebra.type.AlgProtoDataType; +import org.polypheny.db.schema.Entity; import org.polypheny.db.schema.Function; -import org.polypheny.db.schema.Schema; +import org.polypheny.db.schema.Namespace; import org.polypheny.db.schema.SchemaPlus; import org.polypheny.db.schema.SchemaVersion; import org.polypheny.db.schema.Schemas; -import org.polypheny.db.schema.Table; /** - * Abstract implementation of {@link Schema}. + * Abstract implementation of {@link Namespace}. * *

 * Behavior is as follows:
 *
 * <ul>
@@ -62,9 +63,14 @@ *
 * <li>The name and parent schema are as specified in the constructor arguments.</li>
 * </ul>
    */ -public class AbstractSchema implements Schema { +public class AbstractNamespace implements Namespace { - public AbstractSchema() { + @Getter + public final long id; + + + public AbstractNamespace( long id ) { + this.id = id; } @@ -75,7 +81,7 @@ public boolean isMutable() { @Override - public Schema snapshot( SchemaVersion version ) { + public Namespace snapshot( SchemaVersion version ) { return this; } @@ -89,25 +95,25 @@ public Expression getExpression( SchemaPlus parentSchema, String name ) { /** * Returns a map of tables in this schema by name. * - * The implementations of {@link #getTableNames()} and {@link #getTable(String)} depend on this map. + * The implementations of {@link #getEntityNames()} and {@link #getEntity(String)} depend on this map. * The default implementation of this method returns the empty map. * Override this method to change their behavior. * * @return Map of tables in this schema by name */ - protected Map getTableMap() { + protected Map getTableMap() { return ImmutableMap.of(); } @Override - public final Set getTableNames() { + public final Set getEntityNames() { return getTableMap().keySet(); } @Override - public final Table getTable( String name ) { + public final Entity getEntity( String name ) { return getTableMap().get( name ); } @@ -143,7 +149,7 @@ public Set getTypeNames() { * It is a multi-map because functions are overloaded; there may be more than one function in a schema with a given * name (as long as they have different parameter lists). * - * The implementations of {@link #getFunctionNames()} and {@link Schema#getFunctions(String)} depend on this map. + * The implementations of {@link #getFunctionNames()} and {@link Namespace#getFunctions(String)} depend on this map. * The default implementation of this method returns the empty multi-map. * Override this method to change their behavior. * @@ -169,25 +175,25 @@ public final Set getFunctionNames() { /** * Returns a map of sub-schemas in this schema by name. * - * The implementations of {@link #getSubSchemaNames()} and {@link #getSubSchema(String)} depend on this map. + * The implementations of {@link #getSubNamespaceNames()} and {@link #getSubNamespace(String)} depend on this map. * The default implementation of this method returns the empty map. * Override this method to change their behavior. 
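Because the final lookup methods all read from these overridable map hooks, a concrete namespace can stay small; a sketch assuming getTableMap() now maps names to Entity instances, with the id and the entity name illustrative:

import com.google.common.collect.ImmutableMap;
import java.util.Map;
import org.polypheny.db.schema.Entity;
import org.polypheny.db.schema.impl.AbstractNamespace;

class SingleEntityNamespace extends AbstractNamespace {

    private final Entity entity;

    SingleEntityNamespace( long id, Entity entity ) {
        super( id );
        this.entity = entity;
    }

    // getEntityNames() and getEntity( String ) are final and read this map.
    @Override
    protected Map<String, Entity> getTableMap() {
        return ImmutableMap.of( "only_entity", entity );
    }
}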
* * @return Map of sub-schemas in this schema by name */ - protected Map getSubSchemaMap() { + protected Map getSubSchemaMap() { return ImmutableMap.of(); } @Override - public final Set getSubSchemaNames() { + public final Set getSubNamespaceNames() { return getSubSchemaMap().keySet(); } @Override - public final Schema getSubSchema( String name ) { + public final Namespace getSubNamespace( String name ) { return getSubSchemaMap().get( name ); } diff --git a/core/src/main/java/org/polypheny/db/schema/impl/AbstractTableQueryable.java b/core/src/main/java/org/polypheny/db/schema/impl/AbstractTableQueryable.java index f09181e6a7..d5b00b5cfe 100644 --- a/core/src/main/java/org/polypheny/db/schema/impl/AbstractTableQueryable.java +++ b/core/src/main/java/org/polypheny/db/schema/impl/AbstractTableQueryable.java @@ -42,15 +42,15 @@ import org.apache.calcite.linq4j.Queryable; import org.apache.calcite.linq4j.tree.Expression; import org.polypheny.db.adapter.DataContext; -import org.polypheny.db.adapter.java.AbstractQueryableTable; -import org.polypheny.db.schema.QueryableTable; +import org.polypheny.db.adapter.java.AbstractQueryableEntity; +import org.polypheny.db.schema.QueryableEntity; import org.polypheny.db.schema.SchemaPlus; /** - * Abstract implementation of {@link org.apache.calcite.linq4j.Queryable} for {@link QueryableTable}. + * Abstract implementation of {@link org.apache.calcite.linq4j.Queryable} for {@link QueryableEntity}. * - * Not to be confused with {@link AbstractQueryableTable}. + * Not to be confused with {@link AbstractQueryableEntity}. * * @param element type */ @@ -58,11 +58,11 @@ public abstract class AbstractTableQueryable extends AbstractQueryable { public final DataContext dataContext; public final SchemaPlus schema; - public final QueryableTable table; + public final QueryableEntity table; public final String tableName; - public AbstractTableQueryable( DataContext dataContext, SchemaPlus schema, QueryableTable table, String tableName ) { + public AbstractTableQueryable( DataContext dataContext, SchemaPlus schema, QueryableEntity table, String tableName ) { this.dataContext = dataContext; this.schema = schema; this.table = table; diff --git a/core/src/main/java/org/polypheny/db/schema/impl/DelegatingSchema.java b/core/src/main/java/org/polypheny/db/schema/impl/DelegatingNamespace.java similarity index 65% rename from core/src/main/java/org/polypheny/db/schema/impl/DelegatingSchema.java rename to core/src/main/java/org/polypheny/db/schema/impl/DelegatingNamespace.java index 1dc01bdaf9..bc23b9c4c9 100644 --- a/core/src/main/java/org/polypheny/db/schema/impl/DelegatingSchema.java +++ b/core/src/main/java/org/polypheny/db/schema/impl/DelegatingNamespace.java @@ -38,100 +38,106 @@ import java.util.Set; import org.apache.calcite.linq4j.tree.Expression; import org.polypheny.db.algebra.type.AlgProtoDataType; +import org.polypheny.db.schema.Entity; import org.polypheny.db.schema.Function; -import org.polypheny.db.schema.Schema; +import org.polypheny.db.schema.Namespace; import org.polypheny.db.schema.SchemaPlus; import org.polypheny.db.schema.SchemaVersion; -import org.polypheny.db.schema.Table; /** - * Implementation of {@link Schema} that delegates to an underlying schema. + * Implementation of {@link Namespace} that delegates to an underlying schema. */ -public class DelegatingSchema implements Schema { +public class DelegatingNamespace implements Namespace { - protected final Schema schema; + protected final Namespace namespace; /** * Creates a DelegatingSchema. 
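Since DelegatingNamespace forwards every call to the wrapped instance, subclassing it is the cheapest way to observe or veto individual lookups; a sketch, with the log line illustrative:

import org.polypheny.db.schema.Entity;
import org.polypheny.db.schema.Namespace;
import org.polypheny.db.schema.impl.DelegatingNamespace;

class TracingNamespace extends DelegatingNamespace {

    TracingNamespace( Namespace underlying ) {
        super( underlying );
    }

    @Override
    public Entity getEntity( String name ) {
        System.out.println( "resolving entity: " + name );
        return super.getEntity( name );
    }
}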
* - * @param schema Underlying schema + * @param namespace Underlying schema */ - public DelegatingSchema( Schema schema ) { - this.schema = schema; + public DelegatingNamespace( Namespace namespace ) { + this.namespace = namespace; } @Override public String toString() { - return "DelegatingSchema(delegate=" + schema + ")"; + return "DelegatingSchema(delegate=" + namespace + ")"; } @Override public boolean isMutable() { - return schema.isMutable(); + return namespace.isMutable(); } @Override - public Schema snapshot( SchemaVersion version ) { - return schema.snapshot( version ); + public Namespace snapshot( SchemaVersion version ) { + return namespace.snapshot( version ); } @Override public Expression getExpression( SchemaPlus parentSchema, String name ) { - return schema.getExpression( parentSchema, name ); + return namespace.getExpression( parentSchema, name ); } @Override - public Table getTable( String name ) { - return schema.getTable( name ); + public Entity getEntity( String name ) { + return namespace.getEntity( name ); } @Override - public Set getTableNames() { - return schema.getTableNames(); + public Set getEntityNames() { + return namespace.getEntityNames(); } @Override public AlgProtoDataType getType( String name ) { - return schema.getType( name ); + return namespace.getType( name ); } @Override public Set getTypeNames() { - return schema.getTypeNames(); + return namespace.getTypeNames(); } @Override public Collection getFunctions( String name ) { - return schema.getFunctions( name ); + return namespace.getFunctions( name ); } @Override public Set getFunctionNames() { - return schema.getFunctionNames(); + return namespace.getFunctionNames(); } @Override - public Schema getSubSchema( String name ) { - return schema.getSubSchema( name ); + public long getId() { + return namespace.getId(); } @Override - public Set getSubSchemaNames() { - return schema.getSubSchemaNames(); + public Namespace getSubNamespace( String name ) { + return namespace.getSubNamespace( name ); + } + + + @Override + public Set getSubNamespaceNames() { + return namespace.getSubNamespaceNames(); } } diff --git a/core/src/main/java/org/polypheny/db/schema/impl/TableFunctionImpl.java b/core/src/main/java/org/polypheny/db/schema/impl/TableFunctionImpl.java index 095bae96cc..1a78245389 100644 --- a/core/src/main/java/org/polypheny/db/schema/impl/TableFunctionImpl.java +++ b/core/src/main/java/org/polypheny/db/schema/impl/TableFunctionImpl.java @@ -52,11 +52,11 @@ import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; import org.polypheny.db.rex.RexCall; +import org.polypheny.db.schema.Entity; import org.polypheny.db.schema.ImplementableFunction; -import org.polypheny.db.schema.QueryableTable; -import org.polypheny.db.schema.ScannableTable; +import org.polypheny.db.schema.QueryableEntity; +import org.polypheny.db.schema.ScannableEntity; import org.polypheny.db.schema.SchemaPlus; -import org.polypheny.db.schema.Table; import org.polypheny.db.schema.TableFunction; import org.polypheny.db.util.BuiltInMethod; import org.polypheny.db.util.Static; @@ -110,7 +110,7 @@ public static TableFunction create( final Method method ) { } } final Class returnType = method.getReturnType(); - if ( !QueryableTable.class.isAssignableFrom( returnType ) && !ScannableTable.class.isAssignableFrom( returnType ) ) { + if ( !QueryableEntity.class.isAssignableFrom( returnType ) && !ScannableEntity.class.isAssignableFrom( returnType ) ) { return null; } CallImplementor implementor = 
createImplementor( method ); @@ -126,14 +126,14 @@ public AlgDataType getRowType( AlgDataTypeFactory typeFactory, List argu @Override public Type getElementType( List arguments ) { - final Table table = apply( arguments ); - if ( table instanceof QueryableTable ) { - QueryableTable queryableTable = (QueryableTable) table; + final Entity entity = apply( arguments ); + if ( entity instanceof QueryableEntity ) { + QueryableEntity queryableTable = (QueryableEntity) entity; return queryableTable.getElementType(); - } else if ( table instanceof ScannableTable ) { + } else if ( entity instanceof ScannableEntity ) { return Object[].class; } - throw new AssertionError( "Invalid table class: " + table + " " + table.getClass() ); + throw new AssertionError( "Invalid table class: " + entity + " " + entity.getClass() ); } @@ -150,9 +150,9 @@ private static CallImplementor createImplementor( final Method method ) { public Expression implement( RexToLixTranslator translator, RexCall call, List translatedOperands ) { Expression expr = super.implement( translator, call, translatedOperands ); final Class returnType = method.getReturnType(); - if ( QueryableTable.class.isAssignableFrom( returnType ) ) { + if ( QueryableEntity.class.isAssignableFrom( returnType ) ) { Expression queryable = Expressions.call( - Expressions.convert_( expr, QueryableTable.class ), + Expressions.convert_( expr, QueryableEntity.class ), BuiltInMethod.QUERYABLE_TABLE_AS_QUERYABLE.method, Expressions.call( DataContext.ROOT, BuiltInMethod.DATA_CONTEXT_GET_QUERY_PROVIDER.method ), Expressions.constant( null, SchemaPlus.class ), @@ -167,7 +167,7 @@ public Expression implement( RexToLixTranslator translator, RexCall call, List arguments ) { + private Entity apply( List arguments ) { try { Object o = null; if ( !Modifier.isStatic( method.getModifiers() ) ) { @@ -175,7 +175,7 @@ private Table apply( List arguments ) { o = constructor.newInstance(); } final Object table = method.invoke( o, arguments.toArray() ); - return (Table) table; + return (Entity) table; } catch ( IllegalArgumentException e ) { throw Static.RESOURCE.illegalArgumentForTableFunctionCall( method.toString(), diff --git a/core/src/main/java/org/polypheny/db/schema/impl/TableMacroImpl.java b/core/src/main/java/org/polypheny/db/schema/impl/TableMacroImpl.java index f8c28063b0..3924127371 100644 --- a/core/src/main/java/org/polypheny/db/schema/impl/TableMacroImpl.java +++ b/core/src/main/java/org/polypheny/db/schema/impl/TableMacroImpl.java @@ -41,7 +41,7 @@ import java.util.Arrays; import java.util.List; import org.polypheny.db.schema.TableMacro; -import org.polypheny.db.schema.TranslatableTable; +import org.polypheny.db.schema.TranslatableEntity; import org.polypheny.db.util.Static; @@ -81,7 +81,7 @@ public static TableMacro create( final Method method ) { } } final Class returnType = method.getReturnType(); - if ( !TranslatableTable.class.isAssignableFrom( returnType ) ) { + if ( !TranslatableEntity.class.isAssignableFrom( returnType ) ) { return null; } return new TableMacroImpl( method ); @@ -95,14 +95,14 @@ public static TableMacro create( final Method method ) { * @return Table */ @Override - public TranslatableTable apply( List arguments ) { + public TranslatableEntity apply( List arguments ) { try { Object o = null; if ( !Modifier.isStatic( method.getModifiers() ) ) { final Constructor constructor = method.getDeclaringClass().getConstructor(); o = constructor.newInstance(); } - return (TranslatableTable) method.invoke( o, arguments.toArray() ); + return 
(TranslatableEntity) method.invoke( o, arguments.toArray() ); } catch ( IllegalArgumentException e ) { throw new RuntimeException( "Expected " + Arrays.toString( method.getParameterTypes() ) + " actual " + arguments, e ); } catch ( IllegalAccessException | InvocationTargetException | NoSuchMethodException | InstantiationException e ) { diff --git a/core/src/main/java/org/polypheny/db/util/BuiltInMethod.java b/core/src/main/java/org/polypheny/db/util/BuiltInMethod.java index d581f9ae50..d27032fa11 100644 --- a/core/src/main/java/org/polypheny/db/util/BuiltInMethod.java +++ b/core/src/main/java/org/polypheny/db/util/BuiltInMethod.java @@ -35,15 +35,47 @@ import com.google.common.collect.ImmutableMap; +import java.lang.reflect.Constructor; +import java.lang.reflect.Field; +import java.lang.reflect.Method; +import java.sql.ResultSet; +import java.sql.Time; +import java.sql.Timestamp; +import java.util.Arrays; +import java.util.Calendar; +import java.util.Collection; +import java.util.Collections; +import java.util.Comparator; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.TimeZone; import org.apache.calcite.avatica.util.DateTimeUtils; import org.apache.calcite.avatica.util.TimeUnitRange; -import org.apache.calcite.linq4j.*; -import org.apache.calcite.linq4j.function.*; +import org.apache.calcite.linq4j.AbstractEnumerable; +import org.apache.calcite.linq4j.CorrelateJoinType; +import org.apache.calcite.linq4j.Enumerable; +import org.apache.calcite.linq4j.EnumerableDefaults; +import org.apache.calcite.linq4j.Enumerator; +import org.apache.calcite.linq4j.ExtendedEnumerable; +import org.apache.calcite.linq4j.Linq4j; +import org.apache.calcite.linq4j.Queryable; +import org.apache.calcite.linq4j.function.EqualityComparer; +import org.apache.calcite.linq4j.function.Function0; +import org.apache.calcite.linq4j.function.Function1; +import org.apache.calcite.linq4j.function.Function2; +import org.apache.calcite.linq4j.function.Predicate1; +import org.apache.calcite.linq4j.function.Predicate2; import org.apache.calcite.linq4j.tree.FunctionExpression; import org.apache.calcite.linq4j.tree.Primitive; import org.apache.calcite.linq4j.tree.Types; import org.polypheny.db.adapter.DataContext; -import org.polypheny.db.adapter.enumerable.*; +import org.polypheny.db.adapter.enumerable.AggregateLambdaFactory; +import org.polypheny.db.adapter.enumerable.BatchIteratorEnumerable; +import org.polypheny.db.adapter.enumerable.OrderedAggregateLambdaFactory; +import org.polypheny.db.adapter.enumerable.SequencedAdderAggregateLambdaFactory; +import org.polypheny.db.adapter.enumerable.SourceSorter; import org.polypheny.db.adapter.enumerable.lpg.EnumerableLpgMatch.MatchEnumerable; import org.polypheny.db.adapter.java.ReflectiveSchema; import org.polypheny.db.algebra.constant.ExplainLevel; @@ -52,31 +84,62 @@ import org.polypheny.db.algebra.json.JsonQueryEmptyOrErrorBehavior; import org.polypheny.db.algebra.json.JsonQueryWrapperBehavior; import org.polypheny.db.algebra.json.JsonValueEmptyOrErrorBehavior; +import org.polypheny.db.algebra.metadata.BuiltInMetadata.AllPredicates; import org.polypheny.db.algebra.metadata.BuiltInMetadata.Collation; -import org.polypheny.db.algebra.metadata.BuiltInMetadata.*; +import org.polypheny.db.algebra.metadata.BuiltInMetadata.ColumnOrigin; +import org.polypheny.db.algebra.metadata.BuiltInMetadata.ColumnUniqueness; +import org.polypheny.db.algebra.metadata.BuiltInMetadata.CumulativeCost; +import 
org.polypheny.db.algebra.metadata.BuiltInMetadata.DistinctRowCount; +import org.polypheny.db.algebra.metadata.BuiltInMetadata.Distribution; +import org.polypheny.db.algebra.metadata.BuiltInMetadata.ExplainVisibility; +import org.polypheny.db.algebra.metadata.BuiltInMetadata.ExpressionLineage; +import org.polypheny.db.algebra.metadata.BuiltInMetadata.MaxRowCount; +import org.polypheny.db.algebra.metadata.BuiltInMetadata.Memory; +import org.polypheny.db.algebra.metadata.BuiltInMetadata.MinRowCount; +import org.polypheny.db.algebra.metadata.BuiltInMetadata.NodeTypes; +import org.polypheny.db.algebra.metadata.BuiltInMetadata.NonCumulativeCost; +import org.polypheny.db.algebra.metadata.BuiltInMetadata.Parallelism; +import org.polypheny.db.algebra.metadata.BuiltInMetadata.PercentageOriginalRows; +import org.polypheny.db.algebra.metadata.BuiltInMetadata.PopulationSize; +import org.polypheny.db.algebra.metadata.BuiltInMetadata.Predicates; +import org.polypheny.db.algebra.metadata.BuiltInMetadata.RowCount; +import org.polypheny.db.algebra.metadata.BuiltInMetadata.Selectivity; +import org.polypheny.db.algebra.metadata.BuiltInMetadata.Size; +import org.polypheny.db.algebra.metadata.BuiltInMetadata.TableReferences; +import org.polypheny.db.algebra.metadata.BuiltInMetadata.UniqueKeys; import org.polypheny.db.algebra.metadata.Metadata; import org.polypheny.db.interpreter.Context; import org.polypheny.db.interpreter.Row; import org.polypheny.db.interpreter.Scalar; import org.polypheny.db.rex.RexNode; -import org.polypheny.db.runtime.*; +import org.polypheny.db.runtime.ArrayBindable; +import org.polypheny.db.runtime.BinarySearch; +import org.polypheny.db.runtime.Bindable; +import org.polypheny.db.runtime.Enumerables; +import org.polypheny.db.runtime.FlatLists; +import org.polypheny.db.runtime.RandomFunction; +import org.polypheny.db.runtime.SortedMultiMap; +import org.polypheny.db.runtime.Utilities; import org.polypheny.db.runtime.functions.CrossModelFunctions; import org.polypheny.db.runtime.functions.CypherFunctions; import org.polypheny.db.runtime.functions.Functions; import org.polypheny.db.runtime.functions.Functions.FlatProductInputType; import org.polypheny.db.runtime.functions.MqlFunctions; -import org.polypheny.db.schema.*; -import org.polypheny.db.schema.graph.*; +import org.polypheny.db.schema.FilterableEntity; +import org.polypheny.db.schema.ModifiableEntity; +import org.polypheny.db.schema.Namespace; +import org.polypheny.db.schema.ProjectableFilterableEntity; +import org.polypheny.db.schema.QueryableEntity; +import org.polypheny.db.schema.ScannableEntity; +import org.polypheny.db.schema.SchemaPlus; +import org.polypheny.db.schema.Schemas; +import org.polypheny.db.schema.graph.GraphPropertyHolder; +import org.polypheny.db.schema.graph.PolyEdge; +import org.polypheny.db.schema.graph.PolyGraph; +import org.polypheny.db.schema.graph.PolyNode; +import org.polypheny.db.schema.graph.PolyPath; import org.polypheny.db.type.PolyType; -import java.lang.reflect.Constructor; -import java.lang.reflect.Field; -import java.lang.reflect.Method; -import java.sql.ResultSet; -import java.sql.Time; -import java.sql.Timestamp; -import java.util.*; - /** * Built-in methods. 
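The enum below mirrors the pattern BuiltInMethod uses: each constant resolves its java.lang.reflect.Method exactly once, at class-initialization time, so code generation can reference well-known calls without repeated lookups; plain JDK reflection keeps the sketch self-contained.

import java.lang.reflect.Method;

enum KnownMethod {
    STRING_TO_UPPER( String.class, "toUpperCase" );

    final Method method;


    KnownMethod( Class<?> clazz, String name, Class<?>... argTypes ) {
        try {
            this.method = clazz.getMethod( name, argTypes );
        } catch ( NoSuchMethodException e ) {
            // Failing fast here surfaces a typo as soon as the enum loads.
            throw new IllegalStateException( e );
        }
    }
}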
@@ -92,19 +155,19 @@ public enum BuiltInMethod { PARSE_ARRAY_FROM_TEXT( Functions.class, "reparse", PolyType.class, Long.class, String.class ), QUERYABLE_SELECT( Queryable.class, "select", FunctionExpression.class ), QUERYABLE_AS_ENUMERABLE( Queryable.class, "asEnumerable" ), - QUERYABLE_TABLE_AS_QUERYABLE( QueryableTable.class, "asQueryable", DataContext.class, SchemaPlus.class, String.class ), + QUERYABLE_TABLE_AS_QUERYABLE( QueryableEntity.class, "asQueryable", DataContext.class, SchemaPlus.class, String.class ), AS_QUERYABLE( Enumerable.class, "asQueryable" ), ABSTRACT_ENUMERABLE_CTOR( AbstractEnumerable.class ), BATCH_ITERATOR_CTOR( BatchIteratorEnumerable.class ), BATCH_ITERATOR_GET_ENUM( BatchIteratorEnumerable.class, "getEnumerable" ), INTO( ExtendedEnumerable.class, "into", Collection.class ), REMOVE_ALL( ExtendedEnumerable.class, "removeAll", Collection.class ), - SCHEMA_GET_SUB_SCHEMA( Schema.class, "getSubSchema", String.class ), - SCHEMA_GET_TABLE( Schema.class, "getTable", String.class ), + SCHEMA_GET_SUB_SCHEMA( Namespace.class, "getSubNamespace", String.class ), + SCHEMA_GET_TABLE( Namespace.class, "getEntity", String.class ), SCHEMA_PLUS_UNWRAP( SchemaPlus.class, "unwrap", Class.class ), - SCHEMAS_ENUMERABLE_SCANNABLE( Schemas.class, "enumerable", ScannableTable.class, DataContext.class ), - SCHEMAS_ENUMERABLE_FILTERABLE( Schemas.class, "enumerable", FilterableTable.class, DataContext.class ), - SCHEMAS_ENUMERABLE_PROJECTABLE_FILTERABLE( Schemas.class, "enumerable", ProjectableFilterableTable.class, DataContext.class ), + SCHEMAS_ENUMERABLE_SCANNABLE( Schemas.class, "enumerable", ScannableEntity.class, DataContext.class ), + SCHEMAS_ENUMERABLE_FILTERABLE( Schemas.class, "enumerable", FilterableEntity.class, DataContext.class ), + SCHEMAS_ENUMERABLE_PROJECTABLE_FILTERABLE( Schemas.class, "enumerable", ProjectableFilterableEntity.class, DataContext.class ), SCHEMAS_QUERYABLE( Schemas.class, "queryable", DataContext.class, SchemaPlus.class, Class.class, String.class ), REFLECTIVE_SCHEMA_GET_TARGET( ReflectiveSchema.class, "getTarget" ), DATA_CONTEXT_GET( DataContext.class, "get", String.class ), @@ -252,8 +315,8 @@ public enum BuiltInMethod { GREATER( Functions.class, "greater", Comparable.class, Comparable.class ), BIT_AND( Functions.class, "bitAnd", long.class, long.class ), BIT_OR( Functions.class, "bitOr", long.class, long.class ), - MODIFIABLE_TABLE_GET_MODIFIABLE_COLLECTION( ModifiableTable.class, "getModifiableCollection" ), - SCANNABLE_TABLE_SCAN( ScannableTable.class, "scan", DataContext.class ), + MODIFIABLE_TABLE_GET_MODIFIABLE_COLLECTION( ModifiableEntity.class, "getModifiableCollection" ), + SCANNABLE_TABLE_SCAN( ScannableEntity.class, "scan", DataContext.class ), STRING_TO_BOOLEAN( Functions.class, "toBoolean", String.class ), INTERNAL_TO_DATE( Functions.class, "internalToDate", int.class ), INTERNAL_TO_TIME( Functions.class, "internalToTime", int.class ), diff --git a/core/src/main/java/org/polypheny/db/util/ValidatorUtil.java b/core/src/main/java/org/polypheny/db/util/ValidatorUtil.java index bfd6abcc68..cd3d3c80a7 100644 --- a/core/src/main/java/org/polypheny/db/util/ValidatorUtil.java +++ b/core/src/main/java/org/polypheny/db/util/ValidatorUtil.java @@ -278,7 +278,7 @@ public static TypeEntry getTypeEntry( PolyphenyDbSchema rootSchema, Identifier t if ( schema == rootSchema && NameMatchers.withCaseSensitive( true ).matches( p, schema.getName() ) ) { continue; } - schema = schema.getSubSchema( p, true ); + schema = schema.getSubNamespace( p, true ); } return 
schema == null ? null : schema.getType( name, false ); } @@ -334,7 +334,7 @@ public static PolyphenyDbSchema getSchema( PolyphenyDbSchema rootSchema, Iterabl if ( schema == rootSchema && nameMatcher.matches( schemaName, schema.getName() ) ) { continue; } - schema = schema.getSubSchema( schemaName, nameMatcher.isCaseSensitive() ); + schema = schema.getSubNamespace( schemaName, nameMatcher.isCaseSensitive() ); if ( schema == null ) { return null; } diff --git a/core/src/main/resources/org/polypheny/db/runtime/PolyphenyDbResource.properties b/core/src/main/resources/org/polypheny/db/runtime/PolyphenyDbResource.properties index a39b26143a..e463f50721 100644 --- a/core/src/main/resources/org/polypheny/db/runtime/PolyphenyDbResource.properties +++ b/core/src/main/resources/org/polypheny/db/runtime/PolyphenyDbResource.properties @@ -77,30 +77,30 @@ ColumnNotNullable=Column ''{0}'' has no default value and does not allow NULLs TypeNotAssignable=Cannot assign to target field ''{0}'' of type {1} from source field ''{2}'' of type {3} ExceededCardinality=Array in column ''{0}'' with cardinality {1,number} exceeds max-cardinality of {2,number} ExceededDimension=Array in column ''{0}'' with dimension {1,number} exceeds max-dimension of {2,number} -DatabaseNotFound=Database ''{0}'' not found -NotValidJson=Value ''{0}'' is not valid JSON; {1} -TableNameNotFound=Table ''{0}'' not found -TableNotFound=Table ''{0}'' not found -TableNameNotFoundDidYouMean=Table ''{0}'' not found; did you mean ''{1}''? -ObjectNotFound=Object ''{0}'' not found -ObjectNotFoundWithin=Object ''{0}'' not found within ''{1}'' -ObjectNotFoundDidYouMean=Object ''{0}'' not found; did you mean ''{1}''? -ObjectNotFoundWithinDidYouMean=Object ''{0}'' not found within ''{1}''; did you mean ''{2}''? -NotASequence=Table ''{0}'' is not a sequence -ColumnNotFound=Column ''{0}'' not found in any table -ColumnNotFoundDidYouMean=Column ''{0}'' not found in any table; did you mean ''{1}''? -ColumnNotFoundInTable=Column ''{0}'' not found in table ''{1}'' -ColumnNotFoundInTableDidYouMean=Column ''{0}'' not found in table ''{1}''; did you mean ''{2}''? -ColumnAmbiguous=Column ''{0}'' is ambiguous -NeedQueryOp=Operand {0} must be a query -NeedSameTypeParameter=Parameters must be of the same type -CanNotApplyOp2Type=Cannot apply ''{0}'' to arguments of type {1}. Supported form(s): {2} -ExpectedBoolean=Expected a boolean type -ExpectedCharacter=Expected a character type -ExpectedMultimedia=Expected a multimedia type -MustNotNullInElse=ELSE clause or at least one THEN clause must be non-NULL -FunctionUndefined=Function ''{0}'' is not defined -WrongNumberOfParam=Encountered {0} with {1,number} parameter(s); was expecting {2} +DatabaseNotFound = Database ''{0}'' not found +NotValidJson = Value ''{0}'' is not valid JSON; {1} +TableNameNotFound = Table ''{0}'' not found +TableNotFound = Table ''{0}'' not found +TableNameNotFoundDidYouMean = Table ''{0}'' not found; did you mean ''{1}''? +ObjectNotFound = Object ''{0}'' not found +ObjectNotFoundWithin = Object ''{0}'' not found within ''{1}'' +ObjectNotFoundDidYouMean = Object ''{0}'' not found; did you mean ''{1}''? +ObjectNotFoundWithinDidYouMean = Object ''{0}'' not found within ''{1}''; did you mean ''{2}''? +NotASequence = Table ''{0}'' is not a sequence +ColumnNotFound = Column ''{0}'' not found in any entity +ColumnNotFoundDidYouMean = Column ''{0}'' not found in any entity; did you mean ''{1}''? 
+ColumnNotFoundInTable = Column ''{0}'' not found in entity ''{1}'' +ColumnNotFoundInTableDidYouMean = Column ''{0}'' not found in entity ''{1}''; did you mean ''{2}''? +ColumnAmbiguous = Column ''{0}'' is ambiguous +NeedQueryOp = Operand {0} must be a query +NeedSameTypeParameter = Parameters must be of the same type +CanNotApplyOp2Type = Cannot apply ''{0}'' to arguments of type {1}. Supported form(s): {2} +ExpectedBoolean = Expected a boolean type +ExpectedCharacter = Expected a character type +ExpectedMultimedia = Expected a multimedia type +MustNotNullInElse = ELSE clause or at least one THEN clause must be non-NULL +FunctionUndefined = Function ''{0}'' is not defined +WrongNumberOfParam = Encountered {0} with {1,number} parameter(s); was expecting {2} IllegalMixingOfTypes=Illegal mixing of types in CASE or COALESCE statement InvalidCompare=Invalid compare. Comparing (collation, coercibility): ({0}, {1} with ({2}, {3}) is illegal DifferentCollations=Invalid syntax. Two explicit different collations ({0}, {1}) are illegal @@ -112,9 +112,9 @@ TypeNotComparableEachOther=Types {0} not comparable to each other NumberLiteralOutOfRange=Numeric literal ''{0}'' out of range DateLiteralOutOfRange=Date literal ''{0}'' out of range StringFragsOnSameLine=String literal continued on same line -AliasMustBeSimpleIdentifier=Table or column alias must be a simple identifier -AliasListDegree=List of column aliases must have same degree as table; table has {0,number,#} columns {1}, whereas alias list has {2,number,#} columns -AliasListDuplicate=Duplicate name ''{0}'' in column alias list +AliasMustBeSimpleIdentifier = Table or column alias must be a simple identifier +AliasListDegree = List of column aliases must have same degree as entity; entity has {0,number,#} columns {1}, whereas alias list has {2,number,#} columns +AliasListDuplicate = Duplicate name ''{0}'' in column alias list JoinRequiresCondition=INNER, LEFT, RIGHT or FULL join requires a condition (NATURAL keyword or ON or USING clause) CrossJoinDisallowsCondition=Cannot specify condition (NATURAL keyword, or ON or USING clause) following CROSS JOIN NaturalDisallowsOnOrUsing=Cannot specify NATURAL keyword with ON or USING clause @@ -195,9 +195,9 @@ ArgumentMustNotBeNull=Argument to function ''{0}'' must not be NULL NullIllegal=Illegal use of ''NULL'' DynamicParamIllegal=Illegal use of dynamic parameter InvalidBoolean=''{0}'' is not a valid boolean value -ArgumentMustBeValidPrecision=Argument to function ''{0}'' must be a valid precision between ''{1,number,#}'' and ''{2,number,#}'' -IllegalArgumentForTableFunctionCall=Wrong arguments for table function ''{0}'' call. Expected ''{1}'', actual ''{2}'' -InvalidDatetimeFormat=''{0}'' is not a valid datetime format +ArgumentMustBeValidPrecision = Argument to function ''{0}'' must be a valid precision between ''{1,number,#}'' and ''{2,number,#}'' +IllegalArgumentForTableFunctionCall = Wrong arguments for entity function ''{0}'' call. 
Expected ''{1}'', actual ''{2}'' +InvalidDatetimeFormat = ''{0}'' is not a valid datetime format InsertIntoAlwaysGenerated=Cannot INSERT into generated column ''{0}'' ArgumentMustHaveScaleZero=Argument to function ''{0}'' must have a scale of 0 PreparationAborted=Statement preparation aborted @@ -212,9 +212,9 @@ SQLFeature_T613=TABLESAMPLE not supported SQLConformance_MultipleActiveAutocommitStatements=Execution of a new autocommit statement while a cursor is still open on same connection is not supported SQLConformance_OrderByDesc=Descending sort (ORDER BY DESC) not supported SharedStatementPlans=Sharing of cached statement plans not supported -SQLFeatureExt_T613_Substitution=TABLESAMPLE SUBSTITUTE not supported -PersonalityManagesRowCount=Personality does not maintain table''s row count in the catalog -PersonalitySupportsSnapshots=Personality does not support snapshot reads +SQLFeatureExt_T613_Substitution = TABLESAMPLE SUBSTITUTE not supported +PersonalityManagesRowCount = Personality does not maintain entity''s row count in the catalog +PersonalitySupportsSnapshots = Personality does not support snapshot reads PersonalitySupportsLabels=Personality does not support labels DuplicateNameInColumnList=Duplicate name ''{0}'' in column list RequireAtLeastOneArg=Require at least 1 argument @@ -227,28 +227,28 @@ RequireDefaultConstructor=Declaring class ''{0}'' of non-static user-defined fun FirstParameterOfAdd=In user-defined aggregate class ''{0}'', first parameter to ''add'' method must be the accumulator (the return type of the ''init'' method) FilterableTableInventedFilter = FilterableTable.scan returned a filter that was not in the original list: {0} FilterableScanReturnedNull = FilterableTable.scan must not return null -CannotConvertToStream = Cannot convert table ''{0}'' to stream -CannotConvertToRelation=Cannot convert stream ''{0}'' to relation -StreamMustGroupByMonotonic=Streaming aggregation requires at least one monotonic expression in GROUP BY clause -StreamMustOrderByMonotonic=Streaming ORDER BY must start with monotonic expression -StreamSetOpInconsistentInputs=Set operator cannot combine streaming and non-streaming inputs -CannotStreamValues=Cannot stream VALUES -CyclicDefinition=Cannot resolve ''{0}''; it references view ''{1}'', whose definition is cyclic -ModifiableViewMustBeBasedOnSingleTable=Modifiable view must be based on a single table -ModifiableViewMustHaveOnlyEqualityPredicates=Modifiable view must be predicated only on equality expressions -MoreThanOneMappedColumn=View is not modifiable. More than one expression maps to column ''{0}'' of base table ''{1}'' -NoValueSuppliedForViewColumn=View is not modifiable. No value is supplied for NOT NULL column ''{0}'' of base table ''{1}'' -ViewConstraintNotSatisfied=Modifiable view constraint is not satisfied for column ''{0}'' of base table ''{1}'' -StarRequiresRecordType=Not a record type. The ''*'' operator requires a record -FilterMustBeBoolean=FILTER expression must be of type BOOLEAN -CannotStreamResultsForNonStreamingInputs=Cannot stream results of a query with no streaming inputs: ''{0}''. 
At least one input should be convertible to a stream -MinusNotAllowed=MINUS is not allowed under the current SQL conformance level -SelectMissingFrom=SELECT must have a FROM clause -GroupFunctionMustAppearInGroupByClause=Group function ''{0}'' can only appear in GROUP BY clause -AuxiliaryWithoutMatchingGroupCall=Call to auxiliary group function ''{0}'' must have matching call to group function ''{1}'' in GROUP BY clause -PatternVarAlreadyDefined=Pattern variable ''{0}'' has already been defined -PatternPrevFunctionInMeasure=Cannot use PREV/NEXT in MEASURE ''{0}'' -PatternPrevFunctionOrder=Cannot nest PREV/NEXT under LAST/FIRST ''{0}'' +CannotConvertToStream = Cannot convert entity ''{0}'' to stream +CannotConvertToRelation = Cannot convert stream ''{0}'' to relation +StreamMustGroupByMonotonic = Streaming aggregation requires at least one monotonic expression in GROUP BY clause +StreamMustOrderByMonotonic = Streaming ORDER BY must start with monotonic expression +StreamSetOpInconsistentInputs = Set operator cannot combine streaming and non-streaming inputs +CannotStreamValues = Cannot stream VALUES +CyclicDefinition = Cannot resolve ''{0}''; it references view ''{1}'', whose definition is cyclic +ModifiableViewMustBeBasedOnSingleTable = Modifiable view must be based on a single entity +ModifiableViewMustHaveOnlyEqualityPredicates = Modifiable view must be predicated only on equality expressions +MoreThanOneMappedColumn = View is not modifiable. More than one expression maps to column ''{0}'' of base entity ''{1}'' +NoValueSuppliedForViewColumn = View is not modifiable. No value is supplied for NOT NULL column ''{0}'' of base entity ''{1}'' +ViewConstraintNotSatisfied = Modifiable view constraint is not satisfied for column ''{0}'' of base entity ''{1}'' +StarRequiresRecordType = Not a record type. The ''*'' operator requires a record +FilterMustBeBoolean = FILTER expression must be of type BOOLEAN +CannotStreamResultsForNonStreamingInputs = Cannot stream results of a query with no streaming inputs: ''{0}''. 
At least one input should be convertible to a stream +MinusNotAllowed = MINUS is not allowed under the current SQL conformance level +SelectMissingFrom = SELECT must have a FROM clause +GroupFunctionMustAppearInGroupByClause = Group function ''{0}'' can only appear in GROUP BY clause +AuxiliaryWithoutMatchingGroupCall = Call to auxiliary group function ''{0}'' must have matching call to group function ''{1}'' in GROUP BY clause +PatternVarAlreadyDefined = Pattern variable ''{0}'' has already been defined +PatternPrevFunctionInMeasure = Cannot use PREV/NEXT in MEASURE ''{0}'' +PatternPrevFunctionOrder = Cannot nest PREV/NEXT under LAST/FIRST ''{0}'' PatternAggregationInNavigation=Cannot use aggregation in navigation ''{0}'' PatternCountFunctionArg=Invalid number of parameters to COUNT method PatternRunningFunctionInDefine=Cannot use RUNNING/FINAL in DEFINE ''{0}'' @@ -264,8 +264,8 @@ RolledUpNotAllowed=Rolled up column ''{0}'' is not allowed in {1} SchemaExists=Schema ''{0}'' already exists ColumnExists = Column ''{0}'' already exists NotNullAndNoDefaultValue = Column ''{0}'' is defined NOT NULL and has no default value assigned -SchemaInvalidType = Invalid schema type ''{0}''; valid values: {1} -TableExists=Table ''{0}'' already exists +SchemaInvalidType = Invalid namespace type ''{0}''; valid values: {1} +TableExists = Table ''{0}'' already exists CreateTableRequiresColumnList=Missing column list CreateTableRequiresColumnTypes=Type required for column ''{0}'' in CREATE TABLE without AS ViewExists=View ''{0}'' already exists and REPLACE not specified @@ -297,14 +297,14 @@ IllegalEmptyBehaviorInJsonQueryFunc=Illegal empty behavior ''{0}'' specified in ArrayOrObjectValueRequiredInStrictModeOfJsonQueryFunc=Strict jsonpath mode requires array or object value, and the actual value is: ''{0}'' IllegalErrorBehaviorInJsonQueryFunc=Illegal error behavior ''{0}'' specified in JSON_VALUE function NullKeyOfJsonObjectNotAllowed=Null key of JSON object is not allowed -QueryExecutionTimeoutReached=Timeout of ''{0}'' ms for query execution is reached. Query execution started at ''{1}'' -ExceptionWhilePerformingQueryOnJdbcSubSchema=While executing SQL [{0}] on JDBC sub-schema -UnknownStoreName=There is no data store with this name: ''{0}'' -PlacementAlreadyExists=Table ''{0}'' is already placed on store ''{1}'' -PlacementDoesNotExist=There is no placement of table ''{1}'' on store ''{0}'' -PlacementIsPrimaryKey=The column ''{0}'' is part of the primary key and cannot be dropped -OnlyOnePlacementLeft=There needs to be at least one placement per table -UnknownIndexMethod=The specified data store does not support the index method ''{0}''! +QueryExecutionTimeoutReached = Timeout of ''{0}'' ms for query execution is reached. Query execution started at ''{1}'' +ExceptionWhilePerformingQueryOnJdbcSubSchema = While executing SQL [{0}] on JDBC sub-namespace +UnknownStoreName = There is no data store with this name: ''{0}'' +PlacementAlreadyExists = Table ''{0}'' is already placed on store ''{1}'' +PlacementDoesNotExist = There is no placement of entity ''{1}'' on store ''{0}'' +PlacementIsPrimaryKey = The column ''{0}'' is part of the primary key and cannot be dropped +OnlyOnePlacementLeft = There needs to be at least one placement per entity +UnknownIndexMethod = The specified data store does not support the index method ''{0}''! MissingColumnPlacement=There is no placement of column ''{0}'' on the specified data store! 
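The message templates above are java.text.MessageFormat patterns: {0}, {1}, ... are positional arguments, and a literal single quote must be doubled, which is why quoted identifiers appear as ''{0}'' throughout the file. A minimal sketch of how one of these templates resolves, using only the JDK API (the argument values are made up for illustration):

    import java.text.MessageFormat;

    public class ResourceMessageSketch {

        public static void main( String[] args ) {
            // Template copied from PolyphenyDbResource.properties above.
            String template = "Column ''{0}'' not found in any entity; did you mean ''{1}''?";
            // MessageFormat collapses the doubled single quotes into literal ones.
            String message = MessageFormat.format( template, "AGE", "WAGE" );
            System.out.println( message );
            // Prints: Column 'AGE' not found in any entity; did you mean 'WAGE'?
        }
    }
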
IndexPreventsRemovalOfPlacement=Unable to remove placement of column ''{0}'' because it is part of the index ''{1}''! IndexExists=There is already an index with the name ''{0}''! diff --git a/core/src/test/java/org/polypheny/db/catalog/MockCatalogReader.java b/core/src/test/java/org/polypheny/db/catalog/MockCatalogReader.java index ec6cf0adb4..f4d26c6d0f 100644 --- a/core/src/test/java/org/polypheny/db/catalog/MockCatalogReader.java +++ b/core/src/test/java/org/polypheny/db/catalog/MockCatalogReader.java @@ -38,7 +38,6 @@ import com.google.common.collect.Iterables; import java.lang.reflect.Type; import java.util.ArrayList; -import java.util.Collection; import java.util.HashSet; import java.util.List; import java.util.Map; @@ -75,17 +74,17 @@ import org.polypheny.db.prepare.Prepare.AbstractPreparingEntity; import org.polypheny.db.prepare.Prepare.PreparingEntity; import org.polypheny.db.schema.AbstractPolyphenyDbSchema; -import org.polypheny.db.schema.CustomColumnResolvingTable; -import org.polypheny.db.schema.ExtensibleTable; +import org.polypheny.db.schema.CustomColumnResolvingEntity; +import org.polypheny.db.schema.Entity; +import org.polypheny.db.schema.ExtensibleEntity; import org.polypheny.db.schema.PolyphenyDbSchema; -import org.polypheny.db.schema.Schema; import org.polypheny.db.schema.SchemaPlus; import org.polypheny.db.schema.Statistic; -import org.polypheny.db.schema.StreamableTable; -import org.polypheny.db.schema.Table; +import org.polypheny.db.schema.StreamableEntity; +import org.polypheny.db.schema.TableType; import org.polypheny.db.schema.Wrapper; -import org.polypheny.db.schema.impl.AbstractSchema; -import org.polypheny.db.test.JdbcTest; +import org.polypheny.db.schema.impl.AbstractNamespace; +import org.polypheny.db.test.JdbcTest.AbstractModifiableEntity; import org.polypheny.db.util.AccessType; import org.polypheny.db.util.ImmutableBitSet; import org.polypheny.db.util.InitializerExpressionFactory; @@ -122,7 +121,6 @@ public MockCatalogReader( AlgDataTypeFactory typeFactory, boolean caseSensitive } - /** * Initializes this catalog reader. */ @@ -148,7 +146,7 @@ protected void registerTablesWithRollUp( MockSchema schema, Fixture f ) { // Register nested schema NEST that contains table with a rolled up column. MockSchema nestedSchema = new MockSchema( "NEST" ); - registerNestedSchema( schema, nestedSchema ); + registerNestedSchema( schema, nestedSchema, -1 ); // Register "EMP_R" table which contains a rolled up column in NEST schema. 
ImmutableList tablePath = ImmutableList.of( schema.getCatalogName(), schema.name, nestedSchema.name, "EMP_R" ); @@ -172,13 +170,13 @@ protected void registerType( final List names, final AlgProtoDataType al protected void registerTable( final MockEntity table ) { table.onRegister( typeFactory ); - final WrapperTable wrapperTable = new WrapperTable( table ); + final WrapperEntity wrapperTable = new WrapperEntity( table ); if ( table.stream ) { registerTable( table.names, - new StreamableWrapperTable( table ) { + new StreamableWrapperEntity( table ) { @Override - public Table stream() { + public Entity stream() { return wrapperTable; } } ); @@ -188,22 +186,22 @@ public Table stream() { } - private void registerTable( final List names, final Table table ) { + private void registerTable( final List names, final Entity entity ) { assert names.get( 0 ).equals( DEFAULT_CATALOG ); final List schemaPath = Util.skipLast( names ); final String tableName = Util.last( names ); final PolyphenyDbSchema schema = ValidatorUtil.getSchema( rootSchema, schemaPath, NameMatchers.withCaseSensitive( true ) ); - schema.add( tableName, table ); + schema.add( tableName, entity ); } - protected void registerSchema( MockSchema schema ) { - rootSchema.add( schema.name, new AbstractSchema(), NamespaceType.RELATIONAL ); + protected void registerSchema( MockSchema schema, long id ) { + rootSchema.add( schema.name, new AbstractNamespace( id ), NamespaceType.RELATIONAL ); } - private void registerNestedSchema( MockSchema parentSchema, MockSchema schema ) { - rootSchema.getSubSchema( parentSchema.getName(), true ).add( schema.name, new AbstractSchema(), NamespaceType.RELATIONAL ); + private void registerNestedSchema( MockSchema parentSchema, MockSchema schema, long id ) { + rootSchema.getSubNamespace( parentSchema.getName(), true ).add( schema.name, new AbstractNamespace( id ), NamespaceType.RELATIONAL ); } @@ -336,9 +334,9 @@ protected MockEntity( /** * Implementation of AbstractModifiableTable. 
*/ - private class ModifiableTable extends JdbcTest.AbstractModifiableTable implements ExtensibleTable, Wrapper { + private class ModifiableEntity extends AbstractModifiableEntity implements ExtensibleEntity, Wrapper { - protected ModifiableTable( String tableName ) { + protected ModifiableEntity( String tableName ) { super( tableName ); } @@ -385,11 +383,11 @@ public C unwrap( Class aClass ) { @Override - public Table extend( final List fields ) { - return new ModifiableTable( Util.last( names ) ) { + public Entity extend( final List fields ) { + return new ModifiableEntity( Util.last( names ) ) { @Override public AlgDataType getRowType( AlgDataTypeFactory typeFactory ) { - ImmutableList allFields = ImmutableList.copyOf( Iterables.concat( ModifiableTable.this.getRowType( typeFactory ).getFieldList(), fields ) ); + ImmutableList allFields = ImmutableList.copyOf( Iterables.concat( ModifiableEntity.this.getRowType( typeFactory ).getFieldList(), fields ) ); return typeFactory.createStructType( allFields ); } }; @@ -405,11 +403,11 @@ public int getExtendedColumnOffset() { @Override - protected AlgOptEntity extend( final Table extendedTable ) { + protected AlgOptEntity extend( final Entity extendedEntity ) { return new MockEntity( catalogReader, names, stream, rowCount, resolver, initializerFactory ) { @Override public AlgDataType getRowType() { - return extendedTable.getRowType( catalogReader.typeFactory ); + return extendedEntity.getRowType( catalogReader.typeFactory ); } }; } @@ -449,11 +447,11 @@ public T unwrap( Class clazz ) { if ( clazz.isInstance( initializerFactory ) ) { return clazz.cast( initializerFactory ); } - if ( clazz.isAssignableFrom( Table.class ) ) { - final Table table = resolver == null - ? new ModifiableTable( Util.last( names ) ) - : new ModifiableTableWithCustomColumnResolving( Util.last( names ) ); - return clazz.cast( table ); + if ( clazz.isAssignableFrom( Entity.class ) ) { + final Entity entity = resolver == null + ? new ModifiableEntity( Util.last( names ) ) + : new ModifiableEntityWithCustomColumnResolving( Util.last( names ) ); + return clazz.cast( entity ); } return null; } @@ -525,7 +523,6 @@ public List getQualifiedName() { } - @Override public Monotonicity getMonotonicity( String columnName ) { return monotonicColumnSet.contains( columnName ) @@ -582,11 +579,11 @@ public StructKind getKind() { /** - * Subclass of {@link ModifiableTable} that also implements {@link CustomColumnResolvingTable}. + * Subclass of {@link ModifiableEntity} that also implements {@link CustomColumnResolvingEntity}. */ - private class ModifiableTableWithCustomColumnResolving extends ModifiableTable implements CustomColumnResolvingTable, Wrapper { + private class ModifiableEntityWithCustomColumnResolving extends ModifiableEntity implements CustomColumnResolvingEntity, Wrapper { - ModifiableTableWithCustomColumnResolving( String tableName ) { + ModifiableEntityWithCustomColumnResolving( String tableName ) { super( tableName ); } @@ -632,14 +629,14 @@ public AlgNode toAlg( ToAlgContext context, AlgTraitSet traitSet ) { /** - * Wrapper around a {@link MockEntity}, giving it a {@link Table} interface. You can get the {@code MockTable} by calling {@link #unwrap(Class)}. + * Wrapper around a {@link MockEntity}, giving it a {@link Entity} interface. You can get the {@code MockTable} by calling {@link #unwrap(Class)}. 
*/ - private static class WrapperTable implements Table, Wrapper { + private static class WrapperEntity implements Entity, Wrapper { private final MockEntity table; - WrapperTable( MockEntity table ) { + WrapperEntity( MockEntity table ) { this.table = table; } @@ -696,7 +693,7 @@ public AlgDistribution getDistribution() { @Override - public Long getTableId() { + public Long getId() { throw new RuntimeException( "Method getTableId is not implemented." ); } @@ -715,25 +712,25 @@ public boolean rolledUpColumnValidInsideAgg( String column, Call call, Node pare @Override - public Schema.TableType getJdbcTableType() { - return table.stream ? Schema.TableType.STREAM : Schema.TableType.TABLE; + public TableType getJdbcTableType() { + return table.stream ? TableType.STREAM : TableType.TABLE; } } /** - * Wrapper around a {@link MockEntity}, giving it a {@link StreamableTable} interface. + * Wrapper around a {@link MockEntity}, giving it a {@link StreamableEntity} interface. */ - private static class StreamableWrapperTable extends WrapperTable implements StreamableTable { + private static class StreamableWrapperEntity extends WrapperEntity implements StreamableEntity { - StreamableWrapperTable( MockEntity table ) { + StreamableWrapperEntity( MockEntity table ) { super( table ); } @Override - public Table stream() { + public Entity stream() { return this; } diff --git a/core/src/test/java/org/polypheny/db/catalog/MockCatalogReaderDocument.java b/core/src/test/java/org/polypheny/db/catalog/MockCatalogReaderDocument.java index b3f95160d0..869d1dfc53 100644 --- a/core/src/test/java/org/polypheny/db/catalog/MockCatalogReaderDocument.java +++ b/core/src/test/java/org/polypheny/db/catalog/MockCatalogReaderDocument.java @@ -41,7 +41,7 @@ public MockCatalogReader init() { // Register "SALES" schema. MockSchema salesSchema = new MockSchema( "private" ); - registerSchema( salesSchema ); + registerSchema( salesSchema, -1 ); // Register "EMP" table. final MockEntity empTable = MockEntity.create( this, salesSchema, "secrets", false, 14, null ); diff --git a/core/src/test/java/org/polypheny/db/catalog/MockCatalogReaderDynamic.java b/core/src/test/java/org/polypheny/db/catalog/MockCatalogReaderDynamic.java index 9f483c567b..c76c753e0d 100644 --- a/core/src/test/java/org/polypheny/db/catalog/MockCatalogReaderDynamic.java +++ b/core/src/test/java/org/polypheny/db/catalog/MockCatalogReaderDynamic.java @@ -46,7 +46,7 @@ public MockCatalogReaderDynamic( AlgDataTypeFactory typeFactory, boolean caseSen public MockCatalogReader init() { // Register "DYNAMIC" schema. 
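The test fixtures above now thread a numeric namespace id through registration: registerSchema( schema, -1 ) and registerNestedSchema( parent, schema, -1 ) hand the id on to AbstractNamespace, with -1 serving as a placeholder wherever no catalog-assigned id exists. A minimal sketch of that constructor-injection pattern, using simplified stand-ins for the Polypheny classes (names mirror the diff, but the bodies are illustrative only):

    // Stand-in for org.polypheny.db.schema.impl.AbstractNamespace; sketch only.
    class MockNamespace {

        final long id;

        MockNamespace( long id ) {
            // -1 marks a mock namespace without a catalog-assigned id.
            this.id = id;
        }
    }

    class RegistrationSketch {

        static MockNamespace registerSchema( String name, long id ) {
            // The real MockCatalogReader would also attach the namespace to the root schema.
            return new MockNamespace( id );
        }

        public static void main( String[] args ) {
            MockNamespace sales = registerSchema( "SALES", -1L );
            System.out.println( "registered SALES with placeholder id " + sales.id );
        }
    }
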
MockSchema schema = new MockSchema( "SALES" ); - registerSchema( schema ); + registerSchema( schema, -1 ); MockEntity nationTable = new MockDynamicEntity( this, schema.getCatalogName(), schema.getName(), "NATION", false, 100 ); registerTable( nationTable ); diff --git a/core/src/test/java/org/polypheny/db/catalog/MockCatalogReaderExtended.java b/core/src/test/java/org/polypheny/db/catalog/MockCatalogReaderExtended.java index 5c607c6ede..eb1ce7b14e 100644 --- a/core/src/test/java/org/polypheny/db/catalog/MockCatalogReaderExtended.java +++ b/core/src/test/java/org/polypheny/db/catalog/MockCatalogReaderExtended.java @@ -47,7 +47,7 @@ public MockCatalogReader init() { super.init(); MockSchema structTypeSchema = new MockSchema( "STRUCT" ); - registerSchema( structTypeSchema ); + registerSchema( structTypeSchema, -1 ); final Fixture f = new Fixture( typeFactory ); final List columnsExtended = Arrays.asList( new CompoundNameColumn( "", "K0", f.varchar20TypeNull ), diff --git a/core/src/test/java/org/polypheny/db/catalog/MockCatalogReaderSimple.java b/core/src/test/java/org/polypheny/db/catalog/MockCatalogReaderSimple.java index 3153fd6b08..95ced9ec42 100644 --- a/core/src/test/java/org/polypheny/db/catalog/MockCatalogReaderSimple.java +++ b/core/src/test/java/org/polypheny/db/catalog/MockCatalogReaderSimple.java @@ -84,7 +84,7 @@ public MockCatalogReader init() { // Register "SALES" schema. MockSchema salesSchema = new MockSchema( "SALES" ); - registerSchema( salesSchema ); + registerSchema( salesSchema, -1 ); // Register "EMP" table with customer InitializerExpressionFactory to check whether newDefaultValue method called or not. final InitializerExpressionFactory countingInitializerExpressionFactory = new CountingFactory( ImmutableList.of( "DEPTNO" ) ); @@ -182,7 +182,7 @@ public MockCatalogReader init() { // Register "CUSTOMER" schema. MockSchema customerSchema = new MockSchema( "CUSTOMER" ); - registerSchema( customerSchema ); + registerSchema( customerSchema, -1 ); // Register "CONTACT" table. 
MockEntity contactTable = MockEntity.create( this, customerSchema, "CONTACT", false, 1000 ); @@ -240,7 +240,7 @@ public MockCatalogReader init() { registerTable( suppliersTable ); MockSchema structTypeSchema = new MockSchema( "STRUCT" ); - registerSchema( structTypeSchema ); + registerSchema( structTypeSchema, -1 ); final List columns = Arrays.asList( new CompoundNameColumn( "", "K0", fixture.varchar20Type ), new CompoundNameColumn( "", "C1", fixture.varchar20Type ), diff --git a/core/src/test/java/org/polypheny/db/plan/RelOptUtilTest.java b/core/src/test/java/org/polypheny/db/plan/RelOptUtilTest.java index 3b580172f7..bc6d0b3784 100644 --- a/core/src/test/java/org/polypheny/db/plan/RelOptUtilTest.java +++ b/core/src/test/java/org/polypheny/db/plan/RelOptUtilTest.java @@ -66,7 +66,7 @@ public class RelOptUtilTest { private static Frameworks.ConfigBuilder config() { final SchemaPlus schema = Frameworks .createRootSchema( false ) - .add( "scott", new ReflectiveSchema( new ScottSchema() ), NamespaceType.RELATIONAL ); + .add( "scott", new ReflectiveSchema( new ScottSchema(), -1 ), NamespaceType.RELATIONAL ); return Frameworks.newConfigBuilder() .parserConfig( Parser.ParserConfig.DEFAULT ) diff --git a/core/src/test/java/org/polypheny/db/schemas/HrClusteredSchema.java b/core/src/test/java/org/polypheny/db/schemas/HrClusteredSchema.java index e5b57c9b09..317611e0af 100644 --- a/core/src/test/java/org/polypheny/db/schemas/HrClusteredSchema.java +++ b/core/src/test/java/org/polypheny/db/schemas/HrClusteredSchema.java @@ -48,29 +48,30 @@ import org.polypheny.db.algebra.AlgFieldCollation; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; -import org.polypheny.db.schema.ScannableTable; +import org.polypheny.db.schema.Entity; +import org.polypheny.db.schema.Namespace.Schema; +import org.polypheny.db.schema.ScannableEntity; import org.polypheny.db.schema.Statistic; import org.polypheny.db.schema.Statistics; -import org.polypheny.db.schema.Table; -import org.polypheny.db.schema.impl.AbstractSchema; -import org.polypheny.db.schema.impl.AbstractTable; +import org.polypheny.db.schema.impl.AbstractEntity; +import org.polypheny.db.schema.impl.AbstractNamespace; import org.polypheny.db.util.ImmutableBitSet; /** * A typical HR schema with employees (emps) and departments (depts) tables that are naturally ordered based on their primary keys representing clustered tables. */ -public final class HrClusteredSchema extends AbstractSchema { +public final class HrClusteredSchema extends AbstractNamespace implements Schema { - private final ImmutableMap tables; + private final ImmutableMap tables; - public HrClusteredSchema() { - super(); - tables = ImmutableMap.builder() + public HrClusteredSchema( long id ) { + super( id ); + tables = ImmutableMap.builder() .put( "emps", - new PkClusteredTable( + new PkClusteredEntity( factory -> new AlgDataTypeFactory.Builder( factory ) .add( "empid", null, factory.createJavaType( int.class ) ) @@ -88,7 +89,7 @@ public HrClusteredSchema() { ) ) .put( "depts", - new PkClusteredTable( + new PkClusteredEntity( factory -> new AlgDataTypeFactory.Builder( factory ) .add( "deptno", null, factory.createJavaType( int.class ) ) @@ -104,7 +105,7 @@ public HrClusteredSchema() { @Override - protected Map getTableMap() { + protected Map getTableMap() { return tables; } @@ -112,14 +113,14 @@ protected Map getTableMap() { /** * A table sorted (ascending direction and nulls last) on the primary key. 
*/ - private static class PkClusteredTable extends AbstractTable implements ScannableTable { + private static class PkClusteredEntity extends AbstractEntity implements ScannableEntity { private final ImmutableBitSet pkColumns; private final List data; private final Function typeBuilder; - PkClusteredTable( Function dataTypeBuilder, ImmutableBitSet pkColumns, List data ) { + PkClusteredEntity( Function dataTypeBuilder, ImmutableBitSet pkColumns, List data ) { this.data = data; this.typeBuilder = dataTypeBuilder; this.pkColumns = pkColumns; diff --git a/core/src/test/java/org/polypheny/db/test/JdbcTest.java b/core/src/test/java/org/polypheny/db/test/JdbcTest.java index bb22d891ca..dad4810614 100644 --- a/core/src/test/java/org/polypheny/db/test/JdbcTest.java +++ b/core/src/test/java/org/polypheny/db/test/JdbcTest.java @@ -25,8 +25,8 @@ import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.prepare.Prepare; import org.polypheny.db.rex.RexNode; -import org.polypheny.db.schema.ModifiableTable; -import org.polypheny.db.schema.impl.AbstractTable; +import org.polypheny.db.schema.ModifiableEntity; +import org.polypheny.db.schema.impl.AbstractEntity; /** @@ -35,11 +35,11 @@ public class JdbcTest { /** - * Abstract base class for implementations of {@link ModifiableTable}. + * Abstract base class for implementations of {@link ModifiableEntity}. */ - public abstract static class AbstractModifiableTable extends AbstractTable implements ModifiableTable { + public abstract static class AbstractModifiableEntity extends AbstractEntity implements ModifiableEntity { - protected AbstractModifiableTable( String tableName ) { + protected AbstractModifiableEntity( String tableName ) { super(); } diff --git a/core/src/test/java/org/polypheny/db/test/ScannableTableTest.java b/core/src/test/java/org/polypheny/db/test/ScannableEntityTest.java similarity index 95% rename from core/src/test/java/org/polypheny/db/test/ScannableTableTest.java rename to core/src/test/java/org/polypheny/db/test/ScannableEntityTest.java index f55a928ee5..aa61575d75 100644 --- a/core/src/test/java/org/polypheny/db/test/ScannableTableTest.java +++ b/core/src/test/java/org/polypheny/db/test/ScannableEntityTest.java @@ -56,17 +56,17 @@ import org.polypheny.db.rex.RexInputRef; import org.polypheny.db.rex.RexLiteral; import org.polypheny.db.rex.RexNode; -import org.polypheny.db.schema.FilterableTable; -import org.polypheny.db.schema.ProjectableFilterableTable; -import org.polypheny.db.schema.ScannableTable; -import org.polypheny.db.schema.impl.AbstractTable; +import org.polypheny.db.schema.FilterableEntity; +import org.polypheny.db.schema.ProjectableFilterableEntity; +import org.polypheny.db.schema.ScannableEntity; +import org.polypheny.db.schema.impl.AbstractEntity; import org.polypheny.db.type.PolyType; /** - * Unit test for {@link ScannableTable}. + * Unit test for {@link ScannableEntity}. */ -public class ScannableTableTest { +public class ScannableEntityTest { @Test public void testTens() throws SQLException { @@ -432,9 +432,9 @@ private static Integer getFilter( boolean cooperative, List filters ) { /** - * Table that returns one column via the {@link ScannableTable} interface. + * Table that returns one column via the {@link ScannableEntity} interface. 
*/ - public static class SimpleTable extends AbstractTable implements ScannableTable { + public static class SimpleEntity extends AbstractEntity implements ScannableEntity { @Override public AlgDataType getRowType( AlgDataTypeFactory typeFactory ) { @@ -458,7 +458,7 @@ public Enumerator enumerator() { /** * Table that returns two columns via the ScannableTable interface. */ - public static class BeatlesTable extends AbstractTable implements ScannableTable { + public static class BeatlesEntity extends AbstractEntity implements ScannableEntity { @Override public AlgDataType getRowType( AlgDataTypeFactory typeFactory ) { @@ -483,15 +483,15 @@ public Enumerator enumerator() { /** - * Table that returns two columns via the {@link FilterableTable} interface. + * Table that returns two columns via the {@link FilterableEntity} interface. */ - public static class BeatlesFilterableTable extends AbstractTable implements FilterableTable { + public static class BeatlesFilterableEntity extends AbstractEntity implements FilterableEntity { private final StringBuilder buf; private final boolean cooperative; - public BeatlesFilterableTable( StringBuilder buf, boolean cooperative ) { + public BeatlesFilterableEntity( StringBuilder buf, boolean cooperative ) { this.buf = buf; this.cooperative = cooperative; } @@ -522,15 +522,15 @@ public Enumerator enumerator() { /** - * Table that returns two columns via the {@link FilterableTable} interface. + * Table that returns two columns via the {@link FilterableEntity} interface. */ - public static class BeatlesProjectableFilterableTable extends AbstractTable implements ProjectableFilterableTable { + public static class BeatlesProjectableFilterableEntity extends AbstractEntity implements ProjectableFilterableEntity { private final StringBuilder buf; private final boolean cooperative; - public BeatlesProjectableFilterableTable( StringBuilder buf, boolean cooperative ) { + public BeatlesProjectableFilterableEntity( StringBuilder buf, boolean cooperative ) { this.buf = buf; this.cooperative = cooperative; } diff --git a/core/src/test/resources/org/polypheny/db/test/HepPlannerTest.xml b/core/src/test/resources/org/polypheny/db/test/HepPlannerTest.xml index 701b8687a2..b3d42980c1 100644 --- a/core/src/test/resources/org/polypheny/db/test/HepPlannerTest.xml +++ b/core/src/test/resources/org/polypheny/db/test/HepPlannerTest.xml @@ -41,17 +41,17 @@ LogicalProject(DEPTNO=[$0]) LogicalJoin(condition=[true], joinType=[inner]) LogicalProject(DEPTNO=[$0], NAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalProject(DEPTNO=[$0], NAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -64,11 +64,11 @@ LogicalProject(DEPTNO=[$0]) LogicalUnion(all=[false]) LogicalUnion(all=[false]) LogicalProject(NAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalProject(ENAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(ENAME=[$0]) - LogicalScan(table=[[CATALOG, SALES, BONUS]]) + LogicalScan(entity=[[CATALOG, SALES, BONUS]]) ]]> @@ -78,11 +78,11 @@ LogicalAggregate(group=[{0}]) LogicalAggregate(group=[{0}]) LogicalUnion(all=[true]) LogicalProject(NAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalProject(ENAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) 
LogicalProject(ENAME=[$0]) - LogicalScan(table=[[CATALOG, SALES, BONUS]]) + LogicalScan(entity=[[CATALOG, SALES, BONUS]]) ]]> @@ -95,11 +95,11 @@ LogicalAggregate(group=[{0}]) LogicalUnion(all=[false]) LogicalUnion(all=[false]) LogicalProject(NAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalProject(ENAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(ENAME=[$0]) - LogicalScan(table=[[CATALOG, SALES, BONUS]]) + LogicalScan(entity=[[CATALOG, SALES, BONUS]]) ]]> @@ -108,11 +108,11 @@ LogicalAggregate(group=[{0}]) LogicalUnion(all=[true]) LogicalUnion(all=[false]) LogicalProject(NAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalProject(ENAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(ENAME=[$0]) - LogicalScan(table=[[CATALOG, SALES, BONUS]]) + LogicalScan(entity=[[CATALOG, SALES, BONUS]]) ]]> @@ -125,11 +125,11 @@ LogicalAggregate(group=[{0}]) LogicalUnion(all=[false]) LogicalUnion(all=[false]) LogicalProject(NAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalProject(ENAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(ENAME=[$0]) - LogicalScan(table=[[CATALOG, SALES, BONUS]]) + LogicalScan(entity=[[CATALOG, SALES, BONUS]]) ]]> @@ -138,11 +138,11 @@ LogicalUnion(all=[false]) LogicalAggregate(group=[{0}]) LogicalUnion(all=[true]) LogicalProject(NAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalProject(ENAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(ENAME=[$0]) - LogicalScan(table=[[CATALOG, SALES, BONUS]]) + LogicalScan(entity=[[CATALOG, SALES, BONUS]]) ]]> @@ -155,11 +155,11 @@ LogicalUnion(all=[false]) LogicalIntersect(all=[false]) LogicalUnion(all=[false]) LogicalProject(NAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalProject(ENAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(FNAME=[$1]) - LogicalScan(table=[[CATALOG, CUSTOMER, CONTACT]]) + LogicalScan(entity=[[CATALOG, CUSTOMER, CONTACT]]) ]]> @@ -168,12 +168,12 @@ LogicalIntersect(all=[false]) LogicalUnion(all=[false]) LogicalProject(NAME=[CAST($0):VARCHAR(20) NOT NULL]) LogicalProject(NAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalProject(ENAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(FNAME=[CAST($0):VARCHAR(20) NOT NULL]) LogicalProject(FNAME=[$1]) - LogicalScan(table=[[CATALOG, CUSTOMER, CONTACT]]) + LogicalScan(entity=[[CATALOG, CUSTOMER, CONTACT]]) ]]> @@ -184,13 +184,13 @@ LogicalIntersect(all=[false]) @@ -202,14 +202,14 @@ LogicalCalc(expr#0..8=[{inputs}], expr#9=[LOWER($t1)], expr#10=[UPPER($t9)], EXP @@ -221,13 +221,13 @@ LogicalProject(NAME=[$1]) diff --git a/core/src/test/resources/org/polypheny/db/test/RelOptRulesTest.xml b/core/src/test/resources/org/polypheny/db/test/RelOptRulesTest.xml index f76eabb154..ca2cb8fa2a 100644 --- a/core/src/test/resources/org/polypheny/db/test/RelOptRulesTest.xml +++ b/core/src/test/resources/org/polypheny/db/test/RelOptRulesTest.xml 
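The hunks in HepPlannerTest.xml above and RelOptRulesTest.xml below all make the same mechanical change: the recorded plan dumps now print entity=[[...]] where they previously printed table=[[...]]. A hedged sketch of the kind of golden-plan assertion these resources back, assuming a JUnit 4 test that checks the rendered plan string (the explain() body is a hard-coded stand-in, not real planner output):

    import static org.junit.Assert.assertFalse;
    import static org.junit.Assert.assertTrue;

    import org.junit.Test;

    public class PlanDumpRenameSketch {

        // Hard-coded stand-in for the plan a scan operator would render after the rename.
        private String explain() {
            return "LogicalProject(DEPTNO=[$0])\n"
                    + "  LogicalScan(entity=[[CATALOG, SALES, DEPT]])";
        }

        @Test
        public void scanPrintsEntityInsteadOfTable() {
            String plan = explain();
            // The golden files now expect "entity=" wherever they previously held "table=".
            assertTrue( plan.contains( "entity=[[CATALOG, SALES, DEPT]]" ) );
            assertFalse( plan.contains( "table=[[" ) );
        }
    }
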
@@ -40,14 +40,14 @@ from emp]]> @@ -60,7 +60,7 @@ from emp]]> @@ -73,14 +73,14 @@ group by grouping sets ((empno, deptno),(deptno),(empno))]]> @@ -93,14 +93,14 @@ group by grouping sets ((empno, deptno),(deptno),(empno))]]> @@ -115,7 +115,7 @@ where NOT(caseCol)]]> LogicalProject(CASECOL=[$0]) LogicalFilter(condition=[NOT($0)]) LogicalProject(CASECOL=[CASE(>($5, 1000), null, false)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -123,7 +123,7 @@ LogicalProject(CASECOL=[$0]) LogicalProject(CASECOL=[$0]) LogicalFilter(condition=[NOT($0)]) LogicalProject(CASECOL=[AND(>($5, 1000), null)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -139,14 +139,14 @@ where case when (sal = 1000) then ($5, 1000)))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -161,14 +161,14 @@ OR case when sal = 2000 then null else 1 end is null]]> @@ -203,13 +203,13 @@ window w as (partition by empno order by empno)]]> @@ -233,7 +233,7 @@ LogicalProject(EMPNO=[$0], DEPTNO=[$1], W_COUNT=[$2]) LogicalFilter(condition=[IS NULL($2)]) LogicalProject(EMPNO=[$0], DEPTNO=[$7], $2=[$9]) LogicalWindow(window#0=[window(partition {7} order by [0] range between UNBOUNDED PRECEDING and CURRENT ROW aggs [COUNT($0)])]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -256,7 +256,7 @@ LogicalProject(EMPNO=[$0], DEPTNO=[$1], W_COUNT=[$2]) LogicalFilter(condition=[IS NULL($2)]) LogicalWindow(window#0=[window(partition {} order by [] rows between $2 PRECEDING and $3 PRECEDING aggs [COUNT($0)])]) LogicalProject(EMPNO=[$0], DEPTNO=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -273,13 +273,13 @@ from emp]]> @@ -292,18 +292,18 @@ LogicalProject(COUNT1=[COUNT() OVER (PARTITION BY $0 ORDER BY $5 RANGE BETWEEN U LogicalAggregate(group=[{0, 1}]) LogicalUnion(all=[true]) LogicalProject(DEPTNO=[$0], NAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalProject(DEPTNO=[$0], NAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -329,17 +329,17 @@ select name, deptno from dept @@ -349,17 +349,17 @@ LogicalProject(NAME=[$0], DEPTNO=[$1]) LogicalProject(NAME=[$0], DEPTNO=[$1]) LogicalUnion(all=[true]) LogicalProject(NAME=[$1], DEPTNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalProject(NAME=[$0], DEPTNO=[$1]) LogicalUnion(all=[true]) LogicalAggregate(group=[{0, 1}], EXPR$2=[COUNT()]) LogicalProject(NAME=[$1], DEPTNO=[$0], $f2=[1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalAggregate(group=[{0, 1}], EXPR$2=[COUNT()]) LogicalProject(NAME=[$1], DEPTNO=[$0], $f2=[1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalProject(NAME=[$1], DEPTNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -392,20 +392,20 @@ LogicalMinus(all=[true]) LogicalProject(NAME=[$0], DEPTNO=[$1]) LogicalAggregate(group=[{0, 1}], EXPR$2=[COUNT()]) LogicalProject(NAME=[$1], DEPTNO=[$0], $f2=[1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalProject(NAME=[$0], DEPTNO=[$1]) LogicalProject(NAME=[$1], DEPTNO=[$0], EXPR$2=[1]) - 
LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalMinus(all=[true]) LogicalProject(NAME=[$0], DEPTNO=[$1]) LogicalProject(NAME=[$1], DEPTNO=[$0], EXPR$2=[1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalProject(NAME=[$0], DEPTNO=[$1]) LogicalAggregate(group=[{0, 1}], EXPR$2=[COUNT()]) LogicalProject(NAME=[$1], DEPTNO=[$0], $f2=[1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalProject(NAME=[$1], DEPTNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -418,18 +418,18 @@ LogicalProject(NAME=[$0], DEPTNO=[$1]) LogicalMinus(all=[true]) LogicalAggregate(group=[{0, 1}], EXPR$2=[COUNT()]) LogicalProject(NAME=[$1], DEPTNO=[$0], $f2=[1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalProject(NAME=[$1], DEPTNO=[$0], EXPR$2=[1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalProject(NAME=[$0], DEPTNO=[$1]) LogicalMinus(all=[true]) LogicalProject(NAME=[$1], DEPTNO=[$0], EXPR$2=[1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalAggregate(group=[{0, 1}], EXPR$2=[COUNT()]) LogicalProject(NAME=[$1], DEPTNO=[$0], $f2=[1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalProject(NAME=[$1], DEPTNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -441,8 +441,8 @@ LogicalProject(NAME=[$0], DEPTNO=[$1]) @@ -450,8 +450,8 @@ LogicalProject(EXPR$0=[1]) LogicalProject(EXPR$0=[1]) LogicalFilter(condition=[=($7, $9)]) LogicalJoin(condition=[true], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -463,8 +463,8 @@ LogicalProject(EXPR$0=[1]) @@ -472,9 +472,9 @@ LogicalProject(EXPR$0=[1]) LogicalProject(EXPR$0=[1]) LogicalJoin(condition=[=($7, $9)], joinType=[inner], semiJoinDone=[true]) SemiJoin(condition=[=($7, $9)], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -488,9 +488,9 @@ LogicalProject(EXPR$0=[1]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[NOT(<= SOME($0, { LogicalProject(DEPTNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) }))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -499,10 +499,10 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[AND(OR(IS NOT TRUE(<=($0, $9)), =($10, 0)), OR(<=($10, $11), =($10, 0), IS TRUE(<=($0, $9))), OR(>($0, $9), =($10, 0), IS TRUE(<=($0, $9)), >($10, $11)))]) LogicalJoin(condition=[true], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) 
LogicalAggregate(group=[{}], m=[MAX($0)], c=[COUNT()], d=[COUNT($0)]) LogicalProject(DEPTNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -511,10 +511,10 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[AND(OR(IS NOT TRUE(<=($0, $9)), =($10, 0)), OR(<=($10, $11), =($10, 0), IS TRUE(<=($0, $9))), OR(>($0, $9), =($10, 0), IS TRUE(<=($0, $9)), >($10, $11)))]) LogicalJoin(condition=[true], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{}], m=[MAX($0)], c=[COUNT()], d=[COUNT($0)]) LogicalProject(DEPTNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -526,14 +526,14 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ ($5, 1000), $0, $5), 1)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> ($5, 1000), =($0, 1)), =($5, 1))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -551,13 +551,13 @@ LogicalProject(EMPNO=[$0]) LogicalJoin(condition=[=($2, $3)], joinType=[inner]) LogicalFilter(condition=[<($2, 10)]) LogicalProject(EMPNO=[$0], SAL=[$5], DEPTNO=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalFilter(condition=[<($0, 15)]) LogicalProject(DEPTNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalAggregate(group=[{0}], EXPR$0=[AVG($1)]) LogicalProject(EMPNO=[$0], SAL=[$5]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -572,20 +572,20 @@ where EXISTS ( LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[EXISTS({ LogicalFilter(condition=[=($cor0.DEPTNO, $7)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) })], variablesSet=[[$cor0]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -593,11 +593,11 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalCorrelate(correlation=[$cor0], joinType=[inner], requiredColumns=[{7}]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0}]) LogicalProject(i=[true]) LogicalFilter(condition=[=($cor0.DEPTNO, $7)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -614,12 +614,12 @@ AND NOT EXISTS ( LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[AND(EXISTS({ LogicalFilter(condition=[=($cor0.DEPTNO, $7)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) }), NOT(EXISTS({ LogicalFilter(condition=[AND(=($2, $cor0.JOB), =($5, 34))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + 
LogicalScan(entity=[[CATALOG, SALES, EMP]]) })))], variablesSet=[[$cor0]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -629,15 +629,15 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalFilter(condition=[IS NULL($10)]) LogicalCorrelate(correlation=[$cor0], joinType=[left], requiredColumns=[{2}]) LogicalCorrelate(correlation=[$cor0], joinType=[inner], requiredColumns=[{7}]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0}]) LogicalProject(i=[true]) LogicalFilter(condition=[=($cor0.DEPTNO, $7)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0}]) LogicalProject(i=[true]) LogicalFilter(condition=[AND(=($2, $cor0.JOB), =($5, 34))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -646,16 +646,16 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalFilter(condition=[IS NULL($12)]) LogicalJoin(condition=[=($2, $11)], joinType=[left]) LogicalJoin(condition=[=($7, $9)], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(DEPTNO=[$0], $f1=[true]) LogicalAggregate(group=[{0}]) LogicalProject(DEPTNO=[$7], i=[true]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(JOB=[$0], $f1=[true]) LogicalAggregate(group=[{0}]) LogicalProject(JOB=[$2], i=[true]) LogicalFilter(condition=[=($5, 34)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -673,12 +673,12 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalFilter(condition=[AND(IN($2, { LogicalProject(JOB=[$2]) LogicalFilter(condition=[=($5, 34)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) }), EXISTS({ LogicalFilter(condition=[=($cor0.DEPTNO, $7)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) }))], variablesSet=[[$cor0]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -687,15 +687,15 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalCorrelate(correlation=[$cor0], joinType=[inner], requiredColumns=[{7}]) LogicalJoin(condition=[=($2, $9)], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0}]) LogicalProject(JOB=[$2]) LogicalFilter(condition=[=($5, 34)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0}]) LogicalProject(i=[true]) LogicalFilter(condition=[=($cor0.DEPTNO, $7)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -703,15 +703,15 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalJoin(condition=[=($7, $10)], joinType=[inner]) LogicalJoin(condition=[=($2, $9)], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, 
EMP]]) LogicalAggregate(group=[{0}]) LogicalProject(JOB=[$2]) LogicalFilter(condition=[=($5, 34)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(DEPTNO=[$0], $f1=[true]) LogicalAggregate(group=[{0}]) LogicalProject(DEPTNO=[$7], i=[true]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -730,13 +730,13 @@ LogicalProject(SAL=[$5]) LogicalFilter(condition=[AND(IN($0, { LogicalProject(DEPTNO=[$0]) LogicalFilter(condition=[=($cor0.JOB, $1)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) }), IN($0, { LogicalProject(EMPNO=[$0]) LogicalFilter(condition=[=($cor0.ENAME, $1)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) }))], variablesSet=[[$cor0]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -747,13 +747,13 @@ LogicalProject(SAL=[$5]) LogicalCorrelate(correlation=[$cor0], joinType=[inner], requiredColumns=[{1}]) LogicalFilter(condition=[=($0, $9)]) LogicalCorrelate(correlation=[$cor0], joinType=[inner], requiredColumns=[{2}]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(DEPTNO=[$0]) LogicalFilter(condition=[=($cor0.JOB, $1)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalProject(EMPNO=[$0]) LogicalFilter(condition=[=($cor0.ENAME, $1)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -761,11 +761,11 @@ LogicalProject(SAL=[$5]) LogicalProject(SAL=[$5]) LogicalJoin(condition=[AND(=($1, $12), =($0, $11))], joinType=[inner]) LogicalJoin(condition=[AND(=($2, $10), =($0, $9))], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(DEPTNO=[$0], NAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalProject(EMPNO=[$0], ENAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -783,14 +783,14 @@ LogicalProject(DEPTNO=[$0], I0=[$SCALAR_QUERY({ LogicalAggregate(group=[{}], EXPR$0=[MIN($0)]) LogicalProject($f0=[1]) LogicalFilter(condition=[>($0, $cor0.DEPTNO)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) })], I1=[$SCALAR_QUERY({ LogicalAggregate(group=[{}], EXPR$0=[MIN($0)]) LogicalProject($f0=[0]) LogicalFilter(condition=[AND(=($7, $cor1.DEPTNO), =($1, 'SMITH'))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) })]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -798,15 +798,15 @@ LogicalAggregate(group=[{}], EXPR$0=[MIN($0)]) LogicalProject(DEPTNO=[$0], I0=[$2], I1=[$3]) LogicalJoin(condition=[true], joinType=[left]) LogicalJoin(condition=[true], joinType=[left]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalAggregate(group=[{}], EXPR$0=[MIN($0)]) LogicalProject($f0=[1]) LogicalFilter(condition=[>($0, $cor0.DEPTNO)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{}], EXPR$0=[MIN($0)]) LogicalProject($f0=[0]) LogicalFilter(condition=[AND(=($7, $cor1.DEPTNO), =($1, 'SMITH'))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, 
EMP]]) ]]> @@ -819,7 +819,7 @@ LogicalProject(DEPTNO=[$0], I0=[$2], I1=[$3]) LogicalProject(EXPR$0=[$1], EXPR$1=[$2]) LogicalAggregate(group=[{0}], EXPR$0=[COUNT(DISTINCT $0)], EXPR$1=[SUM($1)]) LogicalProject(DEPTNO=[$7], SAL=[$5]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -830,7 +830,7 @@ LogicalProject(EXPR$0=[$1], EXPR$1=[$2]) LogicalProject(DEPTNO=[$0], EXPR$1=[$1], $g_0=[=($2, 0)]) LogicalAggregate(group=[{0}], EXPR$1=[SUM($1)], $g=[GROUPING($0)]) LogicalProject(DEPTNO=[$7], SAL=[$5]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -845,7 +845,7 @@ GROUP BY deptno]]> @@ -853,7 +853,7 @@ LogicalAggregate(group=[{0}], EXPR$1=[SUM($0)], EXPR$2=[SUM(DISTINCT $1)], EXPR$ LogicalAggregate(group=[{0}], EXPR$1=[SUM($2)], EXPR$2=[SUM($1)], EXPR$3=[MAX($3)], EXPR$4=[MAX($4)]) LogicalAggregate(group=[{0, 1}], EXPR$1=[SUM($0)], EXPR$3=[MAX($0)], EXPR$4=[MAX($2)]) LogicalProject(DEPTNO=[$7], SAL=[$5], COMM=[$6]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -867,7 +867,7 @@ GROUP BY deptno]]> @@ -875,7 +875,7 @@ LogicalAggregate(group=[{0}], EXPR$1=[COUNT()], EXPR$2=[SUM(DISTINCT $1)]) LogicalAggregate(group=[{0}], EXPR$1=[$SUM0($2)], EXPR$2=[SUM($1)]) LogicalAggregate(group=[{0, 1}], EXPR$1=[COUNT()]) LogicalProject(DEPTNO=[$7], SAL=[$5]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -889,7 +889,7 @@ GROUP BY deptno]]> @@ -897,7 +897,7 @@ LogicalAggregate(group=[{0}], EXPR$1=[SUM($1)], EXPR$2=[MIN($1)], EXPR$3=[SUM(DI LogicalAggregate(group=[{0}], EXPR$1=[SUM($2)], EXPR$2=[MIN($3)], EXPR$3=[SUM($1)]) LogicalAggregate(group=[{0, 2}], EXPR$1=[SUM($1)], EXPR$2=[MIN($1)]) LogicalProject(DEPTNO=[$7], COMM=[$6], SAL=[$5]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -911,7 +911,7 @@ GROUP BY sal]]> @@ -919,7 +919,7 @@ LogicalAggregate(group=[{0}], EXPR$1=[SUM($1)], EXPR$2=[MIN($1)], EXPR$3=[SUM(DI LogicalAggregate(group=[{0}], EXPR$1=[SUM($1)], EXPR$2=[MIN($2)], EXPR$3=[SUM($0)]) LogicalAggregate(group=[{0}], EXPR$1=[SUM($1)], EXPR$2=[MIN($1)]) LogicalProject(SAL=[$5], COMM=[$6]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -933,7 +933,7 @@ GROUP BY sal]]> @@ -943,16 +943,16 @@ LogicalProject(SAL=[$0], EXPR$1=[$1], EXPR$2=[$3], EXPR$3=[$5]) LogicalJoin(condition=[IS NOT DISTINCT FROM($0, $2)], joinType=[inner]) LogicalAggregate(group=[{0}], EXPR$1=[SUM($1)]) LogicalProject(SAL=[$5], COMM=[$6]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0}], EXPR$2=[MIN($1)]) LogicalAggregate(group=[{0, 1}]) LogicalProject(SAL=[$5], COMM=[$6]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0}], EXPR$3=[SUM($0)]) LogicalAggregate(group=[{0}]) LogicalProject(SAL=[$0]) LogicalProject(SAL=[$5], COMM=[$6]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -966,7 +966,7 @@ GROUP BY deptno]]> @@ -974,7 +974,7 @@ LogicalAggregate(group=[{0}], EXPR$1=[SUM($1)], EXPR$2=[MIN($1)], EXPR$3=[COUNT( LogicalAggregate(group=[{0}], EXPR$1=[SUM($3)], EXPR$2=[MIN($4)], EXPR$3=[COUNT($2, $1)]) LogicalAggregate(group=[{0, 1, 2}], EXPR$1=[SUM($1)], EXPR$2=[MIN($1)]) LogicalProject(DEPTNO=[$7], COMM=[$6], SAL=[$5]) - LogicalScan(table=[[CATALOG, 
SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -988,7 +988,7 @@ GROUP BY deptno]]> @@ -996,7 +996,7 @@ LogicalAggregate(group=[{0}], EXPR$1=[SUM($1)], EXPR$2=[MIN($1)], EXPR$3=[COUNT( LogicalAggregate(group=[{0}], EXPR$1=[SUM($3)], EXPR$2=[MIN($4)], EXPR$3=[COUNT($2, $0, $1)]) LogicalAggregate(group=[{0, 1, 2}], EXPR$1=[SUM($1)], EXPR$2=[MIN($1)]) LogicalProject(DEPTNO=[$7], COMM=[$6], SAL=[$5]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1086,8 +1086,8 @@ LogicalValues(tuples=[[]]) ProjectRel(EXPR$0=[1]) FilterRel(condition=[=($1, 'Charlie')]) JoinRel(condition=[=($0, $9)], joinType=[left]) - TableAccessRel(table=[[CATALOG, SALES, DEPT]]) - TableAccessRel(table=[[CATALOG, SALES, EMP]]) + TableAccessRel(entity=[[CATALOG, SALES, DEPT]]) + TableAccessRel(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1095,8 +1095,8 @@ ProjectRel(EXPR$0=[1]) ProjectRel(EXPR$0=[1]) JoinRel(condition=[=($0, $9)], joinType=[left]) FilterRel(condition=[=($1, 'Charlie')]) - TableAccessRel(table=[[CATALOG, SALES, DEPT]]) - TableAccessRel(table=[[CATALOG, SALES, EMP]]) + TableAccessRel(entity=[[CATALOG, SALES, DEPT]]) + TableAccessRel(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1113,11 +1113,11 @@ LogicalProject(SAL=[$0]) SemiJoin(condition=[=($1, $2)], joinType=[inner]) LogicalFilter(condition=[=($1, 200)]) LogicalProject(SAL=[$5], DEPTNO=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(DEPTNO=[$1]) LogicalFilter(condition=[=($0, 100)]) LogicalProject(SAL=[$5], DEPTNO=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1126,11 +1126,11 @@ LogicalProject(SAL=[$0]) SemiJoin(condition=[=($1, $2)], joinType=[inner]) LogicalFilter(condition=[=($1, 200)]) LogicalProject(SAL=[$5], DEPTNO=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(DEPTNO=[$1]) LogicalFilter(condition=[=($0, 100)]) LogicalProject(SAL=[$5], DEPTNO=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1143,8 +1143,8 @@ LogicalProject(SAL=[$0]) LogicalProject(EXPR$0=[1]) LogicalFilter(condition=[=($1, 'Charlie')]) LogicalJoin(condition=[=($0, $9)], joinType=[full]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1153,8 +1153,8 @@ LogicalProject(EXPR$0=[1]) LogicalProject(DEPTNO=[CAST($0):INTEGER], NAME=[CAST($1):VARCHAR(10)], EMPNO=[$2], ENAME=[$3], JOB=[$4], MGR=[$5], HIREDATE=[$6], SAL=[$7], COMM=[$8], DEPTNO0=[$9], SLACKER=[$10]) LogicalJoin(condition=[=($0, $9)], joinType=[left]) LogicalFilter(condition=[=($1, 'Charlie')]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1167,8 +1167,8 @@ LogicalProject(EXPR$0=[1]) LogicalProject(EXPR$0=[1]) LogicalFilter(condition=[>($7, 100)]) LogicalJoin(condition=[=($0, $9)], joinType=[full]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1176,9 +1176,9 @@ LogicalProject(EXPR$0=[1]) LogicalProject(EXPR$0=[1]) LogicalProject(DEPTNO=[$0], NAME=[$1], EMPNO=[CAST($2):INTEGER], 
ENAME=[CAST($3):VARCHAR(20)], JOB=[CAST($4):VARCHAR(10)], MGR=[$5], HIREDATE=[CAST($6):TIMESTAMP(0)], SAL=[CAST($7):INTEGER], COMM=[CAST($8):INTEGER], DEPTNO0=[CAST($9):INTEGER], SLACKER=[CAST($10):BOOLEAN]) LogicalJoin(condition=[=($0, $9)], joinType=[right]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalFilter(condition=[>($5, 100)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1191,8 +1191,8 @@ LogicalProject(EXPR$0=[1]) LogicalProject(EXPR$0=[1]) LogicalFilter(condition=[AND(=($1, 'Charlie'), >($7, 100))]) LogicalJoin(condition=[=($0, $9)], joinType=[full]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1201,9 +1201,9 @@ LogicalProject(EXPR$0=[1]) LogicalProject(DEPTNO=[CAST($0):INTEGER], NAME=[CAST($1):VARCHAR(10)], EMPNO=[CAST($2):INTEGER], ENAME=[CAST($3):VARCHAR(20)], JOB=[CAST($4):VARCHAR(10)], MGR=[$5], HIREDATE=[CAST($6):TIMESTAMP(0)], SAL=[CAST($7):INTEGER], COMM=[CAST($8):INTEGER], DEPTNO0=[CAST($9):INTEGER], SLACKER=[CAST($10):BOOLEAN]) LogicalJoin(condition=[=($0, $9)], joinType=[inner]) LogicalFilter(condition=[=($1, 'Charlie')]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalFilter(condition=[>($5, 100)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1216,8 +1216,8 @@ LogicalProject(EXPR$0=[1]) LogicalProject(EXPR$0=[1]) LogicalFilter(condition=[>($7, 100)]) LogicalJoin(condition=[=($0, $9)], joinType=[left]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1225,9 +1225,9 @@ LogicalProject(EXPR$0=[1]) LogicalProject(EXPR$0=[1]) LogicalProject(DEPTNO=[$0], NAME=[$1], EMPNO=[CAST($2):INTEGER], ENAME=[CAST($3):VARCHAR(20)], JOB=[CAST($4):VARCHAR(10)], MGR=[$5], HIREDATE=[CAST($6):TIMESTAMP(0)], SAL=[CAST($7):INTEGER], COMM=[CAST($8):INTEGER], DEPTNO0=[CAST($9):INTEGER], SLACKER=[CAST($10):BOOLEAN]) LogicalJoin(condition=[=($0, $9)], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalFilter(condition=[>($5, 100)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1240,8 +1240,8 @@ LogicalProject(EXPR$0=[1]) LogicalProject(EXPR$0=[1]) LogicalFilter(condition=[=($1, 'Charlie')]) LogicalJoin(condition=[=($0, $9)], joinType=[right]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1250,8 +1250,8 @@ LogicalProject(EXPR$0=[1]) LogicalProject(DEPTNO=[CAST($0):INTEGER], NAME=[CAST($1):VARCHAR(10)], EMPNO=[$2], ENAME=[$3], JOB=[$4], MGR=[$5], HIREDATE=[$6], SAL=[$7], COMM=[$8], DEPTNO0=[$9], SLACKER=[$10]) LogicalJoin(condition=[=($0, $9)], joinType=[inner]) LogicalFilter(condition=[=($1, 'Charlie')]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1267,9 +1267,9 @@ where R.deptno <=10]]> LogicalProject(DEPTNO=[$0], NAME=[$1]) LogicalFilter(condition=[<=($0, 10)]) SemiJoin(condition=[=($0, $2)], 
joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalProject(DEPTNO=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1277,9 +1277,9 @@ LogicalProject(DEPTNO=[$0], NAME=[$1]) LogicalProject(DEPTNO=[$0], NAME=[$1]) SemiJoin(condition=[=($0, $2)], joinType=[inner]) LogicalFilter(condition=[<=($0, 10)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalProject(DEPTNO=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1293,7 +1293,7 @@ LogicalProject(DNAME=[$0], C=[$1]) LogicalFilter(condition=[=($0, 'Charlie')]) LogicalAggregate(group=[{0}], C=[COUNT()]) LogicalProject(DNAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -1302,7 +1302,7 @@ LogicalProject(DNAME=[$0], C=[$1]) LogicalAggregate(group=[{0}], C=[COUNT()]) LogicalFilter(condition=[=($0, 'Charlie')]) LogicalProject(DNAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -1319,7 +1319,7 @@ LogicalProject(DDEPTNO=[$0], DNAME=[$1], C=[$2]) LogicalProject(DDEPTNO=[$0], DNAME=[$1], C=[$2]) LogicalFilter(condition=[=($1, 'Charlie')]) LogicalAggregate(group=[{0, 1}], groups=[[{0, 1}, {0}, {}]], C=[COUNT()]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -1328,7 +1328,7 @@ LogicalProject(DDEPTNO=[$0], DNAME=[$1], C=[$2]) LogicalProject(DDEPTNO=[CASE($2, null, $0)], DNAME=[CASE($3, null, $1)], C=[$4]) LogicalFilter(condition=[=(CASE($3, null, $1), 'Charlie')]) LogicalAggregate(group=[{0, 1}], groups=[[{0, 1}, {0}, {}]], indicator=[true], C=[COUNT()]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -1345,7 +1345,7 @@ LogicalProject(DNAME=[$0], DDEPTNO=[$1], C=[$2]) LogicalFilter(condition=[=($0, 'Charlie')]) LogicalAggregate(group=[{0, 1}], groups=[[{0, 1}, {0}]], C=[COUNT()]) LogicalProject(DNAME=[$1], DDEPTNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -1354,7 +1354,7 @@ LogicalProject(DNAME=[$0], DDEPTNO=[$1], C=[$2]) LogicalAggregate(group=[{0, 1}], groups=[[{0, 1}, {0}]], C=[COUNT()]) LogicalFilter(condition=[=($0, 'Charlie')]) LogicalProject(DNAME=[$1], DDEPTNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -1366,7 +1366,7 @@ LogicalProject(DNAME=[$0], DDEPTNO=[$1], C=[$2]) @@ -1374,7 +1374,7 @@ LogicalAggregate(group=[{0}], EXPR$1=[MAX($0)], EXPR$2=[AVG($1)], EXPR$3=[MIN($0 LogicalProject(NAME=[$0], EXPR$1=[$1], EXPR$2=[CAST(/($2, $3)):INTEGER NOT NULL], EXPR$3=[$4]) LogicalAggregate(group=[{0}], EXPR$1=[MAX($0)], agg#1=[$SUM0($1)], agg#2=[COUNT()], EXPR$3=[MIN($0)]) LogicalProject(NAME=[$1], DEPTNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -1386,7 +1386,7 @@ LogicalProject(NAME=[$0], EXPR$1=[$1], EXPR$2=[CAST(/($2, $3)):INTEGER NOT NULL] @@ -1394,7 +1394,7 @@ LogicalProject(EXPR$0=[+($0, $7)]) LogicalProject(EXPR$0=[+($0, $4)]) LogicalFilter(condition=[AND(=($2, *(10, $3)), =(UPPER($1), 'FOO'))]) LogicalProject(EMPNO=[$0], ENAME=[$1], SAL=[$5], COMM=[$6], DEPTNO=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1406,8 +1406,8 @@ LogicalProject(EXPR$0=[+($0, $4)]) 
@@ -1415,9 +1415,9 @@ LogicalProject(EXPR$0=[+($5, $12)]) LogicalProject(EXPR$0=[+($1, $4)]) LogicalJoin(condition=[AND(=($0, $3), $2)], joinType=[inner]) LogicalProject(ENAME=[$1], SAL=[$5], ==[=($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(ENAME=[$0], COMM=[$3]) - LogicalScan(table=[[CATALOG, SALES, BONUS]]) + LogicalScan(entity=[[CATALOG, SALES, BONUS]]) ]]> @@ -1433,8 +1433,8 @@ LogicalProject(EXPR$0=[$1], EXPR$1=[$0]) LogicalAggregate(group=[{0}], EXPR$0=[COUNT()]) LogicalProject(EXPR$1=[CASE(<($5, 11), *(-1, $5), $5)]) LogicalJoin(condition=[=($1, $9)], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, BONUS]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, BONUS]]) ]]> @@ -1444,9 +1444,9 @@ LogicalProject(EXPR$0=[$1], EXPR$1=[$0]) LogicalProject(EXPR$1=[$1]) LogicalJoin(condition=[=($0, $2)], joinType=[inner]) LogicalProject(ENAME=[$1], CASE=[CASE(<($5, 11), *(-1, $5), $5)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(ENAME=[$0]) - LogicalScan(table=[[CATALOG, SALES, BONUS]]) + LogicalScan(entity=[[CATALOG, SALES, BONUS]]) ]]> @@ -1462,8 +1462,8 @@ LogicalProject(EXPR$0=[$1], EXPR$1=[$0]) LogicalAggregate(group=[{0}], EXPR$0=[COUNT()]) LogicalProject(EXPR$1=[CASE(<($5, 11), 11, *(-1, $5))]) LogicalJoin(condition=[=($1, $9)], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, BONUS]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, BONUS]]) ]]> @@ -1473,9 +1473,9 @@ LogicalProject(EXPR$0=[$1], EXPR$1=[$0]) LogicalProject(EXPR$1=[$1]) LogicalJoin(condition=[=($0, $2)], joinType=[inner]) LogicalProject(ENAME=[$1], CASE=[CASE(<($5, 11), 11, *(-1, $5))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(ENAME=[$0]) - LogicalScan(table=[[CATALOG, SALES, BONUS]]) + LogicalScan(entity=[[CATALOG, SALES, BONUS]]) ]]> @@ -1491,8 +1491,8 @@ LogicalProject(EXPR$0=[$1], EXPR$1=[$0]) LogicalAggregate(group=[{0}], EXPR$0=[COUNT()]) LogicalProject(EXPR$1=[CASE(<($5, 11), 11, *(-1, $5))]) LogicalJoin(condition=[=($1, $9)], joinType=[left]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, BONUS]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, BONUS]]) ]]> @@ -1502,9 +1502,9 @@ LogicalProject(EXPR$0=[$1], EXPR$1=[$0]) LogicalProject(EXPR$1=[$1]) LogicalJoin(condition=[=($0, $2)], joinType=[left]) LogicalProject(ENAME=[$1], CASE=[CASE(<($5, 11), 11, *(-1, $5))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(ENAME=[$0]) - LogicalScan(table=[[CATALOG, SALES, BONUS]]) + LogicalScan(entity=[[CATALOG, SALES, BONUS]]) ]]> @@ -1520,8 +1520,8 @@ LogicalProject(EXPR$0=[$1], EXPR$1=[$0]) LogicalAggregate(group=[{0}], EXPR$0=[COUNT()]) LogicalProject(EXPR$1=[CASE(<($9, 11), 11, *(-1, $9))]) LogicalJoin(condition=[=($5, $0)], joinType=[left]) - LogicalScan(table=[[CATALOG, SALES, BONUS]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, BONUS]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1531,9 +1531,9 @@ LogicalProject(EXPR$0=[$1], EXPR$1=[$0]) LogicalProject(EXPR$1=[CASE($2, 11, $3)]) LogicalJoin(condition=[=($1, $0)], joinType=[left]) LogicalProject(ENAME=[$0]) - 
LogicalScan(table=[[CATALOG, SALES, BONUS]]) + LogicalScan(entity=[[CATALOG, SALES, BONUS]]) LogicalProject(ENAME=[$1], <=[<($5, 11)], *=[*(-1, $5)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1549,8 +1549,8 @@ LogicalProject(EXPR$0=[$1], EXPR$1=[$0]) LogicalAggregate(group=[{0}], EXPR$0=[COUNT()]) LogicalProject(EXPR$1=[CASE(<($9, 11), *(-1, $9), $9)]) LogicalJoin(condition=[=($5, $0)], joinType=[left]) - LogicalScan(table=[[CATALOG, SALES, BONUS]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, BONUS]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1560,9 +1560,9 @@ LogicalProject(EXPR$0=[$1], EXPR$1=[$0]) LogicalProject(EXPR$1=[$2]) LogicalJoin(condition=[=($1, $0)], joinType=[left]) LogicalProject(ENAME=[$0]) - LogicalScan(table=[[CATALOG, SALES, BONUS]]) + LogicalScan(entity=[[CATALOG, SALES, BONUS]]) LogicalProject(ENAME=[$1], CASE=[CASE(<($5, 11), *(-1, $5), $5)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1578,8 +1578,8 @@ LogicalProject(EXPR$0=[$1], EXPR$1=[$0]) LogicalAggregate(group=[{0}], EXPR$0=[COUNT()]) LogicalProject(EXPR$1=[CASE(<($9, 11), *(-1, $9), $9)]) LogicalJoin(condition=[=($5, $0)], joinType=[left]) - LogicalScan(table=[[CATALOG, SALES, BONUS]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, BONUS]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1589,9 +1589,9 @@ LogicalProject(EXPR$0=[$1], EXPR$1=[$0]) LogicalProject(EXPR$1=[$2]) LogicalJoin(condition=[=($1, $0)], joinType=[left]) LogicalProject(ENAME=[$0]) - LogicalScan(table=[[CATALOG, SALES, BONUS]]) + LogicalScan(entity=[[CATALOG, SALES, BONUS]]) LogicalProject(ENAME=[$1], CASE=[CASE(<($5, 11), *(-1, $5), $5)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1607,8 +1607,8 @@ LogicalProject(EXPR$0=[$1], EXPR$1=[$0]) LogicalAggregate(group=[{0}], EXPR$0=[COUNT()]) LogicalProject(EXPR$1=[CASE(<($5, 11), 11, *(-1, $5))]) LogicalJoin(condition=[=($1, $9)], joinType=[right]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, BONUS]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, BONUS]]) ]]> @@ -1618,9 +1618,9 @@ LogicalProject(EXPR$0=[$1], EXPR$1=[$0]) LogicalProject(EXPR$1=[CASE($1, 11, $2)]) LogicalJoin(condition=[=($0, $3)], joinType=[right]) LogicalProject(ENAME=[$1], <=[<($5, 11)], *=[*(-1, $5)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(ENAME=[$0]) - LogicalScan(table=[[CATALOG, SALES, BONUS]]) + LogicalScan(entity=[[CATALOG, SALES, BONUS]]) ]]> @@ -1637,8 +1637,8 @@ LogicalProject(EXPR$0=[$1], EXPR$1=[$0]) LogicalAggregate(group=[{0}], EXPR$0=[COUNT()]) LogicalProject(EXPR$1=[CASE(<($5, 11), *(-1, $5), $5)]) LogicalJoin(condition=[=($1, $9)], joinType=[right]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, BONUS]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, BONUS]]) ]]> @@ -1648,9 +1648,9 @@ LogicalProject(EXPR$0=[$1], EXPR$1=[$0]) LogicalProject(EXPR$1=[$1]) LogicalJoin(condition=[=($0, $2)], joinType=[right]) LogicalProject(ENAME=[$1], CASE=[CASE(<($5, 11), *(-1, $5), $5)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(ENAME=[$0]) - LogicalScan(table=[[CATALOG, SALES, BONUS]]) + 
LogicalScan(entity=[[CATALOG, SALES, BONUS]]) ]]> @@ -1666,8 +1666,8 @@ LogicalProject(EXPR$0=[$1], EXPR$1=[$0]) LogicalAggregate(group=[{0}], EXPR$0=[COUNT()]) LogicalProject(EXPR$1=[CASE(<($9, 11), 11, *(-1, $9))]) LogicalJoin(condition=[=($5, $0)], joinType=[right]) - LogicalScan(table=[[CATALOG, SALES, BONUS]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, BONUS]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1677,9 +1677,9 @@ LogicalProject(EXPR$0=[$1], EXPR$1=[$0]) LogicalProject(EXPR$1=[$2]) LogicalJoin(condition=[=($1, $0)], joinType=[right]) LogicalProject(ENAME=[$0]) - LogicalScan(table=[[CATALOG, SALES, BONUS]]) + LogicalScan(entity=[[CATALOG, SALES, BONUS]]) LogicalProject(ENAME=[$1], CASE=[CASE(<($5, 11), 11, *(-1, $5))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1695,8 +1695,8 @@ LogicalProject(EXPR$0=[$1], EXPR$1=[$0]) LogicalAggregate(group=[{0}], EXPR$0=[COUNT()]) LogicalProject(EXPR$1=[CASE(<($9, 11), *(-1, $9), $9)]) LogicalJoin(condition=[=($5, $0)], joinType=[right]) - LogicalScan(table=[[CATALOG, SALES, BONUS]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, BONUS]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1706,9 +1706,9 @@ LogicalProject(EXPR$0=[$1], EXPR$1=[$0]) LogicalProject(EXPR$1=[$2]) LogicalJoin(condition=[=($1, $0)], joinType=[right]) LogicalProject(ENAME=[$0]) - LogicalScan(table=[[CATALOG, SALES, BONUS]]) + LogicalScan(entity=[[CATALOG, SALES, BONUS]]) LogicalProject(ENAME=[$1], CASE=[CASE(<($5, 11), *(-1, $5), $5)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1724,8 +1724,8 @@ LogicalProject(EXPR$0=[$1], EXPR$1=[$0]) LogicalAggregate(group=[{0}], EXPR$0=[COUNT()]) LogicalProject(EXPR$1=[CASE(<($5, 11), 11, *(-1, $5))]) LogicalJoin(condition=[=($1, $9)], joinType=[full]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, BONUS]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, BONUS]]) ]]> @@ -1735,9 +1735,9 @@ LogicalProject(EXPR$0=[$1], EXPR$1=[$0]) LogicalProject(EXPR$1=[CASE($1, 11, $2)]) LogicalJoin(condition=[=($0, $3)], joinType=[full]) LogicalProject(ENAME=[$1], <=[<($5, 11)], *=[*(-1, $5)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(ENAME=[$0]) - LogicalScan(table=[[CATALOG, SALES, BONUS]]) + LogicalScan(entity=[[CATALOG, SALES, BONUS]]) ]]> @@ -1753,8 +1753,8 @@ LogicalProject(EXPR$0=[$1], EXPR$1=[$0]) LogicalAggregate(group=[{0}], EXPR$0=[COUNT()]) LogicalProject(EXPR$1=[CASE(<($5, 11), *(-1, $5), $5)]) LogicalJoin(condition=[=($1, $9)], joinType=[full]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, BONUS]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, BONUS]]) ]]> @@ -1764,9 +1764,9 @@ LogicalProject(EXPR$0=[$1], EXPR$1=[$0]) LogicalProject(EXPR$1=[$1]) LogicalJoin(condition=[=($0, $2)], joinType=[full]) LogicalProject(ENAME=[$1], CASE=[CASE(<($5, 11), *(-1, $5), $5)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(ENAME=[$0]) - LogicalScan(table=[[CATALOG, SALES, BONUS]]) + LogicalScan(entity=[[CATALOG, SALES, BONUS]]) ]]> @@ -1779,9 +1779,9 @@ LogicalProject(EXPR$0=[$1], EXPR$1=[$0]) LogicalProject(SAL=[$5]) LogicalUnion(all=[true]) LogicalProject(EMPNO=[$0], ENAME=[$1], 
JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1789,10 +1789,10 @@ LogicalProject(SAL=[$5]) LogicalUnion(all=[true]) LogicalProject(SAL=[$5]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(SAL=[$5]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1806,10 +1806,10 @@ LogicalProject(SAL=[$5]) LogicalJoin(condition=[true], joinType=[inner]) LogicalUnion(all=[true]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1818,12 +1818,12 @@ LogicalProject(SAL=[$5]) LogicalUnion(all=[true]) LogicalJoin(condition=[true], joinType=[inner]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalJoin(condition=[true], joinType=[inner]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1835,12 +1835,12 @@ LogicalProject(SAL=[$5]) @@ -1848,13 +1848,13 @@ LogicalProject(SAL=[$5]) LogicalProject(SAL=[$5]) LogicalUnion(all=[true]) LogicalJoin(condition=[true], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalJoin(condition=[true], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1868,9 +1868,9 @@ LogicalProject(EXPR$0=[+(1, 2)], EXPR$1=[+($0, +(3, 4))], EXPR$2=[+(+(5, 6), $0) LogicalFilter(condition=[AND(=($0, +(7, 8)), =($0, +(8, 7)), =($0, CASE(IS NOT NULL(2), 2, null:INTEGER)))]) LogicalProject(DEPTNO=[$0], NAME=[$1], EMPNO=[$2], ENAME=[$3], JOB=[$4], MGR=[$5], HIREDATE=[$6], SAL=[$7], 
COMM=[$8], DEPTNO0=[$9], SLACKER=[$10]) LogicalJoin(condition=[=($0, $11)], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], $f9=[+($7, -(5, 5))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1888,7 +1888,7 @@ LogicalProject(EXPR$0=[3], EXPR$1=[22], EXPR$2=[26], EXPR$3=[null:INTEGER], EXPR @@ -2145,9 +2145,9 @@ LogicalProject(EXPR$0=[CAST($1):VARCHAR(128) NOT NULL], EXPR$1=[$2]) LogicalProject(DEPTNO=[$0], NAME=[$1], EMPNO=[$3], ENAME=[$4], JOB=[$5], MGR=[$6], HIREDATE=[$7], SAL=[$8], COMM=[$9], DEPTNO1=[$10], SLACKER=[$11]) LogicalJoin(condition=[=($2, $12)], joinType=[inner]) LogicalProject(DEPTNO=[$0], NAME=[$1], DEPTNO0=[$0]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], DEPTNO0=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2157,9 +2157,9 @@ LogicalProject(EXPR$0=[CAST($1):VARCHAR(128) NOT NULL], EXPR$1=[$2]) LogicalProject(DEPTNO=[$0], NAME=[$1], EMPNO=[$3], ENAME=[$4], JOB=[$5], MGR=[$6], HIREDATE=[$7], SAL=[$8], COMM=[$9], DEPTNO1=[$10], SLACKER=[$11]) LogicalJoin(condition=[=($2, $12)], joinType=[inner]) LogicalProject(DEPTNO=[$0], NAME=[$1], DEPTNO0=[$0]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], DEPTNO0=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2173,9 +2173,9 @@ AggregateRel(group=[{0}], EXPR$1=[SUM($1)], EXPR$2=[COUNT()]) ProjectRel(ENAME=[$1], EMPNO=[$0]) UnionRel(all=[true]) ProjectRel(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) - TableAccessRel(table=[[CATALOG, SALES, EMP]]) + TableAccessRel(entity=[[CATALOG, SALES, EMP]]) ProjectRel(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) - TableAccessRel(table=[[CATALOG, SALES, EMP]]) + TableAccessRel(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2187,12 +2187,12 @@ ProjectRel(ENAME=[$0], EXPR$1=[$1], EXPR$2=[CAST($2):BIGINT NOT NULL]) ProjectRel(ENAME=[$1], EMPNO=[$0]) ProjectRel(EMPNO=[$0], ENAME=[$1]) ProjectRel(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) - TableAccessRel(table=[[CATALOG, SALES, EMP]]) + TableAccessRel(entity=[[CATALOG, SALES, EMP]]) AggregateRel(group=[{0}], EXPR$1=[SUM($1)], EXPR$2=[COUNT()]) ProjectRel(ENAME=[$1], EMPNO=[$0]) ProjectRel(EMPNO=[$0], ENAME=[$1]) ProjectRel(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) - TableAccessRel(table=[[CATALOG, SALES, EMP]]) + TableAccessRel(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2204,14 +2204,14 @@ ProjectRel(ENAME=[$0], EXPR$1=[$1], EXPR$2=[CAST($2):BIGINT NOT NULL]) @@ -2221,11 +2221,11 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ select upper(substring(x FROM 1 FOR 2) || substring(x FROM 3)) as u, substring(x FROM 1 FOR 1) as s from ( - select 'table' as x from (values (true)) + select 'entity' as x from 
(values (true)) union select 'view' from (values (true)) union - select 'foreign table' from (values (true)) + select 'foreign entity' from (values (true)) ) ) where u = 'TABLE']]> @@ -2236,11 +2236,11 @@ LogicalProject(U=[$0], S=[$1]) LogicalProject(U=[UPPER(||(SUBSTRING($0, 1, 2), SUBSTRING($0, 3)))], S=[SUBSTRING($0, 1, 1)]) LogicalUnion(all=[false]) LogicalUnion(all=[false]) - LogicalProject(X=['table']) + LogicalProject(X=['entity']) LogicalValues(tuples=[[{ true }]]) LogicalProject(EXPR$0=['view']) LogicalValues(tuples=[[{ true }]]) - LogicalProject(EXPR$0=['foreign table']) + LogicalProject(EXPR$0=['foreign entity']) LogicalValues(tuples=[[{ true }]]) ]]> @@ -2276,14 +2276,14 @@ LogicalProject(EXPR$0=[false]) @@ -2299,8 +2299,8 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalJoin(condition=[=($7, $9)], joinType=[inner]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[false]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -2322,8 +2322,8 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalJoin(condition=[=($7, $9)], joinType=[right]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[false]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -2331,7 +2331,7 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], DEPTNO0=[$9], NAME=[$10]) LogicalJoin(condition=[=($7, $9)], joinType=[right]) LogicalValues(tuples=[[]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -2347,8 +2347,8 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalJoin(condition=[=($7, $9)], joinType=[left]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[false]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -2366,7 +2366,7 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ @@ -2384,7 +2384,7 @@ EmptyRel LogicalSort(sort0=[$7], dir0=[ASC]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[false]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2403,7 +2403,7 @@ LogicalSort(sort0=[$7], dir0=[ASC]) @@ -2426,9 +2426,9 @@ LogicalProject(EMPNO=[$0]) LogicalAggregate(group=[{}], EXPR$0=[MAX($0)]) LogicalProject(SAL=[$5]) LogicalFilter(condition=[=($0, $cor0.EMPNO)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) }))], variablesSet=[[$cor0]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2437,11 +2437,11 @@ 
LogicalProject(EMPNO=[$0]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($5, $9)]) LogicalCorrelate(correlation=[$cor0], joinType=[left], requiredColumns=[{0}]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{}], EXPR$0=[MAX($0)]) LogicalProject(SAL=[$5]) LogicalFilter(condition=[=($0, $cor0.EMPNO)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2457,9 +2457,9 @@ where c0 in ( LogicalProject("K0"=[$0], "C1"=[$1], "F1"."A0"=[$2], "F2"."A0"=[$3], "F0"."C0"=[$4], "F1"."C0"=[$5], "F0"."C1"=[$6], "F1"."C2"=[$7], "F2"."C3"=[$8]) LogicalFilter(condition=[IN($4, { LogicalProject(C0=[$5]) - LogicalScan(table=[[CATALOG, STRUCT, T]]) + LogicalScan(entity=[[CATALOG, STRUCT, T]]) })]) - LogicalScan(table=[[CATALOG, STRUCT, T]]) + LogicalScan(entity=[[CATALOG, STRUCT, T]]) ]]> @@ -2467,10 +2467,10 @@ LogicalProject(C0=[$5]) LogicalProject("K0"=[$0], "C1"=[$1], "F1"."A0"=[$2], "F2"."A0"=[$3], "F0"."C0"=[$4], "F1"."C0"=[$5], "F0"."C1"=[$6], "F1"."C2"=[$7], "F2"."C3"=[$8]) LogicalProject("K0"=[$0], "C1"=[$1], "F1"."A0"=[$2], "F2"."A0"=[$3], "F0"."C0"=[$4], "F1"."C0"=[$5], "F0"."C1"=[$6], "F1"."C2"=[$7], "F2"."C3"=[$8]) LogicalJoin(condition=[=($4, $9)], joinType=[inner]) - LogicalScan(table=[[CATALOG, STRUCT, T]]) + LogicalScan(entity=[[CATALOG, STRUCT, T]]) LogicalAggregate(group=[{0}]) LogicalProject(C0=[$5]) - LogicalScan(table=[[CATALOG, STRUCT, T]]) + LogicalScan(entity=[[CATALOG, STRUCT, T]]) ]]> @@ -2488,9 +2488,9 @@ LogicalProject("K0"=[$0], "C1"=[$1], "F1"."A0"=[$2], "F2"."A0"=[$3], "F0"."C0"=[ LogicalAggregate(group=[{}], EXPR$0=[MAX($0)]) LogicalProject("F1"."C0"=[$5]) LogicalFilter(condition=[=($cor0."K0", $0)]) - LogicalScan(table=[[CATALOG, STRUCT, T]]) + LogicalScan(entity=[[CATALOG, STRUCT, T]]) }))], variablesSet=[[$cor0]]) - LogicalScan(table=[[CATALOG, STRUCT, T]]) + LogicalScan(entity=[[CATALOG, STRUCT, T]]) ]]> @@ -2499,11 +2499,11 @@ LogicalProject("K0"=[$0], "C1"=[$1], "F1"."A0"=[$2], "F2"."A0"=[$3], "F0"."C0"=[ LogicalProject("K0"=[$0], "C1"=[$1], "F1"."A0"=[$2], "F2"."A0"=[$3], "F0"."C0"=[$4], "F1"."C0"=[$5], "F0"."C1"=[$6], "F1"."C2"=[$7], "F2"."C3"=[$8]) LogicalFilter(condition=[=($4, $9)]) LogicalCorrelate(correlation=[$cor0], joinType=[left], requiredColumns=[{0}]) - LogicalScan(table=[[CATALOG, STRUCT, T]]) + LogicalScan(entity=[[CATALOG, STRUCT, T]]) LogicalAggregate(group=[{}], EXPR$0=[MAX($0)]) LogicalProject("F1"."C0"=[$5]) LogicalFilter(condition=[=($cor0."K0", $0)]) - LogicalScan(table=[[CATALOG, STRUCT, T]]) + LogicalScan(entity=[[CATALOG, STRUCT, T]]) ]]> @@ -2520,9 +2520,9 @@ LogicalProject("K0"=[$0], "C1"=[$1], "F1"."A0"=[$2], "F2"."A0"=[$3], "F0"."C0"=[ LogicalFilter(condition=[IN($4, { LogicalProject(C0=[$5]) LogicalFilter(condition=[=($cor0."F1"."C2", $7)]) - LogicalScan(table=[[CATALOG, STRUCT, T]]) + LogicalScan(entity=[[CATALOG, STRUCT, T]]) })], variablesSet=[[$cor0]]) - LogicalScan(table=[[CATALOG, STRUCT, T]]) + LogicalScan(entity=[[CATALOG, STRUCT, T]]) ]]> @@ -2531,11 +2531,11 @@ LogicalProject("K0"=[$0], "C1"=[$1], "F1"."A0"=[$2], "F2"."A0"=[$3], "F0"."C0"=[ LogicalProject("K0"=[$0], "C1"=[$1], "F1"."A0"=[$2], "F2"."A0"=[$3], "F0"."C0"=[$4], "F1"."C0"=[$5], "F0"."C1"=[$6], "F1"."C2"=[$7], "F2"."C3"=[$8]) LogicalFilter(condition=[=($4, $9)]) LogicalCorrelate(correlation=[$cor0], joinType=[inner], requiredColumns=[{7}]) - 
LogicalScan(table=[[CATALOG, STRUCT, T]]) + LogicalScan(entity=[[CATALOG, STRUCT, T]]) LogicalAggregate(group=[{0}]) LogicalProject(C0=[$5]) LogicalFilter(condition=[=($cor0."F1"."C2", $7)]) - LogicalScan(table=[[CATALOG, STRUCT, T]]) + LogicalScan(entity=[[CATALOG, STRUCT, T]]) ]]> @@ -2549,14 +2549,14 @@ where extract(year from birthdate) = 2014and extract(month from birthdate) = 4]] =($9, 2014-04-01), <($9, 2014-05-01))]) - LogicalScan(table=[[CATALOG, SALES, EMP_B]]) + LogicalScan(entity=[[CATALOG, SALES, EMP_B]]) ]]> @@ -2570,14 +2570,14 @@ where extract(year from birthdate) = 2014]]> =($9, 2014-01-01), <($9, 2015-01-01))]) - LogicalScan(table=[[CATALOG, SALES, EMP_B]]) + LogicalScan(entity=[[CATALOG, SALES, EMP_B]]) ]]> @@ -2597,12 +2597,12 @@ WHERE exists ( LogicalProject(EMPNO=[$0]) LogicalFilter(condition=[IS NOT NULL($9)]) LogicalCorrelate(correlation=[$cor0], joinType=[left], requiredColumns=[{7}]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{}], agg#0=[MIN($0)]) LogicalProject($f0=[true]) LogicalFilter(condition=[=($cor0.DEPTNO, $0)]) LogicalProject(TWICEDEPTNO=[*($0, 2)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -2610,12 +2610,12 @@ LogicalProject(EMPNO=[$0]) LogicalProject(EMPNO=[$0]) LogicalFilter(condition=[IS NOT NULL($9)]) LogicalCorrelate(correlation=[$cor0], joinType=[left], requiredColumns=[{7}]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{}], agg#0=[MIN($0)]) LogicalProject($f0=[true]) LogicalProject(TWICEDEPTNO=[*($0, 2)]) LogicalFilter(condition=[=($cor0.DEPTNO, *($0, 2))]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -2635,12 +2635,12 @@ WHERE exists ( LogicalProject(EMPNO=[$0]) LogicalFilter(condition=[IS NOT NULL($9)]) LogicalCorrelate(correlation=[$cor0], joinType=[left], requiredColumns=[{7}]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{}], agg#0=[MIN($0)]) LogicalProject($f0=[true]) LogicalFilter(condition=[=($cor0.DEPTNO, $0)]) LogicalProject(TWICEDEPTNO=[*($0, 2)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -2648,13 +2648,13 @@ LogicalProject(EMPNO=[$0]) @@ -2667,8 +2667,8 @@ join sales.emp e on e.deptno = d.deptno and d.deptno not in (4, 6)]]> @@ -2676,9 +2676,9 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], EMPNO0=[$9], ENAME0=[$10], JOB0=[$11], MGR0=[$12], HIREDATE0=[$13], SAL0=[$14], COMM0=[$15], DEPTNO0=[$16], SLACKER0=[$17]) LogicalJoin(condition=[=($16, $7)], joinType=[inner]) LogicalFilter(condition=[AND(<>($7, 4), <>($7, 6))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalFilter(condition=[AND(<>($7, 4), <>($7, 6))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2697,13 +2697,13 @@ LogicalIntersect(all=[false]) LogicalIntersect(all=[false]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], 
ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 30)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2715,15 +2715,15 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalAggregate(group=[{0, 1, 2, 3, 4, 5, 6, 7, 8}], agg#0=[COUNT()]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0, 1, 2, 3, 4, 5, 6, 7, 8}], agg#0=[COUNT()]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0, 1, 2, 3, 4, 5, 6, 7, 8}], agg#0=[COUNT()]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 30)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2742,13 +2742,13 @@ LogicalIntersect(all=[true]) LogicalIntersect(all=[false]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 30)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2761,14 +2761,14 @@ LogicalIntersect(all=[true]) LogicalAggregate(group=[{0, 1, 2, 3, 4, 5, 6, 7, 8}], agg#0=[COUNT()]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0, 1, 2, 3, 4, 5, 6, 7, 8}], agg#0=[COUNT()]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 30)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2782,11 +2782,11 @@ IN (select e.deptno from sales.emp e where e.deptno = d.deptno or e.deptno = 4)] LogicalProject(DEPTNO=[$7]) LogicalFilter(condition=[=($7, $9)]) LogicalCorrelate(correlation=[$cor0], 
joinType=[inner], requiredColumns=[{7}]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0}]) LogicalProject(DEPTNO=[$7]) LogicalFilter(condition=[OR(=($7, $cor0.DEPTNO), =($7, 4))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2805,17 +2805,17 @@ LogicalProject(SAL=[$5]) LogicalJoin(condition=[AND(=($5, $9), =($5, $8))], joinType=[inner]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], SLACKER=[$8]) LogicalFilter(condition=[AND(=($7, 20), >($5, 1000))]) - LogicalScan(table=[[CATALOG, SALES, EMPNULLABLES]]) + LogicalScan(entity=[[CATALOG, SALES, EMPNULLABLES]]) LogicalAggregate(group=[{0, 1}]) LogicalProject(SAL=[$5], SAL0=[$8]) LogicalJoin(condition=[OR(=($8, $5), =($8, 4))], joinType=[inner]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], SLACKER=[$8]) LogicalFilter(condition=[AND(=($7, 20), >($5, 1000))]) - LogicalScan(table=[[CATALOG, SALES, EMPNULLABLES]]) + LogicalScan(entity=[[CATALOG, SALES, EMPNULLABLES]]) LogicalAggregate(group=[{0}]) LogicalProject(SAL=[$5]) LogicalFilter(condition=[AND(=($7, 20), >($5, 1000))]) - LogicalScan(table=[[CATALOG, SALES, EMPNULLABLES]]) + LogicalScan(entity=[[CATALOG, SALES, EMPNULLABLES]]) ]]> @@ -2825,18 +2825,18 @@ LogicalProject(SAL=[$5]) LogicalFilter(condition=[OR(IS NOT NULL($5), =($5, 4))]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], SLACKER=[$8]) LogicalFilter(condition=[AND(=($7, 20), >($5, 1000))]) - LogicalScan(table=[[CATALOG, SALES, EMPNULLABLES]]) + LogicalScan(entity=[[CATALOG, SALES, EMPNULLABLES]]) LogicalFilter(condition=[AND(OR(IS NOT NULL($0), =($0, 4)), OR(=($0, $1), =($0, 4)), OR(IS NOT NULL($1), =($1, 4)))]) LogicalAggregate(group=[{0, 1}]) LogicalProject(SAL=[$5], SAL0=[$8]) LogicalJoin(condition=[OR(=($8, $5), =($8, 4))], joinType=[inner]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], SLACKER=[$8]) LogicalFilter(condition=[AND(=($7, 20), >($5, 1000))]) - LogicalScan(table=[[CATALOG, SALES, EMPNULLABLES]]) + LogicalScan(entity=[[CATALOG, SALES, EMPNULLABLES]]) LogicalAggregate(group=[{0}]) LogicalProject(SAL=[$5]) LogicalFilter(condition=[AND(=($7, 20), >($5, 1000))]) - LogicalScan(table=[[CATALOG, SALES, EMPNULLABLES]]) + LogicalScan(entity=[[CATALOG, SALES, EMPNULLABLES]]) ]]> @@ -2855,13 +2855,13 @@ LogicalIntersect(all=[false]) LogicalIntersect(all=[false]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 30)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2869,13 +2869,13 @@ LogicalIntersect(all=[false]) LogicalIntersect(all=[false]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) 
LogicalFilter(condition=[=($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 30)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2894,13 +2894,13 @@ LogicalMinus(all=[false]) LogicalMinus(all=[false]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 30)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2908,13 +2908,13 @@ LogicalMinus(all=[false]) LogicalMinus(all=[false]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 30)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2932,15 +2932,15 @@ select * from ( LogicalMinus(all=[false]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalMinus(all=[false]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 30)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2958,14 +2958,14 @@ select * from emp where deptno = 30 LogicalUnion(all=[false]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + 
LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalIntersect(all=[false]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 30)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2984,13 +2984,13 @@ LogicalUnion(all=[true]) LogicalUnion(all=[true]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 30)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2998,13 +2998,13 @@ LogicalUnion(all=[true]) LogicalUnion(all=[true]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 30)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3023,13 +3023,13 @@ LogicalUnion(all=[true]) LogicalUnion(all=[true]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 30)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3037,13 +3037,13 @@ LogicalUnion(all=[true]) LogicalUnion(all=[true]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 20)]) - 
LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 30)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3062,13 +3062,13 @@ LogicalUnion(all=[false]) LogicalUnion(all=[false]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 30)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3076,13 +3076,13 @@ LogicalUnion(all=[false]) LogicalUnion(all=[false]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 30)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3101,13 +3101,13 @@ LogicalUnion(all=[true]) LogicalUnion(all=[false]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 30)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3126,13 +3126,13 @@ LogicalUnion(all=[false]) LogicalUnion(all=[true]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) 
LogicalFilter(condition=[=($7, 30)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3140,13 +3140,13 @@ LogicalUnion(all=[false]) LogicalUnion(all=[false]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 30)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3163,7 +3163,7 @@ group by deptno]]> LogicalProject(EXPR$0=[CAST(/(SUM(+(+($1, *(2, $2)), *(3, $3))) OVER (PARTITION BY $0 RANGE BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING), COUNT(+(+($1, *(2, $2)), *(3, $3))) OVER (PARTITION BY $0 RANGE BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING))):INTEGER NOT NULL]) LogicalAggregate(group=[{0}], agg#0=[SUM($1)], agg#1=[MIN($2)], agg#2=[AVG($2)]) LogicalProject(DEPTNO=[$7], SAL=[$5], EMPNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3173,7 +3173,7 @@ LogicalProject(EXPR$0=[CAST(/($2, $3)):INTEGER NOT NULL]) LogicalProject(DEPTNO=[$0], $1=[+(+($1, *(2, $2)), *(3, $3))]) LogicalAggregate(group=[{0}], agg#0=[SUM($1)], agg#1=[MIN($2)], agg#2=[AVG($2)]) LogicalProject(DEPTNO=[$7], SAL=[$5], EMPNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3185,14 +3185,14 @@ LogicalProject(EXPR$0=[CAST(/($2, $3)):INTEGER NOT NULL]) @@ -3207,7 +3207,7 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalFilter(condition=[OR(IS NULL($5), IS NOT NULL($5))]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], SLACKER=[$8]) LogicalFilter(condition=[AND(=($7, 20), >($5, 1000))]) - LogicalScan(table=[[CATALOG, SALES, EMPNULLABLES]]) + LogicalScan(entity=[[CATALOG, SALES, EMPNULLABLES]]) ]]> @@ -3215,7 +3215,7 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], SLACKER=[$7]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], SLACKER=[$8]) LogicalFilter(condition=[AND(=($7, 20), >($5, 1000))]) - LogicalScan(table=[[CATALOG, SALES, EMPNULLABLES]]) + LogicalScan(entity=[[CATALOG, SALES, EMPNULLABLES]]) ]]> @@ -3230,7 +3230,7 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalFilter(condition=[OR(IS NOT NULL($5), IS NULL($5))]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], SLACKER=[$8]) LogicalFilter(condition=[AND(=($7, 20), >($5, 1000))]) - LogicalScan(table=[[CATALOG, SALES, EMPNULLABLES]]) + LogicalScan(entity=[[CATALOG, SALES, EMPNULLABLES]]) ]]> @@ -3238,7 +3238,7 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], SLACKER=[$7]) 
LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], SLACKER=[$8]) LogicalFilter(condition=[AND(=($7, 20), >($5, 1000))]) - LogicalScan(table=[[CATALOG, SALES, EMPNULLABLES]]) + LogicalScan(entity=[[CATALOG, SALES, EMPNULLABLES]]) ]]> @@ -3257,7 +3257,7 @@ group by deptno]]> LogicalProject(EXPR$0=[$1], EXPR$1=[$2], EXPR$2=[SUM($2) OVER (PARTITION BY $1 ORDER BY $2 RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW)]) LogicalAggregate(group=[{0}], EXPR$0=[MIN($1)], EXPR$1=[SUM($2)]) LogicalProject(DEPTNO=[$7], EMPNO=[$0], SAL=[$5]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3266,7 +3266,7 @@ LogicalProject(EXPR$0=[$1], EXPR$1=[$2], $2=[$3]) LogicalWindow(window#0=[window(partition {1} order by [2] range between UNBOUNDED PRECEDING and CURRENT ROW aggs [SUM($2)])]) LogicalAggregate(group=[{0}], EXPR$0=[MIN($1)], EXPR$1=[SUM($2)]) LogicalProject(DEPTNO=[$7], EMPNO=[$0], SAL=[$5]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3283,9 +3283,9 @@ LogicalAggregate(group=[{0}], EXPR$1=[SUM($1)]) LogicalProject(ENAME=[$1], U=[$9]) LogicalUnion(all=[true]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], U=[2]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], U=[3]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3294,10 +3294,10 @@ LogicalAggregate(group=[{0}], EXPR$1=[SUM($1)]) LogicalUnion(all=[true]) LogicalAggregate(group=[{0}], EXPR$1=[SUM($1)]) LogicalProject(ENAME=[$1], U=[2]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0}], EXPR$1=[SUM($1)]) LogicalProject(ENAME=[$1], U=[3]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3314,9 +3314,9 @@ LogicalAggregate(group=[{0}], EXPR$1=[SUM($1)]) LogicalProject(ENAME=[$1], U=[$9]) LogicalUnion(all=[true]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], U=[null:INTEGER]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], U=[null:INTEGER]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3325,10 +3325,10 @@ LogicalAggregate(group=[{0}], EXPR$1=[SUM($1)]) LogicalUnion(all=[true]) LogicalAggregate(group=[{0}], EXPR$1=[SUM($1)]) LogicalProject(ENAME=[$1], U=[null:INTEGER]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0}], EXPR$1=[SUM($1)]) LogicalProject(ENAME=[$1], U=[null:INTEGER]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3346,9 +3346,9 @@ LogicalAggregate(group=[{0}], EXPR$1=[SUM($1)]) LogicalProject(ENAME=[$1], MGR=[$3]) LogicalUnion(all=[true]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], 
JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3357,10 +3357,10 @@ LogicalAggregate(group=[{0}], EXPR$1=[SUM($1)]) LogicalUnion(all=[true]) LogicalAggregate(group=[{0}], EXPR$1=[SUM($1)]) LogicalProject(ENAME=[$1], MGR=[$3]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0}], EXPR$1=[SUM($1)]) LogicalProject(ENAME=[$1], MGR=[$3]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3377,9 +3377,9 @@ LogicalAggregate(group=[{}], EXPR$0=[SUM($0)]) LogicalProject(MGR=[$3]) LogicalUnion(all=[true]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3388,10 +3388,10 @@ LogicalAggregate(group=[{}], EXPR$0=[SUM($0)]) LogicalUnion(all=[true]) LogicalAggregate(group=[{}], EXPR$0=[SUM($0)]) LogicalProject(MGR=[$3]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{}], EXPR$0=[SUM($0)]) LogicalProject(MGR=[$3]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3408,9 +3408,9 @@ LogicalAggregate(group=[{0}], EXPR$1=[COUNT()]) LogicalProject(ENAME=[$1]) LogicalUnion(all=[true]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3419,10 +3419,10 @@ LogicalAggregate(group=[{0}], EXPR$1=[$SUM0($1)]) LogicalUnion(all=[true]) LogicalAggregate(group=[{0}], EXPR$1=[COUNT()]) LogicalProject(ENAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0}], EXPR$1=[COUNT()]) LogicalProject(ENAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3439,9 +3439,9 @@ LogicalAggregate(group=[{0}], EXPR$1=[COUNT($1)]) LogicalProject(ENAME=[$1], MGR=[$3]) LogicalUnion(all=[true]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3450,10 +3450,10 @@ LogicalAggregate(group=[{0}], EXPR$1=[$SUM0($1)]) LogicalUnion(all=[true]) LogicalAggregate(group=[{0}], EXPR$1=[COUNT($1)]) LogicalProject(ENAME=[$1], MGR=[$3]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0}], EXPR$1=[COUNT($1)]) LogicalProject(ENAME=[$1], MGR=[$3]) - LogicalScan(table=[[CATALOG, 
SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3470,9 +3470,9 @@ LogicalAggregate(group=[{0}], EXPR$1=[MAX($1)]) LogicalProject(ENAME=[$1], MGR=[$3]) LogicalUnion(all=[true]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3481,10 +3481,10 @@ LogicalAggregate(group=[{0}], EXPR$1=[MAX($1)]) LogicalUnion(all=[true]) LogicalAggregate(group=[{0}], EXPR$1=[MAX($1)]) LogicalProject(ENAME=[$1], MGR=[$3]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0}], EXPR$1=[MAX($1)]) LogicalProject(ENAME=[$1], MGR=[$3]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3501,9 +3501,9 @@ LogicalAggregate(group=[{0}], EXPR$1=[MIN($1)]) LogicalProject(ENAME=[$1], EMPNO=[$0]) LogicalUnion(all=[true]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3512,10 +3512,10 @@ LogicalAggregate(group=[{0}], EXPR$1=[MIN($1)]) LogicalUnion(all=[true]) LogicalAggregate(group=[{0}], EXPR$1=[MIN($1)]) LogicalProject(ENAME=[$1], EMPNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0}], EXPR$1=[MIN($1)]) LogicalProject(ENAME=[$1], EMPNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3532,9 +3532,9 @@ LogicalAggregate(group=[{0}], EXPR$1=[AVG($1)]) LogicalProject(ENAME=[$1], EMPNO=[$0]) LogicalUnion(all=[true]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3542,9 +3542,9 @@ LogicalAggregate(group=[{0}], EXPR$1=[AVG($1)]) LogicalAggregate(group=[{0}], EXPR$1=[AVG($1)]) LogicalUnion(all=[true]) LogicalProject(ENAME=[$1], EMPNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(ENAME=[$1], EMPNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3561,9 +3561,9 @@ LogicalAggregate(group=[{0}], EXPR$1=[SUM($1)], EXPR$2=[COUNT()], EXPR$3=[MIN($2 LogicalProject(ENAME=[$1], EMPNO=[$0], DEPTNO=[$7]) LogicalUnion(all=[true]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], 
COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3572,10 +3572,10 @@ LogicalAggregate(group=[{0}], EXPR$1=[SUM($1)], EXPR$2=[$SUM0($2)], EXPR$3=[MIN( LogicalUnion(all=[true]) LogicalAggregate(group=[{0}], EXPR$1=[SUM($1)], EXPR$2=[COUNT()], EXPR$3=[MIN($2)], EXPR$4=[MAX($1)]) LogicalProject(ENAME=[$1], EMPNO=[$0], DEPTNO=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0}], EXPR$1=[SUM($1)], EXPR$2=[COUNT()], EXPR$3=[MIN($2)], EXPR$4=[MAX($1)]) LogicalProject(ENAME=[$1], EMPNO=[$0], DEPTNO=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3592,9 +3592,9 @@ LogicalAggregate(group=[{0, 1}], groups=[[{0, 1}, {0}, {}]], EXPR$2=[SUM($2)]) LogicalProject(DEPTNO=[$7], JOB=[$2], U=[$9]) LogicalUnion(all=[true]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], U=[2]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], U=[3]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3603,10 +3603,10 @@ LogicalAggregate(group=[{0, 1}], groups=[[{0, 1}, {0}, {}]], EXPR$2=[SUM($2)]) LogicalUnion(all=[true]) LogicalAggregate(group=[{0, 1}], EXPR$2=[SUM($2)]) LogicalProject(DEPTNO=[$7], JOB=[$2], U=[2]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0, 1}], EXPR$2=[SUM($2)]) LogicalProject(DEPTNO=[$7], JOB=[$2], U=[3]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3623,9 +3623,9 @@ LogicalAggregate(group=[{0, 1}], groups=[[{0, 1}, {0}, {}]], EXPR$2=[SUM($2)]) LogicalProject(DEPTNO=[$7], JOB=[$2], U=[$9]) LogicalUnion(all=[true]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], U=[null:INTEGER]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], U=[null:INTEGER]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3634,10 +3634,10 @@ LogicalAggregate(group=[{0, 1}], groups=[[{0, 1}, {0}, {}]], EXPR$2=[SUM($2)]) LogicalUnion(all=[true]) LogicalAggregate(group=[{0, 1}], EXPR$2=[SUM($2)]) LogicalProject(DEPTNO=[$7], JOB=[$2], U=[null:INTEGER]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0, 1}], EXPR$2=[SUM($2)]) LogicalProject(DEPTNO=[$7], JOB=[$2], U=[null:INTEGER]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3655,9 +3655,9 @@ LogicalAggregate(group=[{0, 1}], groups=[[{0, 1}, {0}, {}]], EXPR$2=[SUM($2)]) LogicalProject(DEPTNO=[$7], JOB=[$2], MGR=[$3]) LogicalUnion(all=[true]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) 
- LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3666,10 +3666,10 @@ LogicalAggregate(group=[{0, 1}], groups=[[{0, 1}, {0}, {}]], EXPR$2=[SUM($2)]) LogicalUnion(all=[true]) LogicalAggregate(group=[{0, 1}], EXPR$2=[SUM($2)]) LogicalProject(DEPTNO=[$7], JOB=[$2], MGR=[$3]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0, 1}], EXPR$2=[SUM($2)]) LogicalProject(DEPTNO=[$7], JOB=[$2], MGR=[$3]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3686,9 +3686,9 @@ LogicalAggregate(group=[{0, 1}], groups=[[{0, 1}, {0}, {}]], EXPR$2=[COUNT()]) LogicalProject(DEPTNO=[$7], JOB=[$2]) LogicalUnion(all=[true]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3697,10 +3697,10 @@ LogicalAggregate(group=[{0, 1}], groups=[[{0, 1}, {0}, {}]], EXPR$2=[$SUM0($2)]) LogicalUnion(all=[true]) LogicalAggregate(group=[{0, 1}], EXPR$2=[COUNT()]) LogicalProject(DEPTNO=[$7], JOB=[$2]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0, 1}], EXPR$2=[COUNT()]) LogicalProject(DEPTNO=[$7], JOB=[$2]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3717,9 +3717,9 @@ LogicalAggregate(group=[{0, 1}], groups=[[{0, 1}, {0}, {}]], EXPR$2=[COUNT($2)]) LogicalProject(DEPTNO=[$7], JOB=[$2], MGR=[$3]) LogicalUnion(all=[true]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3728,10 +3728,10 @@ LogicalAggregate(group=[{0, 1}], groups=[[{0, 1}, {0}, {}]], EXPR$2=[$SUM0($2)]) LogicalUnion(all=[true]) LogicalAggregate(group=[{0, 1}], EXPR$2=[COUNT($2)]) LogicalProject(DEPTNO=[$7], JOB=[$2], MGR=[$3]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0, 1}], EXPR$2=[COUNT($2)]) LogicalProject(DEPTNO=[$7], JOB=[$2], MGR=[$3]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3748,9 +3748,9 @@ LogicalAggregate(group=[{0, 1}], groups=[[{0, 1}, {0}, {}]], EXPR$2=[MAX($2)]) LogicalProject(DEPTNO=[$7], JOB=[$2], MGR=[$3]) LogicalUnion(all=[true]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3759,10 +3759,10 @@ LogicalAggregate(group=[{0, 1}], groups=[[{0, 1}, {0}, {}]], EXPR$2=[MAX($2)]) LogicalUnion(all=[true]) 
LogicalAggregate(group=[{0, 1}], EXPR$2=[MAX($2)]) LogicalProject(DEPTNO=[$7], JOB=[$2], MGR=[$3]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0, 1}], EXPR$2=[MAX($2)]) LogicalProject(DEPTNO=[$7], JOB=[$2], MGR=[$3]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3779,9 +3779,9 @@ LogicalAggregate(group=[{0, 1}], groups=[[{0, 1}, {0}, {}]], EXPR$2=[MIN($2)]) LogicalProject(DEPTNO=[$7], JOB=[$2], EMPNO=[$0]) LogicalUnion(all=[true]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3790,10 +3790,10 @@ LogicalAggregate(group=[{0, 1}], groups=[[{0, 1}, {0}, {}]], EXPR$2=[MIN($2)]) LogicalUnion(all=[true]) LogicalAggregate(group=[{0, 1}], EXPR$2=[MIN($2)]) LogicalProject(DEPTNO=[$7], JOB=[$2], EMPNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0, 1}], EXPR$2=[MIN($2)]) LogicalProject(DEPTNO=[$7], JOB=[$2], EMPNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3810,9 +3810,9 @@ LogicalAggregate(group=[{0, 1}], groups=[[{0, 1}, {0}, {}]], EXPR$2=[AVG($2)]) LogicalProject(DEPTNO=[$7], JOB=[$2], EMPNO=[$0]) LogicalUnion(all=[true]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3820,9 +3820,9 @@ LogicalAggregate(group=[{0, 1}], groups=[[{0, 1}, {0}, {}]], EXPR$2=[AVG($2)]) LogicalAggregate(group=[{0, 1}], groups=[[{0, 1}, {0}, {}]], EXPR$2=[AVG($2)]) LogicalUnion(all=[true]) LogicalProject(DEPTNO=[$7], JOB=[$2], EMPNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(DEPTNO=[$7], JOB=[$2], EMPNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3839,9 +3839,9 @@ LogicalAggregate(group=[{0, 1}], groups=[[{0, 1}, {0}, {}]], EXPR$2=[SUM($2)], E LogicalProject(DEPTNO=[$7], JOB=[$2], EMPNO=[$0]) LogicalUnion(all=[true]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3850,10 +3850,10 @@ LogicalAggregate(group=[{0, 1}], groups=[[{0, 1}, {0}, {}]], EXPR$2=[SUM($2)], E LogicalUnion(all=[true]) LogicalAggregate(group=[{0, 1}], EXPR$2=[SUM($2)], EXPR$3=[COUNT()], EXPR$4=[MIN($0)], EXPR$5=[MAX($2)]) LogicalProject(DEPTNO=[$7], JOB=[$2], EMPNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + 
LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0, 1}], EXPR$2=[SUM($2)], EXPR$3=[COUNT()], EXPR$4=[MIN($0)], EXPR$5=[MAX($2)]) LogicalProject(DEPTNO=[$7], JOB=[$2], EMPNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3873,10 +3873,10 @@ LogicalAggregate(group=[{0}], EXPR$1=[COUNT() FILTER $1]) LogicalUnion(all=[true]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[>($7, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3886,11 +3886,11 @@ LogicalAggregate(group=[{0}], EXPR$1=[$SUM0($1)]) LogicalAggregate(group=[{0}], EXPR$1=[COUNT() FILTER $1]) LogicalProject(DEPTNO=[$7], $f1=[=($2, 'CLERK')]) LogicalFilter(condition=[=($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0}], EXPR$1=[COUNT() FILTER $1]) LogicalProject(DEPTNO=[$7], $f1=[=($2, 'CLERK')]) LogicalFilter(condition=[>($7, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3903,7 +3903,7 @@ LogicalAggregate(group=[{0}], EXPR$1=[$SUM0($1)]) LogicalAggregate(group=[{0, 1, 2}]) LogicalFilter(condition=[>($1, 5000)]) LogicalProject(ENAME=[$1], SAL=[$5], DEPTNO=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3911,7 +3911,7 @@ LogicalAggregate(group=[{0, 1, 2}]) LogicalFilter(condition=[>($1, 5000)]) LogicalAggregate(group=[{0, 1, 2}]) LogicalProject(ENAME=[$1], SAL=[$5], DEPTNO=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3924,7 +3924,7 @@ LogicalFilter(condition=[>($1, 5000)]) LogicalAggregate(group=[{0, 1, 2}], groups=[[{0, 1, 2}, {0, 1}, {0}, {}]]) LogicalFilter(condition=[>($1, 5000)]) LogicalProject(ENAME=[$1], SAL=[$5], DEPTNO=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3933,7 +3933,7 @@ LogicalAggregate(group=[{0, 1, 2}], groups=[[{0, 1, 2}, {0, 1}, {0}, {}]]) LogicalFilter(condition=[>($1, 5000)]) LogicalAggregate(group=[{0, 1, 2}]) LogicalProject(ENAME=[$1], SAL=[$5], DEPTNO=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3948,14 +3948,14 @@ LogicalAggregate(group=[{0, 1, 2}], groups=[[{0, 1, 2}, {0, 1}, {0}, {}]]) LogicalProject(DEPTNO=[$0], EXPR$1=[$2]) LogicalAggregate(group=[{0, 1}], EXPR$1=[MAX($2)]) LogicalProject(DEPTNO=[$7], FOUR=[4], MGR=[$3]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3970,14 +3970,14 @@ LogicalAggregate(group=[{0}], EXPR$1=[MAX($2)]) LogicalProject(DEPTNO=[$0], EXPR$1=[$2]) LogicalAggregate(group=[{0, 1}], EXPR$1=[MAX($2)]) LogicalProject(DEPTNO=[$7], FOUR=[4], ENAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3992,7 +3992,7 @@ LogicalAggregate(group=[{0}], EXPR$1=[MAX($2)]) LogicalProject(DEPTNO=[$0], EXPR$1=[$4]) LogicalAggregate(group=[{0, 1, 2, 3}], EXPR$1=[MAX($4)]) LogicalProject(DEPTNO=[$7], FOUR=[4], TWO_PLUS_THREE=[+(2, 3)], DEPTNO42=[+($7, 
42)], MGR=[$3]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4000,7 +4000,7 @@ LogicalProject(DEPTNO=[$0], EXPR$1=[$4]) LogicalProject(DEPTNO=[$0], EXPR$1=[$2]) LogicalAggregate(group=[{0, 3}], EXPR$1=[MAX($4)]) LogicalProject(DEPTNO=[$7], FOUR=[4], TWO_PLUS_THREE=[+(2, 3)], DEPTNO42=[+($7, 42)], MGR=[$3]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4015,14 +4015,14 @@ LogicalProject(DEPTNO=[$0], EXPR$1=[$2]) LogicalProject(DEPTNO=[$1], EXPR$1=[$2]) LogicalAggregate(group=[{0, 1}], EXPR$1=[MAX($2)]) LogicalProject(FOUR=[4], DEPTNO=[$7], MGR=[$3]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4037,14 +4037,14 @@ LogicalAggregate(group=[{1}], EXPR$1=[MAX($2)]) LogicalProject(DEPTNO=[$1], EXPR$1=[$2]) LogicalAggregate(group=[{0, 1}], EXPR$1=[MAX($2)]) LogicalProject($f0=[+(42, 24)], DEPTNO=[$7], MGR=[$3]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4058,7 +4058,7 @@ LogicalAggregate(group=[{1}], EXPR$1=[MAX($2)]) @@ -4066,7 +4066,7 @@ LogicalAggregate(group=[{0, 1}], EXPR$2=[MAX($2)]) LogicalProject(EXPR$0=[$0], EXPR$1=[+(2, 3)], EXPR$2=[$1]) LogicalAggregate(group=[{0}], EXPR$2=[MAX($2)]) LogicalProject(EXPR$0=[4], EXPR$1=[+(2, 3)], MGR=[$3]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4080,7 +4080,7 @@ LogicalProject(EXPR$0=[$0], EXPR$1=[+(2, 3)], EXPR$2=[$1]) @@ -4088,7 +4088,7 @@ LogicalAggregate(group=[{0, 1}], EXPR$2=[MAX($2)]) LogicalProject(EXPR$0=[$0], EXPR$1=[+(2, 3)], EXPR$2=[$1]) LogicalAggregate(group=[{0}], EXPR$2=[MAX($2)]) LogicalProject(EXPR$0=[4], EXPR$1=[+(2, 3)], FIVE=[5]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4102,7 +4102,7 @@ LogicalProject(EXPR$0=[$0], EXPR$1=[+(2, 3)], EXPR$2=[$1]) @@ -4110,7 +4110,7 @@ LogicalAggregate(group=[{0, 1}], EXPR$2=[MAX($2)]) LogicalProject(EXPR$0=[$0], EXPR$1=[+(2, 3)], EXPR$2=[$1]) LogicalAggregate(group=[{0}], EXPR$2=[MAX($2)]) LogicalProject(EXPR$0=[4], EXPR$1=[+(2, 3)], $f2=[5]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4124,9 +4124,9 @@ select 2, deptno, job from emp as e2]]> @@ -4134,9 +4134,9 @@ LogicalUnion(all=[true]) LogicalProject(EXPR$0=[2], DEPTNO=[$0], JOB=[$1]) LogicalUnion(all=[true]) LogicalProject(DEPTNO=[$7], JOB=[$2]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(DEPTNO=[$7], JOB=[$2]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4150,9 +4150,9 @@ select 1, deptno, job from emp as e2]]> @@ -4166,9 +4166,9 @@ select 2, 3 from emp as e2]]> @@ -4176,9 +4176,9 @@ LogicalUnion(all=[true]) LogicalProject(EXPR$0=[2], EXPR$1=[3]) LogicalUnion(all=[true]) LogicalProject(EXPR$0=[2]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EXPR$0=[2]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4192,18 +4192,18 @@ LogicalProject(ENAME=[$1]) LogicalFilter(condition=[AND(=($7, $9), =($9, $18))]) LogicalJoin(condition=[true], joinType=[inner]) LogicalJoin(condition=[true], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + 
LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4218,16 +4218,16 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], DEPTNO0=[$9], NAME=[$10]) LogicalFilter(condition=[>($9, 3)]) LogicalJoin(condition=[=($7, $9)], joinType=[left]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> ($9, 3))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -4240,8 +4240,8 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalProject(ENAME=[$1]) LogicalFilter(condition=[AND(=($7, $9), =($1, 'foo'))]) LogicalJoin(condition=[true], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -4249,8 +4249,8 @@ LogicalProject(ENAME=[$1]) LogicalProject(ENAME=[$1]) LogicalJoin(condition=[=($7, $9)], joinType=[inner], semiJoinDone=[true]) LogicalFilter(condition=[=($1, 'foo')]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -4264,9 +4264,9 @@ LogicalProject(ENAME=[$1]) LogicalFilter(condition=[AND(=($7, $9), =($9, $18))]) LogicalJoin(condition=[true], joinType=[inner]) LogicalJoin(condition=[true], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4274,9 +4274,9 @@ LogicalProject(ENAME=[$1]) LogicalProject(ENAME=[$1]) LogicalJoin(condition=[=($9, $18)], joinType=[inner], semiJoinDone=[true]) LogicalJoin(condition=[=($7, $9)], joinType=[inner], semiJoinDone=[true]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4289,8 +4289,8 @@ LogicalProject(ENAME=[$1]) ProjectRel(DNAME=[$10], ENAME=[$1]) FilterRel(condition=[=($10, 'Propane')]) JoinRel(condition=[=($7, $9)], joinType=[inner]) - TableAccessRel(table=[[CATALOG, SALES, EMP]]) - TableAccessRel(table=[[CATALOG, SALES, DEPT]]) + TableAccessRel(entity=[[CATALOG, SALES, EMP]]) + TableAccessRel(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -4308,26 +4308,26 @@ LogicalProject(A=[$0], B=[$1], C=[$2], D=[$3], E=[$4], F=[$5], G=[$6], H=[$7], I LogicalJoin(condition=[AND(=($0, $2), =($1, $3))], joinType=[left]) LogicalProject(A=[$0], B=[$1]) LogicalJoin(condition=[=($0, $1)], joinType=[right]) - LogicalScan(table=[[CATALOG, SALES, A]]) - LogicalScan(table=[[CATALOG, SALES, B]]) + LogicalScan(entity=[[CATALOG, SALES, A]]) + LogicalScan(entity=[[CATALOG, SALES, B]]) LogicalProject(C=[$0], D=[$1]) LogicalJoin(condition=[=($0, $1)], joinType=[full]) - 
LogicalScan(table=[[CATALOG, SALES, C]]) - LogicalScan(table=[[CATALOG, SALES, D]]) + LogicalScan(entity=[[CATALOG, SALES, C]]) + LogicalScan(entity=[[CATALOG, SALES, D]]) LogicalProject(E=[$0], F=[$1], G=[$2], H=[$3]) LogicalJoin(condition=[AND(=($0, $2), =($1, $3))], joinType=[right]) LogicalProject(E=[$0], F=[$1]) LogicalJoin(condition=[=($0, $1)], joinType=[full]) - LogicalScan(table=[[CATALOG, SALES, E]]) - LogicalScan(table=[[CATALOG, SALES, F]]) + LogicalScan(entity=[[CATALOG, SALES, E]]) + LogicalScan(entity=[[CATALOG, SALES, F]]) LogicalProject(G=[$0], H=[$1]) LogicalJoin(condition=[=($0, $1)], joinType=[left]) - LogicalScan(table=[[CATALOG, SALES, G]]) - LogicalScan(table=[[CATALOG, SALES, H]]) + LogicalScan(entity=[[CATALOG, SALES, G]]) + LogicalScan(entity=[[CATALOG, SALES, H]]) LogicalProject(I=[$0], J=[$1]) LogicalJoin(condition=[=($0, $1)], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, I]]) - LogicalScan(table=[[CATALOG, SALES, J]]) + LogicalScan(entity=[[CATALOG, SALES, I]]) + LogicalScan(entity=[[CATALOG, SALES, J]]) ]]> @@ -4336,20 +4336,20 @@ MultiJoin(joinFilter=[AND(=($0, $8), =($7, $9), =($8, $9))], isFullOuterJoin=[fa MultiJoin(joinFilter=[true], isFullOuterJoin=[false], joinTypes=[[RIGHT, INNER]], outerJoinConditions=[[AND(=($0, $4), =($1, $5), =($2, $6), =($3, $7)), NULL]], projFields=[[ALL, ALL]]) MultiJoin(joinFilter=[true], isFullOuterJoin=[false], joinTypes=[[INNER, LEFT]], outerJoinConditions=[[NULL, AND(=($0, $2), =($1, $3))]], projFields=[[ALL, ALL]]) MultiJoin(joinFilter=[true], isFullOuterJoin=[false], joinTypes=[[RIGHT, INNER]], outerJoinConditions=[[=($0, $1), NULL]], projFields=[[ALL, ALL]]) - LogicalScan(table=[[CATALOG, SALES, A]]) - LogicalScan(table=[[CATALOG, SALES, B]]) + LogicalScan(entity=[[CATALOG, SALES, A]]) + LogicalScan(entity=[[CATALOG, SALES, B]]) MultiJoin(joinFilter=[=($0, $1)], isFullOuterJoin=[true], joinTypes=[[INNER, INNER]], outerJoinConditions=[[NULL, NULL]], projFields=[[ALL, ALL]]) - LogicalScan(table=[[CATALOG, SALES, C]]) - LogicalScan(table=[[CATALOG, SALES, D]]) + LogicalScan(entity=[[CATALOG, SALES, C]]) + LogicalScan(entity=[[CATALOG, SALES, D]]) MultiJoin(joinFilter=[true], isFullOuterJoin=[false], joinTypes=[[RIGHT, INNER]], outerJoinConditions=[[AND(=($0, $2), =($1, $3)), NULL]], projFields=[[ALL, ALL]]) MultiJoin(joinFilter=[=($0, $1)], isFullOuterJoin=[true], joinTypes=[[INNER, INNER]], outerJoinConditions=[[NULL, NULL]], projFields=[[ALL, ALL]]) - LogicalScan(table=[[CATALOG, SALES, E]]) - LogicalScan(table=[[CATALOG, SALES, F]]) + LogicalScan(entity=[[CATALOG, SALES, E]]) + LogicalScan(entity=[[CATALOG, SALES, F]]) MultiJoin(joinFilter=[true], isFullOuterJoin=[false], joinTypes=[[INNER, LEFT]], outerJoinConditions=[[NULL, =($0, $1)]], projFields=[[ALL, ALL]]) - LogicalScan(table=[[CATALOG, SALES, G]]) - LogicalScan(table=[[CATALOG, SALES, H]]) - LogicalScan(table=[[CATALOG, SALES, I]]) - LogicalScan(table=[[CATALOG, SALES, J]]) + LogicalScan(entity=[[CATALOG, SALES, G]]) + LogicalScan(entity=[[CATALOG, SALES, H]]) + LogicalScan(entity=[[CATALOG, SALES, I]]) + LogicalScan(entity=[[CATALOG, SALES, J]]) ]]> @@ -4363,9 +4363,9 @@ LogicalProject(ENAME=[$1]) LogicalFilter(condition=[AND(=($7, $9), =($0, $11))]) LogicalJoin(condition=[true], joinType=[inner]) LogicalJoin(condition=[true], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + 
LogicalScan(entity=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4375,11 +4375,11 @@ LogicalProject(ENAME=[$1]) LogicalJoin(condition=[=($7, $9)], joinType=[inner], semiJoinDone=[true]) SemiJoin(condition=[=($0, $9)], joinType=[inner]) SemiJoin(condition=[=($7, $9)], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4393,9 +4393,9 @@ LogicalProject(ENAME=[$1]) LogicalFilter(condition=[AND(=($7, $9), =($9, $18))]) LogicalJoin(condition=[true], joinType=[inner]) LogicalJoin(condition=[true], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4404,12 +4404,12 @@ LogicalProject(ENAME=[$1]) LogicalJoin(condition=[=($9, $18)], joinType=[inner], semiJoinDone=[true]) LogicalJoin(condition=[=($7, $9)], joinType=[inner], semiJoinDone=[true]) SemiJoin(condition=[=($7, $9)], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) SemiJoin(condition=[=($0, $9)], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4422,16 +4422,16 @@ LogicalProject(ENAME=[$1]) LogicalProject(ENAME=[$1]) LogicalFilter(condition=[=($7, $9)]) LogicalJoin(condition=[true], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -4445,8 +4445,8 @@ LogicalProject(ENAME=[$0], EXPR$1=[$1], EXPR$2=[$2], DEPTNO=[$3]) LogicalFilter(condition=[=($3, $4)]) LogicalJoin(condition=[true], joinType=[inner]) LogicalProject(ENAME=[$1], EXPR$1=[TRIM(FLAG(BOTH), ' ', $2)], EXPR$2=[*($5, 2)], DEPTNO=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -4455,9 +4455,9 @@ LogicalProject(ENAME=[$0], EXPR$1=[$1], EXPR$2=[$2], DEPTNO=[$3]) LogicalJoin(condition=[=($3, $4)], joinType=[inner], semiJoinDone=[true]) LogicalProject(ENAME=[$1], EXPR$1=[TRIM(FLAG(BOTH), ' ', $2)], EXPR$2=[*($5, 2)], DEPTNO=[$7]) SemiJoin(condition=[=($7, $9)], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -4470,8 +4470,8 @@ LogicalProject(ENAME=[$0], EXPR$1=[$1], EXPR$2=[$2], DEPTNO=[$3]) LogicalProject(ENAME=[$1]) 
LogicalFilter(condition=[AND(=($7, $9), =($1, 'foo'))]) LogicalJoin(condition=[true], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -4480,9 +4480,9 @@ LogicalProject(ENAME=[$1]) LogicalJoin(condition=[=($7, $9)], joinType=[inner], semiJoinDone=[true]) LogicalFilter(condition=[=($1, 'foo')]) SemiJoin(condition=[=($7, $9)], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -4496,9 +4496,9 @@ LogicalProject(ENAME=[$1]) LogicalFilter(condition=[AND(=($7, $9), =($9, $18), =($10, 'foo'))]) LogicalJoin(condition=[true], joinType=[inner]) LogicalJoin(condition=[true], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4506,10 +4506,10 @@ LogicalProject(ENAME=[$1]) LogicalProject(ENAME=[$1]) LogicalJoin(condition=[=($9, $18)], joinType=[inner], semiJoinDone=[true]) LogicalJoin(condition=[=($7, $9)], joinType=[inner], semiJoinDone=[true]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalFilter(condition=[=($1, 'foo')]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4521,9 +4521,9 @@ LogicalProject(ENAME=[$1]) @@ -4537,7 +4537,7 @@ UnionRel(all=[true]) LogicalAggregate(group=[{1}], EXPR$1=[$SUM0($2)]) LogicalFilter(condition=[>(CAST($0):BIGINT NOT NULL, CAST('12'):BIGINT NOT NULL)]) LogicalAggregate(group=[{5, 7}], EXPR$1=[COUNT()]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4545,7 +4545,7 @@ LogicalAggregate(group=[{1}], EXPR$1=[$SUM0($2)]) LogicalAggregate(group=[{1}], EXPR$1=[$SUM0($2)]) LogicalAggregate(group=[{5, 7}], EXPR$1=[COUNT()]) LogicalFilter(condition=[>(CAST($5):BIGINT NOT NULL, 12)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4560,7 +4560,7 @@ where case when mgr < 10 then true else false end]]> LogicalAggregate(group=[{}], EXPR$0=[COUNT()]) LogicalProject($f0=[0]) LogicalFilter(condition=[CASE(<($3, 10), true, false)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4569,7 +4569,7 @@ LogicalAggregate(group=[{}], EXPR$0=[COUNT()]) LogicalProject($f0=[0]) LogicalFilter(condition=[IS TRUE(<($0, 10))]) LogicalProject(MGR=[$3]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4581,9 +4581,9 @@ LogicalAggregate(group=[{}], EXPR$0=[COUNT()]) ($7, 7)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4591,9 +4591,9 @@ LogicalProject(EXPR$0=[1]) LogicalProject(EXPR$0=[1]) LogicalJoin(condition=[=($7, $16)], joinType=[inner]) LogicalFilter(condition=[>($7, 7)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalFilter(condition=[>($7, 7)]) - 
LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4607,8 +4607,8 @@ LogicalProject(EXPR$0=[1]) LogicalJoin(condition=[=($7, $16)], joinType=[inner]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[>($7, 7)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4617,9 +4617,9 @@ LogicalProject(EXPR$0=[1]) LogicalJoin(condition=[=($7, $16)], joinType=[inner]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[>($7, 7)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalFilter(condition=[>($7, 7)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4634,8 +4634,8 @@ LogicalProject(EXPR$0=[1]) LogicalAggregate(group=[{0}], EXPR$1=[COUNT()]) LogicalProject(DEPTNO=[$7]) LogicalFilter(condition=[>($7, 7)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4645,9 +4645,9 @@ LogicalProject(EXPR$0=[1]) LogicalAggregate(group=[{0}], EXPR$1=[COUNT()]) LogicalProject(DEPTNO=[$7]) LogicalFilter(condition=[>($7, 7)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalFilter(condition=[>($7, 7)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4662,11 +4662,11 @@ LogicalProject(EXPR$0=[1]) LogicalUnion(all=[true]) LogicalProject(DEPTNO=[$7]) LogicalFilter(condition=[>($7, 7)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(DEPTNO=[$7]) LogicalFilter(condition=[>($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4676,12 +4676,12 @@ LogicalProject(EXPR$0=[1]) LogicalUnion(all=[true]) LogicalProject(DEPTNO=[$7]) LogicalFilter(condition=[>($7, 7)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(DEPTNO=[$7]) LogicalFilter(condition=[>($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalFilter(condition=[OR(>($7, 7), >($7, 10))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4695,9 +4695,9 @@ LogicalProject(EXPR$0=[1]) LogicalJoin(condition=[=($16, $25)], joinType=[inner]) LogicalJoin(condition=[=($7, $16)], joinType=[inner]) LogicalFilter(condition=[>($7, 7)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4706,11 +4706,11 @@ LogicalProject(EXPR$0=[1]) LogicalJoin(condition=[=($16, $25)], joinType=[inner]) LogicalJoin(condition=[=($7, $16)], joinType=[inner]) LogicalFilter(condition=[>($7, 7)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) 
LogicalFilter(condition=[>($7, 7)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalFilter(condition=[>($7, 7)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4730,13 +4730,13 @@ LogicalProject(DEPTNO=[$0], DEPTNO0=[$1]) LogicalJoin(condition=[=($0, $1)], joinType=[inner]) LogicalProject(DEPTNO=[$7]) LogicalFilter(condition=[<($7, 4)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalUnion(all=[true]) LogicalProject(DEPTNO=[$7]) LogicalFilter(condition=[>($7, 7)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(DEPTNO=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4745,14 +4745,14 @@ LogicalProject(DEPTNO=[$0], DEPTNO0=[$1]) LogicalJoin(condition=[=($0, $1)], joinType=[inner]) LogicalProject(DEPTNO=[$7]) LogicalFilter(condition=[<($7, 4)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalFilter(condition=[<($0, 4)]) LogicalUnion(all=[true]) LogicalProject(DEPTNO=[$7]) LogicalFilter(condition=[>($7, 7)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(DEPTNO=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4769,9 +4769,9 @@ LogicalProject(EXPR$0=[1]) LogicalAggregate(group=[{0}], EXPR$1=[COUNT()]) LogicalProject(DEPTNO=[$7]) LogicalFilter(condition=[>($7, 7)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4782,11 +4782,11 @@ LogicalProject(EXPR$0=[1]) LogicalAggregate(group=[{0}], EXPR$1=[COUNT()]) LogicalProject(DEPTNO=[$7]) LogicalFilter(condition=[>($7, 7)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalFilter(condition=[>($7, 7)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalFilter(condition=[>($7, 7)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4800,8 +4800,8 @@ LogicalProject(EXPR$0=[1]) LogicalFilter(condition=[>($16, 9)]) LogicalJoin(condition=[=($7, $16)], joinType=[left]) LogicalFilter(condition=[>($7, 7)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4810,9 +4810,9 @@ LogicalProject(EXPR$0=[1]) LogicalFilter(condition=[>($16, 9)]) LogicalJoin(condition=[=($7, $16)], joinType=[left]) LogicalFilter(condition=[>($7, 7)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalFilter(condition=[>($7, 7)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4825,9 +4825,9 @@ LogicalProject(EXPR$0=[1]) LogicalProject(EXPR$0=[1]) LogicalFilter(condition=[>($7, 7)]) LogicalJoin(condition=[=($7, $16)], joinType=[right]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalFilter(condition=[>($7, 9)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4836,9 
+4836,9 @@ LogicalProject(EXPR$0=[1]) LogicalFilter(condition=[>($7, 7)]) LogicalJoin(condition=[=($7, $16)], joinType=[right]) LogicalFilter(condition=[>($7, 9)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalFilter(condition=[>($7, 9)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4851,8 +4851,8 @@ LogicalProject(EXPR$0=[1]) LogicalProject(EXPR$0=[1]) LogicalFilter(condition=[AND(>($7, 7), >($16, 9))]) LogicalJoin(condition=[=($7, $16)], joinType=[full]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4860,8 +4860,8 @@ LogicalProject(EXPR$0=[1]) LogicalProject(EXPR$0=[1]) LogicalFilter(condition=[AND(>($7, 7), >($16, 9))]) LogicalJoin(condition=[=($7, $16)], joinType=[full]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4875,8 +4875,8 @@ LogicalProject(EXPR$0=[1]) LogicalJoin(condition=[=($0, $8)], joinType=[inner]) LogicalProject(DEPTNO=[$6]) LogicalFilter(condition=[>($7, 7)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4885,8 +4885,8 @@ LogicalProject(EXPR$0=[1]) LogicalJoin(condition=[=($0, $8)], joinType=[inner]) LogicalProject(DEPTNO=[$6]) LogicalFilter(condition=[>($7, 7)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4900,8 +4900,8 @@ LogicalProject(EXPR$0=[1]) LogicalJoin(condition=[=($0, $8)], joinType=[inner]) LogicalProject(DEPTNO=[$6]) LogicalFilter(condition=[>($6, 7)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4910,9 +4910,9 @@ LogicalProject(EXPR$0=[1]) LogicalJoin(condition=[=($0, $8)], joinType=[inner]) LogicalProject(DEPTNO=[$6]) LogicalFilter(condition=[>($6, 7)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalFilter(condition=[>($7, 7)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4925,8 +4925,8 @@ LogicalProject(EXPR$0=[1]) LogicalProject(EXPR$0=[1]) LogicalJoin(condition=[=($7, $16)], joinType=[inner]) LogicalFilter(condition=[OR(=($7, 7), =($7, 9), >($7, 10))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4934,9 +4934,9 @@ LogicalProject(EXPR$0=[1]) LogicalProject(EXPR$0=[1]) LogicalJoin(condition=[=($7, $16)], joinType=[inner]) LogicalFilter(condition=[OR(=($7, 7), =($7, 9), >($7, 10))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalFilter(condition=[OR(=($7, 7), =($7, 9), >($7, 10))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4949,8 +4949,8 @@ LogicalProject(EXPR$0=[1]) LogicalProject(EXPR$0=[1]) LogicalJoin(condition=[=($7, $16)], joinType=[inner]) LogicalFilter(condition=[OR(=($7, 7), =($7, 
9), >($6, 10))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4958,8 +4958,8 @@ LogicalProject(EXPR$0=[1]) LogicalProject(EXPR$0=[1]) LogicalJoin(condition=[=($7, $16)], joinType=[inner]) LogicalFilter(condition=[OR(=($7, 7), =($7, 9), >($6, 10))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4975,14 +4975,14 @@ LogicalProject(EXPR$0=[1]) LogicalUnion(all=[true]) LogicalProject(DEPTNO=[$7]) LogicalFilter(condition=[>($7, 7)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(DEPTNO=[$7]) LogicalFilter(condition=[>($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(DEPTNO=[$7]) LogicalFilter(condition=[>($7, 1)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4993,15 +4993,15 @@ LogicalProject(EXPR$0=[1]) LogicalUnion(all=[true]) LogicalProject(DEPTNO=[$7]) LogicalFilter(condition=[>($7, 7)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(DEPTNO=[$7]) LogicalFilter(condition=[>($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(DEPTNO=[$7]) LogicalFilter(condition=[>($7, 1)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalFilter(condition=[OR(>($7, 7), >($7, 1))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -5024,14 +5024,14 @@ ProjectRel(EXPR$0=[1]) UnionRel(all=[true]) ProjectRel(EMPNO=[$0], DEPTNO=[$7]) FilterRel(condition=[OR(>($7, 7), <($0, 10))]) - TableAccessRel(table=[[CATALOG, SALES, EMP]]) + TableAccessRel(entity=[[CATALOG, SALES, EMP]]) ProjectRel(EMPNO=[$0], DEPTNO=[$7]) FilterRel(condition=[OR(>($7, 10), <($0, $7))]) - TableAccessRel(table=[[CATALOG, SALES, EMP]]) + TableAccessRel(entity=[[CATALOG, SALES, EMP]]) ProjectRel(EMPNO=[$0], DEPTNO=[$7]) FilterRel(condition=[>($7, 1)]) - TableAccessRel(table=[[CATALOG, SALES, EMP]]) - TableAccessRel(table=[[CATALOG, SALES, EMP]]) + TableAccessRel(entity=[[CATALOG, SALES, EMP]]) + TableAccessRel(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -5042,14 +5042,14 @@ ProjectRel(EXPR$0=[1]) UnionRel(all=[true]) ProjectRel(EMPNO=[$0], DEPTNO=[$7]) FilterRel(condition=[OR(>($7, 7), <($0, 10))]) - TableAccessRel(table=[[CATALOG, SALES, EMP]]) + TableAccessRel(entity=[[CATALOG, SALES, EMP]]) ProjectRel(EMPNO=[$0], DEPTNO=[$7]) FilterRel(condition=[OR(>($7, 10), <($0, $7))]) - TableAccessRel(table=[[CATALOG, SALES, EMP]]) + TableAccessRel(entity=[[CATALOG, SALES, EMP]]) ProjectRel(EMPNO=[$0], DEPTNO=[$7]) FilterRel(condition=[>($7, 1)]) - TableAccessRel(table=[[CATALOG, SALES, EMP]]) - TableAccessRel(table=[[CATALOG, SALES, EMP]]) + TableAccessRel(entity=[[CATALOG, SALES, EMP]]) + TableAccessRel(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -5061,16 +5061,16 @@ ProjectRel(EXPR$0=[1]) @@ -5083,8 +5083,8 @@ LogicalProject(EXPR$0=[1]) ProjectRel(EXPR$0=[1]) JoinRel(condition=[=($7, $16)], joinType=[inner]) FilterRel(condition=[>(2, 1)]) - TableAccessRel(table=[[CATALOG, SALES, EMP]]) - 
TableAccessRel(table=[[CATALOG, SALES, EMP]]) + TableAccessRel(entity=[[CATALOG, SALES, EMP]]) + TableAccessRel(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -5092,8 +5092,8 @@ ProjectRel(EXPR$0=[1]) ProjectRel(EXPR$0=[1]) JoinRel(condition=[=($7, $16)], joinType=[inner]) FilterRel(condition=[>(2, 1)]) - TableAccessRel(table=[[CATALOG, SALES, EMP]]) - TableAccessRel(table=[[CATALOG, SALES, EMP]]) + TableAccessRel(entity=[[CATALOG, SALES, EMP]]) + TableAccessRel(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -5106,9 +5106,9 @@ ProjectRel(EXPR$0=[1]) LogicalProject(EXPR$0=[1]) LogicalJoin(condition=[=($7, $16)], joinType=[inner]) LogicalFilter(condition=[AND(>($7, 7), =($6, $7), >(+($6, $7), /($6, 2)))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalFilter(condition=[=($5, $7)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -5116,10 +5116,10 @@ LogicalProject(EXPR$0=[1]) LogicalProject(EXPR$0=[1]) LogicalJoin(condition=[=($7, $16)], joinType=[inner]) LogicalFilter(condition=[AND(>($7, 7), =($6, $7), >(+($6, $7), /($6, 2)))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalFilter(condition=[>($7, 7)]) LogicalFilter(condition=[=($5, $7)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -5131,14 +5131,14 @@ LogicalProject(EXPR$0=[1]) @@ -5152,7 +5152,7 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[>(+($7, 5), $0)]) LogicalFilter(condition=[=($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -5161,7 +5161,7 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[>(15, $0)]) LogicalFilter(condition=[=($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -5177,7 +5177,7 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[AND(>($0, 3), >($7, 5))]) LogicalFilter(condition=[>($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -5186,7 +5186,7 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[>($0, 3)]) LogicalFilter(condition=[>($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -5202,8 +5202,8 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalJoin(condition=[=($0, $9)], joinType=[left]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($0, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -5212,9 +5212,9 
@@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalJoin(condition=[true], joinType=[left]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($0, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalFilter(condition=[=($0, 10)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -5230,9 +5230,9 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalJoin(condition=[AND(=($0, $10), =($9, $12))], joinType=[inner]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], $f9=[+($7, $0)]) LogicalFilter(condition=[=($0, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(DEPTNO=[$0], NAME=[$1], $f2=[+($0, 5)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -5241,10 +5241,10 @@ LogicalProject(EMPNO=[10], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalJoin(condition=[=($9, 15)], joinType=[inner]) LogicalProject(EMPNO=[10], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], $f9=[+($7, 10)]) LogicalFilter(condition=[=($0, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(DEPTNO=[10], NAME=[$1], $f2=[15]) LogicalFilter(condition=[=($0, 10)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -5259,8 +5259,8 @@ where emp.deptno is not null and emp.sal > 100]]> LogicalProject(DEPTNO=[$0], NAME=[$1], EMPNO=[$2], ENAME=[$3], JOB=[$4], MGR=[$5], HIREDATE=[$6], SAL=[$7], COMM=[$8], DEPTNO0=[$9], SLACKER=[$10]) LogicalFilter(condition=[AND(IS NOT NULL($9), >($7, 100))]) LogicalJoin(condition=[=($0, $9)], joinType=[left]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -5268,9 +5268,9 @@ LogicalProject(DEPTNO=[$0], NAME=[$1], EMPNO=[$2], ENAME=[$3], JOB=[$4], MGR=[$5 LogicalProject(DEPTNO=[$0], NAME=[$1], EMPNO=[$2], ENAME=[$3], JOB=[$4], MGR=[$5], HIREDATE=[$6], SAL=[$7], COMM=[$8], DEPTNO0=[$9], SLACKER=[$10]) LogicalProject(DEPTNO=[$0], NAME=[$1], EMPNO=[CAST($2):INTEGER], ENAME=[CAST($3):VARCHAR(20)], JOB=[CAST($4):VARCHAR(10)], MGR=[$5], HIREDATE=[CAST($6):TIMESTAMP(0)], SAL=[CAST($7):INTEGER], COMM=[CAST($8):INTEGER], DEPTNO0=[CAST($9):INTEGER], SLACKER=[CAST($10):BOOLEAN]) LogicalJoin(condition=[=($0, $9)], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalFilter(condition=[>($5, 100)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -5285,20 +5285,20 @@ LogicalProject(DEPTNO=[$0], NAME=[$1], EMPNO=[$2], ENAME=[$3], JOB=[$4], MGR=[$5 ($5, 100)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> ($5, 100)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -5314,11 +5314,11 @@ SemiJoin(condition=[=($0, $2)], joinType=[inner]) LogicalProject(DEPTNO=[$0]) LogicalJoin(condition=[=($0, $2)], joinType=[inner]) SemiJoin(condition=[=($0, $2)], 
joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalProject(DEPTNO=[$7], $f0=[true]) LogicalFilter(condition=[>($5, 100)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, CUSTOMER, ACCOUNT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, CUSTOMER, ACCOUNT]]) ]]> @@ -5327,13 +5327,13 @@ LogicalProject(DEPTNO=[$0]) LogicalJoin(condition=[=($0, $1)], joinType=[inner]) SemiJoin(condition=[=($0, $1)], joinType=[inner]) LogicalProject(DEPTNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalProject(DEPTNO=[$1]) LogicalFilter(condition=[>($0, 100)]) LogicalProject(SAL=[$5], DEPTNO=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(ACCTNO=[$0]) - LogicalScan(table=[[CATALOG, CUSTOMER, ACCOUNT]]) + LogicalScan(entity=[[CATALOG, CUSTOMER, ACCOUNT]]) ]]> @@ -5346,7 +5346,7 @@ from sales.dept group by name]]> @@ -5355,7 +5355,7 @@ LogicalProject(NAME=[$0], EXPR$1=[CAST(POWER(/(-($1, /(*($2, $2), $3)), $3), 0.5 LogicalAggregate(group=[{0}], agg#0=[$SUM0($2)], agg#1=[$SUM0($1)], agg#2=[COUNT()]) LogicalProject(NAME=[$0], DEPTNO=[$1], $f2=[*($1, $1)]) LogicalProject(NAME=[$1], DEPTNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -5371,7 +5371,7 @@ group by x, y]]> LogicalProject(X=[$0], EXPR$1=[$2], Y=[$1]) LogicalAggregate(group=[{0, 1}], EXPR$1=[SUM($2)]) LogicalProject(X=[$7], Y=[$0], Z=[$5]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -5379,7 +5379,7 @@ LogicalProject(X=[$0], EXPR$1=[$2], Y=[$1]) LogicalProject(X=[$0], EXPR$1=[$2], Y=[$1]) LogicalProject(DEPTNO=[$1], EMPNO=[$0], EXPR$1=[$2]) LogicalAggregate(group=[{0, 7}], EXPR$1=[SUM($5)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -5395,7 +5395,7 @@ group by rollup(x, y)]]> LogicalProject(X=[$0], EXPR$1=[$2], Y=[$1]) LogicalAggregate(group=[{0, 1}], groups=[[{0, 1}, {0}, {}]], EXPR$1=[SUM($2)]) LogicalProject(X=[$7], Y=[$0], Z=[$5]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -5403,7 +5403,7 @@ LogicalProject(X=[$0], EXPR$1=[$2], Y=[$1]) LogicalProject(X=[$0], EXPR$1=[$2], Y=[$1]) LogicalProject(DEPTNO=[$1], EMPNO=[$0], EXPR$1=[$2]) LogicalAggregate(group=[{0, 7}], groups=[[{0, 7}, {7}, {}]], EXPR$1=[SUM($5)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -5417,10 +5417,10 @@ LogicalAggregate(group=[{0, 1}]) LogicalUnion(all=[true]) LogicalAggregate(group=[{0, 1}]) LogicalProject(DEPTNO=[$7], JOB=[$2]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0, 1}]) LogicalProject(DEPTNO=[$7], JOB=[$2]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -5428,9 +5428,9 @@ LogicalAggregate(group=[{0, 1}]) LogicalAggregate(group=[{0, 1}]) LogicalUnion(all=[true]) LogicalProject(DEPTNO=[$7], JOB=[$2]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(DEPTNO=[$7], JOB=[$2]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -5444,10 +5444,10 @@ LogicalAggregate(group=[{0, 1}]) LogicalUnion(all=[true]) LogicalAggregate(group=[{0, 1}]) 
LogicalProject(DEPTNO=[$7], JOB=[$2]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0, 1}]) LogicalProject(DEPTNO=[$7], JOB=[$2]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -5455,9 +5455,9 @@ LogicalAggregate(group=[{0, 1}]) LogicalAggregate(group=[{0, 1}]) LogicalUnion(all=[true]) LogicalProject(DEPTNO=[$7], JOB=[$2]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(DEPTNO=[$7], JOB=[$2]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -5470,13 +5470,13 @@ LogicalAggregate(group=[{0, 1}]) LogicalProject(EXPR$0=[$2], DEPTNO=[$1]) LogicalWindow(window#0=[window(partition {} order by [] range between UNBOUNDED PRECEDING and UNBOUNDED FOLLOWING aggs [COUNT($0)])]) LogicalProject(EMPNO=[$0], DEPTNO=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -5494,13 +5494,13 @@ from ( LogicalProject($0=[$2], $1=[$3]) LogicalWindow(window#0=[window(partition {1} order by [0] range between UNBOUNDED PRECEDING and CURRENT ROW aggs [SUM($2), SUM($3)])]) LogicalProject(SAL=[$5], DEPTNO=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -5516,7 +5516,7 @@ where r < 2]]> LogicalProject(ENAME=[$0], R=[$1]) LogicalFilter(condition=[<($1, 2)]) LogicalProject(ENAME=[$1], R=[RANK() OVER (PARTITION BY $7 ORDER BY $5 RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -5524,7 +5524,7 @@ LogicalProject(ENAME=[$0], R=[$1]) LogicalProject(ENAME=[$0], R=[$1]) LogicalFilter(condition=[<($1, 2)]) LogicalProject(ENAME=[$1], R=[RANK() OVER (PARTITION BY $7 ORDER BY $5 RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -5541,7 +5541,7 @@ where r < 2]]> LogicalProject(ENAME=[$0], R=[$1]) LogicalFilter(condition=[<($1, 2)]) LogicalProject(ENAME=[$1], R=[+(RANK() OVER (PARTITION BY $7 ORDER BY $5 RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW), 1)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -5549,7 +5549,7 @@ LogicalProject(ENAME=[$0], R=[$1]) LogicalProject(ENAME=[$0], R=[$1]) LogicalFilter(condition=[<($1, 2)]) LogicalProject(ENAME=[$1], R=[+(RANK() OVER (PARTITION BY $7 ORDER BY $5 RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW), 1)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -5562,7 +5562,7 @@ from sales.emp]]> @@ -5570,7 +5570,7 @@ LogicalAggregate(group=[{}], EXPR$0=[MAX($0)], EXPR$1=[COUNT(DISTINCT $1)]) LogicalAggregate(group=[{}], EXPR$0=[MIN($1) FILTER $3], EXPR$1=[COUNT($0) FILTER $2]) LogicalProject(ENAME=[$0], EXPR$0=[$1], $g_0=[=($2, 0)], $g_1=[=($2, 1)]) LogicalAggregate(group=[{1}], groups=[[{1}, {}]], EXPR$0=[MAX($7)], $g=[GROUPING($1)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -5582,14 +5582,14 @@ LogicalAggregate(group=[{}], EXPR$0=[MIN($1) FILTER $3], EXPR$1=[COUNT($0) FILTE @@ -5601,7 +5601,7 @@ LogicalAggregate(group=[{1}], EXPR$1=[COUNT($0)]) @@ -5611,7 +5611,7 @@ LogicalProject(DEPTNO=[$0], EXPR$1=[$1], EXPR$2=[CAST($2):INTEGER NOT NULL]) LogicalProject(DEPTNO=[$0], ENAME=[$1], EXPR$2=[$2], $g_0=[=($3, 
0)], $g_1=[=($3, 1)]) LogicalProject(DEPTNO=[$1], ENAME=[$0], EXPR$2=[$2], $g=[$3]) LogicalAggregate(group=[{1, 7}], groups=[[{1, 7}, {7}]], EXPR$2=[SUM($5)], $g=[GROUPING($7, $1)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -5623,7 +5623,7 @@ LogicalProject(DEPTNO=[$0], EXPR$1=[$1], EXPR$2=[CAST($2):INTEGER NOT NULL]) @@ -5632,7 +5632,7 @@ LogicalAggregate(group=[{0, 1}], groups=[[{0, 1}, {0}, {}]], EXPR$2=[COUNT($2) F LogicalProject(DEPTNO=[$0], JOB=[$1], ENAME=[$2], $g_0=[=($3, 0)]) LogicalAggregate(group=[{0, 1, 2}], $g=[GROUPING($0, $1, $2)]) LogicalProject(DEPTNO=[$7], JOB=[$2], ENAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -5644,7 +5644,7 @@ LogicalAggregate(group=[{0, 1}], groups=[[{0, 1}, {0}, {}]], EXPR$2=[COUNT($2) F @@ -5654,7 +5654,7 @@ LogicalProject(DEPTNO=[$0], JOB=[$1], EXPR$2=[$2], EXPR$3=[CAST($3):INTEGER NOT LogicalProject(DEPTNO=[$0], JOB=[$1], ENAME=[$2], EXPR$3=[$3], $g_0=[=($4, 0)], $g_1=[=($4, 1)]) LogicalAggregate(group=[{0, 1, 2}], groups=[[{0, 1, 2}, {0, 1}]], EXPR$3=[SUM($3)], $g=[GROUPING($0, $1, $2)]) LogicalProject(DEPTNO=[$7], JOB=[$2], ENAME=[$1], SAL=[$5]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -5667,18 +5667,18 @@ LogicalProject(DEPTNO=[$0], JOB=[$1], EXPR$2=[$2], EXPR$3=[CAST($3):INTEGER NOT LogicalProject(A=[$0], B=[$1], C=[$2]) LogicalJoin(condition=[=($1, $2)], joinType=[inner]) LogicalJoin(condition=[=($0, $1)], joinType=[right]) - LogicalScan(table=[[CATALOG, SALES, A]]) - LogicalScan(table=[[CATALOG, SALES, B]]) - LogicalScan(table=[[CATALOG, SALES, C]]) + LogicalScan(entity=[[CATALOG, SALES, A]]) + LogicalScan(entity=[[CATALOG, SALES, B]]) + LogicalScan(entity=[[CATALOG, SALES, C]]) ]]> @@ -5691,17 +5691,17 @@ MultiJoin(joinFilter=[=($1, $2)], isFullOuterJoin=[false], joinTypes=[[INNER, IN LogicalProject(A=[$0], B=[$1], C=[$2]) LogicalJoin(condition=[=($1, $2)], joinType=[left]) LogicalJoin(condition=[=($0, $1)], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, A]]) - LogicalScan(table=[[CATALOG, SALES, B]]) - LogicalScan(table=[[CATALOG, SALES, C]]) + LogicalScan(entity=[[CATALOG, SALES, A]]) + LogicalScan(entity=[[CATALOG, SALES, B]]) + LogicalScan(entity=[[CATALOG, SALES, C]]) ]]> @@ -5714,18 +5714,18 @@ MultiJoin(joinFilter=[=($0, $1)], isFullOuterJoin=[false], joinTypes=[[INNER, IN LogicalProject(A=[$0], B=[$1], C=[$2]) LogicalJoin(condition=[=($1, $2)], joinType=[right]) LogicalJoin(condition=[=($0, $1)], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, A]]) - LogicalScan(table=[[CATALOG, SALES, B]]) - LogicalScan(table=[[CATALOG, SALES, C]]) + LogicalScan(entity=[[CATALOG, SALES, A]]) + LogicalScan(entity=[[CATALOG, SALES, B]]) + LogicalScan(entity=[[CATALOG, SALES, C]]) ]]> @@ -5743,7 +5743,7 @@ LogicalProject(C1=[$0]) LogicalAggregate(group=[{0}], C2=[COUNT()]) LogicalProject(C1=[$1]) LogicalFilter(condition=[>($1, 'b')]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -5754,7 +5754,7 @@ LogicalProject(C1=[$0]) LogicalFilter(condition=[>($0, 'c')]) LogicalProject(C1=[$1]) LogicalFilter(condition=[>($1, 'b')]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -5772,10 +5772,10 @@ LogicalProject(NAME=[$1]) LogicalJoin(condition=[$4], joinType=[right]) LogicalProject(DEPTNO=[$0], NAME=[$1], DEPTNO0=[$2], NAME0=[$3], $f4=[>($2, 10)]) 
LogicalJoin(condition=[$4], joinType=[left]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalProject(DEPTNO=[$0], NAME=[$1], $f2=[>($0, 10)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -5785,12 +5785,12 @@ LogicalProject(NAME=[$1]) LogicalProject(DEPTNO=[$0], NAME=[$1], DEPTNO0=[$2], NAME0=[$3], $f4=[>($2, 10)]) LogicalProject(DEPTNO=[$0], NAME=[$1], DEPTNO0=[CAST($2):INTEGER], NAME0=[CAST($3):VARCHAR(10)], $f2=[CAST($4):BOOLEAN]) LogicalJoin(condition=[true], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalProject(DEPTNO=[$0], NAME=[$1], $f2=[>($0, 10)]) LogicalFilter(condition=[>($0, 10)]) LogicalFilter(condition=[>($0, 10)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -5809,11 +5809,11 @@ LogicalProject(NAME=[$0]) LogicalProject(NAME=[$1], $f4=[$5]) LogicalJoin(condition=[$4], joinType=[left]) LogicalProject(DEPTNO=[$0], NAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalProject(DEPTNO=[$0], NAME=[$1], $f2=[>($0, 10)], >=[>($0, 10)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalProject(DEPTNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -5821,9 +5821,9 @@ LogicalProject(NAME=[$0]) LogicalProject(NAME=[$1]) LogicalJoin(condition=[>($2, 10)], joinType=[right]) LogicalJoin(condition=[>($2, 10)], joinType=[left]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -5842,7 +5842,7 @@ LogicalProject(NAME=[$1]) LogicalFilter(condition=[=($0, 10)]) LogicalProject(DEPTNO=[$0], NAME=[$1]) LogicalFilter(condition=[=($0, 10)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -5850,7 +5850,7 @@ LogicalProject(NAME=[$1]) LogicalProject(NAME=[$1]) LogicalProject(DEPTNO=[$0], NAME=[$1]) LogicalFilter(condition=[=($0, 10)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -5871,8 +5871,8 @@ LogicalProject(DEPTNO=[$0], ENAME=[$1]) LogicalFilter(condition=[=($0, 10)]) LogicalProject(DEPTNO=[$9], ENAME=[$1]) LogicalJoin(condition=[AND(=($7, $9), =($9, 10))], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -5880,9 +5880,9 @@ LogicalProject(DEPTNO=[$0], ENAME=[$1]) LogicalProject(DEPTNO=[$0], ENAME=[$1]) LogicalProject(DEPTNO=[$9], ENAME=[$1]) LogicalJoin(condition=[=($7, $9)], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalFilter(condition=[=($0, 10)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -5896,13 +5896,13 @@ from emp]]> @@ -5913,7 +5913,7 @@ LogicalProject(NEWCOL=[1E0:FLOAT]) @@ -5935,9 +5935,9 @@ 
LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalProject(EMPNO=[$0], DEPTNO=[$1]) LogicalAggregate(group=[{0, 1}]) LogicalProject(EMPNO=[$0], DEPTNO=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) }), <($7, +(40, 60)))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -5947,9 +5947,9 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalProject(EMPNO=[$0], DEPTNO=[$1]) LogicalAggregate(group=[{0, 1}]) LogicalProject(EMPNO=[$0], DEPTNO=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) }), <($7, 100))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -5983,13 +5983,13 @@ from emp]]> LogicalProject($0=[$3], $1=[$4]) LogicalWindow(window#0=[window(partition {1} order by [0] range between UNBOUNDED PRECEDING and CURRENT ROW aggs [SUM($1), SUM($2)])]) LogicalProject(SAL=[$5], DEPTNO=[$7], $2=[+($7, $5)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -6006,8 +6006,8 @@ LogicalAggregate(group=[{2, 10}]) LogicalJoin(condition=[=($2, $10)], joinType=[inner]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($0, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -6016,9 +6016,9 @@ LogicalJoin(condition=[=($0, $1)], joinType=[inner]) LogicalAggregate(group=[{2}]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($0, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{1}]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -6036,9 +6036,9 @@ LogicalAggregate(group=[{2, 11}]) LogicalJoin(condition=[AND(=($2, $11), =($9, $12))], joinType=[inner]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], $f9=[+($7, $0)]) LogicalFilter(condition=[=($0, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(DEPTNO=[$0], NAME=[$1], $f2=[+($0, 5)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -6048,10 +6048,10 @@ LogicalAggregate(group=[{0, 2}]) LogicalAggregate(group=[{2, 9}]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], $f9=[+($7, $0)]) LogicalFilter(condition=[=($0, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{1, 2}]) LogicalProject(DEPTNO=[$0], NAME=[$1], $f2=[+($0, 5)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -6068,8 +6068,8 @@ LogicalAggregate(group=[{0, 9}]) LogicalJoin(condition=[<($0, $9)], joinType=[inner]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($0, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, 
DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -6078,8 +6078,8 @@ LogicalAggregate(group=[{0, 9}]) LogicalJoin(condition=[<($0, $9)], joinType=[inner]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($0, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -6093,8 +6093,8 @@ group by e.deptno]]> @@ -6102,9 +6102,9 @@ LogicalAggregate(group=[{7}]) LogicalProject(DEPTNO=[$0]) LogicalJoin(condition=[=($0, $1)], joinType=[inner]) LogicalAggregate(group=[{7}]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(DEPTNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -6119,8 +6119,8 @@ group by e.deptno, d.deptno]]> LogicalProject(DEPTNO=[$0], DEPTNO0=[$1]) LogicalAggregate(group=[{7, 9}]) LogicalJoin(condition=[=($7, $9)], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -6128,9 +6128,9 @@ LogicalProject(DEPTNO=[$0], DEPTNO0=[$1]) LogicalProject(DEPTNO=[$0], DEPTNO0=[$1]) LogicalJoin(condition=[=($0, $1)], joinType=[inner]) LogicalAggregate(group=[{7}]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(DEPTNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -6146,9 +6146,9 @@ on A.sal=B.sal @@ -6157,9 +6157,9 @@ LogicalAggregate(group=[{}], EXPR$0=[SUM($3)]) LogicalProject(SAL=[$0], $f1=[$1], SAL0=[$2], $f3=[CAST(*($1, $2)):INTEGER]) LogicalJoin(condition=[=($0, $2)], joinType=[inner]) LogicalAggregate(group=[{5}], agg#0=[COUNT()]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{5}]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -6178,9 +6178,9 @@ group by A.job, B.mgr, A.deptno]]> LogicalProject(JOB=[$0], MGR0=[$2], DEPTNO=[$1], HIREDATE1=[$3], COMM1=[$4]) LogicalAggregate(group=[{2, 7, 9}], HIREDATE1=[MAX($11)], COMM1=[SUM($12)]) LogicalJoin(condition=[=($5, $10)], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{3, 5}], HIREDATE1=[MAX($4)], COMM1=[SUM($6)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -6190,9 +6190,9 @@ LogicalProject(JOB=[$0], MGR0=[$2], DEPTNO=[$1], HIREDATE1=[$3], COMM1=[$4]) LogicalProject(JOB=[$0], SAL=[$1], DEPTNO=[$2], $f3=[$3], MGR=[$4], SAL0=[$5], HIREDATE1=[$6], COMM1=[$7], $f8=[CAST(*($3, $7)):INTEGER NOT NULL]) LogicalJoin(condition=[=($1, $5)], joinType=[inner]) LogicalAggregate(group=[{2, 5, 7}], agg#0=[COUNT()]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{3, 5}], HIREDATE1=[MAX($4)], COMM1=[SUM($6)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -6210,8 +6210,8 @@ LogicalProject(JOB=[$0], EXPR$1=[$2]) LogicalJoin(condition=[=($2, $10)], joinType=[inner]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], 
MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($0, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -6222,9 +6222,9 @@ LogicalProject(JOB=[$0], EXPR$1=[$2]) LogicalAggregate(group=[{2}], EXPR$1=[SUM($5)]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($0, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{1}], agg#0=[COUNT()]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -6240,8 +6240,8 @@ LogicalAggregate(group=[{}], EXPR$0=[SUM($5)]) LogicalJoin(condition=[=($2, $10)], joinType=[inner]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($0, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -6252,9 +6252,9 @@ LogicalAggregate(group=[{}], EXPR$0=[SUM($4)]) LogicalAggregate(group=[{2}], EXPR$0=[SUM($5)]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($0, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{1}], agg#0=[COUNT()]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -6270,8 +6270,8 @@ LogicalAggregate(group=[{}], EXPR$0=[SUM($5)]) LogicalJoin(condition=[=($2, $10)], joinType=[inner]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($0, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -6283,9 +6283,9 @@ LogicalProject(EXPR$0=[CASE(=($1, 0), null:INTEGER, $0)]) LogicalAggregate(group=[{2}], EXPR$0=[$SUM0($5)], agg#1=[COUNT()]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($0, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{1}], agg#0=[COUNT()]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -6296,13 +6296,13 @@ LogicalProject(EXPR$0=[CASE(=($1, 0), null:INTEGER, $0)]) @@ -6315,8 +6315,8 @@ LogicalProject(QX=[CAST(CASE(=($0, 1), 1, 2)):INTEGER]) LogicalProject(EXPR$0=[OR(AND(IS NULL($3), IS NULL($12)), IS TRUE(=($3, $12)))]) LogicalFilter(condition=[IS NULL($3)]) LogicalJoin(condition=[=($3, $12)], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -6324,8 +6324,8 @@ LogicalProject(EXPR$0=[OR(AND(IS NULL($3), IS NULL($12)), IS TRUE(=($3, $12)))]) LogicalProject(EXPR$0=[IS NULL($12)]) LogicalFilter(condition=[IS NULL($3)]) LogicalJoin(condition=[=($3, $12)], joinType=[inner]) - 
LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -6340,7 +6340,7 @@ where t > TIMESTAMP '2018-01-01 00:00:00']]> LogicalProject(SAL=[$0], T=[$1]) LogicalFilter(condition=[>($1, 2018-01-01 00:00:00)]) LogicalProject(SAL=[$5], T=[CURRENT_TIMESTAMP]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -6348,7 +6348,7 @@ LogicalProject(SAL=[$0], T=[$1]) LogicalProject(SAL=[$0], T=[$1]) LogicalFilter(condition=[>($1, 2018-01-01 00:00:00)]) LogicalProject(SAL=[$5], T=[CURRENT_TIMESTAMP]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -6361,14 +6361,14 @@ where empno=10 and empno is not null]]> @@ -6380,7 +6380,7 @@ LogicalProject(EMPNO=[$0]) @@ -6399,7 +6399,7 @@ where empno=10 and not(empno=10)]]> @@ -6417,7 +6417,7 @@ LogicalProject(EMPNO=[$0]) ($0, 10), <=($0, 10))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -6444,7 +6444,7 @@ LogicalProject(N=[$0]) LogicalProject(N=[$0]) LogicalFilter(condition=[AND(IS NULL($0), IS NULL($0))]) LogicalProject(N=[null:INTEGER]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -6452,7 +6452,7 @@ LogicalProject(N=[$0]) LogicalProject(N=[$0]) LogicalProject(N=[$0]) LogicalProject(N=[null:INTEGER]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -6464,8 +6464,8 @@ LogicalProject(N=[$0]) @@ -6473,8 +6473,8 @@ LogicalProject(EXPR$0=[1]) LogicalProject(EXPR$0=[1]) LogicalProject(DEPTNO=[$9], NAME=[$10], EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO0=[$7], SLACKER=[$8]) LogicalJoin(condition=[=($9, $7)], joinType=[right]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -6488,7 +6488,7 @@ LogicalProject(EXPR$0=[1]) @@ -6498,16 +6498,16 @@ LogicalProject(DEPTNO=[$0], EXPR$1=[$3], EXPR$2=[$5], EXPR$3=[$7], EXPR$4=[$1]) LogicalJoin(condition=[IS NOT DISTINCT FROM($0, $4)], joinType=[inner]) LogicalJoin(condition=[IS NOT DISTINCT FROM($0, $2)], joinType=[inner]) LogicalAggregate(group=[{7}], EXPR$4=[SUM($5)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{1}], EXPR$1=[COUNT($0)]) LogicalAggregate(group=[{1, 7}]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{2}], EXPR$2=[COUNT($1, $0)]) LogicalAggregate(group=[{1, 2, 7}]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{1}], EXPR$3=[COUNT($1, $0)]) LogicalAggregate(group=[{2, 7}]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -6520,7 +6520,7 @@ LogicalProject(DEPTNO=[$0], EXPR$1=[$3], EXPR$2=[$5], EXPR$3=[$7], EXPR$4=[$1]) @@ -6529,7 +6529,7 @@ LogicalAggregate(group=[{0}], EXPR$1=[COUNT($1) FILTER $3], EXPR$2=[COUNT($2) FI LogicalProject(DEPTNO=[$0], ENAME=[$1], JOB=[$2], $g_1=[=($3, 1)], $g_2=[=($3, 2)]) LogicalProject(DEPTNO=[$2], ENAME=[$0], JOB=[$1], $g=[$3]) LogicalAggregate(group=[{1, 2, 7}], groups=[[{1, 7}, {2, 7}]], $g=[GROUPING($7, $1, $2)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + 
LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -6542,7 +6542,7 @@ LogicalAggregate(group=[{0}], EXPR$1=[COUNT($1) FILTER $3], EXPR$2=[COUNT($2) FI @@ -6550,7 +6550,7 @@ LogicalAggregate(group=[{}], EXPR$0=[COUNT(DISTINCT $0)], EXPR$1=[COUNT(DISTINCT LogicalAggregate(group=[{}], EXPR$0=[COUNT($0) FILTER $2], EXPR$1=[COUNT($1) FILTER $3]) LogicalProject(ENAME=[$0], JOB=[$1], $g_1=[=($2, 1)], $g_2=[=($2, 2)]) LogicalAggregate(group=[{1, 2}], groups=[[{1}, {2}]], $g=[GROUPING($1, $2)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -6563,7 +6563,7 @@ LogicalAggregate(group=[{}], EXPR$0=[COUNT($0) FILTER $2], EXPR$1=[COUNT($1) FIL @@ -6573,7 +6573,7 @@ LogicalProject(DEPTNO=[$0], CDDJ=[$1], S=[CAST($2):INTEGER NOT NULL]) LogicalProject(DEPTNO=[$0], JOB=[$1], S=[$2], $g_0=[=($3, 0)], $g_1=[=($3, 1)]) LogicalAggregate(group=[{0, 1}], groups=[[{0, 1}, {0}]], S=[SUM($2)], $g=[GROUPING($0, $1)]) LogicalProject(DEPTNO=[$7], JOB=[$2], SAL=[$5]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -6587,7 +6587,7 @@ LogicalProject(DEPTNO=[$0], CDDJ=[$1], S=[CAST($2):INTEGER NOT NULL]) @@ -6597,16 +6597,16 @@ LogicalProject(DEPTNO=[$0], EXPR$1=[$3], EXPR$2=[$5], EXPR$3=[$7], EXPR$4=[$1]) LogicalJoin(condition=[IS NOT DISTINCT FROM($0, $4)], joinType=[inner]) LogicalJoin(condition=[IS NOT DISTINCT FROM($0, $2)], joinType=[inner]) LogicalAggregate(group=[{7}], EXPR$4=[SUM($5)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{1}], EXPR$1=[COUNT($0)]) LogicalAggregate(group=[{1, 7}]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{2}], EXPR$2=[COUNT($1, $0)]) LogicalAggregate(group=[{1, 2, 7}]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{1}], EXPR$3=[COUNT($1, $0)]) LogicalAggregate(group=[{2, 7}]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -6622,7 +6622,7 @@ LogicalProject(DEPTNO=[$0], EXPR$1=[$3], EXPR$2=[$5], EXPR$3=[$7], EXPR$4=[$1]) @@ -6631,7 +6631,7 @@ LogicalProject(DEPTNO=[$0], CDE=[$1], CDJE=[$2], CDDJ=[$3], S=[CAST($4):INTEGER LogicalAggregate(group=[{0}], CDE=[COUNT($1) FILTER $5], CDJE=[COUNT($2, $1) FILTER $4], CDDJ=[COUNT($0, $2) FILTER $6], S=[MIN($3) FILTER $7]) LogicalProject(DEPTNO=[$2], ENAME=[$0], JOB=[$1], S=[$3], $g_0=[=($4, 0)], $g_1=[=($4, 1)], $g_2=[=($4, 2)], $g_3=[=($4, 3)]) LogicalAggregate(group=[{1, 2, 7}], groups=[[{1, 2, 7}, {1, 7}, {2, 7}, {7}]], S=[SUM($5)], $g=[GROUPING($7, $1, $2)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -6644,8 +6644,8 @@ LogicalProject(DEPTNO=[$0], CDE=[$1], CDJE=[$2], CDDJ=[$3], S=[CAST($4):INTEGER LogicalProject(DEPTNO=[$0], DEPTNO0=[$9]) LogicalFilter(condition=[=(+($0, 10), *($9, 2))]) LogicalJoin(condition=[true], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -6654,9 +6654,9 @@ LogicalProject(DEPTNO=[$0], DEPTNO0=[$9]) LogicalProject(DEPTNO=[$0], NAME=[$1], EMPNO=[$3], ENAME=[$4], JOB=[$5], MGR=[$6], HIREDATE=[$7], SAL=[$8], COMM=[$9], DEPTNO0=[$10], SLACKER=[$11]) LogicalJoin(condition=[=($2, $12)], joinType=[inner]) LogicalProject(DEPTNO=[$0], NAME=[$1], $f2=[+($0, 10)]) 
- LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], $f9=[*($7, 2)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -6670,14 +6670,14 @@ where r > 0.5]]> ($1, 10)]) LogicalProject(SAL=[$5], N=[NDC()]) - LogicalScan(table=[[scott, EMP]]) + LogicalScan(entity=[[scott, EMP]]) ]]> ($1, 10)]) LogicalProject(SAL=[$5], N=[NDC()]) - LogicalScan(table=[[scott, EMP]]) + LogicalScan(entity=[[scott, EMP]]) ]]> @@ -6690,7 +6690,7 @@ LogicalFilter(condition=[>($1, 10)]) LogicalAggregate(group=[{}], EXPR$0=[COUNT()]) LogicalProject($f0=[1]) LogicalFilter(condition=[=(null, 1)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -6714,7 +6714,7 @@ where case deptno LogicalAggregate(group=[{}], EXPR$0=[COUNT()]) LogicalProject($f0=[1]) LogicalFilter(condition=[=(CASE(=($7, 20), 2, =($7, 10), 1, 3), 1)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -6722,7 +6722,7 @@ LogicalAggregate(group=[{}], EXPR$0=[COUNT()]) LogicalAggregate(group=[{}], EXPR$0=[COUNT()]) LogicalProject($f0=[1]) LogicalFilter(condition=[=($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -6739,7 +6739,7 @@ where case deptno LogicalAggregate(group=[{}], EXPR$0=[COUNT()]) LogicalProject($f0=[1]) LogicalFilter(condition=[=(CASE(=($7, 20), 2, =($7, 10), 1, null:INTEGER), 1)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -6747,7 +6747,7 @@ LogicalAggregate(group=[{}], EXPR$0=[COUNT()]) LogicalAggregate(group=[{}], EXPR$0=[COUNT()]) LogicalProject($f0=[1]) LogicalFilter(condition=[=($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -6766,7 +6766,7 @@ where case deptno LogicalAggregate(group=[{}], EXPR$0=[COUNT()]) LogicalProject($f0=[1]) LogicalFilter(condition=[=(CASE(=($7, 30), 1, =($7, 20), 2, =($7, 10), 1, =($7, 30), 111, 0), 1)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -6774,7 +6774,7 @@ LogicalAggregate(group=[{}], EXPR$0=[COUNT()]) LogicalAggregate(group=[{}], EXPR$0=[COUNT()]) LogicalProject($f0=[1]) LogicalFilter(condition=[OR(=($7, 30), =($7, 10))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -6788,16 +6788,16 @@ where coalesce(e1.mgr, -1) = coalesce(e2.mgr, -1)]]> LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], EMPNO0=[$9], ENAME0=[$10], JOB0=[$11], MGR0=[$12], HIREDATE0=[$13], SAL0=[$14], COMM0=[$15], DEPTNO0=[$16], SLACKER0=[$17]) LogicalFilter(condition=[=(CASE(IS NOT NULL($3), $3, -1), CASE(IS NOT NULL($12), $12, -1))]) LogicalJoin(condition=[true], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -6812,7 +6812,7 @@ LogicalProject(DEPTNO=[$0]) LogicalFilter(condition=[>($1, 1)]) LogicalAggregate(group=[{0}], agg#0=[COUNT()]) LogicalProject(DEPTNO=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -6821,7 +6821,7 @@ LogicalProject(DEPTNO=[$0]) 
LogicalFilter(condition=[>($1, 1)]) LogicalAggregate(group=[{0}], agg#0=[COUNT()]) LogicalProject(DEPTNO=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -6841,8 +6841,8 @@ group by e.job,d.name]]> LogicalProject(JOB=[$0], MIN_SAL=[$2], MIN_DEPTNO=[$3], SUM_SAL_PLUS=[+($4, 1)], MAX_SAL=[$5], SUM_SAL_2=[$4], COUNT_SAL=[$6], COUNT_MGR=[$7]) LogicalAggregate(group=[{2, 10}], MIN_SAL=[MIN($5)], MIN_DEPTNO=[MIN($7)], SUM_SAL_2=[SUM($5)], MAX_SAL=[MAX($5)], COUNT_SAL=[COUNT()], COUNT_MGR=[COUNT($3)]) LogicalJoin(condition=[=($2, $10)], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -6851,9 +6851,9 @@ LogicalProject(JOB=[$0], MIN_SAL=[$2], MIN_DEPTNO=[$3], SUM_SAL_PLUS=[+($4, 1)], LogicalProject(JOB=[$0], NAME=[$7], MIN_SAL=[$1], MIN_DEPTNO=[$2], $f9=[CAST(*($3, $8)):INTEGER NOT NULL], MAX_SAL=[$4], $f10=[*($5, $8)], $f11=[*($6, $8)]) LogicalJoin(condition=[=($0, $7)], joinType=[inner]) LogicalAggregate(group=[{2}], MIN_SAL=[MIN($5)], MIN_DEPTNO=[MIN($7)], SUM_SAL_2=[SUM($5)], MAX_SAL=[MAX($5)], COUNT_SAL=[COUNT()], COUNT_MGR=[COUNT($3)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{1}], agg#0=[COUNT()]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -6865,8 +6865,8 @@ LogicalProject(JOB=[$0], MIN_SAL=[$2], MIN_DEPTNO=[$3], SUM_SAL_PLUS=[+($4, 1)], @@ -6875,9 +6875,9 @@ LogicalAggregate(group=[{}], EXPR$0=[$SUM0($4)]) LogicalProject(JOB=[$0], EXPR$0=[$1], NAME=[$2], EXPR$00=[$3], $f4=[*($1, $3)]) LogicalJoin(condition=[=($0, $2)], joinType=[inner]) LogicalAggregate(group=[{2}], EXPR$0=[COUNT()]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{1}], EXPR$0=[COUNT()]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -6890,16 +6890,16 @@ LogicalAggregate(group=[{}], EXPR$0=[$SUM0($4)]) LogicalAggregate(group=[{}], EXPR$0=[COUNT(DISTINCT $0)]) LogicalProject(SAL=[$5]) LogicalJoin(condition=[=($2, $10)], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -6916,9 +6916,9 @@ group by d.name]]> @@ -6926,9 +6926,9 @@ LogicalAggregate(group=[{9}], SUM_SAL=[SUM($5)], C=[COUNT()]) LogicalProject(NAME=[$3], SUM_SAL=[$1], C=[$2]) LogicalJoin(condition=[=($0, $3)], joinType=[inner]) LogicalAggregate(group=[{2}], SUM_SAL=[SUM($5)], C=[COUNT()]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{1}]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -6940,14 +6940,14 @@ LogicalProject(NAME=[$3], SUM_SAL=[$1], C=[$2]) @@ -6960,13 +6960,13 @@ from emp]]> @@ -6979,13 +6979,13 @@ from emp]]> @@ -6999,20 +6999,20 @@ LogicalProject(NEWCOL=[+($0, CAST(1):INTEGER)]) ($5, 100)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> ($5, 100)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -7026,11 +7026,11 @@ SemiJoin(condition=[=($0, $2)], joinType=[inner]) ($5, 100)]) - LogicalScan(table=[[CATALOG, SALES, 
EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -7044,17 +7044,17 @@ LogicalProject(DEPTNO=[$0], NAME=[$1]) ($5, 100)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -7068,11 +7068,11 @@ LogicalProject(NAME=[$1]) ($5, 100)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -7087,9 +7087,9 @@ order by sal limit 10]]> LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], DEPTNO0=[$9], NAME=[$10]) LogicalSort(sort0=[$5], dir0=[ASC], fetch=[10]) LogicalJoin(condition=[=($7, $9)], joinType=[left]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(DEPTNO=[$0], NAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -7098,9 +7098,9 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalSort(sort0=[$5], dir0=[ASC], fetch=[10]) LogicalJoin(condition=[=($7, $9)], joinType=[left]) LogicalSort(sort0=[$5], dir0=[ASC], fetch=[10]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(DEPTNO=[$0], NAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -7115,20 +7115,20 @@ from sales.emp]]> LogicalProject(EMPNO=[$0], D=[$SCALAR_QUERY({ LogicalProject(DEPTNO=[$7]) LogicalFilter(condition=[<($0, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) })]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -7142,9 +7142,9 @@ LogicalProject(EMPNO=[$0], D=[$9]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[> SOME($0, { LogicalProject(DEPTNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) })]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -7153,10 +7153,10 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[OR(AND(IS TRUE(>($0, $9)), <>($10, 0)), AND(>($0, $9), <>($10, 0), IS NOT TRUE(>($0, $9)), <=($10, $11)))]) LogicalJoin(condition=[true], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{}], m=[MIN($0)], c=[COUNT()], d=[COUNT($0)]) LogicalProject(DEPTNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -7165,10 +7165,10 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[OR(AND(IS TRUE(>($0, $9)), <>($10, 0)), AND(>($0, $9), <>($10, 0), IS NOT TRUE(>($0, $9)), <=($10, $11)))]) LogicalJoin(condition=[true], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{}], m=[MIN($0)], c=[COUNT()], d=[COUNT($0)]) LogicalProject(DEPTNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> 
@@ -7183,9 +7183,9 @@ order by name]]> LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], DEPTNO0=[$9], NAME=[$10]) LogicalSort(sort0=[$10], dir0=[ASC]) LogicalJoin(condition=[=($7, $9)], joinType=[right]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(DEPTNO=[$0], NAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -7193,10 +7193,10 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], DEPTNO0=[$9], NAME=[$10]) LogicalSort(sort0=[$10], dir0=[ASC]) LogicalJoin(condition=[=($7, $9)], joinType=[right]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalSort(sort0=[$1], dir0=[ASC]) LogicalProject(DEPTNO=[$0], NAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -7211,9 +7211,9 @@ order by sal, name limit 10]]> LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], DEPTNO0=[$9], NAME=[$10]) LogicalSort(sort0=[$5], sort1=[$10], dir0=[ASC], dir1=[ASC], fetch=[10]) LogicalJoin(condition=[=($7, $9)], joinType=[left]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(DEPTNO=[$0], NAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -7221,9 +7221,9 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], DEPTNO0=[$9], NAME=[$10]) LogicalSort(sort0=[$5], sort1=[$10], dir0=[ASC], dir1=[ASC], fetch=[10]) LogicalJoin(condition=[=($7, $9)], joinType=[left]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(DEPTNO=[$0], NAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -7238,9 +7238,9 @@ order by name]]> LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], DEPTNO0=[$9], NAME=[$10]) LogicalSort(sort0=[$10], dir0=[ASC]) LogicalJoin(condition=[=($7, $9)], joinType=[right]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(DEPTNO=[$0], NAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -7248,10 +7248,10 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], DEPTNO0=[$9], NAME=[$10]) LogicalSort(sort0=[$10], dir0=[ASC]) LogicalJoin(condition=[=($7, $9)], joinType=[right]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalSort(sort0=[$1], dir0=[ASC]) LogicalProject(DEPTNO=[$0], NAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -7266,10 +7266,10 @@ limit 10]]> LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], DEPTNO0=[$9], 
NAME=[$10]) LogicalSort(fetch=[10]) LogicalJoin(condition=[=($7, $9)], joinType=[right]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(DEPTNO=[$0], NAME=[$1]) LogicalSort(fetch=[10]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -7277,10 +7277,10 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], DEPTNO0=[$9], NAME=[$10]) LogicalSort(fetch=[10]) LogicalJoin(condition=[=($7, $9)], joinType=[right]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(DEPTNO=[$0], NAME=[$1]) LogicalSort(fetch=[10]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -7294,8 +7294,8 @@ right join sales.emp e using (deptno) limit 10 offset 2]]> LogicalProject(DEPTNO=[$0], EMPNO=[$2]) LogicalSort(offset=[2], fetch=[10]) LogicalJoin(condition=[=($0, $9)], joinType=[right]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -7303,9 +7303,9 @@ LogicalProject(DEPTNO=[$0], EMPNO=[$2]) LogicalProject(DEPTNO=[$0], EMPNO=[$2]) LogicalSort(offset=[2], fetch=[10]) LogicalJoin(condition=[=($0, $9)], joinType=[right]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalSort(offset=[2], fetch=[10]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -7319,8 +7319,8 @@ left join sales.emp e using (deptno) order by d.deptno offset 1]]> LogicalProject(DEPTNO=[$0], EMPNO=[$2]) LogicalSort(sort0=[$0], dir0=[ASC], offset=[1]) LogicalJoin(condition=[=($0, $9)], joinType=[left]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -7328,8 +7328,8 @@ LogicalProject(DEPTNO=[$0], EMPNO=[$2]) LogicalProject(DEPTNO=[$0], EMPNO=[$2]) LogicalSort(sort0=[$0], dir0=[ASC], offset=[1]) LogicalJoin(condition=[=($0, $9)], joinType=[left]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -7342,14 +7342,14 @@ order by cast(d.deptno as integer) offset 1]]> @@ -7362,14 +7362,14 @@ order by cast(d.deptno as double) offset 1]]> @@ -7382,14 +7382,14 @@ order by cast(d.deptno as varchar(10)) offset 1]]> @@ -7406,9 +7406,9 @@ LogicalSort(sort0=[$0], dir0=[ASC], fetch=[10]) LogicalProject(NAME=[$0]) LogicalUnion(all=[true]) LogicalProject(NAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalProject(NAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -7417,10 +7417,10 @@ LogicalSort(sort0=[$0], dir0=[ASC], fetch=[10]) LogicalUnion(all=[true]) LogicalSort(sort0=[$0], dir0=[ASC], fetch=[10]) LogicalProject(NAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalSort(sort0=[$0], dir0=[ASC], fetch=[10]) LogicalProject(NAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> 
@@ -7435,20 +7435,20 @@ from sales.emp]]> LogicalProject(EMPNO=[$0], D=[IN($7, { LogicalProject(DEPTNO=[$7]) LogicalFilter(condition=[<($0, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) })]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -7466,9 +7466,9 @@ LogicalProject(EMPNO=[$0]) LogicalFilter(condition=[OR(IN($0, $7, { LogicalProject(EMPNO=[$0], DEPTNO=[$7]) LogicalFilter(condition=[<($0, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) }), <($5, 100))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -7477,10 +7477,10 @@ LogicalProject(EMPNO=[$0]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[OR(IS NOT NULL($11), <($5, 100))]) LogicalJoin(condition=[AND(=($0, $9), =($7, $10))], joinType=[left]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], DEPTNO=[$7], i=[true]) LogicalFilter(condition=[<($0, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -7496,10 +7496,10 @@ LogicalProject(EMPNO=[$0]) LogicalJoin(condition=[IN($7, { LogicalProject(DEPTNO=[$7]) LogicalFilter(condition=[<($0, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) })], joinType=[left]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -7507,13 +7507,13 @@ LogicalProject(DEPTNO=[$7]) LogicalProject(EMPNO=[$0]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], DEPTNO0=[$9], NAME=[$10]) LogicalJoin(condition=[true], joinType=[left]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalJoin(condition=[=($7, $11)], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalAggregate(group=[{0}]) LogicalProject(DEPTNO=[$7]) LogicalFilter(condition=[<($0, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -7530,9 +7530,9 @@ LogicalProject(EMPNO=[$0]) LogicalFilter(condition=[OR(IN($7, { LogicalProject(DEPTNO=[$7]) LogicalFilter(condition=[<($0, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) }), <($5, 100))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -7541,11 +7541,11 @@ LogicalProject(EMPNO=[$0]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[OR(IS NOT NULL($10), <($5, 100))]) LogicalJoin(condition=[=($7, $9)], joinType=[left]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0, 1}]) LogicalProject(DEPTNO=[$7], i=[true]) LogicalFilter(condition=[<($0, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -7560,10 +7560,10 @@ on exists (select deptno from sales.emp where empno < 20)]]> LogicalProject(EMPNO=[$0]) LogicalJoin(condition=[EXISTS({ LogicalFilter(condition=[<($0, 20)]) - 
LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) })], joinType=[left]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -7571,13 +7571,13 @@ LogicalFilter(condition=[<($0, 20)]) LogicalProject(EMPNO=[$0]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], DEPTNO0=[$9], NAME=[$10]) LogicalJoin(condition=[true], joinType=[left]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalJoin(condition=[true], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalAggregate(group=[{0}]) LogicalProject(i=[true]) LogicalFilter(condition=[<($0, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -7591,20 +7591,20 @@ from sales.emp]]> @@ -7621,14 +7621,14 @@ LogicalProject(EMPNO=[$0]) LogicalJoin(condition=[<($SCALAR_QUERY({ LogicalProject(DEPTNO=[$7]) LogicalFilter(condition=[<($0, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) }), $SCALAR_QUERY({ LogicalProject(DEPTNO=[$7]) LogicalFilter(condition=[>($0, 100)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) }))], joinType=[left]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -7637,18 +7637,18 @@ LogicalProject(EMPNO=[$0]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], DEPTNO0=[$9], NAME=[$10]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], DEPTNO0=[$9], NAME=[$10], $f0=[$11]) LogicalJoin(condition=[<($11, $12)], joinType=[left]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalJoin(condition=[true], joinType=[left]) LogicalJoin(condition=[true], joinType=[left]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalAggregate(group=[{}], agg#0=[SINGLE_VALUE($0)]) LogicalProject(DEPTNO=[$7]) LogicalFilter(condition=[<($0, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{}], agg#0=[SINGLE_VALUE($0)]) LogicalProject(DEPTNO=[$7]) LogicalFilter(condition=[>($0, 100)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -7664,9 +7664,9 @@ or emp.sal < 100]]> LogicalProject(EMPNO=[$0]) LogicalFilter(condition=[OR(EXISTS({ LogicalFilter(condition=[<($0, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) }), <($5, 100))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -7675,11 +7675,11 @@ LogicalProject(EMPNO=[$0]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[OR(IS NOT NULL($9), <($5, 100))]) LogicalJoin(condition=[true], joinType=[left]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0}]) 
LogicalProject(i=[true]) LogicalFilter(condition=[<($0, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -7694,19 +7694,19 @@ from sales.emp]]> LogicalProject(EMPNO=[$0], D=[IN($0, $7, { LogicalProject(EMPNO=[$0], DEPTNO=[$7]) LogicalFilter(condition=[<($0, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) })]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -7723,10 +7723,10 @@ LogicalProject(EMPNO=[$0]) LogicalJoin(condition=[IN($0, $9, { LogicalProject(EMPNO=[$0], DEPTNO=[$7]) LogicalFilter(condition=[<($0, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) })], joinType=[left]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -7734,13 +7734,13 @@ LogicalProject(EMPNO=[$0], DEPTNO=[$7]) LogicalProject(EMPNO=[$0]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], DEPTNO0=[$9], NAME=[$10]) LogicalJoin(condition=[true], joinType=[left]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalJoin(condition=[AND(=($0, $11), =($9, $12))], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalAggregate(group=[{0, 1}]) LogicalProject(EMPNO=[$0], DEPTNO=[$7]) LogicalFilter(condition=[<($0, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -7758,13 +7758,13 @@ LogicalProject(EMPNO=[$0]) LogicalFilter(condition=[OR(<($SCALAR_QUERY({ LogicalProject(DEPTNO=[$7]) LogicalFilter(condition=[<($0, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) }), $SCALAR_QUERY({ LogicalProject(DEPTNO=[$7]) LogicalFilter(condition=[>($0, 100)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) })), <($5, 100))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -7774,15 +7774,15 @@ LogicalProject(EMPNO=[$0]) LogicalFilter(condition=[OR(<($9, $10), <($5, 100))]) LogicalJoin(condition=[true], joinType=[left]) LogicalJoin(condition=[true], joinType=[left]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{}], agg#0=[SINGLE_VALUE($0)]) LogicalProject(DEPTNO=[$7]) LogicalFilter(condition=[<($0, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{}], agg#0=[SINGLE_VALUE($0)]) LogicalProject(DEPTNO=[$7]) LogicalFilter(condition=[>($0, 100)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -7797,7 +7797,7 @@ and empno = 10 and mgr is null and empno = 10]]> @@ -7818,14 +7818,14 @@ and empno = 10 and mgr is null and empno = 10]]> @@ -7844,7 +7844,7 @@ LogicalSort(sort0=[$1], dir0=[DESC-nulls-last]) LogicalAggregate(group=[{0, 1}], C=[COUNT()]) LogicalProject(DEPTNO=[$7], SAL=[$5]) LogicalFilter(condition=[=($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -7853,7 +7853,7 @@ LogicalProject(C=[$2], DEPTNO=[$0]) LogicalAggregate(group=[{0, 1}], C=[COUNT()]) LogicalProject(DEPTNO=[$7], SAL=[$5]) 
LogicalFilter(condition=[=($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -7872,7 +7872,7 @@ LogicalSort(sort0=[$1], sort1=[$2], dir0=[ASC], dir1=[DESC]) LogicalAggregate(group=[{0, 1}], C=[COUNT()]) LogicalProject(DEPTNO=[$7], SAL=[$5]) LogicalFilter(condition=[=($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -7882,7 +7882,7 @@ LogicalSort(sort0=[$2], dir0=[DESC]) LogicalAggregate(group=[{0, 1}], C=[COUNT()]) LogicalProject(DEPTNO=[$7], SAL=[$5]) LogicalFilter(condition=[=($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -7899,9 +7899,9 @@ LogicalSort(sort0=[$0], dir0=[ASC]) LogicalProject(NAME=[$0]) LogicalUnion(all=[true]) LogicalProject(NAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalProject(NAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -7910,10 +7910,10 @@ LogicalSort(sort0=[$0], dir0=[ASC]) LogicalUnion(all=[true]) LogicalSort(sort0=[$0], dir0=[ASC]) LogicalProject(NAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalSort(sort0=[$0], dir0=[ASC]) LogicalProject(NAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -7930,9 +7930,9 @@ LogicalSort(sort0=[$0], dir0=[ASC], fetch=[0]) LogicalProject(NAME=[$0]) LogicalUnion(all=[true]) LogicalProject(NAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalProject(NAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -7941,10 +7941,10 @@ LogicalSort(sort0=[$0], dir0=[ASC], fetch=[0]) LogicalUnion(all=[true]) LogicalSort(sort0=[$0], dir0=[ASC], fetch=[0]) LogicalProject(NAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalSort(sort0=[$0], dir0=[ASC], fetch=[0]) LogicalProject(NAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -7961,7 +7961,7 @@ LogicalProject(C=[$2]) LogicalAggregate(group=[{0, 1}], C=[COUNT()]) LogicalProject(DEPTNO=[$7], SAL=[$5]) LogicalFilter(condition=[=($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -7971,7 +7971,7 @@ LogicalProject(C=[$2]) LogicalAggregate(group=[{1}], C=[COUNT()]) LogicalProject(DEPTNO=[$7], SAL=[$5]) LogicalFilter(condition=[=($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -7988,7 +7988,7 @@ LogicalProject(C=[$1]) LogicalAggregate(group=[{0}], C=[COUNT()]) LogicalProject(DEPTNO=[$7]) LogicalFilter(condition=[=($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -7997,7 +7997,7 @@ LogicalProject(C=[$1]) LogicalAggregate(group=[{0}], C=[COUNT()]) LogicalProject(DEPTNO=[$7]) LogicalFilter(condition=[=($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -8016,7 +8016,7 @@ LogicalProject(JOB=[$1]) LogicalAggregate(group=[{0, 1}], agg#0=[COUNT()]) LogicalProject(SAL=[$5], JOB=[$2]) LogicalFilter(condition=[AND(IS NULL($5), =($2, 'Clerk'))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ 
-8027,7 +8027,7 @@ LogicalProject(JOB=[$1]) LogicalAggregate(group=[{0}], agg#0=[COUNT()]) LogicalProject(SAL=[$5], JOB=[$2]) LogicalFilter(condition=[AND(IS NULL($5), =($2, 'Clerk'))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -8046,9 +8046,9 @@ LogicalProject(EMPNO=[$0], D=[IN(CASE(true, CAST($7):INTEGER, null:INTEGER), { LogicalProject(DEPTNO=[$1]) LogicalFilter(condition=[<($0, 20)]) LogicalProject(EMPNO=[$0], DEPTNO=[CASE(true, CAST($7):INTEGER, null:INTEGER)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) })]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -8056,17 +8056,17 @@ LogicalProject(DEPTNO=[$1]) LogicalProject(EMPNO=[$0], D=[CASE(=($9, 0), false, IS NULL(CASE(true, CAST($7):INTEGER, null:INTEGER)), null:NULL, IS NOT NULL($12), true, <($10, $9), null:NULL, false)]) LogicalJoin(condition=[=($7, $11)], joinType=[left]) LogicalJoin(condition=[true], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{}], c=[COUNT()], ck=[COUNT($0)]) LogicalProject(DEPTNO=[$1]) LogicalFilter(condition=[<($0, 20)]) LogicalProject(EMPNO=[$0], DEPTNO=[CASE(true, CAST($7):INTEGER, null:INTEGER)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0, 1}]) LogicalProject(DEPTNO=[$1], i=[true]) LogicalFilter(condition=[<($0, 20)]) LogicalProject(EMPNO=[$0], DEPTNO=[CASE(true, CAST($7):INTEGER, null:INTEGER)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -8088,13 +8088,13 @@ LogicalProject(EMPNO=[$0]) LogicalFilter(condition=[<($0, CASE(=(IN($7, { LogicalProject(EXPR$0=[CASE(true, CAST($7):INTEGER, null:INTEGER)]) LogicalFilter(condition=[<($0, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) }), true), 10, =(IN($7, { LogicalProject(EXPR$0=[CASE(true, CAST($7):INTEGER, null:INTEGER)]) LogicalFilter(condition=[<($0, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) }), false), 20, 30))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -8104,15 +8104,15 @@ LogicalProject(EMPNO=[$0]) LogicalFilter(condition=[<($0, CASE(=(OR(AND(IS NOT NULL($12), <>($9, 0)), AND(<($10, $9), null, <>($9, 0), IS NULL($12))), true), 10, =(OR(AND(IS NOT NULL($12), <>($9, 0)), AND(<($10, $9), null, <>($9, 0), IS NULL($12))), false), 20, 30))]) LogicalJoin(condition=[=($7, $11)], joinType=[left]) LogicalJoin(condition=[true], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{}], c=[COUNT()], ck=[COUNT($0)]) LogicalProject(EXPR$0=[CASE(true, CAST($7):INTEGER, null:INTEGER)]) LogicalFilter(condition=[<($0, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0, 1}]) LogicalProject(EXPR$0=[CASE(true, CAST($7):INTEGER, null:INTEGER)], i=[true]) LogicalFilter(condition=[<($0, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -8130,13 +8130,13 @@ LogicalProject(EMPNO=[$0]) LogicalFilter(condition=[<($0, CASE(=(IN($7, { LogicalProject(EXPR$0=[CASE(true, CAST($7):INTEGER, null)]) LogicalFilter(condition=[<($0, 20)]) - LogicalScan(table=[[CATALOG, 
SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) }), true), 10, =(IN($7, { LogicalProject(EXPR$0=[CASE(true, CAST($7):INTEGER, null)]) LogicalFilter(condition=[<($0, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) }), false), 20, 30))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -8146,16 +8146,16 @@ LogicalProject(EMPNO=[$0]) LogicalFilter(condition=[<($0, CASE(=(CASE(=($9, 0), false, IS NOT NULL($12), true, <($10, $9), null, false), true), 10, =(CASE(=($9, 0), false, IS NOT NULL($12), true, <($10, $9), null, false), false), 20, 30))]) LogicalJoin(condition=[=($7, $11)], joinType=[left]) LogicalJoin(condition=[true], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{}], c=[COUNT()], ck=[COUNT($0)]) LogicalProject(EXPR$0=[CASE(true, CAST($7):INTEGER, null)]) LogicalFilter(condition=[<($0, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0, 1}]) LogicalProject(EXPR$0=[$0], i=[true]) LogicalProject(EXPR$0=[CASE(true, CAST($7):INTEGER, null)]) LogicalFilter(condition=[<($0, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -8170,9 +8170,9 @@ where exists (select deptno from sales.emp where empno < 20)]]> LogicalProject(EMPNO=[$0]) LogicalFilter(condition=[EXISTS({ LogicalFilter(condition=[<($0, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) })]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -8180,11 +8180,11 @@ LogicalFilter(condition=[<($0, 20)]) LogicalProject(EMPNO=[$0]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalJoin(condition=[true], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0}]) LogicalProject(i=[true]) LogicalFilter(condition=[<($0, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -8200,9 +8200,9 @@ and emp.sal < 100]]> LogicalProject(EMPNO=[$0]) LogicalFilter(condition=[AND(EXISTS({ LogicalFilter(condition=[<($0, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) }), <($5, 100))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -8211,11 +8211,11 @@ LogicalProject(EMPNO=[$0]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[<($5, 100)]) LogicalJoin(condition=[true], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0}]) LogicalProject(i=[true]) LogicalFilter(condition=[<($0, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -8230,9 +8230,9 @@ LogicalProject(SAL=[$5]) LogicalFilter(condition=[IN($0, { LogicalProject(DEPTNO=[$0]) LogicalFilter(condition=[=($cor0.JOB, $1)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) })], variablesSet=[[$cor0]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -8241,19 +8241,19 @@ 
LogicalProject(SAL=[$5]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($0, $9)]) LogicalCorrelate(correlation=[$cor0], joinType=[inner], requiredColumns=[{2}]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(DEPTNO=[$0]) LogicalFilter(condition=[=($cor0.JOB, $1)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -8270,11 +8270,11 @@ LogicalProject(EMPNO=[$0]) LogicalFilter(condition=[IN($5, { LogicalProject(SAL=[$5]) LogicalFilter(condition=[>($7, $cor0.DEPTNO)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) })], variablesSet=[[$cor0]]) LogicalJoin(condition=[=($7, $9)], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -8284,12 +8284,12 @@ LogicalProject(EMPNO=[$0]) LogicalFilter(condition=[=($5, $11)]) LogicalCorrelate(correlation=[$cor0], joinType=[inner], requiredColumns=[{7}]) LogicalJoin(condition=[=($7, $9)], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalAggregate(group=[{0}]) LogicalProject(SAL=[$5]) LogicalFilter(condition=[>($7, $cor0.DEPTNO)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -8305,8 +8305,8 @@ group by emp.empno]]> LogicalAggregate(group=[{0}], EXPR$1=[COUNT()], EXPR$2=[AVG(DISTINCT $1)]) LogicalProject(EMPNO=[$0], DEPTNO0=[$9]) LogicalJoin(condition=[=($7, $9)], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -8315,8 +8315,8 @@ LogicalAggregate(group=[{0}], EXPR$1=[$SUM0($2)], EXPR$2=[AVG($1)]) LogicalAggregate(group=[{0, 1}], EXPR$1=[COUNT()]) LogicalProject(EMPNO=[$0], DEPTNO0=[$9]) LogicalJoin(condition=[=($7, $9)], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -8333,7 +8333,7 @@ group by name]]> LogicalAggregate(group=[{0}], EXPR$1=[SUM(DISTINCT $1)], EXPR$2=[SUM(DISTINCT $2)]) LogicalAggregate(group=[{0}], CN=[COUNT()], SM=[SUM($1)]) LogicalProject(NAME=[$1], DEPTNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -8344,7 +8344,7 @@ LogicalProject(NAME=[$0], EXPR$1=[CAST($1):BIGINT NOT NULL], EXPR$2=[CAST($2):IN LogicalAggregate(group=[{0, 1, 2}], groups=[[{0, 1}, {0, 2}]], $g=[GROUPING($0, $1, $2)]) LogicalAggregate(group=[{0}], CN=[COUNT()], SM=[SUM($1)]) LogicalProject(NAME=[$1], DEPTNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -8378,7 +8378,7 @@ group by job, empno, sal]]> LogicalAggregate(group=[{0, 1, 2}], S=[SUM($2)]) LogicalProject(JOB=[$2], EMPNO=[$0], SAL=[$5]) LogicalFilter(condition=[=($0, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -8387,7 +8387,7 @@ LogicalProject(JOB=[$0], EMPNO=[10], SAL=[$1], S=[$2]) LogicalAggregate(group=[{0, 
2}], S=[SUM($2)]) LogicalProject(JOB=[$2], EMPNO=[$0], SAL=[$5]) LogicalFilter(condition=[=($0, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -8401,7 +8401,7 @@ group by ename, sal]]> @@ -8409,7 +8409,7 @@ LogicalAggregate(group=[{1, 5}]) LogicalProject(ENAME=['John':VARCHAR(20)], SAL=[$0]) LogicalAggregate(group=[{5}]) LogicalFilter(condition=[=($1, 'John')]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -8426,10 +8426,10 @@ LogicalProject(ENAME=[$0]) LogicalFilter(condition=[IN($1, { LogicalProject(DEPTNO=[$7]) LogicalFilter(condition=[=(+($5, 1), $cor0.SALPLUS)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) })], variablesSet=[[$cor0]]) LogicalProject(ENAME=[$1], DEPTNO=[$7], SALPLUS=[+($5, 1)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -8439,11 +8439,11 @@ LogicalProject(ENAME=[$0]) LogicalFilter(condition=[=($1, $3)]) LogicalCorrelate(correlation=[$cor0], joinType=[inner], requiredColumns=[{2}]) LogicalProject(ENAME=[$1], DEPTNO=[$7], SALPLUS=[+($5, 1)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0}]) LogicalProject(DEPTNO=[$7]) LogicalFilter(condition=[=(+($5, 1), $cor0.SALPLUS)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -8451,12 +8451,12 @@ LogicalProject(ENAME=[$0]) LogicalProject(ENAME=[$0]) LogicalJoin(condition=[AND(=($2, $4), =($1, $3))], joinType=[inner]) LogicalProject(ENAME=[$1], DEPTNO=[$7], SALPLUS=[+($5, 1)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0, 1}]) LogicalProject(DEPTNO=[$7], $f9=[$9]) LogicalFilter(condition=[=(+($5, 1), $9)]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], $f9=[+($5, 1)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -8473,10 +8473,10 @@ LogicalProject(NAME=[$0]) LogicalFilter(condition=[IN($1, { LogicalProject(DEPTNO=[$7]) LogicalFilter(condition=[=(+($5, 1), $cor0.DEPTNOMINUS)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) })], variablesSet=[[$cor0]]) LogicalProject(NAME=[$1], DEPTNO=[$0], DEPTNOMINUS=[-($0, 10)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -8486,11 +8486,11 @@ LogicalProject(NAME=[$0]) LogicalFilter(condition=[=($1, $3)]) LogicalCorrelate(correlation=[$cor0], joinType=[inner], requiredColumns=[{2}]) LogicalProject(NAME=[$1], DEPTNO=[$0], DEPTNOMINUS=[-($0, 10)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalAggregate(group=[{0}]) LogicalProject(DEPTNO=[$7]) LogicalFilter(condition=[=(+($5, 1), $cor0.DEPTNOMINUS)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -8498,12 +8498,12 @@ LogicalProject(NAME=[$0]) LogicalProject(NAME=[$0]) LogicalJoin(condition=[AND(=($2, $4), =($1, $3))], joinType=[inner]) LogicalProject(NAME=[$1], DEPTNO=[$0], DEPTNOMINUS=[-($0, 10)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalAggregate(group=[{0, 1}]) LogicalProject(DEPTNO=[$7], $f9=[$9]) LogicalFilter(condition=[=(+($5, 1), $9)]) 
LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], $f9=[+($5, 1)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -8520,9 +8520,9 @@ LogicalProject(SAL=[$5]) LogicalFilter(condition=[NOT(IN($0, { LogicalProject(DEPTNO=[$0]) LogicalFilter(condition=[=($cor0.JOB, $1)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) }))], variablesSet=[[$cor0]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -8531,12 +8531,12 @@ LogicalProject(SAL=[$5]) LogicalFilter(condition=[OR(IS NOT TRUE(OR(IS NOT NULL($13), <($11, $10))), IS TRUE(=($10, 0)))]) LogicalJoin(condition=[AND(=($0, $12), =($2, $14))], joinType=[left]) LogicalJoin(condition=[=($2, $9)], joinType=[left]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0}], c=[COUNT()], ck=[COUNT($1)]) LogicalProject(NAME=[$1], DEPTNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalProject(DEPTNO=[$0], i=[true], NAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -8546,15 +8546,15 @@ LogicalProject(SAL=[$5]) LogicalFilter(condition=[OR(IS NOT TRUE(OR(IS NOT NULL($12), <($10, $9))), IS TRUE(=($9, 0)))]) LogicalCorrelate(correlation=[$cor0], joinType=[left], requiredColumns=[{2}]) LogicalCorrelate(correlation=[$cor0], joinType=[left], requiredColumns=[{2}]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{}], c=[COUNT()], ck=[COUNT($0)]) LogicalProject(DEPTNO=[$0]) LogicalFilter(condition=[=($cor0.JOB, $1)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalFilter(condition=[=($cor0.EMPNO, $0)]) LogicalProject(DEPTNO=[$0], i=[true]) LogicalFilter(condition=[=($cor0.JOB, $1)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -8569,9 +8569,9 @@ or empno NOT IN (select deptno from dept)]]> LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[OR(=($5, 4), NOT(IN($0, { LogicalProject(DEPTNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) })))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -8580,9 +8580,9 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[OR(=($5, 4), NOT(CASE(IS NOT NULL($10), true, false)))]) LogicalJoin(condition=[=($0, $9)], joinType=[left]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(DEPTNO=[$0], i=[true]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -8591,9 +8591,9 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[OR(=($5, 4), NOT(CASE(IS NOT NULL($10), true, false)))]) LogicalJoin(condition=[=($0, $9)], joinType=[left]) - 
LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(DEPTNO=[$0], i=[true]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -8611,9 +8611,9 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalProject(EMPNO=[$1]) LogicalFilter(condition=[AND(>($2, 2), =($cor0.ENAME, $0))]) LogicalProject(ENAME=[$1], EMPNO=[$0], R=[$5]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) }))], variablesSet=[[$cor0]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -8623,17 +8623,17 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalFilter(condition=[OR(IS NOT TRUE(OR(IS NOT NULL($12), <($10, $9))), IS TRUE(=($9, 0)))]) LogicalCorrelate(correlation=[$cor0], joinType=[left], requiredColumns=[{1}]) LogicalCorrelate(correlation=[$cor0], joinType=[left], requiredColumns=[{1}]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{}], c=[COUNT()], ck=[COUNT($0)]) LogicalProject(EMPNO=[$1]) LogicalFilter(condition=[AND(>($2, 2), =($cor0.ENAME, $0))]) LogicalProject(ENAME=[$1], EMPNO=[$0], R=[$5]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalFilter(condition=[=($cor0.EMPNO, $0)]) LogicalProject(EMPNO=[$1], i=[true]) LogicalFilter(condition=[AND(>($2, 2), =($cor0.ENAME, $0))]) LogicalProject(ENAME=[$1], EMPNO=[$0], R=[$5]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -8642,16 +8642,16 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalFilter(condition=[OR(IS NOT TRUE(OR(IS NOT NULL($13), <($11, $10))), IS TRUE(=($10, 0)))]) LogicalJoin(condition=[AND(=($0, $12), =($1, $14))], joinType=[left]) LogicalJoin(condition=[=($1, $9)], joinType=[left]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0}], c=[COUNT()], ck=[COUNT($1)]) LogicalProject(ENAME=[$0], EMPNO=[$1]) LogicalFilter(condition=[>($2, 2)]) LogicalProject(ENAME=[$1], EMPNO=[$0], R=[$5]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$1], i=[true], ENAME=[$0]) LogicalFilter(condition=[>($2, 2)]) LogicalProject(ENAME=[$1], EMPNO=[$0], R=[$5]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -8663,7 +8663,7 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ @@ -8785,13 +8785,13 @@ LogicalProject(EMPNO=[$0]) LogicalProject(EMPNO=[$0], $f0=[$2]) LogicalCorrelate(correlation=[$cor2], joinType=[left], requiredColumns=[{1}]) LogicalProject(EMPNO=[$0], DEPTNO=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject($f0=[$0]) LogicalAggregate(group=[{}], agg#0=[MIN($0)]) LogicalProject($f0=[true]) LogicalFilter(condition=[=($cor2.DEPTNO, $0)]) LogicalProject(DEPTNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -8828,14 +8828,14 @@ LogicalCorrelate(correlation=[$cor0], joinType=[anti], requiredColumns=[{0}]) ($3, 0), >(CASE(>($3, 0), /($7, $3), null:INTEGER), 1))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> ($3, 0), 
CASE(>($3, 0), >(/($7, $3), 1), null:BOOLEAN))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> diff --git a/core/src/test/resources/org/polypheny/db/test/SqlToRelConverterTest.xml b/core/src/test/resources/org/polypheny/db/test/SqlToRelConverterTest.xml index 3b0c63f2e2..9ce3c31d6b 100644 --- a/core/src/test/resources/org/polypheny/db/test/SqlToRelConverterTest.xml +++ b/core/src/test/resources/org/polypheny/db/test/SqlToRelConverterTest.xml @@ -48,7 +48,7 @@ LogicalProject(EXPR$0=[1]) @@ -60,7 +60,7 @@ LogicalAggregate(group=[{0}]) @@ -72,7 +72,7 @@ LogicalAggregate(group=[{0}]) @@ -85,7 +85,7 @@ LogicalAggregate(group=[{0, 1}]) LogicalProject(D=[$0], EXPR$1=[+($0, $1)]) LogicalAggregate(group=[{0, 1}]) LogicalProject(D=[+($7, $0)], MGR=[$3]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -98,7 +98,7 @@ from emp group by d,mgr]]> @@ -112,7 +112,7 @@ LogicalAggregate(group=[{}], EXPR$0=[COUNT()]) LogicalProject($f0=[0]) LogicalAggregate(group=[{0}]) LogicalProject($f0=[SUBSTRING($1, 2, 3)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -126,7 +126,7 @@ LogicalAggregate(group=[{}], EXPR$0=[COUNT()]) LogicalFilter(condition=[>($0, 1)]) LogicalAggregate(group=[{}], E=[COUNT()]) LogicalProject(EMPNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -141,7 +141,7 @@ LogicalFilter(condition=[>($0, 1)]) ($7, 10), >($7, 20))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -150,7 +150,7 @@ LogicalProject(EMPNO=[$0]) @@ -163,7 +163,7 @@ LogicalAggregate(group=[{0}], SUM_SAL=[SUM($1)]) LogicalProject(EXPR$0=[+($0, 4)], EXPR$1=[$1], EXPR$2=[$2], EXPR$3=[*(2, $3)]) LogicalAggregate(group=[{0}], EXPR$1=[SUM($1)], EXPR$2=[SUM($2)], agg#2=[COUNT()]) LogicalProject(DEPTNO=[$7], SAL=[$5], $f2=[+(3, $5)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -179,7 +179,7 @@ LogicalProject(EXPR$0=[$0]) LogicalFilter(condition=[>($1, 10)]) LogicalAggregate(group=[{}], EXPR$0=[SUM($0)], agg#1=[SUM($1)]) LogicalProject($f0=[+($5, $5)], SAL=[$5]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -192,7 +192,7 @@ LogicalProject(EXPR$0=[$0]) LogicalProject(NAME=[$0]) LogicalAggregate(group=[{0}]) LogicalProject(NAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -205,7 +205,7 @@ LogicalProject(NAME=[$0]) LogicalProject(NAME=[$0], FOO=[$2]) LogicalAggregate(group=[{0, 1}], FOO=[COUNT()]) LogicalProject(NAME=[$1], DEPTNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -220,7 +220,7 @@ group by name, deptno, name)]]> @@ -231,39 +231,39 @@ group by deptno]]> - + - + - + @@ -281,9 +281,9 @@ LogicalAggregate(group=[{}], EXPR$0=[SUM($0)]) LogicalProject($f0=[$SCALAR_QUERY({ LogicalProject(EXPR$0=[CHAR_LENGTH($1)]) LogicalFilter(condition=[=($0, $cor0.EMPNO)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) })]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -304,11 +304,11 @@ LogicalProject(DEPTNO=[$7], EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE LogicalAggregate(group=[{}], EXPR$0=[MAX($0)]) LogicalProject(NAME=[$1]) LogicalFilter(condition=[=($0, 
$cor0.DEPTNO0)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) }))], variablesSet=[[$cor0]]) LogicalJoin(condition=[=($7, $9)], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -333,7 +333,7 @@ LogicalProject(DEPTNO=[$0], NAME=[$1]) Uncollect Collect(field=[EXPR$0]) LogicalProject(DEPTNO=[$0], NAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -344,7 +344,7 @@ (COUNT(DISTINCT $7) OVER (ROWS BETWEEN 10 PRECEDING AND CURRENT ROW), 0), $SUM0(DISTINCT $7) OVER (ROWS BETWEEN 10 PRECEDING AND CURRENT ROW), null:INTEGER)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -359,7 +359,7 @@ from emp @@ -380,7 +380,7 @@ window w as (partition by productId)]]> @@ -392,7 +392,7 @@ LogicalProject(EXPR$0=[ITEM(ITEM($3, 1).DETAIL.SKILLS, +(2, 3)).DESC]) @@ -406,7 +406,7 @@ LogicalProject(DEPTNO=[$0], NAME=[$1]) Uncollect Collect(field=[EXPR$0]) LogicalProject(DEPTNO=[$0], NAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -420,7 +420,7 @@ LogicalProject(DEPTNO=[$0], NAME=[$1], ORDINALITY=[$2]) Uncollect(withOrdinality=[true]) Collect(field=[EXPR$0]) LogicalProject(DEPTNO=[$0], NAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -432,7 +432,7 @@ LogicalProject(EXPR$0=[$1]) LogicalValues(tuples=[[{ true }]]) Collect(field=[EXPR$0]) LogicalProject(DEPTNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -444,7 +444,7 @@ LogicalProject(EXPR$0=[$1]) @@ -458,7 +458,7 @@ LogicalProject(EXPR$0=['a'], EXPR$1=[$SLICE($2)]) @@ -495,11 +495,11 @@ from dept]]> LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[IS NOT NULL($9)]) LogicalJoin(condition=[true], joinType=[left]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{}], agg#0=[MIN($0)]) LogicalProject($f0=[true]) LogicalFilter(condition=[=($0, 55)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -513,11 +513,11 @@ where exists (select 1 from dept where deptno=55)]]> LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[IS NOT NULL($9)]) LogicalCorrelate(correlation=[$cor0], joinType=[left], requiredColumns=[{7}]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{}], agg#0=[MIN($0)]) LogicalProject($f0=[true]) LogicalFilter(condition=[=($cor0.DEPTNO, $0)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -532,7 +532,7 @@ LogicalProject(EXPR$0=[$0]) Uncollect LogicalProject(EXPR$0=[$SLICE($2)]) LogicalCorrelate(correlation=[$cor0], joinType=[inner], requiredColumns=[{0}]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) Collect(field=[EXPR$0]) LogicalUnion(all=[true]) LogicalProject(EXPR$0=[$cor0.DEPTNO]) @@ -548,10 +548,10 @@
LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ @@ -590,9 +590,9 @@ LogicalProject(EXPR$0=[ELEMENT($SLICE($0))]) @@ -604,9 +604,9 @@ LogicalUnion(all=[true]) @@ -620,7 +620,7 @@ LogicalUnion(all=[true]) LogicalUnion(all=[true]) LogicalValues(tuples=[[{ 10 }, { 20 }]]) LogicalProject(EXPR$0=[34]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalUnion(all=[true]) LogicalProject(EXPR$0=[30]) LogicalValues(tuples=[[{ 0 }]]) @@ -640,14 +640,14 @@ union all values (30), (45 + 10)]]> ($0, 20)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalValues(tuples=[[{ 45 }, { 67 }]]) ]]> @@ -708,7 +708,7 @@ LogicalProject(EXPR$0=[NOT(LIKE('a', 'b', 'c'))]) (COUNT($5) OVER (PARTITION BY $2 ORDER BY $4 ROWS BETWEEN 2 PRECEDING AND CURRENT ROW), 0), $SUM0($5) OVER (PARTITION BY $2 ORDER BY $4 ROWS BETWEEN 2 PRECEDING AND CURRENT ROW), null:INTEGER)], EXPR$1=[CASE(>(COUNT($7) OVER (PARTITION BY $2 ORDER BY $4 ROWS BETWEEN 2 PRECEDING AND CURRENT ROW), 0), $SUM0($7) OVER (PARTITION BY $2 ORDER BY $4 ROWS BETWEEN 2 PRECEDING AND CURRENT ROW), null:INTEGER)], EXPR$2=[CASE(>=(COUNT() OVER (PARTITION BY $2 ORDER BY $4 ROWS BETWEEN 3 PRECEDING AND CURRENT ROW), 2), CASE(>(COUNT($7) OVER (PARTITION BY $2 ORDER BY $4 ROWS BETWEEN 3 PRECEDING AND CURRENT ROW), 0), $SUM0($7) OVER (PARTITION BY $2 ORDER BY $4 ROWS BETWEEN 3 PRECEDING AND CURRENT ROW), null:INTEGER), null:NULL)]) LogicalFilter(condition=[>(-($7, $5), 999)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -740,7 +740,7 @@ LogicalProject(EXPR$0=[CHAR_LENGTH('foo')]) @@ -751,7 +751,7 @@ LogicalProject(EXPR$0=[ROW(ROW(1, 2), ROW(3, 4, 5)).EXPR$1.EXPR$2]) @@ -765,7 +765,7 @@ from ( @@ -780,7 +780,7 @@ lateral (select t2."$unnest" as fake_col3 (COUNT($5) OVER (PARTITION BY $2 ORDER BY $4 ROWS BETWEEN 2 PRECEDING AND CURRENT ROW), 0), $SUM0($5) OVER (PARTITION BY $2 ORDER BY $4 ROWS BETWEEN 2 PRECEDING AND CURRENT ROW), null:INTEGER)], EXPR$1=[CAST(/(CASE(>(COUNT($5) OVER (PARTITION BY $2 ORDER BY $4 ROWS BETWEEN 2 PRECEDING AND CURRENT ROW), 0), CAST($SUM0($5) OVER (PARTITION BY $2 ORDER BY $4 ROWS BETWEEN 2 PRECEDING AND CURRENT ROW)):INTEGER, null:INTEGER), COUNT($5) OVER (PARTITION BY $2 ORDER BY $4 ROWS BETWEEN 2 PRECEDING AND CURRENT ROW))):INTEGER]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -806,7 +806,7 @@ window w1 as (partition by job order by hiredate rows 2 preceding)]]> @@ -823,7 +823,7 @@ window w1 as (partition by job order by hiredate rows 2 preceding)]]> @@ -836,7 +836,7 @@ from emp]]> @@ -849,7 +849,7 @@ from emp]]> @@ -861,7 +861,7 @@ LogicalProject(EXPR$0=[*(CAST($0):INTEGER NOT NULL, 3660000:INTERVAL HOUR TO MIN @@ -875,24 +875,24 @@ LogicalAggregate(group=[{0}]) LogicalProject(EXPR$0=[$1]) LogicalAggregate(group=[{0}], EXPR$0=[SUM($1)]) LogicalProject(DEPTNO=[$7], SAL=[$5]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> - + - + @@ -921,7 +921,7 @@ LogicalProject(EMPNO=[$0]) @@ -935,25 +935,25 @@ LogicalProject(EMPNO=[$0]) - + @@ -965,7 +965,7 @@ LogicalProject(NAME=[$0]) @@ -977,7 +977,7 @@ LogicalSort(sort0=[$0], dir0=[ASC]) @@ -989,7 +989,7 @@ LogicalSort(sort0=[$0], dir0=[DESC-nulls-last]) @@ -1001,7 +1001,7 @@ LogicalSort(sort0=[$0], dir0=[DESC]) @@ -1013,7 +1013,7 @@ LogicalSort(sort0=[$1], dir0=[ASC]) @@ -1026,7 +1026,7 @@ LogicalSort(sort0=[$1], 
dir0=[DESC]) LogicalProject(EXPR$0=[$0]) LogicalSort(sort0=[$1], sort1=[$0], dir0=[ASC], dir1=[DESC]) LogicalProject(EXPR$0=[+($0, 1)], DEPTNO=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1039,7 +1039,7 @@ LogicalProject(EXPR$0=[$0]) LogicalProject(EXPR$0=[$0], DEPTNO=[$1], EMPNO=[$2]) LogicalSort(sort0=[$3], dir0=[DESC]) LogicalProject(EXPR$0=[+($0, 1)], DEPTNO=[$7], EMPNO=[$0], EXPR$3=[-1]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1052,7 +1052,7 @@ LogicalProject(EXPR$0=[$0], DEPTNO=[$1], EMPNO=[$2]) LogicalProject(EXPR$0=[$0], DEPTNO=[$1], EMPNO=[$2]) LogicalSort(sort0=[$3], dir0=[DESC]) LogicalProject(EXPR$0=[+($0, 1)], DEPTNO=[$7], EMPNO=[$0], EXPR$3=[+(1, 2)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1065,7 +1065,7 @@ LogicalProject(EXPR$0=[$0], DEPTNO=[$1], EMPNO=[$2]) ProjectRel(EMPNO=[$0], Y=[$1]) SortRel(sort0=[$2], dir0=[Ascending]) ProjectRel(EMPNO=[+($0, 1)], Y=[-($0, 2)], EXPR$2=[+($0, 3)]) - TableAccessRel(table=[[SALES, EMP]]) + TableAccessRel(entity=[[SALES, EMP]]) ]]> @@ -1079,7 +1079,7 @@ from emp order by y + 3]]> LogicalProject(X=[$0], Y=[$1]) LogicalSort(sort0=[$2], dir0=[ASC]) LogicalProject(X=[+($0, 1)], Y=[-($0, 2)], EXPR$2=[+(-($0, 2), 3)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1093,7 +1093,7 @@ from emp order by empno + 3]]> LogicalProject(EMPNO=[$0], Y=[$1]) LogicalSort(sort0=[$2], dir0=[ASC]) LogicalProject(EMPNO=[+($0, 1)], Y=[-($0, 2)], EXPR$2=[+(+($0, 1), 3)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1110,9 +1110,9 @@ LogicalSort(sort0=[$1], sort1=[$0], dir0=[DESC], dir1=[ASC]) LogicalProject(EMPNO=[$0], SAL=[$1]) LogicalUnion(all=[true]) LogicalProject(EMPNO=[$0], SAL=[$5]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(DEPTNO=[$0], DEPTNO0=[$0]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -1130,9 +1130,9 @@ LogicalProject(EMPNO=[$0], SAL=[$1]) LogicalProject(EMPNO=[$0], SAL=[$1], EXPR$2=[+(*($0, $1), 2)]) LogicalUnion(all=[true]) LogicalProject(EMPNO=[$0], SAL=[$5]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(DEPTNO=[$0], DEPTNO0=[$0]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -1150,7 +1150,7 @@ LogicalProject(DEPTNO=[$0], EXPR$1=[$1]) LogicalProject(DEPTNO=[$0], EXPR$1=[$1], EXPR$2=[*($0, $2)], EXPR$3=[$3]) LogicalAggregate(group=[{0}], EXPR$1=[COUNT()], agg#1=[SUM($1)], agg#2=[MIN($2)]) LogicalProject(DEPTNO=[$7], SAL=[$5], EMPNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1165,7 +1165,7 @@ LogicalProject(EMPNO=[$0], EXPR$1=[$1]) LogicalSort(sort0=[$2], dir0=[ASC]) LogicalAggregate(group=[{0, 1, 2}]) LogicalProject(EMPNO=[$0], EXPR$1=[+($7, 1)], EXPR$2=[+(+($7, 1), $0)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1180,8 +1180,8 @@ LogicalProject(EMPNO=[$0]) LogicalSort(sort0=[$1], sort1=[$2], dir0=[DESC], dir1=[ASC]) LogicalProject(EMPNO=[$0], EXPR$1=[+($5, $0)], EXPR$2=[*($5, $0)]) LogicalJoin(condition=[true], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) 
+ LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -1198,9 +1198,9 @@ LogicalSort(sort0=[$1], dir0=[ASC]) LogicalProject(EMPNO=[$0], SAL=[$1]) LogicalUnion(all=[true]) LogicalProject(EMPNO=[$0], SAL=[$5]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(DEPTNO=[$0], DEPTNO0=[$0]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -1212,7 +1212,7 @@ LogicalSort(sort0=[$1], dir0=[ASC]) ($0, 5)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1230,8 +1230,8 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalFilter(condition=[>($0, 5)]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], DEPTNO0=[$9], NAME=[$10]) LogicalJoin(condition=[=($7, $9)], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -1244,7 +1244,7 @@ from emp @@ -1258,7 +1258,7 @@ from emp @@ -1284,7 +1284,7 @@ window w1 as (partition by job order by hiredate rows 2 preceding)]]> (COUNT($5) OVER (PARTITION BY $2 ORDER BY $4 ROWS BETWEEN 2 PRECEDING AND CURRENT ROW), 0), $SUM0($5) OVER (PARTITION BY $2 ORDER BY $4 ROWS BETWEEN 2 PRECEDING AND CURRENT ROW), null:INTEGER)], EXPR$1=[/(CASE(>(COUNT(CAST($5):REAL NOT NULL) OVER (PARTITION BY $2 ORDER BY $4 ROWS BETWEEN 2 PRECEDING AND CURRENT ROW), 0), CAST($SUM0(CAST($5):REAL NOT NULL) OVER (PARTITION BY $2 ORDER BY $4 ROWS BETWEEN 2 PRECEDING AND CURRENT ROW)):REAL, null:REAL), COUNT(CAST($5):REAL NOT NULL) OVER (PARTITION BY $2 ORDER BY $4 ROWS BETWEEN 2 PRECEDING AND CURRENT ROW))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1296,8 +1296,8 @@ LogicalProject(EXPR$0=[CASE(>(COUNT($5) OVER (PARTITION BY $2 ORDER BY $4 ROWS B @@ -1309,9 +1309,9 @@ LogicalProject(DEPTNO=[$7], EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE @@ -1324,8 +1324,8 @@ JOIN dept on emp.deptno = dept.deptno]]> @@ -1338,8 +1338,8 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ @@ -1352,7 +1352,7 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[>($0, 5)]) Sample(mode=[bernoulli], rate=[0.5], repeatableSeed=[-]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1372,8 +1372,8 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], DEPTNO0=[$9], NAME=[$10]) LogicalJoin(condition=[=($7, $9)], joinType=[inner]) Sample(mode=[bernoulli], rate=[0.1], repeatableSeed=[1]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -1386,7 +1386,7 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) 
LogicalFilter(condition=[>($0, 5)]) Sample(mode=[system], rate=[0.5], repeatableSeed=[-]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1406,8 +1406,8 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], DEPTNO0=[$9], NAME=[$10]) LogicalJoin(condition=[=($7, $9)], joinType=[inner]) Sample(mode=[system], rate=[0.1], repeatableSeed=[1]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -1421,7 +1421,7 @@ LogicalProject(EXPR$0=[$0], DEPTNO=[$1], EXPR$2=[$0]) LogicalAggregate(group=[{0, 1}]) LogicalProject(EXPR$0=[+($5, 5)], DEPTNO=[$7]) LogicalFilter(condition=[<($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1436,7 +1436,7 @@ where empno > 10]]> LogicalAggregate(group=[{}], EXPR$0=[COUNT()], EXPR$1=[SUM($0)]) LogicalProject(SAL=[$5]) LogicalFilter(condition=[>($0, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1452,7 +1452,7 @@ where c like 'X%']]> LogicalProject(EXPR$0=[+($0, $1)]) LogicalFilter(condition=[LIKE($2, 'X%')]) LogicalProject(DEPTNO=[$0], UNO=[1], NAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -1471,7 +1471,7 @@ LogicalProject(A=[$0], B=[$1], C=[$2], DEPTNO=[$3], NAME=[$4]) LogicalJoin(condition=[=($3, $2)], joinType=[inner]) LogicalProject(A=[$2], B=[$1], C=[$0]) LogicalValues(tuples=[[{ 1, 2, 3 }]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -1483,7 +1483,7 @@ from (select row(row(1)) r from dept) t]]> @@ -1499,10 +1499,10 @@ LogicalProject(GRADE=[$1]) LogicalJoin(condition=[true], joinType=[inner]) LogicalUnion(all=[false]) LogicalProject(EMPNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(DEPTNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) - LogicalScan(table=[[CATALOG, SALES, SALGRADE]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, SALGRADE]]) ]]> @@ -1514,8 +1514,8 @@ LogicalProject(GRADE=[$1]) @@ -1528,9 +1528,9 @@ FROM emp NATURAL JOIN (SELECT deptno AS foo, name FROM dept) AS d]]> @@ -1544,9 +1544,9 @@ NATURAL JOIN (SELECT deptno, name AS ename FROM dept) AS d]]> @@ -1559,7 +1559,7 @@ order by empno offset 10 rows fetch next 5 rows only]]> @@ -1572,7 +1572,7 @@ order by empno offset ? rows fetch next ? rows only]]> @@ -1584,7 +1584,7 @@ LogicalSort(sort0=[$0], dir0=[ASC], offset=[?0], fetch=[?1]) @@ -1596,7 +1596,7 @@ LogicalSort(fetch=[5]) @@ -1609,7 +1609,7 @@ offset 10 rows fetch next 5 rows only]]> @@ -1622,7 +1622,7 @@ offset ? rows fetch next ? 
rows only]]> @@ -1634,7 +1634,7 @@ LogicalSort(offset=[?0], fetch=[?1]) @@ -1646,7 +1646,7 @@ LogicalSort(offset=[10]) @@ -1660,8 +1660,8 @@ using (n_nationkey)]]> @@ -1676,7 +1676,7 @@ and (deptno = 8 or empno < 100)]]> ($7, 5), OR(=($7, 8), <($0, 100)))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1692,9 +1692,9 @@ join emp as e2 using (empno)]]> LogicalProject(EMPNO=[$0], DEPTNO=[$7], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], SLACKER=[$8], NAME=[$10], ENAME0=[$12], JOB0=[$13], MGR0=[$14], HIREDATE0=[$15], SAL0=[$16], COMM0=[$17], DEPTNO1=[$18], SLACKER0=[$19]) LogicalJoin(condition=[=($0, $11)], joinType=[inner]) LogicalJoin(condition=[=($7, $9)], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1708,9 +1708,9 @@ JOIN dept on emp.deptno + 1 = dept.deptno - 2]]> LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], DEPTNO0=[$10], NAME=[$11]) LogicalJoin(condition=[=($9, $12)], joinType=[inner]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], $f9=[+($7, 1)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(DEPTNO=[$0], NAME=[$1], $f2=[-($0, 2)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -1722,7 +1722,7 @@ select empno as "e", deptno as d, 1 as "e" from EMP)]]> @@ -1734,7 +1734,7 @@ select * from emp2]]> @@ -1752,9 +1752,9 @@ LogicalUnion(all=[true]) LogicalFilter(condition=[<($7, 30)]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[>($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(DEPTNO=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1770,13 +1770,13 @@ where exists ( LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[IS NOT NULL($9)]) LogicalCorrelate(correlation=[$cor1], joinType=[left], requiredColumns=[{7}]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{}], agg#0=[MIN($0)]) LogicalProject($f0=[true]) LogicalFilter(condition=[<=($0, $cor1.DEPTNO)]) LogicalProject(DEPTNO=[$0], NAME=[$1]) LogicalFilter(condition=[>=($0, $cor1.DEPTNO)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -1790,12 +1790,12 @@ from emp]]> ($0, 10)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -1807,7 +1807,7 @@ LogicalProject(C=[$9]) @@ -1821,8 +1821,8 @@ JOIN dept on dept.deptno = emp.deptno + 0]]> LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], DEPTNO0=[$10], NAME=[$11]) LogicalJoin(condition=[=($10, $9)], joinType=[inner]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], $f9=[+($7, 0)]) - 
LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -1836,8 +1836,8 @@ JOIN dept on emp.deptno + 0 = dept.deptno]]> LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], DEPTNO0=[$10], NAME=[$11]) LogicalJoin(condition=[=($9, $10)], joinType=[inner]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], $f9=[+($7, 0)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -1850,9 +1850,9 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ @@ -1870,18 +1870,18 @@ LogicalProject(D2=[$0], D3=[$1]) LogicalFilter(condition=[IS NOT NULL($2)]) LogicalCorrelate(correlation=[$cor3], joinType=[left], requiredColumns=[{0, 1}]) LogicalProject(D2=[+(2, $7)], D3=[+(3, $7)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{}], agg#0=[MIN($0)]) LogicalProject($f0=[true]) LogicalFilter(condition=[AND(=($0, $cor3.D2), IS NOT NULL($1))]) LogicalCorrelate(correlation=[$cor0], joinType=[left], requiredColumns=[{0}]) LogicalProject(D1=[+($0, 1)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalAggregate(group=[{}], agg#0=[MIN($0)]) LogicalProject($f0=[true]) LogicalFilter(condition=[AND(=($0, $cor0.D1), =($1, $cor0.D1), =($2, $cor3.D3))]) LogicalProject(D4=[+($0, 4)], D5=[+($0, 5)], D6=[+($0, 6)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -1899,19 +1899,19 @@ LogicalProject(D2=[$0], D3=[$1]) LogicalProject(D2=[$0], D3=[$1], D1=[CAST($2):INTEGER], D6=[$3], $f2=[CAST($4):BOOLEAN]) LogicalJoin(condition=[AND(=($0, $2), =($1, $3))], joinType=[inner]) LogicalProject(D2=[+(2, $7)], D3=[+(3, $7)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0, 1}], agg#0=[MIN($2)]) LogicalProject(D1=[$0], D6=[$2], $f0=[true]) LogicalFilter(condition=[IS NOT NULL($1)]) LogicalProject(D1=[$0], $f0=[$3], D6=[$2]) LogicalJoin(condition=[=($0, $1)], joinType=[left]) LogicalProject(D1=[+($0, 1)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalAggregate(group=[{0, 1}], agg#0=[MIN($2)]) LogicalProject(D4=[$0], D6=[$2], $f0=[true]) LogicalFilter(condition=[=($1, $0)]) LogicalProject(D4=[+($0, 4)], D5=[+($0, 5)], D6=[+($0, 6)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -1927,16 +1927,16 @@ where exists ( LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], DEPTNO0=[CAST($9):INTEGER], $f1=[CAST($10):BOOLEAN]) LogicalJoin(condition=[=($7, $9)], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0}], agg#0=[MIN($1)]) LogicalProject(DEPTNO0=[$2], $f0=[true]) LogicalFilter(condition=[<=($0, $2)]) LogicalProject(DEPTNO=[$0], NAME=[$1], DEPTNO0=[$2]) LogicalJoin(condition=[>=($0, $2)], 
joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalAggregate(group=[{0}]) LogicalProject(DEPTNO=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1950,10 +1950,10 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], DEPTNO0=[CAST($9):INTEGER], $f1=[CAST($10):BOOLEAN]) LogicalJoin(condition=[=($7, $9)], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0}], agg#0=[MIN($1)]) LogicalProject(DEPTNO=[$0], $f0=[true]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -1967,13 +1967,13 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[IS NOT NULL($9)]) LogicalCorrelate(correlation=[$cor0], joinType=[left], requiredColumns=[{7}]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{}], agg#0=[MIN($0)]) LogicalProject($f0=[true]) LogicalSort(fetch=[1]) LogicalProject(EXPR$0=[1]) LogicalFilter(condition=[=($cor0.DEPTNO, $0)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -1987,12 +1987,12 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], DEPTNO0=[CAST($9):INTEGER], $f1=[CAST($10):BOOLEAN]) LogicalJoin(condition=[=($7, $9)], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0}], agg#0=[MIN($1)]) LogicalProject(DEPTNO=[$1], $f0=[true]) LogicalSort(fetch=[1]) LogicalProject(EXPR$0=[1], DEPTNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -2004,7 +2004,7 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalProject(EXPR$0=[CAST(/(SUM(+(+($1, *(2, $2)), *(3, $3))) OVER (PARTITION BY $0 RANGE BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING), COUNT(+(+($1, *(2, $2)), *(3, $3))) OVER (PARTITION BY $0 RANGE BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING))):INTEGER NOT NULL]) LogicalAggregate(group=[{0}], agg#0=[SUM($1)], agg#1=[MIN($2)], agg#2=[AVG($2)]) LogicalProject(DEPTNO=[$7], SAL=[$5], EMPNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2044,9 +2044,9 @@ LogicalUnion(all=[true]) LogicalFilter(condition=[<($0, 30)]) LogicalProject(DEPTNO=[$0], NAME=[$1]) LogicalFilter(condition=[>($0, 10)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalProject(DEPTNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -2059,7 +2059,7 @@ select * from emp2 order by deptno]]> @@ -2078,9 
+2078,9 @@ LogicalProject(EMPNO=[$0], X=[$1]) LogicalProject(EMPNO=[$0], X=[$1], EXPR$2=[+($0, $1)]) LogicalUnion(all=[true]) LogicalProject(EMPNO=[$0], X=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], X=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2096,13 +2096,13 @@ LogicalProject(NAME=[$1], EXPR$1=[OR(AND(IS NOT NULL($6), <>($2, 0)), AND(<($3, LogicalJoin(condition=[=($4, $5)], joinType=[left]) LogicalProject(DEPTNO=[$0], NAME=[$1], $f0=[$2], $f1=[$3], DEPTNO0=[$0]) LogicalJoin(condition=[true], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalAggregate(group=[{}], agg#0=[COUNT()], agg#1=[COUNT($0)]) LogicalProject(EXPR$0=[CAST($7):INTEGER], $f1=[true]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0}], agg#0=[MIN($1)]) LogicalProject(EXPR$0=[CAST($7):INTEGER], $f1=[true]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2118,13 +2118,13 @@ LogicalProject(EMPNO=[$0], EXPR$1=[NOT(CASE(=($9, 0), false, IS NOT NULL($13), t LogicalJoin(condition=[=($11, $12)], joinType=[left]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], $f0=[$9], $f1=[$10], DEPTNO0=[$7]) LogicalJoin(condition=[true], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{}], agg#0=[COUNT()], agg#1=[COUNT($0)]) LogicalProject(EXPR$0=[CAST($0):INTEGER], $f1=[true]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalAggregate(group=[{0}], agg#0=[MIN($1)]) LogicalProject(EXPR$0=[CAST($0):INTEGER], $f1=[true]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -2138,10 +2138,10 @@ LogicalProject(EMPNO=[$0]) LogicalFilter(condition=[NOT(AND(IS TRUE($11), IS NOT NULL($9)))]) LogicalJoin(condition=[=($9, $10)], joinType=[left]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], DEPTNO0=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0}], agg#0=[MIN($1)]) LogicalProject(DEPTNO=[$0], $f1=[true]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -2156,10 +2156,10 @@ from emp]]> LogicalProject(EMPNO=[$0], EXPR$1=[IS NOT TRUE($11)]) LogicalJoin(condition=[=($9, $10)], joinType=[left]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], DEPTNO0=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0}], agg#0=[MIN($1)]) LogicalProject(DEPTNO=[$0], $f1=[true]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -2170,7 +2170,7 @@ LogicalProject(EMPNO=[$0], EXPR$1=[IS NOT TRUE($11)]) @@ -2199,11 +2199,11 @@ from emp]]> LogicalProject(EMPNO=[$0], EXPR$1=[CAST(IS NOT TRUE($11)):BOOLEAN]) LogicalJoin(condition=[=($9, $10)], joinType=[left]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], DEPTNO0=[$7]) - 
LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0}], agg#0=[MIN($1)]) LogicalProject(MGR=[$3], $f1=[true]) LogicalFilter(condition=[>($3, 5)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2218,11 +2218,11 @@ from emp]]> LogicalProject(EMPNO=[$0], EXPR$1=[CAST(IS NOT TRUE($11)):BOOLEAN]) LogicalJoin(condition=[=($9, $10)], joinType=[left]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], DEPTNO0=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0}], agg#0=[MIN($1)]) LogicalProject(MGR=[$3], $f1=[true]) LogicalFilter(condition=[IS NOT NULL($3)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2238,15 +2238,15 @@ from emp]]> LogicalProject(EMPNO=[$0], EXPR$1=[CAST(IS NOT TRUE($11)):BOOLEAN]) LogicalJoin(condition=[=($9, $10)], joinType=[left]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], DEPTNO0=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0}], agg#0=[MIN($1)]) LogicalProject(MGR=[$3], $f1=[true]) LogicalJoin(condition=[=($3, $9)], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0}]) LogicalProject(MGR=[$3]) LogicalFilter(condition=[=($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2262,13 +2262,13 @@ LogicalProject(EMPNO=[$0], EXPR$1=[NOT(CASE(=($9, 0), false, IS NOT NULL($13), t LogicalJoin(condition=[=($11, $12)], joinType=[left]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], $f0=[$9], $f1=[$10], DEPTNO0=[$7]) LogicalJoin(condition=[true], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{}], agg#0=[COUNT()], agg#1=[COUNT($0)]) LogicalProject(MGR=[$3], $f1=[true]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0}], agg#0=[MIN($1)]) LogicalProject(MGR=[$3], $f1=[true]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2281,7 +2281,7 @@ LogicalProject(EMPNO=[$0], EXPR$1=[NOT(CASE(=($9, 0), false, IS NOT NULL($13), t LogicalProject(EXPR$0=[$1]) LogicalAggregate(group=[{0}], EXPR$0=[SUM($1)]) LogicalProject(DEPTNO=[$7], SAL=[$5]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2293,7 +2293,7 @@ LogicalProject(EXPR$0=[$1]) @@ -2305,7 +2305,7 @@ LogicalAggregate(group=[{}], EXPR$0=[SUM($0)]) @@ -2320,7 +2320,7 @@ order by 2]]> LogicalSort(sort0=[$1], dir0=[ASC]) LogicalAggregate(group=[{0, 1}], groups=[[{0, 1}, {0}]], EXPR$2=[SUM($2)]) LogicalProject(DEPTNO=[$7], ENAME=[$1], SAL=[$5]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2335,7 +2335,7 @@ order by 2]]> LogicalSort(sort0=[$1], dir0=[ASC]) LogicalAggregate(group=[{0, 1}], groups=[[{0, 1}, {0}, {}]], EXPR$2=[SUM($2)]) LogicalProject(DEPTNO=[$7], ENAME=[$1], SAL=[$5]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> 
@@ -2350,7 +2350,7 @@ order by 2]]> LogicalSort(sort0=[$1], dir0=[ASC]) LogicalAggregate(group=[{0, 1}], groups=[[{0, 1}, {0}, {1}, {}]], EXPR$2=[SUM($2)]) LogicalProject(DEPTNO=[$7], ENAME=[$1], SAL=[$5]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2365,7 +2365,7 @@ order by 2]]> LogicalSort(sort0=[$1], dir0=[ASC]) LogicalAggregate(group=[{0, 1}], groups=[[{0, 1}, {0}, {1}, {}]], EXPR$2=[SUM($2)]) LogicalProject(DEPTNO=[$7], ENAME=[$1], SAL=[$5]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2383,7 +2383,7 @@ group by sal, LogicalProject(EXPR$0=[$3]) LogicalAggregate(group=[{0, 1, 2}], groups=[[{0, 1, 2}, {0, 1}, {0, 2}]], EXPR$0=[SUM($0)]) LogicalProject(SAL=[$5], DEPTNO=[$7], ENAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2498,7 +2498,7 @@ group by substring(ename FROM 1 FOR 1)]]> LogicalProject(EXPR$0=[$1]) LogicalAggregate(group=[{0}], EXPR$0=[COUNT()]) LogicalProject($f0=[SUBSTRING($1, 1, 1)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2530,7 +2530,7 @@ LogicalSort(sort0=[$1], dir0=[ASC]) LogicalProject(DEPTNO=[$1], EXPR$1=[$2], EXPR$2=[$3], EXPR$3=[$4]) LogicalAggregate(group=[{0, 1}], EXPR$1=[GROUPING($1)], EXPR$2=[COUNT()], EXPR$3=[GROUPING($0)]) LogicalProject(EMPNO=[$0], DEPTNO=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2546,7 +2546,7 @@ group by rollup(empno, deptno)]]> LogicalProject(DEPTNO=[$1], EXPR$1=[$2], EXPR$2=[$3], EXPR$3=[$4]) LogicalAggregate(group=[{0, 1}], groups=[[{0, 1}, {0}, {}]], EXPR$1=[GROUPING($1)], EXPR$2=[COUNT()], EXPR$3=[GROUPING($0)]) LogicalProject(EMPNO=[$0], DEPTNO=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2566,7 +2566,7 @@ LogicalProject(DEPTNO=[$1], EXPR$1=[$2], EXPR$2=[$3], EXPR$3=[$4]) (PREV(UP.$3, 0), PREV(UP.$3, 1))]], inputFields=[[EMPNO, ENAME, JOB, MGR, HIREDATE, SAL, COMM, DEPTNO, SLACKER]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2590,7 +2590,7 @@ from emp match_recognize ( (PREV(UP.$3, 0), PREV(UP.$3, 1))]], inputFields=[[EMPNO, ENAME, JOB, MGR, HIREDATE, SAL, COMM, DEPTNO, SLACKER]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2614,7 +2614,7 @@ from emp match_recognize ( (PREV(UP.$3, 0), PREV(UP.$3, 1))]], inputFields=[[EMPNO, ENAME, JOB, MGR, HIREDATE, SAL, COMM, DEPTNO, SLACKER]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2639,7 +2639,7 @@ from emp match_recognize ( (PREV(UP.$3, 0), PREV(UP.$3, 1))]], inputFields=[[EMPNO, ENAME, JOB, MGR, HIREDATE, SAL, COMM, DEPTNO, SLACKER]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2659,7 +2659,7 @@ LogicalProject(JOB=[$0], SAL=[$1], EMPNO=[$2], ENAME=[$3], MGR=[$4], HIREDATE=[$ (PREV(UP.$3, 0), NEXT(PREV(UP.$3, 0), 1))]], inputFields=[[EMPNO, ENAME, JOB, MGR, HIREDATE, SAL, COMM, DEPTNO, SLACKER]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2682,7 +2682,7 @@ MATCH_RECOGNIZE ( (PREV(UP.$3, 0), LAST(DOWN.$3, 1))]], inputFields=[[EMPNO, ENAME, JOB, MGR, HIREDATE, SAL, COMM, DEPTNO, SLACKER]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, 
EMP]]) ]]> @@ -2706,7 +2706,7 @@ MATCH_RECOGNIZE ( (PREV(UP.$3, 0), PREV(LAST(DOWN.$3, 1), 1))]], inputFields=[[EMPNO, ENAME, JOB, MGR, HIREDATE, SAL, COMM, DEPTNO, SLACKER]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2727,7 +2727,7 @@ LogicalProject(START_MGR=[$0], BOTTOM_MGR=[$1], END_MGR=[$2]) (PREV(UP.$3, 0), NEXT(PREV(UP.$3, 0), 1))]], inputFields=[[EMPNO, ENAME, JOB, MGR, HIREDATE, SAL, COMM, DEPTNO, SLACKER]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2755,7 +2755,7 @@ MATCH_RECOGNIZE ( (PREV(UP.$3, 0), 15), >(PREV(UP.$3, 0), 20))]], inputFields=[[EMPNO, ENAME, JOB, MGR, HIREDATE, SAL, COMM, DEPTNO, SLACKER]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2767,7 +2767,7 @@ LogicalProject(START_MGR=[$0], UP_DAYS=[$1], TOTAL_DAYS=[$2]) @@ -2778,7 +2778,7 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ @@ -2789,7 +2789,7 @@ LogicalProject(DEPTNO=[$0], NAME=[$1]) @@ -2800,7 +2800,7 @@ LogicalProject(EXPR$0=[+($0, $0)]) @@ -2811,7 +2811,7 @@ LogicalProject(DEPTNO=[$0], NAME=[$1], X=[$2]) @@ -2822,7 +2822,7 @@ LogicalProject(DEPTNO=[$0], X=[$2]) @@ -2834,7 +2834,7 @@ from EMP_MODIFIABLEVIEW extend (x varchar(5) not null)]]> @@ -2846,7 +2846,7 @@ from EMP_MODIFIABLEVIEW extend (x varchar(5) not null)]]> @@ -2858,7 +2858,7 @@ from EMP_MODIFIABLEVIEW extend (x int not null)]]> @@ -2871,7 +2871,7 @@ where deptno = ?]]> @@ -2882,7 +2882,7 @@ LogicalProject(DEPTNO=[$3]) @@ -2896,7 +2896,7 @@ GROUP BY (CASE WHEN emp.empno IN (3) THEN 0 ELSE 1 END)]]> @@ -2912,7 +2912,7 @@ GROUP BY empno, EXPR$2]]> LogicalProject(EMPNO=[$0], EXPR$2=[$1], EXPR$20=[$2]) LogicalAggregate(group=[{0, 1}], EXPR$2=[COUNT()]) LogicalProject(EMPNO=[$0], EXPR$2=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2924,7 +2924,7 @@ LogicalProject(EMPNO=[$0], EXPR$2=[$1], EXPR$20=[$2]) @@ -2939,9 +2939,9 @@ FROM emp]]> LogicalAggregate(group=[{}], EXPR$0=[SUM($0)]) LogicalProject($f0=[CASE(IN($7, { LogicalProject(DEPTNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) }), 1, 0)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2954,10 +2954,10 @@ LogicalProject(DEPTNO=[$0]) LogicalAggregate(group=[{}], EXPR$0=[SUM($0)]) LogicalProject(EXPR$0=[$9]) LogicalJoin(condition=[true], joinType=[left]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{}], EXPR$0=[MIN($0)]) LogicalProject(DEPTNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -2970,7 +2970,7 @@ LogicalAggregate(group=[{}], EXPR$0=[SUM($0)]) LogicalDelta LogicalProject(PRODUCTID=[$1]) LogicalFilter(condition=[=($1, 10)]) - LogicalScan(table=[[CATALOG, SALES, ORDERS]]) + LogicalScan(entity=[[CATALOG, SALES, ORDERS]]) ]]> @@ -2986,7 +2986,7 @@ group by floor(rowtime to second)]]> LogicalDelta LogicalAggregate(group=[{0}], C=[COUNT()]) LogicalProject(ROWTIME=[FLOOR($0, FLAG(SECOND))]) - LogicalScan(table=[[CATALOG, SALES, ORDERS]]) + LogicalScan(entity=[[CATALOG, SALES, ORDERS]]) ]]> @@ -3002,7 +3002,7 @@ from orders]]> @@ -3018,7 +3018,7 @@ LogicalDelta LogicalProject(ROWTIME=[+($0, 7200000:INTERVAL HOUR)], PRODUCTID=[$1]) LogicalAggregate(group=[{0, 1}]) LogicalProject($f0=[TUMBLE($0, 
7200000:INTERVAL HOUR)], PRODUCTID=[$1]) - LogicalScan(table=[[CATALOG, SALES, ORDERS]]) + LogicalScan(entity=[[CATALOG, SALES, ORDERS]]) ]]> @@ -3035,7 +3035,7 @@ LogicalDelta LogicalProject(ROWTIME=[$0], C=[$1]) LogicalAggregate(group=[{0}], C=[COUNT()]) LogicalProject($f0=[HOP($0, 3600000:INTERVAL HOUR, 10800000:INTERVAL HOUR)]) - LogicalScan(table=[[CATALOG, SALES, ORDERS]]) + LogicalScan(entity=[[CATALOG, SALES, ORDERS]]) ]]> @@ -3053,10 +3053,10 @@ LogicalProject(**=[$1]) LogicalFilter(condition=[NOT(AND(IS TRUE($4), IS NOT NULL($2)))]) LogicalJoin(condition=[=($2, $3)], joinType=[left]) LogicalProject(N_NAME=[$0], **=[$1], N_NAME0=[$0]) - LogicalScan(table=[[CATALOG, SALES, NATION]]) + LogicalScan(entity=[[CATALOG, SALES, NATION]]) LogicalAggregate(group=[{0}], agg#0=[MIN($1)]) LogicalProject(EXPR$0=[''], $f1=[true]) - LogicalScan(table=[[CATALOG, SALES, NATION]]) + LogicalScan(entity=[[CATALOG, SALES, NATION]]) ]]> @@ -3074,7 +3074,7 @@ LogicalDelta LogicalProject(ROWTIME=[$0], EXPR$1=[$0], C=[$1]) LogicalAggregate(group=[{0}], C=[COUNT()]) LogicalProject($f0=[SESSION($0, 3600000:INTERVAL HOUR)]) - LogicalScan(table=[[CATALOG, SALES, ORDERS]]) + LogicalScan(entity=[[CATALOG, SALES, ORDERS]]) ]]> @@ -3089,7 +3089,7 @@ group by (CASE WHEN (deptno IN (10, 20)) THEN 0 ELSE deptno END)]]> @@ -3104,7 +3104,7 @@ group by deptno]]> @@ -3119,7 +3119,7 @@ group by deptno]]> ($0, 1), <>($0, 2))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3134,11 +3134,11 @@ where deptno > (select min(deptno) * 2 + 10 from EMP)]]> LogicalProject(DEPTNO=[$7]) LogicalFilter(condition=[>($7, $9)]) LogicalJoin(condition=[true], joinType=[left]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EXPR$0=[+(*($0, 2), 10)]) LogicalAggregate(group=[{}], agg#0=[MIN($0)]) LogicalProject(DEPTNO=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3153,7 +3153,7 @@ where deptno > (values 10)]]> LogicalProject(DEPTNO=[$7]) LogicalFilter(condition=[>($7, $9)]) LogicalJoin(condition=[true], joinType=[left]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalValues(tuples=[[{ 10 }]]) ]]> @@ -3170,10 +3170,10 @@ from EMP order by deptno limit 1)]]> LogicalProject(DEPTNO=[$7]) LogicalFilter(condition=[>($7, $9)]) LogicalJoin(condition=[true], joinType=[left]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalSort(sort0=[$0], dir0=[ASC], fetch=[1]) LogicalProject(DEPTNO=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3187,7 +3187,7 @@ where deptno in (1, 2) or deptno in (1, 2)]]> @@ -3205,7 +3205,7 @@ LogicalProject(DEPTNO=[$0]) LogicalFilter(condition=[>(+($1, $2), 10)]) LogicalAggregate(group=[{0}], agg#0=[SUM($1)], agg#1=[SUM($2)]) LogicalProject(DEPTNO=[$7], $f1=[CASE(OR(=($7, 1), =($7, 2)), 0, 1)], $f2=[CASE(OR(=($7, 3), =($7, 4)), 0, 1)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3226,13 +3226,13 @@ LogicalProject(SAL=[$0]) LogicalJoin(condition=[=($0, $1)], joinType=[inner]) LogicalAggregate(group=[{0}]) LogicalProject(SAL=[$5]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0}]) LogicalProject(DEPTNO=[$0]) LogicalFilter(condition=[>($1, 0)]) LogicalAggregate(group=[{0}], agg#0=[SUM($0)]) 
LogicalProject(DEPTNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -3251,10 +3251,10 @@ LogicalProject(DEPTNO=[$0]) LogicalJoin(condition=[true], joinType=[left]) LogicalAggregate(group=[{0}], agg#0=[MAX($1)]) LogicalProject(DEPTNO=[$7], EMPNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{}], EXPR$0=[MIN($0)]) LogicalProject(EMPNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3272,10 +3272,10 @@ LogicalProject(DEPTNO=[$0], B=[>($1, $2)]) LogicalJoin(condition=[true], joinType=[left]) LogicalAggregate(group=[{0}], agg#0=[MAX($1)]) LogicalProject(DEPTNO=[$7], EMPNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{}], EXPR$0=[MIN($0)]) LogicalProject(EMPNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3286,7 +3286,7 @@ values (10, 150, 'Fred')]]> @@ -3299,7 +3299,7 @@ values (50, 'Fred')]]> @@ -3312,7 +3312,7 @@ values (?, ?, ?)]]> @@ -3325,7 +3325,7 @@ values (?, ?)]]> @@ -3337,7 +3337,7 @@ LogicalTableModify(table=[[CATALOG, SALES, EMPNULLABLES]], operation=[INSERT], f @@ -3349,7 +3349,7 @@ LogicalTableModify(table=[[CATALOG, SALES, EMPDEFAULTS]], operation=[INSERT], fl ($0, 100)]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], SLACKER=[$8]) LogicalFilter(condition=[AND(=($7, 20), >($5, 1000))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3393,7 +3393,7 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ @@ -3407,7 +3407,7 @@ LogicalProject(ENAME=[$1], EMPNO=[$0], JOB=[$2], SLACKER=[$6], SAL=[$5], HIREDAT @@ -3421,7 +3421,7 @@ LogicalProject(ENAME=[$1], EMPNO=[$0], JOB=[$2], SLACKER=[$6], sal=[$7], HIREDAT @@ -3435,7 +3435,7 @@ LogicalProject(ENAME=[$0], EMPNO=[$1], JOB=[$2], SLACKER=[$4], SAL=[$5], HIREDAT @@ -3449,7 +3449,7 @@ LogicalProject(ENAME=[$0], EMPNO=[$1], JOB=[$2], SLACKER=[$4], SAL=[$5], HIREDAT @@ -3463,7 +3463,7 @@ LogicalProject(ENAME=[$1], EMPNO=[$0], JOB=[$2], SLACKER=[$6], SAL=[$5], HIREDAT @@ -3474,7 +3474,7 @@ values (150, 'Fred')]]> ($5, 1000)]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[null:VARCHAR(10)], MGR=[null:INTEGER], HIREDATE=[null:TIMESTAMP(0)], SAL=[null:INTEGER], COMM=[null:INTEGER], DEPTNO=[20], SLACKER=[null:BOOLEAN]) LogicalValues(tuples=[[{ 150, 'Fred' }]]) @@ -3488,7 +3488,7 @@ values (10, 'Fred')]]> ($5, 1000)]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[null:VARCHAR(10)], MGR=[null:INTEGER], HIREDATE=[null:TIMESTAMP(0)], SAL=[null:INTEGER], COMM=[null:INTEGER], DEPTNO=[20], SLACKER=[null:BOOLEAN]) LogicalValues(tuples=[[{ 10, 'Fred' }]]) @@ -3501,7 +3501,7 @@ LogicalTableModify(table=[[CATALOG, SALES, EMPNULLABLES]], operation=[INSERT], f @@ -3513,7 +3513,7 @@ LogicalTableModify(table=[[SALES, EMPDEFAULTS]], operation=[INSERT], flattened=[ @@ -3525,7 +3525,7 @@ LogicalTableModify(table=[[SALES, EMPDEFAULTS]], operation=[INSERT], flattened=[ @@ -3537,7 +3537,7 @@ LogicalTableModify(table=[[SALES, EMPDEFAULTS]], operation=[INSERT], flattened=[ @@ -3549,7 +3549,7 @@ LogicalTableModify(table=[[SALES, EMPDEFAULTS]], operation=[INSERT], flattened=[ @@ -3561,7 +3561,7 @@ LogicalTableModify(table=[[CATALOG, SALES, EMPDEFAULTS]], operation=[INSERT], fl @@ -3573,7 +3573,7 @@ 
LogicalTableModify(table=[[CATALOG, SALES, EMPDEFAULTS]], operation=[INSERT], fl @@ -3585,7 +3585,7 @@ LogicalTableModify(table=[[CATALOG, SALES, EMPDEFAULTS]], operation=[INSERT], fl @@ -3597,7 +3597,7 @@ LogicalTableModify(table=[[CATALOG, SALES, EMPDEFAULTS]], operation=[INSERT], fl @@ -3610,7 +3610,7 @@ values (?, ?, ?)]]> @@ -3624,7 +3624,7 @@ values (?, ?, ?, ?, ?, ?, ?, ?)]]> @@ -3650,7 +3650,7 @@ LogicalTableModify(table=[[CATALOG, STRUCT, T]], operation=[UPDATE], updateColum @@ -3661,7 +3661,7 @@ LogicalProject(K0=[$0]) @@ -3672,7 +3672,7 @@ LogicalProject(C2=[$7]) @@ -3685,7 +3685,7 @@ LogicalProject(C2=[$7]) LogicalProject(C1=[$0]) LogicalSort(sort0=[$1], dir0=[ASC]) LogicalProject(C1=[$1], "F0"."C1"=[$6]) - LogicalScan(table=[[CATALOG, STRUCT, T]]) + LogicalScan(entity=[[CATALOG, STRUCT, T]]) ]]> @@ -3698,7 +3698,7 @@ LogicalProject(C1=[$0]) LogicalProject(EXPR$0=[$1]) LogicalAggregate(group=[{0}], EXPR$0=[COUNT()]) LogicalProject("F0"."C1"=[$6], "C1"=[$1]) - LogicalScan(table=[[CATALOG, STRUCT, T]]) + LogicalScan(entity=[[CATALOG, STRUCT, T]]) ]]> @@ -3709,7 +3709,7 @@ LogicalProject(EXPR$0=[$1]) @@ -3720,7 +3720,7 @@ LogicalProject("K0"=[$0], "C1"=[$1], "F1"."A0"=[$2], "F2"."A0"=[$3], "F0"."C0"=[ @@ -3736,7 +3736,7 @@ from emp group by deptno, empno LogicalProject(EXPR$0=[$2], EXPR$1=[RANK() OVER (ORDER BY $1 RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW)], EXPR$2=[MAX($1) OVER (PARTITION BY $0 RANGE BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING)]) LogicalAggregate(group=[{0, 1}], EXPR$0=[MIN($0)]) LogicalProject(DEPTNO=[$7], EMPNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3751,7 +3751,7 @@ group by deptno]]> LogicalProject(EXPR$0=[CAST(/(SUM($0) OVER (RANGE BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING), COUNT($0) OVER (RANGE BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING))):INTEGER NOT NULL]) LogicalAggregate(group=[{0}]) LogicalProject(DEPTNO=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3771,8 +3771,8 @@ LogicalProject(EXPR$0=[$3], EXPR$1=[RANK() OVER (ORDER BY $1 RANGE BETWEEN UNBOU LogicalProject(DEPTNO0=[$9], EMPNO=[$0], DEPTNO=[$7]) LogicalFilter(condition=[=($7, $9)]) LogicalJoin(condition=[true], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -3790,7 +3790,7 @@ LogicalProject(EXPR$0=[$2], EXPR$1=[RANK() OVER (ORDER BY $1 RANGE BETWEEN UNBOU LogicalFilter(condition=[AND(<($1, 10), <($2, 20))]) LogicalAggregate(group=[{0, 1}], EXPR$0=[MIN($0)]) LogicalProject(DEPTNO=[$7], EMPNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3812,9 +3812,9 @@ LogicalProject(X=[$0], Y=[$1], Z=[$2], EMPNO=[$3]) LogicalProject(X=[$2], Y=[RANK() OVER (ORDER BY $1 RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW)], Z=[MAX($1) OVER (PARTITION BY $0 RANGE BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING)]) LogicalAggregate(group=[{0, 1}], X=[MIN($0)]) LogicalProject(DEPTNO=[$7], EMPNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], EMPNO0=[CAST($0):BIGINT NOT NULL]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ 
-3828,7 +3828,7 @@ from emp order by row_number() over(partition by empno order by deptno)]]> LogicalProject(DEPTNO=[$0], EXPR$1=[$1]) LogicalSort(sort0=[$2], dir0=[ASC]) LogicalProject(DEPTNO=[$7], EXPR$1=[RANK() OVER (PARTITION BY $0 ORDER BY $7 RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW)], EXPR$2=[ROW_NUMBER() OVER (PARTITION BY $0 ORDER BY $7 ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3864,12 +3864,12 @@ LogicalProject(EMPNO=[$0]) LogicalJoin(condition=[AND(=($0, $11), >($5, $12))], joinType=[inner]) LogicalJoin(condition=[=($7, $9)], joinType=[inner]) LogicalFilter(condition=[<($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalFilter(condition=[<($0, 15)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalAggregate(group=[{0}], EXPR$0=[AVG($1)]) LogicalProject(EMPNO=[$0], SAL=[$5]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3888,11 +3888,11 @@ LogicalAggregate(group=[{}], EXPR$0=[SUM($0)]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], DEPTNO0=[$9], NAME=[$10], DEPTNO1=[CAST($11):INTEGER], EXPR$0=[CAST($12):INTEGER]) LogicalJoin(condition=[AND(=($9, $11), >($5, $12))], joinType=[inner]) LogicalJoin(condition=[=($7, $9)], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalAggregate(group=[{0}], EXPR$0=[AVG($1)]) LogicalProject(DEPTNO=[$7], SAL=[$5]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3910,12 +3910,12 @@ LogicalProject(EMPNO=[$0]) LogicalJoin(condition=[=($0, $11)], joinType=[inner]) LogicalJoin(condition=[=($7, $9)], joinType=[inner]) LogicalFilter(condition=[<($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalFilter(condition=[<($0, 15)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalAggregate(group=[{0}], agg#0=[MIN($1)]) LogicalProject(EMPNO=[$0], $f0=[true]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3933,12 +3933,12 @@ LogicalProject(EMPNO=[$0]) LogicalJoin(condition=[=($0, $11)], joinType=[left]) LogicalJoin(condition=[=($7, $9)], joinType=[inner]) LogicalFilter(condition=[<($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalFilter(condition=[<($0, 15)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalAggregate(group=[{0}], agg#0=[MIN($1)]) LogicalProject(EMPNO=[$0], $f0=[true]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3959,10 +3959,10 @@ LogicalProject(A=[$0], *=[$1]) @@ -3972,10 +3972,10 @@ LogicalTableModify(table=[[CATALOG, SALES, EMP]], operation=[DELETE], flattened= @@ -3985,10 +3985,10 @@ LogicalTableModify(table=[[CATALOG, SALES, EMP]], operation=[DELETE], flattened= @@ -3998,10 +3998,10 @@ LogicalTableModify(table=[[CATALOG, SALES, EMP]], operation=[DELETE], flattened= @@ -4012,10 +4012,10 @@ where note = ?]]> @@ -4040,9 +4040,9 @@ LogicalProject(EXPR$0=[+(2, 2)]) 
LogicalProject(EMPNO=[$0]) LogicalFilter(condition=[> SOME($7, { LogicalProject(DEPTNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) })]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4054,7 +4054,7 @@ LogicalProject(DEPTNO=[$0]) ($7, 10), >($7, 20))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4070,9 +4070,9 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalFilter(condition=[OR(=($7, 10), IN($7, { LogicalProject(DEPTNO=[$0]) LogicalFilter(condition=[<($0, 5)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) }))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4082,10 +4082,10 @@ LogicalProject(DEPTNO=[$0]) @@ -4095,10 +4095,10 @@ LogicalTableModify(table=[[CATALOG, SALES, EMP]], operation=[UPDATE], updateColu @@ -4108,10 +4108,10 @@ LogicalTableModify(table=[[CATALOG, SALES, EMP]], operation=[UPDATE], updateColu @@ -4121,10 +4121,10 @@ LogicalTableModify(table=[[CATALOG, SALES, EMP]], operation=[UPDATE], updateColu @@ -4134,10 +4134,10 @@ LogicalTableModify(table=[[CATALOG, SALES, EMPDEFAULTS]], operation=[UPDATE], up @@ -4147,10 +4147,10 @@ LogicalTableModify(table=[[CATALOG, SALES, EMPDEFAULTS]], operation=[UPDATE], up @@ -4160,10 +4160,10 @@ LogicalTableModify(table=[[CATALOG, SALES, EMP_MODIFIABLEVIEW3]], operation=[UPD @@ -4173,10 +4173,10 @@ LogicalTableModify(table=[[CATALOG, SALES, EMP_MODIFIABLEVIEW2]], operation=[UPD @@ -4186,10 +4186,10 @@ LogicalTableModify(table=[[CATALOG, SALES, EMP_MODIFIABLEVIEW2]], operation=[UPD @@ -4199,10 +4199,10 @@ LogicalTableModify(table=[[CATALOG, SALES, EMP_MODIFIABLEVIEW2]], operation=[UPD @@ -4212,7 +4212,7 @@ LogicalTableModify(table=[[CATALOG, SALES, EMP_MODIFIABLEVIEW3]], operation=[UPD @@ -4232,7 +4232,7 @@ LogicalDelta LogicalProject(S=[$0], E=[+($0, 60000:INTERVAL MINUTE)]) LogicalAggregate(group=[{0}]) LogicalProject($f0=[TUMBLE($1, 60000:INTERVAL MINUTE)]) - LogicalScan(table=[[CATALOG, SALES, SHIPMENTS]]) + LogicalScan(entity=[[CATALOG, SALES, SHIPMENTS]]) ]]> @@ -4249,7 +4249,7 @@ LogicalDelta LogicalProject(ROWTIME=[+($0, 7200000:INTERVAL HOUR)], ORDERID=[$1]) LogicalAggregate(group=[{0, 1}]) LogicalProject($f0=[TUMBLE($1, 7200000:INTERVAL HOUR)], ORDERID=[$0]) - LogicalScan(table=[[CATALOG, SALES, SHIPMENTS]]) + LogicalScan(entity=[[CATALOG, SALES, SHIPMENTS]]) ]]> @@ -4259,10 +4259,10 @@ LogicalDelta @@ -4273,10 +4273,10 @@ set sal = sal + 5000 where slacker = false]]> @@ -4288,10 +4288,10 @@ where slacker = false]]> @@ -4301,9 +4301,9 @@ LogicalTableModify(table=[[CATALOG, SALES, EMP_MODIFIABLEVIEW2]], operation=[UPD @@ -4313,9 +4313,9 @@ LogicalTableModify(table=[[CATALOG, SALES, EMP]], operation=[DELETE], flattened= @@ -4327,19 +4327,19 @@ set empno = ( @@ -4350,7 +4350,7 @@ LogicalTableModify(table=[[CATALOG, SALES, EMP]], operation=[UPDATE], updateColu @@ -4366,9 +4366,9 @@ LogicalProject(C=[$SCALAR_QUERY({ LogicalAggregate(group=[{}], EXPR$0=[COUNT()]) LogicalProject($f0=[0]) LogicalFilter(condition=[>($0, 10)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) })]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4386,9 +4386,9 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ 
LogicalFilter(condition=[<=($0, $cor1.DEPTNO)]) LogicalProject(DEPTNO=[$0], NAME=[$1]) LogicalFilter(condition=[>=($0, $cor1.DEPTNO)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) })], variablesSet=[[$cor1]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4402,9 +4402,9 @@ from dept]]> @@ -4418,9 +4418,9 @@ from emp]]> @@ -4433,9 +4433,9 @@ LogicalProject(DEPTNO=[$0]) LogicalProject(EMPNO=[$0]) LogicalFilter(condition=[NOT(IN($7, { LogicalProject(DEPTNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) }))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4447,7 +4447,7 @@ LogicalProject(DEPTNO=[$0]) @@ -4459,7 +4459,7 @@ LogicalProject(EMPNO=[$0]) @@ -4471,7 +4471,7 @@ LogicalProject(EMPNO=[$0]) @@ -4485,9 +4485,9 @@ from emp]]> @@ -4500,9 +4500,9 @@ LogicalProject(EXPR$0=[CAST($0):INTEGER]) LogicalProject(EMPNO=[$0]) LogicalFilter(condition=[IN($7, { LogicalProject(DEPTNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) })]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4518,7 +4518,7 @@ LogicalProject(DEPTNO=[$0], EMPNO_AVG=[$7]) LogicalProject(DEPTNO=[$0], NAME=[$1], TYPE=[$2], DESC=[$3], A=[$4], B=[$5], EMPLOYEES=[$6], EMPNO_AVG=[$7]) LogicalCorrelate(correlation=[$cor0], joinType=[left], requiredColumns=[{6}]) LogicalProject(DEPTNO=[$0], NAME=[$1], TYPE=[$2.TYPE], DESC=[$2.DESC], A=[$2.OTHERS.A], B=[$2.OTHERS.B], EMPLOYEES=[$3]) - LogicalScan(table=[[CATALOG, SALES, DEPT_NESTED]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT_NESTED]]) LogicalAggregate(group=[{}], EMPNO_AVG=[AVG($0)]) LogicalProject(EMPNO=[$0]) Uncollect @@ -4538,7 +4538,7 @@ from dept_nested as d, LogicalProject(DEPTNO=[$0], EMPNO=[$7]) LogicalCorrelate(correlation=[$cor0], joinType=[inner], requiredColumns=[{6}]) LogicalProject(DEPTNO=[$0], NAME=[$1], TYPE=[$2.TYPE], DESC=[$2.DESC], A=[$2.OTHERS.A], B=[$2.OTHERS.B], EMPLOYEES=[$3]) - LogicalScan(table=[[CATALOG, SALES, DEPT_NESTED]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT_NESTED]]) Uncollect LogicalProject(EMPLOYEES=[$cor0.EMPLOYEES_6]) LogicalValues(tuples=[[{ 0 }]]) @@ -4556,7 +4556,7 @@ from dept_nested as d, LogicalProject(DEPTNO=[$0], EMPNO=[$7]) LogicalCorrelate(correlation=[$cor0], joinType=[inner], requiredColumns=[{6}]) LogicalProject(DEPTNO=[$0], NAME=[$1], TYPE=[$2.TYPE], DESC=[$2.DESC], A=[$2.OTHERS.A], B=[$2.OTHERS.B], EMPLOYEES=[$3]) - LogicalScan(table=[[CATALOG, SALES, DEPT_NESTED]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT_NESTED]]) LogicalProject(EMPNO=[$0], Y=[$1], Z=[$2]) Uncollect LogicalProject(EMPLOYEES=[$cor0.EMPLOYEES_6]) @@ -4578,9 +4578,9 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalFilter(condition=[<=($0, $cor1.DEPTNO)]) LogicalProject(DEPTNO=[$0], NAME=[$1]) LogicalFilter(condition=[>=($0, $cor1.DEPTNO)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) })], variablesSet=[[$cor1]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4593,9 +4593,9 @@ LogicalFilter(condition=[<=($0, $cor1.DEPTNO)]) LogicalProject(EMPNO=[$0]) LogicalFilter(condition=[IN($0, $7, { LogicalProject(EXPR$0=[-($0, 10)], DEPTNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, 
DEPT]]) })]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4611,10 +4611,10 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalJoin(condition=[OR(=($0, 1), IN($9, { LogicalProject(DEPTNO=[$7]) LogicalFilter(condition=[>($0, 5)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) }))], joinType=[left]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -4629,10 +4629,10 @@ or exists (select deptno from emp where empno > dept.deptno + 5)]]> LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], DEPTNO0=[$9], NAME=[$10]) LogicalJoin(condition=[OR(=($0, 1), EXISTS({ LogicalFilter(condition=[>($0, +($cor0.DEPTNO0, 5))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) }))], joinType=[left]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -4650,11 +4650,11 @@ and deptno in ( LogicalProject(S=[$1]) LogicalFilter(condition=[AND(>($2, 2), IN($0, { LogicalProject(EXPR$0=[CAST($7):INTEGER]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) }))]) LogicalAggregate(group=[{0}], S=[SUM($1)], agg#1=[COUNT()]) LogicalProject(DEPTNO=[$7], SAL=[$5]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4672,11 +4672,11 @@ LogicalProject(S=[$0]) LogicalSort(sort0=[$1], sort1=[$2], dir0=[DESC], dir1=[ASC]) LogicalProject(S=[$1], EXPR$1=[$SCALAR_QUERY({ LogicalProject(EXPR$0=[CAST($7):INTEGER]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) })], EXPR$2=[$2]) LogicalAggregate(group=[{0}], S=[SUM($1)], agg#1=[COUNT()]) LogicalProject(DEPTNO=[$7], SAL=[$5]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4693,9 +4693,9 @@ LogicalProject(ENAME=[$0]) LogicalSort(sort0=[$1], sort1=[$0], dir0=[DESC], dir1=[ASC]) LogicalProject(ENAME=[$1], EXPR$1=[$SCALAR_QUERY({ LogicalProject(EXPR$0=[CAST($7):INTEGER]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) })]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4713,9 +4713,9 @@ LogicalProject(S=[$1]) LogicalProject(DEPTNO=[$7], $f1=[$SCALAR_QUERY({ LogicalAggregate(group=[{}], EXPR$0=[MIN($0)]) LogicalProject(DEPTNO=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) })]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4731,9 +4731,9 @@ LogicalAggregate(group=[{}], EXPR$0=[MIN($0)]) LogicalProject(X0=[$0], X1=[$1]) LogicalUnion(all=[true]) LogicalProject(X0=['a'], X1=['a'], X2=['a']) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(X0=['bb'], X1=['bb'], X2=['bb']) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -4750,11 +4750,11 @@ LogicalProject(EMPNO=[$0]) LogicalFilter(condition=[IN($5, { LogicalProject(SAL=[$5]) LogicalFilter(condition=[>($7, $cor0.DEPTNO)]) - 
LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) })], variablesSet=[[$cor0]]) LogicalJoin(condition=[=($7, $9)], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -4767,9 +4767,9 @@ LogicalProject(SAL=[$5]) @@ -4782,13 +4782,13 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ @@ -4802,9 +4802,9 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[EXISTS({ LogicalFilter(condition=[=($cor0.DEPTNO, $0)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) })], variablesSet=[[$cor0]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4822,19 +4822,19 @@ LogicalProject(D2=[$0], D3=[$1]) LogicalProject(D2=[$0], D3=[$1], D1=[CAST($2):INTEGER], D6=[$3], $f2=[CAST($4):BOOLEAN]) LogicalJoin(condition=[AND(=($0, $2), =($1, $3))], joinType=[inner]) LogicalProject(D2=[+(2, $7)], D3=[+(3, $7)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0, 1}], agg#0=[MIN($2)]) LogicalProject(D1=[$0], D6=[$2], $f0=[true]) LogicalFilter(condition=[IS NOT NULL($1)]) LogicalProject(D1=[$0], $f0=[$3], D6=[$2]) LogicalJoin(condition=[=($0, $1)], joinType=[left]) LogicalProject(D1=[+($0, 1)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalAggregate(group=[{0, 1}], agg#0=[MIN($2)]) LogicalProject(D4=[$0], D6=[$2], $f0=[true]) LogicalFilter(condition=[=($1, $0)]) LogicalProject(D4=[+($0, 4)], D5=[+($0, 5)], D6=[+($0, 6)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -4846,7 +4846,7 @@ LogicalProject(D2=[$0], D3=[$1]) @@ -4890,9 +4890,9 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalSort(fetch=[1]) LogicalProject(EXPR$0=[1]) LogicalFilter(condition=[=($cor0.DEPTNO, $0)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) })], variablesSet=[[$cor0]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4906,7 +4906,7 @@ LogicalProject(EXPR$0=[$0]) Uncollect LogicalProject(EXPR$0=[$SLICE($2)]) LogicalCorrelate(correlation=[$cor0], joinType=[inner], requiredColumns=[{0}]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) Collect(field=[EXPR$0]) LogicalUnion(all=[true]) LogicalProject(EXPR$0=[$cor0.DEPTNO]) @@ -4928,12 +4928,12 @@ LogicalProject(EMPNO=[$0]) LogicalJoin(condition=[=($0, $11)], joinType=[inner]) LogicalJoin(condition=[=($7, $9)], joinType=[inner]) LogicalFilter(condition=[<($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalFilter(condition=[<($0, 15)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalAggregate(group=[{0}], agg#0=[MIN($1)]) LogicalProject(EMPNO=[$0], $f0=[true]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4951,20 +4951,20 @@ LogicalProject(EMPNO=[$0]) LogicalJoin(condition=[=($0, $11)], 
joinType=[inner]) LogicalJoin(condition=[=($7, $9)], joinType=[inner]) LogicalFilter(condition=[<($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalFilter(condition=[<($0, 15)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalAggregate(group=[{0}], agg#0=[MIN($1)]) LogicalProject(EMPNO0=[$9], $f0=[true]) LogicalJoin(condition=[<($9, $0)], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0}]) LogicalProject(EMPNO=[$0]) LogicalJoin(condition=[=($7, $9)], joinType=[inner]) LogicalFilter(condition=[<($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalFilter(condition=[<($0, 15)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -4978,11 +4978,11 @@ from dept]]> @@ -4994,7 +4994,7 @@ LogicalProject(DEPTNO=[$0], NAME=[$1], EMPSET=[$2]) @@ -5029,7 +5029,7 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ @@ -5040,7 +5040,7 @@ LogicalProject(N_NATIONKEY=[$0], N_NAME=[$1]) @@ -5055,7 +5055,7 @@ order by n_regionkey]]> LogicalProject(N_NATIONKEY=[$0], N_NAME=[$1]) LogicalSort(sort0=[$2], dir0=[ASC]) LogicalProject(N_NATIONKEY=[ITEM($0, 'N_NATIONKEY')], N_NAME=[ITEM($0, 'N_NAME')], EXPR$2=[ITEM($0, 'N_REGIONKEY')]) - LogicalScan(table=[[CATALOG, SALES, NATION]]) + LogicalScan(entity=[[CATALOG, SALES, NATION]]) ]]> @@ -5069,9 +5069,9 @@ LogicalProject(**=[$0], **0=[$1]) LogicalFilter(condition=[=(ITEM($0, 'N_NATIONKEY'), ITEM($1, 'C_NATIONKEY'))]) LogicalJoin(condition=[true], joinType=[inner]) LogicalProject(**=[$0]) - LogicalScan(table=[[CATALOG, SALES, NATION]]) + LogicalScan(entity=[[CATALOG, SALES, NATION]]) LogicalProject(**=[$0]) - LogicalScan(table=[[CATALOG, SALES, CUSTOMER]]) + LogicalScan(entity=[[CATALOG, SALES, CUSTOMER]]) ]]> @@ -5085,7 +5085,7 @@ LogicalAggregate(group=[{0}], CNT=[COUNT()]) LogicalProject(N_REGIONKEY=[ITEM($0, 'N_REGIONKEY')]) LogicalFilter(condition=[>(ITEM($0, 'N_NATIONKEY'), 5)]) LogicalProject(**=[$0]) - LogicalScan(table=[[CATALOG, SALES, NATION]]) + LogicalScan(entity=[[CATALOG, SALES, NATION]]) ]]> @@ -5098,8 +5098,8 @@ LogicalAggregate(group=[{0}], CNT=[COUNT()]) LogicalProject(**=[$1], **0=[$3]) LogicalFilter(condition=[=($0, $2)]) LogicalJoin(condition=[true], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, NATION]]) - LogicalScan(table=[[CATALOG, SALES, CUSTOMER]]) + LogicalScan(entity=[[CATALOG, SALES, NATION]]) + LogicalScan(entity=[[CATALOG, SALES, CUSTOMER]]) ]]> @@ -5112,8 +5112,8 @@ LogicalProject(**=[$1], **0=[$3]) LogicalProject(**=[$1], R_REGIONKEY=[$2], R_NAME=[$3], R_COMMENT=[$4]) LogicalFilter(condition=[=($0, $2)]) LogicalJoin(condition=[true], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, NATION]]) - LogicalScan(table=[[CATALOG, SALES, REGION]]) + LogicalScan(entity=[[CATALOG, SALES, NATION]]) + LogicalScan(entity=[[CATALOG, SALES, REGION]]) ]]> @@ -5125,7 +5125,7 @@ LogicalProject(**=[$1], R_REGIONKEY=[$2], R_NAME=[$3], R_COMMENT=[$4]) @@ -5139,10 +5139,10 @@ from SALES.REGION where exists (select * from SALES.NATION)]]> LogicalProject(R_REGIONKEY=[$0], R_NAME=[$1], R_COMMENT=[$2]) LogicalFilter(condition=[IS NOT NULL($3)]) LogicalJoin(condition=[true], joinType=[left]) - LogicalScan(table=[[CATALOG, SALES, REGION]]) + LogicalScan(entity=[[CATALOG, SALES, REGION]]) LogicalAggregate(group=[{}], 
agg#0=[MIN($0)]) LogicalProject($f0=[true]) - LogicalScan(table=[[CATALOG, SALES, NATION]]) + LogicalScan(entity=[[CATALOG, SALES, NATION]]) ]]> @@ -5155,7 +5155,7 @@ LogicalProject(R_REGIONKEY=[$0], R_NAME=[$1], R_COMMENT=[$2]) LogicalProject(**=[$0]) LogicalSort(sort0=[$1], dir0=[ASC]) LogicalProject(**=[$0], EXPR$1=[ITEM($0, 'N_NATIONKEY')]) - LogicalScan(table=[[CATALOG, SALES, NATION]]) + LogicalScan(entity=[[CATALOG, SALES, NATION]]) ]]> @@ -5169,7 +5169,7 @@ WHERE cast(e.empno as bigint) in (130, 131, 132, 133, 134)]]> @@ -5177,7 +5177,7 @@ LogicalProject(EMPNO=[$0]) LogicalProject(EMPNO=[$0]) LogicalJoin(condition=[=($9, $10)], joinType=[inner]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], EMPNO0=[CAST($0):BIGINT NOT NULL]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0}]) LogicalValues(tuples=[[{ 130 }, { 131 }, { 132 }, { 133 }, { 134 }]]) ]]> @@ -5195,7 +5195,7 @@ order by row_number() over(partition by empno order by deptno)]]> LogicalProject(DEPTNO=[$0], EXPR$1=[$1]) LogicalSort(sort0=[$2], dir0=[ASC-nulls-first]) LogicalProject(DEPTNO=[$7], EXPR$1=[RANK() OVER (PARTITION BY $0 ORDER BY $7 NULLS FIRST RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW)], EXPR$2=[ROW_NUMBER() OVER (PARTITION BY $0 ORDER BY $7 NULLS FIRST ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -5208,7 +5208,7 @@ WINDOW w AS (PARTITION BY REGION ORDER BY n_nationkey)]]> (COUNT(ITEM($0, 'N_NATIONKEY')) OVER (PARTITION BY ITEM($0, 'REGION') ORDER BY ITEM($0, 'N_NATIONKEY') RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW), 0), $SUM0(ITEM($0, 'N_NATIONKEY')) OVER (PARTITION BY ITEM($0, 'REGION') ORDER BY ITEM($0, 'N_NATIONKEY') RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW), null:ANY)]) - LogicalScan(table=[[CATALOG, SALES, NATION]]) + LogicalScan(entity=[[CATALOG, SALES, NATION]]) ]]> @@ -5225,7 +5225,7 @@ GROUP BY n_regionkey]]> LogicalProject(N_REGIONKEY=[$0], EXPR$1=[MAX($1) OVER (PARTITION BY $0 RANGE BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING)]) LogicalAggregate(group=[{0}], agg#0=[MIN($1)]) LogicalProject(N_REGIONKEY=[ITEM($0, 'N_REGIONKEY')], $f1=[ITEM($0, 'N_NATIONKEY')]) - LogicalScan(table=[[CATALOG, SALES, NATION]]) + LogicalScan(entity=[[CATALOG, SALES, NATION]]) ]]> @@ -5246,7 +5246,7 @@ LogicalProject(A=[$0], B=[$1]) LogicalAggregate(group=[{}], agg#0=[MIN($0)]) LogicalProject($f0=[true]) LogicalFilter(condition=[=($7, $cor0.A)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -5261,7 +5261,7 @@ lateral (select t2."$unnest" as fake_col3 LogicalProject(C1=[$1]) LogicalCorrelate(correlation=[$cor0], joinType=[inner], requiredColumns=[{0}]) LogicalProject(**=[$0]) - LogicalScan(table=[[CATALOG, SALES, CUSTOMER]]) + LogicalScan(entity=[[CATALOG, SALES, CUSTOMER]]) LogicalProject(C1=[$0]) Uncollect LogicalProject(EXPR$0=[ITEM($cor0.**, 'FAKE_COL')]) @@ -5313,7 +5313,7 @@ LogicalProject(C1=[$1]) @@ -5326,7 +5326,7 @@ LogicalAggregate(group=[{}], ANYEMPNO=[ANY_VALUE($0)]) LogicalProject(ANYEMPNO=[$1]) LogicalAggregate(group=[{0}], ANYEMPNO=[ANY_VALUE($1)]) LogicalProject(SAL=[$5], EMPNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -5338,7 +5338,7 @@ from emp]]> @@ -5350,7 +5350,7 @@ from emp]]> @@ -5362,7 +5362,7 @@ from emp]]> @@ 
-5374,7 +5374,7 @@ from emp]]> @@ -5387,7 +5387,7 @@ from emp]]> ($0)]) LogicalProject($f0=[JSON_STRUCTURED_VALUE_EXPRESSION($1)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -5399,7 +5399,7 @@ from emp]]> @@ -5412,7 +5412,7 @@ from emp]]> ($0, $1)]) LogicalProject(ENAME=[$1], $f1=[JSON_STRUCTURED_VALUE_EXPRESSION($7)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -5434,7 +5434,7 @@ from emp]]> @@ -5449,7 +5449,7 @@ group by deptno]]> @@ -5469,8 +5469,8 @@ group by dept.deptno]]> LogicalAggregate(group=[{0}], S=[COLLECT($1) WITHIN GROUP ([1 DESC])], S1=[COLLECT($1) WITHIN GROUP ([2])], S2=[COLLECT($1) WITHIN GROUP ([1]) FILTER $3]) LogicalProject(DEPTNO=[$9], SAL=[$5], $f2=[1], $f3=[>($5, 2000)]) LogicalJoin(condition=[=($7, $9)], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -5485,7 +5485,7 @@ group by deptno]]> ($0, 1), <>($0, 2))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -5498,7 +5498,7 @@ order by empno desc]]> @@ -5513,7 +5513,7 @@ LogicalSort(sort0=[$0], dir0=[DESC]) LogicalProject(EMPNO=[$0]) LogicalSort(sort0=[$1], dir0=[ASC], offset=[1]) LogicalProject(EMPNO=[$0], DEPTNO=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -5528,7 +5528,7 @@ LogicalSort(sort0=[$0], dir0=[ASC]) LogicalProject(EMPNO=[$0]) LogicalSort(sort0=[$1], dir0=[ASC], fetch=[10]) LogicalProject(EMPNO=[$0], DEPTNO=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> diff --git a/core/src/test/resources/sql/agg.iq b/core/src/test/resources/sql/agg.iq index a0b94355c3..db7614e842 100644 --- a/core/src/test/resources/sql/agg.iq +++ b/core/src/test/resources/sql/agg.iq @@ -18,7 +18,7 @@ !use post !set outputformat mysql -# count(*) returns number of rows in table +# count(*) returns the number of rows in the table select count(ename) as c from emp; +---+ | C | @@ -1450,9 +1450,9 @@ EnumerableAggregate(group=[{0}], CF=[COUNT() FILTER $1], C=[COUNT()]) EnumerableCalc(expr#0..1=[{inputs}], expr#2=['CLERK':VARCHAR(9)], expr#3=[=($t0, $t2)], expr#4=[IS TRUE($t3)], DEPTNO=[$t1], $f1=[$t4]) EnumerableUnion(all=[true]) EnumerableCalc(expr#0..7=[{inputs}], expr#8=[20], expr#9=[<($t7, $t8)], JOB=[$t2], DEPTNO=[$t7], $condition=[$t9]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) EnumerableCalc(expr#0..7=[{inputs}], expr#8=[20], expr#9=[>($t7, $t8)], JOB=[$t2], DEPTNO=[$t7], $condition=[$t9]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) !plan # [CALCITE-751] Aggregate join transpose @@ -1469,9 +1469,9 @@ from "scott".emp join "scott".dept using (deptno); EnumerableAggregate(group=[{}], EXPR$0=[COUNT()]) EnumerableJoin(condition=[=($0, $2)], joinType=[inner]) EnumerableCalc(expr#0..2=[{inputs}], DEPTNO=[$t0]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], DEPTNO=[$t7]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) !plan # Push sum: splits into sum * count @@ -1488,9 +1488,9 @@ from "scott".emp join "scott".dept using (deptno); EnumerableAggregate(group=[{}], EXPR$0=[SUM($2)]) EnumerableJoin(condition=[=($0, $3)], joinType=[inner])
EnumerableCalc(expr#0..2=[{inputs}], DEPTNO=[$t0]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5], DEPTNO=[$t7]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) !plan # Push sum; no aggregate needed after join @@ -1511,9 +1511,9 @@ EnumerableCalc(expr#0..2=[{inputs}], EXPR$0=[$t2]) EnumerableAggregate(group=[{0, 3}], EXPR$0=[SUM($2)]) EnumerableJoin(condition=[=($0, $3)], joinType=[inner]) EnumerableCalc(expr#0..2=[{inputs}], DEPTNO=[$t0]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5], DEPTNO=[$t7]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) !plan # Push sum; group by only one of the join keys @@ -1534,9 +1534,9 @@ EnumerableCalc(expr#0..1=[{inputs}], EXPR$0=[$t1]) EnumerableAggregate(group=[{3}], EXPR$0=[SUM($2)]) EnumerableJoin(condition=[=($0, $3)], joinType=[inner]) EnumerableCalc(expr#0..2=[{inputs}], DEPTNO=[$t0]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5], DEPTNO=[$t7]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) !plan # Push min; Join-Aggregate is optimized to SemiJoin @@ -1557,9 +1557,9 @@ EnumerableCalc(expr#0..1=[{inputs}], EXPR$0=[$t1]) EnumerableAggregate(group=[{3}], EXPR$0=[MIN($2)]) EnumerableJoin(condition=[=($0, $3)], joinType=[inner]) EnumerableCalc(expr#0..2=[{inputs}], DEPTNO=[$t0]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5], DEPTNO=[$t7]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) !plan # Push sum and count @@ -1576,9 +1576,9 @@ from "scott".emp join "scott".dept using (deptno); EnumerableAggregate(group=[{}], C=[COUNT()], S=[SUM($2)]) EnumerableJoin(condition=[=($0, $3)], joinType=[inner]) EnumerableCalc(expr#0..2=[{inputs}], DEPTNO=[$t0]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5], DEPTNO=[$t7]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) !plan # Push sum and count, group by join key @@ -1599,9 +1599,9 @@ EnumerableCalc(expr#0..2=[{inputs}], C=[$t1], S=[$t2]) EnumerableAggregate(group=[{3}], C=[COUNT()], S=[SUM($2)]) EnumerableJoin(condition=[=($0, $3)], joinType=[inner]) EnumerableCalc(expr#0..2=[{inputs}], DEPTNO=[$t0]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5], DEPTNO=[$t7]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) !plan # Push sum and count, group by join key plus another column @@ -1627,9 +1627,9 @@ EnumerableCalc(expr#0..3=[{inputs}], C=[$t2], S=[$t3]) EnumerableAggregate(group=[{0, 2}], C=[COUNT()], S=[SUM($3)]) EnumerableJoin(condition=[=($0, $4)], joinType=[inner]) EnumerableCalc(expr#0..2=[{inputs}], DEPTNO=[$t0]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], JOB=[$t2], SAL=[$t5], DEPTNO=[$t7]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) !plan # Push sum and count, group by non-join column @@ -1651,9 +1651,9 @@ EnumerableCalc(expr#0..2=[{inputs}], C=[$t1], 
S=[$t2]) EnumerableAggregate(group=[{2}], C=[COUNT()], S=[SUM($3)]) EnumerableJoin(condition=[=($0, $4)], joinType=[inner]) EnumerableCalc(expr#0..2=[{inputs}], DEPTNO=[$t0]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], JOB=[$t2], SAL=[$t5], DEPTNO=[$t7]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) !plan # Push count and sum, group by superset of join key @@ -1679,9 +1679,9 @@ EnumerableCalc(expr#0..3=[{inputs}], C=[$t2], S=[$t3]) EnumerableAggregate(group=[{0, 2}], C=[COUNT()], S=[SUM($3)]) EnumerableJoin(condition=[=($0, $4)], joinType=[inner]) EnumerableCalc(expr#0..2=[{inputs}], DEPTNO=[$t0]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], JOB=[$t2], SAL=[$t5], DEPTNO=[$t7]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) !plan # Push count and sum, group by a column being aggregated @@ -1710,9 +1710,9 @@ EnumerableCalc(expr#0..2=[{inputs}], C=[$t1], S=[$t2]) EnumerableAggregate(group=[{2}], C=[COUNT()], S=[SUM($2)]) EnumerableJoin(condition=[=($0, $3)], joinType=[inner]) EnumerableCalc(expr#0..2=[{inputs}], DEPTNO=[$t0]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5], DEPTNO=[$t7]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) !plan # Push sum, self-join, returning one row with a null value @@ -1849,9 +1849,9 @@ using (deptno); EnumerableCalc(expr#0..2=[{inputs}], EMPNO=[$t1], DEPTNO=[$t0]) EnumerableJoin(condition=[=($0, $2)], joinType=[inner]) EnumerableCalc(expr#0..2=[{inputs}], DEPTNO=[$t0]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], DEPTNO=[$t7]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) !plan # [CALCITE-1016] "GROUP BY constant" on empty relation should return 0 rows @@ -1897,7 +1897,7 @@ select count('1') from "scott".emp where false; !ok -# As above, but on VALUES rather than table +# As above, but on VALUES rather than a table # Should return 0 rows select '1' from (values (1, 2), (3, 4)) where false group by 1; +--------+ @@ -2006,7 +2006,7 @@ group by deptno, job; EnumerableCalc(expr#0..2=[{inputs}], JOB=[$t0], SUM_SAL=[$t2], DEPTNO=[$t1]) EnumerableAggregate(group=[{2, 7}], SUM_SAL=[SUM($5)]) EnumerableCalc(expr#0..7=[{inputs}], expr#8=[CAST($t7):INTEGER], expr#9=[10], expr#10=[=($t8, $t9)], proj#0..7=[{exprs}], $condition=[$t10]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) !plan !} @@ -2171,7 +2171,7 @@ EnumerableCalc(expr#0..3=[{inputs}], expr#4=[CAST($t1):BIGINT NOT NULL], EXPR$0= EnumerableAggregate(group=[{}], EXPR$0=[COUNT($0) FILTER $4], EXPR$1=[MIN($1) FILTER $5], EXPR$2=[MIN($2) FILTER $5], EXPR$3=[MIN($3) FILTER $5]) EnumerableCalc(expr#0..4=[{inputs}], expr#5=[0], expr#6=[=($t4, $t5)], expr#7=[1], expr#8=[=($t4, $t7)], proj#0..3=[{exprs}], $g_0=[$t6], $g_1=[$t8]) EnumerableAggregate(group=[{0}], groups=[[{0}, {}]], EXPR$1=[COUNT($5)], EXPR$2=[MIN($5)], EXPR$3=[MAX($5)], $g=[GROUPING($0)]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) !plan # [CALCITE-1930] AggregateExpandDistinctAggregateRules should handle multiple aggregate calls with same input ref @@ -2189,7 +2189,7 @@ EnumerableCalc(expr#0..3=[{inputs}],
expr#4=[CAST($t1):BIGINT NOT NULL], EXPR$0= EnumerableAggregate(group=[{}], EXPR$0=[COUNT($0) FILTER $4], EXPR$1=[MIN($1) FILTER $5], EXPR$2=[MIN($2) FILTER $5], EXPR$3=[MIN($3) FILTER $5]) EnumerableCalc(expr#0..4=[{inputs}], expr#5=[0], expr#6=[=($t4, $t5)], expr#7=[1], expr#8=[=($t4, $t7)], proj#0..3=[{exprs}], $g_0=[$t6], $g_1=[$t8]) EnumerableAggregate(group=[{7}], groups=[[{7}, {}]], EXPR$1=[COUNT($2)], EXPR$2=[MIN($5)], EXPR$3=[MAX($5)], $g=[GROUPING($7)]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) !plan # [CALCITE-1930] AggregateExpandDistinctAggregateRules should handle multiple aggregate calls with same input ref @@ -2213,7 +2213,7 @@ EnumerableCalc(expr#0..4=[{inputs}], expr#5=[CAST($t2):BIGINT NOT NULL], proj#0. EnumerableAggregate(group=[{0}], EXPR$1=[COUNT($1) FILTER $5], EXPR$2=[MIN($2) FILTER $6], EXPR$3=[MIN($3) FILTER $6], EXPR$4=[MIN($4) FILTER $6]) EnumerableCalc(expr#0..5=[{inputs}], expr#6=[0], expr#7=[=($t5, $t6)], expr#8=[1], expr#9=[=($t5, $t8)], proj#0..4=[{exprs}], $g_0=[$t7], $g_1=[$t9]) EnumerableAggregate(group=[{3, 7}], groups=[[{3, 7}, {3}]], EXPR$2=[COUNT($2)], EXPR$3=[MIN($5)], EXPR$4=[MAX($5)], $g=[GROUPING($3, $7)]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) !plan # [CALCITE-1930] AggregateExpandDistinctAggregateRules should handle multiple aggregate calls with same input ref @@ -2236,7 +2236,7 @@ select MGR, count(distinct DEPTNO, JOB), MIN(SAL), MAX(SAL) from "scott".emp gro EnumerableAggregate(group=[{0}], EXPR$1=[COUNT($1, $2) FILTER $5], EXPR$2=[MIN($3) FILTER $6], EXPR$3=[MIN($4) FILTER $6]) EnumerableCalc(expr#0..5=[{inputs}], expr#6=[0], expr#7=[=($t5, $t6)], expr#8=[3], expr#9=[=($t5, $t8)], MGR=[$t1], DEPTNO=[$t2], JOB=[$t0], EXPR$2=[$t3], EXPR$3=[$t4], $g_0=[$t7], $g_3=[$t9]) EnumerableAggregate(group=[{2, 3, 7}], groups=[[{2, 3, 7}, {3}]], EXPR$2=[MIN($5)], EXPR$3=[MAX($5)], $g=[GROUPING($3, $7, $2)]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) !plan # [CALCITE-2366] Add support for ANY_VALUE function @@ -2252,7 +2252,7 @@ SELECT any_value(empno) as anyempno from "scott".emp; !ok EnumerableAggregate(group=[{}], ANYEMPNO=[ANY_VALUE($0)]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) !plan # [CALCITE-2366] Add support for ANY_VALUE function @@ -2280,7 +2280,7 @@ SELECT any_value(empno) as anyempno from "scott".emp group by sal; EnumerableCalc(expr#0..1=[{inputs}], ANYEMPNO=[$t1]) EnumerableAggregate(group=[{5}], ANYEMPNO=[ANY_VALUE($0)]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) !plan # [CALCITE-1776, CALCITE-2402] REGR_COUNT @@ -2297,7 +2297,7 @@ from "scott".emp; !ok EnumerableAggregate(group=[{}], REGR_COUNT(COMM, SAL)=[REGR_COUNT($6, $5)], REGR_COUNT(EMPNO, SAL)=[REGR_COUNT($5)]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) !plan # [CALCITE-1776, CALCITE-2402] REGR_SXX, REGR_SXY, REGR_SYY @@ -2396,7 +2396,7 @@ group by deptno; !ok EnumerableAggregate(group=[{7}], EMPNOS=[COLLECT($0) WITHIN GROUP ([0])]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) !plan select deptno, collect(empno) within group (order by empno desc) as empnos @@ -2414,7 +2414,7 @@ group by deptno; !ok EnumerableAggregate(group=[{7}], EMPNOS=[COLLECT($0) WITHIN GROUP ([0 DESC])]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) !plan select deptno, collect(empno) within group (order by empno desc) @@ 
-2434,7 +2434,7 @@ group by deptno; !ok EnumerableAggregate(group=[{0}], EMPNOS=[COLLECT($1) WITHIN GROUP ([1 DESC]) FILTER $2]) EnumerableCalc(expr#0..7=[{inputs}], expr#8=[7500], expr#9=[>($t0, $t8)], DEPTNO=[$t7], EMPNO=[$t0], $f2=[$t9]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) !plan select deptno, collect(empno) within group (order by empno desc) as empnos1, @@ -2453,7 +2453,7 @@ group by deptno; !ok EnumerableAggregate(group=[{7}], EMPNOS1=[COLLECT($0) WITHIN GROUP ([0 DESC])], EMPNOS2=[COLLECT($0) WITHIN GROUP ([0])]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) !plan # Aggregate WITHIN GROUP with JOIN @@ -2479,9 +2479,9 @@ EnumerableAggregate(group=[{0}], S=[COLLECT($1) WITHIN GROUP ([1 DESC])], S1=[CO EnumerableCalc(expr#0..3=[{inputs}], expr#4=[1], expr#5=[2000], expr#6=[>($t2, $t5)], expr#7=[IS TRUE($t6)], DEPTNO=[$t0], SAL=[$t2], $f2=[$t4], $f3=[$t7]) EnumerableJoin(condition=[=($0, $3)], joinType=[inner]) EnumerableCalc(expr#0..2=[{inputs}], DEPTNO=[$t0]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5], DEPTNO=[$t7]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) !plan select deptno, collect(empno + 1) within group (order by 1) as empnos @@ -2500,7 +2500,7 @@ group by deptno; !ok EnumerableAggregate(group=[{0}], EMPNOS=[COLLECT($1) WITHIN GROUP ([2])]) EnumerableCalc(expr#0..7=[{inputs}], expr#8=[1], expr#9=[+($t0, $t8)], DEPTNO=[$t7], $f1=[$t9], $f2=[$t8]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) !plan # BIT_AND, BIT_OR aggregate functions diff --git a/core/src/test/resources/sql/blank.iq b/core/src/test/resources/sql/blank.iq index 3ebb7c55a8..3b403aacd7 100644 --- a/core/src/test/resources/sql/blank.iq +++ b/core/src/test/resources/sql/blank.iq @@ -1,4 +1,4 @@ -# blank.iq - Queries that start from a blank schema and create their own tables +# blank.iq - Queries that start from a blank namespace and create their own tables # # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. 
See the NOTICE file distributed with
@@ -18,7 +18,7 @@
!use blank
!set outputformat mysql

-create table foo (i int not null, j int);
+create entity foo (i int not null, j int);
(0 rows modified)

!update
@@ -54,15 +54,15 @@ select * from foo as f where i in (
!ok

# Test case for "Wrong plan for NOT IN correlated queries"
-create table table1(i int, j int);
+create entity entity1(i int, j int);
(0 rows modified)

!update

-create table table2(i int, j int);
+create entity table2(i int, j int);
(0 rows modified)

!update

-insert into table1 values (1, 2), (1, 3);
+insert into entity1 values (1, 2), (1, 3);
(2 rows modified)

!update
@@ -72,19 +72,19 @@ insert into table2 values (NULL, 1), (2, 1);
!update
# Checked on Oracle
!set lateDecorrelate true
-select i, j from table1 where table1.j NOT IN (select i from table2 where table1.i=table2.j);
+select i, j from entity1 where entity1.j NOT IN (select i from table2 where entity1.i=table2.j);
EnumerableCalc(expr#0..7=[{inputs}], expr#8=[IS NOT NULL($t7)], expr#9=[<($t4, $t3)], expr#10=[OR($t8, $t9)], expr#11=[IS NOT TRUE($t10)], expr#12=[0], expr#13=[=($t3, $t12)], expr#14=[IS TRUE($t13)], expr#15=[IS NULL($t1)], expr#16=[OR($t11, $t14, $t15)], proj#0..1=[{exprs}], $condition=[$t16])
EnumerableJoin(condition=[AND(=($0, $6), =($1, $5))], joinType=[left])
EnumerableCalc(expr#0..4=[{inputs}], expr#5=[IS NOT NULL($t1)], expr#6=[0], expr#7=[=($t3, $t6)], expr#8=[IS TRUE($t7)], expr#9=[OR($t5, $t8)], proj#0..4=[{exprs}], $condition=[$t9])
EnumerableJoin(condition=[=($0, $2)], joinType=[left])
- EnumerableScan(table=[[BLANK, TABLE1]])
+ EnumerableScan(entity=[[BLANK, ENTITY1]])
EnumerableAggregate(group=[{1}], c=[COUNT()], ck=[COUNT($0)])
EnumerableCalc(expr#0..1=[{inputs}], expr#2=[IS NOT NULL($t1)], proj#0..1=[{exprs}], $condition=[$t2])
- EnumerableScan(table=[[BLANK, TABLE2]])
+ EnumerableScan(entity=[[BLANK, TABLE2]])
EnumerableCalc(expr#0..1=[{inputs}], expr#2=[true], proj#0..2=[{exprs}])
EnumerableAggregate(group=[{0, 1}])
EnumerableCalc(expr#0..1=[{inputs}], expr#2=[IS NOT NULL($t1)], expr#3=[IS NOT NULL($t0)], expr#4=[AND($t2, $t3)], proj#0..1=[{exprs}], $condition=[$t4])
- EnumerableScan(table=[[BLANK, TABLE2]])
+ EnumerableScan(entity=[[BLANK, TABLE2]])
!plan
+---+---+
| I | J |
+---+---+
@@ -94,7 +94,7 @@ EnumerableCalc(expr#0..7=[{inputs}], expr#8=[IS NOT NULL($t7)], expr#9=[<($t4, $

!ok

-select * from table1 where j not in (select i from table2);
+select * from entity1 where j not in (select i from table2);
+---+---+
| I | J |
+---+---+
@@ -103,7 +103,7 @@ select * from table1 where j not in (select i from table2);

!ok

-select * from table1 where j not in (select i from table2) or i = 1;
+select * from entity1 where j not in (select i from table2) or i = 1;
+---+---+
| I | J |
+---+---+
@@ -114,7 +114,7 @@ select * from table1 where j not in (select i from table2) or i = 1;

!ok

-select * from table1 where j not in (select i from table2) or j = 2;
+select * from entity1 where j not in (select i from table2) or j = 2;
+---+---+
| I | J |
+---+---+
@@ -124,7 +124,7 @@ select * from table1 where j not in (select i from table2) or j = 2;

!ok

-select * from table1 where j not in (select i from table2) or j = 3;
+select * from entity1 where j not in (select i from table2) or j = 3;
+---+---+
| I | J |
+---+---+
diff --git a/core/src/test/resources/sql/join.iq b/core/src/test/resources/sql/join.iq
index f56b4e18da..861e4401b1 100644
--- a/core/src/test/resources/sql/join.iq
+++ b/core/src/test/resources/sql/join.iq
@@ -125,9 +125,9 @@ from "scott".emp join
"scott".dept using (deptno); EnumerableAggregate(group=[{0, 2}]) EnumerableJoin(condition=[=($0, $2)], joinType=[inner]) EnumerableCalc(expr#0..2=[{inputs}], DEPTNO=[$t0]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], DEPTNO=[$t7]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) !plan select distinct dept.deptno @@ -145,9 +145,9 @@ from "scott".emp join "scott".dept using (deptno); EnumerableAggregate(group=[{0}]) EnumerableJoin(condition=[=($0, $2)], joinType=[inner]) EnumerableCalc(expr#0..2=[{inputs}], DEPTNO=[$t0]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], DEPTNO=[$t7]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) !plan # [CALCITE-676] AssertionError in GROUPING SETS query @@ -199,13 +199,13 @@ EnumerableCalc(expr#0..1=[{inputs}], DEPTNO=[$t1], ENAME=[$t0]) EnumerableAggregate(group=[{1, 3}]) EnumerableJoin(condition=[=($2, $4)], joinType=[inner]) EnumerableCalc(expr#0..7=[{inputs}], expr#8=[10], expr#9=[+($t7, $t8)], proj#0..1=[{exprs}], $f8=[$t9]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) EnumerableCalc(expr#0..3=[{inputs}], expr#4=[+($t1, $t3)], expr#5=[CAST($t4):INTEGER], DEPTNO=[$t1], $f16=[$t5]) EnumerableJoin(condition=[=($1, $3)], joinType=[inner]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], DEPTNO=[$t7]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], DEPTNO=[$t7]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) !plan @@ -233,12 +233,12 @@ where e.deptno + 10 = d.deptno * 2; EnumerableCalc(expr#0..4=[{inputs}], DEPTNO=[$t3], DEPTNO0=[$t0]) EnumerableJoin(condition=[=($1, $4)], joinType=[inner]) EnumerableCalc(expr#0..2=[{inputs}], expr#3=[2], expr#4=[*($t0, $t3)], DEPTNO=[$t0], $f1=[$t4]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) EnumerableCalc(expr#0..7=[{inputs}], expr#8=[10], expr#9=[+($t7, $t8)], EMPNO=[$t0], DEPTNO=[$t7], $f2=[$t9]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) !plan -### [CALCITE-801] NullPointerException using USING on table alias with column aliases +### [CALCITE-801] NullPointerException using USING on entity alias with column aliases select * from (values (100, 'Bill', 1), (200, 'Eric', 1), @@ -283,8 +283,8 @@ EnumerableCalc(expr#0..10=[{inputs}], expr#11=[COALESCE($t7, $t8)], DEPTNO=[$t11 EnumerableLimit(fetch=[10]) EnumerableJoin(condition=[=($7, $8)], joinType=[left]) EnumerableLimit(fetch=[10]) - EnumerableScan(table=[[scott, EMP]]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, EMP]]) + EnumerableScan(entity=[[scott, DEPT]]) !plan # End join.iq diff --git a/core/src/test/resources/sql/lateral.iq b/core/src/test/resources/sql/lateral.iq index 324df153f5..169fe3ab11 100644 --- a/core/src/test/resources/sql/lateral.iq +++ b/core/src/test/resources/sql/lateral.iq @@ -33,7 +33,7 @@ Was expecting one of: !error # Bad: LATERAL TABLE -select * from "scott".emp join lateral table "scott".dept using (deptno); +select * from "scott".emp join lateral entity "scott".dept using (deptno); parse failed: Encountered "\"scott\"" at line 1, column 46. Was expecting: "(" ... 
@@ -66,7 +66,7 @@ select * from lateral (select * from "scott".emp) as e where deptno = 10; !ok # Good: Explicit TABLE in parentheses -select * from (table "scott".emp) where deptno = 10; +select * from (entity "scott".emp) where deptno = 10; +-------+--------+-----------+------+------------+---------+------+--------+ | EMPNO | ENAME | JOB | MGR | HIREDATE | SAL | COMM | DEPTNO | +-------+--------+-----------+------+------------+---------+------+--------+ @@ -79,8 +79,8 @@ select * from (table "scott".emp) where deptno = 10; !ok # Bad: Explicit TABLE -select * from table "scott".emp; -parse failed: Encountered "table \"scott\"" at line 1, column 15. +select * from entity "scott".emp; +parse failed: Encountered "entity \"scott\"" at line 1, column 15. Was expecting one of: ... ... @@ -95,7 +95,7 @@ Was expecting one of: !error select * from lateral (select * from "scott".emp) as e -join (table "scott".dept) using (deptno) +join (entity "scott".dept) using (deptno) where e.deptno = 10; +--------+-------+--------+-----------+------+------------+---------+------+------------+----------+ | DEPTNO | EMPNO | ENAME | JOB | MGR | HIREDATE | SAL | COMM | DNAME | LOC | diff --git a/core/src/test/resources/sql/misc.iq b/core/src/test/resources/sql/misc.iq index c6e2802f1a..6af4df9380 100644 --- a/core/src/test/resources/sql/misc.iq +++ b/core/src/test/resources/sql/misc.iq @@ -18,7 +18,7 @@ !use post !set outputformat mysql -# [CALCITE-356] Allow column references of the form schema.table.column +# [CALCITE-356] Allow column references of the form namespace.entity.column select "hr"."emps"."empid" from "hr"."emps"; +-------+ @@ -33,7 +33,7 @@ from "hr"."emps"; !ok -# [CALCITE-881] Allow schema.table.column references in GROUP BY +# [CALCITE-881] Allow namespace.entity.column references in GROUP BY select "hr"."emps"."empid", count(*) as c from "hr"."emps" group by "hr"."emps"."empid"; @@ -66,7 +66,7 @@ group by "hr"."emps"."empid"; # Case-sensitive errors select empid from "hr"."emps"; -Column 'EMPID' not found in any table; did you mean 'empid'? +Column 'EMPID' not found in any entity; did you mean 'empid'? !error select empid from "hr".emps; @@ -293,9 +293,9 @@ and e."name" <> d."name"; EnumerableCalc(expr#0..4=[{inputs}], expr#5=[CAST($t2):VARCHAR], expr#6=[CAST($t4):VARCHAR], expr#7=[<>($t5, $t6)], empid=[$t0], name=[$t4], name0=[$t2], $condition=[$t7]) EnumerableJoin(condition=[=($1, $3)], joinType=[inner]) EnumerableCalc(expr#0..4=[{inputs}], proj#0..2=[{exprs}]) - EnumerableScan(table=[[hr, emps]]) + EnumerableScan(entity=[[hr, emps]]) EnumerableCalc(expr#0..3=[{inputs}], proj#0..1=[{exprs}]) - EnumerableScan(table=[[hr, depts]]) + EnumerableScan(entity=[[hr, depts]]) !plan # Same query, expressed using WHERE. 
@@ -317,9 +317,9 @@ and e."name" <> d."name"; EnumerableCalc(expr#0..4=[{inputs}], expr#5=[CAST($t2):VARCHAR], expr#6=[CAST($t4):VARCHAR], expr#7=[<>($t5, $t6)], empid=[$t0], name=[$t4], name0=[$t2], $condition=[$t7]) EnumerableJoin(condition=[=($1, $3)], joinType=[inner]) EnumerableCalc(expr#0..4=[{inputs}], proj#0..2=[{exprs}]) - EnumerableScan(table=[[hr, emps]]) + EnumerableScan(entity=[[hr, emps]]) EnumerableCalc(expr#0..3=[{inputs}], proj#0..1=[{exprs}]) - EnumerableScan(table=[[hr, depts]]) + EnumerableScan(entity=[[hr, depts]]) !plan # Un-correlated EXISTS @@ -340,9 +340,9 @@ EnumerableCalc(expr#0..1=[{inputs}], deptno=[$t1]) EnumerableCalc(expr#0=[{inputs}], expr#1=[IS NOT NULL($t0)], $f0=[$t0], $condition=[$t1]) EnumerableAggregate(group=[{}], agg#0=[MIN($0)]) EnumerableCalc(expr#0..4=[{inputs}], expr#5=[true], $f0=[$t5]) - EnumerableScan(table=[[hr, emps]]) + EnumerableScan(entity=[[hr, emps]]) EnumerableCalc(expr#0..3=[{inputs}], deptno=[$t0]) - EnumerableScan(table=[[hr, depts]]) + EnumerableScan(entity=[[hr, depts]]) !plan # Un-correlated NOT EXISTS @@ -358,13 +358,13 @@ where not exists (select 1 from "hr"."emps"); EnumerableCalc(expr#0..1=[{inputs}], expr#2=[IS NOT NULL($t1)], expr#3=[NOT($t2)], deptno=[$t0], $condition=[$t3]) EnumerableJoin(condition=[true], joinType=[left]) EnumerableCalc(expr#0..3=[{inputs}], deptno=[$t0]) - EnumerableScan(table=[[hr, depts]]) + EnumerableScan(entity=[[hr, depts]]) EnumerableAggregate(group=[{}], agg#0=[MIN($0)]) EnumerableCalc(expr#0..4=[{inputs}], expr#5=[true], $f0=[$t5]) - EnumerableScan(table=[[hr, emps]]) + EnumerableScan(entity=[[hr, emps]]) !plan -# Un-correlated EXISTS (table empty) +# Un-correlated EXISTS (entity empty) select "deptno" from "hr"."depts" where exists (select 1 from "hr"."emps" where "empid" < 0); +--------+ @@ -379,12 +379,12 @@ EnumerableCalc(expr#0..1=[{inputs}], deptno=[$t1]) EnumerableCalc(expr#0=[{inputs}], expr#1=[IS NOT NULL($t0)], $f0=[$t0], $condition=[$t1]) EnumerableAggregate(group=[{}], agg#0=[MIN($0)]) EnumerableCalc(expr#0..4=[{inputs}], expr#5=[true], expr#6=[0], expr#7=[<($t0, $t6)], $f0=[$t5], $condition=[$t7]) - EnumerableScan(table=[[hr, emps]]) + EnumerableScan(entity=[[hr, emps]]) EnumerableCalc(expr#0..3=[{inputs}], deptno=[$t0]) - EnumerableScan(table=[[hr, depts]]) + EnumerableScan(entity=[[hr, depts]]) !plan -# Un-correlated NOT EXISTS (table empty) +# Un-correlated NOT EXISTS (entity empty) select "deptno" from "hr"."depts" where not exists (select 1 from "hr"."emps" where "empid" < 0); +--------+ @@ -400,10 +400,10 @@ where not exists (select 1 from "hr"."emps" where "empid" < 0); EnumerableCalc(expr#0..1=[{inputs}], expr#2=[IS NOT NULL($t1)], expr#3=[NOT($t2)], deptno=[$t0], $condition=[$t3]) EnumerableJoin(condition=[true], joinType=[left]) EnumerableCalc(expr#0..3=[{inputs}], deptno=[$t0]) - EnumerableScan(table=[[hr, depts]]) + EnumerableScan(entity=[[hr, depts]]) EnumerableAggregate(group=[{}], agg#0=[MIN($0)]) EnumerableCalc(expr#0..4=[{inputs}], expr#5=[true], expr#6=[0], expr#7=[<($t0, $t6)], $f0=[$t5], $condition=[$t7]) - EnumerableScan(table=[[hr, emps]]) + EnumerableScan(entity=[[hr, emps]]) !plan # EXISTS @@ -421,8 +421,8 @@ where exists ( !ok EnumerableSemiJoin(condition=[=($1, $5)], joinType=[inner]) - EnumerableScan(table=[[hr, emps]]) - EnumerableScan(table=[[hr, depts]]) + EnumerableScan(entity=[[hr, emps]]) + EnumerableScan(entity=[[hr, depts]]) !plan # NOT EXISTS @@ -440,10 +440,10 @@ where not exists ( !ok EnumerableCalc(expr#0..6=[{inputs}], expr#7=[IS NULL($t6)], 
proj#0..4=[{exprs}], $condition=[$t7]) EnumerableJoin(condition=[=($1, $5)], joinType=[left]) - EnumerableScan(table=[[hr, emps]]) + EnumerableScan(entity=[[hr, emps]]) EnumerableAggregate(group=[{0}], agg#0=[MIN($1)]) EnumerableCalc(expr#0..3=[{inputs}], expr#4=[true], deptno=[$t0], $f0=[$t4]) - EnumerableScan(table=[[hr, depts]]) + EnumerableScan(entity=[[hr, depts]]) !plan # NOT EXISTS .. OR NOT EXISTS @@ -468,13 +468,13 @@ EnumerableCalc(expr#0..7=[{inputs}], expr#8=[IS NULL($t5)], expr#9=[IS NULL($t7) EnumerableJoin(condition=[=($0, $6)], joinType=[left]) EnumerableCalc(expr#0..6=[{inputs}], proj#0..4=[{exprs}], $f0=[$t6]) EnumerableJoin(condition=[=($1, $5)], joinType=[left]) - EnumerableScan(table=[[hr, emps]]) + EnumerableScan(entity=[[hr, emps]]) EnumerableAggregate(group=[{0}], agg#0=[MIN($1)]) EnumerableCalc(expr#0..3=[{inputs}], expr#4=[true], deptno=[$t0], $f0=[$t4]) - EnumerableScan(table=[[hr, depts]]) + EnumerableScan(entity=[[hr, depts]]) EnumerableAggregate(group=[{0}], agg#0=[MIN($1)]) EnumerableCalc(expr#0..3=[{inputs}], expr#4=[90], expr#5=[+($t0, $t4)], expr#6=[true], $f4=[$t5], $f0=[$t6], $condition=[$t6]) - EnumerableScan(table=[[hr, depts]]) + EnumerableScan(entity=[[hr, depts]]) !plan # Left join to a relation with one row is recognized as a trivial semi-join @@ -494,7 +494,7 @@ left join (select count(*) from "hr"."depts") on true; !ok EnumerableCalc(expr#0..4=[{inputs}], deptno=[$t1]) - EnumerableScan(table=[[hr, emps]]) + EnumerableScan(entity=[[hr, emps]]) !plan # Filter combined with an OR filter. @@ -527,7 +527,7 @@ where e."deptno" >= 10 and e."name" = 'Sebastian'; !ok -# [CALCITE-393] If no fields are projected from a table, field trimmer should +# [CALCITE-393] If no fields are projected from a entity, field trimmer should # project a dummy expression select 1 from "hr"."emps"; +--------+ @@ -542,10 +542,10 @@ select 1 from "hr"."emps"; !ok EnumerableCalc(expr#0..4=[{inputs}], expr#5=[1], EXPR$0=[$t5]) - EnumerableScan(table=[[hr, emps]]) + EnumerableScan(entity=[[hr, emps]]) !plan -# [CALCITE-393] for table scan under join +# [CALCITE-393] for entity scan under join select count(*) as c from "hr"."emps", "hr"."depts"; +----+ | C | @@ -558,9 +558,9 @@ select count(*) as c from "hr"."emps", "hr"."depts"; EnumerableAggregate(group=[{}], C=[COUNT()]) EnumerableJoin(condition=[true], joinType=[inner]) EnumerableCalc(expr#0..3=[{inputs}], expr#4=[0], DUMMY=[$t4]) - EnumerableScan(table=[[hr, depts]]) + EnumerableScan(entity=[[hr, depts]]) EnumerableCalc(expr#0..4=[{inputs}], expr#5=[0], DUMMY=[$t5]) - EnumerableScan(table=[[hr, emps]]) + EnumerableScan(entity=[[hr, emps]]) !plan # [CALCITE-345] AssertionError in RexToLixTranslator comparing to date literal @@ -640,7 +640,7 @@ from "customer" where period ("birthdate", DATE '1970-02-05') contains DATE '1964-01-01'; EnumerableAggregate(group=[{}], C=[COUNT()]) EnumerableCalc(expr#0..28=[{inputs}], expr#29=[1970-02-05], expr#30=[<=($t16, $t29)], expr#31=[CASE($t30, $t16, $t29)], expr#32=[1964-01-01], expr#33=[<=($t31, $t32)], expr#34=[CASE($t30, $t29, $t16)], expr#35=[>=($t34, $t32)], expr#36=[AND($t33, $t35)], proj#0..28=[{exprs}], $condition=[$t36]) - EnumerableScan(table=[[foodmart2, customer]]) + EnumerableScan(entity=[[foodmart2, customer]]) !plan +------+ | C | @@ -662,10 +662,10 @@ from "sales_fact_1997" as s where c."city" = 'San Francisco'; EnumerableJoin(condition=[=($0, $38)], joinType=[inner]) EnumerableJoin(condition=[=($2, $8)], joinType=[inner]) - EnumerableScan(table=[[foodmart2, 
sales_fact_1997]]) + EnumerableScan(entity=[[foodmart2, sales_fact_1997]]) EnumerableCalc(expr#0..28=[{inputs}], expr#29=['San Francisco':VARCHAR(30)], expr#30=[=($t9, $t29)], proj#0..28=[{exprs}], $condition=[$t30]) - EnumerableScan(table=[[foodmart2, customer]]) - EnumerableScan(table=[[foodmart2, product]]) + EnumerableScan(entity=[[foodmart2, customer]]) + EnumerableScan(entity=[[foodmart2, product]]) !plan # 4-way join whose optimal plan requires bushy join. @@ -687,12 +687,12 @@ EnumerableCalc(expr#0..56=[{inputs}], product_id=[$t20], time_id=[$t21], custome EnumerableJoin(condition=[=($6, $20)], joinType=[inner]) EnumerableJoin(condition=[=($0, $5)], joinType=[inner]) EnumerableCalc(expr#0..4=[{inputs}], expr#5=['Snacks':VARCHAR(30)], expr#6=[=($t3, $t5)], proj#0..4=[{exprs}], $condition=[$t6]) - EnumerableScan(table=[[foodmart2, product_class]]) - EnumerableScan(table=[[foodmart2, product]]) + EnumerableScan(entity=[[foodmart2, product_class]]) + EnumerableScan(entity=[[foodmart2, product]]) EnumerableJoin(condition=[=($2, $8)], joinType=[inner]) - EnumerableScan(table=[[foodmart2, sales_fact_1997]]) + EnumerableScan(entity=[[foodmart2, sales_fact_1997]]) EnumerableCalc(expr#0..28=[{inputs}], expr#29=['San Francisco':VARCHAR(30)], expr#30=[=($t9, $t29)], proj#0..28=[{exprs}], $condition=[$t30]) - EnumerableScan(table=[[foodmart2, customer]]) + EnumerableScan(entity=[[foodmart2, customer]]) !plan # Check that when filters are merged, duplicate conditions are eliminated. @@ -701,7 +701,7 @@ select * from ( where "day" = 1) where "day" = 1; EnumerableCalc(expr#0..1=[{inputs}], expr#2=[1], expr#3=[=($t0, $t2)], proj#0..1=[{exprs}], $condition=[$t3]) - EnumerableScan(table=[[foodmart2, days]]) + EnumerableScan(entity=[[foodmart2, days]]) !plan # [HIVE-5873] Semi-join to count sub-query @@ -1124,7 +1124,7 @@ select * from "scott".emp where hiredate < '1981-01-02'; !ok EnumerableCalc(expr#0..7=[{inputs}], expr#8=['1981-01-02'], expr#9=[CAST($t8):DATE NOT NULL], expr#10=[<($t4, $t9)], proj#0..7=[{exprs}], $condition=[$t10]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) !plan select * from "scott".emp where '1981-01-02' > hiredate; +-------+-------+-------+------+------------+--------+------+--------+ @@ -1225,7 +1225,7 @@ select * from "scott".emp where '7369' between empno and '7876'; !ok -# [CALCITE-546] Allow table, column and field called "*" +# [CALCITE-546] Allow entity, column and field called "*" # See [DRILL-3859], [DRILL-3860]. SELECT * FROM (VALUES (0, 0)) AS T(A, "*"); +---+---+ @@ -1248,7 +1248,7 @@ SELECT a FROM (VALUES (0, 0)) AS T(A, "*"); !ok SELECT b FROM (VALUES (0, 0)) AS T(A, "*"); -Column 'B' not found in any table +Column 'B' not found in any entity !error # See [DRILL-3860]. diff --git a/core/src/test/resources/sql/schema.iq b/core/src/test/resources/sql/schema.iq index 08250b50b3..7962d16bb3 100644 --- a/core/src/test/resources/sql/schema.iq +++ b/core/src/test/resources/sql/schema.iq @@ -1,4 +1,4 @@ -# schema.iq - DDL on schemas +# namespace.iq - DDL on schemas # # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. 
See the NOTICE file distributed with
@@ -18,14 +18,14 @@
!use server
!set outputformat mysql

-# Create a schema
-create schema s;
+# Create a namespace
+create namespace s;
(0 rows modified)

!update

-# Create a table and a view in the schema
-create table s.t (i int);
+# Create an entity and a view in the namespace
+create entity s.t (i int);
(0 rows modified)

!update
@@ -45,52 +45,52 @@ select count(*) as c from s.v;

!ok

-# Try to create a schema that already exists
-create schema s;
+# Try to create a namespace that already exists
+create namespace s;
Schema 'S' already exists
!error

-create or replace schema s;
+create or replace namespace s;
(0 rows modified)

!update

-#create schema if exists s;
+#create namespace if exists s;
#Encountered "exists" at line 1, column 18.
#!error

-create schema if not exists s;
+create namespace if not exists s;
(0 rows modified)

!update

# Bad library
-create foreign schema fs library 'com.example.BadSchemaFactory';
-Property 'com.example.BadSchemaFactory' not valid for plugin type org.polypheny.db.schema.SchemaFactory
+create foreign namespace fs library 'com.example.BadSchemaFactory';
+Property 'com.example.BadSchemaFactory' not valid for plugin type org.polypheny.db.namespace.SchemaFactory
!error

# Bad type
-create foreign schema fs type 'bad';
-Invalid schema type 'bad'; valid values: [MAP, JDBC, CUSTOM]
+create foreign namespace fs type 'bad';
+Invalid namespace type 'bad'; valid values: [MAP, JDBC, CUSTOM]
!error

# Can not specify both type and library
-create foreign schema fs
+create foreign namespace fs
  type 'jdbc'
  library 'org.polypheny.db.test.JdbcTest.MySchemaFactory';
Encountered "library" at line 3, column 3.
!error

-# Cannot specify type or library with non-foreign schema
-create schema fs type 'jdbc';
+# Cannot specify type or library with non-foreign namespace
+create namespace fs type 'jdbc';
Encountered "type" at line 1, column 18.
!error

-create schema fs library 'org.polypheny.db.test.JdbcTest.MySchemaFactory';
+create namespace fs library 'org.polypheny.db.test.JdbcTest.MySchemaFactory';
Encountered "library" at line 1, column 18.
!error

-create foreign schema fs;
+create foreign namespace fs;
Encountered "<EOF>" at line 1, column 24.
Was expecting one of:
"TYPE" ...
"." ...
!error -# JDBC schema -create foreign schema scott type 'jdbc' options ( +# JDBC namespace +create foreign namespace scott type 'jdbc' options ( "jdbcUrl" 'jdbc:hsqldb:res:scott', "jdbcSchema" 'SCOTT', "jdbcUser" 'SCOTT', @@ -118,8 +118,8 @@ select count(*) as c from scott.dept; !ok -# Drop schema, then make sure that a query can't find it -drop schema if exists s; +# Drop namespace, then make sure that a query can't find it +drop namespace if exists s; (0 rows modified) !update @@ -129,7 +129,7 @@ Object 'T' not found !error # Create again and objects are still gone -create schema s; +create namespace s; select * from s.t; Object 'T' not found @@ -139,30 +139,30 @@ select * from s.v; Object 'V' not found !error -# Try to drop schema that does not exist -drop schema sss; +# Try to drop namespace that does not exist +drop namespace sss; Schema 'SSS' not found !error -drop schema if exists sss; +drop namespace if exists sss; (0 rows modified) !update -drop foreign schema if exists sss; +drop foreign namespace if exists sss; (0 rows modified) !update -# Use 'if exists' to drop a foreign schema that does exist -drop foreign schema if exists scott; +# Use 'if exists' to drop a foreign namespace that does exist +drop foreign namespace if exists scott; (0 rows modified) !update -drop foreign schema if exists scott; +drop foreign namespace if exists scott; (0 rows modified) !update -# End schema.iq +# End namespace.iq diff --git a/core/src/test/resources/sql/sequence.iq b/core/src/test/resources/sql/sequence.iq index 163d92cf54..03d9a2aed2 100644 --- a/core/src/test/resources/sql/sequence.iq +++ b/core/src/test/resources/sql/sequence.iq @@ -44,7 +44,7 @@ select next value for "my_seq" as c from (values 1, 2); C BIGINT(19) NOT NULL !type -# Qualified with schema name +# Qualified with namespace name select next value for "s"."my_seq" as c from (values 1, 2); C BIGINT(19) NOT NULL !type @@ -53,7 +53,7 @@ select next value for "unknown_seq" as c from (values 1, 2); From line 1, column 23 to line 1, column 35: Table 'unknown_seq' not found !error -# Qualified with bad schema name +# Qualified with bad namespace name select next value for "unknown_schema"."my_seq" as c from (values 1, 2); From line 1, column 23 to line 1, column 47: Table 'unknown_schema.my_seq' not found !error diff --git a/core/src/test/resources/sql/some.iq b/core/src/test/resources/sql/some.iq index c111381b4d..a07c279f29 100644 --- a/core/src/test/resources/sql/some.iq +++ b/core/src/test/resources/sql/some.iq @@ -111,8 +111,8 @@ from "scott".emp; EnumerableCalc(expr#0..10=[{inputs}], expr#11=[0], expr#12=[=($t1, $t11)], expr#13=[false], expr#14=[<=($t8, $t0)], expr#15=[IS TRUE($t14)], expr#16=[true], expr#17=[>($t1, $t2)], expr#18=[null:NULL], expr#19=[CASE($t12, $t13, $t15, $t16, $t17, $t18, $t14)], expr#20=[NOT($t19)], EMPNO=[$t3], ENAME=[$t4], JOB=[$t5], MGR=[$t6], HIREDATE=[$t7], SAL=[$t8], COMM=[$t9], DEPTNO=[$t10], X=[$t20]) EnumerableJoin(condition=[true], joinType=[inner]) EnumerableAggregate(group=[{}], m=[MAX($6)], c=[COUNT()], d=[COUNT($6)]) - EnumerableScan(table=[[scott, EMP]]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) !plan # NOT SOME; left side NOT NULL, right side nullable; converse of previous query. 
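Every test-resource hunk in this patch encodes the same two conventions: DDL statements use the new ENTITY keyword where they previously used TABLE, and expected plans label the scanned source entity=[[...]] instead of table=[[...]]. Between the quantified-comparison tests above and the sort tests below, a minimal Quidem (.iq) fragment written against the refactored code is sketched here. It is illustrative only and not part of the patch: the entity name t, the inserted rows, and the expected plan mirror the blank.iq style but are assumptions, not output asserted by this change.

# Hypothetical .iq sketch, not part of this patch: the DDL uses the new
# ENTITY keyword and the expected plan asserts the renamed attribute.
create entity t (i int not null);
(0 rows modified)

!update

insert into t values (1), (2);
(2 rows modified)

!update

select * from t;
+---+
| I |
+---+
| 1 |
| 2 |
+---+
(2 rows)

!ok
EnumerableScan(entity=[[BLANK, T]])
!plan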
diff --git a/core/src/test/resources/sql/sort.iq b/core/src/test/resources/sql/sort.iq index 14890209d5..69d63f243b 100644 --- a/core/src/test/resources/sql/sort.iq +++ b/core/src/test/resources/sql/sort.iq @@ -21,7 +21,7 @@ # The ArrayTable "days" is sorted by "day", so plan must not contain sort select * from "days" order by "day"; !verify -EnumerableScan(table=[[foodmart2, days]]) +EnumerableScan(entity=[[foodmart2, days]]) !plan # The ArrayTable "days" is sorted by "day", so the plan does not sort, only applies limit @@ -36,7 +36,7 @@ select * from "days" order by "day" limit 2; !ok EnumerableLimit(fetch=[2]) - EnumerableScan(table=[[foodmart2, days]]) + EnumerableScan(entity=[[foodmart2, days]]) !plan # The ArrayTable "days" is sorted by "day", so the plan must not contain Sort @@ -52,7 +52,7 @@ select * from "days" where "day" between 2 and 4 order by "day"; !ok EnumerableCalc(expr#0..1=[{inputs}], expr#2=[2], expr#3=[>=($t0, $t2)], expr#4=[4], expr#5=[<=($t0, $t4)], expr#6=[AND($t3, $t5)], proj#0..1=[{exprs}], $condition=[$t6]) - EnumerableScan(table=[[foodmart2, days]]) + EnumerableScan(entity=[[foodmart2, days]]) !plan # [CALCITE-970] Default collation of NULL values @@ -135,12 +135,12 @@ order by deptno desc, dname, deptno; !ok EnumerableSort(sort0=[$0], sort1=[$1], dir0=[DESC], dir1=[ASC]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) !plan !use post -# [CALCITE-603] WITH ... ORDER BY cannot find table +# [CALCITE-603] WITH ... ORDER BY cannot find entity with e as (select "empid" as empid from "hr"."emps" where "empid" < 120) select * from e as e1, e as e2 order by e1.empid + e2.empid, e1.empid; +-------+--------+ @@ -181,7 +181,7 @@ select * from "hr"."emps" offset 0; (4 rows) !ok -EnumerableScan(table=[[hr, emps]]) +EnumerableScan(entity=[[hr, emps]]) !plan # [CALCITE-634] Allow ORDER BY aggregate function in SELECT DISTINCT, provided diff --git a/core/src/test/resources/sql/sub-query.iq b/core/src/test/resources/sql/sub-query.iq index 9f69976b56..0e3bf7854e 100644 --- a/core/src/test/resources/sql/sub-query.iq +++ b/core/src/test/resources/sql/sub-query.iq @@ -350,13 +350,13 @@ EnumerableCalc(expr#0..5=[{inputs}], EMPNO=[$t0]) EnumerableCalc(expr#0..3=[{inputs}], expr#4=[>($t3, $t0)], proj#0..3=[{exprs}], $condition=[$t4]) EnumerableJoin(condition=[true], joinType=[inner]) EnumerableAggregate(group=[{7}]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], JOB=[$t2], DEPTNO=[$t7]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], JOB=[$t2], DEPTNO=[$t7]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) EnumerableCalc(expr#0..2=[{inputs}], DEPTNO=[$t0]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) !plan !} @@ -446,7 +446,7 @@ EnumerableCalc(expr#0..2=[{inputs}], proj#0..1=[{exprs}]) EnumerableValues(tuples=[[{ 1, 2 }]]) EnumerableAggregate(group=[{0}]) EnumerableCalc(expr#0..7=[{inputs}], expr#8=[true], expr#9=[CAST($t7):INTEGER], expr#10=[$cor0], expr#11=[$t10.A], expr#12=[=($t9, $t11)], i=[$t8], $condition=[$t12]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) !plan # Similar query, identical plan @@ -457,7 +457,7 @@ EnumerableCalc(expr#0..2=[{inputs}], proj#0..1=[{exprs}]) EnumerableValues(tuples=[[{ 1, 2 }]]) EnumerableAggregate(group=[{0}]) EnumerableCalc(expr#0..7=[{inputs}], 
expr#8=[true], expr#9=[CAST($t7):INTEGER], expr#10=[$cor0], expr#11=[$t10.A], expr#12=[=($t9, $t11)], i=[$t8], $condition=[$t12]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) !plan # Uncorrelated @@ -466,7 +466,7 @@ select * from t where a in (select deptno from "scott".dept); EnumerableCalc(expr#0..2=[{inputs}], A=[$t1], B=[$t2]) EnumerableMergeJoin(condition=[=($0, $1)], joinType=[inner]) EnumerableCalc(expr#0..2=[{inputs}], DEPTNO=[$t0]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) EnumerableValues(tuples=[[{ 60, 'b' }]]) !plan +---+---+ @@ -601,9 +601,9 @@ where empno IN ( EnumerableCalc(expr#0..4=[{inputs}], SAL=[$t4]) EnumerableJoin(condition=[AND(=($1, $3), =($0, $2))], joinType=[inner]) EnumerableCalc(expr#0..2=[{inputs}], expr#3=[IS NOT NULL($t1)], proj#0..1=[{exprs}], $condition=[$t3]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], JOB=[$t2], SAL=[$t5]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) !plan # As above, but for EXISTS @@ -622,9 +622,9 @@ where exists ( !ok EnumerableSemiJoin(condition=[=($0, $10)], joinType=[inner]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) EnumerableCalc(expr#0..7=[{inputs}], expr#8=[=($t7, $t7)], expr#9=['SMITH':VARCHAR(10)], expr#10=[=($t1, $t9)], expr#11=[AND($t8, $t10)], proj#0..7=[{exprs}], $condition=[$t11]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) !plan # [DRILL-5644] @@ -789,12 +789,12 @@ from "scott".emp; EnumerableCalc(expr#0..3=[{inputs}], expr#4=[null:BOOLEAN], expr#5=[IS NOT NULL($t3)], expr#6=[AND($t4, $t5)], SAL=[$t1], EXPR$1=[$t6]) EnumerableJoin(condition=[true], joinType=[left]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) EnumerableLimit(fetch=[1]) EnumerableSort(sort0=[$0], dir0=[DESC]) EnumerableAggregate(group=[{0}], c=[COUNT()]) EnumerableCalc(expr#0..2=[{inputs}], expr#3=[false], cs=[$t3]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) !plan # Test project literal IN null non-correlated @@ -825,12 +825,12 @@ from "scott".emp; EnumerableCalc(expr#0..3=[{inputs}], expr#4=[false], expr#5=[=($t2, $t4)], expr#6=[IS TRUE($t5)], expr#7=[null:BOOLEAN], expr#8=[IS NOT NULL($t3)], expr#9=[AND($t6, $t7, $t8)], expr#10=[IS NOT NULL($t2)], expr#11=[IS NOT TRUE($t5)], expr#12=[AND($t10, $t8, $t11)], expr#13=[OR($t9, $t12)], SAL=[$t1], EXPR$1=[$t13]) EnumerableJoin(condition=[true], joinType=[left]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) EnumerableLimit(fetch=[1]) EnumerableSort(sort0=[$0], dir0=[DESC]) EnumerableAggregate(group=[{0}], c=[COUNT()]) EnumerableCalc(expr#0..2=[{inputs}], expr#3=[false], expr#4=[123], expr#5=[null:INTEGER], expr#6=[=($t4, $t5)], expr#7=[IS NULL($t5)], expr#8=[OR($t6, $t7)], cs=[$t3], $condition=[$t8]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) !plan # Test project null IN literal non-correlated @@ -861,12 +861,12 @@ from "scott".emp; EnumerableCalc(expr#0..3=[{inputs}], expr#4=[null:BOOLEAN], expr#5=[IS NOT NULL($t3)], expr#6=[AND($t4, $t5)], SAL=[$t1], EXPR$1=[$t6]) EnumerableJoin(condition=[true], joinType=[left]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5]) - 
EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) EnumerableLimit(fetch=[1]) EnumerableSort(sort0=[$0], dir0=[DESC]) EnumerableAggregate(group=[{0}], c=[COUNT()]) EnumerableCalc(expr#0..2=[{inputs}], expr#3=[true], cs=[$t3]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) !plan # Test project null IN required @@ -897,12 +897,12 @@ from "scott".emp; EnumerableCalc(expr#0..3=[{inputs}], expr#4=[null:BOOLEAN], expr#5=[IS NOT NULL($t3)], expr#6=[AND($t4, $t5)], SAL=[$t1], EXPR$1=[$t6]) EnumerableJoin(condition=[true], joinType=[left]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) EnumerableLimit(fetch=[1]) EnumerableSort(sort0=[$0], dir0=[DESC]) EnumerableAggregate(group=[{0}], c=[COUNT()]) EnumerableCalc(expr#0..2=[{inputs}], expr#3=[true], cs=[$t3]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) !plan # Test project null IN nullable @@ -933,12 +933,12 @@ from "scott".emp; EnumerableCalc(expr#0..3=[{inputs}], expr#4=[null:BOOLEAN], expr#5=[IS NOT NULL($t3)], expr#6=[AND($t4, $t5)], SAL=[$t1], EXPR$1=[$t6]) EnumerableJoin(condition=[true], joinType=[left]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) EnumerableLimit(fetch=[1]) EnumerableSort(sort0=[$0], dir0=[DESC]) EnumerableAggregate(group=[{0}], c=[COUNT()]) EnumerableCalc(expr#0..2=[{inputs}], expr#3=[true], cs=[$t3]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) !plan # Test project literal IN required @@ -969,10 +969,10 @@ from "scott".emp; EnumerableCalc(expr#0..2=[{inputs}], expr#3=[IS NOT NULL($t2)], SAL=[$t1], EXPR$1=[$t3]) EnumerableJoin(condition=[true], joinType=[left]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) EnumerableAggregate(group=[{0}]) EnumerableCalc(expr#0..2=[{inputs}], expr#3=[true], expr#4=[10], expr#5=[=($t4, $t0)], cs=[$t3], $condition=[$t5]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) !plan # Test project literal IN nullable @@ -1003,12 +1003,12 @@ from "scott".emp; EnumerableCalc(expr#0..3=[{inputs}], expr#4=[false], expr#5=[=($t2, $t4)], expr#6=[IS TRUE($t5)], expr#7=[null:BOOLEAN], expr#8=[IS NOT NULL($t3)], expr#9=[AND($t6, $t7, $t8)], expr#10=[IS NOT NULL($t2)], expr#11=[IS NOT TRUE($t5)], expr#12=[AND($t10, $t8, $t11)], expr#13=[OR($t9, $t12)], SAL=[$t1], EXPR$1=[$t13]) EnumerableJoin(condition=[true], joinType=[left]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) EnumerableLimit(fetch=[1]) EnumerableSort(sort0=[$0], dir0=[DESC]) EnumerableAggregate(group=[{0}], c=[COUNT()]) EnumerableCalc(expr#0..2=[{inputs}], expr#3=[true], expr#4=[10], expr#5=[CAST($t0):TINYINT], expr#6=[=($t4, $t5)], expr#7=[IS NULL($t5)], expr#8=[OR($t6, $t7)], cs=[$t3], $condition=[$t8]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) !plan # Test project null NOT IN null non-correlated @@ -1039,12 +1039,12 @@ from "scott".emp; EnumerableCalc(expr#0..3=[{inputs}], expr#4=[IS NULL($t3)], expr#5=[false], expr#6=[=($t2, $t5)], expr#7=[null:NULL], expr#8=[null:INTEGER], expr#9=[IS NULL($t8)], expr#10=[IS NOT NULL($t2)], expr#11=[true], expr#12=[CASE($t4, $t5, $t6, $t7, 
$t9, $t7, $t10, $t11, $t5)], expr#13=[NOT($t12)], SAL=[$t1], EXPR$1=[$t13]) EnumerableJoin(condition=[true], joinType=[left]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) EnumerableLimit(fetch=[1]) EnumerableSort(sort0=[$0], dir0=[DESC]) EnumerableAggregate(group=[{0}], c=[COUNT()]) EnumerableCalc(expr#0..2=[{inputs}], expr#3=[false], cs=[$t3]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) !plan # Test project literal NOT IN null non-correlated @@ -1075,12 +1075,12 @@ from "scott".emp; EnumerableCalc(expr#0..3=[{inputs}], expr#4=[IS NULL($t3)], expr#5=[false], expr#6=[=($t2, $t5)], expr#7=[null:NULL], expr#8=[IS NOT NULL($t2)], expr#9=[true], expr#10=[CASE($t4, $t5, $t6, $t7, $t8, $t9, $t5)], expr#11=[NOT($t10)], SAL=[$t1], EXPR$1=[$t11]) EnumerableJoin(condition=[true], joinType=[left]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) EnumerableLimit(fetch=[1]) EnumerableSort(sort0=[$0], dir0=[DESC]) EnumerableAggregate(group=[{0}], c=[COUNT()]) EnumerableCalc(expr#0..2=[{inputs}], expr#3=[false], expr#4=[123], expr#5=[null:INTEGER], expr#6=[=($t4, $t5)], expr#7=[IS NULL($t5)], expr#8=[OR($t6, $t7)], cs=[$t3], $condition=[$t8]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) !plan # Test project null NOT IN literal non-correlated @@ -1111,12 +1111,12 @@ from "scott".emp; EnumerableCalc(expr#0..3=[{inputs}], expr#4=[IS NULL($t3)], expr#5=[false], expr#6=[=($t2, $t5)], expr#7=[null:NULL], expr#8=[null:INTEGER], expr#9=[IS NULL($t8)], expr#10=[IS NOT NULL($t2)], expr#11=[true], expr#12=[CASE($t4, $t5, $t6, $t7, $t9, $t7, $t10, $t11, $t5)], expr#13=[NOT($t12)], SAL=[$t1], EXPR$1=[$t13]) EnumerableJoin(condition=[true], joinType=[left]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) EnumerableLimit(fetch=[1]) EnumerableSort(sort0=[$0], dir0=[DESC]) EnumerableAggregate(group=[{0}], c=[COUNT()]) EnumerableCalc(expr#0..2=[{inputs}], expr#3=[true], cs=[$t3]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) !plan # Test project null NOT IN required @@ -1147,12 +1147,12 @@ from "scott".emp; EnumerableCalc(expr#0..3=[{inputs}], expr#4=[IS NULL($t3)], expr#5=[false], expr#6=[=($t2, $t5)], expr#7=[null:NULL], expr#8=[null:INTEGER], expr#9=[IS NULL($t8)], expr#10=[IS NOT NULL($t2)], expr#11=[true], expr#12=[CASE($t4, $t5, $t6, $t7, $t9, $t7, $t10, $t11, $t5)], expr#13=[NOT($t12)], SAL=[$t1], EXPR$1=[$t13]) EnumerableJoin(condition=[true], joinType=[left]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) EnumerableLimit(fetch=[1]) EnumerableSort(sort0=[$0], dir0=[DESC]) EnumerableAggregate(group=[{0}], c=[COUNT()]) EnumerableCalc(expr#0..2=[{inputs}], expr#3=[true], cs=[$t3]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) !plan # Test project null NOT IN nullable @@ -1183,12 +1183,12 @@ from "scott".emp; EnumerableCalc(expr#0..3=[{inputs}], expr#4=[IS NULL($t3)], expr#5=[false], expr#6=[=($t2, $t5)], expr#7=[null:NULL], expr#8=[null:INTEGER], expr#9=[IS NULL($t8)], expr#10=[IS NOT NULL($t2)], expr#11=[true], expr#12=[CASE($t4, $t5, $t6, $t7, $t9, $t7, $t10, $t11, $t5)], expr#13=[NOT($t12)], SAL=[$t1], 
EXPR$1=[$t13]) EnumerableJoin(condition=[true], joinType=[left]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) EnumerableLimit(fetch=[1]) EnumerableSort(sort0=[$0], dir0=[DESC]) EnumerableAggregate(group=[{0}], c=[COUNT()]) EnumerableCalc(expr#0..2=[{inputs}], expr#3=[true], cs=[$t3]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) !plan # Test project literal NOT IN required @@ -1219,10 +1219,10 @@ from "scott".emp; EnumerableCalc(expr#0..2=[{inputs}], expr#3=[IS NOT NULL($t2)], expr#4=[true], expr#5=[false], expr#6=[CASE($t3, $t4, $t5)], expr#7=[NOT($t6)], SAL=[$t1], EXPR$1=[$t7]) EnumerableJoin(condition=[true], joinType=[left]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) EnumerableAggregate(group=[{0}]) EnumerableCalc(expr#0..2=[{inputs}], expr#3=[true], expr#4=[10], expr#5=[=($t4, $t0)], cs=[$t3], $condition=[$t5]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) !plan # Test project literal NOT IN nullable @@ -1253,12 +1253,12 @@ from "scott".emp; EnumerableCalc(expr#0..3=[{inputs}], expr#4=[IS NULL($t3)], expr#5=[false], expr#6=[=($t2, $t5)], expr#7=[null:NULL], expr#8=[IS NOT NULL($t2)], expr#9=[true], expr#10=[CASE($t4, $t5, $t6, $t7, $t8, $t9, $t5)], expr#11=[NOT($t10)], SAL=[$t1], EXPR$1=[$t11]) EnumerableJoin(condition=[true], joinType=[left]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) EnumerableLimit(fetch=[1]) EnumerableSort(sort0=[$0], dir0=[DESC]) EnumerableAggregate(group=[{0}], c=[COUNT()]) EnumerableCalc(expr#0..2=[{inputs}], expr#3=[true], expr#4=[10], expr#5=[CAST($t0):TINYINT], expr#6=[=($t4, $t5)], expr#7=[IS NULL($t5)], expr#8=[OR($t6, $t7)], cs=[$t3], $condition=[$t8]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) !plan # Test project null IN required is unknown @@ -1289,12 +1289,12 @@ from "scott".emp; EnumerableCalc(expr#0..3=[{inputs}], expr#4=[null:BOOLEAN], expr#5=[IS NOT NULL($t3)], expr#6=[AND($t4, $t5)], expr#7=[IS NULL($t6)], SAL=[$t1], EXPR$1=[$t7]) EnumerableJoin(condition=[true], joinType=[left]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) EnumerableLimit(fetch=[1]) EnumerableSort(sort0=[$0], dir0=[DESC]) EnumerableAggregate(group=[{0}], c=[COUNT()]) EnumerableCalc(expr#0..2=[{inputs}], expr#3=[true], cs=[$t3]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) !plan # Test filter null IN null @@ -1390,9 +1390,9 @@ EnumerableCalc(expr#0..2=[{inputs}], SAL=[$t2]) EnumerableJoin(condition=[true], joinType=[inner]) EnumerableAggregate(group=[{0}]) EnumerableCalc(expr#0..2=[{inputs}], expr#3=[true], expr#4=[10], expr#5=[=($t4, $t0)], cs=[$t3], $condition=[$t5]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) !plan # Test filter literal IN nullable @@ -1423,9 +1423,9 @@ EnumerableCalc(expr#0..2=[{inputs}], SAL=[$t2]) EnumerableJoin(condition=[true], joinType=[inner]) EnumerableAggregate(group=[{0}]) EnumerableCalc(expr#0..2=[{inputs}], expr#3=[true], expr#4=[10], 
expr#5=[CAST($t0):TINYINT], expr#6=[=($t4, $t5)], cs=[$t3], $condition=[$t6]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) !plan # Test filter null NOT IN null non-correlated @@ -1441,12 +1441,12 @@ select sal from "scott".emp EnumerableCalc(expr#0..3=[{inputs}], expr#4=[false], expr#5=[=($t2, $t4)], expr#6=[IS NOT TRUE($t5)], expr#7=[IS NULL($t3)], expr#8=[OR($t6, $t7)], expr#9=[IS TRUE($t5)], expr#10=[OR($t7, $t9)], expr#11=[AND($t8, $t10)], SAL=[$t1], $condition=[$t11]) EnumerableJoin(condition=[true], joinType=[left]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) EnumerableLimit(fetch=[1]) EnumerableSort(sort0=[$0], dir0=[DESC]) EnumerableAggregate(group=[{0}], c=[COUNT()]) EnumerableCalc(expr#0..2=[{inputs}], expr#3=[false], cs=[$t3]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) !plan # Test filter literal NOT IN null non-correlated @@ -1462,12 +1462,12 @@ select sal from "scott".emp EnumerableCalc(expr#0..3=[{inputs}], expr#4=[false], expr#5=[=($t2, $t4)], expr#6=[IS NOT NULL($t2)], expr#7=[OR($t5, $t6)], expr#8=[IS NOT TRUE($t7)], expr#9=[IS NULL($t3)], expr#10=[OR($t8, $t9)], SAL=[$t1], $condition=[$t10]) EnumerableJoin(condition=[true], joinType=[left]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) EnumerableLimit(fetch=[1]) EnumerableSort(sort0=[$0], dir0=[DESC]) EnumerableAggregate(group=[{0}], c=[COUNT()]) EnumerableCalc(expr#0..2=[{inputs}], expr#3=[false], expr#4=[123], expr#5=[null:INTEGER], expr#6=[=($t4, $t5)], expr#7=[IS NULL($t5)], expr#8=[OR($t6, $t7)], cs=[$t3], $condition=[$t8]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) !plan # Test filter null NOT IN literal non-correlated @@ -1483,12 +1483,12 @@ select sal from "scott".emp EnumerableCalc(expr#0..3=[{inputs}], expr#4=[false], expr#5=[=($t2, $t4)], expr#6=[IS NOT TRUE($t5)], expr#7=[IS NULL($t3)], expr#8=[OR($t6, $t7)], expr#9=[IS TRUE($t5)], expr#10=[OR($t7, $t9)], expr#11=[AND($t8, $t10)], SAL=[$t1], $condition=[$t11]) EnumerableJoin(condition=[true], joinType=[left]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) EnumerableLimit(fetch=[1]) EnumerableSort(sort0=[$0], dir0=[DESC]) EnumerableAggregate(group=[{0}], c=[COUNT()]) EnumerableCalc(expr#0..2=[{inputs}], expr#3=[true], cs=[$t3]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) !plan # Test filter null NOT IN required @@ -1504,12 +1504,12 @@ select sal from "scott".emp EnumerableCalc(expr#0..3=[{inputs}], expr#4=[false], expr#5=[=($t2, $t4)], expr#6=[IS NOT TRUE($t5)], expr#7=[IS NULL($t3)], expr#8=[OR($t6, $t7)], expr#9=[IS TRUE($t5)], expr#10=[OR($t7, $t9)], expr#11=[AND($t8, $t10)], SAL=[$t1], $condition=[$t11]) EnumerableJoin(condition=[true], joinType=[left]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) EnumerableLimit(fetch=[1]) EnumerableSort(sort0=[$0], dir0=[DESC]) EnumerableAggregate(group=[{0}], c=[COUNT()]) EnumerableCalc(expr#0..2=[{inputs}], expr#3=[true], cs=[$t3]) - EnumerableScan(table=[[scott, 
DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) !plan # Test filter null NOT IN nullable @@ -1525,12 +1525,12 @@ select sal from "scott".emp EnumerableCalc(expr#0..3=[{inputs}], expr#4=[false], expr#5=[=($t2, $t4)], expr#6=[IS NOT TRUE($t5)], expr#7=[IS NULL($t3)], expr#8=[OR($t6, $t7)], expr#9=[IS TRUE($t5)], expr#10=[OR($t7, $t9)], expr#11=[AND($t8, $t10)], SAL=[$t1], $condition=[$t11]) EnumerableJoin(condition=[true], joinType=[left]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) EnumerableLimit(fetch=[1]) EnumerableSort(sort0=[$0], dir0=[DESC]) EnumerableAggregate(group=[{0}], c=[COUNT()]) EnumerableCalc(expr#0..2=[{inputs}], expr#3=[true], cs=[$t3]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) !plan # Test filter literal NOT IN required @@ -1546,12 +1546,12 @@ select sal from "scott".emp EnumerableCalc(expr#0..3=[{inputs}], expr#4=[false], expr#5=[=($t2, $t4)], expr#6=[IS NOT NULL($t2)], expr#7=[OR($t5, $t6)], expr#8=[IS NOT TRUE($t7)], expr#9=[IS NULL($t3)], expr#10=[OR($t8, $t9)], SAL=[$t1], $condition=[$t10]) EnumerableJoin(condition=[true], joinType=[left]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) EnumerableLimit(fetch=[1]) EnumerableSort(sort0=[$0], dir0=[DESC]) EnumerableAggregate(group=[{0}], c=[COUNT()]) EnumerableCalc(expr#0..2=[{inputs}], expr#3=[true], expr#4=[10], expr#5=[=($t4, $t0)], cs=[$t3], $condition=[$t5]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) !plan # Test filter literal NOT IN nullable @@ -1567,12 +1567,12 @@ select sal from "scott".emp EnumerableCalc(expr#0..3=[{inputs}], expr#4=[false], expr#5=[=($t2, $t4)], expr#6=[IS NOT NULL($t2)], expr#7=[OR($t5, $t6)], expr#8=[IS NOT TRUE($t7)], expr#9=[IS NULL($t3)], expr#10=[OR($t8, $t9)], SAL=[$t1], $condition=[$t10]) EnumerableJoin(condition=[true], joinType=[left]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) EnumerableLimit(fetch=[1]) EnumerableSort(sort0=[$0], dir0=[DESC]) EnumerableAggregate(group=[{0}], c=[COUNT()]) EnumerableCalc(expr#0..2=[{inputs}], expr#3=[true], expr#4=[10], expr#5=[CAST($t0):TINYINT], expr#6=[=($t4, $t5)], expr#7=[IS NULL($t5)], expr#8=[OR($t6, $t7)], cs=[$t3], $condition=[$t8]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) !plan # Test filter null IN required is unknown @@ -1602,12 +1602,12 @@ select sal from "scott".emp EnumerableCalc(expr#0..3=[{inputs}], expr#4=[null:BOOLEAN], expr#5=[IS NOT NULL($t3)], expr#6=[AND($t4, $t5)], expr#7=[IS NULL($t6)], SAL=[$t1], $condition=[$t7]) EnumerableJoin(condition=[true], joinType=[left]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) EnumerableLimit(fetch=[1]) EnumerableSort(sort0=[$0], dir0=[DESC]) EnumerableAggregate(group=[{0}], c=[COUNT()]) EnumerableCalc(expr#0..2=[{inputs}], expr#3=[true], cs=[$t3]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) !plan #------------------------------- @@ -1625,7 +1625,7 @@ select sal from "scott".emp e EnumerableCalc(expr#0..3=[{inputs}], SAL=[$t1]) EnumerableCorrelate(correlation=[$cor0], joinType=[inner], requiredColumns=[{}]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5], 
DEPTNO=[$t7]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) EnumerableValues(tuples=[[]]) !plan @@ -1655,7 +1655,7 @@ select sal from "scott".emp e EnumerableCalc(expr#0..3=[{inputs}], SAL=[$t1]) EnumerableCorrelate(correlation=[$cor0], joinType=[inner], requiredColumns=[{}]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5], DEPTNO=[$t7]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) EnumerableValues(tuples=[[]]) !plan @@ -1672,7 +1672,7 @@ select sal from "scott".emp e EnumerableCalc(expr#0..3=[{inputs}], SAL=[$t1]) EnumerableCorrelate(correlation=[$cor0], joinType=[inner], requiredColumns=[{}]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5], DEPTNO=[$t7]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) EnumerableValues(tuples=[[]]) !plan @@ -1689,7 +1689,7 @@ select sal from "scott".emp e EnumerableCalc(expr#0..3=[{inputs}], SAL=[$t1]) EnumerableCorrelate(correlation=[$cor0], joinType=[inner], requiredColumns=[{}]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5], DEPTNO=[$t7]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) EnumerableValues(tuples=[[]]) !plan @@ -1709,9 +1709,9 @@ select sal from "scott".emp e EnumerableCalc(expr#0..3=[{inputs}], SAL=[$t2]) EnumerableJoin(condition=[=($0, $3)], joinType=[inner]) EnumerableCalc(expr#0..2=[{inputs}], expr#3=[10], expr#4=[=($t3, $t0)], DEPTNO=[$t0], $condition=[$t4]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5], DEPTNO=[$t7]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) !plan # Test filter literal IN nullable correlated @@ -1730,9 +1730,9 @@ select sal from "scott".emp e EnumerableCalc(expr#0..3=[{inputs}], SAL=[$t2]) EnumerableJoin(condition=[=($0, $3)], joinType=[inner]) EnumerableCalc(expr#0..2=[{inputs}], expr#3=[10], expr#4=[=($t3, $t0)], DEPTNO=[$t0], $condition=[$t4]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5], DEPTNO=[$t7]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) !plan # Test filter null NOT IN null correlated @@ -1748,9 +1748,9 @@ select sal from "scott".emp e EnumerableCalc(expr#0..4=[{inputs}], expr#5=[false], expr#6=[=($t4, $t5)], expr#7=[IS NOT TRUE($t6)], expr#8=[IS TRUE($t6)], expr#9=[AND($t7, $t8)], SAL=[$t1], $condition=[$t9]) EnumerableJoin(condition=[=($2, $3)], joinType=[left]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5], DEPTNO=[$t7]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) EnumerableCalc(expr#0..2=[{inputs}], expr#3=[false], DEPTNO=[$t0], $f1=[$t3]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) !plan # Test filter literal NOT IN null correlated @@ -1766,9 +1766,9 @@ select sal from "scott".emp e EnumerableCalc(expr#0..4=[{inputs}], expr#5=[false], expr#6=[=($t4, $t5)], expr#7=[IS NOT NULL($t4)], expr#8=[OR($t6, $t7)], expr#9=[IS NOT TRUE($t8)], SAL=[$t1], $condition=[$t9]) EnumerableJoin(condition=[=($2, $3)], joinType=[left]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5], DEPTNO=[$t7]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) EnumerableCalc(expr#0..2=[{inputs}], expr#3=[false], DEPTNO=[$t0], $f1=[$t3]) - EnumerableScan(table=[[scott, DEPT]]) + 
EnumerableScan(entity=[[scott, DEPT]]) !plan # Test filter null NOT IN literal correlated @@ -1784,9 +1784,9 @@ select sal from "scott".emp e EnumerableCalc(expr#0..4=[{inputs}], expr#5=[false], expr#6=[=($t4, $t5)], expr#7=[IS NOT TRUE($t6)], expr#8=[IS TRUE($t6)], expr#9=[AND($t7, $t8)], SAL=[$t1], $condition=[$t9]) EnumerableJoin(condition=[=($2, $3)], joinType=[left]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5], DEPTNO=[$t7]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) EnumerableCalc(expr#0..2=[{inputs}], expr#3=[true], DEPTNO=[$t0], $f1=[$t3]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) !plan # Test filter null NOT IN required correlated @@ -1802,9 +1802,9 @@ select sal from "scott".emp e EnumerableCalc(expr#0..4=[{inputs}], expr#5=[false], expr#6=[=($t4, $t5)], expr#7=[IS NOT TRUE($t6)], expr#8=[IS TRUE($t6)], expr#9=[AND($t7, $t8)], SAL=[$t1], $condition=[$t9]) EnumerableJoin(condition=[=($2, $3)], joinType=[left]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5], DEPTNO=[$t7]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) EnumerableCalc(expr#0..2=[{inputs}], expr#3=[true], DEPTNO=[$t0], $f1=[$t3]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) !plan # Test filter null NOT IN nullable correlated @@ -1820,9 +1820,9 @@ select sal from "scott".emp e EnumerableCalc(expr#0..4=[{inputs}], expr#5=[false], expr#6=[=($t4, $t5)], expr#7=[IS NOT TRUE($t6)], expr#8=[IS TRUE($t6)], expr#9=[AND($t7, $t8)], SAL=[$t1], $condition=[$t9]) EnumerableJoin(condition=[=($2, $3)], joinType=[left]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5], DEPTNO=[$t7]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) EnumerableCalc(expr#0..2=[{inputs}], expr#3=[true], DEPTNO=[$t0], $f1=[$t3]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) !plan # Test filter literal NOT IN required correlated @@ -1849,9 +1849,9 @@ select sal from "scott".emp e EnumerableCalc(expr#0..4=[{inputs}], expr#5=[false], expr#6=[=($t4, $t5)], expr#7=[IS NOT NULL($t4)], expr#8=[OR($t6, $t7)], expr#9=[IS NOT TRUE($t8)], SAL=[$t1], $condition=[$t9]) EnumerableJoin(condition=[=($2, $3)], joinType=[left]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5], DEPTNO=[$t7]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) EnumerableCalc(expr#0..2=[{inputs}], expr#3=[true], expr#4=[10], expr#5=[=($t4, $t0)], DEPTNO1=[$t0], $f1=[$t3], $condition=[$t5]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) !plan # Test filter literal NOT IN nullable correlated @@ -1878,9 +1878,9 @@ select sal from "scott".emp e EnumerableCalc(expr#0..4=[{inputs}], expr#5=[false], expr#6=[=($t4, $t5)], expr#7=[IS NOT NULL($t4)], expr#8=[OR($t6, $t7)], expr#9=[IS NOT TRUE($t8)], SAL=[$t1], $condition=[$t9]) EnumerableJoin(condition=[=($2, $3)], joinType=[left]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5], DEPTNO=[$t7]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) EnumerableCalc(expr#0..2=[{inputs}], expr#3=[true], expr#4=[10], expr#5=[=($t4, $t0)], DEPTNO=[$t0], $f1=[$t3], $condition=[$t5]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) !plan # Test filter null IN required is unknown correlated @@ -1910,9 +1910,9 @@ select sal from "scott".emp e EnumerableCalc(expr#0..3=[{inputs}], 
SAL=[$t1]) EnumerableJoin(condition=[=($2, $3)], joinType=[left]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5], DEPTNO=[$t7]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) EnumerableCalc(expr#0..2=[{inputs}], DEPTNO=[$t0]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) !plan diff --git a/core/src/test/resources/sql/table.iq b/core/src/test/resources/sql/table.iq index 5dc8919078..61adf23446 100644 --- a/core/src/test/resources/sql/table.iq +++ b/core/src/test/resources/sql/table.iq @@ -1,4 +1,4 @@ -# table.iq - Table DDL +# entity.iq - Table DDL # # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with @@ -18,18 +18,18 @@ !use server !set outputformat mysql -# Create a basic table -create table t (i int, j int not null); +# Create a basic entity +create entity t (i int, j int not null); (0 rows modified) !update -create table if not exists t (i int, j int not null, k date); +create entity if not exists t (i int, j int not null, k date); (0 rows modified) !update -# There is no "K" column, because table was not re-created +# There is no "K" column, because entity was not re-created select * from t; I INTEGER(10) J INTEGER(10) NOT NULL @@ -50,13 +50,13 @@ select * from t; !ok -drop table t; +drop entity t; (0 rows modified) !update -# Create a table with a DEFAULT column -create table t (i int, j int default i + 2); +# Create an entity with a DEFAULT column +create entity t (i int, j int default i + 2); (0 rows modified) !update @@ -82,14 +82,14 @@ select * from t; !ok -drop table t; +drop entity t; (0 rows modified) !update -# Create a table with a VIRTUAL column +# Create an entity with a VIRTUAL column -create table t (i int, j int as (i + k + 2) virtual, k int); +create entity t (i int, j int as (i + k + 2) virtual, k int); (0 rows modified) !update @@ -106,7 +106,7 @@ insert into t (i, k) values (1, 3); (1 row modified) !update -EnumerableTableModify(table=[[T]], operation=[INSERT], flattened=[false]) +EnumerableTableModify(entity=[[T]], operation=[INSERT], flattened=[false]) EnumerableValues(tuples=[[{ 1, 3 }]]) !plan insert into t (k, i) values (5, 2); (1 row modified) !update -EnumerableTableModify(table=[[T]], operation=[INSERT], flattened=[false]) +EnumerableTableModify(entity=[[T]], operation=[INSERT], flattened=[false]) EnumerableCalc(expr#0..1=[{inputs}], I=[$t1], K=[$t0]) EnumerableValues(tuples=[[{ 5, 2 }]]) !plan @@ -130,10 +130,10 @@ select * from t; !ok EnumerableCalc(expr#0..1=[{inputs}], expr#2=[+($t0, $t1)], expr#3=[2], expr#4=[+($t2, $t3)], I=[$t0], J=[$t4], K=[$t1]) - EnumerableScan(table=[[T]]) + EnumerableScan(entity=[[T]]) !plan -drop table if exists t; +drop entity if exists t; (0 rows modified) !update @@ -142,13 +142,13 @@ select * from t; Object 'T' not found !error -drop table t; +drop entity t; Table 'T' not found !error -drop table if exists t; +drop entity if exists t; (0 rows modified) !update -# End table.iq +# End entity.iq diff --git a/core/src/test/resources/sql/table_as.iq b/core/src/test/resources/sql/table_as.iq index 68ca9f1773..d83dd1d998 100644 --- a/core/src/test/resources/sql/table_as.iq +++ b/core/src/test/resources/sql/table_as.iq @@ -18,8 +18,8 @@ !use server !set outputformat mysql -# Create a source table -create table dept (deptno int not null, name varchar(10)); +# Create a source entity +create entity dept (deptno int not null, name varchar(10)); (0 rows 
modified) !update @@ -31,7 +31,7 @@ values (10, 'Sales'), (20, 'Marketing'), (30, 'Engineering'); !update # Create as select -create table d as +create entity d as select * from dept where deptno > 10; (0 rows modified) @@ -50,13 +50,13 @@ select * from d; !ok # Try to create again - fails -create table d as +create entity d as select * from dept where deptno < 30; Table 'D' already exists !error # Try to create again - fails silently -create table if not exists d as +create entity if not exists d as select * from dept where deptno < 30; (0 rows modified) @@ -75,7 +75,7 @@ select * from d; !ok # Drop -drop table if exists d; +drop entity if exists d; (0 rows modified) !update @@ -85,34 +85,34 @@ select * from d; Object 'D' not found !error -# Drop does nothing because table does not exist -drop table if exists d; +# Drop does nothing because entity does not exist +drop entity if exists d; (0 rows modified) !update -# Create table without either AS or column list - fails -create table d; +# Create entity without either AS or column list - fails +create entity d; At line 1, column 14: Missing column list !error -# Create table without AS or column types - fails -create table d (x, y); +# Create entity without AS or column types - fails +create entity d (x, y); At line 1, column 17: Type required for column 'X' in CREATE TABLE without AS !error -# Create table without AS or column types - fails -create table d (x int, y); +# Create entity without AS or column types - fails +create entity d (x int, y); At line 1, column 24: Type required for column 'Y' in CREATE TABLE without AS !error # Create based on itself - fails -create table d2 as select * from d2; +create entity d2 as select * from d2; Object 'D2' not found !error -# Create table based on UNION -create table d3 as +# Create entity based on UNION +create entity d3 as select deptno as dd from dept where deptno < 15 union all select deptno as ee from dept where deptno > 25; @@ -133,13 +133,13 @@ select * from d3; !ok # Drop -drop table d3; +drop entity d3; (0 rows modified) !update -# Create table based on UNION and ORDER BY -create table d4 as +# Create entity based on UNION and ORDER BY +create entity d4 as select deptno as dd from dept where deptno < 15 union all select deptno as dd from dept where deptno > 25 @@ -161,10 +161,10 @@ select * from d4; !ok # Drop -drop table d4; +drop entity d4; -# Create table based on VALUES -create table d5 as +# Create entity based on VALUES +create entity d5 as values (1, 'a'), (2, 'b'); (0 rows modified) @@ -183,7 +183,7 @@ select * from d5; !ok # Use just aliases -create table d6 (x, y) as +create entity d6 (x, y) as select * from dept where deptno < 15; (0 rows modified) @@ -201,7 +201,7 @@ select * from d6; !ok # Use a mixture of aliases and column declarations -create table d7 (x int, y) as +create entity d7 (x int, y) as select * from dept where deptno < 15; (0 rows modified) @@ -219,19 +219,19 @@ select * from d7; !ok # Too many columns -create table d8 (x, y, z) as +create entity d8 (x, y, z) as select * from dept where deptno < 15; Number of columns must match number of query columns !error # Too few columns -create table d9 (x) as +create entity d9 (x) as select * from dept where deptno < 15; Number of columns must match number of query columns !error # Specify column names and types -create table d10 (x int, y varchar(20)) as +create entity d10 (x int, y varchar(20)) as select * from dept where deptno < 15; (0 rows modified) diff --git a/core/src/test/resources/sql/type.iq 
b/core/src/test/resources/sql/type.iq index dada3b0506..a985b4282f 100644 --- a/core/src/test/resources/sql/type.iq +++ b/core/src/test/resources/sql/type.iq @@ -23,8 +23,8 @@ create type myint1 as int; !update -# Create a basic table -create table t (i myint1 not null, j int not null); +# Create a basic entity +create entity t (i myint1 not null, j int not null); (0 rows modified) !update @@ -50,7 +50,7 @@ select * from t; !ok -# Create a table with complex structure type +# Create an entity with complex structure type # This is to test struct type inference create type mytype1 as (ii int not null); !update -# Create a complex table -create table v (i int not null, j mytype1 not null); +# Create a complex entity +create entity v (i int not null, j mytype1 not null); (0 rows modified) !update @@ -69,8 +69,8 @@ MYINT INTEGER(10) NOT NULL MYSTRUCT STRUCT NOT NULL !type -drop table t; -drop table v; +drop entity t; +drop entity v; (0 rows modified) !update diff --git a/core/src/test/resources/sql/view.iq b/core/src/test/resources/sql/view.iq index e1aeff55a9..f2e04e8655 100644 --- a/core/src/test/resources/sql/view.iq +++ b/core/src/test/resources/sql/view.iq @@ -91,7 +91,7 @@ select * from v; create or replace view v (x, y, z) as select a, a + 5 as b from (values 1, 2) as t(a); -List of column aliases must have same degree as table; table has 2 columns ('A', 'B'), whereas alias list has 3 columns +List of column aliases must have same degree as entity; entity has 2 columns ('A', 'B'), whereas alias list has 3 columns !error # Column names not unique @@ -119,8 +119,8 @@ select * from v; !ok -# View based on table -create table t (i int); +# View based on entity +create entity t (i int); (0 rows modified) !update diff --git a/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java b/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java index a155eed106..f9b6cb2f8f 100644 --- a/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java +++ b/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java @@ -65,7 +65,7 @@ import org.polypheny.db.rex.RexNode; import org.polypheny.db.routing.RoutingManager; import org.polypheny.db.schema.ModelTrait; -import org.polypheny.db.schema.ModifiableTable; +import org.polypheny.db.schema.ModifiableEntity; import org.polypheny.db.schema.PolySchemaBuilder; import org.polypheny.db.schema.graph.PolyGraph; import org.polypheny.db.tools.AlgBuilder; @@ -323,7 +323,7 @@ public AlgRoot buildDeleteStatement( Statement statement, List handleHorizontalPartitioning( AlgNode node, CatalogTable catalogTable, Statement statement, - LogicalTable logicalTable, + LogicalEntity logicalTable, List builders, AlgOptCluster cluster, LogicalQueryInformation queryInformation ); @@ -101,7 +101,7 @@ protected abstract List handleVerticalPartitioningOrReplicatio AlgNode node, CatalogTable catalogTable, Statement statement, - LogicalTable logicalTable, + LogicalEntity logicalTable, List builders, AlgOptCluster cluster, LogicalQueryInformation queryInformation ); @@ -219,7 +219,7 @@ protected List buildSelect( AlgNode node, List buildSelect( AlgNode node, List - private List handleRelationalOnGraphScan( AlgNode node, Statement statement, LogicalTable logicalTable, List builders, AlgOptCluster cluster, LogicalQueryInformation queryInformation ) { + private List handleRelationalOnGraphScan( AlgNode node, Statement statement, LogicalEntity logicalTable, List builders, AlgOptCluster cluster, 
LogicalQueryInformation queryInformation ) { // todo dl: remove after RowType refactor AlgBuilder algBuilder = AlgBuilder.create( statement ); RexBuilder rexBuilder = algBuilder.getRexBuilder(); diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java b/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java index f779f5bc91..b57c1c612b 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java @@ -109,11 +109,11 @@ import org.polypheny.db.routing.DmlRouter; import org.polypheny.db.routing.LogicalQueryInformation; import org.polypheny.db.routing.RoutingManager; +import org.polypheny.db.schema.Entity; import org.polypheny.db.schema.ModelTrait; import org.polypheny.db.schema.ModifiableCollection; -import org.polypheny.db.schema.ModifiableTable; +import org.polypheny.db.schema.ModifiableEntity; import org.polypheny.db.schema.PolySchemaBuilder; -import org.polypheny.db.schema.Table; import org.polypheny.db.schema.graph.Graph; import org.polypheny.db.schema.graph.ModifiableGraph; import org.polypheny.db.tools.AlgBuilder; @@ -413,7 +413,7 @@ else if ( identifiedPartitionForSetValue != -1 ) { ), catalogTable.name + "_" + currentPartitionId ); AlgOptEntity physical = catalogReader.getTableForMember( qualifiedTableName ); - ModifiableTable modifiableTable = physical.unwrap( ModifiableTable.class ); + ModifiableEntity modifiableTable = physical.unwrap( ModifiableEntity.class ); // Build DML Modify adjustedModify = modifiableTable.toModificationAlg( @@ -504,7 +504,7 @@ else if ( identifiedPartitionForSetValue != -1 ) { ), catalogTable.name + "_" + entry.getKey() ); AlgOptEntity physical = catalogReader.getTableForMember( qualifiedTableName ); - ModifiableTable modifiableTable = physical.unwrap( ModifiableTable.class ); + ModifiableEntity modifiableTable = physical.unwrap( ModifiableEntity.class ); // Build DML Modify adjustedModify = modifiableTable.toModificationAlg( @@ -607,9 +607,9 @@ else if ( identifiedPartitionForSetValue != -1 ) { false, statement.getDataContext().getParameterValues() ).build(); - ModifiableTable modifiableTable = physical.unwrap( ModifiableTable.class ); + ModifiableEntity modifiableTable = physical.unwrap( ModifiableEntity.class ); - if ( modifiableTable != null && modifiableTable == physical.unwrap( Table.class ) ) { + if ( modifiableTable != null && modifiableTable == physical.unwrap( Entity.class ) ) { adjustedModify = modifiableTable.toModificationAlg( cluster, physical, @@ -1212,7 +1212,7 @@ private AlgNode switchContext( AlgNode node ) { private Modify getModify( AlgOptEntity table, AlgNode input, Statement statement, Operation operation, List updateList, List sourceList ) { - return table.unwrap( ModifiableTable.class ).toModificationAlg( input.getCluster(), table, statement.getTransaction().getCatalogReader(), input, operation, updateList, sourceList, true ); + return table.unwrap( ModifiableEntity.class ).toModificationAlg( input.getCluster(), table, statement.getTransaction().getCatalogReader(), input, operation, updateList, sourceList, true ); } diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/FullPlacementQueryRouter.java b/dbms/src/main/java/org/polypheny/db/routing/routers/FullPlacementQueryRouter.java index 16d1be7b0e..ff795eb351 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/FullPlacementQueryRouter.java +++ 
b/dbms/src/main/java/org/polypheny/db/routing/routers/FullPlacementQueryRouter.java @@ -35,7 +35,7 @@ import org.polypheny.db.routing.LogicalQueryInformation; import org.polypheny.db.routing.Router; import org.polypheny.db.routing.factories.RouterFactory; -import org.polypheny.db.schema.LogicalTable; +import org.polypheny.db.schema.LogicalEntity; import org.polypheny.db.tools.RoutedAlgBuilder; import org.polypheny.db.transaction.Statement; @@ -48,7 +48,7 @@ protected List handleHorizontalPartitioning( AlgNode node, CatalogTable catalogTable, Statement statement, - LogicalTable logicalTable, + LogicalEntity logicalTable, List builders, AlgOptCluster cluster, LogicalQueryInformation queryInformation ) { @@ -81,7 +81,7 @@ protected List handleVerticalPartitioningOrReplication( AlgNode node, CatalogTable catalogTable, Statement statement, - LogicalTable logicalTable, + LogicalEntity logicalTable, List builders, AlgOptCluster cluster, LogicalQueryInformation queryInformation ) { diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/IcarusRouter.java b/dbms/src/main/java/org/polypheny/db/routing/routers/IcarusRouter.java index 2b280888c1..ad2c6d6883 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/IcarusRouter.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/IcarusRouter.java @@ -31,7 +31,7 @@ import org.polypheny.db.catalog.entity.CatalogTable; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.routing.LogicalQueryInformation; -import org.polypheny.db.schema.LogicalTable; +import org.polypheny.db.schema.LogicalEntity; import org.polypheny.db.tools.RoutedAlgBuilder; import org.polypheny.db.transaction.Statement; import org.polypheny.db.util.Pair; @@ -41,14 +41,14 @@ public class IcarusRouter extends FullPlacementQueryRouter { @Override - protected List handleHorizontalPartitioning( AlgNode node, CatalogTable catalogTable, Statement statement, LogicalTable logicalTable, List builders, AlgOptCluster cluster, LogicalQueryInformation queryInformation ) { + protected List handleHorizontalPartitioning( AlgNode node, CatalogTable catalogTable, Statement statement, LogicalEntity logicalTable, List builders, AlgOptCluster cluster, LogicalQueryInformation queryInformation ) { this.cancelQuery = true; return Collections.emptyList(); } @Override - protected List handleVerticalPartitioningOrReplication( AlgNode node, CatalogTable catalogTable, Statement statement, LogicalTable logicalTable, List builders, AlgOptCluster cluster, LogicalQueryInformation queryInformation ) { + protected List handleVerticalPartitioningOrReplication( AlgNode node, CatalogTable catalogTable, Statement statement, LogicalEntity logicalTable, List builders, AlgOptCluster cluster, LogicalQueryInformation queryInformation ) { // same as no partitioning return handleNonePartitioning( node, catalogTable, statement, builders, cluster, queryInformation ); } diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/SimpleRouter.java b/dbms/src/main/java/org/polypheny/db/routing/routers/SimpleRouter.java index 2417c0c6a3..3c7a357bef 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/SimpleRouter.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/SimpleRouter.java @@ -30,7 +30,7 @@ import org.polypheny.db.routing.LogicalQueryInformation; import org.polypheny.db.routing.Router; import org.polypheny.db.routing.factories.RouterFactory; -import org.polypheny.db.schema.LogicalTable; +import org.polypheny.db.schema.LogicalEntity; import 
org.polypheny.db.tools.RoutedAlgBuilder; import org.polypheny.db.transaction.Statement; @@ -44,7 +44,7 @@ private SimpleRouter() { @Override - protected List handleVerticalPartitioningOrReplication( AlgNode node, CatalogTable catalogTable, Statement statement, LogicalTable logicalTable, List builders, AlgOptCluster cluster, LogicalQueryInformation queryInformation ) { + protected List handleVerticalPartitioningOrReplication( AlgNode node, CatalogTable catalogTable, Statement statement, LogicalEntity logicalTable, List builders, AlgOptCluster cluster, LogicalQueryInformation queryInformation ) { // Do same as without any partitioning return handleNonePartitioning( node, catalogTable, statement, builders, cluster, queryInformation ); } @@ -64,7 +64,7 @@ protected List handleNonePartitioning( AlgNode node, CatalogTa @Override - protected List handleHorizontalPartitioning( AlgNode node, CatalogTable catalogTable, Statement statement, LogicalTable logicalTable, List builders, AlgOptCluster cluster, LogicalQueryInformation queryInformation ) { + protected List handleHorizontalPartitioning( AlgNode node, CatalogTable catalogTable, Statement statement, LogicalEntity logicalTable, List builders, AlgOptCluster cluster, LogicalQueryInformation queryInformation ) { PartitionManagerFactory partitionManagerFactory = PartitionManagerFactory.getInstance(); PartitionManager partitionManager = partitionManagerFactory.getPartitionManager( catalogTable.partitionProperty.partitionType ); diff --git a/dbms/src/main/java/org/polypheny/db/schema/PolySchemaBuilder.java b/dbms/src/main/java/org/polypheny/db/schema/PolySchemaBuilder.java index 5a09a8b0b6..b93a475477 100644 --- a/dbms/src/main/java/org/polypheny/db/schema/PolySchemaBuilder.java +++ b/dbms/src/main/java/org/polypheny/db/schema/PolySchemaBuilder.java @@ -54,7 +54,8 @@ import org.polypheny.db.catalog.entity.CatalogSchema; import org.polypheny.db.catalog.entity.CatalogTable; import org.polypheny.db.config.RuntimeConfig; -import org.polypheny.db.schema.impl.AbstractSchema; +import org.polypheny.db.schema.Namespace.Schema; +import org.polypheny.db.schema.impl.AbstractNamespace; import org.polypheny.db.type.PolyType; import org.polypheny.db.type.PolyTypeFactoryImpl; import org.polypheny.db.util.BuiltInMethod; @@ -90,8 +91,8 @@ public AbstractPolyphenyDbSchema getCurrent() { private synchronized AbstractPolyphenyDbSchema buildSchema() { - final Schema schema = new RootSchema(); - final AbstractPolyphenyDbSchema polyphenyDbSchema = new SimplePolyphenyDbSchema( null, schema, "", NamespaceType.RELATIONAL, false ); + final Namespace namespace = new RootSchema(); + final AbstractPolyphenyDbSchema polyphenyDbSchema = new SimplePolyphenyDbSchema( null, namespace, "", NamespaceType.RELATIONAL, false ); SchemaPlus rootSchema = polyphenyDbSchema.plus(); Catalog catalog = Catalog.getInstance(); @@ -123,10 +124,10 @@ private synchronized AbstractPolyphenyDbSchema buildSchema() { private void buildGraphLogical( AbstractPolyphenyDbSchema polyphenyDbSchema, SchemaPlus rootSchema, Catalog catalog, CatalogDatabase catalogDatabase ) { for ( CatalogGraphDatabase graph : catalog.getGraphs( catalogDatabase.id, null ) ) { - SchemaPlus s = new SimplePolyphenyDbSchema( polyphenyDbSchema, new AbstractSchema(), graph.name, NamespaceType.GRAPH, graph.caseSensitive ).plus(); + SchemaPlus s = new SimplePolyphenyDbSchema( polyphenyDbSchema, new AbstractNamespace( graph.id ), graph.name, NamespaceType.GRAPH, graph.caseSensitive ).plus(); rootSchema.add( graph.name, s, NamespaceType.GRAPH 
); - s.polyphenyDbSchema().setSchema( new LogicalGraph( graph.id ) ); + s.polyphenyDbSchema().setNamespace( new LogicalGraph( graph.id ) ); } } @@ -136,8 +137,8 @@ private void buildRelationalLogical( AbstractPolyphenyDbSchema polyphenyDbSchema if ( catalogSchema.namespaceType != NamespaceType.RELATIONAL ) { continue; } - Map tableMap = new HashMap<>(); - SchemaPlus s = new SimplePolyphenyDbSchema( polyphenyDbSchema, new AbstractSchema(), catalogSchema.name, catalogSchema.namespaceType, catalogSchema.caseSensitive ).plus(); + Map tableMap = new HashMap<>(); + SchemaPlus s = new SimplePolyphenyDbSchema( polyphenyDbSchema, new AbstractNamespace( catalogSchema.id ), catalogSchema.name, catalogSchema.namespaceType, catalogSchema.caseSensitive ).plus(); for ( CatalogTable catalogTable : catalog.getTables( catalogSchema.id, null ) ) { List columnNames = new LinkedList<>(); @@ -165,11 +166,11 @@ private void buildRelationalLogical( AbstractPolyphenyDbSchema polyphenyDbSchema } rootSchema.add( catalogSchema.name, s, catalogSchema.namespaceType ); - tableMap.forEach( rootSchema.getSubSchema( catalogSchema.name )::add ); + tableMap.forEach( rootSchema.getSubNamespace( catalogSchema.name )::add ); if ( catalogDatabase.defaultNamespaceId != null && catalogSchema.id == catalogDatabase.defaultNamespaceId ) { tableMap.forEach( rootSchema::add ); } - s.polyphenyDbSchema().setSchema( new LogicalSchema( catalogSchema.name, tableMap, new HashMap<>() ) ); + s.polyphenyDbSchema().setNamespace( new LogicalSchema( catalogSchema.id, catalogSchema.name, tableMap, new HashMap<>() ) ); } } @@ -179,8 +180,8 @@ private void buildDocumentLogical( AbstractPolyphenyDbSchema polyphenyDbSchema, if ( catalogSchema.namespaceType != NamespaceType.DOCUMENT ) { continue; } - Map collectionMap = new HashMap<>(); - SchemaPlus s = new SimplePolyphenyDbSchema( polyphenyDbSchema, new AbstractSchema(), catalogSchema.name, catalogSchema.namespaceType, catalogSchema.caseSensitive ).plus(); + Map collectionMap = new HashMap<>(); + SchemaPlus s = new SimplePolyphenyDbSchema( polyphenyDbSchema, new AbstractNamespace( catalogSchema.id ), catalogSchema.name, catalogSchema.namespaceType, catalogSchema.caseSensitive ).plus(); for ( CatalogCollection catalogEntity : catalog.getCollections( catalogSchema.id, null ) ) { List columnNames = new LinkedList<>(); @@ -194,7 +195,7 @@ private void buildDocumentLogical( AbstractPolyphenyDbSchema polyphenyDbSchema, List columnIds = new LinkedList<>(); catalog.getColumns( catalogEntity.id ).forEach( c -> columnIds.add( c.id ) ); - LogicalTable entity; + LogicalEntity entity; if ( catalogEntity.entityType == EntityType.VIEW ) { entity = new LogicalRelView( catalogEntity.id, @@ -219,16 +220,16 @@ private void buildDocumentLogical( AbstractPolyphenyDbSchema polyphenyDbSchema, } rootSchema.add( catalogSchema.name, s, catalogSchema.namespaceType ); - collectionMap.forEach( rootSchema.getSubSchema( catalogSchema.name )::add ); + collectionMap.forEach( rootSchema.getSubNamespace( catalogSchema.name )::add ); if ( catalogDatabase.defaultNamespaceId != null && catalogSchema.id == catalogDatabase.defaultNamespaceId ) { collectionMap.forEach( rootSchema::add ); } - PolyphenyDbSchema schema = s.polyphenyDbSchema().getSubSchema( catalogSchema.name, catalogSchema.caseSensitive ); + PolyphenyDbSchema schema = s.polyphenyDbSchema().getSubNamespace( catalogSchema.name, catalogSchema.caseSensitive ); if ( schema != null ) { - LogicalSchema logicalSchema = new LogicalSchema( catalogSchema.name, ((LogicalSchema) 
schema.getSchema()).getTableMap(), collectionMap ); - s.polyphenyDbSchema().setSchema( logicalSchema ); + LogicalSchema logicalSchema = new LogicalSchema( catalogSchema.id, catalogSchema.name, ((LogicalSchema) schema.getNamespace()).getTableMap(), collectionMap ); + s.polyphenyDbSchema().setNamespace( logicalSchema ); } else { - s.polyphenyDbSchema().setSchema( new LogicalSchema( catalogSchema.name, new HashMap<>(), collectionMap ) ); + s.polyphenyDbSchema().setNamespace( new LogicalSchema( catalogSchema.id, catalogSchema.name, new HashMap<>(), collectionMap ) ); } } @@ -253,7 +254,7 @@ private void buildPhysicalGraphs( AbstractPolyphenyDbSchema polyphenyDbSchema, S SchemaPlus s = new SimplePolyphenyDbSchema( polyphenyDbSchema, adapter.getCurrentGraphNamespace(), schemaName, NamespaceType.GRAPH, graph.caseSensitive ).plus(); rootSchema.add( schemaName, s, NamespaceType.GRAPH ); - rootSchema.getSubSchema( schemaName ).polyphenyDbSchema().setSchema( adapter.getCurrentGraphNamespace() ); + rootSchema.getSubNamespace( schemaName ).polyphenyDbSchema().setNamespace( adapter.getCurrentGraphNamespace() ); } } } @@ -280,11 +281,11 @@ private void buildPhysicalDocuments( AbstractPolyphenyDbSchema polyphenyDbSchema for ( String physicalSchemaName : documentIdsPerSchema.keySet() ) { Set collectionIds = documentIdsPerSchema.get( physicalSchemaName ); - HashMap physicalTables = new HashMap<>(); + HashMap physicalTables = new HashMap<>(); final String schemaName = buildAdapterSchemaName( catalogAdapter.uniqueName, catalogSchema.name, physicalSchemaName ); - adapter.createNewSchema( rootSchema, schemaName ); + adapter.createNewSchema( rootSchema, schemaName, catalogSchema.id ); SchemaPlus s = new SimplePolyphenyDbSchema( polyphenyDbSchema, adapter.getCurrentSchema(), schemaName, catalogSchema.namespaceType, catalogSchema.caseSensitive ).plus(); for ( long collectionId : collectionIds ) { CatalogCollection catalogCollection = catalog.getCollection( collectionId ); @@ -294,13 +295,13 @@ private void buildPhysicalDocuments( AbstractPolyphenyDbSchema polyphenyDbSchema continue; } - Table table = adapter.createDocumentSchema( catalogCollection, partitionPlacement ); + Entity entity = adapter.createDocumentSchema( catalogCollection, partitionPlacement ); - physicalTables.put( catalog.getCollection( collectionId ).name + "_" + partitionPlacement.id, table ); + physicalTables.put( catalog.getCollection( collectionId ).name + "_" + partitionPlacement.id, entity ); rootSchema.add( schemaName, s, catalogSchema.namespaceType ); - physicalTables.forEach( rootSchema.getSubSchema( schemaName )::add ); - rootSchema.getSubSchema( schemaName ).polyphenyDbSchema().setSchema( adapter.getCurrentSchema() ); + physicalTables.forEach( rootSchema.getSubNamespace( schemaName )::add ); + rootSchema.getSubNamespace( schemaName ).polyphenyDbSchema().setNamespace( adapter.getCurrentSchema() ); } } } @@ -323,12 +324,12 @@ private void buildPhysicalTables( AbstractPolyphenyDbSchema polyphenyDbSchema, S for ( String physicalSchemaName : tableIdsPerSchema.keySet() ) { Set tableIds = tableIdsPerSchema.get( physicalSchemaName ); - HashMap physicalTables = new HashMap<>(); + HashMap physicalTables = new HashMap<>(); Adapter adapter = AdapterManager.getInstance().getAdapter( catalogAdapter.id ); final String schemaName = buildAdapterSchemaName( catalogAdapter.uniqueName, catalogSchema.name, physicalSchemaName ); - adapter.createNewSchema( rootSchema, schemaName ); + adapter.createNewSchema( rootSchema, schemaName, catalogSchema.id ); 
SchemaPlus s = new SimplePolyphenyDbSchema( polyphenyDbSchema, adapter.getCurrentSchema(), schemaName, catalogSchema.namespaceType, catalogSchema.caseSensitive ).plus(); for ( long tableId : tableIds ) { CatalogTable catalogTable = catalog.getTable( tableId ); @@ -340,16 +341,16 @@ private void buildPhysicalTables( AbstractPolyphenyDbSchema polyphenyDbSchema, S continue; } - Table table = adapter.createTableSchema( + Entity entity = adapter.createTableSchema( catalogTable, Catalog.getInstance().getColumnPlacementsOnAdapterSortedByPhysicalPosition( adapter.getAdapterId(), catalogTable.id ), partitionPlacement ); - physicalTables.put( catalog.getTable( tableId ).name + "_" + partitionPlacement.partitionId, table ); + physicalTables.put( catalog.getTable( tableId ).name + "_" + partitionPlacement.partitionId, entity ); rootSchema.add( schemaName, s, catalogSchema.namespaceType ); - physicalTables.forEach( rootSchema.getSubSchema( schemaName )::add ); - rootSchema.getSubSchema( schemaName ).polyphenyDbSchema().setSchema( adapter.getCurrentSchema() ); + physicalTables.forEach( rootSchema.getSubNamespace( schemaName )::add ); + rootSchema.getSubNamespace( schemaName ).polyphenyDbSchema().setNamespace( adapter.getCurrentSchema() ); } } } @@ -358,7 +359,7 @@ private void buildPhysicalTables( AbstractPolyphenyDbSchema polyphenyDbSchema, S } - private void buildView( Map tableMap, SchemaPlus s, CatalogTable catalogTable, List columnNames, Builder fieldInfo, List columnIds ) { + private void buildView( Map tableMap, SchemaPlus s, CatalogTable catalogTable, List columnNames, Builder fieldInfo, List columnIds ) { LogicalRelView view = new LogicalRelView( catalogTable.id, catalogTable.getNamespaceName(), @@ -371,10 +372,10 @@ private void buildView( Map tableMap, SchemaPlus s, Catalo } - private void buildEntity( Catalog catalog, CatalogSchema catalogSchema, Map tableMap, SchemaPlus s, CatalogTable catalogTable, List columnNames, AlgDataType rowType, List columnIds ) { - LogicalTable table; + private void buildEntity( Catalog catalog, CatalogSchema catalogSchema, Map tableMap, SchemaPlus s, CatalogTable catalogTable, List columnNames, AlgDataType rowType, List columnIds ) { + LogicalEntity table; if ( catalogSchema.namespaceType == NamespaceType.RELATIONAL ) { - table = new LogicalTable( + table = new LogicalEntity( catalogTable.id, catalogTable.getNamespaceName(), catalogTable.name, @@ -426,10 +427,10 @@ public void propertyChange( PropertyChangeEvent evt ) { /** * Schema that has no parents. 
*/ - private static class RootSchema extends AbstractSchema { + private static class RootSchema extends AbstractNamespace implements Schema { RootSchema() { - super(); + super( -1L ); } diff --git a/dbms/src/main/java/org/polypheny/db/transaction/EntityAccessMap.java b/dbms/src/main/java/org/polypheny/db/transaction/EntityAccessMap.java index c4ddae971a..98b4b7cff1 100644 --- a/dbms/src/main/java/org/polypheny/db/transaction/EntityAccessMap.java +++ b/dbms/src/main/java/org/polypheny/db/transaction/EntityAccessMap.java @@ -44,7 +44,7 @@ import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptUtil; import org.polypheny.db.prepare.AlgOptEntityImpl; -import org.polypheny.db.schema.LogicalTable; +import org.polypheny.db.schema.LogicalEntity; import org.polypheny.db.transaction.EntityAccessMap.EntityIdentifier.NamespaceLevel; import org.polypheny.db.transaction.Lock.LockMode; @@ -257,7 +257,7 @@ public void visit( AlgNode p, int ordinal, AlgNode parent ) { if ( p instanceof Modify ) { newAccess = Mode.WRITE_ACCESS; if ( RuntimeConfig.FOREIGN_KEY_ENFORCEMENT.getBoolean() ) { - extractWriteConstraints( (LogicalTable) table.getTable() ); + extractWriteConstraints( (LogicalEntity) table.getTable() ); } } else { newAccess = Mode.READ_ACCESS; @@ -324,7 +324,7 @@ private void attachGraph( LpgAlg p ) { /** * Retrieves an access map for linked tables based on foreign key constraints */ - private void extractWriteConstraints( LogicalTable logicalTable ) { + private void extractWriteConstraints( LogicalEntity logicalTable ) { for ( long constraintTable : logicalTable.getConstraintIds() ) { for ( long constraintPartitionIds diff --git a/dbms/src/test/java/org/polypheny/db/misc/AlgBuilderTest.java b/dbms/src/test/java/org/polypheny/db/misc/AlgBuilderTest.java index 770f44a9eb..65c5504def 100644 --- a/dbms/src/test/java/org/polypheny/db/misc/AlgBuilderTest.java +++ b/dbms/src/test/java/org/polypheny/db/misc/AlgBuilderTest.java @@ -34,10 +34,21 @@ package org.polypheny.db.misc; +import static org.hamcrest.CoreMatchers.containsString; +import static org.hamcrest.CoreMatchers.is; +import static org.junit.Assert.assertThat; +import static org.junit.Assert.fail; + import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Lists; +import java.sql.Connection; +import java.sql.SQLException; +import java.sql.Statement; +import java.util.Arrays; +import java.util.List; +import java.util.TreeSet; import lombok.extern.slf4j.Slf4j; import org.hamcrest.Matcher; import org.junit.AfterClass; @@ -82,18 +93,6 @@ import org.polypheny.db.util.Util; import org.polypheny.db.util.mapping.Mappings; -import java.sql.Connection; -import java.sql.SQLException; -import java.sql.Statement; -import java.util.Arrays; -import java.util.List; -import java.util.TreeSet; - -import static org.hamcrest.CoreMatchers.containsString; -import static org.hamcrest.CoreMatchers.is; -import static org.junit.Assert.assertThat; -import static org.junit.Assert.fail; - /** * Unit test for {@link AlgBuilder}. 
@@ -154,7 +153,7 @@ private AlgBuilder createAlgBuilder() { final SchemaPlus rootSchema = transaction.getSchema().plus(); FrameworkConfig config = Frameworks.newConfigBuilder() .parserConfig( Parser.ParserConfig.DEFAULT ) - .defaultSchema( rootSchema.getSubSchema( transaction.getDefaultSchema().name ) ) + .defaultSchema( rootSchema.getSubNamespace( transaction.getDefaultSchema().name ) ) .traitDefs( (List) null ) .programs( Programs.heuristicJoinOrder( Programs.RULE_SET, true, 2 ) ) .prepareContext( new ContextImpl( diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticsManagerImpl.java b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticsManagerImpl.java index fddf7bc2ad..22f517d23b 100644 --- a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticsManagerImpl.java +++ b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticsManagerImpl.java @@ -77,6 +77,7 @@ import org.polypheny.db.prepare.Prepare.CatalogReader; import org.polypheny.db.rex.RexBuilder; import org.polypheny.db.rex.RexLiteral; +import org.polypheny.db.schema.impl.AbstractEntity; import org.polypheny.db.tools.AlgBuilder; import org.polypheny.db.transaction.Statement; import org.polypheny.db.transaction.Transaction; @@ -1213,7 +1214,7 @@ public > Object getTableStatistic( long schemaId, long t /** * This method returns the number of rows for a given table, which is used in - * {@link org.polypheny.db.schema.impl.AbstractTable#getStatistic()} to update the statistics. + * {@link AbstractEntity#getStatistic()} to update the statistics. * * @param tableId of the table * @return the number of rows of a given table diff --git a/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/CottontailTable.java b/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/CottontailEntity.java similarity index 91% rename from plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/CottontailTable.java rename to plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/CottontailEntity.java index b2525c36d9..3693589e5f 100644 --- a/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/CottontailTable.java +++ b/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/CottontailEntity.java @@ -16,7 +16,6 @@ package org.polypheny.db.adapter.cottontail; -import java.util.Collection; import java.util.List; import lombok.Getter; import org.apache.calcite.linq4j.Enumerator; @@ -24,7 +23,7 @@ import org.polypheny.db.adapter.DataContext; import org.polypheny.db.adapter.cottontail.algebra.CottontailScan; import org.polypheny.db.adapter.cottontail.enumberable.CottontailQueryEnumerable; -import org.polypheny.db.adapter.java.AbstractQueryableTable; +import org.polypheny.db.adapter.java.AbstractQueryableEntity; import org.polypheny.db.adapter.java.JavaTypeFactory; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.core.Modify; @@ -40,9 +39,9 @@ import org.polypheny.db.plan.Convention; import org.polypheny.db.prepare.Prepare.CatalogReader; import org.polypheny.db.rex.RexNode; -import org.polypheny.db.schema.ModifiableTable; +import org.polypheny.db.schema.ModifiableEntity; import org.polypheny.db.schema.SchemaPlus; -import org.polypheny.db.schema.TranslatableTable; +import org.polypheny.db.schema.TranslatableEntity; import org.polypheny.db.schema.impl.AbstractTableQueryable; import 
org.vitrivr.cottontail.grpc.CottontailGrpc.EntityName; import org.vitrivr.cottontail.grpc.CottontailGrpc.From; @@ -53,7 +52,7 @@ import org.vitrivr.cottontail.grpc.CottontailGrpc.SchemaName; -public class CottontailTable extends AbstractQueryableTable implements TranslatableTable, ModifiableTable { +public class CottontailEntity extends AbstractQueryableEntity implements TranslatableEntity, ModifiableEntity { private AlgProtoDataType protoRowType; private CottontailSchema cottontailSchema; @@ -67,12 +66,10 @@ public class CottontailTable extends AbstractQueryableTable implements Translata private final String physicalTableName; private final List physicalColumnNames; - private final String logicalSchemaName; - private final String logicalTableName; private final List logicalColumnNames; - protected CottontailTable( + protected CottontailEntity( CottontailSchema cottontailSchema, String logicalSchemaName, String logicalTableName, @@ -87,13 +84,11 @@ protected CottontailTable( this.cottontailSchema = cottontailSchema; this.protoRowType = protoRowType; - this.logicalSchemaName = logicalSchemaName; - this.logicalTableName = logicalTableName; this.logicalColumnNames = logicalColumnNames; this.physicalSchemaName = physicalSchemaName; this.physicalTableName = physicalTableName; this.physicalColumnNames = physicalColumnNames; - this.tableId = tableId; + this.id = tableId; this.entity = EntityName.newBuilder() .setName( this.physicalTableName ) @@ -185,14 +180,14 @@ public CottontailConvention getUnderlyingConvention() { private class CottontailTableQueryable extends AbstractTableQueryable { public CottontailTableQueryable( DataContext dataContext, SchemaPlus schema, String tableName ) { - super( dataContext, schema, CottontailTable.this, tableName ); + super( dataContext, schema, CottontailEntity.this, tableName ); } @Override public Enumerator enumerator() { final JavaTypeFactory typeFactory = dataContext.getTypeFactory(); - final CottontailTable cottontailTable = (CottontailTable) this.table; + final CottontailEntity cottontailTable = (CottontailEntity) this.table; final long txId = cottontailTable.cottontailSchema.getWrapper().beginOrContinue( this.dataContext.getStatement().getTransaction() ); final Query query = Query.newBuilder() .setFrom( From.newBuilder().setScan( Scan.newBuilder().setEntity( cottontailTable.entity ) ).build() ) diff --git a/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/CottontailPlugin.java b/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/CottontailPlugin.java index bf38812ca8..e6cd075c80 100644 --- a/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/CottontailPlugin.java +++ b/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/CottontailPlugin.java @@ -52,9 +52,9 @@ import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; import org.polypheny.db.catalog.entity.CatalogTable; import org.polypheny.db.prepare.Context; -import org.polypheny.db.schema.Schema; +import org.polypheny.db.schema.Entity; +import org.polypheny.db.schema.Namespace; import org.polypheny.db.schema.SchemaPlus; -import org.polypheny.db.schema.Table; import org.polypheny.db.transaction.PolyXid; import org.polypheny.db.type.PolyType; import org.polypheny.db.type.PolyTypeFactoryImpl; @@ -207,13 +207,13 @@ public CottontailStore( int storeId, String uniqueName, Map sett @Override - public void createNewSchema( SchemaPlus rootSchema, String name ) { - this.currentSchema = 
CottontailSchema.create( rootSchema, name, this.wrapper, this ); + public void createNewSchema( SchemaPlus rootSchema, String name, Long id ) { + this.currentSchema = CottontailSchema.create( id, rootSchema, name, this.wrapper, this ); } @Override - public Table createTableSchema( CatalogTable combinedTable, List columnPlacementsOnStore, CatalogPartitionPlacement partitionPlacement ) { + public Entity createTableSchema( CatalogTable combinedTable, List columnPlacementsOnStore, CatalogPartitionPlacement partitionPlacement ) { final AlgDataTypeFactory typeFactory = new PolyTypeFactoryImpl( AlgDataTypeSystem.DEFAULT ); final AlgDataTypeFactory.Builder fieldInfo = typeFactory.builder(); List logicalColumnNames = new LinkedList<>(); @@ -244,7 +244,7 @@ public Table createTableSchema( CatalogTable combinedTable, List tableMap; + private final Map tableMap; private final Map physicalToLogicalTableNameMap; private final CottontailStore cottontailStore; @@ -54,12 +55,14 @@ public class CottontailSchema extends AbstractSchema { private CottontailSchema( + long id, @NonNull CottontailWrapper wrapper, CottontailConvention convention, - Map tableMap, + Map tableMap, Map physicalToLogicalTableNameMap, CottontailStore cottontailStore, String name ) { + super( id ); this.wrapper = wrapper; this.convention = convention; this.tableMap = tableMap; @@ -71,10 +74,12 @@ private CottontailSchema( public CottontailSchema( + long id, CottontailWrapper wrapper, CottontailConvention convention, CottontailStore cottontailStore, String name ) { + super( id ); this.wrapper = wrapper; this.convention = convention; this.cottontailStore = cottontailStore; @@ -86,6 +91,7 @@ public CottontailSchema( public static CottontailSchema create( + Long id, SchemaPlus parentSchema, String name, CottontailWrapper wrapper, @@ -93,7 +99,7 @@ public static CottontailSchema create( ) { final Expression expression = Schemas.subSchemaExpression( parentSchema, name, CottontailSchema.class ); final CottontailConvention convention = CottontailConvention.of( name, expression ); - return new CottontailSchema( wrapper, convention, cottontailStore, name ); + return new CottontailSchema( id, wrapper, convention, cottontailStore, name ); } @@ -109,8 +115,9 @@ public boolean isMutable() { @Override - public Schema snapshot( SchemaVersion version ) { + public Namespace snapshot( SchemaVersion version ) { return new CottontailSchema( + this.id, this.wrapper, this.convention, this.tableMap, @@ -127,7 +134,7 @@ public Expression getExpression( SchemaPlus parentSchema, String name ) { @Override - protected Map getTableMap() { + protected Map getTableMap() { return ImmutableMap.copyOf( this.tableMap ); } diff --git a/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/algebra/CottontailAlg.java b/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/algebra/CottontailAlg.java index 0e1fd76e81..97c12a4aba 100644 --- a/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/algebra/CottontailAlg.java +++ b/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/algebra/CottontailAlg.java @@ -19,7 +19,7 @@ import org.apache.calcite.linq4j.tree.BlockBuilder; import org.apache.calcite.linq4j.tree.Expression; import org.apache.calcite.linq4j.tree.ParameterExpression; -import org.polypheny.db.adapter.cottontail.CottontailTable; +import org.polypheny.db.adapter.cottontail.CottontailEntity; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.plan.AlgOptEntity; @@ 
-50,7 +50,7 @@ class CottontailImplementContext { public String tableName; public AlgOptEntity table; - public CottontailTable cottontailTable; + public CottontailEntity cottontailTable; public Expression filterBuilder; diff --git a/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/algebra/CottontailScan.java b/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/algebra/CottontailScan.java index d6458adc7e..fb2047313a 100644 --- a/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/algebra/CottontailScan.java +++ b/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/algebra/CottontailScan.java @@ -19,7 +19,7 @@ import java.util.List; import org.polypheny.db.adapter.cottontail.CottontailConvention; -import org.polypheny.db.adapter.cottontail.CottontailTable; +import org.polypheny.db.adapter.cottontail.CottontailEntity; import org.polypheny.db.adapter.cottontail.algebra.CottontailAlg.CottontailImplementContext.QueryType; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.core.Scan; @@ -33,10 +33,10 @@ public class CottontailScan extends Scan implements CottontailAlg { - protected final CottontailTable cottontailTable; + protected final CottontailEntity cottontailTable; - public CottontailScan( AlgOptCluster cluster, AlgOptEntity table, CottontailTable cottontailTable, AlgTraitSet traitSet, CottontailConvention cottontailConvention ) { + public CottontailScan( AlgOptCluster cluster, AlgOptEntity table, CottontailEntity cottontailTable, AlgTraitSet traitSet, CottontailConvention cottontailConvention ) { super( cluster, traitSet.replace( cottontailConvention ), table ); this.cottontailTable = cottontailTable; } diff --git a/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/algebra/CottontailTableModify.java b/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/algebra/CottontailTableModify.java index 28fe9dc15d..0cac3ab142 100644 --- a/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/algebra/CottontailTableModify.java +++ b/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/algebra/CottontailTableModify.java @@ -27,7 +27,7 @@ import org.apache.calcite.linq4j.tree.Expressions; import org.apache.calcite.linq4j.tree.NewExpression; import org.apache.calcite.linq4j.tree.ParameterExpression; -import org.polypheny.db.adapter.cottontail.CottontailTable; +import org.polypheny.db.adapter.cottontail.CottontailEntity; import org.polypheny.db.adapter.cottontail.algebra.CottontailAlg.CottontailImplementContext.QueryType; import org.polypheny.db.adapter.cottontail.util.CottontailTypeUtil; import org.polypheny.db.algebra.AbstractAlgNode; @@ -52,7 +52,7 @@ public class CottontailTableModify extends Modify implements CottontailAlg { - public final CottontailTable cottontailTable; + public final CottontailEntity cottontailTable; /** @@ -84,7 +84,7 @@ public CottontailTableModify( List sourceExpressionList, boolean flattened ) { super( cluster, traitSet, table, catalogReader, input, operation, updateColumnList, sourceExpressionList, flattened ); - this.cottontailTable = table.unwrap( CottontailTable.class ); + this.cottontailTable = table.unwrap( CottontailEntity.class ); } diff --git a/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/rules/CottontailTableModificationRule.java 
b/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/rules/CottontailTableModificationRule.java index 0f65bfe62c..7cabc6b275 100644 --- a/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/rules/CottontailTableModificationRule.java +++ b/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/rules/CottontailTableModificationRule.java @@ -18,7 +18,7 @@ import org.polypheny.db.adapter.cottontail.CottontailConvention; -import org.polypheny.db.adapter.cottontail.CottontailTable; +import org.polypheny.db.adapter.cottontail.CottontailEntity; import org.polypheny.db.adapter.cottontail.algebra.CottontailTableModify; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.UnsupportedFromInsertShuttle; @@ -28,7 +28,7 @@ import org.polypheny.db.plan.AlgOptRuleCall; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.plan.Convention; -import org.polypheny.db.schema.ModifiableTable; +import org.polypheny.db.schema.ModifiableEntity; import org.polypheny.db.tools.AlgBuilderFactory; @@ -47,11 +47,11 @@ private static boolean supports( Modify modify ) { @Override public boolean matches( AlgOptRuleCall call ) { final Modify modify = call.alg( 0 ); - if ( modify.getTable().unwrap( CottontailTable.class ) == null ) { + if ( modify.getTable().unwrap( CottontailEntity.class ) == null ) { return false; } - if ( !modify.getTable().unwrap( CottontailTable.class ).getUnderlyingConvention().equals( this.out ) ) { + if ( !modify.getTable().unwrap( CottontailEntity.class ).getUnderlyingConvention().equals( this.out ) ) { return false; } return modify.getOperation() != Operation.MERGE; @@ -62,12 +62,12 @@ public boolean matches( AlgOptRuleCall call ) { public AlgNode convert( AlgNode alg ) { final Modify modify = (Modify) alg; - final ModifiableTable modifiableTable = modify.getTable().unwrap( ModifiableTable.class ); + final ModifiableEntity modifiableTable = modify.getTable().unwrap( ModifiableEntity.class ); if ( modifiableTable == null ) { return null; } - if ( modify.getTable().unwrap( CottontailTable.class ) == null ) { + if ( modify.getTable().unwrap( CottontailEntity.class ) == null ) { return null; } diff --git a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvFilterableTable.java b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvFilterableTable.java index 6103896ea1..722db6fe2a 100644 --- a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvFilterableTable.java +++ b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvFilterableTable.java @@ -33,6 +33,8 @@ package org.polypheny.db.adapter.csv; +import java.util.List; +import java.util.concurrent.atomic.AtomicBoolean; import org.apache.calcite.linq4j.AbstractEnumerable; import org.apache.calcite.linq4j.Enumerable; import org.apache.calcite.linq4j.Enumerator; @@ -43,19 +45,16 @@ import org.polypheny.db.rex.RexInputRef; import org.polypheny.db.rex.RexLiteral; import org.polypheny.db.rex.RexNode; -import org.polypheny.db.schema.FilterableTable; +import org.polypheny.db.schema.FilterableEntity; import org.polypheny.db.util.Source; -import java.util.List; -import java.util.concurrent.atomic.AtomicBoolean; - /** * Table based on a CSV file that can implement simple filtering. * - * It implements the {@link FilterableTable} interface, so Polypheny-DB gets data by calling the {@link #scan(DataContext, List)} method. 
+ * It implements the {@link FilterableEntity} interface, so Polypheny-DB gets data by calling the {@link #scan(DataContext, List)} method. */ -public class CsvFilterableTable extends CsvTable implements FilterableTable { +public class CsvFilterableTable extends CsvTable implements FilterableEntity { /** * Creates a CsvFilterableTable. diff --git a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvScannableTable.java b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvScannableTable.java index 5dabbe2b5e..7f512de60b 100644 --- a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvScannableTable.java +++ b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvScannableTable.java @@ -33,25 +33,24 @@ package org.polypheny.db.adapter.csv; +import java.util.List; +import java.util.concurrent.atomic.AtomicBoolean; import org.apache.calcite.linq4j.AbstractEnumerable; import org.apache.calcite.linq4j.Enumerable; import org.apache.calcite.linq4j.Enumerator; import org.polypheny.db.adapter.DataContext; import org.polypheny.db.algebra.type.AlgProtoDataType; -import org.polypheny.db.schema.ScannableTable; +import org.polypheny.db.schema.ScannableEntity; import org.polypheny.db.util.Source; -import java.util.List; -import java.util.concurrent.atomic.AtomicBoolean; - /** * Table based on a CSV file. *
- * It implements the {@link ScannableTable} interface, so Polypheny-DB gets data by calling
+ * It implements the {@link ScannableEntity} interface, so Polypheny-DB gets data by calling
  * the {@link #scan(DataContext)} method.
  */
-public class CsvScannableTable extends CsvTable implements ScannableTable {
+public class CsvScannableTable extends CsvTable implements ScannableEntity {
 
     /**
      * Creates a CsvScannableTable.
diff --git a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSchema.java b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSchema.java
index cc584cb412..65c35197d9 100644
--- a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSchema.java
+++ b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSchema.java
@@ -33,33 +33,41 @@
 package org.polypheny.db.adapter.csv;
 
-import org.polypheny.db.algebra.type.*;
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import org.polypheny.db.algebra.type.AlgDataType;
+import org.polypheny.db.algebra.type.AlgDataTypeFactory;
+import org.polypheny.db.algebra.type.AlgDataTypeImpl;
+import org.polypheny.db.algebra.type.AlgDataTypeSystem;
+import org.polypheny.db.algebra.type.AlgProtoDataType;
 import org.polypheny.db.catalog.Catalog;
 import org.polypheny.db.catalog.entity.CatalogColumn;
 import org.polypheny.db.catalog.entity.CatalogColumnPlacement;
 import org.polypheny.db.catalog.entity.CatalogPartitionPlacement;
 import org.polypheny.db.catalog.entity.CatalogTable;
-import org.polypheny.db.schema.Table;
-import org.polypheny.db.schema.impl.AbstractSchema;
+import org.polypheny.db.schema.Entity;
+import org.polypheny.db.schema.Namespace.Schema;
+import org.polypheny.db.schema.impl.AbstractNamespace;
 import org.polypheny.db.type.PolyType;
 import org.polypheny.db.type.PolyTypeFactoryImpl;
 import org.polypheny.db.util.Source;
 import org.polypheny.db.util.Sources;
 import org.polypheny.db.util.Util;
-import java.net.MalformedURLException;
-import java.net.URL;
-import java.util.*;
-
 
 /**
  * Schema mapped onto a directory of CSV files. Each table in the schema is a CSV file in that directory.
  */
-public class CsvSchema extends AbstractSchema {
+public class CsvSchema extends AbstractNamespace implements Schema {
 
     private final URL directoryUrl;
     private final CsvTable.Flavor flavor;
-    private Map tableMap = new HashMap<>();
+    private final Map tableMap = new HashMap<>();
 
     /**
@@ -68,14 +76,14 @@ public class CsvSchema extends AbstractSchema {
      * @param directoryUrl Directory that holds {@code .csv} files
      * @param flavor Whether to instantiate flavor tables that undergo query optimization
      */
-    public CsvSchema( URL directoryUrl, CsvTable.Flavor flavor ) {
-        super();
+    public CsvSchema( long id, URL directoryUrl, CsvTable.Flavor flavor ) {
+        super( id );
         this.directoryUrl = directoryUrl;
         this.flavor = flavor;
     }
 
 
-    public Table createCsvTable( CatalogTable catalogTable, List columnPlacementsOnStore, CsvSource csvSource, CatalogPartitionPlacement partitionPlacement ) {
+    public Entity createCsvTable( CatalogTable catalogTable, List columnPlacementsOnStore, CsvSource csvSource, CatalogPartitionPlacement partitionPlacement ) {
         final AlgDataTypeFactory typeFactory = new PolyTypeFactoryImpl( AlgDataTypeSystem.DEFAULT );
         final AlgDataTypeFactory.Builder fieldInfo = typeFactory.builder();
         List fieldTypes = new LinkedList<>();
@@ -106,7 +114,7 @@ public Table createCsvTable( CatalogTable catalogTable, List
 
 
-    public Map getTableMap() {
+    public Map getTableMap() {
         return new HashMap<>( tableMap );
     }
 
diff --git a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java
index fc9f821aae..4c51e7cebc 100644
--- a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java
+++ b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java
@@ -47,9 +47,9 @@
 import org.polypheny.db.information.InformationGroup;
 import org.polypheny.db.information.InformationTable;
 import org.polypheny.db.prepare.Context;
-import org.polypheny.db.schema.Schema;
+import org.polypheny.db.schema.Entity;
+import org.polypheny.db.schema.Namespace;
 import org.polypheny.db.schema.SchemaPlus;
-import org.polypheny.db.schema.Table;
 import org.polypheny.db.transaction.PolyXid;
 import org.polypheny.db.type.PolyType;
 import org.polypheny.db.util.Source;
@@ -127,19 +127,19 @@ public T parseSetting( String key, Class clazz ) {
 
 
     @Override
-    public void createNewSchema( SchemaPlus rootSchema, String name ) {
-        currentSchema = new CsvSchema( csvDir, Flavor.SCANNABLE );
+    public void createNewSchema( SchemaPlus rootSchema, String name, Long id ) {
+        currentSchema = new CsvSchema( id, csvDir, Flavor.SCANNABLE );
     }
 
 
     @Override
-    public Table createTableSchema( CatalogTable catalogTable, List columnPlacementsOnStore, CatalogPartitionPlacement partitionPlacement ) {
+    public Entity createTableSchema( CatalogTable catalogTable, List columnPlacementsOnStore, CatalogPartitionPlacement partitionPlacement ) {
         return currentSchema.createCsvTable( catalogTable, columnPlacementsOnStore, this, partitionPlacement );
     }
 
 
     @Override
-    public Schema getCurrentSchema() {
+    public Namespace getCurrentSchema() {
         return currentSchema;
     }
 
diff --git a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvTable.java b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvTable.java
index 0af8665fc9..0d4177cd9e 100644
--- a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvTable.java
+++ b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvTable.java
@@ -33,21 +33,21 @@
 package org.polypheny.db.adapter.csv;
 
+import java.util.ArrayList;
+import java.util.List;
 import org.polypheny.db.adapter.java.JavaTypeFactory;
 import org.polypheny.db.algebra.type.AlgDataType;
 import org.polypheny.db.algebra.type.AlgDataTypeFactory;
 import org.polypheny.db.algebra.type.AlgProtoDataType;
-import org.polypheny.db.schema.impl.AbstractTable;
+import org.polypheny.db.schema.Entity.Table;
+import org.polypheny.db.schema.impl.AbstractEntity;
 import org.polypheny.db.util.Source;
 
-import java.util.ArrayList;
-import java.util.List;
-
 /**
  * Base class for table that reads CSV files.
  */
-public abstract class CsvTable extends AbstractTable {
+public abstract class CsvTable extends AbstractEntity implements Table {
 
     protected final Source source;
     protected final AlgProtoDataType protoRowType;
@@ -65,7 +65,7 @@ public abstract class CsvTable extends AbstractTable {
         this.fieldTypes = fieldTypes;
         this.fields = fields;
         this.csvSource = csvSource;
-        this.tableId = tableId;
+        this.id = tableId;
     }
 
diff --git a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvTranslatableTable.java b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvTranslatableTable.java
index 0b4fd3cd22..bd0af66996 100644
--- a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvTranslatableTable.java
+++ b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvTranslatableTable.java
@@ -47,17 +47,17 @@
 import org.polypheny.db.plan.AlgOptEntity;
 import org.polypheny.db.plan.AlgOptEntity.ToAlgContext;
 import org.polypheny.db.plan.AlgTraitSet;
-import org.polypheny.db.schema.QueryableTable;
+import org.polypheny.db.schema.QueryableEntity;
 import org.polypheny.db.schema.SchemaPlus;
 import org.polypheny.db.schema.Schemas;
-import org.polypheny.db.schema.TranslatableTable;
+import org.polypheny.db.schema.TranslatableEntity;
 import org.polypheny.db.util.Source;
 
 
 /**
  * Table based on a CSV file.
  */
-public class CsvTranslatableTable extends CsvTable implements QueryableTable, TranslatableTable {
+public class CsvTranslatableTable extends CsvTable implements QueryableEntity, TranslatableEntity {
 
     /**
      * Creates a CsvTable.
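
Note: the CSV hunks above establish the shape that every adapter below is migrated to. A schema now extends AbstractNamespace, passes the newly mandatory namespace id up via super( id ), implements the Namespace.Schema marker interface, and hands out Entity where it previously handed out Table. A minimal sketch of that shape, assuming the supertypes and String-keyed table maps shown in the hunks (ExampleSchema and its members are illustrative and not part of this patch):

    // Sketch only: a hypothetical adapter namespace following the refactored API.
    import java.util.HashMap;
    import java.util.Map;
    import org.polypheny.db.schema.Entity;
    import org.polypheny.db.schema.Namespace.Schema;
    import org.polypheny.db.schema.impl.AbstractNamespace;

    public class ExampleSchema extends AbstractNamespace implements Schema {

        // Entity replaces Table as the value type the schema hands to the planner.
        private final Map<String, Entity> tableMap = new HashMap<>();

        public ExampleSchema( long id ) {
            super( id ); // the namespace id is new and is stored by AbstractNamespace
        }

        @Override
        protected Map<String, Entity> getTableMap() {
            return new HashMap<>( tableMap ); // defensive copy, as CsvSchema does
        }
    }

The same three-part change (AbstractNamespace supertype, id-taking constructor, Entity-valued table map) repeats in the Druid, Elasticsearch, file, and Geode adapters below.
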
diff --git a/plugins/druid-adapter/src/main/java/org/polypheny/db/adapter/druid/DruidConnectionImpl.java b/plugins/druid-adapter/src/main/java/org/polypheny/db/adapter/druid/DruidConnectionImpl.java
index 01b53798af..9dc992ca13 100644
--- a/plugins/druid-adapter/src/main/java/org/polypheny/db/adapter/druid/DruidConnectionImpl.java
+++ b/plugins/druid-adapter/src/main/java/org/polypheny/db/adapter/druid/DruidConnectionImpl.java
@@ -556,7 +556,7 @@ void metadata( String dataSourceName, String timestampColumnName, List
             fieldBuilder.put( timestampColumnName, PolyType.TIMESTAMP_WITH_LOCAL_TIME_ZONE );
             for ( JsonSegmentMetadata o : list ) {
                 for ( Map.Entry entry : o.columns.entrySet() ) {
-                    if ( entry.getKey().equals( DruidTable.DEFAULT_TIMESTAMP_COLUMN ) ) {
+                    if ( entry.getKey().equals( DruidEntity.DEFAULT_TIMESTAMP_COLUMN ) ) {
                         // timestamp column
                         continue;
                     }
diff --git a/plugins/druid-adapter/src/main/java/org/polypheny/db/adapter/druid/DruidDateTimeUtils.java b/plugins/druid-adapter/src/main/java/org/polypheny/db/adapter/druid/DruidDateTimeUtils.java
index 517f8f5fe9..5f8f552b95 100644
--- a/plugins/druid-adapter/src/main/java/org/polypheny/db/adapter/druid/DruidDateTimeUtils.java
+++ b/plugins/druid-adapter/src/main/java/org/polypheny/db/adapter/druid/DruidDateTimeUtils.java
@@ -95,14 +95,14 @@ public static List createInterval( RexNode e ) {
     protected static List toInterval( List> ranges ) {
         List intervals = Lists.transform( ranges, range -> {
             if ( !range.hasLowerBound() && !range.hasUpperBound() ) {
-                return DruidTable.DEFAULT_INTERVAL;
+                return DruidEntity.DEFAULT_INTERVAL;
             }
             long start = range.hasLowerBound()
                     ? range.lowerEndpoint().longValue()
-                    : DruidTable.DEFAULT_INTERVAL.getStartMillis();
+                    : DruidEntity.DEFAULT_INTERVAL.getStartMillis();
             long end = range.hasUpperBound()
                     ? range.upperEndpoint().longValue()
-                    : DruidTable.DEFAULT_INTERVAL.getEndMillis();
+                    : DruidEntity.DEFAULT_INTERVAL.getEndMillis();
             if ( range.hasLowerBound() && range.lowerBoundType() == BoundType.OPEN ) {
                 start++;
             }
diff --git a/plugins/druid-adapter/src/main/java/org/polypheny/db/adapter/druid/DruidTable.java b/plugins/druid-adapter/src/main/java/org/polypheny/db/adapter/druid/DruidEntity.java
similarity index 87%
rename from plugins/druid-adapter/src/main/java/org/polypheny/db/adapter/druid/DruidTable.java
rename to plugins/druid-adapter/src/main/java/org/polypheny/db/adapter/druid/DruidEntity.java
index e160eeaa8a..1d682d3abc 100644
--- a/plugins/druid-adapter/src/main/java/org/polypheny/db/adapter/druid/DruidTable.java
+++ b/plugins/druid-adapter/src/main/java/org/polypheny/db/adapter/druid/DruidEntity.java
@@ -63,10 +63,10 @@
 import org.polypheny.db.plan.AlgOptEntity;
 import org.polypheny.db.plan.AlgOptEntity.ToAlgContext;
 import org.polypheny.db.plan.AlgTraitSet;
+import org.polypheny.db.schema.Entity;
 import org.polypheny.db.schema.ModelTraitDef;
-import org.polypheny.db.schema.Table;
-import org.polypheny.db.schema.TranslatableTable;
-import org.polypheny.db.schema.impl.AbstractTable;
+import org.polypheny.db.schema.TranslatableEntity;
+import org.polypheny.db.schema.impl.AbstractEntity;
 import org.polypheny.db.sql.language.SqlCall;
 import org.polypheny.db.sql.language.SqlNode;
 import org.polypheny.db.sql.language.SqlSelectKeyword;
@@ -76,7 +76,7 @@
 
 /**
  * Table mapped onto a Druid table.
  */
-public class DruidTable extends AbstractTable implements TranslatableTable {
+public class DruidEntity extends AbstractEntity implements TranslatableEntity {
 
     public static final String DEFAULT_TIMESTAMP_COLUMN = "__time";
     public static final Interval DEFAULT_INTERVAL = new Interval( new DateTime( "1900-01-01", ISOChronology.getInstanceUTC() ), new DateTime( "3000-01-01", ISOChronology.getInstanceUTC() ) );
@@ -101,7 +101,7 @@ public class DruidTable extends AbstractTable implements TranslatableTable {
      * @param intervals Default interval if query does not constrain the time, or null
      * @param timestampFieldName Name of the column that contains the time
      */
-    public DruidTable( DruidSchema schema, String dataSource, AlgProtoDataType protoRowType, Set metricFieldNames, String timestampFieldName, List intervals, Map> complexMetrics, Map allFields ) {
+    public DruidEntity( DruidSchema schema, String dataSource, AlgProtoDataType protoRowType, Set metricFieldNames, String timestampFieldName, List intervals, Map> complexMetrics, Map allFields ) {
         this.timestampFieldName = Objects.requireNonNull( timestampFieldName );
         this.schema = Objects.requireNonNull( schema );
         this.dataSource = Objects.requireNonNull( dataSource );
@@ -114,7 +114,7 @@ public DruidTable( DruidSchema schema, String dataSource, AlgProtoDataType proto
 
 
     /**
-     * Creates a {@link DruidTable} by using the given {@link DruidConnectionImpl} to populate the other parameters. The parameters may be partially populated.
+     * Creates a {@link DruidEntity} by using the given {@link DruidConnectionImpl} to populate the other parameters. The parameters may be partially populated.
      *
      * @param druidSchema Druid schema
      * @param dataSourceName Data source name in Druid, also table name
@@ -126,17 +126,17 @@ public DruidTable( DruidSchema schema, String dataSource, AlgProtoDataType proto
      * @param complexMetrics List of complex metrics in Druid (thetaSketch, hyperUnique)
      * @return A table
      */
-    static Table create( DruidSchema druidSchema, String dataSourceName, List intervals, Map fieldMap, Set metricNameSet, String timestampColumnName, DruidConnectionImpl connection, Map> complexMetrics ) {
+    static Entity create( DruidSchema druidSchema, String dataSourceName, List intervals, Map fieldMap, Set metricNameSet, String timestampColumnName, DruidConnectionImpl connection, Map> complexMetrics ) {
         assert connection != null;
         connection.metadata( dataSourceName, timestampColumnName, intervals, fieldMap, metricNameSet, complexMetrics );
-        return DruidTable.create( druidSchema, dataSourceName, intervals, fieldMap, metricNameSet, timestampColumnName, complexMetrics );
+        return DruidEntity.create( druidSchema, dataSourceName, intervals, fieldMap, metricNameSet, timestampColumnName, complexMetrics );
     }
 
 
     /**
-     * Creates a {@link DruidTable} by copying the given parameters.
+     * Creates a {@link DruidEntity} by copying the given parameters.
      *
      * @param druidSchema Druid schema
      * @param dataSourceName Data source name in Druid, also table name
@@ -147,9 +147,9 @@ static Table create( DruidSchema druidSchema, String dataSourceName, List
      */
-    static Table create( DruidSchema druidSchema, String dataSourceName, List intervals, Map fieldMap, Set metricNameSet, String timestampColumnName, Map> complexMetrics ) {
+    static Entity create( DruidSchema druidSchema, String dataSourceName, List intervals, Map fieldMap, Set metricNameSet, String timestampColumnName, Map> complexMetrics ) {
         final ImmutableMap fields = ImmutableMap.copyOf( fieldMap );
-        return new DruidTable( druidSchema, dataSourceName, new MapRelProtoDataType( fields, timestampColumnName ), ImmutableSet.copyOf( metricNameSet ), timestampColumnName, intervals, complexMetrics, fieldMap );
+        return new DruidEntity( druidSchema, dataSourceName, new MapRelProtoDataType( fields, timestampColumnName ), ImmutableSet.copyOf( metricNameSet ), timestampColumnName, intervals, complexMetrics, fieldMap );
     }
 
@@ -265,7 +265,7 @@ private static class MapRelProtoDataType implements AlgProtoDataType {
 
         MapRelProtoDataType( ImmutableMap fields ) {
             this.fields = fields;
-            this.timestampColumn = DruidTable.DEFAULT_TIMESTAMP_COLUMN;
+            this.timestampColumn = DruidEntity.DEFAULT_TIMESTAMP_COLUMN;
         }
 
diff --git a/plugins/druid-adapter/src/main/java/org/polypheny/db/adapter/druid/DruidExpressions.java b/plugins/druid-adapter/src/main/java/org/polypheny/db/adapter/druid/DruidExpressions.java
index 91559bc571..4dbba9f94f 100644
--- a/plugins/druid-adapter/src/main/java/org/polypheny/db/adapter/druid/DruidExpressions.java
+++ b/plugins/druid-adapter/src/main/java/org/polypheny/db/adapter/druid/DruidExpressions.java
@@ -126,7 +126,7 @@ public static String toDruidExpression( final RexNode rexNode, final AlgDataType
             return null;
         }
         if ( druidRel.getDruidTable().timestampFieldName.equals( columnName ) ) {
-            return DruidExpressions.fromColumn( DruidTable.DEFAULT_TIMESTAMP_COLUMN );
+            return DruidExpressions.fromColumn( DruidEntity.DEFAULT_TIMESTAMP_COLUMN );
         }
         return DruidExpressions.fromColumn( columnName );
     }
diff --git a/plugins/druid-adapter/src/main/java/org/polypheny/db/adapter/druid/DruidQuery.java b/plugins/druid-adapter/src/main/java/org/polypheny/db/adapter/druid/DruidQuery.java
index 890e7f9051..f74ecf367e 100644
--- a/plugins/druid-adapter/src/main/java/org/polypheny/db/adapter/druid/DruidQuery.java
+++ b/plugins/druid-adapter/src/main/java/org/polypheny/db/adapter/druid/DruidQuery.java
@@ -99,7 +99,7 @@
 import org.polypheny.db.rex.RexLiteral;
 import org.polypheny.db.rex.RexNode;
 import org.polypheny.db.runtime.Hook;
-import org.polypheny.db.schema.ScannableTable;
+import org.polypheny.db.schema.ScannableEntity;
 import org.polypheny.db.type.PolyType;
 import org.polypheny.db.type.PolyTypeFamily;
 import org.polypheny.db.util.ImmutableBitSet;
@@ -167,7 +167,7 @@ public class DruidQuery extends AbstractAlgNode implements BindableAlg {
 
     protected QuerySpec querySpec;
 
     final AlgOptEntity table;
-    final DruidTable druidTable;
+    final DruidEntity druidTable;
     final ImmutableList intervals;
     final ImmutableList algs;
 
     /**
@@ -193,7 +193,7 @@ public class DruidQuery extends AbstractAlgNode implements BindableAlg {
      * @param algs Internal relational expressions
      * @param converterOperatorMap mapping of Polypheny-DB Sql Operator to Druid Expression API.
      */
-    protected DruidQuery( AlgOptCluster cluster, AlgTraitSet traitSet, AlgOptEntity table, DruidTable druidTable, List intervals, List algs, Map converterOperatorMap ) {
+    protected DruidQuery( AlgOptCluster cluster, AlgTraitSet traitSet, AlgOptEntity table, DruidEntity druidTable, List intervals, List algs, Map converterOperatorMap ) {
         super( cluster, traitSet );
         this.table = table;
         this.druidTable = druidTable;
@@ -215,7 +215,7 @@ static boolean isValidSignature( String signature ) {
     /**
      * Creates a DruidQuery.
      */
-    public static DruidQuery create( AlgOptCluster cluster, AlgTraitSet traitSet, AlgOptEntity table, DruidTable druidTable, List algs ) {
+    public static DruidQuery create( AlgOptCluster cluster, AlgTraitSet traitSet, AlgOptEntity table, DruidEntity druidTable, List algs ) {
         final ImmutableMap.Builder mapBuilder = ImmutableMap.builder();
         for ( DruidSqlOperatorConverter converter : DEFAULT_OPERATORS_LIST ) {
             mapBuilder.put( converter.polyphenyDbOperator(), converter );
@@ -227,7 +227,7 @@ public static DruidQuery create( AlgOptCluster cluster, AlgTraitSet traitSet, Al
     /**
      * Creates a DruidQuery.
      */
-    public static DruidQuery create( AlgOptCluster cluster, AlgTraitSet traitSet, AlgOptEntity table, DruidTable druidTable, List algs, Map converterOperatorMap ) {
+    public static DruidQuery create( AlgOptCluster cluster, AlgTraitSet traitSet, AlgOptEntity table, DruidEntity druidTable, List algs, Map converterOperatorMap ) {
         return create( cluster, traitSet, table, druidTable, druidTable.intervals, algs, converterOperatorMap );
     }
 
@@ -235,7 +235,7 @@ public static DruidQuery create( AlgOptCluster cluster, AlgTraitSet traitSet, Al
     /**
      * Creates a DruidQuery.
      */
-    private static DruidQuery create( AlgOptCluster cluster, AlgTraitSet traitSet, AlgOptEntity table, DruidTable druidTable, List intervals, List algs, Map converterOperatorMap ) {
+    private static DruidQuery create( AlgOptCluster cluster, AlgTraitSet traitSet, AlgOptEntity table, DruidEntity druidTable, List intervals, List algs, Map converterOperatorMap ) {
         return new DruidQuery( cluster, traitSet, table, druidTable, intervals, algs, converterOperatorMap );
     }
 
@@ -396,7 +396,7 @@ protected static String extractColumnName( RexNode rexNode, AlgDataType rowType,
         }
         // Polypheny-DB has this un-direct renaming of timestampFieldName to native druid `__time`
         if ( query.getDruidTable().timestampFieldName.equals( columnName ) ) {
-            return DruidTable.DEFAULT_TIMESTAMP_COLUMN;
+            return DruidEntity.DEFAULT_TIMESTAMP_COLUMN;
         }
         return columnName;
     }
@@ -530,7 +530,7 @@ public AlgOptEntity getTable() {
     }
 
 
-    public DruidTable getDruidTable() {
+    public DruidEntity getDruidTable() {
         return druidTable;
     }
 
@@ -638,7 +638,7 @@ public Class getElementType() {
 
         @Override
         public Enumerable bind( DataContext dataContext ) {
-            return table.unwrap( ScannableTable.class ).scan( dataContext );
+            return table.unwrap( ScannableEntity.class ).scan( dataContext );
         }
 
diff --git a/plugins/druid-adapter/src/main/java/org/polypheny/db/adapter/druid/DruidSchema.java b/plugins/druid-adapter/src/main/java/org/polypheny/db/adapter/druid/DruidSchema.java
index 918a00f4cb..47c646ffb2 100644
--- a/plugins/druid-adapter/src/main/java/org/polypheny/db/adapter/druid/DruidSchema.java
+++ b/plugins/druid-adapter/src/main/java/org/polypheny/db/adapter/druid/DruidSchema.java
@@ -46,20 +46,21 @@
 import java.util.Map;
 import java.util.Objects;
 import java.util.Set;
-import org.polypheny.db.schema.Table;
-import org.polypheny.db.schema.impl.AbstractSchema;
+import org.polypheny.db.schema.Entity;
+import org.polypheny.db.schema.Namespace.Schema;
+import org.polypheny.db.schema.impl.AbstractNamespace;
 import org.polypheny.db.type.PolyType;
 
 
 /**
  * Schema mapped onto a Druid instance.
  */
-public class DruidSchema extends AbstractSchema {
+public class DruidSchema extends AbstractNamespace implements Schema {
 
     final String url;
     final String coordinatorUrl;
     private final boolean discoverTables;
-    private Map tableMap = null;
+    private Map tableMap = null;
 
     /**
@@ -69,7 +70,8 @@
      * @param coordinatorUrl URL of coordinator REST service, e.g. "http://localhost:8081"
      * @param discoverTables If true, ask Druid what tables exist; if false, only create tables explicitly in the model
      */
-    public DruidSchema( String url, String coordinatorUrl, boolean discoverTables ) {
+    public DruidSchema( long id, String url, String coordinatorUrl, boolean discoverTables ) {
+        super( id );
         this.url = Objects.requireNonNull( url );
         this.coordinatorUrl = Objects.requireNonNull( coordinatorUrl );
         this.discoverTables = discoverTables;
@@ -77,7 +79,7 @@ public DruidSchema( String url, String coordinatorUrl, boolean discoverTables )
 
     @Override
-    protected Map getTableMap() {
+    protected Map getTableMap() {
         if ( !discoverTables ) {
             return ImmutableMap.of();
         }
@@ -93,14 +95,14 @@ protected Map getTableMap() {
     }
 
 
-    private Table table( String tableName, DruidConnectionImpl connection ) {
+    private Entity table( String tableName, DruidConnectionImpl connection ) {
         final Map fieldMap = new LinkedHashMap<>();
         final Set metricNameSet = new LinkedHashSet<>();
         final Map> complexMetrics = new HashMap<>();
 
-        connection.metadata( tableName, DruidTable.DEFAULT_TIMESTAMP_COLUMN, null, fieldMap, metricNameSet, complexMetrics );
+        connection.metadata( tableName, DruidEntity.DEFAULT_TIMESTAMP_COLUMN, null, fieldMap, metricNameSet, complexMetrics );
 
-        return DruidTable.create( DruidSchema.this, tableName, null, fieldMap, metricNameSet, DruidTable.DEFAULT_TIMESTAMP_COLUMN, complexMetrics );
+        return DruidEntity.create( DruidSchema.this, tableName, null, fieldMap, metricNameSet, DruidEntity.DEFAULT_TIMESTAMP_COLUMN, complexMetrics );
     }
 
 }
diff --git a/plugins/druid-adapter/src/main/java/org/polypheny/db/adapter/druid/DruidTableFactory.java b/plugins/druid-adapter/src/main/java/org/polypheny/db/adapter/druid/DruidTableFactory.java
index 405df96e52..11871e0637 100644
--- a/plugins/druid-adapter/src/main/java/org/polypheny/db/adapter/druid/DruidTableFactory.java
+++ b/plugins/druid-adapter/src/main/java/org/polypheny/db/adapter/druid/DruidTableFactory.java
@@ -45,8 +45,8 @@
 import org.joda.time.Interval;
 import org.joda.time.chrono.ISOChronology;
 import org.polypheny.db.algebra.type.AlgDataType;
+import org.polypheny.db.schema.Entity;
 import org.polypheny.db.schema.SchemaPlus;
-import org.polypheny.db.schema.Table;
 import org.polypheny.db.schema.TableFactory;
 import org.polypheny.db.type.PolyType;
 import org.polypheny.db.util.Util;
@@ -69,7 +69,7 @@ private DruidTableFactory() {
 
     // name that is also the same name as a complex metric
     @Override
-    public Table create( SchemaPlus schema, String name, Map operand, AlgDataType rowType ) {
+    public Entity create( SchemaPlus schema, String name, Map operand, AlgDataType rowType ) {
         final DruidSchema druidSchema = schema.unwrap( DruidSchema.class );
         // If "dataSource" operand is present it overrides the table name.
         final String dataSource = (String) operand.get( "dataSource" );
@@ -99,7 +99,7 @@ public Table create( SchemaPlus schema, String name, Map operand, AlgDataType ro
                 timestampColumnType = PolyType.TIMESTAMP_WITH_LOCAL_TIME_ZONE;
             }
         } else {
-            timestampColumnName = DruidTable.DEFAULT_TIMESTAMP_COLUMN;
+            timestampColumnName = DruidEntity.DEFAULT_TIMESTAMP_COLUMN;
             timestampColumnType = PolyType.TIMESTAMP_WITH_LOCAL_TIME_ZONE;
         }
         fieldBuilder.put( timestampColumnName, timestampColumnType );
@@ -174,9 +174,9 @@ public Table create( SchemaPlus schema, String name, Map operand, AlgDataType ro
         if ( dimensionsRaw == null || metricsRaw == null ) {
             DruidConnectionImpl connection = new DruidConnectionImpl( druidSchema.url, druidSchema.url.replace( ":8082", ":8081" ) );
-            return DruidTable.create( druidSchema, dataSourceName, intervals, fieldBuilder, metricNameBuilder, timestampColumnName, connection, complexMetrics );
+            return DruidEntity.create( druidSchema, dataSourceName, intervals, fieldBuilder, metricNameBuilder, timestampColumnName, connection, complexMetrics );
         } else {
-            return DruidTable.create( druidSchema, dataSourceName, intervals, fieldBuilder, metricNameBuilder, timestampColumnName, complexMetrics );
+            return DruidEntity.create( druidSchema, dataSourceName, intervals, fieldBuilder, metricNameBuilder, timestampColumnName, complexMetrics );
         }
     }
 
diff --git a/plugins/druid-adapter/src/main/java/org/polypheny/db/adapter/druid/ExtractionDimensionSpec.java b/plugins/druid-adapter/src/main/java/org/polypheny/db/adapter/druid/ExtractionDimensionSpec.java
index 64627f17f9..73610a3f63 100644
--- a/plugins/druid-adapter/src/main/java/org/polypheny/db/adapter/druid/ExtractionDimensionSpec.java
+++ b/plugins/druid-adapter/src/main/java/org/polypheny/db/adapter/druid/ExtractionDimensionSpec.java
@@ -110,7 +110,7 @@ public void write( JsonGenerator generator ) throws IOException {
      */
     @Nullable
     public static Granularity toQueryGranularity( DimensionSpec dimensionSpec ) {
-        if ( !DruidTable.DEFAULT_TIMESTAMP_COLUMN.equals( dimensionSpec.getDimension() ) ) {
+        if ( !DruidEntity.DEFAULT_TIMESTAMP_COLUMN.equals( dimensionSpec.getDimension() ) ) {
            // Only __time column can be substituted by granularity
            return null;
         }
diff --git a/plugins/druid-adapter/src/test/java/org/polypheny/db/adapter/druid/DruidQueryFilterTest.java b/plugins/druid-adapter/src/test/java/org/polypheny/db/adapter/druid/DruidQueryFilterTest.java
index 8e3f49691c..c5f1157200 100644
--- a/plugins/druid-adapter/src/test/java/org/polypheny/db/adapter/druid/DruidQueryFilterTest.java
+++ b/plugins/druid-adapter/src/test/java/org/polypheny/db/adapter/druid/DruidQueryFilterTest.java
@@ -75,7 +75,7 @@ public void testSetup() {
         final PolyphenyDbConnectionConfig connectionConfigMock = Mockito.mock( PolyphenyDbConnectionConfig.class );
         Mockito.when( connectionConfigMock.timeZone() ).thenReturn( "UTC" );
         Mockito.when( druidQuery.getConnectionConfig() ).thenReturn( connectionConfigMock );
-        Mockito.when( druidQuery.getDruidTable() ).thenReturn( new DruidTable( Mockito.mock( DruidSchema.class ), "dataSource", null, ImmutableSet.of(), "timestamp", null, null, null ) );
+        Mockito.when( druidQuery.getDruidTable() ).thenReturn( new DruidEntity( Mockito.mock( DruidSchema.class ), "dataSource", null, ImmutableSet.of(), "timestamp", null, null, null ) );
     }
 
@@ -122,7 +122,7 @@ static class Fixture {
 
         final JavaTypeFactoryImpl typeFactory = new JavaTypeFactoryImpl( AlgDataTypeSystem.DEFAULT );
         final RexBuilder rexBuilder = new RexBuilder( typeFactory );
-        final DruidTable druidTable = new DruidTable( Mockito.mock( DruidSchema.class ), "dataSource", null, ImmutableSet.of(), "timestamp", null, null, null );
+        final DruidEntity druidTable = new DruidEntity( Mockito.mock( DruidSchema.class ), "dataSource", null, ImmutableSet.of(), "timestamp", null, null, null );
         final AlgDataType varcharType = typeFactory.createPolyType( PolyType.VARCHAR );
         final AlgDataType varcharRowType = typeFactory.builder().add( "dimensionName", null, varcharType ).build();
 
diff --git a/plugins/elasticsearch-adapter/src/main/java/org/polypheny/db/adapter/elasticsearch/ElasticsearchTable.java b/plugins/elasticsearch-adapter/src/main/java/org/polypheny/db/adapter/elasticsearch/ElasticsearchEntity.java
similarity index 97%
rename from plugins/elasticsearch-adapter/src/main/java/org/polypheny/db/adapter/elasticsearch/ElasticsearchTable.java
rename to plugins/elasticsearch-adapter/src/main/java/org/polypheny/db/adapter/elasticsearch/ElasticsearchEntity.java
index 9e48221a50..802e55dde9 100644
--- a/plugins/elasticsearch-adapter/src/main/java/org/polypheny/db/adapter/elasticsearch/ElasticsearchTable.java
+++ b/plugins/elasticsearch-adapter/src/main/java/org/polypheny/db/adapter/elasticsearch/ElasticsearchEntity.java
@@ -59,7 +59,7 @@
 import org.apache.calcite.linq4j.Queryable;
 import org.apache.calcite.linq4j.function.Function1;
 import org.polypheny.db.adapter.DataContext;
-import org.polypheny.db.adapter.java.AbstractQueryableTable;
+import org.polypheny.db.adapter.java.AbstractQueryableEntity;
 import org.polypheny.db.algebra.AlgFieldCollation;
 import org.polypheny.db.algebra.AlgNode;
 import org.polypheny.db.algebra.type.AlgDataType;
@@ -70,7 +70,7 @@
 import org.polypheny.db.plan.AlgTraitSet;
 import org.polypheny.db.schema.ModelTraitDef;
 import org.polypheny.db.schema.SchemaPlus;
-import org.polypheny.db.schema.TranslatableTable;
+import org.polypheny.db.schema.TranslatableEntity;
 import org.polypheny.db.schema.impl.AbstractTableQueryable;
 import org.polypheny.db.type.PolyType;
 
@@ -78,7 +78,7 @@
 /**
  * Table based on an Elasticsearch type.
  */
-public class ElasticsearchTable extends AbstractQueryableTable implements TranslatableTable {
+public class ElasticsearchEntity extends AbstractQueryableEntity implements TranslatableEntity {
 
     /**
      * Used for constructing (possibly nested) Elastic aggregation nodes.
@@ -95,7 +95,7 @@ public class ElasticsearchTable extends AbstractQueryableTable implements Transl
     /**
      * Creates an ElasticsearchTable.
      */
-    ElasticsearchTable( ElasticsearchTransport transport ) {
+    ElasticsearchEntity( ElasticsearchTransport transport ) {
         super( Object[].class );
         this.transport = Objects.requireNonNull( transport, "transport" );
         this.version = transport.version;
@@ -342,13 +342,13 @@ public AlgNode toAlg( ToAlgContext context, AlgOptEntity algOptEntity, AlgTraitS
 
     /**
-     * Implementation of {@link Queryable} based on a {@link ElasticsearchTable}.
+     * Implementation of {@link Queryable} based on a {@link ElasticsearchEntity}.
      *
      * @param element type
      */
     public static class ElasticsearchQueryable extends AbstractTableQueryable {
 
-        ElasticsearchQueryable( DataContext dataContext, SchemaPlus schema, ElasticsearchTable table, String tableName ) {
+        ElasticsearchQueryable( DataContext dataContext, SchemaPlus schema, ElasticsearchEntity table, String tableName ) {
             super( dataContext, schema, table, tableName );
         }
 
@@ -359,8 +359,8 @@ public Enumerator enumerator() {
         }
 
-        private ElasticsearchTable getTable() {
-            return (ElasticsearchTable) table;
+        private ElasticsearchEntity getTable() {
+            return (ElasticsearchEntity) table;
         }
 
diff --git a/plugins/elasticsearch-adapter/src/main/java/org/polypheny/db/adapter/elasticsearch/ElasticsearchMethod.java b/plugins/elasticsearch-adapter/src/main/java/org/polypheny/db/adapter/elasticsearch/ElasticsearchMethod.java
index 2c00586aeb..55b267f47d 100644
--- a/plugins/elasticsearch-adapter/src/main/java/org/polypheny/db/adapter/elasticsearch/ElasticsearchMethod.java
+++ b/plugins/elasticsearch-adapter/src/main/java/org/polypheny/db/adapter/elasticsearch/ElasticsearchMethod.java
@@ -46,7 +46,7 @@
  */
 enum ElasticsearchMethod {
 
-    ELASTICSEARCH_QUERYABLE_FIND( ElasticsearchTable.ElasticsearchQueryable.class,
+    ELASTICSEARCH_QUERYABLE_FIND( ElasticsearchEntity.ElasticsearchQueryable.class,
             "find",
             List.class, // ops - projections and other stuff
             List.class, // fields
diff --git a/plugins/elasticsearch-adapter/src/main/java/org/polypheny/db/adapter/elasticsearch/ElasticsearchRel.java b/plugins/elasticsearch-adapter/src/main/java/org/polypheny/db/adapter/elasticsearch/ElasticsearchRel.java
index f95615f13e..ed44e80bd0 100644
--- a/plugins/elasticsearch-adapter/src/main/java/org/polypheny/db/adapter/elasticsearch/ElasticsearchRel.java
+++ b/plugins/elasticsearch-adapter/src/main/java/org/polypheny/db/adapter/elasticsearch/ElasticsearchRel.java
@@ -108,7 +108,7 @@ class Implementor {
         Long fetch;
 
         AlgOptEntity table;
-        ElasticsearchTable elasticsearchTable;
+        ElasticsearchEntity elasticsearchTable;
 
 
         void add( String findOp ) {
diff --git a/plugins/elasticsearch-adapter/src/main/java/org/polypheny/db/adapter/elasticsearch/ElasticsearchScan.java b/plugins/elasticsearch-adapter/src/main/java/org/polypheny/db/adapter/elasticsearch/ElasticsearchScan.java
index 682a41603e..97b6cb1752 100644
--- a/plugins/elasticsearch-adapter/src/main/java/org/polypheny/db/adapter/elasticsearch/ElasticsearchScan.java
+++ b/plugins/elasticsearch-adapter/src/main/java/org/polypheny/db/adapter/elasticsearch/ElasticsearchScan.java
@@ -57,7 +57,7 @@
  */
 public class ElasticsearchScan extends Scan implements ElasticsearchRel {
 
-    private final ElasticsearchTable elasticsearchTable;
+    private final ElasticsearchEntity elasticsearchTable;
     private final AlgDataType projectRowType;
 
@@ -70,7 +70,7 @@ public class ElasticsearchScan extends Scan implements ElasticsearchRel {
      * @param elasticsearchTable Elasticsearch table
      * @param projectRowType Fields and types to project; null to project raw row
      */
-    ElasticsearchScan( AlgOptCluster cluster, AlgTraitSet traitSet, AlgOptEntity table, ElasticsearchTable elasticsearchTable, AlgDataType projectRowType ) {
+    ElasticsearchScan( AlgOptCluster cluster, AlgTraitSet traitSet, AlgOptEntity table, ElasticsearchEntity elasticsearchTable, AlgDataType projectRowType ) {
         super( cluster, traitSet, table );
         this.elasticsearchTable = Objects.requireNonNull( elasticsearchTable, "elasticsearchTable" );
         this.projectRowType = projectRowType;
diff --git a/plugins/elasticsearch-adapter/src/main/java/org/polypheny/db/adapter/elasticsearch/ElasticsearchSchema.java b/plugins/elasticsearch-adapter/src/main/java/org/polypheny/db/adapter/elasticsearch/ElasticsearchSchema.java
index 573dbc21cd..dcdce77455 100644
--- a/plugins/elasticsearch-adapter/src/main/java/org/polypheny/db/adapter/elasticsearch/ElasticsearchSchema.java
+++ b/plugins/elasticsearch-adapter/src/main/java/org/polypheny/db/adapter/elasticsearch/ElasticsearchSchema.java
@@ -50,8 +50,9 @@
 import java.util.Set;
 import org.elasticsearch.client.Response;
 import org.elasticsearch.client.RestClient;
-import org.polypheny.db.schema.Table;
-import org.polypheny.db.schema.impl.AbstractSchema;
+import org.polypheny.db.schema.Entity;
+import org.polypheny.db.schema.Namespace.Schema;
+import org.polypheny.db.schema.impl.AbstractNamespace;
 
 
 /**
@@ -59,7 +60,7 @@
  *
  * Each table in the schema is an ELASTICSEARCH type in that index.
  */
-public class ElasticsearchSchema extends AbstractSchema {
+public class ElasticsearchSchema extends AbstractNamespace implements Schema {
 
     private final String index;
 
@@ -67,7 +68,7 @@ public class ElasticsearchSchema extends AbstractSchema {
 
     private final ObjectMapper mapper;
 
-    private final Map tableMap;
+    private final Map tableMap;
 
     /**
      * Default batch size to be used during scrolling.
@@ -82,19 +83,19 @@ public class ElasticsearchSchema extends AbstractSchema {
      * @param mapper mapper for JSON (de)serialization
      * @param index name of ES index
      */
-    public ElasticsearchSchema( RestClient client, ObjectMapper mapper, String index ) {
-        this( client, mapper, index, null );
+    public ElasticsearchSchema( long id, RestClient client, ObjectMapper mapper, String index ) {
+        this( id, client, mapper, index, null );
     }
 
 
-    public ElasticsearchSchema( RestClient client, ObjectMapper mapper, String index, String type ) {
-        this( client, mapper, index, type, ElasticsearchTransport.DEFAULT_FETCH_SIZE );
+    public ElasticsearchSchema( long id, RestClient client, ObjectMapper mapper, String index, String type ) {
+        this( id, client, mapper, index, type, ElasticsearchTransport.DEFAULT_FETCH_SIZE );
     }
 
 
     @VisibleForTesting
-    ElasticsearchSchema( RestClient client, ObjectMapper mapper, String index, String type, int fetchSize ) {
-        super();
+    ElasticsearchSchema( long id, RestClient client, ObjectMapper mapper, String index, String type, int fetchSize ) {
+        super( id );
         this.client = Objects.requireNonNull( client, "client" );
         this.mapper = Objects.requireNonNull( mapper, "mapper" );
         this.index = Objects.requireNonNull( index, "index" );
@@ -114,16 +115,16 @@ public ElasticsearchSchema( RestClient client, ObjectMapper mapper, String index
 
     @Override
-    protected Map getTableMap() {
+    protected Map getTableMap() {
         return tableMap;
     }
 
 
-    private Map createTables( Iterable types ) {
-        final ImmutableMap.Builder builder = ImmutableMap.builder();
+    private Map createTables( Iterable types ) {
+        final ImmutableMap.Builder builder = ImmutableMap.builder();
         for ( String type : types ) {
             final ElasticsearchTransport transport = new ElasticsearchTransport( client, mapper, index, type, fetchSize );
-            builder.put( type, new ElasticsearchTable( transport ) );
+            builder.put( type, new ElasticsearchEntity( transport ) );
         }
         return builder.build();
     }
diff --git a/plugins/elasticsearch-adapter/src/main/java/org/polypheny/db/adapter/elasticsearch/ElasticsearchToEnumerableConverter.java b/plugins/elasticsearch-adapter/src/main/java/org/polypheny/db/adapter/elasticsearch/ElasticsearchToEnumerableConverter.java
index d5b2e83efa..b4d597d602 100644
--- a/plugins/elasticsearch-adapter/src/main/java/org/polypheny/db/adapter/elasticsearch/ElasticsearchToEnumerableConverter.java
+++ b/plugins/elasticsearch-adapter/src/main/java/org/polypheny/db/adapter/elasticsearch/ElasticsearchToEnumerableConverter.java
@@ -107,7 +107,7 @@ public int size() {
                 }
             } ), Pair.class ) );
-            final Expression table = block.append( "table", implementor.table.getExpression( ElasticsearchTable.ElasticsearchQueryable.class ) );
+            final Expression table = block.append( "table", implementor.table.getExpression( ElasticsearchEntity.ElasticsearchQueryable.class ) );
             final Expression ops = block.append( "ops", Expressions.constant( implementor.list ) );
             final Expression sort = block.append( "sort", constantArrayList( implementor.sort, Pair.class ) );
             final Expression groupBy = block.append( "groupBy", Expressions.constant( implementor.groupBy ) );
diff --git a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/FileAlg.java b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/FileAlg.java
index 8cccb3551b..a928ca560e 100644
--- a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/FileAlg.java
+++ b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/FileAlg.java
@@ -43,7 +43,7 @@ public enum Operation {
 
         @Getter
-        private transient FileTranslatableTable fileTable;
+        private transient FileTranslatableEntity fileTable;
         @Getter
         private final List columnNames = new ArrayList<>();
         private final List project = new ArrayList<>();
@@ -69,7 +69,7 @@ public FileImplementor() {
         }
 
-        public void setFileTable( final FileTranslatableTable fileTable ) {
+        public void setFileTable( final FileTranslatableEntity fileTable ) {
             this.fileTable = fileTable;
             this.columnNames.clear();
             this.columnNames.addAll( fileTable.getColumnNames() );
diff --git a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/FilePlugin.java b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/FilePlugin.java
index 85111244df..2d1773d1a6 100644
--- a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/FilePlugin.java
+++ b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/FilePlugin.java
@@ -60,9 +60,9 @@
 import org.polypheny.db.information.InformationGroup;
 import org.polypheny.db.information.InformationManager;
 import org.polypheny.db.prepare.Context;
-import org.polypheny.db.schema.Schema;
+import org.polypheny.db.schema.Entity;
+import org.polypheny.db.schema.Namespace;
 import org.polypheny.db.schema.SchemaPlus;
-import org.polypheny.db.schema.Table;
 import org.polypheny.db.transaction.PolyXid;
 import org.polypheny.db.type.PolyType;
 import org.polypheny.db.util.PolyphenyHomeDirManager;
@@ -174,22 +174,22 @@ private void setInformationPage() {
 
     @Override
-    public void createNewSchema( SchemaPlus rootSchema, String name ) {
+    public void createNewSchema( SchemaPlus rootSchema, String name, Long id ) {
         // it might be worth it to check why createNewSchema is called multiple times with different names
         if ( currentSchema == null ) {
-            currentSchema = new FileStoreSchema( rootSchema, name, this );
+            currentSchema = new FileStoreSchema( id, rootSchema, name, this );
         }
     }
 
 
     @Override
-    public Table createTableSchema( CatalogTable catalogTable, List columnPlacementsOnStore, CatalogPartitionPlacement partitionPlacement ) {
+    public Entity createTableSchema( CatalogTable catalogTable, List columnPlacementsOnStore, CatalogPartitionPlacement partitionPlacement ) {
         return currentSchema.createFileTable( catalogTable, columnPlacementsOnStore, partitionPlacement );
     }
 
 
     @Override
-    public Schema getCurrentSchema() {
+    public Namespace getCurrentSchema() {
         return currentSchema;
     }
 
@@ -454,7 +454,7 @@ private void cleanupHardlinks( final PolyXid xid ) {
     public void truncate( Context context, CatalogTable table ) {
         //context.getStatement().getTransaction().registerInvolvedStore( this );
         for ( CatalogPartitionPlacement partitionPlacement : catalog.getPartitionPlacementsByTableOnAdapter( getAdapterId(), table.id ) ) {
-            FileTranslatableTable fileTable = (FileTranslatableTable) currentSchema.getTable( table.name + "_" + partitionPlacement.partitionId );
+            FileTranslatableEntity fileTable = (FileTranslatableEntity) currentSchema.getEntity( table.name + "_" + partitionPlacement.partitionId );
             try {
                 for ( String colName : fileTable.getColumnNames() ) {
                     File columnFolder = getColumnFolder( fileTable.getColumnIdMap().get( colName ), fileTable.getPartitionId() );
diff --git a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/FileStoreSchema.java b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/FileStoreSchema.java
index c92e50263a..c17a822b8c 100644
--- a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/FileStoreSchema.java
+++ b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/FileStoreSchema.java
@@ -42,27 +42,28 @@
 import org.polypheny.db.catalog.entity.CatalogPartitionPlacement;
 import org.polypheny.db.catalog.entity.CatalogPrimaryKey;
 import org.polypheny.db.catalog.entity.CatalogTable;
+import org.polypheny.db.schema.Entity;
+import org.polypheny.db.schema.Namespace.Schema;
 import org.polypheny.db.schema.SchemaPlus;
 import org.polypheny.db.schema.Schemas;
-import org.polypheny.db.schema.Table;
-import org.polypheny.db.schema.impl.AbstractSchema;
+import org.polypheny.db.schema.impl.AbstractNamespace;
 import org.polypheny.db.type.PolyType;
 import org.polypheny.db.type.PolyTypeFactoryImpl;
 
 
-public class FileStoreSchema extends AbstractSchema implements FileSchema {
+public class FileStoreSchema extends AbstractNamespace implements FileSchema, Schema {
 
     @Getter
     private final String schemaName;
-    private final Map tableMap = new HashMap<>();
+    private final Map tableMap = new HashMap<>();
     @Getter
     private final FileStore store;
     @Getter
     private final FileConvention convention;
 
-    public FileStoreSchema( SchemaPlus parentSchema, String schemaName, FileStore store ) {
-        super();
+    public FileStoreSchema( long id, SchemaPlus parentSchema, String schemaName, FileStore store ) {
+        super( id );
         this.schemaName = schemaName;
         this.store = store;
         final Expression expression = Schemas.subSchemaExpression( parentSchema, schemaName, FileStoreSchema.class );
@@ -83,12 +84,12 @@ public int getAdapterId() {
 
     @Override
-    protected Map getTableMap() {
+    protected Map getTableMap() {
         return new HashMap<>( tableMap );
     }
 
 
-    public Table createFileTable(
+    public Entity createFileTable(
             CatalogTable catalogTable,
             List columnPlacementsOnStore,
             CatalogPartitionPlacement partitionPlacement ) {
@@ -128,7 +129,7 @@ public Table createFileTable(
             pkIds = new ArrayList<>();
         }
         // FileTable table = new FileTable( store.getRootDir(), schemaName, catalogEntity.id, columnIds, columnTypes, columnNames, store, this );
-        FileTranslatableTable table = new FileTranslatableTable(
+        FileTranslatableEntity table = new FileTranslatableEntity(
                 this,
                 catalogTable.name + "_" + partitionPlacement.partitionId,
                 catalogTable.id,
diff --git a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/FileTranslatableTable.java b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/FileTranslatableEntity.java
similarity index 91%
rename from plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/FileTranslatableTable.java
rename to plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/FileTranslatableEntity.java
index 8424ff6078..04d7e332f1 100644
--- a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/FileTranslatableTable.java
+++ b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/FileTranslatableEntity.java
@@ -19,7 +19,6 @@
 
 import java.io.File;
 import java.util.ArrayList;
-import java.util.Collection;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -28,7 +27,7 @@
 import org.apache.calcite.linq4j.Queryable;
 import org.polypheny.db.adapter.DataContext;
 import org.polypheny.db.adapter.file.algebra.FileScan;
-import org.polypheny.db.adapter.java.AbstractQueryableTable;
+import org.polypheny.db.adapter.java.AbstractQueryableEntity;
 import org.polypheny.db.algebra.AlgNode;
 import org.polypheny.db.algebra.core.Modify;
 import org.polypheny.db.algebra.core.Modify.Operation;
@@ -43,14 +42,14 @@
 import org.polypheny.db.plan.Convention;
 import org.polypheny.db.prepare.Prepare.CatalogReader;
 import org.polypheny.db.rex.RexNode;
-import org.polypheny.db.schema.ModifiableTable;
+import org.polypheny.db.schema.ModifiableEntity;
 import org.polypheny.db.schema.SchemaPlus;
-import org.polypheny.db.schema.TranslatableTable;
+import org.polypheny.db.schema.TranslatableEntity;
 import org.polypheny.db.schema.impl.AbstractTableQueryable;
 import org.polypheny.db.type.PolyType;
 
 
-public class FileTranslatableTable extends AbstractQueryableTable implements TranslatableTable, ModifiableTable {
+public class FileTranslatableEntity extends AbstractQueryableEntity implements TranslatableEntity, ModifiableEntity {
 
     private final File rootDir;
     @Getter
@@ -72,7 +71,7 @@ public class FileTranslatableTable extends AbstractQueryableTable implements Tra
 
     private final AlgProtoDataType protoRowType;
 
-    public FileTranslatableTable(
+    public FileTranslatableEntity(
             final FileSchema fileSchema,
             final String tableName,
             final Long tableId,
@@ -86,7 +85,7 @@ public FileTranslatableTable(
         this.fileSchema = fileSchema;
         this.rootDir = fileSchema.getRootDir();
         this.tableName = tableName;
-        this.tableId = tableId;
+        this.id = tableId;
         this.partitionId = partitionId;
         this.adapterId = fileSchema.getAdapterId();
         this.pkIds = pkIds;
@@ -159,8 +158,8 @@ public Queryable asQueryable( DataContext dataContext, SchemaPlus schema,
 
     public class FileQueryable extends AbstractTableQueryable {
 
-        public FileQueryable( DataContext dataContext, SchemaPlus schema, FileTranslatableTable table, String tableName ) {
-            super( dataContext, schema, FileTranslatableTable.this, tableName );
+        public FileQueryable( DataContext dataContext, SchemaPlus schema, FileTranslatableEntity table, String tableName ) {
+            super( dataContext, schema, FileTranslatableEntity.this, tableName );
         }
 
diff --git a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/algebra/FileRules.java b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/algebra/FileRules.java
index 11d3c9705b..e607b749de 100644
--- a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/algebra/FileRules.java
+++ b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/algebra/FileRules.java
@@ -26,7 +26,7 @@
 import org.polypheny.db.adapter.enumerable.EnumerableConvention;
 import org.polypheny.db.adapter.file.FileConvention;
 import org.polypheny.db.adapter.file.FileSchema;
-import org.polypheny.db.adapter.file.FileTranslatableTable;
+import org.polypheny.db.adapter.file.FileTranslatableEntity;
 import org.polypheny.db.algebra.AlgNode;
 import org.polypheny.db.algebra.UnsupportedFromInsertShuttle;
 import org.polypheny.db.algebra.convert.ConverterRule;
@@ -47,7 +47,7 @@
 import org.polypheny.db.rex.RexInputRef;
 import org.polypheny.db.rex.RexNode;
 import org.polypheny.db.rex.RexVisitorImpl;
-import org.polypheny.db.schema.ModifiableTable;
+import org.polypheny.db.schema.ModifiableEntity;
 import org.polypheny.db.schema.document.DocumentRules;
 import org.polypheny.db.tools.AlgBuilderFactory;
 import org.polypheny.db.util.UnsupportedRexCallVisitor;
@@ -90,7 +90,7 @@ private static boolean supports( Modify node ) {
 
         @Override
         public boolean matches( AlgOptRuleCall call ) {
             final Modify modify = call.alg( 0 );
-            if ( modify.getTable().unwrap( FileTranslatableTable.class ) == null ) {
+            if ( modify.getTable().unwrap( FileTranslatableEntity.class ) == null ) {
                 // todo insert from select is not correctly implemented
                 return false;
             }
@@ -99,7 +99,7 @@ public boolean matches( AlgOptRuleCall call ) {
                 return false;
             }
 
-            FileTranslatableTable table = modify.getTable().unwrap( FileTranslatableTable.class );
+            FileTranslatableEntity table = modify.getTable().unwrap( FileTranslatableEntity.class );
             convention.setModification( true );
             return true;
         }
@@ -108,7 +108,7 @@ public boolean matches( AlgOptRuleCall call ) {
         @Override
         public AlgNode convert( AlgNode alg ) {
             final Modify modify = (Modify) alg;
-            final ModifiableTable modifiableTable = modify.getTable().unwrap( ModifiableTable.class );
+            final ModifiableEntity modifiableTable = modify.getTable().unwrap( ModifiableEntity.class );
 
             if ( modifiableTable == null ) {
                 log.warn( "Returning null during conversion" );
diff --git a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/algebra/FileScan.java b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/algebra/FileScan.java
index c912d8fd06..b9ed11aa05 100644
--- a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/algebra/FileScan.java
+++ b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/algebra/FileScan.java
@@ -20,7 +20,7 @@
 import java.util.List;
 import org.polypheny.db.adapter.file.FileAlg;
 import org.polypheny.db.adapter.file.FileAlg.FileImplementor.Operation;
-import org.polypheny.db.adapter.file.FileTranslatableTable;
+import org.polypheny.db.adapter.file.FileTranslatableEntity;
 import org.polypheny.db.algebra.AlgNode;
 import org.polypheny.db.algebra.core.Scan;
 import org.polypheny.db.algebra.metadata.AlgMetadataQuery;
@@ -35,10 +35,10 @@
 
 public class FileScan extends Scan implements FileAlg {
 
-    private final FileTranslatableTable fileTable;
+    private final FileTranslatableEntity fileTable;
 
 
-    public FileScan( AlgOptCluster cluster, AlgOptEntity table, FileTranslatableTable fileTable ) {
+    public FileScan( AlgOptCluster cluster, AlgOptEntity table, FileTranslatableEntity fileTable ) {
         //convention was: EnumerableConvention.INSTANCE
         super( cluster, cluster.traitSetOf( fileTable.getFileSchema().getConvention() ).replace( ModelTrait.RELATIONAL ), table );
         this.fileTable = fileTable;
diff --git a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/algebra/FileTableModify.java b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/algebra/FileTableModify.java
index 926115435c..63f8b6b629 100644
--- a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/algebra/FileTableModify.java
+++ b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/algebra/FileTableModify.java
@@ -20,7 +20,7 @@
 import java.util.ArrayList;
 import java.util.List;
 import org.polypheny.db.adapter.file.FileAlg;
-import org.polypheny.db.adapter.file.FileTranslatableTable;
+import org.polypheny.db.adapter.file.FileTranslatableEntity;
 import org.polypheny.db.adapter.file.Value;
 import org.polypheny.db.algebra.AbstractAlgNode;
 import org.polypheny.db.algebra.AlgNode;
@@ -77,7 +77,7 @@ public void register( AlgOptPlanner planner ) {
     public void implement( final FileImplementor implementor ) {
         setOperation( implementor );//do it first, so children know that we have an insert/update/delete
         implementor.visitChild( 0, getInput() );
-        FileTranslatableTable fileTable = (FileTranslatableTable) getTable().getTable();
+        FileTranslatableEntity fileTable = (FileTranslatableEntity) getTable().getTable();
         implementor.setFileTable( fileTable );
         if ( getOperation() == Operation.UPDATE ) {
             if ( getSourceExpressionList() != null ) {
diff --git a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/source/Qfs.java b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/source/Qfs.java
index 0f0f449820..b181364487 100644
--- a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/source/Qfs.java
+++ b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/source/Qfs.java
@@ -44,9 +44,9 @@
 import org.polypheny.db.information.InformationTable;
 import org.polypheny.db.information.InformationText;
 import org.polypheny.db.prepare.Context;
-import org.polypheny.db.schema.Schema;
+import org.polypheny.db.schema.Entity;
+import org.polypheny.db.schema.Namespace;
 import org.polypheny.db.schema.SchemaPlus;
-import org.polypheny.db.schema.Table;
 import org.polypheny.db.transaction.PolyXid;
 import org.polypheny.db.type.PolyType;
 import org.polypheny.db.util.PolyphenyHomeDirManager;
@@ -84,19 +84,19 @@ private void init( final Map settings ) {
 
     @Override
-    public void createNewSchema( SchemaPlus rootSchema, String name ) {
-        currentSchema = new QfsSchema( rootSchema, name, this );
+    public void createNewSchema( SchemaPlus rootSchema, String name, Long id ) {
+        currentSchema = new QfsSchema( id, rootSchema, name, this );
     }
 
 
     @Override
-    public Table createTableSchema( CatalogTable combinedTable, List columnPlacementsOnStore, CatalogPartitionPlacement partitionPlacement ) {
+    public Entity createTableSchema( CatalogTable combinedTable, List columnPlacementsOnStore, CatalogPartitionPlacement partitionPlacement ) {
         return currentSchema.createFileTable( combinedTable, columnPlacementsOnStore, partitionPlacement );
     }
 
 
     @Override
-    public Schema getCurrentSchema() {
+    public Namespace getCurrentSchema() {
         return currentSchema;
     }
 
diff --git a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/source/QfsSchema.java b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/source/QfsSchema.java
index 7a05c86657..6ebec61d37 100644
--- a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/source/QfsSchema.java
+++ b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/source/QfsSchema.java
@@ -34,7 +34,7 @@
 import org.polypheny.db.adapter.file.FileAlg.FileImplementor.Operation;
 import org.polypheny.db.adapter.file.FileConvention;
 import org.polypheny.db.adapter.file.FileSchema;
-import org.polypheny.db.adapter.file.FileTranslatableTable;
+import org.polypheny.db.adapter.file.FileTranslatableEntity;
 import org.polypheny.db.adapter.file.Value;
 import org.polypheny.db.algebra.type.AlgDataTypeFactory;
 import org.polypheny.db.algebra.type.AlgDataTypeImpl;
@@ -46,27 +46,28 @@
 import org.polypheny.db.catalog.entity.CatalogPartitionPlacement;
 import org.polypheny.db.catalog.entity.CatalogPrimaryKey;
 import org.polypheny.db.catalog.entity.CatalogTable;
+import org.polypheny.db.schema.Entity;
+import org.polypheny.db.schema.Namespace.Schema;
 import org.polypheny.db.schema.SchemaPlus;
 import org.polypheny.db.schema.Schemas;
-import org.polypheny.db.schema.Table;
-import org.polypheny.db.schema.impl.AbstractSchema;
+import org.polypheny.db.schema.impl.AbstractNamespace;
 import org.polypheny.db.type.PolyType;
 import org.polypheny.db.type.PolyTypeFactoryImpl;
 
 
-public class QfsSchema extends AbstractSchema implements FileSchema {
+public class QfsSchema extends AbstractNamespace implements FileSchema, Schema {
 
     @Getter
     private final String schemaName;
-    private final Map tableMap = new HashMap<>();
+    private final Map tableMap = new HashMap<>();
    @Getter
     private final Qfs source;
     @Getter
     private final FileConvention convention;
 
-    public QfsSchema( SchemaPlus parentSchema, String schemaName, Qfs source ) {
-        super();
+    public QfsSchema( long id, SchemaPlus parentSchema, String schemaName, Qfs source ) {
+        super( id );
         this.schemaName = schemaName;
         this.source = source;
         final Expression expression = Schemas.subSchemaExpression( parentSchema, schemaName, QfsSchema.class );
@@ -87,12 +88,12 @@ public int getAdapterId() {
 
     @Override
-    protected Map getTableMap() {
+    protected Map getTableMap() {
         return new HashMap<>( tableMap );
     }
 
 
-    public Table createFileTable( CatalogTable catalogTable, List columnPlacementsOnStore, CatalogPartitionPlacement partitionPlacement ) {
+    public Entity createFileTable( CatalogTable catalogTable, List columnPlacementsOnStore, CatalogPartitionPlacement partitionPlacement ) {
         final AlgDataTypeFactory typeFactory = new PolyTypeFactoryImpl( AlgDataTypeSystem.DEFAULT );
         final AlgDataTypeFactory.Builder fieldInfo = typeFactory.builder();
         ArrayList columnIds = new ArrayList<>();
@@ -131,7 +132,7 @@ public Table createFileTable( CatalogTable catalogTable, List
             pkIds = new ArrayList<>();
         }
-        FileTranslatableTable table = new FileTranslatableTable(
+        FileTranslatableEntity table = new FileTranslatableEntity(
                 this,
                 catalogTable.name + "_" + partitionPlacement.partitionId,
                 catalogTable.id,
diff --git a/plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/algebra/GeodeAlg.java b/plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/algebra/GeodeAlg.java
index 3a97af7801..d8b7613466 100644
--- a/plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/algebra/GeodeAlg.java
+++ b/plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/algebra/GeodeAlg.java
@@ -75,7 +75,7 @@ class GeodeImplementContext {
         Long limitValue;
 
         AlgOptEntity table;
-        GeodeTable geodeTable;
+        GeodeEntity geodeTable;
 
 
         /**
diff --git a/plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/algebra/GeodeTable.java b/plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/algebra/GeodeEntity.java
similarity index 96%
rename from plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/algebra/GeodeTable.java
rename to plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/algebra/GeodeEntity.java
index 9fab17f7e8..e78f46381e 100644
--- a/plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/algebra/GeodeTable.java
+++ b/plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/algebra/GeodeEntity.java
@@ -52,7 +52,7 @@
 import org.polypheny.db.adapter.DataContext;
 import org.polypheny.db.adapter.geode.util.GeodeUtils;
 import org.polypheny.db.adapter.geode.util.JavaTypeFactoryExtImpl;
-import org.polypheny.db.adapter.java.AbstractQueryableTable;
+import org.polypheny.db.adapter.java.AbstractQueryableEntity;
 import org.polypheny.db.algebra.AlgNode;
 import org.polypheny.db.algebra.type.AlgDataType;
 import org.polypheny.db.algebra.type.AlgDataTypeFactory;
@@ -64,7 +64,7 @@
 import org.polypheny.db.plan.AlgTraitSet;
 import org.polypheny.db.runtime.Hook;
 import org.polypheny.db.schema.SchemaPlus;
-import org.polypheny.db.schema.TranslatableTable;
+import org.polypheny.db.schema.TranslatableEntity;
 import org.polypheny.db.schema.impl.AbstractTableQueryable;
 import org.polypheny.db.type.PolyType;
 import org.polypheny.db.util.Util;
@@ -74,13 +74,13 @@
  * Table based on a Geode Region
 */
 @Slf4j
-public class GeodeTable extends AbstractQueryableTable implements TranslatableTable {
+public class GeodeEntity extends AbstractQueryableEntity implements TranslatableEntity {
 
     private final String regionName;
     private final AlgDataType rowType;
 
-    GeodeTable( Region region ) {
+    GeodeEntity( Region region ) {
         super( Object[].class );
         this.regionName = region.getName();
         this.rowType = GeodeUtils.autodetectRelTypeFromRegion( region );
@@ -234,13 +234,13 @@ public AlgDataType getRowType( AlgDataTypeFactory typeFactory ) {
 
     /**
-     * Implementation of {@link Queryable} based on a {@link GeodeTable}.
+     * Implementation of {@link Queryable} based on a {@link GeodeEntity}.
* * @param type */ public static class GeodeQueryable extends AbstractTableQueryable { - public GeodeQueryable( DataContext dataContext, SchemaPlus schema, GeodeTable table, String tableName ) { + public GeodeQueryable( DataContext dataContext, SchemaPlus schema, GeodeEntity table, String tableName ) { super( dataContext, schema, table, tableName ); } @@ -252,8 +252,8 @@ public Enumerator enumerator() { } - private GeodeTable getTable() { - return (GeodeTable) table; + private GeodeEntity getTable() { + return (GeodeEntity) table; } diff --git a/plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/algebra/GeodeScan.java b/plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/algebra/GeodeScan.java index cb590e0845..067c6fc6df 100644 --- a/plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/algebra/GeodeScan.java +++ b/plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/algebra/GeodeScan.java @@ -50,7 +50,7 @@ */ public class GeodeScan extends Scan implements GeodeAlg { - final GeodeTable geodeTable; + final GeodeEntity geodeTable; final AlgDataType projectRowType; @@ -63,7 +63,7 @@ public class GeodeScan extends Scan implements GeodeAlg { * @param geodeTable Geode table * @param projectRowType Fields and types to project; null to project raw row */ - GeodeScan( AlgOptCluster cluster, AlgTraitSet traitSet, AlgOptEntity table, GeodeTable geodeTable, AlgDataType projectRowType ) { + GeodeScan( AlgOptCluster cluster, AlgTraitSet traitSet, AlgOptEntity table, GeodeEntity geodeTable, AlgDataType projectRowType ) { super( cluster, traitSet, table ); this.geodeTable = geodeTable; this.projectRowType = projectRowType; diff --git a/plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/algebra/GeodeSchema.java b/plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/algebra/GeodeSchema.java index 07181babb6..b647fc4528 100644 --- a/plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/algebra/GeodeSchema.java +++ b/plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/algebra/GeodeSchema.java @@ -42,40 +42,41 @@ import org.apache.geode.cache.GemFireCache; import org.apache.geode.cache.Region; import org.polypheny.db.adapter.geode.util.GeodeUtils; -import org.polypheny.db.schema.Table; -import org.polypheny.db.schema.impl.AbstractSchema; +import org.polypheny.db.schema.Entity; +import org.polypheny.db.schema.Namespace.Schema; +import org.polypheny.db.schema.impl.AbstractNamespace; /** * Schema mapped onto a Geode Region. 
*/ -public class GeodeSchema extends AbstractSchema { +public class GeodeSchema extends AbstractNamespace implements Schema { final GemFireCache cache; private final List regionNames; - private ImmutableMap tableMap; + private ImmutableMap tableMap; - GeodeSchema( String locatorHost, int locatorPort, Iterable regionNames, String pdxAutoSerializerPackageExp ) { - this( GeodeUtils.createClientCache( locatorHost, locatorPort, pdxAutoSerializerPackageExp, true ), regionNames ); + GeodeSchema( long id, String locatorHost, int locatorPort, Iterable regionNames, String pdxAutoSerializerPackageExp ) { + this( id, GeodeUtils.createClientCache( locatorHost, locatorPort, pdxAutoSerializerPackageExp, true ), regionNames ); } - GeodeSchema( final GemFireCache cache, final Iterable regionNames ) { - super(); + GeodeSchema( long id, final GemFireCache cache, final Iterable regionNames ) { + super( id ); this.cache = Objects.requireNonNull( cache, "clientCache" ); this.regionNames = ImmutableList.copyOf( Objects.requireNonNull( regionNames, "regionNames" ) ); } @Override - protected Map getTableMap() { + protected Map getTableMap() { if ( tableMap == null ) { - final ImmutableMap.Builder builder = ImmutableMap.builder(); + final ImmutableMap.Builder builder = ImmutableMap.builder(); for ( String regionName : regionNames ) { Region region = GeodeUtils.createRegion( cache, regionName ); - Table table = new GeodeTable( region ); - builder.put( regionName, table ); + Entity entity = new GeodeEntity( region ); + builder.put( regionName, entity ); } tableMap = builder.build(); } diff --git a/plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/algebra/GeodeToEnumerableConverter.java b/plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/algebra/GeodeToEnumerableConverter.java index bbf4e63fa4..af015b9381 100644 --- a/plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/algebra/GeodeToEnumerableConverter.java +++ b/plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/algebra/GeodeToEnumerableConverter.java @@ -89,9 +89,9 @@ public AlgOptCost computeSelfCost( AlgOptPlanner planner, AlgMetadataQuery mq ) /** - * Reference to the method {@link GeodeTable.GeodeQueryable#query}, used in the {@link Expression}. + * Reference to the method {@link GeodeEntity.GeodeQueryable#query}, used in the {@link Expression}. 
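 *
 * The method is resolved by name through {@code Types.lookupMethod}, so a change to the
 * signature of {@code query} surfaces only when this lookup runs, not at compile time.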
*/ - private static final Method GEODE_QUERY_METHOD = Types.lookupMethod( GeodeTable.GeodeQueryable.class, "query", List.class, List.class, List.class, List.class, List.class, List.class, Long.class ); + private static final Method GEODE_QUERY_METHOD = Types.lookupMethod( GeodeEntity.GeodeQueryable.class, "query", List.class, List.class, List.class, List.class, List.class, List.class, Long.class ); /** @@ -126,7 +126,7 @@ public int size() { // Expression meta-program for calling the GeodeTable.GeodeQueryable#query method form the generated code final BlockBuilder blockBuilder = new BlockBuilder().append( Expressions.call( - geodeImplementContext.table.getExpression( GeodeTable.GeodeQueryable.class ), + geodeImplementContext.table.getExpression( GeodeEntity.GeodeQueryable.class ), GEODE_QUERY_METHOD, // fields constantArrayList( Pair.zip( geodeFieldNames( rowType ), physFieldClasses ), Pair.class ), diff --git a/plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/simple/GeodeSimpleScannableTable.java b/plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/simple/GeodeSimpleScannableEntity.java similarity index 91% rename from plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/simple/GeodeSimpleScannableTable.java rename to plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/simple/GeodeSimpleScannableEntity.java index 52ac5e3768..fa0f943f19 100644 --- a/plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/simple/GeodeSimpleScannableTable.java +++ b/plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/simple/GeodeSimpleScannableEntity.java @@ -43,21 +43,21 @@ import org.polypheny.db.adapter.DataContext; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; -import org.polypheny.db.schema.ScannableTable; -import org.polypheny.db.schema.impl.AbstractTable; +import org.polypheny.db.schema.ScannableEntity; +import org.polypheny.db.schema.impl.AbstractEntity; /** * Geode Simple Scannable Table Abstraction */ -public class GeodeSimpleScannableTable extends AbstractTable implements ScannableTable { +public class GeodeSimpleScannableEntity extends AbstractEntity implements ScannableEntity { private final AlgDataType algDataType; private String regionName; private ClientCache clientCache; - public GeodeSimpleScannableTable( String regionName, AlgDataType algDataType, ClientCache clientCache ) { + public GeodeSimpleScannableEntity( String regionName, AlgDataType algDataType, ClientCache clientCache ) { super(); this.regionName = regionName; this.clientCache = clientCache; diff --git a/plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/simple/GeodeSimpleSchema.java b/plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/simple/GeodeSimpleSchema.java index c48a9e65da..0d038bbd61 100644 --- a/plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/simple/GeodeSimpleSchema.java +++ b/plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/simple/GeodeSimpleSchema.java @@ -39,25 +39,26 @@ import org.apache.geode.cache.Region; import org.apache.geode.cache.client.ClientCache; import org.polypheny.db.adapter.geode.util.GeodeUtils; -import org.polypheny.db.schema.Table; -import org.polypheny.db.schema.impl.AbstractSchema; +import org.polypheny.db.schema.Entity; +import org.polypheny.db.schema.Namespace.Schema; +import org.polypheny.db.schema.impl.AbstractNamespace; /** * Geode Simple Schema. 
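 *
 * Constructed with an explicit namespace id under the refactored API; a sketch with
 * placeholder host, port and region values:
 *
 *   new GeodeSimpleSchema( 1L, "localhost", 10334, new String[]{ "Zips" }, "org.polypheny.db.adapter.geode.*" );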
*/ -public class GeodeSimpleSchema extends AbstractSchema { +public class GeodeSimpleSchema extends AbstractNamespace implements Schema { private String locatorHost; private int locatorPort; private String[] regionNames; private String pdxAutoSerializerPackageExp; private ClientCache clientCache; - private ImmutableMap tableMap; + private ImmutableMap tableMap; - public GeodeSimpleSchema( String locatorHost, int locatorPort, String[] regionNames, String pdxAutoSerializerPackageExp ) { - super(); + public GeodeSimpleSchema( long id, String locatorHost, int locatorPort, String[] regionNames, String pdxAutoSerializerPackageExp ) { + super( id ); this.locatorHost = locatorHost; this.locatorPort = locatorPort; this.regionNames = regionNames; @@ -68,13 +69,13 @@ public GeodeSimpleSchema( String locatorHost, int locatorPort, String[] regionNa @Override - protected Map getTableMap() { + protected Map getTableMap() { if ( tableMap == null ) { - final ImmutableMap.Builder builder = ImmutableMap.builder(); + final ImmutableMap.Builder builder = ImmutableMap.builder(); for ( String regionName : regionNames ) { Region region = GeodeUtils.createRegion( clientCache, regionName ); - Table table = new GeodeSimpleScannableTable( regionName, GeodeUtils.autodetectRelTypeFromRegion( region ), clientCache ); - builder.put( regionName, table ); + Entity entity = new GeodeSimpleScannableEntity( regionName, GeodeUtils.autodetectRelTypeFromRegion( region ), clientCache ); + builder.put( regionName, entity ); } tableMap = builder.build(); } diff --git a/plugins/hsqldb-adapter/src/main/java/org/polypheny/db/hsqldb/stores/HsqldbStore.java b/plugins/hsqldb-adapter/src/main/java/org/polypheny/db/hsqldb/stores/HsqldbStore.java index 46240bd56f..7a10b37cf7 100644 --- a/plugins/hsqldb-adapter/src/main/java/org/polypheny/db/hsqldb/stores/HsqldbStore.java +++ b/plugins/hsqldb-adapter/src/main/java/org/polypheny/db/hsqldb/stores/HsqldbStore.java @@ -43,8 +43,8 @@ import org.polypheny.db.config.RuntimeConfig; import org.polypheny.db.plugins.PolyPluginManager; import org.polypheny.db.prepare.Context; -import org.polypheny.db.schema.Schema; -import org.polypheny.db.schema.Table; +import org.polypheny.db.schema.Entity; +import org.polypheny.db.schema.Namespace; import org.polypheny.db.sql.language.dialect.HsqldbSqlDialect; import org.polypheny.db.transaction.PUID; import org.polypheny.db.transaction.PUID.Type; @@ -105,13 +105,13 @@ protected ConnectionFactory deployEmbedded() { @Override - public Table createTableSchema( CatalogTable catalogTable, List columnPlacementsOnStore, CatalogPartitionPlacement partitionPlacement ) { + public Entity createTableSchema( CatalogTable catalogTable, List columnPlacementsOnStore, CatalogPartitionPlacement partitionPlacement ) { return currentJdbcSchema.createJdbcTable( catalogTable, columnPlacementsOnStore, partitionPlacement ); } @Override - public Schema getCurrentSchema() { + public Namespace getCurrentSchema() { return currentJdbcSchema; } diff --git a/plugins/html-adapter/src/main/java/org/polypheny/db/adapter/html/HtmlTable.java b/plugins/html-adapter/src/main/java/org/polypheny/db/adapter/html/HtmlEntity.java similarity index 90% rename from plugins/html-adapter/src/main/java/org/polypheny/db/adapter/html/HtmlTable.java rename to plugins/html-adapter/src/main/java/org/polypheny/db/adapter/html/HtmlEntity.java index 4dfa9cf4ad..83761be7ec 100644 --- a/plugins/html-adapter/src/main/java/org/polypheny/db/adapter/html/HtmlTable.java +++ 
b/plugins/html-adapter/src/main/java/org/polypheny/db/adapter/html/HtmlEntity.java @@ -43,7 +43,7 @@ import org.polypheny.db.adapter.DataContext; import org.polypheny.db.adapter.enumerable.EnumerableConvention; import org.polypheny.db.adapter.enumerable.EnumerableScan; -import org.polypheny.db.adapter.java.AbstractQueryableTable; +import org.polypheny.db.adapter.java.AbstractQueryableEntity; import org.polypheny.db.adapter.java.JavaTypeFactory; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.type.AlgDataType; @@ -55,7 +55,7 @@ import org.polypheny.db.schema.SchemaPlus; import org.polypheny.db.schema.Statistic; import org.polypheny.db.schema.Statistics; -import org.polypheny.db.schema.TranslatableTable; +import org.polypheny.db.schema.TranslatableEntity; import org.polypheny.db.schema.impl.AbstractTableQueryable; import org.polypheny.db.util.Source; @@ -63,7 +63,7 @@ /** * Table implementation wrapping a URL / HTML table. */ -class HtmlTable extends AbstractQueryableTable implements TranslatableTable { +class HtmlEntity extends AbstractQueryableEntity implements TranslatableEntity { private final AlgProtoDataType protoRowType; private HtmlReader reader; @@ -73,7 +73,7 @@ class HtmlTable extends AbstractQueryableTable implements TranslatableTable { /** * Creates a HtmlTable. */ - private HtmlTable( Source source, String selector, Integer index, AlgProtoDataType protoRowType, List> fieldConfigs ) throws Exception { + private HtmlEntity( Source source, String selector, Integer index, AlgProtoDataType protoRowType, List> fieldConfigs ) throws Exception { super( Object[].class ); this.protoRowType = protoRowType; @@ -85,11 +85,11 @@ private HtmlTable( Source source, String selector, Integer index, AlgProtoDataTy /** * Creates a HtmlTable. 
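 *
 * The definition map read below uses the keys "fields", "selector" and "index"; a sketch
 * with assumed values:
 *
 *   Map&lt;String, Object&gt; tableDef = new HashMap&lt;&gt;();
 *   tableDef.put( "selector", "table" );
 *   tableDef.put( "index", 0 );
 *   HtmlEntity entity = HtmlEntity.create( source, tableDef );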
*/ - static HtmlTable create( Source source, Map tableDef ) throws Exception { + static HtmlEntity create( Source source, Map tableDef ) throws Exception { @SuppressWarnings("unchecked") List> fieldConfigs = (List>) tableDef.get( "fields" ); String selector = (String) tableDef.get( "selector" ); Integer index = (Integer) tableDef.get( "index" ); - return new HtmlTable( source, selector, index, null, fieldConfigs ); + return new HtmlEntity( source, selector, index, null, fieldConfigs ); } diff --git a/plugins/html-adapter/src/main/java/org/polypheny/db/adapter/html/HtmlScan.java b/plugins/html-adapter/src/main/java/org/polypheny/db/adapter/html/HtmlScan.java index 08fc3b79b6..6adba99f63 100644 --- a/plugins/html-adapter/src/main/java/org/polypheny/db/adapter/html/HtmlScan.java +++ b/plugins/html-adapter/src/main/java/org/polypheny/db/adapter/html/HtmlScan.java @@ -63,11 +63,11 @@ */ class HtmlScan extends Scan implements EnumerableAlg { - private final HtmlTable webTable; + private final HtmlEntity webTable; private final int[] fields; - protected HtmlScan( AlgOptCluster cluster, AlgOptEntity table, HtmlTable webTable, int[] fields ) { + protected HtmlScan( AlgOptCluster cluster, AlgOptEntity table, HtmlEntity webTable, int[] fields ) { super( cluster, cluster.traitSetOf( EnumerableConvention.INSTANCE ), table ); this.webTable = webTable; this.fields = fields; @@ -104,7 +104,7 @@ public AlgDataType deriveRowType() { public Result implement( EnumerableAlgImplementor implementor, Prefer pref ) { PhysType physType = PhysTypeImpl.of( implementor.getTypeFactory(), getRowType(), pref.preferArray() ); - return implementor.result( physType, Blocks.toBlock( Expressions.call( table.getExpression( HtmlTable.class ), "project", Expressions.constant( fields ) ) ) ); + return implementor.result( physType, Blocks.toBlock( Expressions.call( table.getExpression( HtmlEntity.class ), "project", Expressions.constant( fields ) ) ) ); } } diff --git a/plugins/html-adapter/src/main/java/org/polypheny/db/adapter/html/HtmlSchema.java b/plugins/html-adapter/src/main/java/org/polypheny/db/adapter/html/HtmlSchema.java index bbabe925f7..666cb8e1fd 100644 --- a/plugins/html-adapter/src/main/java/org/polypheny/db/adapter/html/HtmlSchema.java +++ b/plugins/html-adapter/src/main/java/org/polypheny/db/adapter/html/HtmlSchema.java @@ -40,9 +40,10 @@ import java.net.MalformedURLException; import java.util.List; import java.util.Map; +import org.polypheny.db.schema.Entity; +import org.polypheny.db.schema.Namespace.Schema; import org.polypheny.db.schema.SchemaPlus; -import org.polypheny.db.schema.Table; -import org.polypheny.db.schema.impl.AbstractSchema; +import org.polypheny.db.schema.impl.AbstractNamespace; import org.polypheny.db.util.Source; import org.polypheny.db.util.Sources; import org.polypheny.db.util.Util; @@ -51,7 +52,7 @@ /** * Schema mapped onto a set of URLs / HTML tables. Each table in the schema is an HTML table on a URL. 
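 *
 * Table definitions supply at least a "name" and a "url"; relative sources are resolved
 * against the base directory passed to the constructor (which may be null).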
*/ -class HtmlSchema extends AbstractSchema { +class HtmlSchema extends AbstractNamespace implements Schema { private final ImmutableList> tables; private final File baseDirectory; @@ -65,7 +66,8 @@ class HtmlSchema extends AbstractSchema { * @param baseDirectory Base directory to look for relative files, or null * @param tables List containing HTML table identifiers */ - HtmlSchema( SchemaPlus parentSchema, String name, File baseDirectory, List> tables ) { + HtmlSchema( long id, SchemaPlus parentSchema, String name, File baseDirectory, List> tables ) { + super( id ); this.tables = ImmutableList.copyOf( tables ); this.baseDirectory = baseDirectory; } @@ -91,8 +93,8 @@ private static String trimOrNull( String s, String suffix ) { @Override - protected Map getTableMap() { - final ImmutableMap.Builder builder = ImmutableMap.builder(); + protected Map getTableMap() { + final ImmutableMap.Builder builder = ImmutableMap.builder(); for ( Map tableDef : this.tables ) { String tableName = (String) tableDef.get( "name" ); @@ -119,7 +121,7 @@ protected Map getTableMap() { Source sourceSansGz = source.trim( ".gz" ); final Source sourceSansJson = sourceSansGz.trimOrNull( ".json" ); if ( sourceSansJson != null ) { - JsonTable table = new JsonTable( source ); + JsonEntity table = new JsonEntity( source ); builder.put( sourceSansJson.relative( baseSource ).path(), table ); continue; } @@ -133,7 +135,7 @@ protected Map getTableMap() { } - private boolean addTable( ImmutableMap.Builder builder, Map tableDef ) throws MalformedURLException { + private boolean addTable( ImmutableMap.Builder builder, Map tableDef ) throws MalformedURLException { final String tableName = (String) tableDef.get( "name" ); final String url = (String) tableDef.get( "url" ); final Source source0 = Sources.url( url ); @@ -147,11 +149,11 @@ private boolean addTable( ImmutableMap.Builder builder, Map builder, Source source, String tableName, Map tableDef ) { + private boolean addTable( ImmutableMap.Builder builder, Source source, String tableName, Map tableDef ) { final Source sourceSansGz = source.trim( ".gz" ); final Source sourceSansJson = sourceSansGz.trimOrNull( ".json" ); if ( sourceSansJson != null ) { - JsonTable table = new JsonTable( source ); + JsonEntity table = new JsonEntity( source ); builder.put( Util.first( tableName, sourceSansJson.path() ), table ); return true; } @@ -160,14 +162,14 @@ private boolean addTable( ImmutableMap.Builder builder, Source so // // TODO: MV: This three nulls most properly introduce trouble. Fix to have the correct row details at this point. 
// - final Table table = null; //new CsvFilterableTable( source, null, null, null, null, null ); TODO: if this is actually used, introduce dependency on plugin - builder.put( Util.first( tableName, sourceSansCsv.path() ), table ); + final Entity entity = null; //new CsvFilterableTable( source, null, null, null, null, null ); TODO: if this is actually used, introduce dependency on plugin + builder.put( Util.first( tableName, sourceSansCsv.path() ), entity ); return true; } if ( tableDef != null ) { try { - HtmlTable table = HtmlTable.create( source, tableDef ); + HtmlEntity table = HtmlEntity.create( source, tableDef ); builder.put( Util.first( tableName, source.path() ), table ); return true; } catch ( Exception e ) { diff --git a/plugins/html-adapter/src/main/java/org/polypheny/db/adapter/html/JsonTable.java b/plugins/html-adapter/src/main/java/org/polypheny/db/adapter/html/JsonEntity.java similarity index 90% rename from plugins/html-adapter/src/main/java/org/polypheny/db/adapter/html/JsonTable.java rename to plugins/html-adapter/src/main/java/org/polypheny/db/adapter/html/JsonEntity.java index 23ccbc0a28..4a1eb98a9f 100644 --- a/plugins/html-adapter/src/main/java/org/polypheny/db/adapter/html/JsonTable.java +++ b/plugins/html-adapter/src/main/java/org/polypheny/db/adapter/html/JsonEntity.java @@ -23,8 +23,8 @@ import org.polypheny.db.adapter.DataContext; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; -import org.polypheny.db.schema.ScannableTable; -import org.polypheny.db.schema.impl.AbstractTable; +import org.polypheny.db.schema.ScannableEntity; +import org.polypheny.db.schema.impl.AbstractEntity; import org.polypheny.db.type.PolyType; import org.polypheny.db.util.Source; @@ -32,7 +32,7 @@ /** * Table based on a JSON file. */ -public class JsonTable extends AbstractTable implements ScannableTable { +public class JsonEntity extends AbstractEntity implements ScannableEntity { private final Source source; @@ -40,7 +40,7 @@ public class JsonTable extends AbstractTable implements ScannableTable { /** * Creates a JsonTable. */ - public JsonTable( Source source ) { + public JsonEntity( Source source ) { this.source = source; } diff --git a/plugins/html-adapter/src/test/resources/sales/DEPTS.html b/plugins/html-adapter/src/test/resources/sales/DEPTS.html index d93e477a7d..2db6cf38f5 100644 --- a/plugins/html-adapter/src/test/resources/sales/DEPTS.html +++ b/plugins/html-adapter/src/test/resources/sales/DEPTS.html @@ -1,26 +1,26 @@ -
[The HTML markup of the following six test resources did not survive extraction; each diff reformats a table in place, and the visible cell data is unchanged between the old and new versions:]
sales/DEPTS.html        header DEPTNO | NAME; rows: 10 Sales, 20 Marketing, 30 Accounts
sales/EMPS.html         header EMPNO | NAME | DEPTNO; rows include: 100 Fred 30, 120 Wilma 20, 130 Alice 40
tableNoTH.html          cells include: R0C0, R0C1, R2C1, R2C2
tableNoTheadTbody.html  cells include: H0, H1, R2C1, R2C2
tableOK.html            cells include: H0, R2C2
tableX2.html            two tables; cells include: H0, R1C2 and R0C0, R0C1, R2C4, R2C5
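For orientation: with the rename in place, HtmlSchema.addTable dispatches on the source suffix, so a .json source becomes a JsonEntity, a .csv source remains stubbed out (see the TODO above), and any other source with a table definition becomes an HtmlEntity. A minimal sketch, assuming a hypothetical URL and invented definition values:

    // Sketch only: the URL and values are made up; the classes are those renamed above.
    import java.util.HashMap;
    import java.util.Map;
    import org.polypheny.db.util.Source;
    import org.polypheny.db.util.Sources;

    Source source = Sources.url( "http://example.com/DEPTS.html" );
    Map<String, Object> tableDef = new HashMap<>();
    tableDef.put( "name", "DEPTS" );
    tableDef.put( "selector", "table" );  // CSS selector of the table element
    tableDef.put( "index", 0 );           // which matching table to use
    HtmlEntity entity = HtmlEntity.create( source, tableDef );  // declares throws Exception
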
    + diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcTable.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcEntity.java similarity index 94% rename from plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcTable.java rename to plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcEntity.java index 0572f53844..18645eb6e7 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcTable.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcEntity.java @@ -36,7 +36,6 @@ import com.google.common.collect.Lists; import java.util.Arrays; -import java.util.Collection; import java.util.List; import java.util.Objects; import org.apache.calcite.avatica.ColumnMetaData; @@ -45,7 +44,7 @@ import org.apache.calcite.linq4j.Queryable; import org.apache.calcite.linq4j.tree.Expressions; import org.polypheny.db.adapter.DataContext; -import org.polypheny.db.adapter.java.AbstractQueryableTable; +import org.polypheny.db.adapter.java.AbstractQueryableEntity; import org.polypheny.db.adapter.java.JavaTypeFactory; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.core.Modify; @@ -64,11 +63,11 @@ import org.polypheny.db.plan.Convention; import org.polypheny.db.prepare.Prepare.CatalogReader; import org.polypheny.db.rex.RexNode; -import org.polypheny.db.schema.ModifiableTable; -import org.polypheny.db.schema.ScannableTable; -import org.polypheny.db.schema.Schema.TableType; +import org.polypheny.db.schema.ModifiableEntity; +import org.polypheny.db.schema.ScannableEntity; import org.polypheny.db.schema.SchemaPlus; -import org.polypheny.db.schema.TranslatableTable; +import org.polypheny.db.schema.TableType; +import org.polypheny.db.schema.TranslatableEntity; import org.polypheny.db.schema.impl.AbstractTableQueryable; import org.polypheny.db.sql.language.SqlBasicCall; import org.polypheny.db.sql.language.SqlIdentifier; @@ -89,7 +88,7 @@ * applying Queryable operators such as {@link org.apache.calcite.linq4j.Queryable#where(org.apache.calcite.linq4j.function.Predicate2)}. * The resulting queryable can then be converted to a SQL query, which can be executed efficiently on the JDBC server. */ -public class JdbcTable extends AbstractQueryableTable implements TranslatableTable, ScannableTable, ModifiableTable { +public class JdbcEntity extends AbstractQueryableEntity implements TranslatableEntity, ScannableEntity, ModifiableEntity { private AlgProtoDataType protoRowType; private JdbcSchema jdbcSchema; @@ -105,7 +104,7 @@ public class JdbcTable extends AbstractQueryableTable implements TranslatableTab private final TableType jdbcTableType; - public JdbcTable( + public JdbcEntity( JdbcSchema jdbcSchema, String logicalSchemaName, String logicalTableName, @@ -126,7 +125,7 @@ public JdbcTable( this.physicalColumnNames = physicalColumnNames; this.jdbcTableType = Objects.requireNonNull( jdbcTableType ); this.protoRowType = protoRowType; - this.tableId = tableId; + this.id = tableId; } @@ -279,14 +278,14 @@ public void setSchema( JdbcSchema jdbcSchema ) { /** - * Enumerable that returns the contents of a {@link JdbcTable} by connecting to the JDBC data source. + * Enumerable that returns the contents of a {@link JdbcEntity} by connecting to the JDBC data source. 
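 *
 * Enumerating this queryable executes the SQL generated for the entity through the schema's
 * connection factory; rows are never materialized from a local copy.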
* * @param element type */ private class JdbcTableQueryable extends AbstractTableQueryable { JdbcTableQueryable( DataContext dataContext, SchemaPlus schema, String tableName ) { - super( dataContext, schema, JdbcTable.this, tableName ); + super( dataContext, schema, JdbcEntity.this, tableName ); } diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcImplementor.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcImplementor.java index 2a4748f422..844c32d2f2 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcImplementor.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcImplementor.java @@ -34,6 +34,7 @@ package org.polypheny.db.adapter.jdbc; import com.google.common.collect.ImmutableList; +import java.util.List; import org.polypheny.db.adapter.java.JavaTypeFactory; import org.polypheny.db.adapter.jdbc.rel2sql.AlgToSqlConverter; import org.polypheny.db.algebra.AlgNode; @@ -42,8 +43,6 @@ import org.polypheny.db.sql.language.SqlIdentifier; import org.polypheny.db.util.Util; -import java.util.List; - /** * State for generating a SQL statement. @@ -75,7 +74,7 @@ public Result implement( AlgNode node ) { @Override public SqlIdentifier getPhysicalTableName( List tableNames ) { - JdbcTable table; + JdbcEntity table; if ( tableNames.size() == 1 ) { // only table name // NOTICE MV: I think, this case should no longer happen because there should always be a schema in the form @@ -102,7 +101,7 @@ public SqlIdentifier getPhysicalColumnName( List tableNames, String colu return schema.getTableMap().get( tableNames.get( 0 ) ).physicalColumnName( columnName ); } else if ( tableNames.size() == 2 ) { // table name and column name - JdbcTable table = schema.getTableMap().get( tableNames.get( 1 ) ); + JdbcEntity table = schema.getTableMap().get( tableNames.get( 1 ) ); if ( table.hasPhysicalColumnName( columnName ) ) { return schema.getTableMap().get( tableNames.get( 1 ) ).physicalColumnName( columnName ); } else { diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcRules.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcRules.java index 8dcdce62e2..eff132060a 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcRules.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcRules.java @@ -90,7 +90,7 @@ import org.polypheny.db.rex.RexProgram; import org.polypheny.db.rex.RexVisitorImpl; import org.polypheny.db.schema.ModelTrait; -import org.polypheny.db.schema.ModifiableTable; +import org.polypheny.db.schema.ModifiableEntity; import org.polypheny.db.schema.document.DocumentRules; import org.polypheny.db.sql.language.SqlAggFunction; import org.polypheny.db.sql.language.SqlDialect; @@ -1007,8 +1007,8 @@ private JdbcTableModificationRule( JdbcConvention out, AlgBuilderFactory algBuil @Override public boolean matches( AlgOptRuleCall call ) { final Modify modify = call.alg( 0 ); - if ( modify.getTable().unwrap( JdbcTable.class ) != null ) { - JdbcTable table = modify.getTable().unwrap( JdbcTable.class ); + if ( modify.getTable().unwrap( JdbcEntity.class ) != null ) { + JdbcEntity table = modify.getTable().unwrap( JdbcEntity.class ); if ( out.getJdbcSchema() == table.getSchema() ) { return true; } @@ -1020,7 +1020,7 @@ public boolean matches( AlgOptRuleCall call ) { @Override public AlgNode convert( AlgNode alg ) { final 
Modify modify = (Modify) alg; - final ModifiableTable modifiableTable = modify.getTable().unwrap( ModifiableTable.class ); + final ModifiableEntity modifiableTable = modify.getTable().unwrap( ModifiableEntity.class ); if ( modifiableTable == null ) { return null; } @@ -1061,7 +1061,7 @@ public JdbcTableModify( super( cluster, traitSet, table, catalogReader, input, operation, updateColumnList, sourceExpressionList, flattened ); assert input.getConvention() instanceof JdbcConvention; assert getConvention() instanceof JdbcConvention; - final ModifiableTable modifiableTable = table.unwrap( ModifiableTable.class ); + final ModifiableEntity modifiableTable = table.unwrap( ModifiableEntity.class ); if ( modifiableTable == null ) { throw new AssertionError(); // TODO: user error in validator } diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcScan.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcScan.java index 5119b68ee7..0b77bbb6b1 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcScan.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcScan.java @@ -52,7 +52,7 @@ */ public class JdbcScan extends Scan implements JdbcAlg { - protected final JdbcTable jdbcTable; + protected final JdbcEntity jdbcTable; @Override @@ -61,7 +61,7 @@ public AlgOptCost computeSelfCost( AlgOptPlanner planner, AlgMetadataQuery mq ) } - public JdbcScan( AlgOptCluster cluster, AlgOptEntity table, JdbcTable jdbcTable, JdbcConvention jdbcConvention ) { + public JdbcScan( AlgOptCluster cluster, AlgOptEntity table, JdbcEntity jdbcTable, JdbcConvention jdbcConvention ) { super( cluster, cluster.traitSetOf( jdbcConvention ).replace( ModelTrait.RELATIONAL ), table ); this.jdbcTable = jdbcTable; assert jdbcTable != null; diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcSchema.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcSchema.java index 7b34672a1e..caebbd5645 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcSchema.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcSchema.java @@ -37,6 +37,13 @@ import com.google.common.collect.ImmutableMultimap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Multimap; +import java.util.Collection; +import java.util.HashMap; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.Set; +import javax.sql.DataSource; import lombok.Getter; import lombok.NonNull; import lombok.extern.slf4j.Slf4j; @@ -46,30 +53,38 @@ import org.polypheny.db.adapter.jdbc.connection.ConnectionFactory; import org.polypheny.db.adapter.jdbc.connection.ConnectionHandler; import org.polypheny.db.adapter.jdbc.connection.ConnectionHandlerException; -import org.polypheny.db.algebra.type.*; +import org.polypheny.db.algebra.type.AlgDataType; +import org.polypheny.db.algebra.type.AlgDataTypeFactory; +import org.polypheny.db.algebra.type.AlgDataTypeImpl; +import org.polypheny.db.algebra.type.AlgDataTypeSystem; +import org.polypheny.db.algebra.type.AlgProtoDataType; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogColumn; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; import org.polypheny.db.catalog.entity.CatalogTable; -import 
org.polypheny.db.schema.*; +import org.polypheny.db.schema.Entity; +import org.polypheny.db.schema.Function; +import org.polypheny.db.schema.Namespace; +import org.polypheny.db.schema.Namespace.Schema; +import org.polypheny.db.schema.SchemaPlus; +import org.polypheny.db.schema.SchemaVersion; +import org.polypheny.db.schema.Schemas; +import org.polypheny.db.schema.TableType; import org.polypheny.db.sql.language.SqlDialect; import org.polypheny.db.sql.language.SqlDialectFactory; import org.polypheny.db.type.PolyType; import org.polypheny.db.type.PolyTypeFactoryImpl; -import javax.sql.DataSource; -import java.util.*; - /** - * Implementation of {@link Schema} that is backed by a JDBC data source. + * Implementation of {@link Namespace} that is backed by a JDBC data source. * * The tables in the JDBC data source appear to be tables in this schema; queries against this schema are executed * against those tables, pushing down as much as possible of the query logic to SQL. */ @Slf4j -public class JdbcSchema implements Schema { +public class JdbcSchema implements Namespace, Schema { final ConnectionFactory connectionFactory; public final SqlDialect dialect; @@ -77,20 +92,23 @@ public class JdbcSchema implements Schema { @Getter private final JdbcConvention convention; - private final Map tableMap; + private final Map tableMap; private final Map physicalToLogicalTableNameMap; private final Adapter adapter; + @Getter + private final long id; private JdbcSchema( + long id, @NonNull ConnectionFactory connectionFactory, @NonNull SqlDialect dialect, JdbcConvention convention, - Map tableMap, + Map tableMap, Map physicalToLogicalTableNameMap, Adapter adapter ) { - super(); + this.id = id; this.connectionFactory = connectionFactory; this.dialect = dialect; this.convention = convention; @@ -108,11 +126,12 @@ private JdbcSchema( * @param convention Calling convention */ public JdbcSchema( + long id, @NonNull ConnectionFactory connectionFactory, @NonNull SqlDialect dialect, JdbcConvention convention, Adapter adapter ) { - super(); + this.id = id; this.connectionFactory = connectionFactory; this.dialect = dialect; convention.setJdbcSchema( this ); @@ -123,7 +142,7 @@ public JdbcSchema( } - public JdbcTable createJdbcTable( + public JdbcEntity createJdbcTable( CatalogTable catalogTable, List columnPlacementsOnStore, CatalogPartitionPlacement partitionPlacement ) { @@ -147,7 +166,7 @@ public JdbcTable createJdbcTable( physicalColumnNames.add( placement.physicalColumnName ); } - JdbcTable table = new JdbcTable( + JdbcEntity table = new JdbcEntity( this, catalogTable.getNamespaceName(), catalogTable.name, @@ -166,6 +185,7 @@ public JdbcTable createJdbcTable( public static JdbcSchema create( + Long id, SchemaPlus parentSchema, String name, ConnectionFactory connectionFactory, @@ -173,7 +193,7 @@ public static JdbcSchema create( Adapter adapter ) { final Expression expression = Schemas.subSchemaExpression( parentSchema, name, JdbcSchema.class ); final JdbcConvention convention = JdbcConvention.of( dialect, expression, name ); - return new JdbcSchema( connectionFactory, dialect, convention, adapter ); + return new JdbcSchema( id, connectionFactory, dialect, convention, adapter ); } @@ -192,8 +212,9 @@ public boolean isMutable() { @Override - public Schema snapshot( SchemaVersion version ) { + public Namespace snapshot( SchemaVersion version ) { return new JdbcSchema( + id, connectionFactory, dialect, convention, @@ -239,12 +260,12 @@ public final Set getFunctionNames() { @Override - public Table getTable( String 
name ) { + public Entity getEntity( String name ) { return getTableMap().get( name ); } - public synchronized ImmutableMap getTableMap() { + public synchronized ImmutableMap getTableMap() { return ImmutableMap.copyOf( tableMap ); } @@ -286,7 +307,7 @@ private AlgDataType parseTypeString( AlgDataTypeFactory typeFactory, String type @Override - public Set getTableNames() { + public Set getEntityNames() { // This method is called during a cache refresh. We can take it as a signal that we need to re-build our own cache. return getTableMap().keySet(); } @@ -311,14 +332,14 @@ public Set getTypeNames() { @Override - public Schema getSubSchema( String name ) { + public Namespace getSubNamespace( String name ) { // JDBC does not support sub-schemas. return null; } @Override - public Set getSubSchemaNames() { + public Set getSubNamespaceNames() { return ImmutableSet.of(); } diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/rel2sql/SqlImplementor.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/rel2sql/SqlImplementor.java index c7e10be94c..b8c8001d16 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/rel2sql/SqlImplementor.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/rel2sql/SqlImplementor.java @@ -38,11 +38,30 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; +import java.math.BigDecimal; +import java.util.AbstractList; +import java.util.ArrayDeque; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Deque; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Iterator; +import java.util.LinkedHashMap; +import java.util.LinkedHashSet; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Objects; +import java.util.Set; +import java.util.function.IntFunction; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; import org.apache.calcite.avatica.util.ByteString; import org.apache.calcite.linq4j.Ord; import org.apache.calcite.linq4j.tree.Expressions; +import org.polypheny.db.adapter.jdbc.JdbcEntity; import org.polypheny.db.adapter.jdbc.JdbcScan; -import org.polypheny.db.adapter.jdbc.JdbcTable; import org.polypheny.db.algebra.AlgFieldCollation; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.constant.JoinType; @@ -59,9 +78,40 @@ import org.polypheny.db.nodes.Node; import org.polypheny.db.nodes.Operator; import org.polypheny.db.prepare.AlgOptEntityImpl; -import org.polypheny.db.rex.*; -import org.polypheny.db.sql.language.*; +import org.polypheny.db.rex.RexCall; +import org.polypheny.db.rex.RexCorrelVariable; +import org.polypheny.db.rex.RexDynamicParam; +import org.polypheny.db.rex.RexFieldAccess; +import org.polypheny.db.rex.RexFieldCollation; +import org.polypheny.db.rex.RexInputRef; +import org.polypheny.db.rex.RexLiteral; +import org.polypheny.db.rex.RexLocalRef; +import org.polypheny.db.rex.RexNode; +import org.polypheny.db.rex.RexOver; +import org.polypheny.db.rex.RexPatternFieldRef; +import org.polypheny.db.rex.RexProgram; +import org.polypheny.db.rex.RexSubQuery; +import org.polypheny.db.rex.RexWindow; +import org.polypheny.db.rex.RexWindowBound; +import org.polypheny.db.sql.language.SqlAggFunction; +import org.polypheny.db.sql.language.SqlBasicCall; +import org.polypheny.db.sql.language.SqlBinaryOperator; +import 
org.polypheny.db.sql.language.SqlCall; +import org.polypheny.db.sql.language.SqlDialect; import org.polypheny.db.sql.language.SqlDialect.IntervalParameterStrategy; +import org.polypheny.db.sql.language.SqlDynamicParam; +import org.polypheny.db.sql.language.SqlIdentifier; +import org.polypheny.db.sql.language.SqlIntervalQualifier; +import org.polypheny.db.sql.language.SqlJoin; +import org.polypheny.db.sql.language.SqlLiteral; +import org.polypheny.db.sql.language.SqlMatchRecognize; +import org.polypheny.db.sql.language.SqlNode; +import org.polypheny.db.sql.language.SqlNodeList; +import org.polypheny.db.sql.language.SqlOperator; +import org.polypheny.db.sql.language.SqlSelect; +import org.polypheny.db.sql.language.SqlSelectKeyword; +import org.polypheny.db.sql.language.SqlSetOperator; +import org.polypheny.db.sql.language.SqlWindow; import org.polypheny.db.sql.language.fun.SqlCase; import org.polypheny.db.sql.language.fun.SqlSumEmptyIsZeroAggFunction; import org.polypheny.db.sql.language.validate.SqlValidatorUtil; @@ -69,13 +119,11 @@ import org.polypheny.db.type.IntervalPolyType; import org.polypheny.db.type.PolyType; import org.polypheny.db.type.PolyTypeFamily; -import org.polypheny.db.util.*; - -import javax.annotation.Nonnull; -import java.math.BigDecimal; -import java.util.*; -import java.util.function.IntFunction; -import java.util.stream.Collectors; +import org.polypheny.db.util.DateString; +import org.polypheny.db.util.TimeString; +import org.polypheny.db.util.TimestampString; +import org.polypheny.db.util.Util; +import org.polypheny.db.util.ValidatorUtil; /** @@ -146,13 +194,13 @@ public Result setOpToSql( SqlSetOperator operator, AlgNode alg ) { final Result result = visitChild( input.i, input.e ); if ( node == null ) { if ( input.getValue() instanceof JdbcScan ) { - node = result.asSelect( ((JdbcTable) ((AlgOptEntityImpl) input.getValue().getTable()).getTable()).getNodeList() ); + node = result.asSelect( ((JdbcEntity) ((AlgOptEntityImpl) input.getValue().getTable()).getEntity()).getNodeList() ); } else { node = result.asSelect(); } } else { if ( input.getValue() instanceof JdbcScan ) { - node = (SqlNode) operator.createCall( POS, node, result.asSelect( ((JdbcTable) ((AlgOptEntityImpl) input.getValue().getTable()).getTable()).getNodeList() ) ); + node = (SqlNode) operator.createCall( POS, node, result.asSelect( ((JdbcEntity) ((AlgOptEntityImpl) input.getValue().getTable()).getEntity()).getNodeList() ) ); } else { node = (SqlNode) operator.createCall( POS, node, result.asSelect() ); } @@ -1128,7 +1176,7 @@ && hasNestedAggregations( (LogicalAggregate) alg ) ) { select = subSelect(); } else { if ( explicitColumnNames && alg.getInputs().size() == 1 && alg.getInput( 0 ) instanceof JdbcScan ) { - select = asSelect( ((JdbcTable) ((AlgOptEntityImpl) alg.getInput( 0 ).getTable()).getTable()).getNodeList() ); + select = asSelect( ((JdbcEntity) ((AlgOptEntityImpl) alg.getInput( 0 ).getTable()).getEntity()).getNodeList() ); } else { select = asSelect(); } diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java index 7dbe01e99f..58783239e7 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java @@ -113,8 +113,8 @@ protected ConnectionFactory 
createConnectionFactory( final Map s @Override - public void createNewSchema( SchemaPlus rootSchema, String name ) { - currentJdbcSchema = JdbcSchema.create( rootSchema, name, connectionFactory, dialect, this ); + public void createNewSchema( SchemaPlus rootSchema, String name, Long id ) { + currentJdbcSchema = JdbcSchema.create( id, rootSchema, name, connectionFactory, dialect, this ); } diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/stores/AbstractJdbcStore.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/stores/AbstractJdbcStore.java index 2c00eb4f27..a2c98987fd 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/stores/AbstractJdbcStore.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/stores/AbstractJdbcStore.java @@ -109,8 +109,8 @@ protected void registerJdbcInformation() { @Override - public void createNewSchema( SchemaPlus rootSchema, String name ) { - currentJdbcSchema = JdbcSchema.create( rootSchema, name, connectionFactory, dialect, this ); + public void createNewSchema( SchemaPlus rootSchema, String name, Long id ) { + currentJdbcSchema = JdbcSchema.create( id, rootSchema, name, connectionFactory, dialect, this ); } diff --git a/plugins/jdbc-adapter-framework/src/test/java/org/polypheny/db/adapter/jdbc/rel2sql/AlgToSqlConverterTest.java b/plugins/jdbc-adapter-framework/src/test/java/org/polypheny/db/adapter/jdbc/rel2sql/AlgToSqlConverterTest.java index 1189c6ad58..10345f3c1d 100644 --- a/plugins/jdbc-adapter-framework/src/test/java/org/polypheny/db/adapter/jdbc/rel2sql/AlgToSqlConverterTest.java +++ b/plugins/jdbc-adapter-framework/src/test/java/org/polypheny/db/adapter/jdbc/rel2sql/AlgToSqlConverterTest.java @@ -34,7 +34,12 @@ package org.polypheny.db.adapter.jdbc.rel2sql; +import static org.hamcrest.CoreMatchers.is; +import static org.junit.Assert.assertThat; + import com.google.common.collect.ImmutableList; +import java.util.List; +import java.util.function.Function; import org.junit.Ignore; import org.junit.Test; import org.polypheny.db.adapter.DataContext.SlimDataContext; @@ -67,21 +72,30 @@ import org.polypheny.db.schema.SchemaPlus; import org.polypheny.db.schema.ScottSchema; import org.polypheny.db.sql.SqlLanguageDependent; -import org.polypheny.db.sql.language.*; +import org.polypheny.db.sql.language.SqlCall; +import org.polypheny.db.sql.language.SqlDialect; import org.polypheny.db.sql.language.SqlDialect.Context; import org.polypheny.db.sql.language.SqlDialect.DatabaseProduct; -import org.polypheny.db.sql.language.dialect.*; +import org.polypheny.db.sql.language.SqlNode; +import org.polypheny.db.sql.language.SqlSelect; +import org.polypheny.db.sql.language.SqlWriter; +import org.polypheny.db.sql.language.dialect.HiveSqlDialect; +import org.polypheny.db.sql.language.dialect.JethroDataSqlDialect; +import org.polypheny.db.sql.language.dialect.MysqlSqlDialect; +import org.polypheny.db.sql.language.dialect.PolyphenyDbSqlDialect; +import org.polypheny.db.sql.language.dialect.PostgresqlSqlDialect; import org.polypheny.db.sql.util.PlannerImplMock; import org.polypheny.db.test.Matchers; -import org.polypheny.db.tools.*; +import org.polypheny.db.tools.AlgBuilder; +import org.polypheny.db.tools.FrameworkConfig; +import org.polypheny.db.tools.Frameworks; +import org.polypheny.db.tools.Planner; +import org.polypheny.db.tools.Program; +import org.polypheny.db.tools.Programs; +import org.polypheny.db.tools.RuleSet; +import 
org.polypheny.db.tools.RuleSets; import org.polypheny.db.type.PolyType; -import java.util.List; -import java.util.function.Function; - -import static org.hamcrest.CoreMatchers.is; -import static org.junit.Assert.assertThat; - /** * Tests for {@link AlgToSqlConverter}. @@ -108,7 +122,7 @@ public class AlgToSqlConverterTest extends SqlLanguageDependent { private Sql sql( String sql ) { final SchemaPlus schema = Frameworks .createRootSchema( true ) - .add( "foodmart", new ReflectiveSchema( new FoodmartSchema() ), NamespaceType.RELATIONAL ); + .add( "foodmart", new ReflectiveSchema( new FoodmartSchema(), -1 ), NamespaceType.RELATIONAL ); return new Sql( schema, sql, PolyphenyDbSqlDialect.DEFAULT, DEFAULT_REL_CONFIG, ImmutableList.of() ); } @@ -164,7 +178,7 @@ private static MysqlSqlDialect mySqlDialect( NullCollation nullCollation ) { */ private static AlgBuilder algBuilder() { // Creates a config based on the "scott" schema. - final SchemaPlus schema = Frameworks.createRootSchema( true ).add( "scott", new ReflectiveSchema( new ScottSchema() ), NamespaceType.RELATIONAL ); + final SchemaPlus schema = Frameworks.createRootSchema( true ).add( "scott", new ReflectiveSchema( new ScottSchema(), -1 ), NamespaceType.RELATIONAL ); Frameworks.ConfigBuilder configBuilder = Frameworks.newConfigBuilder() .parserConfig( Parser.ParserConfig.DEFAULT ) .defaultSchema( schema ) diff --git a/plugins/jdbc-adapter-framework/src/test/java/org/polypheny/db/adapter/jdbc/rel2sql/PlannerTest.java b/plugins/jdbc-adapter-framework/src/test/java/org/polypheny/db/adapter/jdbc/rel2sql/PlannerTest.java index cb5fa52495..109ce8edb5 100644 --- a/plugins/jdbc-adapter-framework/src/test/java/org/polypheny/db/adapter/jdbc/rel2sql/PlannerTest.java +++ b/plugins/jdbc-adapter-framework/src/test/java/org/polypheny/db/adapter/jdbc/rel2sql/PlannerTest.java @@ -109,7 +109,7 @@ private Planner getPlanner( List traitDefs, Program... programs ) { private Planner getPlanner( List traitDefs, ParserConfig parserConfig, Program... 
programs ) { final SchemaPlus schema = Frameworks .createRootSchema( true ) - .add( "hr", new ReflectiveSchema( new HrSchema() ), NamespaceType.RELATIONAL ); + .add( "hr", new ReflectiveSchema( new HrSchema(), -1 ), NamespaceType.RELATIONAL ); final FrameworkConfig config = Frameworks.newConfigBuilder() .parserConfig( parserConfig ) diff --git a/plugins/jdbc-adapter-framework/src/test/java/org/polypheny/db/adapter/jdbc/rel2sql/RelToSqlConverterStructsTest.java b/plugins/jdbc-adapter-framework/src/test/java/org/polypheny/db/adapter/jdbc/rel2sql/RelToSqlConverterStructsTest.java index 3e543bad2f..1c4673bf3c 100644 --- a/plugins/jdbc-adapter-framework/src/test/java/org/polypheny/db/adapter/jdbc/rel2sql/RelToSqlConverterStructsTest.java +++ b/plugins/jdbc-adapter-framework/src/test/java/org/polypheny/db/adapter/jdbc/rel2sql/RelToSqlConverterStructsTest.java @@ -36,6 +36,9 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; +import java.util.Collection; +import java.util.List; +import java.util.Set; import org.apache.calcite.linq4j.tree.Expression; import org.junit.Test; import org.polypheny.db.algebra.AlgCollation; @@ -47,30 +50,33 @@ import org.polypheny.db.catalog.Catalog.NamespaceType; import org.polypheny.db.nodes.Call; import org.polypheny.db.nodes.Node; -import org.polypheny.db.schema.*; +import org.polypheny.db.schema.AbstractPolyphenyDbSchema; +import org.polypheny.db.schema.Entity; +import org.polypheny.db.schema.Function; +import org.polypheny.db.schema.Namespace; +import org.polypheny.db.schema.SchemaPlus; +import org.polypheny.db.schema.SchemaVersion; +import org.polypheny.db.schema.Statistic; +import org.polypheny.db.schema.TableType; import org.polypheny.db.sql.language.dialect.PolyphenyDbSqlDialect; import org.polypheny.db.type.PolyType; import org.polypheny.db.util.ImmutableBitSet; -import java.util.Collection; -import java.util.List; -import java.util.Set; - /** * Tests for {@link AlgToSqlConverter} on a schema that has nested structures of multiple levels. 
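 *
 * The schema consists of a single entity whose row type nests records three levels deep
 * (see the inline comment on the test entity below).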
*/ public class RelToSqlConverterStructsTest { - private static final Schema SCHEMA = new Schema() { + private static final Namespace NAMESPACE = new Namespace() { @Override - public Table getTable( String name ) { - return TABLE; + public Entity getEntity( String name ) { + return ENTITY; } @Override - public Set getTableNames() { + public Set getEntityNames() { return ImmutableSet.of( "myTable" ); } @@ -100,13 +106,13 @@ public Set getFunctionNames() { @Override - public Schema getSubSchema( String name ) { + public Namespace getSubNamespace( String name ) { return null; } @Override - public Set getSubSchemaNames() { + public Set getSubNamespaceNames() { return ImmutableSet.of(); } @@ -124,14 +130,14 @@ public boolean isMutable() { @Override - public Schema snapshot( SchemaVersion version ) { + public Namespace snapshot( SchemaVersion version ) { return null; } }; // Table schema is as following: // { a: INT, n1: { n11: { b INT }, n12: {c: Int } }, n2: { d: Int }, e: Int } - private static final Table TABLE = new Table() { + private static final Entity ENTITY = new Entity() { @Override public AlgDataType getRowType( AlgDataTypeFactory typeFactory ) { final AlgDataType aType = typeFactory.createPolyType( PolyType.BIGINT ); @@ -156,13 +162,13 @@ public Statistic getStatistic() { @Override - public Long getTableId() { + public Long getId() { return null; } @Override - public Schema.TableType getJdbcTableType() { + public TableType getJdbcTableType() { return null; } @@ -210,7 +216,7 @@ public AlgDistribution getDistribution() { } }; - private static final SchemaPlus ROOT_SCHEMA = AbstractPolyphenyDbSchema.createRootSchema( "" ).add( "myDb", SCHEMA, NamespaceType.RELATIONAL ).plus(); + private static final SchemaPlus ROOT_SCHEMA = AbstractPolyphenyDbSchema.createRootSchema( "" ).add( "myDb", NAMESPACE, NamespaceType.RELATIONAL ).plus(); private AlgToSqlConverterTest.Sql sql( String sql ) { diff --git a/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/sources/MonetdbSource.java b/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/sources/MonetdbSource.java index d29bbb3589..67fe8fb43f 100644 --- a/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/sources/MonetdbSource.java +++ b/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/sources/MonetdbSource.java @@ -34,8 +34,8 @@ import org.polypheny.db.catalog.entity.CatalogColumnPlacement; import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; import org.polypheny.db.catalog.entity.CatalogTable; -import org.polypheny.db.schema.Schema; -import org.polypheny.db.schema.Table; +import org.polypheny.db.schema.Entity; +import org.polypheny.db.schema.Namespace; import org.polypheny.db.sql.language.SqlDialect; import org.polypheny.db.sql.language.dialect.MonetdbSqlDialect; @@ -94,13 +94,13 @@ protected ConnectionFactory createConnectionFactory( final Map s @Override - public Table createTableSchema( CatalogTable catalogTable, List columnPlacementsOnStore, CatalogPartitionPlacement partitionPlacement ) { + public Entity createTableSchema( CatalogTable catalogTable, List columnPlacementsOnStore, CatalogPartitionPlacement partitionPlacement ) { return currentJdbcSchema.createJdbcTable( catalogTable, columnPlacementsOnStore, partitionPlacement ); } @Override - public Schema getCurrentSchema() { + public Namespace getCurrentSchema() { return currentJdbcSchema; } diff --git 
a/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/stores/MonetdbStore.java b/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/stores/MonetdbStore.java index 46f6b3779a..07fbb3cf0d 100644 --- a/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/stores/MonetdbStore.java +++ b/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/stores/MonetdbStore.java @@ -46,8 +46,8 @@ import org.polypheny.db.docker.DockerManager.ContainerBuilder; import org.polypheny.db.plugins.PolyPluginManager; import org.polypheny.db.prepare.Context; -import org.polypheny.db.schema.Schema; -import org.polypheny.db.schema.Table; +import org.polypheny.db.schema.Entity; +import org.polypheny.db.schema.Namespace; import org.polypheny.db.sql.language.dialect.MonetdbSqlDialect; import org.polypheny.db.transaction.PUID; import org.polypheny.db.transaction.PUID.Type; @@ -229,13 +229,13 @@ public void updateColumnType( Context context, CatalogColumnPlacement columnPlac @Override - public Table createTableSchema( CatalogTable catalogTable, List columnPlacementsOnStore, CatalogPartitionPlacement partitionPlacement ) { + public Entity createTableSchema( CatalogTable catalogTable, List columnPlacementsOnStore, CatalogPartitionPlacement partitionPlacement ) { return currentJdbcSchema.createJdbcTable( catalogTable, columnPlacementsOnStore, partitionPlacement ); } @Override - public Schema getCurrentSchema() { + public Namespace getCurrentSchema() { return currentJdbcSchema; } diff --git a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoEntity.java b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoEntity.java index a9719dd6c5..5e9c6509e6 100644 --- a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoEntity.java +++ b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoEntity.java @@ -44,7 +44,6 @@ import com.mongodb.client.model.DeleteOneModel; import com.mongodb.client.model.WriteModel; import java.util.ArrayList; -import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.Iterator; @@ -69,7 +68,7 @@ import org.bson.json.JsonWriterSettings; import org.polypheny.db.adapter.AdapterManager; import org.polypheny.db.adapter.DataContext; -import org.polypheny.db.adapter.java.AbstractQueryableTable; +import org.polypheny.db.adapter.java.AbstractQueryableEntity; import org.polypheny.db.adapter.mongodb.MongoPlugin.MongoStore; import org.polypheny.db.adapter.mongodb.util.MongoDynamic; import org.polypheny.db.algebra.AlgNode; @@ -93,9 +92,9 @@ import org.polypheny.db.prepare.Prepare.CatalogReader; import org.polypheny.db.rex.RexNode; import org.polypheny.db.schema.ModifiableCollection; -import org.polypheny.db.schema.ModifiableTable; +import org.polypheny.db.schema.ModifiableEntity; import org.polypheny.db.schema.SchemaPlus; -import org.polypheny.db.schema.TranslatableTable; +import org.polypheny.db.schema.TranslatableEntity; import org.polypheny.db.schema.impl.AbstractTableQueryable; import org.polypheny.db.transaction.PolyXid; import org.polypheny.db.util.BsonUtil; @@ -106,7 +105,7 @@ * Table based on a MongoDB collection. 
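 *
 * With this refactoring the entity serves both data models: it is a relational
 * {@link ModifiableEntity} and at the same time a document {@link ModifiableCollection},
 * so one physical collection can back either model.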
*/ @Slf4j -public class MongoEntity extends AbstractQueryableTable implements TranslatableTable, ModifiableTable, ModifiableCollection { +public class MongoEntity extends AbstractQueryableEntity implements TranslatableEntity, ModifiableEntity, ModifiableCollection { @Getter private final String collectionName; @@ -140,7 +139,7 @@ public class MongoEntity extends AbstractQueryableTable implements TranslatableT this.mongoSchema = schema; this.collection = schema.database.getCollection( collectionName ); this.storeId = storeId; - this.tableId = catalogTable.id; + this.id = catalogTable.id; } @@ -154,7 +153,7 @@ public MongoEntity( CatalogCollection catalogEntity, MongoSchema schema, AlgProt this.mongoSchema = schema; this.collection = schema.database.getCollection( collectionName ); this.storeId = adapter; - this.tableId = catalogEntity.id; + this.id = catalogEntity.id; } diff --git a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoPlugin.java b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoPlugin.java index 9cfefdd96f..b8a7b9a3bb 100644 --- a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoPlugin.java +++ b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoPlugin.java @@ -77,9 +77,9 @@ import org.polypheny.db.docker.DockerManager.Container; import org.polypheny.db.docker.DockerManager.ContainerBuilder; import org.polypheny.db.prepare.Context; -import org.polypheny.db.schema.Schema; +import org.polypheny.db.schema.Entity; +import org.polypheny.db.schema.Namespace; import org.polypheny.db.schema.SchemaPlus; -import org.polypheny.db.schema.Table; import org.polypheny.db.transaction.PolyXid; import org.polypheny.db.type.PolyType; import org.polypheny.db.type.PolyTypeFamily; @@ -227,24 +227,24 @@ public void resetDockerConnection( ConfigDocker c ) { @Override - public void createNewSchema( SchemaPlus rootSchema, String name ) { + public void createNewSchema( SchemaPlus rootSchema, String name, Long id ) { String[] splits = name.split( "_" ); String database = name; if ( splits.length >= 2 ) { database = splits[0] + "_" + splits[1]; } - currentSchema = new MongoSchema( database, this.client, transactionProvider, this ); + currentSchema = new MongoSchema( id, database, this.client, transactionProvider, this ); } @Override - public Table createTableSchema( CatalogTable combinedTable, List columnPlacementsOnStore, CatalogPartitionPlacement partitionPlacement ) { + public Entity createTableSchema( CatalogTable combinedTable, List columnPlacementsOnStore, CatalogPartitionPlacement partitionPlacement ) { return currentSchema.createTable( combinedTable, columnPlacementsOnStore, getAdapterId(), partitionPlacement ); } @Override - public Schema getCurrentSchema() { + public Namespace getCurrentSchema() { return this.currentSchema; } @@ -261,7 +261,7 @@ public void truncate( Context context, CatalogTable table ) { @Override - public Table createDocumentSchema( CatalogCollection catalogEntity, CatalogCollectionPlacement partitionPlacement ) { + public Entity createDocumentSchema( CatalogCollection catalogEntity, CatalogCollectionPlacement partitionPlacement ) { return this.currentSchema.createCollection( catalogEntity, partitionPlacement ); } @@ -309,7 +309,7 @@ public void createTable( Context context, CatalogTable catalogTable, List commitAll(); if ( this.currentSchema == null ) { - createNewSchema( null, Catalog.getInstance().getSchema( catalogTable.namespaceId ).getName() ); + createNewSchema( 
null, catalogTable.getNamespaceName(), catalogTable.namespaceId ); } for ( long partitionId : partitionIds ) { @@ -340,7 +340,7 @@ public void createCollection( Context prepareContext, CatalogCollection catalogC commitAll(); if ( this.currentSchema == null ) { - createNewSchema( null, Catalog.getInstance().getSchema( catalogCollection.namespaceId ).getName() ); + createNewSchema( null, catalogCollection.getNamespaceName(), catalogCollection.namespaceId ); } String physicalCollectionName = getPhysicalTableName( catalogCollection.id, adapterId ); diff --git a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoRowType.java b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoRowType.java index 4c1ceaa36b..e015069c6b 100644 --- a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoRowType.java +++ b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoRowType.java @@ -33,7 +33,7 @@ public class MongoRowType extends AlgRecordType { public MongoRowType( StructKind kind, List fields, MongoEntity mongoEntity ) { super( kind, fields ); - Catalog.getInstance().getColumns( mongoEntity.getTableId() ).forEach( column -> { + Catalog.getInstance().getColumns( mongoEntity.getId() ).forEach( column -> { idToName.put( column.id, column.name ); nameToId.put( column.name, column.id ); } ); diff --git a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoRules.java b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoRules.java index 003744083c..402472f1d8 100644 --- a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoRules.java +++ b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoRules.java @@ -104,8 +104,8 @@ import org.polypheny.db.rex.RexLiteral; import org.polypheny.db.rex.RexNode; import org.polypheny.db.rex.RexVisitorImpl; -import org.polypheny.db.schema.ModifiableTable; -import org.polypheny.db.schema.Table; +import org.polypheny.db.schema.Entity; +import org.polypheny.db.schema.ModifiableEntity; import org.polypheny.db.schema.document.DocumentRules; import org.polypheny.db.sql.language.fun.SqlDatetimePlusOperator; import org.polypheny.db.sql.language.fun.SqlDatetimeSubtractionOperator; @@ -914,7 +914,7 @@ public AlgNode visit( AlgNode other ) { @Override public AlgNode convert( AlgNode alg ) { final Modify modify = (Modify) alg; - final ModifiableTable modifiableTable = modify.getTable().unwrap( ModifiableTable.class ); + final ModifiableEntity modifiableTable = modify.getTable().unwrap( ModifiableEntity.class ); if ( modifiableTable == null ) { return null; } @@ -951,7 +951,7 @@ private static class MongoDocumentModificationRule extends MongoConverterRule { @Override public AlgNode convert( AlgNode alg ) { final DocumentModify modify = (DocumentModify) alg; - final ModifiableTable modifiableCollection = modify.getCollection().unwrap( ModifiableTable.class ); + final ModifiableEntity modifiableCollection = modify.getCollection().unwrap( ModifiableEntity.class ); if ( modifiableCollection == null ) { return null; } @@ -1021,13 +1021,13 @@ public AlgNode copy( AlgTraitSet traitSet, List inputs ) { @Override public void implement( Implementor implementor ) { implementor.setDML( true ); - Table preTable = table.getTable(); + Entity preEntity = table.getTable(); this.implementor = implementor; - if ( !(preTable instanceof MongoEntity) ) { + if ( !(preEntity instanceof MongoEntity) ) { throw new 
RuntimeException( "There seems to be a problem with the correct costs for one of stores." ); } - implementor.mongoEntity = (MongoEntity) preTable; + implementor.mongoEntity = (MongoEntity) preEntity; implementor.table = table; implementor.setOperation( this.getOperation() ); diff --git a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoSchema.java b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoSchema.java index 28c4c3dcd2..c9e07599dc 100644 --- a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoSchema.java +++ b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoSchema.java @@ -38,6 +38,9 @@ import com.mongodb.client.MongoDatabase; import com.mongodb.client.gridfs.GridFSBucket; import com.mongodb.client.gridfs.GridFSBuckets; +import java.util.HashMap; +import java.util.List; +import java.util.Map; import lombok.Getter; import org.polypheny.db.adapter.mongodb.MongoPlugin.MongoStore; import org.polypheny.db.algebra.type.AlgDataType; @@ -45,22 +48,24 @@ import org.polypheny.db.algebra.type.AlgDataTypeImpl; import org.polypheny.db.algebra.type.AlgDataTypeSystem; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.entity.*; +import org.polypheny.db.catalog.entity.CatalogCollection; +import org.polypheny.db.catalog.entity.CatalogCollectionPlacement; +import org.polypheny.db.catalog.entity.CatalogColumn; +import org.polypheny.db.catalog.entity.CatalogColumnPlacement; +import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; +import org.polypheny.db.catalog.entity.CatalogTable; import org.polypheny.db.plan.Convention; -import org.polypheny.db.schema.Table; -import org.polypheny.db.schema.impl.AbstractSchema; +import org.polypheny.db.schema.Entity; +import org.polypheny.db.schema.Namespace.Schema; +import org.polypheny.db.schema.impl.AbstractNamespace; import org.polypheny.db.type.PolyType; import org.polypheny.db.type.PolyTypeFactoryImpl; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - /** * Schema mapped onto a directory of MONGO files. Each table in the schema is a MONGO file in that directory. 
*/ -public class MongoSchema extends AbstractSchema { +public class MongoSchema extends AbstractNamespace implements Schema { @Getter final MongoDatabase database; @@ -69,10 +74,10 @@ public class MongoSchema extends AbstractSchema { private final Convention convention = MongoAlg.CONVENTION; @Getter - private final Map tableMap = new HashMap<>(); + private final Map tableMap = new HashMap<>(); @Getter - private final Map collectionMap = new HashMap<>(); + private final Map collectionMap = new HashMap<>(); private final MongoClient connection; private final TransactionProvider transactionProvider; @Getter @@ -88,8 +93,8 @@ public class MongoSchema extends AbstractSchema { * @param transactionProvider * @param mongoStore */ - public MongoSchema( String database, MongoClient connection, TransactionProvider transactionProvider, MongoStore mongoStore ) { - super(); + public MongoSchema( long id, String database, MongoClient connection, TransactionProvider transactionProvider, MongoStore mongoStore ) { + super( id ); this.transactionProvider = transactionProvider; this.connection = connection; this.database = this.connection.getDatabase( database ); @@ -119,7 +124,7 @@ public MongoEntity createTable( CatalogTable catalogTable, List operand ) { - return schema; + return namespace; } @@ -181,7 +181,7 @@ public class MongoAdapterTest implements SchemaFactory { .explainContains( "PLAN=MongoToEnumerableConverter\n" + " MongoSort(sort0=[$4], dir0=[ASC])\n" + " MongoProject(CITY=[CAST(ITEM($0, 'city')):VARCHAR(20)], LONGITUDE=[CAST(ITEM(ITEM($0, 'loc'), 0)):FLOAT], LATITUDE=[CAST(ITEM(ITEM($0, 'loc'), 1)):FLOAT], POP=[CAST(ITEM($0, 'pop')):INTEGER], STATE=[CAST(ITEM($0, 'state')):VARCHAR(2)], ID=[CAST(ITEM($0, '_id')):VARCHAR(5)])\n" - + " MongoScan(table=[[mongo_raw, zips]])" ); + + " MongoScan(entity=[[mongo_raw, zips]])" ); } @@ -258,7 +258,7 @@ public class MongoAdapterTest implements SchemaFactory { + " MongoSort(sort0=[$4], sort1=[$5], dir0=[ASC], dir1=[ASC])\n" + " MongoProject(CITY=[CAST(ITEM($0, 'city')):VARCHAR(20)], LONGITUDE=[CAST(ITEM(ITEM($0, 'loc'), 0)):FLOAT], LATITUDE=[CAST(ITEM(ITEM($0, 'loc'), 1)):FLOAT], POP=[CAST(ITEM($0, 'pop')):INTEGER], STATE=[CAST(ITEM($0, 'state')):VARCHAR(2)], ID=[CAST(ITEM($0, '_id')):VARCHAR(5)])\n" + " MongoFilter(condition=[AND(=(CAST(ITEM($0, 'city')):VARCHAR(20), 'SPRINGFIELD'), >=(CAST(ITEM($0, '_id')):VARCHAR(5), '70000'))])\n" - + " MongoScan(table=[[mongo_raw, zips]])" ); + + " MongoScan(entity=[[mongo_raw, zips]])" ); } @@ -287,10 +287,10 @@ public class MongoAdapterTest implements SchemaFactory { .explainContains( "PLAN=EnumerableUnion(all=[true])\n" + " MongoToEnumerableConverter\n" + " MongoProject(product_id=[CAST(ITEM($0, 'product_id')):DOUBLE])\n" - + " MongoScan(table=[[_foodmart, sales_fact_1997]])\n" + + " MongoScan(entity=[[_foodmart, sales_fact_1997]])\n" + " MongoToEnumerableConverter\n" + " MongoProject(product_id=[CAST(ITEM($0, 'product_id')):DOUBLE])\n" - + " MongoScan(table=[[_foodmart, sales_fact_1998]])" ) + + " MongoScan(entity=[[_foodmart, sales_fact_1998]])" ) .limit( 2 ) .returns( MongoAssertions.checkResultUnordered( "product_id=337", "product_id=1512" ) ); } @@ -335,7 +335,7 @@ public class MongoAdapterTest implements SchemaFactory { .explainContains( "PLAN=MongoToEnumerableConverter\n" + " MongoProject(warehouse_id=[CAST(ITEM($0, 'warehouse_id')):DOUBLE], warehouse_state_province=[CAST(ITEM($0, 'warehouse_state_province')):VARCHAR(20)])\n" + " MongoFilter(condition=[=(CAST(ITEM($0, 'warehouse_state_province')):VARCHAR(20), 
'CA')])\n" - + " MongoScan(table=[[mongo_raw, warehouse]])" ) + + " MongoScan(entity=[[mongo_raw, warehouse]])" ) .returns( MongoAssertions.checkResultUnordered( "warehouse_id=6; warehouse_state_province=CA", @@ -421,7 +421,7 @@ public class MongoAdapterTest implements SchemaFactory { .returns( String.format( Locale.ROOT, "EXPR$0=%d\n", ZIPS_SIZE ) ) .explainContains( "PLAN=MongoToEnumerableConverter\n" + " MongoAggregate(group=[{}], EXPR$0=[COUNT()])\n" - + " MongoScan(table=[[mongo_raw, zips]])" ) + + " MongoScan(entity=[[mongo_raw, zips]])" ) .queryContains( mongoChecker( "{$group: {_id: {}, 'EXPR$0': {$sum: 1}}}" ) ); } @@ -692,7 +692,7 @@ public class MongoAdapterTest implements SchemaFactory { .explainContains( "PLAN=MongoToEnumerableConverter\n" + " MongoProject(STATE=[CAST(ITEM($0, 'state')):VARCHAR(2)], CITY=[CAST(ITEM($0, 'city')):VARCHAR(20)])\n" + " MongoFilter(condition=[=(CAST(ITEM($0, 'state')):VARCHAR(2), 'CA')])\n" - + " MongoScan(table=[[mongo_raw, zips]])" ); + + " MongoScan(entity=[[mongo_raw, zips]])" ); } diff --git a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql2alg/MqlToAlgConverter.java b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql2alg/MqlToAlgConverter.java index 8323700fe0..e5055f56e7 100644 --- a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql2alg/MqlToAlgConverter.java +++ b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql2alg/MqlToAlgConverter.java @@ -97,7 +97,7 @@ import org.polypheny.db.rex.RexInputRef; import org.polypheny.db.rex.RexLiteral; import org.polypheny.db.rex.RexNode; -import org.polypheny.db.schema.LogicalTable; +import org.polypheny.db.schema.LogicalEntity; import org.polypheny.db.schema.PolyphenyDbSchema.TableEntryImpl; import org.polypheny.db.schema.document.DocumentUtil; import org.polypheny.db.type.PolyType; @@ -343,14 +343,14 @@ private AlgOptEntity getEntity( MqlCollectionStatement query, String dbSchemaNam final Builder fieldInfo = typeFactory.builder(); fieldInfo.add( new AlgDataTypeFieldImpl( "d", 0, typeFactory.createPolyType( PolyType.DOCUMENT ) ) ); AlgDataType rowType = fieldInfo.build(); - CatalogTable catalogTable = Catalog.getInstance().getTable( table.getTable().getTableId() ); + CatalogTable catalogTable = Catalog.getInstance().getTable( table.getTable().getId() ); return AlgOptEntityImpl.create( table.getRelOptSchema(), rowType, new TableEntryImpl( catalogReader.getRootSchema(), names.get( names.size() - 1 ), - new LogicalTable( Catalog.getInstance().getSchemas( Catalog.defaultDatabaseId, new Pattern( dbSchemaName ) ).get( 0 ).id, names.get( 0 ), names.get( names.size() - 1 ), List.of(), List.of(), AlgDataTypeImpl.proto( rowType ), NamespaceType.GRAPH ) ), + new LogicalEntity( Catalog.getInstance().getSchemas( Catalog.defaultDatabaseId, new Pattern( dbSchemaName ) ).get( 0 ).id, names.get( 0 ), names.get( names.size() - 1 ), List.of(), List.of(), AlgDataTypeImpl.proto( rowType ), NamespaceType.GRAPH ) ), catalogTable, 1.0 ); } diff --git a/plugins/mysql-adapter/src/main/java/org/polypheny/db/adapter/jdbc/MysqlSourcePlugin.java b/plugins/mysql-adapter/src/main/java/org/polypheny/db/adapter/jdbc/MysqlSourcePlugin.java index e817d6f23c..d231281faa 100644 --- a/plugins/mysql-adapter/src/main/java/org/polypheny/db/adapter/jdbc/MysqlSourcePlugin.java +++ b/plugins/mysql-adapter/src/main/java/org/polypheny/db/adapter/jdbc/MysqlSourcePlugin.java @@ -34,8 +34,8 @@ import org.polypheny.db.catalog.entity.CatalogColumnPlacement; import 
org.polypheny.db.catalog.entity.CatalogPartitionPlacement; import org.polypheny.db.catalog.entity.CatalogTable; -import org.polypheny.db.schema.Schema; -import org.polypheny.db.schema.Table; +import org.polypheny.db.schema.Entity; +import org.polypheny.db.schema.Namespace; import org.polypheny.db.sql.language.dialect.MysqlSqlDialect; public class MysqlSourcePlugin extends Plugin { @@ -96,13 +96,13 @@ public MysqlSource( int storeId, String uniqueName, final Map se @Override - public Table createTableSchema( CatalogTable catalogTable, List columnPlacementsOnStore, CatalogPartitionPlacement partitionPlacement ) { + public Entity createTableSchema( CatalogTable catalogTable, List columnPlacementsOnStore, CatalogPartitionPlacement partitionPlacement ) { return currentJdbcSchema.createJdbcTable( catalogTable, columnPlacementsOnStore, partitionPlacement ); } @Override - public Schema getCurrentSchema() { + public Namespace getCurrentSchema() { return currentJdbcSchema; } diff --git a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/Neo4jPlugin.java b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/Neo4jPlugin.java index 37de205b9b..3a76668e25 100644 --- a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/Neo4jPlugin.java +++ b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/Neo4jPlugin.java @@ -63,16 +63,15 @@ import org.polypheny.db.catalog.entity.CatalogIndex; import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; import org.polypheny.db.catalog.entity.CatalogTable; -import org.polypheny.db.catalog.exceptions.UnknownSchemaException; import org.polypheny.db.docker.DockerInstance; import org.polypheny.db.docker.DockerManager; import org.polypheny.db.docker.DockerManager.Container; import org.polypheny.db.docker.DockerManager.ContainerBuilder; import org.polypheny.db.prepare.Context; -import org.polypheny.db.schema.Schema; +import org.polypheny.db.schema.Entity; +import org.polypheny.db.schema.Namespace; import org.polypheny.db.schema.SchemaPlus; import org.polypheny.db.schema.Schemas; -import org.polypheny.db.schema.Table; import org.polypheny.db.transaction.PolyXid; import org.polypheny.db.type.PolyType; import org.polypheny.db.type.PolyTypeFamily; @@ -158,7 +157,7 @@ public static class Neo4jStore extends DataStore { private final String pass; private final AuthToken auth; @Getter - private NeoNamespace currentSchema; + private NeoSchema currentSchema; @Getter private NeoGraph currentGraph; @@ -242,7 +241,7 @@ public void createTable( Context context, CatalogTable combinedTable, List Catalog catalog = Catalog.getInstance(); if ( this.currentSchema == null ) { - createNewSchema( null, Catalog.getInstance().getSchema( combinedTable.namespaceId ).getName() ); + createNewSchema( null, combinedTable.getNamespaceName(), combinedTable.namespaceId ); } for ( long partitionId : partitionIds ) { @@ -496,7 +495,7 @@ public void createNewSchema( SchemaPlus rootSchema, String name ) { @Override - public Table createTableSchema( CatalogTable combinedTable, List columnPlacementsOnStore, CatalogPartitionPlacement partitionPlacement ) { + public Entity createTableSchema( CatalogTable combinedTable, List columnPlacementsOnStore, CatalogPartitionPlacement partitionPlacement ) { return this.currentSchema.createTable( combinedTable, columnPlacementsOnStore, partitionPlacement ); } @@ -535,7 +534,7 @@ public void createGraphNamespace( SchemaPlus rootSchema, String name, long id ) @Override - public Schema getCurrentGraphNamespace() 
{ + public Namespace getCurrentGraphNamespace() { return currentGraph; } diff --git a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/NeoEntity.java b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/NeoEntity.java index aff22b21fe..52edeb7df5 100644 --- a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/NeoEntity.java +++ b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/NeoEntity.java @@ -17,7 +17,6 @@ package org.polypheny.db.adapter.neo4j; import java.util.ArrayList; -import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -33,7 +32,7 @@ import org.neo4j.driver.Result; import org.neo4j.driver.Transaction; import org.polypheny.db.adapter.DataContext; -import org.polypheny.db.adapter.java.AbstractQueryableTable; +import org.polypheny.db.adapter.java.AbstractQueryableEntity; import org.polypheny.db.adapter.neo4j.rules.relational.NeoScan; import org.polypheny.db.adapter.neo4j.util.NeoUtil; import org.polypheny.db.algebra.AlgNode; @@ -50,10 +49,10 @@ import org.polypheny.db.plan.Convention; import org.polypheny.db.prepare.Prepare.CatalogReader; import org.polypheny.db.rex.RexNode; -import org.polypheny.db.schema.ModifiableTable; -import org.polypheny.db.schema.QueryableTable; +import org.polypheny.db.schema.ModifiableEntity; +import org.polypheny.db.schema.QueryableEntity; import org.polypheny.db.schema.SchemaPlus; -import org.polypheny.db.schema.TranslatableTable; +import org.polypheny.db.schema.TranslatableEntity; import org.polypheny.db.schema.impl.AbstractTableQueryable; import org.polypheny.db.type.PolyType; import org.polypheny.db.util.Pair; @@ -61,7 +60,7 @@ /** * Relational Neo4j representation of a {@link org.polypheny.db.schema.PolyphenyDbSchema} entity */ -public class NeoEntity extends AbstractQueryableTable implements TranslatableTable, ModifiableTable { +public class NeoEntity extends AbstractQueryableEntity implements TranslatableEntity, ModifiableEntity { public final String physicalEntityName; public final long id; @@ -77,7 +76,7 @@ protected NeoEntity( String physicalEntityName, AlgProtoDataType proto, long id @Override - public Long getTableId() { + public Long getId() { return id; } @@ -102,7 +101,7 @@ public AlgNode toAlg( ToAlgContext context, AlgOptEntity algOptEntity, AlgTraitS @Override - public Collection getModifiableCollection() { + public Collection getModifiableCollection() { throw new UnsupportedOperationException( "getModifiableCollection is not supported by the NEO4j adapter." 
); } @@ -144,14 +143,14 @@ public static class NeoQueryable extends AbstractTableQueryable { @Getter private final NeoEntity entity; - private final NeoNamespace namespace; + private final NeoSchema namespace; private final AlgDataType rowType; - public NeoQueryable( DataContext dataContext, SchemaPlus schema, QueryableTable table, String tableName ) { + public NeoQueryable( DataContext dataContext, SchemaPlus schema, QueryableEntity table, String tableName ) { super( dataContext, schema, table, tableName ); this.entity = (NeoEntity) table; - this.namespace = schema.unwrap( NeoNamespace.class ); + this.namespace = schema.unwrap( NeoSchema.class ); this.rowType = entity.rowType.apply( entity.getTypeFactory() ); } diff --git a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/NeoGraph.java b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/NeoGraph.java index a840000c78..3f51fc07c1 100644 --- a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/NeoGraph.java +++ b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/NeoGraph.java @@ -22,7 +22,6 @@ import java.util.List; import java.util.Map; import java.util.stream.Collectors; -import lombok.Getter; import org.apache.calcite.linq4j.AbstractEnumerable; import org.apache.calcite.linq4j.AbstractQueryable; import org.apache.calcite.linq4j.Enumerable; @@ -57,13 +56,12 @@ import org.polypheny.db.schema.SchemaPlus; import org.polypheny.db.schema.Statistic; import org.polypheny.db.schema.TranslatableGraph; -import org.polypheny.db.schema.graph.Graph; import org.polypheny.db.schema.graph.ModifiableGraph; import org.polypheny.db.schema.graph.PolyEdge; import org.polypheny.db.schema.graph.PolyGraph; import org.polypheny.db.schema.graph.PolyNode; import org.polypheny.db.schema.graph.QueryableGraph; -import org.polypheny.db.schema.impl.AbstractSchema; +import org.polypheny.db.schema.impl.AbstractNamespace; import org.polypheny.db.type.PolyType; import org.polypheny.db.util.Pair; @@ -71,20 +69,18 @@ /** * Graph entity in the Neo4j representation. 
*/ -public class NeoGraph extends AbstractSchema implements ModifiableGraph, TranslatableGraph, QueryableGraph { +public class NeoGraph extends AbstractNamespace implements ModifiableGraph, TranslatableGraph, QueryableGraph { public final String name; public final TransactionProvider transactionProvider; public final Driver db; - @Getter - private final long id; public final String mappingLabel; public final Neo4jStore store; public NeoGraph( String name, TransactionProvider transactionProvider, Driver db, long id, String mappingLabel, Neo4jStore store ) { + super( id ); this.name = name; - this.id = id; this.transactionProvider = transactionProvider; this.db = db; this.mappingLabel = mappingLabel; @@ -105,7 +101,7 @@ public NeoGraph( String name, TransactionProvider transactionProvider, Driver db public LpgModify toModificationAlg( AlgOptCluster cluster, AlgTraitSet traits, - Graph graph, + org.polypheny.db.schema.graph.Graph graph, PolyphenyDbCatalogReader catalogReader, AlgNode input, Operation operation, @@ -147,7 +143,7 @@ public C unwrap( Class aClass ) { @Override - public AlgNode toAlg( ToAlgContext context, Graph graph ) { + public AlgNode toAlg( ToAlgContext context, org.polypheny.db.schema.graph.Graph graph ) { final AlgOptCluster cluster = context.getCluster(); return new NeoLpgScan( cluster, cluster.traitSetOf( NeoConvention.INSTANCE ).replace( ModelTrait.GRAPH ), this ); } @@ -166,7 +162,7 @@ public static class NeoQueryable extends AbstractQueryable { private final DataContext dataContext; - public NeoQueryable( DataContext dataContext, Graph graph ) { + public NeoQueryable( DataContext dataContext, org.polypheny.db.schema.graph.Graph graph ) { this.dataContext = dataContext; this.graph = (NeoGraph) graph; } diff --git a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/NeoNamespace.java b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/NeoSchema.java similarity index 84% rename from plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/NeoNamespace.java rename to plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/NeoSchema.java index 505effa9c1..ea797700a9 100644 --- a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/NeoNamespace.java +++ b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/NeoSchema.java @@ -30,17 +30,17 @@ import org.polypheny.db.catalog.entity.CatalogColumnPlacement; import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; import org.polypheny.db.catalog.entity.CatalogTable; -import org.polypheny.db.schema.Table; -import org.polypheny.db.schema.impl.AbstractSchema; +import org.polypheny.db.schema.Entity; +import org.polypheny.db.schema.Namespace.Schema; +import org.polypheny.db.schema.impl.AbstractNamespace; import org.polypheny.db.type.PolyTypeFactoryImpl; -public class NeoNamespace extends AbstractSchema { +public class NeoSchema extends AbstractNamespace implements Schema { public final Driver graph; public final Neo4jStore store; public final String physicalName; - public final long id; public final Expression rootSchemaRetrieval; public final Session session; public final TransactionProvider transactionProvider; @@ -50,14 +50,14 @@ public class NeoNamespace extends AbstractSchema { * Namespace object for the Neo4j database. 
* * @param db driver reference for the Neo4j database - * @param namespaceId id of the namespace + * @param id id of the namespace */ - public NeoNamespace( Driver db, Expression expression, TransactionProvider transactionProvider, Neo4jStore neo4jStore, long namespaceId ) { + public NeoSchema( Driver db, Expression expression, TransactionProvider transactionProvider, Neo4jStore neo4jStore, long id ) { + super( id ); this.graph = db; this.store = neo4jStore; - this.id = namespaceId; this.rootSchemaRetrieval = expression; - this.physicalName = Neo4jPlugin.getPhysicalNamespaceName( id ); + this.physicalName = Neo4jPlugin.getPhysicalNamespaceName( getId() ); this.session = graph.session(); this.transactionProvider = transactionProvider; } @@ -71,7 +71,7 @@ public NeoNamespace( Driver db, Expression expression, TransactionProvider trans * @param partitionPlacement reference to the partition * @return the created table */ - public Table createTable( CatalogTable combinedTable, List columnPlacementsOnStore, CatalogPartitionPlacement partitionPlacement ) { + public Entity createTable( CatalogTable combinedTable, List columnPlacementsOnStore, CatalogPartitionPlacement partitionPlacement ) { final AlgDataTypeFactory typeFactory = new PolyTypeFactoryImpl( AlgDataTypeSystem.DEFAULT ); final AlgDataTypeFactory.Builder fieldInfo = typeFactory.builder(); diff --git a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/rules/relational/NeoScan.java b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/rules/relational/NeoScan.java index d82853e003..4de637c958 100644 --- a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/rules/relational/NeoScan.java +++ b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/rules/relational/NeoScan.java @@ -51,7 +51,7 @@ public NeoScan( AlgOptCluster cluster, AlgTraitSet traitSet, AlgOptEntity table, @Override public void implement( NeoRelationalImplementor implementor ) { - if ( implementor.getTable() != null && !Objects.equals( table.getTable().getTableId(), implementor.getTable().getTable().getTableId() ) ) { + if ( implementor.getTable() != null && !Objects.equals( table.getTable().getId(), implementor.getTable().getTable().getId() ) ) { handleInsertFromOther( implementor ); return; } diff --git a/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigTable.java b/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigEntity.java similarity index 93% rename from plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigTable.java rename to plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigEntity.java index 5f867bf04d..c4ab3255a0 100644 --- a/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigTable.java +++ b/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigEntity.java @@ -42,8 +42,8 @@ import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptEntity.ToAlgContext; import org.polypheny.db.plan.AlgTraitSet; -import org.polypheny.db.schema.TranslatableTable; -import org.polypheny.db.schema.impl.AbstractTable; +import org.polypheny.db.schema.TranslatableEntity; +import org.polypheny.db.schema.impl.AbstractEntity; /** @@ -53,7 +53,7 @@ * * Only the VARCHAR (CHARARRAY in Pig) type is supported at this point. 
*/ -public class PigTable extends AbstractTable implements TranslatableTable { +public class PigEntity extends AbstractEntity implements TranslatableEntity { private final String filePath; private final String[] fieldNames; @@ -62,7 +62,7 @@ public class PigTable extends AbstractTable implements TranslatableTable { /** - * Creates a PigTable. + * Creates a PigEntity. */ - public PigTable( String filePath, String[] fieldNames ) { + public PigEntity( String filePath, String[] fieldNames ) { this.filePath = filePath; this.fieldNames = fieldNames; } diff --git a/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigScan.java b/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigScan.java index 45de8ab15c..7f2a10ace6 100644 --- a/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigScan.java +++ b/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigScan.java @@ -65,7 +65,7 @@ public PigScan( AlgOptCluster cluster, AlgTraitSet traitSet, AlgOptEntity table @Override public void implement( Implementor implementor ) { - final PigTable pigTable = getPigTable( implementor.getTableName( this ) ); + final PigEntity pigTable = getPigTable( implementor.getTableName( this ) ); final String alias = implementor.getPigRelationAlias( this ); final String schema = '(' + getSchemaForPigStatement( implementor ) + ')'; final String statement = alias + " = LOAD '" + pigTable.getFilePath() + "' USING PigStorage() AS " + schema + ';'; @@ -73,9 +73,9 @@ public void implement( Implementor implementor ) { } - private PigTable getPigTable( String name ) { + private PigEntity getPigTable( String name ) { final PolyphenyDbSchema schema = getTable().unwrap( PolyphenyDbSchema.class ); - return (PigTable) schema.getTable( name ).getTable(); + return (PigEntity) schema.getTable( name ).getTable(); } diff --git a/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigSchema.java b/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigSchema.java index 6c94a4ea56..953178c0fe 100644 --- a/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigSchema.java +++ b/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigSchema.java @@ -36,25 +36,31 @@ import java.util.HashMap; import java.util.Map; -import org.polypheny.db.schema.Table; -import org.polypheny.db.schema.impl.AbstractSchema; +import org.polypheny.db.schema.Entity; +import org.polypheny.db.schema.Namespace.Schema; +import org.polypheny.db.schema.impl.AbstractNamespace; /** * Schema that contains one or more Pig tables. 
*/ -public class PigSchema extends AbstractSchema { +public class PigSchema extends AbstractNamespace implements Schema { - protected final Map<String, Table> tableMap = new HashMap<>(); + protected final Map<String, Entity> tableMap = new HashMap<>(); + + + public PigSchema( long id ) { + super( id ); + } @Override - protected Map<String, Table> getTableMap() { + protected Map<String, Entity> getTableMap() { return tableMap; } - void registerTable( String name, PigTable table ) { + void registerTable( String name, PigEntity table ) { tableMap.put( name, table ); } diff --git a/plugins/pig-adapter/src/test/java/org/polypheny/db/test/PigAlgBuilderStyleTest.java b/plugins/pig-adapter/src/test/java/org/polypheny/db/test/PigAlgBuilderStyleTest.java index 6cc58dd70a..52c2b003d7 100644 --- a/plugins/pig-adapter/src/test/java/org/polypheny/db/test/PigAlgBuilderStyleTest.java +++ b/plugins/pig-adapter/src/test/java/org/polypheny/db/test/PigAlgBuilderStyleTest.java @@ -51,9 +51,9 @@ import org.polypheny.db.adapter.pig.PigAggregate; import org.polypheny.db.adapter.pig.PigAlg; import org.polypheny.db.adapter.pig.PigAlgFactories; +import org.polypheny.db.adapter.pig.PigEntity; import org.polypheny.db.adapter.pig.PigFilter; import org.polypheny.db.adapter.pig.PigRules; -import org.polypheny.db.adapter.pig.PigTable; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.core.JoinAlgType; import org.polypheny.db.algebra.operators.OperatorName; @@ -63,7 +63,7 @@ import org.polypheny.db.languages.OperatorRegistry; import org.polypheny.db.plan.AlgOptPlanner; import org.polypheny.db.plan.AlgOptRule; -import org.polypheny.db.schema.Schema; +import org.polypheny.db.schema.Namespace; import org.polypheny.db.schema.SchemaPlus; import org.polypheny.db.tools.AlgBuilder; import org.polypheny.db.tools.AlgBuilderFactory; @@ -231,8 +231,8 @@ public void testImplWithJoinAndGroupBy() throws Exception { private SchemaPlus createTestSchema() { SchemaPlus result = Frameworks.createRootSchema( false ); - result.add( "t", new PigTable( "build/test-classes/data.txt", new String[]{ "tc0", "tc1" } ) ); - result.add( "s", new PigTable( "build/test-classes/data2.txt", new String[]{ "sc0", "sc1" } ) ); + result.add( "t", new PigEntity( "build/test-classes/data.txt", new String[]{ "tc0", "tc1" } ) ); + result.add( "s", new PigEntity( "build/test-classes/data2.txt", new String[]{ "sc0", "sc1" } ) ); return result; } @@ -278,7 +278,7 @@ private void assertScriptAndResults( String relAliasForStore, String script, Str } - private String getPigScript( AlgNode root, Schema schema ) { + private String getPigScript( AlgNode root, Namespace namespace ) { PigAlg.Implementor impl = new PigAlg.Implementor(); impl.visitChild( 0, root ); return impl.getScript(); diff --git a/plugins/pig-language/src/test/java/org/polypheny/db/test/PigRelBuilderTest.java b/plugins/pig-language/src/test/java/org/polypheny/db/test/PigRelBuilderTest.java index d0cd53513b..ab1bb6cbf6 100644 --- a/plugins/pig-language/src/test/java/org/polypheny/db/test/PigRelBuilderTest.java +++ b/plugins/pig-language/src/test/java/org/polypheny/db/test/PigRelBuilderTest.java @@ -55,7 +55,7 @@ public static Frameworks.ConfigBuilder config() { final SchemaPlus rootSchema = transaction.getSchema().plus(); Frameworks.ConfigBuilder configBuilder = Frameworks.newConfigBuilder() .parserConfig( ParserConfig.DEFAULT ) - .defaultSchema( rootSchema.getSubSchema( transaction.getDefaultSchema().name ) ) + .defaultSchema( rootSchema.getSubNamespace( transaction.getDefaultSchema().name ) ) .traitDefs( (List) null ) .programs( 
Programs.heuristicJoinOrder( Programs.RULE_SET, true, 2 ) ) .prepareContext( new ContextImpl( diff --git a/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java b/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java index f18597a0b7..fea4951046 100644 --- a/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java +++ b/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java @@ -32,8 +32,8 @@ import org.polypheny.db.catalog.entity.CatalogColumnPlacement; import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; import org.polypheny.db.catalog.entity.CatalogTable; -import org.polypheny.db.schema.Schema; -import org.polypheny.db.schema.Table; +import org.polypheny.db.schema.Entity; +import org.polypheny.db.schema.Namespace; import org.polypheny.db.sql.language.dialect.PostgresqlSqlDialect; @@ -85,13 +85,13 @@ public static void register() { @Override - public Table createTableSchema( CatalogTable catalogTable, List columnPlacementsOnStore, CatalogPartitionPlacement partitionPlacement ) { + public Entity createTableSchema( CatalogTable catalogTable, List columnPlacementsOnStore, CatalogPartitionPlacement partitionPlacement ) { return currentJdbcSchema.createJdbcTable( catalogTable, columnPlacementsOnStore, partitionPlacement ); } @Override - public Schema getCurrentSchema() { + public Namespace getCurrentSchema() { return currentJdbcSchema; } diff --git a/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/store/PostgresqlStore.java b/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/store/PostgresqlStore.java index 7745f1a6e5..1abf0cbfc4 100644 --- a/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/store/PostgresqlStore.java +++ b/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/store/PostgresqlStore.java @@ -48,8 +48,8 @@ import org.polypheny.db.docker.DockerManager.ContainerBuilder; import org.polypheny.db.plugins.PolyPluginManager; import org.polypheny.db.prepare.Context; -import org.polypheny.db.schema.Schema; -import org.polypheny.db.schema.Table; +import org.polypheny.db.schema.Entity; +import org.polypheny.db.schema.Namespace; import org.polypheny.db.sql.language.dialect.PostgresqlSqlDialect; import org.polypheny.db.transaction.PUID; import org.polypheny.db.transaction.PUID.Type; @@ -195,13 +195,13 @@ public void updateColumnType( Context context, CatalogColumnPlacement columnPlac @Override - public Table createTableSchema( CatalogTable catalogTable, List columnPlacementsOnStore, CatalogPartitionPlacement partitionPlacement ) { + public Entity createTableSchema( CatalogTable catalogTable, List columnPlacementsOnStore, CatalogPartitionPlacement partitionPlacement ) { return currentJdbcSchema.createJdbcTable( catalogTable, columnPlacementsOnStore, partitionPlacement ); } @Override - public Schema getCurrentSchema() { + public Namespace getCurrentSchema() { return currentJdbcSchema; } diff --git a/plugins/sql-language/src/main/codegen/config.fmpp b/plugins/sql-language/src/main/codegen/config.fmpp index 4e4b549a77..76a663fced 100644 --- a/plugins/sql-language/src/main/codegen/config.fmpp +++ b/plugins/sql-language/src/main/codegen/config.fmpp @@ -22,7 +22,7 @@ data: { # List of additional classes and packages to import. # Example. "org.polypheny.db.sql.*", "java.util.List". 
imports: [ - "org.polypheny.db.schema.ColumnStrategy" + "org.polypheny.db.namespace.ColumnStrategy" "org.polypheny.db.sql.language.SqlAlter" "org.polypheny.db.sql.language.SqlBasicCall" "org.polypheny.db.sql.language.SqlBinaryOperator" diff --git a/plugins/sql-language/src/main/codegen/includes/parserImpls.ftl b/plugins/sql-language/src/main/codegen/includes/parserImpls.ftl index 0292aa3823..3e41c77ed6 100644 --- a/plugins/sql-language/src/main/codegen/includes/parserImpls.ftl +++ b/plugins/sql-language/src/main/codegen/includes/parserImpls.ftl @@ -24,13 +24,13 @@ return SqlShowTables(...) SqlTruncate SqlTruncateTable() : { final Span s; - final SqlIdentifier table; + final SqlIdentifier entity; } { { s = span(); } - table = CompoundIdentifier() +
    entity = CompoundIdentifier() { - return new SqlTruncate(s.end(this), table); + return new SqlTruncate(s.end(this), entity); } } @@ -40,24 +40,24 @@ SqlTruncate SqlTruncateTable() : */ SqlAlterSchema SqlAlterSchema(Span s) : { - final SqlIdentifier schema; + final SqlIdentifier namespace; final SqlIdentifier name; final SqlIdentifier owner; } { - schema = CompoundIdentifier() + namespace = CompoundIdentifier() ( name = CompoundIdentifier() { - return new SqlAlterSchemaRename(s.end(this), schema, name); + return new SqlAlterSchemaRename(s.end(this), namespace, name); } | owner = SimpleIdentifier() { - return new SqlAlterSchemaOwner(s.end(this), schema, owner); + return new SqlAlterSchemaOwner(s.end(this), namespace, owner); } ) } @@ -175,7 +175,7 @@ SqlAlterMaterializedView SqlAlterMaterializedView(Span s) : */ SqlAlterTable SqlAlterTable(Span s) : { - final SqlIdentifier table; + final SqlIdentifier entity; final SqlIdentifier column; final SqlIdentifier name; final SqlIdentifier owner; @@ -215,18 +215,18 @@ SqlAlterTable SqlAlterTable(Span s) : } {
    - table = CompoundIdentifier() + entity = CompoundIdentifier() ( name = SimpleIdentifier() { - return new SqlAlterTableRename(s.end(this), table, name); + return new SqlAlterTableRename(s.end(this), entity, name); } | owner = SimpleIdentifier() { - return new SqlAlterTableOwner(s.end(this), table, owner); + return new SqlAlterTableOwner(s.end(this), entity, owner); } | @@ -234,7 +234,7 @@ SqlAlterTable SqlAlterTable(Span s) : name = SimpleIdentifier() { - return new SqlAlterTableRenameColumn(s.end(this), table, column, name); + return new SqlAlterTableRenameColumn(s.end(this), entity, column, name); } | @@ -264,7 +264,7 @@ SqlAlterTable SqlAlterTable(Span s) : { afterColumn = null; beforeColumn = null; } ) { - return new SqlAlterTableAddColumn(s.end(this), table, name, type, nullable, defaultValue, beforeColumn, afterColumn); + return new SqlAlterTableAddColumn(s.end(this), entity, name, type, nullable, defaultValue, beforeColumn, afterColumn); } | @@ -285,14 +285,14 @@ SqlAlterTable SqlAlterTable(Span s) : { afterColumn = null; beforeColumn = null; } ) { - return new SqlAlterSourceTableAddColumn(s.end(this), table, name, physicalName, defaultValue, beforeColumn, afterColumn); + return new SqlAlterSourceTableAddColumn(s.end(this), entity, name, physicalName, defaultValue, beforeColumn, afterColumn); } ) | column = SimpleIdentifier() { - return new SqlAlterTableDropColumn(s.end(this), table, column); + return new SqlAlterTableDropColumn(s.end(this), entity, column); } | @@ -305,12 +305,12 @@ SqlAlterTable SqlAlterTable(Span s) : } ) { - return new SqlAlterTableAddPrimaryKey(s.end(this), table, columnList); + return new SqlAlterTableAddPrimaryKey(s.end(this), entity, columnList); } | { - return new SqlAlterTableDropPrimaryKey(s.end(this), table); + return new SqlAlterTableDropPrimaryKey(s.end(this), entity); } | @@ -326,7 +326,7 @@ SqlAlterTable SqlAlterTable(Span s) : } ) { - return new SqlAlterTableAddUniqueConstraint(s.end(this), table, constraintName, columnList); + return new SqlAlterTableAddUniqueConstraint(s.end(this), entity, constraintName, columnList); } | @@ -374,14 +374,14 @@ SqlAlterTable SqlAlterTable(Span s) : { onDelete = null; } ) { - return new SqlAlterTableAddForeignKey(s.end(this), table, constraintName, columnList, refTable, referencesList, onUpdate, onDelete); + return new SqlAlterTableAddForeignKey(s.end(this), entity, constraintName, columnList, refTable, referencesList, onUpdate, onDelete); } ) | constraintName = SimpleIdentifier() { - return new SqlAlterTableDropConstraint(s.end(this), table, constraintName); + return new SqlAlterTableDropConstraint(s.end(this), entity, constraintName); } | @@ -389,7 +389,7 @@ SqlAlterTable SqlAlterTable(Span s) : constraintName = SimpleIdentifier() { - return new SqlAlterTableDropForeignKey(s.end(this), table, constraintName); + return new SqlAlterTableDropForeignKey(s.end(this), entity, constraintName); } | @@ -428,7 +428,7 @@ SqlAlterTable SqlAlterTable(Span s) : ] { - return new SqlAlterTableAddPlacement(s.end(this), table, columnList, store, partitionList, partitionNamesList); + return new SqlAlterTableAddPlacement(s.end(this), entity, columnList, store, partitionList, partitionNamesList); } | @@ -437,7 +437,7 @@ SqlAlterTable SqlAlterTable(Span s) : store = SimpleIdentifier() { - return new SqlAlterTableDropPlacement(s.end(this), table, store); + return new SqlAlterTableDropPlacement(s.end(this), entity, store); } | @@ -450,7 +450,7 @@ SqlAlterTable SqlAlterTable(Span s) : store = SimpleIdentifier() { - return new 
SqlAlterTableModifyPlacementAddColumn(s.end(this), table, column, store); + return new SqlAlterTableModifyPlacementAddColumn(s.end(this), entity, column, store); } | @@ -460,7 +460,7 @@ SqlAlterTable SqlAlterTable(Span s) : store = SimpleIdentifier() { - return new SqlAlterTableModifyPlacementDropColumn(s.end(this), table, column, store); + return new SqlAlterTableModifyPlacementDropColumn(s.end(this), entity, column, store); } | columnList = ParenthesizedSimpleIdentifierList() @@ -485,7 +485,7 @@ SqlAlterTable SqlAlterTable(Span s) : ] { - return new SqlAlterTableModifyPlacement(s.end(this), table, columnList, store, partitionList, partitionNamesList); + return new SqlAlterTableModifyPlacement(s.end(this), entity, columnList, store, partitionList, partitionNamesList); } ) @@ -509,7 +509,7 @@ SqlAlterTable SqlAlterTable(Span s) : store = SimpleIdentifier() { - return new SqlAlterTableModifyPartitions(s.end(this), table, store, partitionList, partitionNamesList); + return new SqlAlterTableModifyPartitions(s.end(this), entity, store, partitionList, partitionNamesList); } | @@ -541,18 +541,18 @@ SqlAlterTable SqlAlterTable(Span s) : { storeName = null; } ) { - return new SqlAlterTableAddIndex(s.end(this), table, columnList, unique, indexMethod, indexName, storeName); + return new SqlAlterTableAddIndex(s.end(this), entity, columnList, unique, indexMethod, indexName, storeName); } | indexName = SimpleIdentifier() { - return new SqlAlterTableDropIndex(s.end(this), table, indexName); + return new SqlAlterTableDropIndex(s.end(this), entity, indexName); } | column = SimpleIdentifier() - statement = AlterTableModifyColumn(s, table, column) + statement = AlterTableModifyColumn(s, entity, column) { return statement; } @@ -608,7 +608,7 @@ SqlAlterTable SqlAlterTable(Span s) : rawPartitionInfo.setPartitionNamesList( CoreUtil.toNodeList( partitionNamesList, Identifier.class ) ); rawPartitionInfo.setPartitionQualifierList( SqlUtil.toNodeListList( partitionQualifierList ) ); - return new SqlAlterTableAddPartitions(s.end(this), table, partitionColumn, partitionType, numPartitionGroups, numPartitions, partitionNamesList, partitionQualifierList, rawPartitionInfo); + return new SqlAlterTableAddPartitions(s.end(this), entity, partitionColumn, partitionType, numPartitionGroups, numPartitions, partitionNamesList, partitionQualifierList, rawPartitionInfo); } ) @@ -647,13 +647,13 @@ SqlAlterTable SqlAlterTable(Span s) : ] { rawPartitionInfo = new RawPartitionInformation(); - return new SqlAlterTableAddPartitions(s.end(this), table, partitionColumn, partitionType, numPartitionGroups, numPartitions, partitionNamesList, partitionQualifierList, rawPartitionInfo); + return new SqlAlterTableAddPartitions(s.end(this), entity, partitionColumn, partitionType, numPartitionGroups, numPartitions, partitionNamesList, partitionQualifierList, rawPartitionInfo); } | { - return new SqlAlterTableMergePartitions(s.end(this), table); + return new SqlAlterTableMergePartitions(s.end(this), entity); } ) } @@ -661,7 +661,7 @@ SqlAlterTable SqlAlterTable(Span s) : /** * Parses the MODIFY COLUMN part of an ALTER TABLE statement. 
*/ -SqlAlterTableModifyColumn AlterTableModifyColumn(Span s, SqlIdentifier table, SqlIdentifier column) : +SqlAlterTableModifyColumn AlterTableModifyColumn(Span s, SqlIdentifier entity, SqlIdentifier column) : { SqlDataTypeSpec type = null; Boolean nullable = null; @@ -703,7 +703,7 @@ SqlAlterTableModifyColumn AlterTableModifyColumn(Span s, SqlIdentifier table, Sq { dropDefault = true; } ) { - return new SqlAlterTableModifyColumn(s.end(this), table, column, type, nullable, beforeColumn, afterColumn, collation, defaultValue, dropDefault); + return new SqlAlterTableModifyColumn(s.end(this), entity, column, type, nullable, beforeColumn, afterColumn, collation, defaultValue, dropDefault); } } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/DelegatingScope.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/DelegatingScope.java index 1d30737759..8ee61d7fac 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/DelegatingScope.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/DelegatingScope.java @@ -33,8 +33,8 @@ import org.polypheny.db.languages.ParserPos; import org.polypheny.db.nodes.validate.ValidatorTable; import org.polypheny.db.prepare.Prepare.PreparingEntity; -import org.polypheny.db.schema.CustomColumnResolvingTable; -import org.polypheny.db.schema.Table; +import org.polypheny.db.schema.CustomColumnResolvingEntity; +import org.polypheny.db.schema.Entity; import org.polypheny.db.sql.language.SqlCall; import org.polypheny.db.sql.language.SqlIdentifier; import org.polypheny.db.sql.language.SqlNode; @@ -102,9 +102,9 @@ void resolveInNamespace( SqlValidatorNamespace ns, boolean nullable, List>> entries = ((CustomColumnResolvingTable) t).resolveColumn( rowType, validator.getTypeFactory(), names ); + Entity t = ((PreparingEntity) validatorTable).unwrap( Entity.class ); + if ( t instanceof CustomColumnResolvingEntity ) { + final List>> entries = ((CustomColumnResolvingEntity) t).resolveColumn( rowType, validator.getTypeFactory(), names ); for ( Pair> entry : entries ) { final AlgDataTypeField field = entry.getKey(); final List remainder = entry.getValue(); diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/EmptyScope.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/EmptyScope.java index e469f6c5af..f347fcdd94 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/EmptyScope.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/EmptyScope.java @@ -32,8 +32,8 @@ import org.polypheny.db.plan.AlgOptSchema; import org.polypheny.db.prepare.AlgOptEntityImpl; import org.polypheny.db.prepare.Prepare.PreparingEntity; +import org.polypheny.db.schema.Entity; import org.polypheny.db.schema.PolyphenyDbSchema; -import org.polypheny.db.schema.Table; import org.polypheny.db.schema.Wrapper; import org.polypheny.db.sql.language.SqlCall; import org.polypheny.db.sql.language.SqlDataTypeSpec; @@ -133,7 +133,7 @@ private void resolve_( final PolyphenyDbSchema rootSchema, List names, L remainingNames = Util.skip( remainingNames ); continue; } - final PolyphenyDbSchema subSchema = schema.getSubSchema( schemaName, nameMatcher.isCaseSensitive() ); + final PolyphenyDbSchema subSchema = schema.getSubNamespace( schemaName, nameMatcher.isCaseSensitive() ); if ( subSchema != null ) { path = path.plus( null, -1, subSchema.getName(), StructKind.NONE ); 
remainingNames = Util.skip( remainingNames ); @@ -148,15 +148,15 @@ private void resolve_( final PolyphenyDbSchema rootSchema, List names, L if ( entry != null ) { path = path.plus( null, -1, entry.name, StructKind.NONE ); remainingNames = Util.skip( remainingNames ); - final Table table = entry.getTable(); - final CatalogTable catalogTable = Catalog.getInstance().getTable( table.getTableId() ); + final Entity entity = entry.getTable(); + final CatalogTable catalogTable = Catalog.getInstance().getTable( entity.getId() ); ValidatorTable table2 = null; - if ( table instanceof Wrapper ) { - table2 = ((Wrapper) table).unwrap( PreparingEntity.class ); + if ( entity instanceof Wrapper ) { + table2 = ((Wrapper) entity).unwrap( PreparingEntity.class ); } if ( table2 == null ) { final AlgOptSchema algOptSchema = validator.catalogReader.unwrap( AlgOptSchema.class ); - final AlgDataType rowType = table.getRowType( validator.typeFactory ); + final AlgDataType rowType = entity.getRowType( validator.typeFactory ); table2 = AlgOptEntityImpl.create( algOptSchema, rowType, entry, catalogTable, null ); } namespace = new TableNamespace( validator, table2 ); diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlUserDefinedTableMacro.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlUserDefinedTableMacro.java index 4d34ed940a..ff5c326eb1 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlUserDefinedTableMacro.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlUserDefinedTableMacro.java @@ -36,7 +36,7 @@ import org.polypheny.db.schema.Function; import org.polypheny.db.schema.FunctionParameter; import org.polypheny.db.schema.TableMacro; -import org.polypheny.db.schema.TranslatableTable; +import org.polypheny.db.schema.TranslatableEntity; import org.polypheny.db.sql.language.SqlCall; import org.polypheny.db.sql.language.SqlFunction; import org.polypheny.db.sql.language.SqlIdentifier; @@ -86,7 +86,7 @@ public List getParamNames() { /** * Returns the table in this UDF, or null if there is no table. 
*/ - public TranslatableTable getTable( AlgDataTypeFactory typeFactory, List operandList ) { + public TranslatableEntity getTable( AlgDataTypeFactory typeFactory, List operandList ) { List arguments = convertArguments( typeFactory, operandList, tableMacro, getNameAsId(), true ); return tableMacro.apply( arguments ); } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorImpl.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorImpl.java index dfe85a0e98..3f94a345c1 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorImpl.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorImpl.java @@ -101,8 +101,8 @@ import org.polypheny.db.runtime.Resources; import org.polypheny.db.runtime.Resources.ExInst; import org.polypheny.db.schema.ColumnStrategy; +import org.polypheny.db.schema.Entity; import org.polypheny.db.schema.PolyphenyDbSchema; -import org.polypheny.db.schema.Table; import org.polypheny.db.schema.document.DocumentUtil; import org.polypheny.db.sql.language.SqlAggFunction; import org.polypheny.db.sql.language.SqlBasicCall; @@ -3001,13 +3001,13 @@ private void checkRollUpInUsing( SqlIdentifier identifier, SqlNode leftOrRight ) // if it's not a SqlIdentifier then that's fine, it'll be validated somewhere else. if ( leftOrRight instanceof SqlIdentifier ) { SqlIdentifier from = (SqlIdentifier) leftOrRight; - Table table = findTable( + Entity entity = findTable( catalogReader.getRootSchema(), Util.last( from.names ), catalogReader.nameMatcher.isCaseSensitive() ); String name = Util.last( identifier.names ); - if ( table != null && table.isRolledUp( name ) ) { + if ( entity != null && entity.isRolledUp( name ) ) { throw newValidationError( identifier, RESOURCE.rolledUpNotAllowed( name, "USING" ) ); } } @@ -3368,9 +3368,9 @@ private boolean isRolledUpColumnAllowedInAgg( SqlIdentifier identifier, SqlValid String tableAlias = pair.left; String columnName = pair.right; - Table table = findTable( tableAlias ); - if ( table != null ) { - return table.rolledUpColumnValidInsideAgg( columnName, aggCall, parent ); + Entity entity = findTable( tableAlias ); + if ( entity != null ) { + return entity.rolledUpColumnValidInsideAgg( columnName, aggCall, parent ); } return true; } @@ -3387,15 +3387,15 @@ private boolean isRolledUpColumn( SqlIdentifier identifier, SqlValidatorScope sc String tableAlias = pair.left; String columnName = pair.right; - Table table = findTable( tableAlias ); - if ( table != null ) { - return table.isRolledUp( columnName ); + Entity entity = findTable( tableAlias ); + if ( entity != null ) { + return entity.isRolledUp( columnName ); } return false; } - private Table findTable( PolyphenyDbSchema schema, String tableName, boolean caseSensitive ) { + private Entity findTable( PolyphenyDbSchema schema, String tableName, boolean caseSensitive ) { PolyphenyDbSchema.TableEntry entry = schema.getTable( tableName ); if ( entry != null ) { return entry.getTable(); @@ -3403,9 +3403,9 @@ private Table findTable( PolyphenyDbSchema schema, String tableName, boolean cas // Check sub schemas for ( PolyphenyDbSchema subSchema : schema.getSubSchemaMap().values() ) { - Table table = findTable( subSchema, tableName, caseSensitive ); - if ( table != null ) { - return table; + Entity entity = findTable( subSchema, tableName, caseSensitive ); + if ( entity != null ) { + return entity; } } @@ -3414,9 +3414,9 @@ private Table findTable( 
PolyphenyDbSchema schema, String tableName, boolean cas /** - * Given a table alias, find the corresponding {@link Table} associated with it + * Given a table alias, find the corresponding {@link Entity} associated with it */ - private Table findTable( String alias ) { + private Entity findTable( String alias ) { List names = null; if ( tableScope == null ) { // no tables to find @@ -3694,8 +3694,8 @@ public void validateSequenceValue( SqlValidatorScope scope, SqlIdentifier id ) { // We've found a table. But is it a sequence? final SqlValidatorNamespace ns = resolved.only().namespace; if ( ns instanceof TableNamespace ) { - final Table table = ns.getTable().unwrap( Table.class ); - switch ( table.getJdbcTableType() ) { + final Entity entity = ns.getTable().unwrap( Entity.class ); + switch ( entity.getJdbcTableType() ) { case SEQUENCE: case TEMPORARY_SEQUENCE: return; diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorUtil.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorUtil.java index e944add688..5ba106fda8 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorUtil.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorUtil.java @@ -44,14 +44,14 @@ import org.polypheny.db.nodes.Node; import org.polypheny.db.nodes.validate.ValidatorCatalogReader; import org.polypheny.db.nodes.validate.ValidatorTable; -import org.polypheny.db.plan.AlgOptSchemaWithSampling; import org.polypheny.db.plan.AlgOptEntity; +import org.polypheny.db.plan.AlgOptSchemaWithSampling; import org.polypheny.db.prepare.Prepare; import org.polypheny.db.schema.AbstractPolyphenyDbSchema; -import org.polypheny.db.schema.CustomColumnResolvingTable; -import org.polypheny.db.schema.ExtensibleTable; +import org.polypheny.db.schema.CustomColumnResolvingEntity; +import org.polypheny.db.schema.Entity; +import org.polypheny.db.schema.ExtensibleEntity; import org.polypheny.db.schema.PolyphenyDbSchema; -import org.polypheny.db.schema.Table; import org.polypheny.db.sql.language.SqlCall; import org.polypheny.db.sql.language.SqlDataTypeSpec; import org.polypheny.db.sql.language.SqlIdentifier; @@ -133,7 +133,7 @@ private static AlgOptEntity getAlgOptTable( TableNamespace tableNamespace, Prepa */ public static List getExtendedColumns( AlgDataTypeFactory typeFactory, ValidatorTable table, SqlNodeList extendedColumns ) { final ImmutableList.Builder extendedFields = ImmutableList.builder(); - final ExtensibleTable extTable = table.unwrap( ExtensibleTable.class ); + final ExtensibleEntity extTable = table.unwrap( ExtensibleEntity.class ); int extendedFieldOffset = extTable == null ? table.getRowType().getFieldCount() @@ -281,9 +281,9 @@ public static AlgDataTypeField getTargetField( AlgDataType rowType, AlgDataTypeF * @return the target field or null if the name cannot be resolved */ public static AlgDataTypeField getTargetField( AlgDataType rowType, AlgDataTypeFactory typeFactory, SqlIdentifier id, ValidatorCatalogReader catalogReader, AlgOptEntity table, boolean isDocument ) { - final Table t = table == null ? null : table.unwrap( Table.class ); + final Entity t = table == null ? 
null : table.unwrap( Entity.class ); - if ( !(t instanceof CustomColumnResolvingTable) ) { + if ( !(t instanceof CustomColumnResolvingEntity) ) { final NameMatcher nameMatcher = catalogReader.nameMatcher; AlgDataTypeField typeField = nameMatcher.field( rowType, id.getSimple() ); @@ -294,7 +294,7 @@ public static AlgDataTypeField getTargetField( AlgDataType rowType, AlgDataTypeF return typeField; } - final List>> entries = ((CustomColumnResolvingTable) t).resolveColumn( rowType, typeFactory, id.names ); + final List>> entries = ((CustomColumnResolvingEntity) t).resolveColumn( rowType, typeFactory, id.names ); switch ( entries.size() ) { case 1: if ( !entries.get( 0 ).getValue().isEmpty() ) { @@ -649,7 +649,7 @@ public static boolean isTableNonRelational( SqlValidatorImpl validator ) { return false; } SqlIdentifier id = ((SqlIdentifier) validator.getTableScope().getNode()); - PolyphenyDbSchema schema = validator.getCatalogReader().getRootSchema().getSubSchema( id.names.get( 0 ), false ); + PolyphenyDbSchema schema = validator.getCatalogReader().getRootSchema().getSubNamespace( id.names.get( 0 ), false ); if ( schema == null ) { return false; } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/TableNamespace.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/TableNamespace.java index 201884de74..6d13d1f552 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/TableNamespace.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/TableNamespace.java @@ -27,8 +27,8 @@ import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.nodes.validate.ValidatorTable; import org.polypheny.db.plan.AlgOptEntity; -import org.polypheny.db.schema.ExtensibleTable; -import org.polypheny.db.schema.Table; +import org.polypheny.db.schema.Entity; +import org.polypheny.db.schema.ExtensibleEntity; import org.polypheny.db.sql.language.SqlIdentifier; import org.polypheny.db.sql.language.SqlNode; import org.polypheny.db.sql.language.SqlNodeList; @@ -105,8 +105,8 @@ public TableNamespace extend( SqlNodeList extendList ) { builder.addAll( this.extendedFields ); builder.addAll( SqlValidatorUtil.getExtendedColumns( validator.getTypeFactory(), getTable(), extendList ) ); final List extendedFields = builder.build(); - final Table schemaTable = table.unwrap( Table.class ); - if ( schemaTable != null && table instanceof AlgOptEntity && schemaTable instanceof ExtensibleTable ) { + final Entity schemaEntity = table.unwrap( Entity.class ); + if ( schemaEntity != null && table instanceof AlgOptEntity && schemaEntity instanceof ExtensibleEntity ) { checkExtendedColumnTypes( extendList ); final AlgOptEntity algOptEntity = ((AlgOptEntity) table).extend( extendedFields ); final ValidatorTable validatorTable = algOptEntity.unwrap( ValidatorTable.class ); @@ -120,8 +120,8 @@ public TableNamespace extend( SqlNodeList extendList ) { * Gets the data-type of all columns in a table (for a view table: including columns of the underlying table) */ private AlgDataType getBaseRowType() { - final Table schemaTable = table.unwrap( Table.class ); - return schemaTable.getRowType( validator.typeFactory ); + final Entity schemaEntity = table.unwrap( Entity.class ); + return schemaEntity.getRowType( validator.typeFactory ); } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/sql2alg/SqlToAlgConverter.java 
b/plugins/sql-language/src/main/java/org/polypheny/db/sql/sql2alg/SqlToAlgConverter.java index b846cbd369..65dddb231e 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/sql2alg/SqlToAlgConverter.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/sql2alg/SqlToAlgConverter.java @@ -104,8 +104,8 @@ import org.polypheny.db.algebra.logical.relational.LogicalMinus; import org.polypheny.db.algebra.logical.relational.LogicalModify; import org.polypheny.db.algebra.logical.relational.LogicalProject; -import org.polypheny.db.algebra.logical.relational.LogicalRelViewScan; import org.polypheny.db.algebra.logical.relational.LogicalRelScan; +import org.polypheny.db.algebra.logical.relational.LogicalRelViewScan; import org.polypheny.db.algebra.logical.relational.LogicalSort; import org.polypheny.db.algebra.logical.relational.LogicalTableFunctionScan; import org.polypheny.db.algebra.logical.relational.LogicalUnion; @@ -140,9 +140,9 @@ import org.polypheny.db.nodes.Operator; import org.polypheny.db.nodes.validate.ValidatorTable; import org.polypheny.db.plan.AlgOptCluster; -import org.polypheny.db.plan.AlgOptSamplingParameters; import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptEntity.ToAlgContext; +import org.polypheny.db.plan.AlgOptSamplingParameters; import org.polypheny.db.plan.AlgOptUtil; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.plan.Convention; @@ -165,10 +165,9 @@ import org.polypheny.db.rex.RexUtil; import org.polypheny.db.rex.RexWindowBound; import org.polypheny.db.schema.ColumnStrategy; -import org.polypheny.db.schema.LogicalRelView; -import org.polypheny.db.schema.ModifiableTable; -import org.polypheny.db.schema.Table; -import org.polypheny.db.schema.TranslatableTable; +import org.polypheny.db.schema.Entity; +import org.polypheny.db.schema.ModifiableEntity; +import org.polypheny.db.schema.TranslatableEntity; import org.polypheny.db.schema.Wrapper; import org.polypheny.db.sql.language.SqlAggFunction; import org.polypheny.db.sql.language.SqlBasicCall; @@ -2161,8 +2160,8 @@ protected void convertCollectionTable( Blackboard bb, SqlCall call ) { final SqlCallBinding callBinding = new SqlCallBinding( bb.scope.getValidator(), bb.scope, call ); if ( operator instanceof SqlUserDefinedTableMacro ) { final SqlUserDefinedTableMacro udf = (SqlUserDefinedTableMacro) operator; - final TranslatableTable table = udf.getTable( typeFactory, callBinding.sqlOperands() ); - final CatalogTable catalogTable = Catalog.getInstance().getTable( table.getTableId() ); + final TranslatableEntity table = udf.getTable( typeFactory, callBinding.sqlOperands() ); + final CatalogTable catalogTable = Catalog.getInstance().getTable( table.getId() ); final AlgDataType rowType = table.getRowType( typeFactory ); AlgOptEntity algOptEntity = AlgOptEntityImpl.create( null, rowType, table, catalogTable, udf.getNameAsId().names ); AlgNode converted = toAlg( algOptEntity ); @@ -2879,8 +2878,8 @@ protected AlgNode convertInsert( SqlInsert call ) { * Creates a relational expression to modify a table or modifiable view. 
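+ * <p>Only when unwrapping the target to a {@link ModifiableEntity} yields the same instance as
+ * unwrapping it to an {@link Entity} is the modification delegated to the entity's own
+ * {@code toModificationAlg}; otherwise the method presumably falls back to building a generic
+ * {@code LogicalModify}.</p>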
*/ private AlgNode createModify( AlgOptEntity targetTable, AlgNode source ) { - final ModifiableTable modifiableTable = targetTable.unwrap( ModifiableTable.class ); - if ( modifiableTable != null && modifiableTable == targetTable.unwrap( Table.class ) ) { + final ModifiableEntity modifiableTable = targetTable.unwrap( ModifiableEntity.class ); + if ( modifiableTable != null && modifiableTable == targetTable.unwrap( Entity.class ) ) { return modifiableTable.toModificationAlg( cluster, targetTable, @@ -3048,9 +3047,9 @@ private Blackboard createInsertBlackboard( AlgOptEntity targetTable, RexNode sou private InitializerExpressionFactory getInitializerFactory( ValidatorTable validatorTable ) { // We might unwrap a null instead of a InitializerExpressionFactory. - final Table table = unwrap( validatorTable, Table.class ); - if ( table != null ) { - InitializerExpressionFactory f = unwrap( table, InitializerExpressionFactory.class ); + final Entity entity = unwrap( validatorTable, Entity.class ); + if ( entity != null ) { + InitializerExpressionFactory f = unwrap( entity, InitializerExpressionFactory.class ); if ( f != null ) { return f; } diff --git a/plugins/sql-language/src/test/java/org/polypheny/db/sql/AlgWriterTest.java b/plugins/sql-language/src/test/java/org/polypheny/db/sql/AlgWriterTest.java index b4f8a76fb8..050c5d683e 100644 --- a/plugins/sql-language/src/test/java/org/polypheny/db/sql/AlgWriterTest.java +++ b/plugins/sql-language/src/test/java/org/polypheny/db/sql/AlgWriterTest.java @@ -103,7 +103,7 @@ public class AlgWriterTest extends SqlLanguageDependent { public void testWriter() { String s = Frameworks.withPlanner( ( cluster, algOptSchema, rootSchema ) -> { - rootSchema.add( "hr", new ReflectiveSchema( new HrSchema() ), NamespaceType.RELATIONAL ); + rootSchema.add( "hr", new ReflectiveSchema( new HrSchema(), -1 ), NamespaceType.RELATIONAL ); LogicalRelScan scan = LogicalRelScan.create( cluster, @@ -138,7 +138,7 @@ public void testWriter() { public void testReader() { String s = Frameworks.withPlanner( ( cluster, algOptSchema, rootSchema ) -> { - rootSchema.add( "hr", new ReflectiveSchema( new HrSchema() ), NamespaceType.RELATIONAL ); + rootSchema.add( "hr", new ReflectiveSchema( new HrSchema(), -1 ), NamespaceType.RELATIONAL ); final AlgJsonReader reader = new AlgJsonReader( cluster, algOptSchema, rootSchema ); AlgNode node; try { diff --git a/plugins/sql-language/src/test/java/org/polypheny/db/sql/FrameworksTest.java b/plugins/sql-language/src/test/java/org/polypheny/db/sql/FrameworksTest.java index f724be7c6e..5ad9102cc8 100644 --- a/plugins/sql-language/src/test/java/org/polypheny/db/sql/FrameworksTest.java +++ b/plugins/sql-language/src/test/java/org/polypheny/db/sql/FrameworksTest.java @@ -27,7 +27,6 @@ import java.lang.reflect.Type; import java.math.BigDecimal; import java.util.ArrayList; -import java.util.Collection; import java.util.List; import org.apache.calcite.linq4j.Enumerable; import org.apache.calcite.linq4j.Queryable; @@ -53,6 +52,7 @@ import org.polypheny.db.algebra.type.AlgDataTypeSystem; import org.polypheny.db.algebra.type.AlgDataTypeSystemImpl; import org.polypheny.db.catalog.Catalog.NamespaceType; +import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.config.RuntimeConfig; import org.polypheny.db.languages.NodeParseException; import org.polypheny.db.languages.OperatorRegistry; @@ -75,18 +75,18 @@ import org.polypheny.db.rex.RexBuilder; import org.polypheny.db.rex.RexLiteral; import org.polypheny.db.rex.RexNode; +import 
org.polypheny.db.schema.Entity; import org.polypheny.db.schema.HrSchema; -import org.polypheny.db.schema.ModifiableTable; +import org.polypheny.db.schema.ModifiableEntity; import org.polypheny.db.schema.Path; import org.polypheny.db.schema.PolyphenyDbSchema; -import org.polypheny.db.schema.ProjectableFilterableTable; +import org.polypheny.db.schema.ProjectableFilterableEntity; import org.polypheny.db.schema.SchemaPlus; import org.polypheny.db.schema.Schemas; import org.polypheny.db.schema.Statistic; import org.polypheny.db.schema.Statistics; -import org.polypheny.db.schema.Table; -import org.polypheny.db.schema.impl.AbstractSchema; -import org.polypheny.db.schema.impl.AbstractTable; +import org.polypheny.db.schema.impl.AbstractEntity; +import org.polypheny.db.schema.impl.AbstractNamespace; import org.polypheny.db.sql.language.SqlNode; import org.polypheny.db.sql.language.dialect.AnsiSqlDialect; import org.polypheny.db.sql.util.PlannerImplMock; @@ -112,7 +112,7 @@ public void testOptimize() { AlgNode x = Frameworks.withPlanner( ( cluster, algOptSchema, rootSchema ) -> { final AlgDataTypeFactory typeFactory = cluster.getTypeFactory(); - final Table table = new AbstractTable() { + final Entity entity = new AbstractEntity() { @Override public AlgDataType getRowType( AlgDataTypeFactory typeFactory ) { final AlgDataType stringType = typeFactory.createJavaType( String.class ); @@ -125,7 +125,11 @@ public AlgDataType getRowType( AlgDataTypeFactory typeFactory ) { }; // "SELECT * FROM myTable" - final AlgOptAbstractEntity algOptTable = new AlgOptAbstractEntity( algOptSchema, "myTable", table.getRowType( typeFactory ) ) { + final AlgOptAbstractEntity algOptTable = new AlgOptAbstractEntity( algOptSchema, "myTable", entity.getRowType( typeFactory ) ) { + @Override + public CatalogEntity getCatalogEntity() { + return null; + } }; final EnumerableScan tableRel = EnumerableScan.create( cluster, algOptTable ); @@ -161,7 +165,7 @@ public AlgDataType getRowType( AlgDataTypeFactory typeFactory ) { @Test public void testCreateRootSchemaWithNoMetadataSchema() { SchemaPlus rootSchema = Frameworks.createRootSchema( false ); - assertThat( rootSchema.getSubSchemaNames().size(), equalTo( 0 ) ); + assertThat( rootSchema.getSubNamespaceNames().size(), equalTo( 0 ) ); } @@ -243,7 +247,7 @@ public Void apply( AlgOptCluster cluster, AlgOptSchema algOptSchema, SchemaPlus public void testFrameworksValidatorWithIdentifierExpansion() throws Exception { final SchemaPlus schema = Frameworks .createRootSchema( true ) - .add( "hr", new ReflectiveSchema( new HrSchema() ), NamespaceType.RELATIONAL ); + .add( "hr", new ReflectiveSchema( new HrSchema(), -1 ), NamespaceType.RELATIONAL ); final FrameworkConfig config = Frameworks.newConfigBuilder() .defaultSchema( schema ) @@ -278,7 +282,7 @@ public JavaTypeFactory getTypeFactory() { public void testSchemaPath() { final SchemaPlus schema = Frameworks .createRootSchema( true ) - .add( "hr", new ReflectiveSchema( new HrSchema() ), NamespaceType.RELATIONAL ); + .add( "hr", new ReflectiveSchema( new HrSchema(), -1 ), NamespaceType.RELATIONAL ); final FrameworkConfig config = Frameworks.newConfigBuilder() .defaultSchema( schema ) @@ -313,10 +317,10 @@ public void testSchemaPath() { @Test @Ignore // test is no longer needed? 
as the streamer prevents this error and uses different end implementation public void testUpdate() throws Exception { - Table table = new TableImpl(); + Entity entity = new EntityImpl(); final SchemaPlus rootSchema = Frameworks.createRootSchema( true ); - SchemaPlus schema = rootSchema.add( "x", new AbstractSchema(), NamespaceType.RELATIONAL ); - schema.add( "MYTABLE", table ); + SchemaPlus schema = rootSchema.add( "x", new AbstractNamespace( -1 ), NamespaceType.RELATIONAL ); + schema.add( "MYTABLE", entity ); List traitDefs = new ArrayList<>(); traitDefs.add( ConventionTraitDef.INSTANCE ); traitDefs.add( AlgDistributionTraitDef.INSTANCE ); @@ -379,9 +383,9 @@ private void executeQuery( FrameworkConfig config, @SuppressWarnings("SameParame /** * Modifiable, filterable table. */ - private static class TableImpl extends AbstractTable implements ModifiableTable, ProjectableFilterableTable { + private static class EntityImpl extends AbstractEntity implements ModifiableEntity, ProjectableFilterableEntity { - TableImpl() { + EntityImpl() { } diff --git a/plugins/sql-language/src/test/java/org/polypheny/db/sql/InterpreterTest.java b/plugins/sql-language/src/test/java/org/polypheny/db/sql/InterpreterTest.java index 6d0f3f7e86..4d92d32364 100644 --- a/plugins/sql-language/src/test/java/org/polypheny/db/sql/InterpreterTest.java +++ b/plugins/sql-language/src/test/java/org/polypheny/db/sql/InterpreterTest.java @@ -43,11 +43,11 @@ import org.polypheny.db.prepare.JavaTypeFactoryImpl; import org.polypheny.db.schema.HrSchema; import org.polypheny.db.schema.PolyphenyDbSchema; -import org.polypheny.db.schema.ScannableTable; +import org.polypheny.db.schema.ScannableEntity; import org.polypheny.db.schema.SchemaPlus; import org.polypheny.db.sql.util.PlannerImplMock; -import org.polypheny.db.test.ScannableTableTest.BeatlesTable; -import org.polypheny.db.test.ScannableTableTest.SimpleTable; +import org.polypheny.db.test.ScannableEntityTest.BeatlesEntity; +import org.polypheny.db.test.ScannableEntityTest.SimpleEntity; import org.polypheny.db.tools.FrameworkConfig; import org.polypheny.db.tools.Frameworks; import org.polypheny.db.tools.Planner; @@ -153,7 +153,7 @@ public void setParameterTypes( Map types ) { @Before public void setUp() { - rootSchema = Frameworks.createRootSchema( true ).add( "hr", new ReflectiveSchema( new HrSchema() ), NamespaceType.RELATIONAL ); + rootSchema = Frameworks.createRootSchema( true ).add( "hr", new ReflectiveSchema( new HrSchema(), -1 ), NamespaceType.RELATIONAL ); final FrameworkConfig config = Frameworks.newConfigBuilder() .parserConfig( ParserConfig.DEFAULT ) @@ -254,11 +254,11 @@ public void testInterpretTable() throws Exception { /** - * Tests executing a plan on a {@link ScannableTable} using an interpreter. + * Tests executing a plan on a {@link ScannableEntity} using an interpreter. 
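+ * <p>Registers a {@code BeatlesEntity} under the name {@code beatles} and runs
+ * {@code select * from "beatles" order by "i"} through parse, validate and alg conversion.</p>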
*/ @Test public void testInterpretScannableTable() throws Exception { - rootSchema.add( "beatles", new BeatlesTable() ); + rootSchema.add( "beatles", new BeatlesEntity() ); Node parse = planner.parse( "select * from \"beatles\" order by \"i\"" ); Node validate = planner.validate( parse ); @@ -271,7 +271,7 @@ public void testInterpretScannableTable() throws Exception { @Test public void testAggregateCount() throws Exception { - rootSchema.add( "beatles", new BeatlesTable() ); + rootSchema.add( "beatles", new BeatlesEntity() ); Node parse = planner.parse( "select count(*) from \"beatles\"" ); Node validate = planner.validate( parse ); @@ -284,7 +284,7 @@ public void testAggregateCount() throws Exception { @Test public void testAggregateMax() throws Exception { - rootSchema.add( "beatles", new BeatlesTable() ); + rootSchema.add( "beatles", new BeatlesEntity() ); Node parse = planner.parse( "select max(\"i\") from \"beatles\"" ); Node validate = planner.validate( parse ); @@ -297,7 +297,7 @@ public void testAggregateMax() throws Exception { @Test public void testAggregateMin() throws Exception { - rootSchema.add( "beatles", new BeatlesTable() ); + rootSchema.add( "beatles", new BeatlesEntity() ); Node parse = planner.parse( "select min(\"i\") from \"beatles\"" ); Node validate = planner.validate( parse ); @@ -310,7 +310,7 @@ public void testAggregateMin() throws Exception { @Test public void testAggregateGroup() throws Exception { - rootSchema.add( "beatles", new BeatlesTable() ); + rootSchema.add( "beatles", new BeatlesEntity() ); Node parse = planner.parse( "select \"j\", count(*) from \"beatles\" group by \"j\"" ); Node validate = planner.validate( parse ); @@ -323,7 +323,7 @@ public void testAggregateGroup() throws Exception { @Test public void testAggregateGroupFilter() throws Exception { - rootSchema.add( "beatles", new BeatlesTable() ); + rootSchema.add( "beatles", new BeatlesEntity() ); final String sql = "select \"j\",\n" + " count(*) filter (where char_length(\"j\") > 4)\n" + "from \"beatles\" group by \"j\""; Node parse = planner.parse( sql ); Node validate = planner.validate( parse ); @@ -335,11 +335,11 @@ public void testAggregateGroupFilter() throws Exception { /** - * Tests executing a plan on a single-column {@link ScannableTable} using an interpreter. + * Tests executing a plan on a single-column {@link ScannableEntity} using an interpreter. 
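+ * <p>Registers a {@code SimpleEntity} under the name {@code simple} and checks
+ * {@code select * from "simple" limit 2}.</p>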
*/ @Test public void testInterpretSimpleScannableTable() throws Exception { - rootSchema.add( "simple", new SimpleTable() ); + rootSchema.add( "simple", new SimpleEntity() ); Node parse = planner.parse( "select * from \"simple\" limit 2" ); Node validate = planner.validate( parse ); @@ -355,7 +355,7 @@ public void testInterpretSimpleScannableTable() throws Exception { */ @Test public void testInterpretUnionAll() throws Exception { - rootSchema.add( "simple", new SimpleTable() ); + rootSchema.add( "simple", new SimpleEntity() ); Node parse = planner.parse( "select * from \"simple\"\n" + "union all\n" + "select * from \"simple\"\n" ); Node validate = planner.validate( parse ); @@ -371,7 +371,7 @@ public void testInterpretUnionAll() throws Exception { */ @Test public void testInterpretUnion() throws Exception { - rootSchema.add( "simple", new SimpleTable() ); + rootSchema.add( "simple", new SimpleEntity() ); Node parse = planner.parse( "select * from \"simple\"\n" + "union\n" + "select * from \"simple\"\n" ); Node validate = planner.validate( parse ); diff --git a/plugins/sql-language/src/test/java/org/polypheny/db/sql/PlannerTest.java b/plugins/sql-language/src/test/java/org/polypheny/db/sql/PlannerTest.java index 949210f7a6..8ed09f65af 100644 --- a/plugins/sql-language/src/test/java/org/polypheny/db/sql/PlannerTest.java +++ b/plugins/sql-language/src/test/java/org/polypheny/db/sql/PlannerTest.java @@ -34,8 +34,19 @@ package org.polypheny.db.sql; +import static org.hamcrest.CoreMatchers.containsString; +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.notNullValue; +import static org.hamcrest.core.Is.is; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertThat; +import static org.junit.Assert.fail; +import static org.polypheny.db.plan.AlgOptRule.operand; + import com.google.common.base.Throwables; import com.google.common.collect.ImmutableList; +import java.util.ArrayList; +import java.util.List; import org.hamcrest.Matcher; import org.junit.Ignore; import org.junit.Test; @@ -47,14 +58,24 @@ import org.polypheny.db.algebra.AlgCollationTraitDef; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.AlgRoot; -import org.polypheny.db.algebra.constant.*; +import org.polypheny.db.algebra.constant.ExplainFormat; +import org.polypheny.db.algebra.constant.ExplainLevel; +import org.polypheny.db.algebra.constant.FunctionCategory; +import org.polypheny.db.algebra.constant.Kind; +import org.polypheny.db.algebra.constant.Lex; import org.polypheny.db.algebra.core.AlgFactories; import org.polypheny.db.algebra.logical.relational.LogicalFilter; import org.polypheny.db.algebra.logical.relational.LogicalProject; import org.polypheny.db.algebra.metadata.AlgMetadataQuery; import org.polypheny.db.algebra.operators.ChainedOperatorTable; import org.polypheny.db.algebra.operators.OperatorTable; -import org.polypheny.db.algebra.rules.*; +import org.polypheny.db.algebra.rules.FilterMergeRule; +import org.polypheny.db.algebra.rules.LoptOptimizeJoinRule; +import org.polypheny.db.algebra.rules.ProjectMergeRule; +import org.polypheny.db.algebra.rules.ProjectToWindowRules; +import org.polypheny.db.algebra.rules.SortJoinTransposeRule; +import org.polypheny.db.algebra.rules.SortProjectTransposeRule; +import org.polypheny.db.algebra.rules.SortRemoveRule; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.Catalog.NamespaceType; @@ -65,10 +86,20 @@ import 
org.polypheny.db.nodes.Node; import org.polypheny.db.nodes.validate.Validator; import org.polypheny.db.nodes.validate.ValidatorScope; -import org.polypheny.db.plan.*; +import org.polypheny.db.plan.AlgOptPredicateList; +import org.polypheny.db.plan.AlgOptRule; +import org.polypheny.db.plan.AlgOptRuleCall; +import org.polypheny.db.plan.AlgOptUtil; +import org.polypheny.db.plan.AlgTraitDef; +import org.polypheny.db.plan.AlgTraitSet; +import org.polypheny.db.plan.ConventionTraitDef; import org.polypheny.db.prepare.ContextImpl; import org.polypheny.db.prepare.JavaTypeFactoryImpl; -import org.polypheny.db.schema.*; +import org.polypheny.db.schema.FoodmartSchema; +import org.polypheny.db.schema.HrSchema; +import org.polypheny.db.schema.PolyphenyDbSchema; +import org.polypheny.db.schema.SchemaPlus; +import org.polypheny.db.schema.TpchSchema; import org.polypheny.db.sql.language.SqlAggFunction; import org.polypheny.db.sql.language.SqlDialect; import org.polypheny.db.sql.language.SqlNode; @@ -76,21 +107,22 @@ import org.polypheny.db.sql.language.util.ListSqlOperatorTable; import org.polypheny.db.sql.util.PlannerImplMock; import org.polypheny.db.test.PolyphenyDbAssert; -import org.polypheny.db.tools.*; +import org.polypheny.db.tools.AlgBuilder; +import org.polypheny.db.tools.AlgConversionException; +import org.polypheny.db.tools.FrameworkConfig; +import org.polypheny.db.tools.Frameworks; +import org.polypheny.db.tools.Planner; +import org.polypheny.db.tools.Program; +import org.polypheny.db.tools.Programs; +import org.polypheny.db.tools.RuleSet; +import org.polypheny.db.tools.RuleSets; +import org.polypheny.db.tools.ValidationException; import org.polypheny.db.type.PolyType; import org.polypheny.db.type.checker.OperandTypes; import org.polypheny.db.type.inference.ReturnTypes; import org.polypheny.db.util.Optionality; import org.polypheny.db.util.Util; -import java.util.ArrayList; -import java.util.List; - -import static org.hamcrest.CoreMatchers.*; -import static org.hamcrest.core.Is.is; -import static org.junit.Assert.*; -import static org.polypheny.db.plan.AlgOptRule.operand; - /** * Unit tests for {@link Planner}. @@ -202,7 +234,7 @@ public void testValidateFails() throws NodeParseException { public void testValidateUserDefinedAggregate() throws Exception { final SchemaPlus schema = Frameworks .createRootSchema( true ) - .add( "hr", new ReflectiveSchema( new HrSchema() ), NamespaceType.RELATIONAL ); + .add( "hr", new ReflectiveSchema( new HrSchema(), -1 ), NamespaceType.RELATIONAL ); final SqlStdOperatorTable stdOpTab = SqlStdOperatorTable.instance(); OperatorTable opTab = ChainedOperatorTable.of( stdOpTab, new ListSqlOperatorTable( ImmutableList.of( new MyCountAggFunction() ) ) ); @@ -257,7 +289,7 @@ private Planner getPlanner( List traitDefs, Program... programs ) { private Planner getPlanner( List traitDefs, ParserConfig parserConfig, Program... programs ) { final SchemaPlus schema = Frameworks .createRootSchema( true ) - .add( "hr", new ReflectiveSchema( new HrSchema() ), NamespaceType.RELATIONAL ); + .add( "hr", new ReflectiveSchema( new HrSchema(), -1 ), NamespaceType.RELATIONAL ); final FrameworkConfig config = Frameworks.newConfigBuilder() .parserConfig( parserConfig ) @@ -970,7 +1002,7 @@ public void testBushyCrossJoin2() throws Exception { * Checks that a query returns a particular plan, using a planner with MultiJoinOptimizeBushyRule enabled. 
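+ * <p>{@code sql} is planned against the {@code foodmart} schema registered below, and the
+ * resulting plan must match {@code expected}.</p>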
*/ private void checkBushy( String sql, String expected ) throws Exception { - final SchemaPlus schema = Frameworks.createRootSchema( false ).add( "foodmart", new ReflectiveSchema( new FoodmartSchema() ), NamespaceType.RELATIONAL ); + final SchemaPlus schema = Frameworks.createRootSchema( false ).add( "foodmart", new ReflectiveSchema( new FoodmartSchema(), -1 ), NamespaceType.RELATIONAL ); final FrameworkConfig config = Frameworks.newConfigBuilder() .parserConfig( Parser.ParserConfig.DEFAULT ) @@ -1026,7 +1058,7 @@ public void testOldJoinStyleDeCorrelation() throws Exception { public String checkTpchQuery( String tpchTestQuery ) throws Exception { - final SchemaPlus schema = Frameworks.createRootSchema( false ).add( "tpch", new ReflectiveSchema( new TpchSchema() ), NamespaceType.RELATIONAL ); + final SchemaPlus schema = Frameworks.createRootSchema( false ).add( "tpch", new ReflectiveSchema( new TpchSchema(), -1 ), NamespaceType.RELATIONAL ); final FrameworkConfig config = Frameworks.newConfigBuilder() .parserConfig( Parser.configBuilder().setLex( Lex.MYSQL ).build() ) @@ -1086,7 +1118,7 @@ public AlgDataType deriveType( Validator validator, ValidatorScope scope, Call c public void testOrderByNonSelectColumn() throws Exception { final SchemaPlus schema = Frameworks .createRootSchema( true ) - .add( "tpch", new ReflectiveSchema( new TpchSchema() ), NamespaceType.RELATIONAL ); + .add( "tpch", new ReflectiveSchema( new TpchSchema(), -1 ), NamespaceType.RELATIONAL ); String query = "select t.psPartkey from \n" + "(select ps.psPartkey from `tpch`.`partsupp` ps \n" @@ -1169,7 +1201,7 @@ public void testViewOnView() throws Exception { private void checkView( String sql, Matcher matcher ) throws NodeParseException, ValidationException, AlgConversionException { final SchemaPlus schema = Frameworks .createRootSchema( true ) - .add( "hr", new ReflectiveSchema( new HrSchema() ), NamespaceType.RELATIONAL ); + .add( "hr", new ReflectiveSchema( -1L, new HrSchema() ), NamespaceType.RELATIONAL ); final FrameworkConfig config = Frameworks.newConfigBuilder() .defaultSchema( schema ) diff --git a/plugins/sql-language/src/test/java/org/polypheny/db/sql/Smalls.java b/plugins/sql-language/src/test/java/org/polypheny/db/sql/Smalls.java index 6612745a11..df516b2d99 100644 --- a/plugins/sql-language/src/test/java/org/polypheny/db/sql/Smalls.java +++ b/plugins/sql-language/src/test/java/org/polypheny/db/sql/Smalls.java @@ -34,18 +34,18 @@ import org.apache.calcite.linq4j.function.SemiStrict; import org.apache.calcite.linq4j.tree.Types; import org.polypheny.db.adapter.DataContext; -import org.polypheny.db.adapter.java.AbstractQueryableTable; +import org.polypheny.db.adapter.java.AbstractQueryableEntity; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; import org.polypheny.db.nodes.Call; import org.polypheny.db.nodes.Node; -import org.polypheny.db.schema.QueryableTable; -import org.polypheny.db.schema.ScannableTable; -import org.polypheny.db.schema.Schema; +import org.polypheny.db.schema.QueryableEntity; +import org.polypheny.db.schema.ScannableEntity; import org.polypheny.db.schema.SchemaPlus; import org.polypheny.db.schema.Statistic; import org.polypheny.db.schema.Statistics; -import org.polypheny.db.schema.impl.AbstractTable; +import org.polypheny.db.schema.TableType; +import org.polypheny.db.schema.impl.AbstractEntity; import org.polypheny.db.type.PolyType; @@ -55,9 +55,9 @@ public class Smalls { public static final Method GENERATE_STRINGS_METHOD = 
Types.lookupMethod( Smalls.class, "generateStrings", Integer.class ); - public static final Method MAZE_METHOD = Types.lookupMethod( MazeTable.class, "generate", int.class, int.class, int.class ); - public static final Method MAZE2_METHOD = Types.lookupMethod( MazeTable.class, "generate2", int.class, int.class, Integer.class ); - public static final Method MAZE3_METHOD = Types.lookupMethod( MazeTable.class, "generate3", String.class ); + public static final Method MAZE_METHOD = Types.lookupMethod( MazeEntity.class, "generate", int.class, int.class, int.class ); + public static final Method MAZE2_METHOD = Types.lookupMethod( MazeEntity.class, "generate2", int.class, int.class, Integer.class ); + public static final Method MAZE3_METHOD = Types.lookupMethod( MazeEntity.class, "generate3", String.class ); public static final Method MULTIPLICATION_TABLE_METHOD = Types.lookupMethod( Smalls.class, "multiplicationTable", int.class, int.class, Integer.class ); public static final Method FIBONACCI_TABLE_METHOD = Types.lookupMethod( Smalls.class, "fibonacciTable" ); public static final Method FIBONACCI2_TABLE_METHOD = Types.lookupMethod( Smalls.class, "fibonacciTableWithLimit", long.class ); @@ -72,7 +72,7 @@ private Smalls() { } - private static QueryableTable oneThreePlus( String s ) { + private static QueryableEntity oneThreePlus( String s ) { List items; // Argument is null in case SQL contains function call with expression. Then the engine calls a function with null arguments to get getRowType. if ( s == null ) { @@ -82,7 +82,7 @@ private static QueryableTable oneThreePlus( String s ) { items = ImmutableList.of( 1, 3, latest ); } final Enumerable enumerable = Linq4j.asEnumerable( items ); - return new AbstractQueryableTable( Integer.class ) { + return new AbstractQueryableEntity( Integer.class ) { @Override public Queryable asQueryable( DataContext dataContext, SchemaPlus schema, String tableName ) { //noinspection unchecked @@ -106,8 +106,8 @@ public static Queryable stringUnion( Queryable q0, Queryable q1 ) { /** * A function that generates a table that generates a sequence of {@link IntString} values. */ - public static QueryableTable generateStrings( final Integer count ) { - return new AbstractQueryableTable( IntString.class ) { + public static QueryableEntity generateStrings( final Integer count ) { + return new AbstractQueryableEntity( IntString.class ) { @Override public AlgDataType getRowType( AlgDataTypeFactory typeFactory ) { return typeFactory.createJavaType( IntString.class ); @@ -169,9 +169,9 @@ public void close() { /** * A function that generates multiplication table of {@code ncol} columns x {@code nrow} rows. */ - public static QueryableTable multiplicationTable( final int ncol, final int nrow, Integer offset ) { + public static QueryableEntity multiplicationTable( final int ncol, final int nrow, Integer offset ) { final int offs = offset == null ? 0 : offset; - return new AbstractQueryableTable( Object[].class ) { + return new AbstractQueryableEntity( Object[].class ) { @Override public AlgDataType getRowType( AlgDataTypeFactory typeFactory ) { final AlgDataTypeFactory.Builder builder = typeFactory.builder(); @@ -212,7 +212,7 @@ public int size() { /** * A function that generates the Fibonacci sequence. Interesting because it has one column and no arguments. 
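+ * <p>Delegates to {@link #fibonacciTableWithLimit(long)} with a negative limit, which presumably
+ * disables the upper bound on the generated sequence.</p>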
*/ - public static ScannableTable fibonacciTable() { + public static ScannableEntity fibonacciTable() { return fibonacciTableWithLimit( -1L ); } @@ -220,8 +220,8 @@ public static ScannableTable fibonacciTable() { /** * A function that generates the Fibonacci sequence. Interesting because it has one column and no arguments. */ - public static ScannableTable fibonacciTableWithLimit( final long limit ) { - return new ScannableTable() { + public static ScannableEntity fibonacciTableWithLimit( final long limit ) { + return new ScannableEntity() { @Override public AlgDataType getRowType( AlgDataTypeFactory typeFactory ) { return typeFactory.builder().add( "N", null, PolyType.BIGINT ).build(); @@ -279,14 +279,14 @@ public Statistic getStatistic() { @Override - public Long getTableId() { + public Long getId() { return null; } @Override - public Schema.TableType getJdbcTableType() { - return Schema.TableType.TABLE; + public TableType getJdbcTableType() { + return TableType.TABLE; } @@ -308,8 +308,8 @@ public boolean rolledUpColumnValidInsideAgg( String column, Call call, Node pare /** * A function that adds a number to the first column of input cursor */ - public static QueryableTable processCursor( final int offset, final Enumerable a ) { - return new AbstractQueryableTable( Object[].class ) { + public static QueryableEntity processCursor( final int offset, final Enumerable a ) { + return new AbstractQueryableEntity( Object[].class ) { @Override public AlgDataType getRowType( AlgDataTypeFactory typeFactory ) { return typeFactory.builder() @@ -331,8 +331,8 @@ public Queryable asQueryable( DataContext dataContext, SchemaPlus schema, /** * A function that sums the second column of first input cursor, second column of first input and the given int. */ - public static QueryableTable processCursors( final int offset, final Enumerable a, final Enumerable b ) { - return new AbstractQueryableTable( Object[].class ) { + public static QueryableEntity processCursors( final int offset, final Enumerable a, final Enumerable b ) { + return new AbstractQueryableEntity( Object[].class ) { @Override public AlgDataType getRowType( AlgDataTypeFactory typeFactory ) { return typeFactory.builder() @@ -608,22 +608,21 @@ private interface MyGenericAggFunction { /** * The real MazeTable may be found in example/function. This is a cut-down version to support a test. 
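+ * <p>The cut-down entity merely records the parameters it was created with (see {@link #generate});
+ * no actual maze is generated.</p>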
*/ - public static class MazeTable extends AbstractTable implements ScannableTable { + public static class MazeEntity extends AbstractEntity implements ScannableEntity { private final String content; - public MazeTable( String content ) { + public MazeEntity( String content ) { this.content = content; } - public static ScannableTable generate( int width, int height, int seed ) { - return new MazeTable( String.format( Locale.ROOT, "generate(w=%d, h=%d, s=%d)", width, height, seed ) ); + public static ScannableEntity generate( int width, int height, int seed ) { + return new MazeEntity( String.format( Locale.ROOT, "generate(w=%d, h=%d, s=%d)", width, height, seed ) ); } - @Override public AlgDataType getRowType( AlgDataTypeFactory typeFactory ) { return typeFactory.builder() diff --git a/plugins/sql-language/src/test/java/org/polypheny/db/sql/SortRemoveRuleTest.java b/plugins/sql-language/src/test/java/org/polypheny/db/sql/SortRemoveRuleTest.java index 67924e3096..e44c9bf4d9 100644 --- a/plugins/sql-language/src/test/java/org/polypheny/db/sql/SortRemoveRuleTest.java +++ b/plugins/sql-language/src/test/java/org/polypheny/db/sql/SortRemoveRuleTest.java @@ -69,7 +69,7 @@ public final class SortRemoveRuleTest extends SqlLanguageDependent { */ private AlgNode transform( String sql, RuleSet prepareRules ) throws Exception { final SchemaPlus rootSchema = Frameworks.createRootSchema( true ); - final SchemaPlus defSchema = rootSchema.add( "hr", new HrClusteredSchema(), NamespaceType.RELATIONAL ); + final SchemaPlus defSchema = rootSchema.add( "hr", new HrClusteredSchema( rootSchema.getId() ), NamespaceType.RELATIONAL ); final FrameworkConfig config = Frameworks.newConfigBuilder() .parserConfig( ParserConfig.DEFAULT ) .defaultSchema( defSchema ) diff --git a/plugins/sql-language/src/test/java/org/polypheny/db/sql/language/validate/LexCaseSensitiveTest.java b/plugins/sql-language/src/test/java/org/polypheny/db/sql/language/validate/LexCaseSensitiveTest.java index 60ae477b1c..bf91c26e49 100644 --- a/plugins/sql-language/src/test/java/org/polypheny/db/sql/language/validate/LexCaseSensitiveTest.java +++ b/plugins/sql-language/src/test/java/org/polypheny/db/sql/language/validate/LexCaseSensitiveTest.java @@ -60,7 +60,7 @@ public class LexCaseSensitiveTest { private static Planner getPlanner( List traitDefs, ParserConfig parserConfig, Program... 
programs ) { - final SchemaPlus schema = Frameworks.createRootSchema( true ).add( "hr", new ReflectiveSchema( new HrSchema() ), NamespaceType.RELATIONAL ); + final SchemaPlus schema = Frameworks.createRootSchema( true ).add( "hr", new ReflectiveSchema( new HrSchema(), -1 ), NamespaceType.RELATIONAL ); final FrameworkConfig config = Frameworks.newConfigBuilder() .parserConfig( parserConfig ) .defaultSchema( schema ) diff --git a/plugins/sql-language/src/test/java/org/polypheny/db/sql/map/SchemaToJsonMapperTest.java b/plugins/sql-language/src/test/java/org/polypheny/db/sql/map/NamespaceToJsonMapperTest.java similarity index 98% rename from plugins/sql-language/src/test/java/org/polypheny/db/sql/map/SchemaToJsonMapperTest.java rename to plugins/sql-language/src/test/java/org/polypheny/db/sql/map/NamespaceToJsonMapperTest.java index 3a98c95de6..51a1395e2a 100644 --- a/plugins/sql-language/src/test/java/org/polypheny/db/sql/map/SchemaToJsonMapperTest.java +++ b/plugins/sql-language/src/test/java/org/polypheny/db/sql/map/NamespaceToJsonMapperTest.java @@ -41,7 +41,7 @@ import org.polypheny.db.type.PolyType; -public class SchemaToJsonMapperTest extends SqlLanguageDependent { +public class NamespaceToJsonMapperTest extends SqlLanguageDependent { private static final String mockJson = "{\"tableName\":\"stores\",\"columns\":[{\"columnName\":\"sid\",\"type\":\"INTEGER\",\"nullable\":false},{\"columnName\":\"name\",\"type\":\"VARCHAR\",\"length\":50,\"nullable\":false},{\"columnName\":\"location\",\"type\":\"VARCHAR\",\"length\":30,\"nullable\":true,\"defaultValue\":\"Basel\"}],\"primaryKeyColumnNames\":[\"sid\",\"name\"]}"; diff --git a/plugins/sql-language/src/test/java/org/polypheny/db/sql/volcano/TraitPropagationTest.java b/plugins/sql-language/src/test/java/org/polypheny/db/sql/volcano/TraitPropagationTest.java index da056ac0d7..4abcf666d7 100644 --- a/plugins/sql-language/src/test/java/org/polypheny/db/sql/volcano/TraitPropagationTest.java +++ b/plugins/sql-language/src/test/java/org/polypheny/db/sql/volcano/TraitPropagationTest.java @@ -91,11 +91,11 @@ import org.polypheny.db.prepare.PolyphenyDbCatalogReader; import org.polypheny.db.rex.RexBuilder; import org.polypheny.db.rex.RexNode; +import org.polypheny.db.schema.Entity; import org.polypheny.db.schema.SchemaPlus; import org.polypheny.db.schema.Statistic; import org.polypheny.db.schema.Statistics; -import org.polypheny.db.schema.Table; -import org.polypheny.db.schema.impl.AbstractTable; +import org.polypheny.db.schema.impl.AbstractEntity; import org.polypheny.db.tools.FrameworkConfig; import org.polypheny.db.tools.Frameworks; import org.polypheny.db.tools.RuleSet; @@ -142,7 +142,7 @@ public AlgNode apply( AlgOptCluster cluster, AlgOptSchema algOptSchema, SchemaPl final AlgDataType sqlBigInt = typeFactory.createPolyType( PolyType.BIGINT ); // SELECT * from T; - final Table table = new AbstractTable() { + final Entity entity = new AbstractEntity() { @Override public AlgDataType getRowType( AlgDataTypeFactory typeFactory ) { return typeFactory.builder() @@ -157,7 +157,7 @@ public Statistic getStatistic() { } }; - final AlgOptAbstractEntity t1 = new AlgOptAbstractEntity( algOptSchema, "t1", table.getRowType( typeFactory ) ) { + final AlgOptAbstractEntity t1 = new AlgOptAbstractEntity( algOptSchema, "t1", entity.getRowType( typeFactory ) ) { @Override public CatalogTable getCatalogTable() { return null; @@ -166,8 +166,8 @@ public CatalogTable getCatalogTable() { @Override public T unwrap( Class clazz ) { - return clazz.isInstance( table ) - ? 
clazz.cast( table ) + return clazz.isInstance( entity ) + ? clazz.cast( entity ) : super.unwrap( clazz ); } }; diff --git a/plugins/sql-language/src/test/resources/org/polypheny/db/sql/AlgOptRulesTest.xml b/plugins/sql-language/src/test/resources/org/polypheny/db/sql/AlgOptRulesTest.xml index b296774083..b34e68f3b2 100644 --- a/plugins/sql-language/src/test/resources/org/polypheny/db/sql/AlgOptRulesTest.xml +++ b/plugins/sql-language/src/test/resources/org/polypheny/db/sql/AlgOptRulesTest.xml @@ -23,14 +23,14 @@ from emp]]> @@ -43,7 +43,7 @@ from emp]]> @@ -56,14 +56,14 @@ group by grouping sets ((empno, deptno),(deptno),(empno))]]> @@ -76,14 +76,14 @@ group by grouping sets ((empno, deptno),(deptno),(empno))]]> @@ -98,7 +98,7 @@ where NOT(caseCol)]]> LogicalProject(CASECOL=[$0]) LogicalFilter(condition=[NOT($0)]) LogicalProject(CASECOL=[CASE(>($5, 1000), null, false)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -106,7 +106,7 @@ LogicalProject(CASECOL=[$0]) LogicalProject(CASECOL=[$0]) LogicalFilter(condition=[NOT($0)]) LogicalProject(CASECOL=[AND(>($5, 1000), null)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -122,14 +122,14 @@ where case when (sal = 1000) then ($5, 1000)))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -144,14 +144,14 @@ OR case when sal = 2000 then null else 1 end is null]]> @@ -186,13 +186,13 @@ window w as (partition by empno order by empno)]]> @@ -216,7 +216,7 @@ LogicalProject(EMPNO=[$0], DEPTNO=[$1], W_COUNT=[$2]) LogicalFilter(condition=[IS NULL($2)]) LogicalProject(EMPNO=[$0], DEPTNO=[$7], $2=[$9]) LogicalWindow(window#0=[window(partition {7} order by [0] range between UNBOUNDED PRECEDING and CURRENT ROW aggs [COUNT($0)])]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -239,7 +239,7 @@ LogicalProject(EMPNO=[$0], DEPTNO=[$1], W_COUNT=[$2]) LogicalFilter(condition=[IS NULL($2)]) LogicalWindow(window#0=[window(partition {} order by [] rows between $2 PRECEDING and $3 PRECEDING aggs [COUNT($0)])]) LogicalProject(EMPNO=[$0], DEPTNO=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -256,13 +256,13 @@ from emp]]> @@ -275,18 +275,18 @@ LogicalProject(COUNT1=[COUNT() OVER (PARTITION BY $0 ORDER BY $5 RANGE BETWEEN U LogicalAggregate(group=[{0, 1}]) LogicalUnion(all=[true]) LogicalProject(DEPTNO=[$0], NAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalProject(DEPTNO=[$0], NAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -312,17 +312,17 @@ select name, deptno from dept @@ -332,17 +332,17 @@ LogicalProject(NAME=[$0], DEPTNO=[$1]) LogicalProject(NAME=[$0], DEPTNO=[$1]) LogicalUnion(all=[true]) LogicalProject(NAME=[$1], DEPTNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalProject(NAME=[$0], DEPTNO=[$1]) LogicalUnion(all=[true]) LogicalAggregate(group=[{0, 1}], EXPR$2=[COUNT()]) LogicalProject(NAME=[$1], DEPTNO=[$0], $f2=[1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalAggregate(group=[{0, 1}], EXPR$2=[COUNT()]) LogicalProject(NAME=[$1], DEPTNO=[$0], $f2=[1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalProject(NAME=[$1], 
DEPTNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -375,20 +375,20 @@ LogicalMinus(all=[true]) LogicalProject(NAME=[$0], DEPTNO=[$1]) LogicalAggregate(group=[{0, 1}], EXPR$2=[COUNT()]) LogicalProject(NAME=[$1], DEPTNO=[$0], $f2=[1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalProject(NAME=[$0], DEPTNO=[$1]) LogicalProject(NAME=[$1], DEPTNO=[$0], EXPR$2=[1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalMinus(all=[true]) LogicalProject(NAME=[$0], DEPTNO=[$1]) LogicalProject(NAME=[$1], DEPTNO=[$0], EXPR$2=[1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalProject(NAME=[$0], DEPTNO=[$1]) LogicalAggregate(group=[{0, 1}], EXPR$2=[COUNT()]) LogicalProject(NAME=[$1], DEPTNO=[$0], $f2=[1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalProject(NAME=[$1], DEPTNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -401,18 +401,18 @@ LogicalProject(NAME=[$0], DEPTNO=[$1]) LogicalMinus(all=[true]) LogicalAggregate(group=[{0, 1}], EXPR$2=[COUNT()]) LogicalProject(NAME=[$1], DEPTNO=[$0], $f2=[1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalProject(NAME=[$1], DEPTNO=[$0], EXPR$2=[1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalProject(NAME=[$0], DEPTNO=[$1]) LogicalMinus(all=[true]) LogicalProject(NAME=[$1], DEPTNO=[$0], EXPR$2=[1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalAggregate(group=[{0, 1}], EXPR$2=[COUNT()]) LogicalProject(NAME=[$1], DEPTNO=[$0], $f2=[1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalProject(NAME=[$1], DEPTNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -424,8 +424,8 @@ LogicalProject(NAME=[$0], DEPTNO=[$1]) @@ -433,8 +433,8 @@ LogicalProject(EXPR$0=[1]) LogicalProject(EXPR$0=[1]) LogicalFilter(condition=[=($7, $9)]) LogicalJoin(condition=[true], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -446,8 +446,8 @@ LogicalProject(EXPR$0=[1]) @@ -455,9 +455,9 @@ LogicalProject(EXPR$0=[1]) LogicalProject(EXPR$0=[1]) LogicalJoin(condition=[=($7, $9)], joinType=[inner], semiJoinDone=[true]) SemiJoin(condition=[=($7, $9)], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -471,9 +471,9 @@ LogicalProject(EXPR$0=[1]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[NOT(<= SOME($0, { LogicalProject(DEPTNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) }))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -482,10 +482,10 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], 
HIREDATE=[$4], SAL=[$ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[AND(OR(IS NOT TRUE(<=($0, $9)), =($10, 0)), OR(<=($10, $11), =($10, 0), IS TRUE(<=($0, $9))), OR(>($0, $9), =($10, 0), IS TRUE(<=($0, $9)), >($10, $11)))]) LogicalJoin(condition=[true], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{}], m=[MAX($0)], c=[COUNT()], d=[COUNT($0)]) LogicalProject(DEPTNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -494,10 +494,10 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[AND(OR(IS NOT TRUE(<=($0, $9)), =($10, 0)), OR(<=($10, $11), =($10, 0), IS TRUE(<=($0, $9))), OR(>($0, $9), =($10, 0), IS TRUE(<=($0, $9)), >($10, $11)))]) LogicalJoin(condition=[true], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{}], m=[MAX($0)], c=[COUNT()], d=[COUNT($0)]) LogicalProject(DEPTNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -509,14 +509,14 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ ($5, 1000), $0, $5), 1)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> ($5, 1000), =($0, 1)), =($5, 1))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -534,13 +534,13 @@ LogicalProject(EMPNO=[$0]) LogicalJoin(condition=[=($2, $3)], joinType=[inner]) LogicalFilter(condition=[<($2, 10)]) LogicalProject(EMPNO=[$0], SAL=[$5], DEPTNO=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalFilter(condition=[<($0, 15)]) LogicalProject(DEPTNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalAggregate(group=[{0}], EXPR$0=[AVG($1)]) LogicalProject(EMPNO=[$0], SAL=[$5]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -555,20 +555,20 @@ where EXISTS ( LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[EXISTS({ LogicalFilter(condition=[=($cor0.DEPTNO, $7)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) })], variablesSet=[[$cor0]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -576,11 +576,11 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalCorrelate(correlation=[$cor0], joinType=[inner], requiredColumns=[{7}]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0}]) LogicalProject(i=[true]) LogicalFilter(condition=[=($cor0.DEPTNO, $7)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -597,12 
+597,12 @@ AND NOT EXISTS ( LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[AND(EXISTS({ LogicalFilter(condition=[=($cor0.DEPTNO, $7)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) }), NOT(EXISTS({ LogicalFilter(condition=[AND(=($2, $cor0.JOB), =($5, 34))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) })))], variablesSet=[[$cor0]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -612,15 +612,15 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalFilter(condition=[IS NULL($10)]) LogicalCorrelate(correlation=[$cor0], joinType=[left], requiredColumns=[{2}]) LogicalCorrelate(correlation=[$cor0], joinType=[inner], requiredColumns=[{7}]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0}]) LogicalProject(i=[true]) LogicalFilter(condition=[=($cor0.DEPTNO, $7)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0}]) LogicalProject(i=[true]) LogicalFilter(condition=[AND(=($2, $cor0.JOB), =($5, 34))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -629,16 +629,16 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalFilter(condition=[IS NULL($12)]) LogicalJoin(condition=[=($2, $11)], joinType=[left]) LogicalJoin(condition=[=($7, $9)], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(DEPTNO=[$0], $f1=[true]) LogicalAggregate(group=[{0}]) LogicalProject(DEPTNO=[$7], i=[true]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(JOB=[$0], $f1=[true]) LogicalAggregate(group=[{0}]) LogicalProject(JOB=[$2], i=[true]) LogicalFilter(condition=[=($5, 34)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -656,12 +656,12 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalFilter(condition=[AND(IN($2, { LogicalProject(JOB=[$2]) LogicalFilter(condition=[=($5, 34)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) }), EXISTS({ LogicalFilter(condition=[=($cor0.DEPTNO, $7)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) }))], variablesSet=[[$cor0]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -670,15 +670,15 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalCorrelate(correlation=[$cor0], joinType=[inner], requiredColumns=[{7}]) LogicalJoin(condition=[=($2, $9)], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0}]) LogicalProject(JOB=[$2]) LogicalFilter(condition=[=($5, 34)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0}]) LogicalProject(i=[true]) LogicalFilter(condition=[=($cor0.DEPTNO, $7)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + 
LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -686,15 +686,15 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalJoin(condition=[=($7, $10)], joinType=[inner]) LogicalJoin(condition=[=($2, $9)], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0}]) LogicalProject(JOB=[$2]) LogicalFilter(condition=[=($5, 34)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(DEPTNO=[$0], $f1=[true]) LogicalAggregate(group=[{0}]) LogicalProject(DEPTNO=[$7], i=[true]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -713,13 +713,13 @@ LogicalProject(SAL=[$5]) LogicalFilter(condition=[AND(IN($0, { LogicalProject(DEPTNO=[$0]) LogicalFilter(condition=[=($cor0.JOB, $1)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) }), IN($0, { LogicalProject(EMPNO=[$0]) LogicalFilter(condition=[=($cor0.ENAME, $1)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) }))], variablesSet=[[$cor0]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -730,13 +730,13 @@ LogicalProject(SAL=[$5]) LogicalCorrelate(correlation=[$cor0], joinType=[inner], requiredColumns=[{1}]) LogicalFilter(condition=[=($0, $9)]) LogicalCorrelate(correlation=[$cor0], joinType=[inner], requiredColumns=[{2}]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(DEPTNO=[$0]) LogicalFilter(condition=[=($cor0.JOB, $1)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalProject(EMPNO=[$0]) LogicalFilter(condition=[=($cor0.ENAME, $1)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -744,11 +744,11 @@ LogicalProject(SAL=[$5]) LogicalProject(SAL=[$5]) LogicalJoin(condition=[AND(=($1, $12), =($0, $11))], joinType=[inner]) LogicalJoin(condition=[AND(=($2, $10), =($0, $9))], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(DEPTNO=[$0], NAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalProject(EMPNO=[$0], ENAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -766,14 +766,14 @@ LogicalProject(DEPTNO=[$0], I0=[$SCALAR_QUERY({ LogicalAggregate(group=[{}], EXPR$0=[MIN($0)]) LogicalProject($f0=[1]) LogicalFilter(condition=[>($0, $cor0.DEPTNO)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) })], I1=[$SCALAR_QUERY({ LogicalAggregate(group=[{}], EXPR$0=[MIN($0)]) LogicalProject($f0=[0]) LogicalFilter(condition=[AND(=($7, $cor1.DEPTNO), =($1, 'SMITH'))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) })]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -781,15 +781,15 @@ LogicalAggregate(group=[{}], EXPR$0=[MIN($0)]) LogicalProject(DEPTNO=[$0], I0=[$2], I1=[$3]) LogicalJoin(condition=[true], joinType=[left]) LogicalJoin(condition=[true], joinType=[left]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + 
LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalAggregate(group=[{}], EXPR$0=[MIN($0)]) LogicalProject($f0=[1]) LogicalFilter(condition=[>($0, $cor0.DEPTNO)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{}], EXPR$0=[MIN($0)]) LogicalProject($f0=[0]) LogicalFilter(condition=[AND(=($7, $cor1.DEPTNO), =($1, 'SMITH'))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -802,7 +802,7 @@ LogicalProject(DEPTNO=[$0], I0=[$2], I1=[$3]) LogicalProject(EXPR$0=[$1], EXPR$1=[$2]) LogicalAggregate(group=[{0}], EXPR$0=[COUNT(DISTINCT $0)], EXPR$1=[SUM($1)]) LogicalProject(DEPTNO=[$7], SAL=[$5]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -813,7 +813,7 @@ LogicalProject(EXPR$0=[$1], EXPR$1=[$2]) LogicalProject(DEPTNO=[$0], EXPR$1=[$1], $g_0=[=($2, 0)]) LogicalAggregate(group=[{0}], EXPR$1=[SUM($1)], $g=[GROUPING($0)]) LogicalProject(DEPTNO=[$7], SAL=[$5]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -828,7 +828,7 @@ GROUP BY deptno]]> @@ -836,7 +836,7 @@ LogicalAggregate(group=[{0}], EXPR$1=[SUM($0)], EXPR$2=[SUM(DISTINCT $1)], EXPR$ LogicalAggregate(group=[{0}], EXPR$1=[SUM($2)], EXPR$2=[SUM($1)], EXPR$3=[MAX($3)], EXPR$4=[MAX($4)]) LogicalAggregate(group=[{0, 1}], EXPR$1=[SUM($0)], EXPR$3=[MAX($0)], EXPR$4=[MAX($2)]) LogicalProject(DEPTNO=[$7], SAL=[$5], COMM=[$6]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -850,7 +850,7 @@ GROUP BY deptno]]> @@ -858,7 +858,7 @@ LogicalAggregate(group=[{0}], EXPR$1=[COUNT()], EXPR$2=[SUM(DISTINCT $1)]) LogicalAggregate(group=[{0}], EXPR$1=[$SUM0($2)], EXPR$2=[SUM($1)]) LogicalAggregate(group=[{0, 1}], EXPR$1=[COUNT()]) LogicalProject(DEPTNO=[$7], SAL=[$5]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -872,7 +872,7 @@ GROUP BY deptno]]> @@ -880,7 +880,7 @@ LogicalAggregate(group=[{0}], EXPR$1=[SUM($1)], EXPR$2=[MIN($1)], EXPR$3=[SUM(DI LogicalAggregate(group=[{0}], EXPR$1=[SUM($2)], EXPR$2=[MIN($3)], EXPR$3=[SUM($1)]) LogicalAggregate(group=[{0, 2}], EXPR$1=[SUM($1)], EXPR$2=[MIN($1)]) LogicalProject(DEPTNO=[$7], COMM=[$6], SAL=[$5]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -894,7 +894,7 @@ GROUP BY sal]]> @@ -902,7 +902,7 @@ LogicalAggregate(group=[{0}], EXPR$1=[SUM($1)], EXPR$2=[MIN($1)], EXPR$3=[SUM(DI LogicalAggregate(group=[{0}], EXPR$1=[SUM($1)], EXPR$2=[MIN($2)], EXPR$3=[SUM($0)]) LogicalAggregate(group=[{0}], EXPR$1=[SUM($1)], EXPR$2=[MIN($1)]) LogicalProject(SAL=[$5], COMM=[$6]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -916,7 +916,7 @@ GROUP BY sal]]> @@ -926,16 +926,16 @@ LogicalProject(SAL=[$0], EXPR$1=[$1], EXPR$2=[$3], EXPR$3=[$5]) LogicalJoin(condition=[IS NOT DISTINCT FROM($0, $2)], joinType=[inner]) LogicalAggregate(group=[{0}], EXPR$1=[SUM($1)]) LogicalProject(SAL=[$5], COMM=[$6]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0}], EXPR$2=[MIN($1)]) LogicalAggregate(group=[{0, 1}]) LogicalProject(SAL=[$5], COMM=[$6]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0}], EXPR$3=[SUM($0)]) LogicalAggregate(group=[{0}]) LogicalProject(SAL=[$0]) LogicalProject(SAL=[$5], COMM=[$6]) 
- LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -949,7 +949,7 @@ GROUP BY deptno]]> @@ -957,7 +957,7 @@ LogicalAggregate(group=[{0}], EXPR$1=[SUM($1)], EXPR$2=[MIN($1)], EXPR$3=[COUNT( LogicalAggregate(group=[{0}], EXPR$1=[SUM($3)], EXPR$2=[MIN($4)], EXPR$3=[COUNT($2, $1)]) LogicalAggregate(group=[{0, 1, 2}], EXPR$1=[SUM($1)], EXPR$2=[MIN($1)]) LogicalProject(DEPTNO=[$7], COMM=[$6], SAL=[$5]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -971,7 +971,7 @@ GROUP BY deptno]]> @@ -979,7 +979,7 @@ LogicalAggregate(group=[{0}], EXPR$1=[SUM($1)], EXPR$2=[MIN($1)], EXPR$3=[COUNT( LogicalAggregate(group=[{0}], EXPR$1=[SUM($3)], EXPR$2=[MIN($4)], EXPR$3=[COUNT($2, $0, $1)]) LogicalAggregate(group=[{0, 1, 2}], EXPR$1=[SUM($1)], EXPR$2=[MIN($1)]) LogicalProject(DEPTNO=[$7], COMM=[$6], SAL=[$5]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1069,8 +1069,8 @@ LogicalValues(tuples=[[]]) ProjectRel(EXPR$0=[1]) FilterRel(condition=[=($1, 'Charlie')]) JoinRel(condition=[=($0, $9)], joinType=[left]) - TableAccessRel(table=[[CATALOG, SALES, DEPT]]) - TableAccessRel(table=[[CATALOG, SALES, EMP]]) + TableAccessRel(entity=[[CATALOG, SALES, DEPT]]) + TableAccessRel(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1078,8 +1078,8 @@ ProjectRel(EXPR$0=[1]) ProjectRel(EXPR$0=[1]) JoinRel(condition=[=($0, $9)], joinType=[left]) FilterRel(condition=[=($1, 'Charlie')]) - TableAccessRel(table=[[CATALOG, SALES, DEPT]]) - TableAccessRel(table=[[CATALOG, SALES, EMP]]) + TableAccessRel(entity=[[CATALOG, SALES, DEPT]]) + TableAccessRel(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1096,11 +1096,11 @@ LogicalProject(SAL=[$0]) SemiJoin(condition=[=($1, $2)], joinType=[inner]) LogicalFilter(condition=[=($1, 200)]) LogicalProject(SAL=[$5], DEPTNO=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(DEPTNO=[$1]) LogicalFilter(condition=[=($0, 100)]) LogicalProject(SAL=[$5], DEPTNO=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1109,11 +1109,11 @@ LogicalProject(SAL=[$0]) SemiJoin(condition=[=($1, $2)], joinType=[inner]) LogicalFilter(condition=[=($1, 200)]) LogicalProject(SAL=[$5], DEPTNO=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(DEPTNO=[$1]) LogicalFilter(condition=[=($0, 100)]) LogicalProject(SAL=[$5], DEPTNO=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1126,8 +1126,8 @@ LogicalProject(SAL=[$0]) LogicalProject(EXPR$0=[1]) LogicalFilter(condition=[=($1, 'Charlie')]) LogicalJoin(condition=[=($0, $9)], joinType=[full]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1136,8 +1136,8 @@ LogicalProject(EXPR$0=[1]) LogicalProject(DEPTNO=[CAST($0):INTEGER], NAME=[CAST($1):VARCHAR(10)], EMPNO=[$2], ENAME=[$3], JOB=[$4], MGR=[$5], HIREDATE=[$6], SAL=[$7], COMM=[$8], DEPTNO0=[$9], SLACKER=[$10]) LogicalJoin(condition=[=($0, $9)], joinType=[left]) LogicalFilter(condition=[=($1, 'Charlie')]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1150,8 +1150,8 @@ 
LogicalProject(EXPR$0=[1]) LogicalProject(EXPR$0=[1]) LogicalFilter(condition=[>($7, 100)]) LogicalJoin(condition=[=($0, $9)], joinType=[full]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1159,9 +1159,9 @@ LogicalProject(EXPR$0=[1]) LogicalProject(EXPR$0=[1]) LogicalProject(DEPTNO=[$0], NAME=[$1], EMPNO=[CAST($2):INTEGER], ENAME=[CAST($3):VARCHAR(20)], JOB=[CAST($4):VARCHAR(10)], MGR=[$5], HIREDATE=[CAST($6):TIMESTAMP(0)], SAL=[CAST($7):INTEGER], COMM=[CAST($8):INTEGER], DEPTNO0=[CAST($9):INTEGER], SLACKER=[CAST($10):BOOLEAN]) LogicalJoin(condition=[=($0, $9)], joinType=[right]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalFilter(condition=[>($5, 100)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1174,8 +1174,8 @@ LogicalProject(EXPR$0=[1]) LogicalProject(EXPR$0=[1]) LogicalFilter(condition=[AND(=($1, 'Charlie'), >($7, 100))]) LogicalJoin(condition=[=($0, $9)], joinType=[full]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1184,9 +1184,9 @@ LogicalProject(EXPR$0=[1]) LogicalProject(DEPTNO=[CAST($0):INTEGER], NAME=[CAST($1):VARCHAR(10)], EMPNO=[CAST($2):INTEGER], ENAME=[CAST($3):VARCHAR(20)], JOB=[CAST($4):VARCHAR(10)], MGR=[$5], HIREDATE=[CAST($6):TIMESTAMP(0)], SAL=[CAST($7):INTEGER], COMM=[CAST($8):INTEGER], DEPTNO0=[CAST($9):INTEGER], SLACKER=[CAST($10):BOOLEAN]) LogicalJoin(condition=[=($0, $9)], joinType=[inner]) LogicalFilter(condition=[=($1, 'Charlie')]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalFilter(condition=[>($5, 100)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1199,8 +1199,8 @@ LogicalProject(EXPR$0=[1]) LogicalProject(EXPR$0=[1]) LogicalFilter(condition=[>($7, 100)]) LogicalJoin(condition=[=($0, $9)], joinType=[left]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1208,9 +1208,9 @@ LogicalProject(EXPR$0=[1]) LogicalProject(EXPR$0=[1]) LogicalProject(DEPTNO=[$0], NAME=[$1], EMPNO=[CAST($2):INTEGER], ENAME=[CAST($3):VARCHAR(20)], JOB=[CAST($4):VARCHAR(10)], MGR=[$5], HIREDATE=[CAST($6):TIMESTAMP(0)], SAL=[CAST($7):INTEGER], COMM=[CAST($8):INTEGER], DEPTNO0=[CAST($9):INTEGER], SLACKER=[CAST($10):BOOLEAN]) LogicalJoin(condition=[=($0, $9)], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalFilter(condition=[>($5, 100)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1223,8 +1223,8 @@ LogicalProject(EXPR$0=[1]) LogicalProject(EXPR$0=[1]) LogicalFilter(condition=[=($1, 'Charlie')]) LogicalJoin(condition=[=($0, $9)], joinType=[right]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1233,8 +1233,8 @@ LogicalProject(EXPR$0=[1]) LogicalProject(DEPTNO=[CAST($0):INTEGER], NAME=[CAST($1):VARCHAR(10)], EMPNO=[$2], ENAME=[$3], JOB=[$4], MGR=[$5], HIREDATE=[$6], SAL=[$7], 
COMM=[$8], DEPTNO0=[$9], SLACKER=[$10]) LogicalJoin(condition=[=($0, $9)], joinType=[inner]) LogicalFilter(condition=[=($1, 'Charlie')]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1250,9 +1250,9 @@ where R.deptno <=10]]> LogicalProject(DEPTNO=[$0], NAME=[$1]) LogicalFilter(condition=[<=($0, 10)]) SemiJoin(condition=[=($0, $2)], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalProject(DEPTNO=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1260,9 +1260,9 @@ LogicalProject(DEPTNO=[$0], NAME=[$1]) LogicalProject(DEPTNO=[$0], NAME=[$1]) SemiJoin(condition=[=($0, $2)], joinType=[inner]) LogicalFilter(condition=[<=($0, 10)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalProject(DEPTNO=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1276,7 +1276,7 @@ LogicalProject(DNAME=[$0], C=[$1]) LogicalFilter(condition=[=($0, 'Charlie')]) LogicalAggregate(group=[{0}], C=[COUNT()]) LogicalProject(DNAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -1285,7 +1285,7 @@ LogicalProject(DNAME=[$0], C=[$1]) LogicalAggregate(group=[{0}], C=[COUNT()]) LogicalFilter(condition=[=($0, 'Charlie')]) LogicalProject(DNAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -1302,7 +1302,7 @@ LogicalProject(DDEPTNO=[$0], DNAME=[$1], C=[$2]) LogicalProject(DDEPTNO=[$0], DNAME=[$1], C=[$2]) LogicalFilter(condition=[=($1, 'Charlie')]) LogicalAggregate(group=[{0, 1}], groups=[[{0, 1}, {0}, {}]], C=[COUNT()]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -1311,7 +1311,7 @@ LogicalProject(DDEPTNO=[$0], DNAME=[$1], C=[$2]) LogicalProject(DDEPTNO=[CASE($2, null, $0)], DNAME=[CASE($3, null, $1)], C=[$4]) LogicalFilter(condition=[=(CASE($3, null, $1), 'Charlie')]) LogicalAggregate(group=[{0, 1}], groups=[[{0, 1}, {0}, {}]], indicator=[true], C=[COUNT()]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -1328,7 +1328,7 @@ LogicalProject(DNAME=[$0], DDEPTNO=[$1], C=[$2]) LogicalFilter(condition=[=($0, 'Charlie')]) LogicalAggregate(group=[{0, 1}], groups=[[{0, 1}, {0}]], C=[COUNT()]) LogicalProject(DNAME=[$1], DDEPTNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -1337,7 +1337,7 @@ LogicalProject(DNAME=[$0], DDEPTNO=[$1], C=[$2]) LogicalAggregate(group=[{0, 1}], groups=[[{0, 1}, {0}]], C=[COUNT()]) LogicalFilter(condition=[=($0, 'Charlie')]) LogicalProject(DNAME=[$1], DDEPTNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -1349,7 +1349,7 @@ LogicalProject(DNAME=[$0], DDEPTNO=[$1], C=[$2]) @@ -1357,7 +1357,7 @@ LogicalAggregate(group=[{0}], EXPR$1=[MAX($0)], EXPR$2=[AVG($1)], EXPR$3=[MIN($0 LogicalProject(NAME=[$0], EXPR$1=[$1], EXPR$2=[CAST(/($2, $3)):INTEGER NOT NULL], EXPR$3=[$4]) LogicalAggregate(group=[{0}], EXPR$1=[MAX($0)], agg#1=[$SUM0($1)], agg#2=[COUNT()], EXPR$3=[MIN($0)]) LogicalProject(NAME=[$1], DEPTNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -1369,7 
+1369,7 @@ LogicalProject(NAME=[$0], EXPR$1=[$1], EXPR$2=[CAST(/($2, $3)):INTEGER NOT NULL] @@ -1377,7 +1377,7 @@ LogicalProject(EXPR$0=[+($0, $7)]) LogicalProject(EXPR$0=[+($0, $4)]) LogicalFilter(condition=[AND(=($2, *(10, $3)), =(UPPER($1), 'FOO'))]) LogicalProject(EMPNO=[$0], ENAME=[$1], SAL=[$5], COMM=[$6], DEPTNO=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1389,8 +1389,8 @@ LogicalProject(EXPR$0=[+($0, $4)]) @@ -1398,9 +1398,9 @@ LogicalProject(EXPR$0=[+($5, $12)]) LogicalProject(EXPR$0=[+($1, $4)]) LogicalJoin(condition=[AND(=($0, $3), $2)], joinType=[inner]) LogicalProject(ENAME=[$1], SAL=[$5], ==[=($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(ENAME=[$0], COMM=[$3]) - LogicalScan(table=[[CATALOG, SALES, BONUS]]) + LogicalScan(entity=[[CATALOG, SALES, BONUS]]) ]]> @@ -1416,8 +1416,8 @@ LogicalProject(EXPR$0=[$1], EXPR$1=[$0]) LogicalAggregate(group=[{0}], EXPR$0=[COUNT()]) LogicalProject(EXPR$1=[CASE(<($5, 11), *(-1, $5), $5)]) LogicalJoin(condition=[=($1, $9)], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, BONUS]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, BONUS]]) ]]> @@ -1427,9 +1427,9 @@ LogicalProject(EXPR$0=[$1], EXPR$1=[$0]) LogicalProject(EXPR$1=[$1]) LogicalJoin(condition=[=($0, $2)], joinType=[inner]) LogicalProject(ENAME=[$1], CASE=[CASE(<($5, 11), *(-1, $5), $5)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(ENAME=[$0]) - LogicalScan(table=[[CATALOG, SALES, BONUS]]) + LogicalScan(entity=[[CATALOG, SALES, BONUS]]) ]]> @@ -1445,8 +1445,8 @@ LogicalProject(EXPR$0=[$1], EXPR$1=[$0]) LogicalAggregate(group=[{0}], EXPR$0=[COUNT()]) LogicalProject(EXPR$1=[CASE(<($5, 11), 11, *(-1, $5))]) LogicalJoin(condition=[=($1, $9)], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, BONUS]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, BONUS]]) ]]> @@ -1456,9 +1456,9 @@ LogicalProject(EXPR$0=[$1], EXPR$1=[$0]) LogicalProject(EXPR$1=[$1]) LogicalJoin(condition=[=($0, $2)], joinType=[inner]) LogicalProject(ENAME=[$1], CASE=[CASE(<($5, 11), 11, *(-1, $5))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(ENAME=[$0]) - LogicalScan(table=[[CATALOG, SALES, BONUS]]) + LogicalScan(entity=[[CATALOG, SALES, BONUS]]) ]]> @@ -1474,8 +1474,8 @@ LogicalProject(EXPR$0=[$1], EXPR$1=[$0]) LogicalAggregate(group=[{0}], EXPR$0=[COUNT()]) LogicalProject(EXPR$1=[CASE(<($5, 11), 11, *(-1, $5))]) LogicalJoin(condition=[=($1, $9)], joinType=[left]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, BONUS]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, BONUS]]) ]]> @@ -1485,9 +1485,9 @@ LogicalProject(EXPR$0=[$1], EXPR$1=[$0]) LogicalProject(EXPR$1=[$1]) LogicalJoin(condition=[=($0, $2)], joinType=[left]) LogicalProject(ENAME=[$1], CASE=[CASE(<($5, 11), 11, *(-1, $5))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(ENAME=[$0]) - LogicalScan(table=[[CATALOG, SALES, BONUS]]) + LogicalScan(entity=[[CATALOG, SALES, BONUS]]) ]]> @@ -1503,8 +1503,8 @@ LogicalProject(EXPR$0=[$1], EXPR$1=[$0]) LogicalAggregate(group=[{0}], EXPR$0=[COUNT()]) 
LogicalProject(EXPR$1=[CASE(<($9, 11), 11, *(-1, $9))]) LogicalJoin(condition=[=($5, $0)], joinType=[left]) - LogicalScan(table=[[CATALOG, SALES, BONUS]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, BONUS]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1514,9 +1514,9 @@ LogicalProject(EXPR$0=[$1], EXPR$1=[$0]) LogicalProject(EXPR$1=[CASE($2, 11, $3)]) LogicalJoin(condition=[=($1, $0)], joinType=[left]) LogicalProject(ENAME=[$0]) - LogicalScan(table=[[CATALOG, SALES, BONUS]]) + LogicalScan(entity=[[CATALOG, SALES, BONUS]]) LogicalProject(ENAME=[$1], <=[<($5, 11)], *=[*(-1, $5)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1532,8 +1532,8 @@ LogicalProject(EXPR$0=[$1], EXPR$1=[$0]) LogicalAggregate(group=[{0}], EXPR$0=[COUNT()]) LogicalProject(EXPR$1=[CASE(<($9, 11), *(-1, $9), $9)]) LogicalJoin(condition=[=($5, $0)], joinType=[left]) - LogicalScan(table=[[CATALOG, SALES, BONUS]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, BONUS]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1543,9 +1543,9 @@ LogicalProject(EXPR$0=[$1], EXPR$1=[$0]) LogicalProject(EXPR$1=[$2]) LogicalJoin(condition=[=($1, $0)], joinType=[left]) LogicalProject(ENAME=[$0]) - LogicalScan(table=[[CATALOG, SALES, BONUS]]) + LogicalScan(entity=[[CATALOG, SALES, BONUS]]) LogicalProject(ENAME=[$1], CASE=[CASE(<($5, 11), *(-1, $5), $5)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1561,8 +1561,8 @@ LogicalProject(EXPR$0=[$1], EXPR$1=[$0]) LogicalAggregate(group=[{0}], EXPR$0=[COUNT()]) LogicalProject(EXPR$1=[CASE(<($9, 11), *(-1, $9), $9)]) LogicalJoin(condition=[=($5, $0)], joinType=[left]) - LogicalScan(table=[[CATALOG, SALES, BONUS]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, BONUS]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1572,9 +1572,9 @@ LogicalProject(EXPR$0=[$1], EXPR$1=[$0]) LogicalProject(EXPR$1=[$2]) LogicalJoin(condition=[=($1, $0)], joinType=[left]) LogicalProject(ENAME=[$0]) - LogicalScan(table=[[CATALOG, SALES, BONUS]]) + LogicalScan(entity=[[CATALOG, SALES, BONUS]]) LogicalProject(ENAME=[$1], CASE=[CASE(<($5, 11), *(-1, $5), $5)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1590,8 +1590,8 @@ LogicalProject(EXPR$0=[$1], EXPR$1=[$0]) LogicalAggregate(group=[{0}], EXPR$0=[COUNT()]) LogicalProject(EXPR$1=[CASE(<($5, 11), 11, *(-1, $5))]) LogicalJoin(condition=[=($1, $9)], joinType=[right]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, BONUS]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, BONUS]]) ]]> @@ -1601,9 +1601,9 @@ LogicalProject(EXPR$0=[$1], EXPR$1=[$0]) LogicalProject(EXPR$1=[CASE($1, 11, $2)]) LogicalJoin(condition=[=($0, $3)], joinType=[right]) LogicalProject(ENAME=[$1], <=[<($5, 11)], *=[*(-1, $5)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(ENAME=[$0]) - LogicalScan(table=[[CATALOG, SALES, BONUS]]) + LogicalScan(entity=[[CATALOG, SALES, BONUS]]) ]]> @@ -1620,8 +1620,8 @@ LogicalProject(EXPR$0=[$1], EXPR$1=[$0]) LogicalAggregate(group=[{0}], EXPR$0=[COUNT()]) LogicalProject(EXPR$1=[CASE(<($5, 11), *(-1, $5), $5)]) LogicalJoin(condition=[=($1, $9)], joinType=[right]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, BONUS]]) + 
LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, BONUS]]) ]]> @@ -1631,9 +1631,9 @@ LogicalProject(EXPR$0=[$1], EXPR$1=[$0]) LogicalProject(EXPR$1=[$1]) LogicalJoin(condition=[=($0, $2)], joinType=[right]) LogicalProject(ENAME=[$1], CASE=[CASE(<($5, 11), *(-1, $5), $5)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(ENAME=[$0]) - LogicalScan(table=[[CATALOG, SALES, BONUS]]) + LogicalScan(entity=[[CATALOG, SALES, BONUS]]) ]]> @@ -1649,8 +1649,8 @@ LogicalProject(EXPR$0=[$1], EXPR$1=[$0]) LogicalAggregate(group=[{0}], EXPR$0=[COUNT()]) LogicalProject(EXPR$1=[CASE(<($9, 11), 11, *(-1, $9))]) LogicalJoin(condition=[=($5, $0)], joinType=[right]) - LogicalScan(table=[[CATALOG, SALES, BONUS]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, BONUS]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1660,9 +1660,9 @@ LogicalProject(EXPR$0=[$1], EXPR$1=[$0]) LogicalProject(EXPR$1=[$2]) LogicalJoin(condition=[=($1, $0)], joinType=[right]) LogicalProject(ENAME=[$0]) - LogicalScan(table=[[CATALOG, SALES, BONUS]]) + LogicalScan(entity=[[CATALOG, SALES, BONUS]]) LogicalProject(ENAME=[$1], CASE=[CASE(<($5, 11), 11, *(-1, $5))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1678,8 +1678,8 @@ LogicalProject(EXPR$0=[$1], EXPR$1=[$0]) LogicalAggregate(group=[{0}], EXPR$0=[COUNT()]) LogicalProject(EXPR$1=[CASE(<($9, 11), *(-1, $9), $9)]) LogicalJoin(condition=[=($5, $0)], joinType=[right]) - LogicalScan(table=[[CATALOG, SALES, BONUS]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, BONUS]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1689,9 +1689,9 @@ LogicalProject(EXPR$0=[$1], EXPR$1=[$0]) LogicalProject(EXPR$1=[$2]) LogicalJoin(condition=[=($1, $0)], joinType=[right]) LogicalProject(ENAME=[$0]) - LogicalScan(table=[[CATALOG, SALES, BONUS]]) + LogicalScan(entity=[[CATALOG, SALES, BONUS]]) LogicalProject(ENAME=[$1], CASE=[CASE(<($5, 11), *(-1, $5), $5)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1707,8 +1707,8 @@ LogicalProject(EXPR$0=[$1], EXPR$1=[$0]) LogicalAggregate(group=[{0}], EXPR$0=[COUNT()]) LogicalProject(EXPR$1=[CASE(<($5, 11), 11, *(-1, $5))]) LogicalJoin(condition=[=($1, $9)], joinType=[full]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, BONUS]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, BONUS]]) ]]> @@ -1718,9 +1718,9 @@ LogicalProject(EXPR$0=[$1], EXPR$1=[$0]) LogicalProject(EXPR$1=[CASE($1, 11, $2)]) LogicalJoin(condition=[=($0, $3)], joinType=[full]) LogicalProject(ENAME=[$1], <=[<($5, 11)], *=[*(-1, $5)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(ENAME=[$0]) - LogicalScan(table=[[CATALOG, SALES, BONUS]]) + LogicalScan(entity=[[CATALOG, SALES, BONUS]]) ]]> @@ -1736,8 +1736,8 @@ LogicalProject(EXPR$0=[$1], EXPR$1=[$0]) LogicalAggregate(group=[{0}], EXPR$0=[COUNT()]) LogicalProject(EXPR$1=[CASE(<($5, 11), *(-1, $5), $5)]) LogicalJoin(condition=[=($1, $9)], joinType=[full]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, BONUS]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, BONUS]]) ]]> @@ -1747,9 +1747,9 @@ LogicalProject(EXPR$0=[$1], EXPR$1=[$0]) LogicalProject(EXPR$1=[$1]) 
LogicalJoin(condition=[=($0, $2)], joinType=[full]) LogicalProject(ENAME=[$1], CASE=[CASE(<($5, 11), *(-1, $5), $5)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(ENAME=[$0]) - LogicalScan(table=[[CATALOG, SALES, BONUS]]) + LogicalScan(entity=[[CATALOG, SALES, BONUS]]) ]]> @@ -1762,9 +1762,9 @@ LogicalProject(EXPR$0=[$1], EXPR$1=[$0]) LogicalProject(SAL=[$5]) LogicalUnion(all=[true]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1772,10 +1772,10 @@ LogicalProject(SAL=[$5]) LogicalUnion(all=[true]) LogicalProject(SAL=[$5]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(SAL=[$5]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1789,10 +1789,10 @@ LogicalProject(SAL=[$5]) LogicalJoin(condition=[true], joinType=[inner]) LogicalUnion(all=[true]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1801,12 +1801,12 @@ LogicalProject(SAL=[$5]) LogicalUnion(all=[true]) LogicalJoin(condition=[true], joinType=[inner]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalJoin(condition=[true], joinType=[inner]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1818,12 +1818,12 @@ LogicalProject(SAL=[$5]) @@ -1831,13 +1831,13 @@ LogicalProject(SAL=[$5]) LogicalProject(SAL=[$5]) LogicalUnion(all=[true]) LogicalJoin(condition=[true], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalJoin(condition=[true], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], 
HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1851,9 +1851,9 @@ LogicalProject(EXPR$0=[+(1, 2)], EXPR$1=[+($0, +(3, 4))], EXPR$2=[+(+(5, 6), $0) LogicalFilter(condition=[AND(=($0, +(7, 8)), =($0, +(8, 7)), =($0, CASE(IS NOT NULL(2), 2, null:INTEGER)))]) LogicalProject(DEPTNO=[$0], NAME=[$1], EMPNO=[$2], ENAME=[$3], JOB=[$4], MGR=[$5], HIREDATE=[$6], SAL=[$7], COMM=[$8], DEPTNO0=[$9], SLACKER=[$10]) LogicalJoin(condition=[=($0, $11)], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], $f9=[+($7, -(5, 5))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1871,7 +1871,7 @@ LogicalProject(EXPR$0=[3], EXPR$1=[22], EXPR$2=[26], EXPR$3=[null:INTEGER], EXPR @@ -2128,9 +2128,9 @@ LogicalProject(EXPR$0=[CAST($1):VARCHAR(128) NOT NULL], EXPR$1=[$2]) LogicalProject(DEPTNO=[$0], NAME=[$1], EMPNO=[$3], ENAME=[$4], JOB=[$5], MGR=[$6], HIREDATE=[$7], SAL=[$8], COMM=[$9], DEPTNO1=[$10], SLACKER=[$11]) LogicalJoin(condition=[=($2, $12)], joinType=[inner]) LogicalProject(DEPTNO=[$0], NAME=[$1], DEPTNO0=[$0]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], DEPTNO0=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2140,9 +2140,9 @@ LogicalProject(EXPR$0=[CAST($1):VARCHAR(128) NOT NULL], EXPR$1=[$2]) LogicalProject(DEPTNO=[$0], NAME=[$1], EMPNO=[$3], ENAME=[$4], JOB=[$5], MGR=[$6], HIREDATE=[$7], SAL=[$8], COMM=[$9], DEPTNO1=[$10], SLACKER=[$11]) LogicalJoin(condition=[=($2, $12)], joinType=[inner]) LogicalProject(DEPTNO=[$0], NAME=[$1], DEPTNO0=[$0]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], DEPTNO0=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2156,9 +2156,9 @@ AggregateRel(group=[{0}], EXPR$1=[SUM($1)], EXPR$2=[COUNT()]) ProjectRel(ENAME=[$1], EMPNO=[$0]) UnionRel(all=[true]) ProjectRel(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) - TableAccessRel(table=[[CATALOG, SALES, EMP]]) + TableAccessRel(entity=[[CATALOG, SALES, EMP]]) ProjectRel(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) - TableAccessRel(table=[[CATALOG, SALES, EMP]]) + TableAccessRel(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2170,12 +2170,12 @@ ProjectRel(ENAME=[$0], EXPR$1=[$1], EXPR$2=[CAST($2):BIGINT NOT NULL]) ProjectRel(ENAME=[$1], EMPNO=[$0]) ProjectRel(EMPNO=[$0], ENAME=[$1]) ProjectRel(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) - TableAccessRel(table=[[CATALOG, SALES, EMP]]) + TableAccessRel(entity=[[CATALOG, SALES, EMP]]) AggregateRel(group=[{0}], EXPR$1=[SUM($1)], EXPR$2=[COUNT()]) ProjectRel(ENAME=[$1], EMPNO=[$0]) ProjectRel(EMPNO=[$0], ENAME=[$1]) ProjectRel(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) - 
TableAccessRel(table=[[CATALOG, SALES, EMP]]) + TableAccessRel(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2187,14 +2187,14 @@ ProjectRel(ENAME=[$0], EXPR$1=[$1], EXPR$2=[CAST($2):BIGINT NOT NULL]) @@ -2204,11 +2204,11 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ select upper(substring(x FROM 1 FOR 2) || substring(x FROM 3)) as u, substring(x FROM 1 FOR 1) as s from ( - select 'table' as x from (values (true)) + select 'entity' as x from (values (true)) union select 'view' from (values (true)) union - select 'foreign table' from (values (true)) + select 'foreign entity' from (values (true)) ) ) where u = 'TABLE']]> @@ -2219,11 +2219,11 @@ LogicalProject(U=[$0], S=[$1]) LogicalProject(U=[UPPER(||(SUBSTRING($0, 1, 2), SUBSTRING($0, 3)))], S=[SUBSTRING($0, 1, 1)]) LogicalUnion(all=[false]) LogicalUnion(all=[false]) - LogicalProject(X=['table']) + LogicalProject(X=['entity']) LogicalValues(tuples=[[{ true }]]) LogicalProject(EXPR$0=['view']) LogicalValues(tuples=[[{ true }]]) - LogicalProject(EXPR$0=['foreign table']) + LogicalProject(EXPR$0=['foreign entity']) LogicalValues(tuples=[[{ true }]]) ]]> @@ -2259,14 +2259,14 @@ LogicalProject(EXPR$0=[false]) @@ -2282,8 +2282,8 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalJoin(condition=[=($7, $9)], joinType=[inner]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[false]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -2305,8 +2305,8 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalJoin(condition=[=($7, $9)], joinType=[right]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[false]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -2314,7 +2314,7 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], DEPTNO0=[$9], NAME=[$10]) LogicalJoin(condition=[=($7, $9)], joinType=[right]) LogicalValues(tuples=[[]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -2330,8 +2330,8 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalJoin(condition=[=($7, $9)], joinType=[left]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[false]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -2349,7 +2349,7 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ @@ -2367,7 +2367,7 @@ EmptyRel LogicalSort(sort0=[$7], dir0=[ASC]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[false]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ 
-2386,7 +2386,7 @@ LogicalSort(sort0=[$7], dir0=[ASC]) @@ -2409,9 +2409,9 @@ LogicalProject(EMPNO=[$0]) LogicalAggregate(group=[{}], EXPR$0=[MAX($0)]) LogicalProject(SAL=[$5]) LogicalFilter(condition=[=($0, $cor0.EMPNO)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) }))], variablesSet=[[$cor0]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2420,11 +2420,11 @@ LogicalProject(EMPNO=[$0]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($5, $9)]) LogicalCorrelate(correlation=[$cor0], joinType=[left], requiredColumns=[{0}]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{}], EXPR$0=[MAX($0)]) LogicalProject(SAL=[$5]) LogicalFilter(condition=[=($0, $cor0.EMPNO)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2440,9 +2440,9 @@ where c0 in ( LogicalProject("K0"=[$0], "C1"=[$1], "F1"."A0"=[$2], "F2"."A0"=[$3], "F0"."C0"=[$4], "F1"."C0"=[$5], "F0"."C1"=[$6], "F1"."C2"=[$7], "F2"."C3"=[$8]) LogicalFilter(condition=[IN($4, { LogicalProject(C0=[$5]) - LogicalScan(table=[[CATALOG, STRUCT, T]]) + LogicalScan(entity=[[CATALOG, STRUCT, T]]) })]) - LogicalScan(table=[[CATALOG, STRUCT, T]]) + LogicalScan(entity=[[CATALOG, STRUCT, T]]) ]]> @@ -2450,10 +2450,10 @@ LogicalProject(C0=[$5]) LogicalProject("K0"=[$0], "C1"=[$1], "F1"."A0"=[$2], "F2"."A0"=[$3], "F0"."C0"=[$4], "F1"."C0"=[$5], "F0"."C1"=[$6], "F1"."C2"=[$7], "F2"."C3"=[$8]) LogicalProject("K0"=[$0], "C1"=[$1], "F1"."A0"=[$2], "F2"."A0"=[$3], "F0"."C0"=[$4], "F1"."C0"=[$5], "F0"."C1"=[$6], "F1"."C2"=[$7], "F2"."C3"=[$8]) LogicalJoin(condition=[=($4, $9)], joinType=[inner]) - LogicalScan(table=[[CATALOG, STRUCT, T]]) + LogicalScan(entity=[[CATALOG, STRUCT, T]]) LogicalAggregate(group=[{0}]) LogicalProject(C0=[$5]) - LogicalScan(table=[[CATALOG, STRUCT, T]]) + LogicalScan(entity=[[CATALOG, STRUCT, T]]) ]]> @@ -2471,9 +2471,9 @@ LogicalProject("K0"=[$0], "C1"=[$1], "F1"."A0"=[$2], "F2"."A0"=[$3], "F0"."C0"=[ LogicalAggregate(group=[{}], EXPR$0=[MAX($0)]) LogicalProject("F1"."C0"=[$5]) LogicalFilter(condition=[=($cor0."K0", $0)]) - LogicalScan(table=[[CATALOG, STRUCT, T]]) + LogicalScan(entity=[[CATALOG, STRUCT, T]]) }))], variablesSet=[[$cor0]]) - LogicalScan(table=[[CATALOG, STRUCT, T]]) + LogicalScan(entity=[[CATALOG, STRUCT, T]]) ]]> @@ -2482,11 +2482,11 @@ LogicalProject("K0"=[$0], "C1"=[$1], "F1"."A0"=[$2], "F2"."A0"=[$3], "F0"."C0"=[ LogicalProject("K0"=[$0], "C1"=[$1], "F1"."A0"=[$2], "F2"."A0"=[$3], "F0"."C0"=[$4], "F1"."C0"=[$5], "F0"."C1"=[$6], "F1"."C2"=[$7], "F2"."C3"=[$8]) LogicalFilter(condition=[=($4, $9)]) LogicalCorrelate(correlation=[$cor0], joinType=[left], requiredColumns=[{0}]) - LogicalScan(table=[[CATALOG, STRUCT, T]]) + LogicalScan(entity=[[CATALOG, STRUCT, T]]) LogicalAggregate(group=[{}], EXPR$0=[MAX($0)]) LogicalProject("F1"."C0"=[$5]) LogicalFilter(condition=[=($cor0."K0", $0)]) - LogicalScan(table=[[CATALOG, STRUCT, T]]) + LogicalScan(entity=[[CATALOG, STRUCT, T]]) ]]> @@ -2503,9 +2503,9 @@ LogicalProject("K0"=[$0], "C1"=[$1], "F1"."A0"=[$2], "F2"."A0"=[$3], "F0"."C0"=[ LogicalFilter(condition=[IN($4, { LogicalProject(C0=[$5]) LogicalFilter(condition=[=($cor0."F1"."C2", $7)]) - LogicalScan(table=[[CATALOG, STRUCT, T]]) + LogicalScan(entity=[[CATALOG, STRUCT, T]]) })], variablesSet=[[$cor0]]) - 
LogicalScan(table=[[CATALOG, STRUCT, T]]) + LogicalScan(entity=[[CATALOG, STRUCT, T]]) ]]> @@ -2514,11 +2514,11 @@ LogicalProject("K0"=[$0], "C1"=[$1], "F1"."A0"=[$2], "F2"."A0"=[$3], "F0"."C0"=[ LogicalProject("K0"=[$0], "C1"=[$1], "F1"."A0"=[$2], "F2"."A0"=[$3], "F0"."C0"=[$4], "F1"."C0"=[$5], "F0"."C1"=[$6], "F1"."C2"=[$7], "F2"."C3"=[$8]) LogicalFilter(condition=[=($4, $9)]) LogicalCorrelate(correlation=[$cor0], joinType=[inner], requiredColumns=[{7}]) - LogicalScan(table=[[CATALOG, STRUCT, T]]) + LogicalScan(entity=[[CATALOG, STRUCT, T]]) LogicalAggregate(group=[{0}]) LogicalProject(C0=[$5]) LogicalFilter(condition=[=($cor0."F1"."C2", $7)]) - LogicalScan(table=[[CATALOG, STRUCT, T]]) + LogicalScan(entity=[[CATALOG, STRUCT, T]]) ]]> @@ -2532,14 +2532,14 @@ where extract(year from birthdate) = 2014 and extract(month from birthdate) = 4]]> LogicalProject(EMPNO=[$0]) LogicalFilter(condition=[AND(>=($9, 2014-04-01), <($9, 2014-05-01))]) - LogicalScan(table=[[CATALOG, SALES, EMP_B]]) + LogicalScan(entity=[[CATALOG, SALES, EMP_B]]) ]]> @@ -2553,14 +2553,14 @@ where extract(year from birthdate) = 2014]]> LogicalProject(EMPNO=[$0]) LogicalFilter(condition=[AND(>=($9, 2014-01-01), <($9, 2015-01-01))]) - LogicalScan(table=[[CATALOG, SALES, EMP_B]]) + LogicalScan(entity=[[CATALOG, SALES, EMP_B]]) ]]> @@ -2580,12 +2580,12 @@ WHERE exists ( LogicalProject(EMPNO=[$0]) LogicalFilter(condition=[IS NOT NULL($9)]) LogicalCorrelate(correlation=[$cor0], joinType=[left], requiredColumns=[{7}]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{}], agg#0=[MIN($0)]) LogicalProject($f0=[true]) LogicalFilter(condition=[=($cor0.DEPTNO, $0)]) LogicalProject(TWICEDEPTNO=[*($0, 2)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -2593,12 +2593,12 @@ LogicalProject(EMPNO=[$0]) LogicalProject(EMPNO=[$0]) LogicalFilter(condition=[IS NOT NULL($9)]) LogicalCorrelate(correlation=[$cor0], joinType=[left], requiredColumns=[{7}]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{}], agg#0=[MIN($0)]) LogicalProject($f0=[true]) LogicalProject(TWICEDEPTNO=[*($0, 2)]) LogicalFilter(condition=[=($cor0.DEPTNO, *($0, 2))]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -2618,12 +2618,12 @@ WHERE exists ( LogicalProject(EMPNO=[$0]) LogicalFilter(condition=[IS NOT NULL($9)]) LogicalCorrelate(correlation=[$cor0], joinType=[left], requiredColumns=[{7}]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{}], agg#0=[MIN($0)]) LogicalProject($f0=[true]) LogicalFilter(condition=[=($cor0.DEPTNO, $0)]) LogicalProject(TWICEDEPTNO=[*($0, 2)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -2631,13 +2631,13 @@ LogicalProject(EMPNO=[$0]) @@ -2650,8 +2650,8 @@ join sales.emp e on e.deptno = d.deptno and d.deptno not in (4, 6)]]> @@ -2659,9 +2659,9 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], EMPNO0=[$9], ENAME0=[$10], JOB0=[$11], MGR0=[$12], HIREDATE0=[$13], SAL0=[$14], COMM0=[$15], DEPTNO0=[$16], SLACKER0=[$17]) LogicalJoin(condition=[=($16, $7)], joinType=[inner]) LogicalFilter(condition=[AND(<>($7, 4), <>($7, 6))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalFilter(condition=[AND(<>($7, 4), 
<>($7, 6))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2680,13 +2680,13 @@ LogicalIntersect(all=[false]) LogicalIntersect(all=[false]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 30)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2698,15 +2698,15 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalAggregate(group=[{0, 1, 2, 3, 4, 5, 6, 7, 8}], agg#0=[COUNT()]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0, 1, 2, 3, 4, 5, 6, 7, 8}], agg#0=[COUNT()]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0, 1, 2, 3, 4, 5, 6, 7, 8}], agg#0=[COUNT()]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 30)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2725,13 +2725,13 @@ LogicalIntersect(all=[true]) LogicalIntersect(all=[false]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 30)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2744,14 +2744,14 @@ LogicalIntersect(all=[true]) LogicalAggregate(group=[{0, 1, 2, 3, 4, 5, 6, 7, 8}], agg#0=[COUNT()]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0, 1, 2, 3, 4, 5, 6, 7, 8}], agg#0=[COUNT()]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) 
LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 30)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2765,11 +2765,11 @@ IN (select e.deptno from sales.emp e where e.deptno = d.deptno or e.deptno = 4)] LogicalProject(DEPTNO=[$7]) LogicalFilter(condition=[=($7, $9)]) LogicalCorrelate(correlation=[$cor0], joinType=[inner], requiredColumns=[{7}]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0}]) LogicalProject(DEPTNO=[$7]) LogicalFilter(condition=[OR(=($7, $cor0.DEPTNO), =($7, 4))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2788,17 +2788,17 @@ LogicalProject(SAL=[$5]) LogicalJoin(condition=[AND(=($5, $9), =($5, $8))], joinType=[inner]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], SLACKER=[$8]) LogicalFilter(condition=[AND(=($7, 20), >($5, 1000))]) - LogicalScan(table=[[CATALOG, SALES, EMPNULLABLES]]) + LogicalScan(entity=[[CATALOG, SALES, EMPNULLABLES]]) LogicalAggregate(group=[{0, 1}]) LogicalProject(SAL=[$5], SAL0=[$8]) LogicalJoin(condition=[OR(=($8, $5), =($8, 4))], joinType=[inner]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], SLACKER=[$8]) LogicalFilter(condition=[AND(=($7, 20), >($5, 1000))]) - LogicalScan(table=[[CATALOG, SALES, EMPNULLABLES]]) + LogicalScan(entity=[[CATALOG, SALES, EMPNULLABLES]]) LogicalAggregate(group=[{0}]) LogicalProject(SAL=[$5]) LogicalFilter(condition=[AND(=($7, 20), >($5, 1000))]) - LogicalScan(table=[[CATALOG, SALES, EMPNULLABLES]]) + LogicalScan(entity=[[CATALOG, SALES, EMPNULLABLES]]) ]]> @@ -2808,18 +2808,18 @@ LogicalProject(SAL=[$5]) LogicalFilter(condition=[OR(IS NOT NULL($5), =($5, 4))]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], SLACKER=[$8]) LogicalFilter(condition=[AND(=($7, 20), >($5, 1000))]) - LogicalScan(table=[[CATALOG, SALES, EMPNULLABLES]]) + LogicalScan(entity=[[CATALOG, SALES, EMPNULLABLES]]) LogicalFilter(condition=[AND(OR(IS NOT NULL($0), =($0, 4)), OR(=($0, $1), =($0, 4)), OR(IS NOT NULL($1), =($1, 4)))]) LogicalAggregate(group=[{0, 1}]) LogicalProject(SAL=[$5], SAL0=[$8]) LogicalJoin(condition=[OR(=($8, $5), =($8, 4))], joinType=[inner]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], SLACKER=[$8]) LogicalFilter(condition=[AND(=($7, 20), >($5, 1000))]) - LogicalScan(table=[[CATALOG, SALES, EMPNULLABLES]]) + LogicalScan(entity=[[CATALOG, SALES, EMPNULLABLES]]) LogicalAggregate(group=[{0}]) LogicalProject(SAL=[$5]) LogicalFilter(condition=[AND(=($7, 20), >($5, 1000))]) - LogicalScan(table=[[CATALOG, SALES, EMPNULLABLES]]) + LogicalScan(entity=[[CATALOG, SALES, EMPNULLABLES]]) ]]> @@ -2838,13 +2838,13 @@ LogicalIntersect(all=[false]) LogicalIntersect(all=[false]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) 
LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 30)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2852,13 +2852,13 @@ LogicalIntersect(all=[false]) LogicalIntersect(all=[false]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 30)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2877,13 +2877,13 @@ LogicalMinus(all=[false]) LogicalMinus(all=[false]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 30)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2891,13 +2891,13 @@ LogicalMinus(all=[false]) LogicalMinus(all=[false]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 30)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2915,15 +2915,15 @@ select * from ( LogicalMinus(all=[false]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalMinus(all=[false]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], 
COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 30)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2941,14 +2941,14 @@ select * from emp where deptno = 30 LogicalUnion(all=[false]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalIntersect(all=[false]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 30)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2967,13 +2967,13 @@ LogicalUnion(all=[true]) LogicalUnion(all=[true]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 30)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2981,13 +2981,13 @@ LogicalUnion(all=[true]) LogicalUnion(all=[true]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 30)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3006,13 +3006,13 @@ LogicalUnion(all=[true]) LogicalUnion(all=[true]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 30)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3020,13 +3020,13 @@ 
LogicalUnion(all=[true]) LogicalUnion(all=[true]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 30)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3045,13 +3045,13 @@ LogicalUnion(all=[false]) LogicalUnion(all=[false]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 30)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3059,13 +3059,13 @@ LogicalUnion(all=[false]) LogicalUnion(all=[false]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 30)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3084,13 +3084,13 @@ LogicalUnion(all=[true]) LogicalUnion(all=[false]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 30)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3109,13 +3109,13 @@ LogicalUnion(all=[false]) LogicalUnion(all=[true]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 10)]) - 
LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 30)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3123,13 +3123,13 @@ LogicalUnion(all=[false]) LogicalUnion(all=[false]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 30)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3146,7 +3146,7 @@ group by deptno]]> LogicalProject(EXPR$0=[CAST(/(SUM(+(+($1, *(2, $2)), *(3, $3))) OVER (PARTITION BY $0 RANGE BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING), COUNT(+(+($1, *(2, $2)), *(3, $3))) OVER (PARTITION BY $0 RANGE BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING))):INTEGER NOT NULL]) LogicalAggregate(group=[{0}], agg#0=[SUM($1)], agg#1=[MIN($2)], agg#2=[AVG($2)]) LogicalProject(DEPTNO=[$7], SAL=[$5], EMPNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3156,7 +3156,7 @@ LogicalProject(EXPR$0=[CAST(/($2, $3)):INTEGER NOT NULL]) LogicalProject(DEPTNO=[$0], $1=[+(+($1, *(2, $2)), *(3, $3))]) LogicalAggregate(group=[{0}], agg#0=[SUM($1)], agg#1=[MIN($2)], agg#2=[AVG($2)]) LogicalProject(DEPTNO=[$7], SAL=[$5], EMPNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3168,14 +3168,14 @@ LogicalProject(EXPR$0=[CAST(/($2, $3)):INTEGER NOT NULL]) @@ -3190,7 +3190,7 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalFilter(condition=[OR(IS NULL($5), IS NOT NULL($5))]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], SLACKER=[$8]) LogicalFilter(condition=[AND(=($7, 20), >($5, 1000))]) - LogicalScan(table=[[CATALOG, SALES, EMPNULLABLES]]) + LogicalScan(entity=[[CATALOG, SALES, EMPNULLABLES]]) ]]> @@ -3198,7 +3198,7 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], SLACKER=[$7]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], SLACKER=[$8]) LogicalFilter(condition=[AND(=($7, 20), >($5, 1000))]) - LogicalScan(table=[[CATALOG, SALES, EMPNULLABLES]]) + LogicalScan(entity=[[CATALOG, SALES, EMPNULLABLES]]) ]]> @@ -3213,7 +3213,7 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalFilter(condition=[OR(IS NOT NULL($5), IS NULL($5))]) LogicalProject(EMPNO=[$0], 
ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], SLACKER=[$8]) LogicalFilter(condition=[AND(=($7, 20), >($5, 1000))]) - LogicalScan(table=[[CATALOG, SALES, EMPNULLABLES]]) + LogicalScan(entity=[[CATALOG, SALES, EMPNULLABLES]]) ]]> @@ -3221,7 +3221,7 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], SLACKER=[$7]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], SLACKER=[$8]) LogicalFilter(condition=[AND(=($7, 20), >($5, 1000))]) - LogicalScan(table=[[CATALOG, SALES, EMPNULLABLES]]) + LogicalScan(entity=[[CATALOG, SALES, EMPNULLABLES]]) ]]> @@ -3240,7 +3240,7 @@ group by deptno]]> LogicalProject(EXPR$0=[$1], EXPR$1=[$2], EXPR$2=[SUM($2) OVER (PARTITION BY $1 ORDER BY $2 RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW)]) LogicalAggregate(group=[{0}], EXPR$0=[MIN($1)], EXPR$1=[SUM($2)]) LogicalProject(DEPTNO=[$7], EMPNO=[$0], SAL=[$5]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3249,7 +3249,7 @@ LogicalProject(EXPR$0=[$1], EXPR$1=[$2], $2=[$3]) LogicalWindow(window#0=[window(partition {1} order by [2] range between UNBOUNDED PRECEDING and CURRENT ROW aggs [SUM($2)])]) LogicalAggregate(group=[{0}], EXPR$0=[MIN($1)], EXPR$1=[SUM($2)]) LogicalProject(DEPTNO=[$7], EMPNO=[$0], SAL=[$5]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3266,9 +3266,9 @@ LogicalAggregate(group=[{0}], EXPR$1=[SUM($1)]) LogicalProject(ENAME=[$1], U=[$9]) LogicalUnion(all=[true]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], U=[2]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], U=[3]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3277,10 +3277,10 @@ LogicalAggregate(group=[{0}], EXPR$1=[SUM($1)]) LogicalUnion(all=[true]) LogicalAggregate(group=[{0}], EXPR$1=[SUM($1)]) LogicalProject(ENAME=[$1], U=[2]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0}], EXPR$1=[SUM($1)]) LogicalProject(ENAME=[$1], U=[3]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3297,9 +3297,9 @@ LogicalAggregate(group=[{0}], EXPR$1=[SUM($1)]) LogicalProject(ENAME=[$1], U=[$9]) LogicalUnion(all=[true]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], U=[null:INTEGER]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], U=[null:INTEGER]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3308,10 +3308,10 @@ LogicalAggregate(group=[{0}], EXPR$1=[SUM($1)]) LogicalUnion(all=[true]) LogicalAggregate(group=[{0}], EXPR$1=[SUM($1)]) LogicalProject(ENAME=[$1], U=[null:INTEGER]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0}], EXPR$1=[SUM($1)]) LogicalProject(ENAME=[$1], U=[null:INTEGER]) - 
LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3329,9 +3329,9 @@ LogicalAggregate(group=[{0}], EXPR$1=[SUM($1)]) LogicalProject(ENAME=[$1], MGR=[$3]) LogicalUnion(all=[true]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3340,10 +3340,10 @@ LogicalAggregate(group=[{0}], EXPR$1=[SUM($1)]) LogicalUnion(all=[true]) LogicalAggregate(group=[{0}], EXPR$1=[SUM($1)]) LogicalProject(ENAME=[$1], MGR=[$3]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0}], EXPR$1=[SUM($1)]) LogicalProject(ENAME=[$1], MGR=[$3]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3360,9 +3360,9 @@ LogicalAggregate(group=[{}], EXPR$0=[SUM($0)]) LogicalProject(MGR=[$3]) LogicalUnion(all=[true]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3371,10 +3371,10 @@ LogicalAggregate(group=[{}], EXPR$0=[SUM($0)]) LogicalUnion(all=[true]) LogicalAggregate(group=[{}], EXPR$0=[SUM($0)]) LogicalProject(MGR=[$3]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{}], EXPR$0=[SUM($0)]) LogicalProject(MGR=[$3]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3391,9 +3391,9 @@ LogicalAggregate(group=[{0}], EXPR$1=[COUNT()]) LogicalProject(ENAME=[$1]) LogicalUnion(all=[true]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3402,10 +3402,10 @@ LogicalAggregate(group=[{0}], EXPR$1=[$SUM0($1)]) LogicalUnion(all=[true]) LogicalAggregate(group=[{0}], EXPR$1=[COUNT()]) LogicalProject(ENAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0}], EXPR$1=[COUNT()]) LogicalProject(ENAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3422,9 +3422,9 @@ LogicalAggregate(group=[{0}], EXPR$1=[COUNT($1)]) LogicalProject(ENAME=[$1], MGR=[$3]) LogicalUnion(all=[true]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) - 
LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3433,10 +3433,10 @@ LogicalAggregate(group=[{0}], EXPR$1=[$SUM0($1)]) LogicalUnion(all=[true]) LogicalAggregate(group=[{0}], EXPR$1=[COUNT($1)]) LogicalProject(ENAME=[$1], MGR=[$3]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0}], EXPR$1=[COUNT($1)]) LogicalProject(ENAME=[$1], MGR=[$3]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3453,9 +3453,9 @@ LogicalAggregate(group=[{0}], EXPR$1=[MAX($1)]) LogicalProject(ENAME=[$1], MGR=[$3]) LogicalUnion(all=[true]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3464,10 +3464,10 @@ LogicalAggregate(group=[{0}], EXPR$1=[MAX($1)]) LogicalUnion(all=[true]) LogicalAggregate(group=[{0}], EXPR$1=[MAX($1)]) LogicalProject(ENAME=[$1], MGR=[$3]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0}], EXPR$1=[MAX($1)]) LogicalProject(ENAME=[$1], MGR=[$3]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3484,9 +3484,9 @@ LogicalAggregate(group=[{0}], EXPR$1=[MIN($1)]) LogicalProject(ENAME=[$1], EMPNO=[$0]) LogicalUnion(all=[true]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3495,10 +3495,10 @@ LogicalAggregate(group=[{0}], EXPR$1=[MIN($1)]) LogicalUnion(all=[true]) LogicalAggregate(group=[{0}], EXPR$1=[MIN($1)]) LogicalProject(ENAME=[$1], EMPNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0}], EXPR$1=[MIN($1)]) LogicalProject(ENAME=[$1], EMPNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3515,9 +3515,9 @@ LogicalAggregate(group=[{0}], EXPR$1=[AVG($1)]) LogicalProject(ENAME=[$1], EMPNO=[$0]) LogicalUnion(all=[true]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3525,9 +3525,9 @@ LogicalAggregate(group=[{0}], EXPR$1=[AVG($1)]) LogicalAggregate(group=[{0}], EXPR$1=[AVG($1)]) LogicalUnion(all=[true]) LogicalProject(ENAME=[$1], EMPNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(ENAME=[$1], EMPNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) 
]]> @@ -3544,9 +3544,9 @@ LogicalAggregate(group=[{0}], EXPR$1=[SUM($1)], EXPR$2=[COUNT()], EXPR$3=[MIN($2 LogicalProject(ENAME=[$1], EMPNO=[$0], DEPTNO=[$7]) LogicalUnion(all=[true]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3555,10 +3555,10 @@ LogicalAggregate(group=[{0}], EXPR$1=[SUM($1)], EXPR$2=[$SUM0($2)], EXPR$3=[MIN( LogicalUnion(all=[true]) LogicalAggregate(group=[{0}], EXPR$1=[SUM($1)], EXPR$2=[COUNT()], EXPR$3=[MIN($2)], EXPR$4=[MAX($1)]) LogicalProject(ENAME=[$1], EMPNO=[$0], DEPTNO=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0}], EXPR$1=[SUM($1)], EXPR$2=[COUNT()], EXPR$3=[MIN($2)], EXPR$4=[MAX($1)]) LogicalProject(ENAME=[$1], EMPNO=[$0], DEPTNO=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3575,9 +3575,9 @@ LogicalAggregate(group=[{0, 1}], groups=[[{0, 1}, {0}, {}]], EXPR$2=[SUM($2)]) LogicalProject(DEPTNO=[$7], JOB=[$2], U=[$9]) LogicalUnion(all=[true]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], U=[2]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], U=[3]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3586,10 +3586,10 @@ LogicalAggregate(group=[{0, 1}], groups=[[{0, 1}, {0}, {}]], EXPR$2=[SUM($2)]) LogicalUnion(all=[true]) LogicalAggregate(group=[{0, 1}], EXPR$2=[SUM($2)]) LogicalProject(DEPTNO=[$7], JOB=[$2], U=[2]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0, 1}], EXPR$2=[SUM($2)]) LogicalProject(DEPTNO=[$7], JOB=[$2], U=[3]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3606,9 +3606,9 @@ LogicalAggregate(group=[{0, 1}], groups=[[{0, 1}, {0}, {}]], EXPR$2=[SUM($2)]) LogicalProject(DEPTNO=[$7], JOB=[$2], U=[$9]) LogicalUnion(all=[true]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], U=[null:INTEGER]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], U=[null:INTEGER]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3617,10 +3617,10 @@ LogicalAggregate(group=[{0, 1}], groups=[[{0, 1}, {0}, {}]], EXPR$2=[SUM($2)]) LogicalUnion(all=[true]) LogicalAggregate(group=[{0, 1}], EXPR$2=[SUM($2)]) LogicalProject(DEPTNO=[$7], JOB=[$2], U=[null:INTEGER]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0, 1}], EXPR$2=[SUM($2)]) LogicalProject(DEPTNO=[$7], JOB=[$2], U=[null:INTEGER]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3638,9 +3638,9 @@ 
LogicalAggregate(group=[{0, 1}], groups=[[{0, 1}, {0}, {}]], EXPR$2=[SUM($2)]) LogicalProject(DEPTNO=[$7], JOB=[$2], MGR=[$3]) LogicalUnion(all=[true]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3649,10 +3649,10 @@ LogicalAggregate(group=[{0, 1}], groups=[[{0, 1}, {0}, {}]], EXPR$2=[SUM($2)]) LogicalUnion(all=[true]) LogicalAggregate(group=[{0, 1}], EXPR$2=[SUM($2)]) LogicalProject(DEPTNO=[$7], JOB=[$2], MGR=[$3]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0, 1}], EXPR$2=[SUM($2)]) LogicalProject(DEPTNO=[$7], JOB=[$2], MGR=[$3]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3669,9 +3669,9 @@ LogicalAggregate(group=[{0, 1}], groups=[[{0, 1}, {0}, {}]], EXPR$2=[COUNT()]) LogicalProject(DEPTNO=[$7], JOB=[$2]) LogicalUnion(all=[true]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3680,10 +3680,10 @@ LogicalAggregate(group=[{0, 1}], groups=[[{0, 1}, {0}, {}]], EXPR$2=[$SUM0($2)]) LogicalUnion(all=[true]) LogicalAggregate(group=[{0, 1}], EXPR$2=[COUNT()]) LogicalProject(DEPTNO=[$7], JOB=[$2]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0, 1}], EXPR$2=[COUNT()]) LogicalProject(DEPTNO=[$7], JOB=[$2]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3700,9 +3700,9 @@ LogicalAggregate(group=[{0, 1}], groups=[[{0, 1}, {0}, {}]], EXPR$2=[COUNT($2)]) LogicalProject(DEPTNO=[$7], JOB=[$2], MGR=[$3]) LogicalUnion(all=[true]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3711,10 +3711,10 @@ LogicalAggregate(group=[{0, 1}], groups=[[{0, 1}, {0}, {}]], EXPR$2=[$SUM0($2)]) LogicalUnion(all=[true]) LogicalAggregate(group=[{0, 1}], EXPR$2=[COUNT($2)]) LogicalProject(DEPTNO=[$7], JOB=[$2], MGR=[$3]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0, 1}], EXPR$2=[COUNT($2)]) LogicalProject(DEPTNO=[$7], JOB=[$2], MGR=[$3]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3731,9 +3731,9 @@ LogicalAggregate(group=[{0, 1}], groups=[[{0, 1}, {0}, {}]], EXPR$2=[MAX($2)]) LogicalProject(DEPTNO=[$7], JOB=[$2], MGR=[$3]) LogicalUnion(all=[true]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], 
SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3742,10 +3742,10 @@ LogicalAggregate(group=[{0, 1}], groups=[[{0, 1}, {0}, {}]], EXPR$2=[MAX($2)]) LogicalUnion(all=[true]) LogicalAggregate(group=[{0, 1}], EXPR$2=[MAX($2)]) LogicalProject(DEPTNO=[$7], JOB=[$2], MGR=[$3]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0, 1}], EXPR$2=[MAX($2)]) LogicalProject(DEPTNO=[$7], JOB=[$2], MGR=[$3]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3762,9 +3762,9 @@ LogicalAggregate(group=[{0, 1}], groups=[[{0, 1}, {0}, {}]], EXPR$2=[MIN($2)]) LogicalProject(DEPTNO=[$7], JOB=[$2], EMPNO=[$0]) LogicalUnion(all=[true]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3773,10 +3773,10 @@ LogicalAggregate(group=[{0, 1}], groups=[[{0, 1}, {0}, {}]], EXPR$2=[MIN($2)]) LogicalUnion(all=[true]) LogicalAggregate(group=[{0, 1}], EXPR$2=[MIN($2)]) LogicalProject(DEPTNO=[$7], JOB=[$2], EMPNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0, 1}], EXPR$2=[MIN($2)]) LogicalProject(DEPTNO=[$7], JOB=[$2], EMPNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3793,9 +3793,9 @@ LogicalAggregate(group=[{0, 1}], groups=[[{0, 1}, {0}, {}]], EXPR$2=[AVG($2)]) LogicalProject(DEPTNO=[$7], JOB=[$2], EMPNO=[$0]) LogicalUnion(all=[true]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3803,9 +3803,9 @@ LogicalAggregate(group=[{0, 1}], groups=[[{0, 1}, {0}, {}]], EXPR$2=[AVG($2)]) LogicalAggregate(group=[{0, 1}], groups=[[{0, 1}, {0}, {}]], EXPR$2=[AVG($2)]) LogicalUnion(all=[true]) LogicalProject(DEPTNO=[$7], JOB=[$2], EMPNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(DEPTNO=[$7], JOB=[$2], EMPNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3822,9 +3822,9 @@ LogicalAggregate(group=[{0, 1}], groups=[[{0, 1}, {0}, {}]], EXPR$2=[SUM($2)], E LogicalProject(DEPTNO=[$7], JOB=[$2], EMPNO=[$0]) LogicalUnion(all=[true]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], 
SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3833,10 +3833,10 @@ LogicalAggregate(group=[{0, 1}], groups=[[{0, 1}, {0}, {}]], EXPR$2=[SUM($2)], E LogicalUnion(all=[true]) LogicalAggregate(group=[{0, 1}], EXPR$2=[SUM($2)], EXPR$3=[COUNT()], EXPR$4=[MIN($0)], EXPR$5=[MAX($2)]) LogicalProject(DEPTNO=[$7], JOB=[$2], EMPNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0, 1}], EXPR$2=[SUM($2)], EXPR$3=[COUNT()], EXPR$4=[MIN($0)], EXPR$5=[MAX($2)]) LogicalProject(DEPTNO=[$7], JOB=[$2], EMPNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3856,10 +3856,10 @@ LogicalAggregate(group=[{0}], EXPR$1=[COUNT() FILTER $1]) LogicalUnion(all=[true]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[>($7, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3869,11 +3869,11 @@ LogicalAggregate(group=[{0}], EXPR$1=[$SUM0($1)]) LogicalAggregate(group=[{0}], EXPR$1=[COUNT() FILTER $1]) LogicalProject(DEPTNO=[$7], $f1=[=($2, 'CLERK')]) LogicalFilter(condition=[=($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0}], EXPR$1=[COUNT() FILTER $1]) LogicalProject(DEPTNO=[$7], $f1=[=($2, 'CLERK')]) LogicalFilter(condition=[>($7, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3886,7 +3886,7 @@ LogicalAggregate(group=[{0}], EXPR$1=[$SUM0($1)]) LogicalAggregate(group=[{0, 1, 2}]) LogicalFilter(condition=[>($1, 5000)]) LogicalProject(ENAME=[$1], SAL=[$5], DEPTNO=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3894,7 +3894,7 @@ LogicalAggregate(group=[{0, 1, 2}]) LogicalFilter(condition=[>($1, 5000)]) LogicalAggregate(group=[{0, 1, 2}]) LogicalProject(ENAME=[$1], SAL=[$5], DEPTNO=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3907,7 +3907,7 @@ LogicalFilter(condition=[>($1, 5000)]) LogicalAggregate(group=[{0, 1, 2}], groups=[[{0, 1, 2}, {0, 1}, {0}, {}]]) LogicalFilter(condition=[>($1, 5000)]) LogicalProject(ENAME=[$1], SAL=[$5], DEPTNO=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3916,7 +3916,7 @@ LogicalAggregate(group=[{0, 1, 2}], groups=[[{0, 1, 2}, {0, 1}, {0}, {}]]) LogicalFilter(condition=[>($1, 5000)]) LogicalAggregate(group=[{0, 1, 2}]) LogicalProject(ENAME=[$1], SAL=[$5], DEPTNO=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3931,14 +3931,14 @@ LogicalAggregate(group=[{0, 1, 2}], groups=[[{0, 1, 2}, {0, 1}, {0}, {}]]) LogicalProject(DEPTNO=[$0], EXPR$1=[$2]) LogicalAggregate(group=[{0, 1}], EXPR$1=[MAX($2)]) LogicalProject(DEPTNO=[$7], FOUR=[4], MGR=[$3]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3953,14 +3953,14 @@ LogicalAggregate(group=[{0}], EXPR$1=[MAX($2)]) 
LogicalProject(DEPTNO=[$0], EXPR$1=[$2]) LogicalAggregate(group=[{0, 1}], EXPR$1=[MAX($2)]) LogicalProject(DEPTNO=[$7], FOUR=[4], ENAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3975,7 +3975,7 @@ LogicalAggregate(group=[{0}], EXPR$1=[MAX($2)]) LogicalProject(DEPTNO=[$0], EXPR$1=[$4]) LogicalAggregate(group=[{0, 1, 2, 3}], EXPR$1=[MAX($4)]) LogicalProject(DEPTNO=[$7], FOUR=[4], TWO_PLUS_THREE=[+(2, 3)], DEPTNO42=[+($7, 42)], MGR=[$3]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3983,7 +3983,7 @@ LogicalProject(DEPTNO=[$0], EXPR$1=[$4]) LogicalProject(DEPTNO=[$0], EXPR$1=[$2]) LogicalAggregate(group=[{0, 3}], EXPR$1=[MAX($4)]) LogicalProject(DEPTNO=[$7], FOUR=[4], TWO_PLUS_THREE=[+(2, 3)], DEPTNO42=[+($7, 42)], MGR=[$3]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3998,14 +3998,14 @@ LogicalProject(DEPTNO=[$0], EXPR$1=[$2]) LogicalProject(DEPTNO=[$1], EXPR$1=[$2]) LogicalAggregate(group=[{0, 1}], EXPR$1=[MAX($2)]) LogicalProject(FOUR=[4], DEPTNO=[$7], MGR=[$3]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4020,14 +4020,14 @@ LogicalAggregate(group=[{1}], EXPR$1=[MAX($2)]) LogicalProject(DEPTNO=[$1], EXPR$1=[$2]) LogicalAggregate(group=[{0, 1}], EXPR$1=[MAX($2)]) LogicalProject($f0=[+(42, 24)], DEPTNO=[$7], MGR=[$3]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4041,7 +4041,7 @@ LogicalAggregate(group=[{1}], EXPR$1=[MAX($2)]) @@ -4049,7 +4049,7 @@ LogicalAggregate(group=[{0, 1}], EXPR$2=[MAX($2)]) LogicalProject(EXPR$0=[$0], EXPR$1=[+(2, 3)], EXPR$2=[$1]) LogicalAggregate(group=[{0}], EXPR$2=[MAX($2)]) LogicalProject(EXPR$0=[4], EXPR$1=[+(2, 3)], MGR=[$3]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4063,7 +4063,7 @@ LogicalProject(EXPR$0=[$0], EXPR$1=[+(2, 3)], EXPR$2=[$1]) @@ -4071,7 +4071,7 @@ LogicalAggregate(group=[{0, 1}], EXPR$2=[MAX($2)]) LogicalProject(EXPR$0=[$0], EXPR$1=[+(2, 3)], EXPR$2=[$1]) LogicalAggregate(group=[{0}], EXPR$2=[MAX($2)]) LogicalProject(EXPR$0=[4], EXPR$1=[+(2, 3)], FIVE=[5]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4085,7 +4085,7 @@ LogicalProject(EXPR$0=[$0], EXPR$1=[+(2, 3)], EXPR$2=[$1]) @@ -4093,7 +4093,7 @@ LogicalAggregate(group=[{0, 1}], EXPR$2=[MAX($2)]) LogicalProject(EXPR$0=[$0], EXPR$1=[+(2, 3)], EXPR$2=[$1]) LogicalAggregate(group=[{0}], EXPR$2=[MAX($2)]) LogicalProject(EXPR$0=[4], EXPR$1=[+(2, 3)], $f2=[5]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4107,9 +4107,9 @@ select 2, deptno, job from emp as e2]]> @@ -4117,9 +4117,9 @@ LogicalUnion(all=[true]) LogicalProject(EXPR$0=[2], DEPTNO=[$0], JOB=[$1]) LogicalUnion(all=[true]) LogicalProject(DEPTNO=[$7], JOB=[$2]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(DEPTNO=[$7], JOB=[$2]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4133,9 +4133,9 @@ select 1, deptno, job from emp as e2]]> @@ -4149,9 +4149,9 @@ select 2, 3 from emp as e2]]> @@ -4159,9 +4159,9 @@ LogicalUnion(all=[true]) LogicalProject(EXPR$0=[2], EXPR$1=[3]) LogicalUnion(all=[true]) LogicalProject(EXPR$0=[2]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, 
SALES, EMP]]) LogicalProject(EXPR$0=[2]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4175,18 +4175,18 @@ LogicalProject(ENAME=[$1]) LogicalFilter(condition=[AND(=($7, $9), =($9, $18))]) LogicalJoin(condition=[true], joinType=[inner]) LogicalJoin(condition=[true], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4201,16 +4201,16 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], DEPTNO0=[$9], NAME=[$10]) LogicalFilter(condition=[>($9, 3)]) LogicalJoin(condition=[=($7, $9)], joinType=[left]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> ($9, 3))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -4223,8 +4223,8 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalProject(ENAME=[$1]) LogicalFilter(condition=[AND(=($7, $9), =($1, 'foo'))]) LogicalJoin(condition=[true], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -4232,8 +4232,8 @@ LogicalProject(ENAME=[$1]) LogicalProject(ENAME=[$1]) LogicalJoin(condition=[=($7, $9)], joinType=[inner], semiJoinDone=[true]) LogicalFilter(condition=[=($1, 'foo')]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -4247,9 +4247,9 @@ LogicalProject(ENAME=[$1]) LogicalFilter(condition=[AND(=($7, $9), =($9, $18))]) LogicalJoin(condition=[true], joinType=[inner]) LogicalJoin(condition=[true], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4257,9 +4257,9 @@ LogicalProject(ENAME=[$1]) LogicalProject(ENAME=[$1]) LogicalJoin(condition=[=($9, $18)], joinType=[inner], semiJoinDone=[true]) LogicalJoin(condition=[=($7, $9)], joinType=[inner], semiJoinDone=[true]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4272,8 +4272,8 @@ LogicalProject(ENAME=[$1]) ProjectRel(DNAME=[$10], ENAME=[$1]) FilterRel(condition=[=($10, 'Propane')]) JoinRel(condition=[=($7, $9)], joinType=[inner]) - TableAccessRel(table=[[CATALOG, SALES, EMP]]) - TableAccessRel(table=[[CATALOG, SALES, DEPT]]) + TableAccessRel(entity=[[CATALOG, SALES, EMP]]) + TableAccessRel(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -4291,26 +4291,26 @@ LogicalProject(A=[$0], 
B=[$1], C=[$2], D=[$3], E=[$4], F=[$5], G=[$6], H=[$7], I LogicalJoin(condition=[AND(=($0, $2), =($1, $3))], joinType=[left]) LogicalProject(A=[$0], B=[$1]) LogicalJoin(condition=[=($0, $1)], joinType=[right]) - LogicalScan(table=[[CATALOG, SALES, A]]) - LogicalScan(table=[[CATALOG, SALES, B]]) + LogicalScan(entity=[[CATALOG, SALES, A]]) + LogicalScan(entity=[[CATALOG, SALES, B]]) LogicalProject(C=[$0], D=[$1]) LogicalJoin(condition=[=($0, $1)], joinType=[full]) - LogicalScan(table=[[CATALOG, SALES, C]]) - LogicalScan(table=[[CATALOG, SALES, D]]) + LogicalScan(entity=[[CATALOG, SALES, C]]) + LogicalScan(entity=[[CATALOG, SALES, D]]) LogicalProject(E=[$0], F=[$1], G=[$2], H=[$3]) LogicalJoin(condition=[AND(=($0, $2), =($1, $3))], joinType=[right]) LogicalProject(E=[$0], F=[$1]) LogicalJoin(condition=[=($0, $1)], joinType=[full]) - LogicalScan(table=[[CATALOG, SALES, E]]) - LogicalScan(table=[[CATALOG, SALES, F]]) + LogicalScan(entity=[[CATALOG, SALES, E]]) + LogicalScan(entity=[[CATALOG, SALES, F]]) LogicalProject(G=[$0], H=[$1]) LogicalJoin(condition=[=($0, $1)], joinType=[left]) - LogicalScan(table=[[CATALOG, SALES, G]]) - LogicalScan(table=[[CATALOG, SALES, H]]) + LogicalScan(entity=[[CATALOG, SALES, G]]) + LogicalScan(entity=[[CATALOG, SALES, H]]) LogicalProject(I=[$0], J=[$1]) LogicalJoin(condition=[=($0, $1)], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, I]]) - LogicalScan(table=[[CATALOG, SALES, J]]) + LogicalScan(entity=[[CATALOG, SALES, I]]) + LogicalScan(entity=[[CATALOG, SALES, J]]) ]]> @@ -4319,20 +4319,20 @@ MultiJoin(joinFilter=[AND(=($0, $8), =($7, $9), =($8, $9))], isFullOuterJoin=[fa MultiJoin(joinFilter=[true], isFullOuterJoin=[false], joinTypes=[[RIGHT, INNER]], outerJoinConditions=[[AND(=($0, $4), =($1, $5), =($2, $6), =($3, $7)), NULL]], projFields=[[ALL, ALL]]) MultiJoin(joinFilter=[true], isFullOuterJoin=[false], joinTypes=[[INNER, LEFT]], outerJoinConditions=[[NULL, AND(=($0, $2), =($1, $3))]], projFields=[[ALL, ALL]]) MultiJoin(joinFilter=[true], isFullOuterJoin=[false], joinTypes=[[RIGHT, INNER]], outerJoinConditions=[[=($0, $1), NULL]], projFields=[[ALL, ALL]]) - LogicalScan(table=[[CATALOG, SALES, A]]) - LogicalScan(table=[[CATALOG, SALES, B]]) + LogicalScan(entity=[[CATALOG, SALES, A]]) + LogicalScan(entity=[[CATALOG, SALES, B]]) MultiJoin(joinFilter=[=($0, $1)], isFullOuterJoin=[true], joinTypes=[[INNER, INNER]], outerJoinConditions=[[NULL, NULL]], projFields=[[ALL, ALL]]) - LogicalScan(table=[[CATALOG, SALES, C]]) - LogicalScan(table=[[CATALOG, SALES, D]]) + LogicalScan(entity=[[CATALOG, SALES, C]]) + LogicalScan(entity=[[CATALOG, SALES, D]]) MultiJoin(joinFilter=[true], isFullOuterJoin=[false], joinTypes=[[RIGHT, INNER]], outerJoinConditions=[[AND(=($0, $2), =($1, $3)), NULL]], projFields=[[ALL, ALL]]) MultiJoin(joinFilter=[=($0, $1)], isFullOuterJoin=[true], joinTypes=[[INNER, INNER]], outerJoinConditions=[[NULL, NULL]], projFields=[[ALL, ALL]]) - LogicalScan(table=[[CATALOG, SALES, E]]) - LogicalScan(table=[[CATALOG, SALES, F]]) + LogicalScan(entity=[[CATALOG, SALES, E]]) + LogicalScan(entity=[[CATALOG, SALES, F]]) MultiJoin(joinFilter=[true], isFullOuterJoin=[false], joinTypes=[[INNER, LEFT]], outerJoinConditions=[[NULL, =($0, $1)]], projFields=[[ALL, ALL]]) - LogicalScan(table=[[CATALOG, SALES, G]]) - LogicalScan(table=[[CATALOG, SALES, H]]) - LogicalScan(table=[[CATALOG, SALES, I]]) - LogicalScan(table=[[CATALOG, SALES, J]]) + LogicalScan(entity=[[CATALOG, SALES, G]]) + LogicalScan(entity=[[CATALOG, SALES, H]]) + LogicalScan(entity=[[CATALOG, 
SALES, I]]) + LogicalScan(entity=[[CATALOG, SALES, J]]) ]]> @@ -4346,9 +4346,9 @@ LogicalProject(ENAME=[$1]) LogicalFilter(condition=[AND(=($7, $9), =($0, $11))]) LogicalJoin(condition=[true], joinType=[inner]) LogicalJoin(condition=[true], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4358,11 +4358,11 @@ LogicalProject(ENAME=[$1]) LogicalJoin(condition=[=($7, $9)], joinType=[inner], semiJoinDone=[true]) SemiJoin(condition=[=($0, $9)], joinType=[inner]) SemiJoin(condition=[=($7, $9)], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4376,9 +4376,9 @@ LogicalProject(ENAME=[$1]) LogicalFilter(condition=[AND(=($7, $9), =($9, $18))]) LogicalJoin(condition=[true], joinType=[inner]) LogicalJoin(condition=[true], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4387,12 +4387,12 @@ LogicalProject(ENAME=[$1]) LogicalJoin(condition=[=($9, $18)], joinType=[inner], semiJoinDone=[true]) LogicalJoin(condition=[=($7, $9)], joinType=[inner], semiJoinDone=[true]) SemiJoin(condition=[=($7, $9)], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) SemiJoin(condition=[=($0, $9)], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4405,16 +4405,16 @@ LogicalProject(ENAME=[$1]) LogicalProject(ENAME=[$1]) LogicalFilter(condition=[=($7, $9)]) LogicalJoin(condition=[true], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -4428,8 +4428,8 @@ LogicalProject(ENAME=[$0], EXPR$1=[$1], EXPR$2=[$2], DEPTNO=[$3]) LogicalFilter(condition=[=($3, $4)]) LogicalJoin(condition=[true], joinType=[inner]) LogicalProject(ENAME=[$1], EXPR$1=[TRIM(FLAG(BOTH), ' ', $2)], EXPR$2=[*($5, 2)], DEPTNO=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -4438,9 +4438,9 @@ LogicalProject(ENAME=[$0], EXPR$1=[$1], EXPR$2=[$2], DEPTNO=[$3]) LogicalJoin(condition=[=($3, $4)], joinType=[inner], semiJoinDone=[true]) LogicalProject(ENAME=[$1], EXPR$1=[TRIM(FLAG(BOTH), ' ', $2)], EXPR$2=[*($5, 2)], DEPTNO=[$7]) 
SemiJoin(condition=[=($7, $9)], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -4453,8 +4453,8 @@ LogicalProject(ENAME=[$0], EXPR$1=[$1], EXPR$2=[$2], DEPTNO=[$3]) LogicalProject(ENAME=[$1]) LogicalFilter(condition=[AND(=($7, $9), =($1, 'foo'))]) LogicalJoin(condition=[true], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -4463,9 +4463,9 @@ LogicalProject(ENAME=[$1]) LogicalJoin(condition=[=($7, $9)], joinType=[inner], semiJoinDone=[true]) LogicalFilter(condition=[=($1, 'foo')]) SemiJoin(condition=[=($7, $9)], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -4479,9 +4479,9 @@ LogicalProject(ENAME=[$1]) LogicalFilter(condition=[AND(=($7, $9), =($9, $18), =($10, 'foo'))]) LogicalJoin(condition=[true], joinType=[inner]) LogicalJoin(condition=[true], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4489,10 +4489,10 @@ LogicalProject(ENAME=[$1]) LogicalProject(ENAME=[$1]) LogicalJoin(condition=[=($9, $18)], joinType=[inner], semiJoinDone=[true]) LogicalJoin(condition=[=($7, $9)], joinType=[inner], semiJoinDone=[true]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalFilter(condition=[=($1, 'foo')]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4504,9 +4504,9 @@ LogicalProject(ENAME=[$1]) @@ -4520,7 +4520,7 @@ UnionRel(all=[true]) LogicalAggregate(group=[{1}], EXPR$1=[$SUM0($2)]) LogicalFilter(condition=[>(CAST($0):BIGINT NOT NULL, CAST('12'):BIGINT NOT NULL)]) LogicalAggregate(group=[{5, 7}], EXPR$1=[COUNT()]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4528,7 +4528,7 @@ LogicalAggregate(group=[{1}], EXPR$1=[$SUM0($2)]) LogicalAggregate(group=[{1}], EXPR$1=[$SUM0($2)]) LogicalAggregate(group=[{5, 7}], EXPR$1=[COUNT()]) LogicalFilter(condition=[>(CAST($5):BIGINT NOT NULL, 12)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4543,7 +4543,7 @@ where case when mgr < 10 then true else false end]]> LogicalAggregate(group=[{}], EXPR$0=[COUNT()]) LogicalProject($f0=[0]) LogicalFilter(condition=[CASE(<($3, 10), true, false)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4552,7 +4552,7 @@ LogicalAggregate(group=[{}], EXPR$0=[COUNT()]) LogicalProject($f0=[0]) LogicalFilter(condition=[IS TRUE(<($0, 10))]) LogicalProject(MGR=[$3]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4564,9 +4564,9 @@ 
LogicalAggregate(group=[{}], EXPR$0=[COUNT()]) ($7, 7)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4574,9 +4574,9 @@ LogicalProject(EXPR$0=[1]) LogicalProject(EXPR$0=[1]) LogicalJoin(condition=[=($7, $16)], joinType=[inner]) LogicalFilter(condition=[>($7, 7)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalFilter(condition=[>($7, 7)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4590,8 +4590,8 @@ LogicalProject(EXPR$0=[1]) LogicalJoin(condition=[=($7, $16)], joinType=[inner]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[>($7, 7)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4600,9 +4600,9 @@ LogicalProject(EXPR$0=[1]) LogicalJoin(condition=[=($7, $16)], joinType=[inner]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[>($7, 7)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalFilter(condition=[>($7, 7)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4617,8 +4617,8 @@ LogicalProject(EXPR$0=[1]) LogicalAggregate(group=[{0}], EXPR$1=[COUNT()]) LogicalProject(DEPTNO=[$7]) LogicalFilter(condition=[>($7, 7)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4628,9 +4628,9 @@ LogicalProject(EXPR$0=[1]) LogicalAggregate(group=[{0}], EXPR$1=[COUNT()]) LogicalProject(DEPTNO=[$7]) LogicalFilter(condition=[>($7, 7)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalFilter(condition=[>($7, 7)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4645,11 +4645,11 @@ LogicalProject(EXPR$0=[1]) LogicalUnion(all=[true]) LogicalProject(DEPTNO=[$7]) LogicalFilter(condition=[>($7, 7)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(DEPTNO=[$7]) LogicalFilter(condition=[>($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4659,12 +4659,12 @@ LogicalProject(EXPR$0=[1]) LogicalUnion(all=[true]) LogicalProject(DEPTNO=[$7]) LogicalFilter(condition=[>($7, 7)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(DEPTNO=[$7]) LogicalFilter(condition=[>($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalFilter(condition=[OR(>($7, 7), >($7, 10))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4678,9 +4678,9 @@ LogicalProject(EXPR$0=[1]) LogicalJoin(condition=[=($16, $25)], joinType=[inner]) LogicalJoin(condition=[=($7, $16)], joinType=[inner]) LogicalFilter(condition=[>($7, 7)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + 
LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4689,11 +4689,11 @@ LogicalProject(EXPR$0=[1]) LogicalJoin(condition=[=($16, $25)], joinType=[inner]) LogicalJoin(condition=[=($7, $16)], joinType=[inner]) LogicalFilter(condition=[>($7, 7)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalFilter(condition=[>($7, 7)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalFilter(condition=[>($7, 7)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4713,13 +4713,13 @@ LogicalProject(DEPTNO=[$0], DEPTNO0=[$1]) LogicalJoin(condition=[=($0, $1)], joinType=[inner]) LogicalProject(DEPTNO=[$7]) LogicalFilter(condition=[<($7, 4)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalUnion(all=[true]) LogicalProject(DEPTNO=[$7]) LogicalFilter(condition=[>($7, 7)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(DEPTNO=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4728,14 +4728,14 @@ LogicalProject(DEPTNO=[$0], DEPTNO0=[$1]) LogicalJoin(condition=[=($0, $1)], joinType=[inner]) LogicalProject(DEPTNO=[$7]) LogicalFilter(condition=[<($7, 4)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalFilter(condition=[<($0, 4)]) LogicalUnion(all=[true]) LogicalProject(DEPTNO=[$7]) LogicalFilter(condition=[>($7, 7)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(DEPTNO=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4752,9 +4752,9 @@ LogicalProject(EXPR$0=[1]) LogicalAggregate(group=[{0}], EXPR$1=[COUNT()]) LogicalProject(DEPTNO=[$7]) LogicalFilter(condition=[>($7, 7)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4765,11 +4765,11 @@ LogicalProject(EXPR$0=[1]) LogicalAggregate(group=[{0}], EXPR$1=[COUNT()]) LogicalProject(DEPTNO=[$7]) LogicalFilter(condition=[>($7, 7)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalFilter(condition=[>($7, 7)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalFilter(condition=[>($7, 7)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4783,8 +4783,8 @@ LogicalProject(EXPR$0=[1]) LogicalFilter(condition=[>($16, 9)]) LogicalJoin(condition=[=($7, $16)], joinType=[left]) LogicalFilter(condition=[>($7, 7)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4793,9 +4793,9 @@ LogicalProject(EXPR$0=[1]) LogicalFilter(condition=[>($16, 9)]) LogicalJoin(condition=[=($7, $16)], joinType=[left]) LogicalFilter(condition=[>($7, 7)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalFilter(condition=[>($7, 7)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + 
LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4808,9 +4808,9 @@ LogicalProject(EXPR$0=[1]) LogicalProject(EXPR$0=[1]) LogicalFilter(condition=[>($7, 7)]) LogicalJoin(condition=[=($7, $16)], joinType=[right]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalFilter(condition=[>($7, 9)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4819,9 +4819,9 @@ LogicalProject(EXPR$0=[1]) LogicalFilter(condition=[>($7, 7)]) LogicalJoin(condition=[=($7, $16)], joinType=[right]) LogicalFilter(condition=[>($7, 9)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalFilter(condition=[>($7, 9)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4834,8 +4834,8 @@ LogicalProject(EXPR$0=[1]) LogicalProject(EXPR$0=[1]) LogicalFilter(condition=[AND(>($7, 7), >($16, 9))]) LogicalJoin(condition=[=($7, $16)], joinType=[full]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4843,8 +4843,8 @@ LogicalProject(EXPR$0=[1]) LogicalProject(EXPR$0=[1]) LogicalFilter(condition=[AND(>($7, 7), >($16, 9))]) LogicalJoin(condition=[=($7, $16)], joinType=[full]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4858,8 +4858,8 @@ LogicalProject(EXPR$0=[1]) LogicalJoin(condition=[=($0, $8)], joinType=[inner]) LogicalProject(DEPTNO=[$6]) LogicalFilter(condition=[>($7, 7)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4868,8 +4868,8 @@ LogicalProject(EXPR$0=[1]) LogicalJoin(condition=[=($0, $8)], joinType=[inner]) LogicalProject(DEPTNO=[$6]) LogicalFilter(condition=[>($7, 7)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4883,8 +4883,8 @@ LogicalProject(EXPR$0=[1]) LogicalJoin(condition=[=($0, $8)], joinType=[inner]) LogicalProject(DEPTNO=[$6]) LogicalFilter(condition=[>($6, 7)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4893,9 +4893,9 @@ LogicalProject(EXPR$0=[1]) LogicalJoin(condition=[=($0, $8)], joinType=[inner]) LogicalProject(DEPTNO=[$6]) LogicalFilter(condition=[>($6, 7)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalFilter(condition=[>($7, 7)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4908,8 +4908,8 @@ LogicalProject(EXPR$0=[1]) LogicalProject(EXPR$0=[1]) LogicalJoin(condition=[=($7, $16)], joinType=[inner]) LogicalFilter(condition=[OR(=($7, 7), =($7, 9), >($7, 10))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4917,9 +4917,9 @@ LogicalProject(EXPR$0=[1]) LogicalProject(EXPR$0=[1]) LogicalJoin(condition=[=($7, $16)], joinType=[inner]) LogicalFilter(condition=[OR(=($7, 
7), =($7, 9), >($7, 10))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalFilter(condition=[OR(=($7, 7), =($7, 9), >($7, 10))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4932,8 +4932,8 @@ LogicalProject(EXPR$0=[1]) LogicalProject(EXPR$0=[1]) LogicalJoin(condition=[=($7, $16)], joinType=[inner]) LogicalFilter(condition=[OR(=($7, 7), =($7, 9), >($6, 10))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4941,8 +4941,8 @@ LogicalProject(EXPR$0=[1]) LogicalProject(EXPR$0=[1]) LogicalJoin(condition=[=($7, $16)], joinType=[inner]) LogicalFilter(condition=[OR(=($7, 7), =($7, 9), >($6, 10))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4958,14 +4958,14 @@ LogicalProject(EXPR$0=[1]) LogicalUnion(all=[true]) LogicalProject(DEPTNO=[$7]) LogicalFilter(condition=[>($7, 7)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(DEPTNO=[$7]) LogicalFilter(condition=[>($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(DEPTNO=[$7]) LogicalFilter(condition=[>($7, 1)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4976,15 +4976,15 @@ LogicalProject(EXPR$0=[1]) LogicalUnion(all=[true]) LogicalProject(DEPTNO=[$7]) LogicalFilter(condition=[>($7, 7)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(DEPTNO=[$7]) LogicalFilter(condition=[>($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(DEPTNO=[$7]) LogicalFilter(condition=[>($7, 1)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalFilter(condition=[OR(>($7, 7), >($7, 1))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -5007,14 +5007,14 @@ ProjectRel(EXPR$0=[1]) UnionRel(all=[true]) ProjectRel(EMPNO=[$0], DEPTNO=[$7]) FilterRel(condition=[OR(>($7, 7), <($0, 10))]) - TableAccessRel(table=[[CATALOG, SALES, EMP]]) + TableAccessRel(entity=[[CATALOG, SALES, EMP]]) ProjectRel(EMPNO=[$0], DEPTNO=[$7]) FilterRel(condition=[OR(>($7, 10), <($0, $7))]) - TableAccessRel(table=[[CATALOG, SALES, EMP]]) + TableAccessRel(entity=[[CATALOG, SALES, EMP]]) ProjectRel(EMPNO=[$0], DEPTNO=[$7]) FilterRel(condition=[>($7, 1)]) - TableAccessRel(table=[[CATALOG, SALES, EMP]]) - TableAccessRel(table=[[CATALOG, SALES, EMP]]) + TableAccessRel(entity=[[CATALOG, SALES, EMP]]) + TableAccessRel(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -5025,14 +5025,14 @@ ProjectRel(EXPR$0=[1]) UnionRel(all=[true]) ProjectRel(EMPNO=[$0], DEPTNO=[$7]) FilterRel(condition=[OR(>($7, 7), <($0, 10))]) - TableAccessRel(table=[[CATALOG, SALES, EMP]]) + TableAccessRel(entity=[[CATALOG, SALES, EMP]]) ProjectRel(EMPNO=[$0], DEPTNO=[$7]) FilterRel(condition=[OR(>($7, 10), <($0, $7))]) - TableAccessRel(table=[[CATALOG, SALES, EMP]]) + TableAccessRel(entity=[[CATALOG, SALES, EMP]]) ProjectRel(EMPNO=[$0], DEPTNO=[$7]) FilterRel(condition=[>($7, 1)]) - 
TableAccessRel(table=[[CATALOG, SALES, EMP]]) - TableAccessRel(table=[[CATALOG, SALES, EMP]]) + TableAccessRel(entity=[[CATALOG, SALES, EMP]]) + TableAccessRel(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -5044,16 +5044,16 @@ ProjectRel(EXPR$0=[1]) @@ -5066,8 +5066,8 @@ LogicalProject(EXPR$0=[1]) ProjectRel(EXPR$0=[1]) JoinRel(condition=[=($7, $16)], joinType=[inner]) FilterRel(condition=[>(2, 1)]) - TableAccessRel(table=[[CATALOG, SALES, EMP]]) - TableAccessRel(table=[[CATALOG, SALES, EMP]]) + TableAccessRel(entity=[[CATALOG, SALES, EMP]]) + TableAccessRel(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -5075,8 +5075,8 @@ ProjectRel(EXPR$0=[1]) ProjectRel(EXPR$0=[1]) JoinRel(condition=[=($7, $16)], joinType=[inner]) FilterRel(condition=[>(2, 1)]) - TableAccessRel(table=[[CATALOG, SALES, EMP]]) - TableAccessRel(table=[[CATALOG, SALES, EMP]]) + TableAccessRel(entity=[[CATALOG, SALES, EMP]]) + TableAccessRel(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -5089,9 +5089,9 @@ ProjectRel(EXPR$0=[1]) LogicalProject(EXPR$0=[1]) LogicalJoin(condition=[=($7, $16)], joinType=[inner]) LogicalFilter(condition=[AND(>($7, 7), =($6, $7), >(+($6, $7), /($6, 2)))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalFilter(condition=[=($5, $7)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -5099,10 +5099,10 @@ LogicalProject(EXPR$0=[1]) LogicalProject(EXPR$0=[1]) LogicalJoin(condition=[=($7, $16)], joinType=[inner]) LogicalFilter(condition=[AND(>($7, 7), =($6, $7), >(+($6, $7), /($6, 2)))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalFilter(condition=[>($7, 7)]) LogicalFilter(condition=[=($5, $7)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -5114,14 +5114,14 @@ LogicalProject(EXPR$0=[1]) @@ -5135,7 +5135,7 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[>(+($7, 5), $0)]) LogicalFilter(condition=[=($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -5144,7 +5144,7 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[>(15, $0)]) LogicalFilter(condition=[=($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -5160,7 +5160,7 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[AND(>($0, 3), >($7, 5))]) LogicalFilter(condition=[>($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -5169,7 +5169,7 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[>($0, 3)]) LogicalFilter(condition=[>($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -5185,8 +5185,8 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], 
MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalJoin(condition=[=($0, $9)], joinType=[left]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($0, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -5195,9 +5195,9 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalJoin(condition=[true], joinType=[left]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($0, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalFilter(condition=[=($0, 10)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -5213,9 +5213,9 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalJoin(condition=[AND(=($0, $10), =($9, $12))], joinType=[inner]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], $f9=[+($7, $0)]) LogicalFilter(condition=[=($0, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(DEPTNO=[$0], NAME=[$1], $f2=[+($0, 5)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -5224,10 +5224,10 @@ LogicalProject(EMPNO=[10], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalJoin(condition=[=($9, 15)], joinType=[inner]) LogicalProject(EMPNO=[10], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], $f9=[+($7, 10)]) LogicalFilter(condition=[=($0, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(DEPTNO=[10], NAME=[$1], $f2=[15]) LogicalFilter(condition=[=($0, 10)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -5242,8 +5242,8 @@ where emp.deptno is not null and emp.sal > 100]]> LogicalProject(DEPTNO=[$0], NAME=[$1], EMPNO=[$2], ENAME=[$3], JOB=[$4], MGR=[$5], HIREDATE=[$6], SAL=[$7], COMM=[$8], DEPTNO0=[$9], SLACKER=[$10]) LogicalFilter(condition=[AND(IS NOT NULL($9), >($7, 100))]) LogicalJoin(condition=[=($0, $9)], joinType=[left]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -5251,9 +5251,9 @@ LogicalProject(DEPTNO=[$0], NAME=[$1], EMPNO=[$2], ENAME=[$3], JOB=[$4], MGR=[$5 LogicalProject(DEPTNO=[$0], NAME=[$1], EMPNO=[$2], ENAME=[$3], JOB=[$4], MGR=[$5], HIREDATE=[$6], SAL=[$7], COMM=[$8], DEPTNO0=[$9], SLACKER=[$10]) LogicalProject(DEPTNO=[$0], NAME=[$1], EMPNO=[CAST($2):INTEGER], ENAME=[CAST($3):VARCHAR(20)], JOB=[CAST($4):VARCHAR(10)], MGR=[$5], HIREDATE=[CAST($6):TIMESTAMP(0)], SAL=[CAST($7):INTEGER], COMM=[CAST($8):INTEGER], DEPTNO0=[CAST($9):INTEGER], SLACKER=[CAST($10):BOOLEAN]) LogicalJoin(condition=[=($0, $9)], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalFilter(condition=[>($5, 100)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -5268,20 +5268,20 @@ LogicalProject(DEPTNO=[$0], NAME=[$1], 
EMPNO=[$2], ENAME=[$3], JOB=[$4], MGR=[$5 ($5, 100)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> ($5, 100)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -5297,11 +5297,11 @@ SemiJoin(condition=[=($0, $2)], joinType=[inner]) LogicalProject(DEPTNO=[$0]) LogicalJoin(condition=[=($0, $2)], joinType=[inner]) SemiJoin(condition=[=($0, $2)], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalProject(DEPTNO=[$7], $f0=[true]) LogicalFilter(condition=[>($5, 100)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, CUSTOMER, ACCOUNT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, CUSTOMER, ACCOUNT]]) ]]> @@ -5310,13 +5310,13 @@ LogicalProject(DEPTNO=[$0]) LogicalJoin(condition=[=($0, $1)], joinType=[inner]) SemiJoin(condition=[=($0, $1)], joinType=[inner]) LogicalProject(DEPTNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalProject(DEPTNO=[$1]) LogicalFilter(condition=[>($0, 100)]) LogicalProject(SAL=[$5], DEPTNO=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(ACCTNO=[$0]) - LogicalScan(table=[[CATALOG, CUSTOMER, ACCOUNT]]) + LogicalScan(entity=[[CATALOG, CUSTOMER, ACCOUNT]]) ]]> @@ -5329,7 +5329,7 @@ from sales.dept group by name]]> @@ -5338,7 +5338,7 @@ LogicalProject(NAME=[$0], EXPR$1=[CAST(POWER(/(-($1, /(*($2, $2), $3)), $3), 0.5 LogicalAggregate(group=[{0}], agg#0=[$SUM0($2)], agg#1=[$SUM0($1)], agg#2=[COUNT()]) LogicalProject(NAME=[$0], DEPTNO=[$1], $f2=[*($1, $1)]) LogicalProject(NAME=[$1], DEPTNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -5354,7 +5354,7 @@ group by x, y]]> LogicalProject(X=[$0], EXPR$1=[$2], Y=[$1]) LogicalAggregate(group=[{0, 1}], EXPR$1=[SUM($2)]) LogicalProject(X=[$7], Y=[$0], Z=[$5]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -5362,7 +5362,7 @@ LogicalProject(X=[$0], EXPR$1=[$2], Y=[$1]) LogicalProject(X=[$0], EXPR$1=[$2], Y=[$1]) LogicalProject(DEPTNO=[$1], EMPNO=[$0], EXPR$1=[$2]) LogicalAggregate(group=[{0, 7}], EXPR$1=[SUM($5)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -5378,7 +5378,7 @@ group by rollup(x, y)]]> LogicalProject(X=[$0], EXPR$1=[$2], Y=[$1]) LogicalAggregate(group=[{0, 1}], groups=[[{0, 1}, {0}, {}]], EXPR$1=[SUM($2)]) LogicalProject(X=[$7], Y=[$0], Z=[$5]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -5386,7 +5386,7 @@ LogicalProject(X=[$0], EXPR$1=[$2], Y=[$1]) LogicalProject(X=[$0], EXPR$1=[$2], Y=[$1]) LogicalProject(DEPTNO=[$1], EMPNO=[$0], EXPR$1=[$2]) LogicalAggregate(group=[{0, 7}], groups=[[{0, 7}, {7}, {}]], EXPR$1=[SUM($5)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -5400,10 +5400,10 @@ LogicalAggregate(group=[{0, 1}]) LogicalUnion(all=[true]) LogicalAggregate(group=[{0, 1}]) LogicalProject(DEPTNO=[$7], JOB=[$2]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0, 1}]) LogicalProject(DEPTNO=[$7], JOB=[$2]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -5411,9 +5411,9 @@ LogicalAggregate(group=[{0, 
1}]) LogicalAggregate(group=[{0, 1}]) LogicalUnion(all=[true]) LogicalProject(DEPTNO=[$7], JOB=[$2]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(DEPTNO=[$7], JOB=[$2]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -5427,10 +5427,10 @@ LogicalAggregate(group=[{0, 1}]) LogicalUnion(all=[true]) LogicalAggregate(group=[{0, 1}]) LogicalProject(DEPTNO=[$7], JOB=[$2]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0, 1}]) LogicalProject(DEPTNO=[$7], JOB=[$2]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -5438,9 +5438,9 @@ LogicalAggregate(group=[{0, 1}]) LogicalAggregate(group=[{0, 1}]) LogicalUnion(all=[true]) LogicalProject(DEPTNO=[$7], JOB=[$2]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(DEPTNO=[$7], JOB=[$2]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -5453,13 +5453,13 @@ LogicalAggregate(group=[{0, 1}]) LogicalProject(EXPR$0=[$2], DEPTNO=[$1]) LogicalWindow(window#0=[window(partition {} order by [] range between UNBOUNDED PRECEDING and UNBOUNDED FOLLOWING aggs [COUNT($0)])]) LogicalProject(EMPNO=[$0], DEPTNO=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -5477,13 +5477,13 @@ from ( LogicalProject($0=[$2], $1=[$3]) LogicalWindow(window#0=[window(partition {1} order by [0] range between UNBOUNDED PRECEDING and CURRENT ROW aggs [SUM($2), SUM($3)])]) LogicalProject(SAL=[$5], DEPTNO=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -5499,7 +5499,7 @@ where r < 2]]> LogicalProject(ENAME=[$0], R=[$1]) LogicalFilter(condition=[<($1, 2)]) LogicalProject(ENAME=[$1], R=[RANK() OVER (PARTITION BY $7 ORDER BY $5 RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -5507,7 +5507,7 @@ LogicalProject(ENAME=[$0], R=[$1]) LogicalProject(ENAME=[$0], R=[$1]) LogicalFilter(condition=[<($1, 2)]) LogicalProject(ENAME=[$1], R=[RANK() OVER (PARTITION BY $7 ORDER BY $5 RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -5524,7 +5524,7 @@ where r < 2]]> LogicalProject(ENAME=[$0], R=[$1]) LogicalFilter(condition=[<($1, 2)]) LogicalProject(ENAME=[$1], R=[+(RANK() OVER (PARTITION BY $7 ORDER BY $5 RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW), 1)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -5532,7 +5532,7 @@ LogicalProject(ENAME=[$0], R=[$1]) LogicalProject(ENAME=[$0], R=[$1]) LogicalFilter(condition=[<($1, 2)]) LogicalProject(ENAME=[$1], R=[+(RANK() OVER (PARTITION BY $7 ORDER BY $5 RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW), 1)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -5545,7 +5545,7 @@ from sales.emp]]> @@ -5553,7 +5553,7 @@ LogicalAggregate(group=[{}], EXPR$0=[MAX($0)], EXPR$1=[COUNT(DISTINCT $1)]) LogicalAggregate(group=[{}], EXPR$0=[MIN($1) FILTER $3], EXPR$1=[COUNT($0) FILTER $2]) LogicalProject(ENAME=[$0], EXPR$0=[$1], $g_0=[=($2, 0)], $g_1=[=($2, 1)]) LogicalAggregate(group=[{1}], groups=[[{1}, {}]], EXPR$0=[MAX($7)], $g=[GROUPING($1)]) 
- LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -5565,14 +5565,14 @@ LogicalAggregate(group=[{}], EXPR$0=[MIN($1) FILTER $3], EXPR$1=[COUNT($0) FILTE @@ -5584,7 +5584,7 @@ LogicalAggregate(group=[{1}], EXPR$1=[COUNT($0)]) @@ -5594,7 +5594,7 @@ LogicalProject(DEPTNO=[$0], EXPR$1=[$1], EXPR$2=[CAST($2):INTEGER NOT NULL]) LogicalProject(DEPTNO=[$0], ENAME=[$1], EXPR$2=[$2], $g_0=[=($3, 0)], $g_1=[=($3, 1)]) LogicalProject(DEPTNO=[$1], ENAME=[$0], EXPR$2=[$2], $g=[$3]) LogicalAggregate(group=[{1, 7}], groups=[[{1, 7}, {7}]], EXPR$2=[SUM($5)], $g=[GROUPING($7, $1)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -5606,7 +5606,7 @@ LogicalProject(DEPTNO=[$0], EXPR$1=[$1], EXPR$2=[CAST($2):INTEGER NOT NULL]) @@ -5615,7 +5615,7 @@ LogicalAggregate(group=[{0, 1}], groups=[[{0, 1}, {0}, {}]], EXPR$2=[COUNT($2) F LogicalProject(DEPTNO=[$0], JOB=[$1], ENAME=[$2], $g_0=[=($3, 0)]) LogicalAggregate(group=[{0, 1, 2}], $g=[GROUPING($0, $1, $2)]) LogicalProject(DEPTNO=[$7], JOB=[$2], ENAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -5627,7 +5627,7 @@ LogicalAggregate(group=[{0, 1}], groups=[[{0, 1}, {0}, {}]], EXPR$2=[COUNT($2) F @@ -5637,7 +5637,7 @@ LogicalProject(DEPTNO=[$0], JOB=[$1], EXPR$2=[$2], EXPR$3=[CAST($3):INTEGER NOT LogicalProject(DEPTNO=[$0], JOB=[$1], ENAME=[$2], EXPR$3=[$3], $g_0=[=($4, 0)], $g_1=[=($4, 1)]) LogicalAggregate(group=[{0, 1, 2}], groups=[[{0, 1, 2}, {0, 1}]], EXPR$3=[SUM($3)], $g=[GROUPING($0, $1, $2)]) LogicalProject(DEPTNO=[$7], JOB=[$2], ENAME=[$1], SAL=[$5]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -5650,18 +5650,18 @@ LogicalProject(DEPTNO=[$0], JOB=[$1], EXPR$2=[$2], EXPR$3=[CAST($3):INTEGER NOT LogicalProject(A=[$0], B=[$1], C=[$2]) LogicalJoin(condition=[=($1, $2)], joinType=[inner]) LogicalJoin(condition=[=($0, $1)], joinType=[right]) - LogicalScan(table=[[CATALOG, SALES, A]]) - LogicalScan(table=[[CATALOG, SALES, B]]) - LogicalScan(table=[[CATALOG, SALES, C]]) + LogicalScan(entity=[[CATALOG, SALES, A]]) + LogicalScan(entity=[[CATALOG, SALES, B]]) + LogicalScan(entity=[[CATALOG, SALES, C]]) ]]> @@ -5674,17 +5674,17 @@ MultiJoin(joinFilter=[=($1, $2)], isFullOuterJoin=[false], joinTypes=[[INNER, IN LogicalProject(A=[$0], B=[$1], C=[$2]) LogicalJoin(condition=[=($1, $2)], joinType=[left]) LogicalJoin(condition=[=($0, $1)], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, A]]) - LogicalScan(table=[[CATALOG, SALES, B]]) - LogicalScan(table=[[CATALOG, SALES, C]]) + LogicalScan(entity=[[CATALOG, SALES, A]]) + LogicalScan(entity=[[CATALOG, SALES, B]]) + LogicalScan(entity=[[CATALOG, SALES, C]]) ]]> @@ -5697,18 +5697,18 @@ MultiJoin(joinFilter=[=($0, $1)], isFullOuterJoin=[false], joinTypes=[[INNER, IN LogicalProject(A=[$0], B=[$1], C=[$2]) LogicalJoin(condition=[=($1, $2)], joinType=[right]) LogicalJoin(condition=[=($0, $1)], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, A]]) - LogicalScan(table=[[CATALOG, SALES, B]]) - LogicalScan(table=[[CATALOG, SALES, C]]) + LogicalScan(entity=[[CATALOG, SALES, A]]) + LogicalScan(entity=[[CATALOG, SALES, B]]) + LogicalScan(entity=[[CATALOG, SALES, C]]) ]]> @@ -5726,7 +5726,7 @@ LogicalProject(C1=[$0]) LogicalAggregate(group=[{0}], C2=[COUNT()]) LogicalProject(C1=[$1]) LogicalFilter(condition=[>($1, 'b')]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ 
-5737,7 +5737,7 @@ LogicalProject(C1=[$0]) LogicalFilter(condition=[>($0, 'c')]) LogicalProject(C1=[$1]) LogicalFilter(condition=[>($1, 'b')]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -5755,10 +5755,10 @@ LogicalProject(NAME=[$1]) LogicalJoin(condition=[$4], joinType=[right]) LogicalProject(DEPTNO=[$0], NAME=[$1], DEPTNO0=[$2], NAME0=[$3], $f4=[>($2, 10)]) LogicalJoin(condition=[$4], joinType=[left]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalProject(DEPTNO=[$0], NAME=[$1], $f2=[>($0, 10)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -5768,12 +5768,12 @@ LogicalProject(NAME=[$1]) LogicalProject(DEPTNO=[$0], NAME=[$1], DEPTNO0=[$2], NAME0=[$3], $f4=[>($2, 10)]) LogicalProject(DEPTNO=[$0], NAME=[$1], DEPTNO0=[CAST($2):INTEGER], NAME0=[CAST($3):VARCHAR(10)], $f2=[CAST($4):BOOLEAN]) LogicalJoin(condition=[true], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalProject(DEPTNO=[$0], NAME=[$1], $f2=[>($0, 10)]) LogicalFilter(condition=[>($0, 10)]) LogicalFilter(condition=[>($0, 10)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -5792,11 +5792,11 @@ LogicalProject(NAME=[$0]) LogicalProject(NAME=[$1], $f4=[$5]) LogicalJoin(condition=[$4], joinType=[left]) LogicalProject(DEPTNO=[$0], NAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalProject(DEPTNO=[$0], NAME=[$1], $f2=[>($0, 10)], >=[>($0, 10)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalProject(DEPTNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -5804,9 +5804,9 @@ LogicalProject(NAME=[$0]) LogicalProject(NAME=[$1]) LogicalJoin(condition=[>($2, 10)], joinType=[right]) LogicalJoin(condition=[>($2, 10)], joinType=[left]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -5825,7 +5825,7 @@ LogicalProject(NAME=[$1]) LogicalFilter(condition=[=($0, 10)]) LogicalProject(DEPTNO=[$0], NAME=[$1]) LogicalFilter(condition=[=($0, 10)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -5833,7 +5833,7 @@ LogicalProject(NAME=[$1]) LogicalProject(NAME=[$1]) LogicalProject(DEPTNO=[$0], NAME=[$1]) LogicalFilter(condition=[=($0, 10)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -5854,8 +5854,8 @@ LogicalProject(DEPTNO=[$0], ENAME=[$1]) LogicalFilter(condition=[=($0, 10)]) LogicalProject(DEPTNO=[$9], ENAME=[$1]) LogicalJoin(condition=[AND(=($7, $9), =($9, 10))], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -5863,9 +5863,9 @@ LogicalProject(DEPTNO=[$0], ENAME=[$1]) LogicalProject(DEPTNO=[$0], ENAME=[$1]) 
LogicalProject(DEPTNO=[$9], ENAME=[$1]) LogicalJoin(condition=[=($7, $9)], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalFilter(condition=[=($0, 10)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -5879,13 +5879,13 @@ from emp]]> @@ -5896,7 +5896,7 @@ LogicalProject(NEWCOL=[1E0:FLOAT]) @@ -5918,9 +5918,9 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalProject(EMPNO=[$0], DEPTNO=[$1]) LogicalAggregate(group=[{0, 1}]) LogicalProject(EMPNO=[$0], DEPTNO=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) }), <($7, +(40, 60)))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -5930,9 +5930,9 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalProject(EMPNO=[$0], DEPTNO=[$1]) LogicalAggregate(group=[{0, 1}]) LogicalProject(EMPNO=[$0], DEPTNO=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) }), <($7, 100))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -5966,13 +5966,13 @@ from emp]]> LogicalProject($0=[$3], $1=[$4]) LogicalWindow(window#0=[window(partition {1} order by [0] range between UNBOUNDED PRECEDING and CURRENT ROW aggs [SUM($1), SUM($2)])]) LogicalProject(SAL=[$5], DEPTNO=[$7], $2=[+($7, $5)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -5989,8 +5989,8 @@ LogicalAggregate(group=[{2, 10}]) LogicalJoin(condition=[=($2, $10)], joinType=[inner]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($0, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -5999,9 +5999,9 @@ LogicalJoin(condition=[=($0, $1)], joinType=[inner]) LogicalAggregate(group=[{2}]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($0, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{1}]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -6019,9 +6019,9 @@ LogicalAggregate(group=[{2, 11}]) LogicalJoin(condition=[AND(=($2, $11), =($9, $12))], joinType=[inner]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], $f9=[+($7, $0)]) LogicalFilter(condition=[=($0, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(DEPTNO=[$0], NAME=[$1], $f2=[+($0, 5)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -6031,10 +6031,10 @@ LogicalAggregate(group=[{0, 2}]) LogicalAggregate(group=[{2, 9}]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], $f9=[+($7, $0)]) LogicalFilter(condition=[=($0, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{1, 2}]) LogicalProject(DEPTNO=[$0], NAME=[$1], $f2=[+($0, 5)]) - 
LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -6051,8 +6051,8 @@ LogicalAggregate(group=[{0, 9}]) LogicalJoin(condition=[<($0, $9)], joinType=[inner]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($0, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -6061,8 +6061,8 @@ LogicalAggregate(group=[{0, 9}]) LogicalJoin(condition=[<($0, $9)], joinType=[inner]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($0, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -6076,8 +6076,8 @@ group by e.deptno]]> @@ -6085,9 +6085,9 @@ LogicalAggregate(group=[{7}]) LogicalProject(DEPTNO=[$0]) LogicalJoin(condition=[=($0, $1)], joinType=[inner]) LogicalAggregate(group=[{7}]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(DEPTNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -6102,8 +6102,8 @@ group by e.deptno, d.deptno]]> LogicalProject(DEPTNO=[$0], DEPTNO0=[$1]) LogicalAggregate(group=[{7, 9}]) LogicalJoin(condition=[=($7, $9)], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -6111,9 +6111,9 @@ LogicalProject(DEPTNO=[$0], DEPTNO0=[$1]) LogicalProject(DEPTNO=[$0], DEPTNO0=[$1]) LogicalJoin(condition=[=($0, $1)], joinType=[inner]) LogicalAggregate(group=[{7}]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(DEPTNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -6129,9 +6129,9 @@ on A.sal=B.sal @@ -6140,9 +6140,9 @@ LogicalAggregate(group=[{}], EXPR$0=[SUM($3)]) LogicalProject(SAL=[$0], $f1=[$1], SAL0=[$2], $f3=[CAST(*($1, $2)):INTEGER]) LogicalJoin(condition=[=($0, $2)], joinType=[inner]) LogicalAggregate(group=[{5}], agg#0=[COUNT()]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{5}]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -6161,9 +6161,9 @@ group by A.job, B.mgr, A.deptno]]> LogicalProject(JOB=[$0], MGR0=[$2], DEPTNO=[$1], HIREDATE1=[$3], COMM1=[$4]) LogicalAggregate(group=[{2, 7, 9}], HIREDATE1=[MAX($11)], COMM1=[SUM($12)]) LogicalJoin(condition=[=($5, $10)], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{3, 5}], HIREDATE1=[MAX($4)], COMM1=[SUM($6)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -6173,9 +6173,9 @@ LogicalProject(JOB=[$0], MGR0=[$2], DEPTNO=[$1], HIREDATE1=[$3], COMM1=[$4]) LogicalProject(JOB=[$0], SAL=[$1], DEPTNO=[$2], $f3=[$3], MGR=[$4], SAL0=[$5], HIREDATE1=[$6], COMM1=[$7], $f8=[CAST(*($3, $7)):INTEGER NOT NULL]) LogicalJoin(condition=[=($1, $5)], joinType=[inner]) LogicalAggregate(group=[{2, 5, 
7}], agg#0=[COUNT()]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{3, 5}], HIREDATE1=[MAX($4)], COMM1=[SUM($6)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -6193,8 +6193,8 @@ LogicalProject(JOB=[$0], EXPR$1=[$2]) LogicalJoin(condition=[=($2, $10)], joinType=[inner]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($0, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -6205,9 +6205,9 @@ LogicalProject(JOB=[$0], EXPR$1=[$2]) LogicalAggregate(group=[{2}], EXPR$1=[SUM($5)]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($0, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{1}], agg#0=[COUNT()]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -6223,8 +6223,8 @@ LogicalAggregate(group=[{}], EXPR$0=[SUM($5)]) LogicalJoin(condition=[=($2, $10)], joinType=[inner]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($0, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -6235,9 +6235,9 @@ LogicalAggregate(group=[{}], EXPR$0=[SUM($4)]) LogicalAggregate(group=[{2}], EXPR$0=[SUM($5)]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($0, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{1}], agg#0=[COUNT()]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -6253,8 +6253,8 @@ LogicalAggregate(group=[{}], EXPR$0=[SUM($5)]) LogicalJoin(condition=[=($2, $10)], joinType=[inner]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($0, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -6266,9 +6266,9 @@ LogicalProject(EXPR$0=[CASE(=($1, 0), null:INTEGER, $0)]) LogicalAggregate(group=[{2}], EXPR$0=[$SUM0($5)], agg#1=[COUNT()]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($0, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{1}], agg#0=[COUNT()]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -6279,13 +6279,13 @@ LogicalProject(EXPR$0=[CASE(=($1, 0), null:INTEGER, $0)]) @@ -6298,8 +6298,8 @@ LogicalProject(QX=[CAST(CASE(=($0, 1), 1, 2)):INTEGER]) LogicalProject(EXPR$0=[OR(AND(IS NULL($3), IS NULL($12)), IS TRUE(=($3, $12)))]) LogicalFilter(condition=[IS NULL($3)]) LogicalJoin(condition=[=($3, $12)], 
joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -6307,8 +6307,8 @@ LogicalProject(EXPR$0=[OR(AND(IS NULL($3), IS NULL($12)), IS TRUE(=($3, $12)))]) LogicalProject(EXPR$0=[IS NULL($12)]) LogicalFilter(condition=[IS NULL($3)]) LogicalJoin(condition=[=($3, $12)], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -6323,7 +6323,7 @@ where t > TIMESTAMP '2018-01-01 00:00:00']]> LogicalProject(SAL=[$0], T=[$1]) LogicalFilter(condition=[>($1, 2018-01-01 00:00:00)]) LogicalProject(SAL=[$5], T=[CURRENT_TIMESTAMP]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -6331,7 +6331,7 @@ LogicalProject(SAL=[$0], T=[$1]) LogicalProject(SAL=[$0], T=[$1]) LogicalFilter(condition=[>($1, 2018-01-01 00:00:00)]) LogicalProject(SAL=[$5], T=[CURRENT_TIMESTAMP]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -6344,14 +6344,14 @@ where empno=10 and empno is not null]]> @@ -6363,7 +6363,7 @@ LogicalProject(EMPNO=[$0]) @@ -6382,7 +6382,7 @@ where empno=10 and not(empno=10)]]> @@ -6400,7 +6400,7 @@ LogicalProject(EMPNO=[$0]) ($0, 10), <=($0, 10))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -6427,7 +6427,7 @@ LogicalProject(N=[$0]) LogicalProject(N=[$0]) LogicalFilter(condition=[AND(IS NULL($0), IS NULL($0))]) LogicalProject(N=[null:INTEGER]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -6435,7 +6435,7 @@ LogicalProject(N=[$0]) LogicalProject(N=[$0]) LogicalProject(N=[$0]) LogicalProject(N=[null:INTEGER]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -6447,8 +6447,8 @@ LogicalProject(N=[$0]) @@ -6456,8 +6456,8 @@ LogicalProject(EXPR$0=[1]) LogicalProject(EXPR$0=[1]) LogicalProject(DEPTNO=[$9], NAME=[$10], EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO0=[$7], SLACKER=[$8]) LogicalJoin(condition=[=($9, $7)], joinType=[right]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -6471,7 +6471,7 @@ LogicalProject(EXPR$0=[1]) @@ -6481,16 +6481,16 @@ LogicalProject(DEPTNO=[$0], EXPR$1=[$3], EXPR$2=[$5], EXPR$3=[$7], EXPR$4=[$1]) LogicalJoin(condition=[IS NOT DISTINCT FROM($0, $4)], joinType=[inner]) LogicalJoin(condition=[IS NOT DISTINCT FROM($0, $2)], joinType=[inner]) LogicalAggregate(group=[{7}], EXPR$4=[SUM($5)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{1}], EXPR$1=[COUNT($0)]) LogicalAggregate(group=[{1, 7}]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{2}], EXPR$2=[COUNT($1, $0)]) LogicalAggregate(group=[{1, 2, 7}]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{1}], EXPR$3=[COUNT($1, $0)]) LogicalAggregate(group=[{2, 7}]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -6503,7 +6503,7 @@ LogicalProject(DEPTNO=[$0], 
EXPR$1=[$3], EXPR$2=[$5], EXPR$3=[$7], EXPR$4=[$1]) @@ -6512,7 +6512,7 @@ LogicalAggregate(group=[{0}], EXPR$1=[COUNT($1) FILTER $3], EXPR$2=[COUNT($2) FI LogicalProject(DEPTNO=[$0], ENAME=[$1], JOB=[$2], $g_1=[=($3, 1)], $g_2=[=($3, 2)]) LogicalProject(DEPTNO=[$2], ENAME=[$0], JOB=[$1], $g=[$3]) LogicalAggregate(group=[{1, 2, 7}], groups=[[{1, 7}, {2, 7}]], $g=[GROUPING($7, $1, $2)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -6525,7 +6525,7 @@ LogicalAggregate(group=[{0}], EXPR$1=[COUNT($1) FILTER $3], EXPR$2=[COUNT($2) FI @@ -6533,7 +6533,7 @@ LogicalAggregate(group=[{}], EXPR$0=[COUNT(DISTINCT $0)], EXPR$1=[COUNT(DISTINCT LogicalAggregate(group=[{}], EXPR$0=[COUNT($0) FILTER $2], EXPR$1=[COUNT($1) FILTER $3]) LogicalProject(ENAME=[$0], JOB=[$1], $g_1=[=($2, 1)], $g_2=[=($2, 2)]) LogicalAggregate(group=[{1, 2}], groups=[[{1}, {2}]], $g=[GROUPING($1, $2)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -6546,7 +6546,7 @@ LogicalAggregate(group=[{}], EXPR$0=[COUNT($0) FILTER $2], EXPR$1=[COUNT($1) FIL @@ -6556,7 +6556,7 @@ LogicalProject(DEPTNO=[$0], CDDJ=[$1], S=[CAST($2):INTEGER NOT NULL]) LogicalProject(DEPTNO=[$0], JOB=[$1], S=[$2], $g_0=[=($3, 0)], $g_1=[=($3, 1)]) LogicalAggregate(group=[{0, 1}], groups=[[{0, 1}, {0}]], S=[SUM($2)], $g=[GROUPING($0, $1)]) LogicalProject(DEPTNO=[$7], JOB=[$2], SAL=[$5]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -6570,7 +6570,7 @@ LogicalProject(DEPTNO=[$0], CDDJ=[$1], S=[CAST($2):INTEGER NOT NULL]) @@ -6580,16 +6580,16 @@ LogicalProject(DEPTNO=[$0], EXPR$1=[$3], EXPR$2=[$5], EXPR$3=[$7], EXPR$4=[$1]) LogicalJoin(condition=[IS NOT DISTINCT FROM($0, $4)], joinType=[inner]) LogicalJoin(condition=[IS NOT DISTINCT FROM($0, $2)], joinType=[inner]) LogicalAggregate(group=[{7}], EXPR$4=[SUM($5)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{1}], EXPR$1=[COUNT($0)]) LogicalAggregate(group=[{1, 7}]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{2}], EXPR$2=[COUNT($1, $0)]) LogicalAggregate(group=[{1, 2, 7}]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{1}], EXPR$3=[COUNT($1, $0)]) LogicalAggregate(group=[{2, 7}]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -6605,7 +6605,7 @@ LogicalProject(DEPTNO=[$0], EXPR$1=[$3], EXPR$2=[$5], EXPR$3=[$7], EXPR$4=[$1]) @@ -6614,7 +6614,7 @@ LogicalProject(DEPTNO=[$0], CDE=[$1], CDJE=[$2], CDDJ=[$3], S=[CAST($4):INTEGER LogicalAggregate(group=[{0}], CDE=[COUNT($1) FILTER $5], CDJE=[COUNT($2, $1) FILTER $4], CDDJ=[COUNT($0, $2) FILTER $6], S=[MIN($3) FILTER $7]) LogicalProject(DEPTNO=[$2], ENAME=[$0], JOB=[$1], S=[$3], $g_0=[=($4, 0)], $g_1=[=($4, 1)], $g_2=[=($4, 2)], $g_3=[=($4, 3)]) LogicalAggregate(group=[{1, 2, 7}], groups=[[{1, 2, 7}, {1, 7}, {2, 7}, {7}]], S=[SUM($5)], $g=[GROUPING($7, $1, $2)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -6627,8 +6627,8 @@ LogicalProject(DEPTNO=[$0], CDE=[$1], CDJE=[$2], CDDJ=[$3], S=[CAST($4):INTEGER LogicalProject(DEPTNO=[$0], DEPTNO0=[$9]) LogicalFilter(condition=[=(+($0, 10), *($9, 2))]) LogicalJoin(condition=[true], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) - LogicalScan(table=[[CATALOG, 
SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -6637,9 +6637,9 @@ LogicalProject(DEPTNO=[$0], DEPTNO0=[$9]) LogicalProject(DEPTNO=[$0], NAME=[$1], EMPNO=[$3], ENAME=[$4], JOB=[$5], MGR=[$6], HIREDATE=[$7], SAL=[$8], COMM=[$9], DEPTNO0=[$10], SLACKER=[$11]) LogicalJoin(condition=[=($2, $12)], joinType=[inner]) LogicalProject(DEPTNO=[$0], NAME=[$1], $f2=[+($0, 10)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], $f9=[*($7, 2)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -6653,14 +6653,14 @@ where r > 0.5]]> ($1, 10)]) LogicalProject(SAL=[$5], N=[NDC()]) - LogicalScan(table=[[scott, EMP]]) + LogicalScan(entity=[[scott, EMP]]) ]]> ($1, 10)]) LogicalProject(SAL=[$5], N=[NDC()]) - LogicalScan(table=[[scott, EMP]]) + LogicalScan(entity=[[scott, EMP]]) ]]> @@ -6673,7 +6673,7 @@ LogicalFilter(condition=[>($1, 10)]) LogicalAggregate(group=[{}], EXPR$0=[COUNT()]) LogicalProject($f0=[1]) LogicalFilter(condition=[=(null, 1)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -6697,7 +6697,7 @@ where case deptno LogicalAggregate(group=[{}], EXPR$0=[COUNT()]) LogicalProject($f0=[1]) LogicalFilter(condition=[=(CASE(=($7, 20), 2, =($7, 10), 1, 3), 1)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -6705,7 +6705,7 @@ LogicalAggregate(group=[{}], EXPR$0=[COUNT()]) LogicalAggregate(group=[{}], EXPR$0=[COUNT()]) LogicalProject($f0=[1]) LogicalFilter(condition=[=($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -6722,7 +6722,7 @@ where case deptno LogicalAggregate(group=[{}], EXPR$0=[COUNT()]) LogicalProject($f0=[1]) LogicalFilter(condition=[=(CASE(=($7, 20), 2, =($7, 10), 1, null:INTEGER), 1)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -6730,7 +6730,7 @@ LogicalAggregate(group=[{}], EXPR$0=[COUNT()]) LogicalAggregate(group=[{}], EXPR$0=[COUNT()]) LogicalProject($f0=[1]) LogicalFilter(condition=[=($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -6749,7 +6749,7 @@ where case deptno LogicalAggregate(group=[{}], EXPR$0=[COUNT()]) LogicalProject($f0=[1]) LogicalFilter(condition=[=(CASE(=($7, 30), 1, =($7, 20), 2, =($7, 10), 1, =($7, 30), 111, 0), 1)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -6757,7 +6757,7 @@ LogicalAggregate(group=[{}], EXPR$0=[COUNT()]) LogicalAggregate(group=[{}], EXPR$0=[COUNT()]) LogicalProject($f0=[1]) LogicalFilter(condition=[OR(=($7, 30), =($7, 10))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -6771,16 +6771,16 @@ where coalesce(e1.mgr, -1) = coalesce(e2.mgr, -1)]]> LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], EMPNO0=[$9], ENAME0=[$10], JOB0=[$11], MGR0=[$12], HIREDATE0=[$13], SAL0=[$14], COMM0=[$15], DEPTNO0=[$16], SLACKER0=[$17]) LogicalFilter(condition=[=(CASE(IS NOT NULL($3), $3, -1), CASE(IS NOT NULL($12), $12, -1))]) LogicalJoin(condition=[true], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, 
SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -6795,7 +6795,7 @@ LogicalProject(DEPTNO=[$0]) LogicalFilter(condition=[>($1, 1)]) LogicalAggregate(group=[{0}], agg#0=[COUNT()]) LogicalProject(DEPTNO=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -6804,7 +6804,7 @@ LogicalProject(DEPTNO=[$0]) LogicalFilter(condition=[>($1, 1)]) LogicalAggregate(group=[{0}], agg#0=[COUNT()]) LogicalProject(DEPTNO=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -6824,8 +6824,8 @@ group by e.job,d.name]]> LogicalProject(JOB=[$0], MIN_SAL=[$2], MIN_DEPTNO=[$3], SUM_SAL_PLUS=[+($4, 1)], MAX_SAL=[$5], SUM_SAL_2=[$4], COUNT_SAL=[$6], COUNT_MGR=[$7]) LogicalAggregate(group=[{2, 10}], MIN_SAL=[MIN($5)], MIN_DEPTNO=[MIN($7)], SUM_SAL_2=[SUM($5)], MAX_SAL=[MAX($5)], COUNT_SAL=[COUNT()], COUNT_MGR=[COUNT($3)]) LogicalJoin(condition=[=($2, $10)], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -6834,9 +6834,9 @@ LogicalProject(JOB=[$0], MIN_SAL=[$2], MIN_DEPTNO=[$3], SUM_SAL_PLUS=[+($4, 1)], LogicalProject(JOB=[$0], NAME=[$7], MIN_SAL=[$1], MIN_DEPTNO=[$2], $f9=[CAST(*($3, $8)):INTEGER NOT NULL], MAX_SAL=[$4], $f10=[*($5, $8)], $f11=[*($6, $8)]) LogicalJoin(condition=[=($0, $7)], joinType=[inner]) LogicalAggregate(group=[{2}], MIN_SAL=[MIN($5)], MIN_DEPTNO=[MIN($7)], SUM_SAL_2=[SUM($5)], MAX_SAL=[MAX($5)], COUNT_SAL=[COUNT()], COUNT_MGR=[COUNT($3)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{1}], agg#0=[COUNT()]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -6848,8 +6848,8 @@ LogicalProject(JOB=[$0], MIN_SAL=[$2], MIN_DEPTNO=[$3], SUM_SAL_PLUS=[+($4, 1)], @@ -6858,9 +6858,9 @@ LogicalAggregate(group=[{}], EXPR$0=[$SUM0($4)]) LogicalProject(JOB=[$0], EXPR$0=[$1], NAME=[$2], EXPR$00=[$3], $f4=[*($1, $3)]) LogicalJoin(condition=[=($0, $2)], joinType=[inner]) LogicalAggregate(group=[{2}], EXPR$0=[COUNT()]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{1}], EXPR$0=[COUNT()]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -6873,16 +6873,16 @@ LogicalAggregate(group=[{}], EXPR$0=[$SUM0($4)]) LogicalAggregate(group=[{}], EXPR$0=[COUNT(DISTINCT $0)]) LogicalProject(SAL=[$5]) LogicalJoin(condition=[=($2, $10)], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -6899,9 +6899,9 @@ group by d.name]]> @@ -6909,9 +6909,9 @@ LogicalAggregate(group=[{9}], SUM_SAL=[SUM($5)], C=[COUNT()]) LogicalProject(NAME=[$3], SUM_SAL=[$1], C=[$2]) LogicalJoin(condition=[=($0, $3)], joinType=[inner]) LogicalAggregate(group=[{2}], SUM_SAL=[SUM($5)], C=[COUNT()]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{1}]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -6923,14 +6923,14 @@ LogicalProject(NAME=[$3], SUM_SAL=[$1], C=[$2]) @@ -6943,13 +6943,13 @@ from emp]]> @@ -6962,13 +6962,13 @@ from emp]]> 
@@ -6982,20 +6982,20 @@ LogicalProject(NEWCOL=[+($0, CAST(1):INTEGER)]) ($5, 100)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> ($5, 100)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -7009,11 +7009,11 @@ SemiJoin(condition=[=($0, $2)], joinType=[inner]) ($5, 100)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -7027,17 +7027,17 @@ LogicalProject(DEPTNO=[$0], NAME=[$1]) ($5, 100)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -7051,11 +7051,11 @@ LogicalProject(NAME=[$1]) ($5, 100)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -7070,9 +7070,9 @@ order by sal limit 10]]> LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], DEPTNO0=[$9], NAME=[$10]) LogicalSort(sort0=[$5], dir0=[ASC], fetch=[10]) LogicalJoin(condition=[=($7, $9)], joinType=[left]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(DEPTNO=[$0], NAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -7081,9 +7081,9 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalSort(sort0=[$5], dir0=[ASC], fetch=[10]) LogicalJoin(condition=[=($7, $9)], joinType=[left]) LogicalSort(sort0=[$5], dir0=[ASC], fetch=[10]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(DEPTNO=[$0], NAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -7098,20 +7098,20 @@ from sales.emp]]> LogicalProject(EMPNO=[$0], D=[$SCALAR_QUERY({ LogicalProject(DEPTNO=[$7]) LogicalFilter(condition=[<($0, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) })]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -7125,9 +7125,9 @@ LogicalProject(EMPNO=[$0], D=[$9]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[> SOME($0, { LogicalProject(DEPTNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) })]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -7136,10 +7136,10 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[OR(AND(IS TRUE(>($0, $9)), <>($10, 0)), AND(>($0, $9), <>($10, 0), IS NOT TRUE(>($0, $9)), <=($10, $11)))]) LogicalJoin(condition=[true], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{}], m=[MIN($0)], c=[COUNT()], d=[COUNT($0)]) LogicalProject(DEPTNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -7148,10 +7148,10 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[OR(AND(IS TRUE(>($0, $9)), 
<>($10, 0)), AND(>($0, $9), <>($10, 0), IS NOT TRUE(>($0, $9)), <=($10, $11)))]) LogicalJoin(condition=[true], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{}], m=[MIN($0)], c=[COUNT()], d=[COUNT($0)]) LogicalProject(DEPTNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -7166,9 +7166,9 @@ order by name]]> LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], DEPTNO0=[$9], NAME=[$10]) LogicalSort(sort0=[$10], dir0=[ASC]) LogicalJoin(condition=[=($7, $9)], joinType=[right]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(DEPTNO=[$0], NAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -7176,10 +7176,10 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], DEPTNO0=[$9], NAME=[$10]) LogicalSort(sort0=[$10], dir0=[ASC]) LogicalJoin(condition=[=($7, $9)], joinType=[right]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalSort(sort0=[$1], dir0=[ASC]) LogicalProject(DEPTNO=[$0], NAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -7194,9 +7194,9 @@ order by sal, name limit 10]]> LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], DEPTNO0=[$9], NAME=[$10]) LogicalSort(sort0=[$5], sort1=[$10], dir0=[ASC], dir1=[ASC], fetch=[10]) LogicalJoin(condition=[=($7, $9)], joinType=[left]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(DEPTNO=[$0], NAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -7204,9 +7204,9 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], DEPTNO0=[$9], NAME=[$10]) LogicalSort(sort0=[$5], sort1=[$10], dir0=[ASC], dir1=[ASC], fetch=[10]) LogicalJoin(condition=[=($7, $9)], joinType=[left]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(DEPTNO=[$0], NAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -7221,9 +7221,9 @@ order by name]]> LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], DEPTNO0=[$9], NAME=[$10]) LogicalSort(sort0=[$10], dir0=[ASC]) LogicalJoin(condition=[=($7, $9)], joinType=[right]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(DEPTNO=[$0], NAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -7231,10 +7231,10 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], DEPTNO0=[$9], NAME=[$10]) LogicalSort(sort0=[$10], dir0=[ASC]) LogicalJoin(condition=[=($7, $9)], joinType=[right]) - 
LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalSort(sort0=[$1], dir0=[ASC]) LogicalProject(DEPTNO=[$0], NAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -7249,10 +7249,10 @@ limit 10]]> LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], DEPTNO0=[$9], NAME=[$10]) LogicalSort(fetch=[10]) LogicalJoin(condition=[=($7, $9)], joinType=[right]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(DEPTNO=[$0], NAME=[$1]) LogicalSort(fetch=[10]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -7260,10 +7260,10 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], DEPTNO0=[$9], NAME=[$10]) LogicalSort(fetch=[10]) LogicalJoin(condition=[=($7, $9)], joinType=[right]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(DEPTNO=[$0], NAME=[$1]) LogicalSort(fetch=[10]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -7277,8 +7277,8 @@ right join sales.emp e using (deptno) limit 10 offset 2]]> LogicalProject(DEPTNO=[$0], EMPNO=[$2]) LogicalSort(offset=[2], fetch=[10]) LogicalJoin(condition=[=($0, $9)], joinType=[right]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -7286,9 +7286,9 @@ LogicalProject(DEPTNO=[$0], EMPNO=[$2]) LogicalProject(DEPTNO=[$0], EMPNO=[$2]) LogicalSort(offset=[2], fetch=[10]) LogicalJoin(condition=[=($0, $9)], joinType=[right]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalSort(offset=[2], fetch=[10]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -7302,8 +7302,8 @@ left join sales.emp e using (deptno) order by d.deptno offset 1]]> LogicalProject(DEPTNO=[$0], EMPNO=[$2]) LogicalSort(sort0=[$0], dir0=[ASC], offset=[1]) LogicalJoin(condition=[=($0, $9)], joinType=[left]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -7311,8 +7311,8 @@ LogicalProject(DEPTNO=[$0], EMPNO=[$2]) LogicalProject(DEPTNO=[$0], EMPNO=[$2]) LogicalSort(sort0=[$0], dir0=[ASC], offset=[1]) LogicalJoin(condition=[=($0, $9)], joinType=[left]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -7325,14 +7325,14 @@ order by cast(d.deptno as integer) offset 1]]> @@ -7345,14 +7345,14 @@ order by cast(d.deptno as double) offset 1]]> @@ -7365,14 +7365,14 @@ order by cast(d.deptno as varchar(10)) offset 1]]> @@ -7389,9 +7389,9 @@ LogicalSort(sort0=[$0], dir0=[ASC], fetch=[10]) LogicalProject(NAME=[$0]) LogicalUnion(all=[true]) LogicalProject(NAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalProject(NAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> 
@@ -7400,10 +7400,10 @@ LogicalSort(sort0=[$0], dir0=[ASC], fetch=[10]) LogicalUnion(all=[true]) LogicalSort(sort0=[$0], dir0=[ASC], fetch=[10]) LogicalProject(NAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalSort(sort0=[$0], dir0=[ASC], fetch=[10]) LogicalProject(NAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -7418,20 +7418,20 @@ from sales.emp]]> LogicalProject(EMPNO=[$0], D=[IN($7, { LogicalProject(DEPTNO=[$7]) LogicalFilter(condition=[<($0, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) })]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -7449,9 +7449,9 @@ LogicalProject(EMPNO=[$0]) LogicalFilter(condition=[OR(IN($0, $7, { LogicalProject(EMPNO=[$0], DEPTNO=[$7]) LogicalFilter(condition=[<($0, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) }), <($5, 100))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -7460,10 +7460,10 @@ LogicalProject(EMPNO=[$0]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[OR(IS NOT NULL($11), <($5, 100))]) LogicalJoin(condition=[AND(=($0, $9), =($7, $10))], joinType=[left]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], DEPTNO=[$7], i=[true]) LogicalFilter(condition=[<($0, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -7479,10 +7479,10 @@ LogicalProject(EMPNO=[$0]) LogicalJoin(condition=[IN($7, { LogicalProject(DEPTNO=[$7]) LogicalFilter(condition=[<($0, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) })], joinType=[left]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -7490,13 +7490,13 @@ LogicalProject(DEPTNO=[$7]) LogicalProject(EMPNO=[$0]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], DEPTNO0=[$9], NAME=[$10]) LogicalJoin(condition=[true], joinType=[left]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalJoin(condition=[=($7, $11)], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalAggregate(group=[{0}]) LogicalProject(DEPTNO=[$7]) LogicalFilter(condition=[<($0, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -7513,9 +7513,9 @@ LogicalProject(EMPNO=[$0]) LogicalFilter(condition=[OR(IN($7, { LogicalProject(DEPTNO=[$7]) LogicalFilter(condition=[<($0, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) }), <($5, 100))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -7524,11 +7524,11 @@ LogicalProject(EMPNO=[$0]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[OR(IS NOT NULL($10), <($5, 100))]) LogicalJoin(condition=[=($7, $9)], joinType=[left]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) 
+ LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0, 1}]) LogicalProject(DEPTNO=[$7], i=[true]) LogicalFilter(condition=[<($0, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -7543,10 +7543,10 @@ on exists (select deptno from sales.emp where empno < 20)]]> LogicalProject(EMPNO=[$0]) LogicalJoin(condition=[EXISTS({ LogicalFilter(condition=[<($0, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) })], joinType=[left]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -7554,13 +7554,13 @@ LogicalFilter(condition=[<($0, 20)]) LogicalProject(EMPNO=[$0]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], DEPTNO0=[$9], NAME=[$10]) LogicalJoin(condition=[true], joinType=[left]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalJoin(condition=[true], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalAggregate(group=[{0}]) LogicalProject(i=[true]) LogicalFilter(condition=[<($0, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -7574,20 +7574,20 @@ from sales.emp]]> @@ -7604,14 +7604,14 @@ LogicalProject(EMPNO=[$0]) LogicalJoin(condition=[<($SCALAR_QUERY({ LogicalProject(DEPTNO=[$7]) LogicalFilter(condition=[<($0, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) }), $SCALAR_QUERY({ LogicalProject(DEPTNO=[$7]) LogicalFilter(condition=[>($0, 100)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) }))], joinType=[left]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -7620,18 +7620,18 @@ LogicalProject(EMPNO=[$0]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], DEPTNO0=[$9], NAME=[$10]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], DEPTNO0=[$9], NAME=[$10], $f0=[$11]) LogicalJoin(condition=[<($11, $12)], joinType=[left]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalJoin(condition=[true], joinType=[left]) LogicalJoin(condition=[true], joinType=[left]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalAggregate(group=[{}], agg#0=[SINGLE_VALUE($0)]) LogicalProject(DEPTNO=[$7]) LogicalFilter(condition=[<($0, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{}], agg#0=[SINGLE_VALUE($0)]) LogicalProject(DEPTNO=[$7]) LogicalFilter(condition=[>($0, 100)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -7647,9 +7647,9 @@ or emp.sal < 100]]> LogicalProject(EMPNO=[$0]) LogicalFilter(condition=[OR(EXISTS({ LogicalFilter(condition=[<($0, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) }), <($5, 100))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + 
LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -7658,11 +7658,11 @@ LogicalProject(EMPNO=[$0]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[OR(IS NOT NULL($9), <($5, 100))]) LogicalJoin(condition=[true], joinType=[left]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0}]) LogicalProject(i=[true]) LogicalFilter(condition=[<($0, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -7677,19 +7677,19 @@ from sales.emp]]> LogicalProject(EMPNO=[$0], D=[IN($0, $7, { LogicalProject(EMPNO=[$0], DEPTNO=[$7]) LogicalFilter(condition=[<($0, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) })]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -7706,10 +7706,10 @@ LogicalProject(EMPNO=[$0]) LogicalJoin(condition=[IN($0, $9, { LogicalProject(EMPNO=[$0], DEPTNO=[$7]) LogicalFilter(condition=[<($0, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) })], joinType=[left]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -7717,13 +7717,13 @@ LogicalProject(EMPNO=[$0], DEPTNO=[$7]) LogicalProject(EMPNO=[$0]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], DEPTNO0=[$9], NAME=[$10]) LogicalJoin(condition=[true], joinType=[left]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalJoin(condition=[AND(=($0, $11), =($9, $12))], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalAggregate(group=[{0, 1}]) LogicalProject(EMPNO=[$0], DEPTNO=[$7]) LogicalFilter(condition=[<($0, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -7741,13 +7741,13 @@ LogicalProject(EMPNO=[$0]) LogicalFilter(condition=[OR(<($SCALAR_QUERY({ LogicalProject(DEPTNO=[$7]) LogicalFilter(condition=[<($0, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) }), $SCALAR_QUERY({ LogicalProject(DEPTNO=[$7]) LogicalFilter(condition=[>($0, 100)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) })), <($5, 100))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -7757,15 +7757,15 @@ LogicalProject(EMPNO=[$0]) LogicalFilter(condition=[OR(<($9, $10), <($5, 100))]) LogicalJoin(condition=[true], joinType=[left]) LogicalJoin(condition=[true], joinType=[left]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{}], agg#0=[SINGLE_VALUE($0)]) LogicalProject(DEPTNO=[$7]) LogicalFilter(condition=[<($0, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{}], agg#0=[SINGLE_VALUE($0)]) LogicalProject(DEPTNO=[$7]) LogicalFilter(condition=[>($0, 100)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -7780,7 +7780,7 @@ and empno = 10 and mgr is null and empno = 10]]> @@ -7801,14 +7801,14 @@ and empno = 10 and mgr is 
null and empno = 10]]> @@ -7827,7 +7827,7 @@ LogicalSort(sort0=[$1], dir0=[DESC-nulls-last]) LogicalAggregate(group=[{0, 1}], C=[COUNT()]) LogicalProject(DEPTNO=[$7], SAL=[$5]) LogicalFilter(condition=[=($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -7836,7 +7836,7 @@ LogicalProject(C=[$2], DEPTNO=[$0]) LogicalAggregate(group=[{0, 1}], C=[COUNT()]) LogicalProject(DEPTNO=[$7], SAL=[$5]) LogicalFilter(condition=[=($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -7855,7 +7855,7 @@ LogicalSort(sort0=[$1], sort1=[$2], dir0=[ASC], dir1=[DESC]) LogicalAggregate(group=[{0, 1}], C=[COUNT()]) LogicalProject(DEPTNO=[$7], SAL=[$5]) LogicalFilter(condition=[=($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -7865,7 +7865,7 @@ LogicalSort(sort0=[$2], dir0=[DESC]) LogicalAggregate(group=[{0, 1}], C=[COUNT()]) LogicalProject(DEPTNO=[$7], SAL=[$5]) LogicalFilter(condition=[=($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -7882,9 +7882,9 @@ LogicalSort(sort0=[$0], dir0=[ASC]) LogicalProject(NAME=[$0]) LogicalUnion(all=[true]) LogicalProject(NAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalProject(NAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -7893,10 +7893,10 @@ LogicalSort(sort0=[$0], dir0=[ASC]) LogicalUnion(all=[true]) LogicalSort(sort0=[$0], dir0=[ASC]) LogicalProject(NAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalSort(sort0=[$0], dir0=[ASC]) LogicalProject(NAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -7913,9 +7913,9 @@ LogicalSort(sort0=[$0], dir0=[ASC], fetch=[0]) LogicalProject(NAME=[$0]) LogicalUnion(all=[true]) LogicalProject(NAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalProject(NAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -7924,10 +7924,10 @@ LogicalSort(sort0=[$0], dir0=[ASC], fetch=[0]) LogicalUnion(all=[true]) LogicalSort(sort0=[$0], dir0=[ASC], fetch=[0]) LogicalProject(NAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalSort(sort0=[$0], dir0=[ASC], fetch=[0]) LogicalProject(NAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -7944,7 +7944,7 @@ LogicalProject(C=[$2]) LogicalAggregate(group=[{0, 1}], C=[COUNT()]) LogicalProject(DEPTNO=[$7], SAL=[$5]) LogicalFilter(condition=[=($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -7954,7 +7954,7 @@ LogicalProject(C=[$2]) LogicalAggregate(group=[{1}], C=[COUNT()]) LogicalProject(DEPTNO=[$7], SAL=[$5]) LogicalFilter(condition=[=($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -7971,7 +7971,7 @@ LogicalProject(C=[$1]) LogicalAggregate(group=[{0}], C=[COUNT()]) LogicalProject(DEPTNO=[$7]) LogicalFilter(condition=[=($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -7980,7 +7980,7 @@ LogicalProject(C=[$1]) LogicalAggregate(group=[{0}], 
C=[COUNT()]) LogicalProject(DEPTNO=[$7]) LogicalFilter(condition=[=($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -7999,7 +7999,7 @@ LogicalProject(JOB=[$1]) LogicalAggregate(group=[{0, 1}], agg#0=[COUNT()]) LogicalProject(SAL=[$5], JOB=[$2]) LogicalFilter(condition=[AND(IS NULL($5), =($2, 'Clerk'))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -8010,7 +8010,7 @@ LogicalProject(JOB=[$1]) LogicalAggregate(group=[{0}], agg#0=[COUNT()]) LogicalProject(SAL=[$5], JOB=[$2]) LogicalFilter(condition=[AND(IS NULL($5), =($2, 'Clerk'))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -8029,9 +8029,9 @@ LogicalProject(EMPNO=[$0], D=[IN(CASE(true, CAST($7):INTEGER, null:INTEGER), { LogicalProject(DEPTNO=[$1]) LogicalFilter(condition=[<($0, 20)]) LogicalProject(EMPNO=[$0], DEPTNO=[CASE(true, CAST($7):INTEGER, null:INTEGER)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) })]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -8039,17 +8039,17 @@ LogicalProject(DEPTNO=[$1]) LogicalProject(EMPNO=[$0], D=[CASE(=($9, 0), false, IS NULL(CASE(true, CAST($7):INTEGER, null:INTEGER)), null:NULL, IS NOT NULL($12), true, <($10, $9), null:NULL, false)]) LogicalJoin(condition=[=($7, $11)], joinType=[left]) LogicalJoin(condition=[true], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{}], c=[COUNT()], ck=[COUNT($0)]) LogicalProject(DEPTNO=[$1]) LogicalFilter(condition=[<($0, 20)]) LogicalProject(EMPNO=[$0], DEPTNO=[CASE(true, CAST($7):INTEGER, null:INTEGER)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0, 1}]) LogicalProject(DEPTNO=[$1], i=[true]) LogicalFilter(condition=[<($0, 20)]) LogicalProject(EMPNO=[$0], DEPTNO=[CASE(true, CAST($7):INTEGER, null:INTEGER)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -8071,13 +8071,13 @@ LogicalProject(EMPNO=[$0]) LogicalFilter(condition=[<($0, CASE(=(IN($7, { LogicalProject(EXPR$0=[CASE(true, CAST($7):INTEGER, null:INTEGER)]) LogicalFilter(condition=[<($0, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) }), true), 10, =(IN($7, { LogicalProject(EXPR$0=[CASE(true, CAST($7):INTEGER, null:INTEGER)]) LogicalFilter(condition=[<($0, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) }), false), 20, 30))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -8087,15 +8087,15 @@ LogicalProject(EMPNO=[$0]) LogicalFilter(condition=[<($0, CASE(=(OR(AND(IS NOT NULL($12), <>($9, 0)), AND(<($10, $9), null, <>($9, 0), IS NULL($12))), true), 10, =(OR(AND(IS NOT NULL($12), <>($9, 0)), AND(<($10, $9), null, <>($9, 0), IS NULL($12))), false), 20, 30))]) LogicalJoin(condition=[=($7, $11)], joinType=[left]) LogicalJoin(condition=[true], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{}], c=[COUNT()], ck=[COUNT($0)]) LogicalProject(EXPR$0=[CASE(true, CAST($7):INTEGER, null:INTEGER)]) LogicalFilter(condition=[<($0, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) 
LogicalAggregate(group=[{0, 1}]) LogicalProject(EXPR$0=[CASE(true, CAST($7):INTEGER, null:INTEGER)], i=[true]) LogicalFilter(condition=[<($0, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -8113,13 +8113,13 @@ LogicalProject(EMPNO=[$0]) LogicalFilter(condition=[<($0, CASE(=(IN($7, { LogicalProject(EXPR$0=[CASE(true, CAST($7):INTEGER, null)]) LogicalFilter(condition=[<($0, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) }), true), 10, =(IN($7, { LogicalProject(EXPR$0=[CASE(true, CAST($7):INTEGER, null)]) LogicalFilter(condition=[<($0, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) }), false), 20, 30))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -8129,16 +8129,16 @@ LogicalProject(EMPNO=[$0]) LogicalFilter(condition=[<($0, CASE(=(CASE(=($9, 0), false, IS NOT NULL($12), true, <($10, $9), null, false), true), 10, =(CASE(=($9, 0), false, IS NOT NULL($12), true, <($10, $9), null, false), false), 20, 30))]) LogicalJoin(condition=[=($7, $11)], joinType=[left]) LogicalJoin(condition=[true], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{}], c=[COUNT()], ck=[COUNT($0)]) LogicalProject(EXPR$0=[CASE(true, CAST($7):INTEGER, null)]) LogicalFilter(condition=[<($0, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0, 1}]) LogicalProject(EXPR$0=[$0], i=[true]) LogicalProject(EXPR$0=[CASE(true, CAST($7):INTEGER, null)]) LogicalFilter(condition=[<($0, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -8153,9 +8153,9 @@ where exists (select deptno from sales.emp where empno < 20)]]> LogicalProject(EMPNO=[$0]) LogicalFilter(condition=[EXISTS({ LogicalFilter(condition=[<($0, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) })]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -8163,11 +8163,11 @@ LogicalFilter(condition=[<($0, 20)]) LogicalProject(EMPNO=[$0]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalJoin(condition=[true], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0}]) LogicalProject(i=[true]) LogicalFilter(condition=[<($0, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -8183,9 +8183,9 @@ and emp.sal < 100]]> LogicalProject(EMPNO=[$0]) LogicalFilter(condition=[AND(EXISTS({ LogicalFilter(condition=[<($0, 20)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) }), <($5, 100))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -8194,11 +8194,11 @@ LogicalProject(EMPNO=[$0]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[<($5, 100)]) LogicalJoin(condition=[true], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0}]) LogicalProject(i=[true]) LogicalFilter(condition=[<($0, 20)]) - 
LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -8213,9 +8213,9 @@ LogicalProject(SAL=[$5]) LogicalFilter(condition=[IN($0, { LogicalProject(DEPTNO=[$0]) LogicalFilter(condition=[=($cor0.JOB, $1)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) })], variablesSet=[[$cor0]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -8224,19 +8224,19 @@ LogicalProject(SAL=[$5]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[=($0, $9)]) LogicalCorrelate(correlation=[$cor0], joinType=[inner], requiredColumns=[{2}]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(DEPTNO=[$0]) LogicalFilter(condition=[=($cor0.JOB, $1)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -8253,11 +8253,11 @@ LogicalProject(EMPNO=[$0]) LogicalFilter(condition=[IN($5, { LogicalProject(SAL=[$5]) LogicalFilter(condition=[>($7, $cor0.DEPTNO)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) })], variablesSet=[[$cor0]]) LogicalJoin(condition=[=($7, $9)], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -8267,12 +8267,12 @@ LogicalProject(EMPNO=[$0]) LogicalFilter(condition=[=($5, $11)]) LogicalCorrelate(correlation=[$cor0], joinType=[inner], requiredColumns=[{7}]) LogicalJoin(condition=[=($7, $9)], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalAggregate(group=[{0}]) LogicalProject(SAL=[$5]) LogicalFilter(condition=[>($7, $cor0.DEPTNO)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -8288,8 +8288,8 @@ group by emp.empno]]> LogicalAggregate(group=[{0}], EXPR$1=[COUNT()], EXPR$2=[AVG(DISTINCT $1)]) LogicalProject(EMPNO=[$0], DEPTNO0=[$9]) LogicalJoin(condition=[=($7, $9)], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -8298,8 +8298,8 @@ LogicalAggregate(group=[{0}], EXPR$1=[$SUM0($2)], EXPR$2=[AVG($1)]) LogicalAggregate(group=[{0, 1}], EXPR$1=[COUNT()]) LogicalProject(EMPNO=[$0], DEPTNO0=[$9]) LogicalJoin(condition=[=($7, $9)], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -8316,7 +8316,7 @@ group by name]]> LogicalAggregate(group=[{0}], EXPR$1=[SUM(DISTINCT $1)], EXPR$2=[SUM(DISTINCT $2)]) LogicalAggregate(group=[{0}], CN=[COUNT()], SM=[SUM($1)]) LogicalProject(NAME=[$1], DEPTNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -8327,7 +8327,7 @@ LogicalProject(NAME=[$0], EXPR$1=[CAST($1):BIGINT NOT NULL], EXPR$2=[CAST($2):IN LogicalAggregate(group=[{0, 1, 2}], groups=[[{0, 1}, {0, 2}]], $g=[GROUPING($0, $1, $2)]) LogicalAggregate(group=[{0}], CN=[COUNT()], SM=[SUM($1)]) LogicalProject(NAME=[$1], 
DEPTNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -8361,7 +8361,7 @@ group by job, empno, sal]]> LogicalAggregate(group=[{0, 1, 2}], S=[SUM($2)]) LogicalProject(JOB=[$2], EMPNO=[$0], SAL=[$5]) LogicalFilter(condition=[=($0, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -8370,7 +8370,7 @@ LogicalProject(JOB=[$0], EMPNO=[10], SAL=[$1], S=[$2]) LogicalAggregate(group=[{0, 2}], S=[SUM($2)]) LogicalProject(JOB=[$2], EMPNO=[$0], SAL=[$5]) LogicalFilter(condition=[=($0, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -8384,7 +8384,7 @@ group by ename, sal]]> @@ -8392,7 +8392,7 @@ LogicalAggregate(group=[{1, 5}]) LogicalProject(ENAME=['John':VARCHAR(20)], SAL=[$0]) LogicalAggregate(group=[{5}]) LogicalFilter(condition=[=($1, 'John')]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -8409,10 +8409,10 @@ LogicalProject(ENAME=[$0]) LogicalFilter(condition=[IN($1, { LogicalProject(DEPTNO=[$7]) LogicalFilter(condition=[=(+($5, 1), $cor0.SALPLUS)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) })], variablesSet=[[$cor0]]) LogicalProject(ENAME=[$1], DEPTNO=[$7], SALPLUS=[+($5, 1)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -8422,11 +8422,11 @@ LogicalProject(ENAME=[$0]) LogicalFilter(condition=[=($1, $3)]) LogicalCorrelate(correlation=[$cor0], joinType=[inner], requiredColumns=[{2}]) LogicalProject(ENAME=[$1], DEPTNO=[$7], SALPLUS=[+($5, 1)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0}]) LogicalProject(DEPTNO=[$7]) LogicalFilter(condition=[=(+($5, 1), $cor0.SALPLUS)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -8434,12 +8434,12 @@ LogicalProject(ENAME=[$0]) LogicalProject(ENAME=[$0]) LogicalJoin(condition=[AND(=($2, $4), =($1, $3))], joinType=[inner]) LogicalProject(ENAME=[$1], DEPTNO=[$7], SALPLUS=[+($5, 1)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0, 1}]) LogicalProject(DEPTNO=[$7], $f9=[$9]) LogicalFilter(condition=[=(+($5, 1), $9)]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], $f9=[+($5, 1)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -8456,10 +8456,10 @@ LogicalProject(NAME=[$0]) LogicalFilter(condition=[IN($1, { LogicalProject(DEPTNO=[$7]) LogicalFilter(condition=[=(+($5, 1), $cor0.DEPTNOMINUS)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) })], variablesSet=[[$cor0]]) LogicalProject(NAME=[$1], DEPTNO=[$0], DEPTNOMINUS=[-($0, 10)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -8469,11 +8469,11 @@ LogicalProject(NAME=[$0]) LogicalFilter(condition=[=($1, $3)]) LogicalCorrelate(correlation=[$cor0], joinType=[inner], requiredColumns=[{2}]) LogicalProject(NAME=[$1], DEPTNO=[$0], DEPTNOMINUS=[-($0, 10)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalAggregate(group=[{0}]) LogicalProject(DEPTNO=[$7]) LogicalFilter(condition=[=(+($5, 1), $cor0.DEPTNOMINUS)]) - LogicalScan(table=[[CATALOG, SALES, 
EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -8481,12 +8481,12 @@ LogicalProject(NAME=[$0]) LogicalProject(NAME=[$0]) LogicalJoin(condition=[AND(=($2, $4), =($1, $3))], joinType=[inner]) LogicalProject(NAME=[$1], DEPTNO=[$0], DEPTNOMINUS=[-($0, 10)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalAggregate(group=[{0, 1}]) LogicalProject(DEPTNO=[$7], $f9=[$9]) LogicalFilter(condition=[=(+($5, 1), $9)]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], $f9=[+($5, 1)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -8503,9 +8503,9 @@ LogicalProject(SAL=[$5]) LogicalFilter(condition=[NOT(IN($0, { LogicalProject(DEPTNO=[$0]) LogicalFilter(condition=[=($cor0.JOB, $1)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) }))], variablesSet=[[$cor0]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -8514,12 +8514,12 @@ LogicalProject(SAL=[$5]) LogicalFilter(condition=[OR(IS NOT TRUE(OR(IS NOT NULL($13), <($11, $10))), IS TRUE(=($10, 0)))]) LogicalJoin(condition=[AND(=($0, $12), =($2, $14))], joinType=[left]) LogicalJoin(condition=[=($2, $9)], joinType=[left]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0}], c=[COUNT()], ck=[COUNT($1)]) LogicalProject(NAME=[$1], DEPTNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalProject(DEPTNO=[$0], i=[true], NAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -8529,15 +8529,15 @@ LogicalProject(SAL=[$5]) LogicalFilter(condition=[OR(IS NOT TRUE(OR(IS NOT NULL($12), <($10, $9))), IS TRUE(=($9, 0)))]) LogicalCorrelate(correlation=[$cor0], joinType=[left], requiredColumns=[{2}]) LogicalCorrelate(correlation=[$cor0], joinType=[left], requiredColumns=[{2}]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{}], c=[COUNT()], ck=[COUNT($0)]) LogicalProject(DEPTNO=[$0]) LogicalFilter(condition=[=($cor0.JOB, $1)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalFilter(condition=[=($cor0.EMPNO, $0)]) LogicalProject(DEPTNO=[$0], i=[true]) LogicalFilter(condition=[=($cor0.JOB, $1)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -8552,9 +8552,9 @@ or empno NOT IN (select deptno from dept)]]> LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[OR(=($5, 4), NOT(IN($0, { LogicalProject(DEPTNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) })))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -8563,9 +8563,9 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[OR(=($5, 4), NOT(CASE(IS NOT NULL($10), true, false)))]) LogicalJoin(condition=[=($0, $9)], joinType=[left]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) 
LogicalProject(DEPTNO=[$0], i=[true]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -8574,9 +8574,9 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[OR(=($5, 4), NOT(CASE(IS NOT NULL($10), true, false)))]) LogicalJoin(condition=[=($0, $9)], joinType=[left]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(DEPTNO=[$0], i=[true]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -8594,9 +8594,9 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalProject(EMPNO=[$1]) LogicalFilter(condition=[AND(>($2, 2), =($cor0.ENAME, $0))]) LogicalProject(ENAME=[$1], EMPNO=[$0], R=[$5]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) }))], variablesSet=[[$cor0]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -8606,17 +8606,17 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalFilter(condition=[OR(IS NOT TRUE(OR(IS NOT NULL($12), <($10, $9))), IS TRUE(=($9, 0)))]) LogicalCorrelate(correlation=[$cor0], joinType=[left], requiredColumns=[{1}]) LogicalCorrelate(correlation=[$cor0], joinType=[left], requiredColumns=[{1}]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{}], c=[COUNT()], ck=[COUNT($0)]) LogicalProject(EMPNO=[$1]) LogicalFilter(condition=[AND(>($2, 2), =($cor0.ENAME, $0))]) LogicalProject(ENAME=[$1], EMPNO=[$0], R=[$5]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalFilter(condition=[=($cor0.EMPNO, $0)]) LogicalProject(EMPNO=[$1], i=[true]) LogicalFilter(condition=[AND(>($2, 2), =($cor0.ENAME, $0))]) LogicalProject(ENAME=[$1], EMPNO=[$0], R=[$5]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -8625,16 +8625,16 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalFilter(condition=[OR(IS NOT TRUE(OR(IS NOT NULL($13), <($11, $10))), IS TRUE(=($10, 0)))]) LogicalJoin(condition=[AND(=($0, $12), =($1, $14))], joinType=[left]) LogicalJoin(condition=[=($1, $9)], joinType=[left]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0}], c=[COUNT()], ck=[COUNT($1)]) LogicalProject(ENAME=[$0], EMPNO=[$1]) LogicalFilter(condition=[>($2, 2)]) LogicalProject(ENAME=[$1], EMPNO=[$0], R=[$5]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$1], i=[true], ENAME=[$0]) LogicalFilter(condition=[>($2, 2)]) LogicalProject(ENAME=[$1], EMPNO=[$0], R=[$5]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -8646,7 +8646,7 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ @@ -8768,13 +8768,13 @@ LogicalProject(EMPNO=[$0]) LogicalProject(EMPNO=[$0], $f0=[$2]) LogicalCorrelate(correlation=[$cor2], joinType=[left], requiredColumns=[{1}]) LogicalProject(EMPNO=[$0], DEPTNO=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject($f0=[$0]) 
LogicalAggregate(group=[{}], agg#0=[MIN($0)]) LogicalProject($f0=[true]) LogicalFilter(condition=[=($cor2.DEPTNO, $0)]) LogicalProject(DEPTNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -8811,14 +8811,14 @@ LogicalCorrelate(correlation=[$cor0], joinType=[anti], requiredColumns=[{0}]) ($3, 0), >(CASE(>($3, 0), /($7, $3), null:INTEGER), 1))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> ($3, 0), CASE(>($3, 0), >(/($7, $3), 1), null:BOOLEAN))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> diff --git a/plugins/sql-language/src/test/resources/org/polypheny/db/sql/HepPlannerTest.xml b/plugins/sql-language/src/test/resources/org/polypheny/db/sql/HepPlannerTest.xml index b613898eb6..c32db96cbf 100644 --- a/plugins/sql-language/src/test/resources/org/polypheny/db/sql/HepPlannerTest.xml +++ b/plugins/sql-language/src/test/resources/org/polypheny/db/sql/HepPlannerTest.xml @@ -24,17 +24,17 @@ LogicalProject(DEPTNO=[$0]) LogicalJoin(condition=[true], joinType=[inner]) LogicalProject(DEPTNO=[$0], NAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalProject(DEPTNO=[$0], NAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -47,11 +47,11 @@ LogicalProject(DEPTNO=[$0]) LogicalUnion(all=[false]) LogicalUnion(all=[false]) LogicalProject(NAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalProject(ENAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(ENAME=[$0]) - LogicalScan(table=[[CATALOG, SALES, BONUS]]) + LogicalScan(entity=[[CATALOG, SALES, BONUS]]) ]]> @@ -61,11 +61,11 @@ LogicalAggregate(group=[{0}]) LogicalAggregate(group=[{0}]) LogicalUnion(all=[true]) LogicalProject(NAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalProject(ENAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(ENAME=[$0]) - LogicalScan(table=[[CATALOG, SALES, BONUS]]) + LogicalScan(entity=[[CATALOG, SALES, BONUS]]) ]]> @@ -78,11 +78,11 @@ LogicalAggregate(group=[{0}]) LogicalUnion(all=[false]) LogicalUnion(all=[false]) LogicalProject(NAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalProject(ENAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(ENAME=[$0]) - LogicalScan(table=[[CATALOG, SALES, BONUS]]) + LogicalScan(entity=[[CATALOG, SALES, BONUS]]) ]]> @@ -91,11 +91,11 @@ LogicalAggregate(group=[{0}]) LogicalUnion(all=[true]) LogicalUnion(all=[false]) LogicalProject(NAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalProject(ENAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(ENAME=[$0]) - LogicalScan(table=[[CATALOG, SALES, BONUS]]) + LogicalScan(entity=[[CATALOG, SALES, BONUS]]) ]]> @@ -108,11 +108,11 @@ LogicalAggregate(group=[{0}]) LogicalUnion(all=[false]) LogicalUnion(all=[false]) LogicalProject(NAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalProject(ENAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) 
+ LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(ENAME=[$0]) - LogicalScan(table=[[CATALOG, SALES, BONUS]]) + LogicalScan(entity=[[CATALOG, SALES, BONUS]]) ]]> @@ -121,11 +121,11 @@ LogicalUnion(all=[false]) LogicalAggregate(group=[{0}]) LogicalUnion(all=[true]) LogicalProject(NAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalProject(ENAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(ENAME=[$0]) - LogicalScan(table=[[CATALOG, SALES, BONUS]]) + LogicalScan(entity=[[CATALOG, SALES, BONUS]]) ]]> @@ -138,11 +138,11 @@ LogicalUnion(all=[false]) LogicalIntersect(all=[false]) LogicalUnion(all=[false]) LogicalProject(NAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalProject(ENAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(FNAME=[$1]) - LogicalScan(table=[[CATALOG, CUSTOMER, CONTACT]]) + LogicalScan(entity=[[CATALOG, CUSTOMER, CONTACT]]) ]]> @@ -151,12 +151,12 @@ LogicalIntersect(all=[false]) LogicalUnion(all=[false]) LogicalProject(NAME=[CAST($0):VARCHAR(20) NOT NULL]) LogicalProject(NAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalProject(ENAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(FNAME=[CAST($0):VARCHAR(20) NOT NULL]) LogicalProject(FNAME=[$1]) - LogicalScan(table=[[CATALOG, CUSTOMER, CONTACT]]) + LogicalScan(entity=[[CATALOG, CUSTOMER, CONTACT]]) ]]> @@ -167,13 +167,13 @@ LogicalIntersect(all=[false]) @@ -185,14 +185,14 @@ LogicalCalc(expr#0..8=[{inputs}], expr#9=[LOWER($t1)], expr#10=[UPPER($t9)], EXP @@ -204,13 +204,13 @@ LogicalProject(NAME=[$1]) diff --git a/plugins/sql-language/src/test/resources/org/polypheny/db/sql/language/SqlToAlgConverterTest.xml b/plugins/sql-language/src/test/resources/org/polypheny/db/sql/language/SqlToAlgConverterTest.xml index 04f712fdf3..f99f79ab01 100644 --- a/plugins/sql-language/src/test/resources/org/polypheny/db/sql/language/SqlToAlgConverterTest.xml +++ b/plugins/sql-language/src/test/resources/org/polypheny/db/sql/language/SqlToAlgConverterTest.xml @@ -31,7 +31,7 @@ LogicalProject(EXPR$0=[1]) @@ -43,7 +43,7 @@ LogicalAggregate(group=[{0}]) @@ -55,7 +55,7 @@ LogicalAggregate(group=[{0}]) @@ -68,7 +68,7 @@ LogicalAggregate(group=[{0, 1}]) LogicalProject(D=[$0], EXPR$1=[+($0, $1)]) LogicalAggregate(group=[{0, 1}]) LogicalProject(D=[+($7, $0)], MGR=[$3]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -81,7 +81,7 @@ from emp group by d,mgr]]> @@ -95,7 +95,7 @@ LogicalAggregate(group=[{}], EXPR$0=[COUNT()]) LogicalProject($f0=[0]) LogicalAggregate(group=[{0}]) LogicalProject($f0=[SUBSTRING($1, 2, 3)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -109,7 +109,7 @@ LogicalAggregate(group=[{}], EXPR$0=[COUNT()]) LogicalFilter(condition=[>($0, 1)]) LogicalAggregate(group=[{}], E=[COUNT()]) LogicalProject(EMPNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -124,7 +124,7 @@ LogicalFilter(condition=[>($0, 1)]) ($7, 10), >($7, 20))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -133,7 +133,7 @@ LogicalProject(EMPNO=[$0]) @@ -146,7 +146,7 @@ 
LogicalAggregate(group=[{0}], SUM_SAL=[SUM($1)]) LogicalProject(EXPR$0=[+($0, 4)], EXPR$1=[$1], EXPR$2=[$2], EXPR$3=[*(2, $3)]) LogicalAggregate(group=[{0}], EXPR$1=[SUM($1)], EXPR$2=[SUM($2)], agg#2=[COUNT()]) LogicalProject(DEPTNO=[$7], SAL=[$5], $f2=[+(3, $5)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -162,7 +162,7 @@ LogicalProject(EXPR$0=[$0]) LogicalFilter(condition=[>($1, 10)]) LogicalAggregate(group=[{}], EXPR$0=[SUM($0)], agg#1=[SUM($1)]) LogicalProject($f0=[+($5, $5)], SAL=[$5]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -175,7 +175,7 @@ LogicalProject(EXPR$0=[$0]) LogicalProject(NAME=[$0]) LogicalAggregate(group=[{0}]) LogicalProject(NAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -188,7 +188,7 @@ LogicalProject(NAME=[$0]) LogicalProject(NAME=[$0], FOO=[$2]) LogicalAggregate(group=[{0, 1}], FOO=[COUNT()]) LogicalProject(NAME=[$1], DEPTNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -203,7 +203,7 @@ group by name, deptno, name)]]> @@ -214,39 +214,39 @@ group by deptno]]> - + - + - + @@ -264,9 +264,9 @@ LogicalAggregate(group=[{}], EXPR$0=[SUM($0)]) LogicalProject($f0=[$SCALAR_QUERY({ LogicalProject(EXPR$0=[CHAR_LENGTH($1)]) LogicalFilter(condition=[=($0, $cor0.EMPNO)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) })]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -287,11 +287,11 @@ LogicalProject(DEPTNO=[$7], EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE LogicalAggregate(group=[{}], EXPR$0=[MAX($0)]) LogicalProject(NAME=[$1]) LogicalFilter(condition=[=($0, $cor0.DEPTNO0)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) }))], variablesSet=[[$cor0]]) LogicalJoin(condition=[=($7, $9)], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -316,7 +316,7 @@ LogicalProject(DEPTNO=[$0], NAME=[$1]) Uncollect Collect(field=[EXPR$0]) LogicalProject(DEPTNO=[$0], NAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -327,7 +327,7 @@ LogicalProject(DEPTNO=[$0], NAME=[$1]) (COUNT(DISTINCT $7) OVER (ROWS BETWEEN 10 PRECEDING AND CURRENT ROW), 0), $SUM0(DISTINCT $7) OVER (ROWS BETWEEN 10 PRECEDING AND CURRENT ROW), null:INTEGER)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -342,7 +342,7 @@ from emp @@ -363,7 +363,7 @@ window w as (partition by productId)]]> @@ -375,7 +375,7 @@ LogicalProject(EXPR$0=[ITEM(ITEM($3, 1).DETAIL.SKILLS, +(2, 3)).DESC]) @@ -389,7 +389,7 @@ LogicalProject(DEPTNO=[$0], NAME=[$1]) Uncollect Collect(field=[EXPR$0]) LogicalProject(DEPTNO=[$0], NAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -403,7 +403,7 @@ LogicalProject(DEPTNO=[$0], NAME=[$1], ORDINALITY=[$2]) Uncollect(withOrdinality=[true]) Collect(field=[EXPR$0]) LogicalProject(DEPTNO=[$0], NAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -415,7 +415,7 @@ LogicalProject(EXPR$0=[$1]) LogicalValues(tuples=[[{ true }]]) Collect(field=[EXPR$0]) 
LogicalProject(DEPTNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -427,7 +427,7 @@ LogicalProject(EXPR$0=[$1]) @@ -441,7 +441,7 @@ LogicalProject(EXPR$0=['a'], EXPR$1=[$SLICE($2)]) @@ -478,11 +478,11 @@ from dept]]> LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[IS NOT NULL($9)]) LogicalJoin(condition=[true], joinType=[left]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{}], agg#0=[MIN($0)]) LogicalProject($f0=[true]) LogicalFilter(condition=[=($0, 55)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -496,11 +496,11 @@ where exists (select 1 from dept where deptno=55)]]> LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[IS NOT NULL($9)]) LogicalCorrelate(correlation=[$cor0], joinType=[left], requiredColumns=[{7}]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{}], agg#0=[MIN($0)]) LogicalProject($f0=[true]) LogicalFilter(condition=[=($cor0.DEPTNO, $0)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -515,7 +515,7 @@ LogicalProject(EXPR$0=[$0]) Uncollect LogicalProject(EXPR$0=[$SLICE($2)]) LogicalCorrelate(correlation=[$cor0], joinType=[inner], requiredColumns=[{0}]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) Collect(field=[EXPR$0]) LogicalUnion(all=[true]) LogicalProject(EXPR$0=[$cor0.DEPTNO]) @@ -531,10 +531,10 @@ LogicalProject(EXPR$0=[$0]) @@ -547,7 +547,7 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ @@ -573,9 +573,9 @@ LogicalProject(EXPR$0=[ELEMENT($SLICE($0))]) @@ -587,9 +587,9 @@ LogicalUnion(all=[true]) @@ -603,7 +603,7 @@ LogicalUnion(all=[true]) LogicalUnion(all=[true]) LogicalValues(tuples=[[{ 10 }, { 20 }]]) LogicalProject(EXPR$0=[34]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalUnion(all=[true]) LogicalProject(EXPR$0=[30]) LogicalValues(tuples=[[{ 0 }]]) @@ -623,14 +623,14 @@ union all values (30), (45 + 10)]]> ($0, 20)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalValues(tuples=[[{ 45 }, { 67 }]]) ]]> @@ -691,7 +691,7 @@ LogicalProject(EXPR$0=[NOT(LIKE('a', 'b', 'c'))]) (COUNT($5) OVER (PARTITION BY $2 ORDER BY $4 ROWS BETWEEN 2 PRECEDING AND CURRENT ROW), 0), $SUM0($5) OVER (PARTITION BY $2 ORDER BY $4 ROWS BETWEEN 2 PRECEDING AND CURRENT ROW), null:INTEGER)], EXPR$1=[CASE(>(COUNT($7) OVER (PARTITION BY $2 ORDER BY $4 ROWS BETWEEN 2 PRECEDING AND CURRENT ROW), 0), $SUM0($7) OVER (PARTITION BY $2 ORDER BY $4 ROWS BETWEEN 2 PRECEDING AND CURRENT ROW), null:INTEGER)], EXPR$2=[CASE(>=(COUNT() OVER (PARTITION BY $2 ORDER BY $4 ROWS BETWEEN 3 PRECEDING AND CURRENT ROW), 2), CASE(>(COUNT($7) OVER (PARTITION BY $2 ORDER BY $4 ROWS BETWEEN 3 PRECEDING AND CURRENT ROW), 0), $SUM0($7) OVER (PARTITION BY $2 ORDER BY $4 ROWS BETWEEN 3 PRECEDING AND CURRENT ROW), null:INTEGER), null:NULL)]) LogicalFilter(condition=[>(-($7, $5), 999)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -723,7 +723,7 @@ LogicalProject(EXPR$0=[CHAR_LENGTH('foo')]) @@ 
-734,7 +734,7 @@ LogicalProject(EXPR$0=[ROW(ROW(1, 2), ROW(3, 4, 5)).EXPR$1.EXPR$2]) @@ -748,7 +748,7 @@ from ( @@ -763,7 +763,7 @@ lateral (select t2."$unnest" as fake_col3 (COUNT($5) OVER (PARTITION BY $2 ORDER BY $4 ROWS BETWEEN 2 PRECEDING AND CURRENT ROW), 0), $SUM0($5) OVER (PARTITION BY $2 ORDER BY $4 ROWS BETWEEN 2 PRECEDING AND CURRENT ROW), null:INTEGER)], EXPR$1=[CAST(/(CASE(>(COUNT($5) OVER (PARTITION BY $2 ORDER BY $4 ROWS BETWEEN 2 PRECEDING AND CURRENT ROW), 0), CAST($SUM0($5) OVER (PARTITION BY $2 ORDER BY $4 ROWS BETWEEN 2 PRECEDING AND CURRENT ROW)):INTEGER, null:INTEGER), COUNT($5) OVER (PARTITION BY $2 ORDER BY $4 ROWS BETWEEN 2 PRECEDING AND CURRENT ROW))):INTEGER]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -789,7 +789,7 @@ window w1 as (partition by job order by hiredate rows 2 preceding)]]> @@ -806,7 +806,7 @@ window w1 as (partition by job order by hiredate rows 2 preceding)]]> @@ -819,7 +819,7 @@ from emp]]> @@ -832,7 +832,7 @@ from emp]]> @@ -844,7 +844,7 @@ LogicalProject(EXPR$0=[*(CAST($0):INTEGER NOT NULL, 3660000:INTERVAL HOUR TO MIN @@ -858,24 +858,24 @@ LogicalAggregate(group=[{0}]) LogicalProject(EXPR$0=[$1]) LogicalAggregate(group=[{0}], EXPR$0=[SUM($1)]) LogicalProject(DEPTNO=[$7], SAL=[$5]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> - + - + @@ -904,7 +904,7 @@ LogicalProject(EMPNO=[$0]) @@ -918,25 +918,25 @@ LogicalProject(EMPNO=[$0]) - + @@ -948,7 +948,7 @@ LogicalProject(NAME=[$0]) @@ -960,7 +960,7 @@ LogicalSort(sort0=[$0], dir0=[ASC]) @@ -972,7 +972,7 @@ LogicalSort(sort0=[$0], dir0=[DESC-nulls-last]) @@ -984,7 +984,7 @@ LogicalSort(sort0=[$0], dir0=[DESC]) @@ -996,7 +996,7 @@ LogicalSort(sort0=[$1], dir0=[ASC]) @@ -1009,7 +1009,7 @@ LogicalSort(sort0=[$1], dir0=[DESC]) LogicalProject(EXPR$0=[$0]) LogicalSort(sort0=[$1], sort1=[$0], dir0=[ASC], dir1=[DESC]) LogicalProject(EXPR$0=[+($0, 1)], DEPTNO=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1022,7 +1022,7 @@ LogicalProject(EXPR$0=[$0]) LogicalProject(EXPR$0=[$0], DEPTNO=[$1], EMPNO=[$2]) LogicalSort(sort0=[$3], dir0=[DESC]) LogicalProject(EXPR$0=[+($0, 1)], DEPTNO=[$7], EMPNO=[$0], EXPR$3=[-1]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1035,7 +1035,7 @@ LogicalProject(EXPR$0=[$0], DEPTNO=[$1], EMPNO=[$2]) LogicalProject(EXPR$0=[$0], DEPTNO=[$1], EMPNO=[$2]) LogicalSort(sort0=[$3], dir0=[DESC]) LogicalProject(EXPR$0=[+($0, 1)], DEPTNO=[$7], EMPNO=[$0], EXPR$3=[+(1, 2)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1048,7 +1048,7 @@ LogicalProject(EXPR$0=[$0], DEPTNO=[$1], EMPNO=[$2]) ProjectRel(EMPNO=[$0], Y=[$1]) SortRel(sort0=[$2], dir0=[Ascending]) ProjectRel(EMPNO=[+($0, 1)], Y=[-($0, 2)], EXPR$2=[+($0, 3)]) - TableAccessRel(table=[[SALES, EMP]]) + TableAccessRel(entity=[[SALES, EMP]]) ]]> @@ -1062,7 +1062,7 @@ from emp order by y + 3]]> LogicalProject(X=[$0], Y=[$1]) LogicalSort(sort0=[$2], dir0=[ASC]) LogicalProject(X=[+($0, 1)], Y=[-($0, 2)], EXPR$2=[+(-($0, 2), 3)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1076,7 +1076,7 @@ from emp order by empno + 3]]> LogicalProject(EMPNO=[$0], Y=[$1]) LogicalSort(sort0=[$2], dir0=[ASC]) LogicalProject(EMPNO=[+($0, 1)], Y=[-($0, 2)], EXPR$2=[+(+($0, 1), 3)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + 
LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1093,9 +1093,9 @@ LogicalSort(sort0=[$1], sort1=[$0], dir0=[DESC], dir1=[ASC]) LogicalProject(EMPNO=[$0], SAL=[$1]) LogicalUnion(all=[true]) LogicalProject(EMPNO=[$0], SAL=[$5]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(DEPTNO=[$0], DEPTNO0=[$0]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -1113,9 +1113,9 @@ LogicalProject(EMPNO=[$0], SAL=[$1]) LogicalProject(EMPNO=[$0], SAL=[$1], EXPR$2=[+(*($0, $1), 2)]) LogicalUnion(all=[true]) LogicalProject(EMPNO=[$0], SAL=[$5]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(DEPTNO=[$0], DEPTNO0=[$0]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -1133,7 +1133,7 @@ LogicalProject(DEPTNO=[$0], EXPR$1=[$1]) LogicalProject(DEPTNO=[$0], EXPR$1=[$1], EXPR$2=[*($0, $2)], EXPR$3=[$3]) LogicalAggregate(group=[{0}], EXPR$1=[COUNT()], agg#1=[SUM($1)], agg#2=[MIN($2)]) LogicalProject(DEPTNO=[$7], SAL=[$5], EMPNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1148,7 +1148,7 @@ LogicalProject(EMPNO=[$0], EXPR$1=[$1]) LogicalSort(sort0=[$2], dir0=[ASC]) LogicalAggregate(group=[{0, 1, 2}]) LogicalProject(EMPNO=[$0], EXPR$1=[+($7, 1)], EXPR$2=[+(+($7, 1), $0)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1163,8 +1163,8 @@ LogicalProject(EMPNO=[$0]) LogicalSort(sort0=[$1], sort1=[$2], dir0=[DESC], dir1=[ASC]) LogicalProject(EMPNO=[$0], EXPR$1=[+($5, $0)], EXPR$2=[*($5, $0)]) LogicalJoin(condition=[true], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -1181,9 +1181,9 @@ LogicalSort(sort0=[$1], dir0=[ASC]) LogicalProject(EMPNO=[$0], SAL=[$1]) LogicalUnion(all=[true]) LogicalProject(EMPNO=[$0], SAL=[$5]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(DEPTNO=[$0], DEPTNO0=[$0]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -1195,7 +1195,7 @@ LogicalSort(sort0=[$1], dir0=[ASC]) ($0, 5)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1213,8 +1213,8 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalFilter(condition=[>($0, 5)]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], DEPTNO0=[$9], NAME=[$10]) LogicalJoin(condition=[=($7, $9)], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -1227,7 +1227,7 @@ from emp @@ -1241,7 +1241,7 @@ from emp @@ -1267,7 +1267,7 @@ window w1 as (partition by job order by hiredate rows 2 preceding)]]> (COUNT($5) OVER (PARTITION BY $2 ORDER BY $4 ROWS BETWEEN 2 PRECEDING AND CURRENT ROW), 0), $SUM0($5) OVER (PARTITION BY $2 ORDER BY $4 ROWS BETWEEN 2 PRECEDING AND CURRENT ROW), null:INTEGER)], EXPR$1=[/(CASE(>(COUNT(CAST($5):REAL NOT NULL) OVER (PARTITION BY $2 ORDER BY $4 ROWS BETWEEN 2 PRECEDING AND CURRENT ROW), 0), CAST($SUM0(CAST($5):REAL NOT NULL) OVER 
(PARTITION BY $2 ORDER BY $4 ROWS BETWEEN 2 PRECEDING AND CURRENT ROW)):REAL, null:REAL), COUNT(CAST($5):REAL NOT NULL) OVER (PARTITION BY $2 ORDER BY $4 ROWS BETWEEN 2 PRECEDING AND CURRENT ROW))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1279,8 +1279,8 @@ LogicalProject(EXPR$0=[CASE(>(COUNT($5) OVER (PARTITION BY $2 ORDER BY $4 ROWS B @@ -1292,9 +1292,9 @@ LogicalProject(DEPTNO=[$7], EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE @@ -1307,8 +1307,8 @@ JOIN dept on emp.deptno = dept.deptno]]> @@ -1321,8 +1321,8 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ @@ -1335,7 +1335,7 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[>($0, 5)]) Sample(mode=[bernoulli], rate=[0.5], repeatableSeed=[-]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1355,8 +1355,8 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], DEPTNO0=[$9], NAME=[$10]) LogicalJoin(condition=[=($7, $9)], joinType=[inner]) Sample(mode=[bernoulli], rate=[0.1], repeatableSeed=[1]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -1369,7 +1369,7 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[>($0, 5)]) Sample(mode=[system], rate=[0.5], repeatableSeed=[-]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1389,8 +1389,8 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], DEPTNO0=[$9], NAME=[$10]) LogicalJoin(condition=[=($7, $9)], joinType=[inner]) Sample(mode=[system], rate=[0.1], repeatableSeed=[1]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -1404,7 +1404,7 @@ LogicalProject(EXPR$0=[$0], DEPTNO=[$1], EXPR$2=[$0]) LogicalAggregate(group=[{0, 1}]) LogicalProject(EXPR$0=[+($5, 5)], DEPTNO=[$7]) LogicalFilter(condition=[<($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1419,7 +1419,7 @@ where empno > 10]]> LogicalAggregate(group=[{}], EXPR$0=[COUNT()], EXPR$1=[SUM($0)]) LogicalProject(SAL=[$5]) LogicalFilter(condition=[>($0, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1435,7 +1435,7 @@ where c like 'X%']]> LogicalProject(EXPR$0=[+($0, $1)]) LogicalFilter(condition=[LIKE($2, 'X%')]) LogicalProject(DEPTNO=[$0], UNO=[1], NAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -1454,7 +1454,7 @@ LogicalProject(A=[$0], B=[$1], C=[$2], DEPTNO=[$3], NAME=[$4]) LogicalJoin(condition=[=($3, $2)], joinType=[inner]) 
LogicalProject(A=[$2], B=[$1], C=[$0]) LogicalValues(tuples=[[{ 1, 2, 3 }]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -1466,7 +1466,7 @@ from (select row(row(1)) r from dept) t]]> @@ -1482,10 +1482,10 @@ LogicalProject(GRADE=[$1]) LogicalJoin(condition=[true], joinType=[inner]) LogicalUnion(all=[false]) LogicalProject(EMPNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(DEPTNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) - LogicalScan(table=[[CATALOG, SALES, SALGRADE]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, SALGRADE]]) ]]> @@ -1497,8 +1497,8 @@ LogicalProject(GRADE=[$1]) @@ -1511,9 +1511,9 @@ FROM emp NATURAL JOIN (SELECT deptno AS foo, name FROM dept) AS d]]> @@ -1527,9 +1527,9 @@ NATURAL JOIN (SELECT deptno, name AS ename FROM dept) AS d]]> @@ -1542,7 +1542,7 @@ order by empno offset 10 rows fetch next 5 rows only]]> @@ -1555,7 +1555,7 @@ order by empno offset ? rows fetch next ? rows only]]> @@ -1567,7 +1567,7 @@ LogicalSort(sort0=[$0], dir0=[ASC], offset=[?0:INTEGER], fetch=[?1:INTEGER]) @@ -1579,7 +1579,7 @@ LogicalSort(fetch=[5]) @@ -1592,7 +1592,7 @@ offset 10 rows fetch next 5 rows only]]> @@ -1605,7 +1605,7 @@ offset ? rows fetch next ? rows only]]> @@ -1617,7 +1617,7 @@ LogicalSort(offset=[?0:INTEGER], fetch=[?1:INTEGER]) @@ -1629,7 +1629,7 @@ LogicalSort(offset=[10]) @@ -1643,8 +1643,8 @@ using (n_nationkey)]]> @@ -1659,7 +1659,7 @@ and (deptno = 8 or empno < 100)]]> ($7, 5), OR(=($7, 8), <($0, 100)))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1675,9 +1675,9 @@ join emp as e2 using (empno)]]> LogicalProject(EMPNO=[$0], DEPTNO=[$7], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], SLACKER=[$8], NAME=[$10], ENAME0=[$12], JOB0=[$13], MGR0=[$14], HIREDATE0=[$15], SAL0=[$16], COMM0=[$17], DEPTNO1=[$18], SLACKER0=[$19]) LogicalJoin(condition=[=($0, $11)], joinType=[inner]) LogicalJoin(condition=[=($7, $9)], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1691,9 +1691,9 @@ JOIN dept on emp.deptno + 1 = dept.deptno - 2]]> LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], DEPTNO0=[$10], NAME=[$11]) LogicalJoin(condition=[=($9, $12)], joinType=[inner]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], $f9=[+($7, 1)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(DEPTNO=[$0], NAME=[$1], $f2=[-($0, 2)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -1705,7 +1705,7 @@ select empno as "e", deptno as d, 1 as "e" from EMP)]]> @@ -1717,7 +1717,7 @@ select * from emp2]]> @@ -1735,9 +1735,9 @@ LogicalUnion(all=[true]) LogicalFilter(condition=[<($7, 30)]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[>($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(DEPTNO=[$7]) - 
LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1753,13 +1753,13 @@ where exists ( LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[IS NOT NULL($9)]) LogicalCorrelate(correlation=[$cor1], joinType=[left], requiredColumns=[{7}]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{}], agg#0=[MIN($0)]) LogicalProject($f0=[true]) LogicalFilter(condition=[<=($0, $cor1.DEPTNO)]) LogicalProject(DEPTNO=[$0], NAME=[$1]) LogicalFilter(condition=[>=($0, $cor1.DEPTNO)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -1773,12 +1773,12 @@ from emp]]> ($0, 10)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -1790,7 +1790,7 @@ LogicalProject(C=[$9]) @@ -1804,8 +1804,8 @@ JOIN dept on dept.deptno = emp.deptno + 0]]> LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], DEPTNO0=[$10], NAME=[$11]) LogicalJoin(condition=[=($10, $9)], joinType=[inner]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], $f9=[+($7, 0)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -1819,8 +1819,8 @@ JOIN dept on emp.deptno + 0 = dept.deptno]]> LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], DEPTNO0=[$10], NAME=[$11]) LogicalJoin(condition=[=($9, $10)], joinType=[inner]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], $f9=[+($7, 0)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -1833,9 +1833,9 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ @@ -1853,18 +1853,18 @@ LogicalProject(D2=[$0], D3=[$1]) LogicalFilter(condition=[IS NOT NULL($2)]) LogicalCorrelate(correlation=[$cor3], joinType=[left], requiredColumns=[{0, 1}]) LogicalProject(D2=[+(2, $7)], D3=[+(3, $7)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{}], agg#0=[MIN($0)]) LogicalProject($f0=[true]) LogicalFilter(condition=[AND(=($0, $cor3.D2), IS NOT NULL($1))]) LogicalCorrelate(correlation=[$cor0], joinType=[left], requiredColumns=[{0}]) LogicalProject(D1=[+($0, 1)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalAggregate(group=[{}], agg#0=[MIN($0)]) LogicalProject($f0=[true]) LogicalFilter(condition=[AND(=($0, $cor0.D1), =($1, $cor0.D1), =($2, $cor3.D3))]) LogicalProject(D4=[+($0, 4)], D5=[+($0, 5)], D6=[+($0, 6)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -1882,19 +1882,19 @@ LogicalProject(D2=[$0], D3=[$1]) LogicalProject(D2=[$0], D3=[$1], D1=[CAST($2):INTEGER], D6=[$3], $f2=[CAST($4):BOOLEAN]) LogicalJoin(condition=[AND(=($0, $2), =($1, $3))], joinType=[inner]) LogicalProject(D2=[+(2, $7)], D3=[+(3, $7)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + 
LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0, 1}], agg#0=[MIN($2)]) LogicalProject(D1=[$0], D6=[$2], $f0=[true]) LogicalFilter(condition=[IS NOT NULL($1)]) LogicalProject(D1=[$0], $f0=[$3], D6=[$2]) LogicalJoin(condition=[=($0, $1)], joinType=[left]) LogicalProject(D1=[+($0, 1)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalAggregate(group=[{0, 1}], agg#0=[MIN($2)]) LogicalProject(D4=[$0], D6=[$2], $f0=[true]) LogicalFilter(condition=[=($1, $0)]) LogicalProject(D4=[+($0, 4)], D5=[+($0, 5)], D6=[+($0, 6)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -1910,16 +1910,16 @@ where exists ( LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], DEPTNO0=[CAST($9):INTEGER], $f1=[CAST($10):BOOLEAN]) LogicalJoin(condition=[=($7, $9)], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0}], agg#0=[MIN($1)]) LogicalProject(DEPTNO0=[$2], $f0=[true]) LogicalFilter(condition=[<=($0, $2)]) LogicalProject(DEPTNO=[$0], NAME=[$1], DEPTNO0=[$2]) LogicalJoin(condition=[>=($0, $2)], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalAggregate(group=[{0}]) LogicalProject(DEPTNO=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -1933,10 +1933,10 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], DEPTNO0=[CAST($9):INTEGER], $f1=[CAST($10):BOOLEAN]) LogicalJoin(condition=[=($7, $9)], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0}], agg#0=[MIN($1)]) LogicalProject(DEPTNO=[$0], $f0=[true]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -1950,13 +1950,13 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[IS NOT NULL($9)]) LogicalCorrelate(correlation=[$cor0], joinType=[left], requiredColumns=[{7}]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{}], agg#0=[MIN($0)]) LogicalProject($f0=[true]) LogicalSort(fetch=[1]) LogicalProject(EXPR$0=[1]) LogicalFilter(condition=[=($cor0.DEPTNO, $0)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -1970,12 +1970,12 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], DEPTNO0=[CAST($9):INTEGER], $f1=[CAST($10):BOOLEAN]) LogicalJoin(condition=[=($7, $9)], joinType=[inner]) - 
LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0}], agg#0=[MIN($1)]) LogicalProject(DEPTNO=[$1], $f0=[true]) LogicalSort(fetch=[1]) LogicalProject(EXPR$0=[1], DEPTNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -1987,7 +1987,7 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalProject(EXPR$0=[CAST(/(SUM(+(+($1, *(2, $2)), *(3, $3))) OVER (PARTITION BY $0 RANGE BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING), COUNT(+(+($1, *(2, $2)), *(3, $3))) OVER (PARTITION BY $0 RANGE BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING))):INTEGER NOT NULL]) LogicalAggregate(group=[{0}], agg#0=[SUM($1)], agg#1=[MIN($2)], agg#2=[AVG($2)]) LogicalProject(DEPTNO=[$7], SAL=[$5], EMPNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2027,9 +2027,9 @@ LogicalUnion(all=[true]) LogicalFilter(condition=[<($0, 30)]) LogicalProject(DEPTNO=[$0], NAME=[$1]) LogicalFilter(condition=[>($0, 10)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalProject(DEPTNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -2042,7 +2042,7 @@ select * from emp2 order by deptno]]> @@ -2061,9 +2061,9 @@ LogicalProject(EMPNO=[$0], X=[$1]) LogicalProject(EMPNO=[$0], X=[$1], EXPR$2=[+($0, $1)]) LogicalUnion(all=[true]) LogicalProject(EMPNO=[$0], X=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], X=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2079,13 +2079,13 @@ LogicalProject(NAME=[$1], EXPR$1=[OR(AND(IS NOT NULL($6), <>($2, 0)), AND(<($3, LogicalJoin(condition=[=($4, $5)], joinType=[left]) LogicalProject(DEPTNO=[$0], NAME=[$1], $f0=[$2], $f1=[$3], DEPTNO0=[$0]) LogicalJoin(condition=[true], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalAggregate(group=[{}], agg#0=[COUNT()], agg#1=[COUNT($0)]) LogicalProject(EXPR$0=[CAST($7):INTEGER], $f1=[true]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0}], agg#0=[MIN($1)]) LogicalProject(EXPR$0=[CAST($7):INTEGER], $f1=[true]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2101,13 +2101,13 @@ LogicalProject(EMPNO=[$0], EXPR$1=[NOT(CASE(=($9, 0), false, IS NOT NULL($13), t LogicalJoin(condition=[=($11, $12)], joinType=[left]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], $f0=[$9], $f1=[$10], DEPTNO0=[$7]) LogicalJoin(condition=[true], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{}], agg#0=[COUNT()], agg#1=[COUNT($0)]) LogicalProject(EXPR$0=[CAST($0):INTEGER], $f1=[true]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalAggregate(group=[{0}], agg#0=[MIN($1)]) LogicalProject(EXPR$0=[CAST($0):INTEGER], $f1=[true]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -2121,10 +2121,10 @@ LogicalProject(EMPNO=[$0]) LogicalFilter(condition=[NOT(AND(IS TRUE($11), IS NOT NULL($9)))]) 
LogicalJoin(condition=[=($9, $10)], joinType=[left]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], DEPTNO0=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0}], agg#0=[MIN($1)]) LogicalProject(DEPTNO=[$0], $f1=[true]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -2139,10 +2139,10 @@ from emp]]> LogicalProject(EMPNO=[$0], EXPR$1=[IS NOT TRUE($11)]) LogicalJoin(condition=[=($9, $10)], joinType=[left]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], DEPTNO0=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0}], agg#0=[MIN($1)]) LogicalProject(DEPTNO=[$0], $f1=[true]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -2153,7 +2153,7 @@ LogicalProject(EMPNO=[$0], EXPR$1=[IS NOT TRUE($11)]) @@ -2182,11 +2182,11 @@ from emp]]> LogicalProject(EMPNO=[$0], EXPR$1=[CAST(IS NOT TRUE($11)):BOOLEAN]) LogicalJoin(condition=[=($9, $10)], joinType=[left]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], DEPTNO0=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0}], agg#0=[MIN($1)]) LogicalProject(MGR=[$3], $f1=[true]) LogicalFilter(condition=[>($3, 5)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2201,11 +2201,11 @@ from emp]]> LogicalProject(EMPNO=[$0], EXPR$1=[CAST(IS NOT TRUE($11)):BOOLEAN]) LogicalJoin(condition=[=($9, $10)], joinType=[left]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], DEPTNO0=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0}], agg#0=[MIN($1)]) LogicalProject(MGR=[$3], $f1=[true]) LogicalFilter(condition=[IS NOT NULL($3)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2221,15 +2221,15 @@ from emp]]> LogicalProject(EMPNO=[$0], EXPR$1=[CAST(IS NOT TRUE($11)):BOOLEAN]) LogicalJoin(condition=[=($9, $10)], joinType=[left]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], DEPTNO0=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0}], agg#0=[MIN($1)]) LogicalProject(MGR=[$3], $f1=[true]) LogicalJoin(condition=[=($3, $9)], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0}]) LogicalProject(MGR=[$3]) LogicalFilter(condition=[=($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2245,13 +2245,13 @@ LogicalProject(EMPNO=[$0], EXPR$1=[NOT(CASE(=($9, 0), false, IS NOT NULL($13), t LogicalJoin(condition=[=($11, $12)], joinType=[left]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], $f0=[$9], $f1=[$10], DEPTNO0=[$7]) LogicalJoin(condition=[true], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) 
LogicalAggregate(group=[{}], agg#0=[COUNT()], agg#1=[COUNT($0)]) LogicalProject(MGR=[$3], $f1=[true]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0}], agg#0=[MIN($1)]) LogicalProject(MGR=[$3], $f1=[true]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2264,7 +2264,7 @@ LogicalProject(EMPNO=[$0], EXPR$1=[NOT(CASE(=($9, 0), false, IS NOT NULL($13), t LogicalProject(EXPR$0=[$1]) LogicalAggregate(group=[{0}], EXPR$0=[SUM($1)]) LogicalProject(DEPTNO=[$7], SAL=[$5]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2276,7 +2276,7 @@ LogicalProject(EXPR$0=[$1]) @@ -2288,7 +2288,7 @@ LogicalAggregate(group=[{}], EXPR$0=[SUM($0)]) @@ -2303,7 +2303,7 @@ order by 2]]> LogicalSort(sort0=[$1], dir0=[ASC]) LogicalAggregate(group=[{0, 1}], groups=[[{0, 1}, {0}]], EXPR$2=[SUM($2)]) LogicalProject(DEPTNO=[$7], ENAME=[$1], SAL=[$5]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2318,7 +2318,7 @@ order by 2]]> LogicalSort(sort0=[$1], dir0=[ASC]) LogicalAggregate(group=[{0, 1}], groups=[[{0, 1}, {0}, {}]], EXPR$2=[SUM($2)]) LogicalProject(DEPTNO=[$7], ENAME=[$1], SAL=[$5]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2333,7 +2333,7 @@ order by 2]]> LogicalSort(sort0=[$1], dir0=[ASC]) LogicalAggregate(group=[{0, 1}], groups=[[{0, 1}, {0}, {1}, {}]], EXPR$2=[SUM($2)]) LogicalProject(DEPTNO=[$7], ENAME=[$1], SAL=[$5]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2348,7 +2348,7 @@ order by 2]]> LogicalSort(sort0=[$1], dir0=[ASC]) LogicalAggregate(group=[{0, 1}], groups=[[{0, 1}, {0}, {1}, {}]], EXPR$2=[SUM($2)]) LogicalProject(DEPTNO=[$7], ENAME=[$1], SAL=[$5]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2366,7 +2366,7 @@ group by sal, LogicalProject(EXPR$0=[$3]) LogicalAggregate(group=[{0, 1, 2}], groups=[[{0, 1, 2}, {0, 1}, {0, 2}]], EXPR$0=[SUM($0)]) LogicalProject(SAL=[$5], DEPTNO=[$7], ENAME=[$1]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2481,7 +2481,7 @@ group by substring(ename FROM 1 FOR 1)]]> LogicalProject(EXPR$0=[$1]) LogicalAggregate(group=[{0}], EXPR$0=[COUNT()]) LogicalProject($f0=[SUBSTRING($1, 1, 1)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2513,7 +2513,7 @@ LogicalSort(sort0=[$1], dir0=[ASC]) LogicalProject(DEPTNO=[$1], EXPR$1=[$2], EXPR$2=[$3], EXPR$3=[$4]) LogicalAggregate(group=[{0, 1}], EXPR$1=[GROUPING($1)], EXPR$2=[COUNT()], EXPR$3=[GROUPING($0)]) LogicalProject(EMPNO=[$0], DEPTNO=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2529,7 +2529,7 @@ group by rollup(empno, deptno)]]> LogicalProject(DEPTNO=[$1], EXPR$1=[$2], EXPR$2=[$3], EXPR$3=[$4]) LogicalAggregate(group=[{0, 1}], groups=[[{0, 1}, {0}, {}]], EXPR$1=[GROUPING($1)], EXPR$2=[COUNT()], EXPR$3=[GROUPING($0)]) LogicalProject(EMPNO=[$0], DEPTNO=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2549,7 +2549,7 @@ LogicalProject(DEPTNO=[$1], EXPR$1=[$2], EXPR$2=[$3], EXPR$3=[$4]) (PREV(UP.$3, 0), PREV(UP.$3, 1))]], inputFields=[[EMPNO, ENAME, JOB, MGR, HIREDATE, SAL, COMM, DEPTNO, SLACKER]]) - LogicalScan(table=[[CATALOG, SALES, 
EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2573,7 +2573,7 @@ from emp match_recognize ( (PREV(UP.$3, 0), PREV(UP.$3, 1))]], inputFields=[[EMPNO, ENAME, JOB, MGR, HIREDATE, SAL, COMM, DEPTNO, SLACKER]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2597,7 +2597,7 @@ from emp match_recognize ( (PREV(UP.$3, 0), PREV(UP.$3, 1))]], inputFields=[[EMPNO, ENAME, JOB, MGR, HIREDATE, SAL, COMM, DEPTNO, SLACKER]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2622,7 +2622,7 @@ from emp match_recognize ( (PREV(UP.$3, 0), PREV(UP.$3, 1))]], inputFields=[[EMPNO, ENAME, JOB, MGR, HIREDATE, SAL, COMM, DEPTNO, SLACKER]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2642,7 +2642,7 @@ LogicalProject(JOB=[$0], SAL=[$1], EMPNO=[$2], ENAME=[$3], MGR=[$4], HIREDATE=[$ (PREV(UP.$3, 0), NEXT(PREV(UP.$3, 0), 1))]], inputFields=[[EMPNO, ENAME, JOB, MGR, HIREDATE, SAL, COMM, DEPTNO, SLACKER]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2665,7 +2665,7 @@ MATCH_RECOGNIZE ( (PREV(UP.$3, 0), LAST(DOWN.$3, 1))]], inputFields=[[EMPNO, ENAME, JOB, MGR, HIREDATE, SAL, COMM, DEPTNO, SLACKER]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2689,7 +2689,7 @@ MATCH_RECOGNIZE ( (PREV(UP.$3, 0), PREV(LAST(DOWN.$3, 1), 1))]], inputFields=[[EMPNO, ENAME, JOB, MGR, HIREDATE, SAL, COMM, DEPTNO, SLACKER]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2710,7 +2710,7 @@ LogicalProject(START_MGR=[$0], BOTTOM_MGR=[$1], END_MGR=[$2]) (PREV(UP.$3, 0), NEXT(PREV(UP.$3, 0), 1))]], inputFields=[[EMPNO, ENAME, JOB, MGR, HIREDATE, SAL, COMM, DEPTNO, SLACKER]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2738,7 +2738,7 @@ MATCH_RECOGNIZE ( (PREV(UP.$3, 0), 15), >(PREV(UP.$3, 0), 20))]], inputFields=[[EMPNO, ENAME, JOB, MGR, HIREDATE, SAL, COMM, DEPTNO, SLACKER]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2750,7 +2750,7 @@ LogicalProject(START_MGR=[$0], UP_DAYS=[$1], TOTAL_DAYS=[$2]) @@ -2761,7 +2761,7 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ @@ -2772,7 +2772,7 @@ LogicalProject(DEPTNO=[$0], NAME=[$1]) @@ -2783,7 +2783,7 @@ LogicalProject(EXPR$0=[+($0, $0)]) @@ -2794,7 +2794,7 @@ LogicalProject(DEPTNO=[$0], NAME=[$1], X=[$2]) @@ -2805,7 +2805,7 @@ LogicalProject(DEPTNO=[$0], X=[$2]) @@ -2817,7 +2817,7 @@ from EMP_MODIFIABLEVIEW extend (x varchar(5) not null)]]> @@ -2829,7 +2829,7 @@ from EMP_MODIFIABLEVIEW extend (x varchar(5) not null)]]> @@ -2841,7 +2841,7 @@ from EMP_MODIFIABLEVIEW extend (x int not null)]]> @@ -2854,7 +2854,7 @@ where deptno = ?]]> @@ -2865,7 +2865,7 @@ LogicalProject(DEPTNO=[$3]) @@ -2879,7 +2879,7 @@ GROUP BY (CASE WHEN emp.empno IN (3) THEN 0 ELSE 1 END)]]> @@ -2895,7 +2895,7 @@ GROUP BY empno, EXPR$2]]> LogicalProject(EMPNO=[$0], EXPR$2=[$1], EXPR$20=[$2]) LogicalAggregate(group=[{0, 1}], EXPR$2=[COUNT()]) LogicalProject(EMPNO=[$0], EXPR$2=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2907,7 +2907,7 @@ LogicalProject(EMPNO=[$0], EXPR$2=[$1], EXPR$20=[$2]) @@ -2922,9 +2922,9 @@ FROM emp]]> LogicalAggregate(group=[{}], EXPR$0=[SUM($0)]) LogicalProject($f0=[CASE(IN($7, { 
LogicalProject(DEPTNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) }), 1, 0)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -2937,10 +2937,10 @@ LogicalProject(DEPTNO=[$0]) LogicalAggregate(group=[{}], EXPR$0=[SUM($0)]) LogicalProject(EXPR$0=[$9]) LogicalJoin(condition=[true], joinType=[left]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{}], EXPR$0=[MIN($0)]) LogicalProject(DEPTNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -2953,7 +2953,7 @@ LogicalAggregate(group=[{}], EXPR$0=[SUM($0)]) LogicalDelta LogicalProject(PRODUCTID=[$1]) LogicalFilter(condition=[=($1, 10)]) - LogicalScan(table=[[CATALOG, SALES, ORDERS]]) + LogicalScan(entity=[[CATALOG, SALES, ORDERS]]) ]]> @@ -2969,7 +2969,7 @@ group by floor(rowtime to second)]]> LogicalDelta LogicalAggregate(group=[{0}], C=[COUNT()]) LogicalProject(ROWTIME=[FLOOR($0, FLAG(SECOND))]) - LogicalScan(table=[[CATALOG, SALES, ORDERS]]) + LogicalScan(entity=[[CATALOG, SALES, ORDERS]]) ]]> @@ -2985,7 +2985,7 @@ from orders]]> @@ -3001,7 +3001,7 @@ LogicalDelta LogicalProject(ROWTIME=[+($0, 7200000:INTERVAL HOUR)], PRODUCTID=[$1]) LogicalAggregate(group=[{0, 1}]) LogicalProject($f0=[TUMBLE($0, 7200000:INTERVAL HOUR)], PRODUCTID=[$1]) - LogicalScan(table=[[CATALOG, SALES, ORDERS]]) + LogicalScan(entity=[[CATALOG, SALES, ORDERS]]) ]]> @@ -3018,7 +3018,7 @@ LogicalDelta LogicalProject(ROWTIME=[$0], C=[$1]) LogicalAggregate(group=[{0}], C=[COUNT()]) LogicalProject($f0=[HOP($0, 3600000:INTERVAL HOUR, 10800000:INTERVAL HOUR)]) - LogicalScan(table=[[CATALOG, SALES, ORDERS]]) + LogicalScan(entity=[[CATALOG, SALES, ORDERS]]) ]]> @@ -3036,10 +3036,10 @@ LogicalProject(**=[$1]) LogicalFilter(condition=[NOT(AND(IS TRUE($4), IS NOT NULL($2)))]) LogicalJoin(condition=[=($2, $3)], joinType=[left]) LogicalProject(N_NAME=[$0], **=[$1], N_NAME0=[$0]) - LogicalScan(table=[[CATALOG, SALES, NATION]]) + LogicalScan(entity=[[CATALOG, SALES, NATION]]) LogicalAggregate(group=[{0}], agg#0=[MIN($1)]) LogicalProject(EXPR$0=[''], $f1=[true]) - LogicalScan(table=[[CATALOG, SALES, NATION]]) + LogicalScan(entity=[[CATALOG, SALES, NATION]]) ]]> @@ -3057,7 +3057,7 @@ LogicalDelta LogicalProject(ROWTIME=[$0], EXPR$1=[$0], C=[$1]) LogicalAggregate(group=[{0}], C=[COUNT()]) LogicalProject($f0=[SESSION($0, 3600000:INTERVAL HOUR)]) - LogicalScan(table=[[CATALOG, SALES, ORDERS]]) + LogicalScan(entity=[[CATALOG, SALES, ORDERS]]) ]]> @@ -3072,7 +3072,7 @@ group by (CASE WHEN (deptno IN (10, 20)) THEN 0 ELSE deptno END)]]> @@ -3087,7 +3087,7 @@ group by deptno]]> @@ -3102,7 +3102,7 @@ group by deptno]]> ($0, 1), <>($0, 2))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3117,11 +3117,11 @@ where deptno > (select min(deptno) * 2 + 10 from EMP)]]> LogicalProject(DEPTNO=[$7]) LogicalFilter(condition=[>($7, $9)]) LogicalJoin(condition=[true], joinType=[left]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EXPR$0=[+(*($0, 2), 10)]) LogicalAggregate(group=[{}], agg#0=[MIN($0)]) LogicalProject(DEPTNO=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3136,7 +3136,7 @@ where deptno > (values 10)]]> LogicalProject(DEPTNO=[$7]) LogicalFilter(condition=[>($7, $9)]) LogicalJoin(condition=[true], 
joinType=[left]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalValues(tuples=[[{ 10 }]]) ]]> @@ -3153,10 +3153,10 @@ from EMP order by deptno limit 1)]]> LogicalProject(DEPTNO=[$7]) LogicalFilter(condition=[>($7, $9)]) LogicalJoin(condition=[true], joinType=[left]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalSort(sort0=[$0], dir0=[ASC], fetch=[1]) LogicalProject(DEPTNO=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3170,7 +3170,7 @@ where deptno in (1, 2) or deptno in (1, 2)]]> @@ -3188,7 +3188,7 @@ LogicalProject(DEPTNO=[$0]) LogicalFilter(condition=[>(+($1, $2), 10)]) LogicalAggregate(group=[{0}], agg#0=[SUM($1)], agg#1=[SUM($2)]) LogicalProject(DEPTNO=[$7], $f1=[CASE(OR(=($7, 1), =($7, 2)), 0, 1)], $f2=[CASE(OR(=($7, 3), =($7, 4)), 0, 1)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3209,13 +3209,13 @@ LogicalProject(SAL=[$0]) LogicalJoin(condition=[=($0, $1)], joinType=[inner]) LogicalAggregate(group=[{0}]) LogicalProject(SAL=[$5]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0}]) LogicalProject(DEPTNO=[$0]) LogicalFilter(condition=[>($1, 0)]) LogicalAggregate(group=[{0}], agg#0=[SUM($0)]) LogicalProject(DEPTNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -3234,10 +3234,10 @@ LogicalProject(DEPTNO=[$0]) LogicalJoin(condition=[true], joinType=[left]) LogicalAggregate(group=[{0}], agg#0=[MAX($1)]) LogicalProject(DEPTNO=[$7], EMPNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{}], EXPR$0=[MIN($0)]) LogicalProject(EMPNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3255,10 +3255,10 @@ LogicalProject(DEPTNO=[$0], B=[>($1, $2)]) LogicalJoin(condition=[true], joinType=[left]) LogicalAggregate(group=[{0}], agg#0=[MAX($1)]) LogicalProject(DEPTNO=[$7], EMPNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{}], EXPR$0=[MIN($0)]) LogicalProject(EMPNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3269,7 +3269,7 @@ values (10, 150, 'Fred')]]> @@ -3282,7 +3282,7 @@ values (50, 'Fred')]]> @@ -3295,7 +3295,7 @@ values (?, ?, ?)]]> @@ -3308,7 +3308,7 @@ values (?, ?)]]> @@ -3320,7 +3320,7 @@ LogicalModify(table=[[CATALOG, SALES, EMPNULLABLES]], operation=[INSERT], flatte @@ -3332,7 +3332,7 @@ LogicalModify(table=[[CATALOG, SALES, EMPDEFAULTS]], operation=[INSERT], flatten ($0, 100)]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], SLACKER=[$8]) LogicalFilter(condition=[AND(=($7, 20), >($5, 1000))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3376,7 +3376,7 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ @@ -3390,7 +3390,7 @@ LogicalProject(ENAME=[$1], EMPNO=[$0], JOB=[$2], SLACKER=[$6], SAL=[$5], HIREDAT @@ -3404,7 +3404,7 @@ LogicalProject(ENAME=[$1], EMPNO=[$0], JOB=[$2], SLACKER=[$6], sal=[$7], HIREDAT @@ -3418,7 +3418,7 @@ LogicalProject(ENAME=[$0], EMPNO=[$1], JOB=[$2], SLACKER=[$4], SAL=[$5], HIREDAT @@ -3432,7 +3432,7 @@ LogicalProject(ENAME=[$0], 
EMPNO=[$1], JOB=[$2], SLACKER=[$4], SAL=[$5], HIREDAT @@ -3446,7 +3446,7 @@ LogicalProject(ENAME=[$1], EMPNO=[$0], JOB=[$2], SLACKER=[$6], SAL=[$5], HIREDAT @@ -3457,7 +3457,7 @@ values (150, 'Fred')]]> ($5, 1000)]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[null:VARCHAR(10)], MGR=[null:INTEGER], HIREDATE=[null:TIMESTAMP(0)], SAL=[null:INTEGER], COMM=[null:INTEGER], DEPTNO=[20], SLACKER=[null:BOOLEAN]) LogicalValues(tuples=[[{ 150, 'Fred' }]]) @@ -3471,7 +3471,7 @@ values (10, 'Fred')]]> ($5, 1000)]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[null:VARCHAR(10)], MGR=[null:INTEGER], HIREDATE=[null:TIMESTAMP(0)], SAL=[null:INTEGER], COMM=[null:INTEGER], DEPTNO=[20], SLACKER=[null:BOOLEAN]) LogicalValues(tuples=[[{ 10, 'Fred' }]]) @@ -3484,7 +3484,7 @@ LogicalModify(table=[[CATALOG, SALES, EMPNULLABLES]], operation=[INSERT], flatte @@ -3496,7 +3496,7 @@ LogicalModify(table=[[SALES, EMPDEFAULTS]], operation=[INSERT], flattened=[true] @@ -3508,7 +3508,7 @@ LogicalModify(table=[[SALES, EMPDEFAULTS]], operation=[INSERT], flattened=[true] @@ -3520,7 +3520,7 @@ LogicalModify(table=[[SALES, EMPDEFAULTS]], operation=[INSERT], flattened=[true] @@ -3532,7 +3532,7 @@ LogicalModify(table=[[SALES, EMPDEFAULTS]], operation=[INSERT], flattened=[true] @@ -3544,7 +3544,7 @@ LogicalModify(table=[[CATALOG, SALES, EMPDEFAULTS]], operation=[INSERT], flatten @@ -3556,7 +3556,7 @@ LogicalModify(table=[[CATALOG, SALES, EMPDEFAULTS]], operation=[INSERT], flatten @@ -3568,7 +3568,7 @@ LogicalModify(table=[[CATALOG, SALES, EMPDEFAULTS]], operation=[INSERT], flatten @@ -3580,7 +3580,7 @@ LogicalModify(table=[[CATALOG, SALES, EMPDEFAULTS]], operation=[INSERT], flatten @@ -3593,7 +3593,7 @@ values (?, ?, ?)]]> @@ -3607,7 +3607,7 @@ values (?, ?, ?, ?, ?, ?, ?, ?)]]> @@ -3633,7 +3633,7 @@ LogicalModify(table=[[CATALOG, STRUCT, T]], operation=[UPDATE], updateColumnList @@ -3644,7 +3644,7 @@ LogicalProject(K0=[$0]) @@ -3655,7 +3655,7 @@ LogicalProject(C2=[$7]) @@ -3668,7 +3668,7 @@ LogicalProject(C2=[$7]) LogicalProject(C1=[$0]) LogicalSort(sort0=[$1], dir0=[ASC]) LogicalProject(C1=[$1], "F0"."C1"=[$6]) - LogicalScan(table=[[CATALOG, STRUCT, T]]) + LogicalScan(entity=[[CATALOG, STRUCT, T]]) ]]> @@ -3681,7 +3681,7 @@ LogicalProject(C1=[$0]) LogicalProject(EXPR$0=[$1]) LogicalAggregate(group=[{0}], EXPR$0=[COUNT()]) LogicalProject("F0"."C1"=[$6], "C1"=[$1]) - LogicalScan(table=[[CATALOG, STRUCT, T]]) + LogicalScan(entity=[[CATALOG, STRUCT, T]]) ]]> @@ -3692,7 +3692,7 @@ LogicalProject(EXPR$0=[$1]) @@ -3703,7 +3703,7 @@ LogicalProject("K0"=[$0], "C1"=[$1], "F1"."A0"=[$2], "F2"."A0"=[$3], "F0"."C0"=[ @@ -3719,7 +3719,7 @@ from emp group by deptno, empno LogicalProject(EXPR$0=[$2], EXPR$1=[RANK() OVER (ORDER BY $1 RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW)], EXPR$2=[MAX($1) OVER (PARTITION BY $0 RANGE BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING)]) LogicalAggregate(group=[{0, 1}], EXPR$0=[MIN($0)]) LogicalProject(DEPTNO=[$7], EMPNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3734,7 +3734,7 @@ group by deptno]]> LogicalProject(EXPR$0=[CAST(/(SUM($0) OVER (RANGE BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING), COUNT($0) OVER (RANGE BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING))):INTEGER NOT NULL]) LogicalAggregate(group=[{0}]) LogicalProject(DEPTNO=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3754,8 +3754,8 @@ LogicalProject(EXPR$0=[$3], EXPR$1=[RANK() OVER (ORDER BY $1 RANGE BETWEEN 
UNBOU LogicalProject(DEPTNO0=[$9], EMPNO=[$0], DEPTNO=[$7]) LogicalFilter(condition=[=($7, $9)]) LogicalJoin(condition=[true], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -3773,7 +3773,7 @@ LogicalProject(EXPR$0=[$2], EXPR$1=[RANK() OVER (ORDER BY $1 RANGE BETWEEN UNBOU LogicalFilter(condition=[AND(<($1, 10), <($2, 20))]) LogicalAggregate(group=[{0, 1}], EXPR$0=[MIN($0)]) LogicalProject(DEPTNO=[$7], EMPNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3795,9 +3795,9 @@ LogicalProject(X=[$0], Y=[$1], Z=[$2], EMPNO=[$3]) LogicalProject(X=[$2], Y=[RANK() OVER (ORDER BY $1 RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW)], Z=[MAX($1) OVER (PARTITION BY $0 RANGE BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING)]) LogicalAggregate(group=[{0, 1}], X=[MIN($0)]) LogicalProject(DEPTNO=[$7], EMPNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], EMPNO0=[CAST($0):BIGINT NOT NULL]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3811,7 +3811,7 @@ from emp order by row_number() over(partition by empno order by deptno)]]> LogicalProject(DEPTNO=[$0], EXPR$1=[$1]) LogicalSort(sort0=[$2], dir0=[ASC]) LogicalProject(DEPTNO=[$7], EXPR$1=[RANK() OVER (PARTITION BY $0 ORDER BY $7 RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW)], EXPR$2=[ROW_NUMBER() OVER (PARTITION BY $0 ORDER BY $7 ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3847,12 +3847,12 @@ LogicalProject(EMPNO=[$0]) LogicalJoin(condition=[AND(=($0, $11), >($5, $12))], joinType=[inner]) LogicalJoin(condition=[=($7, $9)], joinType=[inner]) LogicalFilter(condition=[<($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalFilter(condition=[<($0, 15)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalAggregate(group=[{0}], EXPR$0=[AVG($1)]) LogicalProject(EMPNO=[$0], SAL=[$5]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3871,11 +3871,11 @@ LogicalAggregate(group=[{}], EXPR$0=[SUM($0)]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], DEPTNO0=[$9], NAME=[$10], DEPTNO1=[CAST($11):INTEGER], EXPR$0=[CAST($12):INTEGER]) LogicalJoin(condition=[AND(=($9, $11), >($5, $12))], joinType=[inner]) LogicalJoin(condition=[=($7, $9)], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalAggregate(group=[{0}], EXPR$0=[AVG($1)]) LogicalProject(DEPTNO=[$7], SAL=[$5]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3893,12 +3893,12 @@ LogicalProject(EMPNO=[$0]) LogicalJoin(condition=[=($0, $11)], joinType=[inner]) LogicalJoin(condition=[=($7, $9)], joinType=[inner]) LogicalFilter(condition=[<($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) 
LogicalFilter(condition=[<($0, 15)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalAggregate(group=[{0}], agg#0=[MIN($1)]) LogicalProject(EMPNO=[$0], $f0=[true]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3916,12 +3916,12 @@ LogicalProject(EMPNO=[$0]) LogicalJoin(condition=[=($0, $11)], joinType=[left]) LogicalJoin(condition=[=($7, $9)], joinType=[inner]) LogicalFilter(condition=[<($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalFilter(condition=[<($0, 15)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalAggregate(group=[{0}], agg#0=[MIN($1)]) LogicalProject(EMPNO=[$0], $f0=[true]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -3942,10 +3942,10 @@ LogicalProject(A=[$0], *=[$1]) @@ -3955,10 +3955,10 @@ LogicalModify(table=[[CATALOG, SALES, EMP]], operation=[DELETE], flattened=[true @@ -3968,10 +3968,10 @@ LogicalModify(table=[[CATALOG, SALES, EMP]], operation=[DELETE], flattened=[true @@ -3981,10 +3981,10 @@ LogicalModify(table=[[CATALOG, SALES, EMP]], operation=[DELETE], flattened=[true @@ -3995,10 +3995,10 @@ where note = ?]]> @@ -4023,9 +4023,9 @@ LogicalProject(EXPR$0=[+(2, 2)]) LogicalProject(EMPNO=[$0]) LogicalFilter(condition=[> SOME($7, { LogicalProject(DEPTNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) })]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4037,7 +4037,7 @@ LogicalProject(DEPTNO=[$0]) ($7, 10), >($7, 20))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4053,9 +4053,9 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalFilter(condition=[OR(=($7, 10), IN($7, { LogicalProject(DEPTNO=[$0]) LogicalFilter(condition=[<($0, 5)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) }))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4065,10 +4065,10 @@ LogicalProject(DEPTNO=[$0]) @@ -4078,10 +4078,10 @@ LogicalModify(table=[[CATALOG, SALES, EMP]], operation=[UPDATE], updateColumnLis @@ -4091,10 +4091,10 @@ LogicalModify(table=[[CATALOG, SALES, EMP]], operation=[UPDATE], updateColumnLis @@ -4104,10 +4104,10 @@ LogicalModify(table=[[CATALOG, SALES, EMP]], operation=[UPDATE], updateColumnLis @@ -4117,10 +4117,10 @@ LogicalModify(table=[[CATALOG, SALES, EMPDEFAULTS]], operation=[UPDATE], updateC @@ -4130,10 +4130,10 @@ LogicalModify(table=[[CATALOG, SALES, EMPDEFAULTS]], operation=[UPDATE], updateC @@ -4143,10 +4143,10 @@ LogicalModify(table=[[CATALOG, SALES, EMP_MODIFIABLEVIEW3]], operation=[UPDATE], @@ -4156,10 +4156,10 @@ LogicalModify(table=[[CATALOG, SALES, EMP_MODIFIABLEVIEW2]], operation=[UPDATE], @@ -4169,10 +4169,10 @@ LogicalModify(table=[[CATALOG, SALES, EMP_MODIFIABLEVIEW2]], operation=[UPDATE], @@ -4182,10 +4182,10 @@ LogicalModify(table=[[CATALOG, SALES, EMP_MODIFIABLEVIEW2]], operation=[UPDATE], @@ -4195,7 +4195,7 @@ LogicalModify(table=[[CATALOG, SALES, EMP_MODIFIABLEVIEW3]], operation=[UPDATE], @@ -4215,7 +4215,7 @@ LogicalDelta LogicalProject(S=[$0], E=[+($0, 60000:INTERVAL MINUTE)]) LogicalAggregate(group=[{0}]) LogicalProject($f0=[TUMBLE($1, 60000:INTERVAL MINUTE)]) - LogicalScan(table=[[CATALOG, SALES, 
SHIPMENTS]]) + LogicalScan(entity=[[CATALOG, SALES, SHIPMENTS]]) ]]> @@ -4232,7 +4232,7 @@ LogicalDelta LogicalProject(ROWTIME=[+($0, 7200000:INTERVAL HOUR)], ORDERID=[$1]) LogicalAggregate(group=[{0, 1}]) LogicalProject($f0=[TUMBLE($1, 7200000:INTERVAL HOUR)], ORDERID=[$0]) - LogicalScan(table=[[CATALOG, SALES, SHIPMENTS]]) + LogicalScan(entity=[[CATALOG, SALES, SHIPMENTS]]) ]]> @@ -4242,10 +4242,10 @@ LogicalDelta @@ -4256,10 +4256,10 @@ set sal = sal + 5000 where slacker = false]]> @@ -4271,10 +4271,10 @@ where slacker = false]]> @@ -4284,9 +4284,9 @@ LogicalModify(table=[[CATALOG, SALES, EMP_MODIFIABLEVIEW2]], operation=[UPDATE], @@ -4296,9 +4296,9 @@ LogicalModify(table=[[CATALOG, SALES, EMP]], operation=[DELETE], flattened=[true @@ -4310,19 +4310,19 @@ set empno = ( @@ -4333,7 +4333,7 @@ LogicalModify(table=[[CATALOG, SALES, EMP]], operation=[UPDATE], updateColumnLis @@ -4349,9 +4349,9 @@ LogicalProject(C=[$SCALAR_QUERY({ LogicalAggregate(group=[{}], EXPR$0=[COUNT()]) LogicalProject($f0=[0]) LogicalFilter(condition=[>($0, 10)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) })]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4369,9 +4369,9 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalFilter(condition=[<=($0, $cor1.DEPTNO)]) LogicalProject(DEPTNO=[$0], NAME=[$1]) LogicalFilter(condition=[>=($0, $cor1.DEPTNO)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) })], variablesSet=[[$cor1]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4385,9 +4385,9 @@ from dept]]> @@ -4401,9 +4401,9 @@ from emp]]> @@ -4416,9 +4416,9 @@ LogicalProject(DEPTNO=[$0]) LogicalProject(EMPNO=[$0]) LogicalFilter(condition=[NOT(IN($7, { LogicalProject(DEPTNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) }))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4430,7 +4430,7 @@ LogicalProject(DEPTNO=[$0]) @@ -4442,7 +4442,7 @@ LogicalProject(EMPNO=[$0]) @@ -4454,7 +4454,7 @@ LogicalProject(EMPNO=[$0]) @@ -4468,9 +4468,9 @@ from emp]]> @@ -4483,9 +4483,9 @@ LogicalProject(EXPR$0=[CAST($0):INTEGER]) LogicalProject(EMPNO=[$0]) LogicalFilter(condition=[IN($7, { LogicalProject(DEPTNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) })]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4501,7 +4501,7 @@ LogicalProject(DEPTNO=[$0], EMPNO_AVG=[$7]) LogicalProject(DEPTNO=[$0], NAME=[$1], TYPE=[$2], DESC=[$3], A=[$4], B=[$5], EMPLOYEES=[$6], EMPNO_AVG=[$7]) LogicalCorrelate(correlation=[$cor0], joinType=[left], requiredColumns=[{6}]) LogicalProject(DEPTNO=[$0], NAME=[$1], TYPE=[$2.TYPE], DESC=[$2.DESC], A=[$2.OTHERS.A], B=[$2.OTHERS.B], EMPLOYEES=[$3]) - LogicalScan(table=[[CATALOG, SALES, DEPT_NESTED]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT_NESTED]]) LogicalAggregate(group=[{}], EMPNO_AVG=[AVG($0)]) LogicalProject(EMPNO=[$0]) Uncollect @@ -4521,7 +4521,7 @@ from dept_nested as d, LogicalProject(DEPTNO=[$0], EMPNO=[$7]) LogicalCorrelate(correlation=[$cor0], joinType=[inner], requiredColumns=[{6}]) LogicalProject(DEPTNO=[$0], NAME=[$1], TYPE=[$2.TYPE], DESC=[$2.DESC], A=[$2.OTHERS.A], B=[$2.OTHERS.B], EMPLOYEES=[$3]) - LogicalScan(table=[[CATALOG, SALES, DEPT_NESTED]]) + 
LogicalScan(table=[[CATALOG, SALES, DEPT_NESTED]])
+
LogicalScan(entity=[[CATALOG, SALES, DEPT_NESTED]]) Uncollect LogicalProject(EMPLOYEES=[$cor0.EMPLOYEES_6]) LogicalValues(tuples=[[{ 0 }]]) @@ -4539,7 +4539,7 @@ from dept_nested as d, LogicalProject(DEPTNO=[$0], EMPNO=[$7]) LogicalCorrelate(correlation=[$cor0], joinType=[inner], requiredColumns=[{6}]) LogicalProject(DEPTNO=[$0], NAME=[$1], TYPE=[$2.TYPE], DESC=[$2.DESC], A=[$2.OTHERS.A], B=[$2.OTHERS.B], EMPLOYEES=[$3]) - LogicalScan(table=[[CATALOG, SALES, DEPT_NESTED]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT_NESTED]]) LogicalProject(EMPNO=[$0], Y=[$1], Z=[$2]) Uncollect LogicalProject(EMPLOYEES=[$cor0.EMPLOYEES_6]) @@ -4561,9 +4561,9 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalFilter(condition=[<=($0, $cor1.DEPTNO)]) LogicalProject(DEPTNO=[$0], NAME=[$1]) LogicalFilter(condition=[>=($0, $cor1.DEPTNO)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) })], variablesSet=[[$cor1]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4576,9 +4576,9 @@ LogicalFilter(condition=[<=($0, $cor1.DEPTNO)]) LogicalProject(EMPNO=[$0]) LogicalFilter(condition=[IN($0, $7, { LogicalProject(EXPR$0=[-($0, 10)], DEPTNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) })]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4594,10 +4594,10 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalJoin(condition=[OR(=($0, 1), IN($9, { LogicalProject(DEPTNO=[$7]) LogicalFilter(condition=[>($0, 5)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) }))], joinType=[left]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -4612,10 +4612,10 @@ or exists (select deptno from emp where empno > dept.deptno + 5)]]> LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], DEPTNO0=[$9], NAME=[$10]) LogicalJoin(condition=[OR(=($0, 1), EXISTS({ LogicalFilter(condition=[>($0, +($cor0.DEPTNO0, 5))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) }))], joinType=[left]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -4633,11 +4633,11 @@ and deptno in ( LogicalProject(S=[$1]) LogicalFilter(condition=[AND(>($2, 2), IN($0, { LogicalProject(EXPR$0=[CAST($7):INTEGER]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) }))]) LogicalAggregate(group=[{0}], S=[SUM($1)], agg#1=[COUNT()]) LogicalProject(DEPTNO=[$7], SAL=[$5]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4655,11 +4655,11 @@ LogicalProject(S=[$0]) LogicalSort(sort0=[$1], sort1=[$2], dir0=[DESC], dir1=[ASC]) LogicalProject(S=[$1], EXPR$1=[$SCALAR_QUERY({ LogicalProject(EXPR$0=[CAST($7):INTEGER]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) })], EXPR$2=[$2]) LogicalAggregate(group=[{0}], S=[SUM($1)], agg#1=[COUNT()]) LogicalProject(DEPTNO=[$7], SAL=[$5]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, 
EMP]])
+ LogicalScan(entity=[[CATALOG, SALES,
EMP]]) ]]> @@ -4676,9 +4676,9 @@ LogicalProject(ENAME=[$0]) LogicalSort(sort0=[$1], sort1=[$0], dir0=[DESC], dir1=[ASC]) LogicalProject(ENAME=[$1], EXPR$1=[$SCALAR_QUERY({ LogicalProject(EXPR$0=[CAST($7):INTEGER]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) })]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4696,9 +4696,9 @@ LogicalProject(S=[$1]) LogicalProject(DEPTNO=[$7], $f1=[$SCALAR_QUERY({ LogicalAggregate(group=[{}], EXPR$0=[MIN($0)]) LogicalProject(DEPTNO=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) })]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4714,9 +4714,9 @@ LogicalAggregate(group=[{}], EXPR$0=[MIN($0)]) LogicalProject(X0=[$0], X1=[$1]) LogicalUnion(all=[true]) LogicalProject(X0=['a'], X1=['a'], X2=['a']) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalProject(X0=['bb'], X1=['bb'], X2=['bb']) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -4733,11 +4733,11 @@ LogicalProject(EMPNO=[$0]) LogicalFilter(condition=[IN($5, { LogicalProject(SAL=[$5]) LogicalFilter(condition=[>($7, $cor0.DEPTNO)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) })], variablesSet=[[$cor0]]) LogicalJoin(condition=[=($7, $9)], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -4750,9 +4750,9 @@ LogicalProject(SAL=[$5]) @@ -4765,13 +4765,13 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ @@ -4785,9 +4785,9 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8]) LogicalFilter(condition=[EXISTS({ LogicalFilter(condition=[=($cor0.DEPTNO, $0)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) })], variablesSet=[[$cor0]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4805,19 +4805,19 @@ LogicalProject(D2=[$0], D3=[$1]) LogicalProject(D2=[$0], D3=[$1], D1=[CAST($2):INTEGER], D6=[$3], $f2=[CAST($4):BOOLEAN]) LogicalJoin(condition=[AND(=($0, $2), =($1, $3))], joinType=[inner]) LogicalProject(D2=[+(2, $7)], D3=[+(3, $7)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0, 1}], agg#0=[MIN($2)]) LogicalProject(D1=[$0], D6=[$2], $f0=[true]) LogicalFilter(condition=[IS NOT NULL($1)]) LogicalProject(D1=[$0], $f0=[$3], D6=[$2]) LogicalJoin(condition=[=($0, $1)], joinType=[left]) LogicalProject(D1=[+($0, 1)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalAggregate(group=[{0, 1}], agg#0=[MIN($2)]) LogicalProject(D4=[$0], D6=[$2], $f0=[true]) LogicalFilter(condition=[=($1, $0)]) LogicalProject(D4=[+($0, 4)], D5=[+($0, 5)], D6=[+($0, 6)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -4829,7 +4829,7 @@ LogicalProject(D2=[$0], D3=[$1]) @@ -4873,9 +4873,9 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ LogicalSort(fetch=[1]) 
LogicalProject(EXPR$0=[1]) LogicalFilter(condition=[=($cor0.DEPTNO, $0)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) })], variablesSet=[[$cor0]]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4889,7 +4889,7 @@ LogicalProject(EXPR$0=[$0]) Uncollect LogicalProject(EXPR$0=[$SLICE($2)]) LogicalCorrelate(correlation=[$cor0], joinType=[inner], requiredColumns=[{0}]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) Collect(field=[EXPR$0]) LogicalUnion(all=[true]) LogicalProject(EXPR$0=[$cor0.DEPTNO]) @@ -4911,12 +4911,12 @@ LogicalProject(EMPNO=[$0]) LogicalJoin(condition=[=($0, $11)], joinType=[inner]) LogicalJoin(condition=[=($7, $9)], joinType=[inner]) LogicalFilter(condition=[<($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalFilter(condition=[<($0, 15)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalAggregate(group=[{0}], agg#0=[MIN($1)]) LogicalProject(EMPNO=[$0], $f0=[true]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -4934,20 +4934,20 @@ LogicalProject(EMPNO=[$0]) LogicalJoin(condition=[=($0, $11)], joinType=[inner]) LogicalJoin(condition=[=($7, $9)], joinType=[inner]) LogicalFilter(condition=[<($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalFilter(condition=[<($0, 15)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) LogicalAggregate(group=[{0}], agg#0=[MIN($1)]) LogicalProject(EMPNO0=[$9], $f0=[true]) LogicalJoin(condition=[<($9, $0)], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0}]) LogicalProject(EMPNO=[$0]) LogicalJoin(condition=[=($7, $9)], joinType=[inner]) LogicalFilter(condition=[<($7, 10)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalFilter(condition=[<($0, 15)]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -4961,11 +4961,11 @@ from dept]]> @@ -4977,7 +4977,7 @@ LogicalProject(DEPTNO=[$0], NAME=[$1], EMPSET=[$2]) @@ -5012,7 +5012,7 @@ LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$ @@ -5023,7 +5023,7 @@ LogicalProject(N_NATIONKEY=[$0], N_NAME=[$1]) @@ -5038,7 +5038,7 @@ order by n_regionkey]]> LogicalProject(N_NATIONKEY=[$0], N_NAME=[$1]) LogicalSort(sort0=[$2], dir0=[ASC]) LogicalProject(N_NATIONKEY=[ITEM($0, 'N_NATIONKEY')], N_NAME=[ITEM($0, 'N_NAME')], EXPR$2=[ITEM($0, 'N_REGIONKEY')]) - LogicalScan(table=[[CATALOG, SALES, NATION]]) + LogicalScan(entity=[[CATALOG, SALES, NATION]]) ]]> @@ -5052,9 +5052,9 @@ LogicalProject(**=[$0], **0=[$1]) LogicalFilter(condition=[=(ITEM($0, 'N_NATIONKEY'), ITEM($1, 'C_NATIONKEY'))]) LogicalJoin(condition=[true], joinType=[inner]) LogicalProject(**=[$0]) - LogicalScan(table=[[CATALOG, SALES, NATION]]) + LogicalScan(entity=[[CATALOG, SALES, NATION]]) LogicalProject(**=[$0]) - LogicalScan(table=[[CATALOG, SALES, CUSTOMER]]) + LogicalScan(entity=[[CATALOG, SALES, CUSTOMER]]) ]]> @@ -5068,7 +5068,7 @@ LogicalAggregate(group=[{0}], CNT=[COUNT()]) LogicalProject(N_REGIONKEY=[ITEM($0, 'N_REGIONKEY')]) LogicalFilter(condition=[>(ITEM($0, 'N_NATIONKEY'), 5)]) LogicalProject(**=[$0]) - 
LogicalScan(table=[[CATALOG, SALES, NATION]]) + LogicalScan(entity=[[CATALOG, SALES, NATION]]) ]]> @@ -5081,8 +5081,8 @@ LogicalAggregate(group=[{0}], CNT=[COUNT()]) LogicalProject(**=[$1], **0=[$3]) LogicalFilter(condition=[=($0, $2)]) LogicalJoin(condition=[true], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, NATION]]) - LogicalScan(table=[[CATALOG, SALES, CUSTOMER]]) + LogicalScan(entity=[[CATALOG, SALES, NATION]]) + LogicalScan(entity=[[CATALOG, SALES, CUSTOMER]]) ]]> @@ -5095,8 +5095,8 @@ LogicalProject(**=[$1], **0=[$3]) LogicalProject(**=[$1], R_REGIONKEY=[$2], R_NAME=[$3], R_COMMENT=[$4]) LogicalFilter(condition=[=($0, $2)]) LogicalJoin(condition=[true], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, NATION]]) - LogicalScan(table=[[CATALOG, SALES, REGION]]) + LogicalScan(entity=[[CATALOG, SALES, NATION]]) + LogicalScan(entity=[[CATALOG, SALES, REGION]]) ]]> @@ -5108,7 +5108,7 @@ LogicalProject(**=[$1], R_REGIONKEY=[$2], R_NAME=[$3], R_COMMENT=[$4]) @@ -5122,10 +5122,10 @@ from SALES.REGION where exists (select * from SALES.NATION)]]> LogicalProject(R_REGIONKEY=[$0], R_NAME=[$1], R_COMMENT=[$2]) LogicalFilter(condition=[IS NOT NULL($3)]) LogicalJoin(condition=[true], joinType=[left]) - LogicalScan(table=[[CATALOG, SALES, REGION]]) + LogicalScan(entity=[[CATALOG, SALES, REGION]]) LogicalAggregate(group=[{}], agg#0=[MIN($0)]) LogicalProject($f0=[true]) - LogicalScan(table=[[CATALOG, SALES, NATION]]) + LogicalScan(entity=[[CATALOG, SALES, NATION]]) ]]> @@ -5138,7 +5138,7 @@ LogicalProject(R_REGIONKEY=[$0], R_NAME=[$1], R_COMMENT=[$2]) LogicalProject(**=[$0]) LogicalSort(sort0=[$1], dir0=[ASC]) LogicalProject(**=[$0], EXPR$1=[ITEM($0, 'N_NATIONKEY')]) - LogicalScan(table=[[CATALOG, SALES, NATION]]) + LogicalScan(entity=[[CATALOG, SALES, NATION]]) ]]> @@ -5152,7 +5152,7 @@ WHERE cast(e.empno as bigint) in (130, 131, 132, 133, 134)]]> @@ -5160,7 +5160,7 @@ LogicalProject(EMPNO=[$0]) LogicalProject(EMPNO=[$0]) LogicalJoin(condition=[=($9, $10)], joinType=[inner]) LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], SLACKER=[$8], EMPNO0=[CAST($0):BIGINT NOT NULL]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) LogicalAggregate(group=[{0}]) LogicalValues(tuples=[[{ 130 }, { 131 }, { 132 }, { 133 }, { 134 }]]) ]]> @@ -5178,7 +5178,7 @@ order by row_number() over(partition by empno order by deptno)]]> LogicalProject(DEPTNO=[$0], EXPR$1=[$1]) LogicalSort(sort0=[$2], dir0=[ASC-nulls-first]) LogicalProject(DEPTNO=[$7], EXPR$1=[RANK() OVER (PARTITION BY $0 ORDER BY $7 NULLS FIRST RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW)], EXPR$2=[ROW_NUMBER() OVER (PARTITION BY $0 ORDER BY $7 NULLS FIRST ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -5191,7 +5191,7 @@ WINDOW w AS (PARTITION BY REGION ORDER BY n_nationkey)]]> (COUNT(ITEM($0, 'N_NATIONKEY')) OVER (PARTITION BY ITEM($0, 'REGION') ORDER BY ITEM($0, 'N_NATIONKEY') RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW), 0), $SUM0(ITEM($0, 'N_NATIONKEY')) OVER (PARTITION BY ITEM($0, 'REGION') ORDER BY ITEM($0, 'N_NATIONKEY') RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW), null:ANY)]) - LogicalScan(table=[[CATALOG, SALES, NATION]]) + LogicalScan(entity=[[CATALOG, SALES, NATION]]) ]]> @@ -5208,7 +5208,7 @@ GROUP BY n_regionkey]]> LogicalProject(N_REGIONKEY=[$0], EXPR$1=[MAX($1) OVER (PARTITION BY $0 RANGE BETWEEN UNBOUNDED 
PRECEDING AND UNBOUNDED FOLLOWING)]) LogicalAggregate(group=[{0}], agg#0=[MIN($1)]) LogicalProject(N_REGIONKEY=[ITEM($0, 'N_REGIONKEY')], $f1=[ITEM($0, 'N_NATIONKEY')]) - LogicalScan(table=[[CATALOG, SALES, NATION]]) + LogicalScan(entity=[[CATALOG, SALES, NATION]]) ]]> @@ -5229,7 +5229,7 @@ LogicalProject(A=[$0], B=[$1]) LogicalAggregate(group=[{}], agg#0=[MIN($0)]) LogicalProject($f0=[true]) LogicalFilter(condition=[=($7, $cor0.A)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -5244,7 +5244,7 @@ lateral (select t2."$unnest" as fake_col3 LogicalProject(C1=[$1]) LogicalCorrelate(correlation=[$cor0], joinType=[inner], requiredColumns=[{0}]) LogicalProject(**=[$0]) - LogicalScan(table=[[CATALOG, SALES, CUSTOMER]]) + LogicalScan(entity=[[CATALOG, SALES, CUSTOMER]]) LogicalProject(C1=[$0]) Uncollect LogicalProject(EXPR$0=[ITEM($cor0.**, 'FAKE_COL')]) @@ -5296,7 +5296,7 @@ LogicalProject(C1=[$1]) @@ -5309,7 +5309,7 @@ LogicalAggregate(group=[{}], ANYEMPNO=[ANY_VALUE($0)]) LogicalProject(ANYEMPNO=[$1]) LogicalAggregate(group=[{0}], ANYEMPNO=[ANY_VALUE($1)]) LogicalProject(SAL=[$5], EMPNO=[$0]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -5321,7 +5321,7 @@ from emp]]> @@ -5333,7 +5333,7 @@ from emp]]> @@ -5345,7 +5345,7 @@ from emp]]> @@ -5357,7 +5357,7 @@ from emp]]> @@ -5370,7 +5370,7 @@ from emp]]> ($0)]) LogicalProject($f0=[JSON_STRUCTURED_VALUE_EXPRESSION($1)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -5382,7 +5382,7 @@ from emp]]> @@ -5395,7 +5395,7 @@ from emp]]> ($0, $1)]) LogicalProject(ENAME=[$1], $f1=[JSON_STRUCTURED_VALUE_EXPRESSION($7)]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -5417,7 +5417,7 @@ from emp]]> @@ -5432,7 +5432,7 @@ group by deptno]]> @@ -5452,8 +5452,8 @@ group by dept.deptno]]> LogicalAggregate(group=[{0}], S=[COLLECT($1) WITHIN GROUP ([1 DESC])], S1=[COLLECT($1) WITHIN GROUP ([2])], S2=[COLLECT($1) WITHIN GROUP ([1]) FILTER $3]) LogicalProject(DEPTNO=[$9], SAL=[$5], $f2=[1], $f3=[>($5, 2000)]) LogicalJoin(condition=[=($7, $9)], joinType=[inner]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) - LogicalScan(table=[[CATALOG, SALES, DEPT]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, DEPT]]) ]]> @@ -5468,7 +5468,7 @@ group by deptno]]> ($0, 1), <>($0, 2))]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -5481,7 +5481,7 @@ order by empno desc]]> @@ -5496,7 +5496,7 @@ LogicalSort(sort0=[$0], dir0=[DESC]) LogicalProject(EMPNO=[$0]) LogicalSort(sort0=[$1], dir0=[ASC], offset=[1]) LogicalProject(EMPNO=[$0], DEPTNO=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> @@ -5511,7 +5511,7 @@ LogicalSort(sort0=[$0], dir0=[ASC]) LogicalProject(EMPNO=[$0]) LogicalSort(sort0=[$1], dir0=[ASC], fetch=[10]) LogicalProject(EMPNO=[$0], DEPTNO=[$7]) - LogicalScan(table=[[CATALOG, SALES, EMP]]) + LogicalScan(entity=[[CATALOG, SALES, EMP]]) ]]> diff --git a/plugins/sql-language/src/test/resources/sql/agg.iq b/plugins/sql-language/src/test/resources/sql/agg.iq index a0b94355c3..db7614e842 100644 --- a/plugins/sql-language/src/test/resources/sql/agg.iq +++ b/plugins/sql-language/src/test/resources/sql/agg.iq @@ -18,7 +18,7 @@ !use post !set outputformat mysql -# count(*) returns number of rows in table +# count(*) returns number of rows in 
table
+# count(*) returns number of rows in
entity select count(ename) as c from emp; +---+ | C | @@ -1450,9 +1450,9 @@ EnumerableAggregate(group=[{0}], CF=[COUNT() FILTER $1], C=[COUNT()]) EnumerableCalc(expr#0..1=[{inputs}], expr#2=['CLERK':VARCHAR(9)], expr#3=[=($t0, $t2)], expr#4=[IS TRUE($t3)], DEPTNO=[$t1], $f1=[$t4]) EnumerableUnion(all=[true]) EnumerableCalc(expr#0..7=[{inputs}], expr#8=[20], expr#9=[<($t7, $t8)], JOB=[$t2], DEPTNO=[$t7], $condition=[$t9]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) EnumerableCalc(expr#0..7=[{inputs}], expr#8=[20], expr#9=[>($t7, $t8)], JOB=[$t2], DEPTNO=[$t7], $condition=[$t9]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) !plan # [CALCITE-751] Aggregate join transpose @@ -1469,9 +1469,9 @@ from "scott".emp join "scott".dept using (deptno); EnumerableAggregate(group=[{}], EXPR$0=[COUNT()]) EnumerableJoin(condition=[=($0, $2)], joinType=[inner]) EnumerableCalc(expr#0..2=[{inputs}], DEPTNO=[$t0]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], DEPTNO=[$t7]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) !plan # Push sum: splits into sum * count @@ -1488,9 +1488,9 @@ from "scott".emp join "scott".dept using (deptno); EnumerableAggregate(group=[{}], EXPR$0=[SUM($2)]) EnumerableJoin(condition=[=($0, $3)], joinType=[inner]) EnumerableCalc(expr#0..2=[{inputs}], DEPTNO=[$t0]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5], DEPTNO=[$t7]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) !plan # Push sum; no aggregate needed after join @@ -1511,9 +1511,9 @@ EnumerableCalc(expr#0..2=[{inputs}], EXPR$0=[$t2]) EnumerableAggregate(group=[{0, 3}], EXPR$0=[SUM($2)]) EnumerableJoin(condition=[=($0, $3)], joinType=[inner]) EnumerableCalc(expr#0..2=[{inputs}], DEPTNO=[$t0]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5], DEPTNO=[$t7]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) !plan # Push sum; group by only one of the join keys @@ -1534,9 +1534,9 @@ EnumerableCalc(expr#0..1=[{inputs}], EXPR$0=[$t1]) EnumerableAggregate(group=[{3}], EXPR$0=[SUM($2)]) EnumerableJoin(condition=[=($0, $3)], joinType=[inner]) EnumerableCalc(expr#0..2=[{inputs}], DEPTNO=[$t0]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5], DEPTNO=[$t7]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) !plan # Push min; Join-Aggregate is optimized to SemiJoin @@ -1557,9 +1557,9 @@ EnumerableCalc(expr#0..1=[{inputs}], EXPR$0=[$t1]) EnumerableAggregate(group=[{3}], EXPR$0=[MIN($2)]) EnumerableJoin(condition=[=($0, $3)], joinType=[inner]) EnumerableCalc(expr#0..2=[{inputs}], DEPTNO=[$t0]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5], DEPTNO=[$t7]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) !plan # Push sum and count @@ -1576,9 +1576,9 @@ from "scott".emp join "scott".dept using (deptno); EnumerableAggregate(group=[{}], C=[COUNT()], S=[SUM($2)]) EnumerableJoin(condition=[=($0, $3)], joinType=[inner]) EnumerableCalc(expr#0..2=[{inputs}], DEPTNO=[$t0]) - 
EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5], DEPTNO=[$t7]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) !plan # Push sum and count, group by join key @@ -1599,9 +1599,9 @@ EnumerableCalc(expr#0..2=[{inputs}], C=[$t1], S=[$t2]) EnumerableAggregate(group=[{3}], C=[COUNT()], S=[SUM($2)]) EnumerableJoin(condition=[=($0, $3)], joinType=[inner]) EnumerableCalc(expr#0..2=[{inputs}], DEPTNO=[$t0]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5], DEPTNO=[$t7]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) !plan # Push sum and count, group by join key plus another column @@ -1627,9 +1627,9 @@ EnumerableCalc(expr#0..3=[{inputs}], C=[$t2], S=[$t3]) EnumerableAggregate(group=[{0, 2}], C=[COUNT()], S=[SUM($3)]) EnumerableJoin(condition=[=($0, $4)], joinType=[inner]) EnumerableCalc(expr#0..2=[{inputs}], DEPTNO=[$t0]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], JOB=[$t2], SAL=[$t5], DEPTNO=[$t7]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) !plan # Push sum and count, group by non-join column @@ -1651,9 +1651,9 @@ EnumerableCalc(expr#0..2=[{inputs}], C=[$t1], S=[$t2]) EnumerableAggregate(group=[{2}], C=[COUNT()], S=[SUM($3)]) EnumerableJoin(condition=[=($0, $4)], joinType=[inner]) EnumerableCalc(expr#0..2=[{inputs}], DEPTNO=[$t0]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], JOB=[$t2], SAL=[$t5], DEPTNO=[$t7]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) !plan # Push count and sum, group by superset of join key @@ -1679,9 +1679,9 @@ EnumerableCalc(expr#0..3=[{inputs}], C=[$t2], S=[$t3]) EnumerableAggregate(group=[{0, 2}], C=[COUNT()], S=[SUM($3)]) EnumerableJoin(condition=[=($0, $4)], joinType=[inner]) EnumerableCalc(expr#0..2=[{inputs}], DEPTNO=[$t0]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], JOB=[$t2], SAL=[$t5], DEPTNO=[$t7]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) !plan # Push count and sum, group by a column being aggregated @@ -1710,9 +1710,9 @@ EnumerableCalc(expr#0..2=[{inputs}], C=[$t1], S=[$t2]) EnumerableAggregate(group=[{2}], C=[COUNT()], S=[SUM($2)]) EnumerableJoin(condition=[=($0, $3)], joinType=[inner]) EnumerableCalc(expr#0..2=[{inputs}], DEPTNO=[$t0]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5], DEPTNO=[$t7]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) !plan # Push sum, self-join, returning one row with a null value @@ -1849,9 +1849,9 @@ using (deptno); EnumerableCalc(expr#0..2=[{inputs}], EMPNO=[$t1], DEPTNO=[$t0]) EnumerableJoin(condition=[=($0, $2)], joinType=[inner]) EnumerableCalc(expr#0..2=[{inputs}], DEPTNO=[$t0]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], DEPTNO=[$t7]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) !plan # [CALCITE-1016] "GROUP BY constant" on empty relation should return 0 rows @@ -1897,7 +1897,7 @@ 
select count('1') from "scott".emp where false; !ok -# As above, but on VALUES rather than table +# As above, but on VALUES rather than entity # Should return 0 rows select '1' from (values (1, 2), (3, 4)) where false group by 1; +--------+ @@ -2006,7 +2006,7 @@ group by deptno, job; EnumerableCalc(expr#0..2=[{inputs}], JOB=[$t0], SUM_SAL=[$t2], DEPTNO=[$t1]) EnumerableAggregate(group=[{2, 7}], SUM_SAL=[SUM($5)]) EnumerableCalc(expr#0..7=[{inputs}], expr#8=[CAST($t7):INTEGER], expr#9=[10], expr#10=[=($t8, $t9)], proj#0..7=[{exprs}], $condition=[$t10]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) !plan !} @@ -2171,7 +2171,7 @@ EnumerableCalc(expr#0..3=[{inputs}], expr#4=[CAST($t1):BIGINT NOT NULL], EXPR$0= EnumerableAggregate(group=[{}], EXPR$0=[COUNT($0) FILTER $4], EXPR$1=[MIN($1) FILTER $5], EXPR$2=[MIN($2) FILTER $5], EXPR$3=[MIN($3) FILTER $5]) EnumerableCalc(expr#0..4=[{inputs}], expr#5=[0], expr#6=[=($t4, $t5)], expr#7=[1], expr#8=[=($t4, $t7)], proj#0..3=[{exprs}], $g_0=[$t6], $g_1=[$t8]) EnumerableAggregate(group=[{0}], groups=[[{0}, {}]], EXPR$1=[COUNT($5)], EXPR$2=[MIN($5)], EXPR$3=[MAX($5)], $g=[GROUPING($0)]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) !plan # [CALCITE-1930] AggregateExpandDistinctAggregateRules should handle multiple aggregate calls with same input ref @@ -2189,7 +2189,7 @@ EnumerableCalc(expr#0..3=[{inputs}], expr#4=[CAST($t1):BIGINT NOT NULL], EXPR$0= EnumerableAggregate(group=[{}], EXPR$0=[COUNT($0) FILTER $4], EXPR$1=[MIN($1) FILTER $5], EXPR$2=[MIN($2) FILTER $5], EXPR$3=[MIN($3) FILTER $5]) EnumerableCalc(expr#0..4=[{inputs}], expr#5=[0], expr#6=[=($t4, $t5)], expr#7=[1], expr#8=[=($t4, $t7)], proj#0..3=[{exprs}], $g_0=[$t6], $g_1=[$t8]) EnumerableAggregate(group=[{7}], groups=[[{7}, {}]], EXPR$1=[COUNT($2)], EXPR$2=[MIN($5)], EXPR$3=[MAX($5)], $g=[GROUPING($7)]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) !plan # [CALCITE-1930] AggregateExpandDistinctAggregateRules should handle multiple aggregate calls with same input ref @@ -2213,7 +2213,7 @@ EnumerableCalc(expr#0..4=[{inputs}], expr#5=[CAST($t2):BIGINT NOT NULL], proj#0. 
EnumerableAggregate(group=[{0}], EXPR$1=[COUNT($1) FILTER $5], EXPR$2=[MIN($2) FILTER $6], EXPR$3=[MIN($3) FILTER $6], EXPR$4=[MIN($4) FILTER $6]) EnumerableCalc(expr#0..5=[{inputs}], expr#6=[0], expr#7=[=($t5, $t6)], expr#8=[1], expr#9=[=($t5, $t8)], proj#0..4=[{exprs}], $g_0=[$t7], $g_1=[$t9]) EnumerableAggregate(group=[{3, 7}], groups=[[{3, 7}, {3}]], EXPR$2=[COUNT($2)], EXPR$3=[MIN($5)], EXPR$4=[MAX($5)], $g=[GROUPING($3, $7)]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) !plan # [CALCITE-1930] AggregateExpandDistinctAggregateRules should handle multiple aggregate calls with same input ref @@ -2236,7 +2236,7 @@ select MGR, count(distinct DEPTNO, JOB), MIN(SAL), MAX(SAL) from "scott".emp gro EnumerableAggregate(group=[{0}], EXPR$1=[COUNT($1, $2) FILTER $5], EXPR$2=[MIN($3) FILTER $6], EXPR$3=[MIN($4) FILTER $6]) EnumerableCalc(expr#0..5=[{inputs}], expr#6=[0], expr#7=[=($t5, $t6)], expr#8=[3], expr#9=[=($t5, $t8)], MGR=[$t1], DEPTNO=[$t2], JOB=[$t0], EXPR$2=[$t3], EXPR$3=[$t4], $g_0=[$t7], $g_3=[$t9]) EnumerableAggregate(group=[{2, 3, 7}], groups=[[{2, 3, 7}, {3}]], EXPR$2=[MIN($5)], EXPR$3=[MAX($5)], $g=[GROUPING($3, $7, $2)]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) !plan # [CALCITE-2366] Add support for ANY_VALUE function @@ -2252,7 +2252,7 @@ SELECT any_value(empno) as anyempno from "scott".emp; !ok EnumerableAggregate(group=[{}], ANYEMPNO=[ANY_VALUE($0)]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) !plan # [CALCITE-2366] Add support for ANY_VALUE function @@ -2280,7 +2280,7 @@ SELECT any_value(empno) as anyempno from "scott".emp group by sal; EnumerableCalc(expr#0..1=[{inputs}], ANYEMPNO=[$t1]) EnumerableAggregate(group=[{5}], ANYEMPNO=[ANY_VALUE($0)]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) !plan # [CALCITE-1776, CALCITE-2402] REGR_COUNT @@ -2297,7 +2297,7 @@ from "scott".emp; !ok EnumerableAggregate(group=[{}], REGR_COUNT(COMM, SAL)=[REGR_COUNT($6, $5)], REGR_COUNT(EMPNO, SAL)=[REGR_COUNT($5)]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) !plan # [CALCITE-1776, CALCITE-2402] REGR_SXX, REGR_SXY, REGR_SYY @@ -2396,7 +2396,7 @@ group by deptno; !ok EnumerableAggregate(group=[{7}], EMPNOS=[COLLECT($0) WITHIN GROUP ([0])]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) !plan select deptno, collect(empno) within group (order by empno desc) as empnos @@ -2414,7 +2414,7 @@ group by deptno; !ok EnumerableAggregate(group=[{7}], EMPNOS=[COLLECT($0) WITHIN GROUP ([0 DESC])]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) !plan select deptno, collect(empno) within group (order by empno desc) @@ -2434,7 +2434,7 @@ group by deptno; !ok EnumerableAggregate(group=[{0}], EMPNOS=[COLLECT($1) WITHIN GROUP ([1 DESC]) FILTER $2]) EnumerableCalc(expr#0..7=[{inputs}], expr#8=[7500], expr#9=[>($t0, $t8)], DEPTNO=[$t7], EMPNO=[$t0], $f2=[$t9]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) !plan select deptno, collect(empno) within group (order by empno desc) as empnos1, @@ -2453,7 +2453,7 @@ group by deptno; !ok EnumerableAggregate(group=[{7}], EMPNOS1=[COLLECT($0) WITHIN GROUP ([0 DESC])], EMPNOS2=[COLLECT($0) WITHIN GROUP ([0])]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) !plan # Aggregate WITHIN GROUP with JOIN @@ -2479,9 +2479,9 @@ EnumerableAggregate(group=[{0}], S=[COLLECT($1) WITHIN GROUP 
([1 DESC])], S1=[CO EnumerableCalc(expr#0..3=[{inputs}], expr#4=[1], expr#5=[2000], expr#6=[>($t2, $t5)], expr#7=[IS TRUE($t6)], DEPTNO=[$t0], SAL=[$t2], $f2=[$t4], $f3=[$t7]) EnumerableJoin(condition=[=($0, $3)], joinType=[inner]) EnumerableCalc(expr#0..2=[{inputs}], DEPTNO=[$t0]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5], DEPTNO=[$t7]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) !plan select deptno, collect(empno + 1) within group (order by 1) as empnos @@ -2500,7 +2500,7 @@ group by deptno; !ok EnumerableAggregate(group=[{0}], EMPNOS=[COLLECT($1) WITHIN GROUP ([2])]) EnumerableCalc(expr#0..7=[{inputs}], expr#8=[1], expr#9=[+($t0, $t8)], DEPTNO=[$t7], $f1=[$t9], $f2=[$t8]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) !plan # BIT_AND, BIT_OR aggregate functions diff --git a/plugins/sql-language/src/test/resources/sql/blank.iq b/plugins/sql-language/src/test/resources/sql/blank.iq index 3ebb7c55a8..3b403aacd7 100644 --- a/plugins/sql-language/src/test/resources/sql/blank.iq +++ b/plugins/sql-language/src/test/resources/sql/blank.iq @@ -1,4 +1,4 @@ -# blank.iq - Queries that start from a blank schema and create their own tables +# blank.iq - Queries that start from a blank namespace and create their own tables # # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with @@ -18,7 +18,7 @@ !use blank !set outputformat mysql -create table foo (i int not null, j int); +create entity foo (i int not null, j int); (0 rows modified) !update @@ -54,15 +54,15 @@ select * from foo as f where i in ( !ok # Test case for "Wrong plan for NOT IN correlated queries" -create table table1(i int, j int); +create entity entity1(i int, j int); (0 rows modified) !update -create table table2(i int, j int); +create entity table2(i int, j int); (0 rows modified) !update -insert into table1 values (1, 2), (1, 3); +insert into entity1 values (1, 2), (1, 3); (2 rows modified) !update @@ -72,19 +72,19 @@ insert into table2 values (NULL, 1), (2, 1); !update # Checked on Oracle !set lateDecorrelate true -select i, j from table1 where table1.j NOT IN (select i from table2 where table1.i=table2.j); +select i, j from entity1 where entity1.j NOT IN (select i from table2 where entity1.i=table2.j); EnumerableCalc(expr#0..7=[{inputs}], expr#8=[IS NOT NULL($t7)], expr#9=[<($t4, $t3)], expr#10=[OR($t8, $t9)], expr#11=[IS NOT TRUE($t10)], expr#12=[0], expr#13=[=($t3, $t12)], expr#14=[IS TRUE($t13)], expr#15=[IS NULL($t1)], expr#16=[OR($t11, $t14, $t15)], proj#0..1=[{exprs}], $condition=[$t16]) EnumerableJoin(condition=[AND(=($0, $6), =($1, $5))], joinType=[left]) EnumerableCalc(expr#0..4=[{inputs}], expr#5=[IS NOT NULL($t1)], expr#6=[0], expr#7=[=($t3, $t6)], expr#8=[IS TRUE($t7)], expr#9=[OR($t5, $t8)], proj#0..4=[{exprs}], $condition=[$t9]) EnumerableJoin(condition=[=($0, $2)], joinType=[left]) - EnumerableScan(table=[[BLANK, TABLE1]]) + EnumerableScan(entity=[[BLANK, TABLE1]]) EnumerableAggregate(group=[{1}], c=[COUNT()], ck=[COUNT($0)]) EnumerableCalc(expr#0..1=[{inputs}], expr#2=[IS NOT NULL($t1)], proj#0..1=[{exprs}], $condition=[$t2]) - EnumerableScan(table=[[BLANK, TABLE2]]) + EnumerableScan(entity=[[BLANK, TABLE2]]) EnumerableCalc(expr#0..1=[{inputs}], expr#2=[true], proj#0..2=[{exprs}]) EnumerableAggregate(group=[{0, 1}]) EnumerableCalc(expr#0..1=[{inputs}], expr#2=[IS NOT 
NULL($t1)], expr#3=[IS NOT NULL($t0)], expr#4=[AND($t2, $t3)], proj#0..1=[{exprs}], $condition=[$t4]) - EnumerableScan(table=[[BLANK, TABLE2]]) + EnumerableScan(entity=[[BLANK, TABLE2]]) !plan +---+---+ | I | J | @@ -94,7 +94,7 @@ EnumerableCalc(expr#0..7=[{inputs}], expr#8=[IS NOT NULL($t7)], expr#9=[<($t4, $ !ok -select * from table1 where j not in (select i from table2); +select * from entity1 where j not in (select i from table2); +---+---+ | I | J | +---+---+ @@ -103,7 +103,7 @@ select * from table1 where j not in (select i from table2); !ok -select * from table1 where j not in (select i from table2) or i = 1; +select * from entity1 where j not in (select i from table2) or i = 1; +---+---+ | I | J | +---+---+ @@ -114,7 +114,7 @@ select * from table1 where j not in (select i from table2) or i = 1; !ok -select * from table1 where j not in (select i from table2) or j = 2; +select * from entity1 where j not in (select i from table2) or j = 2; +---+---+ | I | J | +---+---+ @@ -124,7 +124,7 @@ select * from table1 where j not in (select i from table2) or j = 2; !ok -select * from table1 where j not in (select i from table2) or j = 3; +select * from entity1 where j not in (select i from table2) or j = 3; +---+---+ | I | J | +---+---+ diff --git a/plugins/sql-language/src/test/resources/sql/join.iq b/plugins/sql-language/src/test/resources/sql/join.iq index f56b4e18da..861e4401b1 100644 --- a/plugins/sql-language/src/test/resources/sql/join.iq +++ b/plugins/sql-language/src/test/resources/sql/join.iq @@ -125,9 +125,9 @@ from "scott".emp join "scott".dept using (deptno); EnumerableAggregate(group=[{0, 2}]) EnumerableJoin(condition=[=($0, $2)], joinType=[inner]) EnumerableCalc(expr#0..2=[{inputs}], DEPTNO=[$t0]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], DEPTNO=[$t7]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) !plan select distinct dept.deptno @@ -145,9 +145,9 @@ from "scott".emp join "scott".dept using (deptno); EnumerableAggregate(group=[{0}]) EnumerableJoin(condition=[=($0, $2)], joinType=[inner]) EnumerableCalc(expr#0..2=[{inputs}], DEPTNO=[$t0]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], DEPTNO=[$t7]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) !plan # [CALCITE-676] AssertionError in GROUPING SETS query @@ -199,13 +199,13 @@ EnumerableCalc(expr#0..1=[{inputs}], DEPTNO=[$t1], ENAME=[$t0]) EnumerableAggregate(group=[{1, 3}]) EnumerableJoin(condition=[=($2, $4)], joinType=[inner]) EnumerableCalc(expr#0..7=[{inputs}], expr#8=[10], expr#9=[+($t7, $t8)], proj#0..1=[{exprs}], $f8=[$t9]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) EnumerableCalc(expr#0..3=[{inputs}], expr#4=[+($t1, $t3)], expr#5=[CAST($t4):INTEGER], DEPTNO=[$t1], $f16=[$t5]) EnumerableJoin(condition=[=($1, $3)], joinType=[inner]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], DEPTNO=[$t7]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], DEPTNO=[$t7]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) !plan @@ -233,12 +233,12 @@ where e.deptno + 10 = d.deptno * 2; EnumerableCalc(expr#0..4=[{inputs}], DEPTNO=[$t3], DEPTNO0=[$t0]) EnumerableJoin(condition=[=($1, $4)], joinType=[inner]) EnumerableCalc(expr#0..2=[{inputs}], expr#3=[2], 
expr#4=[*($t0, $t3)], DEPTNO=[$t0], $f1=[$t4]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) EnumerableCalc(expr#0..7=[{inputs}], expr#8=[10], expr#9=[+($t7, $t8)], EMPNO=[$t0], DEPTNO=[$t7], $f2=[$t9]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) !plan -### [CALCITE-801] NullPointerException using USING on table alias with column aliases +### [CALCITE-801] NullPointerException using USING on entity alias with column aliases select * from (values (100, 'Bill', 1), (200, 'Eric', 1), @@ -283,8 +283,8 @@ EnumerableCalc(expr#0..10=[{inputs}], expr#11=[COALESCE($t7, $t8)], DEPTNO=[$t11 EnumerableLimit(fetch=[10]) EnumerableJoin(condition=[=($7, $8)], joinType=[left]) EnumerableLimit(fetch=[10]) - EnumerableScan(table=[[scott, EMP]]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, EMP]]) + EnumerableScan(entity=[[scott, DEPT]]) !plan # End join.iq diff --git a/plugins/sql-language/src/test/resources/sql/lateral.iq b/plugins/sql-language/src/test/resources/sql/lateral.iq index 324df153f5..169fe3ab11 100644 --- a/plugins/sql-language/src/test/resources/sql/lateral.iq +++ b/plugins/sql-language/src/test/resources/sql/lateral.iq @@ -33,7 +33,7 @@ Was expecting one of: !error # Bad: LATERAL TABLE -select * from "scott".emp join lateral table "scott".dept using (deptno); +select * from "scott".emp join lateral entity "scott".dept using (deptno); parse failed: Encountered "\"scott\"" at line 1, column 46. Was expecting: "(" ... @@ -66,7 +66,7 @@ select * from lateral (select * from "scott".emp) as e where deptno = 10; !ok # Good: Explicit TABLE in parentheses -select * from (table "scott".emp) where deptno = 10; +select * from (entity "scott".emp) where deptno = 10; +-------+--------+-----------+------+------------+---------+------+--------+ | EMPNO | ENAME | JOB | MGR | HIREDATE | SAL | COMM | DEPTNO | +-------+--------+-----------+------+------------+---------+------+--------+ @@ -79,8 +79,8 @@ select * from (table "scott".emp) where deptno = 10; !ok # Bad: Explicit TABLE -select * from table "scott".emp; -parse failed: Encountered "table \"scott\"" at line 1, column 15. +select * from entity "scott".emp; +parse failed: Encountered "entity \"scott\"" at line 1, column 15. Was expecting one of: ... ... 
@@ -95,7 +95,7 @@ Was expecting one of: !error select * from lateral (select * from "scott".emp) as e -join (table "scott".dept) using (deptno) +join (entity "scott".dept) using (deptno) where e.deptno = 10; +--------+-------+--------+-----------+------+------------+---------+------+------------+----------+ | DEPTNO | EMPNO | ENAME | JOB | MGR | HIREDATE | SAL | COMM | DNAME | LOC | diff --git a/plugins/sql-language/src/test/resources/sql/misc.iq b/plugins/sql-language/src/test/resources/sql/misc.iq index c6e2802f1a..6af4df9380 100644 --- a/plugins/sql-language/src/test/resources/sql/misc.iq +++ b/plugins/sql-language/src/test/resources/sql/misc.iq @@ -18,7 +18,7 @@ !use post !set outputformat mysql -# [CALCITE-356] Allow column references of the form schema.table.column +# [CALCITE-356] Allow column references of the form namespace.entity.column select "hr"."emps"."empid" from "hr"."emps"; +-------+ @@ -33,7 +33,7 @@ from "hr"."emps"; !ok -# [CALCITE-881] Allow schema.table.column references in GROUP BY +# [CALCITE-881] Allow namespace.entity.column references in GROUP BY select "hr"."emps"."empid", count(*) as c from "hr"."emps" group by "hr"."emps"."empid"; @@ -66,7 +66,7 @@ group by "hr"."emps"."empid"; # Case-sensitive errors select empid from "hr"."emps"; -Column 'EMPID' not found in any table; did you mean 'empid'? +Column 'EMPID' not found in any entity; did you mean 'empid'? !error select empid from "hr".emps; @@ -293,9 +293,9 @@ and e."name" <> d."name"; EnumerableCalc(expr#0..4=[{inputs}], expr#5=[CAST($t2):VARCHAR], expr#6=[CAST($t4):VARCHAR], expr#7=[<>($t5, $t6)], empid=[$t0], name=[$t4], name0=[$t2], $condition=[$t7]) EnumerableJoin(condition=[=($1, $3)], joinType=[inner]) EnumerableCalc(expr#0..4=[{inputs}], proj#0..2=[{exprs}]) - EnumerableScan(table=[[hr, emps]]) + EnumerableScan(entity=[[hr, emps]]) EnumerableCalc(expr#0..3=[{inputs}], proj#0..1=[{exprs}]) - EnumerableScan(table=[[hr, depts]]) + EnumerableScan(entity=[[hr, depts]]) !plan # Same query, expressed using WHERE. 
@@ -317,9 +317,9 @@ and e."name" <> d."name"; EnumerableCalc(expr#0..4=[{inputs}], expr#5=[CAST($t2):VARCHAR], expr#6=[CAST($t4):VARCHAR], expr#7=[<>($t5, $t6)], empid=[$t0], name=[$t4], name0=[$t2], $condition=[$t7]) EnumerableJoin(condition=[=($1, $3)], joinType=[inner]) EnumerableCalc(expr#0..4=[{inputs}], proj#0..2=[{exprs}]) - EnumerableScan(table=[[hr, emps]]) + EnumerableScan(entity=[[hr, emps]]) EnumerableCalc(expr#0..3=[{inputs}], proj#0..1=[{exprs}]) - EnumerableScan(table=[[hr, depts]]) + EnumerableScan(entity=[[hr, depts]]) !plan # Un-correlated EXISTS @@ -340,9 +340,9 @@ EnumerableCalc(expr#0..1=[{inputs}], deptno=[$t1]) EnumerableCalc(expr#0=[{inputs}], expr#1=[IS NOT NULL($t0)], $f0=[$t0], $condition=[$t1]) EnumerableAggregate(group=[{}], agg#0=[MIN($0)]) EnumerableCalc(expr#0..4=[{inputs}], expr#5=[true], $f0=[$t5]) - EnumerableScan(table=[[hr, emps]]) + EnumerableScan(entity=[[hr, emps]]) EnumerableCalc(expr#0..3=[{inputs}], deptno=[$t0]) - EnumerableScan(table=[[hr, depts]]) + EnumerableScan(entity=[[hr, depts]]) !plan # Un-correlated NOT EXISTS @@ -358,13 +358,13 @@ where not exists (select 1 from "hr"."emps"); EnumerableCalc(expr#0..1=[{inputs}], expr#2=[IS NOT NULL($t1)], expr#3=[NOT($t2)], deptno=[$t0], $condition=[$t3]) EnumerableJoin(condition=[true], joinType=[left]) EnumerableCalc(expr#0..3=[{inputs}], deptno=[$t0]) - EnumerableScan(table=[[hr, depts]]) + EnumerableScan(entity=[[hr, depts]]) EnumerableAggregate(group=[{}], agg#0=[MIN($0)]) EnumerableCalc(expr#0..4=[{inputs}], expr#5=[true], $f0=[$t5]) - EnumerableScan(table=[[hr, emps]]) + EnumerableScan(entity=[[hr, emps]]) !plan -# Un-correlated EXISTS (table empty) +# Un-correlated EXISTS (entity empty) select "deptno" from "hr"."depts" where exists (select 1 from "hr"."emps" where "empid" < 0); +--------+ @@ -379,12 +379,12 @@ EnumerableCalc(expr#0..1=[{inputs}], deptno=[$t1]) EnumerableCalc(expr#0=[{inputs}], expr#1=[IS NOT NULL($t0)], $f0=[$t0], $condition=[$t1]) EnumerableAggregate(group=[{}], agg#0=[MIN($0)]) EnumerableCalc(expr#0..4=[{inputs}], expr#5=[true], expr#6=[0], expr#7=[<($t0, $t6)], $f0=[$t5], $condition=[$t7]) - EnumerableScan(table=[[hr, emps]]) + EnumerableScan(entity=[[hr, emps]]) EnumerableCalc(expr#0..3=[{inputs}], deptno=[$t0]) - EnumerableScan(table=[[hr, depts]]) + EnumerableScan(entity=[[hr, depts]]) !plan -# Un-correlated NOT EXISTS (table empty) +# Un-correlated NOT EXISTS (entity empty) select "deptno" from "hr"."depts" where not exists (select 1 from "hr"."emps" where "empid" < 0); +--------+ @@ -400,10 +400,10 @@ where not exists (select 1 from "hr"."emps" where "empid" < 0); EnumerableCalc(expr#0..1=[{inputs}], expr#2=[IS NOT NULL($t1)], expr#3=[NOT($t2)], deptno=[$t0], $condition=[$t3]) EnumerableJoin(condition=[true], joinType=[left]) EnumerableCalc(expr#0..3=[{inputs}], deptno=[$t0]) - EnumerableScan(table=[[hr, depts]]) + EnumerableScan(entity=[[hr, depts]]) EnumerableAggregate(group=[{}], agg#0=[MIN($0)]) EnumerableCalc(expr#0..4=[{inputs}], expr#5=[true], expr#6=[0], expr#7=[<($t0, $t6)], $f0=[$t5], $condition=[$t7]) - EnumerableScan(table=[[hr, emps]]) + EnumerableScan(entity=[[hr, emps]]) !plan # EXISTS @@ -421,8 +421,8 @@ where exists ( !ok EnumerableSemiJoin(condition=[=($1, $5)], joinType=[inner]) - EnumerableScan(table=[[hr, emps]]) - EnumerableScan(table=[[hr, depts]]) + EnumerableScan(entity=[[hr, emps]]) + EnumerableScan(entity=[[hr, depts]]) !plan # NOT EXISTS @@ -440,10 +440,10 @@ where not exists ( !ok EnumerableCalc(expr#0..6=[{inputs}], expr#7=[IS NULL($t6)], 
proj#0..4=[{exprs}], $condition=[$t7]) EnumerableJoin(condition=[=($1, $5)], joinType=[left]) - EnumerableScan(table=[[hr, emps]]) + EnumerableScan(entity=[[hr, emps]]) EnumerableAggregate(group=[{0}], agg#0=[MIN($1)]) EnumerableCalc(expr#0..3=[{inputs}], expr#4=[true], deptno=[$t0], $f0=[$t4]) - EnumerableScan(table=[[hr, depts]]) + EnumerableScan(entity=[[hr, depts]]) !plan # NOT EXISTS .. OR NOT EXISTS @@ -468,13 +468,13 @@ EnumerableCalc(expr#0..7=[{inputs}], expr#8=[IS NULL($t5)], expr#9=[IS NULL($t7) EnumerableJoin(condition=[=($0, $6)], joinType=[left]) EnumerableCalc(expr#0..6=[{inputs}], proj#0..4=[{exprs}], $f0=[$t6]) EnumerableJoin(condition=[=($1, $5)], joinType=[left]) - EnumerableScan(table=[[hr, emps]]) + EnumerableScan(entity=[[hr, emps]]) EnumerableAggregate(group=[{0}], agg#0=[MIN($1)]) EnumerableCalc(expr#0..3=[{inputs}], expr#4=[true], deptno=[$t0], $f0=[$t4]) - EnumerableScan(table=[[hr, depts]]) + EnumerableScan(entity=[[hr, depts]]) EnumerableAggregate(group=[{0}], agg#0=[MIN($1)]) EnumerableCalc(expr#0..3=[{inputs}], expr#4=[90], expr#5=[+($t0, $t4)], expr#6=[true], $f4=[$t5], $f0=[$t6], $condition=[$t6]) - EnumerableScan(table=[[hr, depts]]) + EnumerableScan(entity=[[hr, depts]]) !plan # Left join to a relation with one row is recognized as a trivial semi-join @@ -494,7 +494,7 @@ left join (select count(*) from "hr"."depts") on true; !ok EnumerableCalc(expr#0..4=[{inputs}], deptno=[$t1]) - EnumerableScan(table=[[hr, emps]]) + EnumerableScan(entity=[[hr, emps]]) !plan # Filter combined with an OR filter. @@ -527,7 +527,7 @@ where e."deptno" >= 10 and e."name" = 'Sebastian'; !ok -# [CALCITE-393] If no fields are projected from a table, field trimmer should +# [CALCITE-393] If no fields are projected from a entity, field trimmer should # project a dummy expression select 1 from "hr"."emps"; +--------+ @@ -542,10 +542,10 @@ select 1 from "hr"."emps"; !ok EnumerableCalc(expr#0..4=[{inputs}], expr#5=[1], EXPR$0=[$t5]) - EnumerableScan(table=[[hr, emps]]) + EnumerableScan(entity=[[hr, emps]]) !plan -# [CALCITE-393] for table scan under join +# [CALCITE-393] for entity scan under join select count(*) as c from "hr"."emps", "hr"."depts"; +----+ | C | @@ -558,9 +558,9 @@ select count(*) as c from "hr"."emps", "hr"."depts"; EnumerableAggregate(group=[{}], C=[COUNT()]) EnumerableJoin(condition=[true], joinType=[inner]) EnumerableCalc(expr#0..3=[{inputs}], expr#4=[0], DUMMY=[$t4]) - EnumerableScan(table=[[hr, depts]]) + EnumerableScan(entity=[[hr, depts]]) EnumerableCalc(expr#0..4=[{inputs}], expr#5=[0], DUMMY=[$t5]) - EnumerableScan(table=[[hr, emps]]) + EnumerableScan(entity=[[hr, emps]]) !plan # [CALCITE-345] AssertionError in RexToLixTranslator comparing to date literal @@ -640,7 +640,7 @@ from "customer" where period ("birthdate", DATE '1970-02-05') contains DATE '1964-01-01'; EnumerableAggregate(group=[{}], C=[COUNT()]) EnumerableCalc(expr#0..28=[{inputs}], expr#29=[1970-02-05], expr#30=[<=($t16, $t29)], expr#31=[CASE($t30, $t16, $t29)], expr#32=[1964-01-01], expr#33=[<=($t31, $t32)], expr#34=[CASE($t30, $t29, $t16)], expr#35=[>=($t34, $t32)], expr#36=[AND($t33, $t35)], proj#0..28=[{exprs}], $condition=[$t36]) - EnumerableScan(table=[[foodmart2, customer]]) + EnumerableScan(entity=[[foodmart2, customer]]) !plan +------+ | C | @@ -662,10 +662,10 @@ from "sales_fact_1997" as s where c."city" = 'San Francisco'; EnumerableJoin(condition=[=($0, $38)], joinType=[inner]) EnumerableJoin(condition=[=($2, $8)], joinType=[inner]) - EnumerableScan(table=[[foodmart2, 
sales_fact_1997]]) + EnumerableScan(entity=[[foodmart2, sales_fact_1997]]) EnumerableCalc(expr#0..28=[{inputs}], expr#29=['San Francisco':VARCHAR(30)], expr#30=[=($t9, $t29)], proj#0..28=[{exprs}], $condition=[$t30]) - EnumerableScan(table=[[foodmart2, customer]]) - EnumerableScan(table=[[foodmart2, product]]) + EnumerableScan(entity=[[foodmart2, customer]]) + EnumerableScan(entity=[[foodmart2, product]]) !plan # 4-way join whose optimal plan requires bushy join. @@ -687,12 +687,12 @@ EnumerableCalc(expr#0..56=[{inputs}], product_id=[$t20], time_id=[$t21], custome EnumerableJoin(condition=[=($6, $20)], joinType=[inner]) EnumerableJoin(condition=[=($0, $5)], joinType=[inner]) EnumerableCalc(expr#0..4=[{inputs}], expr#5=['Snacks':VARCHAR(30)], expr#6=[=($t3, $t5)], proj#0..4=[{exprs}], $condition=[$t6]) - EnumerableScan(table=[[foodmart2, product_class]]) - EnumerableScan(table=[[foodmart2, product]]) + EnumerableScan(entity=[[foodmart2, product_class]]) + EnumerableScan(entity=[[foodmart2, product]]) EnumerableJoin(condition=[=($2, $8)], joinType=[inner]) - EnumerableScan(table=[[foodmart2, sales_fact_1997]]) + EnumerableScan(entity=[[foodmart2, sales_fact_1997]]) EnumerableCalc(expr#0..28=[{inputs}], expr#29=['San Francisco':VARCHAR(30)], expr#30=[=($t9, $t29)], proj#0..28=[{exprs}], $condition=[$t30]) - EnumerableScan(table=[[foodmart2, customer]]) + EnumerableScan(entity=[[foodmart2, customer]]) !plan # Check that when filters are merged, duplicate conditions are eliminated. @@ -701,7 +701,7 @@ select * from ( where "day" = 1) where "day" = 1; EnumerableCalc(expr#0..1=[{inputs}], expr#2=[1], expr#3=[=($t0, $t2)], proj#0..1=[{exprs}], $condition=[$t3]) - EnumerableScan(table=[[foodmart2, days]]) + EnumerableScan(entity=[[foodmart2, days]]) !plan # [HIVE-5873] Semi-join to count sub-query @@ -1124,7 +1124,7 @@ select * from "scott".emp where hiredate < '1981-01-02'; !ok EnumerableCalc(expr#0..7=[{inputs}], expr#8=['1981-01-02'], expr#9=[CAST($t8):DATE NOT NULL], expr#10=[<($t4, $t9)], proj#0..7=[{exprs}], $condition=[$t10]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) !plan select * from "scott".emp where '1981-01-02' > hiredate; +-------+-------+-------+------+------------+--------+------+--------+ @@ -1225,7 +1225,7 @@ select * from "scott".emp where '7369' between empno and '7876'; !ok -# [CALCITE-546] Allow table, column and field called "*" +# [CALCITE-546] Allow entity, column and field called "*" # See [DRILL-3859], [DRILL-3860]. SELECT * FROM (VALUES (0, 0)) AS T(A, "*"); +---+---+ @@ -1248,7 +1248,7 @@ SELECT a FROM (VALUES (0, 0)) AS T(A, "*"); !ok SELECT b FROM (VALUES (0, 0)) AS T(A, "*"); -Column 'B' not found in any table +Column 'B' not found in any entity !error # See [DRILL-3860]. diff --git a/plugins/sql-language/src/test/resources/sql/schema.iq b/plugins/sql-language/src/test/resources/sql/schema.iq index ef6b3e6109..9cc2238944 100644 --- a/plugins/sql-language/src/test/resources/sql/schema.iq +++ b/plugins/sql-language/src/test/resources/sql/schema.iq @@ -1,4 +1,4 @@ -# schema.iq - DDL on schemas +# namespace.iq - DDL on schemas # # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. 
See the NOTICE file distributed with @@ -18,14 +18,14 @@ !use server !set outputformat mysql -# Create a schema -create schema s; +# Create a namespace +create namespace s; (0 rows modified) !update -# Create a table and a view in the schema -create table s.t (i int); +# Create a entity and a view in the namespace +create entity s.t (i int); (0 rows modified) !update @@ -45,52 +45,52 @@ select count(*) as c from s.v; !ok -# Try to create a schema that already exists -create schema s; +# Try to create a namespace that already exists +create namespace s; Schema 'S' already exists !error -create or replace schema s; +create or replace namespace s; (0 rows modified) !update -#create schema if exists s; +#create namespace if exists s; #Encountered "exists" at line 1, column 18. #!error -create schema if not exists s; +create namespace if not exists s; (0 rows modified) !update # Bad library -create foreign schema fs library 'com.example.BadSchemaFactory'; -Property 'com.example.BadSchemaFactory' not valid for plugin type org.polypheny.db.schema.SchemaFactory +create foreign namespace fs library 'com.example.BadSchemaFactory'; +Property 'com.example.BadSchemaFactory' not valid for plugin type org.polypheny.db.namespace.SchemaFactory !error # Bad type -create foreign schema fs type 'bad'; -Invalid schema type 'bad'; valid values: [MAP, JDBC, CUSTOM] +create foreign namespace fs type 'bad'; +Invalid namespace type 'bad'; valid values: [MAP, JDBC, CUSTOM] !error # Can not specify both type and library -create foreign schema fs +create foreign namespace fs type 'jdbc' library 'org.polypheny.db.languages.test.JdbcTest.MySchemaFactory'; Encountered "library" at line 3, column 3. !error -# Cannot specify type or library with non-foreign schema -create schema fs type 'jdbc'; +# Cannot specify type or library with non-foreign namespace +create namespace fs type 'jdbc'; Encountered "type" at line 1, column 18. !error -create schema fs library 'org.polypheny.db.languages.test.JdbcTest.MySchemaFactory'; +create namespace fs library 'org.polypheny.db.languages.test.JdbcTest.MySchemaFactory'; Encountered "library" at line 1, column 18. !error -create foreign schema fs; +create foreign namespace fs; Encountered "" at line 1, column 24. Was expecting one of: "TYPE" ... @@ -98,8 +98,8 @@ Was expecting one of: "." ... 
!error -# JDBC schema -create foreign schema scott type 'jdbc' options ( +# JDBC namespace +create foreign namespace scott type 'jdbc' options ( "jdbcUrl" 'jdbc:hsqldb:res:scott', "jdbcSchema" 'SCOTT', "jdbcUser" 'SCOTT', @@ -118,8 +118,8 @@ select count(*) as c from scott.dept; !ok -# Drop schema, then make sure that a query can't find it -drop schema if exists s; +# Drop namespace, then make sure that a query can't find it +drop namespace if exists s; (0 rows modified) !update @@ -129,7 +129,7 @@ Object 'T' not found !error # Create again and objects are still gone -create schema s; +create namespace s; select * from s.t; Object 'T' not found @@ -139,30 +139,30 @@ select * from s.v; Object 'V' not found !error -# Try to drop schema that does not exist -drop schema sss; +# Try to drop namespace that does not exist +drop namespace sss; Schema 'SSS' not found !error -drop schema if exists sss; +drop namespace if exists sss; (0 rows modified) !update -drop foreign schema if exists sss; +drop foreign namespace if exists sss; (0 rows modified) !update -# Use 'if exists' to drop a foreign schema that does exist -drop foreign schema if exists scott; +# Use 'if exists' to drop a foreign namespace that does exist +drop foreign namespace if exists scott; (0 rows modified) !update -drop foreign schema if exists scott; +drop foreign namespace if exists scott; (0 rows modified) !update -# End schema.iq +# End namespace.iq diff --git a/plugins/sql-language/src/test/resources/sql/sequence.iq b/plugins/sql-language/src/test/resources/sql/sequence.iq index 163d92cf54..03d9a2aed2 100644 --- a/plugins/sql-language/src/test/resources/sql/sequence.iq +++ b/plugins/sql-language/src/test/resources/sql/sequence.iq @@ -44,7 +44,7 @@ select next value for "my_seq" as c from (values 1, 2); C BIGINT(19) NOT NULL !type -# Qualified with schema name +# Qualified with namespace name select next value for "s"."my_seq" as c from (values 1, 2); C BIGINT(19) NOT NULL !type @@ -53,7 +53,7 @@ select next value for "unknown_seq" as c from (values 1, 2); From line 1, column 23 to line 1, column 35: Table 'unknown_seq' not found !error -# Qualified with bad schema name +# Qualified with bad namespace name select next value for "unknown_schema"."my_seq" as c from (values 1, 2); From line 1, column 23 to line 1, column 47: Table 'unknown_schema.my_seq' not found !error diff --git a/plugins/sql-language/src/test/resources/sql/some.iq b/plugins/sql-language/src/test/resources/sql/some.iq index c111381b4d..a07c279f29 100644 --- a/plugins/sql-language/src/test/resources/sql/some.iq +++ b/plugins/sql-language/src/test/resources/sql/some.iq @@ -111,8 +111,8 @@ from "scott".emp; EnumerableCalc(expr#0..10=[{inputs}], expr#11=[0], expr#12=[=($t1, $t11)], expr#13=[false], expr#14=[<=($t8, $t0)], expr#15=[IS TRUE($t14)], expr#16=[true], expr#17=[>($t1, $t2)], expr#18=[null:NULL], expr#19=[CASE($t12, $t13, $t15, $t16, $t17, $t18, $t14)], expr#20=[NOT($t19)], EMPNO=[$t3], ENAME=[$t4], JOB=[$t5], MGR=[$t6], HIREDATE=[$t7], SAL=[$t8], COMM=[$t9], DEPTNO=[$t10], X=[$t20]) EnumerableJoin(condition=[true], joinType=[inner]) EnumerableAggregate(group=[{}], m=[MAX($6)], c=[COUNT()], d=[COUNT($6)]) - EnumerableScan(table=[[scott, EMP]]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) !plan # NOT SOME; left side NOT NULL, right side nullable; converse of previous query. 
diff --git a/plugins/sql-language/src/test/resources/sql/sort.iq b/plugins/sql-language/src/test/resources/sql/sort.iq index 14890209d5..69d63f243b 100644 --- a/plugins/sql-language/src/test/resources/sql/sort.iq +++ b/plugins/sql-language/src/test/resources/sql/sort.iq @@ -21,7 +21,7 @@ # The ArrayTable "days" is sorted by "day", so plan must not contain sort select * from "days" order by "day"; !verify -EnumerableScan(table=[[foodmart2, days]]) +EnumerableScan(entity=[[foodmart2, days]]) !plan # The ArrayTable "days" is sorted by "day", so the plan does not sort, only applies limit @@ -36,7 +36,7 @@ select * from "days" order by "day" limit 2; !ok EnumerableLimit(fetch=[2]) - EnumerableScan(table=[[foodmart2, days]]) + EnumerableScan(entity=[[foodmart2, days]]) !plan # The ArrayTable "days" is sorted by "day", so the plan must not contain Sort @@ -52,7 +52,7 @@ select * from "days" where "day" between 2 and 4 order by "day"; !ok EnumerableCalc(expr#0..1=[{inputs}], expr#2=[2], expr#3=[>=($t0, $t2)], expr#4=[4], expr#5=[<=($t0, $t4)], expr#6=[AND($t3, $t5)], proj#0..1=[{exprs}], $condition=[$t6]) - EnumerableScan(table=[[foodmart2, days]]) + EnumerableScan(entity=[[foodmart2, days]]) !plan # [CALCITE-970] Default collation of NULL values @@ -135,12 +135,12 @@ order by deptno desc, dname, deptno; !ok EnumerableSort(sort0=[$0], sort1=[$1], dir0=[DESC], dir1=[ASC]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) !plan !use post -# [CALCITE-603] WITH ... ORDER BY cannot find table +# [CALCITE-603] WITH ... ORDER BY cannot find entity with e as (select "empid" as empid from "hr"."emps" where "empid" < 120) select * from e as e1, e as e2 order by e1.empid + e2.empid, e1.empid; +-------+--------+ @@ -181,7 +181,7 @@ select * from "hr"."emps" offset 0; (4 rows) !ok -EnumerableScan(table=[[hr, emps]]) +EnumerableScan(entity=[[hr, emps]]) !plan # [CALCITE-634] Allow ORDER BY aggregate function in SELECT DISTINCT, provided diff --git a/plugins/sql-language/src/test/resources/sql/sub-query.iq b/plugins/sql-language/src/test/resources/sql/sub-query.iq index 9f69976b56..0e3bf7854e 100644 --- a/plugins/sql-language/src/test/resources/sql/sub-query.iq +++ b/plugins/sql-language/src/test/resources/sql/sub-query.iq @@ -350,13 +350,13 @@ EnumerableCalc(expr#0..5=[{inputs}], EMPNO=[$t0]) EnumerableCalc(expr#0..3=[{inputs}], expr#4=[>($t3, $t0)], proj#0..3=[{exprs}], $condition=[$t4]) EnumerableJoin(condition=[true], joinType=[inner]) EnumerableAggregate(group=[{7}]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], JOB=[$t2], DEPTNO=[$t7]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], JOB=[$t2], DEPTNO=[$t7]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) EnumerableCalc(expr#0..2=[{inputs}], DEPTNO=[$t0]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) !plan !} @@ -446,7 +446,7 @@ EnumerableCalc(expr#0..2=[{inputs}], proj#0..1=[{exprs}]) EnumerableValues(tuples=[[{ 1, 2 }]]) EnumerableAggregate(group=[{0}]) EnumerableCalc(expr#0..7=[{inputs}], expr#8=[true], expr#9=[CAST($t7):INTEGER], expr#10=[$cor0], expr#11=[$t10.A], expr#12=[=($t9, $t11)], i=[$t8], $condition=[$t12]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) !plan # Similar query, identical plan @@ -457,7 +457,7 @@ EnumerableCalc(expr#0..2=[{inputs}], 
proj#0..1=[{exprs}]) EnumerableValues(tuples=[[{ 1, 2 }]]) EnumerableAggregate(group=[{0}]) EnumerableCalc(expr#0..7=[{inputs}], expr#8=[true], expr#9=[CAST($t7):INTEGER], expr#10=[$cor0], expr#11=[$t10.A], expr#12=[=($t9, $t11)], i=[$t8], $condition=[$t12]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) !plan # Uncorrelated @@ -466,7 +466,7 @@ select * from t where a in (select deptno from "scott".dept); EnumerableCalc(expr#0..2=[{inputs}], A=[$t1], B=[$t2]) EnumerableMergeJoin(condition=[=($0, $1)], joinType=[inner]) EnumerableCalc(expr#0..2=[{inputs}], DEPTNO=[$t0]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) EnumerableValues(tuples=[[{ 60, 'b' }]]) !plan +---+---+ @@ -601,9 +601,9 @@ where empno IN ( EnumerableCalc(expr#0..4=[{inputs}], SAL=[$t4]) EnumerableJoin(condition=[AND(=($1, $3), =($0, $2))], joinType=[inner]) EnumerableCalc(expr#0..2=[{inputs}], expr#3=[IS NOT NULL($t1)], proj#0..1=[{exprs}], $condition=[$t3]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], JOB=[$t2], SAL=[$t5]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) !plan # As above, but for EXISTS @@ -622,9 +622,9 @@ where exists ( !ok EnumerableSemiJoin(condition=[=($0, $10)], joinType=[inner]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) EnumerableCalc(expr#0..7=[{inputs}], expr#8=[=($t7, $t7)], expr#9=['SMITH':VARCHAR(10)], expr#10=[=($t1, $t9)], expr#11=[AND($t8, $t10)], proj#0..7=[{exprs}], $condition=[$t11]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) !plan # [DRILL-5644] @@ -789,12 +789,12 @@ from "scott".emp; EnumerableCalc(expr#0..3=[{inputs}], expr#4=[null:BOOLEAN], expr#5=[IS NOT NULL($t3)], expr#6=[AND($t4, $t5)], SAL=[$t1], EXPR$1=[$t6]) EnumerableJoin(condition=[true], joinType=[left]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) EnumerableLimit(fetch=[1]) EnumerableSort(sort0=[$0], dir0=[DESC]) EnumerableAggregate(group=[{0}], c=[COUNT()]) EnumerableCalc(expr#0..2=[{inputs}], expr#3=[false], cs=[$t3]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) !plan # Test project literal IN null non-correlated @@ -825,12 +825,12 @@ from "scott".emp; EnumerableCalc(expr#0..3=[{inputs}], expr#4=[false], expr#5=[=($t2, $t4)], expr#6=[IS TRUE($t5)], expr#7=[null:BOOLEAN], expr#8=[IS NOT NULL($t3)], expr#9=[AND($t6, $t7, $t8)], expr#10=[IS NOT NULL($t2)], expr#11=[IS NOT TRUE($t5)], expr#12=[AND($t10, $t8, $t11)], expr#13=[OR($t9, $t12)], SAL=[$t1], EXPR$1=[$t13]) EnumerableJoin(condition=[true], joinType=[left]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) EnumerableLimit(fetch=[1]) EnumerableSort(sort0=[$0], dir0=[DESC]) EnumerableAggregate(group=[{0}], c=[COUNT()]) EnumerableCalc(expr#0..2=[{inputs}], expr#3=[false], expr#4=[123], expr#5=[null:INTEGER], expr#6=[=($t4, $t5)], expr#7=[IS NULL($t5)], expr#8=[OR($t6, $t7)], cs=[$t3], $condition=[$t8]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) !plan # Test project null IN literal non-correlated @@ -861,12 +861,12 @@ from "scott".emp; EnumerableCalc(expr#0..3=[{inputs}], expr#4=[null:BOOLEAN], expr#5=[IS NOT NULL($t3)], expr#6=[AND($t4, $t5)], SAL=[$t1], 
EXPR$1=[$t6]) EnumerableJoin(condition=[true], joinType=[left]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) EnumerableLimit(fetch=[1]) EnumerableSort(sort0=[$0], dir0=[DESC]) EnumerableAggregate(group=[{0}], c=[COUNT()]) EnumerableCalc(expr#0..2=[{inputs}], expr#3=[true], cs=[$t3]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) !plan # Test project null IN required @@ -897,12 +897,12 @@ from "scott".emp; EnumerableCalc(expr#0..3=[{inputs}], expr#4=[null:BOOLEAN], expr#5=[IS NOT NULL($t3)], expr#6=[AND($t4, $t5)], SAL=[$t1], EXPR$1=[$t6]) EnumerableJoin(condition=[true], joinType=[left]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) EnumerableLimit(fetch=[1]) EnumerableSort(sort0=[$0], dir0=[DESC]) EnumerableAggregate(group=[{0}], c=[COUNT()]) EnumerableCalc(expr#0..2=[{inputs}], expr#3=[true], cs=[$t3]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) !plan # Test project null IN nullable @@ -933,12 +933,12 @@ from "scott".emp; EnumerableCalc(expr#0..3=[{inputs}], expr#4=[null:BOOLEAN], expr#5=[IS NOT NULL($t3)], expr#6=[AND($t4, $t5)], SAL=[$t1], EXPR$1=[$t6]) EnumerableJoin(condition=[true], joinType=[left]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) EnumerableLimit(fetch=[1]) EnumerableSort(sort0=[$0], dir0=[DESC]) EnumerableAggregate(group=[{0}], c=[COUNT()]) EnumerableCalc(expr#0..2=[{inputs}], expr#3=[true], cs=[$t3]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) !plan # Test project literal IN required @@ -969,10 +969,10 @@ from "scott".emp; EnumerableCalc(expr#0..2=[{inputs}], expr#3=[IS NOT NULL($t2)], SAL=[$t1], EXPR$1=[$t3]) EnumerableJoin(condition=[true], joinType=[left]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) EnumerableAggregate(group=[{0}]) EnumerableCalc(expr#0..2=[{inputs}], expr#3=[true], expr#4=[10], expr#5=[=($t4, $t0)], cs=[$t3], $condition=[$t5]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) !plan # Test project literal IN nullable @@ -1003,12 +1003,12 @@ from "scott".emp; EnumerableCalc(expr#0..3=[{inputs}], expr#4=[false], expr#5=[=($t2, $t4)], expr#6=[IS TRUE($t5)], expr#7=[null:BOOLEAN], expr#8=[IS NOT NULL($t3)], expr#9=[AND($t6, $t7, $t8)], expr#10=[IS NOT NULL($t2)], expr#11=[IS NOT TRUE($t5)], expr#12=[AND($t10, $t8, $t11)], expr#13=[OR($t9, $t12)], SAL=[$t1], EXPR$1=[$t13]) EnumerableJoin(condition=[true], joinType=[left]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) EnumerableLimit(fetch=[1]) EnumerableSort(sort0=[$0], dir0=[DESC]) EnumerableAggregate(group=[{0}], c=[COUNT()]) EnumerableCalc(expr#0..2=[{inputs}], expr#3=[true], expr#4=[10], expr#5=[CAST($t0):TINYINT], expr#6=[=($t4, $t5)], expr#7=[IS NULL($t5)], expr#8=[OR($t6, $t7)], cs=[$t3], $condition=[$t8]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) !plan # Test project null NOT IN null non-correlated @@ -1039,12 +1039,12 @@ from "scott".emp; EnumerableCalc(expr#0..3=[{inputs}], expr#4=[IS NULL($t3)], expr#5=[false], expr#6=[=($t2, $t5)], expr#7=[null:NULL], 
expr#8=[null:INTEGER], expr#9=[IS NULL($t8)], expr#10=[IS NOT NULL($t2)], expr#11=[true], expr#12=[CASE($t4, $t5, $t6, $t7, $t9, $t7, $t10, $t11, $t5)], expr#13=[NOT($t12)], SAL=[$t1], EXPR$1=[$t13]) EnumerableJoin(condition=[true], joinType=[left]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) EnumerableLimit(fetch=[1]) EnumerableSort(sort0=[$0], dir0=[DESC]) EnumerableAggregate(group=[{0}], c=[COUNT()]) EnumerableCalc(expr#0..2=[{inputs}], expr#3=[false], cs=[$t3]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) !plan # Test project literal NOT IN null non-correlated @@ -1075,12 +1075,12 @@ from "scott".emp; EnumerableCalc(expr#0..3=[{inputs}], expr#4=[IS NULL($t3)], expr#5=[false], expr#6=[=($t2, $t5)], expr#7=[null:NULL], expr#8=[IS NOT NULL($t2)], expr#9=[true], expr#10=[CASE($t4, $t5, $t6, $t7, $t8, $t9, $t5)], expr#11=[NOT($t10)], SAL=[$t1], EXPR$1=[$t11]) EnumerableJoin(condition=[true], joinType=[left]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) EnumerableLimit(fetch=[1]) EnumerableSort(sort0=[$0], dir0=[DESC]) EnumerableAggregate(group=[{0}], c=[COUNT()]) EnumerableCalc(expr#0..2=[{inputs}], expr#3=[false], expr#4=[123], expr#5=[null:INTEGER], expr#6=[=($t4, $t5)], expr#7=[IS NULL($t5)], expr#8=[OR($t6, $t7)], cs=[$t3], $condition=[$t8]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) !plan # Test project null NOT IN literal non-correlated @@ -1111,12 +1111,12 @@ from "scott".emp; EnumerableCalc(expr#0..3=[{inputs}], expr#4=[IS NULL($t3)], expr#5=[false], expr#6=[=($t2, $t5)], expr#7=[null:NULL], expr#8=[null:INTEGER], expr#9=[IS NULL($t8)], expr#10=[IS NOT NULL($t2)], expr#11=[true], expr#12=[CASE($t4, $t5, $t6, $t7, $t9, $t7, $t10, $t11, $t5)], expr#13=[NOT($t12)], SAL=[$t1], EXPR$1=[$t13]) EnumerableJoin(condition=[true], joinType=[left]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) EnumerableLimit(fetch=[1]) EnumerableSort(sort0=[$0], dir0=[DESC]) EnumerableAggregate(group=[{0}], c=[COUNT()]) EnumerableCalc(expr#0..2=[{inputs}], expr#3=[true], cs=[$t3]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) !plan # Test project null NOT IN required @@ -1147,12 +1147,12 @@ from "scott".emp; EnumerableCalc(expr#0..3=[{inputs}], expr#4=[IS NULL($t3)], expr#5=[false], expr#6=[=($t2, $t5)], expr#7=[null:NULL], expr#8=[null:INTEGER], expr#9=[IS NULL($t8)], expr#10=[IS NOT NULL($t2)], expr#11=[true], expr#12=[CASE($t4, $t5, $t6, $t7, $t9, $t7, $t10, $t11, $t5)], expr#13=[NOT($t12)], SAL=[$t1], EXPR$1=[$t13]) EnumerableJoin(condition=[true], joinType=[left]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) EnumerableLimit(fetch=[1]) EnumerableSort(sort0=[$0], dir0=[DESC]) EnumerableAggregate(group=[{0}], c=[COUNT()]) EnumerableCalc(expr#0..2=[{inputs}], expr#3=[true], cs=[$t3]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) !plan # Test project null NOT IN nullable @@ -1183,12 +1183,12 @@ from "scott".emp; EnumerableCalc(expr#0..3=[{inputs}], expr#4=[IS NULL($t3)], expr#5=[false], expr#6=[=($t2, $t5)], expr#7=[null:NULL], expr#8=[null:INTEGER], expr#9=[IS NULL($t8)], expr#10=[IS NOT 
NULL($t2)], expr#11=[true], expr#12=[CASE($t4, $t5, $t6, $t7, $t9, $t7, $t10, $t11, $t5)], expr#13=[NOT($t12)], SAL=[$t1], EXPR$1=[$t13]) EnumerableJoin(condition=[true], joinType=[left]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) EnumerableLimit(fetch=[1]) EnumerableSort(sort0=[$0], dir0=[DESC]) EnumerableAggregate(group=[{0}], c=[COUNT()]) EnumerableCalc(expr#0..2=[{inputs}], expr#3=[true], cs=[$t3]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) !plan # Test project literal NOT IN required @@ -1219,10 +1219,10 @@ from "scott".emp; EnumerableCalc(expr#0..2=[{inputs}], expr#3=[IS NOT NULL($t2)], expr#4=[true], expr#5=[false], expr#6=[CASE($t3, $t4, $t5)], expr#7=[NOT($t6)], SAL=[$t1], EXPR$1=[$t7]) EnumerableJoin(condition=[true], joinType=[left]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) EnumerableAggregate(group=[{0}]) EnumerableCalc(expr#0..2=[{inputs}], expr#3=[true], expr#4=[10], expr#5=[=($t4, $t0)], cs=[$t3], $condition=[$t5]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) !plan # Test project literal NOT IN nullable @@ -1253,12 +1253,12 @@ from "scott".emp; EnumerableCalc(expr#0..3=[{inputs}], expr#4=[IS NULL($t3)], expr#5=[false], expr#6=[=($t2, $t5)], expr#7=[null:NULL], expr#8=[IS NOT NULL($t2)], expr#9=[true], expr#10=[CASE($t4, $t5, $t6, $t7, $t8, $t9, $t5)], expr#11=[NOT($t10)], SAL=[$t1], EXPR$1=[$t11]) EnumerableJoin(condition=[true], joinType=[left]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) EnumerableLimit(fetch=[1]) EnumerableSort(sort0=[$0], dir0=[DESC]) EnumerableAggregate(group=[{0}], c=[COUNT()]) EnumerableCalc(expr#0..2=[{inputs}], expr#3=[true], expr#4=[10], expr#5=[CAST($t0):TINYINT], expr#6=[=($t4, $t5)], expr#7=[IS NULL($t5)], expr#8=[OR($t6, $t7)], cs=[$t3], $condition=[$t8]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) !plan # Test project null IN required is unknown @@ -1289,12 +1289,12 @@ from "scott".emp; EnumerableCalc(expr#0..3=[{inputs}], expr#4=[null:BOOLEAN], expr#5=[IS NOT NULL($t3)], expr#6=[AND($t4, $t5)], expr#7=[IS NULL($t6)], SAL=[$t1], EXPR$1=[$t7]) EnumerableJoin(condition=[true], joinType=[left]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) EnumerableLimit(fetch=[1]) EnumerableSort(sort0=[$0], dir0=[DESC]) EnumerableAggregate(group=[{0}], c=[COUNT()]) EnumerableCalc(expr#0..2=[{inputs}], expr#3=[true], cs=[$t3]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) !plan # Test filter null IN null @@ -1390,9 +1390,9 @@ EnumerableCalc(expr#0..2=[{inputs}], SAL=[$t2]) EnumerableJoin(condition=[true], joinType=[inner]) EnumerableAggregate(group=[{0}]) EnumerableCalc(expr#0..2=[{inputs}], expr#3=[true], expr#4=[10], expr#5=[=($t4, $t0)], cs=[$t3], $condition=[$t5]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) !plan # Test filter literal IN nullable @@ -1423,9 +1423,9 @@ EnumerableCalc(expr#0..2=[{inputs}], SAL=[$t2]) EnumerableJoin(condition=[true], 
joinType=[inner]) EnumerableAggregate(group=[{0}]) EnumerableCalc(expr#0..2=[{inputs}], expr#3=[true], expr#4=[10], expr#5=[CAST($t0):TINYINT], expr#6=[=($t4, $t5)], cs=[$t3], $condition=[$t6]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) !plan # Test filter null NOT IN null non-correlated @@ -1441,12 +1441,12 @@ select sal from "scott".emp EnumerableCalc(expr#0..3=[{inputs}], expr#4=[false], expr#5=[=($t2, $t4)], expr#6=[IS NOT TRUE($t5)], expr#7=[IS NULL($t3)], expr#8=[OR($t6, $t7)], expr#9=[IS TRUE($t5)], expr#10=[OR($t7, $t9)], expr#11=[AND($t8, $t10)], SAL=[$t1], $condition=[$t11]) EnumerableJoin(condition=[true], joinType=[left]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) EnumerableLimit(fetch=[1]) EnumerableSort(sort0=[$0], dir0=[DESC]) EnumerableAggregate(group=[{0}], c=[COUNT()]) EnumerableCalc(expr#0..2=[{inputs}], expr#3=[false], cs=[$t3]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) !plan # Test filter literal NOT IN null non-correlated @@ -1462,12 +1462,12 @@ select sal from "scott".emp EnumerableCalc(expr#0..3=[{inputs}], expr#4=[false], expr#5=[=($t2, $t4)], expr#6=[IS NOT NULL($t2)], expr#7=[OR($t5, $t6)], expr#8=[IS NOT TRUE($t7)], expr#9=[IS NULL($t3)], expr#10=[OR($t8, $t9)], SAL=[$t1], $condition=[$t10]) EnumerableJoin(condition=[true], joinType=[left]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) EnumerableLimit(fetch=[1]) EnumerableSort(sort0=[$0], dir0=[DESC]) EnumerableAggregate(group=[{0}], c=[COUNT()]) EnumerableCalc(expr#0..2=[{inputs}], expr#3=[false], expr#4=[123], expr#5=[null:INTEGER], expr#6=[=($t4, $t5)], expr#7=[IS NULL($t5)], expr#8=[OR($t6, $t7)], cs=[$t3], $condition=[$t8]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) !plan # Test filter null NOT IN literal non-correlated @@ -1483,12 +1483,12 @@ select sal from "scott".emp EnumerableCalc(expr#0..3=[{inputs}], expr#4=[false], expr#5=[=($t2, $t4)], expr#6=[IS NOT TRUE($t5)], expr#7=[IS NULL($t3)], expr#8=[OR($t6, $t7)], expr#9=[IS TRUE($t5)], expr#10=[OR($t7, $t9)], expr#11=[AND($t8, $t10)], SAL=[$t1], $condition=[$t11]) EnumerableJoin(condition=[true], joinType=[left]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) EnumerableLimit(fetch=[1]) EnumerableSort(sort0=[$0], dir0=[DESC]) EnumerableAggregate(group=[{0}], c=[COUNT()]) EnumerableCalc(expr#0..2=[{inputs}], expr#3=[true], cs=[$t3]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) !plan # Test filter null NOT IN required @@ -1504,12 +1504,12 @@ select sal from "scott".emp EnumerableCalc(expr#0..3=[{inputs}], expr#4=[false], expr#5=[=($t2, $t4)], expr#6=[IS NOT TRUE($t5)], expr#7=[IS NULL($t3)], expr#8=[OR($t6, $t7)], expr#9=[IS TRUE($t5)], expr#10=[OR($t7, $t9)], expr#11=[AND($t8, $t10)], SAL=[$t1], $condition=[$t11]) EnumerableJoin(condition=[true], joinType=[left]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) EnumerableLimit(fetch=[1]) EnumerableSort(sort0=[$0], dir0=[DESC]) 
EnumerableAggregate(group=[{0}], c=[COUNT()]) EnumerableCalc(expr#0..2=[{inputs}], expr#3=[true], cs=[$t3]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) !plan # Test filter null NOT IN nullable @@ -1525,12 +1525,12 @@ select sal from "scott".emp EnumerableCalc(expr#0..3=[{inputs}], expr#4=[false], expr#5=[=($t2, $t4)], expr#6=[IS NOT TRUE($t5)], expr#7=[IS NULL($t3)], expr#8=[OR($t6, $t7)], expr#9=[IS TRUE($t5)], expr#10=[OR($t7, $t9)], expr#11=[AND($t8, $t10)], SAL=[$t1], $condition=[$t11]) EnumerableJoin(condition=[true], joinType=[left]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) EnumerableLimit(fetch=[1]) EnumerableSort(sort0=[$0], dir0=[DESC]) EnumerableAggregate(group=[{0}], c=[COUNT()]) EnumerableCalc(expr#0..2=[{inputs}], expr#3=[true], cs=[$t3]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) !plan # Test filter literal NOT IN required @@ -1546,12 +1546,12 @@ select sal from "scott".emp EnumerableCalc(expr#0..3=[{inputs}], expr#4=[false], expr#5=[=($t2, $t4)], expr#6=[IS NOT NULL($t2)], expr#7=[OR($t5, $t6)], expr#8=[IS NOT TRUE($t7)], expr#9=[IS NULL($t3)], expr#10=[OR($t8, $t9)], SAL=[$t1], $condition=[$t10]) EnumerableJoin(condition=[true], joinType=[left]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) EnumerableLimit(fetch=[1]) EnumerableSort(sort0=[$0], dir0=[DESC]) EnumerableAggregate(group=[{0}], c=[COUNT()]) EnumerableCalc(expr#0..2=[{inputs}], expr#3=[true], expr#4=[10], expr#5=[=($t4, $t0)], cs=[$t3], $condition=[$t5]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) !plan # Test filter literal NOT IN nullable @@ -1567,12 +1567,12 @@ select sal from "scott".emp EnumerableCalc(expr#0..3=[{inputs}], expr#4=[false], expr#5=[=($t2, $t4)], expr#6=[IS NOT NULL($t2)], expr#7=[OR($t5, $t6)], expr#8=[IS NOT TRUE($t7)], expr#9=[IS NULL($t3)], expr#10=[OR($t8, $t9)], SAL=[$t1], $condition=[$t10]) EnumerableJoin(condition=[true], joinType=[left]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) EnumerableLimit(fetch=[1]) EnumerableSort(sort0=[$0], dir0=[DESC]) EnumerableAggregate(group=[{0}], c=[COUNT()]) EnumerableCalc(expr#0..2=[{inputs}], expr#3=[true], expr#4=[10], expr#5=[CAST($t0):TINYINT], expr#6=[=($t4, $t5)], expr#7=[IS NULL($t5)], expr#8=[OR($t6, $t7)], cs=[$t3], $condition=[$t8]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) !plan # Test filter null IN required is unknown @@ -1602,12 +1602,12 @@ select sal from "scott".emp EnumerableCalc(expr#0..3=[{inputs}], expr#4=[null:BOOLEAN], expr#5=[IS NOT NULL($t3)], expr#6=[AND($t4, $t5)], expr#7=[IS NULL($t6)], SAL=[$t1], $condition=[$t7]) EnumerableJoin(condition=[true], joinType=[left]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) EnumerableLimit(fetch=[1]) EnumerableSort(sort0=[$0], dir0=[DESC]) EnumerableAggregate(group=[{0}], c=[COUNT()]) EnumerableCalc(expr#0..2=[{inputs}], expr#3=[true], cs=[$t3]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) !plan #------------------------------- @@ -1625,7 +1625,7 @@ select sal from "scott".emp e EnumerableCalc(expr#0..3=[{inputs}], SAL=[$t1]) 
EnumerableCorrelate(correlation=[$cor0], joinType=[inner], requiredColumns=[{}]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5], DEPTNO=[$t7]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) EnumerableValues(tuples=[[]]) !plan @@ -1655,7 +1655,7 @@ select sal from "scott".emp e EnumerableCalc(expr#0..3=[{inputs}], SAL=[$t1]) EnumerableCorrelate(correlation=[$cor0], joinType=[inner], requiredColumns=[{}]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5], DEPTNO=[$t7]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) EnumerableValues(tuples=[[]]) !plan @@ -1672,7 +1672,7 @@ select sal from "scott".emp e EnumerableCalc(expr#0..3=[{inputs}], SAL=[$t1]) EnumerableCorrelate(correlation=[$cor0], joinType=[inner], requiredColumns=[{}]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5], DEPTNO=[$t7]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) EnumerableValues(tuples=[[]]) !plan @@ -1689,7 +1689,7 @@ select sal from "scott".emp e EnumerableCalc(expr#0..3=[{inputs}], SAL=[$t1]) EnumerableCorrelate(correlation=[$cor0], joinType=[inner], requiredColumns=[{}]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5], DEPTNO=[$t7]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) EnumerableValues(tuples=[[]]) !plan @@ -1709,9 +1709,9 @@ select sal from "scott".emp e EnumerableCalc(expr#0..3=[{inputs}], SAL=[$t2]) EnumerableJoin(condition=[=($0, $3)], joinType=[inner]) EnumerableCalc(expr#0..2=[{inputs}], expr#3=[10], expr#4=[=($t3, $t0)], DEPTNO=[$t0], $condition=[$t4]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5], DEPTNO=[$t7]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) !plan # Test filter literal IN nullable correlated @@ -1730,9 +1730,9 @@ select sal from "scott".emp e EnumerableCalc(expr#0..3=[{inputs}], SAL=[$t2]) EnumerableJoin(condition=[=($0, $3)], joinType=[inner]) EnumerableCalc(expr#0..2=[{inputs}], expr#3=[10], expr#4=[=($t3, $t0)], DEPTNO=[$t0], $condition=[$t4]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5], DEPTNO=[$t7]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) !plan # Test filter null NOT IN null correlated @@ -1748,9 +1748,9 @@ select sal from "scott".emp e EnumerableCalc(expr#0..4=[{inputs}], expr#5=[false], expr#6=[=($t4, $t5)], expr#7=[IS NOT TRUE($t6)], expr#8=[IS TRUE($t6)], expr#9=[AND($t7, $t8)], SAL=[$t1], $condition=[$t9]) EnumerableJoin(condition=[=($2, $3)], joinType=[left]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5], DEPTNO=[$t7]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) EnumerableCalc(expr#0..2=[{inputs}], expr#3=[false], DEPTNO=[$t0], $f1=[$t3]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) !plan # Test filter literal NOT IN null correlated @@ -1766,9 +1766,9 @@ select sal from "scott".emp e EnumerableCalc(expr#0..4=[{inputs}], expr#5=[false], expr#6=[=($t4, $t5)], expr#7=[IS NOT NULL($t4)], expr#8=[OR($t6, $t7)], expr#9=[IS NOT TRUE($t8)], SAL=[$t1], $condition=[$t9]) EnumerableJoin(condition=[=($2, $3)], joinType=[left]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5], DEPTNO=[$t7]) - EnumerableScan(table=[[scott, EMP]]) + 
EnumerableScan(entity=[[scott, EMP]]) EnumerableCalc(expr#0..2=[{inputs}], expr#3=[false], DEPTNO=[$t0], $f1=[$t3]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) !plan # Test filter null NOT IN literal correlated @@ -1784,9 +1784,9 @@ select sal from "scott".emp e EnumerableCalc(expr#0..4=[{inputs}], expr#5=[false], expr#6=[=($t4, $t5)], expr#7=[IS NOT TRUE($t6)], expr#8=[IS TRUE($t6)], expr#9=[AND($t7, $t8)], SAL=[$t1], $condition=[$t9]) EnumerableJoin(condition=[=($2, $3)], joinType=[left]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5], DEPTNO=[$t7]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) EnumerableCalc(expr#0..2=[{inputs}], expr#3=[true], DEPTNO=[$t0], $f1=[$t3]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) !plan # Test filter null NOT IN required correlated @@ -1802,9 +1802,9 @@ select sal from "scott".emp e EnumerableCalc(expr#0..4=[{inputs}], expr#5=[false], expr#6=[=($t4, $t5)], expr#7=[IS NOT TRUE($t6)], expr#8=[IS TRUE($t6)], expr#9=[AND($t7, $t8)], SAL=[$t1], $condition=[$t9]) EnumerableJoin(condition=[=($2, $3)], joinType=[left]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5], DEPTNO=[$t7]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) EnumerableCalc(expr#0..2=[{inputs}], expr#3=[true], DEPTNO=[$t0], $f1=[$t3]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) !plan # Test filter null NOT IN nullable correlated @@ -1820,9 +1820,9 @@ select sal from "scott".emp e EnumerableCalc(expr#0..4=[{inputs}], expr#5=[false], expr#6=[=($t4, $t5)], expr#7=[IS NOT TRUE($t6)], expr#8=[IS TRUE($t6)], expr#9=[AND($t7, $t8)], SAL=[$t1], $condition=[$t9]) EnumerableJoin(condition=[=($2, $3)], joinType=[left]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5], DEPTNO=[$t7]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) EnumerableCalc(expr#0..2=[{inputs}], expr#3=[true], DEPTNO=[$t0], $f1=[$t3]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) !plan # Test filter literal NOT IN required correlated @@ -1849,9 +1849,9 @@ select sal from "scott".emp e EnumerableCalc(expr#0..4=[{inputs}], expr#5=[false], expr#6=[=($t4, $t5)], expr#7=[IS NOT NULL($t4)], expr#8=[OR($t6, $t7)], expr#9=[IS NOT TRUE($t8)], SAL=[$t1], $condition=[$t9]) EnumerableJoin(condition=[=($2, $3)], joinType=[left]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5], DEPTNO=[$t7]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) EnumerableCalc(expr#0..2=[{inputs}], expr#3=[true], expr#4=[10], expr#5=[=($t4, $t0)], DEPTNO1=[$t0], $f1=[$t3], $condition=[$t5]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) !plan # Test filter literal NOT IN nullable correlated @@ -1878,9 +1878,9 @@ select sal from "scott".emp e EnumerableCalc(expr#0..4=[{inputs}], expr#5=[false], expr#6=[=($t4, $t5)], expr#7=[IS NOT NULL($t4)], expr#8=[OR($t6, $t7)], expr#9=[IS NOT TRUE($t8)], SAL=[$t1], $condition=[$t9]) EnumerableJoin(condition=[=($2, $3)], joinType=[left]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5], DEPTNO=[$t7]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) EnumerableCalc(expr#0..2=[{inputs}], expr#3=[true], expr#4=[10], expr#5=[=($t4, $t0)], DEPTNO=[$t0], $f1=[$t3], $condition=[$t5]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, 
DEPT]]) !plan # Test filter null IN required is unknown correlated @@ -1910,9 +1910,9 @@ select sal from "scott".emp e EnumerableCalc(expr#0..3=[{inputs}], SAL=[$t1]) EnumerableJoin(condition=[=($2, $3)], joinType=[left]) EnumerableCalc(expr#0..7=[{inputs}], EMPNO=[$t0], SAL=[$t5], DEPTNO=[$t7]) - EnumerableScan(table=[[scott, EMP]]) + EnumerableScan(entity=[[scott, EMP]]) EnumerableCalc(expr#0..2=[{inputs}], DEPTNO=[$t0]) - EnumerableScan(table=[[scott, DEPT]]) + EnumerableScan(entity=[[scott, DEPT]]) !plan diff --git a/plugins/sql-language/src/test/resources/sql/table.iq b/plugins/sql-language/src/test/resources/sql/table.iq index 5dc8919078..61adf23446 100644 --- a/plugins/sql-language/src/test/resources/sql/table.iq +++ b/plugins/sql-language/src/test/resources/sql/table.iq @@ -1,4 +1,4 @@ -# table.iq - Table DDL +# entity.iq - Table DDL # # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with @@ -18,18 +18,18 @@ !use server !set outputformat mysql -# Create a basic table -create table t (i int, j int not null); +# Create a basic entity +create entity t (i int, j int not null); (0 rows modified) !update -create table if not exists t (i int, j int not null, k date); +create entity if not exists t (i int, j int not null, k date); (0 rows modified) !update -# There is no "K" column, because table was not re-created +# There is no "K" column, because entity was not re-created select * from t; I INTEGER(10) J INTEGER(10) NOT NULL @@ -50,13 +50,13 @@ select * from t; !ok -drop table t; +drop entity t; (0 rows modified) !update -# Create a table with a DEFAULT column -create table t (i int, j int default i + 2); +# Create an entity with a DEFAULT column +create entity t (i int, j int default i + 2); (0 rows modified) !update @@ -82,14 +82,14 @@ select * from t; !ok -drop table t; +drop entity t; (0 rows modified) !update -# Create a table with a VIRTUAL column +# Create an entity with a VIRTUAL column -create table t (i int, j int as (i + k + 2) virtual, k int); +create entity t (i int, j int as (i + k + 2) virtual, k int); (0 rows modified) !update @@ -106,7 +106,7 @@ insert into t (i, k) values (1, 3); (1 row modified) !update -EnumerableTableModify(table=[[T]], operation=[INSERT], flattened=[false]) +EnumerableTableModify(entity=[[T]], operation=[INSERT], flattened=[false]) EnumerableValues(tuples=[[{ 1, 3 }]]) !plan @@ -114,7 +114,7 @@ insert into t (k, i) values (5, 2); (1 row modified) !update -EnumerableTableModify(table=[[T]], operation=[INSERT], flattened=[false]) +EnumerableTableModify(entity=[[T]], operation=[INSERT], flattened=[false]) EnumerableCalc(expr#0..1=[{inputs}], I=[$t1], K=[$t0]) EnumerableValues(tuples=[[{ 5, 2 }]]) !plan @@ -130,10 +130,10 @@ select * from t; !ok EnumerableCalc(expr#0..1=[{inputs}], expr#2=[+($t0, $t1)], expr#3=[2], expr#4=[+($t2, $t3)], I=[$t0], J=[$t4], K=[$t1]) - EnumerableScan(table=[[T]]) + EnumerableScan(entity=[[T]]) !plan -drop table if exists t; +drop entity if exists t; (0 rows modified) !update @@ -142,13 +142,13 @@ select * from t; Object 'T' not found !error -drop table t; +drop entity t; Table 'T' not found !error -drop table if exists t; +drop entity if exists t; (0 rows modified) !update -# End table.iq +# End entity.iq diff --git a/plugins/sql-language/src/test/resources/sql/table_as.iq b/plugins/sql-language/src/test/resources/sql/table_as.iq index 68ca9f1773..d83dd1d998 100644 --- a/plugins/sql-language/src/test/resources/sql/table_as.iq +++ 
b/plugins/sql-language/src/test/resources/sql/table_as.iq @@ -18,8 +18,8 @@ !use server !set outputformat mysql -# Create a source table -create table dept (deptno int not null, name varchar(10)); +# Create a source entity +create entity dept (deptno int not null, name varchar(10)); (0 rows modified) !update @@ -31,7 +31,7 @@ values (10, 'Sales'), (20, 'Marketing'), (30, 'Engineering'); !update # Create as select -create table d as +create entity d as select * from dept where deptno > 10; (0 rows modified) @@ -50,13 +50,13 @@ select * from d; !ok # Try to create again - fails -create table d as +create entity d as select * from dept where deptno < 30; Table 'D' already exists !error # Try to create again - fails silently -create table if not exists d as +create entity if not exists d as select * from dept where deptno < 30; (0 rows modified) @@ -75,7 +75,7 @@ select * from d; !ok # Drop -drop table if exists d; +drop entity if exists d; (0 rows modified) !update @@ -85,34 +85,34 @@ select * from d; Object 'D' not found !error -# Drop does nothing because table does not exist -drop table if exists d; +# Drop does nothing because entity does not exist +drop entity if exists d; (0 rows modified) !update -# Create table without either AS or column list - fails -create table d; +# Create entity without either AS or column list - fails +create entity d; At line 1, column 14: Missing column list !error -# Create table without AS or column types - fails -create table d (x, y); +# Create entity without AS or column types - fails +create entity d (x, y); At line 1, column 17: Type required for column 'X' in CREATE TABLE without AS !error -# Create table without AS or column types - fails -create table d (x int, y); +# Create entity without AS or column types - fails +create entity d (x int, y); At line 1, column 24: Type required for column 'Y' in CREATE TABLE without AS !error # Create based on itself - fails -create table d2 as select * from d2; +create entity d2 as select * from d2; Object 'D2' not found !error -# Create table based on UNION -create table d3 as +# Create entity based on UNION +create entity d3 as select deptno as dd from dept where deptno < 15 union all select deptno as ee from dept where deptno > 25; @@ -133,13 +133,13 @@ select * from d3; !ok # Drop -drop table d3; +drop entity d3; (0 rows modified) !update -# Create table based on UNION and ORDER BY -create table d4 as +# Create entity based on UNION and ORDER BY +create entity d4 as select deptno as dd from dept where deptno < 15 union all select deptno as dd from dept where deptno > 25 @@ -161,10 +161,10 @@ select * from d4; !ok # Drop -drop table d4; +drop entity d4; -# Create table based on VALUES -create table d5 as +# Create entity based on VALUES +create entity d5 as values (1, 'a'), (2, 'b'); (0 rows modified) @@ -183,7 +183,7 @@ select * from d5; !ok # Use just aliases -create table d6 (x, y) as +create entity d6 (x, y) as select * from dept where deptno < 15; (0 rows modified) @@ -201,7 +201,7 @@ select * from d6; !ok # Use a mixture of aliases and column declarations -create table d7 (x int, y) as +create entity d7 (x int, y) as select * from dept where deptno < 15; (0 rows modified) @@ -219,19 +219,19 @@ select * from d7; !ok # Too many columns -create table d8 (x, y, z) as +create entity d8 (x, y, z) as select * from dept where deptno < 15; Number of columns must match number of query columns !error # Too few columns -create table d9 (x) as +create entity d9 (x) as select * from dept where deptno < 15; Number of 
columns must match number of query columns !error # Specify column names and types -create table d10 (x int, y varchar(20)) as +create entity d10 (x int, y varchar(20)) as select * from dept where deptno < 15; (0 rows modified) diff --git a/plugins/sql-language/src/test/resources/sql/type.iq b/plugins/sql-language/src/test/resources/sql/type.iq index dada3b0506..a985b4282f 100644 --- a/plugins/sql-language/src/test/resources/sql/type.iq +++ b/plugins/sql-language/src/test/resources/sql/type.iq @@ -23,8 +23,8 @@ create type myint1 as int; !update -# Create a basic table -create table t (i myint1 not null, j int not null); +# Create a basic entity +create entity t (i myint1 not null, j int not null); (0 rows modified) !update @@ -50,7 +50,7 @@ select * from t; !ok -# Create a table with complex structure type +# Create an entity with complex structure type # This is to test struct type inference create type mytype1 as (ii int not null); !update -# Create a complex table -create table v (i int not null, j mytype1 not null); +# Create a complex entity +create entity v (i int not null, j mytype1 not null); (0 rows modified) !update @@ -69,8 +69,8 @@ MYINT INTEGER(10) NOT NULL MYSTRUCT STRUCT NOT NULL !type -drop table t; -drop table v; +drop entity t; +drop entity v; (0 rows modified) !update diff --git a/plugins/sql-language/src/test/resources/sql/view.iq b/plugins/sql-language/src/test/resources/sql/view.iq index e1aeff55a9..f2e04e8655 100644 --- a/plugins/sql-language/src/test/resources/sql/view.iq +++ b/plugins/sql-language/src/test/resources/sql/view.iq @@ -91,7 +91,7 @@ select * from v; create or replace view v (x, y, z) as select a, a + 5 as b from (values 1, 2) as t(a); -List of column aliases must have same degree as table; table has 2 columns ('A', 'B'), whereas alias list has 3 columns +List of column aliases must have same degree as entity; entity has 2 columns ('A', 'B'), whereas alias list has 3 columns !error # Column names not unique @@ -119,8 +119,8 @@ select * from v; !ok -# View based on table -create table t (i int); +# View based on entity +create entity t (i int); (0 rows modified) !update diff --git a/webui/src/main/java/org/polypheny/db/webui/QueryPlanBuilder.java b/webui/src/main/java/org/polypheny/db/webui/QueryPlanBuilder.java index e432480337..b9388b16ee 100644 --- a/webui/src/main/java/org/polypheny/db/webui/QueryPlanBuilder.java +++ b/webui/src/main/java/org/polypheny/db/webui/QueryPlanBuilder.java @@ -41,26 +41,6 @@ private QueryPlanBuilder() { private static AlgBuilder createRelBuilder( final Statement statement ) { - /*final SchemaPlus rootSchema = transaction.getSchema().plus(); - FrameworkConfig config = Frameworks.newConfigBuilder() - .parserConfig( SqlParserConfig.DEFAULT ) - .defaultSchema( rootSchema.getSubSchema( transaction.getDefaultSchema().name ) ) - .traitDefs( (List) null ) - .programs( Programs.heuristicJoinOrder( Programs.RULE_SET, true, 2 ) ) - .prepareContext( new ContextImpl( - PolyphenyDbSchema.from( rootSchema ), - new SlimDataContext() { - @Override - public JavaTypeFactory getTypeFactory() { - return new JavaTypeFactoryImpl(); - } - }, - "", - 0, - 0, - transaction ) ).build(); - return AlgBuilder.create( config ); - */ return AlgBuilder.create( statement ); } From 8c434be7554f03545532e342581ce4cc8347fdbc Mon Sep 17 00:00:00 2001 From: datomo Date: Tue, 21 Feb 2023 23:53:25 +0100 Subject: [PATCH 016/436] started removing names --- .../adapter/java/AbstractQueryableEntity.java | 2 +-
.../db/adapter/java/ReflectiveSchema.java | 2 +- .../org/polypheny/db/algebra/core/Modify.java | 4 +- .../org/polypheny/db/algebra/core/Scan.java | 6 +- .../common/LogicalConstraintEnforcer.java | 16 ----- .../logical/common/LogicalStreamer.java | 2 +- .../db/algebra/metadata/AlgColumnOrigin.java | 4 +- .../db/algebra/mutable/MutableScan.java | 2 +- .../algebra/mutable/MutableTableModify.java | 6 +- .../algebra/rules/LoptOptimizeJoinRule.java | 4 +- .../db/algebra/stream/StreamRules.java | 8 +-- .../db/catalog/entity/CatalogCollection.java | 2 +- .../db/catalog/entity/CatalogEntity.java | 4 +- .../catalog/entity/CatalogGraphDatabase.java | 4 +- .../db/catalog/entity/CatalogNamespace.java | 33 +++++++++++ .../db/catalog/entity/CatalogSchema.java | 5 +- .../db/catalog/entity/CatalogTable.java | 4 +- .../polypheny/db/interpreter/Bindables.java | 4 +- .../polypheny/db/interpreter/ScanNode.java | 12 ++-- .../db/plan/AlgOptAbstractEntity.java | 7 --- .../org/polypheny/db/plan/AlgOptEntity.java | 7 --- .../org/polypheny/db/plan/AlgOptUtil.java | 7 --- .../db/prepare/AlgOptEntityImpl.java | 21 +++---- .../processing/LogicalAlgAnalyzeShuttle.java | 10 ++-- .../polypheny/db/rex/RexTableInputRef.java | 7 ++- .../db/routing/LogicalQueryInformation.java | 2 +- .../java/org/polypheny/db/schema/Entity.java | 2 +- .../polypheny/db/schema/QueryableEntity.java | 2 +- .../org/polypheny/db/tools/AlgBuilder.java | 5 +- .../db/catalog/MockCatalogReader.java | 2 +- .../db/processing/AbstractQueryProcessor.java | 37 ++---------- .../processing/ConstraintEnforceAttacher.java | 27 +++------ .../shuttles/LogicalQueryInformationImpl.java | 6 +- .../db/routing/routers/BaseRouter.java | 3 +- .../events/analyzer/DmlEventAnalyzer.java | 2 +- .../events/analyzer/QueryEventAnalyzer.java | 2 +- .../events/metrics/DmlDataPoint.java | 2 +- .../events/metrics/QueryDataPointImpl.java | 2 +- .../db/adapter/csv/CsvTranslatableTable.java | 2 +- .../db/adapter/druid/DruidQuery.java | 4 +- .../polypheny/db/adapter/jdbc/JdbcEntity.java | 6 +- .../jdbc/rel2sql/AlgToSqlConverter.java | 59 +++++++++++++++---- .../languages/mql2alg/MqlToAlgConverter.java | 4 +- .../org/polypheny/db/adapter/pig/PigAlg.java | 3 +- .../org/polypheny/db/tools/PigAlgBuilder.java | 7 +-- .../db/sql/sql2alg/SqlToAlgConverter.java | 2 +- .../org/polypheny/db/sql/FrameworksTest.java | 2 +- .../db/sql/language/SqlToAlgTestBase.java | 35 +++++------ 48 files changed, 186 insertions(+), 215 deletions(-) create mode 100644 core/src/main/java/org/polypheny/db/catalog/entity/CatalogNamespace.java diff --git a/core/src/main/java/org/polypheny/db/adapter/java/AbstractQueryableEntity.java b/core/src/main/java/org/polypheny/db/adapter/java/AbstractQueryableEntity.java index 2c038a1e1c..87e3d5f664 100644 --- a/core/src/main/java/org/polypheny/db/adapter/java/AbstractQueryableEntity.java +++ b/core/src/main/java/org/polypheny/db/adapter/java/AbstractQueryableEntity.java @@ -64,7 +64,7 @@ public Type getElementType() { @Override - public Expression getExpression( SchemaPlus schema, String tableName, Class clazz ) { + public Expression getExpression( SchemaPlus schema, String tableName, Class clazz ) { return Schemas.tableExpression( schema, elementType, tableName, clazz ); } } diff --git a/core/src/main/java/org/polypheny/db/adapter/java/ReflectiveSchema.java b/core/src/main/java/org/polypheny/db/adapter/java/ReflectiveSchema.java index 1cbecbe8e3..3d0ea71ce0 100644 --- a/core/src/main/java/org/polypheny/db/adapter/java/ReflectiveSchema.java +++ 
b/core/src/main/java/org/polypheny/db/adapter/java/ReflectiveSchema.java @@ -362,7 +362,7 @@ public Statistic getStatistic() { @Override - public Expression getExpression( SchemaPlus schema, String tableName, Class clazz ) { + public Expression getExpression( SchemaPlus schema, String tableName, Class clazz ) { return Expressions.field( schema.unwrap( ReflectiveSchema.class ).getTargetExpression( schema.getParentSchema(), schema.getName() ), field ); } diff --git a/core/src/main/java/org/polypheny/db/algebra/core/Modify.java b/core/src/main/java/org/polypheny/db/algebra/core/Modify.java index 5f1b3679b1..c880f1a7e9 100644 --- a/core/src/main/java/org/polypheny/db/algebra/core/Modify.java +++ b/core/src/main/java/org/polypheny/db/algebra/core/Modify.java @@ -210,7 +210,7 @@ public AlgDataType getExpectedInputRowType( int ordinalInParent ) { @Override public AlgWriter explainTerms( AlgWriter pw ) { return super.explainTerms( pw ) - .item( "table", table.getQualifiedName() ) + .item( "table", table.getCatalogEntity().id ) .item( "operation", getOperation() ) .itemIf( "updateColumnList", updateColumnList, updateColumnList != null ) .itemIf( "sourceExpressionList", sourceExpressionList, sourceExpressionList != null ) @@ -229,7 +229,7 @@ public AlgOptCost computeSelfCost( AlgOptPlanner planner, AlgMetadataQuery mq ) @Override public String algCompareString() { return this.getClass().getSimpleName() + "$" + - String.join( ".", table.getQualifiedName() ) + "$" + + "." + table.getCatalogEntity().id + "$" + (getInputs() != null ? getInputs().stream().map( AlgNode::algCompareString ).collect( Collectors.joining( "$" ) ) + "$" : "") + getOperation().name() + "$" + (getUpdateColumnList() != null ? String.join( "$", getUpdateColumnList() ) + "$" : "") + diff --git a/core/src/main/java/org/polypheny/db/algebra/core/Scan.java b/core/src/main/java/org/polypheny/db/algebra/core/Scan.java index d40c1d36ca..ee33cc8a1a 100644 --- a/core/src/main/java/org/polypheny/db/algebra/core/Scan.java +++ b/core/src/main/java/org/polypheny/db/algebra/core/Scan.java @@ -47,8 +47,8 @@ import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgOptCost; -import org.polypheny.db.plan.AlgOptPlanner; import org.polypheny.db.plan.AlgOptEntity; +import org.polypheny.db.plan.AlgOptPlanner; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.rex.RexBuilder; import org.polypheny.db.rex.RexNode; @@ -130,7 +130,7 @@ public ImmutableIntList identity() { @Override public AlgWriter explainTerms( AlgWriter pw ) { - return super.explainTerms( pw ).item( "table", table.getQualifiedName() ); + return super.explainTerms( pw ).item( "table", table.getCatalogEntity().id ); } @@ -183,7 +183,7 @@ public AlgNode accept( AlgShuttle shuttle ) { @Override public String algCompareString() { return this.getClass().getSimpleName() + "$" + - String.join( ".", table.getQualifiedName() ) + "&"; + table.getCatalogEntity().id + "&"; } } diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalConstraintEnforcer.java b/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalConstraintEnforcer.java index 4255cbd5c0..3595fa6c6e 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalConstraintEnforcer.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalConstraintEnforcer.java @@ -43,7 +43,6 @@ import org.polypheny.db.catalog.entity.CatalogForeignKey; import 
org.polypheny.db.catalog.entity.CatalogKey.EnforcementTime; import org.polypheny.db.catalog.entity.CatalogPrimaryKey; -import org.polypheny.db.catalog.entity.CatalogSchema; import org.polypheny.db.catalog.entity.CatalogTable; import org.polypheny.db.config.RuntimeConfig; import org.polypheny.db.languages.OperatorRegistry; @@ -380,21 +379,6 @@ public static AlgNode create( AlgNode node, Statement statement ) { } - public static String getEntityName( Modify root, CatalogSchema schema ) { - String tableName; - if ( root.getTable().getQualifiedName().size() == 1 ) { // tableName - tableName = root.getTable().getQualifiedName().get( 0 ); - } else if ( root.getTable().getQualifiedName().size() == 2 ) { // schemaName.tableName - if ( !schema.name.equalsIgnoreCase( root.getTable().getQualifiedName().get( 0 ) ) ) { - throw new RuntimeException( "Schema name does not match expected schema name: " + root.getTable().getQualifiedName().get( 0 ) ); - } - tableName = root.getTable().getQualifiedName().get( 1 ); - } else { - throw new RuntimeException( "Invalid table name: " + root.getTable().getQualifiedName() ); - } - return tableName; - } - @Override public AlgNode copy( AlgTraitSet traitSet, List inputs ) { diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalStreamer.java b/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalStreamer.java index 1bb778add7..de2e661f5e 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalStreamer.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalStreamer.java @@ -106,7 +106,7 @@ private static LogicalStreamer getLogicalStreamer( Modify modify, AlgBuilder alg if ( !modify.isInsert() ) { // get collection, which is modified - algBuilder.scan( modify.getTable().getQualifiedName() ); + algBuilder.scan( modify.getTable() ); // at the moment no data model is able to conditionally insert attachFilter( modify, algBuilder, rexBuilder ); } else { diff --git a/core/src/main/java/org/polypheny/db/algebra/metadata/AlgColumnOrigin.java b/core/src/main/java/org/polypheny/db/algebra/metadata/AlgColumnOrigin.java index 35ef9b5a9f..aa22c99fb0 100644 --- a/core/src/main/java/org/polypheny/db/algebra/metadata/AlgColumnOrigin.java +++ b/core/src/main/java/org/polypheny/db/algebra/metadata/AlgColumnOrigin.java @@ -88,7 +88,7 @@ public boolean equals( Object obj ) { return false; } AlgColumnOrigin other = (AlgColumnOrigin) obj; - return originTable.getQualifiedName().equals( other.originTable.getQualifiedName() ) + return originTable.getCatalogEntity().id == other.originTable.getCatalogEntity().id && (iOriginColumn == other.iOriginColumn) && (isDerived == other.isDerived); } @@ -96,7 +96,7 @@ public boolean equals( Object obj ) { // override Object public int hashCode() { - return originTable.getQualifiedName().hashCode() + iOriginColumn + (isDerived ? 313 : 0); + return originTable.getCatalogEntity().hashCode() + iOriginColumn + (isDerived ? 
313 : 0); } } diff --git a/core/src/main/java/org/polypheny/db/algebra/mutable/MutableScan.java b/core/src/main/java/org/polypheny/db/algebra/mutable/MutableScan.java index 6179f0e6a2..a226a45420 100644 --- a/core/src/main/java/org/polypheny/db/algebra/mutable/MutableScan.java +++ b/core/src/main/java/org/polypheny/db/algebra/mutable/MutableScan.java @@ -73,7 +73,7 @@ public int hashCode() { @Override public StringBuilder digest( StringBuilder buf ) { - return buf.append( "Scan(table: " ).append( alg.getTable().getQualifiedName() ).append( ")" ); + return buf.append( "Scan(table: " ).append( alg.getTable().getCatalogEntity().name ).append( ")" ); } diff --git a/core/src/main/java/org/polypheny/db/algebra/mutable/MutableTableModify.java b/core/src/main/java/org/polypheny/db/algebra/mutable/MutableTableModify.java index 28ff74558b..9babdd7616 100644 --- a/core/src/main/java/org/polypheny/db/algebra/mutable/MutableTableModify.java +++ b/core/src/main/java/org/polypheny/db/algebra/mutable/MutableTableModify.java @@ -89,7 +89,7 @@ public static MutableTableModify of( AlgDataType rowType, MutableAlg input, AlgO public boolean equals( Object obj ) { return obj == this || obj instanceof MutableTableModify - && table.getQualifiedName().equals( ((MutableTableModify) obj).table.getQualifiedName() ) + && table.getCatalogEntity().id == ((MutableTableModify) obj).table.getCatalogEntity().id && operation == ((MutableTableModify) obj).operation && Objects.equals( updateColumnList, ((MutableTableModify) obj).updateColumnList ) && PAIRWISE_STRING_EQUIVALENCE.equivalent( sourceExpressionList, ((MutableTableModify) obj).sourceExpressionList ) @@ -102,7 +102,7 @@ public boolean equals( Object obj ) { public int hashCode() { return Objects.hash( input, - table.getQualifiedName(), + table.getCatalogEntity().id, operation, updateColumnList, PAIRWISE_STRING_EQUIVALENCE.hash( sourceExpressionList ), @@ -112,7 +112,7 @@ public int hashCode() { @Override public StringBuilder digest( StringBuilder buf ) { - buf.append( "Modify(table: " ).append( table.getQualifiedName() ).append( ", operation: " ).append( operation ); + buf.append( "Modify(table: " ).append( table.getCatalogEntity().name ).append( ", operation: " ).append( operation ); if ( updateColumnList != null ) { buf.append( ", updateColumnList: " ).append( updateColumnList ); } diff --git a/core/src/main/java/org/polypheny/db/algebra/rules/LoptOptimizeJoinRule.java b/core/src/main/java/org/polypheny/db/algebra/rules/LoptOptimizeJoinRule.java index c7533f0b6b..8df4138fb7 100644 --- a/core/src/main/java/org/polypheny/db/algebra/rules/LoptOptimizeJoinRule.java +++ b/core/src/main/java/org/polypheny/db/algebra/rules/LoptOptimizeJoinRule.java @@ -267,7 +267,7 @@ private void findRemovableSelfJoins( AlgMetadataQuery mq, LoptMultiJoin multiJoi for ( int j = i + 1; j < factors.length; j++ ) { int leftFactor = factors[i]; int rightFactor = factors[j]; - if ( simpleFactors.get( leftFactor ).getQualifiedName().equals( simpleFactors.get( rightFactor ).getQualifiedName() ) ) { + if ( simpleFactors.get( leftFactor ).getCatalogEntity().id == simpleFactors.get( rightFactor ).getCatalogEntity().id ) { selfJoinPairs.put( leftFactor, rightFactor ); repeatedTables.add( simpleFactors.get( leftFactor ) ); break; @@ -1517,7 +1517,7 @@ public static boolean isRemovableSelfJoin( Join joinRel ) { if ( rightTable == null ) { return false; } - if ( !leftTable.getQualifiedName().equals( rightTable.getQualifiedName() ) ) { + if ( leftTable.getCatalogEntity().id != 
rightTable.getCatalogEntity().id ) { return false; } diff --git a/core/src/main/java/org/polypheny/db/algebra/stream/StreamRules.java b/core/src/main/java/org/polypheny/db/algebra/stream/StreamRules.java index 4af15d5d8f..68c60e6860 100644 --- a/core/src/main/java/org/polypheny/db/algebra/stream/StreamRules.java +++ b/core/src/main/java/org/polypheny/db/algebra/stream/StreamRules.java @@ -54,7 +54,6 @@ import org.polypheny.db.algebra.logical.relational.LogicalRelScan; import org.polypheny.db.algebra.logical.relational.LogicalSort; import org.polypheny.db.algebra.logical.relational.LogicalUnion; -import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogTable; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgOptEntity; @@ -273,15 +272,12 @@ public void onMatch( AlgOptRuleCall call ) { final StreamableEntity streamableTable = algOptEntity.unwrap( StreamableEntity.class ); if ( streamableTable != null ) { final Entity entity1 = streamableTable.stream(); - final CatalogTable catalogTable = Catalog.getInstance().getTable( entity1.getId() ); + final CatalogTable catalogTable = algOptEntity.getCatalogEntity().unwrap( CatalogTable.class ); final AlgOptEntity algOptEntity2 = AlgOptEntityImpl.create( algOptEntity.getRelOptSchema(), algOptEntity.getRowType(), entity1, - catalogTable, - ImmutableList.builder() - .addAll( algOptEntity.getQualifiedName() ) - .add( "(STREAM)" ).build() ); + catalogTable ); final LogicalRelScan newScan = LogicalRelScan.create( cluster, algOptEntity2 ); call.transformTo( newScan ); } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogCollection.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogCollection.java index a42ebd6bbc..3fe27fc346 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogCollection.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogCollection.java @@ -45,7 +45,7 @@ public class CatalogCollection extends CatalogEntity implements CatalogObject { public CatalogCollection( long databaseId, long namespaceId, long id, String name, @NonNull Collection placements, EntityType type, String physicalName ) { - super( id, EntityType.ENTITY, NamespaceType.DOCUMENT ); + super( id, name, EntityType.ENTITY, NamespaceType.DOCUMENT ); this.id = id; this.databaseId = databaseId; this.namespaceId = namespaceId; diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogEntity.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogEntity.java index 4f50873c53..f075f7c178 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogEntity.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogEntity.java @@ -26,10 +26,12 @@ public abstract class CatalogEntity implements Wrapper, Serializable { public final long id; public final EntityType entityType; public final NamespaceType namespaceType; + public final String name; - protected CatalogEntity( long id, EntityType type, NamespaceType namespaceType ) { + protected CatalogEntity( long id, String name, EntityType type, NamespaceType namespaceType ) { this.id = id; + this.name = name; this.entityType = type; this.namespaceType = namespaceType; } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogGraphDatabase.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogGraphDatabase.java index 4482615c0c..cd39d9087f 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogGraphDatabase.java +++ 
b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogGraphDatabase.java @@ -29,7 +29,7 @@ import org.polypheny.db.catalog.Catalog.NamespaceType; -@EqualsAndHashCode +@EqualsAndHashCode(callSuper = false) public class CatalogGraphDatabase extends CatalogEntity implements CatalogObject, Comparable { private static final long serialVersionUID = 7343856827901459672L; @@ -45,7 +45,7 @@ public class CatalogGraphDatabase extends CatalogEntity implements CatalogObject public CatalogGraphDatabase( long databaseId, long id, String name, int ownerId, boolean modifiable, @NonNull Collection placements, boolean caseSensitive ) { - super( id, EntityType.ENTITY, NamespaceType.GRAPH ); + super( id, name, EntityType.ENTITY, NamespaceType.GRAPH ); this.id = id; this.name = name; this.ownerId = ownerId; diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogNamespace.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogNamespace.java new file mode 100644 index 0000000000..29a313a10d --- /dev/null +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogNamespace.java @@ -0,0 +1,33 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.catalog.entity; + +import java.io.Serializable; +import org.polypheny.db.catalog.Catalog.NamespaceType; + +public abstract class CatalogNamespace implements CatalogObject, Serializable { + + public final long id; + public final NamespaceType namespaceType; + + + public CatalogNamespace( long id, NamespaceType type ) { + this.id = id; + this.namespaceType = type; + } + +} diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogSchema.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogSchema.java index 05c21b3413..35b1a431ca 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogSchema.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogSchema.java @@ -27,8 +27,8 @@ import org.polypheny.db.catalog.Catalog.NamespaceType; -@EqualsAndHashCode -public final class CatalogSchema implements CatalogObject, Comparable { +@EqualsAndHashCode(callSuper = false) +public final class CatalogSchema extends CatalogNamespace implements CatalogObject, Comparable { private static final long serialVersionUID = 3090632164988970558L; @@ -53,6 +53,7 @@ public CatalogSchema( @NonNull final String ownerName, @NonNull final Catalog.NamespaceType namespaceType, boolean caseSensitive ) { + super( id, namespaceType ); this.id = id; this.name = name; this.databaseId = databaseId; diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogTable.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogTable.java index 219e77e427..87206a0a18 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogTable.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogTable.java @@ -68,7 +68,7 @@ public CatalogTable( @NonNull final ImmutableList dataPlacements, boolean modifiable, 
PartitionProperty partitionProperty ) { - super( id, type, NamespaceType.RELATIONAL ); + super( id, name, type, NamespaceType.RELATIONAL ); this.id = id; this.name = name; this.fieldIds = fieldIds; @@ -103,7 +103,7 @@ public CatalogTable( boolean modifiable, PartitionProperty partitionProperty, ImmutableList connectedViews ) { - super( id, type, NamespaceType.RELATIONAL ); + super( id, name, type, NamespaceType.RELATIONAL ); this.id = id; this.name = name; this.fieldIds = fieldIds; diff --git a/core/src/main/java/org/polypheny/db/interpreter/Bindables.java b/core/src/main/java/org/polypheny/db/interpreter/Bindables.java index 0e4753f2a8..d276c40fdb 100644 --- a/core/src/main/java/org/polypheny/db/interpreter/Bindables.java +++ b/core/src/main/java/org/polypheny/db/interpreter/Bindables.java @@ -215,7 +215,7 @@ public static BindableScan create( AlgOptCluster cluster, AlgOptEntity algOptEnt final Entity entity = algOptEntity.unwrap( Entity.class ); final AlgTraitSet traitSet = cluster.traitSetOf( BindableConvention.INSTANCE ) - .replace( entity.getSchemaType().getModelTrait() ) + .replace( entity.getNamespaceType().getModelTrait() ) .replaceIfs( AlgCollationTraitDef.INSTANCE, () -> { if ( entity != null ) { return entity.getStatistic().getCollations(); @@ -268,7 +268,7 @@ public AlgOptCost computeSelfCost( AlgOptPlanner planner, AlgMetadataQuery mq ) @Override public String algCompareString() { return "BindableScan$" + - String.join( ".", table.getQualifiedName() ) + + "." + table.getCatalogEntity().id + (filters != null ? filters.stream().map( RexNode::hashCode ).map( Objects::toString ).collect( Collectors.joining( "$" ) ) : "") + "$" + (projects != null ? projects.toString() : "") + "&"; } diff --git a/core/src/main/java/org/polypheny/db/interpreter/ScanNode.java b/core/src/main/java/org/polypheny/db/interpreter/ScanNode.java index 7d27e209b7..316c363469 100644 --- a/core/src/main/java/org/polypheny/db/interpreter/ScanNode.java +++ b/core/src/main/java/org/polypheny/db/interpreter/ScanNode.java @@ -52,6 +52,7 @@ import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; import org.polypheny.db.algebra.type.AlgDataTypeField; +import org.polypheny.db.catalog.entity.CatalogTable; import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptUtil; import org.polypheny.db.rex.RexNode; @@ -61,11 +62,9 @@ import org.polypheny.db.schema.ProjectableFilterableEntity; import org.polypheny.db.schema.QueryableEntity; import org.polypheny.db.schema.ScannableEntity; -import org.polypheny.db.schema.SchemaPlus; import org.polypheny.db.schema.Schemas; import org.polypheny.db.util.ImmutableBitSet; import org.polypheny.db.util.ImmutableIntList; -import org.polypheny.db.util.Util; import org.polypheny.db.util.mapping.Mapping; import org.polypheny.db.util.mapping.Mappings; @@ -128,14 +127,11 @@ private static ScanNode createQueryable( Compiler compiler, Scan alg, ImmutableL final DataContext root = compiler.getDataContext(); final AlgOptEntity algOptEntity = alg.getTable(); final Type elementType = queryableTable.getElementType(); - SchemaPlus schema = root.getRootSchema(); - for ( String name : Util.skipLast( algOptEntity.getQualifiedName() ) ) { - schema = schema.getSubNamespace( name ); - } + final Enumerable rowEnumerable; if ( elementType instanceof Class ) { //noinspection unchecked - final Queryable queryable = Schemas.queryable( root, (Class) elementType, algOptEntity.getQualifiedName() ); + final Queryable queryable = Schemas.queryable( 
root, (Class) elementType, List.of( algOptEntity.getCatalogEntity().unwrap( CatalogTable.class ).getNamespaceName(), algOptEntity.getCatalogEntity().name ) ); ImmutableList.Builder fieldBuilder = ImmutableList.builder(); Class type = (Class) elementType; for ( Field field : type.getFields() ) { @@ -157,7 +153,7 @@ private static ScanNode createQueryable( Compiler compiler, Scan alg, ImmutableL return new Row( values ); } ); } else { - rowEnumerable = Schemas.queryable( root, Row.class, algOptEntity.getQualifiedName() ); + rowEnumerable = Schemas.queryable( root, Row.class, List.of( algOptEntity.getCatalogEntity().unwrap( CatalogTable.class ).getNamespaceName(), algOptEntity.getCatalogEntity().name ) ); } return createEnumerable( compiler, alg, rowEnumerable, null, filters, projects ); } diff --git a/core/src/main/java/org/polypheny/db/plan/AlgOptAbstractEntity.java b/core/src/main/java/org/polypheny/db/plan/AlgOptAbstractEntity.java index 179cd6be37..c44b1000a2 100644 --- a/core/src/main/java/org/polypheny/db/plan/AlgOptAbstractEntity.java +++ b/core/src/main/java/org/polypheny/db/plan/AlgOptAbstractEntity.java @@ -34,7 +34,6 @@ package org.polypheny.db.plan; -import com.google.common.collect.ImmutableList; import java.util.Collections; import java.util.List; import org.apache.calcite.linq4j.tree.Expression; @@ -73,12 +72,6 @@ public String getName() { } - @Override - public List getQualifiedName() { - return ImmutableList.of( name ); - } - - @Override public double getRowCount() { return 100; diff --git a/core/src/main/java/org/polypheny/db/plan/AlgOptEntity.java b/core/src/main/java/org/polypheny/db/plan/AlgOptEntity.java index c370d43063..afb9c255e6 100644 --- a/core/src/main/java/org/polypheny/db/plan/AlgOptEntity.java +++ b/core/src/main/java/org/polypheny/db/plan/AlgOptEntity.java @@ -56,13 +56,6 @@ */ public interface AlgOptEntity extends Wrapper { - /** - * Obtains an identifier for this table. The identifier must be unique with respect to the Connection producing this table. - * - * @return qualified name - */ - List getQualifiedName(); - /** * Returns an estimate of the number of rows in the table. */ diff --git a/core/src/main/java/org/polypheny/db/plan/AlgOptUtil.java b/core/src/main/java/org/polypheny/db/plan/AlgOptUtil.java index d29edebbba..c2d1f91959 100644 --- a/core/src/main/java/org/polypheny/db/plan/AlgOptUtil.java +++ b/core/src/main/java/org/polypheny/db/plan/AlgOptUtil.java @@ -209,13 +209,6 @@ public static List findAllTables( AlgNode alg ) { } - /** - * Returns a list of all table qualified names used by this expression or its children. - */ - public static List findAllTableQualifiedNames( AlgNode alg ) { - return Lists.transform( findAllTables( alg ), table -> table.getQualifiedName().toString() ); - } - /** * Returns a list of variables set by a relational expression or its descendants. 
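The hunks above converge on one pattern: an entity is no longer identified by its qualified name (hence the removal of AlgOptEntity.getQualifiedName() and AlgOptUtil.findAllTableQualifiedNames()) but by the numeric id of its attached CatalogEntity, while the new CatalogEntity.name field covers display strings. A minimal sketch of the before/after pattern, using only the AlgOptEntity and CatalogEntity API visible in these hunks; the helper names sameEntity and digestName are illustrative, not part of the patch:

    // Before: identity by qualified name, which breaks once names are removed.
    // boolean same = left.getQualifiedName().equals( right.getQualifiedName() );

    // After: identity by catalog id; the name is kept only for readable digests.
    static boolean sameEntity( AlgOptEntity left, AlgOptEntity right ) {
        return left.getCatalogEntity().id == right.getCatalogEntity().id;
    }

    static String digestName( AlgOptEntity entity ) {
        return entity.getCatalogEntity().name;
    }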
diff --git a/core/src/main/java/org/polypheny/db/prepare/AlgOptEntityImpl.java b/core/src/main/java/org/polypheny/db/prepare/AlgOptEntityImpl.java index fbb1f56402..a093844441 100644 --- a/core/src/main/java/org/polypheny/db/prepare/AlgOptEntityImpl.java +++ b/core/src/main/java/org/polypheny/db/prepare/AlgOptEntityImpl.java @@ -58,6 +58,7 @@ import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.algebra.type.AlgRecordType; import org.polypheny.db.catalog.entity.CatalogEntity; +import org.polypheny.db.catalog.entity.CatalogTable; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptSchema; @@ -100,7 +101,6 @@ public class AlgOptEntityImpl extends AbstractPreparingEntity { private final CatalogEntity catalogEntity; @Nullable private final transient Function, Expression> expressionFunction; - private final ImmutableList names; /** * Estimate for the row count, or null. @@ -113,14 +113,12 @@ public class AlgOptEntityImpl extends AbstractPreparingEntity { private AlgOptEntityImpl( AlgOptSchema schema, AlgDataType rowType, - List names, Entity entity, CatalogEntity catalogEntity, Function, Expression> expressionFunction, Double rowCount ) { this.schema = schema; this.rowType = Objects.requireNonNull( rowType ); - this.names = ImmutableList.copyOf( names ); this.entity = entity; // may be null this.catalogEntity = catalogEntity; this.expressionFunction = expressionFunction; // may be null @@ -129,7 +127,7 @@ private AlgOptEntityImpl( public static AlgOptEntityImpl create( AlgOptSchema schema, AlgDataType rowType, List names, Expression expression ) { - return new AlgOptEntityImpl( schema, rowType, names, null, null, c -> expression, null ); + return new AlgOptEntityImpl( schema, rowType, null, null, c -> expression, null ); } @@ -142,7 +140,7 @@ public static AlgOptEntityImpl create( AlgOptSchema schema, AlgDataType rowType, rowCount = count; } - return new AlgOptEntityImpl( schema, rowType, tableEntry.path(), entity, catalogEntity, getClassExpressionFunction( tableEntry, entity ), rowCount ); + return new AlgOptEntityImpl( schema, rowType, entity, catalogEntity, getClassExpressionFunction( tableEntry, entity ), rowCount ); } @@ -150,7 +148,7 @@ public static AlgOptEntityImpl create( AlgOptSchema schema, AlgDataType rowType, * Creates a copy of this RelOptTable. The new RelOptTable will have newRowType. 
*/ public AlgOptEntityImpl copy( AlgDataType newRowType ) { - return new AlgOptEntityImpl( this.schema, newRowType, this.names, this.entity, this.catalogEntity, this.expressionFunction, this.rowCount ); + return new AlgOptEntityImpl( this.schema, newRowType, this.entity, this.catalogEntity, this.expressionFunction, this.rowCount ); } @@ -177,11 +175,11 @@ private static Function, Expression> getClassExpressionFunction( final } - public static AlgOptEntityImpl create( AlgOptSchema schema, AlgDataType rowType, Entity entity, CatalogEntity catalogEntity, ImmutableList names ) { + public static AlgOptEntityImpl create( AlgOptSchema schema, AlgDataType rowType, Entity entity, CatalogEntity catalogEntity ) { assert entity instanceof TranslatableEntity || entity instanceof ScannableEntity || entity instanceof ModifiableEntity; - return new AlgOptEntityImpl( schema, rowType, names, entity, catalogEntity, null, null ); + return new AlgOptEntityImpl( schema, rowType, entity, catalogEntity, null, null ); } @@ -200,7 +198,7 @@ public T unwrap( Class clazz ) { } } if ( clazz == PolyphenyDbSchema.class ) { - return clazz.cast( Schemas.subSchema( ((PolyphenyDbCatalogReader) schema).rootSchema, Util.skipLast( getQualifiedName() ) ) ); + return clazz.cast( Schemas.subSchema( ((PolyphenyDbCatalogReader) schema).rootSchema, List.of( catalogEntity.unwrap( CatalogTable.class ).getNamespaceName(), catalogEntity.name ) ) ); } return null; } @@ -221,7 +219,6 @@ protected AlgOptEntity extend( Entity extendedEntity ) { return new AlgOptEntityImpl( getRelOptSchema(), extendedRowType, - getQualifiedName(), extendedEntity, null, expressionFunction, @@ -284,7 +281,7 @@ public AlgNode toAlg( ToAlgContext context, AlgTraitSet traitSet ) { } } final AlgOptEntity algOptEntity = - new AlgOptEntityImpl( this.schema, b.build(), this.names, this.entity, this.catalogEntity, this.expressionFunction, this.rowCount ) { + new AlgOptEntityImpl( this.schema, b.build(), this.entity, this.catalogEntity, this.expressionFunction, this.rowCount ) { @Override public T unwrap( Class clazz ) { if ( clazz.isAssignableFrom( InitializerExpressionFactory.class ) ) { @@ -373,7 +370,7 @@ public boolean supportsModality( Modality modality ) { @Override public List getQualifiedName() { - return names; + return List.of( catalogEntity.unwrap( CatalogTable.class ).getNamespaceName(), catalogEntity.name ); } diff --git a/core/src/main/java/org/polypheny/db/processing/LogicalAlgAnalyzeShuttle.java b/core/src/main/java/org/polypheny/db/processing/LogicalAlgAnalyzeShuttle.java index 305b65d618..6aa900cb8a 100644 --- a/core/src/main/java/org/polypheny/db/processing/LogicalAlgAnalyzeShuttle.java +++ b/core/src/main/java/org/polypheny/db/processing/LogicalAlgAnalyzeShuttle.java @@ -81,7 +81,7 @@ public class LogicalAlgAnalyzeShuttle extends AlgShuttleImpl { protected final LinkedHashMap availableColumns = new LinkedHashMap<>(); // column id -> schemaName.tableName.ColumnName protected final HashMap availableColumnsWithTable = new HashMap<>(); // columnId -> tableId @Getter - protected final List entities = new ArrayList<>(); + protected final List entityId = new ArrayList<>(); private final Statement statement; @Getter @@ -280,14 +280,14 @@ public AlgNode visit( LogicalConstraintEnforcer enforcer ) { @Override public AlgNode visit( LogicalMatch match ) { - hashBasis.add( "LogicalMatch#" + match.getTable().getQualifiedName() ); + hashBasis.add( "LogicalMatch#" + match.getTable().getCatalogEntity().id ); return visitChild( match, 0, match.getInput() ); } 
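// The hash entries above and below follow one scheme: "LogicalMatch#<id>", "Scan#<id>", built from
// CatalogEntity.id instead of the qualified name, so the hash basis stays stable when an entity is
// renamed. A sketch of the resulting bookkeeping, assuming only this shuttle's fields as shown in
// the hunk (the id value 42 is illustrative):
//   hashBasis.add( "Scan#" + scan.getTable().getCatalogEntity().id );  // e.g. "Scan#42"
//   this.entityId.add( scan.getTable().getCatalogEntity().id );        // ids feed LogicalQueryInformation.getTablesIds()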
@Override public AlgNode visit( Scan scan ) { - hashBasis.add( "Scan#" + scan.getTable().getQualifiedName() ); + hashBasis.add( "Scan#" + scan.getTable().getCatalogEntity().id ); // get available columns for every table scan this.getAvailableColumns( scan ); @@ -326,7 +326,7 @@ public AlgNode visit( LogicalCorrelate correlate ) { @Override public AlgNode visit( LogicalJoin join ) { if ( join.getLeft() instanceof LogicalRelScan && join.getRight() instanceof LogicalRelScan ) { - hashBasis.add( "LogicalJoin#" + join.getLeft().getTable().getQualifiedName() + "#" + join.getRight().getTable().getQualifiedName() ); + hashBasis.add( "LogicalJoin#" + join.getLeft().getTable().getCatalogEntity().id + "#" + join.getRight().getTable().getCatalogEntity().id ); } super.visit( join ); @@ -391,7 +391,7 @@ public AlgNode visit( AlgNode other ) { private void getAvailableColumns( AlgNode scan ) { - this.entities.addAll( scan.getTable().getQualifiedName() ); + this.entityId.add( scan.getTable().getCatalogEntity().id ); final CatalogTable table = (CatalogTable) scan.getTable().getCatalogEntity(); if ( table != null ) { final List ids = table.fieldIds; diff --git a/core/src/main/java/org/polypheny/db/rex/RexTableInputRef.java b/core/src/main/java/org/polypheny/db/rex/RexTableInputRef.java index 8f0ca74cf6..b5f2afb507 100644 --- a/core/src/main/java/org/polypheny/db/rex/RexTableInputRef.java +++ b/core/src/main/java/org/polypheny/db/rex/RexTableInputRef.java @@ -39,6 +39,7 @@ import org.polypheny.db.algebra.metadata.BuiltInMetadata.AllPredicates; import org.polypheny.db.algebra.metadata.BuiltInMetadata.ExpressionLineage; import org.polypheny.db.algebra.type.AlgDataType; +import org.polypheny.db.catalog.entity.CatalogTable; import org.polypheny.db.plan.AlgOptEntity; @@ -137,7 +138,7 @@ public static class AlgTableRef implements Comparable { private AlgTableRef( AlgOptEntity table, int entityNumber ) { this.table = table; this.entityNumber = entityNumber; - this.digest = table.getQualifiedName() + ".#" + entityNumber; + this.digest = table.getCatalogEntity().id + ".#" + entityNumber; } @@ -145,7 +146,7 @@ private AlgTableRef( AlgOptEntity table, int entityNumber ) { public boolean equals( Object obj ) { return this == obj || obj instanceof AlgTableRef - && table.getQualifiedName().equals( ((AlgTableRef) obj).getQualifiedName() ) + && table.getCatalogEntity().id == ((AlgTableRef) obj).getTable().getCatalogEntity().id && entityNumber == ((AlgTableRef) obj).entityNumber; } @@ -162,7 +163,7 @@ public AlgOptEntity getTable() { public List getQualifiedName() { - return table.getQualifiedName(); + return List.of( table.getCatalogEntity().unwrap( CatalogTable.class ).getNamespaceName(), table.getCatalogEntity().name ); } diff --git a/core/src/main/java/org/polypheny/db/routing/LogicalQueryInformation.java b/core/src/main/java/org/polypheny/db/routing/LogicalQueryInformation.java index 3af0ff8a3b..b10777a5e8 100644 --- a/core/src/main/java/org/polypheny/db/routing/LogicalQueryInformation.java +++ b/core/src/main/java/org/polypheny/db/routing/LogicalQueryInformation.java @@ -61,6 +61,6 @@ public interface LogicalQueryInformation { /** * @return Gets a list of all accessed tables. 
*/ - List getTables(); + List getTablesIds(); } diff --git a/core/src/main/java/org/polypheny/db/schema/Entity.java b/core/src/main/java/org/polypheny/db/schema/Entity.java index 6b03a54586..19d75ab382 100644 --- a/core/src/main/java/org/polypheny/db/schema/Entity.java +++ b/core/src/main/java/org/polypheny/db/schema/Entity.java @@ -107,7 +107,7 @@ default AlgDataTypeFactory getTypeFactory() { boolean rolledUpColumnValidInsideAgg( String column, Call call, Node parent ); - default NamespaceType getSchemaType() { + default NamespaceType getNamespaceType() { return NamespaceType.RELATIONAL; } diff --git a/core/src/main/java/org/polypheny/db/schema/QueryableEntity.java b/core/src/main/java/org/polypheny/db/schema/QueryableEntity.java index 6f3c3e3d58..6735b54804 100644 --- a/core/src/main/java/org/polypheny/db/schema/QueryableEntity.java +++ b/core/src/main/java/org/polypheny/db/schema/QueryableEntity.java @@ -62,6 +62,6 @@ public interface QueryableEntity extends Entity { * @param tableName Table name (unique within schema) * @param clazz The desired collection class; for example {@code Queryable}. */ - Expression getExpression( SchemaPlus schema, String tableName, Class clazz ); + Expression getExpression( SchemaPlus schema, String tableName, Class clazz ); } diff --git a/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java b/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java index e6af238eb2..3fc1f087d6 100644 --- a/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java +++ b/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java @@ -2919,10 +2919,7 @@ private Frame( AlgNode alg ) { private static String deriveAlias( AlgNode alg ) { if ( alg instanceof Scan ) { - final List names = alg.getTable().getQualifiedName(); - if ( !names.isEmpty() ) { - return Util.last( names ); - } + return alg.getTable().getCatalogEntity().name; } return null; } diff --git a/core/src/test/java/org/polypheny/db/catalog/MockCatalogReader.java b/core/src/test/java/org/polypheny/db/catalog/MockCatalogReader.java index f4d26c6d0f..80a4a057f7 100644 --- a/core/src/test/java/org/polypheny/db/catalog/MockCatalogReader.java +++ b/core/src/test/java/org/polypheny/db/catalog/MockCatalogReader.java @@ -366,7 +366,7 @@ public Type getElementType() { @Override - public Expression getExpression( SchemaPlus schema, String tableName, Class clazz ) { + public Expression getExpression( SchemaPlus schema, String tableName, Class clazz ) { return null; } diff --git a/dbms/src/main/java/org/polypheny/db/processing/AbstractQueryProcessor.java b/dbms/src/main/java/org/polypheny/db/processing/AbstractQueryProcessor.java index 4dad07b088..ee57be1715 100644 --- a/dbms/src/main/java/org/polypheny/db/processing/AbstractQueryProcessor.java +++ b/dbms/src/main/java/org/polypheny/db/processing/AbstractQueryProcessor.java @@ -81,9 +81,6 @@ import org.polypheny.db.catalog.Catalog.NamespaceType; import org.polypheny.db.catalog.entity.CatalogSchema; import org.polypheny.db.catalog.entity.CatalogTable; -import org.polypheny.db.catalog.exceptions.UnknownDatabaseException; -import org.polypheny.db.catalog.exceptions.UnknownSchemaException; -import org.polypheny.db.catalog.exceptions.UnknownTableException; import org.polypheny.db.config.RuntimeConfig; import org.polypheny.db.information.InformationCode; import org.polypheny.db.information.InformationGroup; @@ -619,26 +616,8 @@ public AlgNode visit( AlgNode node ) { if ( node instanceof LogicalModify ) { final Catalog catalog = Catalog.getInstance(); final LogicalModify ltm = 
(LogicalModify) node; - final CatalogTable table; - final CatalogSchema schema; - try { - String tableName; - if ( ltm.getTable().getQualifiedName().size() == 3 ) { // DatabaseName.SchemaName.TableName - schema = catalog.getSchema( ltm.getTable().getQualifiedName().get( 0 ), ltm.getTable().getQualifiedName().get( 1 ) ); - tableName = ltm.getTable().getQualifiedName().get( 2 ); - } else if ( ltm.getTable().getQualifiedName().size() == 2 ) { // SchemaName.TableName - schema = catalog.getSchema( statement.getPrepareContext().getDatabaseId(), ltm.getTable().getQualifiedName().get( 0 ) ); - tableName = ltm.getTable().getQualifiedName().get( 1 ); - } else { // TableName - schema = catalog.getSchema( statement.getPrepareContext().getDatabaseId(), statement.getPrepareContext().getDefaultSchemaName() ); - tableName = ltm.getTable().getQualifiedName().get( 0 ); - } - table = catalog.getTable( schema.id, tableName ); - } catch ( UnknownTableException | UnknownDatabaseException | UnknownSchemaException e ) { - // This really should not happen - log.error( "Table not found: {}", ltm.getTable().getQualifiedName().get( 0 ), e ); - throw new RuntimeException( e ); - } + final CatalogTable table = ltm.getTable().getCatalogEntity().unwrap( CatalogTable.class ); + final CatalogSchema schema = catalog.getSchema( table.namespaceId ); final List indices = IndexManager.getInstance().getIndices( schema, table ); // Check if there are any indexes effected by this table modify @@ -917,7 +896,6 @@ public AlgNode visit( LogicalProject project ) { if ( project.getInput() instanceof LogicalRelScan ) { // Figure out the original column names required for index lookup final LogicalRelScan scan = (LogicalRelScan) project.getInput(); - final String table = scan.getTable().getQualifiedName().get( scan.getTable().getQualifiedName().size() - 1 ); final List columns = new ArrayList<>( project.getChildExps().size() ); final List ctypes = new ArrayList<>( project.getChildExps().size() ); for ( final RexNode expr : project.getChildExps() ) { @@ -933,14 +911,7 @@ public AlgNode visit( LogicalProject project ) { } // Retrieve the catalog schema and database representations required for index lookup final CatalogSchema schema = statement.getTransaction().getDefaultSchema(); - final CatalogTable ctable; - try { - ctable = Catalog.getInstance().getTable( schema.id, table ); - } catch ( UnknownTableException e ) { - log.error( "Could not fetch table", e ); - IndexManager.getInstance().incrementNoIndex(); - return super.visit( project ); - } + final CatalogTable ctable = scan.getTable().getCatalogEntity().unwrap( CatalogTable.class ); // Retrieve any index and use for simplification final Index idx = IndexManager.getInstance().getIndex( schema, ctable, columns ); if ( idx == null ) { @@ -1303,7 +1274,7 @@ private LogicalQueryInformation analyzeQueryAndPrepareMonitoring( Statement stat analyzeRelShuttle.availableColumns, analyzeRelShuttle.availableColumnsWithTable, analyzeRelShuttle.getUsedColumns(), - analyzeRelShuttle.getEntities() ); + analyzeRelShuttle.getEntityId() ); this.prepareMonitoring( statement, logicalRoot, isAnalyze, isSubquery, queryInformation ); return queryInformation; diff --git a/dbms/src/main/java/org/polypheny/db/processing/ConstraintEnforceAttacher.java b/dbms/src/main/java/org/polypheny/db/processing/ConstraintEnforceAttacher.java index 874913e869..25036ec09e 100644 --- a/dbms/src/main/java/org/polypheny/db/processing/ConstraintEnforceAttacher.java +++ 
b/dbms/src/main/java/org/polypheny/db/processing/ConstraintEnforceAttacher.java @@ -60,13 +60,11 @@ import org.polypheny.db.catalog.entity.CatalogForeignKey; import org.polypheny.db.catalog.entity.CatalogKey.EnforcementTime; import org.polypheny.db.catalog.entity.CatalogPrimaryKey; -import org.polypheny.db.catalog.entity.CatalogSchema; import org.polypheny.db.catalog.entity.CatalogTable; import org.polypheny.db.catalog.exceptions.GenericCatalogException; import org.polypheny.db.catalog.exceptions.UnknownColumnException; import org.polypheny.db.catalog.exceptions.UnknownDatabaseException; import org.polypheny.db.catalog.exceptions.UnknownSchemaException; -import org.polypheny.db.catalog.exceptions.UnknownTableException; import org.polypheny.db.catalog.exceptions.UnknownUserException; import org.polypheny.db.config.Config; import org.polypheny.db.config.Config.ConfigListener; @@ -202,27 +200,20 @@ public static AlgRoot enforceConstraintBeforeQuery( AlgRoot logicalRoot, Stateme final Modify root = (Modify) logicalRoot.alg; final Catalog catalog = Catalog.getInstance(); - final CatalogSchema schema = statement.getTransaction().getDefaultSchema(); final CatalogTable table; final CatalogPrimaryKey primaryKey; final List constraints; final List foreignKeys; final List exportedKeys; - try { - String entityName = LogicalConstraintEnforcer.getEntityName( root, schema ); - table = catalog.getTable( schema.id, entityName ); - primaryKey = catalog.getPrimaryKey( table.primaryKey ); - constraints = new ArrayList<>( Catalog.getInstance().getConstraints( table.id ) ); - foreignKeys = Catalog.getInstance().getForeignKeys( table.id ); - exportedKeys = Catalog.getInstance().getExportedKeys( table.id ); - // Turn primary key into an artificial unique constraint - CatalogPrimaryKey pk = Catalog.getInstance().getPrimaryKey( table.primaryKey ); - final CatalogConstraint pkc = new CatalogConstraint( 0L, pk.id, ConstraintType.UNIQUE, "PRIMARY KEY", pk ); - constraints.add( pkc ); - } catch ( UnknownTableException e ) { - log.error( "Caught exception", e ); - return logicalRoot; - } + table = root.getTable().getCatalogEntity().unwrap( CatalogTable.class ); + primaryKey = catalog.getPrimaryKey( table.primaryKey ); + constraints = new ArrayList<>( Catalog.getInstance().getConstraints( table.id ) ); + foreignKeys = Catalog.getInstance().getForeignKeys( table.id ); + exportedKeys = Catalog.getInstance().getExportedKeys( table.id ); + // Turn primary key into an artificial unique constraint + CatalogPrimaryKey pk = Catalog.getInstance().getPrimaryKey( table.primaryKey ); + final CatalogConstraint pkc = new CatalogConstraint( 0L, pk.id, ConstraintType.UNIQUE, "PRIMARY KEY", pk ); + constraints.add( pkc ); AlgNode lceRoot = root; diff --git a/dbms/src/main/java/org/polypheny/db/processing/shuttles/LogicalQueryInformationImpl.java b/dbms/src/main/java/org/polypheny/db/processing/shuttles/LogicalQueryInformationImpl.java index ec576b8392..270ad53e51 100644 --- a/dbms/src/main/java/org/polypheny/db/processing/shuttles/LogicalQueryInformationImpl.java +++ b/dbms/src/main/java/org/polypheny/db/processing/shuttles/LogicalQueryInformationImpl.java @@ -36,7 +36,7 @@ public class LogicalQueryInformationImpl implements LogicalQueryInformation { protected final Map usedColumns; @Getter - protected final List tables; + protected final List tablesIds; public LogicalQueryInformationImpl( @@ -45,13 +45,13 @@ public LogicalQueryInformationImpl( LinkedHashMap availableColumns, HashMap availableColumnsWithTable, Map usedColumns, - List 
tables ) { + List tablesIds ) { this.queryId = queryId; this.accessedPartitions = accessedPartitionMap; this.availableColumns = availableColumns; this.availableColumnsWithTable = availableColumnsWithTable; this.usedColumns = usedColumns; - this.tables = tables; + this.tablesIds = tablesIds; } diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java b/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java index ee2a838c3f..941e89552b 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java @@ -615,8 +615,7 @@ private RoutedAlgBuilder handleDocumentOnRelational( DocumentScan node, Integer private RoutedAlgBuilder handleDocumentOnGraph( DocumentScan alg, Statement statement, RoutedAlgBuilder builder ) { AlgTraitSet out = alg.getTraitSet().replace( ModelTrait.GRAPH ); builder.lpgScan( alg.getCollection().getCatalogEntity().id ); - List names = alg.getCollection().getQualifiedName(); - builder.lpgMatch( List.of( builder.lpgNodeMatch( List.of( names.get( names.size() - 1 ) ) ) ), List.of( "n" ) ); + builder.lpgMatch( List.of( builder.lpgNodeMatch( List.of( alg.getCollection().getCatalogEntity().name ) ) ), List.of( "n" ) ); AlgNode unrouted = builder.build(); builder.push( new LogicalTransformer( builder.getCluster(), List.of( routeGraph( builder, (AlgNode & LpgAlg) unrouted, statement ) ), null, out.replace( ModelTrait.DOCUMENT ), ModelTrait.GRAPH, ModelTrait.DOCUMENT, alg.getRowType(), true ) ); return builder; diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/events/analyzer/DmlEventAnalyzer.java b/monitoring/src/main/java/org/polypheny/db/monitoring/events/analyzer/DmlEventAnalyzer.java index f09d5cbe26..fd8faad2bd 100644 --- a/monitoring/src/main/java/org/polypheny/db/monitoring/events/analyzer/DmlEventAnalyzer.java +++ b/monitoring/src/main/java/org/polypheny/db/monitoring/events/analyzer/DmlEventAnalyzer.java @@ -32,7 +32,7 @@ public static DmlDataPoint analyze( DmlEvent dmlEvent ) { DmlDataPoint metric = DmlDataPoint .builder() .Id( dmlEvent.getId() ) - .tables( dmlEvent.getLogicalQueryInformation().getTables() ) + .tables( dmlEvent.getLogicalQueryInformation().getTablesIds() ) .fieldNames( dmlEvent.getFieldNames() ) .executionTime( dmlEvent.getExecutionTime() ) .rowCount( dmlEvent.getRowCount() ) diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/events/analyzer/QueryEventAnalyzer.java b/monitoring/src/main/java/org/polypheny/db/monitoring/events/analyzer/QueryEventAnalyzer.java index 4110812c09..c6d620f063 100644 --- a/monitoring/src/main/java/org/polypheny/db/monitoring/events/analyzer/QueryEventAnalyzer.java +++ b/monitoring/src/main/java/org/polypheny/db/monitoring/events/analyzer/QueryEventAnalyzer.java @@ -32,7 +32,7 @@ public static QueryDataPointImpl analyze( QueryEvent queryEvent ) { QueryDataPointImpl metric = QueryDataPointImpl .builder() .Id( queryEvent.getId() ) - .tables( queryEvent.getLogicalQueryInformation().getTables() ) + .tables( queryEvent.getLogicalQueryInformation().getTablesIds() ) .fieldNames( queryEvent.getFieldNames() ) .executionTime( queryEvent.getExecutionTime() ) .rowCount( queryEvent.getRowCount() ) diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/events/metrics/DmlDataPoint.java b/monitoring/src/main/java/org/polypheny/db/monitoring/events/metrics/DmlDataPoint.java index 5f141e2991..f99080a156 100644 --- 
a/monitoring/src/main/java/org/polypheny/db/monitoring/events/metrics/DmlDataPoint.java +++ b/monitoring/src/main/java/org/polypheny/db/monitoring/events/metrics/DmlDataPoint.java @@ -43,7 +43,7 @@ public class DmlDataPoint implements MonitoringDataPoint, Serializable { private static final long serialVersionUID = 8159995420459385039L; @Builder.Default - private final List<String> tables = new ArrayList<>(); + private final List<Long> tables = new ArrayList<>(); private final Map dataElements = new HashMap<>(); private UUID Id; private Timestamp recordedTimestamp; diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/events/metrics/QueryDataPointImpl.java b/monitoring/src/main/java/org/polypheny/db/monitoring/events/metrics/QueryDataPointImpl.java index 1f6b7e850e..2712e39b17 100644 --- a/monitoring/src/main/java/org/polypheny/db/monitoring/events/metrics/QueryDataPointImpl.java +++ b/monitoring/src/main/java/org/polypheny/db/monitoring/events/metrics/QueryDataPointImpl.java @@ -43,7 +43,7 @@ public class QueryDataPointImpl implements QueryDataPoint, Serializable { private static final long serialVersionUID = 4389301720141941770L; @Builder.Default - private final List<String> tables = new ArrayList<>(); + private final List<Long> tables = new ArrayList<>(); private final HashMap dataElements = new HashMap<>(); private UUID Id; private Timestamp recordedTimestamp; diff --git a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvTranslatableTable.java b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvTranslatableTable.java index bd0af66996..67748ef8cf 100644 --- a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvTranslatableTable.java +++ b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvTranslatableTable.java @@ -90,7 +90,7 @@ public Enumerator enumerator() { @Override - public Expression getExpression( SchemaPlus schema, String tableName, Class clazz ) { + public Expression getExpression( SchemaPlus schema, String tableName, Class clazz ) { return Schemas.tableExpression( schema, getElementType(), tableName, clazz ); } diff --git a/plugins/druid-adapter/src/main/java/org/polypheny/db/adapter/druid/DruidQuery.java b/plugins/druid-adapter/src/main/java/org/polypheny/db/adapter/druid/DruidQuery.java index f74ecf367e..643beb36ab 100644 --- a/plugins/druid-adapter/src/main/java/org/polypheny/db/adapter/druid/DruidQuery.java +++ b/plugins/druid-adapter/src/main/java/org/polypheny/db/adapter/druid/DruidQuery.java @@ -540,7 +540,7 @@ public AlgWriter explainTerms( AlgWriter pw ) { for ( AlgNode alg : algs ) { if ( alg instanceof Scan ) { Scan scan = (Scan) alg; - pw.item( "table", scan.getTable().getQualifiedName() ); + pw.item( "table", scan.getTable().getCatalogEntity().id ); pw.item( "intervals", intervals ); } else if ( alg instanceof Filter ) { pw.item( "filter", ((Filter) alg).getCondition() ); @@ -625,7 +625,7 @@ public void register( AlgOptPlanner planner ) { @Override public String algCompareString() { return this.getClass().getSimpleName() + "$" + - String.join( ".", table.getQualifiedName() ) + "$" + + "." + table.getCatalogEntity().id + "$" + (algs != null ?
algs.stream().map( AlgNode::algCompareString ).collect( Collectors.joining( "$" ) ) : "") + "&"; } diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcEntity.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcEntity.java index 18645eb6e7..46e2195bd9 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcEntity.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcEntity.java @@ -90,15 +90,13 @@ */ public class JdbcEntity extends AbstractQueryableEntity implements TranslatableEntity, ScannableEntity, ModifiableEntity { - private AlgProtoDataType protoRowType; + private final AlgProtoDataType protoRowType; private JdbcSchema jdbcSchema; private final String physicalSchemaName; private final String physicalTableName; private final List physicalColumnNames; - private final String logicalSchemaName; - private final String logicalTableName; private final List logicalColumnNames; private final TableType jdbcTableType; @@ -117,8 +115,6 @@ public JdbcEntity( Long tableId ) { super( Object[].class ); this.jdbcSchema = jdbcSchema; - this.logicalSchemaName = logicalSchemaName; - this.logicalTableName = logicalTableName; this.logicalColumnNames = logicalColumnNames; this.physicalSchemaName = physicalSchemaName; this.physicalTableName = physicalTableName; diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/rel2sql/AlgToSqlConverter.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/rel2sql/AlgToSqlConverter.java index 8a3a04c0f2..d9bc0b0359 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/rel2sql/AlgToSqlConverter.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/rel2sql/AlgToSqlConverter.java @@ -37,20 +37,61 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; +import java.util.ArrayDeque; +import java.util.ArrayList; +import java.util.Deque; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.SortedSet; +import java.util.stream.Collectors; import org.apache.calcite.linq4j.tree.Expressions; import org.polypheny.db.algebra.AlgFieldCollation; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.constant.JoinConditionType; import org.polypheny.db.algebra.constant.JoinType; -import org.polypheny.db.algebra.core.*; +import org.polypheny.db.algebra.core.Aggregate; +import org.polypheny.db.algebra.core.AggregateCall; +import org.polypheny.db.algebra.core.Calc; +import org.polypheny.db.algebra.core.CorrelationId; +import org.polypheny.db.algebra.core.Filter; +import org.polypheny.db.algebra.core.Intersect; +import org.polypheny.db.algebra.core.Join; +import org.polypheny.db.algebra.core.JoinAlgType; +import org.polypheny.db.algebra.core.Match; +import org.polypheny.db.algebra.core.Minus; +import org.polypheny.db.algebra.core.Modify; +import org.polypheny.db.algebra.core.Project; +import org.polypheny.db.algebra.core.Scan; +import org.polypheny.db.algebra.core.Sort; +import org.polypheny.db.algebra.core.Union; +import org.polypheny.db.algebra.core.Values; import org.polypheny.db.algebra.operators.OperatorName; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeField; +import org.polypheny.db.catalog.entity.CatalogTable; import 
org.polypheny.db.languages.OperatorRegistry; import org.polypheny.db.languages.ParserPos; import org.polypheny.db.nodes.Node; -import org.polypheny.db.rex.*; -import org.polypheny.db.sql.language.*; +import org.polypheny.db.rex.RexCall; +import org.polypheny.db.rex.RexLiteral; +import org.polypheny.db.rex.RexLocalRef; +import org.polypheny.db.rex.RexNode; +import org.polypheny.db.rex.RexProgram; +import org.polypheny.db.sql.language.SqlCall; +import org.polypheny.db.sql.language.SqlDelete; +import org.polypheny.db.sql.language.SqlDialect; +import org.polypheny.db.sql.language.SqlIdentifier; +import org.polypheny.db.sql.language.SqlInsert; +import org.polypheny.db.sql.language.SqlIntervalLiteral; +import org.polypheny.db.sql.language.SqlJoin; +import org.polypheny.db.sql.language.SqlLiteral; +import org.polypheny.db.sql.language.SqlMatchRecognize; +import org.polypheny.db.sql.language.SqlNode; +import org.polypheny.db.sql.language.SqlNodeList; +import org.polypheny.db.sql.language.SqlSelect; +import org.polypheny.db.sql.language.SqlSetOperator; +import org.polypheny.db.sql.language.SqlUpdate; import org.polypheny.db.sql.language.fun.SqlRowOperator; import org.polypheny.db.sql.language.fun.SqlSingleValueAggFunction; import org.polypheny.db.sql.language.validate.SqlValidatorUtil; @@ -58,9 +99,6 @@ import org.polypheny.db.util.ReflectUtil; import org.polypheny.db.util.ReflectiveVisitor; -import java.util.*; -import java.util.stream.Collectors; - /** * Utility to convert relational expressions to SQL abstract syntax tree. @@ -235,9 +273,8 @@ public Result visit( Aggregate e ) { * @see #dispatch */ public Result visit( Scan e ) { - //final SqlIdentifier identifier = getPhysicalTableName( e.getTable().getQualifiedName() ); return result( - new SqlIdentifier( e.getTable().getQualifiedName(), ParserPos.ZERO ), + new SqlIdentifier( List.of( e.getTable().unwrap( CatalogTable.class ).getNamespaceName(), e.getTable().getCatalogEntity().name ), ParserPos.ZERO ), ImmutableList.of( Clause.FROM ), e, null ); @@ -416,7 +453,7 @@ public Result visit( Modify modify ) { // Target Table Name //final SqlIdentifier sqlTargetTable = new SqlIdentifier( modify.getTable().getQualifiedName(), POS ); - final SqlIdentifier sqlTargetTable = getPhysicalTableName( modify.getTable().getQualifiedName() ); + final SqlIdentifier sqlTargetTable = getPhysicalTableName( List.of( modify.getTable().getCatalogEntity().unwrap( CatalogTable.class ).getNamespaceName(), modify.getTable().getCatalogEntity().name ) ); switch ( modify.getOperation() ) { case INSERT: { @@ -429,7 +466,7 @@ public Result visit( Modify modify ) { sqlTargetTable, sqlSource, physicalIdentifierList( - modify.getTable().getQualifiedName(), + List.of( modify.getTable().getCatalogEntity().unwrap( CatalogTable.class ).getNamespaceName(), modify.getTable().getCatalogEntity().name ), modify.getInput().getRowType().getFieldNames() ) ); return result( sqlInsert, ImmutableList.of(), modify, null ); } @@ -438,7 +475,7 @@ public Result visit( Modify modify ) { final SqlUpdate sqlUpdate = new SqlUpdate( POS, sqlTargetTable, - physicalIdentifierList( modify.getTable().getQualifiedName(), modify.getUpdateColumnList() ), + physicalIdentifierList( List.of( modify.getTable().getCatalogEntity().unwrap( CatalogTable.class ).getNamespaceName(), modify.getTable().getCatalogEntity().name ), modify.getUpdateColumnList() ), exprList( context, modify.getSourceExpressionList() ), ((SqlSelect) input.node).getWhere(), input.asSelect(), diff --git 
a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql2alg/MqlToAlgConverter.java b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql2alg/MqlToAlgConverter.java index e5055f56e7..794d70c3d2 100644 --- a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql2alg/MqlToAlgConverter.java +++ b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql2alg/MqlToAlgConverter.java @@ -282,7 +282,7 @@ public AlgRoot convert( MqlCollectionStatement query ) { AlgNode node; - if ( entity.getTable().getSchemaType() == NamespaceType.RELATIONAL ) { + if ( entity.getTable().getNamespaceType() == NamespaceType.RELATIONAL ) { _dataExists = false; } @@ -336,7 +336,7 @@ private AlgOptEntity getEntity( MqlCollectionStatement query, String dbSchemaNam if ( table == null ) { return catalogReader.getCollection( names ); - } else if ( table.getTable().getSchemaType() == NamespaceType.GRAPH ) { + } else if ( table.getTable().getNamespaceType() == NamespaceType.GRAPH ) { final AlgDataTypeFactory typeFactory = new PolyTypeFactoryImpl( AlgDataTypeSystem.DEFAULT ); diff --git a/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigAlg.java b/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigAlg.java index 22a2201009..2e230f97fb 100644 --- a/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigAlg.java +++ b/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigAlg.java @@ -74,8 +74,7 @@ class Implementor { public String getTableName( AlgNode input ) { - final List qualifiedName = input.getTable().getQualifiedName(); - return qualifiedName.get( qualifiedName.size() - 1 ); + return input.getTable().getCatalogEntity().name; } diff --git a/plugins/pig-language/src/main/java/org/polypheny/db/tools/PigAlgBuilder.java b/plugins/pig-language/src/main/java/org/polypheny/db/tools/PigAlgBuilder.java index 2e938985f7..d7f3e474b6 100644 --- a/plugins/pig-language/src/main/java/org/polypheny/db/tools/PigAlgBuilder.java +++ b/plugins/pig-language/src/main/java/org/polypheny/db/tools/PigAlgBuilder.java @@ -35,6 +35,8 @@ import com.google.common.collect.ImmutableList; +import java.util.ArrayList; +import java.util.List; import org.apache.calcite.linq4j.Ord; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.core.JoinAlgType; @@ -51,9 +53,6 @@ import org.polypheny.db.transaction.Statement; import org.polypheny.db.util.Util; -import java.util.ArrayList; -import java.util.List; - /** * Extension to {@link AlgBuilder} for Pig relational operators. 
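The getAlias() hunk just below repeats the simplification already applied to AlgBuilder.deriveAlias() earlier in this patch: instead of taking the last component of the qualified-name list with Util.last, the unqualified name is read straight off the catalog entity. A hedged sketch of the resulting shape (the helper name aliasFor is hypothetical; the accessors are those used in the hunks):

    import org.polypheny.db.algebra.AlgNode;
    import org.polypheny.db.algebra.core.Scan;

    final class AliasSketch {

        // Mirrors the rewritten getAlias()/deriveAlias(): only scans carry an alias.
        static String aliasFor( AlgNode top ) {
            // Old: Util.last( top.getTable().getQualifiedName() )
            // New: the catalog entity already carries the unqualified name.
            return top instanceof Scan ? top.getTable().getCatalogEntity().name : null;
        }

    }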
@@ -205,7 +204,7 @@ String getAlias() { } else { AlgNode top = peek(); if ( top instanceof Scan ) { - return Util.last( top.getTable().getQualifiedName() ); + return top.getTable().getCatalogEntity().name; } else { return null; } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/sql2alg/SqlToAlgConverter.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/sql2alg/SqlToAlgConverter.java index 65dddb231e..5d526fd885 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/sql2alg/SqlToAlgConverter.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/sql2alg/SqlToAlgConverter.java @@ -2163,7 +2163,7 @@ protected void convertCollectionTable( Blackboard bb, SqlCall call ) { final TranslatableEntity table = udf.getTable( typeFactory, callBinding.sqlOperands() ); final CatalogTable catalogTable = Catalog.getInstance().getTable( table.getId() ); final AlgDataType rowType = table.getRowType( typeFactory ); - AlgOptEntity algOptEntity = AlgOptEntityImpl.create( null, rowType, table, catalogTable, udf.getNameAsId().names ); + AlgOptEntity algOptEntity = AlgOptEntityImpl.create( null, rowType, table, catalogTable ); AlgNode converted = toAlg( algOptEntity ); bb.setRoot( converted, true ); return; diff --git a/plugins/sql-language/src/test/java/org/polypheny/db/sql/FrameworksTest.java b/plugins/sql-language/src/test/java/org/polypheny/db/sql/FrameworksTest.java index 5ad9102cc8..a6f792e59b 100644 --- a/plugins/sql-language/src/test/java/org/polypheny/db/sql/FrameworksTest.java +++ b/plugins/sql-language/src/test/java/org/polypheny/db/sql/FrameworksTest.java @@ -435,7 +435,7 @@ public Type getElementType() { @Override - public Expression getExpression( SchemaPlus schema, String tableName, Class clazz ) { + public Expression getExpression( SchemaPlus schema, String tableName, Class clazz ) { throw new UnsupportedOperationException(); } diff --git a/plugins/sql-language/src/test/java/org/polypheny/db/sql/language/SqlToAlgTestBase.java b/plugins/sql-language/src/test/java/org/polypheny/db/sql/language/SqlToAlgTestBase.java index 98128cb624..04e1d90089 100644 --- a/plugins/sql-language/src/test/java/org/polypheny/db/sql/language/SqlToAlgTestBase.java +++ b/plugins/sql-language/src/test/java/org/polypheny/db/sql/language/SqlToAlgTestBase.java @@ -48,6 +48,7 @@ import org.polypheny.db.catalog.MockCatalogReader; import org.polypheny.db.catalog.MockCatalogReaderDynamic; import org.polypheny.db.catalog.MockCatalogReaderSimple; +import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.config.PolyphenyDbConnectionConfig; import org.polypheny.db.languages.NodeToAlgConverter; import org.polypheny.db.languages.NodeToAlgConverter.Config; @@ -56,10 +57,10 @@ import org.polypheny.db.nodes.validate.ValidatorCatalogReader; import org.polypheny.db.nodes.validate.ValidatorTable; import org.polypheny.db.plan.AlgOptCluster; +import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptPlanner; import org.polypheny.db.plan.AlgOptSchema; import org.polypheny.db.plan.AlgOptSchemaWithSampling; -import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptUtil; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.plan.Context; @@ -324,14 +325,6 @@ public AlgOptEntity getTableForMember( List names, final String datasetN // If they're asking for a sample, just for test purposes, assume there's a table called "
    :". AlgOptEntity datasetTable = new DelegatingRelOptEntity( table ) { - @Override - public List getQualifiedName() { - final List list = new ArrayList<>( super.getQualifiedName() ); - list.set( - list.size() - 1, - list.get( list.size() - 1 ) + ":" + datasetName ); - return ImmutableList.copyOf( list ); - } }; if ( usedDataset != null ) { assert usedDataset.length == 1; @@ -372,12 +365,6 @@ public T unwrap( Class clazz ) { } - @Override - public List getQualifiedName() { - return names; - } - - @Override public double getRowCount() { // use something other than 0 to give costing tests some room, and make emps bigger than depts for join asymmetry @@ -437,6 +424,12 @@ public List getColumnStrategies() { } + @Override + public CatalogEntity getCatalogEntity() { + return null; + } + + @Override public Expression getExpression( Class clazz ) { return null; @@ -491,12 +484,6 @@ public AlgOptEntity extend( List extendedFields ) { } - @Override - public List getQualifiedName() { - return parent.getQualifiedName(); - } - - @Override public double getRowCount() { return parent.getRowCount(); @@ -550,6 +537,12 @@ public List getColumnStrategies() { return parent.getColumnStrategies(); } + + @Override + public CatalogEntity getCatalogEntity() { + return null; + } + } From b6babf82f248a72a2e25f923fb75f246019f7813 Mon Sep 17 00:00:00 2001 From: datomo Date: Wed, 22 Feb 2023 16:04:25 +0100 Subject: [PATCH 017/436] added info to AlgOptEntity --- .../adapter/enumerable/EnumerableRules.java | 2 - .../enumerable/EnumerableTableModify.java | 174 ---- .../enumerable/EnumerableTableModifyRule.java | 90 -- .../adapter/java/AbstractQueryableEntity.java | 5 +- .../db/adapter/java/ReflectiveSchema.java | 14 +- .../logical/common/LogicalStreamer.java | 73 +- .../db/algebra/stream/StreamRules.java | 5 +- .../org/polypheny/db/plan/AlgOptEntity.java | 3 + .../db/prepare/AlgOptEntityImpl.java | 37 +- .../db/prepare/LixToAlgTranslator.java | 3 - .../db/prepare/PolyphenyDbCatalogReader.java | 10 +- .../db/prepare/PolyphenyDbPrepareImpl.java | 1 - .../db/prepare/QueryableAlgBuilder.java | 7 +- .../java/org/polypheny/db/schema/Entity.java | 23 + .../polypheny/db/schema/LogicalEntity.java | 9 +- .../polypheny/db/schema/ModifiableEntity.java | 6 - .../db/schema/impl/AbstractEntity.java | 12 +- .../java/org/polypheny/db/tools/Programs.java | 1 - .../org/polypheny/db/util/BuiltInMethod.java | 2 - .../db/catalog/MockCatalogReader.java | 25 +- .../db/schemas/HrClusteredSchema.java | 3 +- .../java/org/polypheny/db/test/JdbcTest.java | 2 +- .../db/test/ScannableEntityTest.java | 14 +- .../db/processing/VolcanoQueryProcessor.java | 1 - .../db/schema/PolySchemaBuilder.java | 2 +- .../db/adapter/cassandra/CassandraTable.java | 4 - .../adapter/cottontail/CottontailEntity.java | 28 +- .../adapter/cottontail/CottontailPlugin.java | 26 +- .../polypheny/db/adapter/csv/CsvTable.java | 2 +- .../db/adapter/druid/DruidEntity.java | 1 + .../elasticsearch/ElasticsearchEntity.java | 4 +- .../elasticsearch/ElasticsearchSchema.java | 8 +- .../db/adapter/file/FileStoreSchema.java | 1 + .../adapter/file/FileTranslatableEntity.java | 18 +- .../db/adapter/file/source/QfsSchema.java | 1 + .../db/adapter/geode/algebra/GeodeEntity.java | 4 +- .../db/adapter/geode/algebra/GeodeSchema.java | 2 +- .../simple/GeodeSimpleScannableEntity.java | 2 +- .../polypheny/db/adapter/html/HtmlEntity.java | 6 +- .../polypheny/db/adapter/html/JsonEntity.java | 1 + .../polypheny/db/adapter/jdbc/JdbcEntity.java | 20 +- .../db/adapter/jdbc/JdbcImplementor.java | 22 +- 
.../polypheny/db/adapter/jdbc/JdbcSchema.java | 4 +- .../jdbc/rel2sql/AlgToSqlConverter.java | 10 +- .../db/adapter/mongodb/MongoEntity.java | 25 +- .../languages/mql2alg/MqlToAlgConverter.java | 5 +- .../polypheny/db/adapter/neo4j/NeoEntity.java | 18 +- .../polypheny/db/adapter/neo4j/NeoSchema.java | 4 +- .../polypheny/db/adapter/pig/PigEntity.java | 4 +- .../sql-language/src/main/codegen/config.fmpp | 2 +- .../db/sql/language/validate/EmptyScope.java | 6 +- .../language/validate/SqlValidatorScope.java | 2 +- .../db/sql/sql2alg/SqlToAlgConverter.java | 2 +- .../org/polypheny/db/sql/FrameworksTest.java | 6 - .../java/org/polypheny/db/sql/Smalls.java | 860 ------------------ 55 files changed, 197 insertions(+), 1425 deletions(-) delete mode 100644 core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableTableModify.java delete mode 100644 core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableTableModifyRule.java diff --git a/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableRules.java b/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableRules.java index ea99b89e15..46ed8d8d22 100644 --- a/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableRules.java +++ b/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableRules.java @@ -101,8 +101,6 @@ private EnumerableRules() { public static final EnumerableMinusRule ENUMERABLE_MINUS_RULE = new EnumerableMinusRule(); - public static final EnumerableTableModifyRule ENUMERABLE_TABLE_MODIFICATION_RULE = new EnumerableTableModifyRule( AlgFactories.LOGICAL_BUILDER ); - public static final EnumerableValuesRule ENUMERABLE_VALUES_RULE = new EnumerableValuesRule( AlgFactories.LOGICAL_BUILDER ); public static final EnumerableWindowRule ENUMERABLE_WINDOW_RULE = new EnumerableWindowRule(); diff --git a/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableTableModify.java b/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableTableModify.java deleted file mode 100644 index 2c865b529e..0000000000 --- a/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableTableModify.java +++ /dev/null @@ -1,174 +0,0 @@ -/* - * Copyright 2019-2022 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * This file incorporates code covered by the following terms: - * - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to you under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.adapter.enumerable; - - -import java.lang.reflect.Method; -import java.lang.reflect.Modifier; -import java.util.ArrayList; -import java.util.Collection; -import java.util.List; -import org.apache.calcite.linq4j.tree.BlockBuilder; -import org.apache.calcite.linq4j.tree.Expression; -import org.apache.calcite.linq4j.tree.Expressions; -import org.apache.calcite.linq4j.tree.ParameterExpression; -import org.apache.calcite.linq4j.tree.Types; -import org.polypheny.db.adapter.java.JavaTypeFactory; -import org.polypheny.db.algebra.AlgNode; -import org.polypheny.db.algebra.core.Modify; -import org.polypheny.db.algebra.metadata.AlgMetadataQuery; -import org.polypheny.db.plan.AlgOptCluster; -import org.polypheny.db.plan.AlgOptCost; -import org.polypheny.db.plan.AlgOptEntity; -import org.polypheny.db.plan.AlgOptPlanner; -import org.polypheny.db.plan.AlgTraitSet; -import org.polypheny.db.prepare.Prepare; -import org.polypheny.db.rex.RexNode; -import org.polypheny.db.schema.ModifiableEntity; -import org.polypheny.db.util.BuiltInMethod; - - -/** - * Implementation of {@link Modify} in {@link org.polypheny.db.adapter.enumerable.EnumerableConvention enumerable calling convention}. - */ -public class EnumerableTableModify extends Modify implements EnumerableAlg { - - @Override - public AlgOptCost computeSelfCost( AlgOptPlanner planner, AlgMetadataQuery mq ) { - if ( getOperation() != Operation.UPDATE ) { - return super.computeSelfCost( planner, mq ); - } else { - return super.computeSelfCost( planner, mq ).multiplyBy( 2 ); - } - } - - - public EnumerableTableModify( AlgOptCluster cluster, AlgTraitSet traits, AlgOptEntity table, Prepare.CatalogReader catalogReader, AlgNode child, Operation operation, List updateColumnList, List sourceExpressionList, boolean flattened ) { - super( cluster, traits, table, catalogReader, child, operation, updateColumnList, sourceExpressionList, flattened ); - assert child.getConvention() instanceof EnumerableConvention; - assert getConvention() instanceof EnumerableConvention; - final ModifiableEntity modifiableTable = table.unwrap( ModifiableEntity.class ); - if ( modifiableTable == null ) { - throw new AssertionError(); // TODO: user error in validator - } - } - - - @Override - public AlgNode copy( AlgTraitSet traitSet, List inputs ) { - return new EnumerableTableModify( getCluster(), traitSet, getTable(), getCatalogReader(), sole( inputs ), getOperation(), getUpdateColumnList(), getSourceExpressionList(), isFlattened() ); - } - - - @Override - public Result implement( EnumerableAlgImplementor implementor, Prefer pref ) { - final BlockBuilder builder = new BlockBuilder(); - final Result result = implementor.visitChild( this, 0, (EnumerableAlg) getInput(), pref ); - Expression childExp = builder.append( "child", result.block ); - final ParameterExpression collectionParameter = Expressions.parameter( Collection.class, builder.newName( "collection" ) ); - final Expression expression = table.getExpression( ModifiableEntity.class ); - assert expression != null; // TODO: user error in validator - assert ModifiableEntity.class.isAssignableFrom( Types.toClass( expression.getType() ) ) : expression.getType(); - builder.add( - Expressions.declare( - Modifier.FINAL, - collectionParameter, - Expressions.call( expression, BuiltInMethod.MODIFIABLE_TABLE_GET_MODIFIABLE_COLLECTION.method ) ) ); - final Expression countParameter = - builder.append( - 
"count", - Expressions.call( collectionParameter, "size" ), - false ); - Expression convertedChildExp; - if ( !getInput().getRowType().equals( getRowType() ) ) { - final JavaTypeFactory typeFactory = (JavaTypeFactory) getCluster().getTypeFactory(); - final JavaRowFormat format = EnumerableScan.deduceFormat( table ); - PhysType physType = PhysTypeImpl.of( typeFactory, table.getRowType(), format ); - List expressionList = new ArrayList<>(); - final PhysType childPhysType = result.physType; - final ParameterExpression o_ = Expressions.parameter( childPhysType.getJavaRowType(), "o" ); - final int fieldCount = childPhysType.getRowType().getFieldCount(); - for ( int i = 0; i < fieldCount; i++ ) { - expressionList.add( childPhysType.fieldReference( o_, i, physType.getJavaFieldType( i ) ) ); - } - convertedChildExp = - builder.append( - "convertedChild", - Expressions.call( - childExp, - BuiltInMethod.SELECT.method, - Expressions.lambda( physType.record( expressionList ), o_ ) ) ); - } else { - convertedChildExp = childExp; - } - final Method method; - switch ( getOperation() ) { - case INSERT: - method = BuiltInMethod.INTO.method; - break; - case DELETE: - method = BuiltInMethod.REMOVE_ALL.method; - break; - default: - throw new AssertionError( getOperation() ); - } - builder.add( Expressions.statement( Expressions.call( convertedChildExp, method, collectionParameter ) ) ); - final Expression updatedCountParameter = - builder.append( - "updatedCount", - Expressions.call( collectionParameter, "size" ), - false ); - builder.add( - Expressions.return_( - null, - Expressions.call( - BuiltInMethod.SINGLETON_ENUMERABLE.method, - Expressions.convert_( - Expressions.condition( - Expressions.greaterThanOrEqual( updatedCountParameter, countParameter ), - Expressions.subtract( updatedCountParameter, countParameter ), - Expressions.subtract( countParameter, updatedCountParameter ) ), - long.class ) ) ) ); - final PhysType physType = - PhysTypeImpl.of( - implementor.getTypeFactory(), - getRowType(), - pref == Prefer.ARRAY - ? JavaRowFormat.ARRAY - : JavaRowFormat.SCALAR ); - return implementor.result( physType, builder.toBlock() ); - } - -} - diff --git a/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableTableModifyRule.java b/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableTableModifyRule.java deleted file mode 100644 index 283ffca3e6..0000000000 --- a/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableTableModifyRule.java +++ /dev/null @@ -1,90 +0,0 @@ -/* - * Copyright 2019-2022 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * This file incorporates code covered by the following terms: - * - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to you under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.adapter.enumerable; - - -import java.util.function.Predicate; -import org.polypheny.db.algebra.AlgNode; -import org.polypheny.db.algebra.convert.ConverterRule; -import org.polypheny.db.algebra.logical.relational.LogicalModify; -import org.polypheny.db.plan.AlgTraitSet; -import org.polypheny.db.plan.Convention; -import org.polypheny.db.schema.ModifiableEntity; -import org.polypheny.db.tools.AlgBuilderFactory; - - -/** - * Planner rule that converts a {@link LogicalModify} relational expression {@link org.polypheny.db.adapter.enumerable.EnumerableConvention enumerable calling convention}. - */ -public class EnumerableTableModifyRule extends ConverterRule { - - /** - * Creates an EnumerableTableModifyRule. - * - * @param algBuilderFactory Builder for relational expressions - */ - public EnumerableTableModifyRule( AlgBuilderFactory algBuilderFactory ) { - super( LogicalModify.class, (Predicate) r -> true, Convention.NONE, EnumerableConvention.INSTANCE, algBuilderFactory, "EnumerableTableModificationRule" ); - } - - - @Override - public AlgNode convert( AlgNode alg ) { - final LogicalModify modify = (LogicalModify) alg; - // EnumerableTableModify uses getModifiableCollection, which no store implements correctly - // the streamer should be able to handles it without this method - if ( true ) { - // this is something, which is not supported therefore we can just substitute it - // return EnumerableRules.ENUMERABLE_TABLE_MODIFY_TO_STREAMER_RULE.convert( alg ); - return null; - } - final ModifiableEntity modifiableTable = modify.getTable().unwrap( ModifiableEntity.class ); - if ( modifiableTable == null ) { - return null; - } - final AlgTraitSet traitSet = modify.getTraitSet().replace( EnumerableConvention.INSTANCE ); - return new EnumerableTableModify( - modify.getCluster(), - traitSet, - modify.getTable(), - modify.getCatalogReader(), - convert( modify.getInput(), traitSet ), - modify.getOperation(), - modify.getUpdateColumnList(), - modify.getSourceExpressionList(), - modify.isFlattened() ); - } - -} - diff --git a/core/src/main/java/org/polypheny/db/adapter/java/AbstractQueryableEntity.java b/core/src/main/java/org/polypheny/db/adapter/java/AbstractQueryableEntity.java index 87e3d5f664..8a35d9c22f 100644 --- a/core/src/main/java/org/polypheny/db/adapter/java/AbstractQueryableEntity.java +++ b/core/src/main/java/org/polypheny/db/adapter/java/AbstractQueryableEntity.java @@ -51,8 +51,8 @@ public abstract class AbstractQueryableEntity extends AbstractEntity implements protected final Type elementType; - protected AbstractQueryableEntity( Type elementType ) { - super(); + protected AbstractQueryableEntity( Type elementType, Long id, Long partitionId, Long adapterId ) { + super( id, partitionId, adapterId ); this.elementType = elementType; } @@ -67,5 +67,6 @@ public Type getElementType() { public Expression getExpression( SchemaPlus schema, String tableName, Class clazz ) { return 
Schemas.tableExpression( schema, elementType, tableName, clazz ); } + } diff --git a/core/src/main/java/org/polypheny/db/adapter/java/ReflectiveSchema.java b/core/src/main/java/org/polypheny/db/adapter/java/ReflectiveSchema.java index 3d0ea71ce0..f5249a7a9f 100644 --- a/core/src/main/java/org/polypheny/db/adapter/java/ReflectiveSchema.java +++ b/core/src/main/java/org/polypheny/db/adapter/java/ReflectiveSchema.java @@ -207,7 +207,7 @@ private Entity fieldRelation( final Field field ) { throw new RuntimeException( "Error while accessing field " + field, e ); } @SuppressWarnings("unchecked") final Enumerable enumerable = toEnumerable( o ); - return new FieldEntity<>( field, elementType, enumerable ); + return new FieldEntity<>( field, elementType, enumerable, null, null, null ); } @@ -249,8 +249,8 @@ private static class ReflectiveEntity extends AbstractQueryableEntity implements private final Enumerable enumerable; - ReflectiveEntity( Type elementType, Enumerable enumerable ) { - super( elementType ); + ReflectiveEntity( Type elementType, Enumerable enumerable, Long id, Long partitionId, Long adapterId ) { + super( elementType, id, partitionId, adapterId ); this.elementType = elementType; this.enumerable = enumerable; } @@ -338,13 +338,13 @@ private static class FieldEntity extends ReflectiveEntity { private Statistic statistic; - FieldEntity( Field field, Type elementType, Enumerable enumerable ) { - this( field, elementType, enumerable, Statistics.UNKNOWN ); + FieldEntity( Field field, Type elementType, Enumerable enumerable, Long id, Long partitionId, Long adapterId ) { + this( field, elementType, enumerable, Statistics.UNKNOWN, id, partitionId, adapterId ); } - FieldEntity( Field field, Type elementType, Enumerable enumerable, Statistic statistic ) { - super( elementType, enumerable ); + FieldEntity( Field field, Type elementType, Enumerable enumerable, Statistic statistic, Long id, Long partitionId, Long adapterId ) { + super( elementType, enumerable, id, partitionId, adapterId ); this.field = field; this.statistic = statistic; } diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalStreamer.java b/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalStreamer.java index de2e661f5e..89360b1bd8 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalStreamer.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalStreamer.java @@ -20,14 +20,12 @@ import java.util.ArrayList; import java.util.List; import java.util.stream.Collectors; -import org.polypheny.db.adapter.enumerable.EnumerableTableModify; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.AlgShuttle; import org.polypheny.db.algebra.core.Modify; import org.polypheny.db.algebra.core.Scan; import org.polypheny.db.algebra.core.Values; import org.polypheny.db.algebra.core.common.Streamer; -import org.polypheny.db.algebra.core.document.DocumentModify; import org.polypheny.db.algebra.logical.relational.LogicalModify; import org.polypheny.db.algebra.logical.relational.LogicalProject; import org.polypheny.db.algebra.logical.relational.LogicalValues; @@ -151,18 +149,6 @@ private static List createSourceList( Modify modify, RexBuilder rexBuil } - private static List createSourceList( DocumentModify modify, RexBuilder rexBuilder ) { - return modify.getUpdates() - .stream() - .map( name -> { - int size = modify.getRowType().getFieldList().size(); - int index = modify.getTable().getRowType().getFieldNames().indexOf( name ); - return 
rexBuilder.makeDynamicParam( - modify.getTable().getRowType().getFieldList().get( index ).getType(), size + index ); - } ).collect( Collectors.toList() ); - } - - private static void attachFilter( Modify modify, AlgBuilder algBuilder, RexBuilder rexBuilder ) { List fields = new ArrayList<>(); int i = 0; @@ -179,21 +165,6 @@ private static void attachFilter( Modify modify, AlgBuilder algBuilder, RexBuild } - private static void attachFilter( DocumentModify modify, AlgBuilder algBuilder, RexBuilder rexBuilder ) { - List fields = new ArrayList<>(); - int i = 0; - for ( AlgDataTypeField field : modify.getTable().getRowType().getFieldList() ) { - fields.add( - algBuilder.equals( - rexBuilder.makeInputRef( modify.getTable().getRowType(), i ), - rexBuilder.makeDynamicParam( field.getType(), i ) ) ); - i++; - } - algBuilder.filter( fields.size() == 1 - ? fields.get( 0 ) - : algBuilder.and( fields ) ); - } - private static AlgNode getChild( AlgNode child ) { if ( child instanceof AlgSubset ) { @@ -205,54 +176,16 @@ private static AlgNode getChild( AlgNode child ) { public static boolean isModifyApplicable( Modify modify ) { + // simple delete, which all store should be able to handle by themselves if ( modify.isInsert() && modify.getInput() instanceof Values ) { // simple insert, which all store should be able to handle by themselves return false; - } else if ( modify.isDelete() && modify.getInput() instanceof Scan ) { - // simple delete, which all store should be able to handle by themselves - return false; - } - return true; - } - - - public static boolean isModifyApplicable( DocumentModify modify ) { - - if ( modify.isInsert() && modify.getInput() instanceof Values ) { - // simple insert, which all store should be able to handle by themselves - return false; - } else if ( modify.isDelete() && modify.getInput() instanceof Scan ) { - // simple delete, which all store should be able to handle by themselves - return false; + } else { + return !modify.isDelete() || !(modify.getInput() instanceof Scan); } - return true; } - public static boolean isEnumerableModifyApplicable( EnumerableTableModify modify ) { - if ( modify.getSourceExpressionList() == null || modify.getUpdateColumnList() == null ) { - return false; - } - - if ( modify.isInsert() ) { - if ( modify.getInput() instanceof AlgSubset ) { - if ( ((AlgSubset) modify.getInput()).getOriginal() instanceof Values ) { - // simple insert, which no store shouldn't be able to handle by themselves - return false; - } - } - } - - if ( modify.isDelete() ) { - if ( modify.getInput() instanceof AlgSubset ) { - if ( ((AlgSubset) modify.getInput()).getOriginal() instanceof Scan ) { - // simple delete, which no store shouldn't be able to handle by themselves - return false; - } - } - } - return true; - } private static List getOldFieldRefs( AlgDataType rowType ) { diff --git a/core/src/main/java/org/polypheny/db/algebra/stream/StreamRules.java b/core/src/main/java/org/polypheny/db/algebra/stream/StreamRules.java index 68c60e6860..064df1f31b 100644 --- a/core/src/main/java/org/polypheny/db/algebra/stream/StreamRules.java +++ b/core/src/main/java/org/polypheny/db/algebra/stream/StreamRules.java @@ -54,6 +54,7 @@ import org.polypheny.db.algebra.logical.relational.LogicalRelScan; import org.polypheny.db.algebra.logical.relational.LogicalSort; import org.polypheny.db.algebra.logical.relational.LogicalUnion; +import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; import org.polypheny.db.catalog.entity.CatalogTable; import 
org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgOptEntity; @@ -273,11 +274,13 @@ public void onMatch( AlgOptRuleCall call ) { if ( streamableTable != null ) { final Entity entity1 = streamableTable.stream(); final CatalogTable catalogTable = algOptEntity.getCatalogEntity().unwrap( CatalogTable.class ); + final CatalogPartitionPlacement placement = algOptEntity.getPartitionPlacement(); final AlgOptEntity algOptEntity2 = AlgOptEntityImpl.create( algOptEntity.getRelOptSchema(), algOptEntity.getRowType(), entity1, - catalogTable ); + catalogTable, + placement ); final LogicalRelScan newScan = LogicalRelScan.create( cluster, algOptEntity2 ); call.transformTo( newScan ); } diff --git a/core/src/main/java/org/polypheny/db/plan/AlgOptEntity.java b/core/src/main/java/org/polypheny/db/plan/AlgOptEntity.java index afb9c255e6..7f27baa1ec 100644 --- a/core/src/main/java/org/polypheny/db/plan/AlgOptEntity.java +++ b/core/src/main/java/org/polypheny/db/plan/AlgOptEntity.java @@ -45,6 +45,7 @@ import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.catalog.entity.CatalogEntity; +import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; import org.polypheny.db.schema.ColumnStrategy; import org.polypheny.db.schema.Entity; import org.polypheny.db.schema.Wrapper; @@ -136,6 +137,8 @@ default Entity getTable() { CatalogEntity getCatalogEntity(); + CatalogPartitionPlacement getPartitionPlacement(); + /** * Contains the context needed to convert a table into a relational expression. diff --git a/core/src/main/java/org/polypheny/db/prepare/AlgOptEntityImpl.java b/core/src/main/java/org/polypheny/db/prepare/AlgOptEntityImpl.java index a093844441..5b8d586fb2 100644 --- a/core/src/main/java/org/polypheny/db/prepare/AlgOptEntityImpl.java +++ b/core/src/main/java/org/polypheny/db/prepare/AlgOptEntityImpl.java @@ -58,6 +58,7 @@ import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.algebra.type.AlgRecordType; import org.polypheny.db.catalog.entity.CatalogEntity; +import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; import org.polypheny.db.catalog.entity.CatalogTable; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgOptEntity; @@ -108,30 +109,35 @@ public class AlgOptEntityImpl extends AbstractPreparingEntity { * If not null, overrides the estimate from the actual table. 
*/ private final Double rowCount; + @Getter + @Nullable + private final CatalogPartitionPlacement partitionPlacement; private AlgOptEntityImpl( AlgOptSchema schema, AlgDataType rowType, - Entity entity, - CatalogEntity catalogEntity, - Function, Expression> expressionFunction, - Double rowCount ) { + @Nullable Entity entity, + @Nullable CatalogEntity catalogEntity, + @Nullable CatalogPartitionPlacement placement, + @Nullable Function, Expression> expressionFunction, + @Nullable Double rowCount ) { this.schema = schema; this.rowType = Objects.requireNonNull( rowType ); - this.entity = entity; // may be null + this.entity = entity; + this.partitionPlacement = placement; this.catalogEntity = catalogEntity; - this.expressionFunction = expressionFunction; // may be null - this.rowCount = rowCount; // may be null + this.expressionFunction = expressionFunction; + this.rowCount = rowCount; } - public static AlgOptEntityImpl create( AlgOptSchema schema, AlgDataType rowType, List names, Expression expression ) { - return new AlgOptEntityImpl( schema, rowType, null, null, c -> expression, null ); + public static AlgOptEntityImpl create( AlgOptSchema schema, AlgDataType rowType, Expression expression ) { + return new AlgOptEntityImpl( schema, rowType, null, null, null, c -> expression, null ); } - public static AlgOptEntityImpl create( AlgOptSchema schema, AlgDataType rowType, final PolyphenyDbSchema.TableEntry tableEntry, CatalogEntity catalogEntity, Double count ) { + public static AlgOptEntityImpl create( AlgOptSchema schema, AlgDataType rowType, final PolyphenyDbSchema.TableEntry tableEntry, CatalogEntity catalogEntity, CatalogPartitionPlacement placement, Double count ) { final Entity entity = tableEntry.getTable(); Double rowCount; if ( count == null ) { @@ -140,7 +146,7 @@ public static AlgOptEntityImpl create( AlgOptSchema schema, AlgDataType rowType, rowCount = count; } - return new AlgOptEntityImpl( schema, rowType, entity, catalogEntity, getClassExpressionFunction( tableEntry, entity ), rowCount ); + return new AlgOptEntityImpl( schema, rowType, entity, catalogEntity, placement, getClassExpressionFunction( tableEntry, entity ), rowCount ); } @@ -148,7 +154,7 @@ public static AlgOptEntityImpl create( AlgOptSchema schema, AlgDataType rowType, * Creates a copy of this RelOptTable. The new RelOptTable will have newRowType. 
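+ * All other attributes, including the newly added {@code partitionPlacement}, are carried over unchanged.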
*/ public AlgOptEntityImpl copy( AlgDataType newRowType ) { - return new AlgOptEntityImpl( this.schema, newRowType, this.entity, this.catalogEntity, this.expressionFunction, this.rowCount ); + return new AlgOptEntityImpl( this.schema, newRowType, this.entity, this.catalogEntity, this.partitionPlacement, this.expressionFunction, this.rowCount ); } @@ -175,11 +181,11 @@ private static Function, Expression> getClassExpressionFunction( final } - public static AlgOptEntityImpl create( AlgOptSchema schema, AlgDataType rowType, Entity entity, CatalogEntity catalogEntity ) { + public static AlgOptEntityImpl create( AlgOptSchema schema, AlgDataType rowType, Entity entity, CatalogEntity catalogEntity, CatalogPartitionPlacement placement ) { assert entity instanceof TranslatableEntity || entity instanceof ScannableEntity || entity instanceof ModifiableEntity; - return new AlgOptEntityImpl( schema, rowType, entity, catalogEntity, null, null ); + return new AlgOptEntityImpl( schema, rowType, entity, catalogEntity, placement, null, null ); } @@ -221,6 +227,7 @@ protected AlgOptEntity extend( Entity extendedEntity ) { extendedRowType, extendedEntity, null, + null, expressionFunction, getRowCount() ); } @@ -281,7 +288,7 @@ public AlgNode toAlg( ToAlgContext context, AlgTraitSet traitSet ) { } } final AlgOptEntity algOptEntity = - new AlgOptEntityImpl( this.schema, b.build(), this.entity, this.catalogEntity, this.expressionFunction, this.rowCount ) { + new AlgOptEntityImpl( this.schema, b.build(), this.entity, this.catalogEntity, this.partitionPlacement, this.expressionFunction, this.rowCount ) { @Override public T unwrap( Class clazz ) { if ( clazz.isAssignableFrom( InitializerExpressionFactory.class ) ) { diff --git a/core/src/main/java/org/polypheny/db/prepare/LixToAlgTranslator.java b/core/src/main/java/org/polypheny/db/prepare/LixToAlgTranslator.java index 1fca0c05ef..2419c228a4 100644 --- a/core/src/main/java/org/polypheny/db/prepare/LixToAlgTranslator.java +++ b/core/src/main/java/org/polypheny/db/prepare/LixToAlgTranslator.java @@ -34,7 +34,6 @@ package org.polypheny.db.prepare; -import com.google.common.collect.ImmutableList; import java.util.ArrayList; import java.util.Collections; import java.util.List; @@ -114,7 +113,6 @@ public AlgNode translate( Expression expression ) { AlgOptEntityImpl.create( null, typeFactory.createJavaType( Types.toClass( Types.getElementType( call.targetExpression.getType() ) ) ), - ImmutableList.of(), call.targetExpression ) ); case SCHEMA_GET_TABLE: @@ -123,7 +121,6 @@ public AlgNode translate( Expression expression ) { AlgOptEntityImpl.create( null, typeFactory.createJavaType( (Class) ((ConstantExpression) call.expressions.get( 1 )).value ), - ImmutableList.of(), call.targetExpression ) ); default: diff --git a/core/src/main/java/org/polypheny/db/prepare/PolyphenyDbCatalogReader.java b/core/src/main/java/org/polypheny/db/prepare/PolyphenyDbCatalogReader.java index 256cd16b24..ea87d6d319 100644 --- a/core/src/main/java/org/polypheny/db/prepare/PolyphenyDbCatalogReader.java +++ b/core/src/main/java/org/polypheny/db/prepare/PolyphenyDbCatalogReader.java @@ -45,8 +45,6 @@ import org.polypheny.db.algebra.operators.OperatorTable; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; -import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.entity.CatalogTable; import org.polypheny.db.nodes.Identifier; import org.polypheny.db.nodes.Operator; import org.polypheny.db.plan.AlgOptEntity; @@ -84,14 +82,14 @@ 
public PreparingEntity getTable( final List names ) { PolyphenyDbSchema.TableEntry entry = ValidatorUtil.getTableEntry( this, names ); if ( entry != null ) { final Entity entity = entry.getTable(); - CatalogTable catalogTable = Catalog.getInstance().getTable( entity.getId() ); + if ( entity instanceof Wrapper ) { final PreparingEntity algOptTable = ((Wrapper) entity).unwrap( PreparingEntity.class ); if ( algOptTable != null ) { return algOptTable; } } - return AlgOptEntityImpl.create( this, entity.getRowType( typeFactory ), entry, catalogTable, null ); + return AlgOptEntityImpl.create( this, entity.getRowType( typeFactory ), entry, entity.getCatalogTable(), entity.getPartitionPlacement(), null ); } return null; } @@ -103,8 +101,8 @@ public AlgOptEntity getCollection( final List names ) { PolyphenyDbSchema.TableEntry entry = ValidatorUtil.getTableEntry( this, names ); if ( entry != null ) { final Entity entity = entry.getTable(); - CatalogTable catalogTable = Catalog.getInstance().getTable( entity.getId() ); - return AlgOptEntityImpl.create( this, entity.getRowType( typeFactory ), entry, catalogTable, null ); + + return AlgOptEntityImpl.create( this, entity.getRowType( typeFactory ), entry, entity.getCatalogTable(), entity.getPartitionPlacement(), null ); } return null; } diff --git a/core/src/main/java/org/polypheny/db/prepare/PolyphenyDbPrepareImpl.java b/core/src/main/java/org/polypheny/db/prepare/PolyphenyDbPrepareImpl.java index 9cb4854594..9116a54e42 100644 --- a/core/src/main/java/org/polypheny/db/prepare/PolyphenyDbPrepareImpl.java +++ b/core/src/main/java/org/polypheny/db/prepare/PolyphenyDbPrepareImpl.java @@ -196,7 +196,6 @@ public class PolyphenyDbPrepareImpl implements PolyphenyDbPrepare { EnumerableRules.ENUMERABLE_MODIFY_COLLECT_RULE, EnumerableRules.ENUMERABLE_INTERSECT_RULE, EnumerableRules.ENUMERABLE_MINUS_RULE, - EnumerableRules.ENUMERABLE_TABLE_MODIFICATION_RULE, EnumerableRules.ENUMERABLE_VALUES_RULE, EnumerableRules.ENUMERABLE_WINDOW_RULE, EnumerableRules.ENUMERABLE_TABLE_SCAN_RULE, diff --git a/core/src/main/java/org/polypheny/db/prepare/QueryableAlgBuilder.java b/core/src/main/java/org/polypheny/db/prepare/QueryableAlgBuilder.java index f1d2c369b5..17e8fb7c1e 100644 --- a/core/src/main/java/org/polypheny/db/prepare/QueryableAlgBuilder.java +++ b/core/src/main/java/org/polypheny/db/prepare/QueryableAlgBuilder.java @@ -64,8 +64,6 @@ import org.polypheny.db.algebra.logical.relational.LogicalFilter; import org.polypheny.db.algebra.logical.relational.LogicalProject; import org.polypheny.db.algebra.logical.relational.LogicalRelScan; -import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.entity.CatalogTable; import org.polypheny.db.rex.RexNode; import org.polypheny.db.schema.PolyphenyDbSchema; import org.polypheny.db.schema.QueryableEntity; @@ -108,7 +106,7 @@ AlgNode toAlg( Queryable queryable ) { if ( queryable instanceof AbstractTableQueryable ) { final AbstractTableQueryable tableQueryable = (AbstractTableQueryable) queryable; final QueryableEntity table = tableQueryable.table; - final CatalogTable catalogTable = Catalog.getInstance().getTable( table.getId() ); + final PolyphenyDbSchema.TableEntry tableEntry = PolyphenyDbSchema .from( tableQueryable.schema ) @@ -117,7 +115,8 @@ AlgNode toAlg( Queryable queryable ) { null, table.getRowType( translator.typeFactory ), tableEntry, - catalogTable, + table.getCatalogTable(), + table.getPartitionPlacement(), null ); if ( table instanceof TranslatableEntity ) { return ((TranslatableEntity) table).toAlg( 
translator.toAlgContext(), algOptTable, translator.cluster.traitSet() ); diff --git a/core/src/main/java/org/polypheny/db/schema/Entity.java b/core/src/main/java/org/polypheny/db/schema/Entity.java index 19d75ab382..1507917ffa 100644 --- a/core/src/main/java/org/polypheny/db/schema/Entity.java +++ b/core/src/main/java/org/polypheny/db/schema/Entity.java @@ -35,7 +35,10 @@ import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; +import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.Catalog.NamespaceType; +import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; +import org.polypheny.db.catalog.entity.CatalogTable; import org.polypheny.db.nodes.Call; import org.polypheny.db.nodes.Node; import org.polypheny.db.prepare.JavaTypeFactoryImpl; @@ -85,6 +88,26 @@ default AlgDataTypeFactory getTypeFactory() { */ Long getId(); + @Deprecated // whole entity might get replaced + default CatalogTable getCatalogTable() { + if ( getId() == null ) { + return null; + } + return Catalog.getInstance().getTable( getId() ); + } + + Long getPartitionId(); + + Long getAdapterId(); + + @Deprecated // whole entity might get replaced + default CatalogPartitionPlacement getPartitionPlacement() { + if ( getAdapterId() == null || getPartitionId() == null ) { + return null; + } + return Catalog.getInstance().getPartitionPlacement( Math.toIntExact( getAdapterId() ), getPartitionId() ); + } + /** * Type of table. */ diff --git a/core/src/main/java/org/polypheny/db/schema/LogicalEntity.java b/core/src/main/java/org/polypheny/db/schema/LogicalEntity.java index 138a0d5086..2ec19236dd 100644 --- a/core/src/main/java/org/polypheny/db/schema/LogicalEntity.java +++ b/core/src/main/java/org/polypheny/db/schema/LogicalEntity.java @@ -70,8 +70,7 @@ public LogicalEntity( List logicalColumnNames, AlgProtoDataType protoRowType, NamespaceType schemaType ) { - super( Object[].class ); - this.id = tableId; + super( Object[].class, tableId, null, null ); this.logicalSchemaName = logicalSchemaName; this.logicalTableName = logicalTableName; this.columnIds = columnIds; @@ -115,12 +114,6 @@ public AlgDataType getRowType( AlgDataTypeFactory typeFactory ) { } - @Override - public Collection getModifiableCollection() { - throw new RuntimeException( "getModifiableCollection() is not implemented for Logical Tables!" ); - } - - @Override public Queryable asQueryable( DataContext dataContext, SchemaPlus schema, String tableName ) { throw new RuntimeException( "asQueryable() is not implemented for Logical Tables!" ); diff --git a/core/src/main/java/org/polypheny/db/schema/ModifiableEntity.java b/core/src/main/java/org/polypheny/db/schema/ModifiableEntity.java index 0f365b4325..5d8f5f85b3 100644 --- a/core/src/main/java/org/polypheny/db/schema/ModifiableEntity.java +++ b/core/src/main/java/org/polypheny/db/schema/ModifiableEntity.java @@ -48,12 +48,6 @@ */ public interface ModifiableEntity extends QueryableEntity { - /** - * Returns the modifiable collection. - * Modifying the collection will change the table's contents. - */ - Collection getModifiableCollection(); - /** * Creates a relational expression that modifies this table. 
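+ * <p>With {@code getModifiableCollection()} removed, this method is the single entry point for DML on an adapter entity. A minimal sketch of an implementation (illustrative only, mirroring the test implementation in FrameworksTest; real adapters usually return their own adapter-specific {@link Modify} subclass instead):
+ * <pre>{@code
+ * @Override
+ * public Modify toModificationAlg( AlgOptCluster cluster, AlgOptEntity table, Prepare.CatalogReader catalogReader, AlgNode child, Modify.Operation operation, List updateColumnList, List sourceExpressionList, boolean flattened ) {
+ *     // fall back to a logical modify node; the planner then routes it to the store
+ *     return LogicalModify.create( table, catalogReader, child, operation, updateColumnList, sourceExpressionList, flattened );
+ * }
+ * }</pre>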
*/ diff --git a/core/src/main/java/org/polypheny/db/schema/impl/AbstractEntity.java b/core/src/main/java/org/polypheny/db/schema/impl/AbstractEntity.java index f8a4a01df5..4d4f431a3f 100644 --- a/core/src/main/java/org/polypheny/db/schema/impl/AbstractEntity.java +++ b/core/src/main/java/org/polypheny/db/schema/impl/AbstractEntity.java @@ -56,13 +56,21 @@ @Getter public abstract class AbstractEntity implements Entity, Wrapper { + @Getter + protected final Long partitionId; + @Getter + private final Long id; @Getter - protected Long id; + private final Long adapterId; + protected Statistic statistic = Statistics.UNKNOWN; - protected AbstractEntity() { + protected AbstractEntity( Long id, Long partitionId, Long adapterId ) { + this.id = id; + this.partitionId = partitionId; + this.adapterId = adapterId; } diff --git a/core/src/main/java/org/polypheny/db/tools/Programs.java b/core/src/main/java/org/polypheny/db/tools/Programs.java index 011e874107..870abb6f41 100644 --- a/core/src/main/java/org/polypheny/db/tools/Programs.java +++ b/core/src/main/java/org/polypheny/db/tools/Programs.java @@ -142,7 +142,6 @@ public class Programs { EnumerableRules.ENUMERABLE_MODIFY_COLLECT_RULE, EnumerableRules.ENUMERABLE_INTERSECT_RULE, EnumerableRules.ENUMERABLE_MINUS_RULE, - EnumerableRules.ENUMERABLE_TABLE_MODIFICATION_RULE, EnumerableRules.ENUMERABLE_VALUES_RULE, EnumerableRules.ENUMERABLE_WINDOW_RULE, EnumerableRules.ENUMERABLE_CALC_RULE, diff --git a/core/src/main/java/org/polypheny/db/util/BuiltInMethod.java b/core/src/main/java/org/polypheny/db/util/BuiltInMethod.java index d27032fa11..02ad47c736 100644 --- a/core/src/main/java/org/polypheny/db/util/BuiltInMethod.java +++ b/core/src/main/java/org/polypheny/db/util/BuiltInMethod.java @@ -126,7 +126,6 @@ import org.polypheny.db.runtime.functions.Functions.FlatProductInputType; import org.polypheny.db.runtime.functions.MqlFunctions; import org.polypheny.db.schema.FilterableEntity; -import org.polypheny.db.schema.ModifiableEntity; import org.polypheny.db.schema.Namespace; import org.polypheny.db.schema.ProjectableFilterableEntity; import org.polypheny.db.schema.QueryableEntity; @@ -315,7 +314,6 @@ public enum BuiltInMethod { GREATER( Functions.class, "greater", Comparable.class, Comparable.class ), BIT_AND( Functions.class, "bitAnd", long.class, long.class ), BIT_OR( Functions.class, "bitOr", long.class, long.class ), - MODIFIABLE_TABLE_GET_MODIFIABLE_COLLECTION( ModifiableEntity.class, "getModifiableCollection" ), SCANNABLE_TABLE_SCAN( ScannableEntity.class, "scan", DataContext.class ), STRING_TO_BOOLEAN( Functions.class, "toBoolean", String.class ), INTERNAL_TO_DATE( Functions.class, "internalToDate", int.class ), diff --git a/core/src/test/java/org/polypheny/db/catalog/MockCatalogReader.java b/core/src/test/java/org/polypheny/db/catalog/MockCatalogReader.java index 80a4a057f7..769c82eef6 100644 --- a/core/src/test/java/org/polypheny/db/catalog/MockCatalogReader.java +++ b/core/src/test/java/org/polypheny/db/catalog/MockCatalogReader.java @@ -65,6 +65,7 @@ import org.polypheny.db.algebra.type.StructKind; import org.polypheny.db.catalog.Catalog.NamespaceType; import org.polypheny.db.catalog.entity.CatalogEntity; +import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; import org.polypheny.db.nodes.Call; import org.polypheny.db.nodes.Node; import org.polypheny.db.plan.AlgOptEntity; @@ -347,12 +348,6 @@ public AlgDataType getRowType( AlgDataTypeFactory typeFactory ) { } - @Override - public Collection getModifiableCollection() { - return null; - 
} - - @Override public Queryable asQueryable( DataContext dataContext, SchemaPlus schema, String tableName ) { return null; @@ -549,6 +544,12 @@ public CatalogEntity getCatalogEntity() { } + @Override + public CatalogPartitionPlacement getPartitionPlacement() { + return null; + } + + public void addColumn( String name, AlgDataType type ) { addColumn( name, type, false ); } @@ -698,6 +699,18 @@ public Long getId() { } + @Override + public Long getPartitionId() { + throw new RuntimeException( "Method getPartitionId is not implemented." ); + } + + + @Override + public Long getAdapterId() { + return null; + } + + @Override public boolean isRolledUp( String column ) { return table.rolledUpColumns.contains( column ); diff --git a/core/src/test/java/org/polypheny/db/schemas/HrClusteredSchema.java b/core/src/test/java/org/polypheny/db/schemas/HrClusteredSchema.java index 317611e0af..b654c4cb59 100644 --- a/core/src/test/java/org/polypheny/db/schemas/HrClusteredSchema.java +++ b/core/src/test/java/org/polypheny/db/schemas/HrClusteredSchema.java @@ -121,6 +121,7 @@ private static class PkClusteredEntity extends AbstractEntity implements Scannab PkClusteredEntity( Function dataTypeBuilder, ImmutableBitSet pkColumns, List data ) { + super( null, null, null ); this.data = data; this.typeBuilder = dataTypeBuilder; this.pkColumns = pkColumns; @@ -133,7 +134,7 @@ public Statistic getStatistic() { for ( Integer key : pkColumns ) { collationFields.add( new AlgFieldCollation( key, AlgFieldCollation.Direction.ASCENDING, AlgFieldCollation.NullDirection.LAST ) ); } - return Statistics.of( Double.valueOf( data.size() ), ImmutableList.of( pkColumns ), ImmutableList.of( AlgCollations.of( collationFields ) ) ); + return Statistics.of( (double) data.size(), ImmutableList.of( pkColumns ), ImmutableList.of( AlgCollations.of( collationFields ) ) ); } diff --git a/core/src/test/java/org/polypheny/db/test/JdbcTest.java b/core/src/test/java/org/polypheny/db/test/JdbcTest.java index dad4810614..5d7b1f7740 100644 --- a/core/src/test/java/org/polypheny/db/test/JdbcTest.java +++ b/core/src/test/java/org/polypheny/db/test/JdbcTest.java @@ -40,7 +40,7 @@ public class JdbcTest { public abstract static class AbstractModifiableEntity extends AbstractEntity implements ModifiableEntity { protected AbstractModifiableEntity( String tableName ) { - super(); + super( null, null, null ); } diff --git a/core/src/test/java/org/polypheny/db/test/ScannableEntityTest.java b/core/src/test/java/org/polypheny/db/test/ScannableEntityTest.java index aa61575d75..b71655cd28 100644 --- a/core/src/test/java/org/polypheny/db/test/ScannableEntityTest.java +++ b/core/src/test/java/org/polypheny/db/test/ScannableEntityTest.java @@ -436,6 +436,11 @@ private static Integer getFilter( boolean cooperative, List filters ) { */ public static class SimpleEntity extends AbstractEntity implements ScannableEntity { + protected SimpleEntity() { + super( null, null, null ); + } + + @Override public AlgDataType getRowType( AlgDataTypeFactory typeFactory ) { return typeFactory.builder().add( "i", null, PolyType.INTEGER ).build(); @@ -444,7 +449,7 @@ public AlgDataType getRowType( AlgDataTypeFactory typeFactory ) { @Override public Enumerable scan( DataContext root ) { - return new AbstractEnumerable() { + return new AbstractEnumerable<>() { @Override public Enumerator enumerator() { return tens(); @@ -460,6 +465,11 @@ public Enumerator enumerator() { */ public static class BeatlesEntity extends AbstractEntity implements ScannableEntity { + protected BeatlesEntity() { 
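+ // test entity without catalog identity: id, partition, and adapter are all null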
super( null, null, null ); + } + + @Override public AlgDataType getRowType( AlgDataTypeFactory typeFactory ) { return typeFactory.builder() @@ -492,6 +502,7 @@ public static class BeatlesFilterableEntity extends AbstractEntity implements Fi public BeatlesFilterableEntity( StringBuilder buf, boolean cooperative ) { + super( null, null, null ); this.buf = buf; this.cooperative = cooperative; } @@ -531,6 +542,7 @@ public static class BeatlesProjectableFilterableEntity extends AbstractEntity im public BeatlesProjectableFilterableEntity( StringBuilder buf, boolean cooperative ) { + super( null, null, null ); this.buf = buf; this.cooperative = cooperative; } diff --git a/dbms/src/main/java/org/polypheny/db/processing/VolcanoQueryProcessor.java b/dbms/src/main/java/org/polypheny/db/processing/VolcanoQueryProcessor.java index d1d03881b7..9d8ddb016d 100644 --- a/dbms/src/main/java/org/polypheny/db/processing/VolcanoQueryProcessor.java +++ b/dbms/src/main/java/org/polypheny/db/processing/VolcanoQueryProcessor.java @@ -92,7 +92,6 @@ public class VolcanoQueryProcessor extends AbstractQueryProcessor { EnumerableRules.ENUMERABLE_MODIFY_COLLECT_RULE, EnumerableRules.ENUMERABLE_INTERSECT_RULE, EnumerableRules.ENUMERABLE_MINUS_RULE, - EnumerableRules.ENUMERABLE_TABLE_MODIFICATION_RULE, EnumerableRules.ENUMERABLE_TABLE_MODIFY_TO_STREAMER_RULE, EnumerableRules.ENUMERABLE_STREAMER_RULE, EnumerableRules.ENUMERABLE_CONTEXT_SWITCHER_RULE, diff --git a/dbms/src/main/java/org/polypheny/db/schema/PolySchemaBuilder.java b/dbms/src/main/java/org/polypheny/db/schema/PolySchemaBuilder.java index b93a475477..69a20aadfd 100644 --- a/dbms/src/main/java/org/polypheny/db/schema/PolySchemaBuilder.java +++ b/dbms/src/main/java/org/polypheny/db/schema/PolySchemaBuilder.java @@ -346,7 +346,7 @@ private void buildPhysicalTables( AbstractPolyphenyDbSchema polyphenyDbSchema, S Catalog.getInstance().getColumnPlacementsOnAdapterSortedByPhysicalPosition( adapter.getAdapterId(), catalogTable.id ), partitionPlacement ); - physicalTables.put( catalog.getTable( tableId ).name + "_" + partitionPlacement.partitionId, entity ); + physicalTables.put( catalogTable.name + "_" + partitionPlacement.partitionId, entity ); rootSchema.add( schemaName, s, catalogSchema.namespaceType ); physicalTables.forEach( rootSchema.getSubNamespace( schemaName )::add ); diff --git a/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/CassandraTable.java b/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/CassandraTable.java index 73ab828340..c47575e828 100644 --- a/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/CassandraTable.java +++ b/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/CassandraTable.java @@ -292,10 +292,6 @@ public AlgNode toAlg( ToAlgContext context, AlgOptTable algOptTable, AlgTraitSet } - @Override - public Collection getModifiableCollection() { - throw new RuntimeException( "getModifiableCollection() is not implemented for Cassandra Tables!" 
); - } @Override diff --git a/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/CottontailEntity.java b/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/CottontailEntity.java index 3693589e5f..1c25b9225d 100644 --- a/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/CottontailEntity.java +++ b/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/CottontailEntity.java @@ -78,8 +78,9 @@ protected CottontailEntity( String physicalSchemaName, String physicalTableName, List physicalColumnNames, - Long tableId ) { - super( Object[].class ); + Long tableId, + long partitionId, long adapterId ) { + super( Object[].class, tableId, partitionId, adapterId ); this.cottontailSchema = cottontailSchema; this.protoRowType = protoRowType; @@ -88,7 +89,6 @@ protected CottontailEntity( this.physicalSchemaName = physicalSchemaName; this.physicalTableName = physicalTableName; this.physicalColumnNames = physicalColumnNames; - this.id = tableId; this.entity = EntityName.newBuilder() .setName( this.physicalTableName ) @@ -108,12 +108,6 @@ public String toString() { } - @Override - public Collection getModifiableCollection() { - throw new RuntimeException( "getModifiableCollection() is not implemented for Cottontail adapter!" ); - } - - @Override public Modify toModificationAlg( AlgOptCluster cluster, @@ -144,22 +138,6 @@ public Queryable asQueryable( DataContext dataContext, SchemaPlus sche } - /*@Override - public Enumerable scan( DataContext root ) { - Query query = Query.newBuilder() - .setFrom( From.newBuilder().setEntity( this.entity ).build() ) - .build(); - QueryMessage queryMessage = QueryMessage.newBuilder().setQuery( query ).build(); - - - return new CottontailQueryEnumerable<>( - this.cottontailSchema.getWrapper().query( queryMessage ), - new CottontailQueryEnumerable.RowTypeParser( - this.getRowType( root.getTypeFactory() ), - this.physicalColumnNames ) ); - }*/ - - @Override public AlgNode toAlg( ToAlgContext context, AlgOptEntity algOptEntity, AlgTraitSet traitSet ) { return new CottontailScan( context.getCluster(), algOptEntity, this, traitSet, this.cottontailSchema.getConvention() ); diff --git a/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/CottontailPlugin.java b/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/CottontailPlugin.java index e6cd075c80..540b8cfdf2 100644 --- a/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/CottontailPlugin.java +++ b/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/CottontailPlugin.java @@ -218,20 +218,14 @@ public Entity createTableSchema( CatalogTable combinedTable, List logicalColumnNames = new LinkedList<>(); List physicalColumnNames = new LinkedList<>(); - String physicalSchemaName = null; - String physicalTableName = null; Long tableId = combinedTable.id; - if ( physicalSchemaName == null ) { - physicalSchemaName = partitionPlacement.physicalTableName != null - ? partitionPlacement.physicalSchemaName - : this.dbName; - } - if ( physicalTableName == null ) { - physicalTableName = partitionPlacement.physicalTableName != null - ? partitionPlacement.physicalTableName - : CottontailNameUtil.createPhysicalTableName( combinedTable.id, partitionPlacement.partitionId ); - } + String physicalSchemaName = partitionPlacement.physicalTableName != null + ? 
partitionPlacement.physicalSchemaName + : this.dbName; + String physicalTableName = partitionPlacement.physicalTableName != null + ? partitionPlacement.physicalTableName + : CottontailNameUtil.createPhysicalTableName( combinedTable.id, partitionPlacement.partitionId ); for ( CatalogColumnPlacement placement : columnPlacementsOnStore ) { CatalogColumn catalogColumn = Catalog.getInstance().getColumn( placement.columnId ); @@ -244,7 +238,7 @@ public Entity createTableSchema( CatalogTable combinedTable, List fieldTypes, int[] fields, CsvSource csvSource, Long tableId ) { + super( tableId, null, null ); this.source = source; this.protoRowType = protoRowType; this.fieldTypes = fieldTypes; this.fields = fields; this.csvSource = csvSource; - this.id = tableId; } diff --git a/plugins/druid-adapter/src/main/java/org/polypheny/db/adapter/druid/DruidEntity.java b/plugins/druid-adapter/src/main/java/org/polypheny/db/adapter/druid/DruidEntity.java index 1d682d3abc..bb13a7bc13 100644 --- a/plugins/druid-adapter/src/main/java/org/polypheny/db/adapter/druid/DruidEntity.java +++ b/plugins/druid-adapter/src/main/java/org/polypheny/db/adapter/druid/DruidEntity.java @@ -102,6 +102,7 @@ public class DruidEntity extends AbstractEntity implements TranslatableEntity { * @param timestampFieldName Name of the column that contains the time */ public DruidEntity( DruidSchema schema, String dataSource, AlgProtoDataType protoRowType, Set metricFieldNames, String timestampFieldName, List intervals, Map> complexMetrics, Map allFields ) { + super( null, null, null ); this.timestampFieldName = Objects.requireNonNull( timestampFieldName ); this.schema = Objects.requireNonNull( schema ); this.dataSource = Objects.requireNonNull( dataSource ); diff --git a/plugins/elasticsearch-adapter/src/main/java/org/polypheny/db/adapter/elasticsearch/ElasticsearchEntity.java b/plugins/elasticsearch-adapter/src/main/java/org/polypheny/db/adapter/elasticsearch/ElasticsearchEntity.java index 802e55dde9..a6e3efb43f 100644 --- a/plugins/elasticsearch-adapter/src/main/java/org/polypheny/db/adapter/elasticsearch/ElasticsearchEntity.java +++ b/plugins/elasticsearch-adapter/src/main/java/org/polypheny/db/adapter/elasticsearch/ElasticsearchEntity.java @@ -95,8 +95,8 @@ public class ElasticsearchEntity extends AbstractQueryableEntity implements Tran /** * Creates an ElasticsearchTable. 
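+ * @param transport the configured transport used to reach Elasticsearch; must not be null + * @param id table id; may be {@code null} when the schema is derived directly from the index + * @param partitionId partition id; may be {@code null} + * @param adapterId adapter id; may be {@code null}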
*/ - ElasticsearchEntity( ElasticsearchTransport transport ) { - super( Object[].class ); + ElasticsearchEntity( ElasticsearchTransport transport, Long id, Long partitionId, Long adapterId ) { + super( Object[].class, id, partitionId, adapterId ); this.transport = Objects.requireNonNull( transport, "transport" ); this.version = transport.version; this.indexName = transport.indexName; diff --git a/plugins/elasticsearch-adapter/src/main/java/org/polypheny/db/adapter/elasticsearch/ElasticsearchSchema.java b/plugins/elasticsearch-adapter/src/main/java/org/polypheny/db/adapter/elasticsearch/ElasticsearchSchema.java index dcdce77455..e1a1b51574 100644 --- a/plugins/elasticsearch-adapter/src/main/java/org/polypheny/db/adapter/elasticsearch/ElasticsearchSchema.java +++ b/plugins/elasticsearch-adapter/src/main/java/org/polypheny/db/adapter/elasticsearch/ElasticsearchSchema.java @@ -103,12 +103,12 @@ public ElasticsearchSchema( long id, RestClient client, ObjectMapper mapper, Str this.fetchSize = fetchSize; if ( type == null ) { try { - this.tableMap = createTables( listTypesFromElastic() ); + this.tableMap = createTables( listTypesFromElastic(), null, null, null ); } catch ( IOException e ) { throw new UncheckedIOException( "Couldn't get types for " + index, e ); } } else { - this.tableMap = createTables( Collections.singleton( type ) ); + this.tableMap = createTables( Collections.singleton( type ), null, null, null ); } } @@ -120,11 +120,11 @@ protected Map getTableMap() { } - private Map createTables( Iterable types ) { + private Map createTables( Iterable types, Long id, Long partitionId, Long adapterId ) { final ImmutableMap.Builder builder = ImmutableMap.builder(); for ( String type : types ) { final ElasticsearchTransport transport = new ElasticsearchTransport( client, mapper, index, type, fetchSize ); - builder.put( type, new ElasticsearchEntity( transport ) ); + builder.put( type, new ElasticsearchEntity( transport, id, partitionId, adapterId ) ); } return builder.build(); } diff --git a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/FileStoreSchema.java b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/FileStoreSchema.java index c17a822b8c..1a1c0660cc 100644 --- a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/FileStoreSchema.java +++ b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/FileStoreSchema.java @@ -134,6 +134,7 @@ public Entity createFileTable( catalogTable.name + "_" + partitionPlacement.partitionId, catalogTable.id, partitionPlacement.partitionId, + getAdapterId(), columnIds, columnTypes, columnNames, diff --git a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/FileTranslatableEntity.java b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/FileTranslatableEntity.java index 04d7e332f1..5b2e4231da 100644 --- a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/FileTranslatableEntity.java +++ b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/FileTranslatableEntity.java @@ -55,7 +55,7 @@ public class FileTranslatableEntity extends AbstractQueryableEntity implements T @Getter private final String tableName; @Getter - private final long partitionId; + private final Long partitionId; @Getter private final List columnNames; @Getter @@ -65,7 +65,7 @@ public class FileTranslatableEntity extends AbstractQueryableEntity implements T @Getter private final List pkIds; // Ids of the columns that are part of the primary key @Getter - private final int adapterId; 
+ private final Long adapterId; @Getter private final FileSchema fileSchema; private final AlgProtoDataType protoRowType; @@ -76,18 +76,17 @@ public FileTranslatableEntity( final String tableName, final Long tableId, final long partitionId, - final List columnIds, + long adapterId, final List columnIds, final ArrayList columnTypes, final List columnNames, final List pkIds, final AlgProtoDataType protoRowType ) { - super( Object[].class ); + super( Object[].class, tableId, partitionId, adapterId ); this.fileSchema = fileSchema; this.rootDir = fileSchema.getRootDir(); this.tableName = tableName; - this.id = tableId; this.partitionId = partitionId; - this.adapterId = fileSchema.getAdapterId(); + this.adapterId = (long) fileSchema.getAdapterId(); this.pkIds = pkIds; this.protoRowType = protoRowType; @@ -116,13 +115,6 @@ public AlgDataType getRowType( AlgDataTypeFactory typeFactory ) { } - @Override - public Collection getModifiableCollection() { - throw new UnsupportedOperationException( "getModifiableCollection not implemented" ); - //return new ArrayList<>(); - } - - @Override public Modify toModificationAlg( AlgOptCluster cluster, diff --git a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/source/QfsSchema.java b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/source/QfsSchema.java index 6ebec61d37..84fe4b5a68 100644 --- a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/source/QfsSchema.java +++ b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/source/QfsSchema.java @@ -137,6 +137,7 @@ public Entity createFileTable( CatalogTable catalogTable, List region ) { - super( Object[].class ); + GeodeEntity( Region region, Long id, Long partitionId, Long adapterId ) { + super( Object[].class, id, partitionId, adapterId ); this.regionName = region.getName(); this.rowType = GeodeUtils.autodetectRelTypeFromRegion( region ); } diff --git a/plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/algebra/GeodeSchema.java b/plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/algebra/GeodeSchema.java index b647fc4528..ed9a78b588 100644 --- a/plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/algebra/GeodeSchema.java +++ b/plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/algebra/GeodeSchema.java @@ -75,7 +75,7 @@ protected Map getTableMap() { final ImmutableMap.Builder builder = ImmutableMap.builder(); for ( String regionName : regionNames ) { Region region = GeodeUtils.createRegion( cache, regionName ); - Entity entity = new GeodeEntity( region ); + Entity entity = new GeodeEntity( region, null, null, null ); builder.put( regionName, entity ); } tableMap = builder.build(); diff --git a/plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/simple/GeodeSimpleScannableEntity.java b/plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/simple/GeodeSimpleScannableEntity.java index fa0f943f19..d2650db1f0 100644 --- a/plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/simple/GeodeSimpleScannableEntity.java +++ b/plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/simple/GeodeSimpleScannableEntity.java @@ -58,7 +58,7 @@ public class GeodeSimpleScannableEntity extends AbstractEntity implements Scanna public GeodeSimpleScannableEntity( String regionName, AlgDataType algDataType, ClientCache clientCache ) { - super(); + super( null, null, null ); this.regionName = regionName; this.clientCache = clientCache; this.algDataType = 
algDataType; diff --git a/plugins/html-adapter/src/main/java/org/polypheny/db/adapter/html/HtmlEntity.java b/plugins/html-adapter/src/main/java/org/polypheny/db/adapter/html/HtmlEntity.java index 83761be7ec..33dbda1f0b 100644 --- a/plugins/html-adapter/src/main/java/org/polypheny/db/adapter/html/HtmlEntity.java +++ b/plugins/html-adapter/src/main/java/org/polypheny/db/adapter/html/HtmlEntity.java @@ -73,8 +73,8 @@ class HtmlEntity extends AbstractQueryableEntity implements TranslatableEntity { /** * Creates a HtmlTable. */ - private HtmlEntity( Source source, String selector, Integer index, AlgProtoDataType protoRowType, List> fieldConfigs ) throws Exception { - super( Object[].class ); + private HtmlEntity( Source source, String selector, Integer index, AlgProtoDataType protoRowType, List> fieldConfigs, Long id, Long partitionId, Long adapterId ) throws Exception { + super( Object[].class, id, partitionId, adapterId ); this.protoRowType = protoRowType; this.reader = new HtmlReader( source, selector, index ); @@ -89,7 +89,7 @@ static HtmlEntity create( Source source, Map tableDef ) throws E @SuppressWarnings("unchecked") List> fieldConfigs = (List>) tableDef.get( "fields" ); String selector = (String) tableDef.get( "selector" ); Integer index = (Integer) tableDef.get( "index" ); - return new HtmlEntity( source, selector, index, null, fieldConfigs ); + return new HtmlEntity( source, selector, index, null, fieldConfigs, null, null, null ); } diff --git a/plugins/html-adapter/src/main/java/org/polypheny/db/adapter/html/JsonEntity.java b/plugins/html-adapter/src/main/java/org/polypheny/db/adapter/html/JsonEntity.java index 4a1eb98a9f..adea2284ec 100644 --- a/plugins/html-adapter/src/main/java/org/polypheny/db/adapter/html/JsonEntity.java +++ b/plugins/html-adapter/src/main/java/org/polypheny/db/adapter/html/JsonEntity.java @@ -41,6 +41,7 @@ public class JsonEntity extends AbstractEntity implements ScannableEntity { * Creates a JsonTable. 
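+ * <p>JSON sources carry no catalog identity, hence the {@code super( null, null, null )} call below.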
*/ public JsonEntity( Source source ) { + super( null, null, null ); this.source = source; } diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcEntity.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcEntity.java index 46e2195bd9..20b1f738db 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcEntity.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcEntity.java @@ -36,6 +36,7 @@ import com.google.common.collect.Lists; import java.util.Arrays; +import java.util.Collections; import java.util.List; import java.util.Objects; import org.apache.calcite.avatica.ColumnMetaData; @@ -112,8 +113,10 @@ public JdbcEntity( String physicalSchemaName, String physicalTableName, List physicalColumnNames, - Long tableId ) { - super( Object[].class ); + long tableId, + long partitionId, + long adapterId ) { + super( Object[].class, tableId, partitionId, adapterId ); this.jdbcSchema = jdbcSchema; this.logicalColumnNames = logicalColumnNames; this.physicalSchemaName = physicalSchemaName; @@ -121,7 +124,6 @@ public JdbcEntity( this.physicalColumnNames = physicalColumnNames; this.jdbcTableType = Objects.requireNonNull( jdbcTableType ); this.protoRowType = protoRowType; - this.id = tableId; } @@ -146,7 +148,7 @@ private List> fieldClasses( final JavaTypeFact final AlgDataType rowType = protoRowType.apply( typeFactory ); return Lists.transform( rowType.getFieldList(), f -> { final AlgDataType type = f.getType(); - final Class clazz = (Class) typeFactory.getJavaClass( type ); + final Class clazz = (Class) typeFactory.getJavaClass( type ); final ColumnMetaData.Rep rep = Util.first( ColumnMetaData.Rep.of( clazz ), ColumnMetaData.Rep.OBJECT ); return Pair.of( rep, type.getPolyType().getJdbcOrdinal() ); } ); @@ -158,7 +160,7 @@ SqlString generateSql() { for ( String str : physicalColumnNames ) { pcnl.add( new SqlIdentifier( Arrays.asList( physicalTableName, str ), ParserPos.ZERO ) ); } - //final SqlNodeList selectList = new SqlNodeList( Collections.singletonList( SqlIdentifier.star( SqlParserPos.ZERO ) ), SqlParserPos.ZERO ); + final SqlNodeList selectList = new SqlNodeList( pcnl, ParserPos.ZERO ); SqlIdentifier physicalTableName = new SqlIdentifier( Arrays.asList( physicalSchemaName, this.physicalTableName ), ParserPos.ZERO ); SqlSelect node = new SqlSelect( @@ -186,7 +188,7 @@ public SqlIdentifier physicalTableName() { public SqlIdentifier physicalColumnName( String logicalColumnName ) { String physicalName = physicalColumnNames.get( logicalColumnNames.indexOf( logicalColumnName ) ); - return new SqlIdentifier( Arrays.asList( physicalName ), ParserPos.ZERO ); + return new SqlIdentifier( Collections.singletonList( physicalName ), ParserPos.ZERO ); } @@ -232,12 +234,6 @@ public Enumerable scan( DataContext root ) { } - @Override - public Collection getModifiableCollection() { - throw new RuntimeException( "getModifiableCollection() is not implemented for JDBC adapter!" 
); - } - - @Override public Modify toModificationAlg( AlgOptCluster cluster, diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcImplementor.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcImplementor.java index 844c32d2f2..d2721e8a05 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcImplementor.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcImplementor.java @@ -34,10 +34,12 @@ package org.polypheny.db.adapter.jdbc; import com.google.common.collect.ImmutableList; +import java.util.Arrays; import java.util.List; import org.polypheny.db.adapter.java.JavaTypeFactory; import org.polypheny.db.adapter.jdbc.rel2sql.AlgToSqlConverter; import org.polypheny.db.algebra.AlgNode; +import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; import org.polypheny.db.languages.ParserPos; import org.polypheny.db.sql.language.SqlDialect; import org.polypheny.db.sql.language.SqlIdentifier; @@ -73,24 +75,8 @@ public Result implement( AlgNode node ) { @Override - public SqlIdentifier getPhysicalTableName( List tableNames ) { - JdbcEntity table; - if ( tableNames.size() == 1 ) { - // only table name - // NOTICE MV: I think, this case should no longer happen because there should always be a schema in the form - // __ be set. - // TODO MV: Consider removing this case - table = schema.getTableMap().get( tableNames.get( 0 ) ); - } else if ( tableNames.size() == 2 ) { - // schema name and table name - table = schema.getTableMap().get( tableNames.get( 1 ) ); - } else { - throw new RuntimeException( "Unexpected number of names: " + tableNames.size() ); - } - if ( table == null ) { - throw new RuntimeException( "Unknown table: [ " + String.join( ", ", tableNames ) + " ] | Table Map : [ " + String.join( ", ", schema.getTableMap().keySet() ) ); - } - return table.physicalTableName(); + public SqlIdentifier getPhysicalTableName( CatalogPartitionPlacement placement ) { + return new SqlIdentifier( Arrays.asList( placement.physicalSchemaName, placement.physicalTableName ), ParserPos.ZERO ); } diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcSchema.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcSchema.java index caebbd5645..1662df9252 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcSchema.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcSchema.java @@ -176,7 +176,9 @@ public JdbcEntity createJdbcTable( physicalSchemaName, partitionPlacement.physicalTableName, physicalColumnNames, - catalogTable.id + catalogTable.id, + partitionPlacement.partitionId, + adapter.getAdapterId() ); tableMap.put( catalogTable.name + "_" + partitionPlacement.partitionId, table ); physicalToLogicalTableNameMap.put( partitionPlacement.physicalTableName, catalogTable.name ); diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/rel2sql/AlgToSqlConverter.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/rel2sql/AlgToSqlConverter.java index d9bc0b0359..5c1a406f8e 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/rel2sql/AlgToSqlConverter.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/rel2sql/AlgToSqlConverter.java @@ -69,6 +69,7 @@ import 
org.polypheny.db.algebra.operators.OperatorName; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeField; +import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; import org.polypheny.db.catalog.entity.CatalogTable; import org.polypheny.db.languages.OperatorRegistry; import org.polypheny.db.languages.ParserPos; @@ -452,8 +453,7 @@ public Result visit( Modify modify ) { final Context context = aliasContext( pairs, false ); // Target Table Name - //final SqlIdentifier sqlTargetTable = new SqlIdentifier( modify.getTable().getQualifiedName(), POS ); - final SqlIdentifier sqlTargetTable = getPhysicalTableName( List.of( modify.getTable().getCatalogEntity().unwrap( CatalogTable.class ).getNamespaceName(), modify.getTable().getCatalogEntity().name ) ); + final SqlIdentifier sqlTargetTable = getPhysicalTableName( modify.getTable().getPartitionPlacement() ); switch ( modify.getOperation() ) { case INSERT: { @@ -660,7 +660,7 @@ private void parseCorrelTable( AlgNode algNode, Result x ) { } - public abstract SqlIdentifier getPhysicalTableName( List tableName ); + public abstract SqlIdentifier getPhysicalTableName( CatalogPartitionPlacement placement ); public abstract SqlIdentifier getPhysicalColumnName( List tableName, String columnName ); @@ -694,8 +694,8 @@ public PlainAlgToSqlConverter( SqlDialect dialect ) { @Override - public SqlIdentifier getPhysicalTableName( List tableNames ) { - return new SqlIdentifier( tableNames, POS ); + public SqlIdentifier getPhysicalTableName( CatalogPartitionPlacement placement ) { + return new SqlIdentifier( placement.physicalTableName, POS ); } diff --git a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoEntity.java b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoEntity.java index 5e9c6509e6..3e9d265e70 100644 --- a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoEntity.java +++ b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoEntity.java @@ -123,14 +123,14 @@ public class MongoEntity extends AbstractQueryableEntity implements Translatable @Getter private final TransactionProvider transactionProvider; @Getter - private final int storeId; + private final long storeId; /** * Creates a MongoTable. 
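+ * @param catalogTable catalog table backing this entity + * @param schema Mongo namespace that holds the physical collection + * @param proto prototype of the row type + * @param transactionProvider provider of the client sessions used for transactions + * @param storeId id of the underlying store, widened to {@code long} by this patch + * @param partitionPlacement placement whose partition id determines the physical collection name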
*/ - MongoEntity( CatalogTable catalogTable, MongoSchema schema, AlgProtoDataType proto, TransactionProvider transactionProvider, int storeId, CatalogPartitionPlacement partitionPlacement ) { - super( Object[].class ); + MongoEntity( CatalogTable catalogTable, MongoSchema schema, AlgProtoDataType proto, TransactionProvider transactionProvider, long storeId, CatalogPartitionPlacement partitionPlacement ) { + super( Object[].class, catalogTable.id, partitionPlacement.partitionId, storeId ); this.collectionName = MongoStore.getPhysicalTableName( catalogTable.id, partitionPlacement.partitionId ); this.transactionProvider = transactionProvider; this.catalogTable = catalogTable; @@ -139,12 +139,11 @@ public class MongoEntity extends AbstractQueryableEntity implements Translatable this.mongoSchema = schema; this.collection = schema.database.getCollection( collectionName ); this.storeId = storeId; - this.id = catalogTable.id; } - public MongoEntity( CatalogCollection catalogEntity, MongoSchema schema, AlgProtoDataType proto, TransactionProvider transactionProvider, int adapter, CatalogCollectionPlacement partitionPlacement ) { - super( Object[].class ); + public MongoEntity( CatalogCollection catalogEntity, MongoSchema schema, AlgProtoDataType proto, TransactionProvider transactionProvider, long adapter, CatalogCollectionPlacement partitionPlacement ) { + super( Object[].class, catalogEntity.id, partitionPlacement.id, adapter ); this.collectionName = MongoStore.getPhysicalTableName( catalogEntity.id, partitionPlacement.id ); this.transactionProvider = transactionProvider; this.catalogTable = null; @@ -153,7 +152,6 @@ public MongoEntity( CatalogCollection catalogEntity, MongoSchema schema, AlgProt this.mongoSchema = schema; this.collection = schema.database.getCollection( collectionName ); this.storeId = adapter; - this.id = catalogEntity.id; } @@ -234,7 +232,6 @@ private Enumerable aggregate( List> arrayFields, final List operations, Map parameterValues, - //BsonDocument filter, List preOps, List logicalCols ) { final List list = new ArrayList<>(); @@ -272,7 +269,7 @@ private Enumerable aggregate( if ( list.isEmpty() ) { list.add( new BsonDocument( "$match", new BsonDocument() ) ); } - //list.forEach( el -> System.out.println( el.toBsonDocument().toJson( JsonWriterSettings.builder().outputMode( JsonMode.SHELL ).build() ) ) ); + return new AbstractEnumerable<>() { @Override public Enumerator enumerator() { @@ -317,12 +314,6 @@ private static Integer parseIntString( String valueString ) { } - @Override - public Collection getModifiableCollection() { - throw new RuntimeException( "getModifiableCollection() is not implemented for MongoDB adapter!" 
); - } - - @Override public Modify toModificationAlg( AlgOptCluster cluster, @@ -399,7 +390,7 @@ private MongoEntity getTable() { @SuppressWarnings("UnusedDeclaration") public Enumerable aggregate( List> fields, List> arrayClass, List operations, List preProjections, List logicalCols ) { ClientSession session = getTable().getTransactionProvider().getSession( dataContext.getStatement().getTransaction().getXid() ); - dataContext.getStatement().getTransaction().registerInvolvedAdapter( AdapterManager.getInstance().getStore( this.getTable().getStoreId() ) ); + dataContext.getStatement().getTransaction().registerInvolvedAdapter( AdapterManager.getInstance().getStore( (int) this.getTable().getStoreId() ) ); Map values = new HashMap<>(); if ( dataContext.getParameterValues().size() == 1 ) { @@ -447,7 +438,7 @@ public Enumerable find( String filterJson, String projectJson, List handleDirectDML( Operation operation, String filter, List operations, boolean onlyOne, boolean needsDocument ) { MongoEntity mongoEntity = getTable(); PolyXid xid = dataContext.getStatement().getTransaction().getXid(); - dataContext.getStatement().getTransaction().registerInvolvedAdapter( AdapterManager.getInstance().getStore( mongoEntity.getStoreId() ) ); + dataContext.getStatement().getTransaction().registerInvolvedAdapter( AdapterManager.getInstance().getStore( (int) mongoEntity.getStoreId() ) ); GridFSBucket bucket = mongoEntity.getMongoSchema().getBucket(); try { diff --git a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql2alg/MqlToAlgConverter.java b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql2alg/MqlToAlgConverter.java index 794d70c3d2..21c217d356 100644 --- a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql2alg/MqlToAlgConverter.java +++ b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql2alg/MqlToAlgConverter.java @@ -70,7 +70,6 @@ import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.Catalog.NamespaceType; import org.polypheny.db.catalog.Catalog.Pattern; -import org.polypheny.db.catalog.entity.CatalogTable; import org.polypheny.db.languages.OperatorRegistry; import org.polypheny.db.languages.QueryLanguage; import org.polypheny.db.languages.QueryParameters; @@ -343,7 +342,6 @@ private AlgOptEntity getEntity( MqlCollectionStatement query, String dbSchemaNam final Builder fieldInfo = typeFactory.builder(); fieldInfo.add( new AlgDataTypeFieldImpl( "d", 0, typeFactory.createPolyType( PolyType.DOCUMENT ) ) ); AlgDataType rowType = fieldInfo.build(); - CatalogTable catalogTable = Catalog.getInstance().getTable( table.getTable().getId() ); return AlgOptEntityImpl.create( table.getRelOptSchema(), @@ -351,7 +349,8 @@ private AlgOptEntity getEntity( MqlCollectionStatement query, String dbSchemaNam new TableEntryImpl( catalogReader.getRootSchema(), names.get( names.size() - 1 ), new LogicalEntity( Catalog.getInstance().getSchemas( Catalog.defaultDatabaseId, new Pattern( dbSchemaName ) ).get( 0 ).id, names.get( 0 ), names.get( names.size() - 1 ), List.of(), List.of(), AlgDataTypeImpl.proto( rowType ), NamespaceType.GRAPH ) ), - catalogTable, + table.getCatalogEntity(), + table.getPartitionPlacement(), 1.0 ); } diff --git a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/NeoEntity.java b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/NeoEntity.java index 52edeb7df5..0b9a28e942 100644 --- a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/NeoEntity.java +++ 
b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/NeoEntity.java @@ -63,21 +63,13 @@ public class NeoEntity extends AbstractQueryableEntity implements TranslatableEntity, ModifiableEntity { public final String physicalEntityName; - public final long id; public final AlgProtoDataType rowType; - protected NeoEntity( String physicalEntityName, AlgProtoDataType proto, long id ) { - super( Object[].class ); + protected NeoEntity( String physicalEntityName, AlgProtoDataType proto, long id, long partitionId, long adapterId ) { + super( Object[].class, id, partitionId, adapterId ); this.physicalEntityName = physicalEntityName; this.rowType = proto; - this.id = id; - } - - - @Override - public Long getId() { - return id; } @@ -100,12 +92,6 @@ public AlgNode toAlg( ToAlgContext context, AlgOptEntity algOptEntity, AlgTraitS } - @Override - public Collection getModifiableCollection() { - throw new UnsupportedOperationException( "getModifiableCollection is not supported by the NEO4j adapter." ); - } - - /** * Creates an {@link org.polypheny.db.algebra.core.Modify} algebra object, which modifies this relational entity. * diff --git a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/NeoSchema.java b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/NeoSchema.java index ea797700a9..f132e242aa 100644 --- a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/NeoSchema.java +++ b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/NeoSchema.java @@ -84,7 +84,9 @@ public Entity createTable( CatalogTable combinedTable, List names, L path = path.plus( null, -1, entry.name, StructKind.NONE ); remainingNames = Util.skip( remainingNames ); final Entity entity = entry.getTable(); - final CatalogTable catalogTable = Catalog.getInstance().getTable( entity.getId() ); + ValidatorTable table2 = null; if ( entity instanceof Wrapper ) { table2 = ((Wrapper) entity).unwrap( PreparingEntity.class ); @@ -157,7 +155,7 @@ private void resolve_( final PolyphenyDbSchema rootSchema, List names, L if ( table2 == null ) { final AlgOptSchema algOptSchema = validator.catalogReader.unwrap( AlgOptSchema.class ); final AlgDataType rowType = entity.getRowType( validator.typeFactory ); - table2 = AlgOptEntityImpl.create( algOptSchema, rowType, entry, catalogTable, null ); + table2 = AlgOptEntityImpl.create( algOptSchema, rowType, entry, entity.getCatalogTable(), entity.getPartitionPlacement(), null ); } namespace = new TableNamespace( validator, table2 ); resolved.found( namespace, false, null, path, remainingNames ); diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorScope.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorScope.java index 190f3a47f3..57e25c67ee 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorScope.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorScope.java @@ -167,7 +167,7 @@ public interface SqlValidatorScope extends ValidatorScope { /** * Looks up a table in this scope from its name. If found, calls {@link Resolved#resolve(List, NameMatcher, boolean, Resolved)}. {@link TableNamespace} that wraps it. If the "table" is defined in a {@code WITH} clause it may be a query, not a table after all. * - * The name matcher is not null, and one typically uses {@link ValidatorCatalogReader#nameMatcher()}. 
+ * The name matcher is not null, and one typically uses {@link ValidatorCatalogReader#nameMatcher}. * * @param names Name of table, may be qualified or fully-qualified * @param nameMatcher Name matcher diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/sql2alg/SqlToAlgConverter.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/sql2alg/SqlToAlgConverter.java index 5d526fd885..093512c9b6 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/sql2alg/SqlToAlgConverter.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/sql2alg/SqlToAlgConverter.java @@ -2163,7 +2163,7 @@ protected void convertCollectionTable( Blackboard bb, SqlCall call ) { final TranslatableEntity table = udf.getTable( typeFactory, callBinding.sqlOperands() ); final CatalogTable catalogTable = Catalog.getInstance().getTable( table.getId() ); final AlgDataType rowType = table.getRowType( typeFactory ); - AlgOptEntity algOptEntity = AlgOptEntityImpl.create( null, rowType, table, catalogTable ); + AlgOptEntity algOptEntity = AlgOptEntityImpl.create( null, rowType, table, catalogTable, null ); AlgNode converted = toAlg( algOptEntity ); bb.setRoot( converted, true ); return; diff --git a/plugins/sql-language/src/test/java/org/polypheny/db/sql/FrameworksTest.java b/plugins/sql-language/src/test/java/org/polypheny/db/sql/FrameworksTest.java index a6f792e59b..1a74ab3a7e 100644 --- a/plugins/sql-language/src/test/java/org/polypheny/db/sql/FrameworksTest.java +++ b/plugins/sql-language/src/test/java/org/polypheny/db/sql/FrameworksTest.java @@ -410,12 +410,6 @@ public Enumerable scan( DataContext root, List filters, int[] } - @Override - public Collection getModifiableCollection() { - throw new UnsupportedOperationException(); - } - - @Override public Modify toModificationAlg( AlgOptCluster cluster, AlgOptEntity table, Prepare.CatalogReader catalogReader, AlgNode child, Modify.Operation operation, List updateColumnList, List sourceExpressionList, boolean flattened ) { return LogicalModify.create( table, catalogReader, child, operation, updateColumnList, sourceExpressionList, flattened ); diff --git a/plugins/sql-language/src/test/java/org/polypheny/db/sql/Smalls.java b/plugins/sql-language/src/test/java/org/polypheny/db/sql/Smalls.java index df516b2d99..e69de29bb2 100644 --- a/plugins/sql-language/src/test/java/org/polypheny/db/sql/Smalls.java +++ b/plugins/sql-language/src/test/java/org/polypheny/db/sql/Smalls.java @@ -1,860 +0,0 @@ -/* - * Copyright 2019-2023 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.polypheny.db.sql; - - -import com.google.common.collect.ImmutableList; -import java.lang.reflect.Method; -import java.util.AbstractList; -import java.util.List; -import java.util.Locale; -import java.util.concurrent.atomic.AtomicInteger; -import org.apache.calcite.linq4j.AbstractEnumerable; -import org.apache.calcite.linq4j.BaseQueryable; -import org.apache.calcite.linq4j.Enumerable; -import org.apache.calcite.linq4j.Enumerator; -import org.apache.calcite.linq4j.Linq4j; -import org.apache.calcite.linq4j.Queryable; -import org.apache.calcite.linq4j.function.Deterministic; -import org.apache.calcite.linq4j.function.Parameter; -import org.apache.calcite.linq4j.function.SemiStrict; -import org.apache.calcite.linq4j.tree.Types; -import org.polypheny.db.adapter.DataContext; -import org.polypheny.db.adapter.java.AbstractQueryableEntity; -import org.polypheny.db.algebra.type.AlgDataType; -import org.polypheny.db.algebra.type.AlgDataTypeFactory; -import org.polypheny.db.nodes.Call; -import org.polypheny.db.nodes.Node; -import org.polypheny.db.schema.QueryableEntity; -import org.polypheny.db.schema.ScannableEntity; -import org.polypheny.db.schema.SchemaPlus; -import org.polypheny.db.schema.Statistic; -import org.polypheny.db.schema.Statistics; -import org.polypheny.db.schema.TableType; -import org.polypheny.db.schema.impl.AbstractEntity; -import org.polypheny.db.type.PolyType; - - -/** - * Holder for various classes and functions used in tests as user-defined functions and so forth. - */ -public class Smalls { - - public static final Method GENERATE_STRINGS_METHOD = Types.lookupMethod( Smalls.class, "generateStrings", Integer.class ); - public static final Method MAZE_METHOD = Types.lookupMethod( MazeEntity.class, "generate", int.class, int.class, int.class ); - public static final Method MAZE2_METHOD = Types.lookupMethod( MazeEntity.class, "generate2", int.class, int.class, Integer.class ); - public static final Method MAZE3_METHOD = Types.lookupMethod( MazeEntity.class, "generate3", String.class ); - public static final Method MULTIPLICATION_TABLE_METHOD = Types.lookupMethod( Smalls.class, "multiplicationTable", int.class, int.class, Integer.class ); - public static final Method FIBONACCI_TABLE_METHOD = Types.lookupMethod( Smalls.class, "fibonacciTable" ); - public static final Method FIBONACCI2_TABLE_METHOD = Types.lookupMethod( Smalls.class, "fibonacciTableWithLimit", long.class ); - public static final Method VIEW_METHOD = Types.lookupMethod( Smalls.class, "view", String.class ); - public static final Method STR_METHOD = Types.lookupMethod( Smalls.class, "str", Object.class, Object.class ); - public static final Method STRING_UNION_METHOD = Types.lookupMethod( Smalls.class, "stringUnion", Queryable.class, Queryable.class ); - public static final Method PROCESS_CURSOR_METHOD = Types.lookupMethod( Smalls.class, "processCursor", int.class, Enumerable.class ); - public static final Method PROCESS_CURSORS_METHOD = Types.lookupMethod( Smalls.class, "processCursors", int.class, Enumerable.class, Enumerable.class ); - - - private Smalls() { - } - - - private static QueryableEntity oneThreePlus( String s ) { - List items; - // Argument is null in case SQL contains function call with expression. Then the engine calls a function with null arguments to get getRowType. 
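// Aside (illustrative sketch, simplified names -- not part of the deleted file):
// the contract described in the comment above means a table function must return
// a well-formed, empty table when probed with null arguments, rather than
// dereferencing them. In isolation, the pattern is roughly:
//
//     static List<Integer> rows( String s ) {
//         if ( s == null ) {
//             return ImmutableList.of();   // row-type probe: no data needed
//         }
//         return ImmutableList.of( 1, 3, Integer.parseInt( s.substring( 1, s.length() - 1 ) ) );
//     }
//
// The deleted oneThreePlus helper continues below with exactly this shape.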
- if ( s == null ) { - items = ImmutableList.of(); - } else { - Integer latest = Integer.parseInt( s.substring( 1, s.length() - 1 ) ); - items = ImmutableList.of( 1, 3, latest ); - } - final Enumerable enumerable = Linq4j.asEnumerable( items ); - return new AbstractQueryableEntity( Integer.class ) { - @Override - public Queryable asQueryable( DataContext dataContext, SchemaPlus schema, String tableName ) { - //noinspection unchecked - return (Queryable) enumerable.asQueryable(); - } - - - @Override - public AlgDataType getRowType( AlgDataTypeFactory typeFactory ) { - return typeFactory.builder().add( "c", null, PolyType.INTEGER ).build(); - } - }; - } - - - public static Queryable stringUnion( Queryable q0, Queryable q1 ) { - return q0.concat( q1 ); - } - - - /** - * A function that generates a table that generates a sequence of {@link IntString} values. - */ - public static QueryableEntity generateStrings( final Integer count ) { - return new AbstractQueryableEntity( IntString.class ) { - @Override - public AlgDataType getRowType( AlgDataTypeFactory typeFactory ) { - return typeFactory.createJavaType( IntString.class ); - } - - - @Override - public Queryable asQueryable( DataContext dataContext, SchemaPlus schema, String tableName ) { - BaseQueryable queryable = - new BaseQueryable( null, IntString.class, null ) { - @Override - public Enumerator enumerator() { - return new Enumerator() { - static final String Z = "abcdefghijklm"; - - int i = 0; - int curI; - String curS; - - - @Override - public IntString current() { - return new IntString( curI, curS ); - } - - - @Override - public boolean moveNext() { - if ( i < count ) { - curI = i; - curS = Z.substring( 0, i % Z.length() ); - ++i; - return true; - } else { - return false; - } - } - - - @Override - public void reset() { - i = 0; - } - - - @Override - public void close() { - } - }; - } - }; - //noinspection unchecked - return (Queryable) queryable; - } - }; - } - - - /** - * A function that generates multiplication table of {@code ncol} columns x {@code nrow} rows. - */ - public static QueryableEntity multiplicationTable( final int ncol, final int nrow, Integer offset ) { - final int offs = offset == null ? 0 : offset; - return new AbstractQueryableEntity( Object[].class ) { - @Override - public AlgDataType getRowType( AlgDataTypeFactory typeFactory ) { - final AlgDataTypeFactory.Builder builder = typeFactory.builder(); - builder.add( "row_name", null, typeFactory.createJavaType( String.class ) ); - final AlgDataType int_ = typeFactory.createJavaType( int.class ); - for ( int i = 1; i <= ncol; i++ ) { - builder.add( "c" + i, null, int_ ); - } - return builder.build(); - } - - - @Override - public Queryable asQueryable( DataContext dataContext, SchemaPlus schema, String tableName ) { - final List table = new AbstractList() { - @Override - public Object[] get( int index ) { - Object[] cur = new Object[ncol + 1]; - cur[0] = "row " + index; - for ( int j = 1; j <= ncol; j++ ) { - cur[j] = j * (index + 1) + offs; - } - return cur; - } - - - @Override - public int size() { - return nrow; - } - }; - return Linq4j.asEnumerable( table ).asQueryable(); - } - }; - } - - - /** - * A function that generates the Fibonacci sequence. Interesting because it has one column and no arguments. - */ - public static ScannableEntity fibonacciTable() { - return fibonacciTableWithLimit( -1L ); - } - - - /** - * A function that generates the Fibonacci sequence. Interesting because it has one column and no arguments. 
- */ - public static ScannableEntity fibonacciTableWithLimit( final long limit ) { - return new ScannableEntity() { - @Override - public AlgDataType getRowType( AlgDataTypeFactory typeFactory ) { - return typeFactory.builder().add( "N", null, PolyType.BIGINT ).build(); - } - - - @Override - public Enumerable scan( DataContext root ) { - return new AbstractEnumerable() { - @Override - public Enumerator enumerator() { - return new Enumerator() { - private long prev = 1; - private long current = 0; - - - @Override - public Object[] current() { - return new Object[]{ current }; - } - - - @Override - public boolean moveNext() { - final long next = current + prev; - if ( limit >= 0 && next > limit ) { - return false; - } - prev = current; - current = next; - return true; - } - - - @Override - public void reset() { - prev = 0; - current = 1; - } - - - @Override - public void close() { - } - }; - } - }; - } - - - @Override - public Statistic getStatistic() { - return Statistics.UNKNOWN; - } - - - @Override - public Long getId() { - return null; - } - - - @Override - public TableType getJdbcTableType() { - return TableType.TABLE; - } - - - @Override - public boolean isRolledUp( String column ) { - return false; - } - - - @Override - public boolean rolledUpColumnValidInsideAgg( String column, Call call, Node parent ) { - return true; - } - - }; - } - - - /** - * A function that adds a number to the first column of input cursor - */ - public static QueryableEntity processCursor( final int offset, final Enumerable a ) { - return new AbstractQueryableEntity( Object[].class ) { - @Override - public AlgDataType getRowType( AlgDataTypeFactory typeFactory ) { - return typeFactory.builder() - .add( "result", null, PolyType.INTEGER ) - .build(); - } - - - @Override - public Queryable asQueryable( DataContext dataContext, SchemaPlus schema, String tableName ) { - final Enumerable enumerable = a.select( a0 -> offset + ((Integer) a0[0]) ); - //noinspection unchecked - return (Queryable) enumerable.asQueryable(); - } - }; - } - - - /** - * A function that sums the second column of first input cursor, second column of first input and the given int. - */ - public static QueryableEntity processCursors( final int offset, final Enumerable a, final Enumerable b ) { - return new AbstractQueryableEntity( Object[].class ) { - @Override - public AlgDataType getRowType( AlgDataTypeFactory typeFactory ) { - return typeFactory.builder() - .add( "result", null, PolyType.INTEGER ) - .build(); - } - - - @Override - public Queryable asQueryable( DataContext dataContext, SchemaPlus schema, String tableName ) { - final Enumerable enumerable = a.zip( b, ( v0, v1 ) -> ((Integer) v0[1]) + v1.n + offset ); - //noinspection unchecked - return (Queryable) enumerable.asQueryable(); - } - }; - } - - - /** - * Class with int and String fields. - */ - public static class IntString { - - public final int n; - public final String s; - - - public IntString( int n, String s ) { - this.n = n; - this.s = s; - } - - - public String toString() { - return "{n=" + n + ", s=" + s + "}"; - } - - } - - - /** - * Example of a UDF with a non-static {@code eval} method, and named parameters. 
- */ - public static class MyPlusFunction { - - public static final AtomicInteger INSTANCE_COUNT = new AtomicInteger( 0 ); - - - // Note: Not marked @Deterministic - public MyPlusFunction() { - INSTANCE_COUNT.incrementAndGet(); - } - - - public int eval( @Parameter(name = "x") int x, @Parameter(name = "y") int y ) { - return x + y; - } - - } - - - /** - * As {@link MyPlusFunction} but declared to be deterministic. - */ - public static class MyDeterministicPlusFunction { - - public static final AtomicInteger INSTANCE_COUNT = new AtomicInteger( 0 ); - - - @Deterministic - public MyDeterministicPlusFunction() { - INSTANCE_COUNT.incrementAndGet(); - } - - - public int eval( @Parameter(name = "x") int x, @Parameter(name = "y") int y ) { - return x + y; - } - - } - - - /** - * Example of a UDF with named parameters. - */ - public static class MyLeftFunction { - - public String eval( @Parameter(name = "s") String s, @Parameter(name = "n") int n ) { - return s.substring( 0, n ); - } - - } - - - /** - * Example of a UDF with named parameters, some of them optional. - */ - public static class MyAbcdeFunction { - - public String eval( - @Parameter(name = "A", optional = false) Integer a, - @Parameter(name = "B", optional = true) Integer b, - @Parameter(name = "C", optional = false) Integer c, - @Parameter(name = "D", optional = true) Integer d, - @Parameter(name = "E", optional = true) Integer e ) { - return "{a: " + a + ", b: " + b + ", c: " + c + ", d: " + d + ", e: " + e + "}"; - } - - } - - - /** - * Example of a non-strict UDF. (Does something useful when passed NULL.) - */ - public static class MyToStringFunction { - - public static String eval( @Parameter(name = "o") Object o ) { - if ( o == null ) { - return ""; - } - return "<" + o.toString() + ">"; - } - - } - - - /** - * Example of a semi-strict UDF. (Returns null if its parameter is null or if its length is 4.) - */ - public static class Null4Function { - - @SemiStrict - public static String eval( @Parameter(name = "s") String s ) { - if ( s == null || s.length() == 4 ) { - return null; - } - return s; - } - - } - - - /** - * Example of a picky, semi-strict UDF. Throws {@link NullPointerException} if argument is null. - * Returns null if its argument's length is 8. - */ - public static class Null8Function { - - @SemiStrict - public static String eval( @Parameter(name = "s") String s ) { - if ( s.length() == 8 ) { - return null; - } - return s; - } - - } - - - /** - * Example of a UDF with a static {@code eval} method. Class is abstract, but code-generator should not need to instantiate it. - */ - public abstract static class MyDoubleFunction { - - private MyDoubleFunction() { - } - - - public static int eval( int x ) { - return x * 2; - } - - } - - - /** - * User-defined function with two arguments. - */ - public static class MyIncrement { - - public float eval( int x, int y ) { - return x + x * y / 100; - } - - } - - - /** - * Example of a UDF that has overloaded UDFs (same name, different args). - */ - public abstract static class CountArgs0Function { - - private CountArgs0Function() { - } - - - public static int eval() { - return 0; - } - - } - - - /** - * See {@link CountArgs0Function}. - */ - public abstract static class CountArgs1Function { - - private CountArgs1Function() { - } - - - public static int eval( int x ) { - return 1; - } - - } - - - /** - * See {@link CountArgs0Function}. 
- */ - public abstract static class CountArgs1NullableFunction { - - private CountArgs1NullableFunction() { - } - - - public static int eval( Short x ) { - return -1; - } - - } - - - /** - * See {@link CountArgs0Function}. - */ - public abstract static class CountArgs2Function { - - private CountArgs2Function() { - } - - - public static int eval( int x, int y ) { - return 2; - } - - } - - - - /** - * A generic interface for defining user defined aggregate functions - * - * @param accumulator type - * @param value type - * @param result type - */ - private interface MyGenericAggFunction { - - A init(); - - A add( A accumulator, V val ); - - A merge( A accumulator1, A accumulator2 ); - - R result( A accumulator ); - - } - - - /** - * The real MazeTable may be found in example/function. This is a cut-down version to support a test. - */ - public static class MazeEntity extends AbstractEntity implements ScannableEntity { - - private final String content; - - - public MazeEntity( String content ) { - this.content = content; - } - - - public static ScannableEntity generate( int width, int height, int seed ) { - return new MazeEntity( String.format( Locale.ROOT, "generate(w=%d, h=%d, s=%d)", width, height, seed ) ); - } - - - @Override - public AlgDataType getRowType( AlgDataTypeFactory typeFactory ) { - return typeFactory.builder() - .add( "S", null, PolyType.VARCHAR, 12 ) - .build(); - } - - - @Override - public Enumerable scan( DataContext root ) { - Object[][] rows = { { "abcde" }, { "xyz" }, { content } }; - return Linq4j.asEnumerable( rows ); - } - - } - - - /** - * Table with a lot of columns. - */ - @SuppressWarnings("unused") - public static class WideProductSale { - - public final int prodId; - public final double sale0; - public final double sale1 = 10; - public final double sale2 = 10; - public final double sale3 = 10; - public final double sale4 = 10; - public final double sale5 = 10; - public final double sale6 = 10; - public final double sale7 = 10; - public final double sale8 = 10; - public final double sale9 = 10; - public final double sale10 = 10; - public final double sale11 = 10; - public final double sale12 = 10; - public final double sale13 = 10; - public final double sale14 = 10; - public final double sale15 = 10; - public final double sale16 = 10; - public final double sale17 = 10; - public final double sale18 = 10; - public final double sale19 = 10; - public final double sale20 = 10; - public final double sale21 = 10; - public final double sale22 = 10; - public final double sale23 = 10; - public final double sale24 = 10; - public final double sale25 = 10; - public final double sale26 = 10; - public final double sale27 = 10; - public final double sale28 = 10; - public final double sale29 = 10; - public final double sale30 = 10; - public final double sale31 = 10; - public final double sale32 = 10; - public final double sale33 = 10; - public final double sale34 = 10; - public final double sale35 = 10; - public final double sale36 = 10; - public final double sale37 = 10; - public final double sale38 = 10; - public final double sale39 = 10; - public final double sale40 = 10; - public final double sale41 = 10; - public final double sale42 = 10; - public final double sale43 = 10; - public final double sale44 = 10; - public final double sale45 = 10; - public final double sale46 = 10; - public final double sale47 = 10; - public final double sale48 = 10; - public final double sale49 = 10; - public final double sale50 = 10; - public final double sale51 = 10; - public final double 
sale52 = 10; - public final double sale53 = 10; - public final double sale54 = 10; - public final double sale55 = 10; - public final double sale56 = 10; - public final double sale57 = 10; - public final double sale58 = 10; - public final double sale59 = 10; - public final double sale60 = 10; - public final double sale61 = 10; - public final double sale62 = 10; - public final double sale63 = 10; - public final double sale64 = 10; - public final double sale65 = 10; - public final double sale66 = 10; - public final double sale67 = 10; - public final double sale68 = 10; - public final double sale69 = 10; - public final double sale70 = 10; - public final double sale71 = 10; - public final double sale72 = 10; - public final double sale73 = 10; - public final double sale74 = 10; - public final double sale75 = 10; - public final double sale76 = 10; - public final double sale77 = 10; - public final double sale78 = 10; - public final double sale79 = 10; - public final double sale80 = 10; - public final double sale81 = 10; - public final double sale82 = 10; - public final double sale83 = 10; - public final double sale84 = 10; - public final double sale85 = 10; - public final double sale86 = 10; - public final double sale87 = 10; - public final double sale88 = 10; - public final double sale89 = 10; - public final double sale90 = 10; - public final double sale91 = 10; - public final double sale92 = 10; - public final double sale93 = 10; - public final double sale94 = 10; - public final double sale95 = 10; - public final double sale96 = 10; - public final double sale97 = 10; - public final double sale98 = 10; - public final double sale99 = 10; - public final double sale100 = 10; - public final double sale101 = 10; - public final double sale102 = 10; - public final double sale103 = 10; - public final double sale104 = 10; - public final double sale105 = 10; - public final double sale106 = 10; - public final double sale107 = 10; - public final double sale108 = 10; - public final double sale109 = 10; - public final double sale110 = 10; - public final double sale111 = 10; - public final double sale112 = 10; - public final double sale113 = 10; - public final double sale114 = 10; - public final double sale115 = 10; - public final double sale116 = 10; - public final double sale117 = 10; - public final double sale118 = 10; - public final double sale119 = 10; - public final double sale120 = 10; - public final double sale121 = 10; - public final double sale122 = 10; - public final double sale123 = 10; - public final double sale124 = 10; - public final double sale125 = 10; - public final double sale126 = 10; - public final double sale127 = 10; - public final double sale128 = 10; - public final double sale129 = 10; - public final double sale130 = 10; - public final double sale131 = 10; - public final double sale132 = 10; - public final double sale133 = 10; - public final double sale134 = 10; - public final double sale135 = 10; - public final double sale136 = 10; - public final double sale137 = 10; - public final double sale138 = 10; - public final double sale139 = 10; - public final double sale140 = 10; - public final double sale141 = 10; - public final double sale142 = 10; - public final double sale143 = 10; - public final double sale144 = 10; - public final double sale145 = 10; - public final double sale146 = 10; - public final double sale147 = 10; - public final double sale148 = 10; - public final double sale149 = 10; - public final double sale150 = 10; - public final double sale151 = 10; - public final double 
sale152 = 10; - public final double sale153 = 10; - public final double sale154 = 10; - public final double sale155 = 10; - public final double sale156 = 10; - public final double sale157 = 10; - public final double sale158 = 10; - public final double sale159 = 10; - public final double sale160 = 10; - public final double sale161 = 10; - public final double sale162 = 10; - public final double sale163 = 10; - public final double sale164 = 10; - public final double sale165 = 10; - public final double sale166 = 10; - public final double sale167 = 10; - public final double sale168 = 10; - public final double sale169 = 10; - public final double sale170 = 10; - public final double sale171 = 10; - public final double sale172 = 10; - public final double sale173 = 10; - public final double sale174 = 10; - public final double sale175 = 10; - public final double sale176 = 10; - public final double sale177 = 10; - public final double sale178 = 10; - public final double sale179 = 10; - public final double sale180 = 10; - public final double sale181 = 10; - public final double sale182 = 10; - public final double sale183 = 10; - public final double sale184 = 10; - public final double sale185 = 10; - public final double sale186 = 10; - public final double sale187 = 10; - public final double sale188 = 10; - public final double sale189 = 10; - public final double sale190 = 10; - public final double sale191 = 10; - public final double sale192 = 10; - public final double sale193 = 10; - public final double sale194 = 10; - public final double sale195 = 10; - public final double sale196 = 10; - public final double sale197 = 10; - public final double sale198 = 10; - public final double sale199 = 10; - - - public WideProductSale( int prodId, double sale ) { - this.prodId = prodId; - this.sale0 = sale; - } - - } - -} - From 605d7759d56f117d02c5fa0f6486714c6526b54f Mon Sep 17 00:00:00 2001 From: datomo Date: Wed, 22 Feb 2023 22:50:48 +0100 Subject: [PATCH 018/436] fixing missing names --- .../org/polypheny/db/PolyImplementation.java | 19 +++++++++++----- .../enumerable/EnumerableScanRule.java | 2 +- .../polypheny/db/algebra/AbstractAlgNode.java | 2 +- .../org/polypheny/db/algebra/AlgNode.java | 2 +- .../algebra/AlgStructuredTypeFlattener.java | 6 ++--- .../algebra/UnsupportedFromInsertShuttle.java | 5 ++--- .../org/polypheny/db/algebra/core/Modify.java | 6 +++++ .../org/polypheny/db/algebra/core/Scan.java | 2 +- .../db/algebra/core/document/DocumentAlg.java | 2 +- .../algebra/core/document/DocumentModify.java | 6 +++++ .../common/LogicalConstraintEnforcer.java | 8 +++---- .../logical/common/LogicalStreamer.java | 14 ++++++------ .../db/algebra/metadata/AlgMdCollation.java | 2 +- .../algebra/metadata/AlgMdColumnOrigins.java | 2 +- .../metadata/AlgMdColumnUniqueness.java | 2 +- .../algebra/metadata/AlgMdDistribution.java | 2 +- .../metadata/AlgMdExpressionLineage.java | 2 +- .../metadata/AlgMdTableReferences.java | 2 +- .../db/algebra/mutable/MutableAlgs.java | 2 +- .../db/algebra/mutable/MutableScan.java | 2 +- .../db/algebra/rules/FilterScanRule.java | 6 ++--- .../db/algebra/rules/ProjectScanRule.java | 6 ++--- .../polypheny/db/algebra/rules/ScanRule.java | 4 ++-- .../db/algebra/stream/StreamRules.java | 4 ++-- .../db/algebra/type/DocumentType.java | 18 +++++++-------- .../polypheny/db/interpreter/Bindables.java | 2 +- .../polypheny/db/interpreter/ScanNode.java | 10 ++++----- .../org/polypheny/db/plan/AlgOptEntity.java | 2 +- .../org/polypheny/db/plan/AlgOptUtil.java | 2 +- 
.../db/prepare/PolyphenyDbCatalogReader.java | 4 ++-- .../org/polypheny/db/prepare/Prepare.java | 2 +- .../db/prepare/QueryableAlgBuilder.java | 2 +- .../db/processing/DeepCopyShuttle.java | 2 +- .../processing/LogicalAlgAnalyzeShuttle.java | 14 ++++++------ .../java/org/polypheny/db/schema/Entity.java | 14 +++++++++--- .../polypheny/db/schema/LogicalEntity.java | 6 ++--- .../org/polypheny/db/tools/AlgBuilder.java | 2 +- .../db/view/MaterializedViewManager.java | 4 ++-- .../org/polypheny/db/view/ViewManager.java | 4 ++-- .../org/polypheny/db/ddl/DdlManagerImpl.java | 6 ++--- .../db/processing/AbstractQueryProcessor.java | 10 ++++----- .../processing/ConstraintEnforceAttacher.java | 4 ++-- .../db/processing/DataMigratorImpl.java | 2 +- .../shuttles/QueryParameterizer.java | 4 ++-- .../db/routing/routers/AbstractDqlRouter.java | 4 ++-- .../db/routing/routers/CachedPlanRouter.java | 4 ++-- .../db/routing/routers/DmlRouterImpl.java | 12 +++++----- .../db/transaction/EntityAccessMap.java | 12 +++++++--- .../db/view/MaterializedViewManagerImpl.java | 2 +- .../algebra/CottontailTableModify.java | 2 +- .../CottontailTableModificationRule.java | 10 ++++----- .../db/adapter/csv/CsvProjectScanRule.java | 5 ++--- .../db/adapter/druid/DruidQuery.java | 10 ++++----- .../db/adapter/druid/DruidRules.java | 4 ++-- .../org/polypheny/db/test/DruidAdapterIT.java | 2 +- .../polypheny/db/test/DruidAdapterIT2.java | 2 +- .../db/adapter/file/algebra/FileRules.java | 8 +++---- .../adapter/file/algebra/FileTableModify.java | 4 ++-- .../db/adapter/jdbc/JdbcImplementor.java | 22 +++++++------------ .../polypheny/db/adapter/jdbc/JdbcRules.java | 10 ++++----- .../jdbc/rel2sql/AlgToSqlConverter.java | 16 +++++++------- .../adapter/jdbc/rel2sql/SqlImplementor.java | 6 ++--- .../db/adapter/jdbc/rel2sql/PlannerTest.java | 2 +- .../org/polypheny/db/test/CatalogTest.java | 2 +- .../db/adapter/mongodb/MongoAlg.java | 5 +++-- .../db/adapter/mongodb/MongoRules.java | 18 +++++++-------- .../languages/mql2alg/MqlToAlgConverter.java | 6 ++--- .../neo4j/NeoRelationalImplementor.java | 2 +- .../db/adapter/neo4j/rules/NeoRules.java | 2 +- .../neo4j/rules/relational/NeoModify.java | 4 ++-- .../neo4j/rules/relational/NeoScan.java | 6 ++--- .../db/adapter/pig/PigAggregate.java | 4 ++-- .../org/polypheny/db/adapter/pig/PigAlg.java | 2 +- .../polypheny/db/adapter/pig/PigFilter.java | 4 ++-- .../org/polypheny/db/adapter/pig/PigJoin.java | 4 ++-- .../polypheny/db/adapter/pig/PigProject.java | 6 ++--- .../polypheny/db/adapter/pig/PigRules.java | 2 +- .../org/polypheny/db/adapter/pig/PigScan.java | 6 ++--- .../org/polypheny/db/tools/PigAlgBuilder.java | 2 +- .../db/sql/language/ddl/SqlCreateTable.java | 2 +- .../db/sql/language/validate/EmptyScope.java | 2 +- .../SqlToRelConverterExtendedTest.java | 2 +- .../polypheny/db/webui/crud/LanguageCrud.java | 2 +- 83 files changed, 236 insertions(+), 208 deletions(-) diff --git a/core/src/main/java/org/polypheny/db/PolyImplementation.java b/core/src/main/java/org/polypheny/db/PolyImplementation.java index 136385c0d4..a7ff5fc30b 100644 --- a/core/src/main/java/org/polypheny/db/PolyImplementation.java +++ b/core/src/main/java/org/polypheny/db/PolyImplementation.java @@ -21,10 +21,12 @@ import com.google.common.collect.ImmutableList; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; +import java.util.function.Function; import java.util.stream.Collectors; import 
java.util.stream.StreamSupport; import javax.annotation.Nullable; @@ -60,7 +62,7 @@ public class PolyImplementation { public final AlgDataType rowType; private final long maxRowCount = -1; private final Kind kind; - private Bindable bindable; + private Bindable bindable; private final NamespaceType namespaceType; private final ExecutionTimeMonitor executionTimeMonitor; private CursorFactory cursorFactory; @@ -367,18 +369,25 @@ public static void addMonitoringInformation( Statement statement, String kind, i public List> getDocRows( Statement statement, boolean noLimit ) { - bindable = null; - if ( !Kind.DDL.contains( kind ) ) { + cursorFactory = CursorFactory.OBJECT; + Function> transformer; + if ( cursorFactory == CursorFactory.ARRAY ) { bindable = preparedResult.getBindable( CursorFactory.ARRAY ); + transformer = o -> Arrays.asList( (Object[]) o ); + } else if ( cursorFactory == CursorFactory.OBJECT || Kind.DML.contains( kind ) ) { + bindable = preparedResult.getBindable( CursorFactory.OBJECT ); + transformer = Collections::singletonList; + } else { + throw new RuntimeException( "Error for result format" ); } - Iterator iterator = createIterator( getBindable(), statement, true ); + Iterator iterator = createIterator( bindable, statement, true ); final Iterable iterable = () -> iterator; return StreamSupport .stream( iterable.spliterator(), false ) - .map( d -> Arrays.asList( (Object[]) d ) ) + .map( transformer ) .collect( Collectors.toList() ); } diff --git a/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableScanRule.java b/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableScanRule.java index 8c9fa61a00..a04cd654a0 100644 --- a/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableScanRule.java +++ b/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableScanRule.java @@ -64,7 +64,7 @@ public EnumerableScanRule( AlgBuilderFactory algBuilderFactory ) { @Override public AlgNode convert( AlgNode alg ) { LogicalRelScan scan = (LogicalRelScan) alg; - final AlgOptEntity algOptEntity = scan.getTable(); + final AlgOptEntity algOptEntity = scan.getEntity(); final Entity entity = algOptEntity.unwrap( Entity.class ); if ( !EnumerableScan.canHandle( entity ) ) { return null; diff --git a/core/src/main/java/org/polypheny/db/algebra/AbstractAlgNode.java b/core/src/main/java/org/polypheny/db/algebra/AbstractAlgNode.java index bb9c1c81aa..ffbb9f17f2 100644 --- a/core/src/main/java/org/polypheny/db/algebra/AbstractAlgNode.java +++ b/core/src/main/java/org/polypheny/db/algebra/AbstractAlgNode.java @@ -387,7 +387,7 @@ public final String getDescription() { @Override - public AlgOptEntity getTable() { + public AlgOptEntity getEntity() { return null; } diff --git a/core/src/main/java/org/polypheny/db/algebra/AlgNode.java b/core/src/main/java/org/polypheny/db/algebra/AlgNode.java index c902c57739..d2373ce61b 100644 --- a/core/src/main/java/org/polypheny/db/algebra/AlgNode.java +++ b/core/src/main/java/org/polypheny/db/algebra/AlgNode.java @@ -259,7 +259,7 @@ public interface AlgNode extends AlgOptNode, Cloneable { * * @return If this relational expression represents an access to a table, returns that table, otherwise returns null */ - AlgOptEntity getTable(); + AlgOptEntity getEntity(); /** * Returns the name of this relational expression's class, sans package name, for use in explain. 
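A note on the reworked getDocRows() above: the per-row transformer is now chosen from the cursor factory instead of always casting to Object[]. ARRAY cursors deliver an Object[] per row, while OBJECT cursors (and DML results) deliver a single value per row. A self-contained sketch of that dispatch, with simplified, illustrative names:

    import java.util.Arrays;
    import java.util.Collections;
    import java.util.Iterator;
    import java.util.List;
    import java.util.function.Function;
    import java.util.stream.Collectors;
    import java.util.stream.StreamSupport;

    final class DocRowsSketch {

        enum Cursor { ARRAY, OBJECT }

        // Pick the per-row transformer that matches the cursor shape, then
        // drain the result iterator into materialized rows.
        static List<List<Object>> toRows( Iterator<Object> results, Cursor cursor ) {
            Function<Object, List<Object>> transformer =
                    cursor == Cursor.ARRAY
                            ? o -> Arrays.asList( (Object[]) o )
                            : Collections::singletonList;
            Iterable<Object> iterable = () -> results;
            return StreamSupport.stream( iterable.spliterator(), false )
                    .map( transformer )
                    .collect( Collectors.toList() );
        }
    }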
For example, for a diff --git a/core/src/main/java/org/polypheny/db/algebra/AlgStructuredTypeFlattener.java b/core/src/main/java/org/polypheny/db/algebra/AlgStructuredTypeFlattener.java index b7912e7489..c9b2177309 100644 --- a/core/src/main/java/org/polypheny/db/algebra/AlgStructuredTypeFlattener.java +++ b/core/src/main/java/org/polypheny/db/algebra/AlgStructuredTypeFlattener.java @@ -449,7 +449,7 @@ public void rewriteAlg( LogicalDocumentSort alg ) { @SuppressWarnings("unused") public void rewriteAlg( LogicalDocumentScan scan ) { AlgNode alg = scan; - if ( !(scan.getTable() instanceof LogicalCollection) ) { + if ( !(scan.getEntity() instanceof LogicalCollection) ) { alg = scan.getCollection().toAlg( toAlgContext, scan.traitSet ); } setNewForOldRel( scan, alg ); @@ -529,7 +529,7 @@ public void rewriteAlg( LogicalLpgUnwind unwind ) { public void rewriteAlg( LogicalModify alg ) { LogicalModify newAlg = LogicalModify.create( - alg.getTable(), + alg.getEntity(), alg.getCatalogReader(), getNewForOldRel( alg.getInput() ), alg.getOperation(), @@ -864,7 +864,7 @@ private boolean isConstructor( RexNode rexNode ) { public void rewriteAlg( Scan alg ) { - AlgNode newAlg = alg.getTable().toAlg( toAlgContext, alg.traitSet ); + AlgNode newAlg = alg.getEntity().toAlg( toAlgContext, alg.traitSet ); if ( !PolyTypeUtil.isFlat( alg.getRowType() ) ) { final List> flattenedExpList = new ArrayList<>(); flattenInputs( diff --git a/core/src/main/java/org/polypheny/db/algebra/UnsupportedFromInsertShuttle.java b/core/src/main/java/org/polypheny/db/algebra/UnsupportedFromInsertShuttle.java index b3f37152ba..501b7bf9b4 100644 --- a/core/src/main/java/org/polypheny/db/algebra/UnsupportedFromInsertShuttle.java +++ b/core/src/main/java/org/polypheny/db/algebra/UnsupportedFromInsertShuttle.java @@ -19,7 +19,6 @@ import java.util.Objects; import org.polypheny.db.algebra.core.Modify; import org.polypheny.db.algebra.core.Scan; -import org.polypheny.db.catalog.entity.CatalogObject; import org.polypheny.db.plan.volcano.AlgSubset; public class UnsupportedFromInsertShuttle extends AlgShuttleImpl { @@ -34,7 +33,7 @@ private UnsupportedFromInsertShuttle( Long tableId ) { public static boolean contains( Modify modify ) { - long id = modify.getTable().getCatalogEntity().id; + long id = modify.getEntity().getCatalogEntity().id; UnsupportedFromInsertShuttle shuttle = new UnsupportedFromInsertShuttle( id ); modify.accept( shuttle ); return shuttle.containsOtherTableId; @@ -43,7 +42,7 @@ public static boolean contains( Modify modify ) { @Override public AlgNode visit( Scan scan ) { - if ( !Objects.equals( scan.getTable().getCatalogEntity().id, tableId ) ) { + if ( !Objects.equals( scan.getEntity().getCatalogEntity().id, tableId ) ) { containsOtherTableId = true; } return super.visit( scan ); diff --git a/core/src/main/java/org/polypheny/db/algebra/core/Modify.java b/core/src/main/java/org/polypheny/db/algebra/core/Modify.java index c880f1a7e9..4d3cd81f3a 100644 --- a/core/src/main/java/org/polypheny/db/algebra/core/Modify.java +++ b/core/src/main/java/org/polypheny/db/algebra/core/Modify.java @@ -237,5 +237,11 @@ public String algCompareString() { isFlattened() + "&"; } + + @Override + public AlgOptEntity getEntity() { + return table; + } + } diff --git a/core/src/main/java/org/polypheny/db/algebra/core/Scan.java b/core/src/main/java/org/polypheny/db/algebra/core/Scan.java index ee33cc8a1a..f422172179 100644 --- a/core/src/main/java/org/polypheny/db/algebra/core/Scan.java +++ 
b/core/src/main/java/org/polypheny/db/algebra/core/Scan.java @@ -92,7 +92,7 @@ public double estimateRowCount( AlgMetadataQuery mq ) { @Override - public AlgOptEntity getTable() { + public AlgOptEntity getEntity() { return table; } diff --git a/core/src/main/java/org/polypheny/db/algebra/core/document/DocumentAlg.java b/core/src/main/java/org/polypheny/db/algebra/core/document/DocumentAlg.java index fb9d24dbb2..f8d274a2b8 100644 --- a/core/src/main/java/org/polypheny/db/algebra/core/document/DocumentAlg.java +++ b/core/src/main/java/org/polypheny/db/algebra/core/document/DocumentAlg.java @@ -29,7 +29,7 @@ public interface DocumentAlg { default AlgOptEntity getCollection() { assert this instanceof AlgNode; - return ((AlgNode) this).getTable(); + return ((AlgNode) this).getEntity(); } enum DocType { diff --git a/core/src/main/java/org/polypheny/db/algebra/core/document/DocumentModify.java b/core/src/main/java/org/polypheny/db/algebra/core/document/DocumentModify.java index 2544d91074..f1da086882 100644 --- a/core/src/main/java/org/polypheny/db/algebra/core/document/DocumentModify.java +++ b/core/src/main/java/org/polypheny/db/algebra/core/document/DocumentModify.java @@ -58,6 +58,12 @@ protected DocumentModify( AlgTraitSet traits, AlgOptEntity collection, CatalogRe } + @Override + public AlgOptEntity getEntity() { + return collection; + } + + @Override public AlgDataType deriveRowType() { return AlgOptUtil.createDmlRowType( Kind.INSERT, getCluster().getTypeFactory() ); diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalConstraintEnforcer.java b/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalConstraintEnforcer.java index 3595fa6c6e..d9b5bbd00b 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalConstraintEnforcer.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalConstraintEnforcer.java @@ -131,7 +131,7 @@ private static EnforcementInformation getControl( AlgNode node, Statement statem //builder.scan( table.getSchemaName(), table.name ); for ( CatalogConstraint constraint : constraints ) { builder.clear(); - final AlgNode scan = LogicalRelScan.create( modify.getCluster(), modify.getTable() ); + final AlgNode scan = LogicalRelScan.create( modify.getCluster(), modify.getEntity() ); builder.push( scan ); // Enforce uniqueness between the already existing values and the new values List keys = constraint.key @@ -258,7 +258,7 @@ public static EnforcementInformation getControl( CatalogTable table, Statement s //builder.scan( table.getSchemaName(), table.name ); for ( CatalogConstraint constraint : constraints ) { builder.clear(); - builder.scan( table.getNamespaceName(), table.name );//LogicalTableScan.create( modify.getCluster(), modify.getTable() ); + builder.scan( table.getNamespaceName(), table.name );//LogicalTableScan.create( modify.getCluster(), modify.getEntity() ); // Enforce uniqueness between the already existing values and the new values List keys = constraint.key .getColumnNames() @@ -399,11 +399,11 @@ public AlgNode accept( AlgShuttle shuttle ) { public static CatalogTable getCatalogTable( Modify modify ) { - if ( modify.getTable() == null ) { + if ( modify.getEntity() == null ) { throw new RuntimeException( "The table was not found in the catalog!" 
); } - return (CatalogTable) modify.getTable().getCatalogEntity(); + return (CatalogTable) modify.getEntity().getCatalogEntity(); } diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalStreamer.java b/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalStreamer.java index 89360b1bd8..b5a1d2c13d 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalStreamer.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalStreamer.java @@ -104,13 +104,13 @@ private static LogicalStreamer getLogicalStreamer( Modify modify, AlgBuilder alg if ( !modify.isInsert() ) { // get collection, which is modified - algBuilder.scan( modify.getTable() ); + algBuilder.scan( modify.getEntity() ); // at the moment no data model is able to conditionally insert attachFilter( modify, algBuilder, rexBuilder ); } else { //algBuilder.push( LogicalValues.createOneRow( input.getCluster() ) ); - assert input.getRowType().getFieldCount() == modify.getTable().getRowType().getFieldCount(); + assert input.getRowType().getFieldCount() == modify.getEntity().getRowType().getFieldCount(); // attach a projection, so the values can be inserted on execution algBuilder.push( LogicalProject.create( @@ -125,7 +125,7 @@ private static LogicalStreamer getLogicalStreamer( Modify modify, AlgBuilder alg } LogicalModify prepared = LogicalModify.create( - modify.getTable(), + modify.getEntity(), modify.getCatalogReader(), algBuilder.build(), modify.getOperation(), @@ -142,9 +142,9 @@ private static List createSourceList( Modify modify, RexBuilder rexBuil .stream() .map( name -> { int size = modify.getRowType().getFieldList().size(); - int index = modify.getTable().getRowType().getFieldNames().indexOf( name ); + int index = modify.getEntity().getRowType().getFieldNames().indexOf( name ); return rexBuilder.makeDynamicParam( - modify.getTable().getRowType().getFieldList().get( index ).getType(), size + index ); + modify.getEntity().getRowType().getFieldList().get( index ).getType(), size + index ); } ).collect( Collectors.toList() ); } @@ -152,10 +152,10 @@ private static List createSourceList( Modify modify, RexBuilder rexBuil private static void attachFilter( Modify modify, AlgBuilder algBuilder, RexBuilder rexBuilder ) { List fields = new ArrayList<>(); int i = 0; - for ( AlgDataTypeField field : modify.getTable().getRowType().getFieldList() ) { + for ( AlgDataTypeField field : modify.getEntity().getRowType().getFieldList() ) { fields.add( algBuilder.equals( - rexBuilder.makeInputRef( modify.getTable().getRowType(), i ), + rexBuilder.makeInputRef( modify.getEntity().getRowType(), i ), rexBuilder.makeDynamicParam( field.getType(), i ) ) ); i++; } diff --git a/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdCollation.java b/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdCollation.java index 96993add7e..5fa6746635 100644 --- a/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdCollation.java +++ b/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdCollation.java @@ -136,7 +136,7 @@ public ImmutableList collations( Filter alg, AlgMetadataQuery mq ) public ImmutableList collations( Scan scan, AlgMetadataQuery mq ) { - return ImmutableList.copyOf( table( scan.getTable() ) ); + return ImmutableList.copyOf( table( scan.getEntity() ) ); } diff --git a/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdColumnOrigins.java b/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdColumnOrigins.java index 25a68fc745..461176afac 
100644 --- a/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdColumnOrigins.java +++ b/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdColumnOrigins.java @@ -222,7 +222,7 @@ public Set getColumnOrigins( AlgNode alg, AlgMetadataQuery mq, final Set set = new HashSet<>(); - AlgOptEntity table = alg.getTable(); + AlgOptEntity table = alg.getEntity(); if ( table == null ) { // Somebody is making column values up out of thin air, like a VALUES clause, so we return an empty set. return set; diff --git a/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdColumnUniqueness.java b/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdColumnUniqueness.java index ed0a508281..2f327c91bc 100644 --- a/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdColumnUniqueness.java +++ b/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdColumnUniqueness.java @@ -89,7 +89,7 @@ public MetadataDef getDef() { public Boolean areColumnsUnique( Scan alg, AlgMetadataQuery mq, ImmutableBitSet columns, boolean ignoreNulls ) { - return alg.getTable().isKey( columns ); + return alg.getEntity().isKey( columns ); } diff --git a/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdDistribution.java b/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdDistribution.java index 81a620298b..27f4c33bc4 100644 --- a/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdDistribution.java +++ b/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdDistribution.java @@ -103,7 +103,7 @@ public AlgDistribution distribution( SetOp alg, AlgMetadataQuery mq ) { public AlgDistribution distribution( Scan scan, AlgMetadataQuery mq ) { - return table( scan.getTable() ); + return table( scan.getEntity() ); } diff --git a/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdExpressionLineage.java b/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdExpressionLineage.java index d9b538e744..1a393c2b47 100644 --- a/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdExpressionLineage.java +++ b/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdExpressionLineage.java @@ -131,7 +131,7 @@ public Set getExpressionLineage( Scan alg, AlgMetadataQuery mq, RexNode final Map> mapping = new LinkedHashMap<>(); for ( int idx : inputFieldsUsed ) { final RexNode inputRef = RexTableInputRef.of( - AlgTableRef.of( alg.getTable(), 0 ), + AlgTableRef.of( alg.getEntity(), 0 ), RexInputRef.of( idx, alg.getRowType().getFieldList() ) ); final RexInputRef ref = RexInputRef.of( idx, alg.getRowType().getFieldList() ); mapping.put( ref, ImmutableSet.of( inputRef ) ); diff --git a/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdTableReferences.java b/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdTableReferences.java index 2f956501b0..57d8a66f2e 100644 --- a/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdTableReferences.java +++ b/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdTableReferences.java @@ -103,7 +103,7 @@ public Set getTableReferences( AlgSubset alg, AlgMetadataQuery mq ) * Scan table reference. 
*/ public Set getTableReferences( Scan alg, AlgMetadataQuery mq ) { - return ImmutableSet.of( AlgTableRef.of( alg.getTable(), 0 ) ); + return ImmutableSet.of( AlgTableRef.of( alg.getEntity(), 0 ) ); } diff --git a/core/src/main/java/org/polypheny/db/algebra/mutable/MutableAlgs.java b/core/src/main/java/org/polypheny/db/algebra/mutable/MutableAlgs.java index 11e15c7a86..169a81593d 100644 --- a/core/src/main/java/org/polypheny/db/algebra/mutable/MutableAlgs.java +++ b/core/src/main/java/org/polypheny/db/algebra/mutable/MutableAlgs.java @@ -380,7 +380,7 @@ public static MutableAlg toMutable( AlgNode alg ) { return MutableTableModify.of( modify.getRowType(), input, - modify.getTable(), + modify.getEntity(), modify.getCatalogReader(), modify.getOperation(), modify.getUpdateColumnList(), diff --git a/core/src/main/java/org/polypheny/db/algebra/mutable/MutableScan.java b/core/src/main/java/org/polypheny/db/algebra/mutable/MutableScan.java index a226a45420..d6d9a1436f 100644 --- a/core/src/main/java/org/polypheny/db/algebra/mutable/MutableScan.java +++ b/core/src/main/java/org/polypheny/db/algebra/mutable/MutableScan.java @@ -73,7 +73,7 @@ public int hashCode() { @Override public StringBuilder digest( StringBuilder buf ) { - return buf.append( "Scan(table: " ).append( alg.getTable().getCatalogEntity().name ).append( ")" ); + return buf.append( "Scan(table: " ).append( alg.getEntity().getCatalogEntity().name ).append( ")" ); } diff --git a/core/src/main/java/org/polypheny/db/algebra/rules/FilterScanRule.java b/core/src/main/java/org/polypheny/db/algebra/rules/FilterScanRule.java index b9f7c3b299..385b532606 100644 --- a/core/src/main/java/org/polypheny/db/algebra/rules/FilterScanRule.java +++ b/core/src/main/java/org/polypheny/db/algebra/rules/FilterScanRule.java @@ -117,7 +117,7 @@ protected FilterScanRule( AlgOptRuleOperand operand, AlgBuilderFactory algBuilde public static boolean test( Scan scan ) { // We can only push filters into a FilterableTable or ProjectableFilterableTable. - final AlgOptEntity table = scan.getTable(); + final AlgOptEntity table = scan.getEntity(); return table.unwrap( FilterableEntity.class ) != null || table.unwrap( ProjectableFilterableEntity.class ) != null; } @@ -133,10 +133,10 @@ protected void apply( AlgOptRuleCall call, Filter filter, Scan scan ) { projects = scan.identity(); } - final Mapping mapping = Mappings.target( projects, scan.getTable().getRowType().getFieldCount() ); + final Mapping mapping = Mappings.target( projects, scan.getEntity().getRowType().getFieldCount() ); filters.add( RexUtil.apply( mapping, filter.getCondition() ) ); - call.transformTo( BindableScan.create( scan.getCluster(), scan.getTable(), filters.build(), projects ) ); + call.transformTo( BindableScan.create( scan.getCluster(), scan.getEntity(), filters.build(), projects ) ); } } diff --git a/core/src/main/java/org/polypheny/db/algebra/rules/ProjectScanRule.java b/core/src/main/java/org/polypheny/db/algebra/rules/ProjectScanRule.java index 21c1cde9ae..b05ff0dc37 100644 --- a/core/src/main/java/org/polypheny/db/algebra/rules/ProjectScanRule.java +++ b/core/src/main/java/org/polypheny/db/algebra/rules/ProjectScanRule.java @@ -110,13 +110,13 @@ public ProjectScanRule( AlgOptRuleOperand operand, AlgBuilderFactory algBuilderF protected static boolean test( Scan scan ) { // We can only push projects into a ProjectableFilterableTable. 
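// Aside (illustrative, simplified): the Mappings.target( projects, fieldCount )
// calls in these scan rules build a source-to-target index map in which source
// column projects.get( i ) maps to output position i; RexUtil.apply(...) then
// rewrites every input reference of the pushed-down condition through that map.
// In plain indices, the lookup amounts to roughly:
//
//     static int remap( List<Integer> projects, int sourceIndex ) {
//         int target = projects.indexOf( sourceIndex );   // position among kept columns
//         if ( target < 0 ) {
//             throw new IllegalArgumentException( "column not projected: " + sourceIndex );
//         }
//         return target;
//     }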
- final AlgOptEntity table = scan.getTable(); + final AlgOptEntity table = scan.getEntity(); return table.unwrap( ProjectableFilterableEntity.class ) != null; } protected void apply( AlgOptRuleCall call, Project project, Scan scan ) { - final AlgOptEntity table = scan.getTable(); + final AlgOptEntity table = scan.getEntity(); assert table.unwrap( ProjectableFilterableEntity.class ) != null; final TargetMapping mapping = project.getMapping(); @@ -136,7 +136,7 @@ protected void apply( AlgOptRuleCall call, Project project, Scan scan ) { } final List projects2 = Mappings.apply( (Mapping) mapping, projects ); - call.transformTo( BindableScan.create( scan.getCluster(), scan.getTable(), filters, projects2 ) ); + call.transformTo( BindableScan.create( scan.getCluster(), scan.getEntity(), filters, projects2 ) ); } } diff --git a/core/src/main/java/org/polypheny/db/algebra/rules/ScanRule.java b/core/src/main/java/org/polypheny/db/algebra/rules/ScanRule.java index 4c72682c72..f16d54bc4b 100644 --- a/core/src/main/java/org/polypheny/db/algebra/rules/ScanRule.java +++ b/core/src/main/java/org/polypheny/db/algebra/rules/ScanRule.java @@ -20,9 +20,9 @@ import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.core.AlgFactories; import org.polypheny.db.algebra.logical.relational.LogicalRelScan; +import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptRule; import org.polypheny.db.plan.AlgOptRuleCall; -import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.tools.AlgBuilderFactory; @@ -47,7 +47,7 @@ public ScanRule( AlgBuilderFactory algBuilderFactory ) { @Override public void onMatch( AlgOptRuleCall call ) { final LogicalRelScan oldAlg = call.alg( 0 ); - AlgNode newAlg = oldAlg.getTable().toAlg( oldAlg::getCluster, oldAlg.getTraitSet() ); + AlgNode newAlg = oldAlg.getEntity().toAlg( oldAlg::getCluster, oldAlg.getTraitSet() ); call.transformTo( newAlg ); } diff --git a/core/src/main/java/org/polypheny/db/algebra/stream/StreamRules.java b/core/src/main/java/org/polypheny/db/algebra/stream/StreamRules.java index 064df1f31b..55e8101383 100644 --- a/core/src/main/java/org/polypheny/db/algebra/stream/StreamRules.java +++ b/core/src/main/java/org/polypheny/db/algebra/stream/StreamRules.java @@ -269,7 +269,7 @@ public void onMatch( AlgOptRuleCall call ) { final Delta delta = call.alg( 0 ); final Scan scan = call.alg( 1 ); final AlgOptCluster cluster = delta.getCluster(); - final AlgOptEntity algOptEntity = scan.getTable(); + final AlgOptEntity algOptEntity = scan.getEntity(); final StreamableEntity streamableTable = algOptEntity.unwrap( StreamableEntity.class ); if ( streamableTable != null ) { final Entity entity1 = streamableTable.stream(); @@ -310,7 +310,7 @@ public DeltaScanToEmptyRule( AlgBuilderFactory algBuilderFactory ) { public void onMatch( AlgOptRuleCall call ) { final Delta delta = call.alg( 0 ); final Scan scan = call.alg( 1 ); - final AlgOptEntity algOptEntity = scan.getTable(); + final AlgOptEntity algOptEntity = scan.getEntity(); final StreamableEntity streamableTable = algOptEntity.unwrap( StreamableEntity.class ); final AlgBuilder builder = call.builder(); if ( streamableTable == null ) { diff --git a/core/src/main/java/org/polypheny/db/algebra/type/DocumentType.java b/core/src/main/java/org/polypheny/db/algebra/type/DocumentType.java index bf64671e36..e8dba4b6e7 100644 --- a/core/src/main/java/org/polypheny/db/algebra/type/DocumentType.java +++ b/core/src/main/java/org/polypheny/db/algebra/type/DocumentType.java @@ -55,34 +55,34 @@ public 
DocumentType() { public AlgDataType asRelational() { - return new AlgRecordType( List.of( new AlgDataTypeFieldImpl( "_data_", 1, AlgDataTypeFactory.DEFAULT.createPolyType( PolyType.VARCHAR, 2024 ) ) ) ); + return new AlgRecordType( List.of( + new AlgDataTypeFieldImpl( "_id_", 1, AlgDataTypeFactory.DEFAULT.createPolyType( PolyType.VARCHAR, 2024 ) ), + new AlgDataTypeFieldImpl( "_data_", 1, AlgDataTypeFactory.DEFAULT.createPolyType( PolyType.VARCHAR, 2024 ) ) + ) ); } @Override public boolean isStruct() { - return false; + return true; } @Override public List getFieldList() { - return List.of( this ); + throw new RuntimeException( "getFieldList" ); } @Override public List getFieldNames() { - if ( name == null ) { - return List.of( "$d" ); - } - return List.of( name ); + throw new RuntimeException( "getFieldList" ); } @Override public int getFieldCount() { - return 1; + throw new RuntimeException( "getFieldList" ); } @@ -212,7 +212,7 @@ public AlgDataType getValue() { @Override public AlgDataType setValue( AlgDataType value ) { - throw new RuntimeException( "Error while setting field on AlgDocumentType" ); + throw new RuntimeException( "Error while setting field on DocumentType" ); } } diff --git a/core/src/main/java/org/polypheny/db/interpreter/Bindables.java b/core/src/main/java/org/polypheny/db/interpreter/Bindables.java index d276c40fdb..e09999a506 100644 --- a/core/src/main/java/org/polypheny/db/interpreter/Bindables.java +++ b/core/src/main/java/org/polypheny/db/interpreter/Bindables.java @@ -169,7 +169,7 @@ public BindableScanRule( AlgBuilderFactory algBuilderFactory ) { @Override public void onMatch( AlgOptRuleCall call ) { final LogicalRelScan scan = call.alg( 0 ); - final AlgOptEntity table = scan.getTable(); + final AlgOptEntity table = scan.getEntity(); if ( BindableScan.canHandle( table ) ) { call.transformTo( BindableScan.create( scan.getCluster(), table ) ); } diff --git a/core/src/main/java/org/polypheny/db/interpreter/ScanNode.java b/core/src/main/java/org/polypheny/db/interpreter/ScanNode.java index 316c363469..f379602871 100644 --- a/core/src/main/java/org/polypheny/db/interpreter/ScanNode.java +++ b/core/src/main/java/org/polypheny/db/interpreter/ScanNode.java @@ -91,7 +91,7 @@ public void run() { * Tries various table SPIs, and negotiates with the table which filters and projects it can implement. Adds to the Enumerable implementations of any filters and projects that cannot be implemented by the table. 
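As a gloss on the negotiation described above (the create* helpers below implement it): ScanNode prefers the most capable table SPI and re-applies whatever the table rejects as ordinary Enumerable filters and projects. Roughly, in illustrative pseudo-Java, not the literal dispatch code:

    // Illustrative preference order; rejected filters/projects are compensated on top.
    if ( algOptEntity.unwrap( ProjectableFilterableEntity.class ) != null ) {
        // push both filters and projections into the table
    } else if ( algOptEntity.unwrap( FilterableEntity.class ) != null ) {
        // push filters only, project on top
    } else if ( algOptEntity.unwrap( ScannableEntity.class ) != null ) {
        // plain full scan, filter and project on top
    } else if ( algOptEntity.unwrap( QueryableEntity.class ) != null ) {
        // scan through the Queryable facade
    }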
*/ static ScanNode create( Compiler compiler, Scan alg, ImmutableList filters, ImmutableIntList projects ) { - final AlgOptEntity algOptEntity = alg.getTable(); + final AlgOptEntity algOptEntity = alg.getEntity(); final ProjectableFilterableEntity pfTable = algOptEntity.unwrap( ProjectableFilterableEntity.class ); if ( pfTable != null ) { return createProjectableFilterable( compiler, alg, filters, projects, pfTable ); @@ -125,7 +125,7 @@ private static ScanNode createScannable( Compiler compiler, Scan alg, ImmutableL private static ScanNode createQueryable( Compiler compiler, Scan alg, ImmutableList filters, ImmutableIntList projects, QueryableEntity queryableTable ) { final DataContext root = compiler.getDataContext(); - final AlgOptEntity algOptEntity = alg.getTable(); + final AlgOptEntity algOptEntity = alg.getEntity(); final Type elementType = queryableTable.getElementType(); final Enumerable rowEnumerable; @@ -179,7 +179,7 @@ private static ScanNode createProjectableFilterable( Compiler compiler, Scan alg for ( ; ; ) { final List mutableFilters = Lists.newArrayList( filters ); final int[] projectInts; - if ( projects == null || projects.equals( Scan.identity( alg.getTable() ) ) ) { + if ( projects == null || projects.equals( Scan.identity( alg.getEntity() ) ) ) { projectInts = null; } else { projectInts = projects.toIntArray(); @@ -228,10 +228,10 @@ private static ScanNode createEnumerable( Compiler compiler, Scan alg, Enumerabl filter2 = filter; inputRowType = alg.getRowType(); } else { - final Mapping mapping = Mappings.target( acceptedProjects, alg.getTable().getRowType().getFieldCount() ); + final Mapping mapping = Mappings.target( acceptedProjects, alg.getEntity().getRowType().getFieldCount() ); filter2 = RexUtil.apply( mapping, filter ); final AlgDataTypeFactory.Builder builder = alg.getCluster().getTypeFactory().builder(); - final List fieldList = alg.getTable().getRowType().getFieldList(); + final List fieldList = alg.getEntity().getRowType().getFieldList(); for ( int acceptedProject : acceptedProjects ) { builder.add( fieldList.get( acceptedProject ) ); } diff --git a/core/src/main/java/org/polypheny/db/plan/AlgOptEntity.java b/core/src/main/java/org/polypheny/db/plan/AlgOptEntity.java index 7f27baa1ec..bb2dbbe89d 100644 --- a/core/src/main/java/org/polypheny/db/plan/AlgOptEntity.java +++ b/core/src/main/java/org/polypheny/db/plan/AlgOptEntity.java @@ -131,7 +131,7 @@ public interface AlgOptEntity extends Wrapper { List getColumnStrategies(); @Deprecated - default Entity getTable() { + default Entity getEntity() { return null; } diff --git a/core/src/main/java/org/polypheny/db/plan/AlgOptUtil.java b/core/src/main/java/org/polypheny/db/plan/AlgOptUtil.java index c2d1f91959..a977d3dd63 100644 --- a/core/src/main/java/org/polypheny/db/plan/AlgOptUtil.java +++ b/core/src/main/java/org/polypheny/db/plan/AlgOptUtil.java @@ -201,7 +201,7 @@ public static List findAllTables( AlgNode alg ) { for ( Entry, Collection> e : nodes.asMap().entrySet() ) { if ( Scan.class.isAssignableFrom( e.getKey() ) ) { for ( AlgNode node : e.getValue() ) { - usedTables.add( node.getTable() ); + usedTables.add( node.getEntity() ); } } } diff --git a/core/src/main/java/org/polypheny/db/prepare/PolyphenyDbCatalogReader.java b/core/src/main/java/org/polypheny/db/prepare/PolyphenyDbCatalogReader.java index ea87d6d319..9e4b0526b2 100644 --- a/core/src/main/java/org/polypheny/db/prepare/PolyphenyDbCatalogReader.java +++ b/core/src/main/java/org/polypheny/db/prepare/PolyphenyDbCatalogReader.java @@ -89,7 +89,7 @@ 
public PreparingEntity getTable( final List names ) { return algOptTable; } } - return AlgOptEntityImpl.create( this, entity.getRowType( typeFactory ), entry, entity.getCatalogTable(), entity.getPartitionPlacement(), null ); + return AlgOptEntityImpl.create( this, entity.getRowType( typeFactory ), entry, entity.getCatalogEntity(), entity.getPartitionPlacement(), null ); } return null; } @@ -102,7 +102,7 @@ public AlgOptEntity getCollection( final List names ) { if ( entry != null ) { final Entity entity = entry.getTable(); - return AlgOptEntityImpl.create( this, entity.getRowType( typeFactory ), entry, entity.getCatalogTable(), entity.getPartitionPlacement(), null ); + return AlgOptEntityImpl.create( this, entity.getRowType( typeFactory ), entry, entity.getCatalogEntity(), entity.getPartitionPlacement(), null ); } return null; } diff --git a/core/src/main/java/org/polypheny/db/prepare/Prepare.java b/core/src/main/java/org/polypheny/db/prepare/Prepare.java index 76db022daa..1e705f3512 100644 --- a/core/src/main/java/org/polypheny/db/prepare/Prepare.java +++ b/core/src/main/java/org/polypheny/db/prepare/Prepare.java @@ -152,7 +152,7 @@ public void visit( AlgNode node, int ordinal, AlgNode parent ) { if ( node instanceof Scan ) { final AlgOptCluster cluster = node.getCluster(); final ToAlgContext context = () -> cluster; - final AlgNode r = node.getTable().toAlg( context, node.getTraitSet() ); + final AlgNode r = node.getEntity().toAlg( context, node.getTraitSet() ); planner.registerClass( r ); } super.visit( node, ordinal, parent ); diff --git a/core/src/main/java/org/polypheny/db/prepare/QueryableAlgBuilder.java b/core/src/main/java/org/polypheny/db/prepare/QueryableAlgBuilder.java index 17e8fb7c1e..0e78239f8c 100644 --- a/core/src/main/java/org/polypheny/db/prepare/QueryableAlgBuilder.java +++ b/core/src/main/java/org/polypheny/db/prepare/QueryableAlgBuilder.java @@ -115,7 +115,7 @@ AlgNode toAlg( Queryable queryable ) { null, table.getRowType( translator.typeFactory ), tableEntry, - table.getCatalogTable(), + table.getCatalogEntity(), table.getPartitionPlacement(), null ); if ( table instanceof TranslatableEntity ) { diff --git a/core/src/main/java/org/polypheny/db/processing/DeepCopyShuttle.java b/core/src/main/java/org/polypheny/db/processing/DeepCopyShuttle.java index d75ecf4583..8affb22d05 100644 --- a/core/src/main/java/org/polypheny/db/processing/DeepCopyShuttle.java +++ b/core/src/main/java/org/polypheny/db/processing/DeepCopyShuttle.java @@ -52,7 +52,7 @@ private AlgTraitSet copy( final AlgTraitSet other ) { @Override public AlgNode visit( Scan scan ) { final AlgNode node = super.visit( scan ); - return new LogicalRelScan( node.getCluster(), copy( node.getTraitSet() ), node.getTable() ); + return new LogicalRelScan( node.getCluster(), copy( node.getTraitSet() ), node.getEntity() ); } diff --git a/core/src/main/java/org/polypheny/db/processing/LogicalAlgAnalyzeShuttle.java b/core/src/main/java/org/polypheny/db/processing/LogicalAlgAnalyzeShuttle.java index 6aa900cb8a..27f3668a1e 100644 --- a/core/src/main/java/org/polypheny/db/processing/LogicalAlgAnalyzeShuttle.java +++ b/core/src/main/java/org/polypheny/db/processing/LogicalAlgAnalyzeShuttle.java @@ -280,14 +280,14 @@ public AlgNode visit( LogicalConstraintEnforcer enforcer ) { @Override public AlgNode visit( LogicalMatch match ) { - hashBasis.add( "LogicalMatch#" + match.getTable().getCatalogEntity().id ); + hashBasis.add( "LogicalMatch#" + match.getEntity().getCatalogEntity().id ); return visitChild( match, 0, match.getInput() 
); } @Override public AlgNode visit( Scan scan ) { - hashBasis.add( "Scan#" + scan.getTable().getCatalogEntity().id ); + hashBasis.add( "Scan#" + scan.getEntity().getCatalogEntity().id ); // get available columns for every table scan this.getAvailableColumns( scan ); @@ -326,7 +326,7 @@ public AlgNode visit( LogicalCorrelate correlate ) { @Override public AlgNode visit( LogicalJoin join ) { if ( join.getLeft() instanceof LogicalRelScan && join.getRight() instanceof LogicalRelScan ) { - hashBasis.add( "LogicalJoin#" + join.getLeft().getTable().getCatalogEntity().id + "#" + join.getRight().getTable().getCatalogEntity().id ); + hashBasis.add( "LogicalJoin#" + join.getLeft().getEntity().getCatalogEntity().id + "#" + join.getRight().getEntity().getCatalogEntity().id ); } super.visit( join ); @@ -391,8 +391,8 @@ public AlgNode visit( AlgNode other ) { private void getAvailableColumns( AlgNode scan ) { - this.entityId.add( scan.getTable().getCatalogEntity().id ); - final CatalogTable table = (CatalogTable) scan.getTable().getCatalogEntity(); + this.entityId.add( scan.getEntity().getCatalogEntity().id ); + final CatalogTable table = (CatalogTable) scan.getEntity().getCatalogEntity(); if ( table != null ) { final List ids = table.fieldIds; final List names = table.getColumnNames(); @@ -407,7 +407,7 @@ private void getAvailableColumns( AlgNode scan ) { private void getPartitioningInfo( LogicalFilter filter ) { - AlgOptEntityImpl table = (AlgOptEntityImpl) filter.getInput().getTable(); + AlgOptEntityImpl table = (AlgOptEntityImpl) filter.getInput().getEntity(); if ( table == null ) { return; } @@ -442,7 +442,7 @@ private void handleIfPartitioned( AlgNode node, CatalogTable catalogTable ) { private void getPartitioningInfo( LogicalDocumentFilter filter ) { - AlgOptEntityImpl table = (AlgOptEntityImpl) filter.getInput().getTable(); + AlgOptEntityImpl table = (AlgOptEntityImpl) filter.getInput().getEntity(); if ( table == null ) { return; } diff --git a/core/src/main/java/org/polypheny/db/schema/Entity.java b/core/src/main/java/org/polypheny/db/schema/Entity.java index 1507917ffa..513fc7a0aa 100644 --- a/core/src/main/java/org/polypheny/db/schema/Entity.java +++ b/core/src/main/java/org/polypheny/db/schema/Entity.java @@ -37,8 +37,8 @@ import org.polypheny.db.algebra.type.AlgDataTypeFactory; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.Catalog.NamespaceType; +import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; -import org.polypheny.db.catalog.entity.CatalogTable; import org.polypheny.db.nodes.Call; import org.polypheny.db.nodes.Node; import org.polypheny.db.prepare.JavaTypeFactoryImpl; @@ -89,11 +89,19 @@ default AlgDataTypeFactory getTypeFactory() { Long getId(); @Deprecated // whole entity might get replaced - default CatalogTable getCatalogTable() { + default CatalogEntity getCatalogEntity() { if ( getId() == null ) { return null; } - return Catalog.getInstance().getTable( getId() ); + switch ( getNamespaceType() ) { + case RELATIONAL: + return Catalog.getInstance().getTable( getId() ); + case DOCUMENT: + return Catalog.getInstance().getCollection( getId() ); + case GRAPH: + return Catalog.getInstance().getGraph( getId() ); + } + return null; } Long getPartitionId(); diff --git a/core/src/main/java/org/polypheny/db/schema/LogicalEntity.java b/core/src/main/java/org/polypheny/db/schema/LogicalEntity.java index 2ec19236dd..0c8038f61e 100644 --- a/core/src/main/java/org/polypheny/db/schema/LogicalEntity.java 
+++ b/core/src/main/java/org/polypheny/db/schema/LogicalEntity.java @@ -46,7 +46,7 @@ public class LogicalEntity extends AbstractQueryableEntity implements Translatab private AlgProtoDataType protoRowType; @Getter - private final NamespaceType schemaType; + private final NamespaceType namespaceType; @Getter private final String logicalSchemaName; @@ -69,14 +69,14 @@ public LogicalEntity( List columnIds, List logicalColumnNames, AlgProtoDataType protoRowType, - NamespaceType schemaType ) { + NamespaceType namespaceType ) { super( Object[].class, tableId, null, null ); this.logicalSchemaName = logicalSchemaName; this.logicalTableName = logicalTableName; this.columnIds = columnIds; this.logicalColumnNames = logicalColumnNames; this.protoRowType = protoRowType; - this.schemaType = schemaType; + this.namespaceType = namespaceType; } diff --git a/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java b/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java index 3fc1f087d6..5aed7900c9 100644 --- a/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java +++ b/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java @@ -2919,7 +2919,7 @@ private Frame( AlgNode alg ) { private static String deriveAlias( AlgNode alg ) { if ( alg instanceof Scan ) { - return alg.getTable().getCatalogEntity().name; + return alg.getEntity().getCatalogEntity().name; } return null; } diff --git a/core/src/main/java/org/polypheny/db/view/MaterializedViewManager.java b/core/src/main/java/org/polypheny/db/view/MaterializedViewManager.java index ea7d1a3a8c..3a8c80e340 100644 --- a/core/src/main/java/org/polypheny/db/view/MaterializedViewManager.java +++ b/core/src/main/java/org/polypheny/db/view/MaterializedViewManager.java @@ -91,8 +91,8 @@ public static class TableUpdateVisitor extends AlgShuttleImpl { @Override public AlgNode visit( LogicalModify modify ) { if ( modify.getOperation() != Operation.MERGE ) { - if ( (modify.getTable().getCatalogEntity() != null) ) { - CatalogTable table = modify.getTable().getCatalogEntity().unwrap( CatalogTable.class ); + if ( (modify.getEntity().getCatalogEntity() != null) ) { + CatalogTable table = modify.getEntity().getCatalogEntity().unwrap( CatalogTable.class ); names.add( table.getNamespaceName() ); names.add( table.name ); } diff --git a/core/src/main/java/org/polypheny/db/view/ViewManager.java b/core/src/main/java/org/polypheny/db/view/ViewManager.java index bbd8a9d8d0..355b65ecad 100644 --- a/core/src/main/java/org/polypheny/db/view/ViewManager.java +++ b/core/src/main/java/org/polypheny/db/view/ViewManager.java @@ -252,8 +252,8 @@ public AlgNode checkNode( AlgNode other ) { if ( other instanceof LogicalRelViewScan ) { return expandViewNode( other ); } else if ( doesSubstituteOrderBy && other instanceof LogicalRelScan ) { - if ( other.getTable() instanceof AlgOptEntityImpl ) { - CatalogTable catalogTable = other.getTable().getCatalogEntity().unwrap( CatalogTable.class ); + if ( other.getEntity() instanceof AlgOptEntityImpl ) { + CatalogTable catalogTable = other.getEntity().getCatalogEntity().unwrap( CatalogTable.class ); if ( catalogTable.entityType == EntityType.MATERIALIZED_VIEW && ((CatalogMaterializedView) catalogTable).isOrdered() ) { return orderMaterialized( other ); } diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java index 5f1b68418d..46ebd34dc4 100644 --- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java +++ 
b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java @@ -2134,10 +2134,10 @@ private List getColumnInformation( List projectedColum private Map> findUnderlyingTablesOfView( AlgNode algNode, Map> underlyingTables, AlgDataType fieldList ) { if ( algNode instanceof LogicalRelScan ) { List underlyingColumns = getUnderlyingColumns( algNode, fieldList ); - underlyingTables.put( algNode.getTable().getCatalogEntity().id, underlyingColumns ); + underlyingTables.put( algNode.getEntity().getCatalogEntity().id, underlyingColumns ); } else if ( algNode instanceof LogicalRelViewScan ) { List underlyingColumns = getUnderlyingColumns( algNode, fieldList ); - underlyingTables.put( algNode.getTable().getCatalogEntity().id, underlyingColumns ); + underlyingTables.put( algNode.getEntity().getCatalogEntity().id, underlyingColumns ); } if ( algNode instanceof BiAlg ) { findUnderlyingTablesOfView( ((BiAlg) algNode).getLeft(), underlyingTables, fieldList ); @@ -2150,7 +2150,7 @@ private Map> findUnderlyingTablesOfView( AlgNode algNode, Map getUnderlyingColumns( AlgNode algNode, AlgDataType fieldList ) { - CatalogTable table = algNode.getTable().getCatalogEntity().unwrap( CatalogTable.class ); + CatalogTable table = algNode.getEntity().getCatalogEntity().unwrap( CatalogTable.class ); List columnIds = table.fieldIds; List logicalColumnNames = table.getColumnNames(); List underlyingColumns = new ArrayList<>(); diff --git a/dbms/src/main/java/org/polypheny/db/processing/AbstractQueryProcessor.java b/dbms/src/main/java/org/polypheny/db/processing/AbstractQueryProcessor.java index ee57be1715..392056b45e 100644 --- a/dbms/src/main/java/org/polypheny/db/processing/AbstractQueryProcessor.java +++ b/dbms/src/main/java/org/polypheny/db/processing/AbstractQueryProcessor.java @@ -616,7 +616,7 @@ public AlgNode visit( AlgNode node ) { if ( node instanceof LogicalModify ) { final Catalog catalog = Catalog.getInstance(); final LogicalModify ltm = (LogicalModify) node; - final CatalogTable table = ltm.getTable().getCatalogEntity().unwrap( CatalogTable.class ); + final CatalogTable table = ltm.getEntity().getCatalogEntity().unwrap( CatalogTable.class ); final CatalogSchema schema = catalog.getSchema( table.namespaceId ); final List indices = IndexManager.getInstance().getIndices( schema, table ); @@ -768,7 +768,7 @@ public AlgNode visit( AlgNode node ) { // .collect( Collectors.toList() ); // } // final {@link AlgNode} replacement = LogicalModify.create( -// ltm.getTable(), +// ltm.getEntity(), // transaction.getCatalogReader(), // newProject, // ltm.getOperation(), @@ -911,7 +911,7 @@ public AlgNode visit( LogicalProject project ) { } // Retrieve the catalog schema and database representations required for index lookup final CatalogSchema schema = statement.getTransaction().getDefaultSchema(); - final CatalogTable ctable = scan.getTable().getCatalogEntity().unwrap( CatalogTable.class ); + final CatalogTable ctable = scan.getEntity().getCatalogEntity().unwrap( CatalogTable.class ); // Retrieve any index and use for simplification final Index idx = IndexManager.getInstance().getIndex( schema, ctable, columns ); if ( idx == null ) { @@ -1305,8 +1305,8 @@ private Map> getAccessedPartitionsPerScan( AlgNode alg, Map< } } else { boolean fallback = false; - if ( alg.getTable() != null ) { - AlgOptEntityImpl table = (AlgOptEntityImpl) alg.getTable(); + if ( alg.getEntity() != null ) { + AlgOptEntityImpl table = (AlgOptEntityImpl) alg.getEntity(); int scanId = alg.getId(); diff --git 
a/dbms/src/main/java/org/polypheny/db/processing/ConstraintEnforceAttacher.java b/dbms/src/main/java/org/polypheny/db/processing/ConstraintEnforceAttacher.java index 25036ec09e..151b6a30ea 100644 --- a/dbms/src/main/java/org/polypheny/db/processing/ConstraintEnforceAttacher.java +++ b/dbms/src/main/java/org/polypheny/db/processing/ConstraintEnforceAttacher.java @@ -205,7 +205,7 @@ public static AlgRoot enforceConstraintBeforeQuery( AlgRoot logicalRoot, Stateme final List constraints; final List foreignKeys; final List exportedKeys; - table = root.getTable().getCatalogEntity().unwrap( CatalogTable.class ); + table = root.getEntity().getCatalogEntity().unwrap( CatalogTable.class ); primaryKey = catalog.getPrimaryKey( table.primaryKey ); constraints = new ArrayList<>( Catalog.getInstance().getConstraints( table.id ) ); foreignKeys = Catalog.getInstance().getForeignKeys( table.id ); @@ -230,7 +230,7 @@ public static AlgRoot enforceConstraintBeforeQuery( AlgRoot logicalRoot, Stateme continue; } // Enforce uniqueness between the already existing values and the new values - final AlgNode scan = LogicalRelScan.create( root.getCluster(), root.getTable() ); + final AlgNode scan = LogicalRelScan.create( root.getCluster(), root.getEntity() ); RexNode joinCondition = rexBuilder.makeLiteral( true ); // // TODO: Here we get issues with batch queries diff --git a/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java b/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java index f9b6cb2f8f..ad1b45965e 100644 --- a/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java +++ b/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java @@ -285,7 +285,7 @@ public void executeQuery( List selectColumnList, AlgRoot sourceAl } List fields; if ( isMaterializedView ) { - fields = targetAlg.alg.getTable().getRowType().getFieldList(); + fields = targetAlg.alg.getEntity().getRowType().getFieldList(); } else { fields = sourceAlg.validatedRowType.getFieldList(); } diff --git a/dbms/src/main/java/org/polypheny/db/processing/shuttles/QueryParameterizer.java b/dbms/src/main/java/org/polypheny/db/processing/shuttles/QueryParameterizer.java index 34dd8d8573..48cb668779 100644 --- a/dbms/src/main/java/org/polypheny/db/processing/shuttles/QueryParameterizer.java +++ b/dbms/src/main/java/org/polypheny/db/processing/shuttles/QueryParameterizer.java @@ -291,7 +291,7 @@ public AlgNode visit( LogicalModify initial ) { return new LogicalModify( modify.getCluster(), modify.getTraitSet(), - modify.getTable(), + modify.getEntity(), modify.getCatalogReader(), input, modify.getOperation(), @@ -360,7 +360,7 @@ public AlgNode visitAsymmetricModify( LogicalModify initial ) { return new LogicalModify( modify.getCluster(), modify.getTraitSet(), - modify.getTable(), + modify.getEntity(), modify.getCatalogReader(), input, modify.getOperation(), diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/AbstractDqlRouter.java b/dbms/src/main/java/org/polypheny/db/routing/routers/AbstractDqlRouter.java index d61d9871d5..0609869fd1 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/AbstractDqlRouter.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/AbstractDqlRouter.java @@ -212,8 +212,8 @@ protected List buildSelect( AlgNode node, List partitionIds = catalogTable.partitionProperty.partitionIds; diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java b/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java index 
b57c1c612b..d29b25c718 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java @@ -131,11 +131,11 @@ public class DmlRouterImpl extends BaseRouter implements DmlRouter { public AlgNode routeDml( LogicalModify modify, Statement statement ) { AlgOptCluster cluster = modify.getCluster(); - if ( modify.getTable() == null ) { + if ( modify.getEntity() == null ) { throw new RuntimeException( "Unexpected operator!" ); } - AlgOptEntityImpl table = (AlgOptEntityImpl) modify.getTable(); + AlgOptEntityImpl table = (AlgOptEntityImpl) modify.getEntity(); // Get placements of this table CatalogTable catalogTable = table.getCatalogEntity().unwrap( CatalogTable.class ); @@ -720,7 +720,7 @@ public AlgNode handleBatchIterator( AlgNode alg, Statement statement, LogicalQue public AlgNode routeDocumentDml( LogicalDocumentModify alg, Statement statement, LogicalQueryInformation queryInformation, Integer adapterId ) { PolyphenyDbCatalogReader reader = statement.getTransaction().getCatalogReader(); - CatalogCollection collection = alg.getTable().getCatalogEntity().unwrap( CatalogCollection.class ); + CatalogCollection collection = alg.getEntity().getCatalogEntity().unwrap( CatalogCollection.class ); List modifies = new ArrayList<>(); @@ -743,7 +743,7 @@ public AlgNode routeDocumentDml( LogicalDocumentModify alg, Statement statement, continue; } - modifies.add( ((ModifiableCollection) document.getTable()).toModificationAlg( + modifies.add( ((ModifiableCollection) document.getEntity()).toModificationAlg( alg.getCluster(), document, statement.getTransaction().getCatalogReader(), @@ -1252,8 +1252,8 @@ private AlgBuilder buildDml( LogicalRelScan scan = (LogicalRelScan) builder.build(); builder.push( scan.copy( scan.getTraitSet().replace( ModelTrait.DOCUMENT ), scan.getInputs() ) ); return builder; - } else if ( node instanceof LogicalRelScan && node.getTable() != null ) { - AlgOptEntityImpl table = (AlgOptEntityImpl) node.getTable(); + } else if ( node instanceof LogicalRelScan && node.getEntity() != null ) { + AlgOptEntityImpl table = (AlgOptEntityImpl) node.getEntity(); // Special handling for INSERT INTO foo SELECT * FROM foo2 if ( table.getCatalogEntity().id != catalogTable.id ) { diff --git a/dbms/src/main/java/org/polypheny/db/transaction/EntityAccessMap.java b/dbms/src/main/java/org/polypheny/db/transaction/EntityAccessMap.java index 98b4b7cff1..25405c6609 100644 --- a/dbms/src/main/java/org/polypheny/db/transaction/EntityAccessMap.java +++ b/dbms/src/main/java/org/polypheny/db/transaction/EntityAccessMap.java @@ -39,6 +39,7 @@ import org.polypheny.db.algebra.core.lpg.LpgAlg; import org.polypheny.db.algebra.core.lpg.LpgModify; import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.Catalog.NamespaceType; import org.polypheny.db.catalog.entity.CatalogTable; import org.polypheny.db.config.RuntimeConfig; import org.polypheny.db.plan.AlgOptEntity; @@ -238,7 +239,7 @@ private class TableRelVisitor extends AlgVisitor { @Override public void visit( AlgNode p, int ordinal, AlgNode parent ) { super.visit( p, ordinal, parent ); - AlgOptEntity table = p.getTable(); + AlgOptEntity table = p.getEntity(); if ( table == null ) { if ( p instanceof LpgAlg ) { attachGraph( (AlgNode & LpgAlg) p ); @@ -257,7 +258,7 @@ public void visit( AlgNode p, int ordinal, AlgNode parent ) { if ( p instanceof Modify ) { newAccess = Mode.WRITE_ACCESS; if ( RuntimeConfig.FOREIGN_KEY_ENFORCEMENT.getBoolean() ) { - 
extractWriteConstraints( (LogicalEntity) table.getTable() ); + extractWriteConstraints( (LogicalEntity) table.getEntity() ); } } else { newAccess = Mode.READ_ACCESS; @@ -270,7 +271,12 @@ public void visit( AlgNode p, int ordinal, AlgNode parent ) { if ( accessedPartitions.containsKey( p.getId() ) ) { relevantPartitions = accessedPartitions.get( p.getId() ); } else if ( table.getCatalogEntity() != null ) { - relevantPartitions = table.getCatalogEntity().unwrap( CatalogTable.class ).partitionProperty.partitionIds; + if ( table.getCatalogEntity().namespaceType == NamespaceType.RELATIONAL ) { + relevantPartitions = table.getCatalogEntity().unwrap( CatalogTable.class ).partitionProperty.partitionIds; + } else { + relevantPartitions = List.of(); + } + } else { relevantPartitions = List.of(); } diff --git a/dbms/src/main/java/org/polypheny/db/view/MaterializedViewManagerImpl.java b/dbms/src/main/java/org/polypheny/db/view/MaterializedViewManagerImpl.java index d4fe0f0f03..9e16ab613b 100644 --- a/dbms/src/main/java/org/polypheny/db/view/MaterializedViewManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/view/MaterializedViewManagerImpl.java @@ -184,7 +184,7 @@ public void addTables( Transaction transaction, List tableNames ) { updateCandidates.put( transaction.getXid(), id ); } } catch ( UnknownTableException e ) { - throw new RuntimeException( "Not possible to getTable to update which Tables were changed.", e ); + throw new RuntimeException( "Unable to get the entity to determine which tables were changed.", e ); } } } diff --git a/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/algebra/CottontailTableModify.java b/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/algebra/CottontailTableModify.java index 0cac3ab142..df2df8fe07 100644 --- a/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/algebra/CottontailTableModify.java +++ b/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/algebra/CottontailTableModify.java @@ -93,7 +93,7 @@ public AlgNode copy( AlgTraitSet traitSet, List inputs ) { return new CottontailTableModify( getCluster(), traitSet, - getTable(), + getEntity(), getCatalogReader(), AbstractAlgNode.sole( inputs ), getOperation(), diff --git a/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/rules/CottontailTableModificationRule.java b/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/rules/CottontailTableModificationRule.java index 7cabc6b275..7c4a6b15ab 100644 --- a/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/rules/CottontailTableModificationRule.java +++ b/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/rules/CottontailTableModificationRule.java @@ -47,11 +47,11 @@ private static boolean supports( Modify modify ) { @Override public boolean matches( AlgOptRuleCall call ) { final Modify modify = call.alg( 0 ); - if ( modify.getTable().unwrap( CottontailEntity.class ) == null ) { + if ( modify.getEntity().unwrap( CottontailEntity.class ) == null ) { return false; } - if ( !modify.getTable().unwrap( CottontailEntity.class ).getUnderlyingConvention().equals( this.out ) ) { + if ( !modify.getEntity().unwrap( CottontailEntity.class ).getUnderlyingConvention().equals( this.out ) ) { return false; } return modify.getOperation() != Operation.MERGE; @@ -62,12 +62,12 @@ public boolean matches( AlgOptRuleCall call ) { public AlgNode convert( AlgNode alg ) { final Modify
modify = (Modify) alg; - final ModifiableEntity modifiableTable = modify.getTable().unwrap( ModifiableEntity.class ); + final ModifiableEntity modifiableTable = modify.getEntity().unwrap( ModifiableEntity.class ); if ( modifiableTable == null ) { return null; } - if ( modify.getTable().unwrap( CottontailEntity.class ) == null ) { + if ( modify.getEntity().unwrap( CottontailEntity.class ) == null ) { return null; } @@ -76,7 +76,7 @@ public AlgNode convert( AlgNode alg ) { return new CottontailTableModify( modify.getCluster(), traitSet, - modify.getTable(), + modify.getEntity(), modify.getCatalogReader(), AlgOptRule.convert( modify.getInput(), traitSet ), modify.getOperation(), diff --git a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvProjectScanRule.java b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvProjectScanRule.java index 38ed880b6b..5a12ed1da1 100644 --- a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvProjectScanRule.java +++ b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvProjectScanRule.java @@ -33,6 +33,7 @@ package org.polypheny.db.adapter.csv; +import java.util.List; import org.polypheny.db.algebra.core.AlgFactories; import org.polypheny.db.algebra.logical.relational.LogicalProject; import org.polypheny.db.plan.AlgOptRule; @@ -41,8 +42,6 @@ import org.polypheny.db.rex.RexNode; import org.polypheny.db.tools.AlgBuilderFactory; -import java.util.List; - /** * Planner rule that projects from a {@link CsvScan} scan just the columns needed to satisfy a projection. If the @@ -77,7 +76,7 @@ public void onMatch( AlgOptRuleCall call ) { return; } call.transformTo( - new CsvScan( scan.getCluster(), scan.getTable(), scan.csvTable, fields ) ); + new CsvScan( scan.getCluster(), scan.getEntity(), scan.csvTable, fields ) ); } diff --git a/plugins/druid-adapter/src/main/java/org/polypheny/db/adapter/druid/DruidQuery.java b/plugins/druid-adapter/src/main/java/org/polypheny/db/adapter/druid/DruidQuery.java index 643beb36ab..71ba25a7c8 100644 --- a/plugins/druid-adapter/src/main/java/org/polypheny/db/adapter/druid/DruidQuery.java +++ b/plugins/druid-adapter/src/main/java/org/polypheny/db/adapter/druid/DruidQuery.java @@ -245,7 +245,7 @@ private static DruidQuery create( AlgOptCluster cluster, AlgTraitSet traitSet, A */ public static DruidQuery extendQuery( DruidQuery query, AlgNode r ) { final ImmutableList.Builder builder = ImmutableList.builder(); - return DruidQuery.create( query.getCluster(), r.getTraitSet().replace( query.getConvention() ), query.getTable(), query.druidTable, query.intervals, builder.addAll( query.algs ).add( r ).build(), query.getOperatorConversionMap() ); + return DruidQuery.create( query.getCluster(), r.getTraitSet().replace( query.getConvention() ), query.getEntity(), query.druidTable, query.intervals, builder.addAll( query.algs ).add( r ).build(), query.getOperatorConversionMap() ); } @@ -253,7 +253,7 @@ public static DruidQuery extendQuery( DruidQuery query, AlgNode r ) { * Extends a DruidQuery. 
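* The query is recreated with the given intervals while keeping its cluster, trait set, entity, and internal stack of algs.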
*/ public static DruidQuery extendQuery( DruidQuery query, List intervals ) { - return DruidQuery.create( query.getCluster(), query.getTraitSet(), query.getTable(), query.druidTable, intervals, query.algs, query.getOperatorConversionMap() ); + return DruidQuery.create( query.getCluster(), query.getTraitSet(), query.getEntity(), query.druidTable, intervals, query.algs, query.getOperatorConversionMap() ); } @@ -461,7 +461,7 @@ public boolean isValid( Litmus litmus, Context context ) { if ( !(r instanceof Scan) ) { return litmus.fail( "first alg must be Scan, was ", r ); } - if ( r.getTable() != table ) { + if ( r.getEntity() != table ) { return litmus.fail( "first alg must be based on the same table" ); } } else { @@ -525,7 +525,7 @@ public AlgNode getTopNode() { @Override - public AlgOptEntity getTable() { + public AlgOptEntity getEntity() { return table; } @@ -540,7 +540,7 @@ public AlgWriter explainTerms( AlgWriter pw ) { for ( AlgNode alg : algs ) { if ( alg instanceof Scan ) { Scan scan = (Scan) alg; - pw.item( "table", scan.getTable().getCatalogEntity().id ); + pw.item( "table", scan.getEntity().getCatalogEntity().id ); pw.item( "intervals", intervals ); } else if ( alg instanceof Filter ) { pw.item( "filter", ((Filter) alg).getCondition() ); diff --git a/plugins/druid-adapter/src/main/java/org/polypheny/db/adapter/druid/DruidRules.java b/plugins/druid-adapter/src/main/java/org/polypheny/db/adapter/druid/DruidRules.java index 18567e34ad..c65d33bcde 100644 --- a/plugins/druid-adapter/src/main/java/org/polypheny/db/adapter/druid/DruidRules.java +++ b/plugins/druid-adapter/src/main/java/org/polypheny/db/adapter/druid/DruidRules.java @@ -292,7 +292,7 @@ public void onMatch( AlgOptRuleCall call ) { return; } - if ( DruidQuery.computeProjectAsScan( project, query.getTable().getRowType(), query ) != null ) { + if ( DruidQuery.computeProjectAsScan( project, query.getEntity().getRowType(), query ) != null ) { // All expressions can be pushed to Druid in their entirety.
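// Rebase the project onto the query's topmost node and extend the Druid query with it.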
final AlgNode newProject = project.copy( project.getTraitSet(), ImmutableList.of( Util.last( query.algs ) ) ); AlgNode newNode = DruidQuery.extendQuery( query, newProject ); @@ -603,7 +603,7 @@ private DruidQuery optimizeFilteredAggregations( AlgOptRuleCall call, DruidQuery List newNodes = constructNewNodes( query.algs, addNewFilter, startIndex, filter, project, aggregate ); - return DruidQuery.create( query.getCluster(), aggregate.getTraitSet().replace( query.getConvention() ), query.getTable(), query.druidTable, newNodes ); + return DruidQuery.create( query.getCluster(), aggregate.getTraitSet().replace( query.getConvention() ), query.getEntity(), query.druidTable, newNodes ); } diff --git a/plugins/druid-adapter/src/test/java/org/polypheny/db/test/DruidAdapterIT.java b/plugins/druid-adapter/src/test/java/org/polypheny/db/test/DruidAdapterIT.java index b87c9b1e1f..87cbe22896 100644 --- a/plugins/druid-adapter/src/test/java/org/polypheny/db/test/DruidAdapterIT.java +++ b/plugins/druid-adapter/src/test/java/org/polypheny/db/test/DruidAdapterIT.java @@ -3117,7 +3117,7 @@ public class DruidAdapterIT { // @Test // public void testTableMapReused() { // AbstractSchema schema = new DruidSchema( "http://localhost:8082", "http://localhost:8081", true ); -// assertSame( schema.getTable( "wikiticker" ), schema.getTable( "wikiticker" ) ); +// assertSame( schema.getEntity( "wikiticker" ), schema.getEntity( "wikiticker" ) ); // } // // diff --git a/plugins/druid-adapter/src/test/java/org/polypheny/db/test/DruidAdapterIT2.java b/plugins/druid-adapter/src/test/java/org/polypheny/db/test/DruidAdapterIT2.java index 8ccfec8940..3c8df50d87 100644 --- a/plugins/druid-adapter/src/test/java/org/polypheny/db/test/DruidAdapterIT2.java +++ b/plugins/druid-adapter/src/test/java/org/polypheny/db/test/DruidAdapterIT2.java @@ -2717,7 +2717,7 @@ public class DruidAdapterIT2 { // @Test // public void testTableMapReused() { // AbstractSchema schema = new DruidSchema( "http://localhost:8082", "http://localhost:8081", true ); -// assertSame( schema.getTable( "wikiticker" ), schema.getTable( "wikiticker" ) ); +// assertSame( schema.getEntity( "wikiticker" ), schema.getEntity( "wikiticker" ) ); // } // // diff --git a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/algebra/FileRules.java b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/algebra/FileRules.java index e607b749de..4fcff08d4a 100644 --- a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/algebra/FileRules.java +++ b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/algebra/FileRules.java @@ -90,7 +90,7 @@ private static boolean supports( Modify node ) { @Override public boolean matches( AlgOptRuleCall call ) { final Modify modify = call.alg( 0 ); - if ( modify.getTable().unwrap( FileTranslatableEntity.class ) == null ) { + if ( modify.getEntity().unwrap( FileTranslatableEntity.class ) == null ) { // todo insert from select is not correctly implemented return false; } @@ -99,7 +99,7 @@ public boolean matches( AlgOptRuleCall call ) { return false; } - FileTranslatableEntity table = modify.getTable().unwrap( FileTranslatableEntity.class ); + FileTranslatableEntity table = modify.getEntity().unwrap( FileTranslatableEntity.class ); convention.setModification( true ); return true; } @@ -108,7 +108,7 @@ public boolean matches( AlgOptRuleCall call ) { @Override public AlgNode convert( AlgNode alg ) { final Modify modify = (Modify) alg; - final ModifiableEntity modifiableTable = modify.getTable().unwrap( 
ModifiableEntity.class ); + final ModifiableEntity modifiableTable = modify.getEntity().unwrap( ModifiableEntity.class ); if ( modifiableTable == null ) { log.warn( "Returning null during conversion" ); @@ -118,7 +118,7 @@ public AlgNode convert( AlgNode alg ) { return new FileTableModify( modify.getCluster(), traitSet, - modify.getTable(), + modify.getEntity(), modify.getCatalogReader(), AlgOptRule.convert( modify.getInput(), traitSet ), modify.getOperation(), diff --git a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/algebra/FileTableModify.java b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/algebra/FileTableModify.java index 63f8b6b629..fd0771c547 100644 --- a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/algebra/FileTableModify.java +++ b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/algebra/FileTableModify.java @@ -57,7 +57,7 @@ public AlgNode copy( AlgTraitSet traitSet, List inputs ) { return new FileTableModify( getCluster(), traitSet, - getTable(), + getEntity(), getCatalogReader(), AbstractAlgNode.sole( inputs ), getOperation(), @@ -77,7 +77,7 @@ public void register( AlgOptPlanner planner ) { public void implement( final FileImplementor implementor ) { setOperation( implementor );//do it first, so children know that we have an insert/update/delete implementor.visitChild( 0, getInput() ); - FileTranslatableEntity fileTable = (FileTranslatableEntity) getTable().getTable(); + FileTranslatableEntity fileTable = (FileTranslatableEntity) getEntity().getEntity(); implementor.setFileTable( fileTable ); if ( getOperation() == Operation.UPDATE ) { if ( getSourceExpressionList() != null ) { diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcImplementor.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcImplementor.java index d2721e8a05..7b7658d69d 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcImplementor.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcImplementor.java @@ -35,11 +35,12 @@ import com.google.common.collect.ImmutableList; import java.util.Arrays; -import java.util.List; import org.polypheny.db.adapter.java.JavaTypeFactory; import org.polypheny.db.adapter.jdbc.rel2sql.AlgToSqlConverter; import org.polypheny.db.algebra.AlgNode; +import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; +import org.polypheny.db.catalog.entity.CatalogTable; import org.polypheny.db.languages.ParserPos; import org.polypheny.db.sql.language.SqlDialect; import org.polypheny.db.sql.language.SqlIdentifier; @@ -81,20 +82,13 @@ public SqlIdentifier getPhysicalTableName( CatalogPartitionPlacement placement ) @Override - public SqlIdentifier getPhysicalColumnName( List tableNames, String columnName ) { - if ( tableNames.size() == 1 ) { - // only column name - return schema.getTableMap().get( tableNames.get( 0 ) ).physicalColumnName( columnName ); - } else if ( tableNames.size() == 2 ) { - // table name and column name - JdbcEntity table = schema.getTableMap().get( tableNames.get( 1 ) ); - if ( table.hasPhysicalColumnName( columnName ) ) { - return schema.getTableMap().get( tableNames.get( 1 ) ).physicalColumnName( columnName ); - } else { - return new SqlIdentifier( "_" + columnName, ParserPos.ZERO ); - } + public SqlIdentifier getPhysicalColumnName( CatalogPartitionPlacement placement, String columnName ) { + CatalogTable 
catalogTable = Catalog.getInstance().getTable( placement.tableId ); + JdbcEntity table = schema.getTableMap().get( catalogTable.name + "_" + placement.partitionId ); + if ( table.hasPhysicalColumnName( columnName ) ) { + return table.physicalColumnName( columnName ); } else { - throw new RuntimeException( "Unexpected number of names: " + tableNames.size() ); + return new SqlIdentifier( "_" + columnName, ParserPos.ZERO ); } } diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcRules.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcRules.java index eff132060a..f2b6de603e 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcRules.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcRules.java @@ -1007,8 +1007,8 @@ private JdbcTableModificationRule( JdbcConvention out, AlgBuilderFactory algBuil @Override public boolean matches( AlgOptRuleCall call ) { final Modify modify = call.alg( 0 ); - if ( modify.getTable().unwrap( JdbcEntity.class ) != null ) { - JdbcEntity table = modify.getTable().unwrap( JdbcEntity.class ); + if ( modify.getEntity().unwrap( JdbcEntity.class ) != null ) { + JdbcEntity table = modify.getEntity().unwrap( JdbcEntity.class ); if ( out.getJdbcSchema() == table.getSchema() ) { return true; } @@ -1020,7 +1020,7 @@ public boolean matches( AlgOptRuleCall call ) { @Override public AlgNode convert( AlgNode alg ) { final Modify modify = (Modify) alg; - final ModifiableEntity modifiableTable = modify.getTable().unwrap( ModifiableEntity.class ); + final ModifiableEntity modifiableTable = modify.getEntity().unwrap( ModifiableEntity.class ); if ( modifiableTable == null ) { return null; } @@ -1028,7 +1028,7 @@ public AlgNode convert( AlgNode alg ) { return new JdbcTableModify( modify.getCluster(), traitSet, - modify.getTable(), + modify.getEntity(), modify.getCatalogReader(), AlgOptRule.convert( modify.getInput(), traitSet ), modify.getOperation(), @@ -1084,7 +1084,7 @@ public AlgNode copy( AlgTraitSet traitSet, List inputs ) { return new JdbcTableModify( getCluster(), traitSet, - getTable(), + getEntity(), getCatalogReader(), AbstractAlgNode.sole( inputs ), getOperation(), diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/rel2sql/AlgToSqlConverter.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/rel2sql/AlgToSqlConverter.java index 5c1a406f8e..2acbee33ba 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/rel2sql/AlgToSqlConverter.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/rel2sql/AlgToSqlConverter.java @@ -275,7 +275,7 @@ public Result visit( Aggregate e ) { */ public Result visit( Scan e ) { return result( - new SqlIdentifier( List.of( e.getTable().unwrap( CatalogTable.class ).getNamespaceName(), e.getTable().getCatalogEntity().name ), ParserPos.ZERO ), + new SqlIdentifier( List.of( e.getEntity().unwrap( CatalogTable.class ).getNamespaceName(), e.getEntity().getCatalogEntity().name ), ParserPos.ZERO ), ImmutableList.of( Clause.FROM ), e, null ); @@ -453,7 +453,7 @@ public Result visit( Modify modify ) { final Context context = aliasContext( pairs, false ); // Target Table Name - final SqlIdentifier sqlTargetTable = getPhysicalTableName( modify.getTable().getPartitionPlacement() ); + final SqlIdentifier sqlTargetTable = getPhysicalTableName( modify.getEntity().getPartitionPlacement() 
); switch ( modify.getOperation() ) { case INSERT: { @@ -466,7 +466,7 @@ sqlTargetTable, sqlSource, physicalIdentifierList( - List.of( modify.getTable().getCatalogEntity().unwrap( CatalogTable.class ).getNamespaceName(), modify.getTable().getCatalogEntity().name ), + modify.getEntity().getPartitionPlacement(), modify.getInput().getRowType().getFieldNames() ) ); return result( sqlInsert, ImmutableList.of(), modify, null ); } @@ -475,7 +475,7 @@ final SqlUpdate sqlUpdate = new SqlUpdate( POS, sqlTargetTable, - physicalIdentifierList( List.of( modify.getTable().getCatalogEntity().unwrap( CatalogTable.class ).getNamespaceName(), modify.getTable().getCatalogEntity().name ), modify.getUpdateColumnList() ), + physicalIdentifierList( modify.getEntity().getPartitionPlacement(), modify.getUpdateColumnList() ), exprList( context, modify.getSourceExpressionList() ), ((SqlSelect) input.node).getWhere(), input.asSelect(), @@ -518,8 +518,8 @@ private SqlNodeList identifierList( List names ) { /** * Converts a list of column names to a list of single-part {@link SqlIdentifier}s. */ - private SqlNodeList physicalIdentifierList( List tableName, List columnNames ) { - return new SqlNodeList( columnNames.stream().map( columnName -> getPhysicalColumnName( tableName, columnName ) ).collect( Collectors.toList() ), POS ); + private SqlNodeList physicalIdentifierList( CatalogPartitionPlacement partitionPlacement, List columnNames ) { + return new SqlNodeList( columnNames.stream().map( columnName -> getPhysicalColumnName( partitionPlacement, columnName ) ).collect( Collectors.toList() ), POS ); } @@ -663,7 +663,7 @@ private void parseCorrelTable( AlgNode algNode, Result x ) { public abstract SqlIdentifier getPhysicalTableName( CatalogPartitionPlacement tableName ); - public abstract SqlIdentifier getPhysicalColumnName( List tableName, String columnName ); + public abstract SqlIdentifier getPhysicalColumnName( CatalogPartitionPlacement placement, String columnName ); /** @@ -700,7 +700,7 @@ public SqlIdentifier getPhysicalTableName( CatalogPartitionPlacement placement ) @Override - public SqlIdentifier getPhysicalColumnName( List tableName, String columnName ) { + public SqlIdentifier getPhysicalColumnName( CatalogPartitionPlacement placement, String columnName ) { return new SqlIdentifier( columnName, POS ); } diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/rel2sql/SqlImplementor.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/rel2sql/SqlImplementor.java index b8c8001d16..edde48c4ea 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/rel2sql/SqlImplementor.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/rel2sql/SqlImplementor.java @@ -194,13 +194,13 @@ public Result setOpToSql( SqlSetOperator operator, AlgNode alg ) { final Result result = visitChild( input.i, input.e ); if ( node == null ) { if ( input.getValue() instanceof JdbcScan ) { - node = result.asSelect( ((JdbcEntity) ((AlgOptEntityImpl) input.getValue().getTable()).getEntity()).getNodeList() ); + node = result.asSelect( ((JdbcEntity) ((AlgOptEntityImpl) input.getValue().getEntity()).getEntity()).getNodeList() ); } else { node = result.asSelect(); } } else { if ( input.getValue() instanceof JdbcScan ) { - node = (SqlNode) operator.createCall( POS, node, result.asSelect( ((JdbcEntity) ((AlgOptEntityImpl)
input.getValue().getTable()).getEntity()).getNodeList() ) ); + node = (SqlNode) operator.createCall( POS, node, result.asSelect( ((JdbcEntity) ((AlgOptEntityImpl) input.getValue().getEntity()).getEntity()).getNodeList() ) ); } else { node = (SqlNode) operator.createCall( POS, node, result.asSelect() ); } @@ -1176,7 +1176,7 @@ && hasNestedAggregations( (LogicalAggregate) alg ) ) { select = subSelect(); } else { if ( explicitColumnNames && alg.getInputs().size() == 1 && alg.getInput( 0 ) instanceof JdbcScan ) { - select = asSelect( ((JdbcEntity) ((AlgOptEntityImpl) alg.getInput( 0 ).getTable()).getEntity()).getNodeList() ); + select = asSelect( ((JdbcEntity) ((AlgOptEntityImpl) alg.getInput( 0 ).getEntity()).getEntity()).getNodeList() ); } else { select = asSelect(); } diff --git a/plugins/jdbc-adapter-framework/src/test/java/org/polypheny/db/adapter/jdbc/rel2sql/PlannerTest.java b/plugins/jdbc-adapter-framework/src/test/java/org/polypheny/db/adapter/jdbc/rel2sql/PlannerTest.java index 109ce8edb5..d24d61616e 100644 --- a/plugins/jdbc-adapter-framework/src/test/java/org/polypheny/db/adapter/jdbc/rel2sql/PlannerTest.java +++ b/plugins/jdbc-adapter-framework/src/test/java/org/polypheny/db/adapter/jdbc/rel2sql/PlannerTest.java @@ -170,7 +170,7 @@ private MockJdbcTableRule( JdbcConvention out ) { @Override public AlgNode convert( AlgNode alg ) { final EnumerableScan scan = (EnumerableScan) alg; - return new PlannerTest.MockJdbcScan( scan.getCluster(), scan.getTable(), (JdbcConvention) getOutConvention() ); + return new PlannerTest.MockJdbcScan( scan.getCluster(), scan.getEntity(), (JdbcConvention) getOutConvention() ); } } diff --git a/plugins/mapdb-catalog/src/test/java/org/polypheny/db/test/CatalogTest.java b/plugins/mapdb-catalog/src/test/java/org/polypheny/db/test/CatalogTest.java index 6cdd9bea5c..9ad2aafa72 100644 --- a/plugins/mapdb-catalog/src/test/java/org/polypheny/db/test/CatalogTest.java +++ b/plugins/mapdb-catalog/src/test/java/org/polypheny/db/test/CatalogTest.java @@ -359,7 +359,7 @@ public void testKey() throws GenericCatalogException { assertTrue( catalog.getPrimaryKey( catalog.getTable( tableId ).primaryKey ).columnIds.contains( columnId1 ) ); //catalog.deletePrimaryKey( tableId ); - //assertNull( catalog.getTable( tableId ).primaryKey ); + //assertNull( catalog.getEntity( tableId ).primaryKey ); catalog.addPrimaryKey( tableId, Arrays.asList( columnId1, columnId2 ) ); diff --git a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoAlg.java b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoAlg.java index 3d05eceac8..cc0a439ffa 100644 --- a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoAlg.java +++ b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoAlg.java @@ -54,6 +54,7 @@ import org.polypheny.db.algebra.core.Scan; import org.polypheny.db.algebra.logical.relational.LogicalProject; import org.polypheny.db.algebra.type.AlgRecordType; +import org.polypheny.db.catalog.entity.CatalogTable; import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.Convention; import org.polypheny.db.util.Pair; @@ -143,9 +144,9 @@ public void setStaticRowType( AlgRecordType staticRowType ) { public String getPhysicalName( String name ) { - int index = mongoEntity.getCatalogTable().getColumnNames().indexOf( name ); + int index = mongoEntity.getCatalogEntity().unwrap( CatalogTable.class ).getColumnNames().indexOf( name ); if ( index != -1 ) { - return 
MongoStore.getPhysicalColumnName( name, mongoEntity.getCatalogTable().fieldIds.get( index ) ); + return MongoStore.getPhysicalColumnName( name, mongoEntity.getCatalogEntity().unwrap( CatalogTable.class ).fieldIds.get( index ) ); } throw new RuntimeException( "This column is not part of the table." ); } diff --git a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoRules.java b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoRules.java index 402472f1d8..dfd84d9384 100644 --- a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoRules.java +++ b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoRules.java @@ -914,11 +914,11 @@ public AlgNode visit( AlgNode other ) { @Override public AlgNode convert( AlgNode alg ) { final Modify modify = (Modify) alg; - final ModifiableEntity modifiableTable = modify.getTable().unwrap( ModifiableEntity.class ); + final ModifiableEntity modifiableTable = modify.getEntity().unwrap( ModifiableEntity.class ); if ( modifiableTable == null ) { return null; } - if ( modify.getTable().unwrap( MongoEntity.class ) == null ) { + if ( modify.getEntity().unwrap( MongoEntity.class ) == null ) { return null; } @@ -926,7 +926,7 @@ public AlgNode convert( AlgNode alg ) { return new MongoEntityModify( modify.getCluster(), traitSet, - modify.getTable(), + modify.getEntity(), modify.getCatalogReader(), AlgOptRule.convert( modify.getInput(), traitSet ), modify.getOperation(), @@ -1008,7 +1008,7 @@ public AlgNode copy( AlgTraitSet traitSet, List inputs ) { return new MongoEntityModify( getCluster(), traitSet, - getTable(), + getEntity(), getCatalogReader(), AbstractAlgNode.sole( inputs ), getOperation(), @@ -1021,7 +1021,7 @@ public AlgNode copy( AlgTraitSet traitSet, List inputs ) { @Override public void implement( Implementor implementor ) { implementor.setDML( true ); - Entity preEntity = table.getTable(); + Entity preEntity = table.getEntity(); this.implementor = implementor; if ( !(preEntity instanceof MongoEntity) ) { @@ -1302,10 +1302,10 @@ private void handlePreparedInsert( Implementor implementor, MongoProject input ) } BsonDocument doc = new BsonDocument(); - CatalogTable catalogTable = implementor.mongoEntity.getCatalogTable(); + CatalogTable catalogTable = implementor.mongoEntity.getCatalogEntity().unwrap( CatalogTable.class ); GridFSBucket bucket = implementor.mongoEntity.getMongoSchema().getBucket(); //noinspection AssertWithSideEffects - assert input.getRowType().getFieldCount() == this.getTable().getRowType().getFieldCount(); + assert input.getRowType().getFieldCount() == this.getEntity().getRowType().getFieldCount(); Map physicalMapping; if ( input.getInput() instanceof MongoValues ) { physicalMapping = getPhysicalMap( input.getRowType().getFieldList(), catalogTable ); @@ -1326,7 +1326,7 @@ private void handlePreparedInsert( Implementor implementor, MongoProject input ) doc.append( getPhysicalName( input, catalogTable, pos ), BsonUtil.getAsBson( (RexLiteral) rexNode, bucket ) ); } else if ( rexNode instanceof RexCall ) { PolyType type = table - .getTable() + .getEntity() .getRowType( getCluster().getTypeFactory() ) .getFieldList() .get( pos ) @@ -1395,7 +1395,7 @@ private BsonValue getBsonArray( RexCall el, PolyType type, GridFSBucket bucket ) private void handleDirectInsert( Implementor implementor, MongoValues values ) { List docs = new ArrayList<>(); - CatalogTable catalogTable = implementor.mongoEntity.getCatalogTable(); + CatalogTable catalogTable = 
implementor.mongoEntity.getCatalogEntity().unwrap( CatalogTable.class ); GridFSBucket bucket = implementor.mongoEntity.getMongoSchema().getBucket(); AlgDataType valRowType = rowType; diff --git a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql2alg/MqlToAlgConverter.java b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql2alg/MqlToAlgConverter.java index 21c217d356..64d54c5ab8 100644 --- a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql2alg/MqlToAlgConverter.java +++ b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql2alg/MqlToAlgConverter.java @@ -281,7 +281,7 @@ public AlgRoot convert( MqlCollectionStatement query ) { AlgNode node; - if ( entity.getTable().getNamespaceType() == NamespaceType.RELATIONAL ) { + if ( entity.getCatalogEntity().namespaceType == NamespaceType.RELATIONAL ) { _dataExists = false; } @@ -333,9 +333,9 @@ private AlgOptEntity getEntity( MqlCollectionStatement query, String dbSchemaNam PreparingEntity table = catalogReader.getTable( names ); - if ( table == null ) { + if ( table == null || table.getEntity() == null ) { return catalogReader.getCollection( names ); - } else if ( table.getTable().getNamespaceType() == NamespaceType.GRAPH ) { + } else if ( table.getCatalogEntity().namespaceType == NamespaceType.GRAPH ) { final AlgDataTypeFactory typeFactory = new PolyTypeFactoryImpl( AlgDataTypeSystem.DEFAULT ); diff --git a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/NeoRelationalImplementor.java b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/NeoRelationalImplementor.java index 91fdbced41..f2f5c49c5e 100644 --- a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/NeoRelationalImplementor.java +++ b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/NeoRelationalImplementor.java @@ -111,7 +111,7 @@ public void addAll( List statements ) { public void setTable( AlgOptEntity table ) { this.table = table; - this.entity = (NeoEntity) table.getTable(); + this.entity = (NeoEntity) table.getEntity(); } diff --git a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/rules/NeoRules.java b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/rules/NeoRules.java index 7413d06105..60aeae6601 100644 --- a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/rules/NeoRules.java +++ b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/rules/NeoRules.java @@ -81,7 +81,7 @@ public AlgNode convert( AlgNode alg ) { return new NeoModify( modify.getCluster(), modify.getTraitSet().replace( NeoConvention.INSTANCE ), - modify.getTable(), + modify.getEntity(), modify.getCatalogReader(), convert( modify.getInput(), NeoConvention.INSTANCE ), modify.getOperation(), diff --git a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/rules/relational/NeoModify.java b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/rules/relational/NeoModify.java index 16793a3353..4025eb9d5e 100644 --- a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/rules/relational/NeoModify.java +++ b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/rules/relational/NeoModify.java @@ -54,8 +54,8 @@ public NeoModify( AlgOptCluster cluster, AlgTraitSet traitSet, AlgOptEntity tabl @Override public void implement( NeoRelationalImplementor implementor ) { - assert getTable() != null; - implementor.setTable( getTable() ); + assert getEntity() != null; + 
implementor.setTable( getEntity() ); implementor.setDml( true ); implementor.visitChild( 0, getInput() ); diff --git a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/rules/relational/NeoScan.java b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/rules/relational/NeoScan.java index 4de637c958..ae33d7a479 100644 --- a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/rules/relational/NeoScan.java +++ b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/rules/relational/NeoScan.java @@ -51,7 +51,7 @@ public NeoScan( AlgOptCluster cluster, AlgTraitSet traitSet, AlgOptEntity table, @Override public void implement( NeoRelationalImplementor implementor ) { - if ( implementor.getTable() != null && !Objects.equals( table.getTable().getId(), implementor.getTable().getTable().getId() ) ) { + if ( implementor.getTable() != null && !Objects.equals( table.getEntity().getId(), implementor.getTable().getEntity().getId() ) ) { handleInsertFromOther( implementor ); return; } @@ -73,13 +73,13 @@ public void implement( NeoRelationalImplementor implementor ) { private void handleInsertFromOther( NeoRelationalImplementor implementor ) { - implementor.selectFromTable = (NeoEntity) table.getTable(); + implementor.selectFromTable = (NeoEntity) table.getEntity(); } @Override public AlgNode copy( AlgTraitSet traitSet, List inputs ) { - return new NeoScan( getCluster(), traitSet, getTable(), neoEntity ); + return new NeoScan( getCluster(), traitSet, this.getEntity(), neoEntity ); } } diff --git a/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigAggregate.java b/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigAggregate.java index 17556c53d1..a61397440f 100644 --- a/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigAggregate.java +++ b/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigAggregate.java @@ -96,8 +96,8 @@ private String getPigAggregateStatement( Implementor implementor ) { * Override this method so it looks down the tree to find the table this node is acting on. */ @Override - public AlgOptEntity getTable() { - return getInput().getTable(); + public AlgOptEntity getEntity() { + return getInput().getEntity(); } diff --git a/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigAlg.java b/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigAlg.java index 2e230f97fb..794cfc0818 100644 --- a/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigAlg.java +++ b/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigAlg.java @@ -74,7 +74,7 @@ class Implementor { public String getTableName( AlgNode input ) { - return input.getTable().getCatalogEntity().name; + return input.getEntity().getCatalogEntity().name; } diff --git a/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigFilter.java b/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigFilter.java index ce335eb42d..62ef4111ab 100644 --- a/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigFilter.java +++ b/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigFilter.java @@ -83,8 +83,8 @@ public void implement( Implementor implementor ) { * Override this method so it looks down the tree to find the table this node is acting on. 
*/ @Override - public AlgOptEntity getTable() { - return getInput().getTable(); + public AlgOptEntity getEntity() { + return getInput().getEntity(); } diff --git a/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigJoin.java b/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigJoin.java index 1d50debc17..6f527cd310 100644 --- a/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigJoin.java +++ b/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigJoin.java @@ -82,8 +82,8 @@ public void implement( Implementor implementor ) { * The Pig alias of the joined relation will have the same name as one from the left side of the join. */ @Override - public AlgOptEntity getTable() { - return getLeft().getTable(); + public AlgOptEntity getEntity() { + return getLeft().getEntity(); } diff --git a/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigProject.java b/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigProject.java index 35a4bdc323..cb72573c14 100644 --- a/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigProject.java +++ b/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigProject.java @@ -66,7 +66,7 @@ public Project copy( AlgTraitSet traitSet, AlgNode input, List projects @Override public void implement( Implementor implementor ) { - System.out.println( getTable() ); + System.out.println( getEntity() ); } @@ -74,8 +74,8 @@ public void implement( Implementor implementor ) { * Override this method so it looks down the tree to find the table this node is acting on. */ @Override - public AlgOptEntity getTable() { - return getInput().getTable(); + public AlgOptEntity getEntity() { + return getInput().getEntity(); } } diff --git a/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigRules.java b/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigRules.java index 69c31cc524..cc024b072f 100644 --- a/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigRules.java +++ b/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigRules.java @@ -106,7 +106,7 @@ private PigScanRule() { public AlgNode convert( AlgNode alg ) { final LogicalRelScan scan = (LogicalRelScan) alg; final AlgTraitSet traitSet = scan.getTraitSet().replace( PigAlg.CONVENTION ); - return new PigScan( alg.getCluster(), traitSet, scan.getTable() ); + return new PigScan( alg.getCluster(), traitSet, scan.getEntity() ); } } diff --git a/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigScan.java b/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigScan.java index 7f2a10ace6..b32c85a40f 100644 --- a/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigScan.java +++ b/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigScan.java @@ -74,14 +74,14 @@ public void implement( Implementor implementor ) { private PigEntity getPigTable( String name ) { - final PolyphenyDbSchema schema = getTable().unwrap( PolyphenyDbSchema.class ); + final PolyphenyDbSchema schema = this.getEntity().unwrap( PolyphenyDbSchema.class ); return (PigEntity) schema.getTable( name ).getTable(); } private String getSchemaForPigStatement( Implementor implementor ) { - final List fieldNamesAndTypes = new ArrayList<>( getTable().getRowType().getFieldList().size() ); - for ( AlgDataTypeField f : getTable().getRowType().getFieldList() ) { + final List fieldNamesAndTypes = new ArrayList<>( this.getEntity().getRowType().getFieldList().size() ); + for ( 
AlgDataTypeField f : this.getEntity().getRowType().getFieldList() ) { fieldNamesAndTypes.add( getConcatenatedFieldNameAndTypeForPigSchema( implementor, f ) ); } return String.join( ", ", fieldNamesAndTypes ); diff --git a/plugins/pig-language/src/main/java/org/polypheny/db/tools/PigAlgBuilder.java b/plugins/pig-language/src/main/java/org/polypheny/db/tools/PigAlgBuilder.java index d7f3e474b6..94327885b2 100644 --- a/plugins/pig-language/src/main/java/org/polypheny/db/tools/PigAlgBuilder.java +++ b/plugins/pig-language/src/main/java/org/polypheny/db/tools/PigAlgBuilder.java @@ -204,7 +204,7 @@ String getAlias() { } else { AlgNode top = peek(); if ( top instanceof Scan ) { - return top.getTable().getCatalogEntity().name; + return top.getEntity().getCatalogEntity().name; } else { return null; } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateTable.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateTable.java index 08efa10ed4..6dd7b0934d 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateTable.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateTable.java @@ -209,7 +209,7 @@ public void execute( Context context, Statement statement, QueryParameters param long schemaId; try { - // Cannot use getTable() here since table does not yet exist + // Cannot use getEntity() here since table does not yet exist if ( name.names.size() == 3 ) { // DatabaseName.SchemaName.TableName schemaId = catalog.getSchema( name.names.get( 0 ), name.names.get( 1 ) ).id; tableName = name.names.get( 2 ); diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/EmptyScope.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/EmptyScope.java index fc0adf0ca5..9f188b62ae 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/EmptyScope.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/EmptyScope.java @@ -155,7 +155,7 @@ private void resolve_( final PolyphenyDbSchema rootSchema, List names, L if ( table2 == null ) { final AlgOptSchema algOptSchema = validator.catalogReader.unwrap( AlgOptSchema.class ); final AlgDataType rowType = entity.getRowType( validator.typeFactory ); - table2 = AlgOptEntityImpl.create( algOptSchema, rowType, entry, entity.getCatalogTable(), entity.getPartitionPlacement(), null ); + table2 = AlgOptEntityImpl.create( algOptSchema, rowType, entry, entity.getCatalogEntity(), entity.getPartitionPlacement(), null ); } namespace = new TableNamespace( validator, table2 ); resolved.found( namespace, false, null, path, remainingNames ); diff --git a/plugins/sql-language/src/test/java/org/polypheny/db/sql/language/SqlToRelConverterExtendedTest.java b/plugins/sql-language/src/test/java/org/polypheny/db/sql/language/SqlToRelConverterExtendedTest.java index aa0c02f122..63ce37ccf6 100644 --- a/plugins/sql-language/src/test/java/org/polypheny/db/sql/language/SqlToRelConverterExtendedTest.java +++ b/plugins/sql-language/src/test/java/org/polypheny/db/sql/language/SqlToRelConverterExtendedTest.java @@ -65,7 +65,7 @@ public static void foo( AlgNode alg ) { alg.accept( new AlgShuttleImpl() { @Override public AlgNode visit( Scan scan ) { - schemas[0] = scan.getTable().getRelOptSchema(); + schemas[0] = scan.getEntity().getRelOptSchema(); return super.visit( scan ); } } ); diff --git a/webui/src/main/java/org/polypheny/db/webui/crud/LanguageCrud.java 
b/webui/src/main/java/org/polypheny/db/webui/crud/LanguageCrud.java
index 7e969738f7..e8268e0775 100644
--- a/webui/src/main/java/org/polypheny/db/webui/crud/LanguageCrud.java
+++ b/webui/src/main/java/org/polypheny/db/webui/crud/LanguageCrud.java
@@ -261,7 +261,7 @@ private static Result getDocResult( Statement statement, QueryRequest request, S
         header.add( new FieldDef() );

-        return new Result( header.toArray( new FieldDef[0] ), data.toArray( new String[0][] ) )
+        return new Result( header.toArray( new FieldDef[0] ), data.stream().map( d -> d.stream().map( Object::toString ).toArray( String[]::new ) ).toArray( String[][]::new ) )
                 .setNamespaceType( implementation.getNamespaceType() )
                 .setNamespaceName( request.database )
                 .setLanguage( QueryLanguage.from( "mql" ) )

From 506e1f15a41ee2a1f0be9edf2e388561a6f18c88 Mon Sep 17 00:00:00 2001
From: datomo
Date: Thu, 23 Feb 2023 15:06:07 +0100
Subject: [PATCH 019/436] PolySchemaBuilder to CatalogSnapshot

---
 .../common/LogicalConstraintEnforcer.java     |   2 +-
 .../org/polypheny/db/catalog/Catalog.java     |  11 +-
 .../entity/CatalogCollectionPlacement.java    |   3 +-
 .../entity/CatalogColumnPlacement.java        |   3 +
 .../db/catalog/entity/CatalogEntity.java      |  18 +
 .../entity/CatalogEntityPlacement.java        |  31 +
 .../catalog/entity/CatalogGraphPlacement.java |   3 +-
 .../entity/CatalogPartitionPlacement.java     |   6 +-
 .../org/polypheny/db/plan/AlgOptEntity.java   |   4 +-
 .../db/prepare/AlgOptEntityImpl.java          |  63 +-
 .../db/prepare/LixToAlgTranslator.java        |   8 +-
 .../db/prepare/PolyphenyDbCatalogReader.java  |  31 +-
 .../db/schema/AbstractPolyphenyDbSchema.java  | 596 +-----------------
 .../db/schema/PolySchemaBuilder.java          | 342 ++++++++++
 .../db/schema/PolyphenyDbSchema.java          | 238 +------
 .../db/schema/SimplePolyphenyDbSchema.java    | 198 +-----
 .../org/polypheny/db/util/BuiltInMethod.java  |   2 +-
 .../java/org/polypheny/db/util/Triple.java    |  60 ++
 .../org/polypheny/db/catalog/MockCatalog.java |   5 +-
 .../db/catalog/MockCatalogReader.java         |  12 +-
 .../org/polypheny/db/ddl/DdlManagerImpl.java  |  20 +-
 .../db/partition/FrequencyMapImpl.java        |   4 +-
 .../db/processing/AbstractQueryProcessor.java |   2 +-
 .../db/schema/PolySchemaBuilder.java          | 444 -------------
 .../db/view/MaterializedViewManagerImpl.java  |   2 +-
 .../cottontail/algebra/CottontailScan.java    |   2 +-
 .../org/polypheny/db/test/DruidAdapterIT.java |   2 +-
 .../polypheny/db/test/DruidAdapterIT2.java    |   2 +-
 .../adapter/elasticsearch/ScrollingTest.java  |   2 +-
 .../org/polypheny/db/catalog/CatalogImpl.java |  13 +-
 .../org/polypheny/db/test/CatalogTest.java    |   2 +-
 .../db/sql/language/ddl/SqlCreateTable.java   |   2 +-
 .../language/validate/SqlValidatorImpl.java   |  53 +-
 33 files changed, 639 insertions(+), 1547 deletions(-)
 create mode 100644 core/src/main/java/org/polypheny/db/catalog/entity/CatalogEntityPlacement.java
 create mode 100644 core/src/main/java/org/polypheny/db/schema/PolySchemaBuilder.java
 create mode 100644 core/src/main/java/org/polypheny/db/util/Triple.java
 delete mode 100644 dbms/src/main/java/org/polypheny/db/schema/PolySchemaBuilder.java

diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalConstraintEnforcer.java b/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalConstraintEnforcer.java
index d9b5bbd00b..e0906cff51 100644
--- a/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalConstraintEnforcer.java
+++ b/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalConstraintEnforcer.java
@@ -258,7 +258,7 @@ public static EnforcementInformation getControl( CatalogTable table, Statement s
         //builder.scan( table.getSchemaName(), table.name );
         for ( CatalogConstraint constraint : constraints ) {
             builder.clear();
-            builder.scan( table.getNamespaceName(), table.name );//LogicalTableScan.create( modify.getCluster(), modify.getEntity() );
+            builder.scan( table.getNamespaceName(), table.name );//LogicalTableScan.create( modify.getCluster(), modify.getTable() );
             // Enforce uniqueness between the already existing values and the new values
             List<String> keys = constraint.key
                     .getColumnNames()
diff --git a/core/src/main/java/org/polypheny/db/catalog/Catalog.java b/core/src/main/java/org/polypheny/db/catalog/Catalog.java
index 644086c921..29db1ded33 100644
--- a/core/src/main/java/org/polypheny/db/catalog/Catalog.java
+++ b/core/src/main/java/org/polypheny/db/catalog/Catalog.java
@@ -1551,6 +1551,7 @@ protected final boolean isValidIdentifier( final String str ) {
     /**
      * Adds a placement for a partition.
      *
+     * @param namespaceId The id of the namespace to which the table belongs
      * @param adapterId The adapter on which the table should be placed
      * @param tableId The table for which a partition placement shall be created
      * @param partitionId The id of a specific partition that shall create a new placement
      * @param physicalSchemaName The schema name on the adapter
      * @param physicalTableName The table name on the adapter
      */
-    public abstract void addPartitionPlacement( int adapterId, long tableId, long partitionId, PlacementType placementType, String physicalSchemaName, String physicalTableName, DataPlacementRole role );
+    public abstract void addPartitionPlacement( long namespaceId, int adapterId, long tableId, long partitionId, PlacementType placementType, String physicalSchemaName, String physicalTableName, DataPlacementRole role );

     /**
      * Adds a new DataPlacement for a given table on a specific store
@@ -1846,12 +1847,13 @@ protected final boolean isValidIdentifier( final String str ) {
     /**
      * Adds a new placement for a given collection.
      *
+     * @param namespaceId The id of the namespace to which the collection belongs
      * @param adapterId The id of the adapter on which the placement is added
      * @param collectionId The id of the collection for which the placement is added
      * @param placementType The type of placement
      * @return The id of the newly added placement
      */
-    public abstract long addCollectionPlacement( int adapterId, long collectionId, PlacementType placementType );
+    public abstract long addCollectionPlacement( long namespaceId, int adapterId, long collectionId, PlacementType placementType );

     /**
      * Get the mapping for the collection, which points to the substitution entities in other data models.
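Both placement methods above now take the owning namespace as a new leading argument. A minimal caller-side sketch follows; the id values, PlacementType.AUTOMATIC, DataPlacementRole.UPTODATE, and the physical names are illustrative assumptions, not values taken from this patch:

    Catalog catalog = Catalog.getInstance();
    // Hypothetical ids: namespace 10 owns table 42, whose partition 420 is placed on adapter 3.
    catalog.addPartitionPlacement( 10L, 3, 42L, 420L, PlacementType.AUTOMATIC, "physSchema", "tab42_part420", DataPlacementRole.UPTODATE );
    // Document model: collection 77 in namespace 11 gets a placement on the same adapter.
    long placementId = catalog.addCollectionPlacement( 11L, 3, 77L, PlacementType.AUTOMATIC );
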
@@ -2344,6 +2346,11 @@ public Pattern toLowerCase() { } + public static Pattern of( String pattern ) { + return new Pattern( pattern ); + } + + public String toRegex() { return pattern.replace( "_", "(.)" ).replace( "%", "(.*)" ); } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogCollectionPlacement.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogCollectionPlacement.java index 4bfc2c17ee..371de7a650 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogCollectionPlacement.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogCollectionPlacement.java @@ -20,7 +20,7 @@ import javax.annotation.Nullable; -public class CatalogCollectionPlacement implements CatalogObject { +public class CatalogCollectionPlacement extends CatalogEntityPlacement { private static final long serialVersionUID = 4227137255905904785L; @@ -32,6 +32,7 @@ public class CatalogCollectionPlacement implements CatalogObject { public CatalogCollectionPlacement( int adapterId, long collectionId, @Nullable String physicalName, String physicalNamespaceName, long id ) { + super(); this.adapter = adapterId; this.collectionId = collectionId; this.physicalName = physicalName; diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogColumnPlacement.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogColumnPlacement.java index aba5dd116d..bb1af8813e 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogColumnPlacement.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogColumnPlacement.java @@ -29,6 +29,7 @@ public class CatalogColumnPlacement implements CatalogObject { private static final long serialVersionUID = -1909757888176291095L; + public final long namespaceId; public final long tableId; public final long columnId; public final int adapterId; @@ -42,6 +43,7 @@ public class CatalogColumnPlacement implements CatalogObject { public CatalogColumnPlacement( + final long namespaceId, final long tableId, final long columnId, final int adapterId, @@ -50,6 +52,7 @@ public CatalogColumnPlacement( final String physicalSchemaName, final String physicalColumnName, final long physicalPosition ) { + this.namespaceId = namespaceId; this.tableId = tableId; this.columnId = columnId; this.adapterId = adapterId; diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogEntity.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogEntity.java index f075f7c178..f204ff11e7 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogEntity.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogEntity.java @@ -17,6 +17,7 @@ package org.polypheny.db.catalog.entity; import java.io.Serializable; +import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.catalog.Catalog.EntityType; import org.polypheny.db.catalog.Catalog.NamespaceType; import org.polypheny.db.schema.Wrapper; @@ -36,4 +37,21 @@ protected CatalogEntity( long id, String name, EntityType type, NamespaceType na this.namespaceType = namespaceType; } + + public AlgDataType getRowType() { + return null; + } + + + @Deprecated + public boolean rolledUpColumnValidInsideAgg() { + return true; + } + + + @Deprecated + public boolean isRolledUp( String fieldName ) { + return false; + } + } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogEntityPlacement.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogEntityPlacement.java new file mode 100644 index 0000000000..f0102fd8aa --- 
/dev/null +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogEntityPlacement.java @@ -0,0 +1,31 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.catalog.entity; + +import java.io.Serializable; +import lombok.AllArgsConstructor; +import org.polypheny.db.schema.Wrapper; + +@AllArgsConstructor +public abstract class CatalogEntityPlacement implements CatalogObject, Serializable, Wrapper { + + public final Long namespaceId; + public final Long adapterId; + public final Long entityId; + + +} diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogGraphPlacement.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogGraphPlacement.java index d265d5276b..49620e3dc8 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogGraphPlacement.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogGraphPlacement.java @@ -20,7 +20,7 @@ import javax.annotation.Nullable; -public class CatalogGraphPlacement implements CatalogObject { +public class CatalogGraphPlacement extends CatalogEntityPlacement { private static final long serialVersionUID = 5889825050034392549L; @@ -31,6 +31,7 @@ public class CatalogGraphPlacement implements CatalogObject { public CatalogGraphPlacement( int adapterId, long graphId, @Nullable String physicalName, long partitionId ) { + super( graphId, (long) adapterId, graphId ); this.adapterId = adapterId; this.graphId = graphId; this.physicalName = physicalName; diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogPartitionPlacement.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogPartitionPlacement.java index e0bbbde804..37bb908fb5 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogPartitionPlacement.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogPartitionPlacement.java @@ -26,7 +26,7 @@ /** * This class is considered the logical representation of a physical table on a specific store. */ -public class CatalogPartitionPlacement implements CatalogObject { +public class CatalogPartitionPlacement extends CatalogEntityPlacement { private static final long serialVersionUID = 8835793248417591036L; @@ -52,9 +52,11 @@ public class CatalogPartitionPlacement implements CatalogObject { // A DataPlacement can directly forbid that any Placements within this DataPlacement container can get outdated. 
    // Therefore, the role at the DataPlacement specifies if underlying placements can even be outdated.
     public final DataPlacementRole role;
+    public final long namespaceId;


     public CatalogPartitionPlacement(
+            long namespaceId,
             final long tableId,
             final int adapterId,
             @NonNull final String adapterUniqueName,
@@ -63,6 +65,8 @@ public CatalogPartitionPlacement(
             final String physicalTableName,
             final long partitionId,
             DataPlacementRole role ) {
+        super( namespaceId, (long) adapterId, tableId );
+        this.namespaceId = namespaceId;
         this.tableId = tableId;
         this.adapterId = adapterId;
         this.adapterUniqueName = adapterUniqueName;
diff --git a/core/src/main/java/org/polypheny/db/plan/AlgOptEntity.java b/core/src/main/java/org/polypheny/db/plan/AlgOptEntity.java
index bb2dbbe89d..f33ea81a62 100644
--- a/core/src/main/java/org/polypheny/db/plan/AlgOptEntity.java
+++ b/core/src/main/java/org/polypheny/db/plan/AlgOptEntity.java
@@ -45,7 +45,7 @@
 import org.polypheny.db.algebra.type.AlgDataType;
 import org.polypheny.db.algebra.type.AlgDataTypeField;
 import org.polypheny.db.catalog.entity.CatalogEntity;
-import org.polypheny.db.catalog.entity.CatalogPartitionPlacement;
+import org.polypheny.db.catalog.entity.CatalogEntityPlacement;
 import org.polypheny.db.schema.ColumnStrategy;
 import org.polypheny.db.schema.Entity;
 import org.polypheny.db.schema.Wrapper;
@@ -137,7 +137,7 @@ default Entity getEntity() {
     CatalogEntity getCatalogEntity();

-    CatalogPartitionPlacement getPartitionPlacement();
+    CatalogEntityPlacement getPartitionPlacement();


     /**
diff --git a/core/src/main/java/org/polypheny/db/prepare/AlgOptEntityImpl.java b/core/src/main/java/org/polypheny/db/prepare/AlgOptEntityImpl.java
index 5b8d586fb2..14e775a554 100644
--- a/core/src/main/java/org/polypheny/db/prepare/AlgOptEntityImpl.java
+++ b/core/src/main/java/org/polypheny/db/prepare/AlgOptEntityImpl.java
@@ -38,10 +38,11 @@
 import java.util.AbstractList;
 import java.util.List;
 import java.util.Objects;
-import java.util.function.Function;
 import javax.annotation.Nullable;
 import lombok.Getter;
 import org.apache.calcite.linq4j.tree.Expression;
+import org.apache.calcite.linq4j.tree.Expressions;
+import org.polypheny.db.StatisticsManager;
 import org.polypheny.db.adapter.enumerable.EnumerableScan;
 import org.polypheny.db.algebra.AlgCollation;
 import org.polypheny.db.algebra.AlgDistribution;
@@ -57,6 +58,7 @@
 import org.polypheny.db.algebra.type.AlgDataTypeFactoryImpl;
 import org.polypheny.db.algebra.type.AlgDataTypeField;
 import org.polypheny.db.algebra.type.AlgRecordType;
+import org.polypheny.db.catalog.Catalog;
 import org.polypheny.db.catalog.entity.CatalogEntity;
 import org.polypheny.db.catalog.entity.CatalogPartitionPlacement;
 import org.polypheny.db.catalog.entity.CatalogTable;
@@ -74,7 +76,6 @@
 import org.polypheny.db.schema.ProjectableFilterableEntity;
 import org.polypheny.db.schema.QueryableEntity;
 import org.polypheny.db.schema.ScannableEntity;
-import org.polypheny.db.schema.SchemaPlus;
 import org.polypheny.db.schema.Schemas;
 import org.polypheny.db.schema.StreamableEntity;
 import org.polypheny.db.schema.TranslatableEntity;
@@ -100,8 +101,6 @@ public class AlgOptEntityImpl extends AbstractPreparingEntity {
     @Getter
     @Nullable
     private final CatalogEntity catalogEntity;
-    @Nullable
-    private final transient Function<Class, Expression> expressionFunction;

     /**
      * Estimate for the row count, or null.
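The partition, collection, and graph placements changed in this commit now share the CatalogEntityPlacement base type introduced above. A minimal sketch of what that buys callers; the helper below is hypothetical and relies only on the three public fields declared on the base class:

    // Hypothetical helper: any placement kind can now be identified uniformly via the base class.
    static String describe( CatalogEntityPlacement placement ) {
        return "namespace=" + placement.namespaceId + ", adapter=" + placement.adapterId + ", entity=" + placement.entityId;
    }
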
@@ -120,33 +119,30 @@ private AlgOptEntityImpl( @Nullable Entity entity, @Nullable CatalogEntity catalogEntity, @Nullable CatalogPartitionPlacement placement, - @Nullable Function, Expression> expressionFunction, @Nullable Double rowCount ) { this.schema = schema; this.rowType = Objects.requireNonNull( rowType ); this.entity = entity; this.partitionPlacement = placement; this.catalogEntity = catalogEntity; - this.expressionFunction = expressionFunction; this.rowCount = rowCount; } - public static AlgOptEntityImpl create( AlgOptSchema schema, AlgDataType rowType, Expression expression ) { - return new AlgOptEntityImpl( schema, rowType, null, null, null, c -> expression, null ); + public static AlgOptEntityImpl create( AlgOptSchema schema, AlgDataType rowType ) { + return new AlgOptEntityImpl( schema, rowType, null, null, null, null ); } - public static AlgOptEntityImpl create( AlgOptSchema schema, AlgDataType rowType, final PolyphenyDbSchema.TableEntry tableEntry, CatalogEntity catalogEntity, CatalogPartitionPlacement placement, Double count ) { - final Entity entity = tableEntry.getTable(); + public static AlgOptEntityImpl create( AlgOptSchema schema, AlgDataType rowType, CatalogEntity catalogEntity, CatalogPartitionPlacement placement, Double count ) { Double rowCount; if ( count == null ) { - rowCount = entity.getStatistic().getRowCount(); + rowCount = Double.valueOf( StatisticsManager.getInstance().rowCountPerTable( catalogEntity.id ) ); } else { rowCount = count; } - return new AlgOptEntityImpl( schema, rowType, entity, catalogEntity, placement, getClassExpressionFunction( tableEntry, entity ), rowCount ); + return new AlgOptEntityImpl( schema, rowType, null, catalogEntity, placement, rowCount ); } @@ -154,30 +150,7 @@ public static AlgOptEntityImpl create( AlgOptSchema schema, AlgDataType rowType, * Creates a copy of this RelOptTable. The new RelOptTable will have newRowType. 
*/ public AlgOptEntityImpl copy( AlgDataType newRowType ) { - return new AlgOptEntityImpl( this.schema, newRowType, this.entity, this.catalogEntity, this.partitionPlacement, this.expressionFunction, this.rowCount ); - } - - - private static Function, Expression> getClassExpressionFunction( PolyphenyDbSchema.TableEntry tableEntry, Entity entity ) { - return getClassExpressionFunction( tableEntry.schema.plus(), tableEntry.name, entity ); - } - - - private static Function, Expression> getClassExpressionFunction( final SchemaPlus schema, final String tableName, final Entity entity ) { - if ( entity instanceof QueryableEntity ) { - final QueryableEntity queryableTable = (QueryableEntity) entity; - return clazz -> queryableTable.getExpression( schema, tableName, clazz ); - } else if ( entity instanceof ScannableEntity - || entity instanceof FilterableEntity - || entity instanceof ProjectableFilterableEntity ) { - return clazz -> Schemas.tableExpression( schema, Object[].class, tableName, entity.getClass() ); - } else if ( entity instanceof StreamableEntity ) { - return getClassExpressionFunction( schema, tableName, ((StreamableEntity) entity).stream() ); - } else { - return input -> { - throw new UnsupportedOperationException(); - }; - } + return new AlgOptEntityImpl( this.schema, newRowType, this.entity, this.catalogEntity, this.partitionPlacement, this.rowCount ); } @@ -185,7 +158,7 @@ public static AlgOptEntityImpl create( AlgOptSchema schema, AlgDataType rowType, assert entity instanceof TranslatableEntity || entity instanceof ScannableEntity || entity instanceof ModifiableEntity; - return new AlgOptEntityImpl( schema, rowType, entity, catalogEntity, placement, null, null ); + return new AlgOptEntityImpl( schema, rowType, entity, catalogEntity, placement, null ); } @@ -212,10 +185,20 @@ public T unwrap( Class clazz ) { @Override public Expression getExpression( Class clazz ) { - if ( expressionFunction == null ) { - return null; + if ( partitionPlacement != null ) { + return Expressions.call( + Expressions.call( Catalog.class, "getInstance" ), + "getPartitionPlacement", + Expressions.constant( partitionPlacement.adapterId ), + Expressions.constant( partitionPlacement.partitionId ) ); + } else if ( catalogEntity != null ) { + return Expressions.call( + Expressions.call( Catalog.class, "getInstance" ), + "getTable", + Expressions.constant( catalogEntity.id ) ); } - return expressionFunction.apply( clazz ); + + return null; } diff --git a/core/src/main/java/org/polypheny/db/prepare/LixToAlgTranslator.java b/core/src/main/java/org/polypheny/db/prepare/LixToAlgTranslator.java index 2419c228a4..a910cbd4b0 100644 --- a/core/src/main/java/org/polypheny/db/prepare/LixToAlgTranslator.java +++ b/core/src/main/java/org/polypheny/db/prepare/LixToAlgTranslator.java @@ -112,16 +112,16 @@ public AlgNode translate( Expression expression ) { cluster, AlgOptEntityImpl.create( null, - typeFactory.createJavaType( Types.toClass( Types.getElementType( call.targetExpression.getType() ) ) ), - call.targetExpression ) ); + typeFactory.createJavaType( Types.toClass( Types.getElementType( call.targetExpression.getType() ) ) ) + ) ); case SCHEMA_GET_TABLE: return LogicalRelScan.create( cluster, AlgOptEntityImpl.create( null, - typeFactory.createJavaType( (Class) ((ConstantExpression) call.expressions.get( 1 )).value ), - call.targetExpression ) ); + typeFactory.createJavaType( (Class) ((ConstantExpression) call.expressions.get( 1 )).value ) + ) ); default: throw new UnsupportedOperationException( "unknown method " + 
call.method ); diff --git a/core/src/main/java/org/polypheny/db/prepare/PolyphenyDbCatalogReader.java b/core/src/main/java/org/polypheny/db/prepare/PolyphenyDbCatalogReader.java index 9e4b0526b2..2fd4b636e7 100644 --- a/core/src/main/java/org/polypheny/db/prepare/PolyphenyDbCatalogReader.java +++ b/core/src/main/java/org/polypheny/db/prepare/PolyphenyDbCatalogReader.java @@ -45,11 +45,12 @@ import org.polypheny.db.algebra.operators.OperatorTable; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; +import org.polypheny.db.catalog.entity.CatalogCollection; +import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.nodes.Identifier; import org.polypheny.db.nodes.Operator; import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.prepare.Prepare.PreparingEntity; -import org.polypheny.db.schema.Entity; import org.polypheny.db.schema.PolyphenyDbSchema; import org.polypheny.db.schema.Wrapper; import org.polypheny.db.schema.graph.Graph; @@ -79,30 +80,18 @@ public PolyphenyDbCatalogReader( PolyphenyDbSchema rootSchema, List defa @Override public PreparingEntity getTable( final List names ) { // First look in the default schema, if any. If not found, look in the root schema. - PolyphenyDbSchema.TableEntry entry = ValidatorUtil.getTableEntry( this, names ); - if ( entry != null ) { - final Entity entity = entry.getTable(); - - if ( entity instanceof Wrapper ) { - final PreparingEntity algOptTable = ((Wrapper) entity).unwrap( PreparingEntity.class ); - if ( algOptTable != null ) { - return algOptTable; - } - } - return AlgOptEntityImpl.create( this, entity.getRowType( typeFactory ), entry, entity.getCatalogEntity(), entity.getPartitionPlacement(), null ); - } - return null; + CatalogEntity entity = rootSchema.getTable( names ); + return AlgOptEntityImpl.create( this, entity.getRowType(), entity, null, null ); + } @Override public AlgOptEntity getCollection( final List names ) { // First look in the default schema, if any. If not found, look in the root schema. 
- PolyphenyDbSchema.TableEntry entry = ValidatorUtil.getTableEntry( this, names ); - if ( entry != null ) { - final Entity entity = entry.getTable(); - - return AlgOptEntityImpl.create( this, entity.getRowType( typeFactory ), entry, entity.getCatalogEntity(), entity.getPartitionPlacement(), null ); + CatalogCollection collection = rootSchema.getCollection( names ); + if ( collection != null ) { + return AlgOptEntityImpl.create( this, collection.getRowType(), collection, null, null ); } return null; } @@ -135,9 +124,9 @@ public List getAllSchemaObjectNames( List names ) { final List result = new ArrayList<>(); // Add root schema if not anonymous - if ( !schema.getName().equals( "" ) ) { + /*if ( !schema.getName().equals( "" ) ) { result.add( moniker( schema, null, MonikerType.SCHEMA ) ); - } + }*/ final Map schemaMap = schema.getSubSchemaMap(); diff --git a/core/src/main/java/org/polypheny/db/schema/AbstractPolyphenyDbSchema.java b/core/src/main/java/org/polypheny/db/schema/AbstractPolyphenyDbSchema.java index ec98e6fe32..e70b3f5261 100644 --- a/core/src/main/java/org/polypheny/db/schema/AbstractPolyphenyDbSchema.java +++ b/core/src/main/java/org/polypheny/db/schema/AbstractPolyphenyDbSchema.java @@ -34,35 +34,13 @@ package org.polypheny.db.schema; -import com.google.common.collect.ImmutableList; -import com.google.common.collect.ImmutableSortedMap; -import com.google.common.collect.ImmutableSortedSet; -import com.google.common.collect.Lists; -import java.util.ArrayList; -import java.util.Collection; -import java.util.List; import java.util.Map; -import java.util.NavigableMap; -import java.util.NavigableSet; -import java.util.Objects; -import java.util.Set; -import java.util.stream.Collectors; +import java.util.concurrent.ConcurrentHashMap; import lombok.Getter; -import lombok.Setter; -import org.apache.calcite.linq4j.tree.Expression; -import org.polypheny.db.algebra.type.AlgDataTypeFactory; -import org.polypheny.db.algebra.type.AlgDataTypeFactory.Builder; -import org.polypheny.db.algebra.type.AlgDataTypeFieldImpl; -import org.polypheny.db.algebra.type.AlgDataTypeImpl; -import org.polypheny.db.algebra.type.AlgDataTypeSystem; -import org.polypheny.db.algebra.type.AlgProtoDataType; -import org.polypheny.db.catalog.Catalog.NamespaceType; -import org.polypheny.db.type.PolyType; -import org.polypheny.db.type.PolyTypeFactoryImpl; -import org.polypheny.db.util.NameMap; -import org.polypheny.db.util.NameMultimap; -import org.polypheny.db.util.NameSet; +import org.polypheny.db.catalog.entity.CatalogEntity; +import org.polypheny.db.catalog.entity.CatalogEntityPlacement; import org.polypheny.db.util.Pair; +import org.polypheny.db.util.Triple; /** @@ -70,145 +48,43 @@ * * Wrapper around user-defined schema used internally. */ +@Getter public abstract class AbstractPolyphenyDbSchema implements PolyphenyDbSchema { - @Getter - private final AbstractPolyphenyDbSchema parent; - @Getter - private final boolean caseSensitive; - @Getter - @Setter - public Namespace namespace; - @Getter - public final String name; - @Getter - public final NamespaceType namespaceType; - /** - * Tables explicitly defined in this schema. Does not include tables in {@link #namespace}. 
- */ - @Getter - protected final NameMap tableMap; - protected final NameMultimap functionMap; - protected final NameMap typeMap; - protected final NameSet functionNames; - protected final NameMap nullaryFunctionMap; - protected transient final NameMap subSchemaMap; - private List> path; + private final ConcurrentHashMap, CatalogEntity> logicalRelational; + private final ConcurrentHashMap, CatalogEntity> logicalDocument; + private final ConcurrentHashMap, CatalogEntity> logicalGraph; + private final ConcurrentHashMap, CatalogEntityPlacement> physicalRelational; + private final ConcurrentHashMap, CatalogEntityPlacement> physicalDocument; + private final ConcurrentHashMap, CatalogEntityPlacement> physicalGraph; + + public AbstractPolyphenyDbSchema( + Map, CatalogEntity> logicalRelational, + Map, CatalogEntity> logicalDocument, + Map, CatalogEntity> logicalGraph, + Map, CatalogEntityPlacement> physicalRelational, + Map, CatalogEntityPlacement> physicalDocument, + Map, CatalogEntityPlacement> physicalGraph ) { + this.logicalRelational = new ConcurrentHashMap<>( logicalRelational ); + this.logicalDocument = new ConcurrentHashMap<>( logicalDocument ); + this.logicalGraph = new ConcurrentHashMap<>( logicalGraph ); + this.physicalRelational = new ConcurrentHashMap<>( physicalRelational ); + this.physicalDocument = new ConcurrentHashMap<>( physicalDocument ); + this.physicalGraph = new ConcurrentHashMap<>( physicalGraph ); - protected AbstractPolyphenyDbSchema( - AbstractPolyphenyDbSchema parent, - Namespace namespace, - String name, - NamespaceType type, - boolean caseSensitive, - NameMap subSchemaMap, - NameMap tableMap, - NameMap typeMap, - NameMultimap functionMap, - NameSet functionNames, - NameMap nullaryFunctionMap, - List> path ) { - this.parent = parent; - this.namespace = namespace; - this.name = name; - this.namespaceType = type; - if ( tableMap == null ) { - this.tableMap = new NameMap<>(); - } else { - this.tableMap = Objects.requireNonNull( tableMap ); - } - if ( subSchemaMap == null ) { - this.subSchemaMap = new NameMap<>(); - } else { - this.subSchemaMap = Objects.requireNonNull( subSchemaMap ); - } - if ( functionMap == null ) { - this.functionMap = new NameMultimap<>(); - this.functionNames = new NameSet(); - this.nullaryFunctionMap = new NameMap<>(); - } else { - // If you specify functionMap, you must also specify functionNames and nullaryFunctionMap. - this.functionMap = Objects.requireNonNull( functionMap ); - this.functionNames = Objects.requireNonNull( functionNames ); - this.nullaryFunctionMap = Objects.requireNonNull( nullaryFunctionMap ); - } - if ( typeMap == null ) { - this.typeMap = new NameMap<>(); - } else { - this.typeMap = Objects.requireNonNull( typeMap ); - } - this.path = path; - this.caseSensitive = caseSensitive; } /** * Creates a root schema. - * - * @param name Schema name */ - public static PolyphenyDbSchema createRootSchema( String name ) { - final Namespace namespace = new RootSchema(); - return new SimplePolyphenyDbSchema( null, namespace, name, NamespaceType.getDefault(), false ); + public static PolyphenyDbSchema createRootSchema() { + return PolySchemaBuilder.getInstance().getCurrent(); } - /** - * Returns a sub-schema with a given name that is defined implicitly (that is, by the underlying {@link Namespace} object, - * not explicitly by a call to {@link #add(String, Namespace, NamespaceType)}), or null. 
- */ - protected abstract PolyphenyDbSchema getImplicitSubSchema( String schemaName, boolean caseSensitive ); - - /** - * Returns a table with a given name that is defined implicitly (that is, by the underlying {@link Namespace} object, - * not explicitly by a call to {@link #add(String, Entity)}), or null. - */ - protected abstract TableEntry getImplicitTable( String tableName ); - - /** - * Returns a type with a given name that is defined implicitly (that is, by the underlying {@link Namespace} object, - * not explicitly by a call to {@link #add(String, AlgProtoDataType)}), or null. - */ - protected abstract TypeEntry getImplicitType( String name, boolean caseSensitive ); - - /** - * Returns table function with a given name and zero arguments that is defined implicitly (that is, by the underlying - * {@link Namespace} object, not explicitly by a call to {@link #add(String, Function)}), or null. - */ - protected abstract TableEntry getImplicitTableBasedOnNullaryFunction( String tableName, boolean caseSensitive ); - - /** - * Adds implicit sub-schemas to a builder. - */ - protected abstract void addImplicitSubSchemaToBuilder( ImmutableSortedMap.Builder builder ); - - /** - * Adds implicit tables to a builder. - */ - protected abstract void addImplicitTableToBuilder( ImmutableSortedSet.Builder builder ); - - /** - * Adds implicit functions to a builder. - */ - protected abstract void addImplicitFunctionsToBuilder( ImmutableList.Builder builder, String name, boolean caseSensitive ); - - /** - * Adds implicit function names to a builder. - */ - protected abstract void addImplicitFuncNamesToBuilder( ImmutableSortedSet.Builder builder ); - - /** - * Adds implicit type names to a builder. - */ - protected abstract void addImplicitTypeNamesToBuilder( ImmutableSortedSet.Builder builder ); - - /** - * Adds implicit table functions to a builder. - */ - protected abstract void addImplicitTablesBasedOnNullaryFunctionsToBuilder( ImmutableSortedMap.Builder builder ); - /** * Returns a snapshot representation of this PolyphenyDbSchema. */ @@ -217,422 +93,4 @@ public static PolyphenyDbSchema createRootSchema( String name ) { protected abstract boolean isCacheEnabled(); - /** - * Creates a TableEntryImpl with no SQLs. - */ - protected TableEntryImpl tableEntry( String name, Entity entity ) { - return new TableEntryImpl( this, name, entity ); - } - - - /** - * Creates a TableEntryImpl with no SQLs. - */ - protected TypeEntryImpl typeEntry( String name, AlgProtoDataType algProtoDataType ) { - return new TypeEntryImpl( this, name, algProtoDataType ); - } - - - /** - * Defines a table within this schema. - */ - @Override - public TableEntry add( String tableName, Entity entity ) { - final TableEntryImpl entry = new TableEntryImpl( this, tableName, entity ); - tableMap.put( tableName, entry ); - return entry; - } - - - /** - * Defines a type within this schema. 
- */ - @Override - public TypeEntry add( String name, AlgProtoDataType type ) { - final TypeEntry entry = new TypeEntryImpl( this, name, type ); - typeMap.put( name, entry ); - return entry; - } - - - private FunctionEntry add( String name, Function function ) { - final FunctionEntryImpl entry = new FunctionEntryImpl( this, name, function ); - functionMap.put( name, entry ); - functionNames.add( name ); - if ( function.getParameters().isEmpty() ) { - nullaryFunctionMap.put( name, entry ); - } - return entry; - } - - - @Override - public AbstractPolyphenyDbSchema root() { - for ( AbstractPolyphenyDbSchema schema = this; ; ) { - if ( schema.parent == null ) { - return schema; - } - schema = schema.parent; - } - } - - - /** - * Returns whether this is a root schema. - */ - @Override - public boolean isRoot() { - return parent == null; - } - - - /** - * Returns the path of an object in this schema. - */ - @Override - public List path( String name ) { - final List list = new ArrayList<>(); - if ( name != null ) { - list.add( name ); - } - for ( AbstractPolyphenyDbSchema s = this; s != null; s = s.parent ) { - if ( s.parent != null || !s.name.equals( "" ) ) { - // Omit the root schema's name from the path if it's the empty string, which it usually is. - list.add( s.name ); - } - } - return ImmutableList.copyOf( Lists.reverse( list ) ); - } - - - @Override - public final PolyphenyDbSchema getSubNamespace( String namespaceName, boolean caseSensitive ) { - // Check explicit schemas. - for ( Map.Entry entry : subSchemaMap.range( namespaceName, caseSensitive ).entrySet() ) { - return entry.getValue(); - } - return getImplicitSubSchema( namespaceName, caseSensitive ); - } - - - /** - * Returns a table with the given name. Does not look for views. - */ - @Override - public final TableEntry getTable( String originalTableName ) { - String tableName = caseSensitive ? originalTableName : originalTableName.toLowerCase(); - // Check explicit tables. - for ( Map.Entry entry : tableMap.range( tableName, caseSensitive ).entrySet() ) { - return entry.getValue(); - } - TableEntry table = getImplicitTable( originalTableName ); - - if ( table != null ) { - return table; - } else if ( namespaceType == NamespaceType.GRAPH ) { - // label table for cross model queries - final AlgDataTypeFactory typeFactory = new PolyTypeFactoryImpl( AlgDataTypeSystem.DEFAULT ); - - final Builder fieldInfo = typeFactory.builder(); - fieldInfo.add( new AlgDataTypeFieldImpl( "id", 0, typeFactory.createPolyType( PolyType.VARCHAR, 255 ) ) ); - fieldInfo.add( new AlgDataTypeFieldImpl( "properties", 1, typeFactory.createPolyType( PolyType.VARCHAR, 2064 ) ) ); - fieldInfo.add( new AlgDataTypeFieldImpl( "labels", 2, typeFactory.createArrayType( typeFactory.createPolyType( PolyType.VARCHAR, 255 ), -1 ) ) ); - - return new TableEntryImpl( this, tableName, new LogicalEntity( -1, name, tableName, List.of(), List.of(), AlgDataTypeImpl.proto( fieldInfo.build() ), NamespaceType.GRAPH ) ); - } else if ( namespaceType == NamespaceType.DOCUMENT ) { - for ( Map.Entry entry : tableMap.map().entrySet().stream().filter( t -> t.getKey().split( "_" )[0].equalsIgnoreCase( tableName.split( "_" )[0] ) ).collect( Collectors.toList() ) ) { - return entry.getValue(); - } - } - return null; - } - - - @Override - public SchemaPlus plus() { - return new SchemaPlusImpl( getNamespace().getId() ); - } - - - /** - * Returns the default path resolving functions from this schema. - *

    - * The path consists is a list of lists of strings. - * Each list of strings represents the path of a schema from the root schema. For example, [[], [foo], [foo, bar, baz]] - * represents three schemas: the root schema "/" (level 0), "/foo" (level 1) and "/foo/bar/baz" (level 3). - * - * @return Path of this schema; never null, may be empty - */ - @Override - public List> getPath() { - if ( path != null ) { - return path; - } - // Return a path consisting of just this schema. - return ImmutableList.of( path( null ) ); - } - - - /** - * Returns a collection of sub-schemas, both explicit (defined using {@link #add(String, Namespace, NamespaceType)}) - * and implicit (defined using {@link Namespace#getSubNamespaceNames()} and {@link Namespace#getSubNamespace(String)}). - */ - @Override - public final NavigableMap getSubSchemaMap() { - // Build a map of implicit sub-schemas first, then explicit sub-schemas. - // If there are implicit and explicit with the same name, explicit wins. - final ImmutableSortedMap.Builder builder = new ImmutableSortedMap.Builder<>( NameSet.COMPARATOR ); - builder.putAll( subSchemaMap.map() ); - addImplicitSubSchemaToBuilder( builder ); - return builder.build(); - } - - - /** - * Returns the set of all table names. Includes implicit and explicit tables and functions with zero parameters. - */ - @Override - public final NavigableSet getTableNames() { - final ImmutableSortedSet.Builder builder = new ImmutableSortedSet.Builder<>( NameSet.COMPARATOR ); - // Add explicit tables, case-sensitive. - builder.addAll( tableMap.map().keySet() ); - // Add implicit tables, case-sensitive. - addImplicitTableToBuilder( builder ); - return builder.build(); - } - - - /** - * Returns the set of all types names. - */ - @Override - public final NavigableSet getTypeNames() { - final ImmutableSortedSet.Builder builder = new ImmutableSortedSet.Builder<>( NameSet.COMPARATOR ); - // Add explicit types. - builder.addAll( typeMap.map().keySet() ); - // Add implicit types. - addImplicitTypeNamesToBuilder( builder ); - return builder.build(); - } - - - /** - * Returns a type, explicit and implicit, with a given name. Never null. - */ - @Override - public final TypeEntry getType( String name, boolean caseSensitive ) { - for ( Map.Entry entry : typeMap.range( name, caseSensitive ).entrySet() ) { - return entry.getValue(); - } - return getImplicitType( name, caseSensitive ); - } - - - /** - * Returns a collection of all functions, explicit and implicit, with a given name. Never null. - */ - @Override - public final Collection getFunctions( String name, boolean caseSensitive ) { - final ImmutableList.Builder builder = ImmutableList.builder(); - // Add explicit functions. - for ( FunctionEntry functionEntry : Pair.right( functionMap.range( name, caseSensitive ) ) ) { - builder.add( functionEntry.getFunction() ); - } - // Add implicit functions. - addImplicitFunctionsToBuilder( builder, name, caseSensitive ); - return builder.build(); - } - - - /** - * Returns the list of function names in this schema, both implicit and explicit, never null. - */ - @Override - public final NavigableSet getFunctionNames() { - final ImmutableSortedSet.Builder builder = new ImmutableSortedSet.Builder<>( NameSet.COMPARATOR ); - // Add explicit functions, case-sensitive. - builder.addAll( functionMap.map().keySet() ); - // Add implicit functions, case-sensitive. 
- addImplicitFuncNamesToBuilder( builder ); - return builder.build(); - } - - - /** - * Returns a tables derived from explicit and implicit functions that take zero parameters. - */ - @Override - public final TableEntry getTableBasedOnNullaryFunction( String tableName, boolean caseSensitive ) { - for ( Map.Entry entry : nullaryFunctionMap.range( tableName, caseSensitive ).entrySet() ) { - final Function function = entry.getValue().getFunction(); - if ( function instanceof TableMacro ) { - assert function.getParameters().isEmpty(); - final Entity entity = ((TableMacro) function).apply( ImmutableList.of() ); - return tableEntry( tableName, entity ); - } - } - return getImplicitTableBasedOnNullaryFunction( tableName, caseSensitive ); - } - - - /** - * Implementation of {@link SchemaPlus} based on a {@link AbstractPolyphenyDbSchema}. - */ - private class SchemaPlusImpl implements SchemaPlus { - - @Getter - private final long id; - - - public SchemaPlusImpl( long id ) { - this.id = id; - } - - - @Override - public AbstractPolyphenyDbSchema polyphenyDbSchema() { - return AbstractPolyphenyDbSchema.this; - } - - - @Override - public SchemaPlus getParentSchema() { - return parent == null ? null : parent.plus(); - } - - - @Override - public String getName() { - return AbstractPolyphenyDbSchema.this.getName(); - } - - - @Override - public boolean isMutable() { - return namespace.isMutable(); - } - - - @Override - public void setCacheEnabled( boolean cache ) { - AbstractPolyphenyDbSchema.this.setCache( cache ); - } - - - @Override - public boolean isCacheEnabled() { - return AbstractPolyphenyDbSchema.this.isCacheEnabled(); - } - - - @Override - public Namespace snapshot( SchemaVersion version ) { - throw new UnsupportedOperationException(); - } - - - @Override - public Expression getExpression( SchemaPlus parentSchema, String name ) { - return namespace.getExpression( parentSchema, name ); - } - - - @Override - public Entity getEntity( String name ) { - final TableEntry entry = AbstractPolyphenyDbSchema.this.getTable( name ); - return entry == null ? null : entry.getTable(); - } - - - @Override - public NavigableSet getEntityNames() { - return AbstractPolyphenyDbSchema.this.getTableNames(); - } - - - @Override - public AlgProtoDataType getType( String name ) { - final TypeEntry entry = AbstractPolyphenyDbSchema.this.getType( name, true ); - return entry == null ? null : entry.getType(); - } - - - @Override - public Set getTypeNames() { - return AbstractPolyphenyDbSchema.this.getTypeNames(); - } - - - @Override - public Collection getFunctions( String name ) { - return AbstractPolyphenyDbSchema.this.getFunctions( name, true ); - } - - - @Override - public NavigableSet getFunctionNames() { - return AbstractPolyphenyDbSchema.this.getFunctionNames(); - } - - - @Override - public SchemaPlus getSubNamespace( String name ) { - final PolyphenyDbSchema subSchema = AbstractPolyphenyDbSchema.this.getSubNamespace( name, true ); - return subSchema == null ? 
null : subSchema.plus(); - } - - - @Override - public Set getSubNamespaceNames() { - return AbstractPolyphenyDbSchema.this.getSubSchemaMap().keySet(); - } - - - @Override - public SchemaPlus add( String name, Namespace namespace, NamespaceType namespaceType ) { - final PolyphenyDbSchema polyphenyDbSchema = AbstractPolyphenyDbSchema.this.add( name, namespace, namespaceType ); - return polyphenyDbSchema.plus(); - } - - - @Override - public T unwrap( Class clazz ) { - if ( clazz.isInstance( this ) ) { - return clazz.cast( this ); - } - if ( clazz.isInstance( AbstractPolyphenyDbSchema.this ) ) { - return clazz.cast( AbstractPolyphenyDbSchema.this ); - } - if ( clazz.isInstance( AbstractPolyphenyDbSchema.this.namespace ) ) { - return clazz.cast( AbstractPolyphenyDbSchema.this.namespace ); - } - throw new ClassCastException( "not a " + clazz ); - } - - - @Override - public void setPath( ImmutableList> path ) { - AbstractPolyphenyDbSchema.this.path = path; - } - - - @Override - public void add( String name, Entity entity ) { - AbstractPolyphenyDbSchema.this.add( name, entity ); - } - - - @Override - public void add( String name, Function function ) { - AbstractPolyphenyDbSchema.this.add( name, function ); - } - - - @Override - public void add( String name, AlgProtoDataType type ) { - AbstractPolyphenyDbSchema.this.add( name, type ); - } - - } - } diff --git a/core/src/main/java/org/polypheny/db/schema/PolySchemaBuilder.java b/core/src/main/java/org/polypheny/db/schema/PolySchemaBuilder.java new file mode 100644 index 0000000000..73048f221b --- /dev/null +++ b/core/src/main/java/org/polypheny/db/schema/PolySchemaBuilder.java @@ -0,0 +1,342 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.db.schema; + + +import java.beans.PropertyChangeEvent; +import java.beans.PropertyChangeListener; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; +import org.apache.calcite.linq4j.tree.Expression; +import org.apache.calcite.linq4j.tree.Expressions; +import org.polypheny.db.adapter.Adapter; +import org.polypheny.db.adapter.AdapterManager; +import org.polypheny.db.adapter.DataContext; +import org.polypheny.db.algebra.type.AlgDataType; +import org.polypheny.db.algebra.type.AlgDataTypeFactory.Builder; +import org.polypheny.db.algebra.type.AlgDataTypeImpl; +import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.Catalog.NamespaceType; +import org.polypheny.db.catalog.entity.CatalogAdapter; +import org.polypheny.db.catalog.entity.CatalogCollection; +import org.polypheny.db.catalog.entity.CatalogCollectionPlacement; +import org.polypheny.db.catalog.entity.CatalogColumnPlacement; +import org.polypheny.db.catalog.entity.CatalogDatabase; +import org.polypheny.db.catalog.entity.CatalogEntity; +import org.polypheny.db.catalog.entity.CatalogEntityPlacement; +import org.polypheny.db.catalog.entity.CatalogGraphDatabase; +import org.polypheny.db.catalog.entity.CatalogGraphPlacement; +import org.polypheny.db.catalog.entity.CatalogKey.EnforcementTime; +import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; +import org.polypheny.db.catalog.entity.CatalogSchema; +import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.config.RuntimeConfig; +import org.polypheny.db.schema.Namespace.Schema; +import org.polypheny.db.schema.impl.AbstractNamespace; +import org.polypheny.db.util.BuiltInMethod; +import org.polypheny.db.util.Pair; +import org.polypheny.db.util.Triple; + + +public class PolySchemaBuilder implements PropertyChangeListener { + + private final static PolySchemaBuilder INSTANCE = new PolySchemaBuilder(); + + private AbstractPolyphenyDbSchema current; + private boolean isOutdated = true; + + + private PolySchemaBuilder() { + Catalog.getInstance().addObserver( this ); + } + + + public static PolySchemaBuilder getInstance() { + return INSTANCE; + } + + + public AbstractPolyphenyDbSchema getCurrent() { + if ( !RuntimeConfig.SCHEMA_CACHING.getBoolean() ) { + return buildSchema(); + } + if ( current == null || isOutdated ) { + current = buildSchema(); + } + return current; + } + + + private synchronized AbstractPolyphenyDbSchema buildSchema() { + + Catalog catalog = Catalog.getInstance(); + CatalogDatabase catalogDatabase = catalog.getDatabase( Catalog.defaultDatabaseId ); + + // Build logical namespaces + Map, CatalogEntity> logicalRelational = buildRelationalLogical( catalog, catalogDatabase ); + + Map, CatalogEntity> logicalDocument = buildDocumentLogical( catalog, catalogDatabase ); + + Map, CatalogEntity> logicalGraph = buildGraphLogical( catalog, catalogDatabase ); + + // Build mapping structures + + // Build physical namespaces + List adapters = Catalog.getInstance().getAdapters(); + + Map, CatalogEntityPlacement> physicalRelational = buildPhysicalTables( catalog, catalogDatabase, adapters ); + + Map, CatalogEntityPlacement> physicalDocument = buildPhysicalDocuments( catalog, catalogDatabase, adapters ); + + Map, CatalogEntityPlacement> physicalGraph = buildPhysicalGraphs( catalog, catalogDatabase ); + + isOutdated = false; + return new SimplePolyphenyDbSchema( logicalRelational, 
+                logicalDocument, logicalGraph, physicalRelational, physicalDocument, physicalGraph );
+    }
+
+
+    private Map<Pair<Long, Long>, CatalogEntity> buildGraphLogical( Catalog catalog, CatalogDatabase catalogDatabase ) {
+        return catalog.getGraphs( catalogDatabase.id, null ).stream().collect( Collectors.toMap( e -> Pair.of( e.id, e.id ), e -> e ) );
+    }
+
+
+    private Map<Pair<Long, Long>, CatalogEntity> buildRelationalLogical( Catalog catalog, CatalogDatabase catalogDatabase ) {
+        Map<Pair<Long, Long>, CatalogEntity> entities = new HashMap<>();
+        for ( CatalogSchema catalogSchema : catalog.getSchemas( catalogDatabase.id, null ) ) {
+            if ( catalogSchema.namespaceType != NamespaceType.RELATIONAL ) {
+                continue;
+            }
+
+            for ( CatalogTable catalogTable : catalog.getTables( catalogSchema.id, null ) ) {
+                entities.put( Pair.of( catalogSchema.id, catalogTable.id ), catalogTable );
+            }
+        }
+        return entities;
+    }
+
+
+    private Map<Pair<Long, Long>, CatalogEntity> buildDocumentLogical( Catalog catalog, CatalogDatabase catalogDatabase ) {
+        Map<Pair<Long, Long>, CatalogEntity> entities = new HashMap<>();
+        for ( CatalogSchema catalogSchema : catalog.getSchemas( catalogDatabase.id, null ) ) {
+            if ( catalogSchema.namespaceType != NamespaceType.DOCUMENT ) {
+                continue;
+            }
+
+            for ( CatalogCollection catalogEntity : catalog.getCollections( catalogSchema.id, null ) ) {
+                entities.put( Pair.of( catalogSchema.id, catalogEntity.id ), catalogEntity );
+            }
+        }
+
+        return entities;
+    }
+
+
+    private Map<Triple<Long, Long, Long>, CatalogEntityPlacement> buildPhysicalGraphs( Catalog catalog, CatalogDatabase catalogDatabase ) {
+        Map<Triple<Long, Long, Long>, CatalogEntityPlacement> placements = new HashMap<>();
+        // Build adapter schema (physical schema) GRAPH
+        for ( CatalogGraphDatabase graph : catalog.getGraphs( catalogDatabase.id, null ) ) {
+            for ( int adapterId : graph.placements ) {
+
+                CatalogGraphPlacement placement = catalog.getGraphPlacement( graph.id, adapterId );
+                Adapter adapter = AdapterManager.getInstance().getAdapter( adapterId );
+
+                if ( !adapter.getSupportedNamespaceTypes().contains( NamespaceType.GRAPH ) ) {
+                    continue;
+                }
+
+                //adapter.createGraphNamespace( rootSchema, schemaName, graph.id );
+
+                placements.put( new Triple<>( graph.id, (long) adapter.getAdapterId(), graph.id ), placement );
+            }
+        }
+        return placements;
+    }
+
+
+    private Map<Triple<Long, Long, Long>, CatalogEntityPlacement> buildPhysicalDocuments( Catalog catalog, CatalogDatabase catalogDatabase, List<CatalogAdapter> adapters ) {
+        Map<Triple<Long, Long, Long>, CatalogEntityPlacement> placements = new HashMap<>();
+        // Build adapter schema (physical schema) DOCUMENT
+        for ( CatalogSchema catalogSchema : catalog.getSchemas( catalogDatabase.id, null ).stream().filter( s -> s.namespaceType == NamespaceType.DOCUMENT ).collect( Collectors.toList() ) ) {
+            for ( CatalogAdapter catalogAdapter : adapters ) {
+
+                Adapter adapter = AdapterManager.getInstance().getAdapter( catalogAdapter.id );
+
+                if ( !adapter.getSupportedNamespaceTypes().contains( NamespaceType.DOCUMENT ) ) {
+                    continue;
+                }
+
+                // Get list of documents on this adapter
+                Map<String, Set<Long>> documentIdsPerSchema = new HashMap<>();
+                for ( CatalogCollectionPlacement placement : Catalog.getInstance().getCollectionPlacementsByAdapter( catalogAdapter.id ) ) {
+                    documentIdsPerSchema.putIfAbsent( placement.physicalNamespaceName, new HashSet<>() );
+                    documentIdsPerSchema.get( placement.physicalNamespaceName ).add( placement.collectionId );
+                }
+
+                for ( String physicalSchemaName : documentIdsPerSchema.keySet() ) {
+                    Set<Long> collectionIds = documentIdsPerSchema.get( physicalSchemaName );
+
+                    //adapter.createNewSchema( rootSchema, schemaName, catalogSchema.id );
+
+                    for ( long collectionId : collectionIds ) {
+                        CatalogCollection catalogCollection = catalog.getCollection( collectionId );
+
+                        for ( CatalogCollectionPlacement partitionPlacement : catalogCollection.placements.stream().map( p -> catalog.getCollectionPlacement( collectionId, adapter.getAdapterId() ) ).collect( Collectors.toList() ) ) {
+                            if ( catalogSchema.namespaceType != NamespaceType.DOCUMENT && catalogAdapter.getSupportedNamespaces().contains( catalogSchema.namespaceType ) ) {
+                                continue;
+                            }
+
+                            //Entity entity = adapter.createDocumentSchema( catalogCollection, partitionPlacement );
+                            placements.put( new Triple<>( catalogSchema.id, (long) catalogAdapter.id, catalogCollection.id ), partitionPlacement );
+                        }
+                    }
+                }
+            }
+        }
+        return placements;
+    }
+
+
+    private Map<Triple<Long, Long, Long>, CatalogEntityPlacement> buildPhysicalTables( Catalog catalog, CatalogDatabase catalogDatabase, List<CatalogAdapter> adapters ) {
+        Map<Triple<Long, Long, Long>, CatalogEntityPlacement> placements = new HashMap<>();
+        // Build adapter schema (physical schema) RELATIONAL
+        for ( CatalogSchema catalogSchema : new ArrayList<>( catalog.getSchemas( catalogDatabase.id, null ) ) ) {
+            for ( CatalogAdapter catalogAdapter : adapters ) {
+                // Get list of tables on this adapter
+                Map<Long, Set<Long>> tableIdsPerSchema = new HashMap<>();
+                for ( CatalogColumnPlacement placement : Catalog.getInstance().getColumnPlacementsOnAdapterAndSchema( catalogAdapter.id, catalogSchema.id ) ) {
+                    tableIdsPerSchema.putIfAbsent( placement.namespaceId, new HashSet<>() );
+                    tableIdsPerSchema.get( placement.namespaceId ).add( placement.tableId );
+                }
+
+                for ( Long namespaceId : tableIdsPerSchema.keySet() ) {
+                    Set<Long> tableIds = tableIdsPerSchema.get( namespaceId );
+                    //adapter.createNewSchema( rootSchema, schemaName, catalogSchema.id );
+                    for ( long tableId : tableIds ) {
+                        List<CatalogPartitionPlacement> partitionPlacements = catalog.getPartitionPlacementsByTableOnAdapter( catalogAdapter.id, tableId );
+
+                        for ( CatalogPartitionPlacement partitionPlacement : partitionPlacements ) {
+                            if ( catalogSchema.namespaceType != NamespaceType.RELATIONAL && catalogAdapter.getSupportedNamespaces().contains( catalogSchema.namespaceType ) ) {
+                                continue;
+                            }
+
+                            /*
+                            Entity entity = adapter.createTableSchema(
+                                    catalogTable,
+                                    Catalog.getInstance().getColumnPlacementsOnAdapterSortedByPhysicalPosition( adapter.getAdapterId(), catalogTable.id ),
+                                    partitionPlacement );
+
+                            */
+                            placements.put( new Triple<>( catalogSchema.id, (long) catalogAdapter.id, partitionPlacement.tableId ), partitionPlacement );
+                        }
+                    }
+                }
+            }
+        }
+
+        return placements;
+    }
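+
+
+    /*
+     * Illustrative note, not part of the original patch: the snapshot built above keys
+     * logical entities by Pair.of( namespaceId, entityId ) and physical placements by
+     * new Triple<>( namespaceId, adapterId, entityId ). Assuming SimplePolyphenyDbSchema
+     * exposes these maps, a placement lookup reduces to a plain map access:
+     *
+     *   CatalogEntityPlacement placement = physicalRelational.get(
+     *           new Triple<>( catalogSchema.id, (long) catalogAdapter.id, catalogTable.id ) );
+     *
+     * This works because Pair and Triple provide value-based equals()/hashCode()
+     * (Triple via Lombok's @Value).
+     */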
+
+
+    private void buildView( Map<String, LogicalEntity> tableMap, SchemaPlus s, CatalogTable catalogTable, List<String> columnNames, Builder fieldInfo, List<Long> columnIds ) {
+        LogicalRelView view = new LogicalRelView(
+                catalogTable.id,
+                catalogTable.getNamespaceName(),
+                catalogTable.name,
+                columnIds,
+                columnNames,
+                AlgDataTypeImpl.proto( fieldInfo.build() ) );
+        s.add( catalogTable.name, view );
+        tableMap.put( catalogTable.name, view );
+    }
+
+
+    private void buildEntity( Catalog catalog, CatalogSchema catalogSchema, Map<String, LogicalEntity> tableMap, SchemaPlus s, CatalogTable catalogTable, List<String> columnNames, AlgDataType rowType, List<Long> columnIds ) {
+        LogicalEntity table;
+        if ( catalogSchema.namespaceType == NamespaceType.RELATIONAL ) {
+            table = new LogicalEntity(
+                    catalogTable.id,
+                    catalogTable.getNamespaceName(),
+                    catalogTable.name,
+                    columnIds,
+                    columnNames,
+                    AlgDataTypeImpl.proto( rowType ),
+                    catalogSchema.namespaceType );
+            if ( RuntimeConfig.FOREIGN_KEY_ENFORCEMENT.getBoolean() ) {
+                table.getConstraintIds()
+                        .addAll( catalog.getForeignKeys( catalogTable.id ).stream()
+                                .filter( f -> f.enforcementTime == EnforcementTime.ON_COMMIT )
+                                .map( f -> f.referencedKeyTableId )
+                                .collect( Collectors.toList() ) );
+                table.getConstraintIds()
+                        .addAll( catalog.getExportedKeys( catalogTable.id ).stream()
+                                .filter( f -> f.enforcementTime == EnforcementTime.ON_COMMIT )
+                                .map( f -> f.referencedKeyTableId )
+                                .collect( Collectors.toList() ) );
+            }
+        } else if ( catalogSchema.namespaceType == NamespaceType.DOCUMENT ) {
+            table = new LogicalCollection(
+                    catalogTable.id,
+                    catalogTable.getNamespaceName(),
+                    catalogTable.name,
+                    AlgDataTypeImpl.proto( rowType )
+            );
+        } else {
+            throw new RuntimeException( "Model is not supported" );
+        }
+
+        s.add( catalogTable.name, table );
+        tableMap.put( catalogTable.name, table );
+    }
+
+
+    public static String buildAdapterSchemaName( String storeName, String logicalSchema, String physicalSchema ) {
+        return storeName + "_" + logicalSchema + "_" + physicalSchema;
+    }
+
+
+    // Listens on changes to the catalog
+    @Override
+    public void propertyChange( PropertyChangeEvent evt ) {
+        // Catalog changed, flag as outdated
+        isOutdated = true;
+    }
+
+
+    /**
+     * Schema that has no parents.
+     */
+    private static class RootSchema extends AbstractNamespace implements Schema {
+
+        RootSchema() {
+            super( -1L );
+        }
+
+
+        @Override
+        public Expression getExpression( SchemaPlus parentSchema, String name ) {
+            return Expressions.call( DataContext.ROOT, BuiltInMethod.DATA_CONTEXT_GET_ROOT_SCHEMA.method );
+        }
+
+    }
+
+
+}
diff --git a/core/src/main/java/org/polypheny/db/schema/PolyphenyDbSchema.java b/core/src/main/java/org/polypheny/db/schema/PolyphenyDbSchema.java
index 45da54ec2b..18ed052e16 100644
--- a/core/src/main/java/org/polypheny/db/schema/PolyphenyDbSchema.java
+++ b/core/src/main/java/org/polypheny/db/schema/PolyphenyDbSchema.java
@@ -16,229 +16,55 @@
 
 package org.polypheny.db.schema;
 
-import java.util.Collection;
 import java.util.List;
-import java.util.NavigableMap;
-import java.util.NavigableSet;
-import java.util.Objects;
 import org.apache.calcite.linq4j.tree.Expression;
 import org.apache.calcite.linq4j.tree.Expressions;
 import org.polypheny.db.adapter.DataContext;
-import org.polypheny.db.algebra.type.AlgProtoDataType;
-import org.polypheny.db.catalog.Catalog.NamespaceType;
+import org.polypheny.db.catalog.Catalog;
+import org.polypheny.db.catalog.Catalog.Pattern;
+import org.polypheny.db.catalog.entity.CatalogCollection;
+import org.polypheny.db.catalog.entity.CatalogGraphDatabase;
+import org.polypheny.db.catalog.entity.CatalogNamespace;
+import org.polypheny.db.catalog.entity.CatalogTable;
 import org.polypheny.db.schema.Namespace.Schema;
 import org.polypheny.db.schema.impl.AbstractNamespace;
 import org.polypheny.db.util.BuiltInMethod;
-import org.polypheny.db.util.NameMap;
 
 
 public interface PolyphenyDbSchema {
 
-    static PolyphenyDbSchema from( SchemaPlus plus ) {
-        return plus.polyphenyDbSchema();
-    }
-
-    void setCache( boolean cache );
-
-    TableEntry add( String tableName, Entity entity );
-
-    TypeEntry add( String name, AlgProtoDataType type );
-
-    PolyphenyDbSchema root();
-
-    boolean isRoot();
-
-    List<String> path( String name );
-
-    PolyphenyDbSchema getSubNamespace( String namespaceName, boolean caseSensitive );
-
-    /**
-     * Adds a child schema of this schema.
- */ - PolyphenyDbSchema add( String name, Namespace namespace, NamespaceType type ); - - TableEntry getTable( String tableName ); - - String getName(); - - PolyphenyDbSchema getParent(); - - Namespace getNamespace(); - - void setNamespace( Namespace namespace ); - - SchemaPlus plus(); - - List> getPath(); - - NavigableMap getSubSchemaMap(); - - NavigableSet getTableNames(); - - NavigableSet getTypeNames(); - - TypeEntry getType( String name, boolean caseSensitive ); - - Collection getFunctions( String name, boolean caseSensitive ); - - NavigableSet getFunctionNames(); - - TableEntry getTableBasedOnNullaryFunction( String tableName, boolean caseSensitive ); - - NameMap getTableMap(); - - - /** - * Entry in a schema, such as a table or sub-schema. - * - * Each object's name is a property of its membership in a schema; therefore in principle it could belong to several - * schemas, or even the same schema several times, with different names. In this respect, it is like an inode in a - * Unix file system. - * - * The members of a schema must have unique names. - */ - abstract class Entry { - - public final PolyphenyDbSchema schema; - public final String name; - - - public Entry( PolyphenyDbSchema schema, String name ) { - this.schema = Objects.requireNonNull( schema ); - this.name = Objects.requireNonNull( name ); - } - - - /** - * Returns this object's path. For example ["hr", "emps"]. - */ - public final List path() { - return schema.path( name ); + default CatalogTable getTable( List names ) { + switch ( names.size() ) { + case 3: + return Catalog.getInstance().getTables( Pattern.of( names.get( 0 ) ), Pattern.of( names.get( 1 ) ), Pattern.of( names.get( 2 ) ) ).get( 0 ); + case 2: + return Catalog.getInstance().getTables( Catalog.defaultDatabaseId, Pattern.of( names.get( 0 ) ), Pattern.of( names.get( 1 ) ) ).get( 0 ); + case 1: + return Catalog.getInstance().getTables( Catalog.defaultDatabaseId, null, Pattern.of( names.get( 0 ) ) ).get( 0 ); + default: + return null; } - } - - /** - * Membership of a table in a schema. - */ - abstract class TableEntry extends Entry { - - - public TableEntry( PolyphenyDbSchema schema, String name ) { - super( schema, name ); + default CatalogCollection getCollection( List names ) { + CatalogNamespace namespace; + switch ( names.size() ) { + case 3: + namespace = Catalog.getInstance().getSchemas( Pattern.of( names.get( 0 ) ), Pattern.of( names.get( 1 ) ) ).get( 0 ); + return Catalog.getInstance().getCollections( namespace.id, Pattern.of( names.get( 2 ) ) ).get( 0 ); + case 2: + namespace = Catalog.getInstance().getSchemas( Catalog.defaultDatabaseId, Pattern.of( names.get( 0 ) ) ).get( 0 ); + return Catalog.getInstance().getCollections( namespace.id, Pattern.of( names.get( 1 ) ) ).get( 0 ); + case 1: + // TODO add methods + namespace = Catalog.getInstance().getSchemas( Catalog.defaultDatabaseId, null ).get( 0 ); + return Catalog.getInstance().getCollections( namespace.id, Pattern.of( names.get( 0 ) ) ).get( 0 ); + default: + return null; } - - - public abstract Entity getTable(); - - } - - - /** - * Membership of a type in a schema. - */ - abstract class TypeEntry extends Entry { - - public TypeEntry( PolyphenyDbSchema schema, String name ) { - super( schema, name ); - } - - - public abstract AlgProtoDataType getType(); - - } - - - /** - * Membership of a function in a schema. 
- */ - abstract class FunctionEntry extends Entry { - - public FunctionEntry( PolyphenyDbSchema schema, String name ) { - super( schema, name ); - } - - - public abstract Function getFunction(); - - } - - - /** - * Implementation of {@link PolyphenyDbSchema.TableEntry} where all properties are held in fields. - */ - class TableEntryImpl extends TableEntry { - - private final Entity entity; - - - /** - * Creates a TableEntryImpl. - */ - public TableEntryImpl( PolyphenyDbSchema schema, String name, Entity entity ) { - super( schema, name ); - this.entity = Objects.requireNonNull( entity ); - } - - - @Override - public Entity getTable() { - return entity; - } - - } - - - /** - * Implementation of {@link TypeEntry} where all properties are held in fields. - */ - class TypeEntryImpl extends TypeEntry { - - private final AlgProtoDataType protoDataType; - - - /** - * Creates a TypeEntryImpl. - */ - public TypeEntryImpl( PolyphenyDbSchema schema, String name, AlgProtoDataType protoDataType ) { - super( schema, name ); - this.protoDataType = protoDataType; - } - - - @Override - public AlgProtoDataType getType() { - return protoDataType; - } - - } - - - /** - * Implementation of {@link FunctionEntry} where all properties are held in fields. - */ - class FunctionEntryImpl extends FunctionEntry { - - private final Function function; - - - /** - * Creates a FunctionEntryImpl. - */ - public FunctionEntryImpl( PolyphenyDbSchema schema, String name, Function function ) { - super( schema, name ); - this.function = function; - } - - - @Override - public Function getFunction() { - return function; - } - } + CatalogGraphDatabase getGraph( List names ); /** * Schema that has no parents. diff --git a/core/src/main/java/org/polypheny/db/schema/SimplePolyphenyDbSchema.java b/core/src/main/java/org/polypheny/db/schema/SimplePolyphenyDbSchema.java index 49e3b46ef5..c60be9e587 100644 --- a/core/src/main/java/org/polypheny/db/schema/SimplePolyphenyDbSchema.java +++ b/core/src/main/java/org/polypheny/db/schema/SimplePolyphenyDbSchema.java @@ -17,16 +17,11 @@ package org.polypheny.db.schema; -import com.google.common.collect.ImmutableList; -import com.google.common.collect.ImmutableSortedMap; -import com.google.common.collect.ImmutableSortedSet; -import java.util.Collection; -import java.util.List; -import org.polypheny.db.algebra.type.AlgProtoDataType; -import org.polypheny.db.catalog.Catalog.NamespaceType; -import org.polypheny.db.util.NameMap; -import org.polypheny.db.util.NameMultimap; -import org.polypheny.db.util.NameSet; +import java.util.Map; +import org.polypheny.db.catalog.entity.CatalogEntity; +import org.polypheny.db.catalog.entity.CatalogEntityPlacement; +import org.polypheny.db.util.Pair; +import org.polypheny.db.util.Triple; /** @@ -36,191 +31,26 @@ class SimplePolyphenyDbSchema extends AbstractPolyphenyDbSchema { /** * Creates a SimplePolyphenyDbSchema. - * - * Use {@link AbstractPolyphenyDbSchema#createRootSchema(String)} or {@link #add(String, Namespace, NamespaceType)}. 
*/ - SimplePolyphenyDbSchema( AbstractPolyphenyDbSchema parent, Namespace namespace, String name, NamespaceType namespaceType, boolean caseSensitive ) { - this( - parent, - namespace, - name, - namespaceType, - caseSensitive, - null, - null, - null, - null, - null, - null, - null ); - } - - - private SimplePolyphenyDbSchema( - AbstractPolyphenyDbSchema parent, - Namespace namespace, - String name, - NamespaceType namespaceType, - boolean caseSensitive, - NameMap subSchemaMap, - NameMap tableMap, - NameMap typeMap, - NameMultimap functionMap, - NameSet functionNames, - NameMap nullaryFunctionMap, - List> path ) { - super( parent, namespace, name, namespaceType, caseSensitive, subSchemaMap, tableMap, typeMap, functionMap, functionNames, nullaryFunctionMap, path ); - } - - - @Override - public void setCache( boolean cache ) { - throw new UnsupportedOperationException(); - } - - - @Override - public PolyphenyDbSchema add( String name, Namespace namespace, NamespaceType namespaceType ) { - final PolyphenyDbSchema polyphenyDbSchema = new SimplePolyphenyDbSchema( this, namespace, name, namespaceType, false ); - subSchemaMap.put( name, polyphenyDbSchema ); - return polyphenyDbSchema; - } - - - @Override - protected AbstractPolyphenyDbSchema getImplicitSubSchema( String schemaName, boolean caseSensitive ) { - // Check implicit schemas. - Namespace s = namespace.getSubNamespace( schemaName ); - if ( s != null ) { - return new SimplePolyphenyDbSchema( this, s, schemaName, namespaceType, false ); - } - return null; - } - - - @Override - protected TableEntry getImplicitTable( String tableName ) { - // Check implicit tables. - Entity entity = namespace.getEntity( tableName ); - if ( entity != null ) { - return tableEntry( tableName, entity ); - } - return null; - } - + public SimplePolyphenyDbSchema( + Map, CatalogEntity> logicalRelational, + Map, CatalogEntity> logicalDocument, + Map, CatalogEntity> logicalGraph, + Map, CatalogEntityPlacement> physicalRelational, + Map, CatalogEntityPlacement> physicalDocument, + Map, CatalogEntityPlacement> physicalGraph ) { + super( logicalRelational, logicalDocument, logicalGraph, physicalRelational, physicalDocument, physicalGraph ); - @Override - protected TypeEntry getImplicitType( String name, boolean caseSensitive ) { - // Check implicit types. - AlgProtoDataType type = namespace.getType( name ); - if ( type != null ) { - return typeEntry( name, type ); - } - return null; } @Override - protected void addImplicitSubSchemaToBuilder( ImmutableSortedMap.Builder builder ) { - ImmutableSortedMap explicitSubSchemas = builder.build(); - for ( String schemaName : namespace.getSubNamespaceNames() ) { - if ( explicitSubSchemas.containsKey( schemaName ) ) { - // explicit subschema wins. 
- continue; - } - Namespace s = namespace.getSubNamespace( schemaName ); - if ( s != null ) { - PolyphenyDbSchema polyphenyDbSchema = new SimplePolyphenyDbSchema( this, s, schemaName, namespaceType, false ); - builder.put( schemaName, polyphenyDbSchema ); - } - } - } - - - @Override - protected void addImplicitTableToBuilder( ImmutableSortedSet.Builder builder ) { - builder.addAll( namespace.getEntityNames() ); - } - - - @Override - protected void addImplicitFunctionsToBuilder( ImmutableList.Builder builder, String name, boolean caseSensitive ) { - Collection functions = namespace.getFunctions( name ); - if ( functions != null ) { - builder.addAll( functions ); - } - } - - - @Override - protected void addImplicitFuncNamesToBuilder( ImmutableSortedSet.Builder builder ) { - builder.addAll( namespace.getFunctionNames() ); - } - - - @Override - protected void addImplicitTypeNamesToBuilder( ImmutableSortedSet.Builder builder ) { - builder.addAll( namespace.getTypeNames() ); - } - - - @Override - protected void addImplicitTablesBasedOnNullaryFunctionsToBuilder( ImmutableSortedMap.Builder builder ) { - ImmutableSortedMap explicitTables = builder.build(); - - for ( String s : namespace.getFunctionNames() ) { - // explicit table wins. - if ( explicitTables.containsKey( s ) ) { - continue; - } - for ( Function function : namespace.getFunctions( s ) ) { - if ( function instanceof TableMacro && function.getParameters().isEmpty() ) { - final Entity entity = ((TableMacro) function).apply( ImmutableList.of() ); - builder.put( s, entity ); - } - } - } - } - + protected PolyphenyDbSchema snapshot( AbstractPolyphenyDbSchema parent, SchemaVersion version ) { - @Override - protected TableEntry getImplicitTableBasedOnNullaryFunction( String tableName, boolean caseSensitive ) { - Collection functions = namespace.getFunctions( tableName ); - if ( functions != null ) { - for ( Function function : functions ) { - if ( function instanceof TableMacro && function.getParameters().isEmpty() ) { - final Entity entity = ((TableMacro) function).apply( ImmutableList.of() ); - return tableEntry( tableName, entity ); - } - } - } return null; } - @Override - protected PolyphenyDbSchema snapshot( AbstractPolyphenyDbSchema parent, SchemaVersion version ) { - AbstractPolyphenyDbSchema snapshot = new SimplePolyphenyDbSchema( - parent, - namespace.snapshot( version ), - name, - namespaceType, - isCaseSensitive(), - null, - tableMap, - typeMap, - functionMap, - functionNames, - nullaryFunctionMap, - getPath() ); - for ( PolyphenyDbSchema subSchema : subSchemaMap.map().values() ) { - PolyphenyDbSchema subSchemaSnapshot = ((AbstractPolyphenyDbSchema) subSchema).snapshot( snapshot, version ); - snapshot.subSchemaMap.put( subSchema.getName(), subSchemaSnapshot ); - } - return snapshot; - } - - @Override protected boolean isCacheEnabled() { return false; diff --git a/core/src/main/java/org/polypheny/db/util/BuiltInMethod.java b/core/src/main/java/org/polypheny/db/util/BuiltInMethod.java index 02ad47c736..16b23c8266 100644 --- a/core/src/main/java/org/polypheny/db/util/BuiltInMethod.java +++ b/core/src/main/java/org/polypheny/db/util/BuiltInMethod.java @@ -162,7 +162,7 @@ public enum BuiltInMethod { INTO( ExtendedEnumerable.class, "into", Collection.class ), REMOVE_ALL( ExtendedEnumerable.class, "removeAll", Collection.class ), SCHEMA_GET_SUB_SCHEMA( Namespace.class, "getSubNamespace", String.class ), - SCHEMA_GET_TABLE( Namespace.class, "getEntity", String.class ), + SCHEMA_GET_TABLE( Namespace.class, "getTable", String.class ), 
 SCHEMA_PLUS_UNWRAP( SchemaPlus.class, "unwrap", Class.class ),
 SCHEMAS_ENUMERABLE_SCANNABLE( Schemas.class, "enumerable", ScannableEntity.class, DataContext.class ),
 SCHEMAS_ENUMERABLE_FILTERABLE( Schemas.class, "enumerable", FilterableEntity.class, DataContext.class ),
diff --git a/core/src/main/java/org/polypheny/db/util/Triple.java b/core/src/main/java/org/polypheny/db/util/Triple.java
new file mode 100644
index 0000000000..20f07bac34
--- /dev/null
+++ b/core/src/main/java/org/polypheny/db/util/Triple.java
@@ -0,0 +1,60 @@
+/*
+ * Copyright 2019-2023 The Polypheny Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.polypheny.db.util;
+
+import lombok.AllArgsConstructor;
+import lombok.EqualsAndHashCode;
+import lombok.Value;
+
+@Value
+@AllArgsConstructor
+@EqualsAndHashCode
+public class Triple<A, B, C> implements Comparable<Triple<A, B, C>> {
+
+    A left;
+    B middle;
+    C right;
+
+
+    @Override
+    public int compareTo( Triple<A, B, C> o ) {
+        //noinspection unchecked
+        int c = compare( (Comparable) this.left, (Comparable) o.left );
+        if ( c == 0 ) {
+            //noinspection unchecked
+            c = compare( (Comparable) this.middle, (Comparable) o.middle );
+        }
+        if ( c == 0 ) {
+            //noinspection unchecked
+            c = compare( (Comparable) this.right, (Comparable) o.right );
+        }
+        return c;
+    }
+
+
+    private static <C extends Comparable<C>> int compare( C c1, C c2 ) {
+        if ( c1 == null ) {
+            if ( c2 == null ) {
+                return 0;
+            } else {
+                return -1;
+            }
+        } else if ( c2 == null ) {
+            return 1;
+        } else {
+            return c1.compareTo( c2 );
+        }
+    }
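+
+
+    /*
+     * Usage sketch (illustrative, not part of the original patch): Triple is meant as a
+     * composite map key, e.g. for the physical placement maps in PolySchemaBuilder:
+     *
+     *   Map<Triple<Long, Long, Long>, CatalogEntityPlacement> placements = new HashMap<>();
+     *   placements.put( new Triple<>( namespaceId, adapterId, entityId ), placement );
+     *   CatalogEntityPlacement p = placements.get( new Triple<>( namespaceId, adapterId, entityId ) );
+     *
+     * The @Value annotation generates the value-based equals()/hashCode() this relies on.
+     */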
+
+
+}
diff --git a/core/src/test/java/org/polypheny/db/catalog/MockCatalog.java b/core/src/test/java/org/polypheny/db/catalog/MockCatalog.java
index 885ffae660..0bcae730f6 100644
--- a/core/src/test/java/org/polypheny/db/catalog/MockCatalog.java
+++ b/core/src/test/java/org/polypheny/db/catalog/MockCatalog.java
@@ -528,7 +528,7 @@ public long addCollection( Long id, String name, long schemaId, int currentUserI
 
 
     @Override
-    public long addCollectionPlacement( int adapterId, long collectionId, PlacementType placementType ) {
+    public long addCollectionPlacement( long namespaceId, int adapterId, long collectionId, PlacementType placementType ) {
         throw new NotImplementedException();
     }
 
@@ -1083,13 +1083,14 @@ public List<Long> getPartitionsOnDataPlacement( int adapterId, long tableId ) {
 
     /**
      * Adds a placement for a partition.
      *
+     * @param namespaceId The id of the namespace the partitioned table belongs to
      * @param adapterId The adapter on which the table should be placed on
      * @param placementType The type of placement
      * @param physicalSchemaName The schema name on the adapter
      * @param physicalTableName The table name on the adapter
      */
     @Override
-    public void addPartitionPlacement( int adapterId, long tableId, long partitionId, PlacementType placementType, String physicalSchemaName, String physicalTableName, DataPlacementRole role ) {
+    public void addPartitionPlacement( long namespaceId, int adapterId, long tableId, long partitionId, PlacementType placementType, String physicalSchemaName, String physicalTableName, DataPlacementRole role ) {
         throw new NotImplementedException();
     }
 
diff --git a/core/src/test/java/org/polypheny/db/catalog/MockCatalogReader.java b/core/src/test/java/org/polypheny/db/catalog/MockCatalogReader.java
index 769c82eef6..232df09b42 100644
--- a/core/src/test/java/org/polypheny/db/catalog/MockCatalogReader.java
+++ b/core/src/test/java/org/polypheny/db/catalog/MockCatalogReader.java
@@ -63,7 +63,6 @@
 import org.polypheny.db.algebra.type.AlgRecordType;
 import org.polypheny.db.algebra.type.DynamicRecordTypeImpl;
 import org.polypheny.db.algebra.type.StructKind;
-import org.polypheny.db.catalog.Catalog.NamespaceType;
 import org.polypheny.db.catalog.entity.CatalogEntity;
 import org.polypheny.db.catalog.entity.CatalogPartitionPlacement;
 import org.polypheny.db.nodes.Call;
@@ -84,7 +83,6 @@
 import org.polypheny.db.schema.StreamableEntity;
 import org.polypheny.db.schema.TableType;
 import org.polypheny.db.schema.Wrapper;
-import org.polypheny.db.schema.impl.AbstractNamespace;
 import org.polypheny.db.test.JdbcTest.AbstractModifiableEntity;
 import org.polypheny.db.util.AccessType;
 import org.polypheny.db.util.ImmutableBitSet;
@@ -116,7 +114,7 @@ public abstract class MockCatalogReader extends PolyphenyDbCatalogReader {
      */
     public MockCatalogReader( AlgDataTypeFactory typeFactory, boolean caseSensitive ) {
         super(
-                AbstractPolyphenyDbSchema.createRootSchema( DEFAULT_CATALOG ),
+                AbstractPolyphenyDbSchema.createRootSchema(),//DEFAULT_CATALOG ),
                 PREFIX,
                 typeFactory );
     }
@@ -165,7 +163,7 @@ protected void registerType( final List<String> names, final AlgProtoDataType al
         assert names.get( 0 ).equals( DEFAULT_CATALOG );
         final List<String> schemaPath = Util.skipLast( names );
         final PolyphenyDbSchema schema = ValidatorUtil.getSchema( rootSchema, schemaPath, NameMatchers.withCaseSensitive( true ) );
-        schema.add( Util.last( names ), algProtoDataType );
+        //schema.add( Util.last( names ), algProtoDataType );
     }
 
@@ -192,17 +190,17 @@ private void registerTable( final List<String> names, final Entity entity ) {
         final List<String> schemaPath = Util.skipLast( names );
         final String tableName = Util.last( names );
         final PolyphenyDbSchema schema = ValidatorUtil.getSchema( rootSchema, schemaPath, NameMatchers.withCaseSensitive( true ) );
-        schema.add( tableName, entity );
+        //schema.add( tableName, entity );
     }
 
 
     protected void registerSchema( MockSchema schema, long id ) {
-        rootSchema.add( schema.name, new AbstractNamespace( id ), NamespaceType.RELATIONAL );
+        //rootSchema.add( schema.name, new AbstractNamespace( id ), NamespaceType.RELATIONAL );
     }
 
 
     private void registerNestedSchema( MockSchema parentSchema, MockSchema schema, long id ) {
-        rootSchema.getSubNamespace( parentSchema.getName(), true ).add( schema.name, new AbstractNamespace( id ), NamespaceType.RELATIONAL );
+        //rootSchema.getSubNamespace( parentSchema.getName(), true ).add( schema.name, new AbstractNamespace( id ), NamespaceType.RELATIONAL );
     }
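+
+    /*
+     * Illustrative sketch, not part of the original patch: with the schema now derived
+     * from the catalog, mock registration no longer goes through SchemaPlus, so the
+     * register* bodies above are stubbed out. A catalog-backed variant would resolve
+     * entities directly, e.g. via the same lookup PolySchemaBuilder uses:
+     *
+     *   CatalogTable table = Catalog.getInstance().getTables( schemaId, null ).get( 0 );
+     */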
diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java index 46ebd34dc4..23d404aec6 100644 --- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java @@ -293,6 +293,7 @@ public void addAdapter( String uniqueName, String adapterName, AdapterType adapt catalog.addPrimaryKey( tableId, primaryKeyColIds ); CatalogTable catalogTable = catalog.getTable( tableId ); catalog.addPartitionPlacement( + catalogTable.namespaceId, adapter.getAdapterId(), catalogTable.id, catalogTable.partitionProperty.partitionIds.get( 0 ), @@ -896,7 +897,7 @@ else if ( partitionGroupIds.isEmpty() && partitionGroupNames.isEmpty() ) { // Need to create partitionPlacements first in order to trigger schema creation on PolySchemaBuilder for ( long partitionId : partitionIds ) { catalog.addPartitionPlacement( - dataStore.getAdapterId(), + catalogTable.namespaceId, dataStore.getAdapterId(), catalogTable.id, partitionId, PlacementType.AUTOMATIC, @@ -1459,7 +1460,7 @@ else if ( !partitionGroupNames.isEmpty() && partitionGroupIds.isEmpty() ) { if ( newPartitionIdsOnDataPlacement.size() > 0 ) { newPartitionIdsOnDataPlacement.forEach( partitionId -> catalog.addPartitionPlacement( - storeInstance.getAdapterId(), + catalogTable.namespaceId, storeInstance.getAdapterId(), catalogTable.id, partitionId, PlacementType.MANUAL, @@ -1513,7 +1514,7 @@ public void modifyPartitionPlacement( CatalogTable catalogTable, List part // Need to create partitionPlacements first in order to trigger schema creation on PolySchemaBuilder for ( long partitionId : newPartitions ) { catalog.addPartitionPlacement( - storeInstance.getAdapterId(), + catalogTable.namespaceId, storeInstance.getAdapterId(), catalogTable.id, partitionId, PlacementType.AUTOMATIC, @@ -1858,6 +1859,7 @@ public void createMaterializedView( String viewName, long schemaId, AlgRoot algR for ( DataStore store : stores ) { catalog.addPartitionPlacement( + catalogMaterializedView.namespaceId, store.getAdapterId(), tableId, catalogMaterializedView.partitionProperty.partitionIds.get( 0 ), @@ -1989,6 +1991,7 @@ private void afterGraphPlacementAddLogistics( DataStore store, long graphId ) { catalog.addDataPlacement( store.getAdapterId(), mapping.edgesPropertyId ); catalog.addPartitionPlacement( + nodes.namespaceId, store.getAdapterId(), nodes.id, nodes.partitionProperty.partitionIds.get( 0 ), @@ -1999,6 +2002,7 @@ private void afterGraphPlacementAddLogistics( DataStore store, long graphId ) { ); catalog.addPartitionPlacement( + nodeProperty.namespaceId, store.getAdapterId(), nodeProperty.id, nodeProperty.partitionProperty.partitionIds.get( 0 ), @@ -2009,6 +2013,7 @@ private void afterGraphPlacementAddLogistics( DataStore store, long graphId ) { ); catalog.addPartitionPlacement( + edges.namespaceId, store.getAdapterId(), edges.id, edges.partitionProperty.partitionIds.get( 0 ), @@ -2019,6 +2024,7 @@ private void afterGraphPlacementAddLogistics( DataStore store, long graphId ) { ); catalog.addPartitionPlacement( + edgeProperty.namespaceId, store.getAdapterId(), edgeProperty.id, edgeProperty.partitionProperty.partitionIds.get( 0 ), @@ -2226,7 +2232,7 @@ public void createTable( long schemaId, String name, List fiel for ( DataStore store : stores ) { catalog.addPartitionPlacement( - store.getAdapterId(), + catalogTable.namespaceId, store.getAdapterId(), catalogTable.id, catalogTable.partitionProperty.partitionIds.get( 0 ), PlacementType.AUTOMATIC, @@ 
-2281,6 +2287,7 @@ public void createCollection( long schemaId, String name, boolean ifNotExists, L for ( DataStore store : stores ) { catalog.addCollectionPlacement( + catalogCollection.namespaceId, store.getAdapterId(), catalogCollection.id, PlacementType.AUTOMATIC ); @@ -2345,7 +2352,7 @@ public void addCollectionPlacement( long namespaceId, String name, List for ( DataStore store : stores ) { for ( long partitionId : partitionIds ) { catalog.addPartitionPlacement( + partitionedTable.namespaceId, store.getAdapterId(), partitionedTable.id, partitionId, @@ -2773,6 +2782,7 @@ public void removePartitioning( CatalogTable partitionedTable, Statement stateme for ( DataStore store : stores ) { // Need to create partitionPlacements first in order to trigger schema creation on PolySchemaBuilder catalog.addPartitionPlacement( + mergedTable.namespaceId, store.getAdapterId(), mergedTable.id, mergedTable.partitionProperty.partitionIds.get( 0 ), diff --git a/dbms/src/main/java/org/polypheny/db/partition/FrequencyMapImpl.java b/dbms/src/main/java/org/polypheny/db/partition/FrequencyMapImpl.java index 89a1d4e249..aa34000480 100644 --- a/dbms/src/main/java/org/polypheny/db/partition/FrequencyMapImpl.java +++ b/dbms/src/main/java/org/polypheny/db/partition/FrequencyMapImpl.java @@ -294,7 +294,7 @@ private void redistributePartitions( CatalogTable table, List partitionsFr for ( long partitionId : hotPartitionsToCreate ) { catalog.addPartitionPlacement( - store.getAdapterId(), + catalogTable.namespaceId, store.getAdapterId(), table.id, partitionId, PlacementType.AUTOMATIC, @@ -343,7 +343,7 @@ private void redistributePartitions( CatalogTable table, List partitionsFr for ( long partitionId : coldPartitionsToCreate ) { catalog.addPartitionPlacement( - store.getAdapterId(), + catalogTable.namespaceId, store.getAdapterId(), table.id, partitionId, PlacementType.AUTOMATIC, diff --git a/dbms/src/main/java/org/polypheny/db/processing/AbstractQueryProcessor.java b/dbms/src/main/java/org/polypheny/db/processing/AbstractQueryProcessor.java index 392056b45e..7208655cd1 100644 --- a/dbms/src/main/java/org/polypheny/db/processing/AbstractQueryProcessor.java +++ b/dbms/src/main/java/org/polypheny/db/processing/AbstractQueryProcessor.java @@ -768,7 +768,7 @@ public AlgNode visit( AlgNode node ) { // .collect( Collectors.toList() ); // } // final {@link AlgNode} replacement = LogicalModify.create( -// ltm.getEntity(), +// ltm.getTable(), // transaction.getCatalogReader(), // newProject, // ltm.getOperation(), diff --git a/dbms/src/main/java/org/polypheny/db/schema/PolySchemaBuilder.java b/dbms/src/main/java/org/polypheny/db/schema/PolySchemaBuilder.java deleted file mode 100644 index 69a20aadfd..0000000000 --- a/dbms/src/main/java/org/polypheny/db/schema/PolySchemaBuilder.java +++ /dev/null @@ -1,444 +0,0 @@ -/* - * Copyright 2019-2022 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.polypheny.db.schema; - - -import java.beans.PropertyChangeEvent; -import java.beans.PropertyChangeListener; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.HashSet; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.stream.Collectors; -import org.apache.calcite.linq4j.tree.Expression; -import org.apache.calcite.linq4j.tree.Expressions; -import org.polypheny.db.adapter.Adapter; -import org.polypheny.db.adapter.AdapterManager; -import org.polypheny.db.adapter.DataContext; -import org.polypheny.db.algebra.logical.lpg.LogicalGraph; -import org.polypheny.db.algebra.type.AlgDataType; -import org.polypheny.db.algebra.type.AlgDataTypeFactory; -import org.polypheny.db.algebra.type.AlgDataTypeFactory.Builder; -import org.polypheny.db.algebra.type.AlgDataTypeImpl; -import org.polypheny.db.algebra.type.AlgDataTypeSystem; -import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.Catalog.EntityType; -import org.polypheny.db.catalog.Catalog.NamespaceType; -import org.polypheny.db.catalog.entity.CatalogAdapter; -import org.polypheny.db.catalog.entity.CatalogCollection; -import org.polypheny.db.catalog.entity.CatalogCollectionPlacement; -import org.polypheny.db.catalog.entity.CatalogColumn; -import org.polypheny.db.catalog.entity.CatalogColumnPlacement; -import org.polypheny.db.catalog.entity.CatalogDatabase; -import org.polypheny.db.catalog.entity.CatalogGraphDatabase; -import org.polypheny.db.catalog.entity.CatalogGraphPlacement; -import org.polypheny.db.catalog.entity.CatalogKey.EnforcementTime; -import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; -import org.polypheny.db.catalog.entity.CatalogSchema; -import org.polypheny.db.catalog.entity.CatalogTable; -import org.polypheny.db.config.RuntimeConfig; -import org.polypheny.db.schema.Namespace.Schema; -import org.polypheny.db.schema.impl.AbstractNamespace; -import org.polypheny.db.type.PolyType; -import org.polypheny.db.type.PolyTypeFactoryImpl; -import org.polypheny.db.util.BuiltInMethod; - - -public class PolySchemaBuilder implements PropertyChangeListener { - - private final static PolySchemaBuilder INSTANCE = new PolySchemaBuilder(); - - private AbstractPolyphenyDbSchema current; - private boolean isOutdated = true; - - - private PolySchemaBuilder() { - Catalog.getInstance().addObserver( this ); - } - - - public static PolySchemaBuilder getInstance() { - return INSTANCE; - } - - - public AbstractPolyphenyDbSchema getCurrent() { - if ( !RuntimeConfig.SCHEMA_CACHING.getBoolean() ) { - return buildSchema(); - } - if ( current == null || isOutdated ) { - current = buildSchema(); - } - return current; - } - - - private synchronized AbstractPolyphenyDbSchema buildSchema() { - final Namespace namespace = new RootSchema(); - final AbstractPolyphenyDbSchema polyphenyDbSchema = new SimplePolyphenyDbSchema( null, namespace, "", NamespaceType.RELATIONAL, false ); - - SchemaPlus rootSchema = polyphenyDbSchema.plus(); - Catalog catalog = Catalog.getInstance(); - - CatalogDatabase catalogDatabase = catalog.getDatabase( 1 ); - - // Build logical namespaces - buildRelationalLogical( polyphenyDbSchema, rootSchema, catalog, catalogDatabase ); - - buildDocumentLogical( polyphenyDbSchema, rootSchema, catalog, catalogDatabase ); - - buildGraphLogical( polyphenyDbSchema, rootSchema, catalog, catalogDatabase ); - - // Build mapping structures - - // Build physical namespaces - List adapters = Catalog.getInstance().getAdapters(); - - 
buildPhysicalTables( polyphenyDbSchema, rootSchema, catalog, catalogDatabase, adapters ); - - buildPhysicalDocuments( polyphenyDbSchema, rootSchema, catalog, catalogDatabase, adapters ); - - buildPhysicalGraphs( polyphenyDbSchema, rootSchema, catalog, catalogDatabase ); - - isOutdated = false; - return polyphenyDbSchema; - } - - - private void buildGraphLogical( AbstractPolyphenyDbSchema polyphenyDbSchema, SchemaPlus rootSchema, Catalog catalog, CatalogDatabase catalogDatabase ) { - for ( CatalogGraphDatabase graph : catalog.getGraphs( catalogDatabase.id, null ) ) { - SchemaPlus s = new SimplePolyphenyDbSchema( polyphenyDbSchema, new AbstractNamespace( graph.id ), graph.name, NamespaceType.GRAPH, graph.caseSensitive ).plus(); - - rootSchema.add( graph.name, s, NamespaceType.GRAPH ); - s.polyphenyDbSchema().setNamespace( new LogicalGraph( graph.id ) ); - } - } - - - private void buildRelationalLogical( AbstractPolyphenyDbSchema polyphenyDbSchema, SchemaPlus rootSchema, Catalog catalog, CatalogDatabase catalogDatabase ) { - for ( CatalogSchema catalogSchema : catalog.getSchemas( catalogDatabase.id, null ) ) { - if ( catalogSchema.namespaceType != NamespaceType.RELATIONAL ) { - continue; - } - Map tableMap = new HashMap<>(); - SchemaPlus s = new SimplePolyphenyDbSchema( polyphenyDbSchema, new AbstractNamespace( catalogSchema.id ), catalogSchema.name, catalogSchema.namespaceType, catalogSchema.caseSensitive ).plus(); - for ( CatalogTable catalogTable : catalog.getTables( catalogSchema.id, null ) ) { - List columnNames = new LinkedList<>(); - - AlgDataType rowType; - final AlgDataTypeFactory typeFactory = new PolyTypeFactoryImpl( AlgDataTypeSystem.DEFAULT ); - - final Builder fieldInfo = typeFactory.builder(); - - for ( CatalogColumn catalogColumn : catalog.getColumns( catalogTable.id ) ) { - columnNames.add( catalogColumn.name ); - fieldInfo.add( catalogColumn.name, null, catalogColumn.getAlgDataType( typeFactory ) ); - fieldInfo.nullable( catalogColumn.nullable ); - } - rowType = fieldInfo.build(); - - List columnIds = new LinkedList<>(); - catalog.getColumns( catalogTable.id ).forEach( c -> columnIds.add( c.id ) ); - if ( catalogTable.entityType == EntityType.VIEW ) { - buildView( tableMap, s, catalogTable, columnNames, fieldInfo, columnIds ); - } else if ( catalogTable.entityType == EntityType.ENTITY || catalogTable.entityType == EntityType.SOURCE || catalogTable.entityType == EntityType.MATERIALIZED_VIEW ) { - buildEntity( catalog, catalogSchema, tableMap, s, catalogTable, columnNames, rowType, columnIds ); - } else { - throw new RuntimeException( "Unhandled table type: " + catalogTable.entityType.name() ); - } - } - - rootSchema.add( catalogSchema.name, s, catalogSchema.namespaceType ); - tableMap.forEach( rootSchema.getSubNamespace( catalogSchema.name )::add ); - if ( catalogDatabase.defaultNamespaceId != null && catalogSchema.id == catalogDatabase.defaultNamespaceId ) { - tableMap.forEach( rootSchema::add ); - } - s.polyphenyDbSchema().setNamespace( new LogicalSchema( catalogSchema.id, catalogSchema.name, tableMap, new HashMap<>() ) ); - } - } - - - private void buildDocumentLogical( AbstractPolyphenyDbSchema polyphenyDbSchema, SchemaPlus rootSchema, Catalog catalog, CatalogDatabase catalogDatabase ) { - for ( CatalogSchema catalogSchema : catalog.getSchemas( catalogDatabase.id, null ) ) { - if ( catalogSchema.namespaceType != NamespaceType.DOCUMENT ) { - continue; - } - Map collectionMap = new HashMap<>(); - SchemaPlus s = new SimplePolyphenyDbSchema( polyphenyDbSchema, new 
AbstractNamespace( catalogSchema.id ), catalogSchema.name, catalogSchema.namespaceType, catalogSchema.caseSensitive ).plus(); - for ( CatalogCollection catalogEntity : catalog.getCollections( catalogSchema.id, null ) ) { - List columnNames = new LinkedList<>(); - - final AlgDataTypeFactory typeFactory = new PolyTypeFactoryImpl( AlgDataTypeSystem.DEFAULT ); - - final Builder fieldInfo = typeFactory.builder(); - - columnNames.add( "d" ); - fieldInfo.add( "d", null, typeFactory.createPolyType( PolyType.DOCUMENT ) ); - fieldInfo.nullable( false ); - - List columnIds = new LinkedList<>(); - catalog.getColumns( catalogEntity.id ).forEach( c -> columnIds.add( c.id ) ); - LogicalEntity entity; - if ( catalogEntity.entityType == EntityType.VIEW ) { - entity = new LogicalRelView( - catalogEntity.id, - catalogEntity.getNamespaceName(), - catalogEntity.name, - columnIds, - columnNames, - AlgDataTypeImpl.proto( fieldInfo.build() ) ); - - } else if ( catalogEntity.entityType == EntityType.ENTITY || catalogEntity.entityType == EntityType.SOURCE || catalogEntity.entityType == EntityType.MATERIALIZED_VIEW ) { - entity = new LogicalCollection( - catalogEntity.id, - catalogEntity.getNamespaceName(), - catalogEntity.name, - AlgDataTypeImpl.proto( fieldInfo.build() ) ); - } else { - throw new RuntimeException( "Unhandled table type: " + catalogEntity.entityType.name() ); - } - - s.add( catalogEntity.name, entity ); - collectionMap.put( catalogEntity.name, entity ); - } - - rootSchema.add( catalogSchema.name, s, catalogSchema.namespaceType ); - collectionMap.forEach( rootSchema.getSubNamespace( catalogSchema.name )::add ); - if ( catalogDatabase.defaultNamespaceId != null && catalogSchema.id == catalogDatabase.defaultNamespaceId ) { - collectionMap.forEach( rootSchema::add ); - } - PolyphenyDbSchema schema = s.polyphenyDbSchema().getSubNamespace( catalogSchema.name, catalogSchema.caseSensitive ); - if ( schema != null ) { - LogicalSchema logicalSchema = new LogicalSchema( catalogSchema.id, catalogSchema.name, ((LogicalSchema) schema.getNamespace()).getTableMap(), collectionMap ); - s.polyphenyDbSchema().setNamespace( logicalSchema ); - } else { - s.polyphenyDbSchema().setNamespace( new LogicalSchema( catalogSchema.id, catalogSchema.name, new HashMap<>(), collectionMap ) ); - } - - } - } - - - private void buildPhysicalGraphs( AbstractPolyphenyDbSchema polyphenyDbSchema, SchemaPlus rootSchema, Catalog catalog, CatalogDatabase catalogDatabase ) { - // Build adapter schema (physical schema) GRAPH - for ( CatalogGraphDatabase graph : catalog.getGraphs( catalogDatabase.id, null ) ) { - for ( int adapterId : graph.placements ) { - - CatalogGraphPlacement placement = catalog.getGraphPlacement( graph.id, adapterId ); - Adapter adapter = AdapterManager.getInstance().getAdapter( adapterId ); - - if ( !adapter.getSupportedNamespaceTypes().contains( NamespaceType.GRAPH ) ) { - continue; - } - - final String schemaName = buildAdapterSchemaName( adapter.getUniqueName(), graph.name, placement.physicalName ); - - adapter.createGraphNamespace( rootSchema, schemaName, graph.id ); - SchemaPlus s = new SimplePolyphenyDbSchema( polyphenyDbSchema, adapter.getCurrentGraphNamespace(), schemaName, NamespaceType.GRAPH, graph.caseSensitive ).plus(); - rootSchema.add( schemaName, s, NamespaceType.GRAPH ); - - rootSchema.getSubNamespace( schemaName ).polyphenyDbSchema().setNamespace( adapter.getCurrentGraphNamespace() ); - } - } - } - - - private void buildPhysicalDocuments( AbstractPolyphenyDbSchema polyphenyDbSchema, SchemaPlus 
rootSchema, Catalog catalog, CatalogDatabase catalogDatabase, List adapters ) { - // Build adapter schema (physical schema) DOCUMENT - for ( CatalogSchema catalogSchema : catalog.getSchemas( catalogDatabase.id, null ).stream().filter( s -> s.namespaceType == NamespaceType.DOCUMENT ).collect( Collectors.toList() ) ) { - for ( CatalogAdapter catalogAdapter : adapters ) { - - Adapter adapter = AdapterManager.getInstance().getAdapter( catalogAdapter.id ); - - if ( !adapter.getSupportedNamespaceTypes().contains( NamespaceType.DOCUMENT ) ) { - continue; - } - - // Get list of documents on this adapter - Map> documentIdsPerSchema = new HashMap<>(); - for ( CatalogCollectionPlacement placement : Catalog.getInstance().getCollectionPlacementsByAdapter( catalogAdapter.id ) ) { - documentIdsPerSchema.putIfAbsent( placement.physicalNamespaceName, new HashSet<>() ); - documentIdsPerSchema.get( placement.physicalNamespaceName ).add( placement.collectionId ); - } - - for ( String physicalSchemaName : documentIdsPerSchema.keySet() ) { - Set collectionIds = documentIdsPerSchema.get( physicalSchemaName ); - - HashMap physicalTables = new HashMap<>(); - - final String schemaName = buildAdapterSchemaName( catalogAdapter.uniqueName, catalogSchema.name, physicalSchemaName ); - - adapter.createNewSchema( rootSchema, schemaName, catalogSchema.id ); - SchemaPlus s = new SimplePolyphenyDbSchema( polyphenyDbSchema, adapter.getCurrentSchema(), schemaName, catalogSchema.namespaceType, catalogSchema.caseSensitive ).plus(); - for ( long collectionId : collectionIds ) { - CatalogCollection catalogCollection = catalog.getCollection( collectionId ); - - for ( CatalogCollectionPlacement partitionPlacement : catalogCollection.placements.stream().map( p -> Catalog.getInstance().getCollectionPlacement( collectionId, adapter.getAdapterId() ) ).collect( Collectors.toList() ) ) { - if ( catalogSchema.namespaceType != NamespaceType.DOCUMENT && catalogAdapter.getSupportedNamespaces().contains( catalogSchema.namespaceType ) ) { - continue; - } - - Entity entity = adapter.createDocumentSchema( catalogCollection, partitionPlacement ); - - physicalTables.put( catalog.getCollection( collectionId ).name + "_" + partitionPlacement.id, entity ); - - rootSchema.add( schemaName, s, catalogSchema.namespaceType ); - physicalTables.forEach( rootSchema.getSubNamespace( schemaName )::add ); - rootSchema.getSubNamespace( schemaName ).polyphenyDbSchema().setNamespace( adapter.getCurrentSchema() ); - } - } - } - } - } - } - - - private void buildPhysicalTables( AbstractPolyphenyDbSchema polyphenyDbSchema, SchemaPlus rootSchema, Catalog catalog, CatalogDatabase catalogDatabase, List adapters ) { - // Build adapter schema (physical schema) RELATIONAL - for ( CatalogSchema catalogSchema : new ArrayList<>( catalog.getSchemas( catalogDatabase.id, null ) ) ) { - for ( CatalogAdapter catalogAdapter : adapters ) { - // Get list of tables on this adapter - Map> tableIdsPerSchema = new HashMap<>(); - for ( CatalogColumnPlacement placement : Catalog.getInstance().getColumnPlacementsOnAdapterAndSchema( catalogAdapter.id, catalogSchema.id ) ) { - tableIdsPerSchema.putIfAbsent( placement.physicalSchemaName, new HashSet<>() ); - tableIdsPerSchema.get( placement.physicalSchemaName ).add( placement.tableId ); - } - - for ( String physicalSchemaName : tableIdsPerSchema.keySet() ) { - Set tableIds = tableIdsPerSchema.get( physicalSchemaName ); - - HashMap physicalTables = new HashMap<>(); - Adapter adapter = AdapterManager.getInstance().getAdapter( catalogAdapter.id ); 
-
-                    final String schemaName = buildAdapterSchemaName( catalogAdapter.uniqueName, catalogSchema.name, physicalSchemaName );
-
-                    adapter.createNewSchema( rootSchema, schemaName, catalogSchema.id );
-                    SchemaPlus s = new SimplePolyphenyDbSchema( polyphenyDbSchema, adapter.getCurrentSchema(), schemaName, catalogSchema.namespaceType, catalogSchema.caseSensitive ).plus();
-                    for ( long tableId : tableIds ) {
-                        CatalogTable catalogTable = catalog.getTable( tableId );
-
-                        List partitionPlacements = catalog.getPartitionPlacementsByTableOnAdapter( adapter.getAdapterId(), tableId );
-
-                        for ( CatalogPartitionPlacement partitionPlacement : partitionPlacements ) {
-                            if ( catalogSchema.namespaceType != NamespaceType.RELATIONAL && catalogAdapter.getSupportedNamespaces().contains( catalogSchema.namespaceType ) ) {
-                                continue;
-                            }
-
-                            Entity entity = adapter.createTableSchema(
-                                    catalogTable,
-                                    Catalog.getInstance().getColumnPlacementsOnAdapterSortedByPhysicalPosition( adapter.getAdapterId(), catalogTable.id ),
-                                    partitionPlacement );
-
-                            physicalTables.put( catalogTable.name + "_" + partitionPlacement.partitionId, entity );
-
-                            rootSchema.add( schemaName, s, catalogSchema.namespaceType );
-                            physicalTables.forEach( rootSchema.getSubNamespace( schemaName )::add );
-                            rootSchema.getSubNamespace( schemaName ).polyphenyDbSchema().setNamespace( adapter.getCurrentSchema() );
-                        }
-                    }
-                }
-            }
-        }
-    }
-
-
-    private void buildView( Map tableMap, SchemaPlus s, CatalogTable catalogTable, List columnNames, Builder fieldInfo, List columnIds ) {
-        LogicalRelView view = new LogicalRelView(
-                catalogTable.id,
-                catalogTable.getNamespaceName(),
-                catalogTable.name,
-                columnIds,
-                columnNames,
-                AlgDataTypeImpl.proto( fieldInfo.build() ) );
-        s.add( catalogTable.name, view );
-        tableMap.put( catalogTable.name, view );
-    }
-
-
-    private void buildEntity( Catalog catalog, CatalogSchema catalogSchema, Map tableMap, SchemaPlus s, CatalogTable catalogTable, List columnNames, AlgDataType rowType, List columnIds ) {
-        LogicalEntity table;
-        if ( catalogSchema.namespaceType == NamespaceType.RELATIONAL ) {
-            table = new LogicalEntity(
-                    catalogTable.id,
-                    catalogTable.getNamespaceName(),
-                    catalogTable.name,
-                    columnIds,
-                    columnNames,
-                    AlgDataTypeImpl.proto( rowType ),
-                    catalogSchema.namespaceType );
-            if ( RuntimeConfig.FOREIGN_KEY_ENFORCEMENT.getBoolean() ) {
-                table.getConstraintIds()
-                        .addAll( catalog.getForeignKeys( catalogTable.id ).stream()
-                                .filter( f -> f.enforcementTime == EnforcementTime.ON_COMMIT )
-                                .map( f -> f.referencedKeyTableId )
-                                .collect( Collectors.toList() ) );
-                table.getConstraintIds()
-                        .addAll( catalog.getExportedKeys( catalogTable.id ).stream()
-                                .filter( f -> f.enforcementTime == EnforcementTime.ON_COMMIT )
-                                .map( f -> f.referencedKeyTableId )
-                                .collect( Collectors.toList() ) );
-            }
-        } else if ( catalogSchema.namespaceType == NamespaceType.DOCUMENT ) {
-            table = new LogicalCollection(
-                    catalogTable.id,
-                    catalogTable.getNamespaceName(),
-                    catalogTable.name,
-                    AlgDataTypeImpl.proto( rowType )
-            );
-        } else {
-            throw new RuntimeException( "Model is not supported" );
-        }
-
-        s.add( catalogTable.name, table );
-        tableMap.put( catalogTable.name, table );
-    }
-
-
-    public static String buildAdapterSchemaName( String storeName, String logicalSchema, String physicalSchema ) {
-        return storeName + "_" + logicalSchema + "_" + physicalSchema;
-    }
-
-
-    // Listens on changes to the catalog
-    @Override
-    public void propertyChange( PropertyChangeEvent evt ) {
-        // Catalog changed, flag as outdated
-        isOutdated = true;
-    }
-
-
-    /**
-     * Schema that has no parents.
-     */
-    private static class RootSchema extends AbstractNamespace implements Schema {
-
-        RootSchema() {
-            super( -1L );
-        }
-
-
-        @Override
-        public Expression getExpression( SchemaPlus parentSchema, String name ) {
-            return Expressions.call( DataContext.ROOT, BuiltInMethod.DATA_CONTEXT_GET_ROOT_SCHEMA.method );
-        }
-
-    }
-
-}
diff --git a/dbms/src/main/java/org/polypheny/db/view/MaterializedViewManagerImpl.java b/dbms/src/main/java/org/polypheny/db/view/MaterializedViewManagerImpl.java
index 9e16ab613b..d4fe0f0f03 100644
--- a/dbms/src/main/java/org/polypheny/db/view/MaterializedViewManagerImpl.java
+++ b/dbms/src/main/java/org/polypheny/db/view/MaterializedViewManagerImpl.java
@@ -184,7 +184,7 @@ public void addTables( Transaction transaction, List tableNames ) {
                     updateCandidates.put( transaction.getXid(), id );
                 }
             } catch ( UnknownTableException e ) {
-                throw new RuntimeException( "Not possible to getEntity to update which Tables were changed.", e );
+                throw new RuntimeException( "Not possible to getTable to update which Tables were changed.", e );
             }
         }
     }
diff --git a/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/algebra/CottontailScan.java b/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/algebra/CottontailScan.java
index fb2047313a..137bf97fda 100644
--- a/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/algebra/CottontailScan.java
+++ b/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/algebra/CottontailScan.java
@@ -57,7 +57,7 @@ public AlgOptCost computeSelfCost( AlgOptPlanner planner, AlgMetadataQuery mq )
 
     @Override
     public void implement( CottontailImplementContext context ) {
-//        context.from = From.newBuilder().setEntity( this.cottontailTable.getEntity() ).build();
+//        context.from = From.newBuilder().setEntity( this.cottontailTable.getTable() ).build();
         if ( context.queryType == null ) {
             context.cottontailTable = this.cottontailTable;
             context.schemaName = this.cottontailTable.getPhysicalSchemaName();
diff --git a/plugins/druid-adapter/src/test/java/org/polypheny/db/test/DruidAdapterIT.java b/plugins/druid-adapter/src/test/java/org/polypheny/db/test/DruidAdapterIT.java
index 87cbe22896..b87c9b1e1f 100644
--- a/plugins/druid-adapter/src/test/java/org/polypheny/db/test/DruidAdapterIT.java
+++ b/plugins/druid-adapter/src/test/java/org/polypheny/db/test/DruidAdapterIT.java
@@ -3117,7 +3117,7 @@ public class DruidAdapterIT {
 //    @Test
 //    public void testTableMapReused() {
 //        AbstractSchema schema = new DruidSchema( "http://localhost:8082", "http://localhost:8081", true );
-//        assertSame( schema.getEntity( "wikiticker" ), schema.getEntity( "wikiticker" ) );
+//        assertSame( schema.getTable( "wikiticker" ), schema.getTable( "wikiticker" ) );
 //    }
 //
 //
diff --git a/plugins/druid-adapter/src/test/java/org/polypheny/db/test/DruidAdapterIT2.java b/plugins/druid-adapter/src/test/java/org/polypheny/db/test/DruidAdapterIT2.java
index 3c8df50d87..8ccfec8940 100644
--- a/plugins/druid-adapter/src/test/java/org/polypheny/db/test/DruidAdapterIT2.java
+++ b/plugins/druid-adapter/src/test/java/org/polypheny/db/test/DruidAdapterIT2.java
@@ -2717,7 +2717,7 @@ public class DruidAdapterIT2 {
 //    @Test
 //    public void testTableMapReused() {
 //        AbstractSchema schema = new DruidSchema( "http://localhost:8082", "http://localhost:8081", true );
-//        assertSame( schema.getEntity( "wikiticker" ), schema.getEntity( "wikiticker" ) );
+//        assertSame( schema.getTable( "wikiticker" ), schema.getTable( "wikiticker" ) );
 //    }
 //
 //
diff --git a/plugins/elasticsearch-adapter/src/test/java/org/polypheny/db/adapter/elasticsearch/ScrollingTest.java b/plugins/elasticsearch-adapter/src/test/java/org/polypheny/db/adapter/elasticsearch/ScrollingTest.java
index 164443bc7f..20d1951e8f 100644
--- a/plugins/elasticsearch-adapter/src/test/java/org/polypheny/db/adapter/elasticsearch/ScrollingTest.java
+++ b/plugins/elasticsearch-adapter/src/test/java/org/polypheny/db/adapter/elasticsearch/ScrollingTest.java
@@ -100,7 +100,7 @@ public class ScrollingTest {
 //        // get node stats
 //        final Response response = NODE.restClient().performRequest( "GET", "/_nodes/stats/indices/search" );
 //
-//        try ( InputStream is = response.getEntity().getContent() ) {
+//        try ( InputStream is = response.getTable().getContent() ) {
 //            final ObjectNode node = NODE.mapper().readValue( is, ObjectNode.class );
 //            final String path = "/indices/search/scroll_current";
 //            final JsonNode scrollCurrent = node.with( "nodes" ).elements().next().at( path );
diff --git a/plugins/mapdb-catalog/src/main/java/org/polypheny/db/catalog/CatalogImpl.java b/plugins/mapdb-catalog/src/main/java/org/polypheny/db/catalog/CatalogImpl.java
index 47d23dc131..0def5ecd5c 100644
--- a/plugins/mapdb-catalog/src/main/java/org/polypheny/db/catalog/CatalogImpl.java
+++ b/plugins/mapdb-catalog/src/main/java/org/polypheny/db/catalog/CatalogImpl.java
@@ -908,7 +908,7 @@ private void addDefaultCsvColumn( CatalogAdapter csv, CatalogTable table, String
             updateColumnPlacementPhysicalPosition( csv.id, colId, position );
 
             long partitionId = table.partitionProperty.partitionIds.get( 0 );
-            addPartitionPlacement( csv.id, table.id, partitionId, PlacementType.AUTOMATIC, filename, table.name, DataPlacementRole.UPTODATE );
+            addPartitionPlacement( table.namespaceId, csv.id, table.id, partitionId, PlacementType.AUTOMATIC, filename, table.name, DataPlacementRole.UPTODATE );
         }
     }
 
@@ -2282,6 +2282,7 @@ public void addColumnPlacement( int adapterId, long columnId, PlacementType plac
             CatalogAdapter store = Objects.requireNonNull( adapters.get( adapterId ) );
             CatalogColumnPlacement columnPlacement = new CatalogColumnPlacement(
+                    column.schemaId,
                     column.tableId,
                     columnId,
                     adapterId,
@@ -2309,6 +2310,7 @@ public void updatePartitionPlacementPhysicalNames( int adapterId, long partition
         try {
             CatalogPartitionPlacement old = Objects.requireNonNull( partitionPlacements.get( new Object[]{ adapterId, partitionId } ) );
             CatalogPartitionPlacement placement = new CatalogPartitionPlacement(
+                    old.namespaceId,
                     old.tableId,
                     old.adapterId,
                     old.adapterUniqueName,
@@ -2437,7 +2439,7 @@ public long addCollection( Long id, String name, long schemaId, int currentUserI
      * {@inheritDoc}
      */
     @Override
-    public long addCollectionPlacement( int adapterId, long collectionId, PlacementType placementType ) {
+    public long addCollectionPlacement( long namespaceId, int adapterId, long collectionId, PlacementType placementType ) {
         long id = partitionIdBuilder.getAndIncrement();
         CatalogCollectionPlacement placement = new CatalogCollectionPlacement( adapterId, collectionId, null, null, id );
         CatalogCollection old = collections.get( collectionId );
@@ -2810,6 +2812,7 @@ public void updateColumnPlacementType( int adapterId, long columnId, PlacementTy
         try {
             CatalogColumnPlacement old = Objects.requireNonNull( columnPlacements.get( new Object[]{ adapterId, columnId } ) );
             CatalogColumnPlacement placement = new CatalogColumnPlacement(
+                    old.namespaceId,
                     old.tableId,
                     old.columnId,
                     old.adapterId,
@@ -2838,6 +2841,7 @@ public void updateColumnPlacementPhysicalPosition( int adapterId, long columnId,
         try {
             CatalogColumnPlacement old = Objects.requireNonNull( columnPlacements.get( new Object[]{ adapterId, columnId } ) );
             CatalogColumnPlacement placement = new CatalogColumnPlacement(
+                    old.namespaceId,
                     old.tableId,
                     old.columnId,
                     old.adapterId,
@@ -2866,6 +2870,7 @@ public void updateColumnPlacementPhysicalPosition( int adapterId, long columnId
         try {
             CatalogColumnPlacement old = Objects.requireNonNull( columnPlacements.get( new Object[]{ adapterId, columnId } ) );
             CatalogColumnPlacement placement = new CatalogColumnPlacement(
+                    old.namespaceId,
                     old.tableId,
                     old.columnId,
                     old.adapterId,
@@ -2894,6 +2899,7 @@ public void updateColumnPlacementPhysicalNames( int adapterId, long columnId, St
         try {
             CatalogColumnPlacement old = Objects.requireNonNull( columnPlacements.get( new Object[]{ adapterId, columnId } ) );
             CatalogColumnPlacement placement = new CatalogColumnPlacement(
+                    old.namespaceId,
                     old.tableId,
                     old.columnId,
                     old.adapterId,
@@ -4751,10 +4757,11 @@ public boolean isTableFlaggedForDeletion( long tableId ) {
      * {@inheritDoc}
      */
     @Override
-    public void addPartitionPlacement( int adapterId, long tableId, long partitionId, PlacementType placementType, String physicalSchemaName, String physicalTableName, DataPlacementRole role ) {
+    public void addPartitionPlacement( long namespaceId, int adapterId, long tableId, long partitionId, PlacementType placementType, String physicalSchemaName, String physicalTableName, DataPlacementRole role ) {
         if ( !checkIfExistsPartitionPlacement( adapterId, partitionId ) ) {
             CatalogAdapter store = Objects.requireNonNull( adapters.get( adapterId ) );
             CatalogPartitionPlacement partitionPlacement = new CatalogPartitionPlacement(
+                    namespaceId,
                     tableId,
                     adapterId,
                     store.uniqueName,
diff --git a/plugins/mapdb-catalog/src/test/java/org/polypheny/db/test/CatalogTest.java b/plugins/mapdb-catalog/src/test/java/org/polypheny/db/test/CatalogTest.java
index 9ad2aafa72..6cdd9bea5c 100644
--- a/plugins/mapdb-catalog/src/test/java/org/polypheny/db/test/CatalogTest.java
+++ b/plugins/mapdb-catalog/src/test/java/org/polypheny/db/test/CatalogTest.java
@@ -359,7 +359,7 @@ public void testKey() throws GenericCatalogException {
         assertTrue( catalog.getPrimaryKey( catalog.getTable( tableId ).primaryKey ).columnIds.contains( columnId1 ) );
 
         //catalog.deletePrimaryKey( tableId );
-        //assertNull( catalog.getEntity( tableId ).primaryKey );
+        //assertNull( catalog.getTable( tableId ).primaryKey );
 
         catalog.addPrimaryKey( tableId, Arrays.asList( columnId1, columnId2 ) );
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateTable.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateTable.java
index 6dd7b0934d..08efa10ed4 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateTable.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateTable.java
@@ -209,7 +209,7 @@ public void execute( Context context, Statement statement, QueryParameters param
         long schemaId;
 
         try {
-            // Cannot use getEntity() here since table does not yet exist
+            // Cannot use getTable() here since table does not yet exist
            if ( name.names.size() == 3 ) { // DatabaseName.SchemaName.TableName
                 schemaId = catalog.getSchema( name.names.get( 0 ), name.names.get( 1 ) ).id;
                 tableName = name.names.get( 2 );
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorImpl.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorImpl.java
index 3f94a345c1..d216c8acd8 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorImpl.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorImpl.java
@@ -46,6 +46,7 @@
 import java.util.function.Supplier;
 import java.util.stream.Collectors;
 import javax.annotation.Nonnull;
+import javax.annotation.Nullable;
 import lombok.Getter;
 import org.apache.calcite.linq4j.Ord;
 import org.apache.calcite.linq4j.function.Function2;
@@ -69,6 +70,7 @@
 import org.polypheny.db.algebra.type.AlgRecordType;
 import org.polypheny.db.algebra.type.DynamicRecordType;
 import org.polypheny.db.catalog.Catalog.NamespaceType;
+import org.polypheny.db.catalog.entity.CatalogEntity;
 import org.polypheny.db.languages.OperatorRegistry;
 import org.polypheny.db.languages.ParserPos;
 import org.polypheny.db.languages.QueryLanguage;
@@ -90,7 +92,6 @@
 import org.polypheny.db.nodes.validate.ValidatorTable;
 import org.polypheny.db.plan.AlgOptEntity;
 import org.polypheny.db.prepare.Prepare;
-import org.polypheny.db.prepare.Prepare.CatalogReader;
 import org.polypheny.db.rex.RexBuilder;
 import org.polypheny.db.rex.RexNode;
 import org.polypheny.db.rex.RexPatternFieldRef;
@@ -3001,7 +3002,7 @@ private void checkRollUpInUsing( SqlIdentifier identifier, SqlNode leftOrRight )
         // if it's not a SqlIdentifier then that's fine, it'll be validated somewhere else.
         if ( leftOrRight instanceof SqlIdentifier ) {
             SqlIdentifier from = (SqlIdentifier) leftOrRight;
-            Entity entity = findTable(
+            CatalogEntity entity = findTable(
                     catalogReader.getRootSchema(),
                     Util.last( from.names ),
                     catalogReader.nameMatcher.isCaseSensitive() );
@@ -3366,11 +3367,10 @@ private boolean isRolledUpColumnAllowedInAgg( SqlIdentifier identifier, SqlValid
         }
 
         String tableAlias = pair.left;
-        String columnName = pair.right;
 
-        Entity entity = findTable( tableAlias );
+        CatalogEntity entity = findTable( tableAlias );
         if ( entity != null ) {
-            return entity.rolledUpColumnValidInsideAgg( columnName, aggCall, parent );
+            return entity.rolledUpColumnValidInsideAgg();
         }
         return true;
     }
@@ -3387,7 +3387,7 @@ private boolean isRolledUpColumn( SqlIdentifier identifier, SqlValidatorScope sc
         String tableAlias = pair.left;
         String columnName = pair.right;
 
-        Entity entity = findTable( tableAlias );
+        CatalogEntity entity = findTable( tableAlias );
         if ( entity != null ) {
             return entity.isRolledUp( columnName );
         }
@@ -3395,49 +3395,16 @@ private boolean isRolledUpColumn( SqlIdentifier identifier, SqlValidatorScope sc
     }
 
 
-    private Entity findTable( PolyphenyDbSchema schema, String tableName, boolean caseSensitive ) {
-        PolyphenyDbSchema.TableEntry entry = schema.getTable( tableName );
-        if ( entry != null ) {
-            return entry.getTable();
-        }
-
-        // Check sub schemas
-        for ( PolyphenyDbSchema subSchema : schema.getSubSchemaMap().values() ) {
-            Entity entity = findTable( subSchema, tableName, caseSensitive );
-            if ( entity != null ) {
-                return entity;
-            }
-        }
-
-        return null;
+    private @Nullable CatalogEntity findTable( PolyphenyDbSchema schema, String tableName, boolean caseSensitive ) {
+        return schema.getTable( List.of( tableName ) );
     }
 
 
     /**
     * Given a table alias, find the corresponding {@link Entity} associated with it
     */
-    private Entity findTable( String alias ) {
-        List names = null;
-        if ( tableScope == null ) {
-            // no tables to find
-            return null;
-        }
-
-        for ( ScopeChild child : tableScope.children ) {
-            if ( catalogReader.nameMatcher.matches( child.name, alias ) ) {
-                names = ((SqlIdentifier) child.namespace.getNode()).names;
-                break;
-            }
-        }
-        if ( names == null || names.size() == 0 ) {
-            return null;
-        } else if ( names.size() == 1 ) {
-            return findTable( catalogReader.getRootSchema(), names.get( 0 ), catalogReader.nameMatcher.isCaseSensitive() );
-        }
-
-        PolyphenyDbSchema.TableEntry entry = ValidatorUtil.getTableEntry( (CatalogReader) catalogReader, names );
-
-        return entry == null ? null : entry.getTable();
+    private CatalogEntity findTable( String alias ) {
+        return findTable( catalogReader.getRootSchema(), alias, catalogReader.nameMatcher.isCaseSensitive() );
     }

From cd05030bccf0cbca4099b209c72da8f8c32a99b3 Mon Sep 17 00:00:00 2001
From: datomo
Date: Fri, 24 Feb 2023 00:26:21 +0100
Subject: [PATCH 020/436] removing complex shadow schemaPlus and entity
 structures

---
 .../org/polypheny/db/adapter/DataContext.java      |   6 +-
 .../db/algebra/stream/StreamRules.java             |   2 +-
 .../org/polypheny/db/catalog/Catalog.java          |   3 +-
 .../entity/CatalogCollectionPlacement.java         |   4 +-
 .../validate/ValidatorCatalogReader.java           |   7 --
 .../db/prepare/AlgOptEntityImpl.java               |   3 +-
 .../org/polypheny/db/prepare/PlannerImpl.java      |   4 +-
 .../db/prepare/PolyphenyDbCatalogReader.java       |  49 ++------
 .../db/prepare/PolyphenyDbPrepareImpl.java         |   6 +-
 .../org/polypheny/db/prepare/Prepare.java          |   4 +-
 .../db/prepare/QueryableAlgBuilder.java            |   6 -
 .../org/polypheny/db/schema/Namespace.java         |   2 -
 .../db/schema/PolyphenyDbSchema.java               |  13 ++-
 .../java/org/polypheny/db/schema/Schemas.java      | 108 +++--------------
 .../org/polypheny/db/tools/AlgBuilder.java         |   4 +-
 .../polypheny/db/tools/FrameworkConfig.java        |   4 +-
 .../org/polypheny/db/tools/Frameworks.java         |  31 +++--
 .../org/polypheny/db/util/ValidatorUtil.java       |  86 +-------------
 .../org/polypheny/db/catalog/MockCatalog.java      |   2 +-
 .../db/partition/FrequencyMapImpl.java             |   6 +-
 .../db/processing/DataContextImpl.java             |   2 +-
 .../db/transaction/TransactionImpl.java            |   3 +-
 .../org/polypheny/db/catalog/CatalogImpl.java      |   6 +-
 .../db/adapter/mongodb/MongoPlugin.java            |   1 +
 .../org/polypheny/db/catalog/PolyCatalog.java      |  10 +-
 .../language/validate/DelegatingScope.java         |  30 ++---
 .../DelegatingSqlValidatorCatalogReader.java       |   6 -
 .../db/sql/language/validate/EmptyScope.java       |  74 ++----------
 .../language/validate/SqlValidatorImpl.java        |   2 +-
 .../language/validate/SqlValidatorScope.java       |  46 ++------
 .../language/validate/SqlValidatorUtil.java        |  53 +++------
 31 files changed, 128 insertions(+), 455 deletions(-)

diff --git a/core/src/main/java/org/polypheny/db/adapter/DataContext.java b/core/src/main/java/org/polypheny/db/adapter/DataContext.java
index 6be97c9513..29f1eec7b5 100644
--- a/core/src/main/java/org/polypheny/db/adapter/DataContext.java
+++ b/core/src/main/java/org/polypheny/db/adapter/DataContext.java
@@ -32,7 +32,7 @@
 import org.apache.calcite.linq4j.tree.ParameterExpression;
 import org.polypheny.db.adapter.java.JavaTypeFactory;
 import org.polypheny.db.algebra.type.AlgDataType;
-import org.polypheny.db.schema.SchemaPlus;
+import org.polypheny.db.schema.PolyphenyDbSchema;
 import org.polypheny.db.transaction.Statement;
 import org.polypheny.db.util.Advisor;
 
@@ -49,7 +49,7 @@ public interface DataContext {
     /**
     * Returns a sub-schema with a given name, or null.
     */
-    SchemaPlus getRootSchema();
+    PolyphenyDbSchema getRootSchema();
 
     /**
     * Returns the type factory.
@@ -209,7 +209,7 @@ public T get( DataContext dataContext ) {
     class SlimDataContext implements DataContext, Serializable {
 
         @Override
-        public SchemaPlus getRootSchema() {
+        public PolyphenyDbSchema getRootSchema() {
             return null;
         }
 
diff --git a/core/src/main/java/org/polypheny/db/algebra/stream/StreamRules.java b/core/src/main/java/org/polypheny/db/algebra/stream/StreamRules.java
index 55e8101383..ffff9fa5b7 100644
--- a/core/src/main/java/org/polypheny/db/algebra/stream/StreamRules.java
+++ b/core/src/main/java/org/polypheny/db/algebra/stream/StreamRules.java
@@ -274,7 +274,7 @@ public void onMatch( AlgOptRuleCall call ) {
             if ( streamableTable != null ) {
                 final Entity entity1 = streamableTable.stream();
                 final CatalogTable catalogTable = algOptEntity.getCatalogEntity().unwrap( CatalogTable.class );
-                final CatalogPartitionPlacement placement = algOptEntity.getPartitionPlacement();
+                final CatalogPartitionPlacement placement = algOptEntity.getPartitionPlacement().unwrap( CatalogPartitionPlacement.class );
                 final AlgOptEntity algOptEntity2 = AlgOptEntityImpl.create(
                         algOptEntity.getRelOptSchema(),
                         algOptEntity.getRowType(),
diff --git a/core/src/main/java/org/polypheny/db/catalog/Catalog.java b/core/src/main/java/org/polypheny/db/catalog/Catalog.java
index 29db1ded33..a9df8e49ea 100644
--- a/core/src/main/java/org/polypheny/db/catalog/Catalog.java
+++ b/core/src/main/java/org/polypheny/db/catalog/Catalog.java
@@ -1894,13 +1894,14 @@ protected final boolean isValidIdentifier( final String str ) {
     /**
     * Updates the physical name of the given collection.
     *
+     * @param namespaceId
     * @param collectionId The id of the collection to change
     * @param adapterId The id of the adapter on which the physical names of the collection are updated
     * @param physicalNamespaceName The new namespace name
     * @param namespaceName The namespace name
     * @param physicalCollectionName The new physical collection name
     */
-    public abstract void updateCollectionPartitionPhysicalNames( long collectionId, int adapterId, String physicalNamespaceName, String namespaceName, String physicalCollectionName );
+    public abstract void updateCollectionPartitionPhysicalNames( long namespaceId, long collectionId, int adapterId, String physicalNamespaceName, String namespaceName, String physicalCollectionName );
 
     /**
     * Delete a specific collection.
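Note: this patch threads a namespace id through the catalog placement APIs (addPartitionPlacement, addCollectionPlacement, updateCollectionPartitionPhysicalNames). The following is a minimal illustrative sketch of how a call site adapts, modeled on the addDefaultCsvColumn hunk earlier in this series; the helper method and its wiring are assumed for illustration and are not part of the patch:

    // Sketch (assumed context): adapting a caller to the new
    // addPartitionPlacement( namespaceId, adapterId, ... ) signature.
    void placeCsvPartition( Catalog catalog, CatalogAdapter csv, CatalogTable table, String filename ) {
        long partitionId = table.partitionProperty.partitionIds.get( 0 );
        catalog.addPartitionPlacement(
                table.namespaceId,          // newly required: namespace of the table
                csv.id,                     // adapter id, unchanged
                table.id,
                partitionId,
                PlacementType.AUTOMATIC,
                filename,
                table.name,
                DataPlacementRole.UPTODATE );
    }
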
diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogCollectionPlacement.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogCollectionPlacement.java
index 371de7a650..d410aa16ee 100644
--- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogCollectionPlacement.java
+++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogCollectionPlacement.java
@@ -31,8 +31,8 @@ public class CatalogCollectionPlacement extends CatalogEntityPlacement {
     public final String physicalNamespaceName;
 
 
-    public CatalogCollectionPlacement( int adapterId, long collectionId, @Nullable String physicalName, String physicalNamespaceName, long id ) {
-        super();
+    public CatalogCollectionPlacement( long namespaceId, int adapterId, long collectionId, @Nullable String physicalName, String physicalNamespaceName, long id ) {
+        super( namespaceId, (long) adapterId, collectionId );
         this.adapter = adapterId;
         this.collectionId = collectionId;
         this.physicalName = physicalName;
diff --git a/core/src/main/java/org/polypheny/db/nodes/validate/ValidatorCatalogReader.java b/core/src/main/java/org/polypheny/db/nodes/validate/ValidatorCatalogReader.java
index 3ef6531285..b7cce1e5b6 100644
--- a/core/src/main/java/org/polypheny/db/nodes/validate/ValidatorCatalogReader.java
+++ b/core/src/main/java/org/polypheny/db/nodes/validate/ValidatorCatalogReader.java
@@ -67,13 +67,6 @@ public interface ValidatorCatalogReader extends Wrapper {
     */
     List getAllSchemaObjectNames( List names );
 
-    /**
-     * Returns the paths of all schemas to look in for tables.
-     *
-     * @return paths of current schema and root schema
-     */
-    List> getSchemaPaths();
-
     AlgDataType createTypeFromProjection( AlgDataType type, List columnNameList );
 
     /**
diff --git a/core/src/main/java/org/polypheny/db/prepare/AlgOptEntityImpl.java b/core/src/main/java/org/polypheny/db/prepare/AlgOptEntityImpl.java
index 14e775a554..feba778ce4 100644
--- a/core/src/main/java/org/polypheny/db/prepare/AlgOptEntityImpl.java
+++ b/core/src/main/java/org/polypheny/db/prepare/AlgOptEntityImpl.java
@@ -211,7 +211,6 @@ protected AlgOptEntity extend( Entity extendedEntity ) {
                 extendedEntity,
                 null,
                 null,
-                expressionFunction,
                 getRowCount() );
     }
 
@@ -271,7 +270,7 @@ public AlgNode toAlg( ToAlgContext context, AlgTraitSet traitSet ) {
             }
         }
         final AlgOptEntity algOptEntity =
-                new AlgOptEntityImpl( this.schema, b.build(), this.entity, this.catalogEntity, this.partitionPlacement, this.expressionFunction, this.rowCount ) {
+                new AlgOptEntityImpl( this.schema, b.build(), this.entity, this.catalogEntity, this.partitionPlacement, this.rowCount ) {
                     @Override
                     public T unwrap( Class clazz ) {
                         if ( clazz.isAssignableFrom( InitializerExpressionFactory.class ) ) {
diff --git a/core/src/main/java/org/polypheny/db/prepare/PlannerImpl.java b/core/src/main/java/org/polypheny/db/prepare/PlannerImpl.java
index 18c1a3b33d..001dd7d7f7 100644
--- a/core/src/main/java/org/polypheny/db/prepare/PlannerImpl.java
+++ b/core/src/main/java/org/polypheny/db/prepare/PlannerImpl.java
@@ -46,7 +46,7 @@
 import org.polypheny.db.plan.AlgTraitDef;
 import org.polypheny.db.plan.AlgTraitSet;
 import org.polypheny.db.rex.RexExecutor;
-import org.polypheny.db.schema.SchemaPlus;
+import org.polypheny.db.schema.PolyphenyDbSchema;
 import org.polypheny.db.tools.AlgConversionException;
 import org.polypheny.db.tools.FrameworkConfig;
 import org.polypheny.db.tools.Frameworks;
@@ -76,7 +76,7 @@ public class PlannerImpl implements Planner {
     private boolean open;
 
     // set in STATE_2_READY
-    private SchemaPlus defaultSchema;
+    private PolyphenyDbSchema defaultSchema;
     private JavaTypeFactory typeFactory;
     private AlgOptPlanner planner;
     private RexExecutor executor;
diff --git a/core/src/main/java/org/polypheny/db/prepare/PolyphenyDbCatalogReader.java b/core/src/main/java/org/polypheny/db/prepare/PolyphenyDbCatalogReader.java
index 2fd4b636e7..b9488816ea 100644
--- a/core/src/main/java/org/polypheny/db/prepare/PolyphenyDbCatalogReader.java
+++ b/core/src/main/java/org/polypheny/db/prepare/PolyphenyDbCatalogReader.java
@@ -36,8 +36,6 @@
 import com.google.common.collect.ImmutableList;
 import java.util.ArrayList;
 import java.util.List;
-import java.util.Map;
-import java.util.NavigableSet;
 import java.util.Objects;
 import org.polypheny.db.algebra.constant.FunctionCategory;
 import org.polypheny.db.algebra.constant.MonikerType;
@@ -47,13 +45,14 @@
 import org.polypheny.db.algebra.type.AlgDataTypeFactory;
 import org.polypheny.db.catalog.entity.CatalogCollection;
 import org.polypheny.db.catalog.entity.CatalogEntity;
+import org.polypheny.db.catalog.entity.CatalogGraphDatabase;
+import org.polypheny.db.catalog.entity.CatalogTable;
 import org.polypheny.db.nodes.Identifier;
 import org.polypheny.db.nodes.Operator;
 import org.polypheny.db.plan.AlgOptEntity;
 import org.polypheny.db.prepare.Prepare.PreparingEntity;
 import org.polypheny.db.schema.PolyphenyDbSchema;
 import org.polypheny.db.schema.Wrapper;
-import org.polypheny.db.schema.graph.Graph;
 import org.polypheny.db.util.Moniker;
 import org.polypheny.db.util.MonikerImpl;
 import org.polypheny.db.util.ValidatorUtil;
@@ -67,12 +66,10 @@ public class PolyphenyDbCatalogReader implements Prepare.CatalogReader {
     protected final PolyphenyDbSchema rootSchema;
     protected final AlgDataTypeFactory typeFactory;
-    private final List> schemaPaths;
 
-    public PolyphenyDbCatalogReader( PolyphenyDbSchema rootSchema, List defaultSchema, AlgDataTypeFactory typeFactory ) {
+    public PolyphenyDbCatalogReader( PolyphenyDbSchema rootSchema, AlgDataTypeFactory typeFactory ) {
         this.rootSchema = Objects.requireNonNull( rootSchema );
-        this.schemaPaths = ImmutableList.of( Objects.requireNonNull( defaultSchema ), ImmutableList.of() );
         this.typeFactory = typeFactory;
     }
 
@@ -98,17 +95,16 @@ public AlgOptEntity getCollection( final List names ) {
 
     @Override
-    public Graph getGraph( final String name ) {
-        PolyphenyDbSchema schema = rootSchema.getSubNamespace( name, true );
-        return schema == null ? null : (Graph) schema.getNamespace();
+    public CatalogGraphDatabase getGraph( final String name ) {
+        return rootSchema.getGraph( List.of( name ) );
     }
 
 
     @Override
     public AlgDataType getNamedType( Identifier typeName ) {
-        PolyphenyDbSchema.TypeEntry typeEntry = ValidatorUtil.getTypeEntry( getRootSchema(), typeName );
-        if ( typeEntry != null ) {
-            return typeEntry.getType().apply( typeFactory );
+        CatalogTable table = rootSchema.getTable( typeName.getNames() );
+        if ( table != null ) {
+            return table.getRowType();
         } else {
             return null;
         }
@@ -123,41 +119,20 @@ public List getAllSchemaObjectNames( List names ) {
         }
         final List result = new ArrayList<>();
 
-        // Add root schema if not anonymous
-        /*if ( !schema.getName().equals( "" ) ) {
-            result.add( moniker( schema, null, MonikerType.SCHEMA ) );
-        }*/
-
-        final Map schemaMap = schema.getSubSchemaMap();
-
-        for ( String subSchema : schemaMap.keySet() ) {
+        for ( String subSchema : rootSchema.getNamespaceNames() ) {
             result.add( moniker( schema, subSchema, MonikerType.SCHEMA ) );
         }
 
-        for ( String table : schema.getTableNames() ) {
-            result.add( moniker( schema, table, MonikerType.TABLE ) );
-        }
-
-        final NavigableSet functions = schema.getFunctionNames();
-        for ( String function : functions ) { // views are here as well
-            result.add( moniker( schema, function, MonikerType.FUNCTION ) );
-        }
         return result;
     }
 
 
     private Moniker moniker( PolyphenyDbSchema schema, String name, MonikerType type ) {
-        final List path = schema.path( name );
+        /*final List path = schema.path( name );
         if ( path.size() == 1 && !schema.root().getName().equals( "" ) && type == MonikerType.SCHEMA ) {
             type = MonikerType.CATALOG;
-        }
-        return new MonikerImpl( path, type );
-    }
-
-
-    @Override
-    public List> getSchemaPaths() {
-        return schemaPaths;
+        }*/
+        return new MonikerImpl( name, type );
     }
 
diff --git a/core/src/main/java/org/polypheny/db/prepare/PolyphenyDbPrepareImpl.java b/core/src/main/java/org/polypheny/db/prepare/PolyphenyDbPrepareImpl.java
index 9116a54e42..ecbb032c61 100644
--- a/core/src/main/java/org/polypheny/db/prepare/PolyphenyDbPrepareImpl.java
+++ b/core/src/main/java/org/polypheny/db/prepare/PolyphenyDbPrepareImpl.java
@@ -455,13 +455,13 @@ public R perform( PrepareAction action ) {
         final JavaTypeFactory typeFactory = prepareContext.getTypeFactory();
         final PolyphenyDbSchema schema =
                 action.getConfig().getDefaultSchema() != null
-                        ? PolyphenyDbSchema.from( action.getConfig().getDefaultSchema() )
+                        ? action.getConfig().getDefaultSchema()
                        : prepareContext.getRootSchema();
-        PolyphenyDbCatalogReader catalogReader = new PolyphenyDbCatalogReader( schema.root(), schema.path( null ), typeFactory );
+        PolyphenyDbCatalogReader catalogReader = new PolyphenyDbCatalogReader( schema, typeFactory );
         final RexBuilder rexBuilder = new RexBuilder( typeFactory );
         final AlgOptPlanner planner = createPlanner( prepareContext, action.getConfig().getContext(), action.getConfig().getCostFactory() );
         final AlgOptCluster cluster = createCluster( planner, rexBuilder );
-        return action.apply( cluster, catalogReader, prepareContext.getRootSchema().plus() );
+        return action.apply( cluster, catalogReader, prepareContext.getRootSchema() );
     }
 
diff --git a/core/src/main/java/org/polypheny/db/prepare/Prepare.java b/core/src/main/java/org/polypheny/db/prepare/Prepare.java
index 1e705f3512..1b12582104 100644
--- a/core/src/main/java/org/polypheny/db/prepare/Prepare.java
+++ b/core/src/main/java/org/polypheny/db/prepare/Prepare.java
@@ -53,6 +53,7 @@
 import org.polypheny.db.algebra.operators.OperatorTable;
 import org.polypheny.db.algebra.type.AlgDataType;
 import org.polypheny.db.algebra.type.AlgDataTypeField;
+import org.polypheny.db.catalog.entity.CatalogGraphDatabase;
 import org.polypheny.db.languages.NodeToAlgConverter;
 import org.polypheny.db.nodes.Node;
 import org.polypheny.db.nodes.validate.Validator;
@@ -73,7 +74,6 @@
 import org.polypheny.db.schema.ColumnStrategy;
 import org.polypheny.db.schema.Entity;
 import org.polypheny.db.schema.ExtensibleEntity;
-import org.polypheny.db.schema.graph.Graph;
 import org.polypheny.db.tools.Program;
 import org.polypheny.db.tools.Programs;
 import org.polypheny.db.util.Holder;
@@ -247,7 +247,7 @@ public interface CatalogReader extends AlgOptSchema, ValidatorCatalogReader, Ope
         AlgOptEntity getCollection( List names );
 
-        Graph getGraph( String name );
+        CatalogGraphDatabase getGraph( String name );
 
         ThreadLocal THREAD_LOCAL = new ThreadLocal<>();
 
diff --git a/core/src/main/java/org/polypheny/db/prepare/QueryableAlgBuilder.java b/core/src/main/java/org/polypheny/db/prepare/QueryableAlgBuilder.java
index 0e78239f8c..074460a5e7 100644
--- a/core/src/main/java/org/polypheny/db/prepare/QueryableAlgBuilder.java
+++ b/core/src/main/java/org/polypheny/db/prepare/QueryableAlgBuilder.java
@@ -65,7 +65,6 @@
 import org.polypheny.db.algebra.logical.relational.LogicalProject;
 import org.polypheny.db.algebra.logical.relational.LogicalRelScan;
 import org.polypheny.db.rex.RexNode;
-import org.polypheny.db.schema.PolyphenyDbSchema;
 import org.polypheny.db.schema.QueryableEntity;
 import org.polypheny.db.schema.TranslatableEntity;
 import org.polypheny.db.schema.impl.AbstractTableQueryable;
@@ -107,14 +106,9 @@ AlgNode toAlg( Queryable queryable ) {
             final AbstractTableQueryable tableQueryable = (AbstractTableQueryable) queryable;
             final QueryableEntity table = tableQueryable.table;
-            final PolyphenyDbSchema.TableEntry tableEntry =
-                    PolyphenyDbSchema
-                            .from( tableQueryable.schema )
-                            .add( tableQueryable.tableName, tableQueryable.table );
             final AlgOptEntityImpl algOptTable =
                     AlgOptEntityImpl.create(
                             null,
                             table.getRowType( translator.typeFactory ),
-                            tableEntry,
                             table.getCatalogEntity(),
                             table.getPartitionPlacement(),
                             null );
diff --git a/core/src/main/java/org/polypheny/db/schema/Namespace.java b/core/src/main/java/org/polypheny/db/schema/Namespace.java
index a801d08dd9..75beb5dc1b 100644
--- a/core/src/main/java/org/polypheny/db/schema/Namespace.java
+++ b/core/src/main/java/org/polypheny/db/schema/Namespace.java
@@ -37,7 +37,6 @@
 import java.util.Collection;
 import java.util.Set;
 import org.apache.calcite.linq4j.tree.Expression;
-import org.polypheny.db.algebra.type.AlgDataTypeFactory;
 import org.polypheny.db.algebra.type.AlgProtoDataType;
 
 
@@ -49,7 +48,6 @@
 *
 * There may be multiple overloaded functions with the same name but different numbers or types of parameters. For this reason,
 * {@link #getFunctions} returns a list of all members with the same name. Polypheny-DB will call
- * {@link Schemas#resolve(AlgDataTypeFactory, String, java.util.Collection, java.util.List)}
 * to choose the appropriate one.
 *
 * The most common and important type of member is the one with no arguments and a result type that is a collection of records.
diff --git a/core/src/main/java/org/polypheny/db/schema/PolyphenyDbSchema.java b/core/src/main/java/org/polypheny/db/schema/PolyphenyDbSchema.java
index 18ed052e16..3dca5a90f2 100644
--- a/core/src/main/java/org/polypheny/db/schema/PolyphenyDbSchema.java
+++ b/core/src/main/java/org/polypheny/db/schema/PolyphenyDbSchema.java
@@ -17,6 +17,7 @@
 package org.polypheny.db.schema;
 
 import java.util.List;
+import java.util.stream.Collectors;
 import org.apache.calcite.linq4j.tree.Expression;
 import org.apache.calcite.linq4j.tree.Expressions;
 import org.polypheny.db.adapter.DataContext;
@@ -64,7 +65,17 @@ default CatalogCollection getCollection( List names ) {
         }
     }
 
-    CatalogGraphDatabase getGraph( List names );
+    default CatalogGraphDatabase getGraph( List names ) {
+
+        if ( names.size() == 1 ) {// TODO add methods
+            return Catalog.getInstance().getGraphs( Catalog.defaultDatabaseId, Pattern.of( names.get( 0 ) ) ).get( 0 );
+        }
+        return null;
+    }
+
+    default List getNamespaceNames() {
+        return Catalog.getInstance().getSchemas( Catalog.defaultDatabaseId, null ).stream().map( t -> t.name ).collect( Collectors.toList() );
+    }
 
     /**
     * Schema that has no parents.
diff --git a/core/src/main/java/org/polypheny/db/schema/Schemas.java b/core/src/main/java/org/polypheny/db/schema/Schemas.java
index c57047a5f4..571096da75 100644
--- a/core/src/main/java/org/polypheny/db/schema/Schemas.java
+++ b/core/src/main/java/org/polypheny/db/schema/Schemas.java
@@ -33,7 +33,6 @@
 package org.polypheny.db.schema;
 
-import com.google.common.base.Preconditions;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Lists;
@@ -41,11 +40,8 @@
 import java.util.AbstractList;
 import java.util.ArrayList;
 import java.util.Arrays;
-import java.util.Collection;
-import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
-import java.util.Objects;
 import org.apache.calcite.linq4j.Enumerable;
 import org.apache.calcite.linq4j.QueryProvider;
 import org.apache.calcite.linq4j.Queryable;
@@ -58,14 +54,13 @@
 import org.polypheny.db.algebra.type.AlgDataType;
 import org.polypheny.db.algebra.type.AlgDataTypeFactory;
 import org.polypheny.db.algebra.type.AlgProtoDataType;
+import org.polypheny.db.catalog.entity.CatalogTable;
 import org.polypheny.db.config.PolyphenyDbConnectionConfig;
 import org.polypheny.db.config.PolyphenyDbConnectionConfigImpl;
 import org.polypheny.db.config.PolyphenyDbConnectionProperty;
 import org.polypheny.db.prepare.Context;
 import org.polypheny.db.prepare.JavaTypeFactoryImpl;
 import org.polypheny.db.prepare.PolyphenyDbPrepare;
-import org.polypheny.db.schema.PolyphenyDbSchema.FunctionEntry;
-import org.polypheny.db.schema.graph.QueryableGraph;
 import org.polypheny.db.transaction.Statement;
 import org.polypheny.db.type.PolyTypeUtil;
 import org.polypheny.db.util.BuiltInMethod;
@@ -82,24 +77,6 @@ private Schemas() {
     }
 
-    public static FunctionEntry resolve( AlgDataTypeFactory typeFactory, String name, Collection functionEntries, List argumentTypes ) {
-        final List matches = new ArrayList<>();
-        for ( FunctionEntry entry : functionEntries ) {
-            if ( matches( typeFactory, entry.getFunction(), argumentTypes ) ) {
-                matches.add( entry );
-            }
-        }
-        switch ( matches.size() ) {
-            case 0:
-                return null;
-            case 1:
-                return matches.get( 0 );
-            default:
-                throw new RuntimeException( "More than one match for " + name + " with arguments " + argumentTypes );
-        }
-    }
-
-
     private static boolean matches( AlgDataTypeFactory typeFactory, Function member, List argumentTypes ) {
         List parameters = member.getParameters();
         if ( parameters.size() != argumentTypes.size() ) {
@@ -214,33 +191,22 @@ public static Queryable queryable( DataContext root, Class clazz, Stri
     /**
     * Returns a {@link Queryable}, given a fully-qualified table name as an iterable.
     */
     public static Queryable queryable( DataContext root, Class clazz, Iterable names ) {
-        SchemaPlus schema = root.getRootSchema();
-        for ( Iterator iterator = names.iterator(); ; ) {
-            String name = iterator.next();
-            if ( iterator.hasNext() ) {
-                schema = schema.getSubNamespace( name );
-            } else {
-                return queryable( root, schema, clazz, name );
-            }
-        }
+        PolyphenyDbSchema schema = root.getRootSchema();
+
+        return queryable( root, schema, clazz, names.iterator().next() );
+
     }
 
 
     /**
     * Returns a {@link Queryable}, given a schema and table name.
     */
-    public static Queryable queryable( DataContext root, SchemaPlus schema, Class clazz, String tableName ) {
+    public static Queryable queryable( DataContext root, PolyphenyDbSchema schema, Class clazz, String tableName ) {
         QueryableEntity table = (QueryableEntity) schema.getEntity( tableName );
         return table.asQueryable( root, schema, tableName );
     }
 
-    public static Queryable graph( DataContext root, SchemaPlus schema ) {
-        QueryableGraph graph = (QueryableGraph) schema.polyphenyDbSchema().getNamespace();
-        return graph.asQueryable( root, graph );
-    }
-
-
     /**
     * Returns an {@link org.apache.calcite.linq4j.Enumerable} over the rows of a given table, representing each row as an object array.
     */
@@ -277,17 +243,9 @@ private static int[] identity( int count ) {
     /**
     * Returns an {@link org.apache.calcite.linq4j.Enumerable} over object arrays, given a fully-qualified table name which leads to a {@link ScannableEntity}.
     */
-    public static Entity table( DataContext root, String... names ) {
-        SchemaPlus schema = root.getRootSchema();
-        final List nameList = Arrays.asList( names );
-        for ( Iterator iterator = nameList.iterator(); ; ) {
-            String name = iterator.next();
-            if ( iterator.hasNext() ) {
-                schema = schema.getSubNamespace( name );
-            } else {
-                return schema.getEntity( name );
-            }
-        }
+    public static CatalogTable table( DataContext root, String... names ) {
+        PolyphenyDbSchema schema = root.getRootSchema();
+        return schema.getTable( List.of( names ) );
     }
 
     /**
@@ -362,7 +320,7 @@ public JavaTypeFactory getTypeFactory() {
 
         @Override
         public PolyphenyDbSchema getRootSchema() {
-            return schema.root();
+            return schema;
         }
 
 
@@ -375,9 +333,6 @@ public String getDefaultSchemaName() {
         @Override
         public List getDefaultSchemaPath() {
             // schemaPath is usually null. If specified, it overrides schema as the context within which the SQL is validated.
-            if ( schemaPath == null ) {
-                return schema.path( null );
-            }
             return schemaPath;
         }
 
@@ -447,48 +402,11 @@ public static PolyphenyDbSchema subSchema( PolyphenyDbSchema schema, Iterable names ) {
-        final ImmutableList.Builder> builder = ImmutableList.builder();
-        Namespace namespace = rootSchema.plus();
-        final Iterator iterator = names.iterator();
-        if ( !iterator.hasNext() ) {
-            return PathImpl.EMPTY;
-        }
-        if ( !rootSchema.getName().isEmpty() ) {
-            Preconditions.checkState( rootSchema.getName().equals( iterator.next() ) );
-        }
-        for ( ; ; ) {
-            final String name = iterator.next();
-            builder.add( Pair.of( name, namespace ) );
-            if ( !iterator.hasNext() ) {
-                return path( builder.build() );
-            }
-            namespace = namespace.getSubNamespace( name );
-        }
-    }
-
-
     public static PathImpl path( ImmutableList> build ) {
         return new PathImpl( build );
     }
 
@@ -511,18 +429,18 @@ public static Path path( SchemaPlus schema ) {
     */
     private static class DummyDataContext implements DataContext {
 
-        private final SchemaPlus rootSchema;
+        private final PolyphenyDbSchema rootSchema;
         private final ImmutableMap map;
 
 
-        DummyDataContext( SchemaPlus rootSchema ) {
+        DummyDataContext( PolyphenyDbSchema rootSchema ) {
             this.rootSchema = rootSchema;
             this.map = ImmutableMap.of();
         }
 
 
         @Override
-        public SchemaPlus getRootSchema() {
+        public PolyphenyDbSchema getRootSchema() {
             return rootSchema;
         }
 
diff --git a/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java b/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java
index 5aed7900c9..9a1324db4d 100644
--- a/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java
+++ b/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java
@@ -126,7 +126,7 @@
 import org.polypheny.db.runtime.Hook;
 import org.polypheny.db.runtime.PolyCollections.PolyDictionary;
 import org.polypheny.db.schema.ModelTrait;
-import org.polypheny.db.schema.SchemaPlus;
+import org.polypheny.db.schema.PolyphenyDbSchema;
 import org.polypheny.db.schema.graph.PolyNode;
 import org.polypheny.db.transaction.Statement;
 import org.polypheny.db.type.PolyType;
@@ -276,7 +276,7 @@ public static AlgBuilder create( FrameworkConfig config ) {
         Frameworks.withPrepare(
                 new Frameworks.PrepareAction( config ) {
                     @Override
-                    public Void apply( AlgOptCluster cluster, AlgOptSchema algOptSchema, SchemaPlus rootSchema ) {
+                    public Void apply( AlgOptCluster cluster, AlgOptSchema algOptSchema, PolyphenyDbSchema rootSchema ) {
                         clusters[0] = cluster;
                         algOptSchemas[0] = algOptSchema;
                         return null;
diff --git a/core/src/main/java/org/polypheny/db/tools/FrameworkConfig.java b/core/src/main/java/org/polypheny/db/tools/FrameworkConfig.java
index 10d3537f94..c8881182d8 100644
--- a/core/src/main/java/org/polypheny/db/tools/FrameworkConfig.java
+++ b/core/src/main/java/org/polypheny/db/tools/FrameworkConfig.java
@@ -44,7 +44,7 @@
 import org.polypheny.db.plan.AlgTraitDef;
 import org.polypheny.db.plan.Context;
 import org.polypheny.db.rex.RexExecutor;
-import org.polypheny.db.schema.SchemaPlus;
+import org.polypheny.db.schema.PolyphenyDbSchema;
 
 
 /**
@@ -68,7 +68,7 @@ public interface FrameworkConfig {
     /**
     * Returns the default schema that should be checked before looking at the root schema.
     * Returns null to only consult the root schema.
     */
-    SchemaPlus getDefaultSchema();
+    PolyphenyDbSchema getDefaultSchema();
 
     /**
     * Returns the executor used to evaluate constant expressions.
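Note: with FrameworkConfig.getDefaultSchema() now returning PolyphenyDbSchema, configs are wired without the SchemaPlus wrapper. A minimal sketch of the updated setup, built only from calls visible in the surrounding hunks (createRootSchema, newConfigBuilder, defaultSchema); the trailing build() step is an assumption about the builder, not shown in this patch:

    // Sketch: wiring a FrameworkConfig directly with PolyphenyDbSchema.
    PolyphenyDbSchema rootSchema = Frameworks.createRootSchema( true );
    FrameworkConfig config = Frameworks.newConfigBuilder()
            .defaultSchema( rootSchema )   // accepts PolyphenyDbSchema after this patch
            .build();                      // assumed builder terminal call
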
diff --git a/core/src/main/java/org/polypheny/db/tools/Frameworks.java b/core/src/main/java/org/polypheny/db/tools/Frameworks.java
index 5efc4647a6..8f9e45c9c7 100644
--- a/core/src/main/java/org/polypheny/db/tools/Frameworks.java
+++ b/core/src/main/java/org/polypheny/db/tools/Frameworks.java
@@ -59,8 +59,6 @@
 import org.polypheny.db.rex.RexExecutor;
 import org.polypheny.db.schema.AbstractPolyphenyDbSchema;
 import org.polypheny.db.schema.PolyphenyDbSchema;
-import org.polypheny.db.schema.SchemaPlus;
-import org.polypheny.db.util.Util;
 
 
 /**
@@ -91,7 +89,7 @@ public static Planner getPlanner( FrameworkConfig config ) {
     */
     public interface PlannerAction {
 
-        R apply( AlgOptCluster cluster, AlgOptSchema algOptSchema, SchemaPlus rootSchema );
+        R apply( AlgOptCluster cluster, AlgOptSchema algOptSchema, PolyphenyDbSchema rootSchema );
     }
 
 
@@ -121,7 +119,7 @@ public FrameworkConfig getConfig() {
         public abstract R apply(
                 AlgOptCluster cluster,
                 AlgOptSchema algOptSchema,
-                SchemaPlus rootSchema );
+                PolyphenyDbSchema rootSchema );
     }
 
 
@@ -135,11 +133,10 @@ public abstract R apply(
     */
     public static R withPlanner( final PlannerAction action, final FrameworkConfig config ) {
         return withPrepare(
-                new Frameworks.PrepareAction( config ) {
+                new Frameworks.PrepareAction<>( config ) {
                     @Override
-                    public R apply( AlgOptCluster cluster, AlgOptSchema algOptSchema, SchemaPlus rootSchema ) {
-                        final PolyphenyDbSchema schema = PolyphenyDbSchema.from( Util.first( config.getDefaultSchema(), rootSchema ) );
-                        return action.apply( cluster, algOptSchema, schema.root().plus() );
+                    public R apply( AlgOptCluster cluster, AlgOptSchema algOptSchema, PolyphenyDbSchema rootSchema ) {
+                        return action.apply( cluster, algOptSchema, rootSchema );
                     }
                 } );
     }
@@ -152,11 +149,11 @@ public R apply( AlgOptCluster cluster, AlgOptSchema algOptSchema, SchemaPlus roo
     * @return Return value from action
     */
     public static R withPlanner( final PlannerAction action ) {
-        SchemaPlus rootSchema = Frameworks.createRootSchema( true );
+        PolyphenyDbSchema rootSchema = Frameworks.createRootSchema( true );
         FrameworkConfig config = newConfigBuilder()
                 .defaultSchema( rootSchema )
                 .prepareContext( new ContextImpl(
-                        PolyphenyDbSchema.from( rootSchema ),
+                        rootSchema,
                         new SlimDataContext() {
                             @Override
                             public JavaTypeFactory getTypeFactory() {
@@ -199,8 +196,8 @@ public static R withPrepare( PrepareAction action ) {
     *
     * @param cache Whether to create a caching schema.
     */
-    public static SchemaPlus createRootSchema( boolean cache ) {
-        return AbstractPolyphenyDbSchema.createRootSchema( "" ).plus();
+    public static PolyphenyDbSchema createRootSchema( boolean cache ) {
+        return AbstractPolyphenyDbSchema.createRootSchema();
     }
 
 
@@ -234,7 +231,7 @@ public static class ConfigBuilder {
         private ImmutableList traitDefs;
         private ParserConfig parserConfig;
         private NodeToAlgConverter.Config sqlToRelConverterConfig;
-        private SchemaPlus defaultSchema;
+        private PolyphenyDbSchema defaultSchema;
         private RexExecutor executor;
         private AlgOptCostFactory costFactory;
         private AlgDataTypeSystem typeSystem;
@@ -256,8 +253,6 @@ public ConfigBuilder() {
         /**
         * Creates a ConfigBuilder, initializing from an existing config.
         */
         public ConfigBuilder( FrameworkConfig config ) {
-            //convertletTable = config.getConvertletTable();
-            // operatorTable = config.getOperatorTable();
             programs = config.getPrograms();
             context = config.getContext();
             traitDefs = config.getTraitDefs();
@@ -334,7 +329,7 @@ public ConfigBuilder sqlToRelConverterConfig( NodeToAlgConverter.Config sqlToRel
         }
 
 
-        public ConfigBuilder defaultSchema( SchemaPlus defaultSchema ) {
+        public ConfigBuilder defaultSchema( PolyphenyDbSchema defaultSchema ) {
             this.defaultSchema = defaultSchema;
             return this;
         }
@@ -398,7 +393,7 @@ public static class StdFrameworkConfig implements FrameworkConfig {
         private final NodeToAlgConverter.Config sqlToRelConverterConfig;
-        private final SchemaPlus defaultSchema;
+        private final PolyphenyDbSchema defaultSchema;
         private final AlgOptCostFactory costFactory;
@@ -416,7 +411,7 @@ public StdFrameworkConfig(
                 ImmutableList traitDefs,
                 ParserConfig parserConfig,
                 NodeToAlgConverter.Config nodeToRelConverterConfig,
-                SchemaPlus defaultSchema,
+                PolyphenyDbSchema defaultSchema,
                 AlgOptCostFactory costFactory,
                 AlgDataTypeSystem typeSystem,
                 RexExecutor executor,
diff --git a/core/src/main/java/org/polypheny/db/util/ValidatorUtil.java b/core/src/main/java/org/polypheny/db/util/ValidatorUtil.java
index cd3d3c80a7..ebce3f39ef 100644
--- a/core/src/main/java/org/polypheny/db/util/ValidatorUtil.java
+++ b/core/src/main/java/org/polypheny/db/util/ValidatorUtil.java
@@ -19,9 +19,7 @@
 import static org.polypheny.db.util.Static.RESOURCE;
 
 import com.google.common.base.Utf8;
-import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.Iterables;
 import java.nio.charset.Charset;
 import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
@@ -36,11 +34,7 @@
 import org.polypheny.db.algebra.type.AlgDataType;
 import org.polypheny.db.algebra.type.AlgDataTypeFactory;
 import org.polypheny.db.algebra.type.AlgDataTypeField;
-import org.polypheny.db.nodes.Identifier;
-import org.polypheny.db.prepare.Prepare.CatalogReader;
 import org.polypheny.db.schema.PolyphenyDbSchema;
-import org.polypheny.db.schema.PolyphenyDbSchema.TableEntry;
-import org.polypheny.db.schema.PolyphenyDbSchema.TypeEntry;
 import org.polypheny.db.type.PolyTypeUtil;
 
 public class ValidatorUtil {
@@ -256,66 +250,6 @@ public static void checkCharsetAndCollateConsistentIfCharType( AlgDataType type
     }
 
-    /**
-     * Finds a {@link TypeEntry} in a given schema whose type has the given name, possibly qualified.
-     *
-     * @param rootSchema root schema
-     * @param typeName name of the type, may be qualified or fully-qualified
-     * @return TypeEntry with a table with the given name, or null
-     */
-    public static TypeEntry getTypeEntry( PolyphenyDbSchema rootSchema, Identifier typeName ) {
-        final String name;
-        final List path;
-        if ( typeName.isSimple() ) {
-            path = ImmutableList.of();
-            name = typeName.getSimple();
-        } else {
-            path = Util.skipLast( typeName.getNames() );
-            name = Util.last( typeName.getNames() );
-        }
-        PolyphenyDbSchema schema = rootSchema;
-        for ( String p : path ) {
-            if ( schema == rootSchema && NameMatchers.withCaseSensitive( true ).matches( p, schema.getName() ) ) {
-                continue;
-            }
-            schema = schema.getSubNamespace( p, true );
-        }
-        return schema == null ? null : schema.getType( name, false );
-    }
-
-
-    /**
-     * Finds a {@link TableEntry} in a given catalog reader whose table has the given name, possibly qualified.
-     *
-     * Uses the case-sensitivity policy of the specified catalog reader.
-     *
-     * If not found, returns null.
-     *
-     * @param catalogReader accessor to the table metadata
-     * @param names Name of table, may be qualified or fully-qualified
-     * @return TableEntry with a table with the given name, or null
-     */
-    public static TableEntry getTableEntry( CatalogReader catalogReader, List names ) {
-        // First look in the default schema, if any.
-        // If not found, look in the root schema.
-        for ( List schemaPath : catalogReader.getSchemaPaths() ) {
-            PolyphenyDbSchema schema =
-                    getSchema(
-                            catalogReader.getRootSchema(),
-                            Iterables.concat( schemaPath, Util.skipLast( names ) ),
-                            catalogReader.nameMatcher );
-            if ( schema == null ) {
-                continue;
-            }
-            TableEntry entry = getTableEntryFrom( schema, Util.last( names ), catalogReader.nameMatcher.isCaseSensitive() );
-            if ( entry != null ) {
-                return entry;
-            }
-        }
-        return null;
-    }
-
-
     /**
     * Finds and returns {@link PolyphenyDbSchema} nested to the given rootSchema with specified schemaPath.
     *
@@ -329,26 +263,8 @@ public static TableEntry getTableEntry( CatalogReader catalogReader, List
     */
     public static PolyphenyDbSchema getSchema( PolyphenyDbSchema rootSchema, Iterable schemaPath, NameMatcher nameMatcher ) {
-        PolyphenyDbSchema schema = rootSchema;
-        for ( String schemaName : schemaPath ) {
-            if ( schema == rootSchema && nameMatcher.matches( schemaName, schema.getName() ) ) {
-                continue;
-            }
-            schema = schema.getSubNamespace( schemaName, nameMatcher.isCaseSensitive() );
-            if ( schema == null ) {
-                return null;
-            }
-        }
-        return schema;
-    }
-
-
-    private static TableEntry getTableEntryFrom( PolyphenyDbSchema schema, String name, boolean caseSensitive ) {
-        TableEntry entry = schema.getTable( name );
-        if ( entry == null ) {
-            entry = schema.getTableBasedOnNullaryFunction( name, caseSensitive );
-        }
-        return entry;
+        return rootSchema;
     }
 
diff --git a/core/src/test/java/org/polypheny/db/catalog/MockCatalog.java b/core/src/test/java/org/polypheny/db/catalog/MockCatalog.java
index 0bcae730f6..6eb1f7b9de 100644
--- a/core/src/test/java/org/polypheny/db/catalog/MockCatalog.java
+++ b/core/src/test/java/org/polypheny/db/catalog/MockCatalog.java
@@ -1369,7 +1369,7 @@ public List getPartitionPlacementsByIdAndRole( long t
 
     @Override
-    public void updateCollectionPartitionPhysicalNames( long collectionId, int adapterId, String physicalNamespaceName, String namespaceName, String physicalCollectionName ) {
+    public void updateCollectionPartitionPhysicalNames( long namespaceId, long collectionId, int adapterId, String physicalNamespaceName, String namespaceName, String physicalCollectionName ) {
         throw new NotImplementedException();
     }
 
diff --git a/dbms/src/main/java/org/polypheny/db/partition/FrequencyMapImpl.java b/dbms/src/main/java/org/polypheny/db/partition/FrequencyMapImpl.java
index aa34000480..0c2cc5daba 100644
--- a/dbms/src/main/java/org/polypheny/db/partition/FrequencyMapImpl.java
+++ b/dbms/src/main/java/org/polypheny/db/partition/FrequencyMapImpl.java
@@ -294,7 +294,8 @@ private void redistributePartitions( CatalogTable table, List partitionsFr
             for ( long partitionId : hotPartitionsToCreate ) {
                 catalog.addPartitionPlacement(
-                        catalogTable.namespaceId, store.getAdapterId(),
+                        table.namespaceId,
+                        store.getAdapterId(),
                         table.id,
                         partitionId,
                         PlacementType.AUTOMATIC,
@@ -343,7 +344,8 @@ private void redistributePartitions( CatalogTable table, List partitionsFr
             for ( long partitionId : coldPartitionsToCreate ) {
                 catalog.addPartitionPlacement(
-                        catalogTable.namespaceId, store.getAdapterId(),
+                        table.namespaceId,
+                        store.getAdapterId(),
                         table.id,
                         partitionId,
                         PlacementType.AUTOMATIC,
diff --git a/dbms/src/main/java/org/polypheny/db/processing/DataContextImpl.java b/dbms/src/main/java/org/polypheny/db/processing/DataContextImpl.java
index 4e89dfbf89..1c6acb509e 100644
--- a/dbms/src/main/java/org/polypheny/db/processing/DataContextImpl.java
+++ b/dbms/src/main/java/org/polypheny/db/processing/DataContextImpl.java
@@ -201,7 +201,7 @@ public void resetContext() {
     @Override
     public SchemaPlus getRootSchema() {
-        return rootSchema == null ? null : rootSchema.plus();
+        return rootSchema == null ? null : rootSchema;
     }
 
diff --git a/dbms/src/main/java/org/polypheny/db/transaction/TransactionImpl.java b/dbms/src/main/java/org/polypheny/db/transaction/TransactionImpl.java
index 45fad3c000..d4a403ad38 100644
--- a/dbms/src/main/java/org/polypheny/db/transaction/TransactionImpl.java
+++ b/dbms/src/main/java/org/polypheny/db/transaction/TransactionImpl.java
@@ -265,8 +265,7 @@ public boolean isActive() {
     @Override
     public PolyphenyDbCatalogReader getCatalogReader() {
         return new PolyphenyDbCatalogReader(
-                PolyphenyDbSchema.from( getSchema().plus() ),
-                PolyphenyDbSchema.from( getSchema().plus() ).path( null ),
+                getSchema(),
                 getTypeFactory() );
     }
 
diff --git a/plugins/mapdb-catalog/src/main/java/org/polypheny/db/catalog/CatalogImpl.java b/plugins/mapdb-catalog/src/main/java/org/polypheny/db/catalog/CatalogImpl.java
index 0def5ecd5c..7be7941aeb 100644
--- a/plugins/mapdb-catalog/src/main/java/org/polypheny/db/catalog/CatalogImpl.java
+++ b/plugins/mapdb-catalog/src/main/java/org/polypheny/db/catalog/CatalogImpl.java
@@ -2441,7 +2441,7 @@ public long addCollection( Long id, String name, long schemaId, int currentUserI
     @Override
     public long addCollectionPlacement( long namespaceId, int adapterId, long collectionId, PlacementType placementType ) {
         long id = partitionIdBuilder.getAndIncrement();
-        CatalogCollectionPlacement placement = new CatalogCollectionPlacement( adapterId, collectionId, null, null, id );
+        CatalogCollectionPlacement placement = new CatalogCollectionPlacement( namespaceId, adapterId, collectionId, null, null, id );
         CatalogCollection old = collections.get( collectionId );
         if ( old == null ) {
             throw new UnknownCollectionException( collectionId );
@@ -2463,13 +2463,13 @@ public long addCollectionPlacement( long namespaceId, int adapterId, long collec
     * {@inheritDoc}
     */
     @Override
-    public void updateCollectionPartitionPhysicalNames( long collectionId, int adapterId, String physicalNamespaceName, String namespaceName, String physicalCollectionName ) {
+    public void updateCollectionPartitionPhysicalNames( long namespaceId, long collectionId, int adapterId, String physicalNamespaceName, String namespaceName, String physicalCollectionName ) {
         CatalogCollection old = getCollection( collectionId );
         if ( old == null ) {
             throw new UnknownCollectionException( collectionId );
         }
 
-        CatalogCollectionPlacement placement = new CatalogCollectionPlacement( adapterId, collectionId, physicalCollectionName, physicalNamespaceName, old.id );
+        CatalogCollectionPlacement placement = new CatalogCollectionPlacement( namespaceId, adapterId, collectionId, physicalCollectionName, physicalNamespaceName, old.id );
         CatalogCollection collection = old.setPhysicalName( physicalCollectionName );
         synchronized ( this ) {
             collections.replace( collectionId, collection );
diff --git a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoPlugin.java b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoPlugin.java
index b8a7b9a3bb..c3ad25aa89 100644
--- a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoPlugin.java
+++ b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoPlugin.java
@@ -347,6 +347,7 @@ public void createCollection( Context prepareContext, CatalogCollection catalogC
         this.currentSchema.database.createCollection( physicalCollectionName );
 
         catalog.updateCollectionPartitionPhysicalNames(
+                catalogCollection.namespaceId,
                 catalogCollection.id,
                 getAdapterId(),
                 catalogCollection.getNamespaceName(),
diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java
index 1d6a26d681..22a4e0294e 100644
--- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java
+++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java
@@ -30,6 +30,7 @@
 import org.polypheny.db.algebra.type.AlgDataType;
 import org.polypheny.db.catalog.Catalog.NamespaceType;
 import org.polypheny.db.catalog.entities.CatalogUser;
+import org.polypheny.db.catalog.entity.CatalogGraphDatabase;
 import org.polypheny.db.catalog.logical.document.DocumentCatalog;
 import org.polypheny.db.catalog.logical.graph.GraphCatalog;
 import org.polypheny.db.catalog.logical.relational.RelationalCatalog;
@@ -40,7 +41,6 @@
 import org.polypheny.db.prepare.Prepare.CatalogReader;
 import org.polypheny.db.prepare.Prepare.PreparingEntity;
 import org.polypheny.db.schema.PolyphenyDbSchema;
-import org.polypheny.db.schema.graph.Graph;
 import org.polypheny.db.util.Moniker;
 
 
@@ -168,12 +168,6 @@ public List getAllSchemaObjectNames( List names ) {
     }
 
-    @Override
-    public List> getSchemaPaths() {
-        return null;
-    }
-
-
     @Override
     public AlgDataType createTypeFromProjection( AlgDataType type, List columnNameList ) {
         return null;
@@ -205,7 +199,7 @@ public AlgOptEntity getCollection( List names ) {
 
     @Override
-    public Graph getGraph( String name ) {
+    public CatalogGraphDatabase getGraph( String name ) {
         return null;
     }
 
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/DelegatingScope.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/DelegatingScope.java
index 8ee61d7fac..4fa6f73e07 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/DelegatingScope.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/DelegatingScope.java
@@ -17,11 +17,7 @@
 package org.polypheny.db.sql.language.validate;
 
-import com.google.common.collect.ImmutableList;
-import java.util.ArrayList;
 import java.util.Collection;
-import java.util.Collections;
-import java.util.Comparator;
 import java.util.List;
 import java.util.Map;
 import org.polypheny.db.algebra.constant.MonikerType;
@@ -30,7 +26,6 @@
 import org.polypheny.db.algebra.type.AlgDataTypeField;
 import org.polypheny.db.algebra.type.DynamicRecordType;
 import org.polypheny.db.algebra.type.StructKind;
-import org.polypheny.db.languages.ParserPos;
 import org.polypheny.db.nodes.validate.ValidatorTable;
 import org.polypheny.db.prepare.Prepare.PreparingEntity;
 import org.polypheny.db.schema.CustomColumnResolvingEntity;
@@ -44,10 +39,7 @@
 import org.polypheny.db.util.Moniker;
 import org.polypheny.db.util.MonikerImpl;
 import org.polypheny.db.util.NameMatcher;
-import org.polypheny.db.util.NameMatchers;
 import org.polypheny.db.util.Pair;
-import org.polypheny.db.util.Static;
-import org.polypheny.db.util.Util;
 
 
 /**
@@ -95,7 +87,7 @@ public void resolve( List names, NameMatcher nameMatcher, boolean deep,
     */
     void resolveInNamespace( SqlValidatorNamespace ns, boolean nullable, List names, NameMatcher nameMatcher, Path path, Resolved resolved ) {
         if ( names.isEmpty() ) {
-            resolved.found( ns, nullable, this, path, null );
+            resolved.found( null );
             return;
         }
         final AlgDataType rowType = ns.getRowType();
@@ -229,7 +221,7 @@ public SqlQualified fullyQualify( SqlIdentifier identifier ) {
         if ( identifier.isStar() ) {
             return SqlQualified.create( this, 1, null, identifier );
         }
-
+        /*
         final SqlIdentifier previous = identifier;
         final NameMatcher nameMatcher = validator.catalogReader.nameMatcher;
         String columnName;
@@ -253,7 +245,7 @@ public SqlQualified fullyQualify( SqlIdentifier identifier ) {
                 Collections.sort( list );
                 throw validator.newValidationError( identifier, Static.RESOURCE.columnNotFoundDidYouMean( columnName, Util.sepList( list, "', '" ) ) );
             }
-        } else if ( SqlValidatorUtil.isTableNonRelational( validator ) ) {
+        } else if ( !SqlValidatorUtil.isTableRelational( validator ) ) {
             // todo dl, check if this does not lead to problems
             return SqlQualified.create( this, 0, validator.getSqlNamespace( identifier ), identifier );
         }
@@ -287,7 +279,7 @@ public SqlQualified fullyQualify( SqlIdentifier identifier ) {
             }
             // fall through
             default: {
-                SqlValidatorNamespace fromNs = null;
+                CatalogEntity fromNs = null;
                 Path fromPath = null;
                 AlgDataType fromRowType = null;
                 final ResolvedImpl resolved = new ResolvedImpl();
@@ -299,8 +291,7 @@ public SqlQualified fullyQualify( SqlIdentifier identifier ) {
                     resolve( prefix.names, nameMatcher, false, resolved );
                     if ( resolved.count() == 1 ) {
                         final Resolve resolve = resolved.only();
-                        fromNs = resolve.namespace;
-                        fromPath = resolve.path;
+                        fromNs = resolve.getEntity().unwrap( CatalogTable.class );
                         fromRowType = resolve.rowType();
                         break;
                    }
@@ -317,7 +308,7 @@ public SqlQualified fullyQualify( SqlIdentifier identifier ) {
                    }
                 }
             }
-            if ( fromNs == null || fromNs instanceof SchemaNamespace ) {
+            if ( fromNs == null ) {
                 // Look for a column not qualified by a table alias.
                 columnName = identifier.names.get( 0 );
                 final Map map = findQualifyingTableNames( columnName, identifier, nameMatcher );
@@ -328,7 +319,7 @@ public SqlQualified fullyQualify( SqlIdentifier identifier ) {
                     case 1: {
                         final Map.Entry entry = map.entrySet().iterator().next();
                         final String tableName2 = map.keySet().iterator().next();
-                        fromNs = entry.getValue().namespace;
+
                         fromPath = Path.EMPTY;
 
                         // Adding table name is for RecordType column with StructKind.PEEK_FIELDS or StructKind.PEEK_FIELDS only.
@@ -343,8 +334,6 @@ public SqlQualified fullyQualify( SqlIdentifier identifier ) {
                         resolve( ImmutableList.of( tableName2 ), nameMatcher, false, resolved );
                         if ( resolved.count() == 1 ) {
                             final Resolve resolve = resolved.only();
-                            fromNs = resolve.namespace;
-                            fromPath = resolve.path;
                             fromRowType = resolve.rowType();
                             identifier = identifier
                                     .setName( 0, columnName )
@@ -370,7 +359,7 @@ public SqlQualified fullyQualify( SqlIdentifier identifier ) {
                 //
                 // change "e.empno" to "E.empno".
if ( fromNs.getEnclosingNode() != null && !(this instanceof MatchRecognizeScope) ) { - String alias = SqlValidatorUtil.getAlias( fromNs.getEnclosingNode(), -1 ); + if ( alias != null && i > 0 && !alias.equals( identifier.names.get( i - 1 ) ) ) { identifier = identifier.setName( i - 1, alias ); } @@ -492,7 +481,8 @@ private int worstKind( Path path ) { } return SqlQualified.create( this, i, fromNs, identifier ); } - } + }*/ + throw new RuntimeException(); } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/DelegatingSqlValidatorCatalogReader.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/DelegatingSqlValidatorCatalogReader.java index 024d1e378d..b8136c0105 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/DelegatingSqlValidatorCatalogReader.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/DelegatingSqlValidatorCatalogReader.java @@ -61,12 +61,6 @@ public List getAllSchemaObjectNames( List names ) { } - @Override - public List> getSchemaPaths() { - return catalogReader.getSchemaPaths(); - } - - @Override public C unwrap( Class aClass ) { return catalogReader.unwrap( aClass ); diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/EmptyScope.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/EmptyScope.java index 9f188b62ae..2f3a41f724 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/EmptyScope.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/EmptyScope.java @@ -19,20 +19,14 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; -import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.Map; import org.polypheny.db.algebra.constant.Monotonicity; import org.polypheny.db.algebra.type.AlgDataType; -import org.polypheny.db.algebra.type.StructKind; +import org.polypheny.db.catalog.entity.CatalogTable; import org.polypheny.db.nodes.validate.ValidatorTable; -import org.polypheny.db.plan.AlgOptSchema; -import org.polypheny.db.prepare.AlgOptEntityImpl; -import org.polypheny.db.prepare.Prepare.PreparingEntity; -import org.polypheny.db.schema.Entity; import org.polypheny.db.schema.PolyphenyDbSchema; -import org.polypheny.db.schema.Wrapper; import org.polypheny.db.sql.language.SqlCall; import org.polypheny.db.sql.language.SqlDataTypeSpec; import org.polypheny.db.sql.language.SqlDynamicParam; @@ -45,7 +39,6 @@ import org.polypheny.db.util.NameMatcher; import org.polypheny.db.util.Pair; import org.polypheny.db.util.Static; -import org.polypheny.db.util.Util; /** @@ -97,25 +90,12 @@ public SqlValidatorNamespace getTableNamespace( List names ) { @Override public void resolveTable( List names, NameMatcher nameMatcher, Path path, Resolved resolved ) { - final List imperfectResolves = new ArrayList<>(); final List resolves = ((ResolvedImpl) resolved).resolves; // Look in the default schema, then default catalog, then root schema. - for ( List schemaPath : validator.catalogReader.getSchemaPaths() ) { - resolve_( validator.catalogReader.getRootSchema(), names, schemaPath, nameMatcher, path, resolved ); - for ( Resolve resolve : resolves ) { - if ( resolve.remainingNames.isEmpty() ) { - // There is a full match. Return it as the only match. 
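// The schema-path iteration above gives way to a single root-schema lookup.
// A condensed sketch of the replacement logic (the hunk below resolves the
// table twice; looking it up once and reusing the reference is equivalent,
// assuming CatalogTable is a CatalogEntity):
//
//     CatalogTable table = validator.catalogReader.getRootSchema().getTable( names );
//     if ( table != null ) {
//         resolves.add( new Resolve( table ) );
//     }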
- ((ResolvedImpl) resolved).clear(); - resolves.add( resolve ); - return; - } - } - imperfectResolves.addAll( resolves ); - } - // If there were no matches in the last round, restore those found in previous rounds - if ( resolves.isEmpty() ) { - resolves.addAll( imperfectResolves ); + CatalogTable table = validator.catalogReader.getRootSchema().getTable( names ); + if ( table != null ) { + resolves.add( new Resolve( validator.catalogReader.getRootSchema().getTable( names ) ) ); } } @@ -123,48 +103,10 @@ public void resolveTable( List names, NameMatcher nameMatcher, Path path // todo dl: refactor for 0.10 private void resolve_( final PolyphenyDbSchema rootSchema, List names, List schemaNames, NameMatcher nameMatcher, Path path, Resolved resolved ) { final List concat = ImmutableList.builder().addAll( schemaNames ).addAll( names ).build(); - PolyphenyDbSchema schema = rootSchema; - SqlValidatorNamespace namespace = null; - List remainingNames = concat; - for ( String schemaName : concat ) { - if ( schema == rootSchema && nameMatcher.matches( schemaName, schema.getName() ) ) { - remainingNames = Util.skip( remainingNames ); - continue; - } - final PolyphenyDbSchema subSchema = schema.getSubNamespace( schemaName, nameMatcher.isCaseSensitive() ); - if ( subSchema != null ) { - path = path.plus( null, -1, subSchema.getName(), StructKind.NONE ); - remainingNames = Util.skip( remainingNames ); - schema = subSchema; - namespace = new SchemaNamespace( validator, ImmutableList.copyOf( path.stepNames() ) ); - continue; - } - PolyphenyDbSchema.TableEntry entry = schema.getTable( schemaName ); - if ( entry == null ) { - entry = schema.getTableBasedOnNullaryFunction( schemaName, nameMatcher.isCaseSensitive() ); - } - if ( entry != null ) { - path = path.plus( null, -1, entry.name, StructKind.NONE ); - remainingNames = Util.skip( remainingNames ); - final Entity entity = entry.getTable(); - - ValidatorTable table2 = null; - if ( entity instanceof Wrapper ) { - table2 = ((Wrapper) entity).unwrap( PreparingEntity.class ); - } - if ( table2 == null ) { - final AlgOptSchema algOptSchema = validator.catalogReader.unwrap( AlgOptSchema.class ); - final AlgDataType rowType = entity.getRowType( validator.typeFactory ); - table2 = AlgOptEntityImpl.create( algOptSchema, rowType, entry, entity.getCatalogEntity(), entity.getPartitionPlacement(), null ); - } - namespace = new TableNamespace( validator, table2 ); - resolved.found( namespace, false, null, path, remainingNames ); - return; - } - // neither sub-schema nor table - if ( namespace != null && !remainingNames.equals( names ) ) { - resolved.found( namespace, false, null, path, remainingNames ); - } + + CatalogTable table = rootSchema.getTable( concat ); + if ( table != null ) { + resolved.found( table ); return; } } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorImpl.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorImpl.java index d216c8acd8..eec7412dcc 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorImpl.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorImpl.java @@ -5511,7 +5511,7 @@ protected SqlNode visitScoped( SqlCall call ) { // Only visits arguments which are expressions. We don't want to qualify non-expressions such as 'x' in 'empno * 5 AS x'. 
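// Polarity flips at the call sites in this patch: isTableNonRelational( validator )
// becomes !isTableRelational( validator ). The renamed predicate (see the
// SqlValidatorUtil hunk below) decides via catalog lookups instead of the
// sub-schema's namespace type; a condensed sketch with the null guards elided:
//
//     boolean isTableRelational( SqlValidatorImpl validator ) {
//         SqlIdentifier id = (SqlIdentifier) validator.getTableScope().getNode();
//         if ( validator.getCatalogReader().getRootSchema().getGraph( id.names ) != null ) {
//             return false; // LPG namespace
//         }
//         if ( validator.getCatalogReader().getRootSchema().getCollection( id.names ) != null ) {
//             return false; // document collection
//         }
//         return true; // relational table
//     }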
ArgHandler argHandler = new CallCopyingArgHandler( call, false ); - if ( SqlValidatorUtil.isTableNonRelational( validator ) && call.getKind() == Kind.OTHER_FUNCTION && call.getOperator().getOperatorName() == OperatorName.ITEM ) { + if ( !SqlValidatorUtil.isTableRelational( validator ) && call.getKind() == Kind.OTHER_FUNCTION && call.getOperator().getOperatorName() == OperatorName.ITEM ) { return new SqlBasicCall( new SqlCrossMapItemOperator(), call.getOperandList().toArray( SqlNode[]::new ), ParserPos.ZERO ); } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorScope.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorScope.java index 57e25c67ee..aee7b7c4c2 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorScope.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorScope.java @@ -25,16 +25,17 @@ import java.util.List; import java.util.Map; import java.util.Objects; +import lombok.Getter; import org.polypheny.db.algebra.constant.Monotonicity; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.StructKind; +import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.nodes.validate.ValidatorCatalogReader; import org.polypheny.db.nodes.validate.ValidatorScope; import org.polypheny.db.sql.language.SqlCall; import org.polypheny.db.sql.language.SqlIdentifier; import org.polypheny.db.sql.language.SqlNode; import org.polypheny.db.sql.language.SqlNodeList; -import org.polypheny.db.sql.language.SqlSelect; import org.polypheny.db.sql.language.SqlWindow; import org.polypheny.db.util.Moniker; import org.polypheny.db.util.NameMatcher; @@ -192,7 +193,7 @@ default boolean isWithin( SqlValidatorScope scope2 ) { */ interface Resolved { - void found( SqlValidatorNamespace namespace, boolean nullable, SqlValidatorScope scope, Path path, List remainingNames ); + void found( CatalogEntity entity ); int count(); @@ -310,15 +311,8 @@ class ResolvedImpl implements Resolved { @Override - public void found( SqlValidatorNamespace namespace, boolean nullable, SqlValidatorScope scope, Path path, List remainingNames ) { - if ( scope instanceof TableScope ) { - scope = scope.getValidator().getSelectScope( (SqlSelect) scope.getNode() ); - } - if ( scope instanceof AggregatingSelectScope ) { - scope = ((AggregatingSelectScope) scope).parent; - assert scope instanceof SelectScope; - } - resolves.add( new Resolve( namespace, nullable, scope, path, remainingNames ) ); + public void found( CatalogEntity entity ) { + resolves.add( new Resolve( entity ) ); } @@ -348,26 +342,12 @@ public void clear() { */ class Resolve { - public final SqlValidatorNamespace namespace; - private final boolean nullable; - public final SqlValidatorScope scope; // may be null - public final Path path; - /** - * Names not matched; empty if it was a full match. - */ - final List remainingNames; - - - Resolve( SqlValidatorNamespace namespace, boolean nullable, SqlValidatorScope scope, Path path, List remainingNames ) { - this.namespace = Objects.requireNonNull( namespace ); - this.nullable = nullable; - this.scope = scope; - assert !(scope instanceof TableScope); - this.path = Objects.requireNonNull( path ); - this.remainingNames = - remainingNames == null - ? 
ImmutableList.of() - : ImmutableList.copyOf( remainingNames ); + @Getter + private final CatalogEntity entity; + + + Resolve( CatalogEntity entity ) { + this.entity = entity; } @@ -375,9 +355,7 @@ class Resolve { * The row type of the found namespace, nullable if the lookup has looked into outer joins. */ public AlgDataType rowType() { - return namespace.getValidator() - .getTypeFactory() - .createTypeWithNullability( namespace.getRowType(), nullable ); + return entity.getRowType(); } } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorUtil.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorUtil.java index 5ba106fda8..8e4013a7f4 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorUtil.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorUtil.java @@ -38,7 +38,8 @@ import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.algebra.type.AlgDataTypeFieldImpl; import org.polypheny.db.algebra.type.AlgDataTypeSystem; -import org.polypheny.db.catalog.Catalog.NamespaceType; +import org.polypheny.db.catalog.entity.CatalogCollection; +import org.polypheny.db.catalog.entity.CatalogGraphDatabase; import org.polypheny.db.languages.OperatorRegistry; import org.polypheny.db.languages.ParserPos; import org.polypheny.db.nodes.Node; @@ -47,11 +48,9 @@ import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptSchemaWithSampling; import org.polypheny.db.prepare.Prepare; -import org.polypheny.db.schema.AbstractPolyphenyDbSchema; import org.polypheny.db.schema.CustomColumnResolvingEntity; import org.polypheny.db.schema.Entity; import org.polypheny.db.schema.ExtensibleEntity; -import org.polypheny.db.schema.PolyphenyDbSchema; import org.polypheny.db.sql.language.SqlCall; import org.polypheny.db.sql.language.SqlDataTypeSpec; import org.polypheny.db.sql.language.SqlIdentifier; @@ -307,33 +306,15 @@ public static AlgDataTypeField getTargetField( AlgDataType rowType, AlgDataTypeF } - /** - * Resolves a multi-part identifier such as "SCHEMA.EMP.EMPNO" to a namespace. The returned namespace, never null, may represent a schema, table, column, etc. - */ - public static SqlValidatorNamespace lookup( SqlValidatorScope scope, List names ) { - assert names.size() > 0; - final NameMatcher nameMatcher = scope.getValidator().getCatalogReader().nameMatcher; - final SqlValidatorScope.ResolvedImpl resolved = new SqlValidatorScope.ResolvedImpl(); - scope.resolve( ImmutableList.of( names.get( 0 ) ), nameMatcher, false, resolved ); - assert resolved.count() == 1; - SqlValidatorNamespace namespace = resolved.only().namespace; - for ( String name : Util.skip( names ) ) { - namespace = namespace.lookupChild( name ); - assert namespace != null; - } - return namespace; - } - - public static void getSchemaObjectMonikers( ValidatorCatalogReader catalogReader, List names, List hints ) { // Assume that the last name is 'dummy' or similar. 
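// With ValidatorCatalogReader.getSchemaPaths() removed, the prefix loop below
// is commented out, which silences schema-object completion hints for now. A
// hedged interim form, assuming getAllSchemaObjectNames accepts the bare
// sub-name list without a schema-path prefix:
//
//     hints.addAll( catalogReader.getAllSchemaObjectNames( subNames ) );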
List subNames = Util.skipLast( names ); // Try successively with catalog.schema, catalog and no prefix - for ( List x : catalogReader.getSchemaPaths() ) { + /*for ( List x : catalogReader.getSchemaPaths() ) { final List names2 = ImmutableList.builder().addAll( x ).addAll( subNames ).build(); hints.addAll( catalogReader.getAllSchemaObjectNames( names2 ) ); - } + }*/ } @@ -512,21 +493,10 @@ private static ImmutableBitSet analyzeGroupExpr( SqlValidatorScope scope, GroupA assert resolved.count() == 1; final SqlValidatorScope.Resolve resolve = resolved.only(); final AlgDataType rowType = resolve.rowType(); - final int childNamespaceIndex = resolve.path.steps().get( 0 ).i; + final int childNamespaceIndex = 0; int namespaceOffset = 0; - if ( childNamespaceIndex > 0 ) { - // If not the first child, need to figure out the width of output types from all the preceding namespaces - final SqlValidatorScope ancestorScope = resolve.scope; - assert ancestorScope instanceof ListScope; - List children = ((ListScope) ancestorScope).getChildren(); - - for ( int j = 0; j < childNamespaceIndex; j++ ) { - namespaceOffset += children.get( j ).getRowType().getFieldCount(); - } - } - AlgDataTypeField field = nameMatcher.field( rowType, originalFieldName ); int origPos = namespaceOffset + field.getIndex(); @@ -641,7 +611,7 @@ static boolean containsMonotonic( SelectScope scope, SqlNodeList nodes ) { } - public static boolean isTableNonRelational( SqlValidatorImpl validator ) { + public static boolean isTableRelational( SqlValidatorImpl validator ) { if ( validator.getTableScope() == null ) { return false; } @@ -649,12 +619,15 @@ public static boolean isTableNonRelational( SqlValidatorImpl validator ) { return false; } SqlIdentifier id = ((SqlIdentifier) validator.getTableScope().getNode()); - PolyphenyDbSchema schema = validator.getCatalogReader().getRootSchema().getSubNamespace( id.names.get( 0 ), false ); - if ( schema == null ) { + CatalogGraphDatabase graph = validator.getCatalogReader().getRootSchema().getGraph( id.names ); + if ( graph != null ) { return false; } - - return ((AbstractPolyphenyDbSchema) schema).getNamespaceType() != NamespaceType.RELATIONAL; + CatalogCollection collection = validator.getCatalogReader().getRootSchema().getCollection( id.names ); + if ( collection != null ) { + return false; + } + return true; } From d7cf099a56ff46361dfba42a41497bfea8e938de Mon Sep 17 00:00:00 2001 From: datomo Date: Fri, 24 Feb 2023 12:48:24 +0100 Subject: [PATCH 021/436] initial classes for new entity logic, temp removed some validation --- .../algebra/AlgStructuredTypeFlattener.java | 4 ++-- .../polypheny/db/algebra/core/lpg/LpgAlg.java | 4 ++-- .../db/algebra/core/lpg/LpgModify.java | 6 +++--- .../db/algebra/core/lpg/LpgScan.java | 8 +++---- .../algebra/logical/lpg/LogicalLpgModify.java | 4 ++-- .../algebra/logical/lpg/LogicalLpgScan.java | 4 ++-- .../db/catalog/entity/CatalogEntity.java | 15 +++++++++++++ .../db/catalog/refactor/AllocationEntity.java | 21 +++++++++++++++++++ .../db/catalog/refactor/LogicalEntity.java | 21 +++++++++++++++++++ .../db/catalog/refactor/PhysicalEntity.java | 21 +++++++++++++++++++ .../db/catalog/refactor/QueryableEntity.java | 21 +++++++++++++++++++ .../catalog/refactor/TranslatableEntity.java | 21 +++++++++++++++++++ .../polypheny/db/plan/VisitorDataContext.java | 4 ++-- .../db/prepare/PolyphenyDbCatalogReader.java | 4 ---- .../processing/LogicalAlgAnalyzeShuttle.java | 5 +---- .../java/org/polypheny/db/schema/Schemas.java | 5 +++-- .../db/schema/graph/ModifiableGraph.java | 3 
++- .../org/polypheny/db/tools/AlgBuilder.java | 5 +++-- .../db/catalog/MockCatalogReader.java | 1 - .../org/polypheny/db/plan/RelOptUtilTest.java | 12 ++++------- .../db/test/RexProgramBuilderBase.java | 4 ++-- .../db/processing/DataContextImpl.java | 5 ++--- .../db/processing/DataMigratorImpl.java | 3 +-- .../db/routing/routers/BaseRouter.java | 11 +++++----- .../db/routing/routers/DmlRouterImpl.java | 7 +++---- .../db/transaction/EntityAccessMap.java | 2 +- .../org/polypheny/db/misc/AlgBuilderTest.java | 7 +++---- .../cypher2alg/CypherToAlgConverter.java | 10 ++++----- .../jdbc/rel2sql/AlgToSqlConverter.java | 6 +++--- .../languages/mql2alg/MqlToAlgConverter.java | 11 ++-------- .../polypheny/db/adapter/neo4j/NeoGraph.java | 3 ++- .../db/adapter/neo4j/NeoGraphImplementor.java | 4 ++-- .../neo4j/rules/graph/NeoLpgModify.java | 7 +++---- .../validate/IdentifierNamespace.java | 12 +++++------ .../db/sql/language/validate/ListScope.java | 11 +++++----- .../validate/MatchRecognizeScope.java | 4 ++-- .../language/validate/SqlValidatorImpl.java | 14 ++++++------- .../db/sql/language/validate/WithScope.java | 11 ++++++---- .../db/sql/sql2alg/SqlToAlgConverter.java | 16 +++++++------- 39 files changed, 220 insertions(+), 117 deletions(-) create mode 100644 core/src/main/java/org/polypheny/db/catalog/refactor/AllocationEntity.java create mode 100644 core/src/main/java/org/polypheny/db/catalog/refactor/LogicalEntity.java create mode 100644 core/src/main/java/org/polypheny/db/catalog/refactor/PhysicalEntity.java create mode 100644 core/src/main/java/org/polypheny/db/catalog/refactor/QueryableEntity.java create mode 100644 core/src/main/java/org/polypheny/db/catalog/refactor/TranslatableEntity.java diff --git a/core/src/main/java/org/polypheny/db/algebra/AlgStructuredTypeFlattener.java b/core/src/main/java/org/polypheny/db/algebra/AlgStructuredTypeFlattener.java index c9b2177309..ce321a25f6 100644 --- a/core/src/main/java/org/polypheny/db/algebra/AlgStructuredTypeFlattener.java +++ b/core/src/main/java/org/polypheny/db/algebra/AlgStructuredTypeFlattener.java @@ -67,7 +67,6 @@ import org.polypheny.db.algebra.logical.document.LogicalDocumentScan; import org.polypheny.db.algebra.logical.document.LogicalDocumentSort; import org.polypheny.db.algebra.logical.document.LogicalDocumentTransformer; -import org.polypheny.db.algebra.logical.lpg.LogicalGraph; import org.polypheny.db.algebra.logical.lpg.LogicalLpgAggregate; import org.polypheny.db.algebra.logical.lpg.LogicalLpgFilter; import org.polypheny.db.algebra.logical.lpg.LogicalLpgMatch; @@ -98,6 +97,7 @@ import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.algebra.type.StructKind; +import org.polypheny.db.catalog.refactor.LogicalEntity; import org.polypheny.db.languages.OperatorRegistry; import org.polypheny.db.nodes.Operator; import org.polypheny.db.plan.AlgOptCluster; @@ -482,7 +482,7 @@ public void rewriteAlg( LogicalLpgModify alg ) { @SuppressWarnings("unused") public void rewriteAlg( LogicalLpgScan scan ) { AlgNode alg = scan; - if ( !(scan.getGraph() instanceof LogicalGraph) ) { + if ( !(scan.getGraph() instanceof LogicalEntity) ) { alg = scan.getGraph().toAlg( toAlgContext, scan.getGraph() ); } setNewForOldRel( scan, alg ); diff --git a/core/src/main/java/org/polypheny/db/algebra/core/lpg/LpgAlg.java b/core/src/main/java/org/polypheny/db/algebra/core/lpg/LpgAlg.java index 809114ee43..b60098fec0 100644 --- a/core/src/main/java/org/polypheny/db/algebra/core/lpg/LpgAlg.java 
+++ b/core/src/main/java/org/polypheny/db/algebra/core/lpg/LpgAlg.java @@ -16,7 +16,7 @@ package org.polypheny.db.algebra.core.lpg; -import org.polypheny.db.schema.graph.Graph; +import org.polypheny.db.catalog.entity.CatalogGraphDatabase; /** @@ -26,7 +26,7 @@ public interface LpgAlg { NodeType getNodeType(); - default Graph getGraph() { + default CatalogGraphDatabase getGraph() { return null; } diff --git a/core/src/main/java/org/polypheny/db/algebra/core/lpg/LpgModify.java b/core/src/main/java/org/polypheny/db/algebra/core/lpg/LpgModify.java index 14a1f606e9..a7cec40137 100644 --- a/core/src/main/java/org/polypheny/db/algebra/core/lpg/LpgModify.java +++ b/core/src/main/java/org/polypheny/db/algebra/core/lpg/LpgModify.java @@ -22,10 +22,10 @@ import org.polypheny.db.algebra.SingleAlg; import org.polypheny.db.algebra.core.Modify.Operation; import org.polypheny.db.algebra.type.AlgDataType; +import org.polypheny.db.catalog.entity.CatalogGraphDatabase; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.rex.RexNode; -import org.polypheny.db.schema.graph.Graph; public abstract class LpgModify extends SingleAlg implements LpgAlg { @@ -34,14 +34,14 @@ public abstract class LpgModify extends SingleAlg implements LpgAlg { public final List ids; public final List operations; @Getter - public final Graph graph; + public final CatalogGraphDatabase graph; /** * Creates a {@link LpgModify}. * {@link org.polypheny.db.schema.ModelTrait#GRAPH} node, which is able to modify an LPG graph. */ - protected LpgModify( AlgOptCluster cluster, AlgTraitSet traits, Graph graph, AlgNode input, Operation operation, List ids, List operations, AlgDataType dmlRowType ) { + protected LpgModify( AlgOptCluster cluster, AlgTraitSet traits, CatalogGraphDatabase graph, AlgNode input, Operation operation, List ids, List operations, AlgDataType dmlRowType ) { super( cluster, traits, input ); this.operation = operation; this.ids = ids; diff --git a/core/src/main/java/org/polypheny/db/algebra/core/lpg/LpgScan.java b/core/src/main/java/org/polypheny/db/algebra/core/lpg/LpgScan.java index 580b3e5154..3cbc215820 100644 --- a/core/src/main/java/org/polypheny/db/algebra/core/lpg/LpgScan.java +++ b/core/src/main/java/org/polypheny/db/algebra/core/lpg/LpgScan.java @@ -21,23 +21,23 @@ import org.polypheny.db.algebra.AbstractAlgNode; import org.polypheny.db.algebra.type.AlgDataTypeFieldImpl; import org.polypheny.db.algebra.type.AlgRecordType; +import org.polypheny.db.catalog.entity.CatalogGraphDatabase; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgTraitSet; -import org.polypheny.db.schema.TranslatableGraph; import org.polypheny.db.type.PolyType; public abstract class LpgScan extends AbstractAlgNode implements LpgAlg { @Getter - protected final TranslatableGraph graph; + protected final CatalogGraphDatabase graph; //TranslatableGraph graph; /** * Creates a {@link LpgScan}. * {@link org.polypheny.db.schema.ModelTrait#GRAPH} native node, which is able to scan a LPG graph. 
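 *
 * A minimal construction sketch under the new signature; the graph now comes
 * straight from the catalog (the same lookup the Cypher converter uses later
 * in this series, with graphId as a stand-in):
 * <pre>
 * CatalogGraphDatabase graph = Catalog.getInstance().getGraph( graphId );
 * LpgScan scan = new LogicalLpgScan( cluster, cluster.traitSet(), graph, rowType );
 * </pre>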
*/ - public LpgScan( AlgOptCluster cluster, AlgTraitSet traitSet, TranslatableGraph graph ) { + public LpgScan( AlgOptCluster cluster, AlgTraitSet traitSet, CatalogGraphDatabase graph ) { super( cluster, traitSet ); this.graph = graph; this.rowType = new AlgRecordType( List.of( new AlgDataTypeFieldImpl( "g", 0, cluster.getTypeFactory().createPolyType( PolyType.GRAPH ) ) ) ); @@ -46,7 +46,7 @@ public LpgScan( AlgOptCluster cluster, AlgTraitSet traitSet, TranslatableGraph g @Override public String algCompareString() { - return "$" + getClass().getSimpleName() + "$" + graph.getId(); + return "$" + getClass().getSimpleName() + "$" + graph.id; } diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgModify.java b/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgModify.java index 20d1632973..e9d8162fe4 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgModify.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgModify.java @@ -25,13 +25,13 @@ import org.polypheny.db.algebra.core.Modify.Operation; import org.polypheny.db.algebra.core.lpg.LpgModify; import org.polypheny.db.algebra.core.relational.RelationalTransformable; +import org.polypheny.db.catalog.entity.CatalogGraphDatabase; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptUtil; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.prepare.Prepare.CatalogReader; import org.polypheny.db.rex.RexNode; -import org.polypheny.db.schema.graph.Graph; public class LogicalLpgModify extends LpgModify implements RelationalTransformable { @@ -40,7 +40,7 @@ public class LogicalLpgModify extends LpgModify implements RelationalTransformab /** * Subclass of {@link LpgModify} not targeted at any particular engine or calling convention. */ - public LogicalLpgModify( AlgOptCluster cluster, AlgTraitSet traits, Graph graph, AlgNode input, Operation operation, List ids, List operations ) { + public LogicalLpgModify( AlgOptCluster cluster, AlgTraitSet traits, CatalogGraphDatabase graph, AlgNode input, Operation operation, List ids, List operations ) { super( cluster, traits, graph, input, operation, ids, operations, AlgOptUtil.createDmlRowType( Kind.INSERT, cluster.getTypeFactory() ) ); } diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgScan.java b/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgScan.java index ee6df1db93..db5bbcf4ea 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgScan.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgScan.java @@ -28,6 +28,7 @@ import org.polypheny.db.algebra.logical.relational.LogicalRelScan; import org.polypheny.db.algebra.operators.OperatorName; import org.polypheny.db.algebra.type.AlgDataType; +import org.polypheny.db.catalog.entity.CatalogGraphDatabase; import org.polypheny.db.languages.OperatorRegistry; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgOptEntity; @@ -36,7 +37,6 @@ import org.polypheny.db.rex.RexBuilder; import org.polypheny.db.rex.RexNode; import org.polypheny.db.schema.ModelTrait; -import org.polypheny.db.schema.TranslatableGraph; public class LogicalLpgScan extends LpgScan implements RelationalTransformable { @@ -44,7 +44,7 @@ public class LogicalLpgScan extends LpgScan implements RelationalTransformable { /** * Subclass of {@link LpgScan} not targeted at any particular engine or calling convention. 
*/ - public LogicalLpgScan( AlgOptCluster cluster, AlgTraitSet traitSet, TranslatableGraph graph, AlgDataType rowType ) { + public LogicalLpgScan( AlgOptCluster cluster, AlgTraitSet traitSet, CatalogGraphDatabase graph, AlgDataType rowType ) { super( cluster, traitSet.replace( ModelTrait.GRAPH ), graph ); this.rowType = rowType; } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogEntity.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogEntity.java index f204ff11e7..9e96801964 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogEntity.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogEntity.java @@ -17,9 +17,14 @@ package org.polypheny.db.catalog.entity; import java.io.Serializable; +import org.apache.calcite.linq4j.Queryable; +import org.polypheny.db.adapter.DataContext; +import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.catalog.Catalog.EntityType; import org.polypheny.db.catalog.Catalog.NamespaceType; +import org.polypheny.db.plan.AlgOptEntity.ToAlgContext; +import org.polypheny.db.schema.PolyphenyDbSchema; import org.polypheny.db.schema.Wrapper; public abstract class CatalogEntity implements Wrapper, Serializable { @@ -54,4 +59,14 @@ public boolean isRolledUp( String fieldName ) { return false; } + + public Queryable asQueryable( DataContext root, PolyphenyDbSchema schema, String tableName ) { + throw new UnsupportedOperationException( "Not implemented by store" ); + } + + + public AlgNode toAlg( ToAlgContext toAlgContext, CatalogGraphDatabase graph ) { + throw new UnsupportedOperationException( "Not implemented by store" ); + } + } diff --git a/core/src/main/java/org/polypheny/db/catalog/refactor/AllocationEntity.java b/core/src/main/java/org/polypheny/db/catalog/refactor/AllocationEntity.java new file mode 100644 index 0000000000..70d0d975a6 --- /dev/null +++ b/core/src/main/java/org/polypheny/db/catalog/refactor/AllocationEntity.java @@ -0,0 +1,21 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.catalog.refactor; + +public interface AllocationEntity { + +} diff --git a/core/src/main/java/org/polypheny/db/catalog/refactor/LogicalEntity.java b/core/src/main/java/org/polypheny/db/catalog/refactor/LogicalEntity.java new file mode 100644 index 0000000000..46c91c1356 --- /dev/null +++ b/core/src/main/java/org/polypheny/db/catalog/refactor/LogicalEntity.java @@ -0,0 +1,21 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.catalog.refactor; + +public interface LogicalEntity { + +} diff --git a/core/src/main/java/org/polypheny/db/catalog/refactor/PhysicalEntity.java b/core/src/main/java/org/polypheny/db/catalog/refactor/PhysicalEntity.java new file mode 100644 index 0000000000..bdc5230d3b --- /dev/null +++ b/core/src/main/java/org/polypheny/db/catalog/refactor/PhysicalEntity.java @@ -0,0 +1,21 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.catalog.refactor; + +public interface PhysicalEntity { + +} diff --git a/core/src/main/java/org/polypheny/db/catalog/refactor/QueryableEntity.java b/core/src/main/java/org/polypheny/db/catalog/refactor/QueryableEntity.java new file mode 100644 index 0000000000..e8512a8100 --- /dev/null +++ b/core/src/main/java/org/polypheny/db/catalog/refactor/QueryableEntity.java @@ -0,0 +1,21 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.catalog.refactor; + +public interface QueryableEntity { + +} diff --git a/core/src/main/java/org/polypheny/db/catalog/refactor/TranslatableEntity.java b/core/src/main/java/org/polypheny/db/catalog/refactor/TranslatableEntity.java new file mode 100644 index 0000000000..2172bd5abc --- /dev/null +++ b/core/src/main/java/org/polypheny/db/catalog/refactor/TranslatableEntity.java @@ -0,0 +1,21 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.db.catalog.refactor; + +public interface TranslatableEntity { + +} diff --git a/core/src/main/java/org/polypheny/db/plan/VisitorDataContext.java b/core/src/main/java/org/polypheny/db/plan/VisitorDataContext.java index 145676b1d8..4f0555f212 100644 --- a/core/src/main/java/org/polypheny/db/plan/VisitorDataContext.java +++ b/core/src/main/java/org/polypheny/db/plan/VisitorDataContext.java @@ -51,7 +51,7 @@ import org.polypheny.db.rex.RexInputRef; import org.polypheny.db.rex.RexLiteral; import org.polypheny.db.rex.RexNode; -import org.polypheny.db.schema.SchemaPlus; +import org.polypheny.db.schema.PolyphenyDbSchema; import org.polypheny.db.transaction.Statement; import org.polypheny.db.util.NlsString; import org.polypheny.db.util.Pair; @@ -72,7 +72,7 @@ public VisitorDataContext( Object[] values ) { @Override - public SchemaPlus getRootSchema() { + public PolyphenyDbSchema getRootSchema() { throw new UnsupportedOperationException( "This operation is not supported for " + getClass().getSimpleName() ); } diff --git a/core/src/main/java/org/polypheny/db/prepare/PolyphenyDbCatalogReader.java b/core/src/main/java/org/polypheny/db/prepare/PolyphenyDbCatalogReader.java index b9488816ea..e49a8d0c07 100644 --- a/core/src/main/java/org/polypheny/db/prepare/PolyphenyDbCatalogReader.java +++ b/core/src/main/java/org/polypheny/db/prepare/PolyphenyDbCatalogReader.java @@ -33,7 +33,6 @@ package org.polypheny.db.prepare; -import com.google.common.collect.ImmutableList; import java.util.ArrayList; import java.util.List; import java.util.Objects; @@ -114,9 +113,6 @@ public AlgDataType getNamedType( Identifier typeName ) { @Override public List getAllSchemaObjectNames( List names ) { final PolyphenyDbSchema schema = ValidatorUtil.getSchema( rootSchema, names, Wrapper.nameMatcher ); - if ( schema == null ) { - return ImmutableList.of(); - } final List result = new ArrayList<>(); for ( String subSchema : rootSchema.getNamespaceNames() ) { diff --git a/core/src/main/java/org/polypheny/db/processing/LogicalAlgAnalyzeShuttle.java b/core/src/main/java/org/polypheny/db/processing/LogicalAlgAnalyzeShuttle.java index 27f3668a1e..f3ebc8ccdc 100644 --- a/core/src/main/java/org/polypheny/db/processing/LogicalAlgAnalyzeShuttle.java +++ b/core/src/main/java/org/polypheny/db/processing/LogicalAlgAnalyzeShuttle.java @@ -29,7 +29,6 @@ import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.AlgShuttleImpl; import org.polypheny.db.algebra.core.Scan; -import org.polypheny.db.algebra.core.lpg.LpgAlg; import org.polypheny.db.algebra.logical.common.LogicalConstraintEnforcer; import org.polypheny.db.algebra.logical.document.LogicalDocumentAggregate; import org.polypheny.db.algebra.logical.document.LogicalDocumentFilter; @@ -62,7 +61,6 @@ import org.polypheny.db.algebra.logical.relational.LogicalUnion; import org.polypheny.db.catalog.entity.CatalogTable; import org.polypheny.db.prepare.AlgOptEntityImpl; -import org.polypheny.db.schema.graph.Graph; import org.polypheny.db.transaction.Statement; @@ -151,7 +149,7 @@ public AlgNode visit( LogicalLpgModify modify ) { @Override public AlgNode visit( LogicalLpgScan scan ) { - hashBasis.add( scan.getClass().getSimpleName() + "#" + scan.getGraph().getId() ); + hashBasis.add( scan.getClass().getSimpleName() + "#" + scan.getGraph().id ); return super.visit( scan ); } @@ -452,7 +450,6 @@ private void getPartitioningInfo( LogicalDocumentFilter filter ) { private void getPartitioningInfo( LogicalLpgFilter filter ) { - Graph graph = ((LpgAlg) 
filter.getInput()).getGraph(); // todo might add } diff --git a/core/src/main/java/org/polypheny/db/schema/Schemas.java b/core/src/main/java/org/polypheny/db/schema/Schemas.java index 571096da75..705ea3c01b 100644 --- a/core/src/main/java/org/polypheny/db/schema/Schemas.java +++ b/core/src/main/java/org/polypheny/db/schema/Schemas.java @@ -174,7 +174,7 @@ public static Expression tableExpression( SchemaPlus schema, Type elementType, S } - public static DataContext createDataContext( SchemaPlus rootSchema ) { + public static DataContext createDataContext( PolyphenyDbSchema rootSchema ) { return new DummyDataContext( rootSchema ); } @@ -202,7 +202,8 @@ public static Queryable queryable( DataContext root, Class clazz, Iter * Returns a {@link Queryable}, given a schema and table name. */ public static Queryable queryable( DataContext root, PolyphenyDbSchema schema, Class clazz, String tableName ) { - QueryableEntity table = (QueryableEntity) schema.getEntity( tableName ); + //QueryableEntity table = (QueryableEntity) schema.getEntity( tableName ); + CatalogTable table = schema.getTable( List.of( tableName ) ); return table.asQueryable( root, schema, tableName ); } diff --git a/core/src/main/java/org/polypheny/db/schema/graph/ModifiableGraph.java b/core/src/main/java/org/polypheny/db/schema/graph/ModifiableGraph.java index ea257785df..0790d0e6e7 100644 --- a/core/src/main/java/org/polypheny/db/schema/graph/ModifiableGraph.java +++ b/core/src/main/java/org/polypheny/db/schema/graph/ModifiableGraph.java @@ -23,6 +23,7 @@ import org.polypheny.db.algebra.core.lpg.LpgModify; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; +import org.polypheny.db.catalog.entity.CatalogGraphDatabase; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.prepare.PolyphenyDbCatalogReader; @@ -33,7 +34,7 @@ public interface ModifiableGraph extends Graph { - LpgModify toModificationAlg( AlgOptCluster cluster, AlgTraitSet traits, Graph graph, PolyphenyDbCatalogReader catalogReader, AlgNode input, Operation operation, List ids, List operations ); + LpgModify toModificationAlg( AlgOptCluster cluster, AlgTraitSet traits, CatalogGraphDatabase graph, PolyphenyDbCatalogReader catalogReader, AlgNode input, Operation operation, List ids, List operations ); Expression getExpression( SchemaPlus schema, String tableName, Class clazz ); diff --git a/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java b/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java index 9a1324db4d..6065f38fee 100644 --- a/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java +++ b/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java @@ -90,7 +90,6 @@ import org.polypheny.db.algebra.fun.AggFunction; import org.polypheny.db.algebra.logical.document.LogicalDocumentProject; import org.polypheny.db.algebra.logical.document.LogicalDocumentScan; -import org.polypheny.db.algebra.logical.lpg.LogicalGraph; import org.polypheny.db.algebra.logical.lpg.LogicalLpgMatch; import org.polypheny.db.algebra.logical.lpg.LogicalLpgProject; import org.polypheny.db.algebra.logical.lpg.LogicalLpgScan; @@ -103,6 +102,8 @@ import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.algebra.type.AlgDataTypeFieldImpl; import org.polypheny.db.algebra.type.StructKind; +import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.entity.CatalogGraphDatabase; import org.polypheny.db.languages.OperatorRegistry; import 
org.polypheny.db.languages.QueryLanguage; import org.polypheny.db.nodes.Operator; @@ -1372,7 +1373,7 @@ public AlgBuilder documentProject( List projects, List scans = new ArrayList<>(); @@ -439,7 +438,7 @@ public AlgNode handleGraphScan( LogicalLpgScan alg, Statement statement, @Nullab CatalogGraphPlacement graphPlacement = catalog.getGraphPlacement( catalogGraph.id, adapterId ); String name = PolySchemaBuilder.buildAdapterSchemaName( adapter.uniqueName, catalogGraph.name, graphPlacement.physicalName ); - Graph graph = reader.getGraph( name ); + CatalogGraphDatabase graph = reader.getGraph( name ); if ( !(graph instanceof TranslatableGraph) ) { // needs substitution later on @@ -448,7 +447,7 @@ public AlgNode handleGraphScan( LogicalLpgScan alg, Statement statement, @Nullab } // a native placement was used, we go with that - return new LogicalLpgScan( alg.getCluster(), alg.getTraitSet(), (TranslatableGraph) graph, alg.getRowType() ); + return new LogicalLpgScan( alg.getCluster(), alg.getTraitSet(), graph, alg.getRowType() ); } if ( scans.size() < 1 ) { throw new RuntimeException( "Error while routing graph query." ); @@ -494,7 +493,7 @@ private AlgNode handleGraphOnDocument( LogicalLpgScan alg, CatalogSchema namespa public AlgNode getRelationalScan( LogicalLpgScan alg, int adapterId, Statement statement ) { - CatalogGraphMapping mapping = Catalog.getInstance().getGraphMapping( alg.getGraph().getId() ); + CatalogGraphMapping mapping = Catalog.getInstance().getGraphMapping( alg.getGraph().id ); PreparingEntity nodesTable = getSubstitutionTable( statement, mapping.nodesId, mapping.idNodeId, adapterId ); PreparingEntity nodePropertiesTable = getSubstitutionTable( statement, mapping.nodesPropertyId, mapping.idNodesPropertyId, adapterId ); diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java b/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java index d29b25c718..0a6334e0dd 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java @@ -114,7 +114,6 @@ import org.polypheny.db.schema.ModifiableCollection; import org.polypheny.db.schema.ModifiableEntity; import org.polypheny.db.schema.PolySchemaBuilder; -import org.polypheny.db.schema.graph.Graph; import org.polypheny.db.schema.graph.ModifiableGraph; import org.polypheny.db.tools.AlgBuilder; import org.polypheny.db.tools.RoutedAlgBuilder; @@ -763,7 +762,7 @@ public AlgNode routeDocumentDml( LogicalDocumentModify alg, Statement statement, @Override public AlgNode routeGraphDml( LogicalLpgModify alg, Statement statement ) { - CatalogGraphDatabase catalogGraph = Catalog.getInstance().getGraph( alg.getGraph().getId() ); + CatalogGraphDatabase catalogGraph = alg.getGraph(); return routeGraphDml( alg, statement, catalogGraph, catalogGraph.placements ); } @@ -784,7 +783,7 @@ public AlgNode routeGraphDml( LogicalLpgModify alg, Statement statement, Catalog CatalogGraphPlacement graphPlacement = Catalog.getInstance().getGraphPlacement( catalogGraph.id, adapterId ); String name = PolySchemaBuilder.buildAdapterSchemaName( adapter.uniqueName, catalogGraph.name, graphPlacement.physicalName ); - Graph graph = reader.getGraph( name ); + CatalogGraphDatabase graph = reader.getGraph( name ); if ( graph == null ) { // move "slower" updates in front modifies.add( 0, attachRelationalModify( alg, adapterId, statement ) ); @@ -968,7 +967,7 @@ private List attachRelationalDocInsert( LogicalDocumentModify alg, Stat private 
AlgNode attachRelationalModify( LogicalLpgModify alg, int adapterId, Statement statement ) { - CatalogGraphMapping mapping = Catalog.getInstance().getGraphMapping( alg.getGraph().getId() ); + CatalogGraphMapping mapping = Catalog.getInstance().getGraphMapping( alg.getGraph().id ); PreparingEntity nodesTable = getSubstitutionTable( statement, mapping.nodesId, mapping.idNodeId, adapterId ); PreparingEntity nodePropertiesTable = getSubstitutionTable( statement, mapping.nodesPropertyId, mapping.idNodesPropertyId, adapterId ); diff --git a/dbms/src/main/java/org/polypheny/db/transaction/EntityAccessMap.java b/dbms/src/main/java/org/polypheny/db/transaction/EntityAccessMap.java index 25405c6609..eed1adcecc 100644 --- a/dbms/src/main/java/org/polypheny/db/transaction/EntityAccessMap.java +++ b/dbms/src/main/java/org/polypheny/db/transaction/EntityAccessMap.java @@ -322,7 +322,7 @@ private void attachGraph( LpgAlg p ) { newAccess = Mode.READ_ACCESS; } // as graph is on the namespace level in the full polyschema it is unique and can be used like this - EntityIdentifier key = new EntityIdentifier( p.getGraph().getId(), 0, NamespaceLevel.NAMESPACE_LEVEL ); + EntityIdentifier key = new EntityIdentifier( p.getGraph().id, 0, NamespaceLevel.NAMESPACE_LEVEL ); accessMap.put( key, newAccess ); } diff --git a/dbms/src/test/java/org/polypheny/db/misc/AlgBuilderTest.java b/dbms/src/test/java/org/polypheny/db/misc/AlgBuilderTest.java index 65c5504def..081d8eb4d7 100644 --- a/dbms/src/test/java/org/polypheny/db/misc/AlgBuilderTest.java +++ b/dbms/src/test/java/org/polypheny/db/misc/AlgBuilderTest.java @@ -79,7 +79,6 @@ import org.polypheny.db.rex.RexNode; import org.polypheny.db.runtime.PolyphenyDbException; import org.polypheny.db.schema.PolyphenyDbSchema; -import org.polypheny.db.schema.SchemaPlus; import org.polypheny.db.test.Matchers; import org.polypheny.db.tools.AlgBuilder; import org.polypheny.db.tools.FrameworkConfig; @@ -150,14 +149,14 @@ private static void dropTestSchema() throws SQLException { private AlgBuilder createAlgBuilder() { - final SchemaPlus rootSchema = transaction.getSchema().plus(); + final PolyphenyDbSchema rootSchema = transaction.getSchema(); FrameworkConfig config = Frameworks.newConfigBuilder() .parserConfig( Parser.ParserConfig.DEFAULT ) - .defaultSchema( rootSchema.getSubNamespace( transaction.getDefaultSchema().name ) ) + .defaultSchema( rootSchema ) .traitDefs( (List) null ) .programs( Programs.heuristicJoinOrder( Programs.RULE_SET, true, 2 ) ) .prepareContext( new ContextImpl( - PolyphenyDbSchema.from( rootSchema ), + rootSchema, new SlimDataContext() { @Override public JavaTypeFactory getTypeFactory() { diff --git a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/cypher2alg/CypherToAlgConverter.java b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/cypher2alg/CypherToAlgConverter.java index d770d9af73..8263e22e3f 100644 --- a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/cypher2alg/CypherToAlgConverter.java +++ b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/cypher2alg/CypherToAlgConverter.java @@ -36,7 +36,6 @@ import org.polypheny.db.algebra.constant.Kind; import org.polypheny.db.algebra.core.AggregateCall; import org.polypheny.db.algebra.core.Modify.Operation; -import org.polypheny.db.algebra.logical.lpg.LogicalGraph; import org.polypheny.db.algebra.logical.lpg.LogicalLpgFilter; import org.polypheny.db.algebra.logical.lpg.LogicalLpgMatch; import org.polypheny.db.algebra.logical.lpg.LogicalLpgModify; @@ -50,6 +49,7 
@@ import org.polypheny.db.algebra.type.AlgDataTypeFieldImpl; import org.polypheny.db.algebra.type.AlgRecordType; import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.entity.CatalogGraphDatabase; import org.polypheny.db.catalog.exceptions.UnknownSchemaException; import org.polypheny.db.cypher.CypherNode; import org.polypheny.db.cypher.CypherNode.CypherFamily; @@ -109,7 +109,7 @@ public AlgRoot convert( CypherNode query, ExtendedQueryParameters parameters, Al databaseId = parameters.getDatabaseId(); } - LogicalGraph graph = new LogicalGraph( databaseId ); + CatalogGraphDatabase graph = Catalog.getInstance().getGraph( databaseId ); if ( parameters.isFullGraph() ) { // simple full graph scan @@ -128,7 +128,7 @@ public AlgRoot convert( CypherNode query, ExtendedQueryParameters parameters, Al } - private AlgNode buildFullScan( LogicalGraph graph ) { + private AlgNode buildFullScan( CatalogGraphDatabase graph ) { return new LogicalLpgScan( cluster, cluster.traitSet(), @@ -405,7 +405,7 @@ public static class CypherContext { private final Queue> rexQueue = new LinkedList<>(); private final Queue> rexAggQueue = new LinkedList<>(); public final CypherNode original; - public final LogicalGraph graph; + public final CatalogGraphDatabase graph; public final AlgDataType graphType; public final AlgDataType booleanType; @@ -423,7 +423,7 @@ public static class CypherContext { private CypherContext( CypherNode original, - LogicalGraph graph, + CatalogGraphDatabase graph, AlgOptCluster cluster, AlgBuilder algBuilder, RexBuilder rexBuilder, diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/rel2sql/AlgToSqlConverter.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/rel2sql/AlgToSqlConverter.java index 2acbee33ba..f80828d045 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/rel2sql/AlgToSqlConverter.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/rel2sql/AlgToSqlConverter.java @@ -453,7 +453,7 @@ public Result visit( Modify modify ) { final Context context = aliasContext( pairs, false ); // Target Table Name - final SqlIdentifier sqlTargetTable = getPhysicalTableName( modify.getEntity().getPartitionPlacement() ); + final SqlIdentifier sqlTargetTable = getPhysicalTableName( modify.getEntity().getPartitionPlacement().unwrap( CatalogPartitionPlacement.class ) ); switch ( modify.getOperation() ) { case INSERT: { @@ -466,7 +466,7 @@ public Result visit( Modify modify ) { sqlTargetTable, sqlSource, physicalIdentifierList( - modify.getEntity().getPartitionPlacement(), + modify.getEntity().getPartitionPlacement().unwrap( CatalogPartitionPlacement.class ), modify.getInput().getRowType().getFieldNames() ) ); return result( sqlInsert, ImmutableList.of(), modify, null ); } @@ -475,7 +475,7 @@ public Result visit( Modify modify ) { final SqlUpdate sqlUpdate = new SqlUpdate( POS, sqlTargetTable, - physicalIdentifierList( modify.getEntity().getPartitionPlacement(), modify.getUpdateColumnList() ), + physicalIdentifierList( modify.getEntity().getPartitionPlacement().unwrap( CatalogPartitionPlacement.class ), modify.getUpdateColumnList() ), exprList( context, modify.getSourceExpressionList() ), ((SqlSelect) input.node).getWhere(), input.asSelect(), diff --git a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql2alg/MqlToAlgConverter.java b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql2alg/MqlToAlgConverter.java index 
64d54c5ab8..3862ac5de8 100644 --- a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql2alg/MqlToAlgConverter.java +++ b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql2alg/MqlToAlgConverter.java @@ -65,11 +65,9 @@ import org.polypheny.db.algebra.type.AlgDataTypeFactory.Builder; import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.algebra.type.AlgDataTypeFieldImpl; -import org.polypheny.db.algebra.type.AlgDataTypeImpl; import org.polypheny.db.algebra.type.AlgDataTypeSystem; -import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.Catalog.NamespaceType; -import org.polypheny.db.catalog.Catalog.Pattern; +import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; import org.polypheny.db.languages.OperatorRegistry; import org.polypheny.db.languages.QueryLanguage; import org.polypheny.db.languages.QueryParameters; @@ -96,8 +94,6 @@ import org.polypheny.db.rex.RexInputRef; import org.polypheny.db.rex.RexLiteral; import org.polypheny.db.rex.RexNode; -import org.polypheny.db.schema.LogicalEntity; -import org.polypheny.db.schema.PolyphenyDbSchema.TableEntryImpl; import org.polypheny.db.schema.document.DocumentUtil; import org.polypheny.db.type.PolyType; import org.polypheny.db.type.PolyTypeFactoryImpl; @@ -346,11 +342,8 @@ private AlgOptEntity getEntity( MqlCollectionStatement query, String dbSchemaNam return AlgOptEntityImpl.create( table.getRelOptSchema(), rowType, - new TableEntryImpl( - catalogReader.getRootSchema(), names.get( names.size() - 1 ), - new LogicalEntity( Catalog.getInstance().getSchemas( Catalog.defaultDatabaseId, new Pattern( dbSchemaName ) ).get( 0 ).id, names.get( 0 ), names.get( names.size() - 1 ), List.of(), List.of(), AlgDataTypeImpl.proto( rowType ), NamespaceType.GRAPH ) ), table.getCatalogEntity(), - table.getPartitionPlacement(), + table.getPartitionPlacement().unwrap( CatalogPartitionPlacement.class ), 1.0 ); } diff --git a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/NeoGraph.java b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/NeoGraph.java index 3f51fc07c1..4eb98f0666 100644 --- a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/NeoGraph.java +++ b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/NeoGraph.java @@ -45,6 +45,7 @@ import org.polypheny.db.algebra.logical.lpg.LogicalLpgModify; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; +import org.polypheny.db.catalog.entity.CatalogGraphDatabase; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgOptEntity.ToAlgContext; import org.polypheny.db.plan.AlgTraitSet; @@ -101,7 +102,7 @@ public NeoGraph( String name, TransactionProvider transactionProvider, Driver db public LpgModify toModificationAlg( AlgOptCluster cluster, AlgTraitSet traits, - org.polypheny.db.schema.graph.Graph graph, + CatalogGraphDatabase graph, PolyphenyDbCatalogReader catalogReader, AlgNode input, Operation operation, diff --git a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/NeoGraphImplementor.java b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/NeoGraphImplementor.java index 4ca84fced0..27903eade7 100644 --- a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/NeoGraphImplementor.java +++ b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/NeoGraphImplementor.java @@ -19,7 +19,6 @@ import static 
org.polypheny.db.adapter.neo4j.util.NeoStatements.as_; import static org.polypheny.db.adapter.neo4j.util.NeoStatements.distinct_; import static org.polypheny.db.adapter.neo4j.util.NeoStatements.edge_; -import static org.polypheny.db.adapter.neo4j.util.NeoStatements.labels_; import static org.polypheny.db.adapter.neo4j.util.NeoStatements.literal_; import static org.polypheny.db.adapter.neo4j.util.NeoStatements.match_; import static org.polypheny.db.adapter.neo4j.util.NeoStatements.node_; @@ -45,6 +44,7 @@ import org.polypheny.db.algebra.core.lpg.LpgProject; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeField; +import org.polypheny.db.catalog.entity.CatalogGraphDatabase; import org.polypheny.db.rex.RexLiteral; import org.polypheny.db.type.PathType; import org.polypheny.db.util.Pair; @@ -66,7 +66,7 @@ public class NeoGraphImplementor extends AlgShuttleImpl { @Setter @Getter - private NeoGraph graph; + private CatalogGraphDatabase graph; @Setter @Getter diff --git a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/rules/graph/NeoLpgModify.java b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/rules/graph/NeoLpgModify.java index a112c98804..4e85b8a3e5 100644 --- a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/rules/graph/NeoLpgModify.java +++ b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/rules/graph/NeoLpgModify.java @@ -31,7 +31,6 @@ import java.util.List; import java.util.Map; import java.util.stream.Collectors; -import org.polypheny.db.adapter.neo4j.NeoGraph; import org.polypheny.db.adapter.neo4j.NeoGraphImplementor; import org.polypheny.db.adapter.neo4j.rules.NeoGraphAlg; import org.polypheny.db.adapter.neo4j.util.NeoStatements; @@ -45,12 +44,12 @@ import org.polypheny.db.algebra.core.lpg.LpgProject; import org.polypheny.db.algebra.core.lpg.LpgValues; import org.polypheny.db.algebra.operators.OperatorName; +import org.polypheny.db.catalog.entity.CatalogGraphDatabase; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgOptUtil; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.rex.RexCall; import org.polypheny.db.rex.RexNode; -import org.polypheny.db.schema.graph.Graph; import org.polypheny.db.schema.graph.PolyEdge; import org.polypheny.db.schema.graph.PolyNode; @@ -64,7 +63,7 @@ public class NeoLpgModify extends LpgModify implements NeoGraphAlg { * @param traits Traits active for this node, including {@link org.polypheny.db.schema.ModelTrait#GRAPH} * @param input Input algebraic expression */ - public NeoLpgModify( AlgOptCluster cluster, AlgTraitSet traits, Graph graph, AlgNode input, Operation operation, List ids, List operations ) { + public NeoLpgModify( AlgOptCluster cluster, AlgTraitSet traits, CatalogGraphDatabase graph, AlgNode input, Operation operation, List ids, List operations ) { super( cluster, traits, graph, input, operation, ids, operations, AlgOptUtil.createDmlRowType( Kind.INSERT, cluster.getTypeFactory() ) ); } @@ -77,7 +76,7 @@ public AlgNode copy( AlgTraitSet traitSet, List inputs ) { @Override public void implement( NeoGraphImplementor implementor ) { - implementor.setGraph( (NeoGraph) getGraph() ); + implementor.setGraph( graph ); implementor.setDml( true ); implementor.visitChild( 0, getInput() ); diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/IdentifierNamespace.java 
b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/IdentifierNamespace.java index f9a9298726..5bfd29eede 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/IdentifierNamespace.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/IdentifierNamespace.java @@ -109,14 +109,14 @@ private SqlValidatorNamespace resolveImpl( SqlIdentifier id ) { SqlValidatorScope.Resolve previousResolve = null; if ( resolved.count() == 1 ) { final SqlValidatorScope.Resolve resolve = previousResolve = resolved.only(); - if ( resolve.remainingNames.isEmpty() ) { + /*if ( resolve.remainingNames.isEmpty() ) { return resolve.namespace; - } + }*/ // If we're not case-sensitive, give an error. // If we're case-sensitive, we'll shortly try again and give an error then. - if ( !nameMatcher.isCaseSensitive() ) { + /*if ( !nameMatcher.isCaseSensitive() ) { throw validator.newValidationError( id, Static.RESOURCE.objectNotFoundWithin( resolve.remainingNames.get( 0 ), SqlIdentifier.getString( resolve.path.stepNames() ) ) ); - } + }*/ } // Failed to match. If we're matching case-sensitively, try a more lenient match. If we find something we can offer a helpful hint. @@ -126,7 +126,7 @@ private SqlValidatorNamespace resolveImpl( SqlIdentifier id ) { parentScope.resolveTable( names, liberalMatcher, SqlValidatorScope.Path.EMPTY, resolved ); if ( resolved.count() == 1 ) { final SqlValidatorScope.Resolve resolve = resolved.only(); - if ( resolve.remainingNames.isEmpty() || previousResolve == null ) { + /*if ( resolve.remainingNames.isEmpty() || previousResolve == null ) { // We didn't match it case-sensitive, so they must have had the right identifier, wrong case. // // If previousResolve is null, we matched nothing case-sensitive and everything case-insensitive, so the mismatch must have been at position 0. @@ -143,7 +143,7 @@ private SqlValidatorNamespace resolveImpl( SqlIdentifier id ) { } } else { throw validator.newValidationError( id, Static.RESOURCE.objectNotFoundWithin( resolve.remainingNames.get( 0 ), SqlIdentifier.getString( resolve.path.stepNames() ) ) ); - } + }*/ } } throw validator.newValidationError( id, Static.RESOURCE.objectNotFound( id.getComponent( 0 ).toString() ) ); diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/ListScope.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/ListScope.java index 45028b96bc..0981ac1b86 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/ListScope.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/ListScope.java @@ -28,8 +28,6 @@ import org.polypheny.db.algebra.constant.MonikerType; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeField; -import org.polypheny.db.algebra.type.StructKind; -import org.polypheny.db.nodes.validate.ValidatorTable; import org.polypheny.db.sql.language.SqlNode; import org.polypheny.db.util.Moniker; import org.polypheny.db.util.MonikerImpl; @@ -97,7 +95,7 @@ private ScopeChild findChild( List names, NameMatcher nameMatcher ) { } // Look up the 2 tables independently, in case one is qualified with catalog & schema and the other is not. 
- final ValidatorTable table = child.namespace.getTable(); + /*final ValidatorTable table = child.namespace.getTable(); if ( table != null ) { final ResolvedImpl resolved = new ResolvedImpl(); resolveTable( names, nameMatcher, Path.EMPTY, resolved ); @@ -107,7 +105,7 @@ private ScopeChild findChild( List names, NameMatcher nameMatcher ) { && resolved.only().namespace.getTable().getQualifiedName().equals( table.getQualifiedName() ) ) { return child; } - } + }*/ } return null; } @@ -172,8 +170,9 @@ public void findAliases( Collection result ) { public void resolve( List names, NameMatcher nameMatcher, boolean deep, Resolved resolved ) { // First resolve by looking through the child namespaces. final ScopeChild child0 = findChild( names, nameMatcher ); + if ( child0 != null ) { - final Step path = + /*final Step path = Path.EMPTY.plus( child0.namespace.getRowType(), child0.ordinal, @@ -184,7 +183,7 @@ public void resolve( List names, NameMatcher nameMatcher, boolean deep, child0.nullable, this, path, - null ); + null );*/ return; } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/MatchRecognizeScope.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/MatchRecognizeScope.java index 599753fdef..a1c2e1a237 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/MatchRecognizeScope.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/MatchRecognizeScope.java @@ -97,10 +97,10 @@ public void resolve( List names, NameMatcher nameMatcher, boolean deep, if ( patternVars.contains( names.get( 0 ) ) ) { final Step path = new EmptyPath().plus( null, 0, null, StructKind.FULLY_QUALIFIED ); final ScopeChild child = children.get( 0 ); - resolved.found( child.namespace, child.nullable, this, path, names ); + /*resolved.found( child.namespace, child.nullable, this, path, names ); if ( resolved.count() > 0 ) { return; - } + }*/ } super.resolve( names, nameMatcher, deep, resolved ); } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorImpl.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorImpl.java index eec7412dcc..ac1c498705 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorImpl.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorImpl.java @@ -778,7 +778,7 @@ public final void lookupNameCompletionHints( SqlValidatorScope scope, List names ) { @Override public void resolveTable( List names, NameMatcher nameMatcher, Path path, Resolved resolved ) { if ( names.size() == 1 && names.equals( withItem.name.names ) ) { - final SqlValidatorNamespace ns = validator.getSqlNamespace( withItem ); - final Step path2 = path.plus( ns.getRowType(), 0, names.get( 0 ), StructKind.FULLY_QUALIFIED ); - resolved.found( ns, false, null, path2, null ); + //final SqlValidatorNamespace ns = validator.getSqlNamespace( withItem ); + //final Step path2 = path.plus( ns.getRowType(), 0, names.get( 0 ), StructKind.FULLY_QUALIFIED ); + CatalogEntity entity = validator.catalogReader.getRootSchema().getTable( names ); + resolved.found( entity ); return; } super.resolveTable( names, nameMatcher, path, resolved ); @@ -80,7 +82,8 @@ public void resolve( List names, NameMatcher nameMatcher, boolean deep, if ( names.size() == 1 && names.equals( withItem.name.names ) ) { final SqlValidatorNamespace ns = validator.getSqlNamespace( withItem ); 
final Step path = Path.EMPTY.plus( ns.getRowType(), 0, names.get( 0 ), StructKind.FULLY_QUALIFIED ); - resolved.found( ns, false, null, path, null ); + CatalogEntity entity = validator.catalogReader.getRootSchema().getTable( names ); + resolved.found( entity ); return; } super.resolve( names, nameMatcher, deep, resolved ); diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/sql2alg/SqlToAlgConverter.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/sql2alg/SqlToAlgConverter.java index 093512c9b6..1f5f6e6dd0 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/sql2alg/SqlToAlgConverter.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/sql2alg/SqlToAlgConverter.java @@ -2236,7 +2236,7 @@ protected AlgNode createJoin( Blackboard bb, AlgNode leftRel, AlgNode rightRel, private CorrelationUse getCorrelationUse( Blackboard bb, final AlgNode r0 ) { - final Set correlatedVariables = AlgOptUtil.getVariablesUsed( r0 ); + /*final Set correlatedVariables = AlgOptUtil.getVariablesUsed( r0 ); if ( correlatedVariables.isEmpty() ) { return null; } @@ -2328,7 +2328,8 @@ private CorrelationUse getCorrelationUse( Blackboard bb, final AlgNode r0 ) { // Add new node to leaves. leaves.add( r ); } - return new CorrelationUse( correlNames.get( 0 ), requiredColumns.build(), r ); + return new CorrelationUse( correlNames.get( 0 ), requiredColumns.build(), r );*/ + return null; } @@ -2340,7 +2341,7 @@ private CorrelationUse getCorrelationUse( Blackboard bb, final AlgNode r0 ) { * @return true if the sub-query is non-correlated */ private boolean isSubQueryNonCorrelated( AlgNode subq, Blackboard bb ) { - Set correlatedVariables = AlgOptUtil.getVariablesUsed( subq ); + /*Set correlatedVariables = AlgOptUtil.getVariablesUsed( subq ); for ( CorrelationId correlName : correlatedVariables ) { DeferredLookup lookup = mapCorrelToDeferred.get( correlName ); String originalRelName = lookup.getOriginalRelName(); @@ -2363,7 +2364,7 @@ private boolean isSubQueryNonCorrelated( AlgNode subq, Blackboard bb ) { break; } } while ( parentScope != null ); - } + }*/ return true; } @@ -3869,7 +3870,7 @@ Pair> lookupExp( SqlQualified qualified ) { // Found in current query's from list. Find which from item. // We assume that the order of the from clause items has been preserved. - final SqlValidatorScope ancestorScope = resolve.scope; + /*final SqlValidatorScope ancestorScope = resolve.scope; boolean isParent = ancestorScope != scope; if ( (inputs != null) && !isParent ) { final LookupContext algs = new LookupContext( this, inputs, systemFieldList.size() ); @@ -3885,7 +3886,7 @@ Pair> lookupExp( SqlQualified qualified ) { } final Map map = ImmutableMap.copyOf( fieldOffsets ); return Pair.of( node, map ); - } + } else { // We're referencing a relational expression which has not been converted yet. This occurs when from items are correlated, e.g. "select from emp as emp join emp.getDepts() as dept". // Create a temporary expression. 
@@ -3914,7 +3915,8 @@ Pair> lookupExp( SqlQualified qualified ) { final RexNode c = rexBuilder.makeCorrel( builder.uniquify().build(), correlId ); return Pair.of( c, fields.build() ); } - } + }}*/ + return null; } From 8c1420dfb7319ec58b5038138440c65ea86b4df0 Mon Sep 17 00:00:00 2001 From: datomo Date: Sat, 25 Feb 2023 12:50:30 +0100 Subject: [PATCH 022/436] replaced algOpt entities with catalog entities --- .../org/polypheny/db/StatisticsManager.java | 4 +- .../org/polypheny/db/adapter/Adapter.java | 16 +- .../org/polypheny/db/adapter/DataContext.java | 4 +- .../org/polypheny/db/adapter/DataSource.java | 4 +- .../org/polypheny/db/adapter/DataStore.java | 36 +- .../db/adapter/enumerable/EnumerableScan.java | 6 +- .../EnumerableTableModifyToStreamerRule.java | 14 +- .../enumerable/EnumerableTransformer.java | 4 +- .../lpg/EnumerableLpgTransformer.java | 2 +- .../db/adapter/index/CoWHashIndex.java | 8 +- .../db/adapter/index/CowMultiHashIndex.java | 8 +- .../org/polypheny/db/adapter/index/Index.java | 6 +- .../db/adapter/index/IndexManager.java | 10 +- .../polypheny/db/algebra/AbstractAlgNode.java | 4 +- .../polypheny/db/algebra/AlgFieldTrimmer.java | 12 +- .../db/algebra/AlgHomogeneousShuttle.java | 4 +- .../org/polypheny/db/algebra/AlgInput.java | 4 +- .../org/polypheny/db/algebra/AlgNode.java | 4 +- .../org/polypheny/db/algebra/AlgShuttle.java | 8 +- .../polypheny/db/algebra/AlgShuttleImpl.java | 8 +- .../algebra/AlgStructuredTypeFlattener.java | 30 +- .../algebra/UnsupportedFromInsertShuttle.java | 8 +- .../db/algebra/core/AlgFactories.java | 27 +- .../core/common/ConditionalExecute.java | 4 +- .../db/algebra/core/common/Modify.java | 51 + .../db/algebra/core/common/Scan.java | 40 + .../db/algebra/core/common/Transformer.java | 4 +- .../db/algebra/core/document/DocumentAlg.java | 5 - .../algebra/core/document/DocumentModify.java | 23 +- .../algebra/core/document/DocumentScan.java | 19 +- .../algebra/core/document/DocumentValues.java | 4 +- .../polypheny/db/algebra/core/lpg/LpgAlg.java | 7 +- .../db/algebra/core/lpg/LpgModify.java | 15 +- .../db/algebra/core/lpg/LpgScan.java | 18 +- .../db/algebra/core/lpg/LpgTransformer.java | 2 +- .../RelModify.java} | 67 +- .../{Scan.java => relational/RelScan.java} | 68 +- .../relational/RelationalTransformable.java | 14 +- .../db/algebra/externalize/AlgJsonReader.java | 16 +- .../common/LogicalConstraintEnforcer.java | 20 +- .../logical/common/LogicalStreamer.java | 21 +- .../document/LogicalDocumentModify.java | 4 +- .../logical/document/LogicalDocumentScan.java | 3 +- .../db/algebra/logical/lpg/LogicalGraph.java | 4 +- .../algebra/logical/lpg/LogicalLpgModify.java | 24 +- .../algebra/logical/lpg/LogicalLpgScan.java | 8 +- .../logical/lpg/LogicalLpgTransformer.java | 2 +- .../algebra/logical/lpg/LogicalLpgValues.java | 4 +- ...gicalModify.java => LogicalRelModify.java} | 26 +- .../logical/relational/LogicalRelScan.java | 14 +- .../relational/LogicalRelViewScan.java | 16 +- .../algebra/metadata/AlgMdAllPredicates.java | 4 +- .../db/algebra/metadata/AlgMdCollation.java | 6 +- .../metadata/AlgMdColumnUniqueness.java | 6 +- .../algebra/metadata/AlgMdDistribution.java | 6 +- .../metadata/AlgMdExpressionLineage.java | 8 +- .../db/algebra/metadata/AlgMdMaxRowCount.java | 4 +- .../db/algebra/metadata/AlgMdMinRowCount.java | 4 +- .../db/algebra/metadata/AlgMdNodeTypes.java | 6 +- .../db/algebra/metadata/AlgMdParallelism.java | 4 +- .../db/algebra/metadata/AlgMdPredicates.java | 4 +- .../db/algebra/metadata/AlgMdRowCount.java | 4 +-
.../db/algebra/metadata/AlgMdSize.java | 4 +- .../metadata/AlgMdTableReferences.java | 4 +- .../db/algebra/metadata/BuiltInMetadata.java | 6 +- .../metadata/JaninoRelMetadataProvider.java | 4 +- .../db/algebra/mutable/MutableAlgs.java | 17 +- .../db/algebra/mutable/MutableScan.java | 10 +- .../algebra/mutable/MutableTableModify.java | 6 +- .../db/algebra/rules/FilterScanRule.java | 16 +- .../db/algebra/rules/ProjectScanRule.java | 16 +- .../polypheny/db/algebra/stream/Delta.java | 4 +- .../db/algebra/stream/StreamRules.java | 18 +- .../org/polypheny/db/catalog/Catalog.java | 26 +- .../db/catalog/entity/CatalogEntity.java | 24 +- .../entity/CatalogMaterializedView.java | 7 +- .../db/catalog/entity/CatalogView.java | 9 +- .../allocation/Allocation.java} | 10 +- .../entity/allocation/AllocationGraph.java | 42 + .../entity/allocation/AllocationTable.java | 35 + .../db/catalog/entity/logical/Logical.java | 9 +- .../LogicalGraph.java} | 35 +- .../LogicalTable.java} | 26 +- .../physical/Physical.java} | 5 +- .../entity/physical/PhysicalGraph.java | 36 + .../entity/physical/PhysicalTable.java | 70 + .../{PhysicalEntity.java => CatalogType.java} | 22 +- .../db/catalog/refactor/ModifiableEntity.java | 40 + .../db/catalog/refactor/QueryableEntity.java | 9 + .../catalog/refactor/TranslatableEntity.java | 9 + .../java/org/polypheny/db/ddl/DdlManager.java | 72 +- .../polypheny/db/interpreter/Bindables.java | 53 +- .../org/polypheny/db/interpreter/Nodes.java | 4 +- .../polypheny/db/interpreter/ScanNode.java | 26 +- .../polypheny/db/partition/FrequencyMap.java | 4 +- .../db/partition/PartitionManager.java | 10 +- .../org/polypheny/db/plan/AlgOptUtil.java | 4 +- .../db/prepare/AlgOptEntityImpl.java | 6 +- .../db/prepare/PolyphenyDbCatalogReader.java | 8 +- .../org/polypheny/db/prepare/Prepare.java | 29 +- .../polypheny/db/processing/DataMigrator.java | 16 +- .../db/processing/DeepCopyShuttle.java | 4 +- .../processing/LogicalAlgAnalyzeShuttle.java | 18 +- .../db/processing/QueryProcessorHelpers.java | 13 +- .../polypheny/db/rex/RexTableInputRef.java | 4 +- .../org/polypheny/db/routing/DmlRouter.java | 8 +- .../functions/CrossModelFunctions.java | 7 +- .../polypheny/db/schema/LogicalEntity.java | 10 +- .../db/schema/ModifiableCollection.java | 2 +- .../polypheny/db/schema/ModifiableEntity.java | 6 +- .../db/schema/PolySchemaBuilder.java | 12 +- .../db/schema/PolyphenyDbSchema.java | 8 +- .../java/org/polypheny/db/schema/Schemas.java | 6 +- .../db/schema/TranslatableGraph.java | 2 +- .../db/schema/graph/ModifiableGraph.java | 8 +- .../org/polypheny/db/tools/AlgBuilder.java | 39 +- .../polypheny/db/tools/AlgBuilderFactory.java | 4 +- .../org/polypheny/db/tools/Frameworks.java | 7 +- .../polypheny/db/transaction/Transaction.java | 4 +- .../org/polypheny/db/util/BuiltInMethod.java | 2 +- .../db/view/MaterializedViewManager.java | 12 +- .../org/polypheny/db/view/ViewManager.java | 12 +- .../org/polypheny/db/catalog/MockCatalog.java | 26 +- .../java/org/polypheny/db/test/JdbcTest.java | 11 +- .../org/polypheny/db/ddl/DdlManagerImpl.java | 120 +- .../partition/AbstractPartitionManager.java | 10 +- .../db/partition/FrequencyMapImpl.java | 12 +- .../db/partition/HashPartitionManager.java | 4 +- .../db/partition/ListPartitionManager.java | 4 +- .../db/partition/RangePartitionManager.java | 4 +- .../TemperatureAwarePartitionManager.java | 8 +- .../db/processing/AbstractQueryProcessor.java | 18 +- .../processing/ConstraintEnforceAttacher.java | 22 +- .../db/processing/DataMigratorImpl.java | 30 +- 
.../shuttles/QueryParameterizer.java | 14 +- .../db/routing/UiRoutingPageUtil.java | 4 +- .../db/routing/routers/AbstractDqlRouter.java | 14 +- .../db/routing/routers/BaseRouter.java | 14 +- .../db/routing/routers/CachedPlanRouter.java | 4 +- .../db/routing/routers/DmlRouterImpl.java | 98 +- .../routers/FullPlacementQueryRouter.java | 12 +- .../db/routing/routers/IcarusRouter.java | 8 +- .../db/routing/routers/SimpleRouter.java | 8 +- .../CreateAllPlacementStrategy.java | 4 +- .../CreateSinglePlacementStrategy.java | 4 +- .../db/transaction/EntityAccessMap.java | 8 +- .../db/transaction/TransactionImpl.java | 4 +- .../db/view/MaterializedViewManagerImpl.java | 10 +- .../java/org/polypheny/db/cypher/DdlTest.java | 10 +- .../db/misc/HorizontalPartitioningTest.java | 12 +- .../db/misc/VerticalPartitioningTest.java | 6 +- .../db/statistics/StatisticsTest.java | 8 +- .../statistics/StatisticQueryProcessor.java | 14 +- .../monitoring/statistics/StatisticTable.java | 4 +- .../statistics/StatisticsManagerImpl.java | 20 +- .../org/polypheny/db/avatica/DbmsMeta.java | 22 +- plugins/cassandra-adapter/.gitignore | 3 - plugins/cassandra-adapter/build.gradle | 94 -- plugins/cassandra-adapter/gradle.properties | 27 - .../db/adapter/cassandra/CassandraAlg.java | 128 -- .../cassandra/CassandraConvention.java | 52 - .../cassandra/CassandraEnumerable.java | 75 - .../cassandra/CassandraEnumerator.java | 157 -- .../db/adapter/cassandra/CassandraFilter.java | 361 ----- .../db/adapter/cassandra/CassandraLimit.java | 111 -- .../db/adapter/cassandra/CassandraMethod.java | 73 - .../CassandraPhysicalNameProvider.java | 199 --- .../db/adapter/cassandra/CassandraPlugin.java | 599 -------- .../adapter/cassandra/CassandraProject.java | 177 --- .../db/adapter/cassandra/CassandraScan.java | 114 -- .../db/adapter/cassandra/CassandraSchema.java | 361 ----- .../db/adapter/cassandra/CassandraSort.java | 113 -- .../db/adapter/cassandra/CassandraTable.java | 361 ----- .../cassandra/CassandraTableModify.java | 167 --- .../CassandraToEnumerableConverter.java | 212 --- .../db/adapter/cassandra/CassandraValues.java | 204 --- .../db/adapter/cassandra/package-info.java | 23 - .../rules/CassandraConverterRule.java | 48 - .../cassandra/rules/CassandraFilterRule.java | 182 --- .../cassandra/rules/CassandraLimitRule.java | 51 - .../cassandra/rules/CassandraProjectRule.java | 74 - .../cassandra/rules/CassandraRules.java | 401 ------ .../cassandra/rules/CassandraSortRule.java | 135 -- .../rules/CassandraTableModificationRule.java | 88 -- .../CassandraToEnumerableConverterRule.java | 53 - .../cassandra/rules/CassandraValuesRule.java | 45 - .../cassandra/util/CassandraTypesUtils.java | 499 ------- .../cassandra/util/CassandraUtils.java | 99 -- .../util/CassandraTypesUtilsTest.java | 182 --- .../db/test/CassandraAdapterTest.java | 215 --- .../src/test/resources/cassandra.yaml | 586 -------- .../src/test/resources/logback-test.xml | 32 - .../src/test/resources/model.json | 16 - .../src/test/resources/twissandra.cql | 1261 ----------------- .../adapter/cottontail/CottontailEntity.java | 10 +- .../adapter/cottontail/CottontailPlugin.java | 18 +- .../cottontail/algebra/CottontailScan.java | 4 +- .../algebra/CottontailTableModify.java | 4 +- .../CottontailTableModificationRule.java | 14 +- .../java/org/polypheny/db/cql/Combiner.java | 6 +- .../polypheny/db/cql/Cql2RelConverter.java | 6 +- .../java/org/polypheny/db/cql/TableIndex.java | 8 +- .../org/polypheny/db/adapter/csv/CsvScan.java | 5 +- .../polypheny/db/adapter/csv/CsvSchema.java | 4 +- 
.../polypheny/db/adapter/csv/CsvSource.java | 9 +- .../admin/CypherAlterDatabaseAlias.java | 4 +- .../admin/CypherCreateDatabaseAlias.java | 4 +- .../db/cypher/admin/CypherDropAlias.java | 4 +- .../db/cypher/admin/CypherDropDatabase.java | 4 +- .../cypher2alg/CypherToAlgConverter.java | 22 +- .../db/cypher/ddl/CypherAddPlacement.java | 4 +- .../db/cypher/ddl/CypherDropPlacement.java | 4 +- .../db/adapter/druid/DruidQuery.java | 21 +- .../elasticsearch/ElasticsearchScan.java | 4 +- plugins/ethereum-adapter/build.gradle | 89 -- plugins/ethereum-adapter/gradle.properties | 27 - .../db/adapter/ethereum/BlockReader.java | 78 - .../adapter/ethereum/EthereumEnumerator.java | 316 ----- .../adapter/ethereum/EthereumFieldType.java | 108 -- .../db/adapter/ethereum/EthereumMapper.java | 104 -- .../db/adapter/ethereum/EthereumPlugin.java | 274 ---- .../ethereum/EthereumPredicateFactory.java | 123 -- .../db/adapter/ethereum/EthereumSchema.java | 139 -- .../db/adapter/ethereum/EthereumTable.java | 127 -- .../adapter/ethereum/TransactionReader.java | 66 - .../polypheny/db/adapter/file/FilePlugin.java | 16 +- .../db/adapter/file/FileStoreSchema.java | 4 +- .../adapter/file/FileTranslatableEntity.java | 10 +- .../db/adapter/file/algebra/FileRules.java | 10 +- .../db/adapter/file/algebra/FileScan.java | 4 +- .../adapter/file/algebra/FileTableModify.java | 4 +- .../polypheny/db/adapter/file/source/Qfs.java | 10 +- .../db/adapter/file/source/QfsSchema.java | 4 +- .../db/adapter/geode/algebra/GeodeScan.java | 4 +- .../db/hsqldb/stores/HsqldbStore.java | 8 +- .../polypheny/db/adapter/html/HtmlScan.java | 4 +- .../polypheny/db/adapter/jdbc/JdbcEntity.java | 10 +- .../db/adapter/jdbc/JdbcImplementor.java | 4 +- .../polypheny/db/adapter/jdbc/JdbcRules.java | 10 +- .../polypheny/db/adapter/jdbc/JdbcScan.java | 4 +- .../polypheny/db/adapter/jdbc/JdbcSchema.java | 4 +- .../jdbc/rel2sql/AlgToSqlConverter.java | 14 +- .../jdbc/sources/AbstractJdbcSource.java | 4 +- .../jdbc/stores/AbstractJdbcStore.java | 14 +- .../db/adapter/jdbc/rel2sql/PlannerTest.java | 4 +- .../org/polypheny/db/catalog/CatalogImpl.java | 174 +-- .../org/polypheny/db/test/CatalogTest.java | 4 +- .../monetdb/sources/MonetdbSource.java | 7 +- .../adapter/monetdb/stores/MonetdbStore.java | 10 +- .../db/adapter/mongodb/MongoAlg.java | 12 +- .../db/adapter/mongodb/MongoEntity.java | 16 +- .../db/adapter/mongodb/MongoMethod.java | 2 +- .../db/adapter/mongodb/MongoPlugin.java | 15 +- .../db/adapter/mongodb/MongoRules.java | 24 +- .../db/adapter/mongodb/MongoScan.java | 4 +- .../db/adapter/mongodb/MongoSchema.java | 4 +- .../mongodb/MongoToEnumerableConverter.java | 2 +- .../adapter/mongodb/util/MongoTypeUtil.java | 4 +- .../db/languages/mql/MqlRenameCollection.java | 8 +- .../languages/mql2alg/MqlToAlgConverter.java | 12 +- .../db/adapter/jdbc/MysqlSourcePlugin.java | 7 +- .../db/adapter/neo4j/Neo4jPlugin.java | 34 +- .../polypheny/db/adapter/neo4j/NeoEntity.java | 80 +- .../polypheny/db/adapter/neo4j/NeoGraph.java | 91 +- .../db/adapter/neo4j/NeoGraphImplementor.java | 4 +- .../neo4j/NeoRelationalImplementor.java | 25 +- .../polypheny/db/adapter/neo4j/NeoSchema.java | 35 +- .../db/adapter/neo4j/rules/NeoRules.java | 8 +- .../neo4j/rules/graph/NeoLpgModify.java | 5 +- .../adapter/neo4j/rules/graph/NeoLpgScan.java | 6 +- .../neo4j/rules/relational/NeoModify.java | 12 +- .../neo4j/rules/relational/NeoScan.java | 27 +- .../db/adapter/pig/PigAggregate.java | 4 +- .../db/adapter/pig/PigAlgFactories.java | 6 +- .../polypheny/db/adapter/pig/PigFilter.java | 4 +- 
.../org/polypheny/db/adapter/pig/PigJoin.java | 4 +- .../polypheny/db/adapter/pig/PigProject.java | 4 +- .../org/polypheny/db/adapter/pig/PigScan.java | 6 +- .../org/polypheny/db/tools/PigAlgBuilder.java | 16 +- .../org/polypheny/db/catalog/PolyCatalog.java | 4 +- .../postgres/source/PostgresqlSource.java | 7 +- .../postgres/store/PostgresqlStore.java | 8 +- .../polypheny/db/restapi/RequestParser.java | 36 +- .../java/org/polypheny/db/restapi/Rest.java | 23 +- .../requests/ResourceDeleteRequest.java | 4 +- .../models/requests/ResourceGetRequest.java | 4 +- .../models/requests/ResourcePatchRequest.java | 4 +- .../models/requests/ResourcePostRequest.java | 4 +- .../requests/ResourceValuesRequest.java | 4 +- .../db/restapi/RequestParserTest.java | 4 +- .../polypheny/db/sql/SqlProcessorImpl.java | 8 +- .../org/polypheny/db/sql/language/SqlDdl.java | 6 +- .../language/ddl/SqlDropMaterializedView.java | 4 +- .../db/sql/language/ddl/SqlDropTable.java | 4 +- .../db/sql/language/ddl/SqlDropView.java | 4 +- .../db/sql/language/ddl/SqlTruncate.java | 4 +- .../SqlAlterMaterializedViewAddIndex.java | 4 +- .../SqlAlterMaterializedViewDropIndex.java | 4 +- ...lAlterMaterializedViewFreshnessManual.java | 4 +- .../SqlAlterMaterializedViewRename.java | 4 +- .../SqlAlterMaterializedViewRenameColumn.java | 4 +- .../SqlAlterSourceTableAddColumn.java | 4 +- .../altertable/SqlAlterTableAddColumn.java | 4 +- .../SqlAlterTableAddForeignKey.java | 6 +- .../ddl/altertable/SqlAlterTableAddIndex.java | 4 +- .../SqlAlterTableAddPartitions.java | 4 +- .../altertable/SqlAlterTableAddPlacement.java | 4 +- .../SqlAlterTableAddPrimaryKey.java | 4 +- .../SqlAlterTableAddUniqueConstraint.java | 4 +- .../altertable/SqlAlterTableDropColumn.java | 4 +- .../SqlAlterTableDropConstraint.java | 4 +- .../SqlAlterTableDropForeignKey.java | 4 +- .../altertable/SqlAlterTableDropIndex.java | 4 +- .../SqlAlterTableDropPlacement.java | 4 +- .../SqlAlterTableDropPrimaryKey.java | 4 +- .../SqlAlterTableMergePartitions.java | 4 +- .../altertable/SqlAlterTableModifyColumn.java | 4 +- .../SqlAlterTableModifyPartitions.java | 4 +- .../SqlAlterTableModifyPlacement.java | 4 +- ...SqlAlterTableModifyPlacementAddColumn.java | 4 +- ...qlAlterTableModifyPlacementDropColumn.java | 4 +- .../ddl/altertable/SqlAlterTableOwner.java | 4 +- .../ddl/altertable/SqlAlterTableRename.java | 4 +- .../altertable/SqlAlterTableRenameColumn.java | 4 +- .../ddl/alterview/SqlAlterViewRename.java | 4 +- .../alterview/SqlAlterViewRenameColumn.java | 4 +- .../db/sql/language/validate/EmptyScope.java | 6 +- .../language/validate/SqlValidatorUtil.java | 4 +- .../db/sql/sql2alg/SqlToAlgConverter.java | 29 +- .../db/sql/web/SchemaToJsonMapper.java | 4 +- .../org/polypheny/db/sql/FrameworksTest.java | 8 +- .../SqlToRelConverterExtendedTest.java | 4 +- .../db/sql/map/NamespaceToJsonMapperTest.java | 4 +- .../db/sql/volcano/TraitPropagationTest.java | 4 +- .../java/org/polypheny/db/webui/Crud.java | 38 +- .../polypheny/db/webui/crud/LanguageCrud.java | 10 +- 336 files changed, 1915 insertions(+), 10760 deletions(-) create mode 100644 core/src/main/java/org/polypheny/db/algebra/core/common/Modify.java create mode 100644 core/src/main/java/org/polypheny/db/algebra/core/common/Scan.java rename core/src/main/java/org/polypheny/db/algebra/core/{Modify.java => relational/RelModify.java} (73%) rename core/src/main/java/org/polypheny/db/algebra/core/{Scan.java => relational/RelScan.java} (68%) rename core/src/main/java/org/polypheny/db/algebra/logical/relational/{LogicalModify.java => 
LogicalRelModify.java} (54%) rename core/src/main/java/org/polypheny/db/catalog/{refactor/LogicalEntity.java => entity/allocation/Allocation.java} (72%) create mode 100644 core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationGraph.java create mode 100644 core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationTable.java rename plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/CassandraMetaDataProvider.java => core/src/main/java/org/polypheny/db/catalog/entity/logical/Logical.java (73%) rename core/src/main/java/org/polypheny/db/catalog/entity/{CatalogGraphDatabase.java => logical/LogicalGraph.java} (61%) rename core/src/main/java/org/polypheny/db/catalog/entity/{CatalogTable.java => logical/LogicalTable.java} (91%) rename core/src/main/java/org/polypheny/db/catalog/{refactor/AllocationEntity.java => entity/physical/Physical.java} (88%) create mode 100644 core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalGraph.java create mode 100644 core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalTable.java rename core/src/main/java/org/polypheny/db/catalog/refactor/{PhysicalEntity.java => CatalogType.java} (60%) create mode 100644 core/src/main/java/org/polypheny/db/catalog/refactor/ModifiableEntity.java delete mode 100644 plugins/cassandra-adapter/.gitignore delete mode 100644 plugins/cassandra-adapter/build.gradle delete mode 100644 plugins/cassandra-adapter/gradle.properties delete mode 100644 plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/CassandraAlg.java delete mode 100644 plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/CassandraConvention.java delete mode 100644 plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/CassandraEnumerable.java delete mode 100644 plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/CassandraEnumerator.java delete mode 100644 plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/CassandraFilter.java delete mode 100644 plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/CassandraLimit.java delete mode 100644 plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/CassandraMethod.java delete mode 100644 plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/CassandraPhysicalNameProvider.java delete mode 100644 plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/CassandraPlugin.java delete mode 100644 plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/CassandraProject.java delete mode 100644 plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/CassandraScan.java delete mode 100644 plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/CassandraSchema.java delete mode 100644 plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/CassandraSort.java delete mode 100644 plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/CassandraTable.java delete mode 100644 plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/CassandraTableModify.java delete mode 100644 plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/CassandraToEnumerableConverter.java delete mode 100644 plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/CassandraValues.java delete mode 100644 
plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/package-info.java delete mode 100644 plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/rules/CassandraConverterRule.java delete mode 100644 plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/rules/CassandraFilterRule.java delete mode 100644 plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/rules/CassandraLimitRule.java delete mode 100644 plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/rules/CassandraProjectRule.java delete mode 100644 plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/rules/CassandraRules.java delete mode 100644 plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/rules/CassandraSortRule.java delete mode 100644 plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/rules/CassandraTableModificationRule.java delete mode 100644 plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/rules/CassandraToEnumerableConverterRule.java delete mode 100644 plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/rules/CassandraValuesRule.java delete mode 100644 plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/util/CassandraTypesUtils.java delete mode 100644 plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/util/CassandraUtils.java delete mode 100644 plugins/cassandra-adapter/src/test/java/org/polypheny/db/adapter/cassandra/util/CassandraTypesUtilsTest.java delete mode 100644 plugins/cassandra-adapter/src/test/java/org/polypheny/db/test/CassandraAdapterTest.java delete mode 100644 plugins/cassandra-adapter/src/test/resources/cassandra.yaml delete mode 100644 plugins/cassandra-adapter/src/test/resources/logback-test.xml delete mode 100644 plugins/cassandra-adapter/src/test/resources/model.json delete mode 100644 plugins/cassandra-adapter/src/test/resources/twissandra.cql delete mode 100644 plugins/ethereum-adapter/build.gradle delete mode 100644 plugins/ethereum-adapter/gradle.properties delete mode 100644 plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/BlockReader.java delete mode 100644 plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumEnumerator.java delete mode 100644 plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumFieldType.java delete mode 100644 plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumMapper.java delete mode 100644 plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumPlugin.java delete mode 100644 plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumPredicateFactory.java delete mode 100644 plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumSchema.java delete mode 100644 plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumTable.java delete mode 100644 plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/TransactionReader.java diff --git a/core/src/main/java/org/polypheny/db/StatisticsManager.java b/core/src/main/java/org/polypheny/db/StatisticsManager.java index 09009af134..62243e753f 100644 --- a/core/src/main/java/org/polypheny/db/StatisticsManager.java +++ b/core/src/main/java/org/polypheny/db/StatisticsManager.java @@ -21,7 +21,7 @@ import java.util.Map; import 
org.polypheny.db.catalog.entity.CatalogColumn; import org.polypheny.db.catalog.entity.CatalogSchema; -import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.entity.logical.LogicalTable; public abstract class StatisticsManager implements PropertyChangeListener { @@ -86,7 +86,7 @@ public static StatisticsManager getInstance() { public abstract void updateColumnName( CatalogColumn catalogColumn, String newName ); - public abstract void updateTableName( CatalogTable catalogTable, String newName ); + public abstract void updateTableName( LogicalTable catalogTable, String newName ); public abstract void updateSchemaName( CatalogSchema catalogSchema, String newName ); diff --git a/core/src/main/java/org/polypheny/db/adapter/Adapter.java b/core/src/main/java/org/polypheny/db/adapter/Adapter.java index 6344a42652..5759dd88fa 100644 --- a/core/src/main/java/org/polypheny/db/adapter/Adapter.java +++ b/core/src/main/java/org/polypheny/db/adapter/Adapter.java @@ -50,9 +50,10 @@ import org.polypheny.db.catalog.Catalog.NamespaceType; import org.polypheny.db.catalog.entity.CatalogCollection; import org.polypheny.db.catalog.entity.CatalogCollectionPlacement; -import org.polypheny.db.catalog.entity.CatalogColumnPlacement; import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; -import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.catalog.entity.physical.PhysicalGraph; +import org.polypheny.db.catalog.entity.physical.PhysicalTable; import org.polypheny.db.config.Config; import org.polypheny.db.config.Config.ConfigListener; import org.polypheny.db.config.ConfigDocker; @@ -322,12 +323,12 @@ public Adapter( int adapterId, String uniqueName, Map settings ) public abstract void createNewSchema( SchemaPlus rootSchema, String name, Long id ); - public abstract Entity createTableSchema( CatalogTable combinedTable, List columnPlacementsOnStore, CatalogPartitionPlacement partitionPlacement ); + public abstract PhysicalTable createTableSchema( PhysicalTable boilerplate ); public abstract Namespace getCurrentSchema(); - public void createGraphNamespace( SchemaPlus rootSchema, String name, long id ) { + public void createGraphNamespace( PhysicalGraph graph ) { throw new UnsupportedOperationException( "It is not supported to create a graph with this adapter." ); } @@ -337,12 +338,7 @@ public Entity createDocumentSchema( CatalogCollection catalogEntity, CatalogColl } - public Namespace getCurrentGraphNamespace() { - throw new UnsupportedOperationException( "It is not supported to create a graph with this adapter." 
); - } - - - public abstract void truncate( Context context, CatalogTable table ); + public abstract void truncate( Context context, LogicalTable table ); public abstract boolean prepare( PolyXid xid ); diff --git a/core/src/main/java/org/polypheny/db/adapter/DataContext.java b/core/src/main/java/org/polypheny/db/adapter/DataContext.java index 29f1eec7b5..bb0082faaf 100644 --- a/core/src/main/java/org/polypheny/db/adapter/DataContext.java +++ b/core/src/main/java/org/polypheny/db/adapter/DataContext.java @@ -183,10 +183,10 @@ enum Variable { TIME_ZONE( "timeZone", TimeZone.class ); public final String camelName; - public final Class clazz; + public final Class clazz; - Variable( String camelName, Class clazz ) { + Variable( String camelName, Class clazz ) { this.camelName = camelName; this.clazz = clazz; assert camelName.equals( CaseFormat.UPPER_UNDERSCORE.to( CaseFormat.LOWER_CAMEL, name() ) ); diff --git a/core/src/main/java/org/polypheny/db/adapter/DataSource.java b/core/src/main/java/org/polypheny/db/adapter/DataSource.java index 96aea10ac9..c236ccd28a 100644 --- a/core/src/main/java/org/polypheny/db/adapter/DataSource.java +++ b/core/src/main/java/org/polypheny/db/adapter/DataSource.java @@ -24,7 +24,7 @@ import lombok.Getter; import org.pf4j.ExtensionPoint; import org.polypheny.db.catalog.entity.CatalogAdapter.AdapterType; -import org.polypheny.db.catalog.entity.CatalogGraphDatabase; +import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.catalog.entity.CatalogGraphPlacement; import org.polypheny.db.prepare.Context; import org.polypheny.db.type.PolyType; @@ -46,7 +46,7 @@ protected DataSource( final int adapterId, final String uniqueName, final Map> getExportedColumns(); - public void createGraph( Context context, CatalogGraphDatabase graphDatabase ) { + public void createGraph( Context context, LogicalGraph graphDatabase ) { throw new UnsupportedOperationException( "It is not supported to create a graph with this adapter." 
); } diff --git a/core/src/main/java/org/polypheny/db/adapter/DataStore.java b/core/src/main/java/org/polypheny/db/adapter/DataStore.java index 990aa263a8..712d91c354 100644 --- a/core/src/main/java/org/polypheny/db/adapter/DataStore.java +++ b/core/src/main/java/org/polypheny/db/adapter/DataStore.java @@ -34,11 +34,11 @@ import org.polypheny.db.catalog.entity.CatalogCollectionMapping; import org.polypheny.db.catalog.entity.CatalogColumn; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; -import org.polypheny.db.catalog.entity.CatalogGraphDatabase; +import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.catalog.entity.CatalogGraphMapping; import org.polypheny.db.catalog.entity.CatalogGraphPlacement; import org.polypheny.db.catalog.entity.CatalogIndex; -import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.prepare.Context; import org.polypheny.db.type.PolyType; @@ -65,11 +65,11 @@ public List getSupportedSchemaType() { } - public abstract void createTable( Context context, CatalogTable combinedTable, List partitionIds ); + public abstract void createTable( Context context, LogicalTable combinedTable, List partitionIds ); - public abstract void dropTable( Context context, CatalogTable combinedTable, List partitionIds ); + public abstract void dropTable( Context context, LogicalTable combinedTable, List partitionIds ); - public abstract void addColumn( Context context, CatalogTable catalogTable, CatalogColumn catalogColumn ); + public abstract void addColumn( Context context, LogicalTable catalogTable, CatalogColumn catalogColumn ); public abstract void dropColumn( Context context, CatalogColumnPlacement columnPlacement ); @@ -83,7 +83,7 @@ public List getSupportedSchemaType() { public abstract AvailableIndexMethod getDefaultIndexMethod(); - public abstract List getFunctionalIndexes( CatalogTable catalogTable ); + public abstract List getFunctionalIndexes( LogicalTable catalogTable ); /** @@ -91,7 +91,7 @@ public List getSupportedSchemaType() { * It comes with a substitution methods called by default and should be overwritten if the inheriting {@link DataStore} * support the LPG data model natively. */ - public void createGraph( Context context, CatalogGraphDatabase graphDatabase ) { + public void createGraph( Context context, LogicalGraph graphDatabase ) { // overwrite this if the datastore supports graph createGraphSubstitution( context, graphDatabase ); } @@ -112,19 +112,19 @@ public void dropGraph( Context context, CatalogGraphPlacement graphPlacement ) { * Substitution method, which is used to handle the {@link DataStore} required operations * as if the data model would be {@link NamespaceType#RELATIONAL}. 
*/ - private void createGraphSubstitution( Context context, CatalogGraphDatabase graphDatabase ) { + private void createGraphSubstitution( Context context, LogicalGraph graphDatabase ) { CatalogGraphMapping mapping = Catalog.getInstance().getGraphMapping( graphDatabase.id ); - CatalogTable nodes = Catalog.getInstance().getTable( mapping.nodesId ); + LogicalTable nodes = Catalog.getInstance().getTable( mapping.nodesId ); createTable( context, nodes, nodes.partitionProperty.partitionIds ); - CatalogTable nodeProperty = Catalog.getInstance().getTable( mapping.nodesPropertyId ); + LogicalTable nodeProperty = Catalog.getInstance().getTable( mapping.nodesPropertyId ); createTable( context, nodeProperty, nodeProperty.partitionProperty.partitionIds ); - CatalogTable edges = Catalog.getInstance().getTable( mapping.edgesId ); + LogicalTable edges = Catalog.getInstance().getTable( mapping.edgesId ); createTable( context, edges, edges.partitionProperty.partitionIds ); - CatalogTable edgeProperty = Catalog.getInstance().getTable( mapping.edgesPropertyId ); + LogicalTable edgeProperty = Catalog.getInstance().getTable( mapping.edgesPropertyId ); createTable( context, edgeProperty, edgeProperty.partitionProperty.partitionIds ); } @@ -137,16 +137,16 @@ private void dropGraphSubstitution( Context context, CatalogGraphPlacement graph Catalog catalog = Catalog.getInstance(); CatalogGraphMapping mapping = catalog.getGraphMapping( graphPlacement.graphId ); - CatalogTable nodes = catalog.getTable( mapping.nodesId ); + LogicalTable nodes = catalog.getTable( mapping.nodesId ); dropTable( context, nodes, nodes.partitionProperty.partitionIds ); - CatalogTable nodeProperty = catalog.getTable( mapping.nodesPropertyId ); + LogicalTable nodeProperty = catalog.getTable( mapping.nodesPropertyId ); dropTable( context, nodeProperty, nodeProperty.partitionProperty.partitionIds ); - CatalogTable edges = catalog.getTable( mapping.edgesId ); + LogicalTable edges = catalog.getTable( mapping.edgesId ); dropTable( context, edges, edges.partitionProperty.partitionIds ); - CatalogTable edgeProperty = catalog.getTable( mapping.edgesPropertyId ); + LogicalTable edgeProperty = catalog.getTable( mapping.edgesPropertyId ); dropTable( context, edgeProperty, edgeProperty.partitionProperty.partitionIds ); } @@ -170,7 +170,7 @@ private void createCollectionSubstitution( Context prepareContext, CatalogCollec Catalog catalog = Catalog.getInstance(); CatalogCollectionMapping mapping = catalog.getCollectionMapping( catalogCollection.id ); - CatalogTable collectionEntity = catalog.getTable( mapping.collectionId ); + LogicalTable collectionEntity = catalog.getTable( mapping.collectionId ); createTable( prepareContext, collectionEntity, collectionEntity.partitionProperty.partitionIds ); } @@ -194,7 +194,7 @@ private void dropCollectionSubstitution( Context prepareContext, CatalogCollecti Catalog catalog = Catalog.getInstance(); CatalogCollectionMapping mapping = catalog.getCollectionMapping( catalogCollection.id ); - CatalogTable collectionEntity = catalog.getTable( mapping.collectionId ); + LogicalTable collectionEntity = catalog.getTable( mapping.collectionId ); dropTable( prepareContext, collectionEntity, collectionEntity.partitionProperty.partitionIds ); } diff --git a/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableScan.java b/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableScan.java index 22c9911f42..ca85e07181 100644 --- a/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableScan.java +++ 
b/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableScan.java @@ -51,7 +51,7 @@ import org.polypheny.db.adapter.java.JavaTypeFactory; import org.polypheny.db.algebra.AlgCollationTraitDef; import org.polypheny.db.algebra.AlgNode; -import org.polypheny.db.algebra.core.Scan; +import org.polypheny.db.algebra.core.relational.RelScan; import org.polypheny.db.algebra.metadata.AlgMetadataQuery; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeField; @@ -73,9 +73,9 @@ /** - * Implementation of {@link Scan} in {@link EnumerableConvention enumerable calling convention}. + * Implementation of {@link RelScan} in {@link EnumerableConvention enumerable calling convention}. */ -public class EnumerableScan extends Scan implements EnumerableAlg { +public class EnumerableScan extends RelScan implements EnumerableAlg { private final Class elementType; diff --git a/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableTableModifyToStreamerRule.java b/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableTableModifyToStreamerRule.java index 123389cbba..484070d7bf 100644 --- a/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableTableModifyToStreamerRule.java +++ b/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableTableModifyToStreamerRule.java @@ -17,10 +17,10 @@ package org.polypheny.db.adapter.enumerable; import org.polypheny.db.algebra.core.AlgFactories; -import org.polypheny.db.algebra.core.Modify; -import org.polypheny.db.algebra.core.Scan; +import org.polypheny.db.algebra.core.relational.RelModify; +import org.polypheny.db.algebra.core.relational.RelScan; import org.polypheny.db.algebra.logical.common.LogicalStreamer; -import org.polypheny.db.algebra.logical.relational.LogicalModify; +import org.polypheny.db.algebra.logical.relational.LogicalRelModify; import org.polypheny.db.plan.AlgOptRule; import org.polypheny.db.plan.AlgOptRuleCall; import org.polypheny.db.plan.Convention; @@ -28,17 +28,17 @@ public class EnumerableTableModifyToStreamerRule extends AlgOptRule { /** - * Helper rule, which can transform a {@link Modify} into a combination of dedicated {@link Scan} - * and prepared {@link Modify}, which can be useful if the executing store is not able to perform the {@link Scan} query natively. + * Helper rule, which can transform a {@link RelModify} into a combination of dedicated {@link RelScan} + * and prepared {@link RelModify}, which can be useful if the executing store is not able to perform the {@link RelScan} query natively. 
*/ public EnumerableTableModifyToStreamerRule() { - super( operandJ( LogicalModify.class, Convention.NONE, r -> !r.isStreamed(), any() ), "EnumerableTableModifyToStreamerRule" ); + super( operandJ( LogicalRelModify.class, Convention.NONE, r -> !r.isStreamed(), any() ), "EnumerableTableModifyToStreamerRule" ); } @Override public void onMatch( AlgOptRuleCall call ) { - Modify modify = call.alg( 0 ); + RelModify modify = call.alg( 0 ); LogicalStreamer streamer = LogicalStreamer.create( modify, diff --git a/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableTransformer.java b/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableTransformer.java index 45ac482652..aaf38cf964 100644 --- a/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableTransformer.java +++ b/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableTransformer.java @@ -38,7 +38,7 @@ import org.polypheny.db.adapter.java.JavaTypeFactory; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.AlgWriter; -import org.polypheny.db.algebra.core.Scan; +import org.polypheny.db.algebra.core.relational.RelScan; import org.polypheny.db.algebra.core.common.Transformer; import org.polypheny.db.algebra.metadata.AlgMetadataQuery; import org.polypheny.db.algebra.type.AlgDataType; @@ -60,7 +60,7 @@ public class EnumerableTransformer extends Transformer implements EnumerableAlg * Creates an {@link EnumerableTransformer}, which is able to switch {@link org.polypheny.db.schema.ModelTraitDef} for * non-native underlying adapters if needed. * For example, it will transform the {@link org.polypheny.db.algebra.core.lpg.LpgScan}, which can be handled directly by - * a native adapter, to a combination of {@link Scan} and {@link org.polypheny.db.algebra.core.Union}. + * a native adapter, to a combination of {@link RelScan} and {@link org.polypheny.db.algebra.core.Union}. 
*/ public EnumerableTransformer( AlgOptCluster cluster, List inputs, List names, AlgTraitSet traitSet, ModelTrait inTraitSet, ModelTrait outTraitSet, AlgDataType rowType, boolean isCrossModel ) { super( cluster, inputs, names, traitSet, inTraitSet, outTraitSet, rowType, isCrossModel ); diff --git a/core/src/main/java/org/polypheny/db/adapter/enumerable/lpg/EnumerableLpgTransformer.java b/core/src/main/java/org/polypheny/db/adapter/enumerable/lpg/EnumerableLpgTransformer.java index 2dade16ed2..92de1a5244 100644 --- a/core/src/main/java/org/polypheny/db/adapter/enumerable/lpg/EnumerableLpgTransformer.java +++ b/core/src/main/java/org/polypheny/db/adapter/enumerable/lpg/EnumerableLpgTransformer.java @@ -36,7 +36,7 @@ import org.polypheny.db.adapter.enumerable.EnumerableAlg; import org.polypheny.db.adapter.enumerable.EnumerableAlgImplementor; import org.polypheny.db.algebra.AlgNode; -import org.polypheny.db.algebra.core.Modify.Operation; +import org.polypheny.db.algebra.core.common.Modify.Operation; import org.polypheny.db.algebra.core.lpg.LpgTransformer; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.plan.AlgOptCluster; diff --git a/core/src/main/java/org/polypheny/db/adapter/index/CoWHashIndex.java b/core/src/main/java/org/polypheny/db/adapter/index/CoWHashIndex.java index f170722dc9..4e956fa073 100644 --- a/core/src/main/java/org/polypheny/db/adapter/index/CoWHashIndex.java +++ b/core/src/main/java/org/polypheny/db/adapter/index/CoWHashIndex.java @@ -30,7 +30,7 @@ import org.polypheny.db.algebra.exceptions.ConstraintViolationException; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.catalog.entity.CatalogSchema; -import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.rex.RexBuilder; import org.polypheny.db.rex.RexLiteral; import org.polypheny.db.tools.AlgBuilder; @@ -53,7 +53,7 @@ public CoWHashIndex( final long id, final String name, final CatalogSchema schema, - final CatalogTable table, + final LogicalTable table, final List columns, final List targetColumns ) { this.id = id; @@ -69,7 +69,7 @@ public CoWHashIndex( final long id, final String name, final CatalogSchema schema, - final CatalogTable table, + final LogicalTable table, final String[] columns, final String[] targetColumns ) { this( id, name, schema, table, Arrays.asList( columns ), Arrays.asList( targetColumns ) ); @@ -376,7 +376,7 @@ public Index create( Boolean unique, Boolean persistent, CatalogSchema schema, - CatalogTable table, + LogicalTable table, List columns, List targetColumns ) { return new CoWHashIndex( id, name, schema, table, columns, targetColumns ); diff --git a/core/src/main/java/org/polypheny/db/adapter/index/CowMultiHashIndex.java b/core/src/main/java/org/polypheny/db/adapter/index/CowMultiHashIndex.java index 0940f5a280..b0da915bfe 100644 --- a/core/src/main/java/org/polypheny/db/adapter/index/CowMultiHashIndex.java +++ b/core/src/main/java/org/polypheny/db/adapter/index/CowMultiHashIndex.java @@ -31,7 +31,7 @@ import org.polypheny.db.algebra.core.Values; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.catalog.entity.CatalogSchema; -import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.rex.RexBuilder; import org.polypheny.db.rex.RexLiteral; import org.polypheny.db.tools.AlgBuilder; @@ -49,7 +49,7 @@ public class CowMultiHashIndex extends Index { private Map, List, 
Boolean>>> barrierIndex = new HashMap<>(); - public CowMultiHashIndex( long id, String name, CatalogSchema schema, CatalogTable table, List columns, List targetColumns ) { + public CowMultiHashIndex( long id, String name, CatalogSchema schema, LogicalTable table, List columns, List targetColumns ) { this.id = id; this.name = name; this.schema = schema; @@ -59,7 +59,7 @@ public CowMultiHashIndex( long id, String name, CatalogSchema schema, CatalogTab } - public CowMultiHashIndex( long id, String name, CatalogSchema schema, CatalogTable table, String[] columns, String[] targetColumns ) { + public CowMultiHashIndex( long id, String name, CatalogSchema schema, LogicalTable table, String[] columns, String[] targetColumns ) { this( id, name, schema, table, Arrays.asList( columns ), Arrays.asList( targetColumns ) ); } @@ -370,7 +370,7 @@ public Index create( Boolean unique, Boolean persistent, CatalogSchema schema, - CatalogTable table, + LogicalTable table, List columns, List targetColumns ) { return new CowMultiHashIndex( id, name, schema, table, columns, targetColumns ); diff --git a/core/src/main/java/org/polypheny/db/adapter/index/Index.java b/core/src/main/java/org/polypheny/db/adapter/index/Index.java index 389a6eeb0c..859f446f02 100644 --- a/core/src/main/java/org/polypheny/db/adapter/index/Index.java +++ b/core/src/main/java/org/polypheny/db/adapter/index/Index.java @@ -29,7 +29,7 @@ import org.polypheny.db.algebra.core.Values; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.catalog.entity.CatalogSchema; -import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.processing.QueryProcessor; import org.polypheny.db.rex.RexBuilder; import org.polypheny.db.rex.RexLiteral; @@ -55,7 +55,7 @@ public abstract class Index { // The logical table this index is for @Getter - protected CatalogTable table; + protected LogicalTable table; // The list of columns over which the index was created protected List columns; @@ -240,7 +240,7 @@ Index create( final Boolean unique, final Boolean persitent, final CatalogSchema schema, - final CatalogTable table, + final LogicalTable table, final List columns, final List targetColumns ); diff --git a/core/src/main/java/org/polypheny/db/adapter/index/IndexManager.java b/core/src/main/java/org/polypheny/db/adapter/index/IndexManager.java index d6f24e03fa..74b58a94e0 100644 --- a/core/src/main/java/org/polypheny/db/adapter/index/IndexManager.java +++ b/core/src/main/java/org/polypheny/db/adapter/index/IndexManager.java @@ -34,7 +34,7 @@ import org.polypheny.db.catalog.entity.CatalogKey; import org.polypheny.db.catalog.entity.CatalogPrimaryKey; import org.polypheny.db.catalog.entity.CatalogSchema; -import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.exceptions.GenericCatalogException; import org.polypheny.db.catalog.exceptions.UnknownDatabaseException; import org.polypheny.db.catalog.exceptions.UnknownKeyException; @@ -174,7 +174,7 @@ protected void addIndex( final long id, final String name, final CatalogKey key, .filter( it -> it.canProvide( method, unique, persistent ) ) .findFirst() .orElseThrow( IllegalArgumentException::new ); - final CatalogTable table = Catalog.getInstance().getTable( key.tableId ); + final LogicalTable table = Catalog.getInstance().getTable( key.tableId ); final CatalogPrimaryKey pk = Catalog.getInstance().getPrimaryKey( table.primaryKey ); 
final Index index = factory.create( id, @@ -214,7 +214,7 @@ public void deleteIndex( final long indexId ) { } - public Index getIndex( CatalogSchema schema, CatalogTable table, List columns ) { + public Index getIndex( CatalogSchema schema, LogicalTable table, List columns ) { return this.indexById.values().stream().filter( index -> index.schema.equals( schema ) && index.table.equals( table ) @@ -224,7 +224,7 @@ public Index getIndex( CatalogSchema schema, CatalogTable table, List co } - public Index getIndex( CatalogSchema schema, CatalogTable table, List columns, String method, Boolean unique, Boolean persistent ) { + public Index getIndex( CatalogSchema schema, LogicalTable table, List columns, String method, Boolean unique, Boolean persistent ) { return this.indexById.values().stream().filter( index -> index.schema.equals( schema ) && index.table.equals( table ) @@ -236,7 +236,7 @@ public Index getIndex( CatalogSchema schema, CatalogTable table, List co } - public List getIndices( CatalogSchema schema, CatalogTable table ) { + public List getIndices( CatalogSchema schema, LogicalTable table ) { return this.indexById.values().stream() .filter( index -> index.schema.equals( schema ) && index.table.equals( table ) ) .collect( Collectors.toList() ); diff --git a/core/src/main/java/org/polypheny/db/algebra/AbstractAlgNode.java b/core/src/main/java/org/polypheny/db/algebra/AbstractAlgNode.java index ffbb9f17f2..5938ca5637 100644 --- a/core/src/main/java/org/polypheny/db/algebra/AbstractAlgNode.java +++ b/core/src/main/java/org/polypheny/db/algebra/AbstractAlgNode.java @@ -52,9 +52,9 @@ import org.polypheny.db.algebra.metadata.Metadata; import org.polypheny.db.algebra.metadata.MetadataFactory; import org.polypheny.db.algebra.type.AlgDataType; +import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgOptCost; -import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptPlanner; import org.polypheny.db.plan.AlgOptUtil; import org.polypheny.db.plan.AlgTrait; @@ -387,7 +387,7 @@ public final String getDescription() { @Override - public AlgOptEntity getEntity() { + public CatalogEntity getEntity() { return null; } diff --git a/core/src/main/java/org/polypheny/db/algebra/AlgFieldTrimmer.java b/core/src/main/java/org/polypheny/db/algebra/AlgFieldTrimmer.java index fd35d5732f..f0a73f4e2f 100644 --- a/core/src/main/java/org/polypheny/db/algebra/AlgFieldTrimmer.java +++ b/core/src/main/java/org/polypheny/db/algebra/AlgFieldTrimmer.java @@ -53,15 +53,15 @@ import org.polypheny.db.algebra.core.Filter; import org.polypheny.db.algebra.core.Join; import org.polypheny.db.algebra.core.Project; -import org.polypheny.db.algebra.core.Scan; import org.polypheny.db.algebra.core.SemiJoin; import org.polypheny.db.algebra.core.SetOp; import org.polypheny.db.algebra.core.Sort; +import org.polypheny.db.algebra.core.relational.RelScan; import org.polypheny.db.algebra.logical.relational.LogicalAggregate; import org.polypheny.db.algebra.logical.relational.LogicalFilter; import org.polypheny.db.algebra.logical.relational.LogicalJoin; -import org.polypheny.db.algebra.logical.relational.LogicalModify; import org.polypheny.db.algebra.logical.relational.LogicalProject; +import org.polypheny.db.algebra.logical.relational.LogicalRelModify; import org.polypheny.db.algebra.logical.relational.LogicalRelScan; import org.polypheny.db.algebra.logical.relational.LogicalTableFunctionScan; import 
org.polypheny.db.algebra.logical.relational.LogicalValues; @@ -764,9 +764,9 @@ public TrimResult trimFields( Aggregate aggregate, ImmutableBitSet fieldsUsed, S /** - * Variant of {@link #trimFields(AlgNode, ImmutableBitSet, Set)} for {@link LogicalModify}. + * Variant of {@link #trimFields(AlgNode, ImmutableBitSet, Set)} for {@link LogicalRelModify}. */ - public TrimResult trimFields( LogicalModify modifier, ImmutableBitSet fieldsUsed, Set extraFields ) { + public TrimResult trimFields( LogicalRelModify modifier, ImmutableBitSet fieldsUsed, Set extraFields ) { // Ignore what consumer wants. We always project all columns. Util.discard( fieldsUsed ); @@ -788,7 +788,7 @@ public TrimResult trimFields( LogicalModify modifier, ImmutableBitSet fieldsUsed throw new AssertionError( "Expected identity mapping, got " + inputMapping ); } - LogicalModify newModifier = modifier; + LogicalRelModify newModifier = modifier; if ( newInput != input ) { newModifier = modifier.copy( @@ -891,7 +891,7 @@ protected Mapping createMapping( ImmutableBitSet fieldsUsed, int fieldCount ) { /** * Variant of {@link #trimFields(AlgNode, ImmutableBitSet, Set)} for {@link LogicalRelScan}. */ - public TrimResult trimFields( final Scan tableAccessRel, ImmutableBitSet fieldsUsed, Set extraFields ) { + public TrimResult trimFields( final RelScan tableAccessRel, ImmutableBitSet fieldsUsed, Set extraFields ) { final int fieldCount = tableAccessRel.getRowType().getFieldCount(); if ( fieldsUsed.equals( ImmutableBitSet.range( fieldCount ) ) && extraFields.isEmpty() ) { // If there is nothing to project or if we are projecting everything then no need to introduce another AlgNode diff --git a/core/src/main/java/org/polypheny/db/algebra/AlgHomogeneousShuttle.java b/core/src/main/java/org/polypheny/db/algebra/AlgHomogeneousShuttle.java index 19efa1545f..60e76c4fc5 100644 --- a/core/src/main/java/org/polypheny/db/algebra/AlgHomogeneousShuttle.java +++ b/core/src/main/java/org/polypheny/db/algebra/AlgHomogeneousShuttle.java @@ -34,8 +34,8 @@ package org.polypheny.db.algebra; -import org.polypheny.db.algebra.core.Scan; import org.polypheny.db.algebra.core.TableFunctionScan; +import org.polypheny.db.algebra.core.relational.RelScan; import org.polypheny.db.algebra.logical.relational.LogicalAggregate; import org.polypheny.db.algebra.logical.relational.LogicalCorrelate; import org.polypheny.db.algebra.logical.relational.LogicalExchange; @@ -68,7 +68,7 @@ public AlgNode visit( LogicalMatch match ) { @Override - public AlgNode visit( Scan scan ) { + public AlgNode visit( RelScan scan ) { return visit( (AlgNode) scan ); } diff --git a/core/src/main/java/org/polypheny/db/algebra/AlgInput.java b/core/src/main/java/org/polypheny/db/algebra/AlgInput.java index d190993d7b..1d182a38d9 100644 --- a/core/src/main/java/org/polypheny/db/algebra/AlgInput.java +++ b/core/src/main/java/org/polypheny/db/algebra/AlgInput.java @@ -36,8 +36,8 @@ import java.util.List; import org.polypheny.db.algebra.type.AlgDataType; +import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.plan.AlgOptCluster; -import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.util.ImmutableBitSet; @@ -51,7 +51,7 @@ public interface AlgInput { AlgTraitSet getTraitSet(); - AlgOptEntity getTable( String table ); + CatalogEntity getEntity( String entity ); /** * Returns the input relational expression. Throws if there is not precisely one input. 
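The AlgInput hunk above and the AlgNode hunk below swap the entity handle used during plan (de)serialization from AlgOptEntity to CatalogEntity. A minimal sketch of how the refactored contract is consumed, assuming a hypothetical concrete subclass (ExampleScan is illustrative only and not part of this patch; RelScan and the "entity" attribute key appear later in this patch):

import org.polypheny.db.algebra.AlgInput;
import org.polypheny.db.algebra.core.relational.RelScan;
import org.polypheny.db.catalog.entity.CatalogEntity;
import org.polypheny.db.plan.AlgOptCluster;
import org.polypheny.db.plan.AlgTraitSet;

public class ExampleScan extends RelScan<CatalogEntity> {

    // Direct construction: the scan now carries the CatalogEntity itself.
    public ExampleScan( AlgOptCluster cluster, AlgTraitSet traitSet, CatalogEntity entity ) {
        super( cluster, traitSet, entity );
    }

    // Deserialization: the entity is resolved by attribute name via the new
    // AlgInput.getEntity( String ) instead of AlgInput.getTable( String ).
    public ExampleScan( AlgInput input ) {
        super( input.getCluster(), input.getTraitSet(), input.getEntity( "entity" ) );
    }

}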
diff --git a/core/src/main/java/org/polypheny/db/algebra/AlgNode.java b/core/src/main/java/org/polypheny/db/algebra/AlgNode.java index d2373ce61b..5b28b1045e 100644 --- a/core/src/main/java/org/polypheny/db/algebra/AlgNode.java +++ b/core/src/main/java/org/polypheny/db/algebra/AlgNode.java @@ -44,9 +44,9 @@ import org.polypheny.db.algebra.metadata.Metadata; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.catalog.Catalog.NamespaceType; +import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.plan.AlgImplementor; import org.polypheny.db.plan.AlgOptCost; -import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptNode; import org.polypheny.db.plan.AlgOptPlanner; import org.polypheny.db.plan.AlgTraitSet; @@ -259,7 +259,7 @@ public interface AlgNode extends AlgOptNode, Cloneable { * * @return If this relational expression represents an access to a table, returns that table, otherwise returns null */ - AlgOptEntity getEntity(); + CatalogEntity getEntity(); /** * Returns the name of this relational expression's class, sans package name, for use in explain. For example, for a diff --git a/core/src/main/java/org/polypheny/db/algebra/AlgShuttle.java b/core/src/main/java/org/polypheny/db/algebra/AlgShuttle.java index 889542302c..0a15f7d59b 100644 --- a/core/src/main/java/org/polypheny/db/algebra/AlgShuttle.java +++ b/core/src/main/java/org/polypheny/db/algebra/AlgShuttle.java @@ -34,8 +34,8 @@ package org.polypheny.db.algebra; -import org.polypheny.db.algebra.core.Scan; import org.polypheny.db.algebra.core.TableFunctionScan; +import org.polypheny.db.algebra.core.relational.RelScan; import org.polypheny.db.algebra.logical.common.LogicalConditionalExecute; import org.polypheny.db.algebra.logical.common.LogicalConstraintEnforcer; import org.polypheny.db.algebra.logical.document.LogicalDocumentAggregate; @@ -64,8 +64,8 @@ import org.polypheny.db.algebra.logical.relational.LogicalJoin; import org.polypheny.db.algebra.logical.relational.LogicalMatch; import org.polypheny.db.algebra.logical.relational.LogicalMinus; -import org.polypheny.db.algebra.logical.relational.LogicalModify; import org.polypheny.db.algebra.logical.relational.LogicalProject; +import org.polypheny.db.algebra.logical.relational.LogicalRelModify; import org.polypheny.db.algebra.logical.relational.LogicalSort; import org.polypheny.db.algebra.logical.relational.LogicalUnion; import org.polypheny.db.algebra.logical.relational.LogicalValues; @@ -76,7 +76,7 @@ */ public interface AlgShuttle { - AlgNode visit( Scan scan ); + AlgNode visit( RelScan scan ); AlgNode visit( TableFunctionScan scan ); @@ -104,7 +104,7 @@ public interface AlgShuttle { AlgNode visit( LogicalExchange exchange ); - AlgNode visit( LogicalModify modify ); + AlgNode visit( LogicalRelModify modify ); AlgNode visit( LogicalConditionalExecute lce ); diff --git a/core/src/main/java/org/polypheny/db/algebra/AlgShuttleImpl.java b/core/src/main/java/org/polypheny/db/algebra/AlgShuttleImpl.java index 7d3309e139..aa028a831b 100644 --- a/core/src/main/java/org/polypheny/db/algebra/AlgShuttleImpl.java +++ b/core/src/main/java/org/polypheny/db/algebra/AlgShuttleImpl.java @@ -39,8 +39,8 @@ import java.util.Deque; import java.util.List; import org.apache.calcite.linq4j.Ord; -import org.polypheny.db.algebra.core.Scan; import org.polypheny.db.algebra.core.TableFunctionScan; +import org.polypheny.db.algebra.core.relational.RelScan; import org.polypheny.db.algebra.logical.common.LogicalConditionalExecute; import 
org.polypheny.db.algebra.logical.common.LogicalConstraintEnforcer; import org.polypheny.db.algebra.logical.document.LogicalDocumentAggregate; @@ -69,8 +69,8 @@ import org.polypheny.db.algebra.logical.relational.LogicalJoin; import org.polypheny.db.algebra.logical.relational.LogicalMatch; import org.polypheny.db.algebra.logical.relational.LogicalMinus; -import org.polypheny.db.algebra.logical.relational.LogicalModify; import org.polypheny.db.algebra.logical.relational.LogicalProject; +import org.polypheny.db.algebra.logical.relational.LogicalRelModify; import org.polypheny.db.algebra.logical.relational.LogicalSort; import org.polypheny.db.algebra.logical.relational.LogicalUnion; import org.polypheny.db.algebra.logical.relational.LogicalValues; @@ -127,7 +127,7 @@ public AlgNode visit( LogicalMatch match ) { @Override - public AlgNode visit( Scan scan ) { + public AlgNode visit( RelScan scan ) { return scan; } @@ -205,7 +205,7 @@ public AlgNode visit( LogicalConditionalExecute lce ) { @Override - public AlgNode visit( LogicalModify modify ) { + public AlgNode visit( LogicalRelModify modify ) { return visitChildren( modify ); } diff --git a/core/src/main/java/org/polypheny/db/algebra/AlgStructuredTypeFlattener.java b/core/src/main/java/org/polypheny/db/algebra/AlgStructuredTypeFlattener.java index ce321a25f6..499d3eaa4b 100644 --- a/core/src/main/java/org/polypheny/db/algebra/AlgStructuredTypeFlattener.java +++ b/core/src/main/java/org/polypheny/db/algebra/AlgStructuredTypeFlattener.java @@ -51,7 +51,6 @@ import org.polypheny.db.algebra.core.Collect; import org.polypheny.db.algebra.core.CorrelationId; import org.polypheny.db.algebra.core.Sample; -import org.polypheny.db.algebra.core.Scan; import org.polypheny.db.algebra.core.Sort; import org.polypheny.db.algebra.core.Uncollect; import org.polypheny.db.algebra.logical.common.LogicalBatchIterator; @@ -84,9 +83,10 @@ import org.polypheny.db.algebra.logical.relational.LogicalJoin; import org.polypheny.db.algebra.logical.relational.LogicalMatch; import org.polypheny.db.algebra.logical.relational.LogicalMinus; -import org.polypheny.db.algebra.logical.relational.LogicalModify; import org.polypheny.db.algebra.logical.relational.LogicalModifyCollect; import org.polypheny.db.algebra.logical.relational.LogicalProject; +import org.polypheny.db.algebra.logical.relational.LogicalRelModify; +import org.polypheny.db.algebra.logical.relational.LogicalRelScan; import org.polypheny.db.algebra.logical.relational.LogicalSort; import org.polypheny.db.algebra.logical.relational.LogicalTableFunctionScan; import org.polypheny.db.algebra.logical.relational.LogicalUnion; @@ -97,7 +97,7 @@ import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.algebra.type.StructKind; -import org.polypheny.db.catalog.refactor.LogicalEntity; +import org.polypheny.db.catalog.refactor.TranslatableEntity; import org.polypheny.db.languages.OperatorRegistry; import org.polypheny.db.nodes.Operator; import org.polypheny.db.plan.AlgOptCluster; @@ -115,7 +115,6 @@ import org.polypheny.db.rex.RexShuttle; import org.polypheny.db.rex.RexSubQuery; import org.polypheny.db.rex.RexUtil; -import org.polypheny.db.schema.LogicalCollection; import org.polypheny.db.tools.AlgBuilder; import org.polypheny.db.type.PolyType; import org.polypheny.db.type.PolyTypeUtil; @@ -449,8 +448,8 @@ public void rewriteAlg( LogicalDocumentSort alg ) { @SuppressWarnings("unused") public void rewriteAlg( LogicalDocumentScan scan ) { AlgNode alg = scan; - 
if ( !(scan.getEntity() instanceof LogicalCollection) ) { - alg = scan.getCollection().toAlg( toAlgContext, scan.traitSet ); + if ( scan.entity.isPhysical() ) { + alg = scan.entity.unwrap( TranslatableEntity.class ).toAlg( toAlgContext, scan.traitSet ); } setNewForOldRel( scan, alg ); } @@ -482,8 +481,8 @@ public void rewriteAlg( LogicalLpgModify alg ) { @SuppressWarnings("unused") public void rewriteAlg( LogicalLpgScan scan ) { AlgNode alg = scan; - if ( !(scan.getGraph() instanceof LogicalEntity) ) { - alg = scan.getGraph().toAlg( toAlgContext, scan.getGraph() ); + if ( !(scan.entity.isPhysical()) ) { + alg = scan.entity.unwrap( TranslatableEntity.class ).toAlg( toAlgContext, scan.traitSet ); } setNewForOldRel( scan, alg ); } @@ -526,11 +525,10 @@ public void rewriteAlg( LogicalLpgUnwind unwind ) { @SuppressWarnings("unused") - public void rewriteAlg( LogicalModify alg ) { - LogicalModify newAlg = - LogicalModify.create( + public void rewriteAlg( LogicalRelModify alg ) { + LogicalRelModify newAlg = + LogicalRelModify.create( alg.getEntity(), - alg.getCatalogReader(), getNewForOldRel( alg.getInput() ), alg.getOperation(), alg.getUpdateColumnList(), @@ -863,8 +861,9 @@ private boolean isConstructor( RexNode rexNode ) { } - public void rewriteAlg( Scan alg ) { - AlgNode newAlg = alg.getEntity().toAlg( toAlgContext, alg.traitSet ); + @SuppressWarnings("unused") + public void rewriteAlg( LogicalRelScan alg ) { + AlgNode newAlg = alg.entity.unwrap( TranslatableEntity.class ).toAlg( toAlgContext, alg.traitSet ); if ( !PolyTypeUtil.isFlat( alg.getRowType() ) ) { final List> flattenedExpList = new ArrayList<>(); flattenInputs( @@ -879,16 +878,19 @@ public void rewriteAlg( Scan alg ) { } + @SuppressWarnings("unused") public void rewriteAlg( LogicalDelta alg ) { rewriteGeneric( alg ); } + @SuppressWarnings("unused") public void rewriteAlg( LogicalChi alg ) { rewriteGeneric( alg ); } + @SuppressWarnings("unused") public void rewriteAlg( LogicalMatch alg ) { rewriteGeneric( alg ); } diff --git a/core/src/main/java/org/polypheny/db/algebra/UnsupportedFromInsertShuttle.java b/core/src/main/java/org/polypheny/db/algebra/UnsupportedFromInsertShuttle.java index 501b7bf9b4..b006739491 100644 --- a/core/src/main/java/org/polypheny/db/algebra/UnsupportedFromInsertShuttle.java +++ b/core/src/main/java/org/polypheny/db/algebra/UnsupportedFromInsertShuttle.java @@ -17,8 +17,8 @@ package org.polypheny.db.algebra; import java.util.Objects; -import org.polypheny.db.algebra.core.Modify; -import org.polypheny.db.algebra.core.Scan; +import org.polypheny.db.algebra.core.relational.RelModify; +import org.polypheny.db.algebra.core.relational.RelScan; import org.polypheny.db.plan.volcano.AlgSubset; public class UnsupportedFromInsertShuttle extends AlgShuttleImpl { @@ -32,7 +32,7 @@ private UnsupportedFromInsertShuttle( Long tableId ) { } - public static boolean contains( Modify modify ) { + public static boolean contains( RelModify modify ) { long id = modify.getEntity().getCatalogEntity().id; UnsupportedFromInsertShuttle shuttle = new UnsupportedFromInsertShuttle( id ); modify.accept( shuttle ); @@ -41,7 +41,7 @@ public static boolean contains( Modify modify ) { @Override - public AlgNode visit( Scan scan ) { + public AlgNode visit( RelScan scan ) { if ( !Objects.equals( scan.getEntity().getCatalogEntity().id, tableId ) ) { containsOtherTableId = true; } diff --git a/core/src/main/java/org/polypheny/db/algebra/core/AlgFactories.java b/core/src/main/java/org/polypheny/db/algebra/core/AlgFactories.java index 
ce6f0681aa..ea15080f55 100644 --- a/core/src/main/java/org/polypheny/db/algebra/core/AlgFactories.java +++ b/core/src/main/java/org/polypheny/db/algebra/core/AlgFactories.java @@ -48,6 +48,7 @@ import org.polypheny.db.algebra.constant.SemiJoinType; import org.polypheny.db.algebra.core.common.ConditionalExecute; import org.polypheny.db.algebra.core.common.ConditionalExecute.Condition; +import org.polypheny.db.algebra.core.relational.RelScan; import org.polypheny.db.algebra.logical.common.LogicalConditionalExecute; import org.polypheny.db.algebra.logical.document.LogicalDocumentValues; import org.polypheny.db.algebra.logical.relational.LogicalAggregate; @@ -67,13 +68,13 @@ import org.polypheny.db.algebra.logical.relational.LogicalValues; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.catalog.Catalog.EntityType; +import org.polypheny.db.catalog.entity.CatalogEntity; +import org.polypheny.db.catalog.refactor.TranslatableEntity; import org.polypheny.db.plan.AlgOptCluster; -import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptEntity.ToAlgContext; import org.polypheny.db.plan.Contexts; import org.polypheny.db.rex.RexLiteral; import org.polypheny.db.rex.RexNode; -import org.polypheny.db.schema.TranslatableEntity; import org.polypheny.db.tools.AlgBuilder; import org.polypheny.db.tools.AlgBuilderFactory; import org.polypheny.db.util.ImmutableBitSet; @@ -530,14 +531,14 @@ public AlgNode createDocuments( /** - * Can create a {@link Scan} of the appropriate type for a rule's calling convention. + * Can create a {@link RelScan} of the appropriate type for a rule's calling convention. */ public interface ScanFactory { /** - * Creates a {@link Scan}. + * Creates a {@link RelScan}. */ - AlgNode createScan( AlgOptCluster cluster, AlgOptEntity table ); + AlgNode createScan( AlgOptCluster cluster, CatalogEntity entity ); } @@ -548,13 +549,13 @@ public interface ScanFactory { private static class ScanFactoryImpl implements ScanFactory { @Override - public AlgNode createScan( AlgOptCluster cluster, AlgOptEntity table ) { + public AlgNode createScan( AlgOptCluster cluster, CatalogEntity entity ) { // Check if RelOptTable contains a View, in this case a LogicalViewScan needs to be created - if ( table.getCatalogEntity().entityType == EntityType.VIEW ) { - return LogicalRelViewScan.create( cluster, table ); + if ( entity.entityType == EntityType.VIEW ) { + return LogicalRelViewScan.create( cluster, entity ); } else { - return LogicalRelScan.create( cluster, table ); + return LogicalRelScan.create( cluster, entity ); } } @@ -569,13 +570,13 @@ public AlgNode createScan( AlgOptCluster cluster, AlgOptEntity table ) { */ @Nonnull public static ScanFactory expandingScanFactory( @Nonnull ScanFactory scanFactory ) { - return ( cluster, table ) -> { - final TranslatableEntity translatableTable = table.unwrap( TranslatableEntity.class ); + return ( cluster, entity ) -> { + final TranslatableEntity translatableTable = entity.unwrap( TranslatableEntity.class ); if ( translatableTable != null ) { final ToAlgContext toAlgContext = () -> cluster; - return translatableTable.toAlg( toAlgContext, table, cluster.traitSet() ); + return translatableTable.toAlg( toAlgContext, cluster.traitSet() ); } - return scanFactory.createScan( cluster, table ); + return scanFactory.createScan( cluster, entity ); }; } diff --git a/core/src/main/java/org/polypheny/db/algebra/core/common/ConditionalExecute.java b/core/src/main/java/org/polypheny/db/algebra/core/common/ConditionalExecute.java index 
4c7b34d186..36d16cfee6 100644
--- a/core/src/main/java/org/polypheny/db/algebra/core/common/ConditionalExecute.java
+++ b/core/src/main/java/org/polypheny/db/algebra/core/common/ConditionalExecute.java
@@ -26,7 +26,7 @@
 import org.polypheny.db.algebra.BiAlg;
 import org.polypheny.db.algebra.type.AlgDataType;
 import org.polypheny.db.catalog.entity.CatalogSchema;
-import org.polypheny.db.catalog.entity.CatalogTable;
+import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.plan.AlgOptCluster;
 import org.polypheny.db.plan.AlgTraitSet;
@@ -48,7 +48,7 @@ public abstract class ConditionalExecute extends BiAlg {
     protected CatalogSchema catalogSchema = null;
     @Getter
     @Setter
-    protected CatalogTable catalogTable = null;
+    protected LogicalTable catalogTable = null;
     @Getter
     @Setter
     protected List catalogColumns = null;
diff --git a/core/src/main/java/org/polypheny/db/algebra/core/common/Modify.java b/core/src/main/java/org/polypheny/db/algebra/core/common/Modify.java
new file mode 100644
index 0000000000..63563e16ec
--- /dev/null
+++ b/core/src/main/java/org/polypheny/db/algebra/core/common/Modify.java
@@ -0,0 +1,51 @@
+/*
+ * Copyright 2019-2023 The Polypheny Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.polypheny.db.algebra.core.common;
+
+import lombok.Getter;
+import org.polypheny.db.algebra.AlgNode;
+import org.polypheny.db.algebra.SingleAlg;
+import org.polypheny.db.catalog.entity.CatalogEntity;
+import org.polypheny.db.plan.AlgOptCluster;
+import org.polypheny.db.plan.AlgTraitSet;
+
+public abstract class Modify<E extends CatalogEntity> extends SingleAlg {
+
+    public final E entity;
+
+
+    /**
+     * Creates a Modify.
+     *
+     * @param cluster Cluster this relational expression belongs to
+     * @param traits Traits of this relational expression
+     * @param target Target entity of the modification
+     * @param input Input relational expression
+     */
+    protected Modify( AlgOptCluster cluster, AlgTraitSet traits, E target, AlgNode input ) {
+        super( cluster, traits, input );
+        this.entity = target;
+    }
+
+
+    /**
+     * Enumeration of supported modification operations.
+     */
+    public enum Operation {
+        INSERT, UPDATE, DELETE, MERGE
+    }
+
+}
diff --git a/core/src/main/java/org/polypheny/db/algebra/core/common/Scan.java b/core/src/main/java/org/polypheny/db/algebra/core/common/Scan.java
new file mode 100644
index 0000000000..00f3326ce6
--- /dev/null
+++ b/core/src/main/java/org/polypheny/db/algebra/core/common/Scan.java
@@ -0,0 +1,40 @@
+/*
+ * Copyright 2019-2023 The Polypheny Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.polypheny.db.algebra.core.common;
+
+import org.polypheny.db.algebra.AbstractAlgNode;
+import org.polypheny.db.catalog.entity.CatalogEntity;
+import org.polypheny.db.plan.AlgOptCluster;
+import org.polypheny.db.plan.AlgTraitSet;
+
+public abstract class Scan<E extends CatalogEntity> extends AbstractAlgNode {
+
+    public final E entity;
+
+
+    /**
+     * Creates a Scan.
+     *
+     * @param cluster Cluster this relational expression belongs to
+     * @param traitSet Traits of this relational expression
+     * @param entity Entity which is scanned
+     */
+    public Scan( AlgOptCluster cluster, AlgTraitSet traitSet, E entity ) {
+        super( cluster, traitSet );
+        this.entity = entity;
+    }
+
+}
diff --git a/core/src/main/java/org/polypheny/db/algebra/core/common/Transformer.java b/core/src/main/java/org/polypheny/db/algebra/core/common/Transformer.java
index 93d215fdbc..5e0b776c3e 100644
--- a/core/src/main/java/org/polypheny/db/algebra/core/common/Transformer.java
+++ b/core/src/main/java/org/polypheny/db/algebra/core/common/Transformer.java
@@ -24,7 +24,7 @@
 import org.polypheny.db.algebra.AbstractAlgNode;
 import org.polypheny.db.algebra.AlgNode;
 import org.polypheny.db.algebra.AlgVisitor;
-import org.polypheny.db.algebra.core.Scan;
+import org.polypheny.db.algebra.core.relational.RelScan;
 import org.polypheny.db.algebra.core.Union;
 import org.polypheny.db.algebra.core.lpg.LpgScan;
 import org.polypheny.db.algebra.logical.relational.LogicalProject;
@@ -51,7 +51,7 @@ public class Transformer extends AbstractAlgNode {
     * Creates a {@link Transformer}, which is able to switch {@link ModelTraitDef} for
     * non-native underlying adapters if needed.
     * For example, it will transform the {@link LpgScan}, which can be handled directly by
-    * a native adapter, to a combination of {@link Scan} and {@link Union}.
+    * a native adapter, to a combination of {@link RelScan} and {@link Union}.
     */
    public Transformer( AlgOptCluster cluster, List<AlgNode> inputs, @Nullable List<String> names, AlgTraitSet traitSet, ModelTrait inModelTrait, ModelTrait outModelTrait, AlgDataType rowType, boolean isCrossModel ) {
        super( cluster, traitSet.replace( outModelTrait ) );
diff --git a/core/src/main/java/org/polypheny/db/algebra/core/document/DocumentAlg.java b/core/src/main/java/org/polypheny/db/algebra/core/document/DocumentAlg.java
index f8d274a2b8..9c64b06519 100644
--- a/core/src/main/java/org/polypheny/db/algebra/core/document/DocumentAlg.java
+++ b/core/src/main/java/org/polypheny/db/algebra/core/document/DocumentAlg.java
@@ -27,11 +27,6 @@
     DocType getDocType();
-    default AlgOptEntity getCollection() {
-        assert this instanceof AlgNode;
-        return ((AlgNode) this).getEntity();
-    }
-
     enum DocType {
         SCAN, FILTER, VALUES, PROJECT, AGGREGATE, SORT, MODIFY
     }
diff --git a/core/src/main/java/org/polypheny/db/algebra/core/document/DocumentModify.java b/core/src/main/java/org/polypheny/db/algebra/core/document/DocumentModify.java
index f1da086882..e9480b362f 100644
--- a/core/src/main/java/org/polypheny/db/algebra/core/document/DocumentModify.java
+++ b/core/src/main/java/org/polypheny/db/algebra/core/document/DocumentModify.java
@@ -21,8 +21,10 @@
 import org.polypheny.db.algebra.AlgNode;
 import org.polypheny.db.algebra.SingleAlg;
 import org.polypheny.db.algebra.constant.Kind;
-import org.polypheny.db.algebra.core.Modify.Operation;
+import org.polypheny.db.algebra.core.common.Modify.Operation;
+import org.polypheny.db.algebra.core.common.Modify;
 import org.polypheny.db.algebra.type.AlgDataType;
+import org.polypheny.db.catalog.entity.CatalogEntity;
 import org.polypheny.db.plan.AlgOptEntity;
 import org.polypheny.db.plan.AlgOptUtil;
 import
org.polypheny.db.plan.AlgTraitSet; @@ -30,12 +32,10 @@ import org.polypheny.db.rex.RexNode; -public abstract class DocumentModify extends SingleAlg implements DocumentAlg { +public abstract class DocumentModify extends Modify implements DocumentAlg { public final Operation operation; @Getter - private final AlgOptEntity collection; - @Getter private final List keys; @Getter private final List updates; @@ -47,10 +47,9 @@ public abstract class DocumentModify extends SingleAlg implements DocumentAlg { * Creates a {@link DocumentModify}. * {@link org.polypheny.db.schema.ModelTrait#DOCUMENT} node, which modifies a collection. */ - protected DocumentModify( AlgTraitSet traits, AlgOptEntity collection, CatalogReader catalogReader, AlgNode input, Operation operation, List keys, List updates ) { - super( input.getCluster(), input.getTraitSet(), input ); + protected DocumentModify( AlgTraitSet traits, E collection, CatalogReader catalogReader, AlgNode input, Operation operation, List keys, List updates ) { + super( input.getCluster(), input.getTraitSet(), collection, input ); this.operation = operation; - this.collection = collection; this.keys = keys; this.updates = updates; this.catalogReader = catalogReader; @@ -58,12 +57,6 @@ protected DocumentModify( AlgTraitSet traits, AlgOptEntity collection, CatalogRe } - @Override - public AlgOptEntity getEntity() { - return collection; - } - - @Override public AlgDataType deriveRowType() { return AlgOptUtil.createDmlRowType( Kind.INSERT, getCluster().getTypeFactory() ); @@ -87,12 +80,12 @@ public DocType getDocType() { public boolean isInsert() { - return operation == Operation.INSERT; + return operation == Modify.Operation.INSERT; } public boolean isDelete() { - return operation == Operation.DELETE; + return operation == Modify.Operation.DELETE; } } diff --git a/core/src/main/java/org/polypheny/db/algebra/core/document/DocumentScan.java b/core/src/main/java/org/polypheny/db/algebra/core/document/DocumentScan.java index fe76fd3bc9..093a3d4926 100644 --- a/core/src/main/java/org/polypheny/db/algebra/core/document/DocumentScan.java +++ b/core/src/main/java/org/polypheny/db/algebra/core/document/DocumentScan.java @@ -18,37 +18,32 @@ import java.util.List; import java.util.stream.Collectors; -import lombok.Getter; -import org.polypheny.db.algebra.AbstractAlgNode; +import org.polypheny.db.algebra.core.relational.RelScan; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.algebra.type.AlgDataTypeFieldImpl; import org.polypheny.db.algebra.type.AlgRecordType; import org.polypheny.db.algebra.type.DocumentType; import org.polypheny.db.catalog.Catalog.NamespaceType; +import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.plan.AlgOptCluster; -import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.type.PolyType; -public abstract class DocumentScan extends AbstractAlgNode implements DocumentAlg { - - @Getter - private final AlgOptEntity collection; +public abstract class DocumentScan extends RelScan implements DocumentAlg { /** * Creates a {@link DocumentScan}. * {@link org.polypheny.db.schema.ModelTrait#DOCUMENT} node, which scans the content of a collection. 
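* For an entity from a document namespace the row type is a plain DocumentType; for entities from other data models the relational fields are mapped into the document row type (see the constructor below).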
*/ - public DocumentScan( AlgOptCluster cluster, AlgTraitSet traitSet, AlgOptEntity collection ) { - super( cluster, traitSet ); - this.collection = collection; + public DocumentScan( AlgOptCluster cluster, AlgTraitSet traitSet, E collection ) { + super( cluster, traitSet, collection ); AlgDataType docType = cluster.getTypeFactory().createPolyType( PolyType.DOCUMENT ); // todo dl: change after RowType refactor - if ( this.collection.getCatalogEntity().namespaceType == NamespaceType.DOCUMENT ) { + if ( getEntity().namespaceType == NamespaceType.DOCUMENT ) { this.rowType = new DocumentType(); } else { List list = collection.getRowType().getFieldList().stream() @@ -61,7 +56,7 @@ public DocumentScan( AlgOptCluster cluster, AlgTraitSet traitSet, AlgOptEntity c @Override public String algCompareString() { - return "$" + getClass().getSimpleName() + "$" + collection.getCatalogEntity().id + "$"; + return "$" + getClass().getSimpleName() + "$" + entity.id + "$"; } diff --git a/core/src/main/java/org/polypheny/db/algebra/core/document/DocumentValues.java b/core/src/main/java/org/polypheny/db/algebra/core/document/DocumentValues.java index 6069458275..3a5e5854b9 100644 --- a/core/src/main/java/org/polypheny/db/algebra/core/document/DocumentValues.java +++ b/core/src/main/java/org/polypheny/db/algebra/core/document/DocumentValues.java @@ -30,8 +30,8 @@ import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.logical.relational.LogicalValues; import org.polypheny.db.algebra.type.DocumentType; +import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.plan.AlgOptCluster; -import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.prepare.Prepare.CatalogReader; import org.polypheny.db.rex.RexBuilder; @@ -130,7 +130,7 @@ public LogicalValues getRelationalEquivalent() { } - public List getRelationalEquivalent( List values, List entities, CatalogReader catalogReader ) { + public List getRelationalEquivalent( List values, List entities, CatalogReader catalogReader ) { return List.of( getRelationalEquivalent() ); } diff --git a/core/src/main/java/org/polypheny/db/algebra/core/lpg/LpgAlg.java b/core/src/main/java/org/polypheny/db/algebra/core/lpg/LpgAlg.java index b60098fec0..35e602a21b 100644 --- a/core/src/main/java/org/polypheny/db/algebra/core/lpg/LpgAlg.java +++ b/core/src/main/java/org/polypheny/db/algebra/core/lpg/LpgAlg.java @@ -16,7 +16,9 @@ package org.polypheny.db.algebra.core.lpg; -import org.polypheny.db.catalog.entity.CatalogGraphDatabase; +import org.polypheny.db.catalog.entity.CatalogEntity; +import org.polypheny.db.catalog.entity.logical.LogicalGraph; +import org.polypheny.db.schema.LogicalEntity; /** @@ -26,9 +28,6 @@ public interface LpgAlg { NodeType getNodeType(); - default CatalogGraphDatabase getGraph() { - return null; - } enum NodeType { MATCH, diff --git a/core/src/main/java/org/polypheny/db/algebra/core/lpg/LpgModify.java b/core/src/main/java/org/polypheny/db/algebra/core/lpg/LpgModify.java index a7cec40137..ccbaa8a666 100644 --- a/core/src/main/java/org/polypheny/db/algebra/core/lpg/LpgModify.java +++ b/core/src/main/java/org/polypheny/db/algebra/core/lpg/LpgModify.java @@ -19,34 +19,31 @@ import java.util.List; import lombok.Getter; import org.polypheny.db.algebra.AlgNode; -import org.polypheny.db.algebra.SingleAlg; -import org.polypheny.db.algebra.core.Modify.Operation; +import org.polypheny.db.algebra.core.common.Modify; import org.polypheny.db.algebra.type.AlgDataType; -import 
org.polypheny.db.catalog.entity.CatalogGraphDatabase; +import org.polypheny.db.catalog.entity.CatalogEntity; +import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.rex.RexNode; -public abstract class LpgModify extends SingleAlg implements LpgAlg { +public abstract class LpgModify extends Modify implements LpgAlg { public final Operation operation; public final List ids; public final List operations; - @Getter - public final CatalogGraphDatabase graph; /** * Creates a {@link LpgModify}. * {@link org.polypheny.db.schema.ModelTrait#GRAPH} node, which is able to modify an LPG graph. */ - protected LpgModify( AlgOptCluster cluster, AlgTraitSet traits, CatalogGraphDatabase graph, AlgNode input, Operation operation, List ids, List operations, AlgDataType dmlRowType ) { - super( cluster, traits, input ); + protected LpgModify( AlgOptCluster cluster, AlgTraitSet traits, E graph, AlgNode input, Operation operation, List ids, List operations, AlgDataType dmlRowType ) { + super( cluster, traits, graph, input ); this.operation = operation; this.ids = ids; this.operations = operations; - this.graph = graph; this.rowType = dmlRowType; } diff --git a/core/src/main/java/org/polypheny/db/algebra/core/lpg/LpgScan.java b/core/src/main/java/org/polypheny/db/algebra/core/lpg/LpgScan.java index 3cbc215820..05f47834ef 100644 --- a/core/src/main/java/org/polypheny/db/algebra/core/lpg/LpgScan.java +++ b/core/src/main/java/org/polypheny/db/algebra/core/lpg/LpgScan.java @@ -17,36 +17,32 @@ package org.polypheny.db.algebra.core.lpg; import java.util.List; -import lombok.Getter; -import org.polypheny.db.algebra.AbstractAlgNode; +import org.polypheny.db.algebra.core.common.Scan; +import org.polypheny.db.algebra.core.relational.RelScan; import org.polypheny.db.algebra.type.AlgDataTypeFieldImpl; import org.polypheny.db.algebra.type.AlgRecordType; -import org.polypheny.db.catalog.entity.CatalogGraphDatabase; +import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.type.PolyType; -public abstract class LpgScan extends AbstractAlgNode implements LpgAlg { - - @Getter - protected final CatalogGraphDatabase graph; //TranslatableGraph graph; +public abstract class LpgScan extends Scan implements LpgAlg { /** * Creates a {@link LpgScan}. * {@link org.polypheny.db.schema.ModelTrait#GRAPH} native node, which is able to scan a LPG graph. 
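* Regardless of the underlying entity, the row type is a single field named "g" of type GRAPH (fixed in the constructor below).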
*/ - public LpgScan( AlgOptCluster cluster, AlgTraitSet traitSet, CatalogGraphDatabase graph ) { - super( cluster, traitSet ); - this.graph = graph; + public LpgScan( AlgOptCluster cluster, AlgTraitSet traitSet, E graph ) { + super( cluster, traitSet, graph ); this.rowType = new AlgRecordType( List.of( new AlgDataTypeFieldImpl( "g", 0, cluster.getTypeFactory().createPolyType( PolyType.GRAPH ) ) ) ); } @Override public String algCompareString() { - return "$" + getClass().getSimpleName() + "$" + graph.id; + return "$" + getClass().getSimpleName() + "$" + entity.id; } diff --git a/core/src/main/java/org/polypheny/db/algebra/core/lpg/LpgTransformer.java b/core/src/main/java/org/polypheny/db/algebra/core/lpg/LpgTransformer.java index f1d66e2fd4..2a24510aa4 100644 --- a/core/src/main/java/org/polypheny/db/algebra/core/lpg/LpgTransformer.java +++ b/core/src/main/java/org/polypheny/db/algebra/core/lpg/LpgTransformer.java @@ -20,7 +20,7 @@ import java.util.stream.Collectors; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.AlgWriter; -import org.polypheny.db.algebra.core.Modify.Operation; +import org.polypheny.db.algebra.core.common.Modify.Operation; import org.polypheny.db.algebra.core.common.Transformer; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.plan.AlgOptCluster; diff --git a/core/src/main/java/org/polypheny/db/algebra/core/Modify.java b/core/src/main/java/org/polypheny/db/algebra/core/relational/RelModify.java similarity index 73% rename from core/src/main/java/org/polypheny/db/algebra/core/Modify.java rename to core/src/main/java/org/polypheny/db/algebra/core/relational/RelModify.java index 4d3cd81f3a..d07f29ad05 100644 --- a/core/src/main/java/org/polypheny/db/algebra/core/Modify.java +++ b/core/src/main/java/org/polypheny/db/algebra/core/relational/RelModify.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2022 The Polypheny Project + * Copyright 2019-2023 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -12,26 +12,9 @@ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. - * - * This file incorporates code covered by the following terms: - * - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to you under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
*/ -package org.polypheny.db.algebra.core; +package org.polypheny.db.algebra.core.relational; import com.google.common.base.Preconditions; @@ -41,14 +24,14 @@ import lombok.Getter; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.AlgWriter; -import org.polypheny.db.algebra.SingleAlg; import org.polypheny.db.algebra.constant.Kind; +import org.polypheny.db.algebra.core.common.Modify; import org.polypheny.db.algebra.metadata.AlgMetadataQuery; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; +import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgOptCost; -import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptPlanner; import org.polypheny.db.plan.AlgOptUtil; import org.polypheny.db.plan.AlgTraitSet; @@ -60,7 +43,7 @@ /** * Relational expression that modifies a table. * - * It is similar to {@link Scan}, but represents a request to modify a table rather than read from it. + * It is similar to {@link RelScan}, but represents a request to modify a table rather than read from it. * It takes one child which produces the modified rows. Those rows are: * *
@@ -69,14 +52,7 @@
 * <li>for {@code UPDATE}, all old values plus updated new values.
 * </ul>
    */ -public abstract class Modify extends SingleAlg { - - /** - * Enumeration of supported modification operations. - */ - public enum Operation { - INSERT, UPDATE, DELETE, MERGE - } +public abstract class RelModify extends Modify { /** @@ -88,8 +64,6 @@ public enum Operation { * The table definition. */ @Getter - protected final AlgOptEntity table; - @Getter private final Operation operation; @Getter private final List updateColumnList; @@ -111,17 +85,14 @@ public enum Operation { * @param cluster Cluster this relational expression belongs to * @param traitSet Traits of this relational expression * @param table Target table to modify - * @param catalogReader accessor to the table metadata. * @param input Sub-query or filter condition * @param operation Modify operation (INSERT, UPDATE, DELETE) * @param updateColumnList List of column identifiers to be updated (e.g. ident1, ident2); null if not UPDATE * @param sourceExpressionList List of value expressions to be set (e.g. exp1, exp2); null if not UPDATE * @param flattened Whether set flattens the input row type */ - protected Modify( AlgOptCluster cluster, AlgTraitSet traitSet, AlgOptEntity table, Prepare.CatalogReader catalogReader, AlgNode input, Operation operation, List updateColumnList, List sourceExpressionList, boolean flattened ) { - super( cluster, traitSet, input ); - this.table = table; - this.catalogReader = catalogReader; + protected RelModify( AlgOptCluster cluster, AlgTraitSet traitSet, E table, AlgNode input, Operation operation, List updateColumnList, List sourceExpressionList, boolean flattened ) { + super( cluster, traitSet, table, input ); this.operation = operation; this.updateColumnList = updateColumnList; this.sourceExpressionList = sourceExpressionList; @@ -133,17 +104,10 @@ protected Modify( AlgOptCluster cluster, AlgTraitSet traitSet, AlgOptEntity tabl Preconditions.checkArgument( updateColumnList == null ); Preconditions.checkArgument( sourceExpressionList == null ); } - if ( table.getRelOptSchema() != null ) { - cluster.getPlanner().registerSchema( table.getRelOptSchema() ); - } this.flattened = flattened; } - public Prepare.CatalogReader getCatalogReader() { - return catalogReader; - } - public boolean isInsert() { return operation == Operation.INSERT; @@ -180,9 +144,9 @@ public AlgDataType getExpectedInputRowType( int ordinalInParent ) { } final AlgDataTypeFactory typeFactory = getCluster().getTypeFactory(); - final AlgDataType rowType = table.getRowType(); + final AlgDataType rowType = entity.getRowType(); switch ( operation ) { - case UPDATE: + /*case UPDATE: inputRowType = typeFactory.createJoinType( rowType, @@ -193,7 +157,7 @@ public AlgDataType getExpectedInputRowType( int ordinalInParent ) { typeFactory.createJoinType( typeFactory.createJoinType( rowType, rowType ), getCatalogReader().createTypeFromProjection( rowType, updateColumnList ) ); - break; + break;*/ // todo dl rewrite default: inputRowType = rowType; break; @@ -210,7 +174,7 @@ public AlgDataType getExpectedInputRowType( int ordinalInParent ) { @Override public AlgWriter explainTerms( AlgWriter pw ) { return super.explainTerms( pw ) - .item( "table", table.getCatalogEntity().id ) + .item( "table", entity.id ) .item( "operation", getOperation() ) .itemIf( "updateColumnList", updateColumnList, updateColumnList != null ) .itemIf( "sourceExpressionList", sourceExpressionList, sourceExpressionList != null ) @@ -229,7 +193,7 @@ public AlgOptCost computeSelfCost( AlgOptPlanner planner, AlgMetadataQuery mq ) @Override public String 
algCompareString() { return this.getClass().getSimpleName() + "$" + - "." + table.getCatalogEntity().id + "$" + + "." + entity.id + "$" + (getInputs() != null ? getInputs().stream().map( AlgNode::algCompareString ).collect( Collectors.joining( "$" ) ) + "$" : "") + getOperation().name() + "$" + (getUpdateColumnList() != null ? String.join( "$", getUpdateColumnList() ) + "$" : "") + @@ -238,10 +202,5 @@ public String algCompareString() { } - @Override - public AlgOptEntity getEntity() { - return table; - } - } diff --git a/core/src/main/java/org/polypheny/db/algebra/core/Scan.java b/core/src/main/java/org/polypheny/db/algebra/core/relational/RelScan.java similarity index 68% rename from core/src/main/java/org/polypheny/db/algebra/core/Scan.java rename to core/src/main/java/org/polypheny/db/algebra/core/relational/RelScan.java index f422172179..8acc558d7d 100644 --- a/core/src/main/java/org/polypheny/db/algebra/core/Scan.java +++ b/core/src/main/java/org/polypheny/db/algebra/core/relational/RelScan.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2022 The Polypheny Project + * Copyright 2019-2023 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -12,42 +12,27 @@ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. - * - * This file incorporates code covered by the following terms: - * - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to you under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. */ -package org.polypheny.db.algebra.core; +package org.polypheny.db.algebra.core.relational; import java.util.ArrayList; import java.util.List; import java.util.Set; -import org.polypheny.db.algebra.AbstractAlgNode; +import lombok.NonNull; import org.polypheny.db.algebra.AlgInput; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.AlgShuttle; import org.polypheny.db.algebra.AlgWriter; +import org.polypheny.db.algebra.core.Project; +import org.polypheny.db.algebra.core.common.Scan; import org.polypheny.db.algebra.metadata.AlgMetadataQuery; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeField; +import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgOptCost; -import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptPlanner; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.rex.RexBuilder; @@ -60,46 +45,37 @@ /** * Relational operator that returns the contents of a table. */ -public abstract class Scan extends AbstractAlgNode { +public abstract class RelScan extends Scan { /** - * The table definition. 
+ * The entity definition. */ - protected final AlgOptEntity table; + public final E entity; - protected Scan( AlgOptCluster cluster, AlgTraitSet traitSet, AlgOptEntity table ) { - super( cluster, traitSet ); - this.table = table; - if ( table.getRelOptSchema() != null ) { - cluster.getPlanner().registerSchema( table.getRelOptSchema() ); - } + protected RelScan( AlgOptCluster cluster, AlgTraitSet traitSet, @NonNull E entity ) { + super( cluster, traitSet, entity ); + this.entity = entity; } /** * Creates a Scan by parsing serialized output. */ - protected Scan( AlgInput input ) { - this( input.getCluster(), input.getTraitSet(), input.getTable( "table" ) ); + protected RelScan( AlgInput input ) { + this( input.getCluster(), input.getTraitSet(), (E) input.getEntity( "entity" ) ); } @Override public double estimateRowCount( AlgMetadataQuery mq ) { - return table.getRowCount(); - } - - - @Override - public AlgOptEntity getEntity() { - return table; + return entity.getRowCount(); } @Override public AlgOptCost computeSelfCost( AlgOptPlanner planner, AlgMetadataQuery mq ) { - double dRows = table.getRowCount(); + double dRows = entity.getRowCount(); double dCpu = dRows + 1; // ensure non-zero cost double dIo = 0; return planner.getCostFactory().makeCost( dRows, dCpu, dIo ); @@ -108,15 +84,15 @@ public AlgOptCost computeSelfCost( AlgOptPlanner planner, AlgMetadataQuery mq ) @Override public AlgDataType deriveRowType() { - return table.getRowType(); + return entity.getRowType(); } /** * Returns an identity projection for the given table. */ - public static ImmutableIntList identity( AlgOptEntity table ) { - return ImmutableIntList.identity( table.getRowType().getFieldCount() ); + public static ImmutableIntList identity( CatalogEntity entity ) { + return ImmutableIntList.identity( entity.getRowType().getFieldCount() ); } @@ -124,13 +100,13 @@ public static ImmutableIntList identity( AlgOptEntity table ) { * Returns an identity projection. 
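* For example, an entity whose row type has three fields yields the projection [0, 1, 2].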
*/ public ImmutableIntList identity() { - return identity( table ); + return identity( entity ); } @Override public AlgWriter explainTerms( AlgWriter pw ) { - return super.explainTerms( pw ).item( "table", table.getCatalogEntity().id ); + return super.explainTerms( pw ).item( "table", id ); } @@ -183,7 +159,7 @@ public AlgNode accept( AlgShuttle shuttle ) { @Override public String algCompareString() { return this.getClass().getSimpleName() + "$" + - table.getCatalogEntity().id + "&"; + entity.id + "&"; } } diff --git a/core/src/main/java/org/polypheny/db/algebra/core/relational/RelationalTransformable.java b/core/src/main/java/org/polypheny/db/algebra/core/relational/RelationalTransformable.java index 10b463887b..b5bf0fc617 100644 --- a/core/src/main/java/org/polypheny/db/algebra/core/relational/RelationalTransformable.java +++ b/core/src/main/java/org/polypheny/db/algebra/core/relational/RelationalTransformable.java @@ -18,11 +18,11 @@ import java.util.List; import org.polypheny.db.algebra.AlgNode; -import org.polypheny.db.algebra.core.Modify; -import org.polypheny.db.algebra.core.Modify.Operation; -import org.polypheny.db.plan.AlgOptEntity; +import org.polypheny.db.algebra.core.common.Modify; +import org.polypheny.db.algebra.core.common.Modify.Operation; +import org.polypheny.db.catalog.entity.CatalogEntity; +import org.polypheny.db.catalog.refactor.ModifiableEntity; import org.polypheny.db.prepare.Prepare.CatalogReader; -import org.polypheny.db.schema.ModifiableEntity; /** @@ -35,11 +35,11 @@ default CatalogReader getCatalogReader() { } - List getRelationalEquivalent( List values, List entities, CatalogReader catalogReader ); + List getRelationalEquivalent( List values, List entities, CatalogReader catalogReader ); - static Modify getModify( AlgOptEntity table, CatalogReader catalogReader, AlgNode alg, Operation operation ) { - return table.unwrap( ModifiableEntity.class ).toModificationAlg( alg.getCluster(), table, catalogReader, alg, operation, null, null, true ); + static Modify getModify( CatalogEntity entity, AlgNode alg, Operation operation ) { + return entity.unwrap( ModifiableEntity.class ).toModificationAlg( alg.getCluster(), alg.getTraitSet(), entity, alg, operation, null, null ); } } diff --git a/core/src/main/java/org/polypheny/db/algebra/externalize/AlgJsonReader.java b/core/src/main/java/org/polypheny/db/algebra/externalize/AlgJsonReader.java index b3925add6f..02da71ea01 100644 --- a/core/src/main/java/org/polypheny/db/algebra/externalize/AlgJsonReader.java +++ b/core/src/main/java/org/polypheny/db/algebra/externalize/AlgJsonReader.java @@ -53,8 +53,8 @@ import org.polypheny.db.algebra.AlgInput; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.type.AlgDataType; +import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.plan.AlgOptCluster; -import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptSchema; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.plan.Convention; @@ -110,7 +110,7 @@ private void readRels( Map jsonRels ) { private void readAlg( final Map jsonAlg ) { String id = (String) jsonAlg.get( "id" ); String type = (String) jsonAlg.get( "relOp" ); - Constructor constructor = algJson.getConstructor( type ); + Constructor constructor = algJson.getConstructor( type ); AlgInput input = new AlgInput() { @Override public AlgOptCluster getCluster() { @@ -125,22 +125,22 @@ public AlgTraitSet getTraitSet() { @Override - public AlgOptEntity getTable( String table ) { + public CatalogEntity 
getEntity( String entity ) { final List list; - if ( jsonAlg.get( table ) instanceof String ) { - String str = (String) jsonAlg.get( table ); + if ( jsonAlg.get( entity ) instanceof String ) { + String str = (String) jsonAlg.get( entity ); // MV: This is not a nice solution... if ( str.startsWith( "[" ) && str.endsWith( "]" ) ) { str = str.substring( 1, str.length() - 1 ); list = new LinkedList<>(); list.add( StringUtils.join( Arrays.asList( str.split( "," ) ), ", " ) ); } else { - list = getStringList( table ); + list = getStringList( entity ); } } else { - list = getStringList( table ); + list = getStringList( entity ); } - return algOptSchema.getTableForMember( list ); + return null; // todo change } diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalConstraintEnforcer.java b/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalConstraintEnforcer.java index e0906cff51..811df67ad1 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalConstraintEnforcer.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalConstraintEnforcer.java @@ -30,11 +30,11 @@ import org.polypheny.db.algebra.AlgShuttle; import org.polypheny.db.algebra.AlgShuttleImpl; import org.polypheny.db.algebra.core.JoinAlgType; -import org.polypheny.db.algebra.core.Modify; +import org.polypheny.db.algebra.core.relational.RelModify; import org.polypheny.db.algebra.core.common.ConstraintEnforcer; import org.polypheny.db.algebra.exceptions.ConstraintViolationException; import org.polypheny.db.algebra.logical.relational.LogicalFilter; -import org.polypheny.db.algebra.logical.relational.LogicalModify; +import org.polypheny.db.algebra.logical.relational.LogicalRelModify; import org.polypheny.db.algebra.logical.relational.LogicalRelScan; import org.polypheny.db.algebra.operators.OperatorName; import org.polypheny.db.catalog.Catalog; @@ -43,7 +43,7 @@ import org.polypheny.db.catalog.entity.CatalogForeignKey; import org.polypheny.db.catalog.entity.CatalogKey.EnforcementTime; import org.polypheny.db.catalog.entity.CatalogPrimaryKey; -import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.config.RuntimeConfig; import org.polypheny.db.languages.OperatorRegistry; import org.polypheny.db.plan.AlgOptCluster; @@ -84,13 +84,13 @@ public LogicalConstraintEnforcer( AlgOptCluster cluster, AlgTraitSet traitSet, A private static EnforcementInformation getControl( AlgNode node, Statement statement ) { ModifyExtractor extractor = new ModifyExtractor(); node.accept( extractor ); - Modify modify = extractor.getModify(); + RelModify modify = extractor.getModify(); if ( modify == null ) { throw new RuntimeException( "The tree did no conform, while generating the constraint enforcement query!" 
); } - final CatalogTable table = getCatalogTable( modify ); + final LogicalTable table = getCatalogTable( modify ); AlgBuilder builder = AlgBuilder.create( statement ); final RexBuilder rexBuilder = modify.getCluster().getRexBuilder(); @@ -219,7 +219,7 @@ private static EnforcementInformation getControl( AlgNode node, Statement statem } - public static EnforcementInformation getControl( CatalogTable table, Statement statement, EnforcementTime enforcementTime ) { + public static EnforcementInformation getControl( LogicalTable table, Statement statement, EnforcementTime enforcementTime ) { AlgBuilder builder = AlgBuilder.create( statement ); final RexBuilder rexBuilder = builder.getRexBuilder(); @@ -398,12 +398,12 @@ public AlgNode accept( AlgShuttle shuttle ) { } - public static CatalogTable getCatalogTable( Modify modify ) { + public static LogicalTable getCatalogTable( RelModify modify ) { if ( modify.getEntity() == null ) { throw new RuntimeException( "The table was not found in the catalog!" ); } - return (CatalogTable) modify.getEntity().getCatalogEntity(); + return (LogicalTable) modify.getEntity().getCatalogEntity(); } @@ -435,11 +435,11 @@ public EnforcementInformation( AlgNode control, List> public static class ModifyExtractor extends AlgShuttleImpl { @Getter - private LogicalModify modify; + private LogicalRelModify modify; @Override - public AlgNode visit( LogicalModify modify ) { + public AlgNode visit( LogicalRelModify modify ) { this.modify = modify; return modify; } diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalStreamer.java b/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalStreamer.java index b5a1d2c13d..07185fd6e0 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalStreamer.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalStreamer.java @@ -22,11 +22,11 @@ import java.util.stream.Collectors; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.AlgShuttle; -import org.polypheny.db.algebra.core.Modify; -import org.polypheny.db.algebra.core.Scan; +import org.polypheny.db.algebra.core.relational.RelModify; +import org.polypheny.db.algebra.core.relational.RelScan; import org.polypheny.db.algebra.core.Values; import org.polypheny.db.algebra.core.common.Streamer; -import org.polypheny.db.algebra.logical.relational.LogicalModify; +import org.polypheny.db.algebra.logical.relational.LogicalRelModify; import org.polypheny.db.algebra.logical.relational.LogicalProject; import org.polypheny.db.algebra.logical.relational.LogicalValues; import org.polypheny.db.algebra.type.AlgDataType; @@ -63,7 +63,7 @@ public static LogicalStreamer create( AlgNode provider, AlgNode collector ) { } - public static LogicalStreamer create( Modify modify, AlgBuilder algBuilder ) { + public static LogicalStreamer create( RelModify modify, AlgBuilder algBuilder ) { RexBuilder rexBuilder = algBuilder.getRexBuilder(); if ( !isModifyApplicable( modify ) ) { @@ -79,7 +79,7 @@ public static LogicalStreamer create( Modify modify, AlgBuilder algBuilder ) { } - private static LogicalStreamer getLogicalStreamer( Modify modify, AlgBuilder algBuilder, RexBuilder rexBuilder, AlgNode input ) { + private static LogicalStreamer getLogicalStreamer( RelModify modify, AlgBuilder algBuilder, RexBuilder rexBuilder, AlgNode input ) { if ( input == null ) { throw new RuntimeException( "Error while creating Streamer." 
); } @@ -124,9 +124,8 @@ private static LogicalStreamer getLogicalStreamer( Modify modify, AlgBuilder alg ); } - LogicalModify prepared = LogicalModify.create( + LogicalRelModify prepared = LogicalRelModify.create( modify.getEntity(), - modify.getCatalogReader(), algBuilder.build(), modify.getOperation(), modify.getUpdateColumnList(), @@ -137,7 +136,7 @@ private static LogicalStreamer getLogicalStreamer( Modify modify, AlgBuilder alg } - private static List createSourceList( Modify modify, RexBuilder rexBuilder ) { + private static List createSourceList( RelModify modify, RexBuilder rexBuilder ) { return modify.getUpdateColumnList() .stream() .map( name -> { @@ -149,7 +148,7 @@ private static List createSourceList( Modify modify, RexBuil } - private static void attachFilter( Modify modify, AlgBuilder algBuilder, RexBuilder rexBuilder ) { + private static void attachFilter( RelModify modify, AlgBuilder algBuilder, RexBuilder rexBuilder ) { List fields = new ArrayList<>(); int i = 0; for ( AlgDataTypeField field : modify.getEntity().getRowType().getFieldList() ) { @@ -174,14 +173,14 @@ private static AlgNode getChild( AlgNode child ) { } - public static boolean isModifyApplicable( Modify modify ) { + public static boolean isModifyApplicable( RelModify modify ) { // simple delete, which all stores should be able to handle by themselves if ( modify.isInsert() && modify.getInput() instanceof Values ) { // simple insert, which all stores should be able to handle by themselves return false; } else { - return !modify.isDelete() || !(modify.getInput() instanceof Scan); + return !modify.isDelete() || !(modify.getInput() instanceof RelScan); } } diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/document/LogicalDocumentModify.java b/core/src/main/java/org/polypheny/db/algebra/logical/document/LogicalDocumentModify.java index c6a773e76a..846ef690a8 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/document/LogicalDocumentModify.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/document/LogicalDocumentModify.java @@ -19,9 +19,9 @@ import java.util.List; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.AlgShuttle; -import org.polypheny.db.algebra.core.Modify.Operation; import org.polypheny.db.algebra.core.document.DocumentModify; import org.polypheny.db.algebra.core.relational.RelationalTransformable; +import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.prepare.Prepare.CatalogReader; @@ -50,7 +50,7 @@ public AlgNode copy( AlgTraitSet traitSet, List inputs ) { @Override - public List getRelationalEquivalent( List values, List entities, CatalogReader catalogReader ) { + public List getRelationalEquivalent( List values, List entities, CatalogReader catalogReader ) { return List.of( RelationalTransformable.getModify( entities.get( 0 ), catalogReader, values.get( 0 ), operation ) ); } diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/document/LogicalDocumentScan.java b/core/src/main/java/org/polypheny/db/algebra/logical/document/LogicalDocumentScan.java index e058252473..b7530355f2 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/document/LogicalDocumentScan.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/document/LogicalDocumentScan.java @@ -22,6 +22,7 @@ import org.polypheny.db.algebra.core.document.DocumentScan; import
org.polypheny.db.algebra.core.relational.RelationalTransformable; import org.polypheny.db.algebra.logical.relational.LogicalRelScan; +import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptRule; @@ -46,7 +47,7 @@ public static AlgNode create( AlgOptCluster cluster, AlgOptEntity collection ) { @Override - public List getRelationalEquivalent( List values, List entities, CatalogReader catalogReader ) { + public List getRelationalEquivalent( List values, List entities, CatalogReader catalogReader ) { return List.of( AlgOptRule.convert( LogicalRelScan.create( getCluster(), entities.get( 0 ) ), ModelTrait.RELATIONAL ) ); } diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalGraph.java b/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalGraph.java index e20bd78e38..b88e18ef13 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalGraph.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalGraph.java @@ -26,7 +26,7 @@ import org.polypheny.db.algebra.core.relational.RelationalTransformable; import org.polypheny.db.algebra.type.AlgDataTypeImpl; import org.polypheny.db.algebra.type.AlgProtoDataType; -import org.polypheny.db.plan.AlgOptEntity; +import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.plan.AlgOptEntity.ToAlgContext; import org.polypheny.db.prepare.Prepare.CatalogReader; import org.polypheny.db.schema.Entity; @@ -55,7 +55,7 @@ public LogicalGraph( long id ) { @Override - public List getRelationalEquivalent( List values, List entities, CatalogReader catalogReader ) { + public List getRelationalEquivalent( List values, List entities, CatalogReader catalogReader ) { return null; } diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgModify.java b/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgModify.java index e9d8162fe4..e8bb277847 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgModify.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgModify.java @@ -21,47 +21,45 @@ import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.AlgShuttle; import org.polypheny.db.algebra.constant.Kind; -import org.polypheny.db.algebra.core.Modify; -import org.polypheny.db.algebra.core.Modify.Operation; +import org.polypheny.db.algebra.core.relational.RelModify; import org.polypheny.db.algebra.core.lpg.LpgModify; import org.polypheny.db.algebra.core.relational.RelationalTransformable; -import org.polypheny.db.catalog.entity.CatalogGraphDatabase; +import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.plan.AlgOptCluster; -import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptUtil; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.prepare.Prepare.CatalogReader; import org.polypheny.db.rex.RexNode; -public class LogicalLpgModify extends LpgModify implements RelationalTransformable { +public class LogicalLpgModify extends LpgModify implements RelationalTransformable { /** * Subclass of {@link LpgModify} not targeted at any particular engine or calling convention. 
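* As a {@link RelationalTransformable}, this modify is lowered to up to four relational modifies in {@code getRelationalEquivalent} below: nodes, node properties, edges, and edge properties (the property inputs may be null and are then skipped).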
*/ - public LogicalLpgModify( AlgOptCluster cluster, AlgTraitSet traits, CatalogGraphDatabase graph, AlgNode input, Operation operation, List ids, List operations ) { - super( cluster, traits, graph, input, operation, ids, operations, AlgOptUtil.createDmlRowType( Kind.INSERT, cluster.getTypeFactory() ) ); + public LogicalLpgModify( AlgOptCluster cluster, AlgTraitSet traits, CatalogEntity entity, AlgNode input, Operation operation, List ids, List operations ) { + super( cluster, traits, entity, input, operation, ids, operations, AlgOptUtil.createDmlRowType( Kind.INSERT, cluster.getTypeFactory() ) ); } @Override public AlgNode copy( AlgTraitSet traitSet, List inputs ) { - return new LogicalLpgModify( inputs.get( 0 ).getCluster(), traitSet, getGraph(), inputs.get( 0 ), operation, ids, operations ); + return new LogicalLpgModify( inputs.get( 0 ).getCluster(), traitSet, entity, inputs.get( 0 ), operation, ids, operations ); } @Override - public List getRelationalEquivalent( List inputs, List entities, CatalogReader catalogReader ) { + public List getRelationalEquivalent( List inputs, List entities, CatalogReader catalogReader ) { List modifies = new ArrayList<>(); // modify of nodes - Modify nodeModify = RelationalTransformable.getModify( entities.get( 0 ), catalogReader, inputs.get( 0 ), operation ); + RelModify nodeModify = RelationalTransformable.getModify( entities.get( 0 ), catalogReader, inputs.get( 0 ), operation ); modifies.add( nodeModify ); // modify of properties if ( inputs.get( 1 ) != null ) { - Modify nodePropertyModify = RelationalTransformable.getModify( entities.get( 1 ), catalogReader, inputs.get( 1 ), operation ); + RelModify nodePropertyModify = RelationalTransformable.getModify( entities.get( 1 ), catalogReader, inputs.get( 1 ), operation ); modifies.add( nodePropertyModify ); } @@ -70,12 +68,12 @@ public List getRelationalEquivalent( List inputs, List edgeModify = RelationalTransformable.getModify( entities.get( 2 ), catalogReader, inputs.get( 2 ), operation ); modifies.add( edgeModify ); // modify of edge properties if ( inputs.get( 3 ) != null ) { - Modify edgePropertyModify = RelationalTransformable.getModify( entities.get( 3 ), catalogReader, inputs.get( 3 ), operation ); + RelModify edgePropertyModify = RelationalTransformable.getModify( entities.get( 3 ), catalogReader, inputs.get( 3 ), operation ); modifies.add( edgePropertyModify ); } diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgScan.java b/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgScan.java index db5bbcf4ea..15e54802f5 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgScan.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgScan.java @@ -28,10 +28,10 @@ import org.polypheny.db.algebra.logical.relational.LogicalRelScan; import org.polypheny.db.algebra.operators.OperatorName; import org.polypheny.db.algebra.type.AlgDataType; -import org.polypheny.db.catalog.entity.CatalogGraphDatabase; +import org.polypheny.db.catalog.entity.CatalogEntity; +import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.languages.OperatorRegistry; import org.polypheny.db.plan.AlgOptCluster; -import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.prepare.Prepare.CatalogReader; import org.polypheny.db.rex.RexBuilder; @@ -44,14 +44,14 @@ public class LogicalLpgScan extends LpgScan implements RelationalTransformable { /** * Subclass of {@link LpgScan} 
not targeted at any particular engine or calling convention. */ - public LogicalLpgScan( AlgOptCluster cluster, AlgTraitSet traitSet, CatalogGraphDatabase graph, AlgDataType rowType ) { + public LogicalLpgScan( AlgOptCluster cluster, AlgTraitSet traitSet, LogicalGraph graph, AlgDataType rowType ) { super( cluster, traitSet.replace( ModelTrait.GRAPH ), graph ); this.rowType = rowType; } @Override - public List getRelationalEquivalent( List inputs, List entities, CatalogReader catalogReader ) { + public List getRelationalEquivalent( List inputs, List entities, CatalogReader catalogReader ) { assert !entities.isEmpty(); AlgTraitSet out = getTraitSet().replace( ModelTrait.RELATIONAL ); LogicalRelScan nodes = new LogicalRelScan( getCluster(), out, entities.get( 0 ) ); diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgTransformer.java b/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgTransformer.java index a95bef6cc4..353e8c27d0 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgTransformer.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgTransformer.java @@ -19,7 +19,7 @@ import java.util.List; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.AlgShuttle; -import org.polypheny.db.algebra.core.Modify.Operation; +import org.polypheny.db.algebra.core.common.Modify.Operation; import org.polypheny.db.algebra.core.lpg.LpgTransformer; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.plan.AlgOptCluster; diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgValues.java b/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgValues.java index 1c212fc7f6..d8f3e5ac8f 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgValues.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgValues.java @@ -36,8 +36,8 @@ import org.polypheny.db.algebra.type.AlgDataTypeFieldImpl; import org.polypheny.db.algebra.type.AlgDataTypeSystem; import org.polypheny.db.algebra.type.AlgRecordType; +import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.plan.AlgOptCluster; -import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.prepare.Prepare.CatalogReader; import org.polypheny.db.rex.RexLiteral; @@ -114,7 +114,7 @@ public static LogicalLpgValues create( @Override - public List getRelationalEquivalent( List values, List entities, CatalogReader catalogReader ) { + public List getRelationalEquivalent( List values, List entities, CatalogReader catalogReader ) { AlgTraitSet out = traitSet.replace( ModelTrait.RELATIONAL ); AlgOptCluster cluster = AlgOptCluster.create( getCluster().getPlanner(), getCluster().getRexBuilder() ); diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalModify.java b/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelModify.java similarity index 54% rename from core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalModify.java rename to core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelModify.java index ebfba416a8..4c70342ceb 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalModify.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelModify.java @@ -22,20 +22,19 @@ import lombok.experimental.Accessors; import org.polypheny.db.algebra.AlgNode; import 
org.polypheny.db.algebra.AlgShuttle; -import org.polypheny.db.algebra.core.Modify; +import org.polypheny.db.algebra.core.relational.RelModify; +import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.plan.AlgOptCluster; -import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.plan.Convention; -import org.polypheny.db.prepare.Prepare.CatalogReader; import org.polypheny.db.rex.RexNode; import org.polypheny.db.schema.ModelTrait; /** - * Sub-class of {@link Modify} not targeted at any particular engine or calling convention. + * Sub-class of {@link RelModify} not targeted at any particular engine or calling convention. */ -public final class LogicalModify extends Modify { +public final class LogicalRelModify extends RelModify { @Getter @Setter @@ -48,25 +47,30 @@ public final class LogicalModify extends Modify { * * Use {@link #create} unless you know what you're doing. */ - public LogicalModify( AlgOptCluster cluster, AlgTraitSet traitSet, AlgOptEntity table, CatalogReader schema, AlgNode input, Operation operation, List updateColumnList, List sourceExpressionList, boolean flattened ) { - super( cluster, traitSet.replace( ModelTrait.RELATIONAL ), table, schema, input, operation, updateColumnList, sourceExpressionList, flattened ); + public LogicalRelModify( AlgOptCluster cluster, AlgTraitSet traitSet, CatalogEntity table, AlgNode input, Operation operation, List updateColumnList, List sourceExpressionList, boolean flattened ) { + super( cluster, traitSet.replace( ModelTrait.RELATIONAL ), table, input, operation, updateColumnList, sourceExpressionList, flattened ); + } + + + public LogicalRelModify( AlgTraitSet traits, CatalogEntity table, AlgNode child, Operation operation, List targets, List sources ) { + super( child.getCluster(), traits, table, child, operation, targets, sources, false ); } /** * Creates a LogicalModify. 
*/ - public static LogicalModify create( AlgOptEntity table, CatalogReader schema, AlgNode input, Operation operation, List updateColumnList, List sourceExpressionList, boolean flattened ) { + public static LogicalRelModify create( CatalogEntity table, AlgNode input, Operation operation, List updateColumnList, List sourceExpressionList, boolean flattened ) { final AlgOptCluster cluster = input.getCluster(); final AlgTraitSet traitSet = cluster.traitSetOf( Convention.NONE ); - return new LogicalModify( cluster, traitSet, table, schema, input, operation, updateColumnList, sourceExpressionList, flattened ); + return new LogicalRelModify( cluster, traitSet, table, input, operation, updateColumnList, sourceExpressionList, flattened ); } @Override - public LogicalModify copy( AlgTraitSet traitSet, List inputs ) { + public LogicalRelModify copy( AlgTraitSet traitSet, List inputs ) { assert traitSet.containsIfApplicable( Convention.NONE ); - return new LogicalModify( getCluster(), traitSet, table, catalogReader, sole( inputs ), getOperation(), getUpdateColumnList(), getSourceExpressionList(), isFlattened() ).isStreamed( isStreamed ); + return new LogicalRelModify( getCluster(), traitSet, entity, sole( inputs ), getOperation(), getUpdateColumnList(), getSourceExpressionList(), isFlattened() ).isStreamed( isStreamed ); } diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelScan.java b/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelScan.java index 62647d8dc2..8b3f0ea26e 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelScan.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelScan.java @@ -39,7 +39,8 @@ import org.polypheny.db.algebra.AlgCollationTraitDef; import org.polypheny.db.algebra.AlgInput; import org.polypheny.db.algebra.AlgNode; -import org.polypheny.db.algebra.core.Scan; +import org.polypheny.db.algebra.core.relational.RelScan; +import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgTraitSet; @@ -69,7 +70,7 @@ * * can. It is the optimizer's responsibility to find these ways, by applying transformation rules. */ -public final class LogicalRelScan extends Scan { +public final class LogicalRelScan extends RelScan { /** @@ -77,7 +78,7 @@ public final class LogicalRelScan extends Scan { * * Use {@link #create} unless you know what you're doing. 
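* Unlike {@link #create}, this constructor takes the trait set as-is and does not derive the collation trait from the entity.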
*/ - public LogicalRelScan( AlgOptCluster cluster, AlgTraitSet traitSet, AlgOptEntity table ) { + public LogicalRelScan( AlgOptCluster cluster, AlgTraitSet traitSet, CatalogEntity table ) { super( cluster, traitSet, table ); } @@ -104,8 +105,7 @@ public AlgNode copy( AlgTraitSet traitSet, List inputs ) { * @param cluster Cluster * @param algOptEntity Table */ - public static LogicalRelScan create( AlgOptCluster cluster, final AlgOptEntity algOptEntity ) { - final Entity entity = algOptEntity.unwrap( Entity.class ); + public static LogicalRelScan create( AlgOptCluster cluster, final CatalogEntity entity ) { final AlgTraitSet traitSet = cluster.traitSetOf( Convention.NONE ) @@ -114,12 +114,12 @@ public static LogicalRelScan create( AlgOptCluster cluster, final AlgOptEntity a AlgCollationTraitDef.INSTANCE, () -> { if ( entity != null ) { - return entity.getStatistic().getCollations(); + return entity.getCollations(); } return ImmutableList.of(); } ); - return new LogicalRelScan( cluster, traitSet, algOptEntity ); + return new LogicalRelScan( cluster, traitSet, entity ); } } diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelViewScan.java b/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelViewScan.java index 589370ae28..af78f0b967 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelViewScan.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelViewScan.java @@ -23,8 +23,9 @@ import org.polypheny.db.algebra.AlgCollation; import org.polypheny.db.algebra.AlgCollationTraitDef; import org.polypheny.db.algebra.AlgNode; -import org.polypheny.db.algebra.core.Scan; +import org.polypheny.db.algebra.core.relational.RelScan; import org.polypheny.db.algebra.type.AlgDataType; +import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.catalog.entity.CatalogView; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgOptEntity; @@ -35,7 +36,7 @@ import org.polypheny.db.schema.Entity; -public class LogicalRelViewScan extends Scan { +public class LogicalRelViewScan extends RelScan { @Getter private final AlgNode algNode; @@ -43,15 +44,14 @@ public class LogicalRelViewScan extends Scan { private final AlgCollation algCollation; - public LogicalRelViewScan( AlgOptCluster cluster, AlgTraitSet traitSet, AlgOptEntity table, AlgNode algNode, AlgCollation algCollation ) { + public LogicalRelViewScan( AlgOptCluster cluster, AlgTraitSet traitSet, CatalogEntity table, AlgNode algNode, AlgCollation algCollation ) { super( cluster, traitSet, table ); this.algNode = algNode; this.algCollation = algCollation; } - public static AlgNode create( AlgOptCluster cluster, final AlgOptEntity algOptEntity ) { - final Entity entity = algOptEntity.unwrap( Entity.class ); + public static AlgNode create( AlgOptCluster cluster, final CatalogEntity entity ) { final AlgTraitSet traitSet = cluster.traitSetOf( Convention.NONE ) @@ -59,15 +59,15 @@ public static AlgNode create( AlgOptCluster cluster, final AlgOptEntity algOptEn AlgCollationTraitDef.INSTANCE, () -> { if ( entity != null ) { - return entity.getStatistic().getCollations(); + return entity.getCollations(); } return ImmutableList.of(); } ); - CatalogView catalogView = (CatalogView) algOptEntity.getCatalogEntity(); + CatalogView catalogView = entity.unwrap( CatalogView.class ); AlgCollation algCollation = catalogView.getAlgCollation(); - return new LogicalRelViewScan( cluster, traitSet, algOptEntity, 
catalogView.prepareView( cluster ), algCollation ); + return new LogicalRelViewScan( cluster, traitSet, entity, catalogView.prepareView( cluster ), algCollation ); } diff --git a/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdAllPredicates.java b/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdAllPredicates.java index 10e895bb54..45648b5833 100644 --- a/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdAllPredicates.java +++ b/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdAllPredicates.java @@ -52,9 +52,9 @@ import org.polypheny.db.algebra.core.Join; import org.polypheny.db.algebra.core.JoinAlgType; import org.polypheny.db.algebra.core.Project; -import org.polypheny.db.algebra.core.Scan; import org.polypheny.db.algebra.core.Sort; import org.polypheny.db.algebra.core.Union; +import org.polypheny.db.algebra.core.relational.RelScan; import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.plan.AlgOptPredicateList; import org.polypheny.db.plan.AlgOptUtil; @@ -114,7 +114,7 @@ public AlgOptPredicateList getAllPredicates( AlgSubset alg, AlgMetadataQuery mq /** * Extract predicates for a table scan. */ - public AlgOptPredicateList getAllPredicates( Scan table, AlgMetadataQuery mq ) { + public AlgOptPredicateList getAllPredicates( RelScan table, AlgMetadataQuery mq ) { return AlgOptPredicateList.EMPTY; } diff --git a/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdCollation.java b/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdCollation.java index 5fa6746635..898bd09a2f 100644 --- a/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdCollation.java +++ b/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdCollation.java @@ -66,12 +66,12 @@ import org.polypheny.db.algebra.core.JoinAlgType; import org.polypheny.db.algebra.core.Minus; import org.polypheny.db.algebra.core.Project; -import org.polypheny.db.algebra.core.Scan; import org.polypheny.db.algebra.core.SemiJoin; import org.polypheny.db.algebra.core.Sort; import org.polypheny.db.algebra.core.SortExchange; import org.polypheny.db.algebra.core.Values; import org.polypheny.db.algebra.core.Window; +import org.polypheny.db.algebra.core.relational.RelScan; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.hep.HepAlgVertex; @@ -135,7 +135,7 @@ public ImmutableList collations( Filter alg, AlgMetadataQuery mq ) } - public ImmutableList collations( Scan scan, AlgMetadataQuery mq ) { + public ImmutableList collations( RelScan scan, AlgMetadataQuery mq ) { return ImmutableList.copyOf( table( scan.getEntity() ) ); } @@ -202,7 +202,7 @@ public ImmutableList collations( AlgSubset alg, AlgMetadataQuery m /** - * Helper method to determine a {@link Scan}'s collation. + * Helper method to determine a {@link RelScan}'s collation. 
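* A bare scan imposes no ordering of its own, so this simply forwards the collation list declared by the underlying table.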
*/ public static List table( AlgOptEntity table ) { return table.getCollationList(); diff --git a/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdColumnUniqueness.java b/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdColumnUniqueness.java index 2f327c91bc..51476f4a1f 100644 --- a/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdColumnUniqueness.java +++ b/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdColumnUniqueness.java @@ -50,11 +50,11 @@ import org.polypheny.db.algebra.core.JoinInfo; import org.polypheny.db.algebra.core.Minus; import org.polypheny.db.algebra.core.Project; -import org.polypheny.db.algebra.core.Scan; import org.polypheny.db.algebra.core.SemiJoin; import org.polypheny.db.algebra.core.SetOp; import org.polypheny.db.algebra.core.Sort; import org.polypheny.db.algebra.core.Values; +import org.polypheny.db.algebra.core.relational.RelScan; import org.polypheny.db.algebra.operators.OperatorName; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; @@ -88,7 +88,7 @@ public MetadataDef getDef() { } - public Boolean areColumnsUnique( Scan alg, AlgMetadataQuery mq, ImmutableBitSet columns, boolean ignoreNulls ) { + public Boolean areColumnsUnique( RelScan alg, AlgMetadataQuery mq, ImmutableBitSet columns, boolean ignoreNulls ) { return alg.getEntity().isKey( columns ); } @@ -327,7 +327,7 @@ public Boolean areColumnsUnique( AlgSubset alg, AlgMetadataQuery mq, ImmutableBi if ( alg2 instanceof Aggregate || alg2 instanceof Filter || alg2 instanceof Values - || alg2 instanceof Scan + || alg2 instanceof RelScan || simplyProjects( alg2, columns ) ) { try { final Boolean unique = mq.areColumnsUnique( alg2, columns, ignoreNulls ); diff --git a/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdDistribution.java b/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdDistribution.java index 27f4c33bc4..736e012a64 100644 --- a/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdDistribution.java +++ b/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdDistribution.java @@ -44,10 +44,10 @@ import org.polypheny.db.algebra.core.Exchange; import org.polypheny.db.algebra.core.Filter; import org.polypheny.db.algebra.core.Project; -import org.polypheny.db.algebra.core.Scan; import org.polypheny.db.algebra.core.SetOp; import org.polypheny.db.algebra.core.Sort; import org.polypheny.db.algebra.core.Values; +import org.polypheny.db.algebra.core.relational.RelScan; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.hep.HepAlgVertex; @@ -102,7 +102,7 @@ public AlgDistribution distribution( SetOp alg, AlgMetadataQuery mq ) { } - public AlgDistribution distribution( Scan scan, AlgMetadataQuery mq ) { + public AlgDistribution distribution( RelScan scan, AlgMetadataQuery mq ) { return table( scan.getEntity() ); } @@ -128,7 +128,7 @@ public AlgDistribution distribution( HepAlgVertex alg, AlgMetadataQuery mq ) { /** - * Helper method to determine a {@link Scan}'s distribution. + * Helper method to determine a {@link RelScan}'s distribution. 
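* Analogous to the collation helper: the scan simply inherits whatever distribution the underlying table declares.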
*/ public static AlgDistribution table( AlgOptEntity table ) { return table.getDistribution(); diff --git a/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdExpressionLineage.java b/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdExpressionLineage.java index 1a393c2b47..b8328a988e 100644 --- a/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdExpressionLineage.java +++ b/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdExpressionLineage.java @@ -56,9 +56,9 @@ import org.polypheny.db.algebra.core.Join; import org.polypheny.db.algebra.core.JoinAlgType; import org.polypheny.db.algebra.core.Project; -import org.polypheny.db.algebra.core.Scan; import org.polypheny.db.algebra.core.Sort; import org.polypheny.db.algebra.core.Union; +import org.polypheny.db.algebra.core.relational.RelScan; import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.plan.AlgOptUtil; import org.polypheny.db.plan.hep.HepAlgVertex; @@ -80,7 +80,7 @@ * * The goal of this provider is to infer the lineage for the given expression. * - * The output expressions might contain references to columns produced by {@link Scan} operators ({@link RexTableInputRef}). In turn, each Scan operator is identified uniquely + * The output expressions might contain references to columns produced by {@link RelScan} operators ({@link RexTableInputRef}). In turn, each Scan operator is identified uniquely * by a {@link AlgTableRef} containing its qualified name and an identifier. * * If the lineage cannot be inferred, we return null. @@ -117,11 +117,11 @@ public Set getExpressionLineage( AlgSubset alg, AlgMetadataQuery mq, Re /** - * Expression lineage from {@link Scan}. + * Expression lineage from {@link RelScan}. * * We extract the fields referenced by the expression and we express them using {@link RexTableInputRef}. */ - public Set getExpressionLineage( Scan alg, AlgMetadataQuery mq, RexNode outputExpression ) { + public Set getExpressionLineage( RelScan alg, AlgMetadataQuery mq, RexNode outputExpression ) { final RexBuilder rexBuilder = alg.getCluster().getRexBuilder(); // Extract input fields referenced by expression diff --git a/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdMaxRowCount.java b/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdMaxRowCount.java index 9f7ac787be..578200ee17 100644 --- a/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdMaxRowCount.java +++ b/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdMaxRowCount.java @@ -42,10 +42,10 @@ import org.polypheny.db.algebra.core.Join; import org.polypheny.db.algebra.core.Minus; import org.polypheny.db.algebra.core.Project; -import org.polypheny.db.algebra.core.Scan; import org.polypheny.db.algebra.core.Sort; import org.polypheny.db.algebra.core.Union; import org.polypheny.db.algebra.core.Values; +import org.polypheny.db.algebra.core.relational.RelScan; import org.polypheny.db.plan.volcano.AlgSubset; import org.polypheny.db.rex.RexLiteral; import org.polypheny.db.util.Bug; @@ -176,7 +176,7 @@ public Double getMaxRowCount( Join alg, AlgMetadataQuery mq ) { } - public Double getMaxRowCount( Scan alg, AlgMetadataQuery mq ) { + public Double getMaxRowCount( RelScan alg, AlgMetadataQuery mq ) { // For typical tables, there is no upper bound to the number of rows. 
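// Positive infinity is therefore always a safe answer; handlers for inherently bounded operators such as Values can give tighter limits.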
return Double.POSITIVE_INFINITY; } diff --git a/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdMinRowCount.java b/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdMinRowCount.java index 1c3d1a0288..489493e406 100644 --- a/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdMinRowCount.java +++ b/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdMinRowCount.java @@ -42,10 +42,10 @@ import org.polypheny.db.algebra.core.Join; import org.polypheny.db.algebra.core.Minus; import org.polypheny.db.algebra.core.Project; -import org.polypheny.db.algebra.core.Scan; import org.polypheny.db.algebra.core.Sort; import org.polypheny.db.algebra.core.Union; import org.polypheny.db.algebra.core.Values; +import org.polypheny.db.algebra.core.relational.RelScan; import org.polypheny.db.plan.volcano.AlgSubset; import org.polypheny.db.rex.RexLiteral; import org.polypheny.db.util.Bug; @@ -156,7 +156,7 @@ public Double getMinRowCount( Join alg, AlgMetadataQuery mq ) { } - public Double getMinRowCount( Scan alg, AlgMetadataQuery mq ) { + public Double getMinRowCount( RelScan alg, AlgMetadataQuery mq ) { return 0D; } diff --git a/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdNodeTypes.java b/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdNodeTypes.java index 72e3f78048..7131d024cf 100644 --- a/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdNodeTypes.java +++ b/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdNodeTypes.java @@ -44,11 +44,11 @@ import org.polypheny.db.algebra.core.Join; import org.polypheny.db.algebra.core.Minus; import org.polypheny.db.algebra.core.Project; -import org.polypheny.db.algebra.core.Scan; import org.polypheny.db.algebra.core.SemiJoin; import org.polypheny.db.algebra.core.Sort; import org.polypheny.db.algebra.core.Union; import org.polypheny.db.algebra.core.Values; +import org.polypheny.db.algebra.core.relational.RelScan; import org.polypheny.db.plan.hep.HepAlgVertex; import org.polypheny.db.plan.volcano.AlgSubset; import org.polypheny.db.util.BuiltInMethod; @@ -142,8 +142,8 @@ public Multimap, AlgNode> getNodeTypes( Aggregate alg, } - public Multimap, AlgNode> getNodeTypes( Scan alg, AlgMetadataQuery mq ) { - return getNodeTypes( alg, Scan.class, mq ); + public Multimap, AlgNode> getNodeTypes( RelScan alg, AlgMetadataQuery mq ) { + return getNodeTypes( alg, RelScan.class, mq ); } diff --git a/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdParallelism.java b/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdParallelism.java index 06f35aacab..0957095dcc 100644 --- a/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdParallelism.java +++ b/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdParallelism.java @@ -36,8 +36,8 @@ import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.core.Exchange; -import org.polypheny.db.algebra.core.Scan; import org.polypheny.db.algebra.core.Values; +import org.polypheny.db.algebra.core.relational.RelScan; import org.polypheny.db.algebra.metadata.BuiltInMetadata.Parallelism; import org.polypheny.db.util.BuiltInMethod; @@ -80,7 +80,7 @@ public Boolean isPhaseTransition( AlgNode alg, AlgMetadataQuery mq ) { } - public Boolean isPhaseTransition( Scan alg, AlgMetadataQuery mq ) { + public Boolean isPhaseTransition( RelScan alg, AlgMetadataQuery mq ) { return true; } diff --git a/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdPredicates.java b/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdPredicates.java index 
ac935537c4..3db3ebe646 100644 --- a/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdPredicates.java +++ b/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdPredicates.java @@ -60,10 +60,10 @@ import org.polypheny.db.algebra.core.Join; import org.polypheny.db.algebra.core.JoinAlgType; import org.polypheny.db.algebra.core.Project; -import org.polypheny.db.algebra.core.Scan; import org.polypheny.db.algebra.core.SemiJoin; import org.polypheny.db.algebra.core.Sort; import org.polypheny.db.algebra.core.Union; +import org.polypheny.db.algebra.core.relational.RelScan; import org.polypheny.db.algebra.operators.OperatorName; import org.polypheny.db.languages.OperatorRegistry; import org.polypheny.db.nodes.Operator; @@ -156,7 +156,7 @@ public AlgOptPredicateList getPredicates( HepAlgVertex alg, AlgMetadataQuery mq /** * Infers predicates for a table scan. */ - public AlgOptPredicateList getPredicates( Scan table, AlgMetadataQuery mq ) { + public AlgOptPredicateList getPredicates( RelScan table, AlgMetadataQuery mq ) { return AlgOptPredicateList.EMPTY; } diff --git a/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdRowCount.java b/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdRowCount.java index cbc3ab646e..dffdc093f0 100644 --- a/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdRowCount.java +++ b/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdRowCount.java @@ -45,11 +45,11 @@ import org.polypheny.db.algebra.core.Join; import org.polypheny.db.algebra.core.Minus; import org.polypheny.db.algebra.core.Project; -import org.polypheny.db.algebra.core.Scan; import org.polypheny.db.algebra.core.SemiJoin; import org.polypheny.db.algebra.core.Sort; import org.polypheny.db.algebra.core.Union; import org.polypheny.db.algebra.core.Values; +import org.polypheny.db.algebra.core.relational.RelScan; import org.polypheny.db.plan.volcano.AlgSubset; import org.polypheny.db.rex.RexDynamicParam; import org.polypheny.db.rex.RexLiteral; @@ -241,7 +241,7 @@ public Double getRowCount( Aggregate alg, AlgMetadataQuery mq ) { } - public Double getRowCount( Scan alg, AlgMetadataQuery mq ) { + public Double getRowCount( RelScan alg, AlgMetadataQuery mq ) { return alg.estimateRowCount( mq ); } diff --git a/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdSize.java b/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdSize.java index c4719413bf..926098a4d8 100644 --- a/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdSize.java +++ b/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdSize.java @@ -47,11 +47,11 @@ import org.polypheny.db.algebra.core.Join; import org.polypheny.db.algebra.core.Minus; import org.polypheny.db.algebra.core.Project; -import org.polypheny.db.algebra.core.Scan; import org.polypheny.db.algebra.core.SemiJoin; import org.polypheny.db.algebra.core.Sort; import org.polypheny.db.algebra.core.Union; import org.polypheny.db.algebra.core.Values; +import org.polypheny.db.algebra.core.relational.RelScan; import org.polypheny.db.algebra.metadata.BuiltInMetadata.Size; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeField; @@ -178,7 +178,7 @@ public List averageColumnSizes( Values alg, AlgMetadataQuery mq ) { } - public List averageColumnSizes( Scan alg, AlgMetadataQuery mq ) { + public List averageColumnSizes( RelScan alg, AlgMetadataQuery mq ) { final List fields = alg.getRowType().getFieldList(); final ImmutableList.Builder list = ImmutableList.builder(); for ( AlgDataTypeField 
field : fields ) { diff --git a/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdTableReferences.java b/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdTableReferences.java index 57d8a66f2e..00902338b3 100644 --- a/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdTableReferences.java +++ b/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdTableReferences.java @@ -49,9 +49,9 @@ import org.polypheny.db.algebra.core.Filter; import org.polypheny.db.algebra.core.Join; import org.polypheny.db.algebra.core.Project; -import org.polypheny.db.algebra.core.Scan; import org.polypheny.db.algebra.core.Sort; import org.polypheny.db.algebra.core.Union; +import org.polypheny.db.algebra.core.relational.RelScan; import org.polypheny.db.plan.hep.HepAlgVertex; import org.polypheny.db.plan.volcano.AlgSubset; import org.polypheny.db.rex.RexTableInputRef.AlgTableRef; @@ -102,7 +102,7 @@ public Set getTableReferences( AlgSubset alg, AlgMetadataQuery mq ) /** * Scan table reference. */ - public Set getTableReferences( Scan alg, AlgMetadataQuery mq ) { + public Set getTableReferences( RelScan alg, AlgMetadataQuery mq ) { return ImmutableSet.of( AlgTableRef.of( alg.getEntity(), 0 ) ); } diff --git a/core/src/main/java/org/polypheny/db/algebra/metadata/BuiltInMetadata.java b/core/src/main/java/org/polypheny/db/algebra/metadata/BuiltInMetadata.java index f60f4ae381..7593f13ed9 100644 --- a/core/src/main/java/org/polypheny/db/algebra/metadata/BuiltInMetadata.java +++ b/core/src/main/java/org/polypheny/db/algebra/metadata/BuiltInMetadata.java @@ -42,7 +42,7 @@ import org.polypheny.db.algebra.AlgDistribution; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.constant.ExplainLevel; -import org.polypheny.db.algebra.core.Scan; +import org.polypheny.db.algebra.core.relational.RelScan; import org.polypheny.db.plan.AlgOptCost; import org.polypheny.db.plan.AlgOptPredicateList; import org.polypheny.db.rex.RexNode; @@ -639,7 +639,7 @@ interface Handler extends MetadataHandler { * Metadata about the predicates that hold in the rows emitted from a relational expression. * * The difference with respect to {@link Predicates} provider is that this provider tries to extract ALL predicates even if they are not applied on the output expressions of the relational expression; we rely - * on {@link RexTableInputRef} to reference origin columns in {@link Scan} for the result predicates. + * on {@link RexTableInputRef} to reference origin columns in {@link RelScan} for the result predicates. */ public interface AllPredicates extends Metadata { @@ -674,7 +674,7 @@ public interface Parallelism extends Metadata { /** * Returns whether each physical operator implementing this relational expression belongs to a different process than its inputs. * - * A collection of operators processing all of the splits of a particular stage in the query pipeline is called a "phase". A phase starts with a leaf node such as a {@link Scan}, + * A collection of operators processing all of the splits of a particular stage in the query pipeline is called a "phase". A phase starts with a leaf node such as a {@link RelScan}, * or with a phase-change node such as an {@link org.polypheny.db.algebra.core.Exchange}. Hadoop's shuffle operator (a form of sort-exchange) causes data to be sent across the network. 
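* The scan handler in {@code AlgMdParallelism}, for instance, answers true for this method, since a scan always begins a new phase.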
*/ Boolean isPhaseTransition(); diff --git a/core/src/main/java/org/polypheny/db/algebra/metadata/JaninoRelMetadataProvider.java b/core/src/main/java/org/polypheny/db/algebra/metadata/JaninoRelMetadataProvider.java index 151aa3d3ef..27c114875d 100644 --- a/core/src/main/java/org/polypheny/db/algebra/metadata/JaninoRelMetadataProvider.java +++ b/core/src/main/java/org/polypheny/db/algebra/metadata/JaninoRelMetadataProvider.java @@ -107,7 +107,7 @@ import org.polypheny.db.algebra.logical.relational.LogicalIntersect; import org.polypheny.db.algebra.logical.relational.LogicalJoin; import org.polypheny.db.algebra.logical.relational.LogicalMinus; -import org.polypheny.db.algebra.logical.relational.LogicalModify; +import org.polypheny.db.algebra.logical.relational.LogicalRelModify; import org.polypheny.db.algebra.logical.relational.LogicalProject; import org.polypheny.db.algebra.logical.relational.LogicalRelScan; import org.polypheny.db.algebra.logical.relational.LogicalSort; @@ -172,7 +172,7 @@ public class JaninoRelMetadataProvider implements AlgMetadataProvider { LogicalProject.class, LogicalSort.class, LogicalTableFunctionScan.class, - LogicalModify.class, + LogicalRelModify.class, LogicalRelScan.class, LogicalUnion.class, LogicalValues.class, diff --git a/core/src/main/java/org/polypheny/db/algebra/mutable/MutableAlgs.java b/core/src/main/java/org/polypheny/db/algebra/mutable/MutableAlgs.java index 169a81593d..fd23b533e4 100644 --- a/core/src/main/java/org/polypheny/db/algebra/mutable/MutableAlgs.java +++ b/core/src/main/java/org/polypheny/db/algebra/mutable/MutableAlgs.java @@ -49,10 +49,10 @@ import org.polypheny.db.algebra.core.Intersect; import org.polypheny.db.algebra.core.Join; import org.polypheny.db.algebra.core.Minus; -import org.polypheny.db.algebra.core.Modify; +import org.polypheny.db.algebra.core.relational.RelModify; import org.polypheny.db.algebra.core.Project; import org.polypheny.db.algebra.core.Sample; -import org.polypheny.db.algebra.core.Scan; +import org.polypheny.db.algebra.core.relational.RelScan; import org.polypheny.db.algebra.core.SemiJoin; import org.polypheny.db.algebra.core.Sort; import org.polypheny.db.algebra.core.TableFunctionScan; @@ -63,7 +63,7 @@ import org.polypheny.db.algebra.logical.relational.LogicalCalc; import org.polypheny.db.algebra.logical.relational.LogicalCorrelate; import org.polypheny.db.algebra.logical.relational.LogicalExchange; -import org.polypheny.db.algebra.logical.relational.LogicalModify; +import org.polypheny.db.algebra.logical.relational.LogicalRelModify; import org.polypheny.db.algebra.logical.relational.LogicalSort; import org.polypheny.db.algebra.logical.relational.LogicalTableFunctionScan; import org.polypheny.db.algebra.logical.relational.LogicalWindow; @@ -249,9 +249,8 @@ public static AlgNode fromMutable( MutableAlg node, AlgBuilder algBuilder ) { } case TABLE_MODIFY: final MutableTableModify modify = (MutableTableModify) node; - return LogicalModify.create( + return LogicalRelModify.create( modify.table, - modify.catalogReader, fromMutable( modify.getInput(), algBuilder ), modify.operation, modify.updateColumnList, @@ -323,8 +322,8 @@ public static MutableAlg toMutable( AlgNode alg ) { return toMutable( Util.first( ((AlgSubset) alg).getBest(), ((AlgSubset) alg).getOriginal() ) ); } - if ( alg instanceof Scan ) { - return MutableScan.of( (Scan) alg ); + if ( alg instanceof RelScan ) { + return MutableScan.of( (RelScan) alg ); } if ( alg instanceof Values ) { return MutableValues.of( (Values) alg ); @@ -374,8 +373,8 @@ public 
static MutableAlg toMutable( AlgNode alg ) { final MutableAlg input = toMutable( window.getInput() ); return MutableWindow.of( window.getRowType(), input, window.groups, window.getConstants() ); } - if ( alg instanceof Modify ) { - final Modify modify = (Modify) alg; + if ( alg instanceof RelModify ) { + final RelModify modify = (RelModify) alg; final MutableAlg input = toMutable( modify.getInput() ); return MutableTableModify.of( modify.getRowType(), diff --git a/core/src/main/java/org/polypheny/db/algebra/mutable/MutableScan.java b/core/src/main/java/org/polypheny/db/algebra/mutable/MutableScan.java index d6d9a1436f..4542a5433d 100644 --- a/core/src/main/java/org/polypheny/db/algebra/mutable/MutableScan.java +++ b/core/src/main/java/org/polypheny/db/algebra/mutable/MutableScan.java @@ -34,15 +34,15 @@ package org.polypheny.db.algebra.mutable; -import org.polypheny.db.algebra.core.Scan; +import org.polypheny.db.algebra.core.relational.RelScan; /** - * Mutable equivalent of {@link Scan}. + * Mutable equivalent of {@link RelScan}. */ public class MutableScan extends MutableLeafAlg { - private MutableScan( Scan alg ) { + private MutableScan( RelScan alg ) { super( MutableAlgType.TABLE_SCAN, alg ); } @@ -52,7 +52,7 @@ private MutableScan( Scan alg ) { * * @param scan The underlying Scan object */ - public static MutableScan of( Scan scan ) { + public static MutableScan of( RelScan scan ) { return new MutableScan( scan ); } @@ -79,7 +79,7 @@ public StringBuilder digest( StringBuilder buf ) { @Override public MutableAlg clone() { - return MutableScan.of( (Scan) alg ); + return MutableScan.of( (RelScan) alg ); } } diff --git a/core/src/main/java/org/polypheny/db/algebra/mutable/MutableTableModify.java b/core/src/main/java/org/polypheny/db/algebra/mutable/MutableTableModify.java index 9babdd7616..4074d6f6d8 100644 --- a/core/src/main/java/org/polypheny/db/algebra/mutable/MutableTableModify.java +++ b/core/src/main/java/org/polypheny/db/algebra/mutable/MutableTableModify.java @@ -36,8 +36,8 @@ import java.util.List; import java.util.Objects; -import org.polypheny.db.algebra.core.Modify; -import org.polypheny.db.algebra.core.Modify.Operation; +import org.polypheny.db.algebra.core.relational.RelModify; +import org.polypheny.db.algebra.core.common.Modify.Operation; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.prepare.Prepare; @@ -45,7 +45,7 @@ /** - * Mutable equivalent of {@link Modify}. + * Mutable equivalent of {@link RelModify}. 
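* Besides its input, it records the target table, operation, update column list, and source expressions, which is what {@code MutableAlgs#fromMutable} needs to rebuild a logical modify.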
*/ public class MutableTableModify extends MutableSingleAlg { diff --git a/core/src/main/java/org/polypheny/db/algebra/rules/FilterScanRule.java b/core/src/main/java/org/polypheny/db/algebra/rules/FilterScanRule.java index 385b532606..733dd1eca5 100644 --- a/core/src/main/java/org/polypheny/db/algebra/rules/FilterScanRule.java +++ b/core/src/main/java/org/polypheny/db/algebra/rules/FilterScanRule.java @@ -38,7 +38,7 @@ import org.polypheny.db.adapter.enumerable.EnumerableInterpreter; import org.polypheny.db.algebra.core.AlgFactories; import org.polypheny.db.algebra.core.Filter; -import org.polypheny.db.algebra.core.Scan; +import org.polypheny.db.algebra.core.relational.RelScan; import org.polypheny.db.interpreter.Bindables.BindableScan; import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptRule; @@ -57,7 +57,7 @@ /** * Planner rule that converts * a {@link Filter} - * on a {@link Scan} + * on a {@link RelScan} * of a {@link FilterableEntity} * or a {@link ProjectableFilterableEntity} * to a {@link BindableScan}. @@ -73,13 +73,13 @@ public abstract class FilterScanRule extends AlgOptRule { */ public static final FilterScanRule INSTANCE = new FilterScanRule( - operand( Filter.class, operandJ( Scan.class, null, FilterScanRule::test, none() ) ), + operand( Filter.class, operandJ( RelScan.class, null, FilterScanRule::test, none() ) ), AlgFactories.LOGICAL_BUILDER, "FilterScanRule" ) { @Override public void onMatch( AlgOptRuleCall call ) { final Filter filter = call.alg( 0 ); - final Scan scan = call.alg( 1 ); + final RelScan scan = call.alg( 1 ); apply( call, filter, scan ); } }; @@ -94,14 +94,14 @@ public void onMatch( AlgOptRuleCall call ) { operand( EnumerableInterpreter.class, operandJ( - Scan.class, + RelScan.class, null, FilterScanRule::test, none() ) ) ), AlgFactories.LOGICAL_BUILDER, "FilterScanRule:interpreter" ) { @Override public void onMatch( AlgOptRuleCall call ) { final Filter filter = call.alg( 0 ); - final Scan scan = call.alg( 2 ); + final RelScan scan = call.alg( 2 ); apply( call, filter, scan ); } }; @@ -115,14 +115,14 @@ protected FilterScanRule( AlgOptRuleOperand operand, AlgBuilderFactory algBuilde } - public static boolean test( Scan scan ) { + public static boolean test( RelScan scan ) { // We can only push filters into a FilterableTable or ProjectableFilterableTable. 
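// Any other entity cannot evaluate predicates itself, so the rule must not fire and the filter stays above the scan.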
final AlgOptEntity table = scan.getEntity(); return table.unwrap( FilterableEntity.class ) != null || table.unwrap( ProjectableFilterableEntity.class ) != null; } - protected void apply( AlgOptRuleCall call, Filter filter, Scan scan ) { + protected void apply( AlgOptRuleCall call, Filter filter, RelScan scan ) { final ImmutableIntList projects; final ImmutableList.Builder filters = ImmutableList.builder(); if ( scan instanceof BindableScan ) { diff --git a/core/src/main/java/org/polypheny/db/algebra/rules/ProjectScanRule.java b/core/src/main/java/org/polypheny/db/algebra/rules/ProjectScanRule.java index b05ff0dc37..631cef93ed 100644 --- a/core/src/main/java/org/polypheny/db/algebra/rules/ProjectScanRule.java +++ b/core/src/main/java/org/polypheny/db/algebra/rules/ProjectScanRule.java @@ -39,7 +39,7 @@ import org.polypheny.db.adapter.enumerable.EnumerableInterpreter; import org.polypheny.db.algebra.core.AlgFactories; import org.polypheny.db.algebra.core.Project; -import org.polypheny.db.algebra.core.Scan; +import org.polypheny.db.algebra.core.relational.RelScan; import org.polypheny.db.interpreter.Bindables.BindableScan; import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptRule; @@ -56,7 +56,7 @@ /** * Planner rule that converts a {@link Project} - * on a {@link Scan} + * on a {@link RelScan} * of a {@link ProjectableFilterableEntity} * to a {@link BindableScan}. * @@ -72,13 +72,13 @@ public abstract class ProjectScanRule extends AlgOptRule { */ public static final ProjectScanRule INSTANCE = new ProjectScanRule( - operand( Project.class, operandJ( Scan.class, null, ProjectScanRule::test, none() ) ), + operand( Project.class, operandJ( RelScan.class, null, ProjectScanRule::test, none() ) ), AlgFactories.LOGICAL_BUILDER, "ProjectScanRule" ) { @Override public void onMatch( AlgOptRuleCall call ) { final Project project = call.alg( 0 ); - final Scan scan = call.alg( 1 ); + final RelScan scan = call.alg( 1 ); apply( call, project, scan ); } }; @@ -88,13 +88,13 @@ public void onMatch( AlgOptRuleCall call ) { */ public static final ProjectScanRule INTERPRETER = new ProjectScanRule( - operand( Project.class, operand( EnumerableInterpreter.class, operandJ( Scan.class, null, ProjectScanRule::test, none() ) ) ), + operand( Project.class, operand( EnumerableInterpreter.class, operandJ( RelScan.class, null, ProjectScanRule::test, none() ) ) ), AlgFactories.LOGICAL_BUILDER, "ProjectScanRule:interpreter" ) { @Override public void onMatch( AlgOptRuleCall call ) { final Project project = call.alg( 0 ); - final Scan scan = call.alg( 2 ); + final RelScan scan = call.alg( 2 ); apply( call, project, scan ); } }; @@ -108,14 +108,14 @@ public ProjectScanRule( AlgOptRuleOperand operand, AlgBuilderFactory algBuilderF } - protected static boolean test( Scan scan ) { + protected static boolean test( RelScan scan ) { // We can only push projects into a ProjectableFilterableTable. 
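// Note that a plain FilterableEntity does not qualify here: it accepts filters but not column projections.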
final AlgOptEntity table = scan.getEntity(); return table.unwrap( ProjectableFilterableEntity.class ) != null; } - protected void apply( AlgOptRuleCall call, Project project, Scan scan ) { + protected void apply( AlgOptRuleCall call, Project project, RelScan scan ) { final AlgOptEntity table = scan.getEntity(); assert table.unwrap( ProjectableFilterableEntity.class ) != null; diff --git a/core/src/main/java/org/polypheny/db/algebra/stream/Delta.java b/core/src/main/java/org/polypheny/db/algebra/stream/Delta.java index 7442f8284a..8d1a5231cd 100644 --- a/core/src/main/java/org/polypheny/db/algebra/stream/Delta.java +++ b/core/src/main/java/org/polypheny/db/algebra/stream/Delta.java @@ -37,7 +37,7 @@ import org.polypheny.db.algebra.AlgInput; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.SingleAlg; -import org.polypheny.db.algebra.core.Scan; +import org.polypheny.db.algebra.core.relational.RelScan; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgTraitSet; @@ -45,7 +45,7 @@ /** * Relational operator that converts a relation to a stream. * - * For example, if {@code Orders} is a table, and {@link Scan}(Orders) is a relational operator that returns the current contents of the table, then {@link Delta}(Scan(Orders)) is a relational operator that returns + * For example, if {@code Orders} is a table, and {@link RelScan}(Orders) is a relational operator that returns the current contents of the table, then {@link Delta}(Scan(Orders)) is a relational operator that returns * all inserts into the table. * * If unrestricted, Delta returns all previous inserts into the table (from time -∞ to now) and all future inserts into the table (from now to +∞) and never terminates. diff --git a/core/src/main/java/org/polypheny/db/algebra/stream/StreamRules.java b/core/src/main/java/org/polypheny/db/algebra/stream/StreamRules.java index ffff9fa5b7..530637b51a 100644 --- a/core/src/main/java/org/polypheny/db/algebra/stream/StreamRules.java +++ b/core/src/main/java/org/polypheny/db/algebra/stream/StreamRules.java @@ -43,7 +43,7 @@ import org.polypheny.db.algebra.core.Filter; import org.polypheny.db.algebra.core.Join; import org.polypheny.db.algebra.core.Project; -import org.polypheny.db.algebra.core.Scan; +import org.polypheny.db.algebra.core.relational.RelScan; import org.polypheny.db.algebra.core.Sort; import org.polypheny.db.algebra.core.Union; import org.polypheny.db.algebra.core.Values; @@ -55,7 +55,7 @@ import org.polypheny.db.algebra.logical.relational.LogicalSort; import org.polypheny.db.algebra.logical.relational.LogicalUnion; import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; -import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptRule; @@ -246,7 +246,7 @@ public void onMatch( AlgOptRuleCall call ) { /** - * Planner rule that pushes a {@link Delta} into a {@link Scan} of a {@link StreamableEntity}. + * Planner rule that pushes a {@link Delta} into a {@link RelScan} of a {@link StreamableEntity}. * * Very likely, the stream was only represented as a table for uniformity with the other relations in the system. The Delta disappears and the stream can be implemented directly. 
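* Concretely, the rule replaces the scanned table with the entity returned by {@code StreamableEntity#stream()} and drops the {@link Delta} node.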
*/ @@ -259,7 +259,7 @@ public static class DeltaScanRule extends AlgOptRule { */ public DeltaScanRule( AlgBuilderFactory algBuilderFactory ) { super( - operand( Delta.class, operand( Scan.class, none() ) ), + operand( Delta.class, operand( RelScan.class, none() ) ), algBuilderFactory, null ); } @@ -267,13 +267,13 @@ public DeltaScanRule( AlgBuilderFactory algBuilderFactory ) { @Override public void onMatch( AlgOptRuleCall call ) { final Delta delta = call.alg( 0 ); - final Scan scan = call.alg( 1 ); + final RelScan scan = call.alg( 1 ); final AlgOptCluster cluster = delta.getCluster(); final AlgOptEntity algOptEntity = scan.getEntity(); final StreamableEntity streamableTable = algOptEntity.unwrap( StreamableEntity.class ); if ( streamableTable != null ) { final Entity entity1 = streamableTable.stream(); - final CatalogTable catalogTable = algOptEntity.getCatalogEntity().unwrap( CatalogTable.class ); + final LogicalTable catalogTable = algOptEntity.getCatalogEntity().unwrap( LogicalTable.class ); final CatalogPartitionPlacement placement = algOptEntity.getPartitionPlacement().unwrap( CatalogPartitionPlacement.class ); final AlgOptEntity algOptEntity2 = AlgOptEntityImpl.create( algOptEntity.getRelOptSchema(), @@ -290,7 +290,7 @@ public void onMatch( AlgOptRuleCall call ) { /** - * Planner rule that converts {@link Delta} over a {@link Scan} of a table other than {@link StreamableEntity} to an empty {@link Values}. + * Planner rule that converts {@link Delta} over a {@link RelScan} of a table other than {@link StreamableEntity} to an empty {@link Values}. */ public static class DeltaScanToEmptyRule extends AlgOptRule { @@ -301,7 +301,7 @@ public static class DeltaScanToEmptyRule extends AlgOptRule { */ public DeltaScanToEmptyRule( AlgBuilderFactory algBuilderFactory ) { super( - operand( Delta.class, operand( Scan.class, none() ) ), + operand( Delta.class, operand( RelScan.class, none() ) ), algBuilderFactory, null ); } @@ -309,7 +309,7 @@ public DeltaScanToEmptyRule( AlgBuilderFactory algBuilderFactory ) { @Override public void onMatch( AlgOptRuleCall call ) { final Delta delta = call.alg( 0 ); - final Scan scan = call.alg( 1 ); + final RelScan scan = call.alg( 1 ); final AlgOptEntity algOptEntity = scan.getEntity(); final StreamableEntity streamableTable = algOptEntity.unwrap( StreamableEntity.class ); final AlgBuilder builder = call.builder(); diff --git a/core/src/main/java/org/polypheny/db/catalog/Catalog.java b/core/src/main/java/org/polypheny/db/catalog/Catalog.java index a9df8e49ea..c2d254a094 100644 --- a/core/src/main/java/org/polypheny/db/catalog/Catalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/Catalog.java @@ -43,7 +43,6 @@ import org.polypheny.db.catalog.entity.CatalogDataPlacement; import org.polypheny.db.catalog.entity.CatalogDatabase; import org.polypheny.db.catalog.entity.CatalogForeignKey; -import org.polypheny.db.catalog.entity.CatalogGraphDatabase; import org.polypheny.db.catalog.entity.CatalogGraphMapping; import org.polypheny.db.catalog.entity.CatalogGraphPlacement; import org.polypheny.db.catalog.entity.CatalogIndex; @@ -54,10 +53,11 @@ import org.polypheny.db.catalog.entity.CatalogPrimaryKey; import org.polypheny.db.catalog.entity.CatalogQueryInterface; import org.polypheny.db.catalog.entity.CatalogSchema; -import org.polypheny.db.catalog.entity.CatalogTable; import org.polypheny.db.catalog.entity.CatalogUser; import org.polypheny.db.catalog.entity.CatalogView; import org.polypheny.db.catalog.entity.MaterializedCriteria; +import 
org.polypheny.db.catalog.entity.logical.LogicalGraph; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.exceptions.GenericCatalogException; import org.polypheny.db.catalog.exceptions.NoTablePrimaryKeyException; import org.polypheny.db.catalog.exceptions.UnknownAdapterException; @@ -359,7 +359,7 @@ protected final boolean isValidIdentifier( final String str ) { * @param id The id of the graph to return * @return The graph entity with the provided id */ - public abstract CatalogGraphDatabase getGraph( long id ); + public abstract LogicalGraph getGraph( long id ); /** * Get a collection of all graphs, which match the given conditions. @@ -368,7 +368,7 @@ protected final boolean isValidIdentifier( final String str ) { * @param graphName The pattern to which the name has to match, null if every name is matched * @return A collection of all graphs matching */ - public abstract List getGraphs( long databaseId, Pattern graphName ); + public abstract List getGraphs( long databaseId, Pattern graphName ); /** * Add a new alias for a given graph. @@ -411,7 +411,7 @@ protected final boolean isValidIdentifier( final String str ) { * @param tableNamePattern Pattern for the table name. null returns all. * @return List of tables which fit to the specified filters. If there is no table which meets the criteria, an empty list is returned. */ - public abstract List getTables( long schemaId, Pattern tableNamePattern ); + public abstract List getTables( long schemaId, Pattern tableNamePattern ); /** * Get all tables of the specified database which fit to the specified filters. @@ -422,7 +422,7 @@ protected final boolean isValidIdentifier( final String str ) { * @param tableNamePattern Pattern for the table name. null returns all. * @return List of tables which fit to the specified filters. If there is no table which meets the criteria, an empty list is returned. */ - public abstract List getTables( long databaseId, Pattern schemaNamePattern, Pattern tableNamePattern ); + public abstract List getTables( long databaseId, Pattern schemaNamePattern, Pattern tableNamePattern ); /** * Returns the table with the given name in the specified database and schema. @@ -432,7 +432,7 @@ protected final boolean isValidIdentifier( final String str ) { * @param tableName The name of the table * @return The table */ - public abstract CatalogTable getTable( String databaseName, String schemaName, String tableName ) throws UnknownTableException, UnknownDatabaseException, UnknownSchemaException; + public abstract LogicalTable getTable( String databaseName, String schemaName, String tableName ) throws UnknownTableException, UnknownDatabaseException, UnknownSchemaException; /** * Get all tables of the specified database which fit to the specified filters. @@ -443,7 +443,7 @@ protected final boolean isValidIdentifier( final String str ) { * @param tableNamePattern Pattern for the table name. null returns all. * @return List of tables which fit to the specified filters. If there is no table which meets the criteria, an empty list is returned. 
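The signature changes above swap only the returned types; the lookup semantics are unchanged. A hedged usage sketch of the refactored API (assumes a running Polypheny instance; per the javadoc, a null pattern matches every name):

import java.util.List;
import org.polypheny.db.catalog.Catalog;
import org.polypheny.db.catalog.entity.logical.LogicalGraph;
import org.polypheny.db.catalog.entity.logical.LogicalTable;

class CatalogLookupSketch {

    static void listEntities( long databaseId, long schemaId ) {
        Catalog catalog = Catalog.getInstance();
        // null patterns match everything, per the javadoc above.
        List<LogicalGraph> graphs = catalog.getGraphs( databaseId, null );
        List<LogicalTable> tables = catalog.getTables( schemaId, null );
        graphs.forEach( g -> System.out.println( "graph: " + g.name ) );
        tables.forEach( t -> System.out.println( "table: " + t.name ) );
    }

}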
*/ - public abstract List getTables( Pattern databaseNamePattern, Pattern schemaNamePattern, Pattern tableNamePattern ); + public abstract List getTables( Pattern databaseNamePattern, Pattern schemaNamePattern, Pattern tableNamePattern ); /** * Returns the table with the given id @@ -451,7 +451,7 @@ protected final boolean isValidIdentifier( final String str ) { * @param tableId The id of the table * @return The table */ - public abstract CatalogTable getTable( long tableId ); + public abstract LogicalTable getTable( long tableId ); /** * Returns the table with the given name in the specified schema. @@ -461,7 +461,7 @@ protected final boolean isValidIdentifier( final String str ) { * @return The table * @throws UnknownTableException If there is no table with this name in the specified database and schema. */ - public abstract CatalogTable getTable( long schemaId, String tableName ) throws UnknownTableException; + public abstract LogicalTable getTable( long schemaId, String tableName ) throws UnknownTableException; /** * Returns the table with the given name in the specified database and schema. @@ -472,7 +472,7 @@ protected final boolean isValidIdentifier( final String str ) { * @return The table * @throws UnknownTableException If there is no table with this name in the specified database and schema. */ - public abstract CatalogTable getTable( long databaseId, String schemaName, String tableName ) throws UnknownTableException; + public abstract LogicalTable getTable( long databaseId, String schemaName, String tableName ) throws UnknownTableException; /** * Returns the table which is associated with a given partitionId @@ -480,7 +480,7 @@ protected final boolean isValidIdentifier( final String str ) { * @param partitionId to use for lookup * @return CatalogEntity that contains partitionId */ - public abstract CatalogTable getTableFromPartition( long partitionId ); + public abstract LogicalTable getTableFromPartition( long partitionId ); /** * Adds a table to a specified schema. 
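Unlike the Pattern-based listings, the single-table lookups above signal a miss with UnknownTableException instead of returning null. A minimal sketch of adapting that to an Optional (the helper name is hypothetical, and it assumes UnknownTableException lives in org.polypheny.db.catalog.exceptions alongside the other catalog exceptions imported above):

import java.util.Optional;
import org.polypheny.db.catalog.Catalog;
import org.polypheny.db.catalog.entity.logical.LogicalTable;
import org.polypheny.db.catalog.exceptions.UnknownTableException;

class TableLookupSketch {

    // Hypothetical helper, not part of the patch.
    static Optional<LogicalTable> findTable( long schemaId, String tableName ) {
        try {
            return Optional.of( Catalog.getInstance().getTable( schemaId, tableName ) );
        } catch ( UnknownTableException e ) {
            return Optional.empty(); // no table with this name in the schema
        }
    }

}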
@@ -1775,7 +1775,7 @@ protected final boolean isValidIdentifier( final String str ) { * * @return List of tables which need to be periodically processed */ - public abstract List getTablesForPeriodicProcessing(); + public abstract List getTablesForPeriodicProcessing(); /** * Registers a table to be considered for periodic processing diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogEntity.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogEntity.java index 9e96801964..c52a5baa98 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogEntity.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogEntity.java @@ -17,17 +17,17 @@ package org.polypheny.db.catalog.entity; import java.io.Serializable; -import org.apache.calcite.linq4j.Queryable; -import org.polypheny.db.adapter.DataContext; -import org.polypheny.db.algebra.AlgNode; +import java.util.List; +import org.polypheny.db.StatisticsManager; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.catalog.Catalog.EntityType; import org.polypheny.db.catalog.Catalog.NamespaceType; -import org.polypheny.db.plan.AlgOptEntity.ToAlgContext; -import org.polypheny.db.schema.PolyphenyDbSchema; +import org.polypheny.db.catalog.entity.logical.Logical; +import org.polypheny.db.catalog.refactor.CatalogType; +import org.polypheny.db.plan.AlgMultipleTrait; import org.polypheny.db.schema.Wrapper; -public abstract class CatalogEntity implements Wrapper, Serializable { +public abstract class CatalogEntity implements CatalogObject, Wrapper, Serializable, CatalogType, Logical { public final long id; public final EntityType entityType; @@ -60,13 +60,17 @@ public boolean isRolledUp( String fieldName ) { } - public Queryable asQueryable( DataContext root, PolyphenyDbSchema schema, String tableName ) { - throw new UnsupportedOperationException( "Not implemented by store" ); + public double getRowCount() { + Integer count = StatisticsManager.getInstance().rowCountPerTable( id ); + if ( count == null ) { + return 0; + } + return count; } - public AlgNode toAlg( ToAlgContext toAlgContext, CatalogGraphDatabase graph ) { - throw new UnsupportedOperationException( "Not implemented by store" ); + public List getCollations() { + return null; } } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogMaterializedView.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogMaterializedView.java index 0a386ae28a..6fb5d507f9 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogMaterializedView.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogMaterializedView.java @@ -24,6 +24,7 @@ import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.Catalog.EntityType; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.languages.QueryLanguage; import org.polypheny.db.partition.properties.PartitionProperty; @@ -93,7 +94,7 @@ public CatalogMaterializedView( @Override - public CatalogTable getTableWithColumns( ImmutableList newColumnIds ) { + public LogicalTable getTableWithColumns( ImmutableList newColumnIds ) { return new CatalogMaterializedView( id, name, @@ -117,7 +118,7 @@ public CatalogTable getTableWithColumns( ImmutableList newColumnIds ) { @Override - public CatalogTable getConnectedViews( ImmutableList newConnectedViews ) { + public LogicalTable getConnectedViews( ImmutableList newConnectedViews ) { return new CatalogMaterializedView( id, name, @@ 
-141,7 +142,7 @@ public CatalogTable getConnectedViews( ImmutableList newConnectedViews ) { @Override - public CatalogTable getRenamed( String newName ) { + public LogicalTable getRenamed( String newName ) { return new CatalogMaterializedView( id, newName, diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogView.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogView.java index 776018b7df..ba91903f88 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogView.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogView.java @@ -28,13 +28,14 @@ import org.polypheny.db.algebra.logical.relational.LogicalRelViewScan; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.Catalog.EntityType; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.languages.QueryLanguage; import org.polypheny.db.partition.properties.PartitionProperty; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.view.ViewManager.ViewVisitor; -public class CatalogView extends CatalogTable { +public class CatalogView extends LogicalTable { private static final long serialVersionUID = -4771308114962700515L; @@ -75,7 +76,7 @@ public CatalogView( @Override - public CatalogTable getConnectedViews( ImmutableList newConnectedViews ) { + public LogicalTable getConnectedViews( ImmutableList newConnectedViews ) { return new CatalogView( id, name, @@ -97,7 +98,7 @@ public CatalogTable getConnectedViews( ImmutableList newConnectedViews ) { @Override - public CatalogTable getRenamed( String newName ) { + public LogicalTable getRenamed( String newName ) { return new CatalogView( id, newName, @@ -119,7 +120,7 @@ public CatalogTable getRenamed( String newName ) { @Override - public CatalogTable getTableWithColumns( ImmutableList newColumnIds ) { + public LogicalTable getTableWithColumns( ImmutableList newColumnIds ) { return new CatalogView( id, name, diff --git a/core/src/main/java/org/polypheny/db/catalog/refactor/LogicalEntity.java b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/Allocation.java similarity index 72% rename from core/src/main/java/org/polypheny/db/catalog/refactor/LogicalEntity.java rename to core/src/main/java/org/polypheny/db/catalog/entity/allocation/Allocation.java index 46c91c1356..77b62b8ebb 100644 --- a/core/src/main/java/org/polypheny/db/catalog/refactor/LogicalEntity.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/Allocation.java @@ -14,8 +14,14 @@ * limitations under the License. */ -package org.polypheny.db.catalog.refactor; +package org.polypheny.db.catalog.entity.allocation; -public interface LogicalEntity { +import org.polypheny.db.catalog.refactor.CatalogType; + +public interface Allocation extends CatalogType { + + default State getCatalogType() { + return State.ALLOCATION; + } } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationGraph.java b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationGraph.java new file mode 100644 index 0000000000..d125dd4cd6 --- /dev/null +++ b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationGraph.java @@ -0,0 +1,42 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.catalog.entity.allocation; + +import java.io.Serializable; +import org.polypheny.db.catalog.entity.CatalogEntity; +import org.polypheny.db.catalog.entity.logical.LogicalGraph; + +public class AllocationGraph extends CatalogEntity implements Allocation { + + + public final LogicalGraph logical; + public final long id; + + + public AllocationGraph( long id, LogicalGraph graph ) { + super( id, graph.name, graph.entityType, graph.namespaceType ); + this.id = id; + this.logical = graph; + } + + + @Override + public Serializable[] getParameterArray() { + return new Serializable[0]; + } + +} diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationTable.java b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationTable.java new file mode 100644 index 0000000000..cc48e1cfcd --- /dev/null +++ b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationTable.java @@ -0,0 +1,35 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.catalog.entity.allocation; + +import java.io.Serializable; +import org.polypheny.db.catalog.entity.CatalogEntity; +import org.polypheny.db.catalog.entity.logical.LogicalGraph; + +public class AllocationTable extends CatalogEntity implements Allocation { + + protected AllocationTable( long id, LogicalGraph graph ) { + super( id, graph.name, graph.entityType, graph.namespaceType ); + } + + + @Override + public Serializable[] getParameterArray() { + return new Serializable[0]; + } + +} diff --git a/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/CassandraMetaDataProvider.java b/core/src/main/java/org/polypheny/db/catalog/entity/logical/Logical.java similarity index 73% rename from plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/CassandraMetaDataProvider.java rename to core/src/main/java/org/polypheny/db/catalog/entity/logical/Logical.java index f2060a564b..d74ad6ba4f 100644 --- a/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/CassandraMetaDataProvider.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/logical/Logical.java @@ -14,9 +14,14 @@ * limitations under the License. 
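The Allocation marker above, together with the Logical and Physical markers and the CatalogType interface introduced later in this patch, classifies every catalog entity into one of three states. A self-contained mini version of that scheme (same names as the patch, reduced to its mechanics):

public class CatalogStateDemo {

    interface CatalogType {

        enum State { LOGICAL, ALLOCATION, PHYSICAL }

        State getCatalogType();

        default boolean isLogical() {
            return getCatalogType() == State.LOGICAL;
        }

        default boolean isAllocation() {
            return getCatalogType() == State.ALLOCATION;
        }

        default boolean isPhysical() {
            return getCatalogType() == State.PHYSICAL;
        }

    }

    interface Allocation extends CatalogType {

        default State getCatalogType() {
            return State.ALLOCATION;
        }

    }

    public static void main( String[] args ) {
        Allocation allocation = new Allocation() { }; // no abstract methods remain
        System.out.println( allocation.isAllocation() ); // true
        System.out.println( allocation.isPhysical() ); // false
    }

}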
*/ -package org.polypheny.db.adapter.cassandra; +package org.polypheny.db.catalog.entity.logical; +import org.polypheny.db.catalog.refactor.CatalogType; -public class CassandraMetaDataProvider { +public interface Logical extends CatalogType { + + default State getCatalogType() { + return State.LOGICAL; + } } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogGraphDatabase.java b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalGraph.java similarity index 61% rename from core/src/main/java/org/polypheny/db/catalog/entity/CatalogGraphDatabase.java rename to core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalGraph.java index cd39d9087f..4447386e1c 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogGraphDatabase.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalGraph.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2022 The Polypheny Project + * Copyright 2019-2023 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -14,7 +14,7 @@ * limitations under the License. */ -package org.polypheny.db.catalog.entity; +package org.polypheny.db.catalog.entity.logical; import com.drew.lang.annotations.NotNull; import com.google.common.collect.ImmutableList; @@ -25,29 +25,28 @@ import java.util.stream.Collectors; import lombok.EqualsAndHashCode; import lombok.NonNull; +import lombok.experimental.SuperBuilder; import org.polypheny.db.catalog.Catalog.EntityType; import org.polypheny.db.catalog.Catalog.NamespaceType; +import org.polypheny.db.catalog.entity.CatalogEntity; +import org.polypheny.db.catalog.entity.CatalogObject; - +@SuperBuilder(toBuilder = true) @EqualsAndHashCode(callSuper = false) -public class CatalogGraphDatabase extends CatalogEntity implements CatalogObject, Comparable { +public class LogicalGraph extends CatalogEntity implements CatalogObject, Comparable, Logical { private static final long serialVersionUID = 7343856827901459672L; - public final String name; - public final long id; public final long databaseId; public final ImmutableList placements; public final int ownerId; - private final boolean modifiable; + public final boolean modifiable; public final boolean caseSensitive; - public CatalogGraphDatabase( long databaseId, long id, String name, int ownerId, boolean modifiable, @NonNull Collection placements, boolean caseSensitive ) { + public LogicalGraph( long databaseId, long id, String name, int ownerId, boolean modifiable, @NonNull Collection placements, boolean caseSensitive ) { super( id, name, EntityType.ENTITY, NamespaceType.GRAPH ); - this.id = id; - this.name = name; this.ownerId = ownerId; this.databaseId = databaseId; this.modifiable = modifiable; @@ -56,6 +55,11 @@ public CatalogGraphDatabase( long databaseId, long id, String name, int ownerId, } + public LogicalGraph( LogicalGraph graph ) { + this( graph.databaseId, graph.id, graph.name, graph.ownerId, graph.modifiable, graph.placements, graph.caseSensitive ); + } + + @Override public Serializable[] getParameterArray() { return new Serializable[0]; @@ -63,7 +67,7 @@ public Serializable[] getParameterArray() { @Override - public int compareTo( @NotNull CatalogGraphDatabase o ) { + public int compareTo( @NotNull LogicalGraph o ) { if ( o != null ) { return (int) (this.id - o.id); } @@ -71,15 +75,16 @@ public int compareTo( @NotNull CatalogGraphDatabase o ) { } - public CatalogGraphDatabase addPlacement( int adapterId ) { + 
public LogicalGraph addPlacement( int adapterId ) { List placements = new ArrayList<>( this.placements ); placements.add( adapterId ); - return new CatalogGraphDatabase( databaseId, id, name, ownerId, modifiable, placements, true ); + return toBuilder().placements( ImmutableList.copyOf( placements ) ).build(); } - public CatalogGraphDatabase removePlacement( int adapterId ) { - return new CatalogGraphDatabase( databaseId, id, name, ownerId, modifiable, placements.stream().filter( i -> i != adapterId ).collect( Collectors.toList() ), true ); + public LogicalGraph removePlacement( int adapterId ) { + return toBuilder().placements( ImmutableList.copyOf( placements.stream().filter( i -> i != adapterId ).collect( Collectors.toList() ) ) ).build(); } + } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogTable.java b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalTable.java similarity index 91% rename from core/src/main/java/org/polypheny/db/catalog/entity/CatalogTable.java rename to core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalTable.java index 87206a0a18..8f3cf7ccae 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogTable.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalTable.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2022 The Polypheny Project + * Copyright 2019-2023 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -14,7 +14,7 @@ * limitations under the License. */ -package org.polypheny.db.catalog.entity; +package org.polypheny.db.catalog.entity.logical; import com.google.common.collect.ImmutableList; @@ -29,11 +29,13 @@ import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.Catalog.EntityType; import org.polypheny.db.catalog.Catalog.NamespaceType; +import org.polypheny.db.catalog.entity.CatalogEntity; +import org.polypheny.db.catalog.entity.CatalogObject; import org.polypheny.db.partition.properties.PartitionProperty; @EqualsAndHashCode(callSuper = false) -public class CatalogTable extends CatalogEntity implements CatalogObject, Comparable { +public class LogicalTable extends CatalogEntity implements CatalogObject, Comparable, Logical { private static final long serialVersionUID = 4653390333258552102L; @@ -56,7 +58,7 @@ public class CatalogTable extends CatalogEntity implements CatalogObject, Compar public final ImmutableList connectedViews; - public CatalogTable( + public LogicalTable( final long id, @NonNull final String name, final ImmutableList fieldIds, @@ -90,7 +92,7 @@ public CatalogTable( } - public CatalogTable( + public LogicalTable( final long id, @NonNull final String name, final ImmutableList fieldIds, @@ -182,7 +184,7 @@ public Serializable[] getParameterArray() { @Override - public int compareTo( CatalogTable o ) { + public int compareTo( LogicalTable o ) { if ( o != null ) { int comp = (int) (this.databaseId - o.databaseId); if ( comp == 0 ) { @@ -210,8 +212,8 @@ static String getEnumNameOrNull( Enum theEnum ) { } - public CatalogTable getRenamed( String newName ) { - return new CatalogTable( + public LogicalTable getRenamed( String newName ) { + return new LogicalTable( id, newName, fieldIds, @@ -227,8 +229,8 @@ public CatalogTable getRenamed( String newName ) { } - public CatalogTable getConnectedViews( ImmutableList newConnectedViews ) { - return new CatalogTable( + public LogicalTable getConnectedViews( ImmutableList newConnectedViews ) { 
+ return new LogicalTable( id, name, fieldIds, @@ -244,8 +246,8 @@ public CatalogTable getConnectedViews( ImmutableList newConnectedViews ) { } - public CatalogTable getTableWithColumns( ImmutableList newColumnIds ) { - return new CatalogTable( + public LogicalTable getTableWithColumns( ImmutableList newColumnIds ) { + return new LogicalTable( id, name, newColumnIds, diff --git a/core/src/main/java/org/polypheny/db/catalog/refactor/AllocationEntity.java b/core/src/main/java/org/polypheny/db/catalog/entity/physical/Physical.java similarity index 88% rename from core/src/main/java/org/polypheny/db/catalog/refactor/AllocationEntity.java rename to core/src/main/java/org/polypheny/db/catalog/entity/physical/Physical.java index 70d0d975a6..8761342ba2 100644 --- a/core/src/main/java/org/polypheny/db/catalog/refactor/AllocationEntity.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/physical/Physical.java @@ -14,8 +14,9 @@ * limitations under the License. */ -package org.polypheny.db.catalog.refactor; +package org.polypheny.db.catalog.entity.physical; + +public interface Physical { -public interface AllocationEntity { } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalGraph.java b/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalGraph.java new file mode 100644 index 0000000000..e681c82beb --- /dev/null +++ b/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalGraph.java @@ -0,0 +1,36 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.catalog.entity.physical; + +import java.io.Serializable; +import org.polypheny.db.catalog.Catalog.EntityType; +import org.polypheny.db.catalog.Catalog.NamespaceType; +import org.polypheny.db.catalog.entity.CatalogEntity; + +public class PhysicalGraph extends CatalogEntity implements Physical { + + protected PhysicalGraph( long id, String name, EntityType type, NamespaceType namespaceType ) { + super( id, name, type, namespaceType ); + } + + + @Override + public Serializable[] getParameterArray() { + return new Serializable[0]; + } + +} diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalTable.java b/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalTable.java new file mode 100644 index 0000000000..36c9625519 --- /dev/null +++ b/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalTable.java @@ -0,0 +1,70 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.catalog.entity.physical; + +import com.google.common.collect.ImmutableList; +import java.io.Serializable; +import java.util.List; +import org.polypheny.db.algebra.type.AlgDataType; +import org.polypheny.db.algebra.type.AlgDataTypeFactory; +import org.polypheny.db.algebra.type.AlgDataTypeImpl; +import org.polypheny.db.algebra.type.AlgDataTypeSystem; +import org.polypheny.db.algebra.type.AlgProtoDataType; +import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.Catalog.EntityType; +import org.polypheny.db.catalog.Catalog.NamespaceType; +import org.polypheny.db.catalog.entity.CatalogColumn; +import org.polypheny.db.catalog.entity.CatalogColumnPlacement; +import org.polypheny.db.catalog.entity.CatalogEntity; +import org.polypheny.db.type.PolyTypeFactoryImpl; + +public class PhysicalTable extends CatalogEntity implements Physical { + + public final ImmutableList placements; + + + protected PhysicalTable( long id, String name, EntityType type, NamespaceType namespaceType, List placements ) { + super( id, name, type, namespaceType ); + this.placements = ImmutableList.copyOf( placements ); + } + + + public PhysicalTable( PhysicalTable table ) { + this( table.id, table.name, table.entityType, table.namespaceType, table.placements ); + } + + + public AlgProtoDataType buildProto() { + final AlgDataTypeFactory typeFactory = new PolyTypeFactoryImpl( AlgDataTypeSystem.DEFAULT ); + final AlgDataTypeFactory.Builder fieldInfo = typeFactory.builder(); + + for ( CatalogColumnPlacement placement : placements ) { + CatalogColumn catalogColumn = Catalog.getInstance().getColumn( placement.columnId ); + AlgDataType sqlType = catalogColumn.getAlgDataType( typeFactory ); + fieldInfo.add( catalogColumn.name, placement.physicalColumnName, sqlType ).nullable( catalogColumn.nullable ); + } + + return AlgDataTypeImpl.proto( fieldInfo.build() ); + } + + + @Override + public Serializable[] getParameterArray() { + return new Serializable[0]; + } + +} diff --git a/core/src/main/java/org/polypheny/db/catalog/refactor/PhysicalEntity.java b/core/src/main/java/org/polypheny/db/catalog/refactor/CatalogType.java similarity index 60% rename from core/src/main/java/org/polypheny/db/catalog/refactor/PhysicalEntity.java rename to core/src/main/java/org/polypheny/db/catalog/refactor/CatalogType.java index bdc5230d3b..c825479a3f 100644 --- a/core/src/main/java/org/polypheny/db/catalog/refactor/PhysicalEntity.java +++ b/core/src/main/java/org/polypheny/db/catalog/refactor/CatalogType.java @@ -16,6 +16,26 @@ package org.polypheny.db.catalog.refactor; -public interface PhysicalEntity { +public interface CatalogType { + + State getCatalogType(); + + default boolean isLogical() { + return getCatalogType() == State.LOGICAL; + } + + default boolean isAllocation() { + return getCatalogType() == State.ALLOCATION; + } + + default boolean isPhysical() { + return getCatalogType() == State.PHYSICAL; + } + + enum State { + LOGICAL, + ALLOCATION, + PHYSICAL + } } diff --git a/core/src/main/java/org/polypheny/db/catalog/refactor/ModifiableEntity.java b/core/src/main/java/org/polypheny/db/catalog/refactor/ModifiableEntity.java new file mode 100644 index 0000000000..39c931555d --- /dev/null +++ b/core/src/main/java/org/polypheny/db/catalog/refactor/ModifiableEntity.java @@ -0,0 +1,40 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the 
"License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.catalog.refactor; + +import java.util.List; +import org.polypheny.db.algebra.AlgNode; +import org.polypheny.db.algebra.core.common.Modify; +import org.polypheny.db.algebra.core.common.Modify.Operation; +import org.polypheny.db.catalog.entity.CatalogEntity; +import org.polypheny.db.plan.AlgOptCluster; +import org.polypheny.db.plan.AlgTraitSet; +import org.polypheny.db.rex.RexNode; + +public interface ModifiableEntity { + + Modify toModificationAlg( + AlgOptCluster cluster, + AlgTraitSet traits, + CatalogEntity physicalEntity, + AlgNode child, + Operation operation, + List targets, + List sources ); + + +} diff --git a/core/src/main/java/org/polypheny/db/catalog/refactor/QueryableEntity.java b/core/src/main/java/org/polypheny/db/catalog/refactor/QueryableEntity.java index e8512a8100..5e968a202d 100644 --- a/core/src/main/java/org/polypheny/db/catalog/refactor/QueryableEntity.java +++ b/core/src/main/java/org/polypheny/db/catalog/refactor/QueryableEntity.java @@ -16,6 +16,15 @@ package org.polypheny.db.catalog.refactor; +import org.apache.calcite.linq4j.Queryable; +import org.polypheny.db.adapter.DataContext; +import org.polypheny.db.schema.graph.QueryableGraph; + public interface QueryableEntity { + /** + * Converts this table into a {@link Queryable}. + */ + Queryable asQueryable( DataContext root, QueryableGraph graph ); + } diff --git a/core/src/main/java/org/polypheny/db/catalog/refactor/TranslatableEntity.java b/core/src/main/java/org/polypheny/db/catalog/refactor/TranslatableEntity.java index 2172bd5abc..1577155054 100644 --- a/core/src/main/java/org/polypheny/db/catalog/refactor/TranslatableEntity.java +++ b/core/src/main/java/org/polypheny/db/catalog/refactor/TranslatableEntity.java @@ -16,6 +16,15 @@ package org.polypheny.db.catalog.refactor; +import org.polypheny.db.algebra.AlgNode; +import org.polypheny.db.plan.AlgOptEntity.ToAlgContext; +import org.polypheny.db.plan.AlgTraitSet; + public interface TranslatableEntity { + /** + * Converts this entity into a {@link AlgNode}. 
+ */ + AlgNode toAlg( ToAlgContext context, AlgTraitSet traitSet ); + } diff --git a/core/src/main/java/org/polypheny/db/ddl/DdlManager.java b/core/src/main/java/org/polypheny/db/ddl/DdlManager.java index 0e6741415f..3c79bb21d5 100644 --- a/core/src/main/java/org/polypheny/db/ddl/DdlManager.java +++ b/core/src/main/java/org/polypheny/db/ddl/DdlManager.java @@ -32,7 +32,7 @@ import org.polypheny.db.catalog.Catalog.PlacementType; import org.polypheny.db.catalog.entity.CatalogAdapter.AdapterType; import org.polypheny.db.catalog.entity.CatalogCollection; -import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.entity.MaterializedCriteria; import org.polypheny.db.catalog.exceptions.ColumnAlreadyExistsException; import org.polypheny.db.catalog.exceptions.EntityAlreadyExistsException; @@ -166,7 +166,7 @@ public static DdlManager getInstance() { * @param afterColumnName the name of the column after the column, which is inserted; can be null * @param defaultValue the default value of the inserted column */ - public abstract void addColumnToSourceTable( CatalogTable catalogTable, String columnPhysicalName, String columnLogicalName, String beforeColumnName, String afterColumnName, String defaultValue, Statement statement ) throws ColumnAlreadyExistsException, DdlOnSourceException, ColumnNotExistsException; + public abstract void addColumnToSourceTable( LogicalTable catalogTable, String columnPhysicalName, String columnLogicalName, String beforeColumnName, String afterColumnName, String defaultValue, Statement statement ) throws ColumnAlreadyExistsException, DdlOnSourceException, ColumnNotExistsException; /** * Add a column to an existing table @@ -180,7 +180,7 @@ public static DdlManager getInstance() { * @param defaultValue a default value for the column; can be null * @param statement the query statement */ - public abstract void addColumn( String columnName, CatalogTable catalogTable, String beforeColumnName, String afterColumnName, ColumnTypeInformation type, boolean nullable, String defaultValue, Statement statement ) throws NotNullAndDefaultValueException, ColumnAlreadyExistsException, ColumnNotExistsException; + public abstract void addColumn( String columnName, LogicalTable catalogTable, String beforeColumnName, String afterColumnName, ColumnTypeInformation type, boolean nullable, String defaultValue, Statement statement ) throws NotNullAndDefaultValueException, ColumnAlreadyExistsException, ColumnNotExistsException; /** * Add a foreign key to a table @@ -193,7 +193,7 @@ public static DdlManager getInstance() { * @param onUpdate how to enforce the constraint on update * @param onDelete how to enforce the constraint on delete */ - public abstract void addForeignKey( CatalogTable catalogTable, CatalogTable refTable, List columnNames, List refColumnNames, String constraintName, ForeignKeyOption onUpdate, ForeignKeyOption onDelete ) throws UnknownColumnException, GenericCatalogException; + public abstract void addForeignKey( LogicalTable catalogTable, LogicalTable refTable, List columnNames, List refColumnNames, String constraintName, ForeignKeyOption onUpdate, ForeignKeyOption onDelete ) throws UnknownColumnException, GenericCatalogException; /** * Adds an index to a table @@ -206,7 +206,7 @@ public static DdlManager getInstance() { * @param location instance of the data store on which to create the index; if null, default strategy is being used * @param statement the initial query statement */ - public
abstract void addIndex( CatalogTable catalogTable, String indexMethodName, List columnNames, String indexName, boolean isUnique, DataStore location, Statement statement ) throws UnknownColumnException, UnknownIndexMethodException, GenericCatalogException, UnknownTableException, UnknownUserException, UnknownSchemaException, UnknownKeyException, UnknownDatabaseException, TransactionException, AlterSourceException, IndexExistsException, MissingColumnPlacementException; + public abstract void addIndex( LogicalTable catalogTable, String indexMethodName, List columnNames, String indexName, boolean isUnique, DataStore location, Statement statement ) throws UnknownColumnException, UnknownIndexMethodException, GenericCatalogException, UnknownTableException, UnknownUserException, UnknownSchemaException, UnknownKeyException, UnknownDatabaseException, TransactionException, AlterSourceException, IndexExistsException, MissingColumnPlacementException; /** * Adds an index located in Polypheny to a table @@ -218,7 +218,7 @@ public static DdlManager getInstance() { * @param isUnique whether the index is unique * @param statement the initial query statement */ - public abstract void addPolyphenyIndex( CatalogTable catalogTable, String indexMethodName, List columnNames, String indexName, boolean isUnique, Statement statement ) throws UnknownColumnException, UnknownIndexMethodException, GenericCatalogException, UnknownTableException, UnknownUserException, UnknownSchemaException, UnknownKeyException, UnknownDatabaseException, TransactionException, AlterSourceException, IndexExistsException, MissingColumnPlacementException; + public abstract void addPolyphenyIndex( LogicalTable catalogTable, String indexMethodName, List columnNames, String indexName, boolean isUnique, Statement statement ) throws UnknownColumnException, UnknownIndexMethodException, GenericCatalogException, UnknownTableException, UnknownUserException, UnknownSchemaException, UnknownKeyException, UnknownDatabaseException, TransactionException, AlterSourceException, IndexExistsException, MissingColumnPlacementException; /** * Adds new column placements to a table @@ -230,7 +230,7 @@ public static DdlManager getInstance() { * @param dataStore the data store on which to create the placement * @param statement the query statement */ - public abstract void addDataPlacement( CatalogTable catalogTable, List columnIds, List partitionGroupIds, List partitionGroupNames, DataStore dataStore, Statement statement ) throws PlacementAlreadyExistsException; + public abstract void addDataPlacement( LogicalTable catalogTable, List columnIds, List partitionGroupIds, List partitionGroupNames, DataStore dataStore, Statement statement ) throws PlacementAlreadyExistsException; /** * Adds a new primary key to a table @@ -239,7 +239,7 @@ public static DdlManager getInstance() { * @param columnNames the names of all columns in the primary key * @param statement the query statement */ - public abstract void addPrimaryKey( CatalogTable catalogTable, List columnNames, Statement statement ) throws DdlOnSourceException; + public abstract void addPrimaryKey( LogicalTable catalogTable, List columnNames, Statement statement ) throws DdlOnSourceException; /** * Adds a unique constraint to a table @@ -248,7 +248,7 @@ public static DdlManager getInstance() { * @param columnNames the names of the columns which are part of the constraint * @param constraintName the name of the unique constraint */ - public abstract void addUniqueConstraint( CatalogTable catalogTable, List columnNames, 
String constraintName ) throws DdlOnSourceException; + public abstract void addUniqueConstraint( LogicalTable catalogTable, List columnNames, String constraintName ) throws DdlOnSourceException; /** * Drop a specific column in a table @@ -257,7 +257,7 @@ public static DdlManager getInstance() { * @param columnName the name of the column which is dropped * @param statement the query statement */ - public abstract void dropColumn( CatalogTable catalogTable, String columnName, Statement statement ) throws ColumnNotExistsException; + public abstract void dropColumn( LogicalTable catalogTable, String columnName, Statement statement ) throws ColumnNotExistsException; /** * Drop a specific constraint from a table @@ -265,7 +265,7 @@ public static DdlManager getInstance() { * @param catalogTable the table * @param constraintName the name of the constraint to be dropped */ - public abstract void dropConstraint( CatalogTable catalogTable, String constraintName ) throws DdlOnSourceException; + public abstract void dropConstraint( LogicalTable catalogTable, String constraintName ) throws DdlOnSourceException; /** * Drop a foreign key of a table @@ -273,7 +273,7 @@ public static DdlManager getInstance() { * @param catalogTable the table the foreign key belongs to * @param foreignKeyName the name of the foreign key to drop */ - public abstract void dropForeignKey( CatalogTable catalogTable, String foreignKeyName ) throws DdlOnSourceException; + public abstract void dropForeignKey( LogicalTable catalogTable, String foreignKeyName ) throws DdlOnSourceException; /** * Drop an index @@ -282,7 +282,7 @@ public static DdlManager getInstance() { * @param indexName the name of the index to drop * @param statement the query statement */ - public abstract void dropIndex( CatalogTable catalogTable, String indexName, Statement statement ) throws DdlOnSourceException; + public abstract void dropIndex( LogicalTable catalogTable, String indexName, Statement statement ) throws DdlOnSourceException; /** * Drop the data placement of a table on a specified data store @@ -291,14 +291,14 @@ public static DdlManager getInstance() { * @param storeInstance the data store from which to drop the placement * @param statement the query statement */ - public abstract void dropDataPlacement( CatalogTable catalogTable, DataStore storeInstance, Statement statement ) throws PlacementNotExistsException, LastPlacementException; + public abstract void dropDataPlacement( LogicalTable catalogTable, DataStore storeInstance, Statement statement ) throws PlacementNotExistsException, LastPlacementException; /** * Drop the primary key of a table * * @param catalogTable the table */ - public abstract void dropPrimaryKey( CatalogTable catalogTable ) throws DdlOnSourceException; + public abstract void dropPrimaryKey( LogicalTable catalogTable ) throws DdlOnSourceException; /** * Set the type of the column @@ -308,7 +308,7 @@ public static DdlManager getInstance() { * @param typeInformation the new type of the column * @param statement the used statement */ - public abstract void setColumnType( CatalogTable catalogTable, String columnName, ColumnTypeInformation typeInformation, Statement statement ) throws DdlOnSourceException, ColumnNotExistsException, GenericCatalogException; + public abstract void setColumnType( LogicalTable catalogTable, String columnName, ColumnTypeInformation typeInformation, Statement statement ) throws DdlOnSourceException, ColumnNotExistsException, GenericCatalogException; /** * Set if the column can hold the value NULL or 
not @@ -318,7 +318,7 @@ public static DdlManager getInstance() { * @param nullable if the column should be nullable * @param statement the used statement */ - public abstract void setColumnNullable( CatalogTable catalogTable, String columnName, boolean nullable, Statement statement ) throws ColumnNotExistsException, DdlOnSourceException, GenericCatalogException; + public abstract void setColumnNullable( LogicalTable catalogTable, String columnName, boolean nullable, Statement statement ) throws ColumnNotExistsException, DdlOnSourceException, GenericCatalogException; /** * Changes the position of the column and places it before or after the provided columns @@ -329,7 +329,7 @@ public static DdlManager getInstance() { * @param afterColumnName change position of the column and place it after this column; nullable * @param statement the used statement */ - public abstract void setColumnPosition( CatalogTable catalogTable, String columnName, String beforeColumnName, String afterColumnName, Statement statement ) throws ColumnNotExistsException; + public abstract void setColumnPosition( LogicalTable catalogTable, String columnName, String beforeColumnName, String afterColumnName, Statement statement ) throws ColumnNotExistsException; /** * Set the collation to the column @@ -339,7 +339,7 @@ public static DdlManager getInstance() { * @param collation the new collation of the column * @param statement the used statement */ - public abstract void setColumnCollation( CatalogTable catalogTable, String columnName, Collation collation, Statement statement ) throws ColumnNotExistsException, DdlOnSourceException; + public abstract void setColumnCollation( LogicalTable catalogTable, String columnName, Collation collation, Statement statement ) throws ColumnNotExistsException, DdlOnSourceException; /** * Set the default value of the column @@ -349,7 +349,7 @@ public static DdlManager getInstance() { * @param defaultValue the new default value of the column * @param statement the used statement */ - public abstract void setDefaultValue( CatalogTable catalogTable, String columnName, String defaultValue, Statement statement ) throws ColumnNotExistsException; + public abstract void setDefaultValue( LogicalTable catalogTable, String columnName, String defaultValue, Statement statement ) throws ColumnNotExistsException; /** * Drop the default value of the column @@ -358,7 +358,7 @@ public static DdlManager getInstance() { * @param columnName the name of the column to be modified * @param statement the used statement */ - public abstract void dropDefaultValue( CatalogTable catalogTable, String columnName, Statement statement ) throws ColumnNotExistsException; + public abstract void dropDefaultValue( LogicalTable catalogTable, String columnName, Statement statement ) throws ColumnNotExistsException; /** * Modify the placement of a table on a specified data store. 
This method compares the specified list of column ids with @@ -375,7 +375,7 @@ public static DdlManager getInstance() { * @param storeInstance the data store * @param statement the used statement */ - public abstract void modifyDataPlacement( CatalogTable catalogTable, List columnIds, List partitionGroupIds, List partitionGroupNames, DataStore storeInstance, Statement statement ) throws PlacementNotExistsException, IndexPreventsRemovalException, LastPlacementException; + public abstract void modifyDataPlacement( LogicalTable catalogTable, List columnIds, List partitionGroupIds, List partitionGroupNames, DataStore storeInstance, Statement statement ) throws PlacementNotExistsException, IndexPreventsRemovalException, LastPlacementException; /** * Modify the partition distribution on the selected store. Can be used to add or remove partitions on a store. @@ -386,7 +386,7 @@ public static DdlManager getInstance() { * @param storeInstance the data store on which the partition placements should be altered * @param statement the used statement */ - public abstract void modifyPartitionPlacement( CatalogTable catalogTable, List partitionGroupIds, DataStore storeInstance, Statement statement ) throws LastPlacementException; + public abstract void modifyPartitionPlacement( LogicalTable catalogTable, List partitionGroupIds, DataStore storeInstance, Statement statement ) throws LastPlacementException; /** * Add a column placement for a specified column on a specified data store. If the store already contains a placement of @@ -397,7 +397,7 @@ public static DdlManager getInstance() { * @param storeInstance the data store on which the column should be placed * @param statement the used statement */ - public abstract void addColumnPlacement( CatalogTable catalogTable, String columnName, DataStore storeInstance, Statement statement ) throws UnknownAdapterException, PlacementNotExistsException, PlacementAlreadyExistsException, ColumnNotExistsException; + public abstract void addColumnPlacement( LogicalTable catalogTable, String columnName, DataStore storeInstance, Statement statement ) throws UnknownAdapterException, PlacementNotExistsException, PlacementAlreadyExistsException, ColumnNotExistsException; /** * Drop a specified column from a specified data store. 
If the column is part of the primary key, the column placement typ @@ -408,7 +408,7 @@ public static DdlManager getInstance() { * @param storeInstance the data store from which to remove the placement * @param statement the used statement */ - public abstract void dropColumnPlacement( CatalogTable catalogTable, String columnName, DataStore storeInstance, Statement statement ) throws UnknownAdapterException, PlacementNotExistsException, IndexPreventsRemovalException, LastPlacementException, PlacementIsPrimaryException, ColumnNotExistsException; + public abstract void dropColumnPlacement( LogicalTable catalogTable, String columnName, DataStore storeInstance, Statement statement ) throws UnknownAdapterException, PlacementNotExistsException, IndexPreventsRemovalException, LastPlacementException, PlacementIsPrimaryException, ColumnNotExistsException; /** * Change the owner of a table @@ -416,7 +416,7 @@ public static DdlManager getInstance() { * @param catalogTable the table * @param newOwnerName the name of the new owner */ - public abstract void alterTableOwner( CatalogTable catalogTable, String newOwnerName ) throws UnknownUserException; + public abstract void alterTableOwner( LogicalTable catalogTable, String newOwnerName ) throws UnknownUserException; /** * Rename a table (changing the logical name of the table) @@ -425,7 +425,7 @@ public static DdlManager getInstance() { * @param newTableName the new name for the table * @param statement the used statement */ - public abstract void renameTable( CatalogTable catalogTable, String newTableName, Statement statement ) throws EntityAlreadyExistsException; + public abstract void renameTable( LogicalTable catalogTable, String newTableName, Statement statement ) throws EntityAlreadyExistsException; /** * Rename a column of a table (changing the logical name of the column) @@ -435,7 +435,7 @@ public static DdlManager getInstance() { * @param newColumnName the new name for the column * @param statement the used statement */ - public abstract void renameColumn( CatalogTable catalogTable, String columnName, String newColumnName, Statement statement ) throws ColumnAlreadyExistsException, ColumnNotExistsException; + public abstract void renameColumn( LogicalTable catalogTable, String columnName, String newColumnName, Statement statement ) throws ColumnAlreadyExistsException, ColumnNotExistsException; public abstract void removeGraphDatabase( long graphId, boolean ifExists, Statement statement ); @@ -491,7 +491,7 @@ public static DdlManager getInstance() { * @param catalogTable the table to be merged * @param statement the used Statement */ - public abstract void removePartitioning( CatalogTable catalogTable, Statement statement ) throws UnknownDatabaseException, GenericCatalogException, UnknownTableException, TransactionException, UnknownSchemaException, UnknownUserException, UnknownKeyException; + public abstract void removePartitioning( LogicalTable catalogTable, Statement statement ) throws UnknownDatabaseException, GenericCatalogException, UnknownTableException, TransactionException, UnknownSchemaException, UnknownUserException, UnknownKeyException; /** * Adds a new constraint to a table @@ -519,19 +519,19 @@ public static DdlManager getInstance() { * @param catalogTable the table to be dropped * @param statement the used statement */ - public abstract void dropTable( CatalogTable catalogTable, Statement statement ) throws 
DdlOnSourceException; /** * Drop View */ - public abstract void dropView( CatalogTable catalogTable, Statement statement ) throws DdlOnSourceException; + public abstract void dropView( LogicalTable catalogTable, Statement statement ) throws DdlOnSourceException; /** * @param materializedView to be dropped * @param statement the used statement */ - public abstract void dropMaterializedView( CatalogTable materializedView, Statement statement ) throws DdlOnSourceException; + public abstract void dropMaterializedView( LogicalTable materializedView, Statement statement ) throws DdlOnSourceException; /** * Truncate a table @@ -539,7 +539,7 @@ public static DdlManager getInstance() { * @param catalogTable the table to be truncated * @param statement the used statement */ - public abstract void truncate( CatalogTable catalogTable, Statement statement ); + public abstract void truncate( LogicalTable catalogTable, Statement statement ); /** * Create a new type @@ -680,7 +680,7 @@ public static ColumnTypeInformation fromDataTypeSpec( DataTypeSpec sqlDataType ) public static class PartitionInformation { - public final CatalogTable table; + public final LogicalTable table; public final String columnName; public final String typeName; public final List partitionGroupNames; @@ -691,7 +691,7 @@ public static class PartitionInformation { public PartitionInformation( - CatalogTable table, + LogicalTable table, String typeName, String columnName, List partitionGroupNames, @@ -711,7 +711,7 @@ public PartitionInformation( public static PartitionInformation fromNodeLists( - CatalogTable table, + LogicalTable table, String typeName, String columnName, List partitionGroupNames, diff --git a/core/src/main/java/org/polypheny/db/interpreter/Bindables.java b/core/src/main/java/org/polypheny/db/interpreter/Bindables.java index e09999a506..d52eb27058 100644 --- a/core/src/main/java/org/polypheny/db/interpreter/Bindables.java +++ b/core/src/main/java/org/polypheny/db/interpreter/Bindables.java @@ -59,11 +59,11 @@ import org.polypheny.db.algebra.core.Join; import org.polypheny.db.algebra.core.JoinAlgType; import org.polypheny.db.algebra.core.Project; -import org.polypheny.db.algebra.core.Scan; import org.polypheny.db.algebra.core.Sort; import org.polypheny.db.algebra.core.Union; import org.polypheny.db.algebra.core.Values; import org.polypheny.db.algebra.core.Window; +import org.polypheny.db.algebra.core.relational.RelScan; import org.polypheny.db.algebra.logical.relational.LogicalAggregate; import org.polypheny.db.algebra.logical.relational.LogicalFilter; import org.polypheny.db.algebra.logical.relational.LogicalJoin; @@ -77,9 +77,9 @@ import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; import org.polypheny.db.algebra.type.AlgDataTypeField; +import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgOptCost; -import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptPlanner; import org.polypheny.db.plan.AlgOptRule; import org.polypheny.db.plan.AlgOptRuleCall; @@ -88,7 +88,6 @@ import org.polypheny.db.plan.Convention; import org.polypheny.db.rex.RexLiteral; import org.polypheny.db.rex.RexNode; -import org.polypheny.db.schema.Entity; import org.polypheny.db.schema.FilterableEntity; import org.polypheny.db.schema.ProjectableFilterableEntity; import org.polypheny.db.schema.ScannableEntity; @@ -152,7 +151,7 @@ private static Enumerable help( DataContext dataContext, BindableAlg a 
/** - * Rule that converts a {@link Scan} to bindable convention. + * Rule that converts a {@link RelScan} to bindable convention. */ public static class BindableScanRule extends AlgOptRule { @@ -169,9 +168,9 @@ public BindableScanRule( AlgBuilderFactory algBuilderFactory ) { @Override public void onMatch( AlgOptRuleCall call ) { final LogicalRelScan scan = call.alg( 0 ); - final AlgOptEntity table = scan.getEntity(); + final CatalogEntity table = scan.entity; if ( BindableScan.canHandle( table ) ) { - call.transformTo( BindableScan.create( scan.getCluster(), table ) ); + call.transformTo( BindableScan.create( scan.getCluster(), scan.entity ) ); } } @@ -181,7 +180,7 @@ public void onMatch( AlgOptRuleCall call ) { /** * Scan of a table that implements {@link ScannableEntity} and therefore can be converted into an {@link Enumerable}. */ - public static class BindableScan extends Scan implements BindableAlg { + public static class BindableScan extends RelScan implements BindableAlg { public final ImmutableList filters; public final ImmutableIntList projects; @@ -192,44 +191,38 @@ public static class BindableScan extends Scan implements BindableAlg { * * Use {@link #create} unless you know what you are doing. */ - BindableScan( AlgOptCluster cluster, AlgTraitSet traitSet, AlgOptEntity table, ImmutableList filters, ImmutableIntList projects ) { - super( cluster, traitSet, table ); + BindableScan( AlgOptCluster cluster, AlgTraitSet traitSet, CatalogEntity entity, ImmutableList filters, ImmutableIntList projects ) { + super( cluster, traitSet, entity ); this.filters = Objects.requireNonNull( filters ); this.projects = Objects.requireNonNull( projects ); - Preconditions.checkArgument( canHandle( table ) ); + Preconditions.checkArgument( canHandle( entity ) ); } /** * Creates a BindableScan. */ - public static BindableScan create( AlgOptCluster cluster, AlgOptEntity algOptEntity ) { - return create( cluster, algOptEntity, ImmutableList.of(), identity( algOptEntity ) ); + public static BindableScan create( AlgOptCluster cluster, CatalogEntity entity ) { + return create( cluster, entity, ImmutableList.of(), identity( entity ) ); } /** * Creates a BindableScan. 
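* <p>A sketch of how a caller might use this entry point after the refactoring; the {@code entity} variable is illustrative and assumed to be resolved from the catalog beforehand:
* <pre>{@code
* CatalogEntity entity = ...; // hypothetical: any scannable catalog entity
* BindableScan scan = BindableScan.create( cluster, entity );
* // delegates to create( cluster, entity, ImmutableList.of(), identity( entity ) )
* }</pre>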
*/ - public static BindableScan create( AlgOptCluster cluster, AlgOptEntity algOptEntity, List filters, List projects ) { - final Entity entity = algOptEntity.unwrap( Entity.class ); + public static BindableScan create( AlgOptCluster cluster, CatalogEntity entity, List filters, List projects ) { final AlgTraitSet traitSet = cluster.traitSetOf( BindableConvention.INSTANCE ) - .replace( entity.getNamespaceType().getModelTrait() ) - .replaceIfs( AlgCollationTraitDef.INSTANCE, () -> { - if ( entity != null ) { - return entity.getStatistic().getCollations(); - } - return ImmutableList.of(); - } ); - return new BindableScan( cluster, traitSet, algOptEntity, ImmutableList.copyOf( filters ), ImmutableIntList.copyOf( projects ) ); + .replace( entity.namespaceType.getModelTrait() ) + .replaceIfs( AlgCollationTraitDef.INSTANCE, entity::getCollations ); + return new BindableScan( cluster, traitSet, entity, ImmutableList.copyOf( filters ), ImmutableIntList.copyOf( projects ) ); } @Override public AlgDataType deriveRowType() { final AlgDataTypeFactory.Builder builder = getCluster().getTypeFactory().builder(); - final List fieldList = table.getRowType().getFieldList(); + final List fieldList = entity.getRowType().getFieldList(); for ( int project : projects ) { builder.add( fieldList.get( project ) ); } @@ -258,7 +251,7 @@ public AlgOptCost computeSelfCost( AlgOptPlanner planner, AlgMetadataQuery mq ) // Cost factor for pushing fields // The "+ 2d" on top and bottom keeps the function fairly smooth. - double p = ((double) projects.size() + 2d) / ((double) table.getRowType().getFieldCount() + 2d); + double p = ((double) projects.size() + 2d) / ((double) entity.getRowType().getFieldCount() + 2d); // Multiply the cost by a factor that makes a scan more attractive if filters and projects are pushed to the table scan return super.computeSelfCost( planner, mq ).multiplyBy( f * p * 0.01d * 100.0d ); //TODO(s3lph): Temporary *100, otherwise foreign key enforcement breaks @@ -268,23 +261,23 @@ public AlgOptCost computeSelfCost( AlgOptPlanner planner, AlgMetadataQuery mq ) @Override public String algCompareString() { return "BindableScan$" + - "." + table.getCatalogEntity().id + + "." + entity.id + (filters != null ? filters.stream().map( RexNode::hashCode ).map( Objects::toString ).collect( Collectors.joining( "$" ) ) : "") + "$" + (projects != null ? 
projects.toString() : "") + "&"; } - public static boolean canHandle( AlgOptEntity table ) { - return table.unwrap( ScannableEntity.class ) != null - || table.unwrap( FilterableEntity.class ) != null - || table.unwrap( ProjectableFilterableEntity.class ) != null; + public static boolean canHandle( CatalogEntity entity ) { + return entity.unwrap( ScannableEntity.class ) != null + || entity.unwrap( FilterableEntity.class ) != null + || entity.unwrap( ProjectableFilterableEntity.class ) != null; } @Override public Enumerable bind( DataContext dataContext ) { // TODO: filterable and projectable - return table.unwrap( ScannableEntity.class ).scan( dataContext ); + return entity.unwrap( ScannableEntity.class ).scan( dataContext ); } diff --git a/core/src/main/java/org/polypheny/db/interpreter/Nodes.java b/core/src/main/java/org/polypheny/db/interpreter/Nodes.java index c6ad43daec..d02338a4b1 100644 --- a/core/src/main/java/org/polypheny/db/interpreter/Nodes.java +++ b/core/src/main/java/org/polypheny/db/interpreter/Nodes.java @@ -40,7 +40,7 @@ import org.polypheny.db.algebra.core.Filter; import org.polypheny.db.algebra.core.Join; import org.polypheny.db.algebra.core.Project; -import org.polypheny.db.algebra.core.Scan; +import org.polypheny.db.algebra.core.relational.RelScan; import org.polypheny.db.algebra.core.Sort; import org.polypheny.db.algebra.core.Union; import org.polypheny.db.algebra.core.Values; @@ -85,7 +85,7 @@ public void visit( Values value ) { } - public void visit( Scan scan ) { + public void visit( RelScan scan ) { final ImmutableList filters = ImmutableList.of(); node = ScanNode.create( this, scan, filters, null ); } diff --git a/core/src/main/java/org/polypheny/db/interpreter/ScanNode.java b/core/src/main/java/org/polypheny/db/interpreter/ScanNode.java index f379602871..a812780b89 100644 --- a/core/src/main/java/org/polypheny/db/interpreter/ScanNode.java +++ b/core/src/main/java/org/polypheny/db/interpreter/ScanNode.java @@ -48,11 +48,11 @@ import org.apache.calcite.linq4j.Queryable; import org.apache.calcite.linq4j.function.Function1; import org.polypheny.db.adapter.DataContext; -import org.polypheny.db.algebra.core.Scan; +import org.polypheny.db.algebra.core.relational.RelScan; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; import org.polypheny.db.algebra.type.AlgDataTypeField; -import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptUtil; import org.polypheny.db.rex.RexNode; @@ -70,11 +70,11 @@ /** - * Interpreter node that implements a {@link Scan}. + * Interpreter node that implements a {@link RelScan}. */ public class ScanNode implements Node { - private ScanNode( Compiler compiler, Scan alg, Enumerable enumerable ) { + private ScanNode( Compiler compiler, RelScan alg, Enumerable enumerable ) { compiler.enumerable( alg, enumerable ); } @@ -90,7 +90,7 @@ public void run() { * * Tries various table SPIs, and negotiates with the table which filters and projects it can implement. Adds to the Enumerable implementations of any filters and projects that cannot be implemented by the table. 
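* <p>A condensed sketch of that negotiation; only the ProjectableFilterableEntity branch is taken verbatim from the method below, the remaining fallbacks are abbreviated and assumed:
* <pre>{@code
* if ( entity.unwrap( ProjectableFilterableEntity.class ) != null ) {
*     return createProjectableFilterable( compiler, alg, filters, projects, pfTable );
* }
* // otherwise fall back to FilterableEntity, then ScannableEntity / QueryableEntity
* }</pre>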
*/ - static ScanNode create( Compiler compiler, Scan alg, ImmutableList filters, ImmutableIntList projects ) { + static ScanNode create( Compiler compiler, RelScan alg, ImmutableList filters, ImmutableIntList projects ) { final AlgOptEntity algOptEntity = alg.getEntity(); final ProjectableFilterableEntity pfTable = algOptEntity.unwrap( ProjectableFilterableEntity.class ); if ( pfTable != null ) { @@ -117,13 +117,13 @@ static ScanNode create( Compiler compiler, Scan alg, ImmutableList filt } - private static ScanNode createScannable( Compiler compiler, Scan alg, ImmutableList filters, ImmutableIntList projects, ScannableEntity scannableTable ) { + private static ScanNode createScannable( Compiler compiler, RelScan alg, ImmutableList filters, ImmutableIntList projects, ScannableEntity scannableTable ) { final Enumerable rowEnumerable = Enumerables.toRow( scannableTable.scan( compiler.getDataContext() ) ); return createEnumerable( compiler, alg, rowEnumerable, null, filters, projects ); } - private static ScanNode createQueryable( Compiler compiler, Scan alg, ImmutableList filters, ImmutableIntList projects, QueryableEntity queryableTable ) { + private static ScanNode createQueryable( Compiler compiler, RelScan alg, ImmutableList filters, ImmutableIntList projects, QueryableEntity queryableTable ) { final DataContext root = compiler.getDataContext(); final AlgOptEntity algOptEntity = alg.getEntity(); final Type elementType = queryableTable.getElementType(); @@ -131,7 +131,7 @@ private static ScanNode createQueryable( Compiler compiler, Scan alg, ImmutableL final Enumerable rowEnumerable; if ( elementType instanceof Class ) { //noinspection unchecked - final Queryable queryable = Schemas.queryable( root, (Class) elementType, List.of( algOptEntity.getCatalogEntity().unwrap( CatalogTable.class ).getNamespaceName(), algOptEntity.getCatalogEntity().name ) ); + final Queryable queryable = Schemas.queryable( root, (Class) elementType, List.of( algOptEntity.getCatalogEntity().unwrap( LogicalTable.class ).getNamespaceName(), algOptEntity.getCatalogEntity().name ) ); ImmutableList.Builder fieldBuilder = ImmutableList.builder(); Class type = (Class) elementType; for ( Field field : type.getFields() ) { @@ -153,13 +153,13 @@ private static ScanNode createQueryable( Compiler compiler, Scan alg, ImmutableL return new Row( values ); } ); } else { - rowEnumerable = Schemas.queryable( root, Row.class, List.of( algOptEntity.getCatalogEntity().unwrap( CatalogTable.class ).getNamespaceName(), algOptEntity.getCatalogEntity().name ) ); + rowEnumerable = Schemas.queryable( root, Row.class, List.of( algOptEntity.getCatalogEntity().unwrap( LogicalTable.class ).getNamespaceName(), algOptEntity.getCatalogEntity().name ) ); } return createEnumerable( compiler, alg, rowEnumerable, null, filters, projects ); } - private static ScanNode createFilterable( Compiler compiler, Scan alg, ImmutableList filters, ImmutableIntList projects, FilterableEntity filterableTable ) { + private static ScanNode createFilterable( Compiler compiler, RelScan alg, ImmutableList filters, ImmutableIntList projects, FilterableEntity filterableTable ) { final DataContext root = compiler.getDataContext(); final List mutableFilters = Lists.newArrayList( filters ); final Enumerable enumerable = filterableTable.scan( root, mutableFilters ); @@ -173,13 +173,13 @@ private static ScanNode createFilterable( Compiler compiler, Scan alg, Immutable } - private static ScanNode createProjectableFilterable( Compiler compiler, Scan alg, ImmutableList filters, 
ImmutableIntList projects, ProjectableFilterableEntity pfTable ) { + private static ScanNode createProjectableFilterable( Compiler compiler, RelScan alg, ImmutableList filters, ImmutableIntList projects, ProjectableFilterableEntity pfTable ) { final DataContext root = compiler.getDataContext(); final ImmutableIntList originalProjects = projects; for ( ; ; ) { final List mutableFilters = Lists.newArrayList( filters ); final int[] projectInts; - if ( projects == null || projects.equals( Scan.identity( alg.getEntity() ) ) ) { + if ( projects == null || projects.equals( RelScan.identity( alg.getEntity() ) ) ) { projectInts = null; } else { projectInts = projects.toIntArray(); @@ -218,7 +218,7 @@ private static ScanNode createProjectableFilterable( Compiler compiler, Scan alg } - private static ScanNode createEnumerable( Compiler compiler, Scan alg, Enumerable enumerable, final ImmutableIntList acceptedProjects, List rejectedFilters, final ImmutableIntList rejectedProjects ) { + private static ScanNode createEnumerable( Compiler compiler, RelScan alg, Enumerable enumerable, final ImmutableIntList acceptedProjects, List rejectedFilters, final ImmutableIntList rejectedProjects ) { if ( !rejectedFilters.isEmpty() ) { final RexNode filter = RexUtil.composeConjunction( alg.getCluster().getRexBuilder(), rejectedFilters ); // Re-map filter for the projects that have been applied already diff --git a/core/src/main/java/org/polypheny/db/partition/FrequencyMap.java b/core/src/main/java/org/polypheny/db/partition/FrequencyMap.java index 001eec1ddd..a965dfd47c 100644 --- a/core/src/main/java/org/polypheny/db/partition/FrequencyMap.java +++ b/core/src/main/java/org/polypheny/db/partition/FrequencyMap.java @@ -16,7 +16,7 @@ package org.polypheny.db.partition; -import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.entity.logical.LogicalTable; public abstract class FrequencyMap { @@ -37,6 +37,6 @@ public static FrequencyMap setAndGetInstance( FrequencyMap frequencyMap ) { public abstract void terminate(); - public abstract void determinePartitionFrequency( CatalogTable table, long invocationTimestamp ); + public abstract void determinePartitionFrequency( LogicalTable table, long invocationTimestamp ); } diff --git a/core/src/main/java/org/polypheny/db/partition/PartitionManager.java b/core/src/main/java/org/polypheny/db/partition/PartitionManager.java index 7afd094817..ad5ecdb5f4 100644 --- a/core/src/main/java/org/polypheny/db/partition/PartitionManager.java +++ b/core/src/main/java/org/polypheny/db/partition/PartitionManager.java @@ -20,7 +20,7 @@ import java.util.Map; import org.polypheny.db.catalog.entity.CatalogColumn; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; -import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.type.PolyType; @@ -29,11 +29,11 @@ public interface PartitionManager { /** * Returns the Index of the partition where to place the object */ - long getTargetPartitionId( CatalogTable catalogTable, String columnValue ); + long getTargetPartitionId( LogicalTable catalogTable, String columnValue ); - boolean probePartitionGroupDistributionChange( CatalogTable catalogTable, int storeId, long columnId, int threshold ); + boolean probePartitionGroupDistributionChange( LogicalTable catalogTable, int storeId, long columnId, int threshold ); - Map> getRelevantPlacements( CatalogTable catalogTable, List partitionIds, List excludedAdapters ); + Map> 
getRelevantPlacements( LogicalTable catalogTable, List partitionIds, List excludedAdapters ); boolean validatePartitionGroupSetup( List> partitionGroupQualifiers, long numPartitionGroups, List partitionGroupNames, CatalogColumn partitionColumn ); @@ -44,7 +44,7 @@ public interface PartitionManager { * @param partitionIds List of all requested partitions ids * @return Returns map of AdapterId {@code ->} [Map PartitionsId {@code ->}needed Columns Placements] */ - Map>> getAllPlacements( CatalogTable catalogTable, List partitionIds ); + Map>> getAllPlacements( LogicalTable catalogTable, List partitionIds ); int getNumberOfPartitionsPerGroup( int numberOfPartitions ); diff --git a/core/src/main/java/org/polypheny/db/plan/AlgOptUtil.java b/core/src/main/java/org/polypheny/db/plan/AlgOptUtil.java index a977d3dd63..5bee679029 100644 --- a/core/src/main/java/org/polypheny/db/plan/AlgOptUtil.java +++ b/core/src/main/java/org/polypheny/db/plan/AlgOptUtil.java @@ -74,9 +74,9 @@ import org.polypheny.db.algebra.core.Join; import org.polypheny.db.algebra.core.JoinAlgType; import org.polypheny.db.algebra.core.Project; -import org.polypheny.db.algebra.core.Scan; import org.polypheny.db.algebra.core.SemiJoin; import org.polypheny.db.algebra.core.Sort; +import org.polypheny.db.algebra.core.relational.RelScan; import org.polypheny.db.algebra.externalize.AlgJsonWriter; import org.polypheny.db.algebra.externalize.AlgWriterImpl; import org.polypheny.db.algebra.externalize.AlgXmlWriter; @@ -199,7 +199,7 @@ public static List findAllTables( AlgNode alg ) { final Multimap, AlgNode> nodes = AlgMetadataQuery.instance().getNodeTypes( alg ); final List usedTables = new ArrayList<>(); for ( Entry, Collection> e : nodes.asMap().entrySet() ) { - if ( Scan.class.isAssignableFrom( e.getKey() ) ) { + if ( RelScan.class.isAssignableFrom( e.getKey() ) ) { for ( AlgNode node : e.getValue() ) { usedTables.add( node.getEntity() ); } diff --git a/core/src/main/java/org/polypheny/db/prepare/AlgOptEntityImpl.java b/core/src/main/java/org/polypheny/db/prepare/AlgOptEntityImpl.java index feba778ce4..bb92ae9525 100644 --- a/core/src/main/java/org/polypheny/db/prepare/AlgOptEntityImpl.java +++ b/core/src/main/java/org/polypheny/db/prepare/AlgOptEntityImpl.java @@ -61,7 +61,7 @@ import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; -import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptSchema; @@ -177,7 +177,7 @@ public T unwrap( Class clazz ) { } } if ( clazz == PolyphenyDbSchema.class ) { - return clazz.cast( Schemas.subSchema( ((PolyphenyDbCatalogReader) schema).rootSchema, List.of( catalogEntity.unwrap( CatalogTable.class ).getNamespaceName(), catalogEntity.name ) ) ); + return clazz.cast( Schemas.subSchema( ((PolyphenyDbCatalogReader) schema).rootSchema, List.of( catalogEntity.unwrap( LogicalTable.class ).getNamespaceName(), catalogEntity.name ) ) ); } return null; } @@ -359,7 +359,7 @@ public boolean supportsModality( Modality modality ) { @Override public List getQualifiedName() { - return List.of( catalogEntity.unwrap( CatalogTable.class ).getNamespaceName(), catalogEntity.name ); + return List.of( catalogEntity.unwrap( LogicalTable.class ).getNamespaceName(), catalogEntity.name ); } diff --git 
a/core/src/main/java/org/polypheny/db/prepare/PolyphenyDbCatalogReader.java b/core/src/main/java/org/polypheny/db/prepare/PolyphenyDbCatalogReader.java index e49a8d0c07..a4fc8d45d1 100644 --- a/core/src/main/java/org/polypheny/db/prepare/PolyphenyDbCatalogReader.java +++ b/core/src/main/java/org/polypheny/db/prepare/PolyphenyDbCatalogReader.java @@ -44,8 +44,8 @@ import org.polypheny.db.algebra.type.AlgDataTypeFactory; import org.polypheny.db.catalog.entity.CatalogCollection; import org.polypheny.db.catalog.entity.CatalogEntity; -import org.polypheny.db.catalog.entity.CatalogGraphDatabase; -import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.entity.logical.LogicalGraph; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.nodes.Identifier; import org.polypheny.db.nodes.Operator; import org.polypheny.db.plan.AlgOptEntity; @@ -94,14 +94,14 @@ public AlgOptEntity getCollection( final List names ) { @Override - public CatalogGraphDatabase getGraph( final String name ) { + public LogicalGraph getGraph( final String name ) { return rootSchema.getGraph( List.of( name ) ); } @Override public AlgDataType getNamedType( Identifier typeName ) { - CatalogTable table = rootSchema.getTable( typeName.getNames() ); + LogicalTable table = rootSchema.getTable( typeName.getNames() ); if ( table != null ) { return table.getRowType(); } else { diff --git a/core/src/main/java/org/polypheny/db/prepare/Prepare.java b/core/src/main/java/org/polypheny/db/prepare/Prepare.java index 1b12582104..81920d9f31 100644 --- a/core/src/main/java/org/polypheny/db/prepare/Prepare.java +++ b/core/src/main/java/org/polypheny/db/prepare/Prepare.java @@ -48,12 +48,14 @@ import org.polypheny.db.algebra.constant.ExplainFormat; import org.polypheny.db.algebra.constant.ExplainLevel; import org.polypheny.db.algebra.constant.Kind; -import org.polypheny.db.algebra.core.Scan; -import org.polypheny.db.algebra.logical.relational.LogicalModify; +import org.polypheny.db.algebra.core.relational.RelScan; +import org.polypheny.db.algebra.core.common.Modify; +import org.polypheny.db.algebra.logical.relational.LogicalRelModify; import org.polypheny.db.algebra.operators.OperatorTable; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeField; -import org.polypheny.db.catalog.entity.CatalogGraphDatabase; +import org.polypheny.db.catalog.entity.logical.LogicalGraph; +import org.polypheny.db.catalog.refactor.TranslatableEntity; import org.polypheny.db.languages.NodeToAlgConverter; import org.polypheny.db.nodes.Node; import org.polypheny.db.nodes.validate.Validator; @@ -149,10 +151,10 @@ protected AlgRoot optimize( AlgRoot root ) { final AlgVisitor visitor = new AlgVisitor() { @Override public void visit( AlgNode node, int ordinal, AlgNode parent ) { - if ( node instanceof Scan ) { + if ( node instanceof RelScan ) { final AlgOptCluster cluster = node.getCluster(); final ToAlgContext context = () -> cluster; - final AlgNode r = node.getEntity().toAlg( context, node.getTraitSet() ); + final AlgNode r = node.getEntity().unwrap( TranslatableEntity.class ).toAlg( context, node.getTraitSet() ); planner.registerClass( r ); } super.visit( node, ordinal, parent ); @@ -200,20 +202,19 @@ protected AlgTraitSet getDesiredRootTraitSet( AlgRoot root ) { protected abstract PreparedResult implement( AlgRoot root ); - - protected LogicalModify.Operation mapTableModOp( boolean isDml, Kind Kind ) { + protected LogicalRelModify.Operation mapTableModOp( boolean 
isDml, Kind Kind ) { if ( !isDml ) { return null; } switch ( Kind ) { case INSERT: - return LogicalModify.Operation.INSERT; + return Modify.Operation.INSERT; case DELETE: - return LogicalModify.Operation.DELETE; + return Modify.Operation.DELETE; case MERGE: - return LogicalModify.Operation.MERGE; + return Modify.Operation.MERGE; case UPDATE: - return LogicalModify.Operation.UPDATE; + return Modify.Operation.UPDATE; default: return null; } @@ -247,7 +248,7 @@ public interface CatalogReader extends AlgOptSchema, ValidatorCatalogReader, Ope AlgOptEntity getCollection( List names ); - CatalogGraphDatabase getGraph( String name ); + LogicalGraph getGraph( String name ); ThreadLocal THREAD_LOCAL = new ThreadLocal<>(); @@ -401,7 +402,7 @@ public abstract static class PreparedResultImpl implements PreparedResult, Typed protected final AlgDataType parameterRowType; protected final AlgDataType rowType; protected final boolean isDml; - protected final LogicalModify.Operation tableModOp; + protected final LogicalRelModify.Operation tableModOp; protected final List> fieldOrigins; protected final List collations; @@ -412,7 +413,7 @@ public PreparedResultImpl( List> fieldOrigins, List collations, AlgNode rootAlg, - LogicalModify.Operation tableModOp, + LogicalRelModify.Operation tableModOp, boolean isDml ) { this.rowType = Objects.requireNonNull( rowType ); this.parameterRowType = Objects.requireNonNull( parameterRowType ); diff --git a/core/src/main/java/org/polypheny/db/processing/DataMigrator.java b/core/src/main/java/org/polypheny/db/processing/DataMigrator.java index 1830269180..51ad4cb73e 100644 --- a/core/src/main/java/org/polypheny/db/processing/DataMigrator.java +++ b/core/src/main/java/org/polypheny/db/processing/DataMigrator.java @@ -22,8 +22,8 @@ import org.polypheny.db.catalog.entity.CatalogAdapter; import org.polypheny.db.catalog.entity.CatalogColumn; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; -import org.polypheny.db.catalog.entity.CatalogGraphDatabase; -import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.entity.logical.LogicalGraph; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.transaction.Statement; import org.polypheny.db.transaction.Transaction; @@ -38,7 +38,7 @@ void copyData( /** * Currently used to transfer data if a partitioned table is about to be merged. - * For Table Partitioning use {@link #copyPartitionData(Transaction, CatalogAdapter, CatalogTable, CatalogTable, List, List, List)} } instead + * For Table Partitioning use {@link #copyPartitionData(Transaction, CatalogAdapter, LogicalTable, LogicalTable, List, List, List)} } instead * * @param transaction Transactional scope * @param store Target Store where data should be migrated to @@ -51,13 +51,13 @@ void copySelectiveData( Transaction transaction, CatalogAdapter store, - CatalogTable sourceTable, CatalogTable targetTable, List columns, + LogicalTable sourceTable, LogicalTable targetTable, List columns, Map> placementDistribution, List targetPartitionIds ); /** * Currently used to transfer data if an unpartitioned table is about to be partitioned.
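* <p>A hypothetical call site under the new signature; every variable below is illustrative and not part of this interface:
* <pre>{@code
* LogicalTable source = ...; // the still unpartitioned table
* LogicalTable target = ...; // its partitioned successor
* migrator.copyPartitionData( transaction, store, source, target, columns, sourcePartitionIds, targetPartitionIds );
* }</pre>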
- * For Table Merge use {@link #copySelectiveData(Transaction, CatalogAdapter, CatalogTable, CatalogTable, List, Map, List)} } instead + * For Table Merge use {@link #copySelectiveData(Transaction, CatalogAdapter, LogicalTable, LogicalTable, List, Map, List)} } instead * * @param transaction Transactional scope * @param store Target Store where data should be migrated to @@ -70,8 +70,8 @@ void copySelectiveData( void copyPartitionData( Transaction transaction, CatalogAdapter store, - CatalogTable sourceTable, - CatalogTable targetTable, + LogicalTable sourceTable, + LogicalTable targetTable, List columns, List sourcePartitionIds, List targetPartitionIds ); @@ -86,6 +86,6 @@ void copyPartitionData( AlgRoot getSourceIterator( Statement statement, Map> placementDistribution ); - void copyGraphData( CatalogGraphDatabase graph, Transaction transaction, Integer existingAdapterId, CatalogAdapter adapter ); + void copyGraphData( LogicalGraph graph, Transaction transaction, Integer existingAdapterId, CatalogAdapter adapter ); } diff --git a/core/src/main/java/org/polypheny/db/processing/DeepCopyShuttle.java b/core/src/main/java/org/polypheny/db/processing/DeepCopyShuttle.java index 8affb22d05..7ea4a851a7 100644 --- a/core/src/main/java/org/polypheny/db/processing/DeepCopyShuttle.java +++ b/core/src/main/java/org/polypheny/db/processing/DeepCopyShuttle.java @@ -20,7 +20,7 @@ import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.AlgShuttleImpl; import org.polypheny.db.algebra.core.Project; -import org.polypheny.db.algebra.core.Scan; +import org.polypheny.db.algebra.core.relational.RelScan; import org.polypheny.db.algebra.core.TableFunctionScan; import org.polypheny.db.algebra.core.Values; import org.polypheny.db.algebra.logical.common.LogicalConditionalExecute; @@ -50,7 +50,7 @@ private AlgTraitSet copy( final AlgTraitSet other ) { @Override - public AlgNode visit( Scan scan ) { + public AlgNode visit( RelScan scan ) { final AlgNode node = super.visit( scan ); return new LogicalRelScan( node.getCluster(), copy( node.getTraitSet() ), node.getEntity() ); } diff --git a/core/src/main/java/org/polypheny/db/processing/LogicalAlgAnalyzeShuttle.java b/core/src/main/java/org/polypheny/db/processing/LogicalAlgAnalyzeShuttle.java index f3ebc8ccdc..8c7a61294b 100644 --- a/core/src/main/java/org/polypheny/db/processing/LogicalAlgAnalyzeShuttle.java +++ b/core/src/main/java/org/polypheny/db/processing/LogicalAlgAnalyzeShuttle.java @@ -28,7 +28,7 @@ import lombok.Getter; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.AlgShuttleImpl; -import org.polypheny.db.algebra.core.Scan; +import org.polypheny.db.algebra.core.relational.RelScan; import org.polypheny.db.algebra.logical.common.LogicalConstraintEnforcer; import org.polypheny.db.algebra.logical.document.LogicalDocumentAggregate; import org.polypheny.db.algebra.logical.document.LogicalDocumentFilter; @@ -54,12 +54,12 @@ import org.polypheny.db.algebra.logical.relational.LogicalJoin; import org.polypheny.db.algebra.logical.relational.LogicalMatch; import org.polypheny.db.algebra.logical.relational.LogicalMinus; -import org.polypheny.db.algebra.logical.relational.LogicalModify; +import org.polypheny.db.algebra.logical.relational.LogicalRelModify; import org.polypheny.db.algebra.logical.relational.LogicalProject; import org.polypheny.db.algebra.logical.relational.LogicalRelScan; import org.polypheny.db.algebra.logical.relational.LogicalSort; import org.polypheny.db.algebra.logical.relational.LogicalUnion; -import 
org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.prepare.AlgOptEntityImpl; import org.polypheny.db.transaction.Statement; @@ -284,7 +284,7 @@ public AlgNode visit( LogicalMatch match ) { @Override - public AlgNode visit( Scan scan ) { + public AlgNode visit( RelScan scan ) { hashBasis.add( "Scan#" + scan.getEntity().getCatalogEntity().id ); // get available columns for every table scan this.getAvailableColumns( scan ); @@ -373,7 +373,7 @@ public AlgNode visit( LogicalExchange exchange ) { @Override - public AlgNode visit( LogicalModify modify ) { + public AlgNode visit( LogicalRelModify modify ) { hashBasis.add( "LogicalModify" ); // e.g. inserts only have underlying values and need to attach the table correctly this.getAvailableColumns( modify ); @@ -390,7 +390,7 @@ public AlgNode visit( AlgNode other ) { private void getAvailableColumns( AlgNode scan ) { this.entityId.add( scan.getEntity().getCatalogEntity().id ); - final CatalogTable table = (CatalogTable) scan.getEntity().getCatalogEntity(); + final LogicalTable table = (LogicalTable) scan.getEntity().getCatalogEntity(); if ( table != null ) { final List ids = table.fieldIds; final List names = table.getColumnNames(); @@ -410,11 +410,11 @@ private void getPartitioningInfo( LogicalFilter filter ) { return; } - handleIfPartitioned( filter, (CatalogTable) table.getCatalogEntity() ); + handleIfPartitioned( filter, (LogicalTable) table.getCatalogEntity() ); } - private void handleIfPartitioned( AlgNode node, CatalogTable catalogTable ) { + private void handleIfPartitioned( AlgNode node, LogicalTable catalogTable ) { // Only if table is partitioned if ( catalogTable.partitionProperty.isPartitioned ) { WhereClauseVisitor whereClauseVisitor = new WhereClauseVisitor( @@ -445,7 +445,7 @@ private void getPartitioningInfo( LogicalDocumentFilter filter ) { return; } - handleIfPartitioned( filter, (CatalogTable) table.getCatalogEntity() ); + handleIfPartitioned( filter, (LogicalTable) table.getCatalogEntity() ); } diff --git a/core/src/main/java/org/polypheny/db/processing/QueryProcessorHelpers.java b/core/src/main/java/org/polypheny/db/processing/QueryProcessorHelpers.java index a807fb436c..20802fc73e 100644 --- a/core/src/main/java/org/polypheny/db/processing/QueryProcessorHelpers.java +++ b/core/src/main/java/org/polypheny/db/processing/QueryProcessorHelpers.java @@ -27,7 +27,8 @@ import org.apache.calcite.linq4j.Ord; import org.polypheny.db.adapter.java.JavaTypeFactory; import org.polypheny.db.algebra.constant.Kind; -import org.polypheny.db.algebra.logical.relational.LogicalModify; +import org.polypheny.db.algebra.core.common.Modify; +import org.polypheny.db.algebra.logical.relational.LogicalRelModify; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; import org.polypheny.db.algebra.type.AlgDataTypeField; @@ -79,19 +80,19 @@ public static StatementType getStatementType( PreparedResult preparedResult ) { } - public static LogicalModify.Operation mapTableModOp( boolean isDml, Kind sqlKind ) { + public static LogicalRelModify.Operation mapTableModOp( boolean isDml, Kind sqlKind ) { if ( !isDml ) { return null; } switch ( sqlKind ) { case INSERT: - return LogicalModify.Operation.INSERT; + return Modify.Operation.INSERT; case DELETE: - return LogicalModify.Operation.DELETE; + return Modify.Operation.DELETE; case MERGE: - return LogicalModify.Operation.MERGE; + return Modify.Operation.MERGE; case UPDATE: - return 
LogicalModify.Operation.UPDATE; + return Modify.Operation.UPDATE; default: return null; } diff --git a/core/src/main/java/org/polypheny/db/rex/RexTableInputRef.java b/core/src/main/java/org/polypheny/db/rex/RexTableInputRef.java index b5f2afb507..165a55a685 100644 --- a/core/src/main/java/org/polypheny/db/rex/RexTableInputRef.java +++ b/core/src/main/java/org/polypheny/db/rex/RexTableInputRef.java @@ -39,7 +39,7 @@ import org.polypheny.db.algebra.metadata.BuiltInMetadata.AllPredicates; import org.polypheny.db.algebra.metadata.BuiltInMetadata.ExpressionLineage; import org.polypheny.db.algebra.type.AlgDataType; -import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.plan.AlgOptEntity; @@ -163,7 +163,7 @@ public AlgOptEntity getTable() { public List getQualifiedName() { - return List.of( table.getCatalogEntity().unwrap( CatalogTable.class ).getNamespaceName(), table.getCatalogEntity().name ); + return List.of( table.getCatalogEntity().unwrap( LogicalTable.class ).getNamespaceName(), table.getCatalogEntity().name ); } diff --git a/core/src/main/java/org/polypheny/db/routing/DmlRouter.java b/core/src/main/java/org/polypheny/db/routing/DmlRouter.java index 1bc9be8587..3908e86917 100644 --- a/core/src/main/java/org/polypheny/db/routing/DmlRouter.java +++ b/core/src/main/java/org/polypheny/db/routing/DmlRouter.java @@ -20,8 +20,8 @@ import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.logical.document.LogicalDocumentModify; import org.polypheny.db.algebra.logical.lpg.LogicalLpgModify; -import org.polypheny.db.algebra.logical.relational.LogicalModify; -import org.polypheny.db.catalog.entity.CatalogGraphDatabase; +import org.polypheny.db.algebra.logical.relational.LogicalRelModify; +import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.transaction.Statement; @@ -33,7 +33,7 @@ public interface DmlRouter { /** * Routes DML queries and returns a RelNode. */ - AlgNode routeDml( LogicalModify node, Statement statement ); + AlgNode routeDml( LogicalRelModify node, Statement statement ); /** * Routes conditional executes and directly returns a RelNode. 
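* <p>A sketch of handing a relational DML plan to the router under the renamed type; the modify node and the statement are assumed to come from the planner and the active transaction:
* <pre>{@code
* AlgNode routed = dmlRouter.routeDml( logicalRelModify, statement );
* }</pre>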
@@ -48,6 +48,6 @@ public interface DmlRouter { AlgNode routeDocumentDml( LogicalDocumentModify alg, Statement statement, LogicalQueryInformation queryInformation, Integer adapterId ); - AlgNode routeGraphDml( LogicalLpgModify alg, Statement statement, CatalogGraphDatabase catalogGraph, List placements ); + AlgNode routeGraphDml( LogicalLpgModify alg, Statement statement, LogicalGraph catalogGraph, List placements ); } diff --git a/core/src/main/java/org/polypheny/db/runtime/functions/CrossModelFunctions.java b/core/src/main/java/org/polypheny/db/runtime/functions/CrossModelFunctions.java index 3dce641da2..17dc10f951 100644 --- a/core/src/main/java/org/polypheny/db/runtime/functions/CrossModelFunctions.java +++ b/core/src/main/java/org/polypheny/db/runtime/functions/CrossModelFunctions.java @@ -34,7 +34,8 @@ import org.bson.BsonValue; import org.polypheny.db.adapter.DataContext; import org.polypheny.db.adapter.java.JavaTypeFactory; -import org.polypheny.db.algebra.core.Modify.Operation; +import org.polypheny.db.algebra.core.common.Modify.Operation; +import org.polypheny.db.algebra.core.common.Modify; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.runtime.PolyCollections.PolyDictionary; import org.polypheny.db.schema.graph.PolyEdge; @@ -60,10 +61,10 @@ private CrossModelFunctions() { */ @SuppressWarnings("unused") public static Enumerable sendGraphModifies( DataContext context, List>> enumerables, List order, Operation operation ) { - if ( operation == Operation.DELETE ) { + if ( operation == Modify.Operation.DELETE ) { return sendDeletes( context, enumerables, order ); } - if ( operation == Operation.UPDATE ) { + if ( operation == Modify.Operation.UPDATE ) { return sendUpdates( context, enumerables, order ); } diff --git a/core/src/main/java/org/polypheny/db/schema/LogicalEntity.java b/core/src/main/java/org/polypheny/db/schema/LogicalEntity.java index 0c8038f61e..230e849e89 100644 --- a/core/src/main/java/org/polypheny/db/schema/LogicalEntity.java +++ b/core/src/main/java/org/polypheny/db/schema/LogicalEntity.java @@ -25,9 +25,9 @@ import org.polypheny.db.adapter.DataContext; import org.polypheny.db.adapter.java.AbstractQueryableEntity; import org.polypheny.db.algebra.AlgNode; -import org.polypheny.db.algebra.core.Modify; -import org.polypheny.db.algebra.core.Modify.Operation; -import org.polypheny.db.algebra.logical.relational.LogicalModify; +import org.polypheny.db.algebra.core.relational.RelModify; +import org.polypheny.db.algebra.core.common.Modify.Operation; +import org.polypheny.db.algebra.logical.relational.LogicalRelModify; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; import org.polypheny.db.algebra.type.AlgProtoDataType; @@ -86,7 +86,7 @@ public String toString() { @Override - public Modify toModificationAlg( + public RelModify toModificationAlg( AlgOptCluster cluster, AlgOptEntity table, CatalogReader catalogReader, @@ -95,7 +95,7 @@ public Modify toModificationAlg( List updateColumnList, List sourceExpressionList, boolean flattened ) { - return new LogicalModify( + return new LogicalRelModify( cluster, cluster.traitSetOf( Convention.NONE ), table, diff --git a/core/src/main/java/org/polypheny/db/schema/ModifiableCollection.java b/core/src/main/java/org/polypheny/db/schema/ModifiableCollection.java index 399a9a0aa9..9810e00dbc 100644 --- a/core/src/main/java/org/polypheny/db/schema/ModifiableCollection.java +++ b/core/src/main/java/org/polypheny/db/schema/ModifiableCollection.java @@ -18,7 
+18,7 @@ import java.util.List; import org.polypheny.db.algebra.AlgNode; -import org.polypheny.db.algebra.core.Modify.Operation; +import org.polypheny.db.algebra.core.common.Modify.Operation; import org.polypheny.db.algebra.core.document.DocumentModify; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgOptEntity; diff --git a/core/src/main/java/org/polypheny/db/schema/ModifiableEntity.java b/core/src/main/java/org/polypheny/db/schema/ModifiableEntity.java index 5d8f5f85b3..38e893abf4 100644 --- a/core/src/main/java/org/polypheny/db/schema/ModifiableEntity.java +++ b/core/src/main/java/org/polypheny/db/schema/ModifiableEntity.java @@ -36,7 +36,7 @@ import java.util.List; import org.polypheny.db.algebra.AlgNode; -import org.polypheny.db.algebra.core.Modify; +import org.polypheny.db.algebra.core.relational.RelModify; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.prepare.Prepare.CatalogReader; @@ -51,12 +51,12 @@ public interface ModifiableEntity extends QueryableEntity { /** * Creates a relational expression that modifies this table. */ - Modify toModificationAlg( + RelModify toModificationAlg( AlgOptCluster cluster, AlgOptEntity table, CatalogReader catalogReader, AlgNode child, - Modify.Operation operation, + RelModify.Operation operation, List updateColumnList, List sourceExpressionList, boolean flattened ); diff --git a/core/src/main/java/org/polypheny/db/schema/PolySchemaBuilder.java b/core/src/main/java/org/polypheny/db/schema/PolySchemaBuilder.java index 73048f221b..fa8f909499 100644 --- a/core/src/main/java/org/polypheny/db/schema/PolySchemaBuilder.java +++ b/core/src/main/java/org/polypheny/db/schema/PolySchemaBuilder.java @@ -43,12 +43,12 @@ import org.polypheny.db.catalog.entity.CatalogDatabase; import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.catalog.entity.CatalogEntityPlacement; -import org.polypheny.db.catalog.entity.CatalogGraphDatabase; +import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.catalog.entity.CatalogGraphPlacement; import org.polypheny.db.catalog.entity.CatalogKey.EnforcementTime; import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; import org.polypheny.db.catalog.entity.CatalogSchema; -import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.config.RuntimeConfig; import org.polypheny.db.schema.Namespace.Schema; import org.polypheny.db.schema.impl.AbstractNamespace; @@ -126,7 +126,7 @@ private Map, CatalogEntity> buildRelationalLogical( Catalog cat continue; } - for ( CatalogTable catalogTable : catalog.getTables( catalogSchema.id, null ) ) { + for ( LogicalTable catalogTable : catalog.getTables( catalogSchema.id, null ) ) { entities.put( Pair.of( catalogSchema.id, catalogTable.id ), catalogTable ); } } @@ -153,7 +153,7 @@ private Map, CatalogEntity> buildDocumentLogical( Catalog catal private Map, CatalogEntityPlacement> buildPhysicalGraphs( Catalog catalog, CatalogDatabase catalogDatabase ) { Map, CatalogEntityPlacement> placements = new HashMap<>(); // Build adapter schema (physical schema) GRAPH - for ( CatalogGraphDatabase graph : catalog.getGraphs( catalogDatabase.id, null ) ) { + for ( LogicalGraph graph : catalog.getGraphs( catalogDatabase.id, null ) ) { for ( int adapterId : graph.placements ) { CatalogGraphPlacement placement = catalog.getGraphPlacement( graph.id, adapterId ); @@ -256,7 +256,7 @@ private Map, 
CatalogEntityPlacement> buildPhysicalTable } - private void buildView( Map tableMap, SchemaPlus s, CatalogTable catalogTable, List columnNames, Builder fieldInfo, List columnIds ) { + private void buildView( Map tableMap, SchemaPlus s, LogicalTable catalogTable, List columnNames, Builder fieldInfo, List columnIds ) { LogicalRelView view = new LogicalRelView( catalogTable.id, catalogTable.getNamespaceName(), @@ -269,7 +269,7 @@ private void buildView( Map tableMap, SchemaPlus s, Catal } - private void buildEntity( Catalog catalog, CatalogSchema catalogSchema, Map tableMap, SchemaPlus s, CatalogTable catalogTable, List columnNames, AlgDataType rowType, List columnIds ) { + private void buildEntity( Catalog catalog, CatalogSchema catalogSchema, Map tableMap, SchemaPlus s, LogicalTable catalogTable, List columnNames, AlgDataType rowType, List columnIds ) { LogicalEntity table; if ( catalogSchema.namespaceType == NamespaceType.RELATIONAL ) { table = new LogicalEntity( diff --git a/core/src/main/java/org/polypheny/db/schema/PolyphenyDbSchema.java b/core/src/main/java/org/polypheny/db/schema/PolyphenyDbSchema.java index 3dca5a90f2..ee2b851ac2 100644 --- a/core/src/main/java/org/polypheny/db/schema/PolyphenyDbSchema.java +++ b/core/src/main/java/org/polypheny/db/schema/PolyphenyDbSchema.java @@ -24,9 +24,9 @@ import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.Catalog.Pattern; import org.polypheny.db.catalog.entity.CatalogCollection; -import org.polypheny.db.catalog.entity.CatalogGraphDatabase; +import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.catalog.entity.CatalogNamespace; -import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.schema.Namespace.Schema; import org.polypheny.db.schema.impl.AbstractNamespace; import org.polypheny.db.util.BuiltInMethod; @@ -34,7 +34,7 @@ public interface PolyphenyDbSchema { - default CatalogTable getTable( List names ) { + default LogicalTable getTable( List names ) { switch ( names.size() ) { case 3: return Catalog.getInstance().getTables( Pattern.of( names.get( 0 ) ), Pattern.of( names.get( 1 ) ), Pattern.of( names.get( 2 ) ) ).get( 0 ); @@ -65,7 +65,7 @@ default CatalogCollection getCollection( List names ) { } } - default CatalogGraphDatabase getGraph( List names ) { + default LogicalGraph getGraph( List names ) { if ( names.size() == 1 ) {// TODO add methods return Catalog.getInstance().getGraphs( Catalog.defaultDatabaseId, Pattern.of( names.get( 0 ) ) ).get( 0 ); diff --git a/core/src/main/java/org/polypheny/db/schema/Schemas.java b/core/src/main/java/org/polypheny/db/schema/Schemas.java index 705ea3c01b..0d62ae90e5 100644 --- a/core/src/main/java/org/polypheny/db/schema/Schemas.java +++ b/core/src/main/java/org/polypheny/db/schema/Schemas.java @@ -54,7 +54,7 @@ import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; import org.polypheny.db.algebra.type.AlgProtoDataType; -import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.config.PolyphenyDbConnectionConfig; import org.polypheny.db.config.PolyphenyDbConnectionConfigImpl; import org.polypheny.db.config.PolyphenyDbConnectionProperty; @@ -203,7 +203,7 @@ public static Queryable queryable( DataContext root, Class clazz, Iter */ public static Queryable queryable( DataContext root, PolyphenyDbSchema schema, Class clazz, String tableName ) { 
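// Resolution now goes through the catalog: the schema returns a LogicalTable for the
// given name, which is then exposed as a Queryable over the supplied DataContext.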
//QueryableEntity table = (QueryableEntity) schema.getEntity( tableName ); - CatalogTable table = schema.getTable( List.of( tableName ) ); + LogicalTable table = schema.getTable( List.of( tableName ) ); return table.asQueryable( root, schema, tableName ); } @@ -244,7 +244,7 @@ private static int[] identity( int count ) { /** * Returns an {@link org.apache.calcite.linq4j.Enumerable} over object arrays, given a fully-qualified table name which leads to a {@link ScannableEntity}. */ - public static CatalogTable table( DataContext root, String... names ) { + public static LogicalTable table( DataContext root, String... names ) { PolyphenyDbSchema schema = root.getRootSchema(); return schema.getTable( List.of( names ) ); } diff --git a/core/src/main/java/org/polypheny/db/schema/TranslatableGraph.java b/core/src/main/java/org/polypheny/db/schema/TranslatableGraph.java index 1b221bb7eb..0c50d47234 100644 --- a/core/src/main/java/org/polypheny/db/schema/TranslatableGraph.java +++ b/core/src/main/java/org/polypheny/db/schema/TranslatableGraph.java @@ -20,7 +20,7 @@ import org.polypheny.db.plan.AlgOptEntity.ToAlgContext; import org.polypheny.db.schema.graph.Graph; -public interface TranslatableGraph extends Graph { +public interface TranslatableGraph { /** * Converts this table into a {@link AlgNode relational expression}. diff --git a/core/src/main/java/org/polypheny/db/schema/graph/ModifiableGraph.java b/core/src/main/java/org/polypheny/db/schema/graph/ModifiableGraph.java index 0790d0e6e7..b2fc06ab0b 100644 --- a/core/src/main/java/org/polypheny/db/schema/graph/ModifiableGraph.java +++ b/core/src/main/java/org/polypheny/db/schema/graph/ModifiableGraph.java @@ -19,11 +19,11 @@ import java.util.List; import org.apache.calcite.linq4j.tree.Expression; import org.polypheny.db.algebra.AlgNode; -import org.polypheny.db.algebra.core.Modify.Operation; +import org.polypheny.db.algebra.core.common.Modify.Operation; import org.polypheny.db.algebra.core.lpg.LpgModify; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; -import org.polypheny.db.catalog.entity.CatalogGraphDatabase; +import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.prepare.PolyphenyDbCatalogReader; @@ -32,9 +32,9 @@ import org.polypheny.db.schema.Statistic; -public interface ModifiableGraph extends Graph { +public interface ModifiableGraph { - LpgModify toModificationAlg( AlgOptCluster cluster, AlgTraitSet traits, CatalogGraphDatabase graph, PolyphenyDbCatalogReader catalogReader, AlgNode input, Operation operation, List ids, List operations ); + LpgModify toModificationAlg( AlgOptCluster cluster, AlgTraitSet traits, LogicalGraph graph, PolyphenyDbCatalogReader catalogReader, AlgNode input, Operation operation, List ids, List operations ); Expression getExpression( SchemaPlus schema, String tableName, Class clazz ); diff --git a/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java b/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java index 6065f38fee..3a6e437fe7 100644 --- a/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java +++ b/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java @@ -82,11 +82,11 @@ import org.polypheny.db.algebra.core.Match; import org.polypheny.db.algebra.core.Minus; import org.polypheny.db.algebra.core.Project; -import org.polypheny.db.algebra.core.Scan; import org.polypheny.db.algebra.core.SemiJoin; import 
org.polypheny.db.algebra.core.Sort; import org.polypheny.db.algebra.core.Union; import org.polypheny.db.algebra.core.Values; +import org.polypheny.db.algebra.core.relational.RelScan; import org.polypheny.db.algebra.fun.AggFunction; import org.polypheny.db.algebra.logical.document.LogicalDocumentProject; import org.polypheny.db.algebra.logical.document.LogicalDocumentScan; @@ -103,7 +103,8 @@ import org.polypheny.db.algebra.type.AlgDataTypeFieldImpl; import org.polypheny.db.algebra.type.StructKind; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.entity.CatalogGraphDatabase; +import org.polypheny.db.catalog.entity.CatalogEntity; +import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.languages.OperatorRegistry; import org.polypheny.db.languages.QueryLanguage; import org.polypheny.db.nodes.Operator; @@ -164,7 +165,7 @@ public class AlgBuilder { @Getter protected final AlgOptCluster cluster; - protected final AlgOptSchema algOptSchema; + protected final PolyphenyDbSchema schema; private final AlgFactories.FilterFactory filterFactory; private final AlgFactories.ProjectFactory projectFactory; private final AlgFactories.AggregateFactory aggregateFactory; @@ -184,9 +185,9 @@ public class AlgBuilder { private final RexSimplify simplifier; - protected AlgBuilder( Context context, AlgOptCluster cluster, AlgOptSchema algOptSchema ) { + protected AlgBuilder( Context context, AlgOptCluster cluster, PolyphenyDbSchema schema ) { this.cluster = cluster; - this.algOptSchema = algOptSchema; + this.schema = schema; if ( context == null ) { context = Contexts.EMPTY_CONTEXT; } @@ -273,17 +274,17 @@ public int stackSize() { */ public static AlgBuilder create( FrameworkConfig config ) { final AlgOptCluster[] clusters = { null }; - final AlgOptSchema[] algOptSchemas = { null }; + final PolyphenyDbSchema[] schemas = { null }; Frameworks.withPrepare( new Frameworks.PrepareAction( config ) { @Override - public Void apply( AlgOptCluster cluster, AlgOptSchema algOptSchema, PolyphenyDbSchema rootSchema ) { + public Void apply( AlgOptCluster cluster, PolyphenyDbSchema rootSchema ) { clusters[0] = cluster; - algOptSchemas[0] = algOptSchema; + schemas[0] = rootSchema; return null; } } ); - return new AlgBuilder( config.getContext(), clusters[0], algOptSchemas[0] ); + return new AlgBuilder( config.getContext(), clusters[0], schemas[0] ); } @@ -295,7 +296,7 @@ public static AlgBuilder create( Statement statement ) { public static AlgBuilder create( Statement statement, AlgOptCluster cluster ) { - return new AlgBuilder( Contexts.EMPTY_CONTEXT, cluster, statement.getTransaction().getCatalogReader() ); + return new AlgBuilder( Contexts.EMPTY_CONTEXT, cluster, statement.getTransaction().getCatalogReader().getRootSchema() ); } @@ -1317,7 +1318,7 @@ public RexNode patternExclude( RexNode node ) { /** - * Creates a {@link Scan} of the table with a given name. + * Creates a {@link RelScan} of the table with a given name. * * Throws if the table does not exist. 
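* <p>A minimal usage sketch; the namespace and table names are illustrative:
* <pre>{@code
* AlgBuilder builder = AlgBuilder.create( statement );
* AlgNode scan = builder
*         .scan( ImmutableList.of( "public", "emps" ) ) // resolved via schema.getTable( names )
*         .build();
* }</pre>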
* @@ -1327,7 +1328,7 @@ public RexNode patternExclude( RexNode node ) { */ public AlgBuilder scan( Iterable tableNames ) { final List names = ImmutableList.copyOf( tableNames ); - final AlgOptEntity algOptEntity = algOptSchema.getTableForMember( names ); + final CatalogEntity algOptEntity = schema.getTable( names ); if ( algOptEntity == null ) { throw RESOURCE.tableNotFound( String.join( ".", names ) ).ex(); } @@ -1338,16 +1339,16 @@ public AlgBuilder scan( Iterable tableNames ) { } - public AlgBuilder scan( @Nonnull AlgOptEntity algOptEntity ) { - final AlgNode scan = scanFactory.createScan( cluster, algOptEntity ); + public AlgBuilder scan( @Nonnull CatalogEntity entity ) { + final AlgNode scan = scanFactory.createScan( cluster, entity ); push( scan ); - rename( algOptEntity.getRowType().getFieldNames() ); + rename( entity.getRowType().getFieldNames() ); return this; } /** - * Creates a {@link Scan} of the table with a given name. + * Creates a {@link RelScan} of the table with a given name. * * Throws if the table does not exist. * @@ -1373,7 +1374,7 @@ public AlgBuilder documentProject( List projects, List { - R apply( AlgOptCluster cluster, AlgOptSchema algOptSchema, PolyphenyDbSchema rootSchema ); + R apply( AlgOptCluster cluster, PolyphenyDbSchema rootSchema ); } @@ -118,7 +118,6 @@ public FrameworkConfig getConfig() { public abstract R apply( AlgOptCluster cluster, - AlgOptSchema algOptSchema, PolyphenyDbSchema rootSchema ); } @@ -135,8 +134,8 @@ public static R withPlanner( final PlannerAction action, final FrameworkC return withPrepare( new Frameworks.PrepareAction<>( config ) { @Override - public R apply( AlgOptCluster cluster, AlgOptSchema algOptSchema, PolyphenyDbSchema rootSchema ) { - return action.apply( cluster, algOptSchema, rootSchema ); + public R apply( AlgOptCluster cluster, PolyphenyDbSchema rootSchema ) { + return action.apply( cluster, rootSchema ); } } ); } diff --git a/core/src/main/java/org/polypheny/db/transaction/Transaction.java b/core/src/main/java/org/polypheny/db/transaction/Transaction.java index c4e07a6c25..2800ea35cc 100644 --- a/core/src/main/java/org/polypheny/db/transaction/Transaction.java +++ b/core/src/main/java/org/polypheny/db/transaction/Transaction.java @@ -23,7 +23,7 @@ import org.polypheny.db.adapter.Adapter; import org.polypheny.db.adapter.java.JavaTypeFactory; import org.polypheny.db.catalog.entity.CatalogSchema; -import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.information.InformationManager; import org.polypheny.db.languages.QueryLanguage; import org.polypheny.db.prepare.PolyphenyDbCatalogReader; @@ -82,7 +82,7 @@ public interface Transaction { boolean getUseCache(); - Set getCatalogTables(); + Set getCatalogTables(); void setAcceptsOutdated( boolean acceptsOutdated ); diff --git a/core/src/main/java/org/polypheny/db/util/BuiltInMethod.java b/core/src/main/java/org/polypheny/db/util/BuiltInMethod.java index 16b23c8266..645bac35ac 100644 --- a/core/src/main/java/org/polypheny/db/util/BuiltInMethod.java +++ b/core/src/main/java/org/polypheny/db/util/BuiltInMethod.java @@ -79,7 +79,7 @@ import org.polypheny.db.adapter.enumerable.lpg.EnumerableLpgMatch.MatchEnumerable; import org.polypheny.db.adapter.java.ReflectiveSchema; import org.polypheny.db.algebra.constant.ExplainLevel; -import org.polypheny.db.algebra.core.Modify.Operation; +import org.polypheny.db.algebra.core.common.Modify.Operation; import 
org.polypheny.db.algebra.json.JsonConstructorNullClause; import org.polypheny.db.algebra.json.JsonQueryEmptyOrErrorBehavior; import org.polypheny.db.algebra.json.JsonQueryWrapperBehavior; diff --git a/core/src/main/java/org/polypheny/db/view/MaterializedViewManager.java b/core/src/main/java/org/polypheny/db/view/MaterializedViewManager.java index 3a8c80e340..2bd14eb56b 100644 --- a/core/src/main/java/org/polypheny/db/view/MaterializedViewManager.java +++ b/core/src/main/java/org/polypheny/db/view/MaterializedViewManager.java @@ -24,11 +24,11 @@ import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.AlgRoot; import org.polypheny.db.algebra.AlgShuttleImpl; -import org.polypheny.db.algebra.core.Modify.Operation; -import org.polypheny.db.algebra.logical.relational.LogicalModify; +import org.polypheny.db.algebra.core.common.Modify; +import org.polypheny.db.algebra.logical.relational.LogicalRelModify; import org.polypheny.db.catalog.entity.CatalogColumn; import org.polypheny.db.catalog.entity.CatalogMaterializedView; -import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.entity.MaterializedCriteria; import org.polypheny.db.transaction.PolyXid; import org.polypheny.db.transaction.Transaction; @@ -89,10 +89,10 @@ public static class TableUpdateVisitor extends AlgShuttleImpl { @Override - public AlgNode visit( LogicalModify modify ) { - if ( modify.getOperation() != Operation.MERGE ) { + public AlgNode visit( LogicalRelModify modify ) { + if ( modify.getOperation() != Modify.Operation.MERGE ) { if ( (modify.getEntity().getCatalogEntity() != null) ) { - CatalogTable table = modify.getEntity().getCatalogEntity().unwrap( CatalogTable.class ); + LogicalTable table = modify.getEntity().getCatalogEntity().unwrap( LogicalTable.class ); names.add( table.getNamespaceName() ); names.add( table.name ); } diff --git a/core/src/main/java/org/polypheny/db/view/ViewManager.java b/core/src/main/java/org/polypheny/db/view/ViewManager.java index 355b65ecad..0b4e20794b 100644 --- a/core/src/main/java/org/polypheny/db/view/ViewManager.java +++ b/core/src/main/java/org/polypheny/db/view/ViewManager.java @@ -27,7 +27,7 @@ import org.polypheny.db.algebra.BiAlg; import org.polypheny.db.algebra.SingleAlg; import org.polypheny.db.algebra.core.Project; -import org.polypheny.db.algebra.core.Scan; +import org.polypheny.db.algebra.core.relational.RelScan; import org.polypheny.db.algebra.core.TableFunctionScan; import org.polypheny.db.algebra.logical.common.LogicalConditionalExecute; import org.polypheny.db.algebra.logical.relational.LogicalAggregate; @@ -38,7 +38,7 @@ import org.polypheny.db.algebra.logical.relational.LogicalJoin; import org.polypheny.db.algebra.logical.relational.LogicalMatch; import org.polypheny.db.algebra.logical.relational.LogicalMinus; -import org.polypheny.db.algebra.logical.relational.LogicalModify; +import org.polypheny.db.algebra.logical.relational.LogicalRelModify; import org.polypheny.db.algebra.logical.relational.LogicalProject; import org.polypheny.db.algebra.logical.relational.LogicalRelScan; import org.polypheny.db.algebra.logical.relational.LogicalRelViewScan; @@ -48,7 +48,7 @@ import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.catalog.Catalog.EntityType; import org.polypheny.db.catalog.entity.CatalogMaterializedView; -import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import 
org.polypheny.db.prepare.AlgOptEntityImpl;
 import org.polypheny.db.rex.RexBuilder;
 import org.polypheny.db.rex.RexNode;
@@ -116,7 +116,7 @@ public AlgNode visit( LogicalMatch match ) {
 
     @Override
-    public AlgNode visit( Scan scan ) {
+    public AlgNode visit( RelScan scan ) {
         if ( depth == 0 ) {
             return checkNode( scan );
         }
@@ -231,7 +231,7 @@ public AlgNode visit( AlgNode other ) {
 
     @Override
-    public AlgNode visit( LogicalModify modify ) {
+    public AlgNode visit( LogicalRelModify modify ) {
         handleNodeType( modify );
         depth++;
         return modify;
@@ -253,7 +253,7 @@ public AlgNode checkNode( AlgNode other ) {
             return expandViewNode( other );
         } else if ( doesSubstituteOrderBy && other instanceof LogicalRelScan ) {
             if ( other.getEntity() instanceof AlgOptEntityImpl ) {
-                CatalogTable catalogTable = other.getEntity().getCatalogEntity().unwrap( CatalogTable.class );
+                LogicalTable catalogTable = other.getEntity().getCatalogEntity().unwrap( LogicalTable.class );
                 if ( catalogTable.entityType == EntityType.MATERIALIZED_VIEW && ((CatalogMaterializedView) catalogTable).isOrdered() ) {
                     return orderMaterialized( other );
                 }
diff --git a/core/src/test/java/org/polypheny/db/catalog/MockCatalog.java b/core/src/test/java/org/polypheny/db/catalog/MockCatalog.java
index 6eb1f7b9de..1ec9aa900b 100644
--- a/core/src/test/java/org/polypheny/db/catalog/MockCatalog.java
+++ b/core/src/test/java/org/polypheny/db/catalog/MockCatalog.java
@@ -36,7 +36,7 @@ import org.polypheny.db.catalog.entity.CatalogDataPlacement;
 import org.polypheny.db.catalog.entity.CatalogDatabase;
 import org.polypheny.db.catalog.entity.CatalogForeignKey;
-import org.polypheny.db.catalog.entity.CatalogGraphDatabase;
+import org.polypheny.db.catalog.entity.logical.LogicalGraph;
 import org.polypheny.db.catalog.entity.CatalogGraphMapping;
 import org.polypheny.db.catalog.entity.CatalogGraphPlacement;
 import org.polypheny.db.catalog.entity.CatalogIndex;
@@ -47,7 +47,7 @@ import org.polypheny.db.catalog.entity.CatalogPrimaryKey;
 import org.polypheny.db.catalog.entity.CatalogQueryInterface;
 import org.polypheny.db.catalog.entity.CatalogSchema;
-import org.polypheny.db.catalog.entity.CatalogTable;
+import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.catalog.entity.CatalogUser;
 import org.polypheny.db.catalog.entity.CatalogView;
 import org.polypheny.db.catalog.entity.MaterializedCriteria;
@@ -222,13 +222,13 @@ public CatalogGraphPlacement getGraphPlacement( long graphId, int adapterId ) {
 
     @Override
-    public CatalogGraphDatabase getGraph( long id ) {
+    public LogicalGraph getGraph( long id ) {
         throw new NotImplementedException();
     }
 
     @Override
-    public List<CatalogGraphDatabase> getGraphs( long databaseId, Pattern graphName ) {
+    public List<LogicalGraph> getGraphs( long databaseId, Pattern graphName ) {
         throw new NotImplementedException();
     }
 
@@ -300,49 +300,49 @@ public void deleteSchema( long schemaId ) {
 
     @Override
-    public List<CatalogTable> getTables( long schemaId, Pattern tableNamePattern ) {
+    public List<LogicalTable> getTables( long schemaId, Pattern tableNamePattern ) {
         throw new NotImplementedException();
     }
 
     @Override
-    public List<CatalogTable> getTables( long databaseId, Pattern schemaNamePattern, Pattern tableNamePattern ) {
+    public List<LogicalTable> getTables( long databaseId, Pattern schemaNamePattern, Pattern tableNamePattern ) {
         throw new NotImplementedException();
     }
 
     @Override
-    public CatalogTable getTable( String databaseName, String schemaName, String tableName ) throws UnknownTableException, UnknownDatabaseException, UnknownSchemaException {
+    public LogicalTable getTable( String databaseName, String schemaName, String tableName ) throws UnknownTableException, UnknownDatabaseException, UnknownSchemaException {
         throw new NotImplementedException();
     }
 
     @Override
-    public CatalogTable getTableFromPartition( long partitionId ) {
+    public LogicalTable getTableFromPartition( long partitionId ) {
         throw new NotImplementedException();
     }
 
     @Override
-    public List<CatalogTable> getTables( Pattern databaseNamePattern, Pattern schemaNamePattern, Pattern tableNamePattern ) {
+    public List<LogicalTable> getTables( Pattern databaseNamePattern, Pattern schemaNamePattern, Pattern tableNamePattern ) {
         throw new NotImplementedException();
     }
 
     @Override
-    public CatalogTable getTable( long tableId ) {
+    public LogicalTable getTable( long tableId ) {
         throw new NotImplementedException();
     }
 
     @Override
-    public CatalogTable getTable( long schemaId, String tableName ) throws UnknownTableException {
+    public LogicalTable getTable( long schemaId, String tableName ) throws UnknownTableException {
         throw new NotImplementedException();
     }
 
     @Override
-    public CatalogTable getTable( long databaseId, String schemaName, String tableName ) throws UnknownTableException {
+    public LogicalTable getTable( long databaseId, String schemaName, String tableName ) throws UnknownTableException {
         throw new NotImplementedException();
     }
 
@@ -1169,7 +1169,7 @@ public void addTableToPeriodicProcessing( long tableId ) {
 
     @Override
-    public List<CatalogTable> getTablesForPeriodicProcessing() {
+    public List<LogicalTable> getTablesForPeriodicProcessing() {
         throw new NotImplementedException();
     }
 
diff --git a/core/src/test/java/org/polypheny/db/test/JdbcTest.java b/core/src/test/java/org/polypheny/db/test/JdbcTest.java
index 5d7b1f7740..4ca1238649 100644
--- a/core/src/test/java/org/polypheny/db/test/JdbcTest.java
+++ b/core/src/test/java/org/polypheny/db/test/JdbcTest.java
@@ -19,8 +19,8 @@ import java.util.List;
 import org.polypheny.db.algebra.AlgNode;
-import org.polypheny.db.algebra.core.Modify;
-import org.polypheny.db.algebra.logical.relational.LogicalModify;
+import org.polypheny.db.algebra.core.relational.RelModify;
+import org.polypheny.db.algebra.logical.relational.LogicalRelModify;
 import org.polypheny.db.plan.AlgOptCluster;
 import org.polypheny.db.plan.AlgOptEntity;
 import org.polypheny.db.prepare.Prepare;
@@ -45,18 +45,17 @@ protected AbstractModifiableEntity( String tableName ) {
 
     @Override
-    public Modify toModificationAlg(
+    public RelModify toModificationAlg(
             AlgOptCluster cluster,
             AlgOptEntity table,
             Prepare.CatalogReader catalogReader,
             AlgNode child,
-            Modify.Operation operation,
+            RelModify.Operation operation,
             List<String> updateColumnList,
             List<RexNode> sourceExpressionList,
             boolean flattened ) {
-        return LogicalModify.create(
+        return LogicalRelModify.create(
                 table,
-                catalogReader,
                 child,
                 operation,
                 updateColumnList,
diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java
index 23d404aec6..7205fc4dcf 100644
--- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java
+++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java
@@ -70,7 +70,7 @@ import org.polypheny.db.catalog.entity.CatalogConstraint;
 import org.polypheny.db.catalog.entity.CatalogDataPlacement;
 import org.polypheny.db.catalog.entity.CatalogForeignKey;
-import org.polypheny.db.catalog.entity.CatalogGraphDatabase;
+import org.polypheny.db.catalog.entity.logical.LogicalGraph;
 import org.polypheny.db.catalog.entity.CatalogGraphMapping;
 import org.polypheny.db.catalog.entity.CatalogGraphPlacement;
 import org.polypheny.db.catalog.entity.CatalogIndex;
@@
-79,7 +79,7 @@ import org.polypheny.db.catalog.entity.CatalogPartitionGroup; import org.polypheny.db.catalog.entity.CatalogPrimaryKey; import org.polypheny.db.catalog.entity.CatalogSchema; -import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.entity.CatalogUser; import org.polypheny.db.catalog.entity.CatalogView; import org.polypheny.db.catalog.entity.MaterializedCriteria; @@ -157,7 +157,7 @@ private void checkIfDdlPossible( EntityType entityType ) throws DdlOnSourceExcep } - private void checkViewDependencies( CatalogTable catalogTable ) { + private void checkViewDependencies( LogicalTable catalogTable ) { if ( catalogTable.connectedViews.size() > 0 ) { List views = new ArrayList<>(); for ( Long id : catalogTable.connectedViews ) { @@ -291,7 +291,7 @@ public void addAdapter( String uniqueName, String adapterName, AdapterType adapt } try { catalog.addPrimaryKey( tableId, primaryKeyColIds ); - CatalogTable catalogTable = catalog.getTable( tableId ); + LogicalTable catalogTable = catalog.getTable( tableId ); catalog.addPartitionPlacement( catalogTable.namespaceId, adapter.getAdapterId(), @@ -362,7 +362,7 @@ public void dropAdapter( String name, Statement statement ) throws UnknownAdapte } // Drop tables for ( Long tableId : tablesToDrop ) { - CatalogTable table = catalog.getTable( tableId ); + LogicalTable table = catalog.getTable( tableId ); // Make sure that there is only one adapter if ( table.dataPlacements.size() != 1 ) { @@ -427,7 +427,7 @@ public void renameSchema( String newName, String oldName, long databaseId ) thro @Override - public void addColumnToSourceTable( CatalogTable catalogTable, String columnPhysicalName, String columnLogicalName, String beforeColumnName, String afterColumnName, String defaultValue, Statement statement ) throws ColumnAlreadyExistsException, DdlOnSourceException, ColumnNotExistsException { + public void addColumnToSourceTable( LogicalTable catalogTable, String columnPhysicalName, String columnLogicalName, String beforeColumnName, String afterColumnName, String defaultValue, Statement statement ) throws ColumnAlreadyExistsException, DdlOnSourceException, ColumnNotExistsException { if ( catalog.checkIfExistsColumn( catalogTable.id, columnLogicalName ) ) { throw new ColumnAlreadyExistsException( columnLogicalName, catalogTable.name ); @@ -508,7 +508,7 @@ public void addColumnToSourceTable( CatalogTable catalogTable, String columnPhys } - private int updateAdjacentPositions( CatalogTable catalogTable, CatalogColumn beforeColumn, CatalogColumn afterColumn ) { + private int updateAdjacentPositions( LogicalTable catalogTable, CatalogColumn beforeColumn, CatalogColumn afterColumn ) { List columns = catalog.getColumns( catalogTable.id ); int position = columns.size() + 1; if ( beforeColumn != null || afterColumn != null ) { @@ -527,7 +527,7 @@ private int updateAdjacentPositions( CatalogTable catalogTable, CatalogColumn be @Override - public void addColumn( String columnName, CatalogTable catalogTable, String beforeColumnName, String afterColumnName, ColumnTypeInformation type, boolean nullable, String defaultValue, Statement statement ) throws NotNullAndDefaultValueException, ColumnAlreadyExistsException, ColumnNotExistsException { + public void addColumn( String columnName, LogicalTable catalogTable, String beforeColumnName, String afterColumnName, ColumnTypeInformation type, boolean nullable, String defaultValue, Statement statement ) throws 
NotNullAndDefaultValueException, ColumnAlreadyExistsException, ColumnNotExistsException { columnName = adjustNameIfNeeded( columnName, catalogTable.namespaceId ); // Check if the column either allows null values or has a default value defined. if ( defaultValue == null && !nullable ) { @@ -583,7 +583,7 @@ public void addColumn( String columnName, CatalogTable catalogTable, String befo @Override - public void addForeignKey( CatalogTable catalogTable, CatalogTable refTable, List columnNames, List refColumnNames, String constraintName, ForeignKeyOption onUpdate, ForeignKeyOption onDelete ) throws UnknownColumnException, GenericCatalogException { + public void addForeignKey( LogicalTable catalogTable, LogicalTable refTable, List columnNames, List refColumnNames, String constraintName, ForeignKeyOption onUpdate, ForeignKeyOption onDelete ) throws UnknownColumnException, GenericCatalogException { List columnIds = new LinkedList<>(); for ( String columnName : columnNames ) { CatalogColumn catalogColumn = catalog.getColumn( catalogTable.id, columnName ); @@ -599,7 +599,7 @@ public void addForeignKey( CatalogTable catalogTable, CatalogTable refTable, Lis @Override - public void addIndex( CatalogTable catalogTable, String indexMethodName, List columnNames, String indexName, boolean isUnique, DataStore location, Statement statement ) throws UnknownColumnException, UnknownIndexMethodException, GenericCatalogException, UnknownTableException, UnknownUserException, UnknownSchemaException, UnknownKeyException, UnknownDatabaseException, TransactionException, AlterSourceException, IndexExistsException, MissingColumnPlacementException { + public void addIndex( LogicalTable catalogTable, String indexMethodName, List columnNames, String indexName, boolean isUnique, DataStore location, Statement statement ) throws UnknownColumnException, UnknownIndexMethodException, GenericCatalogException, UnknownTableException, UnknownUserException, UnknownSchemaException, UnknownKeyException, UnknownDatabaseException, TransactionException, AlterSourceException, IndexExistsException, MissingColumnPlacementException { List columnIds = new LinkedList<>(); for ( String columnName : columnNames ) { CatalogColumn catalogColumn = catalog.getColumn( catalogTable.id, columnName ); @@ -680,7 +680,7 @@ public void addIndex( CatalogTable catalogTable, String indexMethodName, List columnIds, IndexType type ) throws MissingColumnPlacementException, UnknownIndexMethodException, GenericCatalogException { + private void addDataStoreIndex( LogicalTable catalogTable, String indexMethodName, String indexName, boolean isUnique, DataStore location, Statement statement, List columnIds, IndexType type ) throws MissingColumnPlacementException, UnknownIndexMethodException, GenericCatalogException { // Check if all required columns are present on this store for ( long columnId : columnIds ) { if ( !catalog.checkIfExistsColumnPlacement( location.getAdapterId(), columnId ) ) { @@ -724,7 +724,7 @@ private void addDataStoreIndex( CatalogTable catalogTable, String indexMethodNam } - public void addPolyphenyIndex( CatalogTable catalogTable, String indexMethodName, List columnNames, String indexName, boolean isUnique, Statement statement ) throws UnknownColumnException, UnknownIndexMethodException, GenericCatalogException, UnknownTableException, UnknownUserException, UnknownSchemaException, UnknownKeyException, UnknownDatabaseException, TransactionException, AlterSourceException, IndexExistsException, MissingColumnPlacementException { + public void 
addPolyphenyIndex( LogicalTable catalogTable, String indexMethodName, List columnNames, String indexName, boolean isUnique, Statement statement ) throws UnknownColumnException, UnknownIndexMethodException, GenericCatalogException, UnknownTableException, UnknownUserException, UnknownSchemaException, UnknownKeyException, UnknownDatabaseException, TransactionException, AlterSourceException, IndexExistsException, MissingColumnPlacementException { indexName = indexName.toLowerCase(); List columnIds = new LinkedList<>(); for ( String columnName : columnNames ) { @@ -778,7 +778,7 @@ public void addPolyphenyIndex( CatalogTable catalogTable, String indexMethodName @Override - public void addDataPlacement( CatalogTable catalogTable, List columnIds, List partitionGroupIds, List partitionGroupNames, DataStore dataStore, Statement statement ) throws PlacementAlreadyExistsException { + public void addDataPlacement( LogicalTable catalogTable, List columnIds, List partitionGroupIds, List partitionGroupNames, DataStore dataStore, Statement statement ) throws PlacementAlreadyExistsException { List addedColumns = new LinkedList<>(); List tempPartitionGroupList = new ArrayList<>(); @@ -921,7 +921,7 @@ else if ( partitionGroupIds.isEmpty() && partitionGroupNames.isEmpty() ) { @Override - public void addPrimaryKey( CatalogTable catalogTable, List columnNames, Statement statement ) throws DdlOnSourceException { + public void addPrimaryKey( LogicalTable catalogTable, List columnNames, Statement statement ) throws DdlOnSourceException { // Make sure that this is a table of type TABLE (and not SOURCE) checkIfDdlPossible( catalogTable.entityType ); @@ -965,7 +965,7 @@ public void addPrimaryKey( CatalogTable catalogTable, List columnNames, @Override - public void addUniqueConstraint( CatalogTable catalogTable, List columnNames, String constraintName ) throws DdlOnSourceException { + public void addUniqueConstraint( LogicalTable catalogTable, List columnNames, String constraintName ) throws DdlOnSourceException { // Make sure that this is a table of type TABLE (and not SOURCE) checkIfDdlPossible( catalogTable.entityType ); @@ -985,7 +985,7 @@ public void addUniqueConstraint( CatalogTable catalogTable, List columnN @Override - public void dropColumn( CatalogTable catalogTable, String columnName, Statement statement ) throws ColumnNotExistsException { + public void dropColumn( LogicalTable catalogTable, String columnName, Statement statement ) throws ColumnNotExistsException { if ( catalogTable.fieldIds.size() < 2 ) { throw new RuntimeException( "Cannot drop sole column of table " + catalogTable.name ); } @@ -1040,14 +1040,14 @@ public void dropColumn( CatalogTable catalogTable, String columnName, Statement } - private void checkModelLogic( CatalogTable catalogTable ) { + private void checkModelLogic( LogicalTable catalogTable ) { if ( catalogTable.getNamespaceType() == NamespaceType.DOCUMENT ) { throw new RuntimeException( "Modification operation is not allowed by schema type DOCUMENT" ); } } - private void checkModelLogic( CatalogTable catalogTable, String columnName ) { + private void checkModelLogic( LogicalTable catalogTable, String columnName ) { if ( catalogTable.getNamespaceType() == NamespaceType.DOCUMENT && (columnName.equals( "_data" ) || columnName.equals( "_id" )) ) { throw new RuntimeException( "Modification operation is not allowed by schema type DOCUMENT" ); @@ -1056,7 +1056,7 @@ private void checkModelLogic( CatalogTable catalogTable, String columnName ) { @Override - public void dropConstraint( 
CatalogTable catalogTable, String constraintName ) throws DdlOnSourceException { + public void dropConstraint( LogicalTable catalogTable, String constraintName ) throws DdlOnSourceException { // Make sure that this is a table of type TABLE (and not SOURCE) checkIfDdlPossible( catalogTable.entityType ); @@ -1070,7 +1070,7 @@ public void dropConstraint( CatalogTable catalogTable, String constraintName ) t @Override - public void dropForeignKey( CatalogTable catalogTable, String foreignKeyName ) throws DdlOnSourceException { + public void dropForeignKey( LogicalTable catalogTable, String foreignKeyName ) throws DdlOnSourceException { // Make sure that this is a table of type TABLE (and not SOURCE) checkIfDdlPossible( catalogTable.entityType ); @@ -1084,7 +1084,7 @@ public void dropForeignKey( CatalogTable catalogTable, String foreignKeyName ) t @Override - public void dropIndex( CatalogTable catalogTable, String indexName, Statement statement ) throws DdlOnSourceException { + public void dropIndex( LogicalTable catalogTable, String indexName, Statement statement ) throws DdlOnSourceException { // Make sure that this is a table of type TABLE (and not SOURCE) checkIfDdlPossible( catalogTable.entityType ); @@ -1106,7 +1106,7 @@ public void dropIndex( CatalogTable catalogTable, String indexName, Statement st @Override - public void dropDataPlacement( CatalogTable catalogTable, DataStore storeInstance, Statement statement ) throws PlacementNotExistsException, LastPlacementException { + public void dropDataPlacement( LogicalTable catalogTable, DataStore storeInstance, Statement statement ) throws PlacementNotExistsException, LastPlacementException { // Check whether this placement exists if ( !catalogTable.dataPlacements.contains( storeInstance.getAdapterId() ) ) { throw new PlacementNotExistsException(); @@ -1148,7 +1148,7 @@ public void dropDataPlacement( CatalogTable catalogTable, DataStore storeInstanc @Override - public void dropPrimaryKey( CatalogTable catalogTable ) throws DdlOnSourceException { + public void dropPrimaryKey( LogicalTable catalogTable ) throws DdlOnSourceException { try { // Make sure that this is a table of type TABLE (and not SOURCE) checkIfDdlPossible( catalogTable.entityType ); @@ -1160,7 +1160,7 @@ public void dropPrimaryKey( CatalogTable catalogTable ) throws DdlOnSourceExcept @Override - public void setColumnType( CatalogTable catalogTable, String columnName, ColumnTypeInformation type, Statement statement ) throws DdlOnSourceException, ColumnNotExistsException, GenericCatalogException { + public void setColumnType( LogicalTable catalogTable, String columnName, ColumnTypeInformation type, Statement statement ) throws DdlOnSourceException, ColumnNotExistsException, GenericCatalogException { // Make sure that this is a table of type TABLE (and not SOURCE) checkIfDdlPossible( catalogTable.entityType ); @@ -1191,7 +1191,7 @@ public void setColumnType( CatalogTable catalogTable, String columnName, ColumnT @Override - public void setColumnNullable( CatalogTable catalogTable, String columnName, boolean nullable, Statement statement ) throws ColumnNotExistsException, DdlOnSourceException, GenericCatalogException { + public void setColumnNullable( LogicalTable catalogTable, String columnName, boolean nullable, Statement statement ) throws ColumnNotExistsException, DdlOnSourceException, GenericCatalogException { CatalogColumn catalogColumn = getCatalogColumn( catalogTable.id, columnName ); // Make sure that this is a table of type TABLE (and not SOURCE) @@ -1208,7 +1208,7 @@ 
public void setColumnNullable( CatalogTable catalogTable, String columnName, boo @Override - public void setColumnPosition( CatalogTable catalogTable, String columnName, String beforeColumnName, String afterColumnName, Statement statement ) throws ColumnNotExistsException { + public void setColumnPosition( LogicalTable catalogTable, String columnName, String beforeColumnName, String afterColumnName, Statement statement ) throws ColumnNotExistsException { // Check if model permits operation checkModelLogic( catalogTable, columnName ); @@ -1259,7 +1259,7 @@ public void setColumnPosition( CatalogTable catalogTable, String columnName, Str @Override - public void setColumnCollation( CatalogTable catalogTable, String columnName, Collation collation, Statement statement ) throws ColumnNotExistsException, DdlOnSourceException { + public void setColumnCollation( LogicalTable catalogTable, String columnName, Collation collation, Statement statement ) throws ColumnNotExistsException, DdlOnSourceException { CatalogColumn catalogColumn = getCatalogColumn( catalogTable.id, columnName ); // Check if model permits operation @@ -1276,7 +1276,7 @@ public void setColumnCollation( CatalogTable catalogTable, String columnName, Co @Override - public void setDefaultValue( CatalogTable catalogTable, String columnName, String defaultValue, Statement statement ) throws ColumnNotExistsException { + public void setDefaultValue( LogicalTable catalogTable, String columnName, String defaultValue, Statement statement ) throws ColumnNotExistsException { CatalogColumn catalogColumn = getCatalogColumn( catalogTable.id, columnName ); // Check if model permits operation @@ -1290,7 +1290,7 @@ public void setDefaultValue( CatalogTable catalogTable, String columnName, Strin @Override - public void dropDefaultValue( CatalogTable catalogTable, String columnName, Statement statement ) throws ColumnNotExistsException { + public void dropDefaultValue( LogicalTable catalogTable, String columnName, Statement statement ) throws ColumnNotExistsException { CatalogColumn catalogColumn = getCatalogColumn( catalogTable.id, columnName ); // check if model permits operation @@ -1304,7 +1304,7 @@ public void dropDefaultValue( CatalogTable catalogTable, String columnName, Stat @Override - public void modifyDataPlacement( CatalogTable catalogTable, List columnIds, List partitionGroupIds, List partitionGroupNames, DataStore storeInstance, Statement statement ) + public void modifyDataPlacement( LogicalTable catalogTable, List columnIds, List partitionGroupIds, List partitionGroupNames, DataStore storeInstance, Statement statement ) throws PlacementNotExistsException, IndexPreventsRemovalException, LastPlacementException { // Check whether this placement already exists @@ -1483,7 +1483,7 @@ else if ( !partitionGroupNames.isEmpty() && partitionGroupIds.isEmpty() ) { @Override - public void modifyPartitionPlacement( CatalogTable catalogTable, List partitionGroupIds, DataStore storeInstance, Statement statement ) throws LastPlacementException { + public void modifyPartitionPlacement( LogicalTable catalogTable, List partitionGroupIds, DataStore storeInstance, Statement statement ) throws LastPlacementException { int storeId = storeInstance.getAdapterId(); List newPartitions = new ArrayList<>(); List removedPartitions = new ArrayList<>(); @@ -1554,7 +1554,7 @@ public void modifyPartitionPlacement( CatalogTable catalogTable, List part @Override - public void addColumnPlacement( CatalogTable catalogTable, String columnName, DataStore storeInstance, 
Statement statement ) throws UnknownAdapterException, PlacementNotExistsException, PlacementAlreadyExistsException, ColumnNotExistsException { + public void addColumnPlacement( LogicalTable catalogTable, String columnName, DataStore storeInstance, Statement statement ) throws UnknownAdapterException, PlacementNotExistsException, PlacementAlreadyExistsException, ColumnNotExistsException { columnName = adjustNameIfNeeded( columnName, catalogTable.namespaceId ); if ( storeInstance == null ) { @@ -1603,7 +1603,7 @@ public void addColumnPlacement( CatalogTable catalogTable, String columnName, Da @Override - public void dropColumnPlacement( CatalogTable catalogTable, String columnName, DataStore storeInstance, Statement statement ) throws UnknownAdapterException, PlacementNotExistsException, IndexPreventsRemovalException, LastPlacementException, PlacementIsPrimaryException, ColumnNotExistsException { + public void dropColumnPlacement( LogicalTable catalogTable, String columnName, DataStore storeInstance, Statement statement ) throws UnknownAdapterException, PlacementNotExistsException, IndexPreventsRemovalException, LastPlacementException, PlacementIsPrimaryException, ColumnNotExistsException { if ( storeInstance == null ) { throw new UnknownAdapterException( "" ); } @@ -1645,14 +1645,14 @@ public void dropColumnPlacement( CatalogTable catalogTable, String columnName, D @Override - public void alterTableOwner( CatalogTable catalogTable, String newOwnerName ) throws UnknownUserException { + public void alterTableOwner( LogicalTable catalogTable, String newOwnerName ) throws UnknownUserException { CatalogUser catalogUser = catalog.getUser( newOwnerName ); catalog.setTableOwner( catalogTable.id, catalogUser.id ); } @Override - public void renameTable( CatalogTable catalogTable, String newTableName, Statement statement ) throws EntityAlreadyExistsException { + public void renameTable( LogicalTable catalogTable, String newTableName, Statement statement ) throws EntityAlreadyExistsException { if ( catalog.checkIfExistsEntity( catalogTable.namespaceId, newTableName ) ) { throw new EntityAlreadyExistsException(); } @@ -1674,7 +1674,7 @@ public void renameTable( CatalogTable catalogTable, String newTableName, Stateme @Override - public void renameColumn( CatalogTable catalogTable, String columnName, String newColumnName, Statement statement ) throws ColumnAlreadyExistsException, ColumnNotExistsException { + public void renameColumn( LogicalTable catalogTable, String columnName, String newColumnName, Statement statement ) throws ColumnAlreadyExistsException, ColumnNotExistsException { CatalogColumn catalogColumn = getCatalogColumn( catalogTable.id, columnName ); if ( catalog.checkIfExistsColumn( catalogColumn.tableId, newColumnName ) ) { @@ -1878,7 +1878,7 @@ public void createMaterializedView( String viewName, long schemaId, AlgRoot algR private void checkModelLangCompatibility( QueryLanguage language, Long tableId ) { - CatalogTable catalogTable = catalog.getTable( tableId ); + LogicalTable catalogTable = catalog.getTable( tableId ); if ( catalogTable.getNamespaceType() != language.getNamespaceType() ) { throw new RuntimeException( String.format( @@ -1924,7 +1924,7 @@ public long addGraphPlacement( long graphId, List stores, boolean onl throw new RuntimeException(); } - CatalogGraphDatabase graph = catalog.getGraph( graphId ); + LogicalGraph graph = catalog.getGraph( graphId ); PolySchemaBuilder.getInstance().getCurrent(); List preExistingPlacements = graph.placements @@ -1980,10 +1980,10 @@ private 
void afterGraphDropLogistics( DataStore store, long graphId ) { private void afterGraphPlacementAddLogistics( DataStore store, long graphId ) { CatalogGraphMapping mapping = catalog.getGraphMapping( graphId ); - CatalogTable nodes = catalog.getTable( mapping.nodesId ); - CatalogTable nodeProperty = catalog.getTable( mapping.nodesPropertyId ); - CatalogTable edges = catalog.getTable( mapping.edgesId ); - CatalogTable edgeProperty = catalog.getTable( mapping.edgesPropertyId ); + LogicalTable nodes = catalog.getTable( mapping.nodesId ); + LogicalTable nodeProperty = catalog.getTable( mapping.nodesPropertyId ); + LogicalTable edges = catalog.getTable( mapping.edgesId ); + LogicalTable edgeProperty = catalog.getTable( mapping.edgesPropertyId ); catalog.addDataPlacement( store.getAdapterId(), mapping.nodesId ); catalog.addDataPlacement( store.getAdapterId(), mapping.nodesPropertyId ); @@ -2062,7 +2062,7 @@ public void replaceGraphAlias( long graphId, String oldAlias, String alias ) { @Override public void removeGraphDatabase( long graphId, boolean ifExists, Statement statement ) { - CatalogGraphDatabase graph = catalog.getGraph( graphId ); + LogicalGraph graph = catalog.getGraph( graphId ); if ( graph == null ) { if ( !ifExists ) { @@ -2156,7 +2156,7 @@ private Map> findUnderlyingTablesOfView( AlgNode algNode, Map getUnderlyingColumns( AlgNode algNode, AlgDataType fieldList ) { - CatalogTable table = algNode.getEntity().getCatalogEntity().unwrap( CatalogTable.class ); + LogicalTable table = algNode.getEntity().getCatalogEntity().unwrap( LogicalTable.class ); List columnIds = table.fieldIds; List logicalColumnNames = table.getColumnNames(); List underlyingColumns = new ArrayList<>(); @@ -2225,7 +2225,7 @@ public void createTable( long schemaId, String name, List fiel } //catalog.updateTablePartitionProperties(tableId, partitionProperty); - CatalogTable catalogTable = catalog.getTable( tableId ); + LogicalTable catalogTable = catalog.getTable( tableId ); // Trigger rebuild of schema; triggers schema creation on adapters PolySchemaBuilder.getInstance().getCurrent(); @@ -2330,7 +2330,7 @@ public void dropCollection( CatalogCollection catalogCollection, Statement state public void removeDocumentLogistics( CatalogCollection catalogCollection, Statement statement ) { CatalogCollectionMapping mapping = catalog.getCollectionMapping( catalogCollection.id ); - CatalogTable table = catalog.getTable( mapping.collectionId ); + LogicalTable table = catalog.getTable( mapping.collectionId ); catalog.deleteTable( table.id ); } @@ -2381,7 +2381,7 @@ public void dropCollectionPlacement( long namespaceId, CatalogCollection collect private void removeDocumentPlacementLogistics( CatalogCollection collection, DataStore store, Statement statement ) { CatalogCollectionMapping mapping = catalog.getCollectionMapping( collection.id ); - CatalogTable table = catalog.getTable( mapping.collectionId ); + LogicalTable table = catalog.getTable( mapping.collectionId ); try { dropDataPlacement( table, store, statement ); } catch ( PlacementNotExistsException | LastPlacementException e ) { @@ -2392,7 +2392,7 @@ private void removeDocumentPlacementLogistics( CatalogCollection collection, Dat private void afterDocumentLogistics( DataStore store, long collectionId ) { CatalogCollectionMapping mapping = catalog.getCollectionMapping( collectionId ); - CatalogTable table = catalog.getTable( mapping.collectionId ); + LogicalTable table = catalog.getTable( mapping.collectionId ); catalog.addDataPlacement( store.getAdapterId(), collectionId 
); @@ -2472,7 +2472,7 @@ public void addPartitioning( PartitionInformation partitionInfo, List log.debug( "Creating partition group for table: {} with id {} on schema: {} on column: {}", partitionInfo.table.name, partitionInfo.table.id, partitionInfo.table.getNamespaceName(), catalogColumn.id ); } - CatalogTable unPartitionedTable = catalog.getTable( partitionInfo.table.id ); + LogicalTable unPartitionedTable = catalog.getTable( partitionInfo.table.id ); // Get partition manager PartitionManagerFactory partitionManagerFactory = PartitionManagerFactory.getInstance(); @@ -2666,7 +2666,7 @@ public void addPartitioning( PartitionInformation partitionInfo, List } // Now get the partitioned table, partitionInfo still contains the basic/unpartitioned table. - CatalogTable partitionedTable = catalog.getTable( partitionInfo.table.id ); + LogicalTable partitionedTable = catalog.getTable( partitionInfo.table.id ); DataMigrator dataMigrator = statement.getTransaction().getDataMigrator(); for ( DataStore store : stores ) { for ( long partitionId : partitionIds ) { @@ -2738,7 +2738,7 @@ public void addPartitioning( PartitionInformation partitionInfo, List @Override - public void removePartitioning( CatalogTable partitionedTable, Statement statement ) throws UnknownDatabaseException, GenericCatalogException, UnknownTableException, TransactionException, UnknownSchemaException, UnknownUserException, UnknownKeyException { + public void removePartitioning( LogicalTable partitionedTable, Statement statement ) throws UnknownDatabaseException, GenericCatalogException, UnknownTableException, TransactionException, UnknownSchemaException, UnknownUserException, UnknownKeyException { long tableId = partitionedTable.id; if ( log.isDebugEnabled() ) { @@ -2757,7 +2757,7 @@ public void removePartitioning( CatalogTable partitionedTable, Statement stateme catalog.mergeTable( tableId ); // Now get the merged table - CatalogTable mergedTable = catalog.getTable( tableId ); + LogicalTable mergedTable = catalog.getTable( tableId ); List stores = new ArrayList<>(); // Get primary key of table and use PK to find all DataPlacements of table @@ -2929,8 +2929,8 @@ public void dropSchema( long databaseId, String schemaName, boolean ifExists, St } // Drop all tables in this schema - List catalogEntities = catalog.getTables( catalogSchema.id, null ); - for ( CatalogTable catalogTable : catalogEntities ) { + List catalogEntities = catalog.getTables( catalogSchema.id, null ); + for ( LogicalTable catalogTable : catalogEntities ) { dropTable( catalogTable, statement ); } @@ -2951,7 +2951,7 @@ public void dropSchema( long databaseId, String schemaName, boolean ifExists, St @Override - public void dropView( CatalogTable catalogView, Statement statement ) throws DdlOnSourceException { + public void dropView( LogicalTable catalogView, Statement statement ) throws DdlOnSourceException { // Make sure that this is a table of type VIEW if ( catalogView.entityType == EntityType.VIEW ) { // Empty on purpose @@ -2979,7 +2979,7 @@ public void dropView( CatalogTable catalogView, Statement statement ) throws Ddl @Override - public void dropMaterializedView( CatalogTable materializedView, Statement statement ) throws DdlOnSourceException { + public void dropMaterializedView( LogicalTable materializedView, Statement statement ) throws DdlOnSourceException { // Make sure that this is a table of type Materialized View if ( materializedView.entityType == EntityType.MATERIALIZED_VIEW ) { // Empty on purpose @@ -3001,7 +3001,7 @@ public void 
dropMaterializedView( CatalogTable materializedView, Statement state @Override - public void dropTable( CatalogTable catalogTable, Statement statement ) throws DdlOnSourceException { + public void dropTable( LogicalTable catalogTable, Statement statement ) throws DdlOnSourceException { // Make sure that this is a table of type TABLE (and not SOURCE) //checkIfDdlPossible( catalogEntity.tableType ); @@ -3114,7 +3114,7 @@ public void dropTable( CatalogTable catalogTable, Statement statement ) throws D @Override - public void truncate( CatalogTable catalogTable, Statement statement ) { + public void truncate( LogicalTable catalogTable, Statement statement ) { // Make sure that the table can be modified if ( !catalogTable.modifiable ) { throw new RuntimeException( "Unable to modify a read-only table!" ); @@ -3130,12 +3130,12 @@ public void truncate( CatalogTable catalogTable, Statement statement ) { } - private void prepareMonitoring( Statement statement, Kind kind, CatalogTable catalogTable ) { + private void prepareMonitoring( Statement statement, Kind kind, LogicalTable catalogTable ) { prepareMonitoring( statement, kind, catalogTable, null ); } - private void prepareMonitoring( Statement statement, Kind kind, CatalogTable catalogTable, CatalogColumn catalogColumn ) { + private void prepareMonitoring( Statement statement, Kind kind, LogicalTable catalogTable, CatalogColumn catalogColumn ) { // Initialize Monitoring if ( statement.getMonitoringEvent() == null ) { StatementEvent event = new DdlEvent(); diff --git a/dbms/src/main/java/org/polypheny/db/partition/AbstractPartitionManager.java b/dbms/src/main/java/org/polypheny/db/partition/AbstractPartitionManager.java index 70683660bb..8db18411bf 100644 --- a/dbms/src/main/java/org/polypheny/db/partition/AbstractPartitionManager.java +++ b/dbms/src/main/java/org/polypheny/db/partition/AbstractPartitionManager.java @@ -26,7 +26,7 @@ import org.polypheny.db.catalog.entity.CatalogColumn; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; import org.polypheny.db.catalog.entity.CatalogPartition; -import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.entity.logical.LogicalTable; @Slf4j @@ -38,11 +38,11 @@ public abstract class AbstractPartitionManager implements PartitionManager { // Returns the Index of the partition where to place the object @Override - public abstract long getTargetPartitionId( CatalogTable catalogTable, String columnValue ); + public abstract long getTargetPartitionId( LogicalTable catalogTable, String columnValue ); @Override - public boolean probePartitionGroupDistributionChange( CatalogTable catalogTable, int storeId, long columnId, int threshold ) { + public boolean probePartitionGroupDistributionChange( LogicalTable catalogTable, int storeId, long columnId, int threshold ) { // Check for the specified columnId if we still have a ColumnPlacement for every partitionGroup for ( Long partitionGroupId : catalogTable.partitionProperty.partitionGroupIds ) { List ccps = catalog.getColumnPlacementsByPartitionGroup( catalogTable.id, partitionGroupId, columnId ); @@ -59,7 +59,7 @@ public boolean probePartitionGroupDistributionChange( CatalogTable catalogTable, @Override - public Map> getRelevantPlacements( CatalogTable catalogTable, List partitionIds, List excludedAdapters ) { + public Map> getRelevantPlacements( LogicalTable catalogTable, List partitionIds, List excludedAdapters ) { Catalog catalog = Catalog.getInstance(); Map> placementDistribution = new HashMap<>(); @@ -127,7 +127,7 @@ 
public String getUnifiedNullValue() { @Override - public Map>> getAllPlacements( CatalogTable catalogTable, List partitionIds ) { + public Map>> getAllPlacements( LogicalTable catalogTable, List partitionIds ) { Map>> adapterPlacements = new HashMap<>(); // adapterId -> partitionId ; placements if ( partitionIds != null ) { for ( long partitionId : partitionIds ) { diff --git a/dbms/src/main/java/org/polypheny/db/partition/FrequencyMapImpl.java b/dbms/src/main/java/org/polypheny/db/partition/FrequencyMapImpl.java index 0c2cc5daba..accc4cc8d3 100644 --- a/dbms/src/main/java/org/polypheny/db/partition/FrequencyMapImpl.java +++ b/dbms/src/main/java/org/polypheny/db/partition/FrequencyMapImpl.java @@ -37,7 +37,7 @@ import org.polypheny.db.catalog.entity.CatalogAdapter; import org.polypheny.db.catalog.entity.CatalogColumn; import org.polypheny.db.catalog.entity.CatalogPartition; -import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.exceptions.GenericCatalogException; import org.polypheny.db.catalog.exceptions.UnknownDatabaseException; import org.polypheny.db.catalog.exceptions.UnknownSchemaException; @@ -123,9 +123,9 @@ private void processAllPeriodicTables() { Catalog catalog = Catalog.getInstance(); long invocationTimestamp = System.currentTimeMillis(); - List periodicTables = catalog.getTablesForPeriodicProcessing(); + List periodicTables = catalog.getTablesForPeriodicProcessing(); // Retrieve all Tables which rely on periodic processing - for ( CatalogTable table : periodicTables ) { + for ( LogicalTable table : periodicTables ) { if ( table.partitionProperty.partitionType == PartitionType.TEMPERATURE ) { determinePartitionFrequency( table, invocationTimestamp ); } @@ -153,7 +153,7 @@ private void incrementPartitionAccess( long identifiedPartitionId, List pa * * @param table Temperature partitioned Table */ - private void determinePartitionDistribution( CatalogTable table ) { + private void determinePartitionDistribution( LogicalTable table ) { if ( log.isDebugEnabled() ) { log.debug( "Determine access frequency of partitions of table: {}", table.name ); } @@ -250,7 +250,7 @@ private void determinePartitionDistribution( CatalogTable table ) { * @param partitionsFromColdToHot Partitions which should be moved from COLD to HOT PartitionGroup * @param partitionsFromHotToCold Partitions which should be moved from HOT to COLD PartitionGroup */ - private void redistributePartitions( CatalogTable table, List partitionsFromColdToHot, List partitionsFromHotToCold ) { + private void redistributePartitions( LogicalTable table, List partitionsFromColdToHot, List partitionsFromHotToCold ) { if ( log.isDebugEnabled() ) { log.debug( "Execute physical redistribution of partitions for table: {}", table.name ); log.debug( "Partitions to move from HOT to COLD: {}", partitionsFromHotToCold ); @@ -427,7 +427,7 @@ private List filterList( int adapterId, long tableId, List partition * @param invocationTimestamp Timestamp do determine the interval for which monitoring metrics should be collected. 
*/ @Override - public void determinePartitionFrequency( CatalogTable table, long invocationTimestamp ) { + public void determinePartitionFrequency( LogicalTable table, long invocationTimestamp ) { Timestamp queryStart = new Timestamp( invocationTimestamp - ((TemperaturePartitionProperty) table.partitionProperty).getFrequencyInterval() * 1000 ); accessCounter = new HashMap<>(); diff --git a/dbms/src/main/java/org/polypheny/db/partition/HashPartitionManager.java b/dbms/src/main/java/org/polypheny/db/partition/HashPartitionManager.java index 7aa8436552..b5c600b8e4 100644 --- a/dbms/src/main/java/org/polypheny/db/partition/HashPartitionManager.java +++ b/dbms/src/main/java/org/polypheny/db/partition/HashPartitionManager.java @@ -21,7 +21,7 @@ import java.util.List; import lombok.extern.slf4j.Slf4j; import org.polypheny.db.catalog.entity.CatalogColumn; -import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.partition.PartitionFunctionInfo.PartitionFunctionInfoColumn; import org.polypheny.db.partition.PartitionFunctionInfo.PartitionFunctionInfoColumnType; import org.polypheny.db.type.PolyType; @@ -35,7 +35,7 @@ public class HashPartitionManager extends AbstractPartitionManager { @Override - public long getTargetPartitionId( CatalogTable catalogTable, String columnValue ) { + public long getTargetPartitionId( LogicalTable catalogTable, String columnValue ) { long hashValue = columnValue.hashCode() * -1; // Don't want any neg. value for now diff --git a/dbms/src/main/java/org/polypheny/db/partition/ListPartitionManager.java b/dbms/src/main/java/org/polypheny/db/partition/ListPartitionManager.java index a11d2c29b3..222e05a3f6 100644 --- a/dbms/src/main/java/org/polypheny/db/partition/ListPartitionManager.java +++ b/dbms/src/main/java/org/polypheny/db/partition/ListPartitionManager.java @@ -24,7 +24,7 @@ import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogColumn; import org.polypheny.db.catalog.entity.CatalogPartition; -import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.partition.PartitionFunctionInfo.PartitionFunctionInfoColumn; import org.polypheny.db.partition.PartitionFunctionInfo.PartitionFunctionInfoColumnType; import org.polypheny.db.type.PolyType; @@ -40,7 +40,7 @@ public class ListPartitionManager extends AbstractPartitionManager { @Override - public long getTargetPartitionId( CatalogTable catalogTable, String columnValue ) { + public long getTargetPartitionId( LogicalTable catalogTable, String columnValue ) { long unboundPartitionId = -1; long selectedPartitionId = -1; diff --git a/dbms/src/main/java/org/polypheny/db/partition/RangePartitionManager.java b/dbms/src/main/java/org/polypheny/db/partition/RangePartitionManager.java index befe0fe209..b40ad64a31 100644 --- a/dbms/src/main/java/org/polypheny/db/partition/RangePartitionManager.java +++ b/dbms/src/main/java/org/polypheny/db/partition/RangePartitionManager.java @@ -26,7 +26,7 @@ import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogColumn; import org.polypheny.db.catalog.entity.CatalogPartition; -import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.partition.PartitionFunctionInfo.PartitionFunctionInfoColumn; import org.polypheny.db.partition.PartitionFunctionInfo.PartitionFunctionInfoColumnType; import 
org.polypheny.db.type.PolyType; @@ -42,7 +42,7 @@ public class RangePartitionManager extends AbstractPartitionManager { @Override - public long getTargetPartitionId( CatalogTable catalogTable, String columnValue ) { + public long getTargetPartitionId( LogicalTable catalogTable, String columnValue ) { long unboundPartitionId = -1; long selectedPartitionId = -1; diff --git a/dbms/src/main/java/org/polypheny/db/partition/TemperatureAwarePartitionManager.java b/dbms/src/main/java/org/polypheny/db/partition/TemperatureAwarePartitionManager.java index e41b3556d3..e10556f704 100644 --- a/dbms/src/main/java/org/polypheny/db/partition/TemperatureAwarePartitionManager.java +++ b/dbms/src/main/java/org/polypheny/db/partition/TemperatureAwarePartitionManager.java @@ -23,7 +23,7 @@ import java.util.Map; import org.polypheny.db.catalog.entity.CatalogColumn; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; -import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.partition.PartitionFunctionInfo.PartitionFunctionInfoColumn; import org.polypheny.db.partition.PartitionFunctionInfo.PartitionFunctionInfoColumnType; import org.polypheny.db.partition.properties.TemperaturePartitionProperty; @@ -38,7 +38,7 @@ public class TemperatureAwarePartitionManager extends AbstractPartitionManager { @Override - public long getTargetPartitionId( CatalogTable catalogTable, String columnValue ) { + public long getTargetPartitionId( LogicalTable catalogTable, String columnValue ) { // Get partition manager PartitionManagerFactory partitionManagerFactory = PartitionManagerFactory.getInstance(); PartitionManager partitionManager = partitionManagerFactory.getPartitionManager( @@ -50,7 +50,7 @@ public long getTargetPartitionId( CatalogTable catalogTable, String columnValue @Override - public Map> getRelevantPlacements( CatalogTable catalogTable, List partitionIds, List excludedAdapters ) { + public Map> getRelevantPlacements( LogicalTable catalogTable, List partitionIds, List excludedAdapters ) { // Get partition manager PartitionManagerFactory partitionManagerFactory = PartitionManagerFactory.getInstance(); PartitionManager partitionManager = partitionManagerFactory.getPartitionManager( @@ -62,7 +62,7 @@ public Map> getRelevantPlacements( CatalogTab @Override - public Map>> getAllPlacements( CatalogTable catalogTable, List partitionIds ) { + public Map>> getAllPlacements( LogicalTable catalogTable, List partitionIds ) { // Get partition manager PartitionManagerFactory partitionManagerFactory = PartitionManagerFactory.getInstance(); PartitionManager partitionManager = partitionManagerFactory.getPartitionManager( diff --git a/dbms/src/main/java/org/polypheny/db/processing/AbstractQueryProcessor.java b/dbms/src/main/java/org/polypheny/db/processing/AbstractQueryProcessor.java index 7208655cd1..3adbc61fe8 100644 --- a/dbms/src/main/java/org/polypheny/db/processing/AbstractQueryProcessor.java +++ b/dbms/src/main/java/org/polypheny/db/processing/AbstractQueryProcessor.java @@ -71,8 +71,8 @@ import org.polypheny.db.algebra.logical.common.LogicalConditionalExecute; import org.polypheny.db.algebra.logical.document.LogicalDocumentModify; import org.polypheny.db.algebra.logical.lpg.LogicalLpgModify; -import org.polypheny.db.algebra.logical.relational.LogicalModify; import org.polypheny.db.algebra.logical.relational.LogicalProject; +import org.polypheny.db.algebra.logical.relational.LogicalRelModify; import 
org.polypheny.db.algebra.logical.relational.LogicalRelScan; import org.polypheny.db.algebra.logical.relational.LogicalValues; import org.polypheny.db.algebra.type.AlgDataType; @@ -80,7 +80,7 @@ import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.Catalog.NamespaceType; import org.polypheny.db.catalog.entity.CatalogSchema; -import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.config.RuntimeConfig; import org.polypheny.db.information.InformationCode; import org.polypheny.db.information.InformationGroup; @@ -613,10 +613,10 @@ private AlgRoot indexUpdate( AlgRoot root, Statement statement, AlgDataType para @Override public AlgNode visit( AlgNode node ) { RexBuilder rexBuilder = new RexBuilder( statement.getTransaction().getTypeFactory() ); - if ( node instanceof LogicalModify ) { + if ( node instanceof LogicalRelModify ) { final Catalog catalog = Catalog.getInstance(); - final LogicalModify ltm = (LogicalModify) node; - final CatalogTable table = ltm.getEntity().getCatalogEntity().unwrap( CatalogTable.class ); + final LogicalRelModify ltm = (LogicalRelModify) node; + final LogicalTable table = ltm.getEntity().getCatalogEntity().unwrap( LogicalTable.class ); final CatalogSchema schema = catalog.getSchema( table.namespaceId ); final List indices = IndexManager.getInstance().getIndices( schema, table ); @@ -911,7 +911,7 @@ public AlgNode visit( LogicalProject project ) { } // Retrieve the catalog schema and database representations required for index lookup final CatalogSchema schema = statement.getTransaction().getDefaultSchema(); - final CatalogTable ctable = scan.getEntity().getCatalogEntity().unwrap( CatalogTable.class ); + final LogicalTable ctable = scan.getEntity().getCatalogEntity().unwrap( LogicalTable.class ); // Retrieve any index and use for simplification final Index idx = IndexManager.getInstance().getIndex( schema, ctable, columns ); if ( idx == null ) { @@ -958,8 +958,8 @@ private List route( AlgRoot logicalRoot, Statement statemen return routeGraph( logicalRoot, queryInformation, dmlRouter ); } else if ( logicalRoot.getModel() == ModelTrait.DOCUMENT ) { return routeDocument( logicalRoot, queryInformation, dmlRouter ); - } else if ( logicalRoot.alg instanceof LogicalModify ) { - AlgNode routedDml = dmlRouter.routeDml( (LogicalModify) logicalRoot.alg, statement ); + } else if ( logicalRoot.alg instanceof LogicalRelModify ) { + AlgNode routedDml = dmlRouter.routeDml( (LogicalRelModify) logicalRoot.alg, statement ); return Lists.newArrayList( new ProposedRoutingPlanImpl( routedDml, logicalRoot, queryInformation.getQueryClass() ) ); } else if ( logicalRoot.alg instanceof ConditionalExecute ) { AlgNode routedConditionalExecute = dmlRouter.handleConditionalExecute( logicalRoot.alg, statement, queryInformation ); @@ -1316,7 +1316,7 @@ private Map> getAccessedPartitionsPerScan( AlgNode alg, Map< } // Get placements of this table - CatalogTable catalogTable = table.getCatalogEntity().unwrap( CatalogTable.class ); + LogicalTable catalogTable = table.getCatalogEntity().unwrap( LogicalTable.class ); if ( aggregatedPartitionValues.containsKey( scanId ) ) { if ( aggregatedPartitionValues.get( scanId ) != null ) { diff --git a/dbms/src/main/java/org/polypheny/db/processing/ConstraintEnforceAttacher.java b/dbms/src/main/java/org/polypheny/db/processing/ConstraintEnforceAttacher.java index 151b6a30ea..a5997279a6 100644 --- 
a/dbms/src/main/java/org/polypheny/db/processing/ConstraintEnforceAttacher.java
+++ b/dbms/src/main/java/org/polypheny/db/processing/ConstraintEnforceAttacher.java
@@ -37,7 +37,7 @@ import org.polypheny.db.algebra.constant.ExplainLevel;
 import org.polypheny.db.algebra.constant.Kind;
 import org.polypheny.db.algebra.core.JoinAlgType;
-import org.polypheny.db.algebra.core.Modify;
+import org.polypheny.db.algebra.core.relational.RelModify;
 import org.polypheny.db.algebra.core.Project;
 import org.polypheny.db.algebra.core.Values;
 import org.polypheny.db.algebra.core.common.ConditionalExecute.Condition;
@@ -60,7 +60,7 @@ import org.polypheny.db.catalog.entity.CatalogForeignKey;
 import org.polypheny.db.catalog.entity.CatalogKey.EnforcementTime;
 import org.polypheny.db.catalog.entity.CatalogPrimaryKey;
-import org.polypheny.db.catalog.entity.CatalogTable;
+import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.catalog.exceptions.GenericCatalogException;
 import org.polypheny.db.catalog.exceptions.UnknownColumnException;
 import org.polypheny.db.catalog.exceptions.UnknownDatabaseException;
@@ -128,7 +128,7 @@ public static AlgRoot attachOnQueryConstraints( AlgRoot root, Statement statemen
     public static void attachOnCommitConstraints( AlgNode node, Statement statement ) {
         ModifyExtractor extractor = new ModifyExtractor();
         node.accept( extractor );
-        Modify modify = extractor.getModify();
+        RelModify modify = extractor.getModify();
 
         if ( modify == null ) {
             throw new RuntimeException( "The tree did no conform, while generating the constraint enforcement query!" );
@@ -138,7 +138,7 @@ public static void attachOnCommitConstraints( AlgNode node, Statement statement
     }
 
-    public static List<EnforcementInformation> getConstraintAlg( Set<CatalogTable> catalogTables, Statement statement, EnforcementTime enforcementTime ) {
+    public static List<EnforcementInformation> getConstraintAlg( Set<LogicalTable> catalogTables, Statement statement, EnforcementTime enforcementTime ) {
         return catalogTables
                 .stream()
                 .map( t -> LogicalConstraintEnforcer.getControl( t, statement, enforcementTime ) )
@@ -194,18 +194,18 @@ public static AlgRoot enforceConstraintBeforeQuery( AlgRoot logicalRoot, Stateme
         if ( !logicalRoot.kind.belongsTo( Kind.DML ) ) {
             return logicalRoot;
         }
-        if ( !(logicalRoot.alg instanceof Modify) ) {
+        if ( !(logicalRoot.alg instanceof RelModify) ) {
             return logicalRoot;
         }
-        final Modify root = (Modify) logicalRoot.alg;
+        final RelModify root = (RelModify) logicalRoot.alg;
 
         final Catalog catalog = Catalog.getInstance();
-        final CatalogTable table;
+        final LogicalTable table;
         final CatalogPrimaryKey primaryKey;
         final List<CatalogConstraint> constraints;
         final List<CatalogForeignKey> foreignKeys;
         final List<CatalogForeignKey> exportedKeys;
-        table = root.getEntity().getCatalogEntity().unwrap( CatalogTable.class );
+        table = root.getEntity().getCatalogEntity().unwrap( LogicalTable.class );
         primaryKey = catalog.getPrimaryKey( table.primaryKey );
         constraints = new ArrayList<>( Catalog.getInstance().getConstraints( table.id ) );
         foreignKeys = Catalog.getInstance().getForeignKeys( table.id );
@@ -484,7 +484,7 @@ public static AlgRoot enforceConstraintBeforeQuery( AlgRoot logicalRoot, Stateme
             AlgNode input = root.getInput().accept( new DeepCopyShuttle() );
             final List<RexNode> projects = new ArrayList<>( foreignKey.columnIds.size() );
             final List<RexNode> foreignProjects = new ArrayList<>( foreignKey.columnIds.size() );
-            final CatalogTable foreignTable = Catalog.getInstance().getTable( foreignKey.referencedKeyTableId );
+            final LogicalTable foreignTable = Catalog.getInstance().getTable( foreignKey.referencedKeyTableId );
             builder.push( input );
             for ( int i = 0; i < foreignKey.columnIds.size(); ++i ) {
                 final String columnName = foreignKey.getColumnNames().get( i );
@@ -560,7 +560,7 @@ public RexNode visitFieldAccess( RexFieldAccess fieldAccess ) {
             }
             final List<RexNode> projects = new ArrayList<>( foreignKey.columnIds.size() );
             final List<RexNode> foreignProjects = new ArrayList<>( foreignKey.columnIds.size() );
-            final CatalogTable foreignTable = Catalog.getInstance().getTable( foreignKey.tableId );
+            final LogicalTable foreignTable = Catalog.getInstance().getTable( foreignKey.tableId );
             for ( int i = 0; i < foreignKey.columnIds.size(); ++i ) {
                 final String columnName = foreignKey.getReferencedKeyColumnNames().get( i );
                 final String foreignColumnName = foreignKey.getColumnNames().get( i );
@@ -655,7 +655,7 @@ public void restart( Config c ) {
     private boolean testConstraintsValid() {
         if ( RuntimeConfig.FOREIGN_KEY_ENFORCEMENT.getBoolean() || RuntimeConfig.UNIQUE_CONSTRAINT_ENFORCEMENT.getBoolean() ) {
             try {
-                List<CatalogTable> tables = Catalog
+                List<LogicalTable> tables = Catalog
                         .getInstance()
                         .getTables( null, null, null )
                         .stream()
diff --git a/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java b/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java
index d411474ab9..7e91705f79 100644
--- a/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java
+++ b/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java
@@ -37,7 +37,7 @@ import org.polypheny.db.algebra.AlgRoot;
 import org.polypheny.db.algebra.AlgStructuredTypeFlattener;
 import org.polypheny.db.algebra.constant.Kind;
-import org.polypheny.db.algebra.core.Modify.Operation;
+import org.polypheny.db.algebra.core.common.Modify;
 import org.polypheny.db.algebra.logical.lpg.LogicalLpgModify;
 import org.polypheny.db.algebra.logical.lpg.LogicalLpgScan;
 import org.polypheny.db.algebra.logical.lpg.LogicalLpgValues;
@@ -51,9 +51,9 @@ import org.polypheny.db.catalog.entity.CatalogAdapter;
 import org.polypheny.db.catalog.entity.CatalogColumn;
 import org.polypheny.db.catalog.entity.CatalogColumnPlacement;
-import org.polypheny.db.catalog.entity.CatalogGraphDatabase;
+import org.polypheny.db.catalog.entity.logical.LogicalGraph;
 import org.polypheny.db.catalog.entity.CatalogPrimaryKey;
-import org.polypheny.db.catalog.entity.CatalogTable;
+import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.config.RuntimeConfig;
 import org.polypheny.db.partition.PartitionManager;
 import org.polypheny.db.partition.PartitionManagerFactory;
@@ -79,7 +79,7 @@ public class DataMigratorImpl implements DataMigrator {
 
     @Override
-    public void copyGraphData( CatalogGraphDatabase target, Transaction transaction, Integer existingAdapterId, CatalogAdapter to ) {
+    public void copyGraphData( LogicalGraph target, Transaction transaction, Integer existingAdapterId, CatalogAdapter to ) {
         Statement statement = transaction.createStatement();
 
         AlgBuilder builder = AlgBuilder.create( statement );
@@ -122,7 +122,7 @@ public void copyGraphData( CatalogGraphDatabase target, Transaction transaction,
         LogicalLpgValues values = getLogicalLpgValues( builder, graph );
 
-        LogicalLpgModify modify = new LogicalLpgModify( builder.getCluster(), builder.getCluster().traitSetOf( ModelTrait.GRAPH ), target, values, Operation.INSERT, null, null );
+        LogicalLpgModify modify = new LogicalLpgModify( builder.getCluster(), builder.getCluster().traitSetOf( ModelTrait.GRAPH ), target, values, Modify.Operation.INSERT, null, null );
 
         AlgNode routedModify =
RoutingManager.getInstance().getDmlRouter().routeGraphDml( modify, statement, target, List.of( to.id ) ); @@ -164,7 +164,7 @@ private static LogicalLpgValues getLogicalLpgValues( AlgBuilder builder, PolyGra @Override public void copyData( Transaction transaction, CatalogAdapter store, List columns, List partitionIds ) { - CatalogTable table = Catalog.getInstance().getTable( columns.get( 0 ).tableId ); + LogicalTable table = Catalog.getInstance().getTable( columns.get( 0 ).tableId ); CatalogPrimaryKey primaryKey = Catalog.getInstance().getPrimaryKey( table.primaryKey ); // Check Lists @@ -345,7 +345,7 @@ public AlgRoot buildDeleteStatement( Statement statement, List selectSourcePlacements( CatalogTable table, List columns, int excludingAdapterId ) { + public static List selectSourcePlacements( LogicalTable table, List columns, int excludingAdapterId ) { // Find the adapter with the most column placements Catalog catalog = Catalog.getInstance(); int adapterIdWithMostPlacements = -1; @@ -520,7 +520,7 @@ public static List selectSourcePlacements( CatalogTable /** * Currently used to transfer data if a partitioned table is about to be merged. - * For Table Partitioning use {@link #copyPartitionData(Transaction, CatalogAdapter, CatalogTable, CatalogTable, List, List, List)} } instead + * For Table Partitioning use {@link #copyPartitionData(Transaction, CatalogAdapter, LogicalTable, LogicalTable, List, List, List)} instead * * @param transaction Transactional scope * @param store Target Store where data should be migrated to @@ -531,7 +531,7 @@ public static List selectSourcePlacements( CatalogTable * @param targetPartitionIds Target Partitions where data should be inserted */ @Override - public void copySelectiveData( Transaction transaction, CatalogAdapter store, CatalogTable sourceTable, CatalogTable targetTable, List columns, Map> placementDistribution, List targetPartitionIds ) { + public void copySelectiveData( Transaction transaction, CatalogAdapter store, LogicalTable sourceTable, LogicalTable targetTable, List columns, Map> placementDistribution, List targetPartitionIds ) { CatalogPrimaryKey sourcePrimaryKey = Catalog.getInstance().getPrimaryKey( sourceTable.primaryKey ); // Check Lists @@ -618,7 +618,7 @@ public void copySelectiveData( Transaction transaction, CatalogAdapter store, Ca /** * Currently used to transfer data if an unpartitioned table is about to be partitioned. - * For Table Merge use {@link #copySelectiveData(Transaction, CatalogAdapter, CatalogTable, CatalogTable, List, Map, List)} } instead + * For Table Merge use {@link #copySelectiveData(Transaction, CatalogAdapter, LogicalTable, LogicalTable, List, Map, List)} instead * * @param transaction Transactional scope * @param store Target Store where data should be migrated to @@ -629,7 +629,7 @@ public void copySelectiveData( Transaction transaction, CatalogAdapter store, Ca * @param targetPartitionIds Target Partitions where data should be inserted */ @Override - public void copyPartitionData( Transaction transaction, CatalogAdapter store, CatalogTable sourceTable, CatalogTable targetTable, List columns, List sourcePartitionIds, List targetPartitionIds ) { + public void copyPartitionData( Transaction transaction, CatalogAdapter store, LogicalTable sourceTable, LogicalTable targetTable, List columns, List sourcePartitionIds, List targetPartitionIds ) { if ( sourceTable.id != targetTable.id ) { throw new RuntimeException( "Unsupported migration scenario.
Table ID mismatch" ); } diff --git a/dbms/src/main/java/org/polypheny/db/processing/shuttles/QueryParameterizer.java b/dbms/src/main/java/org/polypheny/db/processing/shuttles/QueryParameterizer.java index 48cb668779..3bf5c387e8 100644 --- a/dbms/src/main/java/org/polypheny/db/processing/shuttles/QueryParameterizer.java +++ b/dbms/src/main/java/org/polypheny/db/processing/shuttles/QueryParameterizer.java @@ -39,7 +39,7 @@ import org.polypheny.db.algebra.logical.lpg.LogicalLpgFilter; import org.polypheny.db.algebra.logical.lpg.LogicalLpgProject; import org.polypheny.db.algebra.logical.relational.LogicalFilter; -import org.polypheny.db.algebra.logical.relational.LogicalModify; +import org.polypheny.db.algebra.logical.relational.LogicalRelModify; import org.polypheny.db.algebra.logical.relational.LogicalModifyCollect; import org.polypheny.db.algebra.logical.relational.LogicalProject; import org.polypheny.db.algebra.logical.relational.LogicalValues; @@ -237,12 +237,12 @@ public AlgNode visit( AlgNode other ) { @Override - public AlgNode visit( LogicalModify initial ) { + public AlgNode visit( LogicalRelModify initial ) { if ( asymmetric ) { return visitAsymmetricModify( initial ); } - LogicalModify modify = (LogicalModify) super.visit( initial ); + LogicalRelModify modify = (LogicalRelModify) super.visit( initial ); List newSourceExpression = null; if ( modify.getSourceExpressionList() != null ) { newSourceExpression = new ArrayList<>(); @@ -288,7 +288,7 @@ public AlgNode visit( LogicalModify initial ) { input.getRowType() ); } - return new LogicalModify( + return new LogicalRelModify( modify.getCluster(), modify.getTraitSet(), modify.getEntity(), @@ -303,8 +303,8 @@ public AlgNode visit( LogicalModify initial ) { } - public AlgNode visitAsymmetricModify( LogicalModify initial ) { - LogicalModify modify = (LogicalModify) super.visit( initial ); + public AlgNode visitAsymmetricModify( LogicalRelModify initial ) { + LogicalRelModify modify = (LogicalRelModify) super.visit( initial ); List newSourceExpression = null; if ( modify.getSourceExpressionList() != null ) { newSourceExpression = new ArrayList<>(); @@ -357,7 +357,7 @@ public AlgNode visitAsymmetricModify( LogicalModify initial ) { input.getRowType() ); } - return new LogicalModify( + return new LogicalRelModify( modify.getCluster(), modify.getTraitSet(), modify.getEntity(), diff --git a/dbms/src/main/java/org/polypheny/db/routing/UiRoutingPageUtil.java b/dbms/src/main/java/org/polypheny/db/routing/UiRoutingPageUtil.java index 10c6f56a33..5fb864d657 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/UiRoutingPageUtil.java +++ b/dbms/src/main/java/org/polypheny/db/routing/UiRoutingPageUtil.java @@ -28,7 +28,7 @@ import org.polypheny.db.catalog.entity.CatalogPartition; import org.polypheny.db.catalog.entity.CatalogPartitionGroup; import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; -import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.information.InformationGroup; import org.polypheny.db.information.InformationManager; import org.polypheny.db.information.InformationPage; @@ -93,7 +93,7 @@ private static void addSelectedAdapterTable( InformationManager queryAnalyzer, P proposedRoutingPlan.getPhysicalPlacementsOfPartitions().forEach( ( k, v ) -> { CatalogPartition catalogPartition = Catalog.getInstance().getPartition( k ); CatalogPartitionGroup catalogPartitionGroup = Catalog.getInstance().getPartitionGroup( catalogPartition.partitionGroupId 
); - CatalogTable catalogTable = Catalog.getInstance().getTable( catalogPartition.tableId ); + LogicalTable catalogTable = Catalog.getInstance().getTable( catalogPartition.tableId ); v.forEach( p -> { CatalogColumnPlacement catalogColumnPlacement = Catalog.getInstance().getColumnPlacement( p.left, p.right ); CatalogPartitionPlacement catalogPartitionPlacement = Catalog.getInstance().getPartitionPlacement( p.left, k ); diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/AbstractDqlRouter.java b/dbms/src/main/java/org/polypheny/db/routing/routers/AbstractDqlRouter.java index 0609869fd1..49d36dacd9 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/AbstractDqlRouter.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/AbstractDqlRouter.java @@ -38,13 +38,13 @@ import org.polypheny.db.algebra.logical.common.LogicalTransformer; import org.polypheny.db.algebra.logical.document.LogicalDocumentScan; import org.polypheny.db.algebra.logical.lpg.LogicalLpgScan; -import org.polypheny.db.algebra.logical.relational.LogicalModify; +import org.polypheny.db.algebra.logical.relational.LogicalRelModify; import org.polypheny.db.algebra.logical.relational.LogicalRelScan; import org.polypheny.db.algebra.logical.relational.LogicalValues; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.Catalog.NamespaceType; import org.polypheny.db.catalog.Catalog.Pattern; -import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.prepare.AlgOptEntityImpl; import org.polypheny.db.rex.RexBuilder; @@ -90,7 +90,7 @@ public abstract class AbstractDqlRouter extends BaseRouter implements Router { */ protected abstract List handleHorizontalPartitioning( AlgNode node, - CatalogTable catalogTable, + LogicalTable catalogTable, Statement statement, LogicalEntity logicalTable, List builders, @@ -99,7 +99,7 @@ protected abstract List handleHorizontalPartitioning( protected abstract List handleVerticalPartitioningOrReplication( AlgNode node, - CatalogTable catalogTable, + LogicalTable catalogTable, Statement statement, LogicalEntity logicalTable, List builders, @@ -108,7 +108,7 @@ protected abstract List handleVerticalPartitioningOrReplicatio protected abstract List handleNonePartitioning( AlgNode node, - CatalogTable catalogTable, + LogicalTable catalogTable, Statement statement, List builders, AlgOptCluster cluster, @@ -123,7 +123,7 @@ public List route( AlgRoot logicalRoot, Statement statement, L // Reset cancel query this run this.cancelQuery = false; - if ( logicalRoot.alg instanceof LogicalModify ) { + if ( logicalRoot.alg instanceof LogicalRelModify ) { throw new IllegalStateException( "Should never happen for DML" ); } else if ( logicalRoot.alg instanceof ConditionalExecute ) { throw new IllegalStateException( "Should never happen for conditional executes" ); @@ -225,7 +225,7 @@ protected List buildSelect( AlgNode node, List> selectPlacement( CatalogTable table ) { + protected static Map> selectPlacement( LogicalTable table ) { // Find the adapter with the most column placements int adapterIdWithMostPlacements = -1; int numOfPlacements = 0; @@ -424,7 +424,7 @@ public AlgNode handleGraphScan( LogicalLpgScan alg, Statement statement, @Nullab return handleGraphOnDocument( alg, namespace, statement, placementId ); } - CatalogGraphDatabase catalogGraph = alg.getGraph(); + LogicalGraph catalogGraph = alg.getGraph(); List scans = new ArrayList<>(); @@ -438,7 
+438,7 @@ public AlgNode handleGraphScan( LogicalLpgScan alg, Statement statement, @Nullab CatalogGraphPlacement graphPlacement = catalog.getGraphPlacement( catalogGraph.id, adapterId ); String name = PolySchemaBuilder.buildAdapterSchemaName( adapter.uniqueName, catalogGraph.name, graphPlacement.physicalName ); - CatalogGraphDatabase graph = reader.getGraph( name ); + LogicalGraph graph = reader.getGraph( name ); if ( !(graph instanceof TranslatableGraph) ) { // needs substitution later on @@ -460,7 +460,7 @@ public AlgNode handleGraphScan( LogicalLpgScan alg, Statement statement, @Nullab private AlgNode handleGraphOnRelational( LogicalLpgScan alg, CatalogSchema namespace, Statement statement, Integer placementId ) { AlgOptCluster cluster = alg.getCluster(); - List tables = catalog.getTables( Catalog.defaultDatabaseId, new Pattern( namespace.name ), null ); + List tables = catalog.getTables( Catalog.defaultDatabaseId, new Pattern( namespace.name ), null ); List> scans = tables.stream() .map( t -> Pair.of( t.name, buildJoinedScan( statement, cluster, selectPlacement( t ) ) ) ) .collect( Collectors.toList() ); @@ -511,7 +511,7 @@ public AlgNode getRelationalScan( LogicalLpgScan alg, int adapterId, Statement s protected PreparingEntity getSubstitutionTable( Statement statement, long tableId, long columnId, int adapterId ) { - CatalogTable nodes = Catalog.getInstance().getTable( tableId ); + LogicalTable nodes = Catalog.getInstance().getTable( tableId ); CatalogColumnPlacement placement = Catalog.getInstance().getColumnPlacement( adapterId, columnId ); List qualifiedTableName = ImmutableList.of( PolySchemaBuilder.buildAdapterSchemaName( diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/CachedPlanRouter.java b/dbms/src/main/java/org/polypheny/db/routing/routers/CachedPlanRouter.java index ee64f2fde7..3aeaaea795 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/CachedPlanRouter.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/CachedPlanRouter.java @@ -28,7 +28,7 @@ import org.polypheny.db.algebra.logical.relational.LogicalValues; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; -import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.prepare.AlgOptEntityImpl; import org.polypheny.db.routing.LogicalQueryInformation; @@ -63,7 +63,7 @@ private RoutedAlgBuilder buildCachedSelect( AlgNode node, RoutedAlgBuilder build if ( node instanceof LogicalRelScan && node.getEntity() != null ) { AlgOptEntityImpl table = (AlgOptEntityImpl) node.getEntity(); - CatalogTable catalogTable = table.getCatalogEntity().unwrap( CatalogTable.class ); + LogicalTable catalogTable = table.getCatalogEntity().unwrap( LogicalTable.class ); List partitionIds = catalogTable.partitionProperty.partitionIds; Map> placement = new HashMap<>(); diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java b/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java index 0a6334e0dd..31202b82d9 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java @@ -33,13 +33,14 @@ import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.AlgShuttleImpl; import org.polypheny.db.algebra.constant.Kind; -import org.polypheny.db.algebra.core.Modify; -import 
org.polypheny.db.algebra.core.Modify.Operation; +import org.polypheny.db.algebra.core.relational.RelModify; +import org.polypheny.db.algebra.core.common.Modify.Operation; import org.polypheny.db.algebra.core.ModifyCollect; import org.polypheny.db.algebra.core.Values; import org.polypheny.db.algebra.core.common.BatchIterator; import org.polypheny.db.algebra.core.common.ConditionalExecute; import org.polypheny.db.algebra.core.common.ConstraintEnforcer; +import org.polypheny.db.algebra.core.common.Modify; import org.polypheny.db.algebra.core.document.DocumentAlg; import org.polypheny.db.algebra.core.document.DocumentProject; import org.polypheny.db.algebra.core.document.DocumentScan; @@ -66,7 +67,7 @@ import org.polypheny.db.algebra.logical.lpg.LogicalLpgTransformer; import org.polypheny.db.algebra.logical.lpg.LogicalLpgValues; import org.polypheny.db.algebra.logical.relational.LogicalFilter; -import org.polypheny.db.algebra.logical.relational.LogicalModify; +import org.polypheny.db.algebra.logical.relational.LogicalRelModify; import org.polypheny.db.algebra.logical.relational.LogicalModifyCollect; import org.polypheny.db.algebra.logical.relational.LogicalProject; import org.polypheny.db.algebra.logical.relational.LogicalRelScan; @@ -85,11 +86,11 @@ import org.polypheny.db.catalog.entity.CatalogCollectionPlacement; import org.polypheny.db.catalog.entity.CatalogColumn; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; -import org.polypheny.db.catalog.entity.CatalogGraphDatabase; +import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.catalog.entity.CatalogGraphMapping; import org.polypheny.db.catalog.entity.CatalogGraphPlacement; import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; -import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.exceptions.UnknownColumnException; import org.polypheny.db.partition.PartitionManager; import org.polypheny.db.partition.PartitionManagerFactory; @@ -127,7 +128,7 @@ public class DmlRouterImpl extends BaseRouter implements DmlRouter { * Default implementation: Execute DML on all placements */ @Override - public AlgNode routeDml( LogicalModify modify, Statement statement ) { + public AlgNode routeDml( LogicalRelModify modify, Statement statement ) { AlgOptCluster cluster = modify.getCluster(); if ( modify.getEntity() == null ) { @@ -137,7 +138,7 @@ public AlgNode routeDml( LogicalModify modify, Statement statement ) { AlgOptEntityImpl table = (AlgOptEntityImpl) modify.getEntity(); // Get placements of this table - CatalogTable catalogTable = table.getCatalogEntity().unwrap( CatalogTable.class ); + LogicalTable catalogTable = table.getCatalogEntity().unwrap( LogicalTable.class ); // Make sure that this table can be modified if ( !catalogTable.modifiable ) { @@ -193,7 +194,7 @@ public AlgNode routeDml( LogicalModify modify, Statement statement ) { List sourceExpressionList = modify.getSourceExpressionList(); if ( placementsOnAdapter.size() != catalogTable.fieldIds.size() ) { - if ( modify.getOperation() == Operation.UPDATE ) { + if ( modify.getOperation() == Modify.Operation.UPDATE ) { updateColumnList = new LinkedList<>( modify.getUpdateColumnList() ); sourceExpressionList = new LinkedList<>( modify.getSourceExpressionList() ); Iterator updateColumnListIterator = updateColumnList.iterator(); @@ -262,7 +263,7 @@ public AlgNode visit( LogicalFilter filter ) { String partitionValue = ""; // Set true if partitionColumn is 
part of UPDATE Statement, else assume worst case routing - if ( modify.getOperation() == Operation.UPDATE ) { + if ( modify.getOperation() == Modify.Operation.UPDATE ) { // In case of update always use worst case routing for now. // Since you have to identify the current partition to delete the entry and then create a new entry on the correct partitions int index = 0; @@ -333,7 +334,7 @@ else if ( identifiedPartitionForSetValue != -1 ) { accessedPartitionList = new HashSet<>( catalogTable.partitionProperty.partitionIds ); } - } else if ( modify.getOperation() == Operation.INSERT ) { + } else if ( modify.getOperation() == Modify.Operation.INSERT ) { int i; if ( modify.getInput() instanceof LogicalValues ) { @@ -415,7 +416,7 @@ else if ( identifiedPartitionForSetValue != -1 ) { ModifiableEntity modifiableTable = physical.unwrap( ModifiableEntity.class ); // Build DML - Modify adjustedModify = modifiableTable.toModificationAlg( + RelModify adjustedModify = modifiableTable.toModificationAlg( cluster, physical, catalogReader, @@ -437,7 +438,7 @@ else if ( identifiedPartitionForSetValue != -1 ) { String partitionColumnName = catalog.getColumn( catalogTable.partitionProperty.partitionColumnId ).name; List fieldNames = modify.getInput().getRowType().getFieldNames(); - LogicalModify ltm = modify; + LogicalRelModify ltm = modify; LogicalProject lproject = (LogicalProject) ltm.getInput(); List fieldValues = lproject.getProjects(); @@ -506,7 +507,7 @@ else if ( identifiedPartitionForSetValue != -1 ) { ModifiableEntity modifiableTable = physical.unwrap( ModifiableEntity.class ); // Build DML - Modify adjustedModify = modifiableTable.toModificationAlg( + RelModify adjustedModify = modifiableTable.toModificationAlg( cluster, physical, catalogReader, @@ -550,7 +551,7 @@ else if ( identifiedPartitionForSetValue != -1 ) { } - } else if ( modify.getOperation() == Operation.DELETE ) { + } else if ( modify.getOperation() == Modify.Operation.DELETE ) { if ( whereClauseValues == null ) { worstCaseRouting = true; } else { @@ -594,7 +595,7 @@ else if ( identifiedPartitionForSetValue != -1 ) { AlgOptEntity physical = catalogReader.getTableForMember( qualifiedTableName ); // Build DML - Modify adjustedModify; + RelModify adjustedModify; AlgNode input = buildDml( super.recursiveCopy( modify.getInput( 0 ) ), RoutedAlgBuilder.create( statement, cluster ), @@ -620,9 +621,8 @@ else if ( identifiedPartitionForSetValue != -1 ) { modify.isFlattened() ); } else { - adjustedModify = LogicalModify.create( + adjustedModify = LogicalRelModify.create( physical, - catalogReader, input, modify.getOperation(), updateColumnList, @@ -663,8 +663,8 @@ public AlgNode handleConditionalExecute( AlgNode node, Statement statement, Logi AlgNode action; if ( lce.getRight() instanceof LogicalConditionalExecute ) { action = handleConditionalExecute( lce.getRight(), statement, queryInformation ); - } else if ( lce.getRight() instanceof LogicalModify ) { - action = routeDml( (LogicalModify) lce.getRight(), statement ); + } else if ( lce.getRight() instanceof LogicalRelModify ) { + action = routeDml( (LogicalRelModify) lce.getRight(), statement ); } else { throw new IllegalArgumentException(); } @@ -679,9 +679,9 @@ public AlgNode handleConstraintEnforcer( AlgNode alg, Statement statement, Logic RoutedAlgBuilder builder = RoutedAlgBuilder.create( statement, alg.getCluster() ); builder = RoutingManager.getInstance().getFallbackRouter().routeFirst( constraint.getRight(), builder, statement, alg.getCluster(), queryInformation ); - if ( 
constraint.getLeft() instanceof Modify ) { + if ( constraint.getLeft() instanceof RelModify ) { return LogicalConstraintEnforcer.create( - routeDml( (LogicalModify) constraint.getLeft(), statement ), + routeDml( (LogicalRelModify) constraint.getLeft(), statement ), builder.build(), constraint.getExceptionClasses(), constraint.getExceptionMessages() ); @@ -701,8 +701,8 @@ public AlgNode handleConstraintEnforcer( AlgNode alg, Statement statement, Logic public AlgNode handleBatchIterator( AlgNode alg, Statement statement, LogicalQueryInformation queryInformation ) { LogicalBatchIterator iterator = (LogicalBatchIterator) alg; AlgNode input; - if ( iterator.getInput() instanceof Modify ) { - input = routeDml( (LogicalModify) iterator.getInput(), statement ); + if ( iterator.getInput() instanceof RelModify ) { + input = routeDml( (LogicalRelModify) iterator.getInput(), statement ); } else if ( iterator.getInput() instanceof ConditionalExecute ) { input = handleConditionalExecute( iterator.getInput(), statement, queryInformation ); } else if ( iterator.getInput() instanceof ConstraintEnforcer ) { @@ -762,13 +762,13 @@ public AlgNode routeDocumentDml( LogicalDocumentModify alg, Statement statement, @Override public AlgNode routeGraphDml( LogicalLpgModify alg, Statement statement ) { - CatalogGraphDatabase catalogGraph = alg.getGraph(); + LogicalGraph catalogGraph = alg.getGraph(); return routeGraphDml( alg, statement, catalogGraph, catalogGraph.placements ); } @Override - public AlgNode routeGraphDml( LogicalLpgModify alg, Statement statement, CatalogGraphDatabase catalogGraph, List placements ) { + public AlgNode routeGraphDml( LogicalLpgModify alg, Statement statement, LogicalGraph catalogGraph, List placements ) { if ( alg.getGraph() == null ) { throw new RuntimeException( "Error while routing graph" ); } @@ -783,7 +783,7 @@ public AlgNode routeGraphDml( LogicalLpgModify alg, Statement statement, Catalog CatalogGraphPlacement graphPlacement = Catalog.getInstance().getGraphPlacement( catalogGraph.id, adapterId ); String name = PolySchemaBuilder.buildAdapterSchemaName( adapter.uniqueName, catalogGraph.name, graphPlacement.physicalName ); - CatalogGraphDatabase graph = reader.getGraph( name ); + LogicalGraph graph = reader.getGraph( name ); if ( graph == null ) { // move "slower" updates in front modifies.add( 0, attachRelationalModify( alg, adapterId, statement ) ); @@ -878,7 +878,7 @@ private List attachRelationalDoc( LogicalDocumentModify alg, Statement RoutedAlgBuilder builder = attachDocUpdate( alg.getInput(), statement, collectionTable, RoutedAlgBuilder.create( statement, alg.getCluster() ), queryInformation, adapterId ); RexBuilder rexBuilder = alg.getCluster().getRexBuilder(); AlgBuilder algBuilder = AlgBuilder.create( statement ); - if ( alg.operation == Operation.UPDATE ) { + if ( alg.operation == Modify.Operation.UPDATE ) { assert alg.getUpdates().size() == 1; AlgNode old = builder.build(); builder.push( @@ -896,11 +896,11 @@ private List attachRelationalDoc( LogicalDocumentModify alg, Statement rexBuilder.makeInputRef( collectionTable.getRowType().getFieldList().get( 0 ).getType(), 0 ), rexBuilder.makeDynamicParam( collectionTable.getRowType().getFieldList().get( 0 ).getType(), 0 ) ) ); - LogicalModify modify; - if ( alg.operation == Operation.UPDATE ) { - modify = (LogicalModify) getModify( collectionTable, builder.build(), statement, alg.operation, List.of( "_data_" ), List.of( rexBuilder.makeDynamicParam( alg.getCluster().getTypeFactory().createPolyType( PolyType.JSON ), 1 ) ) ); + 
LogicalRelModify modify; + if ( alg.operation == Modify.Operation.UPDATE ) { + modify = (LogicalRelModify) getModify( collectionTable, builder.build(), statement, alg.operation, List.of( "_data_" ), List.of( rexBuilder.makeDynamicParam( alg.getCluster().getTypeFactory().createPolyType( PolyType.JSON ), 1 ) ) ); } else { - modify = (LogicalModify) getModify( collectionTable, builder.build(), statement, alg.operation, null, null ); + modify = (LogicalRelModify) getModify( collectionTable, builder.build(), statement, alg.operation, null, null ); } modify.isStreamed( true ); @@ -967,7 +967,7 @@ private List attachRelationalDocInsert( LogicalDocumentModify alg, Stat private AlgNode attachRelationalModify( LogicalLpgModify alg, int adapterId, Statement statement ) { - CatalogGraphMapping mapping = Catalog.getInstance().getGraphMapping( alg.getGraph().id ); + CatalogGraphMapping mapping = Catalog.getInstance().getGraphMapping( alg.entity.id ); PreparingEntity nodesTable = getSubstitutionTable( statement, mapping.nodesId, mapping.idNodeId, adapterId ); PreparingEntity nodePropertiesTable = getSubstitutionTable( statement, mapping.nodesPropertyId, mapping.idNodesPropertyId, adapterId ); @@ -1036,7 +1036,7 @@ private AlgNode attachRelationalGraphUpdate( LogicalLpgModify alg, Statement sta } } AlgRecordType updateRowType = new AlgRecordType( List.of( new AlgDataTypeFieldImpl( "ROWCOUNT", 0, alg.getCluster().getTypeFactory().createPolyType( PolyType.BIGINT ) ) ) ); - LogicalLpgTransformer transformer = new LogicalLpgTransformer( alg.getCluster(), alg.getTraitSet(), inputs, updateRowType, sequence, Operation.UPDATE ); + LogicalLpgTransformer transformer = new LogicalLpgTransformer( alg.getCluster(), alg.getTraitSet(), inputs, updateRowType, sequence, Modify.Operation.UPDATE ); return new LogicalStreamer( alg.getCluster(), alg.getTraitSet(), project, transformer ); } @@ -1058,7 +1058,7 @@ private AlgNode attachRelationalGraphDelete( LogicalLpgModify alg, Statement sta } } AlgRecordType updateRowType = new AlgRecordType( List.of( new AlgDataTypeFieldImpl( "ROWCOUNT", 0, alg.getCluster().getTypeFactory().createPolyType( PolyType.BIGINT ) ) ) ); - LogicalLpgTransformer transformer = new LogicalLpgTransformer( alg.getCluster(), alg.getTraitSet(), inputs, updateRowType, sequence, Operation.DELETE ); + LogicalLpgTransformer transformer = new LogicalLpgTransformer( alg.getCluster(), alg.getTraitSet(), inputs, updateRowType, sequence, Modify.Operation.DELETE ); return new LogicalStreamer( alg.getCluster(), alg.getTraitSet(), project, transformer ); } @@ -1079,7 +1079,7 @@ private List attachPreparedGraphNodeModifyDelete( AlgOptCluster cluster rexBuilder.makeInputRef( typeFactory.createPolyType( PolyType.VARCHAR, 36 ), 0 ), rexBuilder.makeDynamicParam( typeFactory.createPolyType( PolyType.VARCHAR, 36 ), 0 ) ) ); - inputs.add( getModify( nodesTable, algBuilder.build(), statement, Operation.DELETE, null, null ) ); + inputs.add( getModify( nodesTable, algBuilder.build(), statement, Modify.Operation.DELETE, null, null ) ); // id = ? 
algBuilder @@ -1089,7 +1089,7 @@ private List attachPreparedGraphNodeModifyDelete( AlgOptCluster cluster rexBuilder.makeInputRef( typeFactory.createPolyType( PolyType.VARCHAR, 36 ), 0 ), rexBuilder.makeDynamicParam( typeFactory.createPolyType( PolyType.VARCHAR, 36 ), 0 ) ) ); - inputs.add( getModify( nodePropertiesTable, algBuilder.build(), statement, Operation.DELETE, null, null ) ); + inputs.add( getModify( nodePropertiesTable, algBuilder.build(), statement, Modify.Operation.DELETE, null, null ) ); return inputs; } @@ -1111,7 +1111,7 @@ private AlgNode attachRelationalRelatedInsert( LogicalLpgModify alg, Statement s } } AlgRecordType updateRowType = new AlgRecordType( List.of( new AlgDataTypeFieldImpl( "ROWCOUNT", 0, alg.getCluster().getTypeFactory().createPolyType( PolyType.BIGINT ) ) ) ); - LogicalLpgTransformer transformer = new LogicalLpgTransformer( alg.getCluster(), alg.getTraitSet(), inputs, updateRowType, sequence, Operation.INSERT ); + LogicalLpgTransformer transformer = new LogicalLpgTransformer( alg.getCluster(), alg.getTraitSet(), inputs, updateRowType, sequence, Modify.Operation.INSERT ); return new LogicalStreamer( alg.getCluster(), alg.getTraitSet(), project, transformer ); } @@ -1129,7 +1129,7 @@ private List attachPreparedGraphNodeModifyInsert( AlgOptCluster cluster rexBuilder.makeDynamicParam( typeFactory.createPolyType( PolyType.VARCHAR, 255 ), 1 ) ), // label nodesTable.getRowType() ); - inputs.add( getModify( nodesTable, preparedNodes, statement, Operation.INSERT, null, null ) ); + inputs.add( getModify( nodesTable, preparedNodes, statement, Modify.Operation.INSERT, null, null ) ); LogicalProject preparedNProperties = LogicalProject.create( LogicalValues.createOneRow( cluster ), @@ -1139,7 +1139,7 @@ private List attachPreparedGraphNodeModifyInsert( AlgOptCluster cluster rexBuilder.makeDynamicParam( typeFactory.createPolyType( PolyType.VARCHAR, 255 ), 2 ) ), // value nodePropertiesTable.getRowType() ); - inputs.add( getModify( nodePropertiesTable, preparedNProperties, statement, Operation.INSERT, null, null ) ); + inputs.add( getModify( nodePropertiesTable, preparedNProperties, statement, Modify.Operation.INSERT, null, null ) ); return inputs; } @@ -1159,7 +1159,7 @@ private List attachPreparedGraphEdgeModifyDelete( AlgOptCluster cluster rexBuilder.makeInputRef( typeFactory.createPolyType( PolyType.VARCHAR, 36 ), 0 ), rexBuilder.makeDynamicParam( typeFactory.createPolyType( PolyType.VARCHAR, 36 ), 0 ) ) ); - inputs.add( getModify( edgesTable, algBuilder.build(), statement, Operation.DELETE, null, null ) ); + inputs.add( getModify( edgesTable, algBuilder.build(), statement, Modify.Operation.DELETE, null, null ) ); // id = ? 
algBuilder @@ -1188,7 +1188,7 @@ private List attachPreparedGraphEdgeModifyInsert( AlgOptCluster cluster rexBuilder.makeDynamicParam( typeFactory.createPolyType( PolyType.VARCHAR, 36 ), 3 ) ), // target edgesTable.getRowType() ); - inputs.add( getModify( edgesTable, preparedEdges, statement, Operation.INSERT, null, null ) ); + inputs.add( getModify( edgesTable, preparedEdges, statement, Modify.Operation.INSERT, null, null ) ); LogicalProject preparedEProperties = LogicalProject.create( LogicalValues.createOneRow( cluster ), @@ -1198,7 +1198,7 @@ private List attachPreparedGraphEdgeModifyInsert( AlgOptCluster cluster rexBuilder.makeDynamicParam( typeFactory.createPolyType( PolyType.VARCHAR, 255 ), 2 ) ), // value edgePropertiesTable.getRowType() ); - inputs.add( getModify( edgePropertiesTable, preparedEProperties, statement, Operation.INSERT, null, null ) ); + inputs.add( getModify( edgePropertiesTable, preparedEProperties, statement, Modify.Operation.INSERT, null, null ) ); return inputs; @@ -1210,7 +1210,7 @@ private AlgNode switchContext( AlgNode node ) { } - private Modify getModify( AlgOptEntity table, AlgNode input, Statement statement, Operation operation, List updateList, List sourceList ) { + private RelModify getModify( AlgOptEntity table, AlgNode input, Statement statement, Operation operation, List updateList, List sourceList ) { return table.unwrap( ModifiableEntity.class ).toModificationAlg( input.getCluster(), table, statement.getTransaction().getCatalogReader(), input, operation, updateList, sourceList, true ); } @@ -1218,7 +1218,7 @@ private Modify getModify( AlgOptEntity table, AlgNode input, Statement statement private AlgBuilder buildDml( AlgNode node, RoutedAlgBuilder builder, - CatalogTable catalogTable, + LogicalTable catalogTable, List placements, CatalogPartitionPlacement partitionPlacement, Statement statement, @@ -1341,10 +1341,10 @@ private AlgBuilder buildDml( } - private AlgBuilder handleSelectFromOtherTable( RoutedAlgBuilder builder, CatalogTable catalogTable, Statement statement, AlgOptEntityImpl table ) { - CatalogTable fromTable; + private AlgBuilder handleSelectFromOtherTable( RoutedAlgBuilder builder, LogicalTable catalogTable, Statement statement, AlgOptEntityImpl table ) { + LogicalTable fromTable; // Select from other table - fromTable = table.getCatalogEntity().unwrap( CatalogTable.class ); + fromTable = table.getCatalogEntity().unwrap( LogicalTable.class ); if ( fromTable.partitionProperty.isPartitioned ) { throw new UnsupportedOperationException( "DMLs from other partitioned tables are not supported" ); @@ -1359,7 +1359,7 @@ private AlgBuilder handleSelectFromOtherTable( RoutedAlgBuilder builder, Catalog for ( CatalogColumnPlacement pkPlacement : pkPlacements ) { catalog.getColumnPlacementsOnAdapterPerTable( pkPlacement.adapterId, catalogTable.id ); - fromTable = table.getCatalogEntity().unwrap( CatalogTable.class ); + fromTable = table.getCatalogEntity().unwrap( LogicalTable.class ); CatalogPartitionPlacement partition = catalog.getPartitionPlacement( pkPlacement.adapterId, fromTable.partitionProperty.partitionIds.get( 0 ) ); @@ -1385,7 +1385,7 @@ private AlgBuilder handleSelectFromOtherTable( RoutedAlgBuilder builder, Catalog } - private void dmlConditionCheck( LogicalFilter node, CatalogTable catalogTable, List placements, RexNode operand ) { + private void dmlConditionCheck( LogicalFilter node, LogicalTable catalogTable, List placements, RexNode operand ) { if ( operand instanceof RexInputRef ) { int index = ((RexInputRef) operand).getIndex();
AlgDataTypeField field = node.getInput().getRowType().getFieldList().get( index ); diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/FullPlacementQueryRouter.java b/dbms/src/main/java/org/polypheny/db/routing/routers/FullPlacementQueryRouter.java index ff795eb351..e05de2b83e 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/FullPlacementQueryRouter.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/FullPlacementQueryRouter.java @@ -28,7 +28,7 @@ import lombok.extern.slf4j.Slf4j; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; -import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.partition.PartitionManager; import org.polypheny.db.partition.PartitionManagerFactory; import org.polypheny.db.plan.AlgOptCluster; @@ -46,7 +46,7 @@ public class FullPlacementQueryRouter extends AbstractDqlRouter { @Override protected List handleHorizontalPartitioning( AlgNode node, - CatalogTable catalogTable, + LogicalTable catalogTable, Statement statement, LogicalEntity logicalTable, List builders, @@ -79,7 +79,7 @@ protected List handleHorizontalPartitioning( @Override protected List handleVerticalPartitioningOrReplication( AlgNode node, - CatalogTable catalogTable, + LogicalTable catalogTable, Statement statement, LogicalEntity logicalTable, List builders, @@ -93,7 +93,7 @@ protected List handleVerticalPartitioningOrReplication( @Override protected List handleNonePartitioning( AlgNode node, - CatalogTable catalogTable, + LogicalTable catalogTable, Statement statement, List builders, AlgOptCluster cluster, @@ -125,7 +125,7 @@ protected List handleNonePartitioning( } - protected Collection>> selectPlacementHorizontalPartitioning( AlgNode node, CatalogTable catalogTable, LogicalQueryInformation queryInformation ) { + protected Collection>> selectPlacementHorizontalPartitioning( AlgNode node, LogicalTable catalogTable, LogicalQueryInformation queryInformation ) { PartitionManagerFactory partitionManagerFactory = PartitionManagerFactory.getInstance(); PartitionManager partitionManager = partitionManagerFactory.getPartitionManager( catalogTable.partitionProperty.partitionType ); @@ -138,7 +138,7 @@ protected Collection>> selectPlacementHor } - protected Set> selectPlacement( CatalogTable catalogTable, LogicalQueryInformation queryInformation ) { + protected Set> selectPlacement( LogicalTable catalogTable, LogicalQueryInformation queryInformation ) { // Get used columns from analyze List usedColumns = queryInformation.getAllColumnsPerTable( catalogTable.id ); diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/IcarusRouter.java b/dbms/src/main/java/org/polypheny/db/routing/routers/IcarusRouter.java index ad2c6d6883..7874f11f0c 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/IcarusRouter.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/IcarusRouter.java @@ -28,7 +28,7 @@ import lombok.extern.slf4j.Slf4j; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; -import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.routing.LogicalQueryInformation; import org.polypheny.db.schema.LogicalEntity; @@ -41,21 +41,21 @@ public class IcarusRouter extends FullPlacementQueryRouter { @Override - protected List 
handleHorizontalPartitioning( AlgNode node, CatalogTable catalogTable, Statement statement, LogicalEntity logicalTable, List builders, AlgOptCluster cluster, LogicalQueryInformation queryInformation ) { + protected List handleHorizontalPartitioning( AlgNode node, LogicalTable catalogTable, Statement statement, LogicalEntity logicalTable, List builders, AlgOptCluster cluster, LogicalQueryInformation queryInformation ) { this.cancelQuery = true; return Collections.emptyList(); } @Override - protected List handleVerticalPartitioningOrReplication( AlgNode node, CatalogTable catalogTable, Statement statement, LogicalEntity logicalTable, List builders, AlgOptCluster cluster, LogicalQueryInformation queryInformation ) { + protected List handleVerticalPartitioningOrReplication( AlgNode node, LogicalTable catalogTable, Statement statement, LogicalEntity logicalTable, List builders, AlgOptCluster cluster, LogicalQueryInformation queryInformation ) { // same as no partitioning return handleNonePartitioning( node, catalogTable, statement, builders, cluster, queryInformation ); } @Override - protected List handleNonePartitioning( AlgNode node, CatalogTable catalogTable, Statement statement, List builders, AlgOptCluster cluster, LogicalQueryInformation queryInformation ) { + protected List handleNonePartitioning( AlgNode node, LogicalTable catalogTable, Statement statement, List builders, AlgOptCluster cluster, LogicalQueryInformation queryInformation ) { if ( log.isDebugEnabled() ) { log.debug( "{} is NOT partitioned - Routing will be easy", catalogTable.name ); } diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/SimpleRouter.java b/dbms/src/main/java/org/polypheny/db/routing/routers/SimpleRouter.java index 3c7a357bef..17b4d5377a 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/SimpleRouter.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/SimpleRouter.java @@ -23,7 +23,7 @@ import lombok.extern.slf4j.Slf4j; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; -import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.partition.PartitionManager; import org.polypheny.db.partition.PartitionManagerFactory; import org.polypheny.db.plan.AlgOptCluster; @@ -44,14 +44,14 @@ private SimpleRouter() { @Override - protected List handleVerticalPartitioningOrReplication( AlgNode node, CatalogTable catalogTable, Statement statement, LogicalEntity logicalTable, List builders, AlgOptCluster cluster, LogicalQueryInformation queryInformation ) { + protected List handleVerticalPartitioningOrReplication( AlgNode node, LogicalTable catalogTable, Statement statement, LogicalEntity logicalTable, List builders, AlgOptCluster cluster, LogicalQueryInformation queryInformation ) { // Do same as without any partitioning return handleNonePartitioning( node, catalogTable, statement, builders, cluster, queryInformation ); } @Override - protected List handleNonePartitioning( AlgNode node, CatalogTable catalogTable, Statement statement, List builders, AlgOptCluster cluster, LogicalQueryInformation queryInformation ) { + protected List handleNonePartitioning( AlgNode node, LogicalTable catalogTable, Statement statement, List builders, AlgOptCluster cluster, LogicalQueryInformation queryInformation ) { // Get placements and convert into placement distribution final Map> placements = selectPlacement( catalogTable ); @@ -64,7 +64,7 @@ protected List 
handleNonePartitioning( AlgNode node, CatalogTa @Override - protected List handleHorizontalPartitioning( AlgNode node, CatalogTable catalogTable, Statement statement, LogicalEntity logicalTable, List builders, AlgOptCluster cluster, LogicalQueryInformation queryInformation ) { + protected List handleHorizontalPartitioning( AlgNode node, LogicalTable catalogTable, Statement statement, LogicalEntity logicalTable, List builders, AlgOptCluster cluster, LogicalQueryInformation queryInformation ) { PartitionManagerFactory partitionManagerFactory = PartitionManagerFactory.getInstance(); PartitionManager partitionManager = partitionManagerFactory.getPartitionManager( catalogTable.partitionProperty.partitionType ); diff --git a/dbms/src/main/java/org/polypheny/db/routing/strategies/CreateAllPlacementStrategy.java b/dbms/src/main/java/org/polypheny/db/routing/strategies/CreateAllPlacementStrategy.java index 453fa5eac5..4f04b7089a 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/strategies/CreateAllPlacementStrategy.java +++ b/dbms/src/main/java/org/polypheny/db/routing/strategies/CreateAllPlacementStrategy.java @@ -22,7 +22,7 @@ import org.polypheny.db.adapter.DataStore; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogColumn; -import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.entity.logical.LogicalTable; /** @@ -32,7 +32,7 @@ public class CreateAllPlacementStrategy implements CreatePlacementStrategy { @Override public List getDataStoresForNewColumn( CatalogColumn addedColumn ) { - CatalogTable catalogTable = Catalog.getInstance().getTable( addedColumn.tableId ); + LogicalTable catalogTable = Catalog.getInstance().getTable( addedColumn.tableId ); return catalogTable.dataPlacements.stream() .map( elem -> AdapterManager.getInstance().getStore( elem ) ) .collect( Collectors.toList() ); diff --git a/dbms/src/main/java/org/polypheny/db/routing/strategies/CreateSinglePlacementStrategy.java b/dbms/src/main/java/org/polypheny/db/routing/strategies/CreateSinglePlacementStrategy.java index f885450392..a7c5ff21d7 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/strategies/CreateSinglePlacementStrategy.java +++ b/dbms/src/main/java/org/polypheny/db/routing/strategies/CreateSinglePlacementStrategy.java @@ -23,14 +23,14 @@ import org.polypheny.db.adapter.DataStore; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogColumn; -import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.entity.logical.LogicalTable; public class CreateSinglePlacementStrategy implements CreatePlacementStrategy { @Override public List getDataStoresForNewColumn( CatalogColumn addedColumn ) { - CatalogTable catalogTable = Catalog.getInstance().getTable( addedColumn.tableId ); + LogicalTable catalogTable = Catalog.getInstance().getTable( addedColumn.tableId ); return ImmutableList.of( AdapterManager.getInstance().getStore( catalogTable.dataPlacements.get( 0 ) ) ); } diff --git a/dbms/src/main/java/org/polypheny/db/transaction/EntityAccessMap.java b/dbms/src/main/java/org/polypheny/db/transaction/EntityAccessMap.java index eed1adcecc..d64ffcf958 100644 --- a/dbms/src/main/java/org/polypheny/db/transaction/EntityAccessMap.java +++ b/dbms/src/main/java/org/polypheny/db/transaction/EntityAccessMap.java @@ -33,14 +33,14 @@ import org.jetbrains.annotations.NotNull; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.AlgVisitor; -import org.polypheny.db.algebra.core.Modify; 
+import org.polypheny.db.algebra.core.relational.RelModify; import org.polypheny.db.algebra.core.document.DocumentAlg; import org.polypheny.db.algebra.core.document.DocumentModify; import org.polypheny.db.algebra.core.lpg.LpgAlg; import org.polypheny.db.algebra.core.lpg.LpgModify; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.Catalog.NamespaceType; -import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.config.RuntimeConfig; import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptUtil; @@ -255,7 +255,7 @@ public void visit( AlgNode p, int ordinal, AlgNode parent ) { // FIXME: Don't rely on object type here; eventually someone is going to write a rule which transforms to // something which doesn't inherit TableModify, and this will break. Need to make this explicit in the // {@link AlgNode} interface. - if ( p instanceof Modify ) { + if ( p instanceof RelModify ) { newAccess = Mode.WRITE_ACCESS; if ( RuntimeConfig.FOREIGN_KEY_ENFORCEMENT.getBoolean() ) { extractWriteConstraints( (LogicalEntity) table.getEntity() ); @@ -272,7 +272,7 @@ public void visit( AlgNode p, int ordinal, AlgNode parent ) { relevantPartitions = accessedPartitions.get( p.getId() ); } else if ( table.getCatalogEntity() != null ) { if ( table.getCatalogEntity().namespaceType == NamespaceType.RELATIONAL ) { - relevantPartitions = table.getCatalogEntity().unwrap( CatalogTable.class ).partitionProperty.partitionIds; + relevantPartitions = table.getCatalogEntity().unwrap( LogicalTable.class ).partitionProperty.partitionIds; } else { relevantPartitions = List.of(); } diff --git a/dbms/src/main/java/org/polypheny/db/transaction/TransactionImpl.java b/dbms/src/main/java/org/polypheny/db/transaction/TransactionImpl.java index d4a403ad38..4af7f4f851 100644 --- a/dbms/src/main/java/org/polypheny/db/transaction/TransactionImpl.java +++ b/dbms/src/main/java/org/polypheny/db/transaction/TransactionImpl.java @@ -42,7 +42,7 @@ import org.polypheny.db.catalog.entity.CatalogDatabase; import org.polypheny.db.catalog.entity.CatalogKey.EnforcementTime; import org.polypheny.db.catalog.entity.CatalogSchema; -import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.entity.CatalogUser; import org.polypheny.db.catalog.exceptions.NoTablePrimaryKeyException; import org.polypheny.db.config.RuntimeConfig; @@ -100,7 +100,7 @@ public class TransactionImpl implements Transaction, Comparable { private final List changedTables = new ArrayList<>(); @Getter - private final Set catalogTables = new TreeSet<>(); + private final Set catalogTables = new TreeSet<>(); @Getter private final List involvedAdapters = new CopyOnWriteArrayList<>(); diff --git a/dbms/src/main/java/org/polypheny/db/view/MaterializedViewManagerImpl.java b/dbms/src/main/java/org/polypheny/db/view/MaterializedViewManagerImpl.java index d4fe0f0f03..6e6634209b 100644 --- a/dbms/src/main/java/org/polypheny/db/view/MaterializedViewManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/view/MaterializedViewManagerImpl.java @@ -45,7 +45,7 @@ import org.polypheny.db.catalog.entity.CatalogColumn; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; import org.polypheny.db.catalog.entity.CatalogMaterializedView; -import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import 
org.polypheny.db.catalog.entity.MaterializedCriteria; import org.polypheny.db.catalog.entity.MaterializedCriteria.CriteriaType; import org.polypheny.db.catalog.exceptions.GenericCatalogException; @@ -178,7 +178,7 @@ public synchronized void addMaterializedInfo( Long materializedId, MaterializedC public void addTables( Transaction transaction, List tableNames ) { if ( tableNames.size() > 1 ) { try { - CatalogTable catalogTable = Catalog.getInstance().getTable( 1, tableNames.get( 0 ), tableNames.get( 1 ) ); + LogicalTable catalogTable = Catalog.getInstance().getTable( 1, tableNames.get( 0 ), tableNames.get( 1 ) ); long id = catalogTable.id; if ( !catalogTable.getConnectedViews().isEmpty() ) { updateCandidates.put( transaction.getXid(), id ); @@ -211,11 +211,11 @@ public void updateCommittedXid( PolyXid xid ) { */ public void materializedUpdate( Long potentialInteresting ) { Catalog catalog = Catalog.getInstance(); - CatalogTable catalogTable = catalog.getTable( potentialInteresting ); + LogicalTable catalogTable = catalog.getTable( potentialInteresting ); List connectedViews = catalogTable.getConnectedViews(); for ( Long id : connectedViews ) { - CatalogTable view = catalog.getTable( id ); + LogicalTable view = catalog.getTable( id ); if ( view.entityType == EntityType.MATERIALIZED_VIEW ) { MaterializedCriteria materializedCriteria = materializedInfo.get( view.id ); if ( materializedCriteria.getCriteriaType() == CriteriaType.UPDATE ) { @@ -272,7 +272,7 @@ private void updatingIntervalMaterialized() { */ public void prepareToUpdate( Long materializedId ) { Catalog catalog = Catalog.getInstance(); - CatalogTable catalogTable = catalog.getTable( materializedId ); + LogicalTable catalogTable = catalog.getTable( materializedId ); try { Transaction transaction = getTransactionManager().startTransaction( diff --git a/dbms/src/test/java/org/polypheny/db/cypher/DdlTest.java b/dbms/src/test/java/org/polypheny/db/cypher/DdlTest.java index 3db89bd821..c1e43d4e99 100644 --- a/dbms/src/test/java/org/polypheny/db/cypher/DdlTest.java +++ b/dbms/src/test/java/org/polypheny/db/cypher/DdlTest.java @@ -27,7 +27,7 @@ import org.polypheny.db.TestHelper.JdbcConnection; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.Catalog.Pattern; -import org.polypheny.db.catalog.entity.CatalogGraphDatabase; +import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.excluded.CassandraExcluded; import org.polypheny.db.webui.models.Result; @@ -43,7 +43,7 @@ public void addCollectionTest() { execute( "CREATE DATABASE " + graphName ); - CatalogGraphDatabase graph = catalog.getGraphs( Catalog.defaultDatabaseId, new Pattern( graphName ) ).get( 0 ); + LogicalGraph graph = catalog.getGraphs( Catalog.defaultDatabaseId, new Pattern( graphName ) ).get( 0 ); assertEquals( 1, catalog.getGraphs( graph.databaseId, new Pattern( graphName ) ).size() ); @@ -65,7 +65,7 @@ public void addPlacementTest() throws SQLException { try { execute( "CREATE DATABASE " + graphName ); - CatalogGraphDatabase graph = catalog.getGraphs( Catalog.defaultDatabaseId, new Pattern( graphName ) ).get( 0 ); + LogicalGraph graph = catalog.getGraphs( Catalog.defaultDatabaseId, new Pattern( graphName ) ).get( 0 ); assertEquals( 1, graph.placements.size() ); @@ -95,7 +95,7 @@ public void initialPlacementTest() throws SQLException { execute( String.format( "CREATE DATABASE %s ON STORE %s", graphName, "store1" ) ); - CatalogGraphDatabase graph = catalog.getGraphs( Catalog.defaultDatabaseId, new Pattern( graphName ) ).get( 0 
); + LogicalGraph graph = catalog.getGraphs( Catalog.defaultDatabaseId, new Pattern( graphName ) ).get( 0 ); assertEquals( 1, graph.placements.size() ); @@ -122,7 +122,7 @@ public void deletePlacementTest() throws SQLException { execute( "CREATE DATABASE " + graphName ); - CatalogGraphDatabase graph = catalog.getGraphs( Catalog.defaultDatabaseId, new Pattern( graphName ) ).get( 0 ); + LogicalGraph graph = catalog.getGraphs( Catalog.defaultDatabaseId, new Pattern( graphName ) ).get( 0 ); assertEquals( 1, graph.placements.size() ); diff --git a/dbms/src/test/java/org/polypheny/db/misc/HorizontalPartitioningTest.java b/dbms/src/test/java/org/polypheny/db/misc/HorizontalPartitioningTest.java index 3845aaba99..89892a3437 100644 --- a/dbms/src/test/java/org/polypheny/db/misc/HorizontalPartitioningTest.java +++ b/dbms/src/test/java/org/polypheny/db/misc/HorizontalPartitioningTest.java @@ -39,7 +39,7 @@ import org.polypheny.db.catalog.entity.CatalogDataPlacement; import org.polypheny.db.catalog.entity.CatalogPartition; import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; -import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.config.Config; import org.polypheny.db.config.ConfigManager; import org.polypheny.db.excluded.CassandraExcluded; @@ -532,7 +532,7 @@ public void rangePartitioningTest() throws SQLException { + "( PARTITION parta VALUES(5,4), " + "PARTITION partb VALUES(10,6))" ); - CatalogTable table = Catalog.getInstance().getTables( null, null, new Pattern( "rangepartitioning3" ) ).get( 0 ); + LogicalTable table = Catalog.getInstance().getTables( null, null, new Pattern( "rangepartitioning3" ) ).get( 0 ); List catalogPartitions = Catalog.getInstance().getPartitionsByTable( table.id ); @@ -642,7 +642,7 @@ public void partitionPlacementTest() throws SQLException { + "WITH (foo, bar, foobar, barfoo) " ); try { - CatalogTable table = Catalog.getInstance().getTables( null, null, new Pattern( "physicalpartitiontest" ) ).get( 0 ); + LogicalTable table = Catalog.getInstance().getTables( null, null, new Pattern( "physicalpartitiontest" ) ).get( 0 ); // Check if sufficient PartitionPlacements have been created // Check if initially as many partitionPlacements are created as requested @@ -703,7 +703,7 @@ public void temperaturePartitionTest() throws SQLException { + " USING FREQUENCY write INTERVAL 10 minutes WITH 20 HASH PARTITIONS" ); try { - CatalogTable table = Catalog.getInstance().getTables( null, null, new Pattern( "temperaturetest" ) ).get( 0 ); + LogicalTable table = Catalog.getInstance().getTables( null, null, new Pattern( "temperaturetest" ) ).get( 0 ); // Check if partition properties are correctly set and parsed Assert.assertEquals( 600, ((TemperaturePartitionProperty) table.partitionProperty).getFrequencyInterval() ); @@ -761,7 +761,7 @@ public void temperaturePartitionTest() throws SQLException { // This should execute two DML INSERTS on the target PartitionId and therefore redistribute the data // Verify that the partition is now in HOT and was not before - CatalogTable updatedTable = Catalog.getInstance().getTables( null, null, new Pattern( "temperaturetest" ) ).get( 0 ); + LogicalTable updatedTable = Catalog.getInstance().getTables( null, null, new Pattern( "temperaturetest" ) ).get( 0 ); // Manually get the target partitionID of query PartitionManagerFactory partitionManagerFactory = PartitionManagerFactory.getInstance(); @@ -1176,7 +1176,7 @@ public void dataPlacementTest() throws 
SQLException { + "WITH (foo, bar, foobar, barfoo) " ); try { - CatalogTable table = Catalog.getInstance().getTables( null, null, new Pattern( "horizontaldataplacementtest" ) ).get( 0 ); + LogicalTable table = Catalog.getInstance().getTables( null, null, new Pattern( "horizontaldataplacementtest" ) ).get( 0 ); // Check if sufficient PartitionPlacements have been created // Check if initially as many DataPlacements are created as requested diff --git a/dbms/src/test/java/org/polypheny/db/misc/VerticalPartitioningTest.java b/dbms/src/test/java/org/polypheny/db/misc/VerticalPartitioningTest.java index 8e344db5f7..cc39db093b 100644 --- a/dbms/src/test/java/org/polypheny/db/misc/VerticalPartitioningTest.java +++ b/dbms/src/test/java/org/polypheny/db/misc/VerticalPartitioningTest.java @@ -33,7 +33,7 @@ import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.Catalog.Pattern; import org.polypheny.db.catalog.entity.CatalogDataPlacement; -import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.excluded.CassandraExcluded; @@ -167,7 +167,7 @@ public void dataPlacementTest() throws SQLException { + "PRIMARY KEY (tprimary) )" ); try { - CatalogTable table = Catalog.getInstance().getTables( null, null, new Pattern( "verticaldataplacementtest" ) ).get( 0 ); + LogicalTable table = Catalog.getInstance().getTables( null, null, new Pattern( "verticaldataplacementtest" ) ).get( 0 ); // Check if initially as many DataPlacements are created as requested (one for each store) Assert.assertEquals( 1, table.dataPlacements.size() ); @@ -310,7 +310,7 @@ public void dataDistributionTest() throws SQLException { + "PRIMARY KEY (tprimary) )" ); try { - CatalogTable table = Catalog.getInstance().getTables( null, null, new Pattern( "verticaldataplacementtest" ) ).get( 0 ); + LogicalTable table = Catalog.getInstance().getTables( null, null, new Pattern( "verticaldataplacementtest" ) ).get( 0 ); CatalogDataPlacement dataPlacement = Catalog.getInstance().getDataPlacement( table.dataPlacements.get( 0 ), table.id ); diff --git a/dbms/src/test/java/org/polypheny/db/statistics/StatisticsTest.java b/dbms/src/test/java/org/polypheny/db/statistics/StatisticsTest.java index fed1fd844d..ae4266cbde 100644 --- a/dbms/src/test/java/org/polypheny/db/statistics/StatisticsTest.java +++ b/dbms/src/test/java/org/polypheny/db/statistics/StatisticsTest.java @@ -32,7 +32,7 @@ import org.polypheny.db.TestHelper.JdbcConnection; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.Catalog.Pattern; -import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.exceptions.UnknownDatabaseException; import org.polypheny.db.catalog.exceptions.UnknownSchemaException; import org.polypheny.db.catalog.exceptions.UnknownTableException; @@ -254,8 +254,8 @@ public void testSimpleRowCount() throws SQLException { ); waiter.await( 20, TimeUnit.SECONDS ); try { - CatalogTable catalogTableNation = Catalog.getInstance().getTable( "APP", "statisticschema", "nation" ); - CatalogTable catalogTableRegion = Catalog.getInstance().getTable( "APP", "statisticschema", "region" ); + LogicalTable catalogTableNation = Catalog.getInstance().getTable( "APP", "statisticschema", "nation" ); + LogicalTable catalogTableRegion = Catalog.getInstance().getTable( "APP", "statisticschema", "region" ); Integer rowCountNation = StatisticsManager.getInstance().rowCountPerTable( 
catalogTableNation.id ); Integer rowCountRegion = StatisticsManager.getInstance().rowCountPerTable( catalogTableRegion.id ); @@ -314,7 +314,7 @@ private void assertStatisticsConvertTo( int maxSeconds, int target ) { continue; } inCatalog = true; - CatalogTable catalogTableNation = Catalog.getInstance().getTable( "APP", "statisticschema", "nationdelete" ); + LogicalTable catalogTableNation = Catalog.getInstance().getTable( "APP", "statisticschema", "nationdelete" ); Integer rowCount = StatisticsManager.getInstance().rowCountPerTable( catalogTableNation.id ); // potentially table exists not yet in statistics but in catalog if ( rowCount != null && rowCount == target ) { diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticQueryProcessor.java b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticQueryProcessor.java index 6b4d9e974e..5853a13bc9 100644 --- a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticQueryProcessor.java +++ b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticQueryProcessor.java @@ -31,7 +31,7 @@ import org.polypheny.db.catalog.Catalog.NamespaceType; import org.polypheny.db.catalog.entity.CatalogColumn; import org.polypheny.db.catalog.entity.CatalogSchema; -import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.exceptions.UnknownDatabaseException; import org.polypheny.db.catalog.exceptions.UnknownSchemaException; import org.polypheny.db.catalog.exceptions.UnknownUserException; @@ -93,8 +93,8 @@ public List> getSchemaTree() { List schemas = catalog.getSchemas( databaseId, null ); for ( CatalogSchema schema : schemas ) { List tables = new ArrayList<>(); - List childTables = catalog.getTables( schema.id, null ); - for ( CatalogTable childTable : childTables ) { + List childTables = catalog.getTables( schema.id, null ); + for ( LogicalTable childTable : childTables ) { List table = new ArrayList<>(); List childColumns = catalog.getColumns( childTable.id ); for ( CatalogColumn catalogColumn : childColumns ) { @@ -142,15 +142,15 @@ public List getAllColumns() { * * @return all the tables ids */ - public List getAllTable() { + public List getAllTable() { Catalog catalog = Catalog.getInstance(); - List catalogEntities = catalog.getTables( + List catalogEntities = catalog.getTables( null, null, null ); - List allTables = new ArrayList<>(); + List allTables = new ArrayList<>(); - for ( CatalogTable catalogTable : catalogEntities ) { + for ( LogicalTable catalogTable : catalogEntities ) { if ( catalogTable.entityType != EntityType.VIEW ) { allTables.add( catalogTable ); } diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticTable.java b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticTable.java index d2383d31a4..20841637a2 100644 --- a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticTable.java +++ b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticTable.java @@ -25,7 +25,7 @@ import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.Catalog.EntityType; import org.polypheny.db.catalog.Catalog.NamespaceType; -import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.entity.logical.LogicalTable; /** @@ -76,7 +76,7 @@ public StatisticTable( Long tableId ) { Catalog catalog = Catalog.getInstance(); if ( catalog.checkIfExistsEntity( tableId ) ) { - 
CatalogTable catalogTable = catalog.getTable( tableId ); + LogicalTable catalogTable = catalog.getTable( tableId ); this.table = catalogTable.name; this.namespaceType = catalogTable.getNamespaceType(); this.dataPlacements = catalogTable.dataPlacements; diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticsManagerImpl.java b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticsManagerImpl.java index 22f517d23b..dd02e7380f 100644 --- a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticsManagerImpl.java +++ b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticsManagerImpl.java @@ -44,7 +44,7 @@ import org.polypheny.db.algebra.AlgCollations; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.core.AggregateCall; -import org.polypheny.db.algebra.core.Scan; +import org.polypheny.db.algebra.core.relational.RelScan; import org.polypheny.db.algebra.fun.AggFunction; import org.polypheny.db.algebra.logical.relational.LogicalAggregate; import org.polypheny.db.algebra.logical.relational.LogicalProject; @@ -56,7 +56,7 @@ import org.polypheny.db.catalog.Catalog.EntityType; import org.polypheny.db.catalog.entity.CatalogColumn; import org.polypheny.db.catalog.entity.CatalogSchema; -import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.exceptions.GenericCatalogException; import org.polypheny.db.catalog.exceptions.UnknownDatabaseException; import org.polypheny.db.catalog.exceptions.UnknownSchemaException; @@ -159,7 +159,7 @@ public void updateColumnName( CatalogColumn catalogColumn, String newName ) { @Override - public void updateTableName( CatalogTable catalogTable, String newName ) { + public void updateTableName( LogicalTable catalogTable, String newName ) { if ( statisticSchemaMap.containsKey( catalogTable.namespaceId ) && statisticSchemaMap.get( catalogTable.namespaceId ).containsKey( catalogTable.id ) ) { Map> columnsInformation = statisticSchemaMap.get( catalogTable.namespaceId ).get( catalogTable.id ); for ( Entry> columnInfo : columnsInformation.entrySet() ) { @@ -573,7 +573,7 @@ private LogicalRelScan getLogicalScan( String schema, String table, CatalogReade /** * Queries the database with an aggregate query, to get the min value or max value. 
*/ - private AlgNode getAggregateColumn( QueryResult queryResult, NodeType nodeType, Scan tableScan, RexBuilder rexBuilder, AlgOptCluster cluster ) { + private AlgNode getAggregateColumn( QueryResult queryResult, NodeType nodeType, RelScan tableScan, RexBuilder rexBuilder, AlgOptCluster cluster ) { for ( int i = 0; i < tableScan.getRowType().getFieldNames().size(); i++ ) { if ( queryResult.getColumn() != null && tableScan.getRowType().getFieldNames().get( i ).equals( queryResult.getColumn() ) ) { LogicalProject logicalProject = LogicalProject.create( @@ -623,7 +623,7 @@ private AlgNode getAggregateColumn( QueryResult queryResult, NodeType nodeType, } - private AlgNode getUniqueValues( QueryResult queryResult, Scan tableScan, RexBuilder rexBuilder ) { + private AlgNode getUniqueValues( QueryResult queryResult, RelScan tableScan, RexBuilder rexBuilder ) { for ( int i = 0; i < tableScan.getRowType().getFieldNames().size(); i++ ) { if ( queryResult.getColumn() != null && tableScan.getRowType().getFieldNames().get( i ).equals( queryResult.getColumn() ) ) { LogicalProject logicalProject = LogicalProject.create( @@ -652,7 +652,7 @@ private AlgNode getUniqueValues( QueryResult queryResult, Scan tableScan, RexBui /** * Gets the amount of entries for a column */ - private AlgNode getColumnCount( QueryResult queryResult, Scan tableScan, RexBuilder rexBuilder, AlgOptCluster cluster ) { + private AlgNode getColumnCount( QueryResult queryResult, RelScan tableScan, RexBuilder rexBuilder, AlgOptCluster cluster ) { for ( int i = 0; i < tableScan.getRowType().getFieldNames().size(); i++ ) { if ( queryResult.getColumn() != null && tableScan.getRowType().getFieldNames().get( i ).equals( queryResult.getColumn() ) ) { LogicalProject logicalProject = LogicalProject.create( @@ -676,7 +676,7 @@ private AlgNode getColumnCount( QueryResult queryResult, Scan tableScan, RexBuil /** * Gets the amount of entries for a table. */ - private AlgNode getTableCount( Scan tableScan, AlgOptCluster cluster ) { + private AlgNode getTableCount( RelScan tableScan, AlgOptCluster cluster ) { AggregateCall aggregateCall = getRowCountAggregateCall( cluster ); return LogicalAggregate.create( tableScan, @@ -909,7 +909,7 @@ private void handleDrop( long tableId, Map> changedValues, lo private void handleTruncate( long tableId, long schemaId, Catalog catalog ) { - CatalogTable catalogTable = catalog.getTable( tableId ); + LogicalTable catalogTable = catalog.getTable( tableId ); for ( int i = 0; i < catalogTable.fieldIds.size(); i++ ) { PolyType polyType = catalog.getColumn( catalogTable.fieldIds.get( i ) ).type; QueryResult queryResult = new QueryResult( schemaId, catalogTable.id, catalogTable.fieldIds.get( i ), polyType ); @@ -937,7 +937,7 @@ private > StatisticColumn createNewStatisticColumns( private void handleInsert( long tableId, Map> changedValues, long schemaId, Catalog catalog ) { - CatalogTable catalogTable = catalog.getTable( tableId ); + LogicalTable catalogTable = catalog.getTable( tableId ); List columns = catalogTable.fieldIds; if ( this.statisticSchemaMap.get( schemaId ) != null ) { if ( this.statisticSchemaMap.get( schemaId ).get( tableId ) != null ) { @@ -962,7 +962,7 @@ private void handleInsert( long tableId, Map> changedValues, /** * Creates new StatisticColumns and inserts the values. 
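Note on the statistic helpers changed above: getAggregateColumn, getUniqueValues, getColumnCount, and getTableCount all build the same plan shape, a projection of the scanned row type down to a single column followed by one aggregate over it. A minimal plain-Java sketch of the values such plans compute, using an in-memory column instead of a RelScan; all names here are illustrative and not part of this patch:

import java.util.List;

public class StatisticShapes {

    // Mirrors getAggregateColumn: MIN (or MAX) over a single projected column.
    static int min( List<Integer> column ) {
        return column.stream().mapToInt( Integer::intValue ).min().orElseThrow();
    }

    // Mirrors getUniqueValues: the distinct values of the projected column (here just counted).
    static long distinctCount( List<Integer> column ) {
        return column.stream().distinct().count();
    }

    // Mirrors getColumnCount / getTableCount: a plain row count.
    static long count( List<Integer> column ) {
        return column.size();
    }

    public static void main( String[] args ) {
        List<Integer> column = List.of( 3, 1, 4, 1, 5 );
        System.out.println( min( column ) );           // 1
        System.out.println( distinctCount( column ) ); // 4
        System.out.println( count( column ) );         // 5
    }
}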
*/ - private void addInserts( Map> changedValues, Catalog catalog, CatalogTable catalogTable, List columns ) { + private void addInserts( Map> changedValues, Catalog catalog, LogicalTable catalogTable, List columns ) { for ( int i = 0; i < columns.size(); i++ ) { PolyType polyType = catalog.getColumn( columns.get( i ) ).type; QueryResult queryResult = new QueryResult( catalogTable.namespaceId, catalogTable.id, columns.get( i ), polyType ); diff --git a/plugins/avatica-interface/src/main/java/org/polypheny/db/avatica/DbmsMeta.java b/plugins/avatica-interface/src/main/java/org/polypheny/db/avatica/DbmsMeta.java index 673f39d8af..a9107aebff 100644 --- a/plugins/avatica-interface/src/main/java/org/polypheny/db/avatica/DbmsMeta.java +++ b/plugins/avatica-interface/src/main/java/org/polypheny/db/avatica/DbmsMeta.java @@ -87,8 +87,8 @@ import org.polypheny.db.catalog.entity.CatalogPrimaryKey.CatalogPrimaryKeyColumn.PrimitiveCatalogPrimaryKeyColumn; import org.polypheny.db.catalog.entity.CatalogSchema; import org.polypheny.db.catalog.entity.CatalogSchema.PrimitiveCatalogSchema; -import org.polypheny.db.catalog.entity.CatalogTable; -import org.polypheny.db.catalog.entity.CatalogTable.PrimitiveCatalogTable; +import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.catalog.entity.logical.LogicalTable.PrimitiveCatalogTable; import org.polypheny.db.catalog.entity.CatalogUser; import org.polypheny.db.catalog.exceptions.UnknownDatabaseException; import org.polypheny.db.catalog.exceptions.UnknownSchemaException; @@ -269,7 +269,7 @@ public MetaResultSet getTables( final ConnectionHandle ch, final String database log.trace( "getTables( ConnectionHandle {}, String {}, Pat {}, Pat {}, List {} )", ch, database, schemaPattern, tablePattern, typeList ); } - final List tables = catalog.getTables( + final List tables = catalog.getTables( database == null ? null : new Pattern( database ), (schemaPattern == null || schemaPattern.s == null) ? null : new Pattern( schemaPattern.s ), (tablePattern == null || tablePattern.s == null) ? null : new Pattern( tablePattern.s ) @@ -517,9 +517,9 @@ public MetaResultSet getPrimaryKeys( final ConnectionHandle ch, final String dat final Pattern tablePattern = table == null ? null : new Pattern( table ); final Pattern schemaPattern = schema == null ? null : new Pattern( schema ); final Pattern databasePattern = database == null ? null : new Pattern( database ); - final List catalogEntities = catalog.getTables( databasePattern, schemaPattern, tablePattern ); + final List catalogEntities = catalog.getTables( databasePattern, schemaPattern, tablePattern ); List primaryKeyColumns = new LinkedList<>(); - for ( CatalogTable catalogTable : catalogEntities ) { + for ( LogicalTable catalogTable : catalogEntities ) { if ( catalogTable.primaryKey != null ) { final CatalogPrimaryKey primaryKey = catalog.getPrimaryKey( catalogTable.primaryKey ); primaryKeyColumns.addAll( primaryKey.getCatalogPrimaryKeyColumns() ); @@ -554,9 +554,9 @@ public MetaResultSet getImportedKeys( final ConnectionHandle ch, final String da final Pattern tablePattern = table == null ? null : new Pattern( table ); final Pattern schemaPattern = schema == null ? null : new Pattern( schema ); final Pattern databasePattern = database == null ? 
null : new Pattern( database ); - final List catalogEntities = catalog.getTables( databasePattern, schemaPattern, tablePattern ); + final List catalogEntities = catalog.getTables( databasePattern, schemaPattern, tablePattern ); List foreignKeyColumns = new LinkedList<>(); - for ( CatalogTable catalogTable : catalogEntities ) { + for ( LogicalTable catalogTable : catalogEntities ) { List importedKeys = catalog.getForeignKeys( catalogTable.id ); importedKeys.forEach( catalogForeignKey -> foreignKeyColumns.addAll( catalogForeignKey.getCatalogForeignKeyColumns() ) ); } @@ -597,9 +597,9 @@ public MetaResultSet getExportedKeys( final ConnectionHandle ch, final String da final Pattern tablePattern = table == null ? null : new Pattern( table ); final Pattern schemaPattern = schema == null ? null : new Pattern( schema ); final Pattern databasePattern = database == null ? null : new Pattern( database ); - final List catalogEntities = catalog.getTables( databasePattern, schemaPattern, tablePattern ); + final List catalogEntities = catalog.getTables( databasePattern, schemaPattern, tablePattern ); List foreignKeyColumns = new LinkedList<>(); - for ( CatalogTable catalogTable : catalogEntities ) { + for ( LogicalTable catalogTable : catalogEntities ) { List exportedKeys = catalog.getExportedKeys( catalogTable.id ); exportedKeys.forEach( catalogForeignKey -> foreignKeyColumns.addAll( catalogForeignKey.getCatalogForeignKeyColumns() ) ); } @@ -714,9 +714,9 @@ public MetaResultSet getIndexInfo( final ConnectionHandle ch, final String datab final Pattern tablePattern = table == null ? null : new Pattern( table ); final Pattern schemaPattern = schema == null ? null : new Pattern( schema ); final Pattern databasePattern = database == null ? null : new Pattern( database ); - final List catalogEntities = catalog.getTables( databasePattern, schemaPattern, tablePattern ); + final List catalogEntities = catalog.getTables( databasePattern, schemaPattern, tablePattern ); List catalogIndexColumns = new LinkedList<>(); - for ( CatalogTable catalogTable : catalogEntities ) { + for ( LogicalTable catalogTable : catalogEntities ) { List catalogIndexInfos = catalog.getIndexes( catalogTable.id, unique ); catalogIndexInfos.forEach( info -> catalogIndexColumns.addAll( info.getCatalogIndexColumns() ) ); } diff --git a/plugins/cassandra-adapter/.gitignore b/plugins/cassandra-adapter/.gitignore deleted file mode 100644 index 093078b931..0000000000 --- a/plugins/cassandra-adapter/.gitignore +++ /dev/null @@ -1,3 +0,0 @@ -# Created by .ignore support plugin (hsz.mobi) -/target/ -/.toDelete diff --git a/plugins/cassandra-adapter/build.gradle b/plugins/cassandra-adapter/build.gradle deleted file mode 100644 index 76e69ab749..0000000000 --- a/plugins/cassandra-adapter/build.gradle +++ /dev/null @@ -1,94 +0,0 @@ -group "org.polypheny" - - -dependencies { - compileOnly project(":core") - compileOnly project(":plugins:sql-language") - - implementation group: "com.datastax.oss", name: "java-driver-core", version: cassandra_driver_core_version // Apache 2.0 - implementation group: "com.datastax.oss", name: "java-driver-query-builder", version: cassandra_driver_query_builder_version // Apache 2.0 - implementation group: "com.datastax.oss", name: "java-driver-mapper-runtime", version: cassandra_driver_query_builder_version // Apache 2.0 - - // Embedded Cassandra - implementation group: 'com.github.nosan', name: 'embedded-cassandra', version: cassandra_embedded_version // Apache 2.0 - - - // --- Test Compile --- - testImplementation 
project(path: ':dbms', configuration: 'test') - testImplementation project(path: ':core', configuration: 'tests') - testImplementation project(path: ':core') - - testImplementation group: "junit", name: "junit", version: junit_version - -} - - -sourceSets { - main { - java { - srcDirs = ["src/main/java"] - outputDir = file(project.buildDir.absolutePath + "/classes") - } - resources { - srcDirs = ["src/main/resources"] - } - output.resourcesDir = file(project.buildDir.absolutePath + "/classes") - } - test { - java { - srcDirs = ["src/test/java"] - outputDir = file(project.buildDir.absolutePath + "/test-classes") - } - resources { - srcDirs = ["src/test/resources"] - } - output.resourcesDir = file(project.buildDir.absolutePath + "/test-classes") - } -} - -compileJava { - dependsOn(":core:processResources") - dependsOn(":plugins:sql-language:processResources") -} - -delombok { - dependsOn(":plugins:sql-language:processResources") -} - -/** - * JARs - */ -jar { - manifest { - attributes "Manifest-Version": "1.0" - attributes "Copyright": "The Polypheny Project (polypheny.org)" - attributes "Version": "$project.version" - } -} -java { - withJavadocJar() - withSourcesJar() -} - -licensee { - allowUrl('http://www.opensource.org/licenses/Apache-2.0') // Apache 2.0 - allowUrl('http://www.jcabi.com/LICENSE.txt') // Own licensee but conforms - - allowDependency('org.ow2.asm', 'asm-util', '7.1') { because 'removed on release branches' } - allowDependency('com.github.spotbugs', 'spotbugs-annotations', '3.1.12') { because 'removed on release branches' } - - allowDependency('org.ow2.asm', 'asm', '7.1') { because 'BSD 3-Clause' } - allowDependency('org.ow2.asm', 'asm-analysis', '7.1') { because 'BSD 3-Clause' } - allowDependency('org.ow2.asm', 'asm-commons', '7.1') { because 'BSD 3-Clause' } - allowDependency('org.ow2.asm', 'asm-tree', '7.1') { because 'BSD 3-Clause' } - allowDependency('com.github.jnr', 'jnr-posix', '3.0.50') { because 'removed on release branches' } - - allow('Apache-2.0') - allow('MIT') - allow('BSD-2-Clause') - allow('CC0-1.0') - allow('JSON') // extremely free - -} - - diff --git a/plugins/cassandra-adapter/gradle.properties b/plugins/cassandra-adapter/gradle.properties deleted file mode 100644 index 07c500681e..0000000000 --- a/plugins/cassandra-adapter/gradle.properties +++ /dev/null @@ -1,27 +0,0 @@ -# -# Copyright 2019-2023 The Polypheny Project -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -pluginVersion = 0.0.1 - -pluginId = cassandra-adapter -pluginClass = org.polypheny.db.adapter.cassandra.CassandraPlugin -pluginProvider = The Polypheny Project -pluginDependencies = -pluginUrlPath = -pluginCategories = store -pluginPolyDependencies = -pluginIsSystemComponent = false -pluginIsUiVisible = true \ No newline at end of file diff --git a/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/CassandraAlg.java b/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/CassandraAlg.java deleted file mode 100644 index edbf6a0263..0000000000 --- a/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/CassandraAlg.java +++ /dev/null @@ -1,128 +0,0 @@ -/* - * Copyright 2019-2023 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * This file incorporates code covered by the following terms: - * - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to you under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.adapter.cassandra; - - -import com.datastax.oss.driver.api.core.metadata.schema.ClusteringOrder; -import com.datastax.oss.driver.api.querybuilder.relation.Relation; -import com.datastax.oss.driver.api.querybuilder.select.Selector; -import com.datastax.oss.driver.api.querybuilder.term.Term; -import com.datastax.oss.driver.api.querybuilder.update.Assignment; -import org.polypheny.db.algebra.AlgCollation; -import org.polypheny.db.algebra.AlgNode; -import org.polypheny.db.plan.AlgOptTable; - -import java.util.ArrayList; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; - - -/** - * Relational expression that uses Cassandra calling convention. - */ -public interface CassandraAlg extends AlgNode { - - void implement( CassandraImplementContext context ); - - - /** - * Context to convert a tree of {@link CassandraAlg} nodes into a CQL query. 
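The javadoc sentence above sums up how this (now removed) adapter generated CQL: each CassandraAlg node implements implement( CassandraImplementContext ), first recurses into its child via visitChild, then appends its own fragment (select columns, where relations, ordering, offset/fetch) to the shared context, from which a single statement is rendered at the end. A self-contained sketch of that accumulator pattern, with illustrative names only:

import java.util.ArrayList;
import java.util.List;

public class ImplementContextSketch {

    // Shared mutable context into which each node deposits its query fragment.
    static class Context {
        final List<String> whereClause = new ArrayList<>();
        int fetch = -1;

        void visitChild( Node input ) {
            input.implement( this );
        }
    }

    interface Node {
        void implement( Context context );
    }

    record Scan( String table ) implements Node {
        public void implement( Context context ) {
            // Leaf node: nothing to contribute in this sketch.
        }
    }

    record Filter( Node input, String predicate ) implements Node {
        public void implement( Context context ) {
            context.visitChild( input );          // children first, as in visitChild( 0, getInput() )
            context.whereClause.add( predicate ); // then this node's own fragment
        }
    }

    record Limit( Node input, int fetch ) implements Node {
        public void implement( Context context ) {
            context.visitChild( input );
            context.fetch = fetch;
        }
    }

    public static void main( String[] args ) {
        Context context = new Context();
        new Limit( new Filter( new Scan( "tab1" ), "col1 = 42" ), 10 ).implement( context );
        System.out.println( "WHERE " + context.whereClause + " LIMIT " + context.fetch ); // WHERE [col1 = 42] LIMIT 10
    }
}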
- */ - class CassandraImplementContext { - // TODO JS: Find a better name for this class - - Type type = null; - - final List selectFields = new ArrayList<>(); - final List whereClause = new ArrayList<>(); - int offset = 0; - int fetch = -1; - final Map order = new LinkedHashMap<>(); - - final List> insertValues = new ArrayList<>(); - boolean ifNotExists = false; - - final List setAssignments = new ArrayList<>(); - - AlgOptTable table; - CassandraTable cassandraTable; - - AlgCollation filterCollation = null; - - - public enum Type { - SELECT, - INSERT, - UPDATE, - DELETE - } - - - public void addWhereRelations( List relations ) { - if ( relations != null ) { - whereClause.addAll( relations ); - } - } - - - public void addInsertValues( List> additionalValues ) { - this.insertValues.addAll( additionalValues ); - } - - - public void addSelectColumns( List selectFields ) { - this.selectFields.addAll( selectFields ); - } - - - public void addAssignments( List assignments ) { - this.setAssignments.addAll( assignments ); - } - - - public void addOrder( Map newOrder ) { - order.putAll( newOrder ); - } - - - public void visitChild( int ordinal, AlgNode input ) { - assert ordinal == 0; - ((CassandraAlg) input).implement( this ); - } - - } - -} - diff --git a/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/CassandraConvention.java b/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/CassandraConvention.java deleted file mode 100644 index b3fab8c864..0000000000 --- a/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/CassandraConvention.java +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Copyright 2019-2023 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.adapter.cassandra; - - -import com.datastax.oss.driver.api.core.type.UserDefinedType; -import org.apache.calcite.linq4j.tree.Expression; -import org.polypheny.db.adapter.cassandra.rules.CassandraRules; -import org.polypheny.db.plan.AlgOptPlanner; -import org.polypheny.db.plan.AlgOptRule; -import org.polypheny.db.plan.Convention; - - -public class CassandraConvention extends Convention.Impl { - - public static final double COST_MULTIPLIER = 0.8d; - - public final Expression expression; - public final CassandraPhysicalNameProvider physicalNameProvider; - public final UserDefinedType arrayContainerUdt; - - - public CassandraConvention( String name, Expression expression, CassandraPhysicalNameProvider physicalNameProvider, UserDefinedType arrayContainerUdt ) { - super( "CASSANDRA." 
+ name, CassandraAlg.class ); - this.expression = expression; - this.physicalNameProvider = physicalNameProvider; - this.arrayContainerUdt = arrayContainerUdt; - } - - - @Override - public void register( AlgOptPlanner planner ) { - for ( AlgOptRule rule : CassandraRules.rules( this ) ) { - planner.addRule( rule ); - } - } - -} diff --git a/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/CassandraEnumerable.java b/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/CassandraEnumerable.java deleted file mode 100644 index 00ed4108c8..0000000000 --- a/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/CassandraEnumerable.java +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Copyright 2019-2023 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.adapter.cassandra; - - -import com.datastax.oss.driver.api.core.CqlSession; -import com.datastax.oss.driver.api.core.cql.ResultSet; -import lombok.extern.slf4j.Slf4j; -import org.apache.calcite.linq4j.AbstractEnumerable; -import org.apache.calcite.linq4j.Enumerator; -import org.apache.calcite.linq4j.Linq4j; - - -@Slf4j -public class CassandraEnumerable extends AbstractEnumerable { - - final CqlSession session; - final String stringStatement; - final Integer offset; - - - public CassandraEnumerable( CqlSession session, String statement, Integer offset ) { - this.session = session; - this.stringStatement = statement; - this.offset = offset; - } - - - public CassandraEnumerable( CqlSession session, String statement ) { - this( session, statement, 0 ); - } - - - public static CassandraEnumerable of( CqlSession session, String statement ) { - return CassandraEnumerable.of( session, statement, 0 ); - } - - - public static CassandraEnumerable of( CqlSession session, String statement, Integer offset ) { - log.debug( "Creating string enumerable with: {}, offset: {}", statement, offset ); - return new CassandraEnumerable( session, statement ); - } - - - @Override - public Enumerator enumerator() { - - final ResultSet results = session.execute( this.stringStatement ); - // Skip results until we get to the right offset - if ( results.getColumnDefinitions().size() == 0 ) { - return Linq4j.singletonEnumerator( (Object) 0 ); - } - int skip = 0; - Enumerator enumerator = new CassandraEnumerator( results ); - while ( skip < offset && enumerator.moveNext() ) { - skip++; - } - return enumerator; - } - -} diff --git a/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/CassandraEnumerator.java b/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/CassandraEnumerator.java deleted file mode 100644 index aa2410b848..0000000000 --- a/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/CassandraEnumerator.java +++ /dev/null @@ -1,157 +0,0 @@ -/* - * Copyright 2019-2023 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this 
file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * This file incorporates code covered by the following terms: - * - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to you under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.adapter.cassandra; - - -import com.datastax.oss.driver.api.core.cql.ColumnDefinitions; -import com.datastax.oss.driver.api.core.cql.ResultSet; -import com.datastax.oss.driver.api.core.cql.Row; -import com.datastax.oss.driver.api.core.type.DataType; -import com.datastax.oss.driver.api.core.type.DataTypes; -import com.datastax.oss.driver.api.core.type.UserDefinedType; -import org.apache.calcite.linq4j.Enumerator; -import org.polypheny.db.adapter.cassandra.util.CassandraTypesUtils; - -import java.util.Iterator; - - -/** - * Enumerator that reads from a Cassandra column family. - */ -class CassandraEnumerator implements Enumerator { - - private final Iterator iterator; - private final ColumnDefinitions columnDefinitions; - private Row current; - - - /** - * Creates a CassandraEnumerator. - * - * @param results Cassandra result set ({@link com.datastax.oss.driver.api.core.cql.ResultSet}) - */ - CassandraEnumerator( ResultSet results ) { - this.iterator = results.iterator(); - this.current = null; - this.columnDefinitions = results.getColumnDefinitions(); - } - - - /** - * Produce the next row from the results - * - * @return A new row from the results - */ - @Override - public Object current() { - if ( columnDefinitions.size() == 0 ) { - return 0; - } else if ( columnDefinitions.size() == 1 ) { - // If we just have one field, produce it directly - return currentRowField( 0 ); - } else { - // Build an array with all fields in this row - Object[] row = new Object[columnDefinitions.size()]; - for ( int i = 0; i < columnDefinitions.size(); i++ ) { - row[i] = currentRowField( i ); - } - return row; - } - } - - - /** - * Get a field for the current row from the underlying object. 
- * - * @param index Index of the field within the Row object - */ - private Object currentRowField( int index ) { - DataType type = this.columnDefinitions.get( index ).getType(); - - if ( type instanceof UserDefinedType ) { - return CassandraTypesUtils.unparseArrayContainerUdt( current.getUdtValue( index ) ); - } - - if ( type == DataTypes.ASCII || type == DataTypes.TEXT ) { - return current.getString( index ); - } else if ( type == DataTypes.INT || type == DataTypes.VARINT ) { - return current.getInt( index ); - } else if ( type == DataTypes.BIGINT ) { - return current.getLong( index ); - } else if ( type == DataTypes.DOUBLE ) { - return current.getDouble( index ); - } else if ( type == DataTypes.FLOAT ) { - return current.getFloat( index ); - } else if ( type == DataTypes.UUID || type == DataTypes.TIMEUUID ) { - return current.getUuid( index ).toString(); - } else if ( type == DataTypes.DATE ) { - return (int) current.getLocalDate( index ).toEpochDay(); - } else if ( type == DataTypes.TIME ) { - // Time is represented in Polypheny-DB as an integer counting the number of milliseconds since the start of the day. - return ((int) current.getLocalTime( index ).toNanoOfDay()) / 1000000; - } else if ( type == DataTypes.TIMESTAMP ) { - // Timestamp is represented in Polypheny-DB as a long counting the number of milliseconds since 1970-01-01T00:00:00+0000 - return current.getInstant( index ).getEpochSecond() * 1000L + current.getInstant( index ).getNano() / 1000000L; - } else if ( type == DataTypes.BOOLEAN ) { - return current.getBoolean( index ); - } else { - return null; - } - } - - - @Override - public boolean moveNext() { - if ( iterator.hasNext() ) { - current = iterator.next(); - return true; - } else { - return false; - } - } - - - @Override - public void reset() { - throw new UnsupportedOperationException(); - } - - - @Override - public void close() { - // Nothing to do here - } - -} - diff --git a/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/CassandraFilter.java b/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/CassandraFilter.java deleted file mode 100644 index aba4653f58..0000000000 --- a/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/CassandraFilter.java +++ /dev/null @@ -1,361 +0,0 @@ -/* - * Copyright 2019-2023 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * This file incorporates code covered by the following terms: - * - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to you under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
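The comments in the enumerator above pin down the two temporal encodings Polypheny-DB expects: TIME as an int counting milliseconds since the start of the day, and TIMESTAMP as a long counting milliseconds since 1970-01-01T00:00:00Z. A small runnable check of both formulas; note that the division happens before the narrowing cast, since (int) toNanoOfDay() on its own would overflow for almost any time of day:

import java.time.Instant;
import java.time.LocalTime;

public class CqlTemporalEncoding {

    public static void main( String[] args ) {
        // TIME: milliseconds since midnight; divide the nano-of-day first, then narrow.
        LocalTime time = LocalTime.of( 14, 30, 5, 250_000_000 );
        int millisOfDay = (int) (time.toNanoOfDay() / 1_000_000L);
        System.out.println( millisOfDay ); // 52205250

        // TIMESTAMP: milliseconds since the Unix epoch, seconds plus sub-second nanos.
        Instant instant = Instant.ofEpochSecond( 1_676_207_618L, 500_000_000L );
        long epochMillis = instant.getEpochSecond() * 1000L + instant.getNano() / 1_000_000L;
        System.out.println( epochMillis ); // 1676207618500
    }
}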
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.adapter.cassandra; - -import com.datastax.oss.driver.api.querybuilder.QueryBuilder; -import com.datastax.oss.driver.api.querybuilder.relation.ColumnRelationBuilder; -import com.datastax.oss.driver.api.querybuilder.relation.Relation; -import com.datastax.oss.driver.api.querybuilder.term.Term; -import org.polypheny.db.adapter.cassandra.rules.CassandraRules; -import org.polypheny.db.algebra.AlgCollation; -import org.polypheny.db.algebra.AlgCollations; -import org.polypheny.db.algebra.AlgFieldCollation; -import org.polypheny.db.algebra.AlgNode; -import org.polypheny.db.algebra.constant.Kind; -import org.polypheny.db.algebra.core.Filter; -import org.polypheny.db.algebra.metadata.AlgMetadataQuery; -import org.polypheny.db.algebra.type.AlgDataType; -import org.polypheny.db.plan.*; -import org.polypheny.db.prepare.JavaTypeFactoryImpl; -import org.polypheny.db.rex.RexCall; -import org.polypheny.db.rex.RexInputRef; -import org.polypheny.db.rex.RexLiteral; -import org.polypheny.db.rex.RexNode; -import org.polypheny.db.schema.ModelTrait; -import org.polypheny.db.type.PolyType; -import org.polypheny.db.util.Pair; - -import java.util.ArrayList; -import java.util.HashSet; -import java.util.List; -import java.util.Set; - - -/** - * Implementation of a {@link Filter} - * relational expression in Cassandra. - */ -public class CassandraFilter extends Filter implements CassandraAlg { - - private Boolean singlePartition; - private AlgCollation implicitCollation; - - - public CassandraFilter( - AlgOptCluster cluster, - AlgTraitSet traitSet, - AlgNode child, - RexNode condition, - List partitionKeys, - List clusteringKeys, - List implicitFieldCollations ) { - super( cluster, traitSet.replace( ModelTrait.RELATIONAL ), child, condition ); - - this.singlePartition = false; -// List clusteringKeys1 = new ArrayList<>( clusteringKeys ); - -// Translator translator = new Translator( getRowType(), partitionKeys, clusteringKeys, implicitFieldCollations ); -// this.match = translator.translateMatch( condition ); - // Testing if this is really needed... 
-// this.singlePartition = translator.isSinglePartition(); -// this.implicitCollation = translator.getImplicitCollation(); - - // TODO JS: Check this -// assert getConvention() == CONVENTION; -// assert getConvention() == child.getConvention(); - } - - - public CassandraFilter( AlgOptCluster cluster, AlgTraitSet traitSet, AlgNode convert, RexNode condition ) { - this( cluster, traitSet, convert, condition, new ArrayList<>(), new ArrayList<>(), new ArrayList<>() ); - } - - - @Override - public AlgOptCost computeSelfCost( AlgOptPlanner planner, AlgMetadataQuery mq ) { - return super.computeSelfCost( planner, mq ).multiplyBy( CassandraConvention.COST_MULTIPLIER ); - } - - - @Override - public CassandraFilter copy( AlgTraitSet traitSet, AlgNode input, RexNode condition ) { - return new CassandraFilter( getCluster(), traitSet, input, condition ); - } - - - @Override - public void implement( CassandraImplementContext context ) { - context.visitChild( 0, getInput() ); - - context.filterCollation = this.getImplicitCollation(); - - final Pair, List> keyFields = context.cassandraTable.getPhysicalKeyFields(); - - Translator translator = new Translator( context.cassandraTable.getRowType( new JavaTypeFactoryImpl() ), keyFields.left, keyFields.right, context.cassandraTable.getClusteringOrder() ); - - List match = translator.translateMatch( condition ); - - context.addWhereRelations( match ); - } - - - /** - * Check if the filter restricts to a single partition. - * - * @return True if the filter will restrict the underlying to a single partition - */ - public boolean isSinglePartition() { - return singlePartition; - } - - - /** - * Get the resulting collation by the clustering keys after filtering. - * - * @return The implicit collation based on the natural sorting by clustering keys - */ - public AlgCollation getImplicitCollation() { - return implicitCollation; - } - - - /** - * Translates {@link RexNode} expressions into Cassandra expression strings. - */ - static class Translator { - - private final AlgDataType rowType; - private final List fieldNames; - private final Set partitionKeys; - private final List clusteringKeys; - private int restrictedClusteringKeys; - private final List implicitFieldCollations; - - - Translator( AlgDataType rowType, List partitionKeys, List clusteringKeys, List implicitFieldCollations ) { - this.rowType = rowType; - this.fieldNames = CassandraRules.cassandraPhysicalFieldNames( rowType ); - this.partitionKeys = new HashSet<>( partitionKeys ); - this.clusteringKeys = clusteringKeys; - this.restrictedClusteringKeys = 0; - this.implicitFieldCollations = implicitFieldCollations; - } - - - /** - * Check if the query spans only one partition. - * - * @return True if the matches translated so far have resulted in a single partition - */ - public boolean isSinglePartition() { - return partitionKeys.isEmpty(); - } - - - /** - * Infer the implicit correlation from the unrestricted clustering keys. 
- * - * @return The collation of the filtered results - */ - public AlgCollation getImplicitCollation() { - // No collation applies if we aren't restricted to a single partition - if ( !isSinglePartition() ) { - return AlgCollations.EMPTY; - } - - // Pull out the correct fields along with their original collations - List fieldCollations = new ArrayList<>(); - for ( int i = restrictedClusteringKeys; i < clusteringKeys.size(); i++ ) { - int fieldIndex = fieldNames.indexOf( clusteringKeys.get( i ) ); - AlgFieldCollation.Direction direction = implicitFieldCollations.get( i ).getDirection(); - fieldCollations.add( new AlgFieldCollation( fieldIndex, direction ) ); - } - - return AlgCollations.of( fieldCollations ); - } - - - /** - * Produce the CQL predicate string for the given condition. - * - * @param condition Condition to translate - * @return CQL predicate string - */ - private List translateMatch( RexNode condition ) { - // CQL does not support disjunctions - List disjunctions = AlgOptUtil.disjunctions( condition ); - if ( disjunctions.size() == 1 ) { - return translateAnd( disjunctions.get( 0 ) ); - } else { - throw new AssertionError( "cannot translate " + condition ); - } - } - - - /** - * Convert the value of a literal to a string. - * - * @param literal Literal to translate - * @return String representation of the literal - */ - private static Object literalValue( RexLiteral literal ) { - Object value = CassandraValues.literalValue( literal ); - return value; - } - - - /** - * Translate a conjunctive predicate to a CQL string. - * - * @param condition A conjunctive predicate - * @return CQL string for the predicate - */ - private List translateAnd( RexNode condition ) { - List predicates = new ArrayList<>(); - for ( RexNode node : AlgOptUtil.conjunctions( condition ) ) { - predicates.add( translateMatch2( node ) ); - } - - return predicates; - } - - - /** - * Translate a binary relation. - */ - private Relation translateMatch2( RexNode node ) { - // We currently only use equality, but inequalities on clustering keys should be possible in the future - switch ( node.getKind() ) { - case EQUALS: - return translateBinary( Kind.EQUALS, Kind.EQUALS, (RexCall) node ); - case LESS_THAN: - return translateBinary( Kind.LESS_THAN, Kind.GREATER_THAN, (RexCall) node ); - case LESS_THAN_OR_EQUAL: - return translateBinary( Kind.LESS_THAN_OR_EQUAL, Kind.GREATER_THAN_OR_EQUAL, (RexCall) node ); - case GREATER_THAN: - return translateBinary( Kind.GREATER_THAN, Kind.LESS_THAN, (RexCall) node ); - case GREATER_THAN_OR_EQUAL: - return translateBinary( Kind.GREATER_THAN_OR_EQUAL, Kind.LESS_THAN_OR_EQUAL, (RexCall) node ); - default: - throw new AssertionError( "cannot translate " + node ); - } - } - - - /** - * Translates a call to a binary operator, reversing arguments if necessary. - */ - private Relation translateBinary( Kind op, Kind rop, RexCall call ) { - final RexNode left = call.operands.get( 0 ); - final RexNode right = call.operands.get( 1 ); - Relation expression = translateBinary2( op, left, right ); - if ( expression != null ) { - return expression; - } - expression = translateBinary2( rop, right, left ); - if ( expression != null ) { - return expression; - } - throw new AssertionError( "cannot translate op " + op + " call " + call ); - } - - - /** - * Translates a call to a binary operator. Returns null on failure. 
- */ - private Relation translateBinary2( Kind op, RexNode left, RexNode right ) { - switch ( right.getKind() ) { - case LITERAL: - break; - default: - return null; - } - final RexLiteral rightLiteral = (RexLiteral) right; - switch ( left.getKind() ) { - case INPUT_REF: - final RexInputRef left1 = (RexInputRef) left; - String name = fieldNames.get( left1.getIndex() ); - return translateOp2( op, name, rightLiteral ); - case CAST: - // FIXME This will not work in all cases (for example, we ignore string encoding) - return translateBinary2( op, ((RexCall) left).operands.get( 0 ), right ); - default: - return null; - } - } - - - /** - * Combines a field name, operator, and literal to produce a predicate string. - */ - private Relation translateOp2( Kind op, String name, RexLiteral right ) { - // In case this is a key, record that it is now restricted - if ( op.equals( "=" ) ) { - partitionKeys.remove( name ); - if ( clusteringKeys.contains( name ) ) { - restrictedClusteringKeys++; - } - } - - Object value = literalValue( right ); - String valueString = value.toString(); - if ( value instanceof String ) { - PolyType typeName = rowType.getField( name, true, false ).getType().getPolyType(); - if ( typeName != PolyType.CHAR ) { - valueString = "'" + valueString + "'"; - } - } - - ColumnRelationBuilder alg = Relation.column( name ); - Term term = QueryBuilder.literal( value ); - switch ( op ) { - case EQUALS: - return alg.isEqualTo( term ); - case LESS_THAN: - return alg.isLessThan( term ); - case LESS_THAN_OR_EQUAL: - return alg.isLessThanOrEqualTo( term ); - case GREATER_THAN: - return alg.isGreaterThan( term ); - case GREATER_THAN_OR_EQUAL: - return alg.isLessThanOrEqualTo( term ); - default: - throw new AssertionError( "cannot translate op " + op + " name " + name + " valuestring " + valueString ); - } - } - - } - -} - diff --git a/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/CassandraLimit.java b/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/CassandraLimit.java deleted file mode 100644 index ebdd599293..0000000000 --- a/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/CassandraLimit.java +++ /dev/null @@ -1,111 +0,0 @@ -/* - * Copyright 2019-2023 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * This file incorporates code covered by the following terms: - * - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to you under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
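translateOp2 above builds each predicate by pairing Relation.column( name ) with a literal Term, one relation per conjunct of the WHERE clause. A minimal standalone use of the same DataStax 4.x query-builder API (the java-driver-query-builder artifact the removed build file declared); keyspace, table, and column names here are made up:

import com.datastax.oss.driver.api.core.cql.SimpleStatement;
import com.datastax.oss.driver.api.querybuilder.QueryBuilder;
import com.datastax.oss.driver.api.querybuilder.relation.Relation;
import com.datastax.oss.driver.api.querybuilder.term.Term;

public class RelationDemo {

    public static void main( String[] args ) {
        // Literal term, as produced by QueryBuilder.literal( value ) in translateOp2.
        Term term = QueryBuilder.literal( 42 );

        SimpleStatement statement = QueryBuilder
                .selectFrom( "cassandra", "tab1" )
                .all()
                .where( Relation.column( "col1" ).isEqualTo( term ) )
                .build();

        // Prints something like: SELECT * FROM cassandra.tab1 WHERE col1=42
        System.out.println( statement.getQuery() );
    }
}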
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.adapter.cassandra; - -import org.polypheny.db.algebra.AlgNode; -import org.polypheny.db.algebra.AlgWriter; -import org.polypheny.db.algebra.SingleAlg; -import org.polypheny.db.algebra.metadata.AlgMetadataQuery; -import org.polypheny.db.plan.AlgOptCluster; -import org.polypheny.db.plan.AlgOptCost; -import org.polypheny.db.plan.AlgOptPlanner; -import org.polypheny.db.plan.AlgTraitSet; -import org.polypheny.db.rex.RexLiteral; -import org.polypheny.db.rex.RexNode; - -import java.util.List; - - -/** - * Implementation of limits in Cassandra. - */ -public class CassandraLimit extends SingleAlg implements CassandraAlg { - - public final RexNode offset; - public final RexNode fetch; - - - public CassandraLimit( AlgOptCluster cluster, AlgTraitSet traitSet, AlgNode input, RexNode offset, RexNode fetch ) { - super( cluster, traitSet, input ); - this.offset = offset; - this.fetch = fetch; - assert getConvention() == input.getConvention(); - } - - - @Override - public AlgOptCost computeSelfCost( AlgOptPlanner planner, AlgMetadataQuery mq ) { - // We do this so we get the limit for free - return planner.getCostFactory().makeZeroCost(); - } - - - @Override - public CassandraLimit copy( AlgTraitSet traitSet, List newInputs ) { - return new CassandraLimit( getCluster(), traitSet, sole( newInputs ), offset, fetch ); - } - - - @Override - public String algCompareString() { - return this.getClass().getSimpleName() + "$" + - input.algCompareString() + "$" + - (getConvention() != null ? getConvention().getName() : "") + "$" + - (offset != null ? offset.hashCode() + "$" : "") + - (fetch != null ? fetch.hashCode() : "") + "&"; - } - - - @Override - public void implement( CassandraImplementContext context ) { - context.visitChild( 0, getInput() ); - if ( offset != null ) { - context.offset = RexLiteral.intValue( offset ); - } - if ( fetch != null ) { - context.fetch = RexLiteral.intValue( fetch ); - } - } - - - @Override - public AlgWriter explainTerms( AlgWriter pw ) { - super.explainTerms( pw ); - pw.itemIf( "offset", offset, offset != null ); - pw.itemIf( "fetch", fetch, fetch != null ); - return pw; - } - -} - diff --git a/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/CassandraMethod.java b/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/CassandraMethod.java deleted file mode 100644 index a547f59bf5..0000000000 --- a/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/CassandraMethod.java +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Copyright 2019-2023 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - * - * This file incorporates code covered by the following terms: - * - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to you under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.adapter.cassandra; - - -import com.datastax.oss.driver.api.core.CqlSession; -import com.google.common.collect.ImmutableMap; -import org.apache.calcite.linq4j.tree.Types; - -import java.lang.reflect.Method; -import java.util.List; - - -/** - * Builtin methods in the Cassandra adapter. - */ -public enum CassandraMethod { - - CASSANDRA_QUERYABLE_QUERY( CassandraTable.CassandraQueryable.class, "query", List.class, List.class, List.class, List.class, Integer.class, Integer.class ), - CASSANDRA_STRING_ENUMERABLE( CassandraEnumerable.class, "of", CqlSession.class, String.class ), - CASSANDRA_STRING_ENUMERABLE_OFFSET( CassandraEnumerable.class, "of", CqlSession.class, String.class, Integer.class ); -// CASSANDRA_STRING_ENUMERABLE(CassandraTable.CassandraQueryable.class, "insert", String.class ); - - public final Method method; - - public static final ImmutableMap MAP; - - - static { - final ImmutableMap.Builder builder = ImmutableMap.builder(); - for ( CassandraMethod value : CassandraMethod.values() ) { - builder.put( value.method, value ); - } - MAP = builder.build(); - } - - - CassandraMethod( Class clazz, String methodName, Class... argumentTypes ) { - this.method = Types.lookupMethod( clazz, methodName, argumentTypes ); - } -} - diff --git a/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/CassandraPhysicalNameProvider.java b/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/CassandraPhysicalNameProvider.java deleted file mode 100644 index bafb7428cb..0000000000 --- a/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/CassandraPhysicalNameProvider.java +++ /dev/null @@ -1,199 +0,0 @@ -/* - * Copyright 2019-2023 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
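The CassandraMethod enum above resolves every linq4j entry point once, at class-initialization time, through Types.lookupMethod, then inverts the mapping into an ImmutableMap keyed by Method. The same eager-lookup idea with plain JDK reflection, against a hypothetical target (java.util.List) rather than the adapter classes:

import java.lang.reflect.Method;
import java.util.List;
import java.util.Map;

public class MethodRegistry {

    enum KnownMethod {
        LIST_SIZE( List.class, "size" ),
        LIST_GET( List.class, "get", int.class );

        final Method method;

        KnownMethod( Class<?> clazz, String name, Class<?>... argumentTypes ) {
            try {
                // Fails fast when the class initializes if a signature ever drifts,
                // the same guarantee Types.lookupMethod gives the enum above.
                this.method = clazz.getMethod( name, argumentTypes );
            } catch ( NoSuchMethodException e ) {
                throw new IllegalStateException( e );
            }
        }
    }

    // Reverse index from Method to enum constant, analogous to CassandraMethod.MAP.
    static final Map<Method, KnownMethod> MAP = Map.of(
            KnownMethod.LIST_SIZE.method, KnownMethod.LIST_SIZE,
            KnownMethod.LIST_GET.method, KnownMethod.LIST_GET );

    public static void main( String[] args ) {
        System.out.println( MAP.get( KnownMethod.LIST_GET.method ) ); // LIST_GET
    }
}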
- */ - -package org.polypheny.db.adapter.cassandra; - - -import java.util.List; -import java.util.regex.Matcher; -import java.util.regex.Pattern; -import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.entity.CatalogColumn; -import org.polypheny.db.catalog.entity.CatalogColumnPlacement; -import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; -import org.polypheny.db.catalog.entity.CatalogTable; -import org.polypheny.db.catalog.exceptions.UnknownColumnException; -import org.polypheny.db.catalog.exceptions.UnknownDatabaseException; -import org.polypheny.db.catalog.exceptions.UnknownSchemaException; -import org.polypheny.db.catalog.exceptions.UnknownTableException; - - -public class CassandraPhysicalNameProvider { - - private final static Pattern idRevPattern = Pattern.compile( "^(col|tab|sch)([0-9]+)(?>r([0-9]+))?$" ); - - private final Catalog catalog; - private final int storeId; - - private final String DEFAULT_SCHEMA = "public"; - - - public CassandraPhysicalNameProvider( int storeId ) { - this.catalog = Catalog.getInstance(); - this.storeId = storeId; - } - - - public String generatePhysicalColumnName( long columnId ) { - return "col" + columnId; - } - - - public String generatePhysicalTableName( long tableId ) { - return "tab" + tableId; - } - - - public String generatePhysicalSchemaName( int schemaId ) { - // TODO JS: implement cassandra schemas - return "cassandra"; -// return "sch" + schemaId; - } - - - public String getPhysicalSchemaName( int schemaId ) { - return generatePhysicalSchemaName( schemaId ); - } - - - public String getPhysicalTableName( long tableId ) { - return generatePhysicalTableName( tableId ); - } - - - public String getPhysicalColumnName( long columnId ) throws RuntimeException { - // TODO JS: This really should be a direct call to the catalog! 
- List placements; - placements = catalog.getColumnPlacementsOnAdapter( this.storeId ); - - for ( CatalogColumnPlacement placement : placements ) { - if ( placement.columnId == columnId ) { - return placement.physicalColumnName; - } - } - - throw new RuntimeException( "Column placement not found for data store " + this.storeId + " and column " + columnId ); - } - - - public String getLogicalColumnName( long columnId ) { - return catalog.getColumn( columnId ).name; - } - - - public CatalogColumn getLogicalColumn( long columnId ) { - return catalog.getColumn( columnId ); - } - - - private long tableId( String schemaName, String tableName ) { - CatalogTable catalogTable; - try { - catalogTable = catalog.getTable( "APP", schemaName, tableName ); - } catch ( UnknownTableException | UnknownDatabaseException | UnknownSchemaException e ) { - throw new RuntimeException( e ); - } - return catalogTable.id; - } - - - private long columnId( String logicalSchemaName, String logicalTableName, String logicalColumnName ) { - CatalogColumn catalogColumn; - try { - catalogColumn = catalog.getColumn( "APP", logicalSchemaName, logicalTableName, logicalColumnName ); - } catch ( UnknownColumnException | UnknownSchemaException | UnknownDatabaseException | UnknownTableException e ) { - throw new RuntimeException( e ); - } - return catalogColumn.id; - } - - - private long columnId( long tableId, String logicalColumnName ) { - CatalogColumn catalogColumn; - try { - catalogColumn = catalog.getColumn( tableId, logicalColumnName ); - } catch ( UnknownColumnException e ) { - throw new RuntimeException( e ); - } - return catalogColumn.id; - } - - - public String getPhysicalColumnName( long tableId, String logicalColumnName ) { - long catalogColumnId = columnId( tableId, logicalColumnName ); - return this.catalog.getColumnPlacement( this.storeId, catalogColumnId ).physicalColumnName; - } - - - public String getPhysicalColumnName( String tableName, String logicalColumnName ) { - long tableId = tableId( this.DEFAULT_SCHEMA, tableName ); - long catalogColumnId = columnId( tableId, logicalColumnName ); - return this.catalog.getColumnPlacement( this.storeId, catalogColumnId ).physicalColumnName; - } - - - public void updatePhysicalColumnName( long columnId, String updatedName, boolean updatePosition ) { - CatalogColumnPlacement placement = this.catalog.getColumnPlacement( this.storeId, columnId ); - CatalogPartitionPlacement partitionPlacement = catalog.getPartitionPlacement( this.storeId, catalog.getTable( placement.tableId ).partitionProperty.partitionIds.get( 0 ) ); - this.catalog.updateColumnPlacementPhysicalNames( this.storeId, columnId, partitionPlacement.physicalTableName, updatedName, updatePosition ); - } - - - public String getPhysicalTableName( String schemaName, String tableName ) { - return "tab" + tableId( schemaName, tableName ); - } - - - public String getPhysicalTableName( List qualifiedName ) { - String schemaName; - String tableName; - if ( qualifiedName.size() == 1 ) { - schemaName = DEFAULT_SCHEMA; - tableName = qualifiedName.get( 0 ); - } else if ( qualifiedName.size() == 2 ) { - schemaName = qualifiedName.get( 0 ); - tableName = qualifiedName.get( 1 ); - } else { - throw new RuntimeException( "Unknown format for qualified name! 
Size: " + qualifiedName.size() ); - } - - return getPhysicalTableName( schemaName, tableName ); - } - - - public static String incrementNameRevision( String name ) { - Matcher m = idRevPattern.matcher( name ); - Long id; - Long rev; - String type; - if ( m.find() ) { - type = m.group( 1 ); - id = Long.valueOf( m.group( 2 ) ); - if ( m.group( 3 ) == null ) { - rev = 0L; - } else { - rev = Long.valueOf( m.group( 3 ) ); - } - } else { - throw new IllegalArgumentException( "Not a physical name!" ); - } - - rev += 1L; - - return type + id + "r" + rev; - } - -} diff --git a/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/CassandraPlugin.java b/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/CassandraPlugin.java deleted file mode 100644 index d337535307..0000000000 --- a/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/CassandraPlugin.java +++ /dev/null @@ -1,599 +0,0 @@ -/* - * Copyright 2019-2023 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.adapter.cassandra; - - -import com.datastax.oss.driver.api.core.CqlSession; -import com.datastax.oss.driver.api.core.CqlSessionBuilder; -import com.datastax.oss.driver.api.core.cql.BatchStatementBuilder; -import com.datastax.oss.driver.api.core.cql.BatchType; -import com.datastax.oss.driver.api.core.cql.ResultSet; -import com.datastax.oss.driver.api.core.cql.Row; -import com.datastax.oss.driver.api.core.cql.SimpleStatement; -import com.datastax.oss.driver.api.core.metadata.schema.ColumnMetadata; -import com.datastax.oss.driver.api.core.metadata.schema.RelationMetadata; -import com.datastax.oss.driver.api.core.type.DataType; -import com.datastax.oss.driver.api.core.type.UserDefinedType; -import com.datastax.oss.driver.api.querybuilder.QueryBuilder; -import com.datastax.oss.driver.api.querybuilder.SchemaBuilder; -import com.datastax.oss.driver.api.querybuilder.relation.Relation; -import com.datastax.oss.driver.api.querybuilder.schema.CreateKeyspace; -import com.datastax.oss.driver.api.querybuilder.schema.CreateTable; -import com.datastax.oss.driver.api.querybuilder.update.Assignment; -import com.github.nosan.embedded.cassandra.EmbeddedCassandraFactory; -import com.github.nosan.embedded.cassandra.api.Cassandra; -import com.google.common.collect.ImmutableList; -import com.google.common.collect.ImmutableMap; -import java.net.InetSocketAddress; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; -import java.util.function.Function; -import lombok.Getter; -import lombok.extern.slf4j.Slf4j; -import org.pf4j.Plugin; -import org.pf4j.PluginWrapper; -import org.polypheny.db.adapter.Adapter.AdapterProperties; -import org.polypheny.db.adapter.Adapter.AdapterSettingInteger; -import org.polypheny.db.adapter.Adapter.AdapterSettingString; -import org.polypheny.db.adapter.DataStore; -import org.polypheny.db.adapter.DeployMode; -import org.polypheny.db.adapter.DeployMode.DeploySetting; -import 
org.polypheny.db.adapter.cassandra.util.CassandraTypesUtils;
-import org.polypheny.db.catalog.Adapter;
-import org.polypheny.db.catalog.Catalog;
-import org.polypheny.db.catalog.entity.CatalogColumn;
-import org.polypheny.db.catalog.entity.CatalogColumnPlacement;
-import org.polypheny.db.catalog.entity.CatalogIndex;
-import org.polypheny.db.catalog.entity.CatalogKey;
-import org.polypheny.db.catalog.entity.CatalogPartitionPlacement;
-import org.polypheny.db.catalog.entity.CatalogTable;
-import org.polypheny.db.docker.DockerInstance;
-import org.polypheny.db.docker.DockerManager;
-import org.polypheny.db.docker.DockerManager.Container;
-import org.polypheny.db.docker.DockerManager.ContainerBuilder;
-import org.polypheny.db.plugins.PolyPluginManager;
-import org.polypheny.db.prepare.Context;
-import org.polypheny.db.schema.Schema;
-import org.polypheny.db.schema.SchemaPlus;
-import org.polypheny.db.schema.Table;
-import org.polypheny.db.transaction.PolyXid;
-import org.polypheny.db.type.PolyType;
-
-
-public class CassandraPlugin extends Plugin {
-
-
-    public static final String ADAPTER_NAME = "CASSANDRA";
-
-
-    /**
-     * Constructor to be used by the plugin manager for plugin instantiation.
-     * Plugins have to provide a constructor with this exact signature to be loaded successfully by the manager.
-     */
-    public CassandraPlugin( PluginWrapper wrapper ) {
-        super( wrapper );
-    }
-
-
-    @Override
-    public void start() {
-        Map<String, String> settings = ImmutableMap.of(
-                "mode", "docker",
-                "instanceId", "0",
-                "port", "9042"
-        );
-
-        Adapter.addAdapter( CassandraStore.class, ADAPTER_NAME, settings );
-    }
-
-
-    @Override
-    public void stop() {
-        Adapter.removeAdapter( CassandraStore.class, ADAPTER_NAME );
-    }
-
-
-    @Slf4j
-    @AdapterProperties(
-            name = "Cassandra",
-            description = "Apache Cassandra is an open-source wide-column store (i.e. a two-dimensional key–value store) designed to handle large amounts of data. Cassandra can be deployed in a distributed manner.",
-            usedModes = { DeployMode.EMBEDDED, DeployMode.REMOTE, DeployMode.DOCKER })
-    @AdapterSettingString(name = "host", defaultValue = "localhost", position = 0, appliesTo = DeploySetting.REMOTE)
-    @AdapterSettingInteger(name = "port", defaultValue = 9042, position = 1, appliesTo = { DeploySetting.REMOTE, DeploySetting.DOCKER })
-    @AdapterSettingString(name = "keyspace", defaultValue = "cassandra", position = 2, appliesTo = DeploySetting.REMOTE)
-    @AdapterSettingString(name = "username", defaultValue = "cassandra", position = 3, appliesTo = DeploySetting.REMOTE)
-    @AdapterSettingString(name = "password", defaultValue = "cassandra", position = 4, appliesTo = DeploySetting.REMOTE)
-    public static class CassandraStore extends DataStore {
-
-
-        // Running embedded
-        private final Cassandra embeddedCassandra;
-        private Container container;
-
-        // Connection information
-        private String dbHostname;
-        private int dbPort;
-        private String dbKeyspace;
-        private String dbUsername;
-        private String dbPassword;
-
-        // Array Container UDT
-        private final UserDefinedType arrayContainerUdt;
-
-        // Only display specific logging messages once
-        private static boolean displayedPrepareLoggingMessage = false;
-        private static boolean displayedCommitLoggingMessage = false;
-
-        private final CqlSession session;
-        private CassandraSchema currentSchema;
-
-        @Getter
-        // Lombok does not generate a non-static getter for a static field, so this field cannot be static.
-        private final List<PolyType> unsupportedTypes = ImmutableList.of( PolyType.ARRAY, PolyType.MAP );
-
-
-        public CassandraStore( int storeId, String uniqueName, Map<String, String> settings ) {
-            super( storeId, uniqueName, settings, true );
-
-            // Parse settings
-
-            if ( deployMode == DeployMode.EMBEDDED ) {
-                // Make sure we are on Java 8, as embedded Cassandra does not support anything newer!
-                // This is a Cassandra issue which is marked as "won't fix".
-                // See: https://issues.apache.org/jira/browse/CASSANDRA-13107
-
-                if ( !System.getProperty( "java.version" ).startsWith( "1.8" ) ) {
-                    log.error( "Embedded cassandra requires Java 8 to work. Currently using: {}. Aborting!", System.getProperty( "java.version" ) );
-                    throw new RuntimeException( "Embedded cassandra requires Java 8 to be used!" );
-                }
-
-                // Set up the embedded instance of Cassandra.
-                log.debug( "Attempting to create embedded cassandra instance." );
-                EmbeddedCassandraFactory cassandraFactory = new EmbeddedCassandraFactory();
-//                cassandraFactory.setJavaHome( Paths.get( System.getenv( "JAVA_HOME" ) ) );
-                this.embeddedCassandra = cassandraFactory.create();
-                this.embeddedCassandra.start();
-
-                this.dbHostname = this.embeddedCassandra.getAddress().getHostAddress();
-                this.dbPort = this.embeddedCassandra.getPort();
-
-                this.dbKeyspace = "keyspace";
-                this.dbUsername = "cassandra";
-                this.dbPassword = "";
-                log.warn( "Embedded cassandra address: {}:{}", this.dbHostname, this.dbPort );
-            } else if ( deployMode == DeployMode.DOCKER ) {
-                this.dbUsername = "cassandra";
-                this.dbPassword = "cassandra";
-
-                DockerManager.Container container = new ContainerBuilder( getAdapterId(), "polypheny/cassandra", getUniqueName(), Integer.parseInt( settings.get( "instanceId" ) ) )
-                        .withMappedPort( 9042, Integer.parseInt( settings.get( "port" ) ) )
-                        // Cassandra can take quite some time to start
-                        .withReadyTest( this::testDockerConnection, 80000 )
-                        //.withEnvironmentVariables( Arrays.asList( "CASSANDRA_USER=" + this.dbUsername, "CASSANDRA_PASSWORD=" + this.dbPassword ) )
-                        .build();
-
-                this.container = container;
-
-                this.dbKeyspace = "cassandra";
-                this.dbPort = Integer.parseInt( settings.get( "port" ) );
-
-                DockerManager.getInstance().initialize( container ).start();
-
-                this.embeddedCassandra = null;
-            } else if ( deployMode == DeployMode.REMOTE ) {
-                this.embeddedCassandra = null;
-                this.dbHostname = settings.get( "host" );
-                this.dbKeyspace = settings.get( "keyspace" );
-                this.dbUsername = settings.get( "username" );
-                this.dbPassword = settings.get( "password" );
-                this.dbPort = Integer.parseInt( settings.get( "port" ) );
-            } else {
-                throw new RuntimeException( "Unknown deploy mode: " + deployMode.name() );
-            }
-
-            try {
-                CqlSession mySession = getSession();
-                try {
-                    CreateKeyspace createKs = SchemaBuilder.createKeyspace( this.dbKeyspace ).ifNotExists().withSimpleStrategy( 1 );
-                    mySession.execute( createKs.build() );
-                    mySession.execute( "USE " + this.dbKeyspace );
-                } catch ( Exception e ) {
-                    log.warn( "Unable to use keyspace {}.", this.dbKeyspace, e );
-                    mySession.execute( "CREATE KEYSPACE " + this.dbKeyspace + " WITH replication = {'class':'SimpleStrategy', 'replication_factor' : 1}" );
-                    mySession.execute( "USE " + this.dbKeyspace );
-                }
-
-                mySession.execute( "CREATE TYPE IF NOT EXISTS " + this.dbKeyspace + ".arraycontainer ( innertype text, dimension int, cardinality int, data text );" );
-                arrayContainerUdt =
-                        mySession.getMetadata()
-                                .getKeyspace( this.dbKeyspace )
-                                .flatMap( ks -> ks.getUserDefinedType( "arraycontainer" ) )
-                                .orElseThrow( () -> new IllegalArgumentException( "Missing UDT definition" ) );
-
-                this.session = mySession;
-            } catch ( Exception e ) {
-                throw new RuntimeException( e );
-            }
-        }
-
-
-        private CqlSession getSession() {
-            CqlSessionBuilder cluster = CqlSession.builder();
-            cluster.withClassLoader( PolyPluginManager.getMainClassLoader() );
-            cluster.withLocalDatacenter( "datacenter1" );
-            List<InetSocketAddress> contactPoints = new ArrayList<>( 1 );
-            contactPoints.add( new InetSocketAddress( this.dbHostname, this.dbPort ) );
-            if ( this.dbUsername != null && this.dbPassword != null ) {
-                cluster.addContactPoints( contactPoints ).withAuthCredentials( this.dbUsername, this.dbPassword );
-            } else {
-                cluster.addContactPoints( contactPoints );
-            }
-            return cluster.build();
-        }
-
-
-        @Override
-        public void createNewSchema( SchemaPlus rootSchema, String name ) {
-            this.currentSchema = CassandraSchema.create(
-                
rootSchema, - name, - this.session, - this.dbKeyspace, - new CassandraPhysicalNameProvider( this.getAdapterId() ), - this, - this.arrayContainerUdt ); - } - - - @Override - public Table createTableSchema( CatalogTable catalogTable, List columnPlacementsOnStore, CatalogPartitionPlacement partitionPlacement ) { - String cassandraphysicalTableName = currentSchema.getConvention().physicalNameProvider.getPhysicalTableName( catalogTable.id ); - return new CassandraTable( this.currentSchema, catalogTable.name, cassandraphysicalTableName, false, catalogTable.id ); - } - - - @Override - public Schema getCurrentSchema() { - return this.currentSchema; - } - - - @Override - public void createTable( Context context, CatalogTable catalogTable, List partitionIds ) { - // This check is probably not required due to the check below it. - if ( catalogTable.primaryKey == null ) { - throw new UnsupportedOperationException( "Cannot create Cassandra Table without a primary key!" ); - } - - long primaryKeyColumn = -1; - List keyColumns = new ArrayList<>(); - - for ( CatalogKey catalogKey : catalog.getTableKeys( catalogTable.id ) ) { - keyColumns.addAll( catalogKey.columnIds ); - // TODO JS: make sure there's only one primary key! - if ( primaryKeyColumn == -1 ) { - primaryKeyColumn = catalogKey.columnIds.get( 0 ); - } - } - - if ( primaryKeyColumn == -1 ) { - throw new UnsupportedOperationException( "Cannot create Cassandra Table without a primary key!" ); - } - - final long primaryKeyColumnLambda = primaryKeyColumn; - - CassandraPhysicalNameProvider physicalNameProvider = new CassandraPhysicalNameProvider( this.getAdapterId() ); - String physicalTableName = physicalNameProvider.getPhysicalTableName( catalogTable.id ); - // List columns = combinedTable.getColumns(); - List columns = catalog.getColumnPlacementsOnAdapterPerTable( getAdapterId(), catalogTable.id ); - CatalogColumnPlacement primaryColumnPlacement = columns.stream().filter( c -> c.columnId == primaryKeyColumnLambda ).findFirst().get(); - CatalogColumn catalogColumn = catalog.getColumn( primaryColumnPlacement.columnId ); - - CreateTable createTable = SchemaBuilder.createTable( this.dbKeyspace, physicalTableName ) - .withPartitionKey( physicalNameProvider.generatePhysicalColumnName( catalogColumn.id ), CassandraTypesUtils.getDataType( catalogColumn.type, this.arrayContainerUdt ) ); - - for ( CatalogColumnPlacement placement : columns ) { - catalogColumn = catalog.getColumn( placement.columnId ); - DataType fieldType; - if ( catalogColumn.collectionsType == PolyType.ARRAY ) { - fieldType = this.arrayContainerUdt; - } else { - fieldType = CassandraTypesUtils.getDataType( catalogColumn.type, null ); - } - - if ( keyColumns.contains( placement.columnId ) ) { - if ( placement.columnId != primaryKeyColumn ) { - createTable = createTable.withClusteringColumn( physicalNameProvider.generatePhysicalColumnName( placement.columnId ), CassandraTypesUtils.getDataType( catalogColumn.type, this.arrayContainerUdt ) ); - } - } else { - createTable = createTable.withColumn( physicalNameProvider.generatePhysicalColumnName( placement.columnId ), fieldType ); - } - } - - context.getStatement().getTransaction().registerInvolvedAdapter( this ); - this.session.execute( createTable.build() ); - - for ( CatalogColumnPlacement placement : catalog.getColumnPlacementsOnAdapterPerTable( getAdapterId(), catalogTable.id ) ) { - catalog.updateColumnPlacementPhysicalNames( - getAdapterId(), - placement.columnId, - this.dbKeyspace, // TODO MV: physical schema name - 
physicalNameProvider.generatePhysicalColumnName( placement.columnId ), - true ); - } - } - - - @Override - public void dropTable( Context context, CatalogTable catalogTable, List partitionIds ) { - CassandraPhysicalNameProvider physicalNameProvider = new CassandraPhysicalNameProvider( this.getAdapterId() ); - String physicalTableName = physicalNameProvider.getPhysicalTableName( catalogTable.id ); - partitionIds.forEach( partitionId -> catalog.deletePartitionPlacement( getAdapterId(), partitionId ) ); - SimpleStatement dropTable = SchemaBuilder.dropTable( this.dbKeyspace, physicalTableName ).build(); - - context.getStatement().getTransaction().registerInvolvedAdapter( this ); - this.session.execute( dropTable ); - } - - - @Override - public void addColumn( Context context, CatalogTable catalogTable, CatalogColumn catalogColumn ) { - CassandraPhysicalNameProvider physicalNameProvider = new CassandraPhysicalNameProvider( this.getAdapterId() ); - String physicalTableName = physicalNameProvider.getPhysicalTableName( catalogTable.id ); - String physicalColumnName = physicalNameProvider.generatePhysicalColumnName( catalogColumn.id ); - - SimpleStatement addColumn = SchemaBuilder.alterTable( this.dbKeyspace, physicalTableName ) - .addColumn( physicalColumnName, CassandraTypesUtils.getDataType( catalogColumn.type, this.arrayContainerUdt ) ).build(); - - context.getStatement().getTransaction().registerInvolvedAdapter( this ); - // TODO JS: Wrap with error handling to check whether successful, if not, try iterative revision names to find one that works. - this.session.execute( addColumn ); - - catalog.updateColumnPlacementPhysicalNames( - getAdapterId(), - catalogColumn.id, - this.dbKeyspace, - physicalColumnName, - false ); - } - - - @Override - public void dropColumn( Context context, CatalogColumnPlacement columnPlacement ) { -// public void dropColumn( Context context, CatalogCombinedTable catalogEntity, CatalogColumn catalogColumn ) { -// CassandraPhysicalNameProvider physicalNameProvider = new CassandraPhysicalNameProvider( context.getStatement().getTransaction().getCatalog(), this.getStoreId() ); - - CatalogPartitionPlacement partitionPlacement = catalog.getPartitionPlacement( getAdapterId(), catalog.getTable( columnPlacement.tableId ).partitionProperty.partitionIds.get( 0 ) ); - - String physicalTableName = partitionPlacement.physicalTableName; - String physicalColumnName = columnPlacement.physicalColumnName; - - SimpleStatement dropColumn = SchemaBuilder.alterTable( this.dbKeyspace, physicalTableName ) - .dropColumn( physicalColumnName ).build(); - - context.getStatement().getTransaction().registerInvolvedAdapter( this ); - this.session.execute( dropColumn ); - } - - - @Override - public void addIndex( Context context, CatalogIndex catalogIndex, List partitionIds ) { - throw new RuntimeException( "Cassandra adapter does not support adding indexes" ); - } - - - @Override - public void dropIndex( Context context, CatalogIndex catalogIndex, List partitionIds ) { - throw new RuntimeException( "Cassandra adapter does not support dropping indexes" ); - } - - - @Override - public boolean prepare( PolyXid xid ) { - // TODO JS: implement cassandra prepare - if ( !displayedPrepareLoggingMessage ) { - log.warn( "Prepare is not yet supported. This warning will not be repeated!" 
); - displayedPrepareLoggingMessage = true; - } - return true; - } - - - @Override - public void commit( PolyXid xid ) { - // TODO JS: implement cassandra commit - if ( !displayedCommitLoggingMessage ) { - log.warn( "Commit is not yet supported. This warning will not be repeated!" ); - displayedCommitLoggingMessage = true; - } - } - - - @Override - public void rollback( PolyXid xid ) { - // TODO JS: implement cassandra rollback - log.warn( "Rollback is not yet supported." ); - } - - - @Override - public void truncate( Context context, CatalogTable table ) { - CassandraPhysicalNameProvider physicalNameProvider = new CassandraPhysicalNameProvider( this.getAdapterId() ); - String physicalTableName = physicalNameProvider.getPhysicalTableName( table.id ); - SimpleStatement truncateTable = QueryBuilder.truncate( this.dbKeyspace, physicalTableName ).build(); - - context.getStatement().getTransaction().registerInvolvedAdapter( this ); - this.session.execute( truncateTable ); - } - - - @Override - public void updateColumnType( Context context, CatalogColumnPlacement placement, CatalogColumn catalogColumn, PolyType oldType ) { -// public void updateColumnType( Context context, CatalogColumn catalogColumn ) { - context.getStatement().getTransaction().registerInvolvedAdapter( this ); - - CassandraPhysicalNameProvider physicalNameProvider = new CassandraPhysicalNameProvider( this.getAdapterId() ); - String physicalTableName = physicalNameProvider.getPhysicalTableName( catalogColumn.tableId ); - -// SimpleStatement selectData = QueryBuilder.selectFrom( this.dbKeyspace, physicalTableName ).all().build(); - SimpleStatement selectData = QueryBuilder.selectFrom( this.dbKeyspace, physicalTableName ).all().build(); - ResultSet rs = this.session.execute( selectData ); - - if ( !rs.isFullyFetched() ) { - throw new RuntimeException( "Unable to convert column type..." ); - } - - String physicalColumnName = physicalNameProvider.getPhysicalColumnName( catalogColumn.id ); - - String newPhysicalColumnName = CassandraPhysicalNameProvider.incrementNameRevision( physicalColumnName ); - - BatchStatementBuilder builder = new BatchStatementBuilder( BatchType.LOGGED ); - RelationMetadata relationMetadata = session.getMetadata().getKeyspace( dbKeyspace ).get().getTable( physicalTableName ).get(); - List primaryKeys = relationMetadata.getPrimaryKey(); - ColumnMetadata oldColumn = relationMetadata.getColumn( physicalColumnName ).get(); - //PolyType oldType = CassandraTypesUtils.getPolyType( oldColumn.getType() ); - -// PolyTypeAssignmentRules rules = PolyTypeAssignmentRules.instance( true ); -// if ( ! rules.canCastFrom( catalogColumn.type, oldType )) { -// throw new RuntimeException( "Unable to change column type. Unable to cast " + oldType.getName() + " to " + catalogColumn.type.getName() + "." 
); -// } - - Function converter = CassandraTypesUtils.convertToFrom( catalogColumn.type, oldType ); - - session.execute( SchemaBuilder.alterTable( this.dbKeyspace, physicalTableName ) - .addColumn( newPhysicalColumnName, CassandraTypesUtils.getDataType( catalogColumn.type, this.arrayContainerUdt ) ) - .build() ); - - for ( Row r : rs ) { - List whereClause = new ArrayList<>(); - for ( ColumnMetadata cm : primaryKeys ) { - Relation rl = Relation.column( cm.getName() ).isEqualTo( - QueryBuilder.literal( r.get( cm.getName(), CassandraTypesUtils.getJavaType( cm.getType() ) ) ) -// QueryBuilder.literal( r.get( cm.getName(), CassandraTypesUtils.getPolyType( cm.getType() ).getTypeJavaClass() ) ) - ); - whereClause.add( rl ); - } - - Object oldValue = r.get( physicalColumnName, CassandraTypesUtils.getJavaType( oldColumn.getType() ) ); -// Object oldValue = r.get( physicalColumnName, oldType.getTypeJavaClass() ); - - builder.addStatement( - QueryBuilder.update( this.dbKeyspace, physicalTableName ) - .set( Assignment.setColumn( - newPhysicalColumnName, - QueryBuilder.literal( converter.apply( oldValue ) ) ) ) - .where( whereClause ) - .build() - ); - } - - this.session.execute( builder.build() ); - - session.execute( SchemaBuilder.alterTable( this.dbKeyspace, physicalTableName ) - .dropColumn( physicalColumnName ).build() ); - - physicalNameProvider.updatePhysicalColumnName( catalogColumn.id, newPhysicalColumnName, true ); - } - - - @Override - public List getAvailableIndexMethods() { - return new ArrayList<>(); - } - - - @Override - public AvailableIndexMethod getDefaultIndexMethod() { - throw new RuntimeException( "Cassandra adapter does not support adding indexes" ); - } - - - @Override - public List getFunctionalIndexes( CatalogTable catalogTable ) { - List pkIds = Catalog.getInstance().getPrimaryKey( catalogTable.primaryKey ).columnIds; - return ImmutableList.of( new FunctionalIndexInfo( pkIds, "PRIMARY (unique)" ) ); - } - - - @Override - public void shutdown() { - try { - this.session.close(); - } catch ( RuntimeException e ) { - log.warn( "Exception while shutting down {}", getUniqueName(), e ); - } - - if ( deployMode == DeployMode.EMBEDDED ) { - this.embeddedCassandra.stop(); - } else if ( deployMode == DeployMode.DOCKER ) { - DockerInstance.getInstance().destroyAll( getAdapterId() ); - } - - log.info( "Shut down Cassandra store: {}", this.getUniqueName() ); - } - - - @Override - protected void reloadSettings( List updatedSettings ) { - // TODO JS: Implement - log.warn( "reloadSettings is not implemented yet." 
); - } - - - private boolean testDockerConnection() { - CqlSession mySession = null; - - if ( container == null ) { - return false; - } - container.updateIpAddress(); - this.dbHostname = container.getIpAddress(); - if ( this.dbHostname == null ) { - return false; - } - - try { - mySession = getSession(); - ResultSet resultSet = mySession.execute( "SELECT release_version FROM system.local" ); - if ( resultSet.one() != null ) { - try { - mySession.close(); - } catch ( RuntimeException e ) { - log.warn( "Exception while shutting test connection down {}", getUniqueName(), e ); - } - return true; - } - - } catch ( Exception e ) { - // ignore - log.debug( e.getMessage() ); - } - if ( mySession != null ) { - try { - mySession.close(); - } catch ( RuntimeException e ) { - log.warn( "Exception while shutting test connection down {}", getUniqueName(), e ); - } - } - - return false; - } - - } - -} diff --git a/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/CassandraProject.java b/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/CassandraProject.java deleted file mode 100644 index d6fb5d5bca..0000000000 --- a/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/CassandraProject.java +++ /dev/null @@ -1,177 +0,0 @@ -/* - * Copyright 2019-2023 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * This file incorporates code covered by the following terms: - * - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to you under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.polypheny.db.adapter.cassandra; - -import com.datastax.oss.driver.api.core.data.UdtValue; -import com.datastax.oss.driver.api.querybuilder.QueryBuilder; -import com.datastax.oss.driver.api.querybuilder.select.Selector; -import com.datastax.oss.driver.api.querybuilder.term.Term; -import org.polypheny.db.adapter.cassandra.rules.CassandraRules; -import org.polypheny.db.adapter.cassandra.util.CassandraTypesUtils; -import org.polypheny.db.adapter.java.JavaTypeFactory; -import org.polypheny.db.algebra.AlgNode; -import org.polypheny.db.algebra.core.Project; -import org.polypheny.db.algebra.metadata.AlgMetadataQuery; -import org.polypheny.db.algebra.type.AlgDataType; -import org.polypheny.db.algebra.type.AlgDataTypeFactory; -import org.polypheny.db.algebra.type.AlgDataTypeField; -import org.polypheny.db.plan.AlgOptCluster; -import org.polypheny.db.plan.AlgOptCost; -import org.polypheny.db.plan.AlgOptPlanner; -import org.polypheny.db.plan.AlgTraitSet; -import org.polypheny.db.prepare.JavaTypeFactoryImpl; -import org.polypheny.db.rex.RexCall; -import org.polypheny.db.rex.RexInputRef; -import org.polypheny.db.rex.RexLiteral; -import org.polypheny.db.rex.RexNode; -import org.polypheny.db.sql.language.fun.SqlArrayValueConstructor; -import org.polypheny.db.util.Pair; - -import java.util.*; -import java.util.stream.Collectors; - - -/** - * Implementation of {@link Project} relational expression in Cassandra. - */ -public class CassandraProject extends Project implements CassandraAlg { - - private final boolean arrayValueProject; - - - public CassandraProject( AlgOptCluster cluster, AlgTraitSet traitSet, AlgNode input, List projects, AlgDataType rowType, boolean arrayValueProject ) { - super( cluster, traitSet, input, projects, rowType ); - this.arrayValueProject = arrayValueProject; - // TODO JS: Check this -// assert getConvention() == CassandraRel.CONVENTION; -// assert getConvention() == input.getConvention(); - } - - - @Override - public Project copy( AlgTraitSet traitSet, AlgNode input, List projects, AlgDataType rowType ) { - // TODO js(knn): array value project stuff double check? 
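-        // The arrayValueProject flag is carried over unchanged, so a copied node keeps
-        // the insert-values translation behaviour of the original.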
- return new CassandraProject( getCluster(), traitSet, input, projects, rowType, this.arrayValueProject ); - } - - - @Override - public AlgOptCost computeSelfCost( AlgOptPlanner planner, AlgMetadataQuery mq ) { - return super.computeSelfCost( planner, mq ).multiplyBy( 0.8 ); - } - - - @Override - public void implement( CassandraImplementContext context ) { - - if ( arrayValueProject ) { - final AlgDataTypeFactory typeFactory = getCluster().getTypeFactory(); - AlgDataType rowType = context.cassandraTable.getRowType( typeFactory ); - - List> pairs = Pair.zip( rowType.getFieldList().stream().map( AlgDataTypeField::getPhysicalName ).collect( Collectors.toList() ), rowType.getFieldNames() ); - Map nameMapping = new HashMap<>(); - for ( Pair pair : pairs ) { - nameMapping.put( pair.right, pair.left ); - } - - List cassandraFieldNames = getRowType().getFieldNames(); - List cassPhysicalFields = new ArrayList<>(); - for ( String fieldName : cassandraFieldNames ) { - cassPhysicalFields.add( nameMapping.get( fieldName ) ); - } - - // Yes I am literally copying what the values implementation is doing - final List physicalFields = context.cassandraTable.getRowType( new JavaTypeFactoryImpl() ).getFieldList(); - final List logicalFields = this.rowType.getFieldList(); - final List fields = new ArrayList<>(); - for ( AlgDataTypeField field : logicalFields ) { - for ( AlgDataTypeField physicalField : physicalFields ) { - if ( field.getName().equals( physicalField.getName() ) ) { - fields.add( physicalField ); - break; - } - } - } - - Map oneInsert = new LinkedHashMap<>(); - List> namedProjects = getNamedProjects(); - for ( int i = 0; i < namedProjects.size(); i++ ) { - Pair pair = namedProjects.get( i ); - final String originalName = cassPhysicalFields.get( i ); -// final String originalName = pair.left.accept( translator ); - if ( pair.left instanceof RexLiteral ) { - // Normal literal value - final String name = pair.right; - oneInsert.put( originalName, QueryBuilder.literal( CassandraValues.literalValue( (RexLiteral) pair.left ) ) ); - } else if ( pair.left instanceof RexCall ) { - SqlArrayValueConstructor arrayValueConstructor = (SqlArrayValueConstructor) ((RexCall) pair.left).op; - UdtValue udtValue = CassandraTypesUtils.createArrayContainerDataType( - context.cassandraTable.getUnderlyingConvention().arrayContainerUdt, - arrayValueConstructor.dimension, - arrayValueConstructor.maxCardinality, - ((RexCall) pair.left).type.getComponentType().getPolyType(), - (RexCall) pair.left ); - String udtString = udtValue.getFormattedContents(); - Term udtTerm = QueryBuilder.raw( udtString ); - oneInsert.put( originalName, udtTerm ); - } - } - - List> valuesList = new ArrayList<>(); - valuesList.add( oneInsert ); - context.addInsertValues( valuesList ); - } else { - context.visitChild( 0, getInput() ); - final CassandraRules.RexToCassandraTranslator translator = new CassandraRules.RexToCassandraTranslator( - (JavaTypeFactory) getCluster().getTypeFactory(), - CassandraRules.cassandraPhysicalFieldNames( getInput().getRowType() ) ); - final List fields = new ArrayList<>(); - for ( Pair pair : getNamedProjects() ) { - if ( pair.left instanceof RexInputRef ) { - String name = pair.right; -// getRowType() -// ((RexInputRef) pair.left); - final String originalName = pair.left.accept( translator ); - if ( name.startsWith( "_" ) ) { - name = "\"" + name + "\""; - } - fields.add( Selector.column( originalName ).as( name ) ); - } - } - context.addSelectColumns( fields ); - } - } - -} - diff --git 
a/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/CassandraScan.java b/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/CassandraScan.java deleted file mode 100644 index 8561e5340e..0000000000 --- a/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/CassandraScan.java +++ /dev/null @@ -1,114 +0,0 @@ -/* - * Copyright 2019-2023 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * This file incorporates code covered by the following terms: - * - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to you under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.adapter.cassandra; - -import org.polypheny.db.adapter.cassandra.CassandraAlg.CassandraImplementContext.Type; -import org.polypheny.db.algebra.AlgNode; -import org.polypheny.db.algebra.core.Scan; -import org.polypheny.db.algebra.metadata.AlgMetadataQuery; -import org.polypheny.db.algebra.type.AlgDataType; -import org.polypheny.db.plan.*; -import org.polypheny.db.schema.ModelTrait; - -import java.util.List; - - -/** - * Relational expression representing a scan of a Cassandra collection. - */ -public class CassandraScan extends Scan implements CassandraAlg { - - public final CassandraTable cassandraTable; - final AlgDataType projectRowType; - - - /** - * Creates a CassandraScan. - * - * @param cluster Cluster - * @param traitSet Traits - * @param table Table - * @param cassandraTable Cassandra table - * @param projectRowType Fields and types to project; null to project raw row - */ - protected CassandraScan( AlgOptCluster cluster, AlgTraitSet traitSet, AlgOptTable table, CassandraTable cassandraTable, AlgDataType projectRowType ) { - super( cluster, traitSet.replace( ModelTrait.RELATIONAL ), table ); - this.cassandraTable = cassandraTable; - this.projectRowType = projectRowType; - - assert cassandraTable != null; - // TODO JS: Check this -// assert getConvention() == CONVENTION; - } - - - @Override - public AlgNode copy( AlgTraitSet traitSet, List inputs ) { - assert inputs.isEmpty(); - return new CassandraScan( getCluster(), traitSet, this.table, this.cassandraTable, this.projectRowType ); - } - - - @Override - public AlgDataType deriveRowType() { - return projectRowType != null ? 
projectRowType : super.deriveRowType(); - } - - - @Override - public void register( AlgOptPlanner planner ) { - getConvention().register( planner ); - } - - - @Override - public AlgOptCost computeSelfCost( AlgOptPlanner planner, AlgMetadataQuery mq ) { - return super.computeSelfCost( planner, mq ).multiplyBy( CassandraConvention.COST_MULTIPLIER ); - } - - - @Override - public void implement( CassandraImplementContext context ) { - context.cassandraTable = cassandraTable; - context.table = table; - - if ( context.type != null ) { - return; - } - - context.type = Type.SELECT; - } - -} - diff --git a/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/CassandraSchema.java b/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/CassandraSchema.java deleted file mode 100644 index 254bde1f91..0000000000 --- a/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/CassandraSchema.java +++ /dev/null @@ -1,361 +0,0 @@ -/* - * Copyright 2019-2023 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * This file incorporates code covered by the following terms: - * - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to you under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.polypheny.db.adapter.cassandra; - -import com.datastax.oss.driver.api.core.CqlIdentifier; -import com.datastax.oss.driver.api.core.CqlSession; -import com.datastax.oss.driver.api.core.metadata.schema.*; -import com.datastax.oss.driver.api.core.type.DataType; -import com.datastax.oss.driver.api.core.type.UserDefinedType; -import com.google.common.collect.ImmutableList; -import com.google.common.collect.ImmutableMap; -import lombok.AllArgsConstructor; -import lombok.Getter; -import lombok.extern.slf4j.Slf4j; -import org.apache.calcite.linq4j.tree.Expression; -import org.polypheny.db.adapter.DataContext; -import org.polypheny.db.adapter.cassandra.CassandraPlugin.CassandraStore; -import org.polypheny.db.adapter.cassandra.util.CassandraTypesUtils; -import org.polypheny.db.algebra.AlgFieldCollation; -import org.polypheny.db.algebra.type.*; -import org.polypheny.db.catalog.entity.CatalogColumn; -import org.polypheny.db.schema.SchemaPlus; -import org.polypheny.db.schema.Schemas; -import org.polypheny.db.schema.Table; -import org.polypheny.db.schema.impl.AbstractSchema; -import org.polypheny.db.type.PolyType; -import org.polypheny.db.type.PolyTypeFactoryImpl; -import org.polypheny.db.util.Pair; -import org.polypheny.db.util.trace.PolyphenyDbTrace; -import org.slf4j.Logger; - -import java.util.*; -import java.util.Map.Entry; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - - -/** - * Schema mapped onto a Cassandra column family - */ -@Slf4j -public class CassandraSchema extends AbstractSchema { - - private final static Pattern columnIdPattern = Pattern.compile( "^col([0-9]+)(r([0-9]+))?" ); - - @Getter - final CqlSession session; - final String keyspace; - private final SchemaPlus parentSchema; - final String name; - - private final UserDefinedType arrayContainerUdt; - - - @Getter - private final CassandraConvention convention; - - private final CassandraStore cassandraStore; - - protected static final Logger LOGGER = PolyphenyDbTrace.getPlannerTracer(); - - private static final int DEFAULT_CASSANDRA_PORT = 9042; - - - private CassandraSchema( CqlSession session, String keyspace, SchemaPlus parentSchema, String name, CassandraConvention convention, CassandraStore cassandraStore, UserDefinedType arrayContainerUdt ) { - super(); - this.session = session; - this.keyspace = keyspace; - this.parentSchema = parentSchema; - this.name = name; - this.convention = convention; - this.cassandraStore = cassandraStore; - this.arrayContainerUdt = arrayContainerUdt; - } - - - public static CassandraSchema create( - SchemaPlus parentSchema, - String name, - CqlSession session, - String keyspace, - CassandraPhysicalNameProvider physicalNameProvider, - CassandraStore cassandraStore, - UserDefinedType arrayContainerUdt ) { - final Expression expression = Schemas.subSchemaExpression( parentSchema, name, CassandraSchema.class ); - final CassandraConvention convention = new CassandraConvention( name, expression, physicalNameProvider, arrayContainerUdt ); - return new CassandraSchema( session, keyspace, parentSchema, name, convention, cassandraStore, arrayContainerUdt ); - } - - - public void registerStore( DataContext dataContext ) { - dataContext.getStatement().getTransaction().registerInvolvedAdapter( this.cassandraStore ); - } - - - private String logicalColumnFromPhysical( String physicalColumnName ) { - Matcher m = columnIdPattern.matcher( physicalColumnName ); - Long columnId; - if ( m.find() ) { - columnId = Long.valueOf( m.group( 1 ) ); - } else { - throw new 
RuntimeException( "Unable to find column id in physical column name: " + physicalColumnName ); - } - - return convention.physicalNameProvider.getLogicalColumnName( columnId ); - } - - - private CatalogColumn logicalColumnFromPhysicalColumn( String physicalColumnName ) { - Matcher m = columnIdPattern.matcher( physicalColumnName ); - Long columnId; - if ( m.find() ) { - columnId = Long.valueOf( m.group( 1 ) ); - } else { - throw new RuntimeException( "Unable to find column id in physical column name: " + physicalColumnName ); - } - - return convention.physicalNameProvider.getLogicalColumn( columnId ); - } - - - AlgProtoDataType getAlgDataType( String physicalTableName, boolean view ) { - Map columns; - if ( view ) { - throw new RuntimeException( "Views are currently broken." ); - } else { - columns = getKeyspace().getTable( "\"" + physicalTableName + "\"" ).get().getColumns(); - } - - // Temporary type factory, just for the duration of this method. Allowable because we're creating a prototype, - // not a type; before being used, the prototype will be copied into a real type factory. - final AlgDataTypeFactory typeFactory = new PolyTypeFactoryImpl( AlgDataTypeSystem.DEFAULT ); - final AlgDataTypeFactory.Builder fieldInfo = typeFactory.builder(); -// Pattern columnIdPattern = Pattern.compile( "^col([0-9]+)(r([0-9]+))?" ); - -// List>> preorderedList = new ArrayList<>(); - List> preorderedList = new ArrayList<>(); - - for ( Entry column : columns.entrySet() ) { - final String physicalColumnName = column.getKey().toString(); - final DataType type = column.getValue().getType(); - - // TODO: This mapping of types can be done much better - PolyType typeName = CassandraTypesUtils.getPolyType( type ); - - // TODO (PCP) - /*Matcher m = columnIdPattern.matcher( physicalColumnName ); - Long columnId; - if ( m.find() ) { - columnId = Long.valueOf( m.group( 1 ) ); - } else { - throw new RuntimeException( "Unable to find column id in physical column name: " + physicalColumnName ); - } - String logicalColumnName = convention.physicalNameProvider.getLogicalColumnName( columnId );*/ - CatalogColumn logicalColumn = this.logicalColumnFromPhysicalColumn( physicalColumnName ); - String logicalColumnName = this.logicalColumnFromPhysical( physicalColumnName ); - - AlgDataType algDataType; - if ( logicalColumn.collectionsType == PolyType.ARRAY ) { - AlgDataType innerType = typeFactory.createPolyType( logicalColumn.type ); - algDataType = typeFactory.createArrayType( innerType, logicalColumn.cardinality, logicalColumn.dimension ); - } else { - algDataType = typeFactory.createPolyType( logicalColumn.type ); - } - - preorderedList.add( new Pair<>( logicalColumn.position, new RowTypeGeneratorContainer( logicalColumnName, physicalColumnName, algDataType ) ) ); -// fieldInfo.add( logicalColumnName, physicalColumnName, typeFactory.createPolyType( typeName ) ).nullable( true ); - } - - preorderedList.sort( Comparator.naturalOrder() ); - - for ( Pair containerPair : preorderedList ) { - RowTypeGeneratorContainer container = containerPair.right; - fieldInfo.add( container.logicalName, container.physicalName, container.dataType ).nullable( true ); - } - - return AlgDataTypeImpl.proto( fieldInfo.build() ); - } - - - /** - * Get all primary key columns from the underlying CQL table - * - * @return A list of field names that are part of the partition and clustering keys - */ - Pair, List> getKeyFields( String physicalTableName, boolean view ) { - RelationMetadata relation; -// List qualifiedNames = new LinkedList<>(); -// 
qualifiedNames.add( this.name ); -// qualifiedNames.add( columnFamily ); -// String physicalTableName = this.convention.physicalNameProvider.getPhysicalTableName( qualifiedNames ); - if ( view ) { - relation = getKeyspace().getView( "\"" + physicalTableName + "\"" ).get(); - } else { - relation = getKeyspace().getTable( "\"" + physicalTableName + "\"" ).get(); - } - - List partitionKey = relation.getPartitionKey(); - List pKeyFields = new ArrayList<>(); - for ( ColumnMetadata column : partitionKey ) { - pKeyFields.add( this.logicalColumnFromPhysical( column.getName().toString() ) ); -// pKeyFields.add( column.getName().toString() ); - } - - Map clusteringKey = relation.getClusteringColumns(); - List cKeyFields = new ArrayList<>(); - for ( Entry column : clusteringKey.entrySet() ) { - cKeyFields.add( this.logicalColumnFromPhysical( column.getKey().getName().asInternal() ) ); -// cKeyFields.add( column.getKey().toString() ); - } - - return Pair.of( ImmutableList.copyOf( pKeyFields ), ImmutableList.copyOf( cKeyFields ) ); - } - - - /** - * Get all primary key columns from the underlying CQL table - * - * @return A list of field names that are part of the partition and clustering keys - */ - Pair, List> getPhysicalKeyFields( String physicalTableName, boolean view ) { - RelationMetadata relation; -// List qualifiedNames = new LinkedList<>(); -// qualifiedNames.add( this.name ); -// qualifiedNames.add( columnFamily ); -// String physicalTableName = this.convention.physicalNameProvider.getPhysicalTableName( qualifiedNames ); - if ( view ) { - relation = getKeyspace().getView( "\"" + physicalTableName + "\"" ).get(); - } else { - relation = getKeyspace().getTable( "\"" + physicalTableName + "\"" ).get(); - } - - List partitionKey = relation.getPartitionKey(); - List pKeyFields = new ArrayList<>(); - for ( ColumnMetadata column : partitionKey ) { -// pKeyFields.add( this.logicalColumnFromPhysical( column.getName().toString() ) ); - pKeyFields.add( column.getName().toString() ); - } - - Map clusteringKey = relation.getClusteringColumns(); - List cKeyFields = new ArrayList<>(); - for ( Entry column : clusteringKey.entrySet() ) { -// cKeyFields.add( this.logicalColumnFromPhysical( column.getKey().toString() ) ); - cKeyFields.add( column.getKey().toString() ); - } - - return Pair.of( ImmutableList.copyOf( pKeyFields ), ImmutableList.copyOf( cKeyFields ) ); - } - - - /** - * Get the collation of all clustering key columns. - * - * @return A RelCollations representing the collation of all clustering keys - */ - public List getClusteringOrder( String physicalTableName, boolean view ) { - RelationMetadata relation; -// List qualifiedNames = new LinkedList<>(); -// qualifiedNames.add( this.name ); -// qualifiedNames.add( columnFamily ); -// String physicalTableName = this.convention.physicalNameProvider.getPhysicalTableName( qualifiedNames ); - if ( view ) { -// throw new RuntimeException( "Views are currently broken." 
); - relation = getKeyspace().getView( "\"" + physicalTableName + "\"" ).get(); - } else { - relation = getKeyspace().getTable( "\"" + physicalTableName + "\"" ).get(); - } - - Map clusteringOrder = relation.getClusteringColumns(); - List keyCollations = new ArrayList<>(); - - int i = 0; - for ( Entry order : clusteringOrder.entrySet() ) { - AlgFieldCollation.Direction direction; - switch ( order.getValue() ) { - case DESC: - direction = AlgFieldCollation.Direction.DESCENDING; - break; - case ASC: - default: - direction = AlgFieldCollation.Direction.ASCENDING; - break; - } - CatalogColumn logicalColumn = this.logicalColumnFromPhysicalColumn( order.getKey().getName().asInternal() ); - keyCollations.add( new AlgFieldCollation( logicalColumn.position - 1, direction ) ); - i++; - } - - return keyCollations; - } - - - // FIXME JS: Do not regenerate TableMap every time we call this! - @Override - protected Map getTableMap() { - final ImmutableMap.Builder builder = ImmutableMap.builder(); - for ( Entry table : getKeyspace().getTables().entrySet() ) { - builder.put( table.getKey().toString(), new CassandraTable( this, table.getKey().toString() ) ); - - // TODO JS: Fix the view situation! - /*for ( MaterializedViewMetadata view : table.getValue().getViews() ) { - String viewName = view.getName(); - builder.put( viewName, new CassandraTable( this, viewName, true ) ); - }*/ - } - return builder.build(); - } - - - private KeyspaceMetadata getKeyspace() { - Optional metadata = session.getMetadata().getKeyspace( keyspace ); - if ( metadata.isPresent() ) { - return metadata.get(); - } else { - throw new RuntimeException( "There is no metadata." ); - } - } - - - @AllArgsConstructor - private class RowTypeGeneratorContainer { - - String logicalName; - String physicalName; - AlgDataType dataType; - - } - -} - diff --git a/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/CassandraSort.java b/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/CassandraSort.java deleted file mode 100644 index f86fec8956..0000000000 --- a/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/CassandraSort.java +++ /dev/null @@ -1,113 +0,0 @@ -/* - * Copyright 2019-2023 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * This file incorporates code covered by the following terms: - * - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to you under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.adapter.cassandra; - -import com.datastax.oss.driver.api.core.metadata.schema.ClusteringOrder; -import org.polypheny.db.algebra.AlgCollation; -import org.polypheny.db.algebra.AlgFieldCollation; -import org.polypheny.db.algebra.AlgNode; -import org.polypheny.db.algebra.core.Sort; -import org.polypheny.db.algebra.metadata.AlgMetadataQuery; -import org.polypheny.db.algebra.type.AlgDataTypeField; -import org.polypheny.db.plan.AlgOptCluster; -import org.polypheny.db.plan.AlgOptCost; -import org.polypheny.db.plan.AlgOptPlanner; -import org.polypheny.db.plan.AlgTraitSet; -import org.polypheny.db.rex.RexNode; - -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; - - -/** - * Implementation of {@link Sort} relational expression in Cassandra. - */ -public class CassandraSort extends Sort implements CassandraAlg { - - public CassandraSort( AlgOptCluster cluster, AlgTraitSet traitSet, AlgNode child, AlgCollation collation, RexNode offset, RexNode fetch ) { - super( cluster, traitSet, child, collation, offset, fetch ); - - // TODO JS: Check this -// assert getConvention() == CONVENTION; -// assert getConvention() == child.getConvention(); - } - - - @Override - public AlgOptCost computeSelfCost( AlgOptPlanner planner, AlgMetadataQuery mq ) { - AlgOptCost cost = super.computeSelfCost( planner, mq ); - if ( !collation.getFieldCollations().isEmpty() ) { - return cost.multiplyBy( 0.05 ); - } else { - return cost; - } - } - - - @Override - public Sort copy( AlgTraitSet traitSet, AlgNode input, AlgCollation newCollation, RexNode offset, RexNode fetch ) { - return new CassandraSort( getCluster(), traitSet, input, collation, offset, fetch ); - } - - - @Override - public void implement( CassandraImplementContext context ) { - context.visitChild( 0, getInput() ); - - List sortCollations = collation.getFieldCollations(); - Map fieldOrder = new LinkedHashMap<>(); - if ( !sortCollations.isEmpty() ) { - // Construct a series of order clauses from the desired collation - final List fields = getRowType().getFieldList(); - for ( AlgFieldCollation fieldCollation : sortCollations ) { - final String name = - fields.get( fieldCollation.getFieldIndex() ).getPhysicalName(); - final ClusteringOrder direction; - switch ( fieldCollation.getDirection() ) { - case DESCENDING: - direction = ClusteringOrder.DESC; - break; - default: - direction = ClusteringOrder.ASC; - } - fieldOrder.put( name, direction ); - } - - context.addOrder( fieldOrder ); - } - } - -} - diff --git a/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/CassandraTable.java b/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/CassandraTable.java deleted file mode 100644 index c47575e828..0000000000 --- a/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/CassandraTable.java +++ /dev/null @@ -1,361 +0,0 @@ -/* - * Copyright 2019-2023 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * This file incorporates code covered by the following terms: - * - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to you under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.adapter.cassandra; - - -import com.datastax.oss.driver.api.core.CqlSession; -import com.datastax.oss.driver.api.core.cql.SimpleStatement; -import com.datastax.oss.driver.api.core.metadata.schema.ClusteringOrder; -import com.datastax.oss.driver.api.querybuilder.QueryBuilder; -import com.datastax.oss.driver.api.querybuilder.relation.Relation; -import com.datastax.oss.driver.api.querybuilder.select.Select; -import com.datastax.oss.driver.api.querybuilder.select.SelectFrom; -import com.datastax.oss.driver.api.querybuilder.select.Selector; -import com.google.common.collect.ImmutableList; -import org.apache.calcite.linq4j.Enumerable; -import org.apache.calcite.linq4j.Enumerator; -import org.apache.calcite.linq4j.Queryable; -import org.polypheny.db.adapter.DataContext; -import org.polypheny.db.adapter.java.AbstractQueryableTable; -import org.polypheny.db.algebra.AlgFieldCollation; -import org.polypheny.db.algebra.AlgNode; -import org.polypheny.db.algebra.core.Modify; -import org.polypheny.db.algebra.core.Modify.Operation; -import org.polypheny.db.algebra.logical.relational.LogicalModify; -import org.polypheny.db.algebra.type.AlgDataType; -import org.polypheny.db.algebra.type.AlgDataTypeFactory; -import org.polypheny.db.algebra.type.AlgProtoDataType; -import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.exceptions.UnknownDatabaseException; -import org.polypheny.db.catalog.exceptions.UnknownSchemaException; -import org.polypheny.db.catalog.exceptions.UnknownTableException; -import org.polypheny.db.plan.AlgOptCluster; -import org.polypheny.db.plan.AlgOptTable; -import org.polypheny.db.plan.AlgOptTable.ToAlgContext; -import org.polypheny.db.plan.AlgTraitSet; -import org.polypheny.db.plan.Convention; -import org.polypheny.db.prepare.Prepare.CatalogReader; -import org.polypheny.db.rex.RexNode; -import org.polypheny.db.schema.ModelTraitDef; -import org.polypheny.db.schema.ModifiableTable; -import org.polypheny.db.schema.SchemaPlus; -import org.polypheny.db.schema.TranslatableTable; -import org.polypheny.db.schema.impl.AbstractTableQueryable; -import org.polypheny.db.util.Pair; - -import java.util.*; -import java.util.Map.Entry; - - -/** - * Table based on a Cassandra column 
family - */ -public class CassandraTable extends AbstractQueryableTable implements TranslatableTable, ModifiableTable { - - AlgProtoDataType protoRowType; - Pair, List> keyFields; - Pair, List> physicalKeyFields; - List clusteringOrder; - private final CassandraSchema cassandraSchema; - private final String columnFamily; - private final String physicalName; - private final boolean view; - -// private final String physicalTableName; - -// private final String logicalTableName; - - - public CassandraTable( CassandraSchema cassandraSchema, String columnFamily, boolean view ) { - super( Object[].class ); - this.cassandraSchema = cassandraSchema; - this.columnFamily = columnFamily; - this.view = view; - - List qualifiedNames = new LinkedList<>(); - qualifiedNames.add( cassandraSchema.name ); - qualifiedNames.add( columnFamily ); - this.physicalName = cassandraSchema.getConvention().physicalNameProvider.getPhysicalTableName( qualifiedNames ); - this.tableId = getCatalogTableId(); - } - - - public CassandraTable( CassandraSchema cassandraSchema, String columnFamily, String physicalName, boolean view, Long tableId ) { - super( Object[].class ); - this.cassandraSchema = cassandraSchema; - this.columnFamily = columnFamily; - this.view = view; - this.physicalName = physicalName; - this.tableId = tableId; - } - - - public CassandraTable( CassandraSchema cassandraSchema, String columnFamily ) { - this( cassandraSchema, columnFamily, false ); - } - - - private Long getCatalogTableId() { - try { - return Catalog.getInstance().getTable( cassandraSchema.name, columnFamily, physicalName ).id; - } catch ( UnknownTableException | UnknownDatabaseException | UnknownSchemaException e ) { - throw new RuntimeException( "Not possible to get tableId within CassandraTable", e ); - } - } - - - public String toString() { - return "CassandraTable {" + columnFamily + "}"; - } - - - public CassandraConvention getUnderlyingConvention() { - return this.cassandraSchema.getConvention(); - } - - - @Override - public AlgDataType getRowType( AlgDataTypeFactory typeFactory ) { - if ( protoRowType == null ) { - protoRowType = cassandraSchema.getAlgDataType( physicalName, view ); - } - return protoRowType.apply( typeFactory ); - } - - - public Pair, List> getKeyFields() { - if ( keyFields == null ) { - keyFields = cassandraSchema.getKeyFields( physicalName, view ); - } - return keyFields; - } - - - public Pair, List> getPhysicalKeyFields() { - if ( physicalKeyFields == null ) { - physicalKeyFields = cassandraSchema.getPhysicalKeyFields( physicalName, view ); - } - return physicalKeyFields; - } - - - public List getClusteringOrder() { - if ( clusteringOrder == null ) { - clusteringOrder = cassandraSchema.getClusteringOrder( physicalName, view ); - } - return clusteringOrder; - } - - - public Enumerable query( final CqlSession session ) { - return query( session, ImmutableList.of(), ImmutableList.of(), ImmutableList.of(), ImmutableList.of(), 0, -1 ); - } - - - /** - * Executes a CQL query on the underlying table. 
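-     * <p>
-     * As a rough illustration (hypothetical column and table names): with selectors for
-     * {@code "a"} and {@code "b"}, one predicate {@code "id" = 1}, no ordering, offset 0
-     * and fetch 100, the builder below produces roughly
-     * {@code SELECT "a","b" FROM cf WHERE "id" = 1 LIMIT 100 ALLOW FILTERING}.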
- * - * @param session Cassandra session - * @param fields List of fields to project - * @param predicates A list of predicates which should be used in the query - * @return Enumerator of results - */ - public Enumerable query( - final CqlSession session, - List> fields, - final List selectFields, - List predicates, - List> order, - final Integer offset, - final Integer fetch ) { - // Build the type of the resulting row based on the provided fields - /*final RelDataTypeFactory typeFactory = new SqlTypeFactoryImpl( RelDataTypeSystem.DEFAULT ); - final RelDataTypeFactory.Builder fieldInfo = typeFactory.builder(); - final RelDataType rowType = getRowType( typeFactory ); - - Function1 addField = fieldName -> { - PolyType typeName = rowType.getField( fieldName, true, false ).getType().getPolyType(); - fieldInfo.add( fieldName, typeFactory.createSqlType( typeName ) ).nullable( true ); - return null; - };*/ - - SelectFrom selectFrom = QueryBuilder.selectFrom( columnFamily ); - -// final RelProtoDataType resultRowType = RelDataTypeImpl.proto( fieldInfo.build() ); - - Select select; - // Construct the list of fields to project - if ( selectFields.isEmpty() ) { - select = selectFrom.all(); - } else { - select = selectFrom.selectors( selectFields ); - } - - select = select.where( predicates ); - - // FIXME js: Horrible hack, but hopefully works for now till I understand everything better. - Map orderMap = new LinkedHashMap<>(); - for ( Map.Entry entry : order ) { - orderMap.put( entry.getKey(), entry.getValue() ); - } - - select = select.orderBy( orderMap ); - int limit = offset; - if ( fetch >= 0 ) { - limit += fetch; - } - if ( limit > 0 ) { - select = select.limit( limit ); - } - - select = select.allowFiltering(); - - final SimpleStatement statement = select.build(); - - return new CassandraEnumerable( session, statement.getQuery(), offset ); - } - - - public Enumerable insert() { - /*final RelDataTypeFactory typeFactory = new SqlTypeFactoryImpl( RelDataTypeSystem.DEFAULT ); - final RelDataTypeFactory.Builder fieldInfo = typeFactory.builder(); - final RelDataType rowType = getRowType( typeFactory ); - - Function1 addField = fieldName -> { - PolyType typeName = rowType.getField( fieldName, true, false ).getType().getPolyType(); - fieldInfo.add( fieldName, typeFactory.createSqlType( typeName ) ).nullable( true ); - return null; - }; - - final RelProtoDataType resultRowType = RelDataTypeImpl.proto( fieldInfo.build() );*/ - - return null; - } - - - CqlSession getSession() { - return cassandraSchema.getSession(); - } - - - String getColumnFamily() { - return this.columnFamily; - } - - - String getPhysicalName() { - return this.physicalName; - } - - - @Override - public Queryable asQueryable( DataContext dataContext, SchemaPlus schema, String tableName ) { - return new CassandraQueryable<>( dataContext, schema, this, tableName ); - } - - - @Override - public AlgNode toAlg( ToAlgContext context, AlgOptTable algOptTable, AlgTraitSet traitSet ) { - final AlgOptCluster cluster = context.getCluster(); - return new CassandraScan( cluster, cluster.traitSetOf( cassandraSchema.getConvention() ).replace( traitSet.getTrait( ModelTraitDef.INSTANCE ) ), algOptTable, this, null ); - } - - - - - @Override - public Modify toModificationAlg( AlgOptCluster cluster, AlgOptTable table, CatalogReader catalogReader, AlgNode child, Operation operation, List updateColumnList, List sourceExpressionList, boolean flattened ) { -// return new CassandraTableModify( cluster, ) - cassandraSchema.getConvention().register( 
cluster.getPlanner() ); - return new LogicalModify( cluster, cluster.traitSetOf( Convention.NONE ), table, catalogReader, child, operation, updateColumnList, sourceExpressionList, flattened ); -// return new CassandraTableModify( cluster, cluster.traitSetOf( CassandraRel.CONVENTION ), table, catalogReader, child, operation, updateColumnList, sourceExpressionList, flattened, this, this.columnFamily ); - } - - - /** - * Implementation of {@link org.apache.calcite.linq4j.Queryable} based on a {@link org.polypheny.db.adapter.cassandra.CassandraTable}. - * - * @param element type - */ - public class CassandraQueryable extends AbstractTableQueryable { - - public CassandraQueryable( DataContext dataContext, SchemaPlus schema, CassandraTable table, String tableName ) { - super( dataContext, schema, table, tableName ); - } - - - @Override - public Enumerator enumerator() { - //noinspection unchecked - final Enumerable enumerable = (Enumerable) getTable().query( getSession() ); - return enumerable.enumerator(); - } - - - private CassandraTable getTable() { - return (CassandraTable) table; - } - - - private CqlSession getSession() { - return schema.unwrap( CassandraSchema.class ).session; - } - - - /** - * Called via code-generation. - * - * @see org.polypheny.db.adapter.cassandra.CassandraMethod#CASSANDRA_QUERYABLE_QUERY - */ - @SuppressWarnings("UnusedDeclaration") - public Enumerable query( - List> fields, - List selectFields, - List predicates, - List> order, - Integer offset, - Integer fetch ) { - return getTable().query( cassandraSchema.getSession(), fields, selectFields, predicates, order, offset, fetch ); - } - - - public Enumerable insert( - String query ) { - return CassandraEnumerable.of( getSession(), query ); - } - - } - -} - diff --git a/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/CassandraTableModify.java b/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/CassandraTableModify.java deleted file mode 100644 index 72511b2c0f..0000000000 --- a/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/CassandraTableModify.java +++ /dev/null @@ -1,167 +0,0 @@ -/* - * Copyright 2019-2023 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.polypheny.db.adapter.cassandra; - - -import com.datastax.oss.driver.api.core.data.UdtValue; -import com.datastax.oss.driver.api.querybuilder.QueryBuilder; -import com.datastax.oss.driver.api.querybuilder.term.Term; -import com.datastax.oss.driver.api.querybuilder.update.Assignment; -import java.util.ArrayList; -import java.util.List; -import lombok.extern.slf4j.Slf4j; -import org.polypheny.db.adapter.cassandra.CassandraAlg.CassandraImplementContext.Type; -import org.polypheny.db.adapter.cassandra.util.CassandraTypesUtils; -import org.polypheny.db.algebra.AbstractAlgNode; -import org.polypheny.db.algebra.AlgNode; -import org.polypheny.db.algebra.core.Modify; -import org.polypheny.db.algebra.metadata.AlgMetadataQuery; -import org.polypheny.db.plan.AlgOptCluster; -import org.polypheny.db.plan.AlgOptCost; -import org.polypheny.db.plan.AlgOptPlanner; -import org.polypheny.db.plan.AlgOptTable; -import org.polypheny.db.plan.AlgTraitSet; -import org.polypheny.db.prepare.Prepare.CatalogReader; -import org.polypheny.db.rex.RexCall; -import org.polypheny.db.rex.RexLiteral; -import org.polypheny.db.rex.RexNode; -import org.polypheny.db.sql.language.fun.SqlArrayValueConstructor; -import org.polypheny.db.util.Pair; - - -@Slf4j -public class CassandraTableModify extends Modify implements CassandraAlg { - - public final CassandraTable cassandraTable; - - - /** - * Creates a {@code Modify}. - *

-     * <p>The UPDATE operation has a format like this:
-     * <blockquote>
-     * <pre>UPDATE table SET ident1 = exp1, ident2 = exp2 WHERE condition</pre>
-     * </blockquote>
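-     * <p>
-     * Note that only literal source expressions (and array value constructors) are
-     * supported; any other source expression is rejected when the UPDATE is implemented.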
    - * - * @param cluster Cluster this relational expression belongs to - * @param traitSet Traits of this relational expression - * @param table Target table to modify - * @param catalogReader accessor to the table metadata. - * @param input Sub-query or filter condition - * @param operation Modify operation (INSERT, UPDATE, DELETE) - * @param updateColumnList List of column identifiers to be updated (e.g. ident1, ident2); null if not UPDATE - * @param sourceExpressionList List of value expressions to be set (e.g. exp1, exp2); null if not UPDATE - * @param flattened Whether set flattens the input row type - */ - public CassandraTableModify( - AlgOptCluster cluster, - AlgTraitSet traitSet, - AlgOptTable table, - CatalogReader catalogReader, - AlgNode input, - Operation operation, - List updateColumnList, - List sourceExpressionList, - boolean flattened ) { - super( cluster, traitSet, table, catalogReader, input, operation, updateColumnList, sourceExpressionList, flattened ); - this.cassandraTable = table.unwrap( CassandraTable.class ); - } - - - @Override - public AlgOptCost computeSelfCost( AlgOptPlanner planner, AlgMetadataQuery mq ) { - return super.computeSelfCost( planner, mq ).multiplyBy( 0.1 ); - } - - - @Override - public AlgNode copy( AlgTraitSet traitSet, List inputs ) { - return new CassandraTableModify( - getCluster(), - traitSet, - getTable(), - getCatalogReader(), - AbstractAlgNode.sole( inputs ), - getOperation(), - getUpdateColumnList(), - getSourceExpressionList(), - isFlattened() ); - } - - - @Override - public void register( AlgOptPlanner planner ) { - getConvention().register( planner ); - } - - - @Override - public void implement( CassandraImplementContext context ) { - log.debug( "CTM: Implementing." ); - context.cassandraTable = cassandraTable; - context.table = table; - - switch ( this.getOperation() ) { - case INSERT: - log.debug( "CTM: Insert detected." ); - context.type = Type.INSERT; - context.visitChild( 0, getInput() ); - break; - case UPDATE: - log.debug( "CTM: Update detected." ); - context.type = Type.UPDATE; - context.visitChild( 0, getInput() ); - - List setAssignments = new ArrayList<>(); - for ( Pair entry : Pair.zip( this.getUpdateColumnList(), this.getSourceExpressionList() ) ) { - if ( !(entry.right instanceof RexLiteral) && !((entry.right instanceof RexCall) && (((RexCall) entry.right).getOperator() instanceof SqlArrayValueConstructor)) ) { - throw new RuntimeException( "Non literal values are not yet supported." ); - } - - String physicalColumnName = ((CassandraConvention) getConvention()).physicalNameProvider.getPhysicalColumnName( cassandraTable.getColumnFamily(), entry.left ); - - Term term; - if ( entry.right instanceof RexLiteral ) { - term = QueryBuilder.literal( CassandraValues.literalValue( (RexLiteral) entry.right ) ); - } else { - SqlArrayValueConstructor arrayValueConstructor = (SqlArrayValueConstructor) ((RexCall) entry.right).op; - UdtValue udtValue = CassandraTypesUtils.createArrayContainerDataType( - context.cassandraTable.getUnderlyingConvention().arrayContainerUdt, - arrayValueConstructor.dimension, - arrayValueConstructor.maxCardinality, - ((RexCall) entry.right).type.getComponentType().getPolyType(), - (RexCall) entry.right ); - String udtString = udtValue.getFormattedContents(); - term = QueryBuilder.raw( udtString ); - } - setAssignments.add( Assignment.setColumn( physicalColumnName, term ) ); - } - - context.addAssignments( setAssignments ); - - break; - case DELETE: - log.debug( "CTM: Delete detected." 
); - context.type = Type.DELETE; - context.visitChild( 0, getInput() ); - break; - case MERGE: - throw new RuntimeException( "Merge is not supported." ); - } - } - -} diff --git a/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/CassandraToEnumerableConverter.java b/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/CassandraToEnumerableConverter.java deleted file mode 100644 index 0493854941..0000000000 --- a/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/CassandraToEnumerableConverter.java +++ /dev/null @@ -1,212 +0,0 @@ -/* - * Copyright 2019-2023 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * This file incorporates code covered by the following terms: - * - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to you under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.polypheny.db.adapter.cassandra; - -import com.datastax.oss.driver.api.core.metadata.schema.ClusteringOrder; -import com.datastax.oss.driver.api.querybuilder.QueryBuilder; -import com.datastax.oss.driver.api.querybuilder.insert.InsertInto; -import com.datastax.oss.driver.api.querybuilder.insert.RegularInsert; -import com.datastax.oss.driver.api.querybuilder.select.Select; -import com.datastax.oss.driver.api.querybuilder.select.SelectFrom; -import com.datastax.oss.driver.api.querybuilder.select.Selector; -import com.datastax.oss.driver.api.querybuilder.term.Term; -import lombok.extern.slf4j.Slf4j; -import org.apache.calcite.linq4j.tree.BlockBuilder; -import org.apache.calcite.linq4j.tree.Expression; -import org.apache.calcite.linq4j.tree.Expressions; -import org.apache.calcite.linq4j.tree.MethodCallExpression; -import org.polypheny.db.adapter.DataContext; -import org.polypheny.db.adapter.cassandra.CassandraAlg.CassandraImplementContext; -import org.polypheny.db.adapter.cassandra.rules.CassandraRules; -import org.polypheny.db.adapter.enumerable.*; -import org.polypheny.db.algebra.AlgNode; -import org.polypheny.db.algebra.convert.ConverterImpl; -import org.polypheny.db.algebra.metadata.AlgMetadataQuery; -import org.polypheny.db.algebra.type.AlgDataType; -import org.polypheny.db.plan.*; -import org.polypheny.db.schema.Schemas; -import org.polypheny.db.util.BuiltInMethod; - -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; -import java.util.StringJoiner; -import java.util.stream.Collectors; - - -/** - * Relational expression representing a scan of a table in a Cassandra data source. - */ -@Slf4j -public class CassandraToEnumerableConverter extends ConverterImpl implements EnumerableAlg { - - public CassandraToEnumerableConverter( AlgOptCluster cluster, AlgTraitSet traits, AlgNode input ) { - super( cluster, ConventionTraitDef.INSTANCE, traits, input ); - } - - - @Override - public AlgNode copy( AlgTraitSet traitSet, List inputs ) { - return new CassandraToEnumerableConverter( getCluster(), traitSet, sole( inputs ) ); - } - - - @Override - public AlgOptCost computeSelfCost( AlgOptPlanner planner, AlgMetadataQuery mq ) { - return super.computeSelfCost( planner, mq ).multiplyBy( .1 ); - } - - - @Override - public Result implement( EnumerableAlgImplementor implementor, Prefer pref ) { - // Generates a call to "query" with the appropriate fields and predicates - final BlockBuilder list = new BlockBuilder(); - final CassandraImplementContext cassandraContext = new CassandraImplementContext(); - cassandraContext.visitChild( 0, getInput() ); - final CassandraConvention convention = (CassandraConvention) getInput().getConvention(); - final AlgDataType rowType = getRowType(); - final PhysType physType = PhysTypeImpl.of( implementor.getTypeFactory(), rowType, pref.prefer( JavaRowFormat.ARRAY ) ); - - String cqlString; - switch ( cassandraContext.type ) { - case SELECT: - SelectFrom selectFrom = QueryBuilder.selectFrom( cassandraContext.cassandraTable.getPhysicalName() ); - Select select; - // Construct the list of fields to project - if ( cassandraContext.selectFields.isEmpty() ) { - List physicalNames = CassandraRules.cassandraPhysicalFieldNames( getRowType() ); - for ( String physicalName : physicalNames ) { - cassandraContext.selectFields.add( Selector.column( physicalName ) ); - } - } - select = selectFrom.selectors( cassandraContext.selectFields ); - - select = select.where( cassandraContext.whereClause ); - // FIXME js: Horrible hack, but hopefully 
works for now till I understand everything better. - Map orderMap = new LinkedHashMap<>(); - for ( Map.Entry entry : cassandraContext.order.entrySet() ) { - orderMap.put( entry.getKey(), entry.getValue() ); - } - - select = select.orderBy( orderMap ); - int limit = cassandraContext.offset; - if ( cassandraContext.fetch >= 0 ) { - limit += cassandraContext.fetch; - } - if ( limit > 0 ) { - select = select.limit( limit ); - } - - select = select.allowFiltering(); - cqlString = select.build().getQuery(); - break; - case INSERT: - if ( cassandraContext.insertValues.size() == 1 ) { - InsertInto insertInto = QueryBuilder.insertInto( cassandraContext.cassandraTable.getPhysicalName() ); - RegularInsert insert = insertInto.values( cassandraContext.insertValues.get( 0 ) ); - cqlString = insert.build().getQuery(); - } else { -// List statements = new ArrayList<>( ); - StringJoiner joiner = new StringJoiner( ";", "BEGIN BATCH ", " APPLY BATCH;" ); - for ( Map insertValue : cassandraContext.insertValues ) { - InsertInto insertInto = QueryBuilder.insertInto( cassandraContext.cassandraTable.getPhysicalName() ); - - joiner.add( insertInto.values( insertValue ).build().getQuery() ); - } - - cqlString = joiner.toString(); - } - break; - case UPDATE: - cqlString = QueryBuilder.update( cassandraContext.cassandraTable.getPhysicalName() ) - .set( cassandraContext.setAssignments ) - .where( cassandraContext.whereClause ) - .build() - .getQuery(); - break; - case DELETE: - cqlString = QueryBuilder.deleteFrom( cassandraContext.cassandraTable.getPhysicalName() ) - .where( cassandraContext.whereClause ) - .build() - .getQuery(); - break; - default: - cqlString = ""; - } - - list.add( Expressions.statement( Expressions.call( - Schemas.unwrap( convention.expression, CassandraSchema.class ), - "registerStore", - DataContext.ROOT ) ) ); - - Expression enumerable; - - final Expression simpleStatement = list.append( "statement", Expressions.constant( cqlString ) ); - final Expression cqlSession_ = list.append( - "cqlSession", - Expressions.call( - Schemas.unwrap( convention.expression, CassandraSchema.class ), - "getSession" ) ); - - enumerable = list.append( - "enumerable", - Expressions.call( - CassandraMethod.CASSANDRA_STRING_ENUMERABLE_OFFSET.method, - cqlSession_, - simpleStatement, - Expressions.constant( cassandraContext.offset ) - ) ); - list.add( Expressions.return_( null, enumerable ) ); - - return implementor.result( physType, list.toBlock() ); - } - - - /** - * E.g. {@code constantArrayList("x", "y")} returns "Arrays.asList('x', 'y')". - */ - private static MethodCallExpression constantArrayList( List values, Class clazz ) { - return Expressions.call( BuiltInMethod.ARRAYS_AS_LIST.method, Expressions.newArrayInit( clazz, constantList( values ) ) ); - } - - - /** - * E.g. {@code constantList("x", "y")} returns {@code {ConstantExpression("x"), ConstantExpression("y")}}. 
- */ - private static List constantList( List values ) { - return values.stream().map( Expressions::constant ).collect( Collectors.toList() ); - } - -} - diff --git a/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/CassandraValues.java b/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/CassandraValues.java deleted file mode 100644 index 170fec6956..0000000000 --- a/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/CassandraValues.java +++ /dev/null @@ -1,204 +0,0 @@ -/* - * Copyright 2019-2023 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.adapter.cassandra; - - -import com.datastax.oss.driver.api.querybuilder.QueryBuilder; -import com.datastax.oss.driver.api.querybuilder.term.Term; -import com.google.common.collect.ImmutableList; -import java.math.BigDecimal; -import java.time.LocalDate; -import java.time.LocalTime; -import java.util.ArrayList; -import java.util.Calendar; -import java.util.LinkedHashMap; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; -import lombok.extern.slf4j.Slf4j; -import org.apache.calcite.avatica.util.ByteString; -import org.apache.calcite.linq4j.tree.Expression; -import org.polypheny.db.algebra.core.Values; -import org.polypheny.db.algebra.metadata.AlgMetadataQuery; -import org.polypheny.db.algebra.type.AlgDataType; -import org.polypheny.db.algebra.type.AlgDataTypeField; -import org.polypheny.db.plan.AlgOptCluster; -import org.polypheny.db.plan.AlgOptCost; -import org.polypheny.db.plan.AlgOptPlanner; -import org.polypheny.db.plan.AlgTraitSet; -import org.polypheny.db.prepare.JavaTypeFactoryImpl; -import org.polypheny.db.rex.RexLiteral; -import org.polypheny.db.type.BasicPolyType; -import org.polypheny.db.type.IntervalPolyType; -import org.polypheny.db.util.DateString; -import org.polypheny.db.util.Pair; -import org.polypheny.db.util.TimeString; - - -@Slf4j -public class CassandraValues extends Values implements CassandraAlg { - - private final AlgDataType logicalRowType; - - - public CassandraValues( AlgOptCluster cluster, AlgDataType rowType, ImmutableList> tuples, AlgTraitSet traits ) { - super( cluster, rowType, tuples, traits ); - this.logicalRowType = rowType; - } - - - /** - * Convert the value of a literal to a string. - * - * @param literal Literal to translate - * @return String representation of the literal - */ - public static Object literalValue( RexLiteral literal ) { - Object valueType = getJavaClass( literal ); - return valueType; - } - - - public static Object getJavaClass( RexLiteral literal ) { - AlgDataType type = literal.getType(); - if ( type instanceof BasicPolyType || type instanceof IntervalPolyType ) { - switch ( type.getPolyType() ) { - case VARCHAR: - case CHAR: - return literal.getValue2(); - case DATE: - try { - return LocalDate.parse( literal.getValueAs( DateString.class ).toString() ); - } catch ( Exception e ) { - log.error( "Unable to cast date. 
", e ); - throw new RuntimeException( e ); - } - case TIME: - case TIME_WITH_LOCAL_TIME_ZONE: - try { - log.info( "Attempting to convert date." ); - return LocalTime.parse( literal.getValueAs( TimeString.class ).toString() ); - } catch ( Exception e ) { - log.error( "Unable to cast date. ", e ); - throw new RuntimeException( e ); - } - case INTEGER: - case INTERVAL_YEAR: - case INTERVAL_YEAR_MONTH: - case INTERVAL_MONTH: - return literal.getValue2(); -// return type.isNullable() ? Integer.class : int.class; - case TIMESTAMP: - case TIMESTAMP_WITH_LOCAL_TIME_ZONE: - try { - Calendar daysSinceEpoch = (Calendar) literal.getValue(); - return daysSinceEpoch.toInstant(); - } catch ( Exception e ) { - log.error( "Unable to cast timestamp. ", e ); - throw new RuntimeException( e ); - } - case BIGINT: - case INTERVAL_DAY: - case INTERVAL_DAY_HOUR: - case INTERVAL_DAY_MINUTE: - case INTERVAL_DAY_SECOND: - case INTERVAL_HOUR: - case INTERVAL_HOUR_MINUTE: - case INTERVAL_HOUR_SECOND: - case INTERVAL_MINUTE: - case INTERVAL_MINUTE_SECOND: - case INTERVAL_SECOND: - return (Long) literal.getValue2(); -// return type.isNullable() ? Long.class : long.class; - case SMALLINT: - return (Short) literal.getValue2(); -// return type.isNullable() ? Short.class : short.class; - case TINYINT: - return (Byte) literal.getValue2(); -// return type.isNullable() ? Byte.class : byte.class; - case DECIMAL: - return (BigDecimal) literal.getValue(); -// return BigDecimal.class; - case BOOLEAN: - return (Boolean) literal.getValue2(); -// return type.isNullable() ? Boolean.class : boolean.class; - case DOUBLE: - case FLOAT: // sic - return (Double) literal.getValue2(); -// return type.isNullable() ? Double.class : double.class; - case REAL: - return (Float) literal.getValue2(); -// return type.isNullable() ? Float.class : float.class; - case BINARY: - case VARBINARY: - return (ByteString) literal.getValue2(); -// return ByteString.class; - case GEOMETRY: -// return GeoFunctions.Geom.class; - case SYMBOL: -// return Enum.class; - case ANY: - return Object.class; - } - } - return null; - } - - - @Override - public AlgOptCost computeSelfCost( AlgOptPlanner planner, AlgMetadataQuery mq ) { - return super.computeSelfCost( planner, mq ).multiplyBy( CassandraConvention.COST_MULTIPLIER ); - } - - - @Override - public void implement( CassandraImplementContext context ) { - - List> items = new LinkedList<>(); - // TODO JS: Is this work around still needed with the fix in CassandraSchema? 
- final List physicalFields = context.cassandraTable.getRowType( new JavaTypeFactoryImpl() ).getFieldList(); - final List logicalFields = rowType.getFieldList(); - final List fields = new ArrayList<>(); - for ( AlgDataTypeField field : logicalFields ) { - for ( AlgDataTypeField physicalField : physicalFields ) { - if ( field.getName().equals( physicalField.getName() ) ) { - fields.add( physicalField ); - break; - } - } - } -// final List fields = rowType.getFieldList(); - for ( List tuple : tuples ) { - final List literals = new ArrayList<>(); - Map oneInsert = new LinkedHashMap<>(); - for ( Pair pair : Pair.zip( fields, tuple ) ) { - try { - oneInsert.put( pair.left.getPhysicalName(), QueryBuilder.literal( literalValue( pair.right ) ) ); -// oneInsert.put( pair.left.getName(), QueryBuilder.literal( literalValue( pair.right ) ) ); - } catch ( Exception e ) { - log.error( "Something broke while parsing cql values.", e ); - throw new RuntimeException( e ); - } - } - - items.add( oneInsert ); - } - - context.addInsertValues( items ); - } - -} diff --git a/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/package-info.java b/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/package-info.java deleted file mode 100644 index cb338ff98a..0000000000 --- a/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/package-info.java +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Copyright 2019-2023 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/** - * Cassandra adapter. - *

    - * There is one table for each Cassandra column family. - */ - -package org.polypheny.db.adapter.cassandra; diff --git a/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/rules/CassandraConverterRule.java b/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/rules/CassandraConverterRule.java deleted file mode 100644 index 04c8125ebc..0000000000 --- a/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/rules/CassandraConverterRule.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Copyright 2019-2023 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.adapter.cassandra.rules; - - -import java.util.function.Predicate; -import org.polypheny.db.adapter.cassandra.CassandraConvention; -import org.polypheny.db.algebra.AlgNode; -import org.polypheny.db.algebra.convert.ConverterRule; -import org.polypheny.db.plan.AlgTrait; -import org.polypheny.db.plan.Convention; -import org.polypheny.db.tools.AlgBuilderFactory; - - -/** - * Base class for planner rules that convert a relational expression to Cassandra calling convention. - */ -public abstract class CassandraConverterRule extends ConverterRule { - - protected final Convention out; - - - CassandraConverterRule( - Class clazz, - Predicate predicate, - AlgTrait in, - CassandraConvention out, - AlgBuilderFactory algBuilderFactory, - String description ) { - super( clazz, predicate, in, out, algBuilderFactory, description ); - this.out = out; - } - -} diff --git a/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/rules/CassandraFilterRule.java b/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/rules/CassandraFilterRule.java deleted file mode 100644 index c9ce7ae266..0000000000 --- a/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/rules/CassandraFilterRule.java +++ /dev/null @@ -1,182 +0,0 @@ -/* - * Copyright 2019-2023 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.polypheny.db.adapter.cassandra.rules; - - -import java.util.ArrayList; -import java.util.HashSet; -import java.util.List; -import java.util.Set; -import lombok.extern.slf4j.Slf4j; -import org.polypheny.db.adapter.cassandra.CassandraConvention; -import org.polypheny.db.adapter.cassandra.CassandraFilter; -import org.polypheny.db.adapter.cassandra.CassandraTable; -import org.polypheny.db.adapter.cassandra.util.CassandraUtils; -import org.polypheny.db.algebra.AlgNode; -import org.polypheny.db.algebra.constant.Kind; -import org.polypheny.db.algebra.core.Filter; -import org.polypheny.db.algebra.logical.relational.LogicalFilter; -import org.polypheny.db.algebra.type.AlgDataTypeField; -import org.polypheny.db.plan.AlgOptRuleCall; -import org.polypheny.db.plan.AlgOptUtil; -import org.polypheny.db.plan.AlgTraitSet; -import org.polypheny.db.plan.Convention; -import org.polypheny.db.plan.volcano.AlgSubset; -import org.polypheny.db.prepare.JavaTypeFactoryImpl; -import org.polypheny.db.rex.RexCall; -import org.polypheny.db.rex.RexInputRef; -import org.polypheny.db.rex.RexNode; -import org.polypheny.db.tools.AlgBuilderFactory; -import org.polypheny.db.util.Pair; - - -/** - * Rule to convert a {@link LogicalFilter} to a {@link CassandraFilter}. - */ -@Slf4j -public class CassandraFilterRule extends CassandraConverterRule { - - CassandraFilterRule( CassandraConvention out, AlgBuilderFactory algBuilderFactory ) { - super( Filter.class, r -> true, Convention.NONE, out, algBuilderFactory, "CassandraFilterRule:" + out.getName() ); - } - - - @Override - public AlgNode convert( AlgNode alg ) { - log.debug( "Attempting to convert." ); - Filter filter = (Filter) alg; - final AlgTraitSet traitSet = filter.getTraitSet().replace( out ); - return new CassandraFilter( - filter.getCluster(), - traitSet, - convert( filter.getInput(), filter.getInput().getTraitSet().replace( out ) ), - filter.getCondition() ); - } - - - @Override - public boolean matches( AlgOptRuleCall call ) { - log.debug( "Checking whether we can convert to CassandraFilter." ); - Filter filter = call.alg( 0 ); - RexNode condition = filter.getCondition(); - - List disjunctions = AlgOptUtil.disjunctions( condition ); - if ( disjunctions.size() != 1 ) { - log.debug( "Cannot convert, condition is a disjunction: {}", condition.toString() ); - return false; - } - - CassandraTable table = null; - // This is a copy in getRelList, so probably expensive! - if ( filter.getInput() instanceof AlgSubset ) { - AlgSubset subset = (AlgSubset) filter.getInput(); - table = CassandraUtils.getUnderlyingTable( subset, this.out ); - } - - if ( table == null ) { - log.debug( "Cannot convert, cannot find table as child." ); - return false; - } - - Pair, List> keyFields = table.getKeyFields(); - Set partitionKeys = new HashSet<>( keyFields.left ); - // TODO JS: Is this work around still needed with the fix in CassandraSchema? 
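-        // Same workaround as in CassandraValues: match the logical filter fields to the
-        // physical row type by name before checking that every conjunct is a key predicate.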
- final List physicalFields = table.getRowType( new JavaTypeFactoryImpl() ).getFieldList(); - final List logicalFields = filter.getRowType().getFieldList(); - final List fields = new ArrayList<>(); - List fieldNames = new ArrayList<>(); - for ( AlgDataTypeField field : logicalFields ) { - for ( AlgDataTypeField physicalField : physicalFields ) { - if ( field.getName().equals( physicalField.getName() ) ) { - fields.add( physicalField ); - fieldNames.add( field.getName() ); - break; - } - } - } -// List fieldNames = CassandraRules.cassandraLogicalFieldNames( filter.getInput().getRowType() ); - - // Check that all conjunctions are primary key equalities - condition = disjunctions.get( 0 ); - for ( RexNode predicate : AlgOptUtil.conjunctions( condition ) ) { - if ( !isEqualityOnKey( predicate, fieldNames, partitionKeys, keyFields.right ) ) { - return false; - } - } - - return true; - } - - - /** - * Check if the node is a supported predicate (primary key equality). - * - * @param node Condition node to check - * @param fieldNames Names of all columns in the table - * @param partitionKeys Names of primary key columns - * @param clusteringKeys Names of primary key columns - * @return True if the node represents an equality predicate on a primary key - */ - private boolean isEqualityOnKey( RexNode node, List fieldNames, Set partitionKeys, List clusteringKeys ) { - if ( !(node.getKind() == Kind.EQUALS - || node.getKind() == Kind.GREATER_THAN - || node.getKind() == Kind.GREATER_THAN_OR_EQUAL - || node.getKind() == Kind.LESS_THAN - || node.getKind() == Kind.LESS_THAN_OR_EQUAL - || node.getKind() == Kind.NOT_EQUALS) ) { - return false; - } - - RexCall call = (RexCall) node; - final RexNode left = call.operands.get( 0 ); - final RexNode right = call.operands.get( 1 ); - String key = compareFieldWithLiteral( left, right, fieldNames ); - if ( key == null ) { - key = compareFieldWithLiteral( right, left, fieldNames ); - } - if ( key != null ) { - return partitionKeys.remove( key ) || clusteringKeys.contains( key ); - } else { - return false; - } - } - - - /** - * Check if an equality operation is comparing a primary key column with a literal. - * - * @param left Left operand of the equality - * @param right Right operand of the equality - * @param fieldNames Names of all columns in the table - * @return The field being compared or null if there is no key equality - */ - private String compareFieldWithLiteral( RexNode left, RexNode right, List fieldNames ) { - // FIXME Ignore casts for new and assume they aren't really necessary - if ( left.isA( Kind.CAST ) ) { - left = ((RexCall) left).getOperands().get( 0 ); - } - - if ( left.isA( Kind.INPUT_REF ) && right.isA( Kind.LITERAL ) ) { - final RexInputRef left1 = (RexInputRef) left; - if ( left1.getIndex() < fieldNames.size() ) { - return fieldNames.get( left1.getIndex() ); - } - } - return null; - } - -} diff --git a/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/rules/CassandraLimitRule.java b/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/rules/CassandraLimitRule.java deleted file mode 100644 index 418770fc44..0000000000 --- a/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/rules/CassandraLimitRule.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Copyright 2019-2023 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.adapter.cassandra.rules; - - -import org.polypheny.db.adapter.cassandra.CassandraConvention; -import org.polypheny.db.adapter.cassandra.CassandraLimit; -import org.polypheny.db.adapter.enumerable.EnumerableConvention; -import org.polypheny.db.adapter.enumerable.EnumerableLimit; -import org.polypheny.db.algebra.AlgNode; -import org.polypheny.db.plan.AlgTraitSet; -import org.polypheny.db.tools.AlgBuilderFactory; - - -/** - * Rule to convert a {@link EnumerableLimit} to a {@link CassandraLimit}. - */ -public class CassandraLimitRule extends CassandraConverterRule { - - CassandraLimitRule( CassandraConvention out, AlgBuilderFactory algBuilderFactory ) { - super( EnumerableLimit.class, r -> true, EnumerableConvention.INSTANCE, out, algBuilderFactory, "CassandraLimitRule:" + out.getName() ); - } - - - @Override - public AlgNode convert( AlgNode alg ) { - final EnumerableLimit limit = (EnumerableLimit) alg; - final AlgTraitSet traitSet = limit.getTraitSet().replace( out ); - return new CassandraLimit( - limit.getCluster(), - traitSet, - convert( limit.getInput(), limit.getInput().getTraitSet().replace( out ) ), - limit.offset, - limit.fetch ); - } - -} diff --git a/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/rules/CassandraProjectRule.java b/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/rules/CassandraProjectRule.java deleted file mode 100644 index 7043189410..0000000000 --- a/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/rules/CassandraProjectRule.java +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Copyright 2019-2023 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.adapter.cassandra.rules; - - -import org.polypheny.db.adapter.cassandra.CassandraConvention; -import org.polypheny.db.adapter.cassandra.CassandraProject; -import org.polypheny.db.algebra.AlgNode; -import org.polypheny.db.algebra.core.Project; -import org.polypheny.db.algebra.logical.relational.LogicalProject; -import org.polypheny.db.plan.AlgOptRuleCall; -import org.polypheny.db.plan.AlgTraitSet; -import org.polypheny.db.plan.Convention; -import org.polypheny.db.rex.RexCall; -import org.polypheny.db.rex.RexLiteral; -import org.polypheny.db.rex.RexNode; -import org.polypheny.db.sql.language.fun.SqlArrayValueConstructor; -import org.polypheny.db.tools.AlgBuilderFactory; -import org.polypheny.db.util.UnsupportedRexCallVisitor; - - -/** - * Rule to convert a {@link LogicalProject} to a {@link CassandraProject}. 
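- * <p>
- * Projects containing array value constructors or model items (as detected by
- * {@link UnsupportedRexCallVisitor}) are not matched and remain on the logical convention.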
- */ -public class CassandraProjectRule extends CassandraConverterRule { - - CassandraProjectRule( CassandraConvention out, AlgBuilderFactory algBuilderFactory ) { - super( Project.class, r -> true, Convention.NONE, out, algBuilderFactory, "CassandraProjectRule:" + out.getName() ); - } - - - @Override - public boolean matches( AlgOptRuleCall call ) { - Project project = call.alg( 0 ); - return !UnsupportedRexCallVisitor.containsArrayConstructorOrModelItem( project.getProjects() ); - } - - - @Override - public AlgNode convert( AlgNode alg ) { - final Project project = (Project) alg; - final AlgTraitSet traitSet = project.getTraitSet().replace( out ); - - boolean arrayValueProject = true; - for ( RexNode e : project.getProjects() ) { - if ( !((e instanceof RexCall) && (((RexCall) e).getOperator() instanceof SqlArrayValueConstructor)) && !(e instanceof RexLiteral) ) { - arrayValueProject = false; - } - } - - return new CassandraProject( - project.getCluster(), - traitSet, - convert( project.getInput(), project.getInput().getTraitSet().replace( out ) ), - project.getProjects(), - project.getRowType(), - arrayValueProject ); - } - -} diff --git a/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/rules/CassandraRules.java b/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/rules/CassandraRules.java deleted file mode 100644 index 192cbc4738..0000000000 --- a/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/rules/CassandraRules.java +++ /dev/null @@ -1,401 +0,0 @@ -/* - * Copyright 2019-2023 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.polypheny.db.adapter.cassandra.rules; - - -import com.google.common.collect.ImmutableList; -import java.util.HashSet; -import java.util.List; -import java.util.Set; -import java.util.function.Predicate; -import java.util.stream.Collectors; -import org.polypheny.db.adapter.cassandra.CassandraConvention; -import org.polypheny.db.adapter.cassandra.CassandraFilter; -import org.polypheny.db.adapter.cassandra.CassandraProject; -import org.polypheny.db.adapter.cassandra.CassandraScan; -import org.polypheny.db.adapter.cassandra.CassandraSort; -import org.polypheny.db.adapter.cassandra.CassandraTable; -import org.polypheny.db.adapter.cassandra.CassandraToEnumerableConverter; -import org.polypheny.db.adapter.java.JavaTypeFactory; -import org.polypheny.db.algebra.AlgCollation; -import org.polypheny.db.algebra.AlgCollations; -import org.polypheny.db.algebra.AlgFieldCollation; -import org.polypheny.db.algebra.AlgNode; -import org.polypheny.db.algebra.constant.Kind; -import org.polypheny.db.algebra.convert.ConverterRule; -import org.polypheny.db.algebra.core.AlgFactories; -import org.polypheny.db.algebra.core.Sort; -import org.polypheny.db.algebra.logical.relational.LogicalFilter; -import org.polypheny.db.algebra.logical.relational.LogicalProject; -import org.polypheny.db.algebra.type.AlgDataType; -import org.polypheny.db.algebra.type.AlgDataTypeField; -import org.polypheny.db.plan.AlgOptRule; -import org.polypheny.db.plan.AlgOptRuleCall; -import org.polypheny.db.plan.AlgOptRuleOperand; -import org.polypheny.db.plan.AlgOptUtil; -import org.polypheny.db.plan.AlgTraitSet; -import org.polypheny.db.plan.Convention; -import org.polypheny.db.rex.RexCall; -import org.polypheny.db.rex.RexInputRef; -import org.polypheny.db.rex.RexLiteral; -import org.polypheny.db.rex.RexNode; -import org.polypheny.db.rex.RexVisitorImpl; -import org.polypheny.db.tools.AlgBuilderFactory; -import org.polypheny.db.util.Pair; -import org.polypheny.db.util.ValidatorUtil; -import org.polypheny.db.util.trace.PolyphenyDbTrace; -import org.slf4j.Logger; - - -/** - * Rules and relational operators for {@link CassandraConvention} calling convention. - */ -public class CassandraRules { - - private CassandraRules() { - } - - - protected static final Logger LOGGER = PolyphenyDbTrace.getPlannerTracer(); - - - public static List rules( CassandraConvention out ) { - return rules( out, AlgFactories.LOGICAL_BUILDER ); - } - - - public static List rules( CassandraConvention out, AlgBuilderFactory algBuilderFactory ) { - return ImmutableList.of( - new CassandraToEnumerableConverterRule( out, algBuilderFactory ), - new CassandraFilterRule( out, algBuilderFactory ), - new CassandraProjectRule( out, algBuilderFactory ), - // TODO js: Disabling sort till I have time to figure out how to properly implement it. -// new CassandraSortRule( out, algBuilderFactory ), - new CassandraLimitRule( out, algBuilderFactory ), - new CassandraValuesRule( out, algBuilderFactory ), - new CassandraTableModificationRule( out, algBuilderFactory ) - ); - } - - - public static List cassandraLogicalFieldNames( final AlgDataType rowType ) { - return ValidatorUtil.uniquify( rowType.getFieldNames(), ValidatorUtil.EXPR_SUGGESTER, true ); - } - - - public static List cassandraPhysicalFieldNames( final AlgDataType rowType ) { - List> pairs = Pair.zip( rowType.getFieldList().stream().map( AlgDataTypeField::getPhysicalName ).collect( Collectors.toList() ), rowType.getFieldNames() ); - return pairs.stream().map( it -> it.left != null ? 
it.left : it.right ).collect( Collectors.toList() ); - } - - - /** - * Translator from {@link RexNode} to strings in Cassandra's expression language. - */ - public static class RexToCassandraTranslator extends RexVisitorImpl { - - private final JavaTypeFactory typeFactory; - private final List inFields; - - - public RexToCassandraTranslator( JavaTypeFactory typeFactory, List inFields ) { - super( true ); - this.typeFactory = typeFactory; - this.inFields = inFields; - } - - - @Override - public String visitInputRef( RexInputRef inputRef ) { - return inFields.get( inputRef.getIndex() ); - } - - } - - - /** - * Rule to convert a {@link LogicalFilter} to a {@link CassandraFilter}. - */ - private static class CassandraFilterRuleOld extends AlgOptRule { - - // TODO: Check for an equality predicate on the partition key. Right now this just checks if we have a single top-level AND - private static final Predicate PREDICATE = filter -> AlgOptUtil.disjunctions( filter.getCondition() ).size() == 1; - - // private static final CassandraFilterRule INSTANCE = new CassandraFilterRule(); - protected final Convention out; - - - private CassandraFilterRuleOld( CassandraConvention out, AlgBuilderFactory algBuilderFactory ) { - super( operand( LogicalFilter.class, operand( CassandraScan.class, none() ) ), "CassandraFilterRule" ); - this.out = out; - } - - - @Override - public boolean matches( AlgOptRuleCall call ) { - // Get the condition from the filter operation - LogicalFilter filter = call.alg( 0 ); - RexNode condition = filter.getCondition(); - - // Get field names from the scan operation - CassandraScan scan = call.alg( 1 ); - Pair, List> keyFields = ((CassandraTable) scan.getTable()).getKeyFields(); - Set partitionKeys = new HashSet<>( keyFields.left ); - List fieldNames = CassandraRules.cassandraLogicalFieldNames( filter.getInput().getRowType() ); - - List disjunctions = AlgOptUtil.disjunctions( condition ); - if ( disjunctions.size() != 1 ) { - return false; - } else { - // Check that all conjunctions are primary key equalities - condition = disjunctions.get( 0 ); - for ( RexNode predicate : AlgOptUtil.conjunctions( condition ) ) { - if ( !isEqualityOnKey( predicate, fieldNames, partitionKeys, keyFields.right ) ) { - return false; - } - } - } - - // Either all of the partition keys must be specified or none - return partitionKeys.size() == keyFields.left.size() || partitionKeys.size() == 0; - } - - - /** - * Check if the node is a supported predicate (primary key equality). - * - * @param node Condition node to check - * @param fieldNames Names of all columns in the table - * @param partitionKeys Names of primary key columns - * @param clusteringKeys Names of primary key columns - * @return True if the node represents an equality predicate on a primary key - */ - private boolean isEqualityOnKey( RexNode node, List fieldNames, Set partitionKeys, List clusteringKeys ) { - if ( node.getKind() != Kind.EQUALS ) { - return false; - } - - RexCall call = (RexCall) node; - final RexNode left = call.operands.get( 0 ); - final RexNode right = call.operands.get( 1 ); - String key = compareFieldWithLiteral( left, right, fieldNames ); - if ( key == null ) { - key = compareFieldWithLiteral( right, left, fieldNames ); - } - if ( key != null ) { - return partitionKeys.remove( key ) || clusteringKeys.contains( key ); - } else { - return false; - } - } - - - /** - * Check if an equality operation is comparing a primary key column with a literal. 
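-         * <p>
-         * Casts around the field reference are stripped first, so a predicate such as
-         * {@code CAST(x) = 5} is still recognized as a key comparison.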
- * - * @param left Left operand of the equality - * @param right Right operand of the equality - * @param fieldNames Names of all columns in the table - * @return The field being compared or null if there is no key equality - */ - private String compareFieldWithLiteral( RexNode left, RexNode right, List fieldNames ) { - // FIXME Ignore casts for new and assume they aren't really necessary - if ( left.isA( Kind.CAST ) ) { - left = ((RexCall) left).getOperands().get( 0 ); - } - - if ( left.isA( Kind.INPUT_REF ) && right.isA( Kind.LITERAL ) ) { - final RexInputRef left1 = (RexInputRef) left; - return fieldNames.get( left1.getIndex() ); - } else { - return null; - } - } - - - /** - * @see ConverterRule - */ - @Override - public void onMatch( AlgOptRuleCall call ) { - LogicalFilter filter = call.alg( 0 ); - CassandraScan scan = call.alg( 1 ); - if ( filter.getTraitSet().contains( Convention.NONE ) ) { - final AlgNode converted = convert( filter, scan ); - if ( converted != null ) { - call.transformTo( converted ); - } - } - } - - - public AlgNode convert( LogicalFilter filter, CassandraScan scan ) { - final AlgTraitSet traitSet = filter.getTraitSet().replace( out ); - final Pair, List> keyFields = ((CassandraTable) scan.getTable()).getKeyFields(); - return new CassandraFilter( filter.getCluster(), traitSet, convert( filter.getInput(), filter.getInput().getTraitSet().replace( out ) ), filter.getCondition(), keyFields.left, keyFields.right, ((CassandraTable) scan.getTable()).getClusteringOrder() ); - } - - } - - - /** - * Rule to convert a {@link LogicalProject} to a {@link CassandraProject}. - */ - private static class CassandraProjectRuleOld extends CassandraConverterRule { - - private CassandraProjectRuleOld( CassandraConvention out, AlgBuilderFactory algBuilderFactory ) { - super( LogicalProject.class, r -> true, Convention.NONE, out, algBuilderFactory, "CassandraProjectRule" ); - } - - - @Override - public boolean matches( AlgOptRuleCall call ) { - LogicalProject project = call.alg( 0 ); - for ( RexNode e : project.getProjects() ) { - if ( !(e instanceof RexInputRef) && !(e instanceof RexLiteral) ) { - LOGGER.debug( "Failed to match CassandraProject." ); - return false; - } - } - - LOGGER.debug( "Matched CassandraProject." ); - return true; - } - - - @Override - public AlgNode convert( AlgNode alg ) { - final LogicalProject project = (LogicalProject) alg; - final AlgTraitSet traitSet = project.getTraitSet().replace( out ); - return new CassandraProject( project.getCluster(), traitSet, convert( project.getInput(), project.getInput().getTraitSet().replace( out ) ), project.getProjects(), project.getRowType(), false ); - } - - } - - - /** - * Rule to convert a {@link Sort} to a - * {@link CassandraSort}. 
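-     * <p>
-     * Cassandra can only honor an explicit ORDER BY within a single partition, so the rule
-     * operand requires a {@link CassandraFilter} that pins the query to a single partition.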
- */ - private static class CassandraSortRuleOld extends AlgOptRule { - - private static final AlgOptRuleOperand CASSANDRA_OP = operand( CassandraToEnumerableConverter.class, operandJ( CassandraFilter.class, null, CassandraFilter::isSinglePartition, any() ) ); // We can only use implicit sorting within a single partition - - protected final Convention out; - - - private CassandraSortRuleOld( CassandraConvention out, AlgBuilderFactory algBuilderFactory ) { - super( - operandJ( Sort.class, null, - // Limits are handled by CassandraLimit - sort -> sort.offset == null && sort.fetch == null, CASSANDRA_OP ), - "CassandraSortRule" - ); - this.out = out; - } - - - public AlgNode convert( Sort sort, CassandraFilter filter ) { - final AlgTraitSet traitSet = sort.getTraitSet().replace( out ).replace( sort.getCollation() ); - return new CassandraSort( sort.getCluster(), traitSet, convert( sort.getInput(), traitSet.replace( AlgCollations.EMPTY ) ), sort.getCollation(), sort.offset, sort.fetch ); - } - - - @Override - public boolean matches( AlgOptRuleCall call ) { - final Sort sort = call.alg( 0 ); - final CassandraFilter filter = call.alg( 2 ); - return collationsCompatible( sort.getCollation(), filter.getImplicitCollation() ); - } - - - /** - * Check if it is possible to exploit native CQL sorting for a given collation. - * - * @return True if it is possible to achieve this sort in Cassandra - */ - private boolean collationsCompatible( AlgCollation sortCollation, AlgCollation implicitCollation ) { - List sortFieldCollations = sortCollation.getFieldCollations(); - List implicitFieldCollations = implicitCollation.getFieldCollations(); - - if ( sortFieldCollations.size() > implicitFieldCollations.size() ) { - return false; - } - if ( sortFieldCollations.size() == 0 ) { - return true; - } - - // Check if we need to reverse the order of the implicit collation - boolean reversed = reverseDirection( sortFieldCollations.get( 0 ).getDirection() ) == implicitFieldCollations.get( 0 ).getDirection(); - - for ( int i = 0; i < sortFieldCollations.size(); i++ ) { - AlgFieldCollation sorted = sortFieldCollations.get( i ); - AlgFieldCollation implied = implicitFieldCollations.get( i ); - - // Check that the fields being sorted match - if ( sorted.getFieldIndex() != implied.getFieldIndex() ) { - return false; - } - - // Either all fields must be sorted in the same direction or the opposite direction based on whether we decided if the sort direction should be reversed above - AlgFieldCollation.Direction sortDirection = sorted.getDirection(); - AlgFieldCollation.Direction implicitDirection = implied.getDirection(); - if ( (!reversed && sortDirection != implicitDirection) || (reversed && reverseDirection( sortDirection ) != implicitDirection) ) { - return false; - } - } - - return true; - } - - - /** - * Find the reverse of a given collation direction. 
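-         * <p>
-         * For example, {@code ASCENDING} and {@code STRICTLY_ASCENDING} reverse to
-         * {@code DESCENDING}; directions without a defined reverse yield {@code null}.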
- *
- * @return Reverse of the input direction
- */
- private AlgFieldCollation.Direction reverseDirection( AlgFieldCollation.Direction direction ) {
- switch ( direction ) {
- case ASCENDING:
- case STRICTLY_ASCENDING:
- return AlgFieldCollation.Direction.DESCENDING;
- case DESCENDING:
- case STRICTLY_DESCENDING:
- return AlgFieldCollation.Direction.ASCENDING;
- default:
- return null;
- }
- }
-
-
- /**
- * @see ConverterRule
- */
- @Override
- public void onMatch( AlgOptRuleCall call ) {
- final Sort sort = call.alg( 0 );
- CassandraFilter filter = call.alg( 2 );
- final AlgNode converted = convert( sort, filter );
- if ( converted != null ) {
- call.transformTo( converted );
- }
- }
-
- }
-
-}
-
diff --git a/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/rules/CassandraSortRule.java b/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/rules/CassandraSortRule.java
deleted file mode 100644
index f8f547b084..0000000000
--- a/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/rules/CassandraSortRule.java
+++ /dev/null
@@ -1,135 +0,0 @@
-/*
- * Copyright 2019-2023 The Polypheny Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.polypheny.db.adapter.cassandra.rules;
-
-
-import java.util.List;
-import org.polypheny.db.adapter.cassandra.CassandraConvention;
-import org.polypheny.db.adapter.cassandra.CassandraSort;
-import org.polypheny.db.adapter.cassandra.CassandraTable;
-import org.polypheny.db.algebra.AlgCollations;
-import org.polypheny.db.algebra.AlgFieldCollation;
-import org.polypheny.db.algebra.AlgNode;
-import org.polypheny.db.algebra.core.Sort;
-import org.polypheny.db.plan.AlgOptRuleCall;
-import org.polypheny.db.plan.AlgTraitSet;
-import org.polypheny.db.plan.Convention;
-import org.polypheny.db.plan.volcano.AlgSubset;
-import org.polypheny.db.tools.AlgBuilderFactory;
-
-
-/**
- * Rule to convert a {@link Sort} to a
- * {@link CassandraSort}.
- */
-public class CassandraSortRule extends CassandraConverterRule {
-
- CassandraSortRule( CassandraConvention out, AlgBuilderFactory algBuilderFactory ) {
- super( Sort.class, r -> true, Convention.NONE, out, algBuilderFactory, "CassandraSortRule" );
- }
-
-
- @Override
- public AlgNode convert( AlgNode alg ) {
- Sort sort = (Sort) alg;
- final AlgTraitSet traitSet = sort.getTraitSet().replace( out ).replace( sort.getCollation() );
- return new CassandraSort( sort.getCluster(), traitSet, convert( sort.getInput(), traitSet.replace( AlgCollations.EMPTY ) ), sort.getCollation(), sort.offset, sort.fetch );
- }
-
-
- @Override
- public boolean matches( AlgOptRuleCall call ) {
- final Sort sort = call.alg( 0 );
-
- // We only deal with limit here!
-// return sort.getCollation().getFieldCollations().isEmpty();
- CassandraTable table = null;
- // getAlgList() returns a copy, so this is probably expensive!
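- // The convention-aware lookup would be CassandraUtils.getUnderlyingTable( subset, this.out );
- // while it stays commented out below, table remains null and this rule never fires.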
- if ( sort.getInput() instanceof AlgSubset ) { - AlgSubset subset = (AlgSubset) sort.getInput(); -// table = CassandraUtils.getUnderlyingTable( subset ); - } - - if ( table == null ) { - return false; - } - -// final CassandraFilter filter = call.rel( 2 ); - return collationsCompatible( sort.getCollation().getFieldCollations(), table.getClusteringOrder() ); - } - - - /** - * Check if it is possible to exploit native CQL sorting for a given collation. - * - * @return True if it is possible to achieve this sort in Cassandra - */ -// private boolean collationsCompatible( RelCollation sortCollation, RelCollation implicitCollation ) { - private boolean collationsCompatible( List sortFieldCollations, List implicitFieldCollations ) { -// List sortFieldCollations = sortCollation.getFieldCollations(); -// List implicitFieldCollations = implicitCollation.getFieldCollations(); - - if ( sortFieldCollations.size() > implicitFieldCollations.size() ) { - return false; - } - if ( sortFieldCollations.size() == 0 ) { - return true; - } - - // Check if we need to reverse the order of the implicit collation - boolean reversed = reverseDirection( sortFieldCollations.get( 0 ).getDirection() ) == implicitFieldCollations.get( 0 ).getDirection(); - - for ( int i = 0; i < sortFieldCollations.size(); i++ ) { - AlgFieldCollation sorted = sortFieldCollations.get( i ); - AlgFieldCollation implied = implicitFieldCollations.get( i ); - - // Check that the fields being sorted match - if ( sorted.getFieldIndex() != implied.getFieldIndex() ) { - return false; - } - - // Either all fields must be sorted in the same direction or the opposite direction based on whether we decided if the sort direction should be reversed above - AlgFieldCollation.Direction sortDirection = sorted.getDirection(); - AlgFieldCollation.Direction implicitDirection = implied.getDirection(); - if ( (!reversed && sortDirection != implicitDirection) || (reversed && reverseDirection( sortDirection ) != implicitDirection) ) { - return false; - } - } - - return true; - } - - - /** - * Find the reverse of a given collation direction. - * - * @return Reverse of the input direction - */ - private AlgFieldCollation.Direction reverseDirection( AlgFieldCollation.Direction direction ) { - switch ( direction ) { - case ASCENDING: - case STRICTLY_ASCENDING: - return AlgFieldCollation.Direction.DESCENDING; - case DESCENDING: - case STRICTLY_DESCENDING: - return AlgFieldCollation.Direction.ASCENDING; - default: - return null; - } - } - -} diff --git a/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/rules/CassandraTableModificationRule.java b/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/rules/CassandraTableModificationRule.java deleted file mode 100644 index 26ff07036a..0000000000 --- a/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/rules/CassandraTableModificationRule.java +++ /dev/null @@ -1,88 +0,0 @@ -/* - * Copyright 2019-2023 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.adapter.cassandra.rules; - - -import lombok.extern.slf4j.Slf4j; -import org.polypheny.db.adapter.cassandra.CassandraConvention; -import org.polypheny.db.adapter.cassandra.CassandraTable; -import org.polypheny.db.adapter.cassandra.CassandraTableModify; -import org.polypheny.db.algebra.AlgNode; -import org.polypheny.db.algebra.UnsupportedFromInsertShuttle; -import org.polypheny.db.algebra.core.Modify; -import org.polypheny.db.algebra.core.Modify.Operation; -import org.polypheny.db.plan.AlgOptRule; -import org.polypheny.db.plan.AlgOptRuleCall; -import org.polypheny.db.plan.AlgTraitSet; -import org.polypheny.db.plan.Convention; -import org.polypheny.db.schema.ModifiableTable; -import org.polypheny.db.tools.AlgBuilderFactory; - - -@Slf4j -public class CassandraTableModificationRule extends CassandraConverterRule { - - CassandraTableModificationRule( CassandraConvention out, AlgBuilderFactory algBuilderFactory ) { - super( Modify.class, CassandraTableModificationRule::supports, Convention.NONE, out, algBuilderFactory, "CassandraTableModificationRule:" + out.getName() ); - } - - - private static boolean supports( Modify modify ) { - return !modify.isInsert() || !UnsupportedFromInsertShuttle.contains( modify ); - } - - - @Override - public boolean matches( AlgOptRuleCall call ) { - final Modify modify = call.alg( 0 ); - if ( modify.getTable().unwrap( CassandraTable.class ) == null ) { - return false; - } - - if ( !modify.getTable().unwrap( CassandraTable.class ).getUnderlyingConvention().equals( this.out ) ) { - return false; - } - return modify.getOperation() != Operation.MERGE; - } - - - @Override - public AlgNode convert( AlgNode alg ) { - final Modify modify = (Modify) alg; - log.debug( "Converting to a {} CassandraTableModify", ((Modify) alg).getOperation() ); - final ModifiableTable modifiableTable = modify.getTable().unwrap( ModifiableTable.class ); - if ( modifiableTable == null ) { - return null; - } - if ( modify.getTable().unwrap( CassandraTable.class ) == null ) { - return null; - } - final AlgTraitSet traitSet = modify.getTraitSet().replace( out ); - return new CassandraTableModify( - modify.getCluster(), - traitSet, - modify.getTable(), - modify.getCatalogReader(), - AlgOptRule.convert( modify.getInput(), traitSet ), - modify.getOperation(), - modify.getUpdateColumnList(), - modify.getSourceExpressionList(), - modify.isFlattened() - ); - } - -} diff --git a/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/rules/CassandraToEnumerableConverterRule.java b/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/rules/CassandraToEnumerableConverterRule.java deleted file mode 100644 index 2ef5ed4506..0000000000 --- a/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/rules/CassandraToEnumerableConverterRule.java +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Copyright 2019-2023 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.adapter.cassandra.rules; - - -import java.util.function.Predicate; -import org.polypheny.db.adapter.cassandra.CassandraConvention; -import org.polypheny.db.adapter.cassandra.CassandraToEnumerableConverter; -import org.polypheny.db.adapter.enumerable.EnumerableConvention; -import org.polypheny.db.algebra.AlgNode; -import org.polypheny.db.algebra.convert.ConverterRule; -import org.polypheny.db.plan.AlgTraitSet; -import org.polypheny.db.tools.AlgBuilderFactory; - - -/** - * Rule to convert a relational expression from {@link CassandraConvention} to {@link EnumerableConvention}. - */ -public class CassandraToEnumerableConverterRule extends ConverterRule { - - - /** - * Creates a CassandraToEnumerableConverterRule. - * - * @param algBuilderFactory Builder for relational expressions - */ - public CassandraToEnumerableConverterRule( CassandraConvention in, AlgBuilderFactory algBuilderFactory ) { - super( AlgNode.class, (Predicate) r -> true, in, EnumerableConvention.INSTANCE, algBuilderFactory, "CassandraToEnumerableConverterRule:" + in.getName() ); - } - - - @Override - public AlgNode convert( AlgNode alg ) { - AlgTraitSet newTraitSet = alg.getTraitSet().replace( getOutTrait() ); - return new CassandraToEnumerableConverter( alg.getCluster(), newTraitSet, alg ); - } - -} - diff --git a/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/rules/CassandraValuesRule.java b/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/rules/CassandraValuesRule.java deleted file mode 100644 index 93cd38f683..0000000000 --- a/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/rules/CassandraValuesRule.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Copyright 2019-2023 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.polypheny.db.adapter.cassandra.rules; - - -import org.polypheny.db.adapter.cassandra.CassandraConvention; -import org.polypheny.db.adapter.cassandra.CassandraValues; -import org.polypheny.db.algebra.AlgNode; -import org.polypheny.db.algebra.core.Values; -import org.polypheny.db.plan.Convention; -import org.polypheny.db.tools.AlgBuilderFactory; - - -public class CassandraValuesRule extends CassandraConverterRule { - - CassandraValuesRule( CassandraConvention out, AlgBuilderFactory algBuilderFactory ) { - super( Values.class, r -> true, Convention.NONE, out, algBuilderFactory, "CassandraValuesRule:" + out.getName() ); - } - - - @Override - public AlgNode convert( AlgNode alg ) { - Values values = (Values) alg; - return new CassandraValues( - values.getCluster(), - values.getRowType(), - values.getTuples(), - values.getTraitSet().replace( out ) ); - } - -} diff --git a/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/util/CassandraTypesUtils.java b/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/util/CassandraTypesUtils.java deleted file mode 100644 index 197f725366..0000000000 --- a/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/util/CassandraTypesUtils.java +++ /dev/null @@ -1,499 +0,0 @@ -/* - * Copyright 2019-2023 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.adapter.cassandra.util; - - -import com.datastax.oss.driver.api.core.data.CqlDuration; -import com.datastax.oss.driver.api.core.data.UdtValue; -import com.datastax.oss.driver.api.core.type.DataType; -import com.datastax.oss.driver.api.core.type.DataTypes; -import com.datastax.oss.driver.api.core.type.UserDefinedType; -import com.google.gson.Gson; -import com.google.gson.GsonBuilder; -import java.lang.reflect.Type; -import java.util.ArrayList; -import java.util.List; -import java.util.function.Function; -import lombok.extern.slf4j.Slf4j; -import org.polypheny.db.adapter.cassandra.CassandraValues; -import org.polypheny.db.rex.RexCall; -import org.polypheny.db.rex.RexLiteral; -import org.polypheny.db.rex.RexNode; -import org.polypheny.db.type.PolyType; -import org.polypheny.db.type.PolyTypeUtil; - - -@Slf4j -public class CassandraTypesUtils { - - private static final Gson GSON; - - - static { - GsonBuilder gsonBuilder = new GsonBuilder().registerTypeAdapter( PolyType.class, PolyType.getSerializer() ); - GSON = gsonBuilder.create(); - } - - - public static DataType getDataType( PolyType polyType, UserDefinedType arrayContainerUdt ) { - switch ( polyType ) { - case BOOLEAN: - return DataTypes.BOOLEAN; - case TINYINT: - return DataTypes.TINYINT; - case SMALLINT: - return DataTypes.SMALLINT; - case INTEGER: - return DataTypes.INT; - case BIGINT: - return DataTypes.BIGINT; - case DECIMAL: - return DataTypes.DECIMAL; - case FLOAT: - case REAL: - // TODO: What to return for real? 
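- // Cassandra has no REAL type; FLOAT is the closest match and is used for both (cf. CassandraTypesUtilsTest#doubleTest).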
- return DataTypes.FLOAT; - case DOUBLE: - return DataTypes.DOUBLE; - case DATE: - return DataTypes.DATE; - case TIME: - case TIME_WITH_LOCAL_TIME_ZONE: - return DataTypes.TIME; - case TIMESTAMP: - case TIMESTAMP_WITH_LOCAL_TIME_ZONE: - return DataTypes.TIMESTAMP; - case INTERVAL_YEAR: - case INTERVAL_YEAR_MONTH: - case INTERVAL_MONTH: - case INTERVAL_DAY: - case INTERVAL_DAY_HOUR: - case INTERVAL_DAY_MINUTE: - case INTERVAL_DAY_SECOND: - case INTERVAL_HOUR: - case INTERVAL_HOUR_MINUTE: - case INTERVAL_HOUR_SECOND: - case INTERVAL_MINUTE: - case INTERVAL_MINUTE_SECOND: - case INTERVAL_SECOND: - throw new RuntimeException( "Intervals are WIP." ); -// return DataTypes.DURATION; - case CHAR: - // TODO: What to return for char? - case VARCHAR: - case JSON: - return DataTypes.TEXT; - case BINARY: - case VARBINARY: - return DataTypes.BLOB; - case ARRAY: - return arrayContainerUdt; - case NULL: - case ANY: - case SYMBOL: - case MULTISET: - case MAP: - case DISTINCT: - case STRUCTURED: - case ROW: - case OTHER: - case CURSOR: - case COLUMN_LIST: - case DYNAMIC_STAR: - case GEOMETRY: - break; - } - - throw new RuntimeException( "Unable to convert sql type: " + polyType.getName() ); - } - - - public static PolyType getPolyType( DataType dataType ) { - - if ( dataType == DataTypes.UUID || dataType == DataTypes.TIMEUUID ) { - return PolyType.CHAR; - } else if ( dataType == DataTypes.ASCII || dataType == DataTypes.TEXT ) { - return PolyType.VARCHAR; - } else if ( dataType == DataTypes.TINYINT ) { - return PolyType.TINYINT; - } else if ( dataType == DataTypes.SMALLINT ) { - return PolyType.SMALLINT; - } else if ( dataType == DataTypes.INT || dataType == DataTypes.VARINT ) { - return PolyType.INTEGER; - } else if ( dataType == DataTypes.BIGINT ) { - return PolyType.BIGINT; - } else if ( dataType == DataTypes.DOUBLE ) { - return PolyType.DOUBLE; - } else if ( dataType == DataTypes.FLOAT ) { - // TODO JS: Float vs real? - return PolyType.FLOAT; - } else if ( dataType == DataTypes.DECIMAL ) { - return PolyType.DECIMAL; - } else if ( dataType == DataTypes.TIME ) { - return PolyType.TIME; - } else if ( dataType == DataTypes.DATE ) { - return PolyType.DATE; - } else if ( dataType == DataTypes.TIMESTAMP ) { - return PolyType.TIMESTAMP; - } else if ( dataType == DataTypes.BLOB ) { - return PolyType.VARBINARY; - } else if ( dataType == DataTypes.BOOLEAN ) { - return PolyType.BOOLEAN; - } else if ( dataType instanceof UserDefinedType ) { - return PolyType.ARRAY; - } else { - log.warn( "Unable to find type for cql type: {}. 
Returning ANY.", dataType ); - return PolyType.ANY; - } - } - - - public static Class getJavaType( DataType dataType ) { - - if ( dataType == DataTypes.ASCII ) { - return String.class; - } else if ( dataType == DataTypes.BIGINT ) { - return Long.class; - } else if ( dataType == DataTypes.BLOB ) { - return java.nio.ByteBuffer.class; - } else if ( dataType == DataTypes.BOOLEAN ) { - return Boolean.class; - } else if ( dataType == DataTypes.COUNTER ) { - return Long.class; - } else if ( dataType == DataTypes.DATE ) { - return java.time.LocalDate.class; - } else if ( dataType == DataTypes.DECIMAL ) { - return java.math.BigDecimal.class; - } else if ( dataType == DataTypes.DOUBLE ) { - return Double.class; - } else if ( dataType == DataTypes.DURATION ) { - return CqlDuration.class; - } else if ( dataType == DataTypes.FLOAT ) { - return Float.class; - } else if ( dataType == DataTypes.INET ) { - return java.net.InetAddress.class; - } else if ( dataType == DataTypes.INT ) { - return Integer.class; - } else if ( dataType == DataTypes.SMALLINT ) { - return Short.class; - } else if ( dataType == DataTypes.TEXT ) { - return String.class; - } else if ( dataType == DataTypes.TIME ) { - return java.time.LocalTime.class; - } else if ( dataType == DataTypes.TIMESTAMP ) { - return java.time.Instant.class; - } else if ( dataType == DataTypes.TIMEUUID ) { - return java.util.UUID.class; - } else if ( dataType == DataTypes.TINYINT ) { - return Byte.class; - } else if ( dataType == DataTypes.UUID ) { - return java.util.UUID.class; - } else if ( dataType == DataTypes.VARINT ) { - return java.math.BigInteger.class; - } else { - log.warn( "Unable to find type for cql type: {}. Returning ANY.", dataType ); - return Object.class; - } - } - - - static boolean canCastInternally( PolyType to, PolyType from ) { - switch ( from ) { - case BOOLEAN: - switch ( to ) { - case CHAR: - case VARCHAR: - return true; - default: - return false; - } - case TINYINT: - case SMALLINT: - case INTEGER: - case BIGINT: - case DECIMAL: - case FLOAT: - case REAL: - case DOUBLE: - switch ( to ) { - case TINYINT: - case SMALLINT: - case INTEGER: - case BIGINT: - case FLOAT: - case DOUBLE: - case DECIMAL: - case CHAR: - case VARCHAR: - return true; - default: - return false; - } - case DATE: - break; - case TIME: - break; - case TIME_WITH_LOCAL_TIME_ZONE: - break; - case TIMESTAMP: - break; - case TIMESTAMP_WITH_LOCAL_TIME_ZONE: - break; - case INTERVAL_YEAR: - break; - case INTERVAL_YEAR_MONTH: - break; - case INTERVAL_MONTH: - break; - case INTERVAL_DAY: - break; - case INTERVAL_DAY_HOUR: - break; - case INTERVAL_DAY_MINUTE: - break; - case INTERVAL_DAY_SECOND: - break; - case INTERVAL_HOUR: - break; - case INTERVAL_HOUR_MINUTE: - break; - case INTERVAL_HOUR_SECOND: - break; - case INTERVAL_MINUTE: - break; - case INTERVAL_MINUTE_SECOND: - break; - case INTERVAL_SECOND: - break; - case CHAR: - break; - case VARCHAR: - break; - case BINARY: - break; - case VARBINARY: - break; - case NULL: - break; - case ANY: - break; - case SYMBOL: - break; - case MULTISET: - break; - case ARRAY: - break; - case MAP: - break; - case DISTINCT: - break; - case STRUCTURED: - break; - case ROW: - break; - case OTHER: - break; - case CURSOR: - break; - case COLUMN_LIST: - break; - case DYNAMIC_STAR: - break; - case GEOMETRY: - break; - } - - return false; - } - - - public static Function convertToFrom( PolyType to, PolyType from ) { - Function f = null; - - if ( to == from ) { - return Function.identity(); - } - - switch ( from ) { - case BOOLEAN: - switch ( to ) 
{
- case BOOLEAN:
- f = Function.identity();
- break;
- case INTEGER:
- f = boolIn -> (((Boolean) boolIn) ? 1 : 0);
- break;
- case CHAR:
- case VARCHAR:
- f = boolIn -> (((Boolean) boolIn) ? "true" : "false");
- break;
- }
- break;
- case TINYINT:
- case SMALLINT:
- case INTEGER:
- case BIGINT:
- switch ( to ) {
- case BOOLEAN:
- f = intInt -> (((Integer) intInt) != 0);
- break;
- case FLOAT:
- case DOUBLE:
- f = intInt -> (((Integer) intInt).doubleValue());
- break;
- case CHAR:
- case VARCHAR:
- f = intInt -> (((Integer) intInt).toString());
- break;
- }
- break;
- case DECIMAL:
- break;
- case FLOAT:
- case REAL:
- case DOUBLE:
- switch ( to ) {
- case TINYINT:
- case SMALLINT:
- case INTEGER:
- case BIGINT:
- f = doubleVal -> (((Double) doubleVal).intValue());
- break;
- case CHAR:
- case VARCHAR:
- f = doubleVal -> (((Double) doubleVal).toString());
- break;
- }
- break;
- case DATE:
- break;
- case TIME:
- case TIME_WITH_LOCAL_TIME_ZONE:
- case TIMESTAMP:
- case TIMESTAMP_WITH_LOCAL_TIME_ZONE:
- break;
- case INTERVAL_YEAR:
- case INTERVAL_YEAR_MONTH:
- case INTERVAL_MONTH:
- case INTERVAL_DAY:
- case INTERVAL_DAY_HOUR:
- case INTERVAL_DAY_MINUTE:
- case INTERVAL_DAY_SECOND:
- case INTERVAL_HOUR:
- case INTERVAL_HOUR_MINUTE:
- case INTERVAL_HOUR_SECOND:
- case INTERVAL_MINUTE:
- case INTERVAL_MINUTE_SECOND:
- case INTERVAL_SECOND:
- break;
- case CHAR:
- case VARCHAR:
- switch ( to ) {
- case BOOLEAN:
- f = stringVal -> {
- String string = (String) stringVal;
- if ( "true".equalsIgnoreCase( string ) ) {
- return true;
- } else if ( "false".equalsIgnoreCase( string ) ) {
- return false;
- } else {
- throw new IllegalArgumentException( "Unable to convert string \"" + string + "\" to boolean." );
- }
- };
- break;
- case TINYINT:
- case SMALLINT:
- case INTEGER:
- case BIGINT:
- f = stringVal -> Integer.valueOf( (String) stringVal );
- break;
- case FLOAT:
- case DOUBLE:
- f = stringVal -> Double.parseDouble( (String) stringVal );
- break;
- }
- break;
- case BINARY:
- case VARBINARY:
- break;
- case ANY:
- case SYMBOL:
- case MULTISET:
- case ARRAY:
- case MAP:
- case DISTINCT:
- case STRUCTURED:
- case ROW:
- case OTHER:
- case CURSOR:
- case COLUMN_LIST:
- case DYNAMIC_STAR:
- case GEOMETRY:
- break;
- }
-
- if ( f != null ) {
- return f;
- } else {
- throw new RuntimeException( "Unable to cast from " + from.getName() + " to " + to.getName() + "."
); - } - } - - - public static UdtValue createArrayContainerDataType( UserDefinedType arrayUdt, int dimension, int cardinality, PolyType innerType, RexCall arrayCall ) { - return arrayUdt.newValue() - .setString( 0, innerType.getTypeName() ) -// .setString( 0, GSON.toJson( innerType, PolyType.class ) ) - .setInt( 1, dimension ) - .setInt( 2, cardinality ) - .setString( 3, GSON.toJson( createListForArrays( arrayCall.operands ) ) ); - } - - - public static List unparseArrayContainerUdt( UdtValue arrayContainer ) { - if ( arrayContainer == null ) { - return null; - } - - PolyType innerType = GSON.fromJson( arrayContainer.getString( "innertype" ), PolyType.class ); - long dimension = (long) arrayContainer.getInt( "dimension" ); - int cardinality = arrayContainer.getInt( "cardinality" ); - Type conversionType = PolyTypeUtil.createNestedListType( dimension, innerType ); - String stringValue = arrayContainer.getString( "data" ); - if ( stringValue == null ) { - return null; - } - return GSON.fromJson( stringValue.trim(), conversionType ); - } - - - private static List createListForArrays( List operands ) { - List list = new ArrayList<>( operands.size() ); - for ( RexNode node : operands ) { - if ( node instanceof RexLiteral ) { - Object value = CassandraValues.literalValue( (RexLiteral) node ); - list.add( value ); - } else if ( node instanceof RexCall ) { - list.add( createListForArrays( ((RexCall) node).operands ) ); - } else { - throw new RuntimeException( "Invalid array" ); - } - } - return list; - } - -} diff --git a/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/util/CassandraUtils.java b/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/util/CassandraUtils.java deleted file mode 100644 index 81eabe935e..0000000000 --- a/plugins/cassandra-adapter/src/main/java/org/polypheny/db/adapter/cassandra/util/CassandraUtils.java +++ /dev/null @@ -1,99 +0,0 @@ -/* - * Copyright 2019-2023 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.adapter.cassandra.util; - - -import java.util.Deque; -import java.util.HashSet; -import java.util.LinkedList; -import java.util.List; -import java.util.Set; -import org.polypheny.db.adapter.cassandra.CassandraFilter; -import org.polypheny.db.adapter.cassandra.CassandraScan; -import org.polypheny.db.adapter.cassandra.CassandraTable; -import org.polypheny.db.adapter.cassandra.CassandraTableModify; -import org.polypheny.db.algebra.AlgNode; -import org.polypheny.db.plan.Convention; -import org.polypheny.db.plan.volcano.AlgSubset; - - -public class CassandraUtils { - - /** - * Finds the underlying {@link CassandraTable} of the subset. - * - * @param algSubset the subset. - * @return the {@link CassandraTable} or null if not found. 
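- * @param targetConvention the convention the underlying scan or modify has to match.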
- */ - public static CassandraTable getUnderlyingTable( AlgSubset algSubset, Convention targetConvention ) { - return getUnderlyingTable( algSubset.getAlgList(), targetConvention ); - } - - - private static CassandraTable getUnderlyingTable( List algs, Convention targetConvention ) { - Set alreadyChecked = new HashSet<>(); - Deque innerLevel = new LinkedList<>(); - - innerLevel.addAll( algs ); - - while ( !innerLevel.isEmpty() ) { - AlgNode algNode = innerLevel.pop(); - alreadyChecked.add( algNode ); - if ( algNode instanceof CassandraScan ) { - if ( algNode.getConvention().equals( targetConvention ) ) { - return ((CassandraScan) algNode).cassandraTable; - } - } else if ( algNode instanceof CassandraTableModify ) { - if ( algNode.getConvention().equals( targetConvention ) ) { - return ((CassandraTableModify) algNode).cassandraTable; - } - } else { - for ( AlgNode innerNode : algNode.getInputs() ) { - if ( innerNode instanceof AlgSubset ) { - for ( AlgNode possibleNewRel : ((AlgSubset) innerNode).getAlgList() ) { - if ( !alreadyChecked.contains( possibleNewRel ) ) { - innerLevel.addLast( possibleNewRel ); - } - } - } - } - } - } - - return null; - } - - - /** - * Finds the underlying {@link CassandraFilter} of the subset. - * - * @param algSubset the subset. - * @return the {@link CassandraFilter} or null if not found. - */ - public static CassandraFilter getUnderlyingFilter( AlgSubset algSubset ) { - List algs = algSubset.getAlgList(); - for ( AlgNode algNode : algs ) { - if ( algNode instanceof CassandraFilter ) { - return (CassandraFilter) algNode; - } - } - - return null; - } - - -} diff --git a/plugins/cassandra-adapter/src/test/java/org/polypheny/db/adapter/cassandra/util/CassandraTypesUtilsTest.java b/plugins/cassandra-adapter/src/test/java/org/polypheny/db/adapter/cassandra/util/CassandraTypesUtilsTest.java deleted file mode 100644 index 64ac1cdea7..0000000000 --- a/plugins/cassandra-adapter/src/test/java/org/polypheny/db/adapter/cassandra/util/CassandraTypesUtilsTest.java +++ /dev/null @@ -1,182 +0,0 @@ -/* - * Copyright 2019-2023 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.polypheny.db.adapter.cassandra.util; - - -import static org.junit.Assert.assertEquals; -import static org.polypheny.db.adapter.cassandra.util.CassandraTypesUtils.getDataType; -import static org.polypheny.db.adapter.cassandra.util.CassandraTypesUtils.getPolyType; - -import com.datastax.oss.driver.api.core.type.DataTypes; -import org.junit.Test; -import org.polypheny.db.type.PolyType; - - -public class CassandraTypesUtilsTest { - - { - // Booleans - - // Integers - - // Floating points - - // Date - - // Time - - // Timestamp - - // Intervals - - // Char - - // Binary - } - - - @Test - public void simplePolyTypeToCassandraType() { - // Booleans - assertEquals( DataTypes.BOOLEAN, getDataType( PolyType.BOOLEAN, null ) ); - - // Integers - assertEquals( DataTypes.TINYINT, getDataType( PolyType.TINYINT, null ) ); - assertEquals( DataTypes.SMALLINT, getDataType( PolyType.SMALLINT, null ) ); - assertEquals( DataTypes.INT, getDataType( PolyType.INTEGER, null ) ); - assertEquals( DataTypes.BIGINT, getDataType( PolyType.BIGINT, null ) ); - - // Floating points - assertEquals( DataTypes.FLOAT, getDataType( PolyType.FLOAT, null ) ); - assertEquals( DataTypes.DOUBLE, getDataType( PolyType.DOUBLE, null ) ); -// assertEquals( DataTypes., getDataType( PolyType.REAL ) ); - assertEquals( DataTypes.DECIMAL, getDataType( PolyType.DECIMAL, null ) ); - - // Date - assertEquals( DataTypes.DATE, getDataType( PolyType.DATE, null ) ); - - // Time - assertEquals( DataTypes.TIME, getDataType( PolyType.TIME, null ) ); - assertEquals( DataTypes.TIME, getDataType( PolyType.TIME_WITH_LOCAL_TIME_ZONE, null ) ); - - // Timestamp - assertEquals( DataTypes.TIMESTAMP, getDataType( PolyType.TIMESTAMP, null ) ); - assertEquals( DataTypes.TIMESTAMP, getDataType( PolyType.TIMESTAMP_WITH_LOCAL_TIME_ZONE, null ) ); - - // Intervals - - // Char - assertEquals( DataTypes.TEXT, getDataType( PolyType.CHAR, null ) ); - assertEquals( DataTypes.TEXT, getDataType( PolyType.VARCHAR, null ) ); - - // Binary - assertEquals( DataTypes.BLOB, getDataType( PolyType.BINARY, null ) ); - assertEquals( DataTypes.BLOB, getDataType( PolyType.VARBINARY, null ) ); - - } - - - @Test - public void name() { - // Booleans - assertEquals( PolyType.BOOLEAN, getPolyType( DataTypes.BOOLEAN ) ); - - // Integers - assertEquals( PolyType.TINYINT, getPolyType( DataTypes.TINYINT ) ); - assertEquals( PolyType.SMALLINT, getPolyType( DataTypes.SMALLINT ) ); - assertEquals( PolyType.INTEGER, getPolyType( DataTypes.INT ) ); - assertEquals( PolyType.BIGINT, getPolyType( DataTypes.BIGINT ) ); - - // Floating points - assertEquals( PolyType.FLOAT, getPolyType( DataTypes.FLOAT ) ); - assertEquals( PolyType.DOUBLE, getPolyType( DataTypes.DOUBLE ) ); -// assertEquals( PolyType.BOOLEAN, getPolyType( DataTypes.BOOLEAN ) ); - assertEquals( PolyType.DECIMAL, getPolyType( DataTypes.DECIMAL ) ); - - // Date - assertEquals( PolyType.DATE, getPolyType( DataTypes.DATE ) ); -// assertEquals( PolyType.BOOLEAN, getPolyType( DataTypes.D ) ); - - // Time - assertEquals( PolyType.TIME, getPolyType( DataTypes.TIME ) ); -// assertEquals( PolyType.BOOLEAN, getPolyType( DataTypes.BOOLEAN ) ); - - // Timestamp - assertEquals( PolyType.TIMESTAMP, getPolyType( DataTypes.TIMESTAMP ) ); -// assertEquals( PolyType.BOOLEAN, getPolyType( DataTypes.BOOLEAN ) ); - - // Intervals - - // Char - assertEquals( PolyType.VARCHAR, getPolyType( DataTypes.TEXT ) ); -// assertEquals( PolyType.BOOLEAN, getPolyType( DataTypes.BOOLEAN ) ); - - // Binary - assertEquals( PolyType.VARBINARY, 
getPolyType( DataTypes.BLOB ) ); -// assertEquals( PolyType.BOOLEAN, getPolyType( DataTypes.BOOLEAN ) ); - } - - - @Test - public void doubleTest() { - // Booleans - assertEquals( PolyType.BOOLEAN, getPolyType( getDataType( PolyType.BOOLEAN, null ) ) ); - - // Integers - assertEquals( PolyType.TINYINT, getPolyType( getDataType( PolyType.TINYINT, null ) ) ); - assertEquals( PolyType.SMALLINT, getPolyType( getDataType( PolyType.SMALLINT, null ) ) ); - assertEquals( PolyType.INTEGER, getPolyType( getDataType( PolyType.INTEGER, null ) ) ); - assertEquals( PolyType.BIGINT, getPolyType( getDataType( PolyType.BIGINT, null ) ) ); - - // Floating points - assertEquals( PolyType.FLOAT, getPolyType( getDataType( PolyType.FLOAT, null ) ) ); - assertEquals( PolyType.DOUBLE, getPolyType( getDataType( PolyType.DOUBLE, null ) ) ); - // There is no REAL type in cassandra, so we use FLOAT instead. - assertEquals( PolyType.FLOAT, getPolyType( getDataType( PolyType.REAL, null ) ) ); - assertEquals( PolyType.DECIMAL, getPolyType( getDataType( PolyType.DECIMAL, null ) ) ); - - // Date - assertEquals( PolyType.DATE, getPolyType( getDataType( PolyType.DATE, null ) ) ); - - // Time - assertEquals( PolyType.TIME, getPolyType( getDataType( PolyType.TIME, null ) ) ); - // Cassandra has no TIME with timezone support, so we just use TIME - assertEquals( PolyType.TIME, getPolyType( getDataType( PolyType.TIME_WITH_LOCAL_TIME_ZONE, null ) ) ); - - // Timestamp - assertEquals( PolyType.TIMESTAMP, getPolyType( getDataType( PolyType.TIMESTAMP, null ) ) ); - // Cassandra has no TIMESTAMP with local timezone support, so we just use TIMESTAMP - assertEquals( PolyType.TIMESTAMP, getPolyType( getDataType( PolyType.TIMESTAMP_WITH_LOCAL_TIME_ZONE, null ) ) ); - - // FIXME: Intervals - - // Char - // No char support, both char and varchar are represented as text in cassandra. - assertEquals( PolyType.VARCHAR, getPolyType( getDataType( PolyType.CHAR, null ) ) ); - assertEquals( PolyType.VARCHAR, getPolyType( getDataType( PolyType.VARCHAR, null ) ) ); - - // Binary - // No binary or varbinary support, only blob, so we use it for both - assertEquals( PolyType.VARBINARY, getPolyType( getDataType( PolyType.BINARY, null ) ) ); - assertEquals( PolyType.VARBINARY, getPolyType( getDataType( PolyType.VARBINARY, null ) ) ); - -// assertEquals( PolyType.TIME, getPolyType( getDataType( PolyType.TIME ) ) ); -// assertEquals( PolyType.TIME, getPolyType( getDataType( PolyType.TIME ) ) ); - - } - -} diff --git a/plugins/cassandra-adapter/src/test/java/org/polypheny/db/test/CassandraAdapterTest.java b/plugins/cassandra-adapter/src/test/java/org/polypheny/db/test/CassandraAdapterTest.java deleted file mode 100644 index 95ba5521c3..0000000000 --- a/plugins/cassandra-adapter/src/test/java/org/polypheny/db/test/CassandraAdapterTest.java +++ /dev/null @@ -1,215 +0,0 @@ -/* - * Copyright 2019-2023 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- *
- * This file incorporates code covered by the following terms:
- *
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to you under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.polypheny.db.test;
-
-
-import com.google.common.collect.ImmutableMap;
-import org.junit.Ignore;
-import org.polypheny.db.util.Bug;
-import org.polypheny.db.util.Sources;
-import org.polypheny.db.util.TestUtil;
-import org.polypheny.db.util.Util;
-
-
-/**
- * Tests for the {@code org.polypheny.db.adapter.cassandra} package.
- *
- * Will start an embedded Cassandra cluster and populate it from the local {@code twissandra.cql} file. All configuration files are located in the test classpath.
- *
- * Note that tests will be skipped if running on JDK11 and JDK12 (which are not yet supported by Cassandra); see
- * CASSANDRA-9608.
- */
-@Ignore
-public class CassandraAdapterTest {
- // TODO MV: enable
- /*
- @ClassRule
- public static final ExternalResource RULE = initCassandraIfEnabled();
-*/
- /**
- * Connection factory based on the "twissandra" model.
- */
- private static final ImmutableMap<String, String> TWISSANDRA = ImmutableMap.of( "model", Sources.of( CassandraAdapterTest.class.getResource( "/model.json" ) ).file().getAbsolutePath() );
-
-
- /**
- * Whether to run this test.
- * Enabled by default, unless explicitly disabled from the command line ({@code -Dpolyphenydb.test.cassandra=false}) or running on an incompatible JDK version (see below).
- *
- * As of this writing Cassandra 4.x is not yet released, and we're using 3.x (which fails on JDK11 and JDK12). All Cassandra tests will be skipped if running on JDK11 and JDK12.
- * - * @return {@code true} if test is compatible with current environment, {@code false} otherwise - * @see CASSANDRA-9608 - */ - private static boolean enabled() { - final boolean enabled = Util.getBooleanProperty( "polyphenydb.test.cassandra", true ); - Bug.upgrade( "remove JDK version check once current adapter supports Cassandra 4.x" ); - final boolean compatibleJdk = TestUtil.getJavaMajorVersion() != 11 && TestUtil.getJavaMajorVersion() != 12; - return enabled && compatibleJdk; - } - - // TODO MV: enable -/* - private static ExternalResource initCassandraIfEnabled() { - if ( !enabled() ) { - // Return NOP resource (to avoid nulls) - return new ExternalResource() { - @Override - public Statement apply( final Statement base, final Description description ) { - return super.apply( base, description ); - } - }; - } - - String configurationFileName = "cassandra.yaml"; // use default one - // Apache Jenkins often fails with "CassandraAdapterTest Cassandra daemon did not start within timeout (20 sec by default)" - long startUpTimeoutMillis = TimeUnit.SECONDS.toMillis( 60 ); - - CassandraCQLUnit rule = new CassandraCQLUnit( new ClassPathCQLDataSet( "twissandra.cql" ), configurationFileName, startUpTimeoutMillis ); - - // This static init is necessary otherwise tests fail with CassandraUnit in IntelliJ (jdk10) should be called right after constructor - // NullPointerException for DatabaseDescriptor.getDiskFailurePolicy - // for more info see - // https://github.com/jsevellec/cassandra-unit/issues/249 - // https://github.com/jsevellec/cassandra-unit/issues/221 - DatabaseDescriptor.daemonInitialization(); - - return rule; - } - - - @BeforeClass - public static void setUp() { - // run tests only if explicitly enabled - assumeTrue( "test explicitly disabled", enabled() ); - } - - - @Test - public void testSelect() { - PolyphenyDbAssert.that() - .with( TWISSANDRA ) - .query( "select * from \"users\"" ) - .returnsCount( 10 ); - } - - - @Test - public void testFilter() { - PolyphenyDbAssert.that() - .with( TWISSANDRA ) - .query( "select * from \"userline\" where \"username\"='!PUBLIC!'" ) - .limit( 1 ) - .returns( "username=!PUBLIC!; time=e8754000-80b8-1fe9-8e73-e3698c967ddd; " + "tweet_id=f3c329de-d05b-11e5-b58b-90e2ba530b12\n" ) - .explainContains( "PLAN=CassandraToEnumerableConverter\n" + " CassandraFilter(condition=[=($0, '!PUBLIC!')])\n" + " CassandraScan(table=[[twissandra, userline]]" ); - } - - - @Test - public void testFilterUUID() { - PolyphenyDbAssert.that() - .with( TWISSANDRA ) - .query( "select * from \"tweets\" where \"tweet_id\"='f3cd759c-d05b-11e5-b58b-90e2ba530b12'" ) - .limit( 1 ) - .returns( "tweet_id=f3cd759c-d05b-11e5-b58b-90e2ba530b12; " + "body=Lacus augue pede posuere.; username=JmuhsAaMdw\n" ) - .explainContains( "PLAN=CassandraToEnumerableConverter\n" + " CassandraFilter(condition=[=(CAST($0):CHAR(36), 'f3cd759c-d05b-11e5-b58b-90e2ba530b12')])\n" + " CassandraScan(table=[[twissandra, tweets]]" ); - } - - - @Test - public void testSort() { - PolyphenyDbAssert.that() - .with( TWISSANDRA ) - .query( "select * from \"userline\" where \"username\" = '!PUBLIC!' order by \"time\" desc" ) - .returnsCount( 146 ) - .explainContains( "PLAN=CassandraToEnumerableConverter\n" + " CassandraSort(sort0=[$1], dir0=[DESC])\n" + " CassandraFilter(condition=[=($0, '!PUBLIC!')])\n" ); - } - - - @Test - public void testProject() { - PolyphenyDbAssert.that() - .with( TWISSANDRA ) - .query( "select \"tweet_id\" from \"userline\" where \"username\" = '!PUBLIC!' 
limit 2" ) - .returns( "tweet_id=f3c329de-d05b-11e5-b58b-90e2ba530b12\n" + "tweet_id=f3dbb03a-d05b-11e5-b58b-90e2ba530b12\n" ) - .explainContains( "PLAN=CassandraToEnumerableConverter\n" + " CassandraLimit(fetch=[2])\n" + " CassandraProject(tweet_id=[$2])\n" + " CassandraFilter(condition=[=($0, '!PUBLIC!')])\n" ); - } - - - @Test - public void testProjectAlias() { - PolyphenyDbAssert.that() - .with( TWISSANDRA ) - .query( "select \"tweet_id\" as \"foo\" from \"userline\" " + "where \"username\" = '!PUBLIC!' limit 1" ) - .returns( "foo=f3c329de-d05b-11e5-b58b-90e2ba530b12\n" ); - } - - - @Test - public void testProjectConstant() { - PolyphenyDbAssert.that() - .with( TWISSANDRA ) - .query( "select 'foo' as \"bar\" from \"userline\" limit 1" ) - .returns( "bar=foo\n" ); - } - - - @Test - public void testLimit() { - PolyphenyDbAssert.that() - .with( TWISSANDRA ) - .query( "select \"tweet_id\" from \"userline\" where \"username\" = '!PUBLIC!' limit 8" ) - .explainContains( "CassandraLimit(fetch=[8])\n" ); - } - - - @Test - public void testSortLimit() { - PolyphenyDbAssert.that() - .with( TWISSANDRA ) - .query( "select * from \"userline\" where \"username\"='!PUBLIC!' " + "order by \"time\" desc limit 10" ) - .explainContains( " CassandraLimit(fetch=[10])\n" + " CassandraSort(sort0=[$1], dir0=[DESC])" ); - } - - - @Test - public void testSortOffset() { - PolyphenyDbAssert.that() - .with( TWISSANDRA ) - .query( "select \"tweet_id\" from \"userline\" where " + "\"username\"='!PUBLIC!' limit 2 offset 1" ) - .explainContains( "CassandraLimit(offset=[1], fetch=[2])" ) - .returns( "tweet_id=f3dbb03a-d05b-11e5-b58b-90e2ba530b12\n" + "tweet_id=f3e4182e-d05b-11e5-b58b-90e2ba530b12\n" ); - } -*/ -} \ No newline at end of file diff --git a/plugins/cassandra-adapter/src/test/resources/cassandra.yaml b/plugins/cassandra-adapter/src/test/resources/cassandra.yaml deleted file mode 100644 index 69bbc8696d..0000000000 --- a/plugins/cassandra-adapter/src/test/resources/cassandra.yaml +++ /dev/null @@ -1,586 +0,0 @@ -# Cassandra storage config YAML - -# The name of the cluster. This is mainly used to prevent machines in -# one logical cluster from joining another. -cluster_name: 'Test Cluster' - -# You should always specify InitialToken when setting up a production -# cluster for the first time, and often when adding capacity later. -# The principle is that each node should be given an equal slice of -# the token ring; see http://wiki.apache.org/cassandra/Operations -# for more details. -# -# If blank, Cassandra will request a token bisecting the range of -# the heaviest-loaded existing node. If there is no load information -# available, such as is the case with a new cluster, it will pick -# a random token, which will lead to hot spots. -#initial_token: - -# See http://wiki.apache.org/cassandra/HintedHandoff -hinted_handoff_enabled: true -# this defines the maximum amount of time a dead host will have hints -# generated. After it has been dead this long, new hints for it will not be -# created until it has been seen alive and gone down again. -max_hint_window_in_ms: 10800000 # 3 hours -# Maximum throttle in KBs per second, per delivery thread. This will be -# reduced proportionally to the number of nodes in the cluster. (If there -# are two nodes in the cluster, each delivery thread will use the maximum -# rate; if there are three, each will throttle to half of the maximum, -# since we expect two nodes to be delivering hints simultaneously.) 
-hinted_handoff_throttle_in_kb: 1024
-# Number of threads with which to deliver hints;
-# Consider increasing this number when you have multi-dc deployments, since
-# cross-dc handoff tends to be slower
-max_hints_delivery_threads: 2
-
-hints_directory: build/tmp/embeddedCassandra/hints
-
-# The following setting populates the page cache on memtable flush and compaction
-# WARNING: Enable this setting only when the whole node's data fits in memory.
-# Defaults to: false
-# populate_io_cache_on_flush: false
-
-# Authentication backend, implementing IAuthenticator; used to identify users
-# Out of the box, Cassandra provides org.apache.cassandra.auth.{AllowAllAuthenticator,
-# PasswordAuthenticator}.
-#
-# - AllowAllAuthenticator performs no checks - set it to disable authentication.
-# - PasswordAuthenticator relies on username/password pairs to authenticate
-# users. It keeps usernames and hashed passwords in system_auth.credentials table.
-# Please increase system_auth keyspace replication factor if you use this authenticator.
-authenticator: AllowAllAuthenticator
-
-# Authorization backend, implementing IAuthorizer; used to limit access/provide permissions
-# Out of the box, Cassandra provides org.apache.cassandra.auth.{AllowAllAuthorizer,
-# CassandraAuthorizer}.
-#
-# - AllowAllAuthorizer allows any action to any user - set it to disable authorization.
-# - CassandraAuthorizer stores permissions in system_auth.permissions table. Please
-# increase system_auth keyspace replication factor if you use this authorizer.
-authorizer: AllowAllAuthorizer
-
-# Validity period for permissions cache (fetching permissions can be an
-# expensive operation depending on the authorizer, CassandraAuthorizer is
-# one example). Defaults to 2000, set to 0 to disable.
-# Will be disabled automatically for AllowAllAuthorizer.
-permissions_validity_in_ms: 2000
-
-
-# The partitioner is responsible for distributing rows (by key) across
-# nodes in the cluster. Any IPartitioner may be used, including your
-# own as long as it is on the classpath. Out of the box, Cassandra
-# provides org.apache.cassandra.dht.{Murmur3Partitioner, RandomPartitioner,
-# ByteOrderedPartitioner, OrderPreservingPartitioner (deprecated)}.
-#
-# - RandomPartitioner distributes rows across the cluster evenly by md5.
-# This is the default prior to 1.2 and is retained for compatibility.
-# - Murmur3Partitioner is similar to RandomPartitioner but uses Murmur3_128
-# Hash Function instead of md5. When in doubt, this is the best option.
-# - ByteOrderedPartitioner orders rows lexically by key bytes. BOP allows
-# scanning rows in key order, but the ordering can generate hot spots
-# for sequential insertion workloads.
-# - OrderPreservingPartitioner is an obsolete form of BOP, that stores
-# keys in a less-efficient format and only works with keys that are
-# UTF8-encoded Strings.
-# - CollatingOPP collates according to EN,US rules rather than lexical byte
-# ordering. Use this as an example if you need custom collation.
-#
-# See http://wiki.apache.org/cassandra/Operations for more on
-# partitioners and token selection.
-partitioner: org.apache.cassandra.dht.Murmur3Partitioner
-
-# directories where Cassandra should store data on disk.
-data_file_directories:
- - build/tmp/embeddedCassandra/data
-
-# commit log
-commitlog_directory: build/tmp/embeddedCassandra/commitlog
-
-cdc_raw_directory: build/tmp/embeddedCassandra/cdc
-
-# policy for data disk failures:
-# stop: shut down gossip and Thrift, leaving the node effectively dead, but
-# can still be inspected via JMX.
-# best_effort: stop using the failed disk and respond to requests based on
-# remaining available sstables. This means you WILL see obsolete
-# data at CL.ONE!
-# ignore: ignore fatal errors and let requests fail, as in pre-1.2 Cassandra
-disk_failure_policy: stop
-
-
-# Maximum size of the key cache in memory.
-#
-# Each key cache hit saves 1 seek and each row cache hit saves 2 seeks at the
-# minimum, sometimes more. The key cache is fairly tiny for the amount of
-# time it saves, so it's worthwhile to use it at large numbers.
-# The row cache saves even more time, but must store the whole values of
-# its rows, so it is extremely space-intensive. It's best to only use the
-# row cache if you have hot rows or static rows.
-#
-# NOTE: if you reduce the size, you may not get your hottest keys loaded on startup.
-#
-# Default value is empty to make it "auto" (min(5% of Heap (in MB), 100MB)). Set to 0 to disable key cache.
-key_cache_size_in_mb:
-
-# Duration in seconds after which Cassandra should
-# save the key cache. Caches are saved to saved_caches_directory as
-# specified in this configuration file.
-#
-# Saved caches greatly improve cold-start speeds, and are relatively cheap in
-# terms of I/O for the key cache. Row cache saving is much more expensive and
-# has limited use.
-#
-# Default is 14400 or 4 hours.
-key_cache_save_period: 14400
-
-# Number of keys from the key cache to save
-# Disabled by default, meaning all keys are going to be saved
-# key_cache_keys_to_save: 100
-
-# Maximum size of the row cache in memory.
-# NOTE: if you reduce the size, you may not get your hottest keys loaded on startup.
-#
-# Default value is 0, to disable row caching.
-row_cache_size_in_mb: 0
-
-# Duration in seconds after which Cassandra should
-# save the row cache. Caches are saved to saved_caches_directory as specified
-# in this configuration file.
-#
-# Saved caches greatly improve cold-start speeds, and are relatively cheap in
-# terms of I/O for the key cache. Row cache saving is much more expensive and
-# has limited use.
-#
-# Default is 0 to disable saving the row cache.
-row_cache_save_period: 0
-
-# Number of keys from the row cache to save
-# Disabled by default, meaning all keys are going to be saved
-# row_cache_keys_to_save: 100
-
-# saved caches
-saved_caches_directory: build/tmp/embeddedCassandra/saved_caches
-
-# commitlog_sync may be either "periodic" or "batch."
-# When in batch mode, Cassandra won't ack writes until the commit log
-# has been fsynced to disk. It will wait up to
-# commitlog_sync_batch_window_in_ms milliseconds for other writes, before
-# performing the sync.
-#
-# commitlog_sync: batch
-# commitlog_sync_batch_window_in_ms: 50
-#
-# the other option is "periodic" where writes may be acked immediately
-# and the CommitLog is simply synced every commitlog_sync_period_in_ms
-# milliseconds.
-commitlog_sync: periodic
-commitlog_sync_period_in_ms: 10000
-
-# The size of the individual commitlog file segments. A commitlog
-# segment may be archived, deleted, or recycled once all the data
-# in it (potentially from each columnfamily in the system) has been
-# flushed to sstables.
-#
-# The default size is 32, which is almost always fine, but if you are
-# archiving commitlog segments (see commitlog_archiving.properties),
-# then you probably want a finer granularity of archiving; 8 or 16 MB
-# is reasonable.
-commitlog_segment_size_in_mb: 32
-
-# any class that implements the SeedProvider interface and has a
-# constructor that takes a Map<String, String> of parameters will do.
-seed_provider:
- # Addresses of hosts that are deemed contact points.
- # Cassandra nodes use this list of hosts to find each other and learn
- # the topology of the ring. You must change this if you are running
- # multiple nodes!
- - class_name: org.apache.cassandra.locator.SimpleSeedProvider
- parameters:
- # seeds is actually a comma-delimited list of addresses.
- # Ex: "<ip1>,<ip2>,<ip3>"
- - seeds: "127.0.0.1"
-
-
-# For workloads with more data than can fit in memory, Cassandra's
-# bottleneck will be reads that need to fetch data from
-# disk. "concurrent_reads" should be set to (16 * number_of_drives) in
-# order to allow the operations to enqueue low enough in the stack
-# that the OS and drives can reorder them.
-#
-# On the other hand, since writes are almost never IO bound, the ideal
-# number of "concurrent_writes" is dependent on the number of cores in
-# your system; (8 * number_of_cores) is a good rule of thumb.
-concurrent_reads: 32
-concurrent_writes: 32
-
-# Total memory to use for memtables. Cassandra will flush the largest
-# memtable when this much memory is used.
-# If omitted, Cassandra will set it to 1/3 of the heap.
-# memtable_total_space_in_mb: 2048
-
-# Total space to use for commitlogs.
-# If space gets above this value (it will round up to the next nearest
-# segment multiple), Cassandra will flush every dirty CF in the oldest
-# segment and remove it.
-# commitlog_total_space_in_mb: 4096
-
-# This sets the number of memtable flush writer threads. These will
-# be blocked by disk io, and each one will hold a memtable in memory
-# while blocked. If you have a large heap and many data directories,
-# you can increase this value for better flush performance.
-# By default this will be set to the number of data directories defined.
-#memtable_flush_writers: 1
-
-# the number of full memtables to allow pending flush, that is,
-# waiting for a writer thread. At a minimum, this should be set to
-# the maximum number of secondary indexes created on a single CF.
-#memtable_flush_queue_size: 4
-
-# Whether to, when doing sequential writing, fsync() at intervals in
-# order to force the operating system to flush the dirty
-# buffers. Enable this to avoid sudden dirty buffer flushing from
-# impacting read latencies. Almost always a good idea on SSDs; not
-# necessarily on platters.
-trickle_fsync: false
-trickle_fsync_interval_in_kb: 10240
-
-# TCP port, for commands and data
-storage_port: 7010
-
-# SSL port, for encrypted communication. Unused unless enabled in
-# encryption_options
-ssl_storage_port: 7011
-
-# Address to bind to and tell other Cassandra nodes to connect to. You
-# _must_ change this if you want multiple nodes to be able to
-# communicate!
-#
-# Leaving it blank leaves it up to InetAddress.getLocalHost(). This
-# will always do the Right Thing *if* the node is properly configured
-# (hostname, name resolution, etc), and the Right Thing is to use the
-# address associated with the hostname (it might not be).
-#
-# Setting this to 0.0.0.0 is always wrong.
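-# For this embedded test configuration, the loopback address is sufficient: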
-
-# Total memory to use for memtables. Cassandra will flush the largest
-# memtable when this much memory is used.
-# If omitted, Cassandra will set it to 1/3 of the heap.
-# memtable_total_space_in_mb: 2048
-
-# Total space to use for commitlogs.
-# If space gets above this value (it will round up to the next nearest
-# segment multiple), Cassandra will flush every dirty CF in the oldest
-# segment and remove it.
-# commitlog_total_space_in_mb: 4096
-
-# This sets the amount of memtable flush writer threads. These will
-# be blocked by disk io, and each one will hold a memtable in memory
-# while blocked. If you have a large heap and many data directories,
-# you can increase this value for better flush performance.
-# By default this will be set to the amount of data directories defined.
-#memtable_flush_writers: 1
-
-# the number of full memtables to allow pending flush, that is,
-# waiting for a writer thread. At a minimum, this should be set to
-# the maximum number of secondary indexes created on a single CF.
-#memtable_flush_queue_size: 4
-
-# Whether to, when doing sequential writing, fsync() at intervals in
-# order to force the operating system to flush the dirty
-# buffers. Enable this to avoid sudden dirty buffer flushing from
-# impacting read latencies. Almost always a good idea on SSDs; not
-# necessarily on platters.
-trickle_fsync: false
-trickle_fsync_interval_in_kb: 10240
-
-# TCP port, for commands and data
-storage_port: 7010
-
-# SSL port, for encrypted communication. Unused unless enabled in
-# encryption_options
-ssl_storage_port: 7011
-
-# Address to bind to and tell other Cassandra nodes to connect to. You
-# _must_ change this if you want multiple nodes to be able to
-# communicate!
-#
-# Leaving it blank leaves it up to InetAddress.getLocalHost(). This
-# will always do the Right Thing *if* the node is properly configured
-# (hostname, name resolution, etc), and the Right Thing is to use the
-# address associated with the hostname (it might not be).
-#
-# Setting this to 0.0.0.0 is always wrong.
-listen_address: 127.0.0.1
-
-start_native_transport: true
-# port for the CQL native transport to listen for clients on
-native_transport_port: 9142
-
-# Whether to start the thrift rpc server.
-start_rpc: true
-
-# Address to broadcast to other Cassandra nodes
-# Leaving this blank will set it to the same value as listen_address
-# broadcast_address: 1.2.3.4
-
-# The address to bind the Thrift RPC service to -- clients connect
-# here. Unlike ListenAddress above, you *can* specify 0.0.0.0 here if
-# you want Thrift to listen on all interfaces.
-#
-# Leaving this blank has the same effect it does for ListenAddress,
-# (i.e. it will be based on the configured hostname of the node).
-rpc_address: localhost
-# port for Thrift to listen for clients on
-rpc_port: 9171
-
-# enable or disable keepalive on rpc connections
-rpc_keepalive: true
-
-# Cassandra provides three options for the RPC Server:
-#
-# sync  -> One connection per thread in the rpc pool (see below).
-#          For a very large number of clients, memory will be your limiting
-#          factor; on a 64 bit JVM, 128KB is the minimum stack size per thread.
-#          Connection pooling is very, very strongly recommended.
-#
-# async -> Nonblocking server implementation with one thread to serve
-#          rpc connections. This is not recommended for high throughput use
-#          cases. Async has been tested to be about 50% slower than sync
-#          or hsha and is deprecated: it will be removed in the next major release.
-#
-# hsha  -> Stands for "half synchronous, half asynchronous." The rpc thread pool
-#          (see below) is used to manage requests, but the threads are multiplexed
-#          across the different clients.
-#
-# The default is sync because on Windows hsha is about 30% slower. On Linux,
-# sync/hsha performance is about the same, with hsha of course using less memory.
-rpc_server_type: sync
-
-# Uncomment rpc_min|max_thread to set request pool size.
-# You would primarily set max for the sync server to safeguard against
-# misbehaved clients; if you do hit the max, Cassandra will block until one
-# disconnects before accepting more. The defaults for sync are min of 16 and max
-# unlimited.
-#
-# For the Hsha server, the min and max both default to quadruple the number of
-# CPU cores.
-#
-# This configuration is ignored by the async server.
-#
-# rpc_min_threads: 16
-# rpc_max_threads: 2048
-
-# uncomment to set socket buffer sizes on rpc connections
-# rpc_send_buff_size_in_bytes:
-# rpc_recv_buff_size_in_bytes:
-
-# Frame size for thrift (maximum field length).
-# 0 disables TFramedTransport in favor of TSocket. This option
-# is deprecated; we strongly recommend using Framed mode.
-thrift_framed_transport_size_in_mb: 15
-
-# The max length of a thrift message, including all fields and
-# internal thrift overhead.
-thrift_max_message_length_in_mb: 16
-
-# Set to true to have Cassandra create a hard link to each sstable
-# flushed or streamed locally in a backups/ subdirectory of the
-# Keyspace data. Removing these links is the operator's
-# responsibility.
-incremental_backups: false
-
-# Whether or not to take a snapshot before each compaction. Be
-# careful using this option, since Cassandra won't clean up the
-# snapshots for you. Mostly useful if you're paranoid when there
-# is a data format change.
-snapshot_before_compaction: false
-
-# Whether or not a snapshot is taken of the data before keyspace truncation
-# or dropping of column families. The STRONGLY advised default of true
-# should be used to provide data safety. If you set this flag to false, you will
-# lose data on truncation or drop.
-auto_snapshot: false
-
-# Add column indexes to a row after its contents reach this size.
-# Increase if your column values are large, or if you have a very large
-# number of columns. The competing causes are, Cassandra has to
-# deserialize this much of the row to read a single column, so you want
-# it to be small - at least if you do many partial-row reads - but all
-# the index data is read for each access, so you don't want to generate
-# that wastefully either.
-column_index_size_in_kb: 64
-
-# Size limit for rows being compacted in memory. Larger rows will spill
-# over to disk and use a slower two-pass compaction process. A message
-# will be logged specifying the row key.
-#in_memory_compaction_limit_in_mb: 64
-
-# Number of simultaneous compactions to allow, NOT including
-# validation "compactions" for anti-entropy repair. Simultaneous
-# compactions can help preserve read performance in a mixed read/write
-# workload, by mitigating the tendency of small sstables to accumulate
-# during a single long-running compaction. The default is usually
-# fine and if you experience problems with compaction running too
-# slowly or too fast, you should look at
-# compaction_throughput_mb_per_sec first.
-#
-# This setting has no effect on LeveledCompactionStrategy.
-#
-# concurrent_compactors defaults to the number of cores.
-# Uncomment to make compaction mono-threaded, the pre-0.8 default.
-#concurrent_compactors: 1
-
-# Multi-threaded compaction. When enabled, each compaction will use
-# up to one thread per core, plus one thread per sstable being merged.
-# This is usually only useful for SSD-based hardware: otherwise,
-# your concern is usually to get compaction to do LESS i/o (see:
-# compaction_throughput_mb_per_sec), not more.
-#multithreaded_compaction: false
-
-# Throttles compaction to the given total throughput across the entire
-# system. The faster you insert data, the faster you need to compact in
-# order to keep the sstable count down, but in general, setting this to
-# 16 to 32 times the rate you are inserting data is more than sufficient.
-# Setting this to 0 disables throttling. Note that this accounts for all types
-# of compaction, including validation compaction.
-compaction_throughput_mb_per_sec: 16
-
-# Track cached row keys during compaction, and re-cache their new
-# positions in the compacted sstable. Disable if you use really large
-# key caches.
-#compaction_preheat_key_cache: true
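Reading the 16-32x rule above the other way around (the ingest rates are illustrative): a node sustaining roughly 0.5-1 MB/s of inserts is adequately served by the compaction_throughput_mb_per_sec: 16 configured here, while a node ingesting 5 MB/s would want the throttle raised to about 80-160.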
-
-# Throttles all outbound streaming file transfers on this node to the
-# given total throughput in Mbps. This is necessary because Cassandra does
-# mostly sequential IO when streaming data during bootstrap or repair, which
-# can lead to saturating the network connection and degrading rpc performance.
-# When unset, the default is 200 Mbps or 25 MB/s.
-# stream_throughput_outbound_megabits_per_sec: 200
-
-# How long the coordinator should wait for read operations to complete
-read_request_timeout_in_ms: 5000
-# How long the coordinator should wait for seq or index scans to complete
-range_request_timeout_in_ms: 10000
-# How long the coordinator should wait for writes to complete
-write_request_timeout_in_ms: 2000
-# How long a coordinator should continue to retry a CAS operation
-# that contends with other proposals for the same row
-cas_contention_timeout_in_ms: 1000
-# How long the coordinator should wait for truncates to complete
-# (This can be much longer, because unless auto_snapshot is disabled
-# we need to flush first so we can snapshot before removing the data.)
-truncate_request_timeout_in_ms: 60000
-# The default timeout for other, miscellaneous operations
-request_timeout_in_ms: 10000
-
-# Enable operation timeout information exchange between nodes to accurately
-# measure request timeouts. If disabled, replicas will assume that requests
-# were forwarded to them instantly by the coordinator, which means that
-# under overload conditions we will waste that much extra time processing
-# already-timed-out requests.
-#
-# Warning: before enabling this property make sure ntp is installed
-# and the times are synchronized between the nodes.
-cross_node_timeout: false
-
-# Enable socket timeout for streaming operation.
-# When a timeout occurs during streaming, streaming is retried from the start
-# of the current file. This _can_ involve re-streaming a significant amount of
-# data, so you should avoid setting the value too low.
-# Default value is 0, which means streams never time out.
-# streaming_socket_timeout_in_ms: 0
-
-# phi value that must be reached for a host to be marked down.
-# most users should never need to adjust this.
-# phi_convict_threshold: 8
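These coordinator-side budgets only surface cleanly if the client waits at least as long. A minimal client-side sketch, assuming the DataStax Java driver 3.x (the driver choice and the 12 s value are illustrative assumptions, not part of this patch):

    import com.datastax.driver.core.Cluster;
    import com.datastax.driver.core.Session;
    import com.datastax.driver.core.SocketOptions;

    public class TimeoutExample {
        public static void main( String[] args ) {
            // Allow 12 s on the client, comfortably above the server's
            // request_timeout_in_ms: 10000, so a coordinator-side timeout
            // surfaces as a server error rather than a client-side abort.
            SocketOptions socketOptions = new SocketOptions().setReadTimeoutMillis( 12000 );
            try (
                    Cluster cluster = Cluster.builder()
                            .addContactPoint( "127.0.0.1" )
                            .withPort( 9142 ) // native_transport_port from this test config
                            .withSocketOptions( socketOptions )
                            .build();
                    Session session = cluster.connect()
            ) {
                session.execute( "SELECT release_version FROM system.local" );
            }
        }
    }

The general rule this sketch follows: keep the client read timeout strictly greater than the largest server-side timeout the query can hit, otherwise the driver gives up while the coordinator is still within its budget.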
-
-# endpoint_snitch -- Set this to a class that implements
-# IEndpointSnitch. The snitch has two functions:
-# - it teaches Cassandra enough about your network topology to route
-#   requests efficiently
-# - it allows Cassandra to spread replicas around your cluster to avoid
-#   correlated failures. It does this by grouping machines into
-#   "datacenters" and "racks." Cassandra will do its best not to have
-#   more than one replica on the same "rack" (which may not actually
-#   be a physical location)
-#
-# IF YOU CHANGE THE SNITCH AFTER DATA IS INSERTED INTO THE CLUSTER,
-# YOU MUST RUN A FULL REPAIR, SINCE THE SNITCH AFFECTS WHERE REPLICAS
-# ARE PLACED.
-#
-# Out of the box, Cassandra provides
-# - SimpleSnitch:
-#   Treats Strategy order as proximity. This improves cache locality
-#   when disabling read repair, which can further improve throughput.
-#   Only appropriate for single-datacenter deployments.
-# - PropertyFileSnitch:
-#   Proximity is determined by rack and data center, which are
-#   explicitly configured in cassandra-topology.properties.
-# - RackInferringSnitch:
-#   Proximity is determined by rack and data center, which are
-#   assumed to correspond to the 3rd and 2nd octet of each node's
-#   IP address, respectively. Unless this happens to match your
-#   deployment conventions (as it did Facebook's), this is best used
-#   as an example of writing a custom Snitch class.
-# - Ec2Snitch:
-#   Appropriate for EC2 deployments in a single Region. Loads Region
-#   and Availability Zone information from the EC2 API. The Region is
-#   treated as the Datacenter, and the Availability Zone as the rack.
-#   Only private IPs are used, so this will not work across multiple
-#   Regions.
-# - Ec2MultiRegionSnitch:
-#   Uses public IPs as broadcast_address to allow cross-region
-#   connectivity. (Thus, you should set seed addresses to the public
-#   IP as well.) You will need to open the storage_port or
-#   ssl_storage_port on the public IP firewall. (For intra-Region
-#   traffic, Cassandra will switch to the private IP after
-#   establishing a connection.)
-#
-# You can use a custom Snitch by setting this to the full class name
-# of the snitch, which will be assumed to be on your classpath.
-endpoint_snitch: SimpleSnitch
-
-# controls how often to perform the more expensive part of host score
-# calculation
-dynamic_snitch_update_interval_in_ms: 100
-# controls how often to reset all host scores, allowing a bad host to
-# possibly recover
-dynamic_snitch_reset_interval_in_ms: 600000
-# if set greater than zero and read_repair_chance is < 1.0, this will allow
-# 'pinning' of replicas to hosts in order to increase cache capacity.
-# The badness threshold will control how much worse the pinned host has to be
-# before the dynamic snitch will prefer other replicas over it. This is
-# expressed as a double which represents a percentage. Thus, a value of
-# 0.2 means Cassandra would continue to prefer the static snitch values
-# until the pinned host was 20% worse than the fastest.
-dynamic_snitch_badness_threshold: 0.1
-
-# request_scheduler -- Set this to a class that implements
-# RequestScheduler, which will schedule incoming client requests
-# according to the specific policy. This is useful for multi-tenancy
-# with a single Cassandra cluster.
-# NOTE: This is specifically for requests from the client and does
-# not affect inter node communication.
-# org.apache.cassandra.scheduler.NoScheduler - No scheduling takes place
-# org.apache.cassandra.scheduler.RoundRobinScheduler - Round robin of
-# client requests to a node with a separate queue for each
-# request_scheduler_id. The scheduler is further customized by
-# request_scheduler_options as described below.
-request_scheduler: org.apache.cassandra.scheduler.NoScheduler
-
-# Scheduler Options vary based on the type of scheduler
-# NoScheduler - Has no options
-# RoundRobin
-#  - throttle_limit -- The throttle_limit is the number of in-flight
-#                      requests per client. Requests beyond
-#                      that limit are queued up until
-#                      running requests can complete.
-#                      The value of 80 here is twice the number of
-#                      concurrent_reads + concurrent_writes.
-#  - default_weight -- default_weight is optional and allows for
-#                      overriding the default which is 1.
-#  - weights -- Weights are optional and will default to 1 or the
-#               overridden default_weight. The weight translates into how
-#               many requests are handled during each turn of the
-#               RoundRobin, based on the scheduler id.
-#
-# request_scheduler_options:
-#    throttle_limit: 80
-#    default_weight: 5
-#    weights:
-#      Keyspace1: 1
-#      Keyspace2: 5
-
-# request_scheduler_id -- An identifier based on which to perform
-# the request scheduling. Currently the only valid option is keyspace.
-# request_scheduler_id: keyspace
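One caveat on the throttle_limit comment above: with the values configured earlier in this file (concurrent_reads: 32 and concurrent_writes: 32), twice their sum would be 2 × (32 + 32) = 128, not 80, so the 80 in the stock comment reflects older defaults and is best treated as a starting point rather than a derived constant.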
-
-# index_interval controls the sampling of entries from the primary
-# row index in terms of space versus time. The larger the interval,
-# the smaller and less effective the sampling will be. In technical
-# terms, the interval corresponds to the number of index entries that
-# are skipped between taking each sample. All the sampled entries
-# must fit in memory. Generally, a value between 128 and 512 here
-# coupled with a large key cache size on CFs results in the best trade
-# offs. This value is not often changed, however if you have many
-# very small rows (many to an OS page), then increasing this will
-# often lower memory usage without an impact on performance.
-index_interval: 128
-
-# Enable or disable inter-node encryption
-# Default settings are TLS v1, RSA 1024-bit keys (it is imperative that
-# users generate their own keys) TLS_RSA_WITH_AES_128_CBC_SHA as the cipher
-# suite for authentication, key exchange and encryption of the actual data transfers.
-# NOTE: No custom encryption options are enabled at the moment
-# The available internode options are : all, none, dc, rack
-#
-# If set to dc cassandra will encrypt the traffic between the DCs
-# If set to rack cassandra will encrypt the traffic between the racks
-#
-# The passwords used in these options must match the passwords used when generating
-# the keystore and truststore. For instructions on generating these files, see:
-# http://download.oracle.com/javase/6/docs/technotes/guides/security/jsse/JSSERefGuide.html#CreateKeystore
-#
-encryption_options:
-    internode_encryption: none
-    keystore: conf/.keystore
-    keystore_password: cassandra
-    truststore: conf/.truststore
-    truststore_password: cassandra
-    # More advanced defaults below:
-    # protocol: TLS
-    # algorithm: SunX509
-    # store_type: JKS
-    # cipher_suites: [TLS_RSA_WITH_AES_128_CBC_SHA,TLS_RSA_WITH_AES_256_CBC_SHA]
-
diff --git a/plugins/cassandra-adapter/src/test/resources/logback-test.xml b/plugins/cassandra-adapter/src/test/resources/logback-test.xml
deleted file mode 100644
index e06bb54bc5..0000000000
--- a/plugins/cassandra-adapter/src/test/resources/logback-test.xml
+++ /dev/null
@@ -1,32 +0,0 @@
-
-
-
-
-
-
-
-
-
-            %d{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n
-
-
-
-
-
-
-
-
diff --git a/plugins/cassandra-adapter/src/test/resources/model.json b/plugins/cassandra-adapter/src/test/resources/model.json
deleted file mode 100644
index 144336147d..0000000000
--- a/plugins/cassandra-adapter/src/test/resources/model.json
+++ /dev/null
@@ -1,16 +0,0 @@
-{
-  "version": "1.0",
-  "defaultSchema": "twissandra",
-  "schemas": [
-    {
-      "name": "twissandra",
-      "type": "custom",
-      "factory": "org.polypheny.db.adapter.cassandra.CassandraSchemaFactory",
-      "operand": {
-        "host": "localhost",
-        "port": 9142,
-        "keyspace": "twissandra"
-      }
-    }
-  ]
-}
diff --git a/plugins/cassandra-adapter/src/test/resources/twissandra.cql b/plugins/cassandra-adapter/src/test/resources/twissandra.cql
deleted file mode 100644
index 9b844b5e8a..0000000000
--- a/plugins/cassandra-adapter/src/test/resources/twissandra.cql
+++ /dev/null
@@ -1,1261 +0,0 @@
-/*
- * Copyright 2019-2020 The Polypheny Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * This file incorporates code covered by the following terms:
- *
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to you under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -CREATE -KEYSPACE twissandra -WITH replication = {'class': 'SimpleStrategy', 'replication_factor': '1'}; - -CREATE TABLE twissandra.users ( - username text PRIMARY KEY, password text - ); - -CREATE TABLE twissandra.friends ( - username text, friend text, since timestamp, PRIMARY KEY (username, friend) - ); - -CREATE TABLE twissandra.followers ( - username text, follower text, since timestamp, PRIMARY KEY (username, follower) - ); - -CREATE TABLE twissandra.tweets ( - tweet_id uuid PRIMARY KEY, username text, body text - ); - -CREATE TABLE twissandra.userline ( - username text, time timeuuid, tweet_id uuid, PRIMARY KEY (username, time) - ) WITH CLUSTERING ORDER BY (time DESC); - -CREATE TABLE twissandra.timeline ( - username text, time timeuuid, tweet_id uuid, PRIMARY KEY (username, time) - ) WITH CLUSTERING ORDER BY (time DESC); - -CREATE -MATERIALIZED VIEW twissandra."Tweets_By_User" AS -SELECT username, tweet_id -FROM twissandra.tweets -WHERE username IS NOT NULL PRIMARY KEY (username, tweet_id); - -USE -twissandra; - -INSERT INTO timeline(username, time, tweet_id) -VALUES ('fOGctyIDES', 81514000 - ef01 - 1fb5 - b70b - f062f003e9d1, f3dbb03a - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('fOGctyIDES', bb894000 - 086c - 1f96 - ad8e - 67fe0797978a, f3e4182e - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('fOGctyIDES', e9194000 - 3674 - 1f10 - 9326 - fef1e3078e33, f3dcff80 - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('fOGctyIDES', a3d94000 - a86e - 1eef - 98e0 - 94b6ac5b030e, f3e56ca6 - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('fOGctyIDES', b4514000 - 01b7 - 1eb1 - aa00 - a725a56435fc, f3e1939c - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('fOGctyIDES', 0f894000 - 472c - 1ca5 - 9b62 - eb5c2e2db9d8, f3e032f4 - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('fOGctyIDES', c2414000 - 7663 - 1b0c - 9e61 - 8e6aa7bf4ca6, f3e2df7c - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('fOGctyIDES', a2a94000 - b7d3 - 1aaa - 94cc - 021e635beee7, f3de0664 - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('fOGctyIDES', 56e94000 - ca02 - 1a0f - 9d36 - fd47837f58b5, f3d6a892 - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('fOGctyIDES', a4594000 - 8014 - 17bb - ba7d - f581995f27f9, f3d8d6c6 - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('fOGctyIDES', 01d94000 - b0ed - 15e9 - bb5b - 85c46c22a536, f3da3c1e - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('fOGctyIDES', 51714000 - a77a - 1486 - babe - 518e4a300603, 
f3d7a562 - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('fOGctyIDES', 25a14000 - d532 - 13cb - 8bc7 - 896cdcb464ca, f3e6c9ca - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('fOGctyIDES', be794000 - d584 - 1051 - a524 - 7989e62f9042, f3df0c44 - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('cWIZrdKQmh', 5db54000 - 1516 - 1e71 - b66a - ed80d7c29155, f4020438 - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('cWIZrdKQmh', e5794000 - df7c - 1e14 - bc95 - 396ba65637d7, f40798a8 - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('cWIZrdKQmh', fa394000 - 4ecb - 1ccf - b665 - 7a142d737360, f3fec26e - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('cWIZrdKQmh', 30dd4000 - 78e2 - 1bc5 - 949b - af81ca816236, f3fc513c - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('cWIZrdKQmh', b4114000 - ec6b - 1b40 - b1df - 3bfc1128ba7c, f4064a70 - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('cWIZrdKQmh', 28894000 - e7ea - 1a5a - 8585 - c19ff031cd1b, f3fd904c - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('cWIZrdKQmh', 99d94000 - bb02 - 1894 - bff5 - 57f89aad5b0a, f408a716 - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('cWIZrdKQmh', 0ac94000 - f683 - 187d - 8874 - c698342eb895, f3fad4ec - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('cWIZrdKQmh', 0b414000 - 4fe9 - 17aa - 843e-5 defdd5421d2, f409c01a - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('cWIZrdKQmh', 32094000 - eabf - 1739 - be54 - 0c2c85b322c2, f404d474 - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('cWIZrdKQmh', 3b8d4000 - a57c - 1450 - abea - 1714095272f4, f40383ee - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('cWIZrdKQmh', 582d4000 - 1af7 - 121c - b849 - d468b28a0dfa, f40acb5e - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('cWIZrdKQmh', 14854000 - e2ed - 117f - 9e2f - 8ad33d3c1300, f3f987a4 - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('cWIZrdKQmh', f0e94000 - 3df6 - 10f6 - 8c77 - 03219898403f, f3fff77e - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('lixvTEUaBj', 2ca94000 - 7566 - 1e9b - 86f6 - 856943b771d2, f3f4f2a2 - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('lixvTEUaBj', 07794000 - cc8a - 1e5a - 9e67 - d8789072d13f, f3f7680c - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('lixvTEUaBj', 31514000 - 52a1 - 1e46 - b62e - 33371ec3c2f5, f3ee4862 - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('lixvTEUaBj', 07214000 - df9a - 1e20 - 88a6 - a4e897bdf237, f3f04aea - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('lixvTEUaBj', 3e614000 - 2d20 - 1d7a - 954c - d946629b1fd7, f3ef4b22 - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES 
('lixvTEUaBj', 9f494000 - d3cf - 1d41 - ae79 - 1a2c29685e47, f3ed3756 - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('lixvTEUaBj', 92214000 - 191a - 1b0b - 8bfb - 6d76ba406467, f3f38016 - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('lixvTEUaBj', c7014000 - 70fb - 19cc - 9be1 - 6dc17ff109bc, f3f15336 - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('lixvTEUaBj', 43714000 - 286a - 1578 - 904c - 9c6e423bc32d, f3f85c3a - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('lixvTEUaBj', 44f14000 - 6dde - 1533 - 8e6c - 9769ddbef30c, f3f25bbe - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('lixvTEUaBj', 50b94000 - 273e-126f-997e-dcaa37301bb5, f3f67258 - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('PNexGqHdVE', ba614000 - 538b - 1c25 - 93fd - 6eb18ce6a927, f3e84e08 - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('PNexGqHdVE', 81d14000 - f2c5 - 19a4 - 920a - 049fd5c989a8, f3e9c404 - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('PNexGqHdVE', c9f94000 - 0bb1 - 1494 - a242 - def3bb8ad731, f3eb09fe - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('PDKIVoezHs', 7a4d4000 - 3e76 - 1e31 - bfd6 - 3467b0e60088, f47bb1ac - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('PDKIVoezHs', 7dd94000 - bccf - 1adf - 8997 - 9b725bb5c949, f47a041a - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('PDKIVoezHs', b1154000 - 451b - 182b - ae79 - 49b6668984da, f478bb1e - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('PDKIVoezHs', 4e314000 - 4ef9 - 1680 - 9b43 - 7fd2d367190b, f47ce8ce - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('HuGetcsXbQ', 37814000 - e445 - 1d17 - 8ce2 - 059063ba673c, f3bdb102 - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('HuGetcsXbQ', f9f14000 - 0503 - 1803 - b0b7 - 4c4fef07bcbe, f3c05236 - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('HuGetcsXbQ', 14314000 - 4eca - 17e3 - 90b9 - cadc49af5d35, f3bb6d2a - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('HuGetcsXbQ', ba214000 - b9db - 179a - 9f25 - ad8de44681c2, f3c1c710 - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('HuGetcsXbQ', ac514000 - fe69 - 15ce - 883e-2 d8c561aa0a6, f3b44cf2 - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('HuGetcsXbQ', 21a94000 - f6a7 - 1482 - b14e - 9f0333b4a477, f3b87b1a - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('MdHQeWbPjB', 5cbd4000 - f10b - 1f4c - b960 - f9185aad7250, f4167044 - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('MdHQeWbPjB', e1d14000 - 9f19 - 1d6d - bccb - 97b0333d3d25, f412d72c - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('MdHQeWbPjB', d6e14000 - 196b - 1cec - be0d - 24723fc4684a, f417b076 - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT 
INTO timeline(username, time, tweet_id) -VALUES ('MdHQeWbPjB', 7f914000 - 4bc4 - 1c8e - bd2e - e1ab747e4575, f41d6638 - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('MdHQeWbPjB', dcf14000 - 9bf1 - 1c7d - b37c - cf705e6e6863, f4205384 - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('MdHQeWbPjB', cef94000 - e6a0 - 1c1f - a42e - 735ce54741ef, f411a604 - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('MdHQeWbPjB', 7a114000 - 1800 - 1ba9 - be1e - acc9d49fc617, f42bcfc0 - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('MdHQeWbPjB', f2d94000 - 8c12 - 1b78 - bdb4 - ee7887df676a, f4104264 - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('MdHQeWbPjB', f9594000 - a017 - 1a67 - a25a - 62b6c0b037d0, f40eee5a - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('MdHQeWbPjB', 1b414000 - 2919 - 19f5 - 9f18 - 3cec58362e0c, f42a861a - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('MdHQeWbPjB', 2a514000 - e54e - 19a7 - 886a - b5843a203bf3, f41b8e44 - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('MdHQeWbPjB', b6994000 - 5e80 - 1987 - a836 - f526c37f8152, f41a64ce - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('MdHQeWbPjB', fee94000 - a0ab - 1889 - bb2d - cbf3badae5b2, f42369de - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('MdHQeWbPjB', 9dc94000 - 0997 - 1718 - 8dea - ab0b89466384, f4152784 - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('MdHQeWbPjB', a2214000 - 0067 - 16fe - 9b00 - 2526ab213ed3, f413ddde - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('MdHQeWbPjB', 39e54000 - 8129 - 158c - 9b4a - 585973b7b98e, f4253796 - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('MdHQeWbPjB', 47154000 - d6ed - 14ec - a5a8 - 2b450734331a, f429301c - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('MdHQeWbPjB', 80f14000 - 8676 - 14dd - ad0d - 8b709997fea4, f4190c64 - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('MdHQeWbPjB', c5554000 - 9bcb - 1339 - 99ed - bbf318ba211a, f40d784a - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('MdHQeWbPjB', 24414000 - dadd - 1204 - 9987 - c85e8667cc05, f4268d12 - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('MdHQeWbPjB', d7754000 - 3ad6 - 10f7 - 95db - 5557d99c9650, f427dfa0 - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('MdHQeWbPjB', b4194000 - 0681 - 101c - b64a - e02a5b333ae8, f40c175c - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('UWzCrfaxQi', a73d4000 - 9b5c - 1f41 - 8254 - 47df9ce1eacd, f475371e - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('UWzCrfaxQi', fd454000 - 54a7 - 1f41 - 9a41 - 70c951024c56, f4633a14 - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('UWzCrfaxQi', ab1d4000 - b568 - 1f38 - 9748 - 4862d395e63c, 
f4377dde - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('UWzCrfaxQi', c7114000 - f202 - 1f37 - bbaa - f90cb354db2c, f453c840 - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('UWzCrfaxQi', bea34000 - 40d6 - 1f2e - a8ac - fc342eb53d20, f4690e26 - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('UWzCrfaxQi', 38394000 - 343e-1ed5-91fb-72eb957744d2, f45a5eee - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('UWzCrfaxQi', bdd94000 - 4964 - 1e68 - 83a3 - 8be6704361fa, f460e070 - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('UWzCrfaxQi', 7b594000 - d4e7 - 1dd2 - 99e9 - 4e86ea899816, f4648266 - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('UWzCrfaxQi', e4c14000 - c9ec - 1dad - 8db5 - 51426cf56079, f45ba60a - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('UWzCrfaxQi', 4a414000 - 4831 - 1daa - bf4c - a526c6dedd45, f43f0040 - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('UWzCrfaxQi', bd4b4000 - ac05 - 1d77 - b833 - 1b524444cf8e, f4417bf4 - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('UWzCrfaxQi', e0194000 - 52b9 - 1d63 - 9837 - 45e31a1fc132, f44bac0a - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('UWzCrfaxQi', d95b4000 - 74cc - 1d3f - a5ef - 45c55c6a87e8, f45e689a - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('UWzCrfaxQi', a5d54000 - e045 - 1cd5 - 9163 - b5a343177817, f44028b2 - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('UWzCrfaxQi', 4db14000 - 76a3 - 1c7d - b01e - 4733cd36aaf1, f44a13fe - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('UWzCrfaxQi', 73534000 - fc88 - 1bb0 - 827c - a7bd6e96b8b9, f458ffa4 - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('UWzCrfaxQi', f8794000 - e0c1 - 1b72 - a469 - 6bc555173118, f442c130 - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('UWzCrfaxQi', a1e14000 - 752d - 1b6a - 85b3 - 296e7e055a91, f44d0aaa - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('UWzCrfaxQi', be954000 - ef52 - 1a9b - abc6 - 49d47c0fd19b, f4718c7c - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('UWzCrfaxQi', 15b14000 - 9e96 - 19d2 - ba35 - 7b5669fd5465, f42e664a - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('UWzCrfaxQi', 0a5d4000 - 9b69 - 198a - bb74 - 1954d3de4441, f43a34ca - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('UWzCrfaxQi', 84e94000 - a93c - 1948 - bd54 - 7e135ad469d7, f46dd9ba - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('UWzCrfaxQi', 851d4000 - e432 - 188b - ae29 - 9466c9bea87d, f46a2ed2 - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('UWzCrfaxQi', ddc54000 - 07b3 - 187c - b500 - e70e8ebe4840, f42d1bdc - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES 
('UWzCrfaxQi', 4ac94000 - b570 - 1868 - ab3e - 0e948916bf17, f44783aa - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('UWzCrfaxQi', 35194000 - 5d51 - 1861 - 9d3d - f90b9046dd31, f44fb336 - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('UWzCrfaxQi', 87614000 - 7eb5 - 182a - b687 - 78bffa0fc066, f455148e - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('UWzCrfaxQi', b6f54000 - a61f - 1821 - 85bd - 48e12b10cb4b, f45f8842 - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('UWzCrfaxQi', 09f94000 - 8df4 - 1774 - 8cf1 - 08ef9359097b, f43266fa - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('UWzCrfaxQi', 52b54000 - 260a - 175a - b69f - 0d3500b91520, f466da5c - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('UWzCrfaxQi', 7cb14000 - 20a1 - 16ec - 8c5d - 4b1cd23c74b8, f465d5d0 - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('UWzCrfaxQi', ffa14000 - 9d13 - 1668 - b2b7 - 62023271b76e, f4512284 - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('UWzCrfaxQi', 676d4000 - 04e2 - 1652 - a4c6 - 2a99f6c09a53, f44e5d24 - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('UWzCrfaxQi', c3434000 - 584c - 15df - 8020 - d339aa05e6c9, f43b6dfe - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('UWzCrfaxQi', 23d54000 - 05c9 - 15d8 - 8614 - fd6affe0ecc7, f43dde2c - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('UWzCrfaxQi', f2f14000 - 627e-15cc-a531-1f398f1fc120, f448c602 - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('UWzCrfaxQi', 29354000 - f169 - 15c5 - a457 - 485aa61498a8, f44403d8 - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('UWzCrfaxQi', 988f4000 - 0292 - 15c0 - 9317 - c8f64f3fd0fe, f46b7602 - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('UWzCrfaxQi', 37c94000 - 3660 - 15bb - 9b91 - d324146c1335, f4361a2a - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('UWzCrfaxQi', eb654000 - 8ecf - 1592 - 9994 - c850027f951b, f4621080 - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('UWzCrfaxQi', 9dc94000 - 43fb - 1569 - 8e22 - f8ee2f8f1ba6, f44525e2 - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('UWzCrfaxQi', 42494000 - 73fa - 1554 - 841e - e0462e899d00, f45d3e70 - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('UWzCrfaxQi', 35714000 - 98ef - 1457 - b846 - ffacab9b7397, f46f31fc - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('UWzCrfaxQi', 0cb94000 - e922 - 1428 - aa47 - ebd11153147b, f4778532 - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('UWzCrfaxQi', f37d4000 - efa1 - 13f0 - b47c - 03013b58f771, f457b338 - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('UWzCrfaxQi', 6a814000 - fc6f - 13e4 - b244 - e27eaffa8179, f4767534 - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT 
INTO timeline(username, time, tweet_id) -VALUES ('UWzCrfaxQi', 8dc94000 - ff56 - 137b - 84d4 - 3fb5a4f6efac, f433b410 - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('UWzCrfaxQi', 26594000 - de9a - 1360 - 8ebb - 12aa4ed61486, f438eade - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('UWzCrfaxQi', e0d94000 - ccb4 - 116f - a2f7 - 5f167eba94dd, f467f4a0 - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('UWzCrfaxQi', 56254000 - 1cd7 - 1169 - b6a0 - dfdf3d95ac5b, f4707f4e - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('UWzCrfaxQi', 15dd4000 - ab62 - 1156 - 8403 - 24ab8c0b41fd, f43c9620 - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('UWzCrfaxQi', 0d194000 - 6cc2 - 1145 - 950e-275 ad8812a0f, f4740484 - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('UWzCrfaxQi', 34ed4000 - 2472 - 1117 - 856f - df16ec09a322, f4526b80 - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('UWzCrfaxQi', 17914000 - 9724 - 10f8 - 9351 - 0478312e375f, f4567860 - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('UWzCrfaxQi', 36b14000 - 181c - 1095 - 96a9 - 61056836b162, f44658d6 - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('UWzCrfaxQi', 63014000 - f61b - 1077 - a6c0 - 6e3d6787bccf, f472bd9a - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('UWzCrfaxQi', 159b4000 - de07 - 1039 - 8def - 74ecccc8ec22, f434eb3c - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('UWzCrfaxQi', 9bad4000 - eccc - 1032 - a57a - 3069e9121e05, f46c9582 - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('JmuhsAaMdw', 3f314000 - dd0c - 1f1d - 98d2 - 95bff58e6bdb, f3d3d4dc - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('JmuhsAaMdw', 15914000 - 506d - 1f0a - a101 - b8d0b89799b1, f3cd759c - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('JmuhsAaMdw', 0c914000 - 3534 - 1c4d - 9a0c - 87a0521e530e, f3d29edc - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('JmuhsAaMdw', f3914000 - ee9d - 1911 - b55c - 1ef9478a3454, f3d16166 - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('JmuhsAaMdw', ecf94000 - 43e8 - 18f4 - bd96 - 8ff58e628830, f3cc2c50 - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('JmuhsAaMdw', 76f14000 - 2b6a - 1843 - 99de - a145935f5468, f3d0196e - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('JmuhsAaMdw', 08d94000 - b7e2 - 15d3 - 88bd - f3e43dd604d6, f3d55672 - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('JmuhsAaMdw', 5fc94000 - 86e3 - 114f - 8af2 - 761dcafd2d46, f3cec960 - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('JmuhsAaMdw', f21f4000 - 50d5 - 10a4 - b28c - 5b4b4edb4cc4, f3cac086 - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('nFtPHprNOd', e8754000 - 80b8 - 1fe9 - 8e73 - e3698c967ddd, 
f3c329de - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('nFtPHprNOd', 22c54000 - 5f0a - 1b40 - 80d0 - 11672d8ec032, f3c472e4 - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('nFtPHprNOd', c7314000 - 4882 - 1acd - af96 - e8bdd974193b, f3c5c608 - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('nFtPHprNOd', 32754000 - 094d - 1991 - a4de - 006b9d349ae0, f3c77908 - d05b - 11e5 - b58b - 90e2ba530b12); -INSERT INTO timeline(username, time, tweet_id) -VALUES ('nFtPHprNOd', 86bd4000 - 38d9 - 1968 - 9153 - 7079fdc144dc, f3c8ae54 - d05b - 11e5 - b58b - 90e2ba530b12); - -INSERT INTO tweets(tweet_id, body, username) -VALUES (f3cd759c - d05b - 11e5 - b58b - 90e2ba530b12, 'Lacus augue pede posuere.', 'JmuhsAaMdw'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f4361a2a - d05b - 11e5 - b58b - 90e2ba530b12, 'Porta metus enim nonummy nisi et sollicitudin pede curae bibendum dignissim lorem quis.', 'UWzCrfaxQi'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f3de0664 - d05b - 11e5 - b58b - 90e2ba530b12, 'Netus risus.', 'fOGctyIDES'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f43a34ca - d05b - 11e5 - b58b - 90e2ba530b12, 'Proin augue.', 'UWzCrfaxQi'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f4104264 - d05b - 11e5 - b58b - 90e2ba530b12, 'Ipsum augue a euismod aenean sit.', 'MdHQeWbPjB'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f3cec960 - d05b - 11e5 - b58b - 90e2ba530b12, 'Metus justo odio cubilia vitae velit bibendum dui.', 'JmuhsAaMdw'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f4621080 - d05b - 11e5 - b58b - 90e2ba530b12, 'Lorem netus.', 'UWzCrfaxQi'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f3fec26e - d05b - 11e5 - b58b - 90e2ba530b12, 'Proin augue dignissim.', 'cWIZrdKQmh'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f3f25bbe - d05b - 11e5 - b58b - 90e2ba530b12, 'Magna lorem netus posuere sapien vulputate.', 'lixvTEUaBj'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f3f987a4 - d05b - 11e5 - b58b - 90e2ba530b12, 'Netus fusce.', 'cWIZrdKQmh'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f3d3d4dc - d05b - 11e5 - b58b - 90e2ba530b12, 'Curae etiam cras a eget netus commodo.', 'JmuhsAaMdw'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f4648266 - d05b - 11e5 - b58b - 90e2ba530b12, 'Proin velit sit quam orci.', 'UWzCrfaxQi'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f4778532 - d05b - 11e5 - b58b - 90e2ba530b12, 'Velit lacus laoreet eu ut ante aenean lacinia orci a orci augue eleifend.', 'UWzCrfaxQi'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f3cac086 - d05b - 11e5 - b58b - 90e2ba530b12, 'Felis curae mollis non ad feugiat diam habitant suscipit at rutrum a adipiscing.', 'JmuhsAaMdw'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f44d0aaa - d05b - 11e5 - b58b - 90e2ba530b12, 'Velit fames.', 'UWzCrfaxQi'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f46dd9ba - d05b - 11e5 - b58b - 90e2ba530b12, 'Fames vitae sem vestibulum parturient et sagittis sem viverra neque in sapien a.', 'UWzCrfaxQi'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f43266fa - d05b - 11e5 - b58b - 90e2ba530b12, 'Lorem purus lobortis.', 'UWzCrfaxQi'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f42d1bdc - d05b - 11e5 - b58b - 90e2ba530b12, 'Donec porta habitasse non consectetuer.', 'UWzCrfaxQi'); -INSERT INTO 
tweets(tweet_id, body, username) -VALUES (f429301c - d05b - 11e5 - b58b - 90e2ba530b12, 'Proin risus sociosqu fames aptent.', 'MdHQeWbPjB'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f3ee4862 - d05b - 11e5 - b58b - 90e2ba530b12, 'Neque risus condimentum nascetur commodo litora mus nostra ve tempor.', 'lixvTEUaBj'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f46b7602 - d05b - 11e5 - b58b - 90e2ba530b12, 'Augue fusce gravida volutpat posuere tristique nibh.', 'UWzCrfaxQi'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f45f8842 - d05b - 11e5 - b58b - 90e2ba530b12, 'Felis massa lectus tristique pretium fusce quis posuere euismod venenatis est.', 'UWzCrfaxQi'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f44bac0a - d05b - 11e5 - b58b - 90e2ba530b12, 'Etiam velit pulvinar metus porttitor adipiscing dui lorem.', 'UWzCrfaxQi'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f448c602 - d05b - 11e5 - b58b - 90e2ba530b12, 'Vitae etiam taciti porta ullamcorper pulvinar consequat.', 'UWzCrfaxQi'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f3da3c1e - d05b - 11e5 - b58b - 90e2ba530b12, 'Lacus donec nulla dolor dictum euismod elementum enim pretium nulla pretium velit nibh.', 'fOGctyIDES'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f3d8d6c6 - d05b - 11e5 - b58b - 90e2ba530b12, 'Ipsum lorem sem orci hymenaeos.', 'fOGctyIDES'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f4064a70 - d05b - 11e5 - b58b - 90e2ba530b12, 'Ipsum etiam cubilia purus eget conubia ve mi eget ridiculus taciti.', 'cWIZrdKQmh'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f417b076 - d05b - 11e5 - b58b - 90e2ba530b12, 'Fusce proin semper enim aliquam mi eu.', 'MdHQeWbPjB'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f434eb3c - d05b - 11e5 - b58b - 90e2ba530b12, 'Ipsum morbi diam erat.', 'UWzCrfaxQi'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f45a5eee - d05b - 11e5 - b58b - 90e2ba530b12, 'Dolor purus.', 'UWzCrfaxQi'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f408a716 - d05b - 11e5 - b58b - 90e2ba530b12, 'Risus felis tempor eros donec fusce arcu primis feugiat.', 'cWIZrdKQmh'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f41d6638 - d05b - 11e5 - b58b - 90e2ba530b12, 'Justo metus hac proin aliquet eni in magnis quis.', 'MdHQeWbPjB'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f44525e2 - d05b - 11e5 - b58b - 90e2ba530b12, 'Magna velit purus ipsum neque tempor semper lacus class congue.', 'UWzCrfaxQi'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f3d6a892 - d05b - 11e5 - b58b - 90e2ba530b12, 'Nulla porta.', 'fOGctyIDES'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f3c8ae54 - d05b - 11e5 - b58b - 90e2ba530b12, 'Netus metus bibendum morbi cursus.', 'nFtPHprNOd'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f43b6dfe - d05b - 11e5 - b58b - 90e2ba530b12, 'Metus lacus nunc.', 'UWzCrfaxQi'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f4718c7c - d05b - 11e5 - b58b - 90e2ba530b12, 'Massa magna a tempus pede.', 'UWzCrfaxQi'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f4690e26 - d05b - 11e5 - b58b - 90e2ba530b12, 'Etiam purus libero donec dapibus donec at congue.', 'UWzCrfaxQi'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f44e5d24 - d05b - 11e5 - b58b - 90e2ba530b12, 'Metus felis pede etiam auctor porta volutpat leo ipsum vulputate.', 'UWzCrfaxQi'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f467f4a0 - d05b - 
11e5 - b58b - 90e2ba530b12, 'Magna risus.', 'UWzCrfaxQi'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f3b44cf2 - d05b - 11e5 - b58b - 90e2ba530b12, 'Donec dolor torquent tristique congue sagittis eu.', 'HuGetcsXbQ'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f460e070 - d05b - 11e5 - b58b - 90e2ba530b12, 'Donec morbi.', 'UWzCrfaxQi'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f3dbb03a - d05b - 11e5 - b58b - 90e2ba530b12, 'Donec dolor felis ullamcorper mi tempus a adipiscing dis morbi nisl.', 'fOGctyIDES'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f3c5c608 - d05b - 11e5 - b58b - 90e2ba530b12, 'Fames velit nostra sociosqu adipiscing.', 'nFtPHprNOd'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f4190c64 - d05b - 11e5 - b58b - 90e2ba530b12, 'Lorem lacus pretium tortor mattis ve class ad.', 'MdHQeWbPjB'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f4152784 - d05b - 11e5 - b58b - 90e2ba530b12, 'Curae lorem montes pharetra cras luctus.', 'MdHQeWbPjB'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f42a861a - d05b - 11e5 - b58b - 90e2ba530b12, 'Nulla ipsum neque fusce neque potenti facilisis varius eu malesuada parturient nisi elementum aenean arcu odio hymenaeos tristique.', 'MdHQeWbPjB'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f40c175c - d05b - 11e5 - b58b - 90e2ba530b12, 'Curae morbi neque fusce class laoreet et vel.', 'MdHQeWbPjB'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f41a64ce - d05b - 11e5 - b58b - 90e2ba530b12, 'Ipsum porta pharetra id risus dictum et ac quam lobortis.', 'MdHQeWbPjB'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f404d474 - d05b - 11e5 - b58b - 90e2ba530b12, 'Lacus lorem sapien morbi vestibulum interdum ipsum id.', 'cWIZrdKQmh'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f4417bf4 - d05b - 11e5 - b58b - 90e2ba530b12, 'Proin purus a sodales ut cum venenatis auctor metus orci gravida a auctor duis.', 'UWzCrfaxQi'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f42bcfc0 - d05b - 11e5 - b58b - 90e2ba530b12, 'Massa donec.', 'MdHQeWbPjB'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f47a041a - d05b - 11e5 - b58b - 90e2ba530b12, 'Neque metus vivamus.', 'PDKIVoezHs'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f3f4f2a2 - d05b - 11e5 - b58b - 90e2ba530b12, 'Morbi velit nibh malesuada lectus varius mus quis.', 'lixvTEUaBj'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f4707f4e - d05b - 11e5 - b58b - 90e2ba530b12, 'Proin massa morbi tortor.', 'UWzCrfaxQi'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f47ce8ce - d05b - 11e5 - b58b - 90e2ba530b12, 'Donec purus varius duis enim felis quam nonummy a scelerisque leo tellus nibh nisl.', 'PDKIVoezHs'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f40eee5a - d05b - 11e5 - b58b - 90e2ba530b12, 'Proin magna.', 'MdHQeWbPjB'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f3f04aea - d05b - 11e5 - b58b - 90e2ba530b12, 'Dolor felis nec cras quis vulputate cursus dolor vestibulum dictumst porta dolor.', 'lixvTEUaBj'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f44403d8 - d05b - 11e5 - b58b - 90e2ba530b12, 'Velit felis leo.', 'UWzCrfaxQi'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f466da5c - d05b - 11e5 - b58b - 90e2ba530b12, 'Vitae ipsum quam pretium.', 'UWzCrfaxQi'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f3d16166 - d05b - 11e5 - b58b - 90e2ba530b12, 'Porta ipsum.', 'JmuhsAaMdw'); -INSERT INTO 
tweets(tweet_id, body, username) -VALUES (f3fc513c - d05b - 11e5 - b58b - 90e2ba530b12, 'Morbi vitae mollis rutrum pede lacus massa egestas cras.', 'cWIZrdKQmh'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f3cc2c50 - d05b - 11e5 - b58b - 90e2ba530b12, 'Porta curae vel eros nibh senectus.', 'JmuhsAaMdw'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f3fad4ec - d05b - 11e5 - b58b - 90e2ba530b12, 'Proin felis ornare lacus mollis dolor.', 'cWIZrdKQmh'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f3bdb102 - d05b - 11e5 - b58b - 90e2ba530b12, 'Proin curae euismod.', 'HuGetcsXbQ'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f465d5d0 - d05b - 11e5 - b58b - 90e2ba530b12, 'Netus magna aliquet.', 'UWzCrfaxQi'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f42369de - d05b - 11e5 - b58b - 90e2ba530b12, 'Neque dolor fames proin dis tristique metus fermentum ultrices lectus iaculis.', 'MdHQeWbPjB'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f40acb5e - d05b - 11e5 - b58b - 90e2ba530b12, 'Risus donec.', 'cWIZrdKQmh'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f433b410 - d05b - 11e5 - b58b - 90e2ba530b12, 'Velit ipsum feugiat ac nulla.', 'UWzCrfaxQi'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f44658d6 - d05b - 11e5 - b58b - 90e2ba530b12, 'Vitae massa posuere orci facilisis.', 'UWzCrfaxQi'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f3bb6d2a - d05b - 11e5 - b58b - 90e2ba530b12, 'Curae fames montes ad ultrices iaculis mauris nisl.', 'HuGetcsXbQ'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f3dcff80 - d05b - 11e5 - b58b - 90e2ba530b12, 'Felis fusce sodales pretium libero nisl vulputate nunc cras nisi.', 'fOGctyIDES'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f4167044 - d05b - 11e5 - b58b - 90e2ba530b12, 'Lacus netus duis egestas ut platea ve at urna molestie.', 'MdHQeWbPjB'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f3c329de - d05b - 11e5 - b58b - 90e2ba530b12, 'Risus purus mauris congue convallis vestibulum conubia eget eleifend hymenaeos nisl sodales pretium ridiculus nisl.', 'nFtPHprNOd'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f3ed3756 - d05b - 11e5 - b58b - 90e2ba530b12, 'Class vitae ipsum lacus.', 'lixvTEUaBj'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f3d7a562 - d05b - 11e5 - b58b - 90e2ba530b12, 'Augue proin.', 'fOGctyIDES'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f3d29edc - d05b - 11e5 - b58b - 90e2ba530b12, 'Dolor etiam erat eu diam etiam cubilia.', 'JmuhsAaMdw'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f3d0196e - d05b - 11e5 - b58b - 90e2ba530b12, 'Fames morbi posuere cubilia elementum enim phasellus.', 'JmuhsAaMdw'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f475371e - d05b - 11e5 - b58b - 90e2ba530b12, 'Lacus fames.', 'UWzCrfaxQi'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f3e84e08 - d05b - 11e5 - b58b - 90e2ba530b12, 'Fames curae pede dis.', 'PNexGqHdVE'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f412d72c - d05b - 11e5 - b58b - 90e2ba530b12, 'Massa proin.', 'MdHQeWbPjB'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f45d3e70 - d05b - 11e5 - b58b - 90e2ba530b12, 'Massa curae morbi dis natoque etiam mi.', 'UWzCrfaxQi'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f4633a14 - d05b - 11e5 - b58b - 90e2ba530b12, 'Curae morbi neque nostra dictum commodo.', 'UWzCrfaxQi'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f457b338 - 
d05b - 11e5 - b58b - 90e2ba530b12, 'Curae vitae.', 'UWzCrfaxQi'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f3e032f4 - d05b - 11e5 - b58b - 90e2ba530b12, 'Magna vitae quis eleifend fermentum non nisl id class diam sem nunc nec.', 'fOGctyIDES'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f472bd9a - d05b - 11e5 - b58b - 90e2ba530b12, 'Ipsum lorem neque sociis ante et.', 'UWzCrfaxQi'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f3c05236 - d05b - 11e5 - b58b - 90e2ba530b12, 'Morbi metus scelerisque eni ve mauris curae diam duis eu.', 'HuGetcsXbQ'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f47bb1ac - d05b - 11e5 - b58b - 90e2ba530b12, 'Ipsum risus sem enim duis dictumst sollicitudin mattis fusce litora leo conubia.', 'PDKIVoezHs'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f4567860 - d05b - 11e5 - b58b - 90e2ba530b12, 'Augue massa nisl sociis mi sociis elementum bibendum metus ac imperdiet diam.', 'UWzCrfaxQi'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f3f7680c - d05b - 11e5 - b58b - 90e2ba530b12, 'Morbi augue purus.', 'lixvTEUaBj'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f3e2df7c - d05b - 11e5 - b58b - 90e2ba530b12, 'Proin lorem interdum ipsum dictumst vulputate cum ultricies.', 'fOGctyIDES'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f44a13fe - d05b - 11e5 - b58b - 90e2ba530b12, 'Ipsum curae vitae fusce leo nisl facilisi sit platea at curabitur lacus.', 'UWzCrfaxQi'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f3e9c404 - d05b - 11e5 - b58b - 90e2ba530b12, 'Fusce netus natoque eget arcu dui malesuada duis.', 'PNexGqHdVE'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f45ba60a - d05b - 11e5 - b58b - 90e2ba530b12, 'Morbi risus nisi sed cras risus vitae vel orci a purus id.', 'UWzCrfaxQi'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f3df0c44 - d05b - 11e5 - b58b - 90e2ba530b12, 'Velit vitae.', 'fOGctyIDES'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f40d784a - d05b - 11e5 - b58b - 90e2ba530b12, 'Fusce fames auctor aenean velit parturient arcu turpis posuere dignissim.', 'MdHQeWbPjB'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f453c840 - d05b - 11e5 - b58b - 90e2ba530b12, 'Risus justo erat velit purus eget.', 'UWzCrfaxQi'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f3fff77e - d05b - 11e5 - b58b - 90e2ba530b12, 'Metus fames amet tellus curae viverra nam ad sollicitudin.', 'cWIZrdKQmh'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f442c130 - d05b - 11e5 - b58b - 90e2ba530b12, 'Vitae risus ante sagittis scelerisque.', 'UWzCrfaxQi'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f3fd904c - d05b - 11e5 - b58b - 90e2ba530b12, 'Felis dolor auctor ut quisque pede elit dis.', 'cWIZrdKQmh'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f4526b80 - d05b - 11e5 - b58b - 90e2ba530b12, 'Justo neque.', 'UWzCrfaxQi'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f43c9620 - d05b - 11e5 - b58b - 90e2ba530b12, 'Purus morbi ullamcorper in aenean tincidunt non.', 'UWzCrfaxQi'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f3e4182e - d05b - 11e5 - b58b - 90e2ba530b12, 'Porta class aenean vestibulum massa elit quisque aptent est arcu consectetuer adipiscing erat augue in.', 'fOGctyIDES'); -INSERT INTO tweets(tweet_id, body, username) -VALUES (f3c472e4 - d05b - 11e5 - b58b - 90e2ba530b12, 'Curae fusce fames urna eros cras iaculis morbi sed quis turpis phasellus ve nulla est.', 'nFtPHprNOd'); 
-INSERT INTO tweets(tweet_id, body, username)
-VALUES (f3eb09fe-d05b-11e5-b58b-90e2ba530b12, 'Curae massa nam vitae nibh.', 'PNexGqHdVE');
-INSERT INTO tweets(tweet_id, body, username)
-VALUES (f427dfa0-d05b-11e5-b58b-90e2ba530b12, 'Metus fusce vel arcu parturient mauris in pulvinar.', 'MdHQeWbPjB');
-INSERT INTO tweets(tweet_id, body, username)
-VALUES (f41b8e44-d05b-11e5-b58b-90e2ba530b12, 'Dolor etiam.', 'MdHQeWbPjB');
-INSERT INTO tweets(tweet_id, body, username)
-VALUES (f3f38016-d05b-11e5-b58b-90e2ba530b12, 'Justo fusce fames mattis ve augue montes elit enim commodo.', 'lixvTEUaBj');
-INSERT INTO tweets(tweet_id, body, username)
-VALUES (f409c01a-d05b-11e5-b58b-90e2ba530b12, 'Magna felis libero auctor lobortis curae at justo maecenas.', 'cWIZrdKQmh');
-INSERT INTO tweets(tweet_id, body, username)
-VALUES (f4205384-d05b-11e5-b58b-90e2ba530b12, 'Fusce porta lobortis ad turpis interdum lorem pellentesque malesuada donec.', 'MdHQeWbPjB');
-INSERT INTO tweets(tweet_id, body, username)
-VALUES (f455148e-d05b-11e5-b58b-90e2ba530b12, 'Netus vitae massa mollis duis eget vestibulum at sociosqu eni.', 'UWzCrfaxQi');
-INSERT INTO tweets(tweet_id, body, username)
-VALUES (f46f31fc-d05b-11e5-b58b-90e2ba530b12, 'Justo fames a quisque at sit etiam nisl ac.', 'UWzCrfaxQi');
-INSERT INTO tweets(tweet_id, body, username)
-VALUES (f3d55672-d05b-11e5-b58b-90e2ba530b12, 'Massa velit.', 'JmuhsAaMdw');
-INSERT INTO tweets(tweet_id, body, username)
-VALUES (f40798a8-d05b-11e5-b58b-90e2ba530b12, 'Donec nulla purus litora ultricies ac.', 'cWIZrdKQmh');
-INSERT INTO tweets(tweet_id, body, username)
-VALUES (f3f67258-d05b-11e5-b58b-90e2ba530b12, 'Etiam augue massa condimentum eleifend lorem mi eros.', 'lixvTEUaBj');
-INSERT INTO tweets(tweet_id, body, username)
-VALUES (f43dde2c-d05b-11e5-b58b-90e2ba530b12, 'Etiam morbi sapien neque enim nonummy et morbi etiam eni urna cum.', 'UWzCrfaxQi');
-INSERT INTO tweets(tweet_id, body, username)
-VALUES (f44fb336-d05b-11e5-b58b-90e2ba530b12, 'Donec risus aenean in tempor in molestie pretium sollicitudin viverra.', 'UWzCrfaxQi');
-INSERT INTO tweets(tweet_id, body, username)
-VALUES (f40383ee-d05b-11e5-b58b-90e2ba530b12, 'Morbi risus faucibus leo eros arcu aliquet feugiat.', 'cWIZrdKQmh');
-INSERT INTO tweets(tweet_id, body, username)
-VALUES (f43f0040-d05b-11e5-b58b-90e2ba530b12, 'Dolor justo cum egestas a.', 'UWzCrfaxQi');
-INSERT INTO tweets(tweet_id, body, username)
-VALUES (f438eade-d05b-11e5-b58b-90e2ba530b12, 'Netus curae molestie donec tortor urna odio fermentum mattis fusce.', 'UWzCrfaxQi');
-INSERT INTO tweets(tweet_id, body, username)
-VALUES (f3e6c9ca-d05b-11e5-b58b-90e2ba530b12, 'Neque magna pharetra nec magnis.', 'fOGctyIDES');
-INSERT INTO tweets(tweet_id, body, username)
-VALUES (f44783aa-d05b-11e5-b58b-90e2ba530b12, 'Lorem fusce etiam feugiat montes ac orci ac.', 'UWzCrfaxQi');
-INSERT INTO tweets(tweet_id, body, username)
-VALUES (f4740484-d05b-11e5-b58b-90e2ba530b12, 'Fames magna.', 'UWzCrfaxQi');
-INSERT INTO tweets(tweet_id, body, username)
-VALUES (f44028b2-d05b-11e5-b58b-90e2ba530b12, 'Lorem donec purus nibh.', 'UWzCrfaxQi');
-INSERT INTO tweets(tweet_id, body, username)
-VALUES (f4377dde-d05b-11e5-b58b-90e2ba530b12, 'Ipsum magna metus mollis.', 'UWzCrfaxQi');
-INSERT INTO tweets(tweet_id, body, username)
-VALUES (f4268d12-d05b-11e5-b58b-90e2ba530b12, 'Netus augue magnis massa vestibulum interdum conubia donec id magnis a potenti in.', 'MdHQeWbPjB');
-INSERT INTO tweets(tweet_id, body, username)
-VALUES (f46c9582-d05b-11e5-b58b-90e2ba530b12, 'Velit fusce tempus urna ante pulvinar lorem purus cum primis cubilia est nunc.', 'UWzCrfaxQi');
-INSERT INTO tweets(tweet_id, body, username)
-VALUES (f4253796-d05b-11e5-b58b-90e2ba530b12, 'Lacus neque a mauris amet eget pede felis nullam velit.', 'MdHQeWbPjB');
-INSERT INTO tweets(tweet_id, body, username)
-VALUES (f3e1939c-d05b-11e5-b58b-90e2ba530b12, 'Porta etiam metus lorem.', 'fOGctyIDES');
-INSERT INTO tweets(tweet_id, body, username)
-VALUES (f4020438-d05b-11e5-b58b-90e2ba530b12, 'Ipsum lorem fusce vel arcu hendrerit bibendum magnis nostra fames tortor.', 'cWIZrdKQmh');
-INSERT INTO tweets(tweet_id, body, username)
-VALUES (f42e664a-d05b-11e5-b58b-90e2ba530b12, 'Massa augue est blandit volutpat semper.', 'UWzCrfaxQi');
-INSERT INTO tweets(tweet_id, body, username)
-VALUES (f3e56ca6-d05b-11e5-b58b-90e2ba530b12, 'Risus justo.', 'fOGctyIDES');
-INSERT INTO tweets(tweet_id, body, username)
-VALUES (f458ffa4-d05b-11e5-b58b-90e2ba530b12, 'Justo class.', 'UWzCrfaxQi');
-INSERT INTO tweets(tweet_id, body, username)
-VALUES (f413ddde-d05b-11e5-b58b-90e2ba530b12, 'Donec augue auctor netus leo donec odio enim dis risus.', 'MdHQeWbPjB');
-INSERT INTO tweets(tweet_id, body, username)
-VALUES (f3c77908-d05b-11e5-b58b-90e2ba530b12, 'Magna curae mollis pulvinar class conubia lobortis proin taciti.', 'nFtPHprNOd');
-INSERT INTO tweets(tweet_id, body, username)
-VALUES (f4767534-d05b-11e5-b58b-90e2ba530b12, 'Netus lacus.', 'UWzCrfaxQi');
-INSERT INTO tweets(tweet_id, body, username)
-VALUES (f4512284-d05b-11e5-b58b-90e2ba530b12, 'Ipsum lorem.', 'UWzCrfaxQi');
-INSERT INTO tweets(tweet_id, body, username)
-VALUES (f3c1c710-d05b-11e5-b58b-90e2ba530b12, 'Massa justo nam quam fames aliquam conubia congue felis penatibus habitasse ante.', 'HuGetcsXbQ');
-INSERT INTO tweets(tweet_id, body, username)
-VALUES (f46a2ed2-d05b-11e5-b58b-90e2ba530b12, 'Magna risus lorem magnis pellentesque posuere.', 'UWzCrfaxQi');
-INSERT INTO tweets(tweet_id, body, username)
-VALUES (f45e689a-d05b-11e5-b58b-90e2ba530b12, 'Magna ipsum congue convallis et tristique commodo nam at turpis nec.', 'UWzCrfaxQi');
-INSERT INTO tweets(tweet_id, body, username)
-VALUES (f3f85c3a-d05b-11e5-b58b-90e2ba530b12, 'Donec augue habitant et class scelerisque enim tempor nullam.', 'lixvTEUaBj');
-INSERT INTO tweets(tweet_id, body, username)
-VALUES (f3ef4b22-d05b-11e5-b58b-90e2ba530b12, 'Metus augue.', 'lixvTEUaBj');
-INSERT INTO tweets(tweet_id, body, username)
-VALUES (f3f15336-d05b-11e5-b58b-90e2ba530b12, 'Metus velit libero vestibulum fames amet nisl gravida.', 'lixvTEUaBj');
-INSERT INTO tweets(tweet_id, body, username)
-VALUES (f411a604-d05b-11e5-b58b-90e2ba530b12, 'Velit nulla posuere ve.', 'MdHQeWbPjB');
-INSERT INTO tweets(tweet_id, body, username)
-VALUES (f3b87b1a-d05b-11e5-b58b-90e2ba530b12, 'Ipsum metus dapibus bibendum ad litora non lectus nam enim.', 'HuGetcsXbQ');
-INSERT INTO tweets(tweet_id, body, username)
-VALUES (f478bb1e-d05b-11e5-b58b-90e2ba530b12, 'Nulla curae netus duis ullamcorper nonummy dui at nonummy.', 'PDKIVoezHs');
-
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', e8754000-80b8-1fe9-8e73-e3698c967ddd, f3c329de-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', 81514000-ef01-1fb5-b70b-f062f003e9d1, f3dbb03a-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', bb894000-086c-1f96-ad8e-67fe0797978a, f3e4182e-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', 5cbd4000-f10b-1f4c-b960-f9185aad7250, f4167044-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', a73d4000-9b5c-1f41-8254-47df9ce1eacd, f475371e-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', fd454000-54a7-1f41-9a41-70c951024c56, f4633a14-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', ab1d4000-b568-1f38-9748-4862d395e63c, f4377dde-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', c7114000-f202-1f37-bbaa-f90cb354db2c, f453c840-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', bea34000-40d6-1f2e-a8ac-fc342eb53d20, f4690e26-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', 3f314000-dd0c-1f1d-98d2-95bff58e6bdb, f3d3d4dc-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', e9194000-3674-1f10-9326-fef1e3078e33, f3dcff80-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', 15914000-506d-1f0a-a101-b8d0b89799b1, f3cd759c-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', a3d94000-a86e-1eef-98e0-94b6ac5b030e, f3e56ca6-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', 38394000-343e-1ed5-91fb-72eb957744d2, f45a5eee-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', b4514000-01b7-1eb1-aa00-a725a56435fc, f3e1939c-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', 2ca94000-7566-1e9b-86f6-856943b771d2, f3f4f2a2-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', 5db54000-1516-1e71-b66a-ed80d7c29155, f4020438-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', bdd94000-4964-1e68-83a3-8be6704361fa, f460e070-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', 07794000-cc8a-1e5a-9e67-d8789072d13f, f3f7680c-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', 31514000-52a1-1e46-b62e-33371ec3c2f5, f3ee4862-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', 7a4d4000-3e76-1e31-bfd6-3467b0e60088, f47bb1ac-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', 07214000-df9a-1e20-88a6-a4e897bdf237, f3f04aea-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', e5794000-df7c-1e14-bc95-396ba65637d7, f40798a8-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', 7b594000-d4e7-1dd2-99e9-4e86ea899816, f4648266-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', e4c14000-c9ec-1dad-8db5-51426cf56079, f45ba60a-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', 4a414000-4831-1daa-bf4c-a526c6dedd45, f43f0040-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', 3e614000-2d20-1d7a-954c-d946629b1fd7, f3ef4b22-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', bd4b4000-ac05-1d77-b833-1b524444cf8e, f4417bf4-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', e1d14000-9f19-1d6d-bccb-97b0333d3d25, f412d72c-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', e0194000-52b9-1d63-9837-45e31a1fc132, f44bac0a-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', 9f494000-d3cf-1d41-ae79-1a2c29685e47, f3ed3756-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', d95b4000-74cc-1d3f-a5ef-45c55c6a87e8, f45e689a-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', 37814000-e445-1d17-8ce2-059063ba673c, f3bdb102-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', d6e14000-196b-1cec-be0d-24723fc4684a, f417b076-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', a5d54000-e045-1cd5-9163-b5a343177817, f44028b2-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', fa394000-4ecb-1ccf-b665-7a142d737360, f3fec26e-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', 0f894000-472c-1ca5-9b62-eb5c2e2db9d8, f3e032f4-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', 7f914000-4bc4-1c8e-bd2e-e1ab747e4575, f41d6638-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', dcf14000-9bf1-1c7d-b37c-cf705e6e6863, f4205384-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', 4db14000-76a3-1c7d-b01e-4733cd36aaf1, f44a13fe-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', 0c914000-3534-1c4d-9a0c-87a0521e530e, f3d29edc-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', ba614000-538b-1c25-93fd-6eb18ce6a927, f3e84e08-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', cef94000-e6a0-1c1f-a42e-735ce54741ef, f411a604-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', 30dd4000-78e2-1bc5-949b-af81ca816236, f3fc513c-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', 73534000-fc88-1bb0-827c-a7bd6e96b8b9, f458ffa4-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', 7a114000-1800-1ba9-be1e-acc9d49fc617, f42bcfc0-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', f2d94000-8c12-1b78-bdb4-ee7887df676a, f4104264-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', f8794000-e0c1-1b72-a469-6bc555173118, f442c130-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', a1e14000-752d-1b6a-85b3-296e7e055a91, f44d0aaa-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', b4114000-ec6b-1b40-b1df-3bfc1128ba7c, f4064a70-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', 22c54000-5f0a-1b40-80d0-11672d8ec032, f3c472e4-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', c2414000-7663-1b0c-9e61-8e6aa7bf4ca6, f3e2df7c-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', 92214000-191a-1b0b-8bfb-6d76ba406467, f3f38016-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', 7dd94000-bccf-1adf-8997-9b725bb5c949, f47a041a-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', c7314000-4882-1acd-af96-e8bdd974193b, f3c5c608-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', a2a94000-b7d3-1aaa-94cc-021e635beee7, f3de0664-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', be954000-ef52-1a9b-abc6-49d47c0fd19b, f4718c7c-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', f9594000-a017-1a67-a25a-62b6c0b037d0, f40eee5a-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', 28894000-e7ea-1a5a-8585-c19ff031cd1b, f3fd904c-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', 56e94000-ca02-1a0f-9d36-fd47837f58b5, f3d6a892-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', 1b414000-2919-19f5-9f18-3cec58362e0c, f42a861a-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', 15b14000-9e96-19d2-ba35-7b5669fd5465, f42e664a-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', c7014000-70fb-19cc-9be1-6dc17ff109bc, f3f15336-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', 2a514000-e54e-19a7-886a-b5843a203bf3, f41b8e44-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', 81d14000-f2c5-19a4-920a-049fd5c989a8, f3e9c404-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', 32754000-094d-1991-a4de-006b9d349ae0, f3c77908-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', 0a5d4000-9b69-198a-bb74-1954d3de4441, f43a34ca-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', b6994000-5e80-1987-a836-f526c37f8152, f41a64ce-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', 86bd4000-38d9-1968-9153-7079fdc144dc, f3c8ae54-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', 84e94000-a93c-1948-bd54-7e135ad469d7, f46dd9ba-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', f3914000-ee9d-1911-b55c-1ef9478a3454, f3d16166-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', ecf94000-43e8-18f4-bd96-8ff58e628830, f3cc2c50-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', 99d94000-bb02-1894-bff5-57f89aad5b0a, f408a716-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', 851d4000-e432-188b-ae29-9466c9bea87d, f46a2ed2-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', fee94000-a0ab-1889-bb2d-cbf3badae5b2, f42369de-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', 0ac94000-f683-187d-8874-c698342eb895, f3fad4ec-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', ddc54000-07b3-187c-b500-e70e8ebe4840, f42d1bdc-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', 4ac94000-b570-1868-ab3e-0e948916bf17, f44783aa-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', 35194000-5d51-1861-9d3d-f90b9046dd31, f44fb336-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', 76f14000-2b6a-1843-99de-a145935f5468, f3d0196e-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', b1154000-451b-182b-ae79-49b6668984da, f478bb1e-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', 87614000-7eb5-182a-b687-78bffa0fc066, f455148e-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', b6f54000-a61f-1821-85bd-48e12b10cb4b, f45f8842-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', f9f14000-0503-1803-b0b7-4c4fef07bcbe, f3c05236-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', 14314000-4eca-17e3-90b9-cadc49af5d35, f3bb6d2a-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', a4594000-8014-17bb-ba7d-f581995f27f9, f3d8d6c6-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', 0b414000-4fe9-17aa-843e-5defdd5421d2, f409c01a-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', ba214000-b9db-179a-9f25-ad8de44681c2, f3c1c710-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', 09f94000-8df4-1774-8cf1-08ef9359097b, f43266fa-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', 52b54000-260a-175a-b69f-0d3500b91520, f466da5c-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', 32094000-eabf-1739-be54-0c2c85b322c2, f404d474-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', 9dc94000-0997-1718-8dea-ab0b89466384, f4152784-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', a2214000-0067-16fe-9b00-2526ab213ed3, f413ddde-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', 7cb14000-20a1-16ec-8c5d-4b1cd23c74b8, f465d5d0-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', 4e314000-4ef9-1680-9b43-7fd2d367190b, f47ce8ce-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', ffa14000-9d13-1668-b2b7-62023271b76e, f4512284-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', 676d4000-04e2-1652-a4c6-2a99f6c09a53, f44e5d24-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', 01d94000-b0ed-15e9-bb5b-85c46c22a536, f3da3c1e-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', c3434000-584c-15df-8020-d339aa05e6c9, f43b6dfe-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', 23d54000-05c9-15d8-8614-fd6affe0ecc7, f43dde2c-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', 08d94000-b7e2-15d3-88bd-f3e43dd604d6, f3d55672-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', ac514000-fe69-15ce-883e-2d8c561aa0a6, f3b44cf2-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', f2f14000-627e-15cc-a531-1f398f1fc120, f448c602-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', 29354000-f169-15c5-a457-485aa61498a8, f44403d8-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', 988f4000-0292-15c0-9317-c8f64f3fd0fe, f46b7602-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', 37c94000-3660-15bb-9b91-d324146c1335, f4361a2a-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', eb654000-8ecf-1592-9994-c850027f951b, f4621080-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', 39e54000-8129-158c-9b4a-585973b7b98e, f4253796-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', 43714000-286a-1578-904c-9c6e423bc32d, f3f85c3a-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', 9dc94000-43fb-1569-8e22-f8ee2f8f1ba6, f44525e2-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', 42494000-73fa-1554-841e-e0462e899d00, f45d3e70-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', 44f14000-6dde-1533-8e6c-9769ddbef30c, f3f25bbe-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', 47154000-d6ed-14ec-a5a8-2b450734331a, f429301c-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', 80f14000-8676-14dd-ad0d-8b709997fea4, f4190c64-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', c9f94000-0bb1-1494-a242-def3bb8ad731, f3eb09fe-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', 51714000-a77a-1486-babe-518e4a300603, f3d7a562-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', 21a94000-f6a7-1482-b14e-9f0333b4a477, f3b87b1a-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', 35714000-98ef-1457-b846-ffacab9b7397, f46f31fc-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', 3b8d4000-a57c-1450-abea-1714095272f4, f40383ee-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', 0cb94000-e922-1428-aa47-ebd11153147b, f4778532-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', f37d4000-efa1-13f0-b47c-03013b58f771, f457b338-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', 6a814000-fc6f-13e4-b244-e27eaffa8179, f4767534-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', 25a14000-d532-13cb-8bc7-896cdcb464ca, f3e6c9ca-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', 8dc94000-ff56-137b-84d4-3fb5a4f6efac, f433b410-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', 26594000-de9a-1360-8ebb-12aa4ed61486, f438eade-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', c5554000-9bcb-1339-99ed-bbf318ba211a, f40d784a-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', 50b94000-273e-126f-997e-dcaa37301bb5, f3f67258-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', 582d4000-1af7-121c-b849-d468b28a0dfa, f40acb5e-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', 24414000-dadd-1204-9987-c85e8667cc05, f4268d12-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', 14854000-e2ed-117f-9e2f-8ad33d3c1300, f3f987a4-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', e0d94000-ccb4-116f-a2f7-5f167eba94dd, f467f4a0-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', 56254000-1cd7-1169-b6a0-dfdf3d95ac5b, f4707f4e-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', 15dd4000-ab62-1156-8403-24ab8c0b41fd, f43c9620-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', 5fc94000-86e3-114f-8af2-761dcafd2d46, f3cec960-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', 0d194000-6cc2-1145-950e-275ad8812a0f, f4740484-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', 34ed4000-2472-1117-856f-df16ec09a322, f4526b80-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', 17914000-9724-10f8-9351-0478312e375f, f4567860-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', d7754000-3ad6-10f7-95db-5557d99c9650, f427dfa0-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', f0e94000-3df6-10f6-8c77-03219898403f, f3fff77e-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', f21f4000-50d5-10a4-b28c-5b4b4edb4cc4, f3cac086-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', 36b14000-181c-1095-96a9-61056836b162, f44658d6-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', 63014000-f61b-1077-a6c0-6e3d6787bccf, f472bd9a-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', be794000-d584-1051-a524-7989e62f9042, f3df0c44-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', 159b4000-de07-1039-8def-74ecccc8ec22, f434eb3c-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', 9bad4000-eccc-1032-a57a-3069e9121e05, f46c9582-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('!PUBLIC!', b4194000-0681-101c-b64a-e02a5b333ae8, f40c175c-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('fOGctyIDES', 81514000-ef01-1fb5-b70b-f062f003e9d1, f3dbb03a-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('fOGctyIDES', bb894000-086c-1f96-ad8e-67fe0797978a, f3e4182e-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('fOGctyIDES', e9194000-3674-1f10-9326-fef1e3078e33, f3dcff80-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('fOGctyIDES', a3d94000-a86e-1eef-98e0-94b6ac5b030e, f3e56ca6-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('fOGctyIDES', b4514000-01b7-1eb1-aa00-a725a56435fc, f3e1939c-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('fOGctyIDES', 0f894000-472c-1ca5-9b62-eb5c2e2db9d8, f3e032f4-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('fOGctyIDES', c2414000-7663-1b0c-9e61-8e6aa7bf4ca6, f3e2df7c-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('fOGctyIDES', a2a94000-b7d3-1aaa-94cc-021e635beee7, f3de0664-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('fOGctyIDES', 56e94000-ca02-1a0f-9d36-fd47837f58b5, f3d6a892-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('fOGctyIDES', a4594000-8014-17bb-ba7d-f581995f27f9, f3d8d6c6-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('fOGctyIDES', 01d94000-b0ed-15e9-bb5b-85c46c22a536, f3da3c1e-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('fOGctyIDES', 51714000-a77a-1486-babe-518e4a300603, f3d7a562-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('fOGctyIDES', 25a14000-d532-13cb-8bc7-896cdcb464ca, f3e6c9ca-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('fOGctyIDES', be794000-d584-1051-a524-7989e62f9042, f3df0c44-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('cWIZrdKQmh', 5db54000-1516-1e71-b66a-ed80d7c29155, f4020438-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('cWIZrdKQmh', e5794000-df7c-1e14-bc95-396ba65637d7, f40798a8-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('cWIZrdKQmh', fa394000-4ecb-1ccf-b665-7a142d737360, f3fec26e-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('cWIZrdKQmh', 30dd4000-78e2-1bc5-949b-af81ca816236, f3fc513c-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('cWIZrdKQmh', b4114000-ec6b-1b40-b1df-3bfc1128ba7c, f4064a70-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('cWIZrdKQmh', 28894000-e7ea-1a5a-8585-c19ff031cd1b, f3fd904c-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('cWIZrdKQmh', 99d94000-bb02-1894-bff5-57f89aad5b0a, f408a716-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('cWIZrdKQmh', 0ac94000-f683-187d-8874-c698342eb895, f3fad4ec-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('cWIZrdKQmh', 0b414000-4fe9-17aa-843e-5defdd5421d2, f409c01a-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('cWIZrdKQmh', 32094000-eabf-1739-be54-0c2c85b322c2, f404d474-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('cWIZrdKQmh', 3b8d4000-a57c-1450-abea-1714095272f4, f40383ee-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('cWIZrdKQmh', 582d4000-1af7-121c-b849-d468b28a0dfa, f40acb5e-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('cWIZrdKQmh', 14854000-e2ed-117f-9e2f-8ad33d3c1300, f3f987a4-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('cWIZrdKQmh', f0e94000-3df6-10f6-8c77-03219898403f, f3fff77e-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('lixvTEUaBj', 2ca94000-7566-1e9b-86f6-856943b771d2, f3f4f2a2-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('lixvTEUaBj', 07794000-cc8a-1e5a-9e67-d8789072d13f, f3f7680c-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('lixvTEUaBj', 31514000-52a1-1e46-b62e-33371ec3c2f5, f3ee4862-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('lixvTEUaBj', 07214000-df9a-1e20-88a6-a4e897bdf237, f3f04aea-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('lixvTEUaBj', 3e614000-2d20-1d7a-954c-d946629b1fd7, f3ef4b22-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('lixvTEUaBj', 9f494000-d3cf-1d41-ae79-1a2c29685e47, f3ed3756-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('lixvTEUaBj', 92214000-191a-1b0b-8bfb-6d76ba406467, f3f38016-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('lixvTEUaBj', c7014000-70fb-19cc-9be1-6dc17ff109bc, f3f15336-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('lixvTEUaBj', 43714000-286a-1578-904c-9c6e423bc32d, f3f85c3a-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('lixvTEUaBj', 44f14000-6dde-1533-8e6c-9769ddbef30c, f3f25bbe-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('lixvTEUaBj', 50b94000-273e-126f-997e-dcaa37301bb5, f3f67258-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('PNexGqHdVE', ba614000-538b-1c25-93fd-6eb18ce6a927, f3e84e08-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('PNexGqHdVE', 81d14000-f2c5-19a4-920a-049fd5c989a8, f3e9c404-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('PNexGqHdVE', c9f94000-0bb1-1494-a242-def3bb8ad731, f3eb09fe-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('PDKIVoezHs', 7a4d4000-3e76-1e31-bfd6-3467b0e60088, f47bb1ac-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('PDKIVoezHs', 7dd94000-bccf-1adf-8997-9b725bb5c949, f47a041a-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('PDKIVoezHs', b1154000-451b-182b-ae79-49b6668984da, f478bb1e-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('PDKIVoezHs', 4e314000-4ef9-1680-9b43-7fd2d367190b, f47ce8ce-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('HuGetcsXbQ', 37814000-e445-1d17-8ce2-059063ba673c, f3bdb102-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('HuGetcsXbQ', f9f14000-0503-1803-b0b7-4c4fef07bcbe, f3c05236-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('HuGetcsXbQ', 14314000-4eca-17e3-90b9-cadc49af5d35, f3bb6d2a-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('HuGetcsXbQ', ba214000-b9db-179a-9f25-ad8de44681c2, f3c1c710-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('HuGetcsXbQ', ac514000-fe69-15ce-883e-2d8c561aa0a6, f3b44cf2-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('HuGetcsXbQ', 21a94000-f6a7-1482-b14e-9f0333b4a477, f3b87b1a-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('MdHQeWbPjB', 5cbd4000-f10b-1f4c-b960-f9185aad7250, f4167044-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('MdHQeWbPjB', e1d14000-9f19-1d6d-bccb-97b0333d3d25, f412d72c-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('MdHQeWbPjB', d6e14000-196b-1cec-be0d-24723fc4684a, f417b076-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('MdHQeWbPjB', 7f914000-4bc4-1c8e-bd2e-e1ab747e4575, f41d6638-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('MdHQeWbPjB', dcf14000-9bf1-1c7d-b37c-cf705e6e6863, f4205384-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('MdHQeWbPjB', cef94000-e6a0-1c1f-a42e-735ce54741ef, f411a604-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('MdHQeWbPjB', 7a114000-1800-1ba9-be1e-acc9d49fc617, f42bcfc0-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('MdHQeWbPjB', f2d94000-8c12-1b78-bdb4-ee7887df676a, f4104264-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('MdHQeWbPjB', f9594000-a017-1a67-a25a-62b6c0b037d0, f40eee5a-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('MdHQeWbPjB', 1b414000-2919-19f5-9f18-3cec58362e0c, f42a861a-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('MdHQeWbPjB', 2a514000-e54e-19a7-886a-b5843a203bf3, f41b8e44-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('MdHQeWbPjB', b6994000-5e80-1987-a836-f526c37f8152, f41a64ce-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('MdHQeWbPjB', fee94000-a0ab-1889-bb2d-cbf3badae5b2, f42369de-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('MdHQeWbPjB', 9dc94000-0997-1718-8dea-ab0b89466384, f4152784-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('MdHQeWbPjB', a2214000-0067-16fe-9b00-2526ab213ed3, f413ddde-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('MdHQeWbPjB', 39e54000-8129-158c-9b4a-585973b7b98e, f4253796-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('MdHQeWbPjB', 47154000-d6ed-14ec-a5a8-2b450734331a, f429301c-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('MdHQeWbPjB', 80f14000-8676-14dd-ad0d-8b709997fea4, f4190c64-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('MdHQeWbPjB', c5554000-9bcb-1339-99ed-bbf318ba211a, f40d784a-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('MdHQeWbPjB', 24414000-dadd-1204-9987-c85e8667cc05, f4268d12-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('MdHQeWbPjB', d7754000-3ad6-10f7-95db-5557d99c9650, f427dfa0-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('MdHQeWbPjB', b4194000-0681-101c-b64a-e02a5b333ae8, f40c175c-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('UWzCrfaxQi', a73d4000-9b5c-1f41-8254-47df9ce1eacd, f475371e-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('UWzCrfaxQi', fd454000-54a7-1f41-9a41-70c951024c56, f4633a14-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('UWzCrfaxQi', ab1d4000-b568-1f38-9748-4862d395e63c, f4377dde-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('UWzCrfaxQi', c7114000-f202-1f37-bbaa-f90cb354db2c, f453c840-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('UWzCrfaxQi', bea34000-40d6-1f2e-a8ac-fc342eb53d20, f4690e26-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('UWzCrfaxQi', 38394000-343e-1ed5-91fb-72eb957744d2, f45a5eee-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('UWzCrfaxQi', bdd94000-4964-1e68-83a3-8be6704361fa, f460e070-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('UWzCrfaxQi', 7b594000-d4e7-1dd2-99e9-4e86ea899816, f4648266-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('UWzCrfaxQi', e4c14000-c9ec-1dad-8db5-51426cf56079, f45ba60a-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('UWzCrfaxQi', 4a414000-4831-1daa-bf4c-a526c6dedd45, f43f0040-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('UWzCrfaxQi', bd4b4000-ac05-1d77-b833-1b524444cf8e, f4417bf4-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('UWzCrfaxQi', e0194000-52b9-1d63-9837-45e31a1fc132, f44bac0a-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('UWzCrfaxQi', d95b4000-74cc-1d3f-a5ef-45c55c6a87e8, f45e689a-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('UWzCrfaxQi', a5d54000-e045-1cd5-9163-b5a343177817, f44028b2-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('UWzCrfaxQi', 4db14000-76a3-1c7d-b01e-4733cd36aaf1, f44a13fe-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('UWzCrfaxQi', 73534000-fc88-1bb0-827c-a7bd6e96b8b9, f458ffa4-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('UWzCrfaxQi', f8794000-e0c1-1b72-a469-6bc555173118, f442c130-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('UWzCrfaxQi', a1e14000-752d-1b6a-85b3-296e7e055a91, f44d0aaa-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('UWzCrfaxQi', be954000-ef52-1a9b-abc6-49d47c0fd19b, f4718c7c-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('UWzCrfaxQi', 15b14000-9e96-19d2-ba35-7b5669fd5465, f42e664a-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('UWzCrfaxQi', 0a5d4000-9b69-198a-bb74-1954d3de4441, f43a34ca-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('UWzCrfaxQi', 84e94000-a93c-1948-bd54-7e135ad469d7, f46dd9ba-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('UWzCrfaxQi', 851d4000-e432-188b-ae29-9466c9bea87d, f46a2ed2-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('UWzCrfaxQi', ddc54000-07b3-187c-b500-e70e8ebe4840, f42d1bdc-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('UWzCrfaxQi', 4ac94000-b570-1868-ab3e-0e948916bf17, f44783aa-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('UWzCrfaxQi', 35194000-5d51-1861-9d3d-f90b9046dd31, f44fb336-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('UWzCrfaxQi', 87614000-7eb5-182a-b687-78bffa0fc066, f455148e-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('UWzCrfaxQi', b6f54000-a61f-1821-85bd-48e12b10cb4b, f45f8842-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('UWzCrfaxQi', 09f94000-8df4-1774-8cf1-08ef9359097b, f43266fa-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('UWzCrfaxQi', 52b54000-260a-175a-b69f-0d3500b91520, f466da5c-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('UWzCrfaxQi', 7cb14000-20a1-16ec-8c5d-4b1cd23c74b8, f465d5d0-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('UWzCrfaxQi', ffa14000-9d13-1668-b2b7-62023271b76e, f4512284-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('UWzCrfaxQi', 676d4000-04e2-1652-a4c6-2a99f6c09a53, f44e5d24-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('UWzCrfaxQi', c3434000-584c-15df-8020-d339aa05e6c9, f43b6dfe-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('UWzCrfaxQi', 23d54000-05c9-15d8-8614-fd6affe0ecc7, f43dde2c-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('UWzCrfaxQi', f2f14000-627e-15cc-a531-1f398f1fc120, f448c602-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('UWzCrfaxQi', 29354000-f169-15c5-a457-485aa61498a8, f44403d8-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('UWzCrfaxQi', 988f4000-0292-15c0-9317-c8f64f3fd0fe, f46b7602-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('UWzCrfaxQi', 37c94000-3660-15bb-9b91-d324146c1335, f4361a2a-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('UWzCrfaxQi', eb654000-8ecf-1592-9994-c850027f951b, f4621080-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('UWzCrfaxQi', 9dc94000-43fb-1569-8e22-f8ee2f8f1ba6, f44525e2-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('UWzCrfaxQi', 42494000-73fa-1554-841e-e0462e899d00, f45d3e70-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('UWzCrfaxQi', 35714000-98ef-1457-b846-ffacab9b7397, f46f31fc-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('UWzCrfaxQi', 0cb94000-e922-1428-aa47-ebd11153147b, f4778532-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('UWzCrfaxQi', f37d4000-efa1-13f0-b47c-03013b58f771, f457b338-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('UWzCrfaxQi', 6a814000-fc6f-13e4-b244-e27eaffa8179, f4767534-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('UWzCrfaxQi', 8dc94000-ff56-137b-84d4-3fb5a4f6efac, f433b410-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('UWzCrfaxQi', 26594000-de9a-1360-8ebb-12aa4ed61486, f438eade-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('UWzCrfaxQi', e0d94000-ccb4-116f-a2f7-5f167eba94dd, f467f4a0-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('UWzCrfaxQi', 56254000-1cd7-1169-b6a0-dfdf3d95ac5b, f4707f4e-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('UWzCrfaxQi', 15dd4000-ab62-1156-8403-24ab8c0b41fd, f43c9620-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('UWzCrfaxQi', 0d194000-6cc2-1145-950e-275ad8812a0f, f4740484-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('UWzCrfaxQi', 34ed4000-2472-1117-856f-df16ec09a322, f4526b80-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('UWzCrfaxQi', 17914000-9724-10f8-9351-0478312e375f, f4567860-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('UWzCrfaxQi', 36b14000-181c-1095-96a9-61056836b162, f44658d6-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('UWzCrfaxQi', 63014000-f61b-1077-a6c0-6e3d6787bccf, f472bd9a-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('UWzCrfaxQi', 159b4000-de07-1039-8def-74ecccc8ec22, f434eb3c-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('UWzCrfaxQi', 9bad4000-eccc-1032-a57a-3069e9121e05, f46c9582-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('JmuhsAaMdw', 3f314000-dd0c-1f1d-98d2-95bff58e6bdb, f3d3d4dc-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('JmuhsAaMdw', 15914000-506d-1f0a-a101-b8d0b89799b1, f3cd759c-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('JmuhsAaMdw', 0c914000-3534-1c4d-9a0c-87a0521e530e, f3d29edc-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('JmuhsAaMdw', f3914000-ee9d-1911-b55c-1ef9478a3454, f3d16166-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('JmuhsAaMdw', ecf94000-43e8-18f4-bd96-8ff58e628830, f3cc2c50-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('JmuhsAaMdw', 76f14000-2b6a-1843-99de-a145935f5468, f3d0196e-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('JmuhsAaMdw', 08d94000-b7e2-15d3-88bd-f3e43dd604d6, f3d55672-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('JmuhsAaMdw', 5fc94000-86e3-114f-8af2-761dcafd2d46, f3cec960-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('JmuhsAaMdw', f21f4000-50d5-10a4-b28c-5b4b4edb4cc4, f3cac086-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('nFtPHprNOd', e8754000-80b8-1fe9-8e73-e3698c967ddd, f3c329de-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('nFtPHprNOd', 22c54000-5f0a-1b40-80d0-11672d8ec032, f3c472e4-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('nFtPHprNOd', c7314000-4882-1acd-af96-e8bdd974193b, f3c5c608-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('nFtPHprNOd', 32754000-094d-1991-a4de-006b9d349ae0, f3c77908-d05b-11e5-b58b-90e2ba530b12);
-INSERT INTO userline(username, time, tweet_id)
-VALUES ('nFtPHprNOd', 86bd4000-38d9-1968-9153-7079fdc144dc, f3c8ae54-d05b-11e5-b58b-90e2ba530b12);
-
-INSERT INTO users(username, password)
-VALUES ('fOGctyIDES', 'cGfDNvOUWH');
-INSERT INTO users(username, password)
-VALUES ('cWIZrdKQmh', 'haENHSnBMF');
-INSERT INTO users(username, password)
-VALUES ('lixvTEUaBj', 'gmDSxlydEL');
-INSERT INTO users(username, password)
-VALUES ('PNexGqHdVE', 'ZSBNHcIrvC');
-INSERT INTO users(username, password)
-VALUES ('PDKIVoezHs', 'UINXFlcAod');
-INSERT INTO users(username, password)
-VALUES ('HuGetcsXbQ', 'fXwYWMaSjc');
-INSERT INTO users(username, password)
-VALUES ('MdHQeWbPjB', 'QlaxOTioNZ');
-INSERT INTO users(username, password)
-VALUES ('UWzCrfaxQi', 'EzyQckbKOh');
-INSERT INTO users(username, password)
-VALUES ('JmuhsAaMdw', 'SQbIaqvzfW');
-INSERT INTO users(username, password)
-VALUES ('nFtPHprNOd', 'CESzsfTALr');
diff --git a/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/CottontailEntity.java b/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/CottontailEntity.java
index 1c25b9225d..f4a3811a09 100644
--- a/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/CottontailEntity.java
+++ b/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/CottontailEntity.java
@@ -26,9 +26,9 @@
 import org.polypheny.db.adapter.java.AbstractQueryableEntity;
 import org.polypheny.db.adapter.java.JavaTypeFactory;
 import org.polypheny.db.algebra.AlgNode;
-import org.polypheny.db.algebra.core.Modify;
-import org.polypheny.db.algebra.core.Modify.Operation;
-import org.polypheny.db.algebra.logical.relational.LogicalModify;
+import org.polypheny.db.algebra.core.relational.RelModify;
+import org.polypheny.db.algebra.core.common.Modify.Operation;
+import org.polypheny.db.algebra.logical.relational.LogicalRelModify;
 import org.polypheny.db.algebra.type.AlgDataType;
 import org.polypheny.db.algebra.type.AlgDataTypeFactory;
 import org.polypheny.db.algebra.type.AlgProtoDataType;
@@ -109,7 +109,7 @@ public String toString() {
 
 
     @Override
-    public Modify toModificationAlg(
+    public RelModify toModificationAlg(
             AlgOptCluster cluster,
             AlgOptEntity table,
             CatalogReader catalogReader,
@@ -119,7 +119,7 @@ public Modify toModificationAlg(
             List sourceExpressionList,
             boolean flattened ) {
         this.cottontailSchema.getConvention().register( cluster.getPlanner() );
-        return new LogicalModify(
+        return new LogicalRelModify(
                 cluster,
                 cluster.traitSetOf( Convention.NONE ),
                 table,
diff --git a/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/CottontailPlugin.java b/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/CottontailPlugin.java
index 540b8cfdf2..4f25288730 100644
--- a/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/CottontailPlugin.java
+++ b/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/CottontailPlugin.java
@@ -50,9 +50,9 @@
 import org.polypheny.db.catalog.entity.CatalogColumnPlacement;
 import org.polypheny.db.catalog.entity.CatalogIndex;
 import org.polypheny.db.catalog.entity.CatalogPartitionPlacement;
-import org.polypheny.db.catalog.entity.CatalogTable;
+import org.polypheny.db.catalog.entity.logical.LogicalTable;
+import org.polypheny.db.catalog.entity.physical.PhysicalTable;
 import org.polypheny.db.prepare.Context;
-import org.polypheny.db.schema.Entity;
 import org.polypheny.db.schema.Namespace;
 import org.polypheny.db.schema.SchemaPlus;
 import org.polypheny.db.transaction.PolyXid;
@@ -213,7 +213,7 @@ public void createNewSchema( SchemaPlus rootSchema, String name, Long id ) {
 
 
     @Override
-    public Entity createTableSchema( CatalogTable combinedTable, List columnPlacementsOnStore, CatalogPartitionPlacement partitionPlacement ) {
+    public PhysicalTable createTableSchema( PhysicalTable boilerplate ) {
         final AlgDataTypeFactory typeFactory = new PolyTypeFactoryImpl( AlgDataTypeSystem.DEFAULT );
         final AlgDataTypeFactory.Builder fieldInfo = typeFactory.builder();
         List logicalColumnNames = new LinkedList<>();
@@ -261,7 +261,7 @@ public Namespace getCurrentSchema() {
 
 
     @Override
-    public void createTable( Context context, CatalogTable combinedTable, List partitionIds ) {
+    public void createTable( Context context, LogicalTable combinedTable, List partitionIds ) {
         /* Begin or continue Cottontail DB transaction. */
         final long txId = this.wrapper.beginOrContinue( context.getStatement().getTransaction() );
 
@@ -331,7 +331,7 @@ private List buildColumnDefinitions( List partitionIds ) {
 
 
     @Override
-    public void dropTable( Context context, CatalogTable combinedTable, List partitionIds ) {
+    public void dropTable( Context context, LogicalTable combinedTable, List partitionIds ) {
         /* Begin or continue Cottontail DB transaction. */
         final long txId = this.wrapper.beginOrContinue( context.getStatement().getTransaction() );
 
@@ -355,7 +355,7 @@ public void dropTable( Context context, CatalogTable combinedTable, List p
 
 
     @Override
-    public void addColumn( Context context, CatalogTable catalogTable, CatalogColumn catalogColumn ) {
+    public void addColumn( Context context, LogicalTable catalogTable, CatalogColumn catalogColumn ) {
         /* Begin or continue Cottontail DB transaction. */
         final long txId = this.wrapper.beginOrContinue( context.getStatement().getTransaction() );
 
@@ -458,7 +458,7 @@ public void dropColumn( Context context, CatalogColumnPlacement columnPlacement
         final List placements = this.catalog.getColumnPlacementsOnAdapterPerTable( this.getAdapterId(), columnPlacement.tableId );
         placements.removeIf( it -> it.columnId == columnPlacement.columnId );
         final List columns = this.buildColumnDefinitions( placements );
-        final CatalogTable catalogTable = catalog.getTable( placements.get( 0 ).tableId );
+        final LogicalTable catalogTable = catalog.getTable( placements.get( 0 ).tableId );
         final List partitionPlacements = catalog.getPartitionPlacementsByTableOnAdapter( getAdapterId(), catalogTable.id );
 
         for ( CatalogPartitionPlacement partitionPlacement : partitionPlacements ) {
@@ -607,7 +607,7 @@ public void rollback( PolyXid xid ) {
 
 
     @Override
-    public void truncate( Context context, CatalogTable table ) {
+    public void truncate( Context context, LogicalTable table ) {
         /* Begin or continue Cottontail DB transaction. */
         final long txId = this.wrapper.beginOrContinue( context.getStatement().getTransaction() );
 
@@ -715,7 +715,7 @@ public AvailableIndexMethod getDefaultIndexMethod() {
 
 
     @Override
-    public List getFunctionalIndexes( CatalogTable catalogTable ) {
+    public List getFunctionalIndexes( LogicalTable catalogTable ) {
         return ImmutableList.of();
     }
 
diff --git a/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/algebra/CottontailScan.java b/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/algebra/CottontailScan.java
index 137bf97fda..2fe1b757e0 100644
--- a/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/algebra/CottontailScan.java
+++ b/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/algebra/CottontailScan.java
@@ -22,7 +22,7 @@
 import org.polypheny.db.adapter.cottontail.CottontailEntity;
 import org.polypheny.db.adapter.cottontail.algebra.CottontailAlg.CottontailImplementContext.QueryType;
 import org.polypheny.db.algebra.AlgNode;
-import org.polypheny.db.algebra.core.Scan;
+import org.polypheny.db.algebra.core.relational.RelScan;
 import org.polypheny.db.algebra.metadata.AlgMetadataQuery;
 import org.polypheny.db.plan.AlgOptCluster;
 import org.polypheny.db.plan.AlgOptCost;
@@ -31,7 +31,7 @@
 import org.polypheny.db.plan.AlgTraitSet;
 
 
-public class CottontailScan extends Scan implements CottontailAlg {
+public class CottontailScan extends RelScan implements CottontailAlg {
 
     protected final CottontailEntity cottontailTable;
 
diff --git a/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/algebra/CottontailTableModify.java b/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/algebra/CottontailTableModify.java
index df2df8fe07..df5d4fc22f 100644
--- a/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/algebra/CottontailTableModify.java
+++ b/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/algebra/CottontailTableModify.java
@@ -32,7 +32,7 @@
 import org.polypheny.db.adapter.cottontail.util.CottontailTypeUtil;
 import org.polypheny.db.algebra.AbstractAlgNode;
 import org.polypheny.db.algebra.AlgNode;
-import org.polypheny.db.algebra.core.Modify;
+import org.polypheny.db.algebra.core.relational.RelModify;
 import org.polypheny.db.algebra.metadata.AlgMetadataQuery;
 import org.polypheny.db.algebra.type.AlgDataTypeField;
 import org.polypheny.db.plan.AlgOptCluster;
@@
-50,7 +50,7 @@ import org.polypheny.db.util.BuiltInMethod; -public class CottontailTableModify extends Modify implements CottontailAlg { +public class CottontailTableModify extends RelModify implements CottontailAlg { public final CottontailEntity cottontailTable; diff --git a/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/rules/CottontailTableModificationRule.java b/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/rules/CottontailTableModificationRule.java index 7c4a6b15ab..bcfe8d3d65 100644 --- a/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/rules/CottontailTableModificationRule.java +++ b/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/rules/CottontailTableModificationRule.java @@ -22,8 +22,8 @@ import org.polypheny.db.adapter.cottontail.algebra.CottontailTableModify; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.UnsupportedFromInsertShuttle; -import org.polypheny.db.algebra.core.Modify; -import org.polypheny.db.algebra.core.Modify.Operation; +import org.polypheny.db.algebra.core.relational.RelModify; +import org.polypheny.db.algebra.core.common.Modify; import org.polypheny.db.plan.AlgOptRule; import org.polypheny.db.plan.AlgOptRuleCall; import org.polypheny.db.plan.AlgTraitSet; @@ -35,18 +35,18 @@ public class CottontailTableModificationRule extends CottontailConverterRule { CottontailTableModificationRule( CottontailConvention out, AlgBuilderFactory algBuilderFactory ) { - super( Modify.class, CottontailTableModificationRule::supports, Convention.NONE, out, algBuilderFactory, "CottontailTableModificationRule:" + out.getName() ); + super( RelModify.class, CottontailTableModificationRule::supports, Convention.NONE, out, algBuilderFactory, "CottontailTableModificationRule:" + out.getName() ); } - private static boolean supports( Modify modify ) { + private static boolean supports( RelModify modify ) { return !modify.isInsert() || !UnsupportedFromInsertShuttle.contains( modify ); } @Override public boolean matches( AlgOptRuleCall call ) { - final Modify modify = call.alg( 0 ); + final RelModify modify = call.alg( 0 ); if ( modify.getEntity().unwrap( CottontailEntity.class ) == null ) { return false; } @@ -54,13 +54,13 @@ public boolean matches( AlgOptRuleCall call ) { if ( !modify.getEntity().unwrap( CottontailEntity.class ).getUnderlyingConvention().equals( this.out ) ) { return false; } - return modify.getOperation() != Operation.MERGE; + return modify.getOperation() != Modify.Operation.MERGE; } @Override public AlgNode convert( AlgNode alg ) { - final Modify modify = (Modify) alg; + final RelModify modify = (RelModify) alg; final ModifiableEntity modifiableTable = modify.getEntity().unwrap( ModifiableEntity.class ); diff --git a/plugins/cql-language/src/main/java/org/polypheny/db/cql/Combiner.java b/plugins/cql-language/src/main/java/org/polypheny/db/cql/Combiner.java index 76ffa8c4a5..d2bc9ba571 100644 --- a/plugins/cql-language/src/main/java/org/polypheny/db/cql/Combiner.java +++ b/plugins/cql-language/src/main/java/org/polypheny/db/cql/Combiner.java @@ -22,7 +22,7 @@ import java.util.Map; import lombok.extern.slf4j.Slf4j; import org.polypheny.db.algebra.core.JoinAlgType; -import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.cql.BooleanGroup.TableOpsBooleanOperator; import org.polypheny.db.cql.exception.InvalidMethodInvocation; import 
org.polypheny.db.cql.exception.InvalidModifierException; @@ -142,8 +142,8 @@ private static String[] getColumnsToJoinOn( TableIndex left, TableIndex right, S } } - CatalogTable leftCatalogTable = left.catalogTable; - CatalogTable rightCatalogTable = right.catalogTable; + LogicalTable leftCatalogTable = left.catalogTable; + LogicalTable rightCatalogTable = right.catalogTable; List columnList = Arrays.asList( columnStrs ); if ( !leftCatalogTable.getColumnNames().containsAll( columnList ) || !rightCatalogTable.getColumnNames().containsAll( columnList ) ) { diff --git a/plugins/cql-language/src/main/java/org/polypheny/db/cql/Cql2RelConverter.java b/plugins/cql-language/src/main/java/org/polypheny/db/cql/Cql2RelConverter.java index 21e96525ee..7c0a0646df 100644 --- a/plugins/cql-language/src/main/java/org/polypheny/db/cql/Cql2RelConverter.java +++ b/plugins/cql-language/src/main/java/org/polypheny/db/cql/Cql2RelConverter.java @@ -33,7 +33,7 @@ import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogColumn; -import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.cql.BooleanGroup.ColumnOpsBooleanOperator; import org.polypheny.db.cql.exception.UnexpectedTypeException; import org.polypheny.db.cql.utils.Tree; @@ -145,7 +145,7 @@ private AlgBuilder generateScan( AlgBuilder algBuilder, RexBuilder rexBuilder ) if ( nodeType == NodeType.DESTINATION_NODE ) { try { if ( treeNode.isLeaf() ) { - CatalogTable catalogTable = treeNode.getExternalNode().catalogTable; + LogicalTable catalogTable = treeNode.getExternalNode().catalogTable; algBuilderAtomicReference.set( algBuilderAtomicReference.get().scan( catalogTable.getNamespaceName(), catalogTable.name ) ); @@ -194,7 +194,7 @@ private AlgBuilder generateProjections( AlgBuilder algBuilder, RexBuilder rexBui try { TableIndex tableIndex = treeNode.getExternalNode(); String columnNamePrefix = tableIndex.fullyQualifiedName + "."; - CatalogTable catalogTable = tableIndex.catalogTable; + LogicalTable catalogTable = tableIndex.catalogTable; for ( Long columnId : catalogTable.fieldIds ) { int ordinal = tableScanColumnOrdinalities.size(); RexNode inputRef = rexBuilder.makeInputRef( baseNode, ordinal ); diff --git a/plugins/cql-language/src/main/java/org/polypheny/db/cql/TableIndex.java b/plugins/cql-language/src/main/java/org/polypheny/db/cql/TableIndex.java index 9e8b30de84..5adddc69e4 100644 --- a/plugins/cql-language/src/main/java/org/polypheny/db/cql/TableIndex.java +++ b/plugins/cql-language/src/main/java/org/polypheny/db/cql/TableIndex.java @@ -18,7 +18,7 @@ import lombok.extern.slf4j.Slf4j; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.exceptions.UnknownDatabaseException; import org.polypheny.db.catalog.exceptions.UnknownSchemaException; import org.polypheny.db.catalog.exceptions.UnknownTableException; @@ -31,13 +31,13 @@ @Slf4j public class TableIndex { - public final CatalogTable catalogTable; + public final LogicalTable catalogTable; public final String fullyQualifiedName; public final String schemaName; public final String tableName; - public TableIndex( final CatalogTable catalogTable, final String schemaName, final String tableName ) { + public TableIndex( final LogicalTable catalogTable, final String schemaName, final String tableName ) { 
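        // Cache the logical table and precompute the fully qualified "schema.table" name.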
        this.catalogTable = catalogTable;
        this.fullyQualifiedName = schemaName + "." + tableName;
        this.schemaName = schemaName;
@@ -49,7 +49,7 @@ public static TableIndex createIndex( String inDatabase, String schemaName, Stri
         try {
             log.debug( "Creating TableIndex." );
             Catalog catalog = Catalog.getInstance();
-            CatalogTable table = catalog.getTable( inDatabase, schemaName, tableName );
+            LogicalTable table = catalog.getTable( inDatabase, schemaName, tableName );
             return new TableIndex( table, schemaName, tableName );
         } catch ( UnknownTableException | UnknownDatabaseException | UnknownSchemaException e ) {
             throw new UnknownIndexException( "Cannot find a underlying table for the specified table name: " + schemaName + "." + tableName + "." );
diff --git a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvScan.java b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvScan.java
index 00a1d4c577..ce16f01cc1 100644
--- a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvScan.java
+++ b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvScan.java
@@ -39,12 +39,13 @@ import org.apache.calcite.linq4j.tree.Primitive;
 import org.polypheny.db.algebra.AlgNode;
 import org.polypheny.db.algebra.AlgWriter;
+import org.polypheny.db.algebra.core.relational.RelScan;
 import org.polypheny.db.algebra.core.common.Scan;
 import org.polypheny.db.algebra.enumerable.EnumerableAlg;
 import org.polypheny.db.algebra.enumerable.EnumerableAlgImplementor;
 import org.polypheny.db.algebra.enumerable.EnumerableConvention;
 import org.polypheny.db.algebra.enumerable.PhysType;
 import org.polypheny.db.algebra.enumerable.PhysTypeImpl;
 import org.polypheny.db.algebra.metadata.AlgMetadataQuery;
 import org.polypheny.db.algebra.type.AlgDataType;
 import org.polypheny.db.algebra.type.AlgDataTypeFactory;
@@ -61,7 +62,7 @@
 *
 * Like any table scan, it serves as a leaf node of a query tree.
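 * The {@code fields} array holds the ordinals of the columns this scan produces.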
*/ -public class CsvScan extends Scan implements EnumerableAlg { +public class CsvScan extends RelScan implements EnumerableAlg { final CsvTranslatableTable csvTable; final int[] fields; diff --git a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSchema.java b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSchema.java index 65c35197d9..de0311f1ca 100644 --- a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSchema.java +++ b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSchema.java @@ -49,7 +49,7 @@ import org.polypheny.db.catalog.entity.CatalogColumn; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; -import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.schema.Entity; import org.polypheny.db.schema.Namespace.Schema; import org.polypheny.db.schema.impl.AbstractNamespace; @@ -83,7 +83,7 @@ public CsvSchema( long id, URL directoryUrl, CsvTable.Flavor flavor ) { } - public Entity createCsvTable( CatalogTable catalogTable, List columnPlacementsOnStore, CsvSource csvSource, CatalogPartitionPlacement partitionPlacement ) { + public Entity createCsvTable( LogicalTable catalogTable, List columnPlacementsOnStore, CsvSource csvSource, CatalogPartitionPlacement partitionPlacement ) { final AlgDataTypeFactory typeFactory = new PolyTypeFactoryImpl( AlgDataTypeSystem.DEFAULT ); final AlgDataTypeFactory.Builder fieldInfo = typeFactory.builder(); List fieldTypes = new LinkedList<>(); diff --git a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java index 4c51e7cebc..167453fc01 100644 --- a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java +++ b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java @@ -42,12 +42,11 @@ import org.polypheny.db.adapter.csv.CsvTable.Flavor; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; -import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; -import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.catalog.entity.physical.PhysicalTable; import org.polypheny.db.information.InformationGroup; import org.polypheny.db.information.InformationTable; import org.polypheny.db.prepare.Context; -import org.polypheny.db.schema.Entity; import org.polypheny.db.schema.Namespace; import org.polypheny.db.schema.SchemaPlus; import org.polypheny.db.transaction.PolyXid; @@ -133,7 +132,7 @@ public void createNewSchema( SchemaPlus rootSchema, String name, Long id ) { @Override - public Entity createTableSchema( CatalogTable catalogTable, List columnPlacementsOnStore, CatalogPartitionPlacement partitionPlacement ) { + public PhysicalTable createTableSchema( PhysicalTable boilerplate ) { return currentSchema.createCsvTable( catalogTable, columnPlacementsOnStore, this, partitionPlacement ); } @@ -145,7 +144,7 @@ public Namespace getCurrentSchema() { @Override - public void truncate( Context context, CatalogTable table ) { + public void truncate( Context context, LogicalTable table ) { throw new RuntimeException( "CSV adapter does not support truncate" ); } diff --git a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherAlterDatabaseAlias.java 
b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherAlterDatabaseAlias.java
index 065ea2d3c7..679dc90110 100644
--- a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherAlterDatabaseAlias.java
+++ b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherAlterDatabaseAlias.java
@@ -20,7 +20,7 @@ import lombok.Getter;
 import org.polypheny.db.catalog.Catalog;
 import org.polypheny.db.catalog.Catalog.Pattern;
-import org.polypheny.db.catalog.entity.CatalogGraphDatabase;
+import org.polypheny.db.catalog.entity.logical.LogicalGraph;
 import org.polypheny.db.cypher.CypherParameter;
 import org.polypheny.db.cypher.CypherSimpleEither;
 import org.polypheny.db.ddl.DdlManager;
@@ -53,7 +53,7 @@ public CypherAlterDatabaseAlias(
     @Override
     public void execute( Context context, Statement statement, QueryParameters parameters ) {
-        List<CatalogGraphDatabase> graphs = Catalog.getInstance().getGraphs( Catalog.defaultDatabaseId, new Pattern( targetName ) );
+        List<LogicalGraph> graphs = Catalog.getInstance().getGraphs( Catalog.defaultDatabaseId, new Pattern( targetName ) );
         if ( graphs.size() != 1 ) {
             if ( !ifExists ) {
diff --git a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherCreateDatabaseAlias.java b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherCreateDatabaseAlias.java
index 98c6436e16..4664777497 100644
--- a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherCreateDatabaseAlias.java
+++ b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherCreateDatabaseAlias.java
@@ -20,7 +20,7 @@ import lombok.Getter;
 import org.polypheny.db.catalog.Catalog;
 import org.polypheny.db.catalog.Catalog.Pattern;
-import org.polypheny.db.catalog.entity.CatalogGraphDatabase;
+import org.polypheny.db.catalog.entity.logical.LogicalGraph;
 import org.polypheny.db.cypher.CypherParameter;
 import org.polypheny.db.cypher.CypherSimpleEither;
 import org.polypheny.db.ddl.DdlManager;
@@ -56,7 +56,7 @@ public CypherCreateDatabaseAlias(
     @Override
     public void execute( Context context, Statement statement, QueryParameters parameters ) {
-        List<CatalogGraphDatabase> graphs = Catalog.getInstance().getGraphs( Catalog.defaultDatabaseId, new Pattern( targetName ) );
+        List<LogicalGraph> graphs = Catalog.getInstance().getGraphs( Catalog.defaultDatabaseId, new Pattern( targetName ) );
         if ( graphs.size() != 1 ) {
             throw new RuntimeException( "Error while creating a new graph database alias."
);
        }
diff --git a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherDropAlias.java b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherDropAlias.java
index d463486f84..2978a7f35a 100644
--- a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherDropAlias.java
+++ b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherDropAlias.java
@@ -20,7 +20,7 @@ import lombok.Getter;
 import org.polypheny.db.catalog.Catalog;
 import org.polypheny.db.catalog.Catalog.Pattern;
-import org.polypheny.db.catalog.entity.CatalogGraphDatabase;
+import org.polypheny.db.catalog.entity.logical.LogicalGraph;
 import org.polypheny.db.cypher.CypherParameter;
 import org.polypheny.db.cypher.CypherSimpleEither;
 import org.polypheny.db.ddl.DdlManager;
@@ -47,7 +47,7 @@ public CypherDropAlias( ParserPos pos, CypherSimpleEither
-        List<CatalogGraphDatabase> graphs = Catalog.getInstance().getGraphs( Catalog.defaultDatabaseId, new Pattern( aliasName ) );
+        List<LogicalGraph> graphs = Catalog.getInstance().getGraphs( Catalog.defaultDatabaseId, new Pattern( aliasName ) );
         if ( graphs.size() != 1 ) {
             throw new RuntimeException( "Error while dropping a graph database alias." );
         }
diff --git a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherDropDatabase.java b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherDropDatabase.java
index 6ff156c700..570ef9b9a3 100644
--- a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherDropDatabase.java
+++ b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherDropDatabase.java
@@ -20,7 +20,7 @@ import java.util.concurrent.TimeUnit;
 import org.polypheny.db.catalog.Catalog;
 import org.polypheny.db.catalog.Catalog.Pattern;
-import org.polypheny.db.catalog.entity.CatalogGraphDatabase;
+import org.polypheny.db.catalog.entity.logical.LogicalGraph;
 import org.polypheny.db.cypher.CypherParameter;
 import org.polypheny.db.cypher.CypherSimpleEither;
 import org.polypheny.db.cypher.clause.CypherWaitClause;
@@ -64,7 +64,7 @@ public void execute( Context context, Statement statement, QueryParameters param
             }
         }
-        List<CatalogGraphDatabase> databases = Catalog.getInstance().getGraphs( Catalog.defaultDatabaseId, new Pattern( databaseName ) );
+        List<LogicalGraph> databases = Catalog.getInstance().getGraphs( Catalog.defaultDatabaseId, new Pattern( databaseName ) );
         if ( databases.size() != 1 ) {
             if ( !ifExists ) {
diff --git a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/cypher2alg/CypherToAlgConverter.java b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/cypher2alg/CypherToAlgConverter.java
index 8263e22e3f..71afa08191 100644
--- a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/cypher2alg/CypherToAlgConverter.java
+++ b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/cypher2alg/CypherToAlgConverter.java
@@ -35,7 +35,7 @@ import org.polypheny.db.algebra.AlgRoot;
 import org.polypheny.db.algebra.constant.Kind;
 import org.polypheny.db.algebra.core.AggregateCall;
-import org.polypheny.db.algebra.core.Modify.Operation;
+import org.polypheny.db.algebra.core.common.Modify;
 import org.polypheny.db.algebra.logical.lpg.LogicalLpgFilter;
 import org.polypheny.db.algebra.logical.lpg.LogicalLpgMatch;
 import org.polypheny.db.algebra.logical.lpg.LogicalLpgModify;
@@ -49,7 +49,7 @@ import org.polypheny.db.algebra.type.AlgDataTypeFieldImpl;
 import org.polypheny.db.algebra.type.AlgRecordType;
 import org.polypheny.db.catalog.Catalog;
-import
org.polypheny.db.catalog.entity.CatalogGraphDatabase; +import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.catalog.exceptions.UnknownSchemaException; import org.polypheny.db.cypher.CypherNode; import org.polypheny.db.cypher.CypherNode.CypherFamily; @@ -109,7 +109,7 @@ public AlgRoot convert( CypherNode query, ExtendedQueryParameters parameters, Al databaseId = parameters.getDatabaseId(); } - CatalogGraphDatabase graph = Catalog.getInstance().getGraph( databaseId ); + LogicalGraph graph = Catalog.getInstance().getGraph( databaseId ); if ( parameters.isFullGraph() ) { // simple full graph scan @@ -128,7 +128,7 @@ public AlgRoot convert( CypherNode query, ExtendedQueryParameters parameters, Al } - private AlgNode buildFullScan( CatalogGraphDatabase graph ) { + private AlgNode buildFullScan( LogicalGraph graph ) { return new LogicalLpgScan( cluster, cluster.traitSet(), @@ -405,7 +405,7 @@ public static class CypherContext { private final Queue> rexQueue = new LinkedList<>(); private final Queue> rexAggQueue = new LinkedList<>(); public final CypherNode original; - public final CatalogGraphDatabase graph; + public final LogicalGraph graph; public final AlgDataType graphType; public final AlgDataType booleanType; @@ -423,7 +423,7 @@ public static class CypherContext { private CypherContext( CypherNode original, - CatalogGraphDatabase graph, + LogicalGraph graph, AlgOptCluster cluster, AlgBuilder algBuilder, RexBuilder rexBuilder, @@ -703,7 +703,7 @@ public void combineValues() { edges.stream().map( t -> Pair.of( t.left, t.right.edge ) ).collect( Collectors.toList() ), edgeType ) ); - add( new LogicalLpgModify( cluster, cluster.traitSet(), graph, pop(), Operation.INSERT, null, null ) ); + add( new LogicalLpgModify( cluster, cluster.traitSet(), graph, pop(), Modify.Operation.INSERT, null, null ) ); } else { // filtered DML List> newNodes = new LinkedList<>(); @@ -759,7 +759,7 @@ public void combineValues() { add( new LogicalLpgProject( node.getCluster(), node.getTraitSet(), pop(), Pair.right( nodesAndEdges ), adjustedNames ) ); - add( new LogicalLpgModify( cluster, cluster.traitSet(), graph, pop(), Operation.INSERT, null, null ) ); + add( new LogicalLpgModify( cluster, cluster.traitSet(), graph, pop(), Modify.Operation.INSERT, null, null ) ); } clearVariables(); } @@ -790,7 +790,7 @@ public void combineUpdate() { List> updates = popNodes(); - add( new LogicalLpgModify( cluster, cluster.traitSet(), graph, pop(), Operation.UPDATE, Pair.left( updates ), Pair.right( updates ) ) ); + add( new LogicalLpgModify( cluster, cluster.traitSet(), graph, pop(), Modify.Operation.UPDATE, Pair.left( updates ), Pair.right( updates ) ) ); clearVariables(); } @@ -802,7 +802,7 @@ public void combineDelete() { List> deletes = popNodes(); - add( new LogicalLpgModify( cluster, cluster.traitSet(), graph, pop(), Operation.DELETE, Pair.left( deletes ), Pair.right( deletes ) ) ); + add( new LogicalLpgModify( cluster, cluster.traitSet(), graph, pop(), Modify.Operation.DELETE, Pair.left( deletes ), Pair.right( deletes ) ) ); clearVariables(); } @@ -843,7 +843,7 @@ public void combineSet() { List> updates = popNodes(); - add( new LogicalLpgModify( cluster, cluster.traitSet(), graph, pop(), Operation.UPDATE, Pair.left( updates ), Pair.right( updates ) ) ); + add( new LogicalLpgModify( cluster, cluster.traitSet(), graph, pop(), Modify.Operation.UPDATE, Pair.left( updates ), Pair.right( updates ) ) ); clearVariables(); } diff --git 
a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/ddl/CypherAddPlacement.java b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/ddl/CypherAddPlacement.java
index 11b9920418..f58e12a3db 100644
--- a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/ddl/CypherAddPlacement.java
+++ b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/ddl/CypherAddPlacement.java
@@ -24,7 +24,7 @@ import org.polypheny.db.adapter.DataStore;
 import org.polypheny.db.catalog.Catalog;
 import org.polypheny.db.catalog.Catalog.Pattern;
-import org.polypheny.db.catalog.entity.CatalogGraphDatabase;
+import org.polypheny.db.catalog.entity.logical.LogicalGraph;
 import org.polypheny.db.cypher.CypherParameter;
 import org.polypheny.db.cypher.CypherSimpleEither;
 import org.polypheny.db.cypher.admin.CypherAdminCommand;
@@ -64,7 +64,7 @@ public void execute( Context context, Statement statement, QueryParameters param
         Catalog catalog = Catalog.getInstance();
         AdapterManager adapterManager = AdapterManager.getInstance();
-        List<CatalogGraphDatabase> graphs = catalog.getGraphs( Catalog.defaultDatabaseId, new Pattern( this.database ) );
+        List<LogicalGraph> graphs = catalog.getGraphs( Catalog.defaultDatabaseId, new Pattern( this.database ) );
         List dataStores = Stream.of( store )
                 .map( store -> (DataStore) adapterManager.getAdapter( store ) )
diff --git a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/ddl/CypherDropPlacement.java b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/ddl/CypherDropPlacement.java
index 27343c6e8f..174ab09ac7 100644
--- a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/ddl/CypherDropPlacement.java
+++ b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/ddl/CypherDropPlacement.java
@@ -23,7 +23,7 @@ import org.polypheny.db.adapter.DataStore;
 import org.polypheny.db.catalog.Catalog;
 import org.polypheny.db.catalog.Catalog.Pattern;
-import org.polypheny.db.catalog.entity.CatalogGraphDatabase;
+import org.polypheny.db.catalog.entity.logical.LogicalGraph;
 import org.polypheny.db.cypher.CypherParameter;
 import org.polypheny.db.cypher.CypherSimpleEither;
 import org.polypheny.db.cypher.admin.CypherAdminCommand;
@@ -55,7 +55,7 @@ public void execute( Context context, Statement statement, QueryParameters param
         Catalog catalog = Catalog.getInstance();
         AdapterManager adapterManager = AdapterManager.getInstance();
-        List<CatalogGraphDatabase> graphs = catalog.getGraphs( Catalog.defaultDatabaseId, new Pattern( this.databaseName ) );
+        List<LogicalGraph> graphs = catalog.getGraphs( Catalog.defaultDatabaseId, new Pattern( this.databaseName ) );
         DataStore dataStore = Stream.of( storeName )
                 .map( store -> (DataStore) adapterManager.getAdapter( storeName ) )
diff --git a/plugins/druid-adapter/src/main/java/org/polypheny/db/adapter/druid/DruidQuery.java b/plugins/druid-adapter/src/main/java/org/polypheny/db/adapter/druid/DruidQuery.java
index 71ba25a7c8..b475575099 100644
--- a/plugins/druid-adapter/src/main/java/org/polypheny/db/adapter/druid/DruidQuery.java
+++ b/plugins/druid-adapter/src/main/java/org/polypheny/db/adapter/druid/DruidQuery.java
@@ -72,13 +72,14 @@ import org.polypheny.db.algebra.core.AggregateCall;
 import org.polypheny.db.algebra.core.Filter;
 import org.polypheny.db.algebra.core.Project;
-import org.polypheny.db.algebra.core.Scan;
+import org.polypheny.db.algebra.core.relational.RelScan;
 import org.polypheny.db.algebra.core.Sort;
 import org.polypheny.db.algebra.metadata.AlgMdUtil;
 import org.polypheny.db.algebra.metadata.AlgMetadataQuery;
 import
org.polypheny.db.algebra.operators.OperatorName; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeField; +import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.config.PolyphenyDbConnectionConfig; import org.polypheny.db.config.RuntimeConfig; import org.polypheny.db.interpreter.BindableAlg; @@ -415,7 +416,7 @@ public static String format( String message, Object... formatArgs ) { /** * Returns a string describing the operations inside this query. * - * For example, "sfpahol" means {@link Scan} (s) + * For example, "sfpahol" means {@link RelScan} (s) * followed by {@link Filter} (f) * followed by {@link Project} (p) * followed by {@link Aggregate} (a) @@ -429,7 +430,7 @@ String signature() { final StringBuilder b = new StringBuilder(); boolean flag = false; for ( AlgNode alg : algs ) { - b.append( alg instanceof Scan ? 's' + b.append( alg instanceof RelScan ? 's' : (alg instanceof Project && flag) ? 'o' : (alg instanceof Filter && flag) ? 'h' : alg instanceof Aggregate ? 'a' @@ -458,7 +459,7 @@ public boolean isValid( Litmus litmus, Context context ) { for ( int i = 0; i < algs.size(); i++ ) { final AlgNode r = algs.get( i ); if ( i == 0 ) { - if ( !(r instanceof Scan) ) { + if ( !(r instanceof RelScan) ) { return litmus.fail( "first alg must be Scan, was ", r ); } if ( r.getEntity() != table ) { @@ -514,8 +515,8 @@ public AlgDataType deriveRowType() { } - public Scan getScan() { - return (Scan) algs.get( 0 ); + public RelScan getScan() { + return (RelScan) algs.get( 0 ); } @@ -525,7 +526,7 @@ public AlgNode getTopNode() { @Override - public AlgOptEntity getEntity() { + public CatalogEntity getEntity() { return table; } @@ -538,8 +539,8 @@ public DruidEntity getDruidTable() { @Override public AlgWriter explainTerms( AlgWriter pw ) { for ( AlgNode alg : algs ) { - if ( alg instanceof Scan ) { - Scan scan = (Scan) alg; + if ( alg instanceof RelScan ) { + RelScan scan = (RelScan) alg; pw.item( "table", scan.getEntity().getCatalogEntity().id ); pw.item( "intervals", intervals ); } else if ( alg instanceof Filter ) { @@ -878,7 +879,7 @@ protected static Pair, List> computeProjectGr * * @param aggCalls List of AggregateCalls to translate * @param aggNames List of aggregate names - * @param project Input project under the aggregate calls, or null if we have {@link Scan} immediately under the {@link Aggregate} + * @param project Input project under the aggregate calls, or null if we have {@link RelScan} immediately under the {@link Aggregate} * @param druidQuery Druid Query Rel * @return List of valid Druid {@link JsonAggregation}s, or null if any of the aggregates is not supported */ diff --git a/plugins/elasticsearch-adapter/src/main/java/org/polypheny/db/adapter/elasticsearch/ElasticsearchScan.java b/plugins/elasticsearch-adapter/src/main/java/org/polypheny/db/adapter/elasticsearch/ElasticsearchScan.java index 97b6cb1752..6b1f71b410 100644 --- a/plugins/elasticsearch-adapter/src/main/java/org/polypheny/db/adapter/elasticsearch/ElasticsearchScan.java +++ b/plugins/elasticsearch-adapter/src/main/java/org/polypheny/db/adapter/elasticsearch/ElasticsearchScan.java @@ -37,7 +37,7 @@ import java.util.List; import java.util.Objects; import org.polypheny.db.algebra.AlgNode; -import org.polypheny.db.algebra.core.Scan; +import org.polypheny.db.algebra.core.relational.RelScan; import org.polypheny.db.algebra.metadata.AlgMetadataQuery; import org.polypheny.db.algebra.rules.AggregateExpandDistinctAggregatesRule; import 
org.polypheny.db.algebra.type.AlgDataType; @@ -55,7 +55,7 @@ *

 * <p>Additional operations might be applied, using the "find" method.</p>

    */ -public class ElasticsearchScan extends Scan implements ElasticsearchRel { +public class ElasticsearchScan extends RelScan implements ElasticsearchRel { private final ElasticsearchEntity elasticsearchTable; private final AlgDataType projectRowType; diff --git a/plugins/ethereum-adapter/build.gradle b/plugins/ethereum-adapter/build.gradle deleted file mode 100644 index ab2c2553e3..0000000000 --- a/plugins/ethereum-adapter/build.gradle +++ /dev/null @@ -1,89 +0,0 @@ -group "org.polypheny" - - -dependencies { - compileOnly project(":core") - - // Apache 2.0 - implementation(group: "org.web3j", name: "core", version: web3j_version) { - exclude(group: "org.slf4j") - } // Apache 2.0 - // Apache 2.0 - - - // --- Test Compile --- - testImplementation project(path: ":core", configuration: "tests") - - // BSD 3-clause -} - - -sourceSets { - main { - java { - srcDirs = ["src/main/java"] - outputDir = file(project.buildDir.absolutePath + "/classes") - } - resources { - srcDirs = ["src/main/resources"] - } - output.resourcesDir = file(project.buildDir.absolutePath + "/classes") - } - test { - java { - srcDirs = ["src/test/java"] - outputDir = file(project.buildDir.absolutePath + "/test-classes") - } - resources { - srcDirs = ["src/test/resources"] - } - output.resourcesDir = file(project.buildDir.absolutePath + "/test-classes") - } -} - -compileJava { - dependsOn(":config:processResources") - dependsOn(":core:processResources") - dependsOn(":information:processResources") -} - -delombok { - dependsOn(":core:processResources") -} - - -/** - * JARs - */ -jar { - manifest { - attributes "Manifest-Version": "1.0" - attributes "Copyright": "The Polypheny Project (polypheny.org)" - attributes "Version": "$project.version" - } -} -java { - withJavadocJar() - withSourcesJar() -} - -licensee { - allow('Apache-2.0') - allow('MIT') - allow('CC0-1.0') - - allowDependency('com.github.jnr', 'jnr-posix', '3.0.47') { because 'Eclipse Public License v. 2.0' } - - allowDependency('org.bouncycastle', 'bcprov-jdk15on', '1.65') { because 'MIT license' } - - allowDependency('org.java-websocket', 'Java-WebSocket', '1.3.8') { because 'MIT license' } - - allowDependency('org.ow2.asm', 'asm-util', '5.0.3') { because 'removed on release branches' } - allowDependency('org.ow2.asm', 'asm', '5.0.3') { because 'BSD 3-Clause' } - allowDependency('org.ow2.asm', 'asm-analysis', '5.0.3') { because 'BSD 3-Clause' } - allowDependency('org.ow2.asm', 'asm-commons', '5.0.3') { because 'BSD 3-Clause' } - allowDependency('org.ow2.asm', 'asm-tree', '5.0.3') { because 'BSD 3-Clause' } -} - - - diff --git a/plugins/ethereum-adapter/gradle.properties b/plugins/ethereum-adapter/gradle.properties deleted file mode 100644 index 3d8fa3ae29..0000000000 --- a/plugins/ethereum-adapter/gradle.properties +++ /dev/null @@ -1,27 +0,0 @@ -# -# Copyright 2019-2023 The Polypheny Project -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -pluginVersion = 0.0.1 - -pluginId = ethereum-adapter -pluginClass = org.polypheny.db.adapter.ethereum.EthereumPlugin -pluginProvider = The Polypheny Project -pluginDependencies = -pluginUrlPath = -pluginCategories = source -pluginPolyDependencies = -pluginIsSystemComponent = false -pluginIsUiVisible = true \ No newline at end of file diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/BlockReader.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/BlockReader.java deleted file mode 100644 index 646b607d39..0000000000 --- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/BlockReader.java +++ /dev/null @@ -1,78 +0,0 @@ -/* - * Copyright 2019-2023 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.adapter.ethereum; - - -import java.io.Closeable; -import java.io.IOException; -import java.math.BigInteger; -import java.util.function.Predicate; -import org.web3j.protocol.Web3j; -import org.web3j.protocol.core.DefaultBlockParameter; -import org.web3j.protocol.core.methods.response.EthBlock; -import org.web3j.protocol.http.HttpService; - -class BlockReader implements Closeable { - - protected final Web3j web3j; - protected final Predicate blockNumberPredicate; - protected int blockReads; - protected BigInteger currentBlock; - - - BlockReader( String clientUrl, int blocks, Predicate blockNumberPredicate ) { - this.web3j = Web3j.build( new HttpService( clientUrl ) ); - this.blockReads = blocks; - this.blockNumberPredicate = blockNumberPredicate; - try { - this.currentBlock = web3j.ethBlockNumber().send().getBlockNumber(); - } catch ( IOException e ) { - throw new RuntimeException( "Unable to connect to server: " + clientUrl ); - } - } - - - public String[] readNext() throws IOException { - if ( this.blockReads <= 0 ) { - return null; - } - EthBlock.Block block = null; - while ( this.currentBlock.compareTo( BigInteger.ZERO ) == 1 && block == null ) { - if ( blockNumberPredicate.test( this.currentBlock ) ) { - block = web3j - .ethGetBlockByNumber( DefaultBlockParameter.valueOf( this.currentBlock ), false ) - .send() - .getBlock(); - blockReads--; - } - this.currentBlock = this.currentBlock.subtract( BigInteger.ONE ); - } - return block == null ? null : EthereumMapper.BLOCK.map( block ); - } - - - /** - * Closes the underlying reader. 
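- * Internally this shuts down the {@code web3j} client.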
- * - * @throws IOException if the close fails - */ - @Override - public void close() throws IOException { - this.web3j.shutdown(); - } - -} diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumEnumerator.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumEnumerator.java deleted file mode 100644 index a17a6c09b7..0000000000 --- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumEnumerator.java +++ /dev/null @@ -1,316 +0,0 @@ -/* - * Copyright 2019-2023 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.adapter.ethereum; - -import java.io.IOException; -import java.math.BigInteger; -import java.text.ParseException; -import java.util.Date; -import java.util.List; -import java.util.TimeZone; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.function.Predicate; -import org.apache.calcite.avatica.util.DateTimeUtils; -import org.apache.calcite.linq4j.Enumerator; -import org.apache.commons.lang3.time.FastDateFormat; - - -/** - * Enumerator that reads from a Blockchain. - * - * @param Row type - */ -class EthereumEnumerator implements Enumerator { - - private static final FastDateFormat TIME_FORMAT_DATE; - private static final FastDateFormat TIME_FORMAT_TIME; - private static final FastDateFormat TIME_FORMAT_TIMESTAMP; - - - static { - final TimeZone gmt = TimeZone.getTimeZone( "GMT" ); - TIME_FORMAT_DATE = FastDateFormat.getInstance( "yyyy-MM-dd", gmt ); - TIME_FORMAT_TIME = FastDateFormat.getInstance( "HH:mm:ss", gmt ); - TIME_FORMAT_TIMESTAMP = FastDateFormat.getInstance( "yyyy-MM-dd HH:mm:ss", gmt ); - } - - - private final String clientUrl; - private final BlockReader reader; - private final String[] filterValues; - private final AtomicBoolean cancelFlag; - private final RowConverter rowConverter; - private final int blocks; - private E current; - - - EthereumEnumerator( String clientUrl, int blocks, AtomicBoolean cancelFlag, boolean stream, String[] filterValues, EthereumMapper mapper, Predicate blockNumberPredicate, RowConverter rowConverter ) { - this.clientUrl = clientUrl; - this.cancelFlag = cancelFlag; - this.rowConverter = rowConverter; - this.filterValues = filterValues; - this.reader = mapper.makeReader( clientUrl, blocks, blockNumberPredicate ); - this.blocks = blocks; - } - - - static RowConverter converter( List fieldTypes, int[] fields ) { - if ( fields.length == 1 ) { - final int field = fields[0]; - return new SingleColumnRowConverter( fieldTypes.get( field ), field ); - } else { - return new ArrayRowConverter( fieldTypes, fields ); - } - } - - - /** - * Returns an array of integers {0, ..., n - 1}. 
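- * For example, {@code identityList( 3 )} returns {0, 1, 2}.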
- */ - static int[] identityList( int n ) { - int[] integers = new int[n]; - for ( int i = 0; i < n; i++ ) { - integers[i] = i; - } - return integers; - } - - - @Override - public E current() { - return current; - } - - - @Override - public boolean moveNext() { - try { - outer: - for ( ; ; ) { - if ( cancelFlag.get() ) { - return false; - } - final String[] strings = reader.readNext(); - if ( strings == null ) { - return false; - } - if ( filterValues != null ) { - for ( int i = 0; i < strings.length; i++ ) { - String filterValue = filterValues[i]; - if ( filterValue != null ) { - if ( !filterValue.equals( strings[i] ) ) { - continue outer; - } - } - } - } - current = rowConverter.convertRow( strings ); - return true; - } - } catch ( IOException e ) { - throw new RuntimeException( e ); - } - } - - - @Override - public void reset() { - throw new UnsupportedOperationException(); - } - - - @Override - public void close() { - try { - reader.close(); - } catch ( IOException e ) { - throw new RuntimeException( "Error closing Blockchain reader", e ); - } - } - - - /** - * Row converter. - * - * @param element type - */ - abstract static class RowConverter { - - abstract E convertRow( String[] rows ); - - - protected Object convert( EthereumFieldType fieldType, String string ) { - if ( fieldType == null ) { - return string; - } - switch ( fieldType ) { - case BOOLEAN: - if ( string.length() == 0 ) { - return null; - } - return Boolean.parseBoolean( string ); - case BYTE: - if ( string.length() == 0 ) { - return null; - } - return Byte.parseByte( string ); - case SHORT: - if ( string.length() == 0 ) { - return null; - } - return Short.parseShort( string ); - case INT: - if ( string.length() == 0 ) { - return null; - } - return Integer.parseInt( string ); - case LONG: - if ( string.length() == 0 ) { - return null; - } - - return new BigInteger( string ); - case FLOAT: - if ( string.length() == 0 ) { - return null; - } - return Float.parseFloat( string ); - case DOUBLE: - if ( string.length() == 0 ) { - return null; - } - return Double.parseDouble( string ); - case DATE: - if ( string.length() == 0 ) { - return null; - } - try { - Date date = TIME_FORMAT_DATE.parse( string ); - return (int) (date.getTime() / DateTimeUtils.MILLIS_PER_DAY); - } catch ( ParseException e ) { - return null; - } - case TIME: - if ( string.length() == 0 ) { - return null; - } - try { - Date date = TIME_FORMAT_TIME.parse( string ); - return (int) date.getTime(); - } catch ( ParseException e ) { - return null; - } - case TIMESTAMP: - if ( string.length() == 0 ) { - return null; - } - try { - Date date = new Date( Long.parseLong( string ) * 1000 ); - return date.getTime(); - } catch ( Exception e ) { - return null; - } - case STRING: - default: - return string; - } - } - - } - - - /** - * Array row converter. 
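- * Maps the configured field indices through {@code convert} to build an Object[] row.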
- */ - static class ArrayRowConverter extends RowConverter { - - private final EthereumFieldType[] fieldTypes; - private final int[] fields; - // whether the row to convert is from a stream - private final boolean stream; - - - ArrayRowConverter( List fieldTypes, int[] fields ) { - this.fieldTypes = fieldTypes.toArray( new EthereumFieldType[0] ); - this.fields = fields; - this.stream = false; - } - - - ArrayRowConverter( List fieldTypes, int[] fields, boolean stream ) { - this.fieldTypes = fieldTypes.toArray( new EthereumFieldType[0] ); - this.fields = fields; - this.stream = stream; - } - - - @Override - public Object[] convertRow( String[] strings ) { - if ( stream ) { - return convertStreamRow( strings ); - } else { - return convertNormalRow( strings ); - } - } - - - public Object[] convertNormalRow( String[] strings ) { - final Object[] objects = new Object[fields.length]; - for ( int i = 0; i < fields.length; i++ ) { - int field = fields[i]; - objects[i] = convert( fieldTypes[i], strings[field] ); - } - return objects; - } - - - public Object[] convertStreamRow( String[] strings ) { - final Object[] objects = new Object[fields.length]; - objects[0] = System.currentTimeMillis(); - for ( int i = 0; i < fields.length; i++ ) { - int field = fields[i]; - objects[i] = convert( fieldTypes[i], strings[field] ); - } - return objects; - } - - } - - - /** - * Single column row converter. - */ - private static class SingleColumnRowConverter extends RowConverter { - - private final EthereumFieldType fieldType; - private final int fieldIndex; - - - private SingleColumnRowConverter( EthereumFieldType fieldType, int fieldIndex ) { - this.fieldType = fieldType; - this.fieldIndex = fieldIndex; - } - - - @Override - public Object convertRow( String[] strings ) { - return convert( fieldType, strings[fieldIndex] ); - } - - } - -} diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumFieldType.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumFieldType.java deleted file mode 100644 index 7ec2acad0f..0000000000 --- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumFieldType.java +++ /dev/null @@ -1,108 +0,0 @@ -/* - * Copyright 2019-2023 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.adapter.ethereum; - - -import java.util.HashMap; -import java.util.Map; -import org.apache.calcite.linq4j.tree.Primitive; -import org.polypheny.db.adapter.java.JavaTypeFactory; -import org.polypheny.db.algebra.type.AlgDataType; -import org.polypheny.db.type.PolyType; - - -/** - * Type of Blockchain field. 
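- * Each constant pairs a Java class with the simple type name used for lookups via {@code of( String )}.
- */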
- */ -enum EthereumFieldType { - STRING( String.class, "string" ), - BOOLEAN( Primitive.BOOLEAN ), - BYTE( Primitive.BYTE ), - CHAR( Primitive.CHAR ), - SHORT( Primitive.SHORT ), - INT( Primitive.INT ), - LONG( Primitive.LONG ), - FLOAT( Primitive.FLOAT ), - DOUBLE( Primitive.DOUBLE ), - DATE( java.sql.Date.class, "date" ), - TIME( java.sql.Time.class, "time" ), - TIMESTAMP( java.sql.Timestamp.class, "timestamp" ); - - private static final Map MAP = new HashMap<>(); - - - static { - for ( EthereumFieldType value : values() ) { - MAP.put( value.simpleName, value ); - } - } - - - private final Class clazz; - private final String simpleName; - - - EthereumFieldType( Primitive primitive ) { - this( primitive.boxClass, primitive.primitiveClass.getSimpleName() ); - } - - - EthereumFieldType( Class clazz, String simpleName ) { - this.clazz = clazz; - this.simpleName = simpleName; - } - - - public static EthereumFieldType getBlockchainFieldType( PolyType type ) { - switch ( type ) { - case BOOLEAN: - return EthereumFieldType.BOOLEAN; - case VARBINARY: - return EthereumFieldType.BYTE; - case INTEGER: - return EthereumFieldType.INT; - case BIGINT: - return EthereumFieldType.LONG; - case REAL: - return EthereumFieldType.FLOAT; - case DOUBLE: - return EthereumFieldType.DOUBLE; - case VARCHAR: - return EthereumFieldType.STRING; - case DATE: - return EthereumFieldType.DATE; - case TIME: - return EthereumFieldType.TIME; - case TIMESTAMP: - return EthereumFieldType.TIMESTAMP; - default: - throw new RuntimeException( "Unsupported datatype: " + type.name() ); - } - } - - - public static EthereumFieldType of( String typeString ) { - return MAP.get( typeString ); - } - - - public AlgDataType toType( JavaTypeFactory typeFactory ) { - AlgDataType javaType = typeFactory.createJavaType( clazz ); - AlgDataType sqlType = typeFactory.createPolyType( javaType.getPolyType() ); - return typeFactory.createTypeWithNullability( sqlType, true ); - } -} diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumMapper.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumMapper.java deleted file mode 100644 index db67e69734..0000000000 --- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumMapper.java +++ /dev/null @@ -1,104 +0,0 @@ -/* - * Copyright 2019-2023 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.polypheny.db.adapter.ethereum; - -import java.math.BigInteger; -import java.util.function.Predicate; -import org.web3j.protocol.core.methods.response.EthBlock; - -public enum EthereumMapper { - - BLOCK, - TRANSACTION; - - - static EthereumMapper getMapper( String tableName ) { - if ( tableName.equals( "block" ) ) { - return BLOCK; - } - return TRANSACTION; - } - - - static String safeToString( Object x ) { - if ( x == null ) { - return null; - } - return x.toString(); - } - - - public String[] map( Object obj ) { - String[] str = null; - - if ( this == BLOCK ) { - - EthBlock.Block blk = (EthBlock.Block) obj; - str = new String[]{ - safeToString( blk.getNumber() ), - blk.getHash(), - blk.getParentHash(), - safeToString( blk.getNonce() ), - blk.getSha3Uncles(), - blk.getLogsBloom(), - blk.getTransactionsRoot(), - blk.getStateRoot(), - blk.getReceiptsRoot(), - blk.getAuthor(), - blk.getMiner(), - blk.getMixHash(), - safeToString( blk.getDifficulty() ), - safeToString( blk.getTotalDifficulty() ), - blk.getExtraData(), - safeToString( blk.getSize() ), - safeToString( blk.getGasLimit() ), - safeToString( blk.getGasUsed() ), - safeToString( blk.getTimestamp() ), - }; - } else { - EthBlock.TransactionObject tnx = (EthBlock.TransactionObject) obj; - str = new String[]{ - tnx.getHash(), - safeToString( tnx.getNonce() ), - tnx.getBlockHash(), - safeToString( tnx.getBlockNumber() ), - safeToString( tnx.getTransactionIndex() ), - tnx.getFrom(), - tnx.getTo(), - safeToString( tnx.getValue() ), - safeToString( tnx.getGasPrice() ), - safeToString( tnx.getGas() ), - tnx.getInput(), - tnx.getCreates(), - tnx.getPublicKey(), - tnx.getRaw(), - tnx.getR(), - tnx.getS() - }; - } - - return str; - } - - - public BlockReader makeReader( String clientUrl, int blocks, Predicate blockNumberPredicate ) { - if ( this == BLOCK ) { - return new BlockReader( clientUrl, blocks, blockNumberPredicate ); - } - return new TransactionReader( clientUrl, blocks, blockNumberPredicate ); - } -} diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumPlugin.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumPlugin.java deleted file mode 100644 index e18f0bbe9c..0000000000 --- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumPlugin.java +++ /dev/null @@ -1,274 +0,0 @@ -/* - * Copyright 2019-2023 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */
-
-package org.polypheny.db.adapter.ethereum;
-
-
-import com.google.common.collect.ImmutableMap;
-import java.math.BigInteger;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import lombok.Getter;
-import lombok.extern.slf4j.Slf4j;
-import org.pf4j.Extension;
-import org.pf4j.Plugin;
-import org.pf4j.PluginWrapper;
-import org.polypheny.db.adapter.Adapter.AdapterProperties;
-import org.polypheny.db.adapter.Adapter.AdapterSettingBoolean;
-import org.polypheny.db.adapter.Adapter.AdapterSettingInteger;
-import org.polypheny.db.adapter.Adapter.AdapterSettingString;
-import org.polypheny.db.adapter.DataSource;
-import org.polypheny.db.adapter.DeployMode;
-import org.polypheny.db.catalog.Adapter;
-import org.polypheny.db.catalog.entity.CatalogColumnPlacement;
-import org.polypheny.db.catalog.entity.CatalogPartitionPlacement;
-import org.polypheny.db.catalog.entity.CatalogTable;
-import org.polypheny.db.information.InformationGroup;
-import org.polypheny.db.information.InformationTable;
-import org.polypheny.db.prepare.Context;
-import org.polypheny.db.schema.Schema;
-import org.polypheny.db.schema.SchemaPlus;
-import org.polypheny.db.schema.Table;
-import org.polypheny.db.transaction.PolyXid;
-import org.polypheny.db.type.PolyType;
-import org.web3j.protocol.Web3j;
-import org.web3j.protocol.http.HttpService;
-
-
-public class EthereumPlugin extends Plugin {
-
-
-    public static final String ADAPTER_NAME = "ETHEREUM";
-
-
-    /**
-     * Constructor to be used by the plugin manager for plugin instantiation.
-     * Plugins have to provide a constructor with this exact signature to be successfully loaded by the manager.
-     */
-    public EthereumPlugin( PluginWrapper wrapper ) {
-        super( wrapper );
-    }
-
-
-    @Override
-    public void start() {
-        Map<String, String> settings = ImmutableMap.of(
-                "ClientUrl", "https://mainnet.infura.io/v3/4d06589e97064040b5da99cf4051ef04",
-                "Blocks", "10",
-                "ExperimentalFiltering", "false"
-        );
-
-        Adapter.addAdapter( EthereumDataSource.class, ADAPTER_NAME, settings );
-    }
-
-
-    @Override
-    public void stop() {
-        Adapter.removeAdapter( EthereumDataSource.class, ADAPTER_NAME );
-    }
-
-
-    @Slf4j
-    @Extension
-    @AdapterProperties(
-            name = "Ethereum",
-            description = "An adapter for querying the Ethereum blockchain. It uses the Ethereum JSON-RPC API. Currently, this adapter only supports read operations.",
-            usedModes = DeployMode.REMOTE)
-    @AdapterSettingString(name = "ClientUrl", description = "The URL of the Ethereum JSON-RPC client", defaultValue = "https://mainnet.infura.io/v3/4d06589e97064040b5da99cf4051ef04", position = 1)
-    @AdapterSettingInteger(name = "Blocks", description = "The number of blocks to fetch when processing a query", defaultValue = 10, position = 2, modifiable = true)
-    @AdapterSettingBoolean(name = "ExperimentalFiltering", description = "Experimentally filter past blocks", defaultValue = false, position = 3, modifiable = true)
-    public static class EthereumDataSource extends DataSource {
-
-        private String clientURL;
-        @Getter
-        private int blocks;
-        @Getter
-        private boolean experimentalFiltering;
-        private EthereumSchema currentSchema;
-
-
-        public EthereumDataSource( final int storeId, final String uniqueName, final Map<String, String> settings ) {
-            super( storeId, uniqueName, settings, true );
-            setClientURL( settings.get( "ClientUrl" ) );
-            this.blocks = Integer.parseInt( settings.get( "Blocks" ) );
-            this.experimentalFiltering = Boolean.parseBoolean( settings.get( "ExperimentalFiltering" ) );
-            createInformationPage();
-            enableInformationPage();
-        }
-
-
-        private void setClientURL( String clientURL ) {
-            Web3j web3j = Web3j.build( new HttpService( clientURL ) );
-            try {
-                BigInteger latest = web3j.ethBlockNumber().send().getBlockNumber();
-            } catch ( Exception e ) {
-                throw new RuntimeException( "Unable to connect to the client URL '" + clientURL + "'" );
-            }
-            web3j.shutdown();
-            this.clientURL = clientURL;
-        }
-
-
-        @Override
-        public void createNewSchema( SchemaPlus rootSchema, String name ) {
-            currentSchema = new EthereumSchema( this.clientURL );
-        }
-
-
-        @Override
-        public Table createTableSchema( CatalogTable combinedTable, List<CatalogColumnPlacement> columnPlacementsOnStore, CatalogPartitionPlacement partitionPlacement ) {
-            return currentSchema.createBlockchainTable( combinedTable, columnPlacementsOnStore, this );
-        }
-
-
-        @Override
-        public Schema getCurrentSchema() {
-            return currentSchema;
-        }
-
-
-        @Override
-        public void truncate( Context context, CatalogTable table ) {
-            throw new RuntimeException( "Blockchain adapter does not support truncate" );
-        }
-
-
-        @Override
-        public Map<String, List<ExportedColumn>> getExportedColumns() {
-            Map<String, List<ExportedColumn>> map = new HashMap<>();
-            String[] blockColumns = { "number", "hash", "parent_hash", "nonce", "sha3uncles", "logs_bloom", "transactions_root", "state_root", "receipts_root", "author", "miner", "mix_hash", "difficulty", "total_difficulty", "extra_data", "size", "gas_limit", "gas_used", "timestamp" };
-            PolyType[] blockTypes = { PolyType.BIGINT, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.BIGINT, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.BIGINT, PolyType.BIGINT, PolyType.VARCHAR, PolyType.BIGINT, PolyType.BIGINT, PolyType.BIGINT, PolyType.TIMESTAMP };
-            String[] transactionColumns = { "hash", "nonce", "block_hash", "block_number", "transaction_index", "from", "to", "value", "gas_price", "gas", "input", "creates", "public_key", "raw", "r", "s" };
-            PolyType[] transactionTypes = { PolyType.VARCHAR, PolyType.BIGINT, PolyType.VARCHAR, PolyType.BIGINT, PolyType.BIGINT, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.BIGINT, PolyType.BIGINT, PolyType.BIGINT, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR };
-
-            PolyType type = PolyType.VARCHAR;
-            PolyType collectionsType = null;
-            Integer length = 300;
-            Integer scale = null;
-            Integer dimension = null;
-            Integer cardinality = null;
-            int position = 0;
-            List<ExportedColumn> blockCols = new ArrayList<>();
-            for ( String blockCol : blockColumns ) {
-                blockCols.add( new ExportedColumn(
-                        blockCol,
-                        blockTypes[position],
-                        collectionsType,
-                        length,
-                        scale,
-                        dimension,
-                        cardinality,
-                        false,
-                        "public",
-                        "block",
-                        blockCol,
-                        position,
-                        position == 0 ) );
-                position++;
-
-            }
-            map.put( "block", blockCols );
-            List<ExportedColumn> transactCols = new ArrayList<>();
-            position = 0;
-            for ( String transactCol : transactionColumns ) {
-                transactCols.add( new ExportedColumn(
-                        transactCol,
-                        transactionTypes[position],
-                        collectionsType,
-                        length,
-                        scale,
-                        dimension,
-                        cardinality,
-                        false,
-                        "public",
-                        "transaction",
-                        transactCol,
-                        position,
-                        position == 0 ) );
-                position++;
-            }
-            map.put( "transaction", transactCols );
-            return map;
-        }
-
-
-        @Override
-        public boolean prepare( PolyXid xid ) {
-            log.debug( "Blockchain Store does not support prepare()." );
-            return true;
-        }
-
-
-        @Override
-        public void commit( PolyXid xid ) {
-            log.debug( "Blockchain Store does not support commit()." );
-        }
-
-
-        @Override
-        public void rollback( PolyXid xid ) {
-            log.debug( "Blockchain Store does not support rollback()." );
-        }
-
-
-        @Override
-        public void shutdown() {
-            removeInformationPage();
-        }
-
-
-        @Override
-        protected void reloadSettings( List<String> updatedSettings ) {
-            if ( updatedSettings.contains( "ClientUrl" ) ) {
-                setClientURL( settings.get( "ClientUrl" ) );
-            }
-            if ( updatedSettings.contains( "Blocks" ) ) {
-                this.blocks = Integer.parseInt( settings.get( "Blocks" ) );
-            }
-            if ( updatedSettings.contains( "ExperimentalFiltering" ) ) {
-                this.experimentalFiltering = Boolean.parseBoolean( settings.get( "ExperimentalFiltering" ) );
-            }
-        }
-
-
-        protected void createInformationPage() {
-            for ( Map.Entry<String, List<ExportedColumn>> entry : getExportedColumns().entrySet() ) {
-                InformationGroup group = new InformationGroup(
-                        informationPage,
-                        entry.getValue().get( 0 ).physicalSchemaName + "." + entry.getValue().get( 0 ).physicalTableName );
-
-                InformationTable table = new InformationTable(
-                        group,
-                        Arrays.asList( "Position", "Column Name", "Type", "Primary" ) );
-                for ( ExportedColumn exportedColumn : entry.getValue() ) {
-                    table.addRow(
-                            exportedColumn.physicalPosition,
-                            exportedColumn.name,
-                            exportedColumn.getDisplayType(),
-                            exportedColumn.primary ? "✔" : ""
-                    );
-                }
-                informationElements.add( table );
-                informationGroups.add( group );
-            }
-        }
-
-    }
-
-}
\ No newline at end of file
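For reference, the connectivity probe in setClientURL above reduces to a single eth_blockNumber JSON-RPC call; the same check as a minimal, self-contained sketch (the endpoint URL is a placeholder, not a live node):

import java.math.BigInteger;
import org.web3j.protocol.Web3j;
import org.web3j.protocol.http.HttpService;

public class ClientUrlProbe {

    public static void main( String[] args ) throws Exception {
        // Placeholder endpoint; substitute any reachable Ethereum JSON-RPC node.
        Web3j web3j = Web3j.build( new HttpService( "https://example-node.invalid" ) );
        try {
            // eth_blockNumber is the cheapest call that proves the node answers RPC requests.
            BigInteger latest = web3j.ethBlockNumber().send().getBlockNumber();
            System.out.println( "Connected; latest block: " + latest );
        } finally {
            web3j.shutdown();
        }
    }

}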
diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumPredicateFactory.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumPredicateFactory.java
deleted file mode 100644
index 39c4e28ed0..0000000000
--- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumPredicateFactory.java
+++ /dev/null
@@ -1,123 +0,0 @@
-/*
- * Copyright 2019-2023 The Polypheny Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.polypheny.db.adapter.ethereum;
-
-import java.math.BigInteger;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.function.Predicate;
-import org.polypheny.db.adapter.DataContext;
-import org.polypheny.db.algebra.constant.Kind;
-import org.polypheny.db.rex.RexCall;
-import org.polypheny.db.rex.RexDynamicParam;
-import org.polypheny.db.rex.RexInputRef;
-import org.polypheny.db.rex.RexNode;
-import org.polypheny.db.util.Pair;
-
-public class EthereumPredicateFactory {
-
-    static final List<Kind> REX_COMPARATORS = new ArrayList<Kind>() {{
-        this.add( Kind.EQUALS );
-        this.add( Kind.LESS_THAN );
-        this.add( Kind.LESS_THAN_OR_EQUAL );
-        this.add( Kind.GREATER_THAN );
-        this.add( Kind.GREATER_THAN_OR_EQUAL );
-    }};
-    static final Predicate<BigInteger> ALWAYS_TRUE = bigInteger -> true;
-
-
-    static Predicate<BigInteger> makePredicate( DataContext dataContext, List<RexNode> filters, EthereumMapper mapper ) {
-        String field = "$0";
-        if ( mapper == EthereumMapper.TRANSACTION ) {
-            field = "$3";
-        }
-        String blockNumberField = field;
-        return bigInteger -> {
-            boolean result = true;
-            for ( RexNode filter : filters ) {
-                Pair<Boolean, Boolean> intermediateResult = match( bigInteger, dataContext, filter, blockNumberField );
-                if ( !intermediateResult.left ) {
-                    continue;
-                }
-                result &= intermediateResult.right;
-                if ( !result ) {
-                    break;
-                }
-            }
-            return result;
-        };
-    }
-
-
-    private static Pair<Boolean, Boolean> match( BigInteger bigInteger, DataContext dataContext, RexNode filter, String blockNumberField ) {
-        boolean result = true;
-        boolean exists = false;
-
-        if ( filter.isA( REX_COMPARATORS ) ) {
-            final RexCall call = (RexCall) filter;
-            RexNode left = call.getOperands().get( 0 );
-            if ( left.isA( Kind.CAST ) ) {
-                left = ((RexCall) left).operands.get( 0 );
-            }
-            final RexNode right = call.getOperands().get( 1 );
-            if ( left instanceof RexInputRef && right instanceof RexDynamicParam ) {
-                // Only comparisons on the block number field ($0, or $3 for transactions) can be pushed down.
-                if ( !((RexInputRef) left).getName().equals( blockNumberField ) ) {
-                    return new Pair<>( false, false );
-                }
-                exists = true;
-                BigInteger value = new BigInteger( String.valueOf( dataContext.getParameterValue( ((RexDynamicParam) right).getIndex() ) ) );
-                if ( filter.isA( Kind.EQUALS ) ) {
-                    result = bigInteger.compareTo( value ) == 0;
-                } else if ( filter.isA( Kind.LESS_THAN ) ) {
-                    result = bigInteger.compareTo( value ) < 0;
-                } else if ( filter.isA( Kind.LESS_THAN_OR_EQUAL ) ) {
-                    result = bigInteger.compareTo( value ) <= 0;
-                } else if ( filter.isA( Kind.GREATER_THAN ) ) {
-                    result = bigInteger.compareTo( value ) > 0;
-                } else if ( filter.isA( Kind.GREATER_THAN_OR_EQUAL ) ) {
-                    result = bigInteger.compareTo( value ) >= 0;
-                }
-            }
-        } else if ( filter.isA( Kind.AND ) ) {
-            for ( RexNode and : ((RexCall) filter).getOperands() ) {
-                Pair<Boolean, Boolean> x = match( bigInteger, dataContext, and, blockNumberField );
-                exists |= x.left;
-                if ( x.left ) {
-                    result &= x.right;
-                }
-            }
-        } else if ( filter.isA( Kind.OR ) ) {
-            result = false;
-            for ( RexNode or : ((RexCall) filter).getOperands() ) {
-                Pair<Boolean, Boolean> x = match( bigInteger, dataContext, or, blockNumberField );
-                exists |= x.left;
-                if ( x.left ) {
-                    result |= x.right;
-                }
-            }
-        } else if ( filter.isA( Kind.NOT ) ) {
-            Pair<Boolean, Boolean> x = match( bigInteger, dataContext, ((RexCall) filter).getOperands().get( 0 ), blockNumberField );
-            if ( x.left ) {
-                exists = true;
-                result = !x.right;
-            }
-        }
-        return new Pair<>( exists, result );
-    }
-
-}
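To make the Pair<exists, result> protocol above concrete: a filter tree such as blockNr >= 100 AND blockNr < 110 collapses into a plain range check on the block number, while comparisons on other fields report exists = false and drop out of the conjunction. A minimal sketch of that AND-combination (plain JDK predicates, not the RexNode walk itself):

import java.math.BigInteger;
import java.util.List;
import java.util.function.Predicate;

public class BlockRangeSketch {

    public static void main( String[] args ) {
        // Each entry mimics one comparison that survived the pushdown check.
        List<Predicate<BigInteger>> comparisons = List.of(
                b -> b.compareTo( BigInteger.valueOf( 100 ) ) >= 0,  // blockNr >= 100
                b -> b.compareTo( BigInteger.valueOf( 110 ) ) < 0 ); // blockNr < 110

        // AND-combination, mirroring makePredicate: every pushed-down filter must hold.
        Predicate<BigInteger> pushdown = comparisons.stream().reduce( b -> true, Predicate::and );

        System.out.println( pushdown.test( BigInteger.valueOf( 105 ) ) ); // true
        System.out.println( pushdown.test( BigInteger.valueOf( 120 ) ) ); // false
    }

}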
diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumSchema.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumSchema.java
deleted file mode 100644
index 676d7ed723..0000000000
--- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumSchema.java
+++ /dev/null
@@ -1,139 +0,0 @@
-/*
- * Copyright 2019-2023 The Polypheny Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.polypheny.db.adapter.ethereum;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
-import org.polypheny.db.adapter.ethereum.EthereumPlugin.EthereumDataSource;
-import org.polypheny.db.algebra.type.AlgDataType;
-import org.polypheny.db.algebra.type.AlgDataTypeFactory;
-import org.polypheny.db.algebra.type.AlgDataTypeImpl;
-import org.polypheny.db.algebra.type.AlgDataTypeSystem;
-import org.polypheny.db.catalog.Catalog;
-import org.polypheny.db.catalog.entity.CatalogColumn;
-import org.polypheny.db.catalog.entity.CatalogColumnPlacement;
-import org.polypheny.db.catalog.entity.CatalogTable;
-import org.polypheny.db.schema.Table;
-import org.polypheny.db.schema.impl.AbstractSchema;
-import org.polypheny.db.type.PolyType;
-import org.polypheny.db.type.PolyTypeFactoryImpl;
-import org.polypheny.db.util.Util;
-
-public class EthereumSchema extends AbstractSchema {
-
-    private final String clientUrl;
-    private Map<String, Table> tableMap = new HashMap<>();
-
-
-    public EthereumSchema( String clientUrl ) {
-        this.clientUrl = clientUrl;
-    }
-
-
-    public Table createBlockchainTable( CatalogTable catalogTable, List<CatalogColumnPlacement> columnPlacementsOnStore, EthereumDataSource ethereumDataSource ) {
-        final AlgDataTypeFactory typeFactory = new PolyTypeFactoryImpl( AlgDataTypeSystem.DEFAULT );
-        final AlgDataTypeFactory.Builder fieldInfo = typeFactory.builder();
-        List<EthereumFieldType> fieldTypes = new LinkedList<>();
-        List<Integer> fieldIds = new ArrayList<>( columnPlacementsOnStore.size() );
-        for ( CatalogColumnPlacement placement : columnPlacementsOnStore ) {
-            CatalogColumn catalogColumn = Catalog.getInstance().getColumn( placement.columnId );
-            AlgDataType sqlType = sqlType( typeFactory, catalogColumn.type, catalogColumn.length, catalogColumn.scale, null );
-            fieldInfo.add( catalogColumn.name, placement.physicalColumnName, sqlType ).nullable( catalogColumn.nullable );
-            fieldTypes.add( EthereumFieldType.getBlockchainFieldType( catalogColumn.type ) );
-            fieldIds.add( (int) placement.physicalPosition );
-        }
-
-        int[] fields = fieldIds.stream().mapToInt( i -> i ).toArray();
-        EthereumMapper mapper = catalogTable.name.equals( "block" ) ? EthereumMapper.BLOCK : EthereumMapper.TRANSACTION;
-        EthereumTable table = new EthereumTable( clientUrl, AlgDataTypeImpl.proto( fieldInfo.build() ), fieldTypes, fields, mapper, ethereumDataSource, catalogTable.id );
-        tableMap.put( catalogTable.name, table );
-        return table;
-    }
-
-
-    @Override
-    public Map<String, Table> getTableMap() {
-        return new HashMap<>( tableMap );
-    }
-
-
-    private AlgDataType sqlType( AlgDataTypeFactory typeFactory, PolyType dataTypeName, Integer length, Integer scale, String typeString ) {
-        // Fall back to ANY if type is unknown
-        final PolyType polyType = Util.first( dataTypeName, PolyType.ANY );
-        switch ( polyType ) {
-            case ARRAY:
-                AlgDataType component = null;
-                if ( typeString != null && typeString.endsWith( " ARRAY" ) ) {
-                    // E.g. hsqldb gives "INTEGER ARRAY", so we deduce the component type "INTEGER".
-                    final String remaining = typeString.substring( 0, typeString.length() - " ARRAY".length() );
-                    component = parseTypeString( typeFactory, remaining );
-                }
-                if ( component == null ) {
-                    component = typeFactory.createTypeWithNullability( typeFactory.createPolyType( PolyType.ANY ), true );
-                }
-                return typeFactory.createArrayType( component, -1 );
-        }
-        if ( scale != null && length != null && length >= 0 && scale >= 0 && polyType.allowsPrecScale( true, true ) ) {
-            return typeFactory.createPolyType( polyType, length, scale );
-        } else if ( length != null && length >= 0 && polyType.allowsPrecNoScale() ) {
-            return typeFactory.createPolyType( polyType, length );
-        } else {
-            assert polyType.allowsNoPrecNoScale();
-            return typeFactory.createPolyType( polyType );
-        }
-    }
-
-
-    /**
-     * Given "INTEGER", returns BasicSqlType(INTEGER).
-     * Given "VARCHAR(10)", returns BasicSqlType(VARCHAR, 10).
-     * Given "NUMERIC(10, 2)", returns BasicSqlType(NUMERIC, 10, 2).
-     */
-    private AlgDataType parseTypeString( AlgDataTypeFactory typeFactory, String typeString ) {
-        int precision = -1;
-        int scale = -1;
-        int open = typeString.indexOf( "(" );
-        if ( open >= 0 ) {
-            int close = typeString.indexOf( ")", open );
-            if ( close >= 0 ) {
-                String rest = typeString.substring( open + 1, close );
-                typeString = typeString.substring( 0, open );
-                int comma = rest.indexOf( "," );
-                if ( comma >= 0 ) {
-                    precision = Integer.parseInt( rest.substring( 0, comma ) );
-                    // Skip the comma itself; slicing from 'comma' would keep the separator and break Integer.parseInt().
-                    scale = Integer.parseInt( rest.substring( comma + 1 ).trim() );
-                } else {
-                    precision = Integer.parseInt( rest );
-                }
-            }
-        }
-        try {
-            final PolyType typeName = PolyType.valueOf( typeString );
-            return typeName.allowsPrecScale( true, true )
-                    ? typeFactory.createPolyType( typeName, precision, scale )
-                    : typeName.allowsPrecScale( true, false )
-                            ? typeFactory.createPolyType( typeName, precision )
-                            : typeFactory.createPolyType( typeName );
-        } catch ( IllegalArgumentException e ) {
-            return typeFactory.createTypeWithNullability( typeFactory.createPolyType( PolyType.ANY ), true );
-        }
-    }
-
-}
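The precision/scale slicing in parseTypeString is easy to get wrong around the comma: slicing from the comma index keeps the separator and makes Integer.parseInt() throw for inputs like "NUMERIC(10, 2)". The correct slicing, reduced to a standalone sketch:

public class TypeStringSketch {

    public static void main( String[] args ) {
        String typeString = "NUMERIC(10, 2)";
        int open = typeString.indexOf( '(' );
        int close = typeString.indexOf( ')', open );
        String rest = typeString.substring( open + 1, close );                  // "10, 2"
        int comma = rest.indexOf( ',' );
        int precision = Integer.parseInt( rest.substring( 0, comma ).trim() );  // 10
        int scale = Integer.parseInt( rest.substring( comma + 1 ).trim() );     // 2 (note the comma + 1)
        System.out.println( typeString.substring( 0, open ) + " precision=" + precision + " scale=" + scale );
    }

}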
diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumTable.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumTable.java
deleted file mode 100644
index 6c095c2e93..0000000000
--- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumTable.java
+++ /dev/null
@@ -1,127 +0,0 @@
-/*
- * Copyright 2019-2023 The Polypheny Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.polypheny.db.adapter.ethereum;
-
-import java.math.BigInteger;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.concurrent.atomic.AtomicBoolean;
-import java.util.function.Predicate;
-import org.apache.calcite.linq4j.AbstractEnumerable;
-import org.apache.calcite.linq4j.Enumerable;
-import org.apache.calcite.linq4j.Enumerator;
-import org.polypheny.db.adapter.DataContext;
-import org.polypheny.db.adapter.ethereum.EthereumPlugin.EthereumDataSource;
-import org.polypheny.db.algebra.type.AlgDataType;
-import org.polypheny.db.algebra.type.AlgDataTypeFactory;
-import org.polypheny.db.algebra.type.AlgDataTypeField;
-import org.polypheny.db.algebra.type.AlgProtoDataType;
-import org.polypheny.db.rex.RexNode;
-import org.polypheny.db.schema.FilterableTable;
-import org.polypheny.db.schema.impl.AbstractTable;
-import org.polypheny.db.util.Pair;
-
-public class EthereumTable extends AbstractTable implements FilterableTable {
-
-    protected final String clientUrl;
-    protected final AlgProtoDataType protoRowType;
-    protected final int[] fields;
-    protected final EthereumDataSource ethereumDataSource;
-    protected final EthereumMapper mapper;
-    protected List<EthereumFieldType> fieldTypes;
-
-
-    public EthereumTable(
-            String clientUrl,
-            AlgProtoDataType protoRowType,
-            List<EthereumFieldType> fieldTypes,
-            int[] fields,
-            EthereumMapper mapper,
-            EthereumDataSource ethereumDataSource,
-            Long tableId ) {
-        this.clientUrl = clientUrl;
-        this.protoRowType = protoRowType;
-        this.fieldTypes = fieldTypes;
-        this.fields = fields;
-        this.ethereumDataSource = ethereumDataSource;
-        this.mapper = mapper;
-        this.tableId = tableId;
-    }
-
-
-    @Override
-    public AlgDataType getRowType( AlgDataTypeFactory typeFactory ) {
-        final List<AlgDataType> types = new ArrayList<>();
-        final List<String> names = new ArrayList<>();
-        for ( AlgDataTypeField field : this.protoRowType.apply( typeFactory ).getFieldList() ) {
-            types.add( field.getType() );
-            names.add( field.getName() );
-        }
-        return typeFactory.createStructType( Pair.zip( names, types ) );
-    }
-
-
-    public String toString() {
-        return "BlockchainTable";
-    }
-
-
-    @Override
-    public Enumerable<Object[]> scan( DataContext dataContext, List<RexNode> filters ) {
-        dataContext.getStatement().getTransaction().registerInvolvedAdapter( ethereumDataSource );
-        Predicate<BigInteger> blockNumberPredicate = EthereumPredicateFactory.ALWAYS_TRUE;
-        if ( ethereumDataSource.isExperimentalFiltering() ) {
-            if ( !filters.isEmpty() ) {
-                blockNumberPredicate = EthereumPredicateFactory.makePredicate( dataContext, filters, mapper );
-            }
-        }
-        final AtomicBoolean cancelFlag = DataContext.Variable.CANCEL_FLAG.get( dataContext );
-        final Predicate<BigInteger> finalBlockNumberPredicate = blockNumberPredicate;
-
-        if ( fields.length == 1 ) {
-            return new AbstractEnumerable<Object[]>() {
-                @Override
-                public Enumerator<Object[]> enumerator() {
-                    return new EthereumEnumerator<>(
-                            clientUrl,
-                            ethereumDataSource.getBlocks(),
-                            cancelFlag,
-                            true,
-                            null,
-                            mapper,
-                            finalBlockNumberPredicate,
-                            (EthereumEnumerator.RowConverter) EthereumEnumerator.converter( fieldTypes, fields ) );
-                }
-            };
-        }
-        return new AbstractEnumerable<Object[]>() {
-            @Override
-            public Enumerator<Object[]> enumerator() {
-                return new EthereumEnumerator<>(
-                        clientUrl,
-                        ethereumDataSource.getBlocks(),
-                        cancelFlag,
-                        true,
-                        null,
-                        mapper,
-                        finalBlockNumberPredicate,
-                        (EthereumEnumerator.RowConverter) EthereumEnumerator.converter( fieldTypes, fields ) );
            }
-        };
-    }
-
-}
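EthereumTable.scan above follows the standard linq4j pattern: the table hands the engine an AbstractEnumerable factory, and rows are pulled lazily through an Enumerator. A minimal self-contained sketch of that pattern, independent of the Ethereum specifics:

import org.apache.calcite.linq4j.AbstractEnumerable;
import org.apache.calcite.linq4j.Enumerable;
import org.apache.calcite.linq4j.Enumerator;

public class EnumerablePatternSketch {

    // Yields the integers 0 .. n-1; each call to enumerator() starts a fresh pass.
    static Enumerable<Integer> upTo( int n ) {
        return new AbstractEnumerable<Integer>() {
            @Override
            public Enumerator<Integer> enumerator() {
                return new Enumerator<Integer>() {
                    private int current = -1;

                    @Override
                    public Integer current() {
                        return current;
                    }

                    @Override
                    public boolean moveNext() {
                        return ++current < n;
                    }

                    @Override
                    public void reset() {
                        current = -1;
                    }

                    @Override
                    public void close() {
                        // Nothing to release here; a real enumerator would close its reader.
                    }
                };
            }
        };
    }

    public static void main( String[] args ) {
        for ( int i : upTo( 3 ) ) {
            System.out.println( i ); // 0, 1, 2
        }
    }

}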
diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/TransactionReader.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/TransactionReader.java
deleted file mode 100644
index 8d1887bc8c..0000000000
--- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/TransactionReader.java
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
- * Copyright 2019-2023 The Polypheny Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.polypheny.db.adapter.ethereum;
-
-import java.io.IOException;
-import java.math.BigInteger;
-import java.util.List;
-import java.util.function.Predicate;
-import org.web3j.protocol.core.DefaultBlockParameter;
-import org.web3j.protocol.core.methods.response.EthBlock;
-
-public class TransactionReader extends BlockReader {
-
-    private List<EthBlock.TransactionResult> transactionsList;
-    private int transactionIndex;
-
-
-    TransactionReader( String clientUrl, int blocks, Predicate<BigInteger> blockNumberPredicate ) {
-        super( clientUrl, blocks, blockNumberPredicate );
-        transactionIndex = -1;
-    }
-
-
-    @Override
-    public String[] readNext() throws IOException {
-        if ( this.blockReads <= 0 ) {
-            return null;
-        }
-
-        while ( this.currentBlock.compareTo( BigInteger.ZERO ) > 0 && (transactionsList == null || transactionsList.isEmpty()) ) {
-            if ( blockNumberPredicate.test( this.currentBlock ) ) {
-                transactionsList = web3j
-                        .ethGetBlockByNumber( DefaultBlockParameter.valueOf( currentBlock ), true )
-                        .send()
-                        .getBlock()
-                        .getTransactions();
-                transactionIndex = 0;
-                blockReads--;
-            }
-            this.currentBlock = this.currentBlock.subtract( BigInteger.ONE );
-        }
-
-        String[] res = EthereumMapper.TRANSACTION.map( transactionsList.get( transactionIndex++ ).get() );
-
-        if ( transactionIndex >= transactionsList.size() ) {
-            transactionsList = null;
-        }
-
-        return res;
-    }
-
-}
diff --git a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/FilePlugin.java b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/FilePlugin.java
index 2d1773d1a6..68a53af98b 100644
--- a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/FilePlugin.java
+++ b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/FilePlugin.java
@@ -53,14 +53,14 @@ import org.polypheny.db.catalog.entity.CatalogIndex;
 import org.polypheny.db.catalog.entity.CatalogPartitionPlacement;
 import org.polypheny.db.catalog.entity.CatalogPrimaryKey;
-import org.polypheny.db.catalog.entity.CatalogTable;
+import org.polypheny.db.catalog.entity.logical.LogicalTable;
+import org.polypheny.db.catalog.entity.physical.PhysicalTable;
 import org.polypheny.db.information.InformationGraph;
 import org.polypheny.db.information.InformationGraph.GraphData;
 import
org.polypheny.db.information.InformationGraph.GraphType;
 import org.polypheny.db.information.InformationGroup;
 import org.polypheny.db.information.InformationManager;
 import org.polypheny.db.prepare.Context;
-import org.polypheny.db.schema.Entity;
 import org.polypheny.db.schema.Namespace;
 import org.polypheny.db.schema.SchemaPlus;
 import org.polypheny.db.transaction.PolyXid;
@@ -183,7 +183,7 @@ public void createNewSchema( SchemaPlus rootSchema, String name, Long id ) {
 
 
     @Override
-    public Entity createTableSchema( CatalogTable catalogTable, List<CatalogColumnPlacement> columnPlacementsOnStore, CatalogPartitionPlacement partitionPlacement ) {
+    public PhysicalTable createTableSchema( PhysicalTable boilerplate ) {
         return currentSchema.createFileTable( catalogTable, columnPlacementsOnStore, partitionPlacement );
     }
 
@@ -195,7 +195,7 @@ public Namespace getCurrentSchema() {
 
 
     @Override
-    public void createTable( Context context, CatalogTable catalogTable, List<Long> partitionIds ) {
+    public void createTable( Context context, LogicalTable catalogTable, List<Long> partitionIds ) {
         context.getStatement().getTransaction().registerInvolvedAdapter( this );
 
         for ( long partitionId : partitionIds ) {
@@ -225,7 +225,7 @@ public void createTable( Context context, CatalogTable catalogTable, List<Long> partitionIds ) {
 
 
     @Override
-    public void dropTable( Context context, CatalogTable catalogTable, List<Long> partitionIds ) {
+    public void dropTable( Context context, LogicalTable catalogTable, List<Long> partitionIds ) {
         context.getStatement().getTransaction().registerInvolvedAdapter( this );
         // TODO check if it is on this store?
 
@@ -244,7 +244,7 @@ public void dropTable( Context context, CatalogTable catalogTable, List<Long> partitionIds ) {
 
 
     @Override
-    public void addColumn( Context context, CatalogTable catalogTable, CatalogColumn catalogColumn ) {
+    public void addColumn( Context context, LogicalTable catalogTable, CatalogColumn catalogColumn ) {
         context.getStatement().getTransaction().registerInvolvedAdapter( this );
 
         CatalogColumnPlacement ccp = null;
@@ -451,7 +451,7 @@ private void cleanupHardlinks( final PolyXid xid ) {
 
 
     @Override
-    public void truncate( Context context, CatalogTable table ) {
+    public void truncate( Context context, LogicalTable table ) {
         //context.getStatement().getTransaction().registerInvolvedStore( this );
         for ( CatalogPartitionPlacement partitionPlacement : catalog.getPartitionPlacementsByTableOnAdapter( getAdapterId(), table.id ) ) {
             FileTranslatableEntity fileTable = (FileTranslatableEntity) currentSchema.getEntity( table.name + "_" + partitionPlacement.partitionId );
@@ -487,7 +487,7 @@ public AvailableIndexMethod getDefaultIndexMethod() {
 
 
     @Override
-    public List<FunctionalIndexInfo> getFunctionalIndexes( CatalogTable catalogTable ) {
+    public List<FunctionalIndexInfo> getFunctionalIndexes( LogicalTable catalogTable ) {
         // TODO: Check if this is correct and find a better approach
         List<Long> pkIds = Catalog.getInstance().getPrimaryKey( catalogTable.primaryKey ).columnIds;
         return ImmutableList.of( new FunctionalIndexInfo( pkIds, "PRIMARY (unique)" ) );
diff --git a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/FileStoreSchema.java b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/FileStoreSchema.java
index 1a1c0660cc..e89e300667 100644
--- a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/FileStoreSchema.java
+++ b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/FileStoreSchema.java
@@ -41,7 +41,7 @@ import org.polypheny.db.catalog.entity.CatalogColumnPlacement;
 import org.polypheny.db.catalog.entity.CatalogPartitionPlacement;
 import
org.polypheny.db.catalog.entity.CatalogPrimaryKey; -import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.schema.Entity; import org.polypheny.db.schema.Namespace.Schema; import org.polypheny.db.schema.SchemaPlus; @@ -90,7 +90,7 @@ protected Map getTableMap() { public Entity createFileTable( - CatalogTable catalogTable, + LogicalTable catalogTable, List columnPlacementsOnStore, CatalogPartitionPlacement partitionPlacement ) { final AlgDataTypeFactory typeFactory = new PolyTypeFactoryImpl( AlgDataTypeSystem.DEFAULT ); diff --git a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/FileTranslatableEntity.java b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/FileTranslatableEntity.java index 5b2e4231da..1ea7dd33e0 100644 --- a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/FileTranslatableEntity.java +++ b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/FileTranslatableEntity.java @@ -29,9 +29,9 @@ import org.polypheny.db.adapter.file.algebra.FileScan; import org.polypheny.db.adapter.java.AbstractQueryableEntity; import org.polypheny.db.algebra.AlgNode; -import org.polypheny.db.algebra.core.Modify; -import org.polypheny.db.algebra.core.Modify.Operation; -import org.polypheny.db.algebra.logical.relational.LogicalModify; +import org.polypheny.db.algebra.core.relational.RelModify; +import org.polypheny.db.algebra.core.common.Modify.Operation; +import org.polypheny.db.algebra.logical.relational.LogicalRelModify; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; import org.polypheny.db.algebra.type.AlgProtoDataType; @@ -116,7 +116,7 @@ public AlgDataType getRowType( AlgDataTypeFactory typeFactory ) { @Override - public Modify toModificationAlg( + public RelModify toModificationAlg( AlgOptCluster cluster, AlgOptEntity table, CatalogReader catalogReader, @@ -126,7 +126,7 @@ public Modify toModificationAlg( List sourceExpressionList, boolean flattened ) { fileSchema.getConvention().register( cluster.getPlanner() ); - return new LogicalModify( + return new LogicalRelModify( cluster, cluster.traitSetOf( Convention.NONE ), table, diff --git a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/algebra/FileRules.java b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/algebra/FileRules.java index 4fcff08d4a..0a48d8f515 100644 --- a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/algebra/FileRules.java +++ b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/algebra/FileRules.java @@ -32,7 +32,7 @@ import org.polypheny.db.algebra.convert.ConverterRule; import org.polypheny.db.algebra.core.AlgFactories; import org.polypheny.db.algebra.core.Filter; -import org.polypheny.db.algebra.core.Modify; +import org.polypheny.db.algebra.core.relational.RelModify; import org.polypheny.db.algebra.core.Project; import org.polypheny.db.algebra.core.Union; import org.polypheny.db.algebra.core.Values; @@ -74,12 +74,12 @@ static class FileTableModificationRule extends ConverterRule { public FileTableModificationRule( FileConvention out, AlgBuilderFactory algBuilderFactory ) { - super( Modify.class, FileTableModificationRule::supports, Convention.NONE, out, algBuilderFactory, "FileTableModificationRule:" + out.getName() ); + super( RelModify.class, FileTableModificationRule::supports, Convention.NONE, out, algBuilderFactory, "FileTableModificationRule:" + 
out.getName() ); this.convention = out; } - private static boolean supports( Modify node ) { + private static boolean supports( RelModify node ) { if ( node.getSourceExpressionList() != null ) { return !UnsupportedRexCallVisitor.containsModelItem( node.getSourceExpressionList() ); } @@ -89,7 +89,7 @@ private static boolean supports( Modify node ) { @Override public boolean matches( AlgOptRuleCall call ) { - final Modify modify = call.alg( 0 ); + final RelModify modify = call.alg( 0 ); if ( modify.getEntity().unwrap( FileTranslatableEntity.class ) == null ) { // todo insert from select is not correctly implemented return false; @@ -107,7 +107,7 @@ public boolean matches( AlgOptRuleCall call ) { @Override public AlgNode convert( AlgNode alg ) { - final Modify modify = (Modify) alg; + final RelModify modify = (RelModify) alg; final ModifiableEntity modifiableTable = modify.getEntity().unwrap( ModifiableEntity.class ); if ( modifiableTable == null ) { diff --git a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/algebra/FileScan.java b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/algebra/FileScan.java index b9ed11aa05..dc13f25d78 100644 --- a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/algebra/FileScan.java +++ b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/algebra/FileScan.java @@ -22,7 +22,7 @@ import org.polypheny.db.adapter.file.FileAlg.FileImplementor.Operation; import org.polypheny.db.adapter.file.FileTranslatableEntity; import org.polypheny.db.algebra.AlgNode; -import org.polypheny.db.algebra.core.Scan; +import org.polypheny.db.algebra.core.relational.RelScan; import org.polypheny.db.algebra.metadata.AlgMetadataQuery; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.plan.AlgOptCluster; @@ -33,7 +33,7 @@ import org.polypheny.db.schema.ModelTrait; -public class FileScan extends Scan implements FileAlg { +public class FileScan extends RelScan implements FileAlg { private final FileTranslatableEntity fileTable; diff --git a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/algebra/FileTableModify.java b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/algebra/FileTableModify.java index fd0771c547..d422474928 100644 --- a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/algebra/FileTableModify.java +++ b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/algebra/FileTableModify.java @@ -24,7 +24,7 @@ import org.polypheny.db.adapter.file.Value; import org.polypheny.db.algebra.AbstractAlgNode; import org.polypheny.db.algebra.AlgNode; -import org.polypheny.db.algebra.core.Modify; +import org.polypheny.db.algebra.core.relational.RelModify; import org.polypheny.db.algebra.metadata.AlgMetadataQuery; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgOptCost; @@ -39,7 +39,7 @@ import org.polypheny.db.type.PolyType; -public class FileTableModify extends Modify implements FileAlg { +public class FileTableModify extends RelModify implements FileAlg { public FileTableModify( AlgOptCluster cluster, AlgTraitSet traits, AlgOptEntity table, CatalogReader catalogReader, AlgNode child, Operation operation, List updateColumnList, List sourceExpressionList, boolean flattened ) { super( cluster, traits, table, catalogReader, child, operation, updateColumnList, sourceExpressionList, flattened ); diff --git a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/source/Qfs.java 
b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/source/Qfs.java index b181364487..fbaf787a4e 100644 --- a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/source/Qfs.java +++ b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/source/Qfs.java @@ -36,15 +36,13 @@ import org.polypheny.db.adapter.Adapter.AdapterSettingString; import org.polypheny.db.adapter.DataSource; import org.polypheny.db.adapter.DeployMode; -import org.polypheny.db.catalog.entity.CatalogColumnPlacement; -import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; -import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.catalog.entity.physical.PhysicalTable; import org.polypheny.db.information.InformationGroup; import org.polypheny.db.information.InformationManager; import org.polypheny.db.information.InformationTable; import org.polypheny.db.information.InformationText; import org.polypheny.db.prepare.Context; -import org.polypheny.db.schema.Entity; import org.polypheny.db.schema.Namespace; import org.polypheny.db.schema.SchemaPlus; import org.polypheny.db.transaction.PolyXid; @@ -90,7 +88,7 @@ public void createNewSchema( SchemaPlus rootSchema, String name, Long id ) { @Override - public Entity createTableSchema( CatalogTable combinedTable, List columnPlacementsOnStore, CatalogPartitionPlacement partitionPlacement ) { + public PhysicalTable createTableSchema( PhysicalTable boilerplate ) { return currentSchema.createFileTable( combinedTable, columnPlacementsOnStore, partitionPlacement ); } @@ -102,7 +100,7 @@ public Namespace getCurrentSchema() { @Override - public void truncate( Context context, CatalogTable table ) { + public void truncate( Context context, LogicalTable table ) { throw new RuntimeException( "QFS does not support truncate" ); } diff --git a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/source/QfsSchema.java b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/source/QfsSchema.java index 84fe4b5a68..374af574d2 100644 --- a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/source/QfsSchema.java +++ b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/source/QfsSchema.java @@ -45,7 +45,7 @@ import org.polypheny.db.catalog.entity.CatalogColumnPlacement; import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; import org.polypheny.db.catalog.entity.CatalogPrimaryKey; -import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.schema.Entity; import org.polypheny.db.schema.Namespace.Schema; import org.polypheny.db.schema.SchemaPlus; @@ -93,7 +93,7 @@ protected Map getTableMap() { } - public Entity createFileTable( CatalogTable catalogTable, List columnPlacementsOnStore, CatalogPartitionPlacement partitionPlacement ) { + public Entity createFileTable( LogicalTable catalogTable, List columnPlacementsOnStore, CatalogPartitionPlacement partitionPlacement ) { final AlgDataTypeFactory typeFactory = new PolyTypeFactoryImpl( AlgDataTypeSystem.DEFAULT ); final AlgDataTypeFactory.Builder fieldInfo = typeFactory.builder(); ArrayList columnIds = new ArrayList<>(); diff --git a/plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/algebra/GeodeScan.java b/plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/algebra/GeodeScan.java index 067c6fc6df..8da1f05317 100644 --- 
a/plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/algebra/GeodeScan.java +++ b/plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/algebra/GeodeScan.java @@ -36,7 +36,7 @@ import java.util.List; import org.polypheny.db.algebra.AlgNode; -import org.polypheny.db.algebra.core.Scan; +import org.polypheny.db.algebra.core.relational.RelScan; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgOptEntity; @@ -48,7 +48,7 @@ /** * Relational expression representing a scan of a Geode collection. */ -public class GeodeScan extends Scan implements GeodeAlg { +public class GeodeScan extends RelScan implements GeodeAlg { final GeodeEntity geodeTable; final AlgDataType projectRowType; diff --git a/plugins/hsqldb-adapter/src/main/java/org/polypheny/db/hsqldb/stores/HsqldbStore.java b/plugins/hsqldb-adapter/src/main/java/org/polypheny/db/hsqldb/stores/HsqldbStore.java index 7a10b37cf7..519be5e95c 100644 --- a/plugins/hsqldb-adapter/src/main/java/org/polypheny/db/hsqldb/stores/HsqldbStore.java +++ b/plugins/hsqldb-adapter/src/main/java/org/polypheny/db/hsqldb/stores/HsqldbStore.java @@ -39,11 +39,11 @@ import org.polypheny.db.catalog.entity.CatalogColumnPlacement; import org.polypheny.db.catalog.entity.CatalogIndex; import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; -import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.catalog.entity.physical.PhysicalTable; import org.polypheny.db.config.RuntimeConfig; import org.polypheny.db.plugins.PolyPluginManager; import org.polypheny.db.prepare.Context; -import org.polypheny.db.schema.Entity; import org.polypheny.db.schema.Namespace; import org.polypheny.db.sql.language.dialect.HsqldbSqlDialect; import org.polypheny.db.transaction.PUID; @@ -105,7 +105,7 @@ protected ConnectionFactory deployEmbedded() { @Override - public Entity createTableSchema( CatalogTable catalogTable, List columnPlacementsOnStore, CatalogPartitionPlacement partitionPlacement ) { + public PhysicalTable createTableSchema( PhysicalTable boilerplate ) { return currentJdbcSchema.createJdbcTable( catalogTable, columnPlacementsOnStore, partitionPlacement ); } @@ -184,7 +184,7 @@ public AvailableIndexMethod getDefaultIndexMethod() { @Override - public List getFunctionalIndexes( CatalogTable catalogTable ) { + public List getFunctionalIndexes( LogicalTable catalogTable ) { return ImmutableList.of(); } diff --git a/plugins/html-adapter/src/main/java/org/polypheny/db/adapter/html/HtmlScan.java b/plugins/html-adapter/src/main/java/org/polypheny/db/adapter/html/HtmlScan.java index 6adba99f63..0ce3918474 100644 --- a/plugins/html-adapter/src/main/java/org/polypheny/db/adapter/html/HtmlScan.java +++ b/plugins/html-adapter/src/main/java/org/polypheny/db/adapter/html/HtmlScan.java @@ -45,7 +45,7 @@ import org.polypheny.db.adapter.enumerable.PhysTypeImpl; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.AlgWriter; -import org.polypheny.db.algebra.core.Scan; +import org.polypheny.db.algebra.core.relational.RelScan; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; import org.polypheny.db.algebra.type.AlgDataTypeField; @@ -61,7 +61,7 @@ * * Trivially modified from CsvScan. 
*/ -class HtmlScan extends Scan implements EnumerableAlg { +class HtmlScan extends RelScan implements EnumerableAlg { private final HtmlEntity webTable; private final int[] fields; diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcEntity.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcEntity.java index 20b1f738db..48e0fb4341 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcEntity.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcEntity.java @@ -48,9 +48,9 @@ import org.polypheny.db.adapter.java.AbstractQueryableEntity; import org.polypheny.db.adapter.java.JavaTypeFactory; import org.polypheny.db.algebra.AlgNode; -import org.polypheny.db.algebra.core.Modify; -import org.polypheny.db.algebra.core.Modify.Operation; -import org.polypheny.db.algebra.logical.relational.LogicalModify; +import org.polypheny.db.algebra.core.relational.RelModify; +import org.polypheny.db.algebra.core.common.Modify.Operation; +import org.polypheny.db.algebra.logical.relational.LogicalRelModify; import org.polypheny.db.algebra.operators.OperatorName; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; @@ -235,7 +235,7 @@ public Enumerable scan( DataContext root ) { @Override - public Modify toModificationAlg( + public RelModify toModificationAlg( AlgOptCluster cluster, AlgOptEntity table, CatalogReader catalogReader, @@ -245,7 +245,7 @@ public Modify toModificationAlg( List sourceExpressionList, boolean flattened ) { jdbcSchema.getConvention().register( cluster.getPlanner() ); - return new LogicalModify( + return new LogicalRelModify( cluster, cluster.traitSetOf( Convention.NONE ), table, diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcImplementor.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcImplementor.java index 7b7658d69d..0910629d95 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcImplementor.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcImplementor.java @@ -40,7 +40,7 @@ import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; -import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.languages.ParserPos; import org.polypheny.db.sql.language.SqlDialect; import org.polypheny.db.sql.language.SqlIdentifier; @@ -83,7 +83,7 @@ public SqlIdentifier getPhysicalTableName( CatalogPartitionPlacement placement ) @Override public SqlIdentifier getPhysicalColumnName( CatalogPartitionPlacement placement, String columnName ) { - CatalogTable catalogTable = Catalog.getInstance().getTable( placement.tableId ); + LogicalTable catalogTable = Catalog.getInstance().getTable( placement.tableId ); JdbcEntity table = schema.getTableMap().get( catalogTable.name + "_" + placement.partitionId ); if ( table.hasPhysicalColumnName( columnName ) ) { return table.physicalColumnName( columnName ); diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcRules.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcRules.java index f2b6de603e..4082fb6a71 100644 --- 
a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcRules.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcRules.java @@ -59,7 +59,7 @@ import org.polypheny.db.algebra.core.Join; import org.polypheny.db.algebra.core.JoinAlgType; import org.polypheny.db.algebra.core.Minus; -import org.polypheny.db.algebra.core.Modify; +import org.polypheny.db.algebra.core.relational.RelModify; import org.polypheny.db.algebra.core.Project; import org.polypheny.db.algebra.core.SemiJoin; import org.polypheny.db.algebra.core.Sort; @@ -1000,13 +1000,13 @@ public static class JdbcTableModificationRule extends JdbcConverterRule { * Creates a JdbcTableModificationRule. */ private JdbcTableModificationRule( JdbcConvention out, AlgBuilderFactory algBuilderFactory ) { - super( Modify.class, (Predicate) r -> true, Convention.NONE, out, algBuilderFactory, "JdbcTableModificationRule." + out ); + super( RelModify.class, (Predicate) r -> true, Convention.NONE, out, algBuilderFactory, "JdbcTableModificationRule." + out ); } @Override public boolean matches( AlgOptRuleCall call ) { - final Modify modify = call.alg( 0 ); + final RelModify modify = call.alg( 0 ); if ( modify.getEntity().unwrap( JdbcEntity.class ) != null ) { JdbcEntity table = modify.getEntity().unwrap( JdbcEntity.class ); if ( out.getJdbcSchema() == table.getSchema() ) { @@ -1019,7 +1019,7 @@ public boolean matches( AlgOptRuleCall call ) { @Override public AlgNode convert( AlgNode alg ) { - final Modify modify = (Modify) alg; + final RelModify modify = (RelModify) alg; final ModifiableEntity modifiableTable = modify.getEntity().unwrap( ModifiableEntity.class ); if ( modifiableTable == null ) { return null; @@ -1043,7 +1043,7 @@ public AlgNode convert( AlgNode alg ) { /** * Table-modification operator implemented in JDBC convention. */ - public static class JdbcTableModify extends Modify implements JdbcAlg { + public static class JdbcTableModify extends RelModify implements JdbcAlg { private final Expression expression; diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcScan.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcScan.java index 0b77bbb6b1..4a0b0f78fd 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcScan.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcScan.java @@ -37,7 +37,7 @@ import java.util.List; import org.polypheny.db.adapter.jdbc.rel2sql.SqlImplementor.Result; import org.polypheny.db.algebra.AlgNode; -import org.polypheny.db.algebra.core.Scan; +import org.polypheny.db.algebra.core.relational.RelScan; import org.polypheny.db.algebra.metadata.AlgMetadataQuery; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgOptCost; @@ -50,7 +50,7 @@ /** * Relational expression representing a scan of a table in a JDBC data source. 
*/ -public class JdbcScan extends Scan implements JdbcAlg { +public class JdbcScan extends RelScan implements JdbcAlg { protected final JdbcEntity jdbcTable; diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcSchema.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcSchema.java index 1662df9252..6b649bcdc4 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcSchema.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcSchema.java @@ -62,7 +62,7 @@ import org.polypheny.db.catalog.entity.CatalogColumn; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; -import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.schema.Entity; import org.polypheny.db.schema.Function; import org.polypheny.db.schema.Namespace; @@ -143,7 +143,7 @@ public JdbcSchema( public JdbcEntity createJdbcTable( - CatalogTable catalogTable, + LogicalTable catalogTable, List columnPlacementsOnStore, CatalogPartitionPlacement partitionPlacement ) { // Temporary type factory, just for the duration of this method. Allowable because we're creating a proto-type, diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/rel2sql/AlgToSqlConverter.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/rel2sql/AlgToSqlConverter.java index f80828d045..c09740c65e 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/rel2sql/AlgToSqlConverter.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/rel2sql/AlgToSqlConverter.java @@ -60,17 +60,17 @@ import org.polypheny.db.algebra.core.JoinAlgType; import org.polypheny.db.algebra.core.Match; import org.polypheny.db.algebra.core.Minus; -import org.polypheny.db.algebra.core.Modify; import org.polypheny.db.algebra.core.Project; -import org.polypheny.db.algebra.core.Scan; import org.polypheny.db.algebra.core.Sort; import org.polypheny.db.algebra.core.Union; import org.polypheny.db.algebra.core.Values; +import org.polypheny.db.algebra.core.relational.RelModify; +import org.polypheny.db.algebra.core.relational.RelScan; import org.polypheny.db.algebra.operators.OperatorName; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; -import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.languages.OperatorRegistry; import org.polypheny.db.languages.ParserPos; import org.polypheny.db.nodes.Node; @@ -273,9 +273,9 @@ public Result visit( Aggregate e ) { /** * @see #dispatch */ - public Result visit( Scan e ) { + public Result visit( RelScan e ) { return result( - new SqlIdentifier( List.of( e.getEntity().unwrap( CatalogTable.class ).getNamespaceName(), e.getEntity().getCatalogEntity().name ), ParserPos.ZERO ), + new SqlIdentifier( List.of( e.getEntity().unwrap( LogicalTable.class ).getNamespaceName(), e.getEntity().getCatalogEntity().name ), ParserPos.ZERO ), ImmutableList.of( Clause.FROM ), e, null ); @@ -350,7 +350,7 @@ public Result visit( Values e ) { final Map pairs = ImmutableMap.of(); final Context context = aliasContext( pairs, false ); SqlNode query; - final boolean rename = 
stack.size() <= 1 || !(Iterables.get( stack, 1 ).r instanceof Modify); + final boolean rename = stack.size() <= 1 || !(Iterables.get( stack, 1 ).r instanceof RelModify); final List fieldNames = e.getRowType().getFieldNames(); if ( !dialect.supportsAliasedValues() && rename ) { // Oracle does not support "AS t (c1, c2)". So instead of @@ -448,7 +448,7 @@ public Result visit( Sort e ) { /** * @see #dispatch */ - public Result visit( Modify modify ) { + public Result visit( RelModify modify ) { final Map pairs = ImmutableMap.of(); final Context context = aliasContext( pairs, false ); diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java index 58783239e7..54b3217daa 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java @@ -37,7 +37,7 @@ import org.polypheny.db.adapter.jdbc.connection.ConnectionHandlerException; import org.polypheny.db.adapter.jdbc.connection.TransactionalConnectionFactory; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.plugins.PolyPluginManager; import org.polypheny.db.prepare.Context; import org.polypheny.db.schema.SchemaPlus; @@ -119,7 +119,7 @@ public void createNewSchema( SchemaPlus rootSchema, String name, Long id ) { @Override - public void truncate( Context context, CatalogTable catalogTable ) { + public void truncate( Context context, LogicalTable catalogTable ) { // We get the physical schema / table name by checking existing column placements of the same logical table placed on this store. // This works because there is only one physical table for each logical table on JDBC stores. The reason for choosing this // approach rather than using the default physical schema / table names is that this approach allows truncating linked tables. 
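The lookup-instead-of-derive approach described in this comment also reads well in isolation: take any placement of the logical table on this adapter and use its physical names directly. A hedged sketch of that pattern (the catalog accessor and placement fields are assumptions for illustration, not the store's exact code):

// Sketch only: one placement suffices because JDBC stores keep exactly one
// physical table per logical table; its physical names drive the statement.
static String buildTruncateQuery( Catalog catalog, SqlDialect dialect, int adapterId, LogicalTable table ) {
    CatalogPartitionPlacement placement =
            catalog.getPartitionPlacementsByTableOnAdapter( adapterId, table.id ).get( 0 );
    return "TRUNCATE TABLE "
            + dialect.quoteIdentifier( placement.physicalSchemaName )
            + "." + dialect.quoteIdentifier( placement.physicalTableName );
}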
diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/stores/AbstractJdbcStore.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/stores/AbstractJdbcStore.java index a2c98987fd..7d08eeb869 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/stores/AbstractJdbcStore.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/stores/AbstractJdbcStore.java @@ -34,7 +34,7 @@ import org.polypheny.db.catalog.entity.CatalogColumn; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; -import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.config.RuntimeConfig; import org.polypheny.db.docker.DockerInstance; import org.polypheny.db.languages.ParserPos; @@ -123,7 +123,7 @@ public void createUdfs() { @Override - public void createTable( Context context, CatalogTable catalogTable, List partitionIds ) { + public void createTable( Context context, LogicalTable catalogTable, List partitionIds ) { List qualifiedNames = new LinkedList<>(); qualifiedNames.add( catalogTable.getNamespaceName() ); qualifiedNames.add( catalogTable.name ); @@ -161,7 +161,7 @@ public void createTable( Context context, CatalogTable catalogTable, List } - protected StringBuilder buildCreateTableQuery( String schemaName, String physicalTableName, CatalogTable catalogTable ) { + protected StringBuilder buildCreateTableQuery( String schemaName, String physicalTableName, LogicalTable catalogTable ) { StringBuilder builder = new StringBuilder(); builder.append( "CREATE TABLE " ) .append( dialect.quoteIdentifier( schemaName ) ) @@ -185,7 +185,7 @@ protected StringBuilder buildCreateTableQuery( String schemaName, String physica @Override - public void addColumn( Context context, CatalogTable catalogTable, CatalogColumn catalogColumn ) { + public void addColumn( Context context, LogicalTable catalogTable, CatalogColumn catalogColumn ) { String physicalColumnName = getPhysicalColumnName( catalogColumn.id ); for ( CatalogPartitionPlacement partitionPlacement : catalog.getPartitionPlacementsByTableOnAdapter( this.getAdapterId(), catalogTable.id ) ) { String physicalTableName = partitionPlacement.physicalTableName; @@ -208,7 +208,7 @@ public void addColumn( Context context, CatalogTable catalogTable, CatalogColumn } - protected StringBuilder buildAddColumnQuery( String physicalSchemaName, String physicalTableName, String physicalColumnName, CatalogTable catalogTable, CatalogColumn catalogColumn ) { + protected StringBuilder buildAddColumnQuery( String physicalSchemaName, String physicalTableName, String physicalColumnName, LogicalTable catalogTable, CatalogColumn catalogColumn ) { StringBuilder builder = new StringBuilder(); builder.append( "ALTER TABLE " ) .append( dialect.quoteIdentifier( physicalSchemaName ) ) @@ -308,7 +308,7 @@ public void updateColumnType( Context context, CatalogColumnPlacement columnPlac @Override - public void dropTable( Context context, CatalogTable catalogTable, List partitionIds ) { + public void dropTable( Context context, LogicalTable catalogTable, List partitionIds ) { // We get the physical schema / table name by checking existing column placements of the same logical table placed on this store. // This works because there is only one physical table for each logical table on JDBC stores. 
The reason for choosing this // approach rather than using the default physical schema / table names is that this approach allows dropping linked tables. @@ -353,7 +353,7 @@ public void dropColumn( Context context, CatalogColumnPlacement columnPlacement @Override - public void truncate( Context context, CatalogTable catalogTable ) { + public void truncate( Context context, LogicalTable catalogTable ) { // We get the physical schema / table name by checking existing column placements of the same logical table placed on this store. // This works because there is only one physical table for each logical table on JDBC stores. The reason for choosing this // approach rather than using the default physical schema / table names is that this approach allows truncating linked tables. diff --git a/plugins/jdbc-adapter-framework/src/test/java/org/polypheny/db/adapter/jdbc/rel2sql/PlannerTest.java b/plugins/jdbc-adapter-framework/src/test/java/org/polypheny/db/adapter/jdbc/rel2sql/PlannerTest.java index d24d61616e..4aed2ec0e8 100644 --- a/plugins/jdbc-adapter-framework/src/test/java/org/polypheny/db/adapter/jdbc/rel2sql/PlannerTest.java +++ b/plugins/jdbc-adapter-framework/src/test/java/org/polypheny/db/adapter/jdbc/rel2sql/PlannerTest.java @@ -36,7 +36,7 @@ import org.polypheny.db.algebra.constant.ExplainFormat; import org.polypheny.db.algebra.constant.ExplainLevel; import org.polypheny.db.algebra.convert.ConverterRule; -import org.polypheny.db.algebra.core.Scan; +import org.polypheny.db.algebra.core.relational.RelScan; import org.polypheny.db.algebra.rules.FilterMergeRule; import org.polypheny.db.catalog.Catalog.NamespaceType; import org.polypheny.db.languages.Parser; @@ -179,7 +179,7 @@ public AlgNode convert( AlgNode alg ) { /** * Relational expression representing a "mock" scan of a table in a JDBC data source. 
*/ - private static class MockJdbcScan extends Scan implements JdbcAlg { + private static class MockJdbcScan extends RelScan implements JdbcAlg { MockJdbcScan( AlgOptCluster cluster, AlgOptEntity table, JdbcConvention jdbcConvention ) { super( cluster, cluster.traitSetOf( jdbcConvention ), table ); diff --git a/plugins/mapdb-catalog/src/main/java/org/polypheny/db/catalog/CatalogImpl.java b/plugins/mapdb-catalog/src/main/java/org/polypheny/db/catalog/CatalogImpl.java index 7be7941aeb..50d2670e9e 100644 --- a/plugins/mapdb-catalog/src/main/java/org/polypheny/db/catalog/CatalogImpl.java +++ b/plugins/mapdb-catalog/src/main/java/org/polypheny/db/catalog/CatalogImpl.java @@ -72,7 +72,6 @@ import org.polypheny.db.catalog.entity.CatalogDatabase; import org.polypheny.db.catalog.entity.CatalogDefaultValue; import org.polypheny.db.catalog.entity.CatalogForeignKey; -import org.polypheny.db.catalog.entity.CatalogGraphDatabase; import org.polypheny.db.catalog.entity.CatalogGraphMapping; import org.polypheny.db.catalog.entity.CatalogGraphPlacement; import org.polypheny.db.catalog.entity.CatalogIndex; @@ -85,10 +84,11 @@ import org.polypheny.db.catalog.entity.CatalogPrimaryKey; import org.polypheny.db.catalog.entity.CatalogQueryInterface; import org.polypheny.db.catalog.entity.CatalogSchema; -import org.polypheny.db.catalog.entity.CatalogTable; import org.polypheny.db.catalog.entity.CatalogUser; import org.polypheny.db.catalog.entity.CatalogView; import org.polypheny.db.catalog.entity.MaterializedCriteria; +import org.polypheny.db.catalog.entity.logical.LogicalGraph; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.exceptions.GenericCatalogException; import org.polypheny.db.catalog.exceptions.GraphAlreadyExistsException; import org.polypheny.db.catalog.exceptions.NoTablePrimaryKeyException; @@ -154,8 +154,8 @@ public class CatalogImpl extends Catalog { private static BTreeMap schemaNames; private static HTreeMap> schemaChildren; - private static BTreeMap tables; - private static BTreeMap tableNames; + private static BTreeMap tables; + private static BTreeMap tableNames; private static HTreeMap> tableChildren; private static BTreeMap collections; @@ -190,9 +190,9 @@ public class CatalogImpl extends Catalog { // Container Object that contains all other placements private static BTreeMap dataPlacements; // (AdapterId, TableId) -> CatalogDataPlacement - private static BTreeMap graphs; - private static BTreeMap graphAliases; - private static BTreeMap graphNames; + private static BTreeMap graphs; + private static BTreeMap graphAliases; + private static BTreeMap graphNames; private static BTreeMap graphPlacements; private static BTreeMap graphMappings; @@ -403,7 +403,7 @@ public void restoreColumnPlacements( Transaction transaction ) { for ( CatalogColumn c : columns.values() ) { List placements = getColumnPlacement( c.id ); - CatalogTable catalogTable = getTable( c.tableId ); + LogicalTable catalogTable = getTable( c.tableId ); // No column placements need to be restored if it is a view if ( catalogTable.entityType != EntityType.VIEW ) { @@ -434,7 +434,7 @@ public void restoreColumnPlacements( Transaction transaction ) { Map persistent = placements.stream().collect( Collectors.toMap( p -> p.adapterId, p -> manager.getStore( p.adapterId ).isPersistent() ) ); if ( !persistent.containsValue( true ) ) { // no persistent placement for this column - CatalogTable table = getTable( c.tableId ); + LogicalTable table = getTable( c.tableId ); for ( CatalogColumnPlacement p : 
placements ) { DataStore store = manager.getStore( p.adapterId ); @@ -472,7 +472,7 @@ public void restoreColumnPlacements( Transaction transaction ) { public void restoreViews( Transaction transaction ) { Statement statement = transaction.createStatement(); - for ( CatalogTable c : tables.values() ) { + for ( LogicalTable c : tables.values() ) { if ( c.entityType == EntityType.VIEW || c.entityType == EntityType.MATERIALIZED_VIEW ) { String query; QueryLanguage language; @@ -835,19 +835,19 @@ public void restoreInterfacesIfNecessary() { */ private void addDefaultCsvColumns( CatalogAdapter csv ) throws UnknownSchemaException, UnknownTableException, GenericCatalogException, UnknownColumnException, UnknownDatabaseException { CatalogSchema schema = getSchema( "APP", "public" ); - CatalogTable depts = getTable( schema.id, "depts" ); + LogicalTable depts = getTable( schema.id, "depts" ); addDefaultCsvColumn( csv, depts, "deptno", PolyType.INTEGER, null, 1, null ); addDefaultCsvColumn( csv, depts, "name", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 2, 20 ); - CatalogTable emps = getTable( schema.id, "emps" ); + LogicalTable emps = getTable( schema.id, "emps" ); addDefaultCsvColumn( csv, emps, "empid", PolyType.INTEGER, null, 1, null ); addDefaultCsvColumn( csv, emps, "deptno", PolyType.INTEGER, null, 2, null ); addDefaultCsvColumn( csv, emps, "name", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 3, 20 ); addDefaultCsvColumn( csv, emps, "salary", PolyType.INTEGER, null, 4, null ); addDefaultCsvColumn( csv, emps, "commission", PolyType.INTEGER, null, 5, null ); - CatalogTable emp = getTable( schema.id, "emp" ); + LogicalTable emp = getTable( schema.id, "emp" ); addDefaultCsvColumn( csv, emp, "employeeno", PolyType.INTEGER, null, 1, null ); addDefaultCsvColumn( csv, emp, "age", PolyType.INTEGER, null, 2, null ); addDefaultCsvColumn( csv, emp, "gender", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 3, 20 ); @@ -859,7 +859,7 @@ private void addDefaultCsvColumns( CatalogAdapter csv ) throws UnknownSchemaExce addDefaultCsvColumn( csv, emp, "workingyears", PolyType.INTEGER, null, 9, null ); addDefaultCsvColumn( csv, emp, "yearsatcompany", PolyType.INTEGER, null, 10, null ); - CatalogTable work = getTable( schema.id, "work" ); + LogicalTable work = getTable( schema.id, "work" ); addDefaultCsvColumn( csv, work, "employeeno", PolyType.INTEGER, null, 1, null ); addDefaultCsvColumn( csv, work, "educationfield", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 2, 20 ); addDefaultCsvColumn( csv, work, "jobinvolvement", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 3, 20 ); @@ -896,7 +896,7 @@ private void addDefaultCsvColumns( CatalogAdapter csv ) throws UnknownSchemaExce } - private void addDefaultCsvColumn( CatalogAdapter csv, CatalogTable table, String name, PolyType type, Collation collation, int position, Integer length ) { + private void addDefaultCsvColumn( CatalogAdapter csv, LogicalTable table, String name, PolyType type, Collation collation, int position, Integer length ) { if ( !checkIfExistsColumn( table.id, name ) ) { long colId = addColumn( name, table.id, position, type, null, length, null, null, null, false, collation ); String filename = table.name + ".csv"; @@ -913,7 +913,7 @@ private void addDefaultCsvColumn( CatalogAdapter csv, CatalogTable table, String } - private void addDefaultColumn( CatalogAdapter adapter, CatalogTable table, String name, PolyType type, Collation collation, int position, Integer length ) { + private void addDefaultColumn( CatalogAdapter adapter, LogicalTable table, 
String name, PolyType type, Collation collation, int position, Integer length ) { if ( !checkIfExistsColumn( table.id, name ) ) { long colId = addColumn( name, table.id, position, type, null, length, null, null, null, false, collation ); addColumnPlacement( adapter.id, colId, PlacementType.AUTOMATIC, "col" + colId, table.name, name ); @@ -1216,7 +1216,7 @@ public long addGraph( long databaseId, String name, List stores, bool long id = addNamespace( name, databaseId, Catalog.defaultUserId, NamespaceType.GRAPH ); - CatalogGraphDatabase graph = new CatalogGraphDatabase( databaseId, id, name, Catalog.defaultUserId, modifiable, ImmutableList.of(), true ); + LogicalGraph graph = new LogicalGraph( databaseId, id, name, Catalog.defaultUserId, modifiable, ImmutableList.of(), true ); synchronized ( this ) { graphs.put( id, graph ); @@ -1233,7 +1233,7 @@ public long addGraph( long databaseId, String name, List stores, bool */ @Override public void addGraphAlias( long graphId, String alias, boolean ifNotExists ) { - CatalogGraphDatabase graph = Objects.requireNonNull( getGraph( graphId ) ); + LogicalGraph graph = Objects.requireNonNull( getGraph( graphId ) ); if ( graphAliases.containsKey( alias ) ) { if ( !ifNotExists ) { @@ -1634,7 +1634,7 @@ public void deleteGraph( long id ) { throw new UnknownGraphException( id ); } - CatalogGraphDatabase old = Objects.requireNonNull( graphs.get( id ) ); + LogicalGraph old = Objects.requireNonNull( graphs.get( id ) ); removeGraphLogistics( id ); @@ -1652,7 +1652,7 @@ public void deleteGraph( long id ) { * {@inheritDoc} */ @Override - public CatalogGraphDatabase getGraph( long id ) { + public LogicalGraph getGraph( long id ) { if ( !graphs.containsKey( id ) ) { throw new UnknownGraphException( id ); } @@ -1664,7 +1664,7 @@ public CatalogGraphDatabase getGraph( long id ) { * {@inheritDoc} */ @Override - public List getGraphs( long databaseId, Pattern graphName ) { + public List getGraphs( long databaseId, Pattern graphName ) { if ( graphName != null ) { return ImmutableList.copyOf( Stream.concat( @@ -1704,7 +1704,7 @@ public void deleteSchema( long schemaId ) { * {@inheritDoc} */ @Override - public List getTables( long schemaId, Pattern tableNamePattern ) { + public List getTables( long schemaId, Pattern tableNamePattern ) { if ( schemas.containsKey( schemaId ) ) { CatalogSchema schema = Objects.requireNonNull( schemas.get( schemaId ) ); @@ -1722,7 +1722,7 @@ public List getTables( long schemaId, Pattern tableNamePattern ) { * {@inheritDoc} */ @Override - public List getTables( long databaseId, Pattern schemaNamePattern, Pattern tableNamePattern ) { + public List getTables( long databaseId, Pattern schemaNamePattern, Pattern tableNamePattern ) { if ( schemaNamePattern != null && tableNamePattern != null ) { CatalogSchema schema = schemaNames.get( new Object[]{ databaseId, schemaNamePattern.pattern } ); if ( schema != null ) { @@ -1745,11 +1745,11 @@ public List getTables( long databaseId, Pattern schemaNamePattern, * {@inheritDoc} */ @Override - public List getTables( Pattern databaseNamePattern, Pattern schemaNamePattern, Pattern tableNamePattern ) { + public List getTables( Pattern databaseNamePattern, Pattern schemaNamePattern, Pattern tableNamePattern ) { List catalogSchemas = getSchemas( databaseNamePattern, schemaNamePattern ); if ( catalogSchemas.size() > 0 ) { - Stream catalogTables = catalogSchemas.stream() + Stream catalogTables = catalogSchemas.stream() .filter( t -> schemaChildren.containsKey( t.id ) ) .flatMap( t -> Objects.requireNonNull( 
schemaChildren.get( t.id ) ).stream() ) .map( tables::get ); @@ -1774,7 +1774,7 @@ public List getTables( Pattern databaseNamePattern, Pattern schema * {@inheritDoc} */ @Override - public CatalogTable getTable( long tableId ) { + public LogicalTable getTable( long tableId ) { try { return Objects.requireNonNull( tables.get( tableId ) ); } catch ( NullPointerException e ) { @@ -1787,7 +1787,7 @@ public CatalogTable getTable( long tableId ) { * {@inheritDoc} */ @Override - public CatalogTable getTable( long schemaId, String tableName ) throws UnknownTableException { + public LogicalTable getTable( long schemaId, String tableName ) throws UnknownTableException { try { CatalogSchema schema = getSchema( schemaId ); if ( !schema.caseSensitive ) { @@ -1804,7 +1804,7 @@ public CatalogTable getTable( long schemaId, String tableName ) throws UnknownTa * {@inheritDoc} */ @Override - public CatalogTable getTable( long databaseId, String schemaName, String tableName ) throws UnknownTableException { + public LogicalTable getTable( long databaseId, String schemaName, String tableName ) throws UnknownTableException { try { CatalogSchema schema = Objects.requireNonNull( schemaNames.get( new Object[]{ databaseId, schemaName } ) ); if ( !schema.caseSensitive ) { @@ -1822,7 +1822,7 @@ public CatalogTable getTable( long databaseId, String schemaName, String tableNa * {@inheritDoc} */ @Override - public CatalogTable getTableFromPartition( long partitionId ) { + public LogicalTable getTableFromPartition( long partitionId ) { return getTable( getPartition( partitionId ).tableId ); } @@ -1831,7 +1831,7 @@ public CatalogTable getTableFromPartition( long partitionId ) { * {@inheritDoc} */ @Override - public CatalogTable getTable( String databaseName, String schemaName, String tableName ) throws UnknownTableException, UnknownDatabaseException, UnknownSchemaException { + public LogicalTable getTable( String databaseName, String schemaName, String tableName ) throws UnknownTableException, UnknownDatabaseException, UnknownSchemaException { try { long databaseId = getDatabase( databaseName ).id; CatalogSchema schema = getSchema( databaseId, schemaName ); @@ -1872,7 +1872,7 @@ public long addTable( String name, long namespaceId, int ownerId, EntityType ent .reliesOnPeriodicChecks( false ) .build(); - CatalogTable table = new CatalogTable( + LogicalTable table = new LogicalTable( id, name, ImmutableList.of(), @@ -2021,7 +2021,7 @@ public long addMaterializedView( String name, long namespaceId, int ownerId, Ent /** * Update all information after the addition of all kind of tables */ - private void updateEntityLogistics( String name, long namespaceId, long id, CatalogSchema schema, CatalogTable entity ) { + private void updateEntityLogistics( String name, long namespaceId, long id, CatalogSchema schema, LogicalTable entity ) { synchronized ( this ) { tables.put( id, entity ); tableChildren.put( id, ImmutableList.builder().build() ); @@ -2040,11 +2040,11 @@ private void updateEntityLogistics( String name, long namespaceId, long id, Cata */ public void addConnectedViews( Map> underlyingTables, long viewId ) { for ( long id : underlyingTables.keySet() ) { - CatalogTable old = getTable( id ); + LogicalTable old = getTable( id ); List connectedViews; connectedViews = new ArrayList<>( old.connectedViews ); connectedViews.add( viewId ); - CatalogTable table = old.getConnectedViews( ImmutableList.copyOf( connectedViews ) ); + LogicalTable table = old.getConnectedViews( ImmutableList.copyOf( connectedViews ) ); synchronized ( this ) 
{ tables.replace( id, table ); assert table != null; @@ -2061,10 +2061,10 @@ public void addConnectedViews( Map> underlyingTables, long view @Override public void deleteViewDependencies( CatalogView catalogView ) { for ( long id : catalogView.getUnderlyingTables().keySet() ) { - CatalogTable old = getTable( id ); + LogicalTable old = getTable( id ); List connectedViews = old.connectedViews.stream().filter( e -> e != catalogView.id ).collect( Collectors.toList() ); - CatalogTable table = old.getConnectedViews( ImmutableList.copyOf( connectedViews ) ); + LogicalTable table = old.getConnectedViews( ImmutableList.copyOf( connectedViews ) ); synchronized ( this ) { tables.replace( id, table ); @@ -2103,12 +2103,12 @@ public boolean checkIfExistsEntity( long tableId ) { */ @Override public void renameTable( long tableId, String name ) { - CatalogTable old = getTable( tableId ); + LogicalTable old = getTable( tableId ); if ( !getSchema( old.namespaceId ).caseSensitive ) { name = name.toLowerCase(); } - CatalogTable table = old.getRenamed( name ); + LogicalTable table = old.getRenamed( name ); synchronized ( this ) { tables.replace( tableId, table ); tableNames.remove( new Object[]{ table.databaseId, table.namespaceId, old.name } ); @@ -2123,7 +2123,7 @@ public void renameTable( long tableId, String name ) { */ @Override public void deleteTable( long tableId ) { - CatalogTable table = getTable( tableId ); + LogicalTable table = getTable( tableId ); List children = new ArrayList<>( Objects.requireNonNull( schemaChildren.get( table.namespaceId ) ) ); children.remove( tableId ); synchronized ( this ) { @@ -2165,8 +2165,8 @@ public void deleteTable( long tableId ) { */ @Override public void setTableOwner( long tableId, int ownerId ) { - CatalogTable old = getTable( tableId ); - CatalogTable table; + LogicalTable old = getTable( tableId ); + LogicalTable table; if ( old instanceof CatalogMaterializedView ) { CatalogMaterializedView oldView = (CatalogMaterializedView) old; @@ -2190,7 +2190,7 @@ public void setTableOwner( long tableId, int ownerId ) { oldView.getMaterializedCriteria(), oldView.isOrdered() ); } else { - table = new CatalogTable( + table = new LogicalTable( old.id, old.name, old.fieldIds, @@ -2218,9 +2218,9 @@ public void setTableOwner( long tableId, int ownerId ) { */ @Override public void setPrimaryKey( long tableId, Long keyId ) { - CatalogTable old = getTable( tableId ); + LogicalTable old = getTable( tableId ); - CatalogTable table; + LogicalTable table; if ( old instanceof CatalogMaterializedView ) { CatalogMaterializedView oldView = (CatalogMaterializedView) old; @@ -2244,7 +2244,7 @@ public void setPrimaryKey( long tableId, Long keyId ) { oldView.getMaterializedCriteria(), oldView.isOrdered() ); } else { - table = new CatalogTable( + table = new LogicalTable( old.id, old.name, old.fieldIds, @@ -2623,7 +2623,7 @@ public CatalogCollectionPlacement getCollectionPlacement( long collectionId, int */ @Override public void deleteColumnPlacement( int adapterId, long columnId, boolean columnOnly ) { - CatalogTable oldTable = getTable( getColumn( columnId ).tableId ); + LogicalTable oldTable = getTable( getColumn( columnId ).tableId ); synchronized ( this ) { if ( log.isDebugEnabled() ) { @@ -2726,7 +2726,7 @@ public List getColumnPlacementsByColumn( long columnId ) */ @Override public ImmutableMap> getColumnPlacementsByAdapter( long tableId ) { - CatalogTable table = getTable( tableId ); + LogicalTable table = getTable( tableId ); Map> columnPlacementsByAdapter = new HashMap<>(); 
table.dataPlacements.forEach( adapterId -> columnPlacementsByAdapter.put( @@ -2745,7 +2745,7 @@ public ImmutableMap> getColumnPlacementsByAdapter( */ @Override public ImmutableMap> getPartitionPlacementsByAdapter( long tableId ) { - CatalogTable table = getTable( tableId ); + LogicalTable table = getTable( tableId ); Map> partitionPlacementsByAdapter = new HashMap<>(); table.dataPlacements.forEach( adapterId -> partitionPlacementsByAdapter.put( @@ -2926,7 +2926,7 @@ public void updateColumnPlacementPhysicalNames( int adapterId, long columnId, St @Override public List getColumns( long tableId ) { try { - CatalogTable table = Objects.requireNonNull( tables.get( tableId ) ); + LogicalTable table = Objects.requireNonNull( tables.get( tableId ) ); return columnNames.prefixSubMap( new Object[]{ table.databaseId, table.namespaceId, table.id } ).values().stream().sorted( columnComparator ).collect( Collectors.toList() ); } catch ( NullPointerException e ) { return new ArrayList<>(); @@ -2939,7 +2939,7 @@ public List getColumns( long tableId ) { */ @Override public List getColumns( Pattern databaseNamePattern, Pattern schemaNamePattern, Pattern tableNamePattern, Pattern columnNamePattern ) { - List catalogEntities = getTables( databaseNamePattern, schemaNamePattern, tableNamePattern ); + List catalogEntities = getTables( databaseNamePattern, schemaNamePattern, tableNamePattern ); if ( catalogEntities.size() > 0 ) { Stream catalogColumns = catalogEntities.stream().filter( t -> tableChildren.containsKey( t.id ) ).flatMap( t -> Objects.requireNonNull( tableChildren.get( t.id ) ).stream() ).map( columns::get ); @@ -2973,7 +2973,7 @@ public CatalogColumn getColumn( long columnId ) { @Override public CatalogColumn getColumn( long tableId, String columnName ) throws UnknownColumnException { try { - CatalogTable table = getTable( tableId ); + LogicalTable table = getTable( tableId ); if ( !getSchema( table.namespaceId ).caseSensitive ) { columnName = columnName.toLowerCase(); } @@ -2990,7 +2990,7 @@ public CatalogColumn getColumn( long tableId, String columnName ) throws Unknown @Override public CatalogColumn getColumn( String databaseName, String schemaName, String tableName, String columnName ) throws UnknownColumnException, UnknownSchemaException, UnknownDatabaseException, UnknownTableException { try { - CatalogTable table = getTable( databaseName, schemaName, tableName ); + LogicalTable table = getTable( databaseName, schemaName, tableName ); return Objects.requireNonNull( columnNames.get( new Object[]{ table.databaseId, table.namespaceId, table.id, columnName } ) ); } catch ( NullPointerException e ) { throw new UnknownColumnException( databaseName, schemaName, tableName, columnName ); @@ -3003,7 +3003,7 @@ public CatalogColumn getColumn( String databaseName, String schemaName, String t */ @Override public long addColumn( String name, long tableId, int position, PolyType type, PolyType collectionsType, Integer length, Integer scale, Integer dimension, Integer cardinality, boolean nullable, Collation collation ) { - CatalogTable table = getTable( tableId ); + LogicalTable table = getTable( tableId ); if ( !getSchema( table.namespaceId ).caseSensitive ) { name = name.toLowerCase(); @@ -3046,7 +3046,7 @@ public long addColumn( String name, long tableId, int position, PolyType type, P List columnIds = new ArrayList<>( table.fieldIds ); columnIds.add( id ); - CatalogTable updatedTable; + LogicalTable updatedTable; updatedTable = table.getTableWithColumns( ImmutableList.copyOf( columnIds ) ); 
tables.replace( tableId, updatedTable ); @@ -3147,7 +3147,7 @@ public void setNullable( long columnId, boolean nullable ) throws GenericCatalog CatalogColumn old = Objects.requireNonNull( columns.get( columnId ) ); if ( nullable ) { // Check if the column is part of a primary key (pk's are not allowed to contain null values) - CatalogTable table = Objects.requireNonNull( tables.get( old.tableId ) ); + LogicalTable table = Objects.requireNonNull( tables.get( old.tableId ) ); if ( table.primaryKey != null ) { CatalogKey catalogKey = getPrimaryKey( table.primaryKey ); if ( catalogKey.columnIds.contains( columnId ) ) { @@ -3224,7 +3224,7 @@ public void setCollation( long columnId, Collation collation ) { */ @Override public boolean checkIfExistsColumn( long tableId, String columnName ) { - CatalogTable table = getTable( tableId ); + LogicalTable table = getTable( tableId ); return columnNames.containsKey( new Object[]{ table.databaseId, table.namespaceId, tableId, columnName } ); } @@ -3240,11 +3240,11 @@ public void deleteColumn( long columnId ) { List children = new ArrayList<>( Objects.requireNonNull( tableChildren.get( column.tableId ) ) ); children.remove( columnId ); - CatalogTable old = getTable( column.tableId ); + LogicalTable old = getTable( column.tableId ); List columnIds = new ArrayList<>( old.fieldIds ); columnIds.remove( columnId ); - CatalogTable table; + LogicalTable table; if ( old.entityType == EntityType.MATERIALIZED_VIEW ) { CatalogMaterializedView oldView = (CatalogMaterializedView) old; table = new CatalogMaterializedView( @@ -3268,7 +3268,7 @@ public void deleteColumn( long columnId ) { oldView.isOrdered() ); } else { - table = new CatalogTable( + table = new LogicalTable( old.id, old.name, ImmutableList.copyOf( columnIds ), @@ -3405,7 +3405,7 @@ public void addPrimaryKey( long tableId, List columnIds ) throws GenericCa // TODO: Check if the current values are unique // Check if there is already a primary key defined for this table and if so, delete it. 
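Condensed, the replace-then-add flow that this comment begins reads roughly as below; deletePrimaryKey, getOrAddKey, and setPrimaryKey appear verbatim in the surrounding hunks, while the enforcement-time constant is an assumption:

    LogicalTable table = getTable( tableId );
    if ( table.primaryKey != null ) {
        deletePrimaryKey( tableId );                          // drop the previous key before replacing it
    }
    long keyId = getOrAddKey( tableId, columnIds, EnforcementTime.ON_QUERY ); // reuse an equal key or create a new one
    setPrimaryKey( tableId, keyId );                          // record the new key on the table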
- CatalogTable table = getTable( tableId ); + LogicalTable table = getTable( tableId ); if ( table.primaryKey != null ) { // CatalogCombinedKey combinedKey = getCombinedKey( table.primaryKey ); @@ -3515,7 +3515,7 @@ public CatalogForeignKey getForeignKey( long tableId, String foreignKeyName ) th @Override public void addForeignKey( long tableId, List columnIds, long referencesTableId, List referencesIds, String constraintName, ForeignKeyOption onUpdate, ForeignKeyOption onDelete ) throws GenericCatalogException { try { - CatalogTable table = Objects.requireNonNull( tables.get( tableId ) ); + LogicalTable table = Objects.requireNonNull( tables.get( tableId ) ); List childKeys = keys.values().stream().filter( k -> k.tableId == referencesTableId ).collect( Collectors.toList() ); for ( CatalogKey refKey : childKeys ) { @@ -3625,7 +3625,7 @@ public CatalogIndex getIndex( long tableId, String indexName ) throws UnknownInd @Override public boolean checkIfExistsIndex( long tableId, String indexName ) { try { - CatalogTable table = getTable( tableId ); + LogicalTable table = getTable( tableId ); getIndex( table.id, indexName ); return true; } catch ( UnknownIndexException e ) { @@ -3737,7 +3737,7 @@ public void deleteIndex( long indexId ) { */ @Override public void deletePrimaryKey( long tableId ) throws GenericCatalogException { - CatalogTable table = getTable( tableId ); + LogicalTable table = getTable( tableId ); // TODO: Check if the currently stored values are unique if ( table.primaryKey != null ) { @@ -4271,9 +4271,9 @@ public List getPartitionsByTable( long tableId ) { */ @Override public void partitionTable( long tableId, PartitionType partitionType, long partitionColumnId, int numPartitionGroups, List partitionGroupIds, PartitionProperty partitionProperty ) { - CatalogTable old = Objects.requireNonNull( tables.get( tableId ) ); + LogicalTable old = Objects.requireNonNull( tables.get( tableId ) ); - CatalogTable table = new CatalogTable( + LogicalTable table = new LogicalTable( old.id, old.name, old.fieldIds, @@ -4305,7 +4305,7 @@ public void partitionTable( long tableId, PartitionType partitionType, long part */ @Override public void mergeTable( long tableId ) { - CatalogTable old = Objects.requireNonNull( tables.get( tableId ) ); + LogicalTable old = Objects.requireNonNull( tables.get( tableId ) ); if ( old.partitionProperty.reliesOnPeriodicChecks ) { removeTableFromPeriodicProcessing( tableId ); @@ -4329,7 +4329,7 @@ public void mergeTable( long tableId ) { .reliesOnPeriodicChecks( false ) .build(); - CatalogTable table = new CatalogTable( + LogicalTable table = new LogicalTable( old.id, old.name, old.fieldIds, @@ -4356,9 +4356,9 @@ public void mergeTable( long tableId ) { */ @Override public void updateTablePartitionProperties( long tableId, PartitionProperty partitionProperty ) { - CatalogTable old = Objects.requireNonNull( tables.get( tableId ) ); + LogicalTable old = Objects.requireNonNull( tables.get( tableId ) ); - CatalogTable table = new CatalogTable( + LogicalTable table = new LogicalTable( old.id, old.name, old.fieldIds, @@ -4387,7 +4387,7 @@ public void updateTablePartitionProperties( long tableId, PartitionProperty part @Override public List getPartitionGroups( long tableId ) { try { - CatalogTable table = Objects.requireNonNull( tables.get( tableId ) ); + LogicalTable table = Objects.requireNonNull( tables.get( tableId ) ); List partitionGroups = new ArrayList<>(); if ( table.partitionProperty.partitionGroupIds == null ) { return new ArrayList<>(); @@ -4407,9 +4407,9 @@ 
public List getPartitionGroups( long tableId ) { */ @Override public List getPartitionGroups( Pattern databaseNamePattern, Pattern schemaNamePattern, Pattern tableNamePattern ) { - List catalogEntities = getTables( databaseNamePattern, schemaNamePattern, tableNamePattern ); + List catalogEntities = getTables( databaseNamePattern, schemaNamePattern, tableNamePattern ); Stream partitionGroupStream = Stream.of(); - for ( CatalogTable catalogTable : catalogEntities ) { + for ( LogicalTable catalogTable : catalogEntities ) { partitionGroupStream = Stream.concat( partitionGroupStream, getPartitionGroups( catalogTable.id ).stream() ); } return partitionGroupStream.collect( Collectors.toList() ); @@ -4537,7 +4537,7 @@ public List getPartitionGroupsIndexOnDataPlacement( int adapterId, long ta } List partitionGroupIndexList = new ArrayList<>(); - CatalogTable catalogTable = getTable( tableId ); + LogicalTable catalogTable = getTable( tableId ); for ( int index = 0; index < catalogTable.partitionProperty.partitionGroupIds.size(); index++ ) { if ( partitionGroups.contains( catalogTable.partitionProperty.partitionGroupIds.get( index ) ) ) { partitionGroupIndexList.add( (long) index ); @@ -4681,7 +4681,7 @@ public boolean validateDataPlacementsConstraints( long tableId, long adapterId, // TODO @HENNLO Focus on PartitionPlacements that are labeled as UPTODATE nodes. The outdated nodes do not // necessarily need placement constraints - CatalogTable table = getTable( tableId ); + LogicalTable table = getTable( tableId ); List dataPlacements = getDataPlacements( tableId ); // Checks for every column on every DataPlacement if each column is placed with all partitions @@ -4806,9 +4806,9 @@ public CatalogDataPlacement addDataPlacementIfNotExists( int adapterId, long tab */ @Override public void updateDataPlacementsOnTable( long tableId, List newDataPlacements ) { - CatalogTable old = Objects.requireNonNull( tables.get( tableId ) ); + LogicalTable old = Objects.requireNonNull( tables.get( tableId ) ); - CatalogTable newTable; + LogicalTable newTable; if ( old.entityType == EntityType.MATERIALIZED_VIEW ) { CatalogMaterializedView oldView = (CatalogMaterializedView) old; @@ -4833,7 +4833,7 @@ public void updateDataPlacementsOnTable( long tableId, List newDataPlac oldView.isOrdered() ); } else { - newTable = new CatalogTable( + newTable = new LogicalTable( old.id, old.name, old.fieldIds, @@ -4907,12 +4907,12 @@ protected void modifyDataPlacement( int adapterId, long tableId, CatalogDataPlac public long addGraphPlacement( int adapterId, long graphId ) { long id = partitionIdBuilder.getAndIncrement(); CatalogGraphPlacement placement = new CatalogGraphPlacement( adapterId, graphId, null, id ); - CatalogGraphDatabase old = graphs.get( graphId ); + LogicalGraph old = graphs.get( graphId ); if ( old == null ) { throw new UnknownGraphException( graphId ); } - CatalogGraphDatabase graph = old.addPlacement( adapterId ); + LogicalGraph graph = old.addPlacement( adapterId ); synchronized ( this ) { graphPlacements.put( new Object[]{ graph.id, adapterId }, placement ); @@ -4957,9 +4957,9 @@ public void deleteGraphPlacement( int adapterId, long graphId ) { deleteGraphPlacementLogistics( placement.graphId, adapterId ); - CatalogGraphDatabase old = Objects.requireNonNull( graphs.get( placement.graphId ) ); + LogicalGraph old = Objects.requireNonNull( graphs.get( placement.graphId ) ); - CatalogGraphDatabase graph = old.removePlacement( adapterId ); + LogicalGraph graph = old.removePlacement( adapterId ); synchronized ( this ) { 
graphPlacements.remove( new Object[]{ graphId, adapterId } ); @@ -5045,7 +5045,7 @@ public void removeDataPlacement( int adapterId, long tableId ) { */ @Override protected void addSingleDataPlacementToTable( Integer adapterId, long tableId ) { - CatalogTable old = getTable( tableId ); + LogicalTable old = getTable( tableId ); List updatedPlacements = new ArrayList<>( old.dataPlacements ); if ( !updatedPlacements.contains( adapterId ) ) { @@ -5060,7 +5060,7 @@ protected void addSingleDataPlacementToTable( Integer adapterId, long tableId ) */ @Override protected void removeSingleDataPlacementFromTable( Integer adapterId, long tableId ) { - CatalogTable old = getTable( tableId ); + LogicalTable old = getTable( tableId ); List updatedPlacements = old.dataPlacements.stream().collect( Collectors.toList() ); if ( updatedPlacements.contains( adapterId ) ) { @@ -5279,8 +5279,8 @@ public List getPartitionPlacements( long partitionId * {@inheritDoc} */ @Override - public List getTablesForPeriodicProcessing() { - List procTables = new ArrayList<>(); + public List getTablesForPeriodicProcessing() { + List procTables = new ArrayList<>(); for ( Iterator iterator = frequencyDependentTables.iterator(); iterator.hasNext(); ) { long tableId = -1; try { @@ -5412,7 +5412,7 @@ private void deleteKeyIfNoLongerUsed( Long keyId ) { return; } CatalogKey key = getKey( keyId ); - CatalogTable table = getTable( key.tableId ); + LogicalTable table = getTable( key.tableId ); if ( table.primaryKey != null && table.primaryKey.equals( keyId ) ) { return; } @@ -5453,7 +5453,7 @@ private long getOrAddKey( long tableId, List columnIds, EnforcementTime en private long addKey( long tableId, List columnIds, EnforcementTime enforcementTime ) throws GenericCatalogException { try { - CatalogTable table = Objects.requireNonNull( tables.get( tableId ) ); + LogicalTable table = Objects.requireNonNull( tables.get( tableId ) ); long id = keyIdBuilder.getAndIncrement(); CatalogKey key = new CatalogKey( id, table.id, table.namespaceId, table.databaseId, columnIds, enforcementTime ); synchronized ( this ) { diff --git a/plugins/mapdb-catalog/src/test/java/org/polypheny/db/test/CatalogTest.java b/plugins/mapdb-catalog/src/test/java/org/polypheny/db/test/CatalogTest.java index 6cdd9bea5c..6f15cb73c7 100644 --- a/plugins/mapdb-catalog/src/test/java/org/polypheny/db/test/CatalogTest.java +++ b/plugins/mapdb-catalog/src/test/java/org/polypheny/db/test/CatalogTest.java @@ -46,8 +46,8 @@ import org.polypheny.db.catalog.entity.CatalogDatabase; import org.polypheny.db.catalog.entity.CatalogPrimaryKey; import org.polypheny.db.catalog.entity.CatalogSchema; -import org.polypheny.db.catalog.entity.CatalogTable; import org.polypheny.db.catalog.entity.CatalogUser; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.exceptions.GenericCatalogException; import org.polypheny.db.catalog.exceptions.UnknownAdapterException; import org.polypheny.db.catalog.exceptions.UnknownColumnException; @@ -98,7 +98,7 @@ public void testLayout() throws UnknownDatabaseException, UnknownSchemaException assertEquals( schemaId, schema.id ); long tableId = catalog.addTable( "test_table", schemaId, userId, EntityType.ENTITY, true ); - CatalogTable table = catalog.getTable( schemaId, "test_table" ); + LogicalTable table = catalog.getTable( schemaId, "test_table" ); assertEquals( tableId, table.id ); long columnId = catalog.addColumn( "test_column", tableId, 0, PolyType.BIGINT, null, null, null, null, null, false, null ); diff --git 
a/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/sources/MonetdbSource.java b/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/sources/MonetdbSource.java index 67fe8fb43f..04fb7b26d1 100644 --- a/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/sources/MonetdbSource.java +++ b/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/sources/MonetdbSource.java @@ -31,10 +31,7 @@ import org.polypheny.db.adapter.jdbc.connection.TransactionalConnectionFactory; import org.polypheny.db.adapter.jdbc.sources.AbstractJdbcSource; import org.polypheny.db.catalog.Adapter; -import org.polypheny.db.catalog.entity.CatalogColumnPlacement; -import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; -import org.polypheny.db.catalog.entity.CatalogTable; -import org.polypheny.db.schema.Entity; +import org.polypheny.db.catalog.entity.physical.PhysicalTable; import org.polypheny.db.schema.Namespace; import org.polypheny.db.sql.language.SqlDialect; import org.polypheny.db.sql.language.dialect.MonetdbSqlDialect; @@ -94,7 +91,7 @@ protected ConnectionFactory createConnectionFactory( final Map s @Override - public Entity createTableSchema( CatalogTable catalogTable, List columnPlacementsOnStore, CatalogPartitionPlacement partitionPlacement ) { + public PhysicalTable createTableSchema( PhysicalTable boilerplate ) { return currentJdbcSchema.createJdbcTable( catalogTable, columnPlacementsOnStore, partitionPlacement ); } diff --git a/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/stores/MonetdbStore.java b/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/stores/MonetdbStore.java index 07fbb3cf0d..007fd0cf4f 100644 --- a/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/stores/MonetdbStore.java +++ b/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/stores/MonetdbStore.java @@ -40,13 +40,13 @@ import org.polypheny.db.catalog.entity.CatalogColumnPlacement; import org.polypheny.db.catalog.entity.CatalogIndex; import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; -import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.catalog.entity.physical.PhysicalTable; import org.polypheny.db.docker.DockerManager; import org.polypheny.db.docker.DockerManager.Container; import org.polypheny.db.docker.DockerManager.ContainerBuilder; import org.polypheny.db.plugins.PolyPluginManager; import org.polypheny.db.prepare.Context; -import org.polypheny.db.schema.Entity; import org.polypheny.db.schema.Namespace; import org.polypheny.db.sql.language.dialect.MonetdbSqlDialect; import org.polypheny.db.transaction.PUID; @@ -149,7 +149,7 @@ public void updateColumnType( Context context, CatalogColumnPlacement columnPlac return; } // MonetDB does not support updating the column type directly. 
We need to do a work-around - CatalogTable catalogTable = Catalog.getInstance().getTable( catalogColumn.tableId ); + LogicalTable catalogTable = Catalog.getInstance().getTable( catalogColumn.tableId ); String tmpColName = columnPlacement.physicalColumnName + "tmp"; StringBuilder builder; @@ -229,7 +229,7 @@ public void updateColumnType( Context context, CatalogColumnPlacement columnPlac @Override - public Entity createTableSchema( CatalogTable catalogTable, List columnPlacementsOnStore, CatalogPartitionPlacement partitionPlacement ) { + public PhysicalTable createTableSchema( PhysicalTable boilerplate ) { return currentJdbcSchema.createJdbcTable( catalogTable, columnPlacementsOnStore, partitionPlacement ); } @@ -268,7 +268,7 @@ public AvailableIndexMethod getDefaultIndexMethod() { @Override - public List getFunctionalIndexes( CatalogTable catalogTable ) { + public List getFunctionalIndexes( LogicalTable catalogTable ) { return ImmutableList.of(); // TODO } diff --git a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoAlg.java b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoAlg.java index cc0a439ffa..c28d464d4b 100644 --- a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoAlg.java +++ b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoAlg.java @@ -50,11 +50,11 @@ import org.polypheny.db.adapter.mongodb.MongoPlugin.MongoStore; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.AlgShuttleImpl; -import org.polypheny.db.algebra.core.Modify.Operation; -import org.polypheny.db.algebra.core.Scan; +import org.polypheny.db.algebra.core.common.Modify.Operation; +import org.polypheny.db.algebra.core.relational.RelScan; import org.polypheny.db.algebra.logical.relational.LogicalProject; import org.polypheny.db.algebra.type.AlgRecordType; -import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.Convention; import org.polypheny.db.util.Pair; @@ -144,9 +144,9 @@ public void setStaticRowType( AlgRecordType staticRowType ) { public String getPhysicalName( String name ) { - int index = mongoEntity.getCatalogEntity().unwrap( CatalogTable.class ).getColumnNames().indexOf( name ); + int index = mongoEntity.getCatalogEntity().unwrap( LogicalTable.class ).getColumnNames().indexOf( name ); if ( index != -1 ) { - return MongoStore.getPhysicalColumnName( name, mongoEntity.getCatalogEntity().unwrap( CatalogTable.class ).fieldIds.get( index ) ); + return MongoStore.getPhysicalColumnName( name, mongoEntity.getCatalogEntity().unwrap( LogicalTable.class ).fieldIds.get( index ) ); } throw new RuntimeException( "This column is not part of the table." 
); } @@ -195,7 +195,7 @@ public AlgNode visit( LogicalProject project ) { @Override - public AlgNode visit( Scan scan ) { + public AlgNode visit( RelScan scan ) { super.visit( scan ); return scan; diff --git a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoEntity.java b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoEntity.java index 3e9d265e70..8b2aa7b2af 100644 --- a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoEntity.java +++ b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoEntity.java @@ -72,18 +72,18 @@ import org.polypheny.db.adapter.mongodb.MongoPlugin.MongoStore; import org.polypheny.db.adapter.mongodb.util.MongoDynamic; import org.polypheny.db.algebra.AlgNode; -import org.polypheny.db.algebra.core.Modify; -import org.polypheny.db.algebra.core.Modify.Operation; +import org.polypheny.db.algebra.core.relational.RelModify; +import org.polypheny.db.algebra.core.common.Modify.Operation; import org.polypheny.db.algebra.core.document.DocumentModify; import org.polypheny.db.algebra.logical.document.LogicalDocumentModify; -import org.polypheny.db.algebra.logical.relational.LogicalModify; +import org.polypheny.db.algebra.logical.relational.LogicalRelModify; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; import org.polypheny.db.algebra.type.AlgProtoDataType; import org.polypheny.db.catalog.entity.CatalogCollection; import org.polypheny.db.catalog.entity.CatalogCollectionPlacement; import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; -import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptEntity.ToAlgContext; @@ -116,7 +116,7 @@ public class MongoEntity extends AbstractQueryableEntity implements Translatable @Getter private final MongoCollection collection; @Getter - private final CatalogTable catalogTable; + private final LogicalTable catalogTable; @Getter private final CatalogCollection catalogCollection; @@ -129,7 +129,7 @@ public class MongoEntity extends AbstractQueryableEntity implements Translatable /** * Creates a MongoTable. 
*/ - MongoEntity( CatalogTable catalogTable, MongoSchema schema, AlgProtoDataType proto, TransactionProvider transactionProvider, long storeId, CatalogPartitionPlacement partitionPlacement ) { + MongoEntity( LogicalTable catalogTable, MongoSchema schema, AlgProtoDataType proto, TransactionProvider transactionProvider, long storeId, CatalogPartitionPlacement partitionPlacement ) { super( Object[].class, catalogTable.id, partitionPlacement.partitionId, storeId ); this.collectionName = MongoStore.getPhysicalTableName( catalogTable.id, partitionPlacement.partitionId ); this.transactionProvider = transactionProvider; @@ -315,7 +315,7 @@ private static Integer parseIntString( String valueString ) { @Override - public Modify toModificationAlg( + public RelModify toModificationAlg( AlgOptCluster cluster, AlgOptEntity table, CatalogReader catalogReader, @@ -325,7 +325,7 @@ public Modify toModificationAlg( List sourceExpressionList, boolean flattened ) { mongoSchema.getConvention().register( cluster.getPlanner() ); - return new LogicalModify( + return new LogicalRelModify( cluster, cluster.traitSetOf( Convention.NONE ), table, diff --git a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoMethod.java b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoMethod.java index d4548f32a5..0f44e3a3fc 100644 --- a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoMethod.java +++ b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoMethod.java @@ -36,7 +36,7 @@ import com.google.common.collect.ImmutableMap; import org.apache.calcite.linq4j.tree.Types; -import org.polypheny.db.algebra.core.Modify.Operation; +import org.polypheny.db.algebra.core.common.Modify.Operation; import java.lang.reflect.Method; import java.util.List; diff --git a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoPlugin.java b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoPlugin.java index c3ad25aa89..929b3fd8ef 100644 --- a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoPlugin.java +++ b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoPlugin.java @@ -69,7 +69,8 @@ import org.polypheny.db.catalog.entity.CatalogDefaultValue; import org.polypheny.db.catalog.entity.CatalogIndex; import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; -import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.catalog.entity.physical.PhysicalTable; import org.polypheny.db.config.ConfigDocker; import org.polypheny.db.config.RuntimeConfig; import org.polypheny.db.docker.DockerInstance; @@ -238,7 +239,7 @@ public void createNewSchema( SchemaPlus rootSchema, String name, Long id ) { @Override - public Entity createTableSchema( CatalogTable combinedTable, List columnPlacementsOnStore, CatalogPartitionPlacement partitionPlacement ) { + public PhysicalTable createTableSchema( PhysicalTable boilerplate ) { return currentSchema.createTable( combinedTable, columnPlacementsOnStore, getAdapterId(), partitionPlacement ); } @@ -250,7 +251,7 @@ public Namespace getCurrentSchema() { @Override - public void truncate( Context context, CatalogTable table ) { + public void truncate( Context context, LogicalTable table ) { commitAll(); context.getStatement().getTransaction().registerInvolvedAdapter( this ); for ( CatalogPartitionPlacement partitionPlacement : 
catalog.getPartitionPlacementsByTableOnAdapter( getAdapterId(), table.id ) ) { @@ -304,7 +305,7 @@ protected void reloadSettings( List updatedSettings ) { @Override - public void createTable( Context context, CatalogTable catalogTable, List partitionIds ) { + public void createTable( Context context, LogicalTable catalogTable, List partitionIds ) { Catalog catalog = Catalog.getInstance(); commitAll(); @@ -377,7 +378,7 @@ public void dropCollection( Context context, CatalogCollection catalogCollection @Override - public void dropTable( Context context, CatalogTable combinedTable, List partitionIds ) { + public void dropTable( Context context, LogicalTable combinedTable, List partitionIds ) { commitAll(); context.getStatement().getTransaction().registerInvolvedAdapter( this ); //transactionProvider.startTransaction(); @@ -393,7 +394,7 @@ public void dropTable( Context context, CatalogTable combinedTable, List p @Override - public void addColumn( Context context, CatalogTable catalogTable, CatalogColumn catalogColumn ) { + public void addColumn( Context context, LogicalTable catalogTable, CatalogColumn catalogColumn ) { commitAll(); context.getStatement().getTransaction().registerInvolvedAdapter( this ); // updates all columns with this field if a default value is provided @@ -578,7 +579,7 @@ public AvailableIndexMethod getDefaultIndexMethod() { @Override - public List getFunctionalIndexes( CatalogTable catalogTable ) { + public List getFunctionalIndexes( LogicalTable catalogTable ) { return ImmutableList.of(); } diff --git a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoRules.java b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoRules.java index dfd84d9384..925d5a5f5f 100644 --- a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoRules.java +++ b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoRules.java @@ -65,8 +65,8 @@ import org.polypheny.db.algebra.convert.ConverterRule; import org.polypheny.db.algebra.core.AggregateCall; import org.polypheny.db.algebra.core.AlgFactories; -import org.polypheny.db.algebra.core.Modify; -import org.polypheny.db.algebra.core.Scan; +import org.polypheny.db.algebra.core.relational.RelModify; +import org.polypheny.db.algebra.core.relational.RelScan; import org.polypheny.db.algebra.core.Sort; import org.polypheny.db.algebra.core.Values; import org.polypheny.db.algebra.core.document.DocumentModify; @@ -84,7 +84,7 @@ import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.algebra.type.AlgRecordType; import org.polypheny.db.catalog.entity.CatalogCollection; -import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.languages.OperatorRegistry; import org.polypheny.db.nodes.Operator; import org.polypheny.db.plan.AlgOptCluster; @@ -872,11 +872,11 @@ private static class MongoTableModificationRule extends MongoConverterRule { MongoTableModificationRule() { - super( Modify.class, MongoTableModificationRule::mongoSupported, Convention.NONE, MongoAlg.CONVENTION, "MongoTableModificationRule." + MongoAlg.CONVENTION ); + super( RelModify.class, MongoTableModificationRule::mongoSupported, Convention.NONE, MongoAlg.CONVENTION, "MongoTableModificationRule." 
+ MongoAlg.CONVENTION ); } - private static boolean mongoSupported( Modify modify ) { + private static boolean mongoSupported( RelModify modify ) { if ( !modify.isInsert() ) { return true; } @@ -894,7 +894,7 @@ private static class ScanChecker extends AlgShuttleImpl { @Override - public AlgNode visit( Scan scan ) { + public AlgNode visit( RelScan scan ) { supported = false; return super.visit( scan ); } @@ -913,7 +913,7 @@ public AlgNode visit( AlgNode other ) { @Override public AlgNode convert( AlgNode alg ) { - final Modify modify = (Modify) alg; + final RelModify modify = (RelModify) alg; final ModifiableEntity modifiableTable = modify.getEntity().unwrap( ModifiableEntity.class ); if ( modifiableTable == null ) { return null; @@ -975,7 +975,7 @@ public AlgNode convert( AlgNode alg ) { } - private static class MongoEntityModify extends Modify implements MongoAlg { + private static class MongoEntityModify extends RelModify implements MongoAlg { private final GridFSBucket bucket; @@ -1302,7 +1302,7 @@ private void handlePreparedInsert( Implementor implementor, MongoProject input ) } BsonDocument doc = new BsonDocument(); - CatalogTable catalogTable = implementor.mongoEntity.getCatalogEntity().unwrap( CatalogTable.class ); + LogicalTable catalogTable = implementor.mongoEntity.getCatalogEntity().unwrap( LogicalTable.class ); GridFSBucket bucket = implementor.mongoEntity.getMongoSchema().getBucket(); //noinspection AssertWithSideEffects assert input.getRowType().getFieldCount() == this.getEntity().getRowType().getFieldCount(); @@ -1356,7 +1356,7 @@ private Map getPhysicalMap( List fieldList, C } - private Map getPhysicalMap( List fieldList, CatalogTable catalogTable ) { + private Map getPhysicalMap( List fieldList, LogicalTable catalogTable ) { Map map = new HashMap<>(); List names = catalogTable.getColumnNames(); List ids = catalogTable.fieldIds; @@ -1369,7 +1369,7 @@ private Map getPhysicalMap( List fieldList, C } - private String getPhysicalName( MongoProject input, CatalogTable catalogTable, int pos ) { + private String getPhysicalName( MongoProject input, LogicalTable catalogTable, int pos ) { String logicalName = input.getRowType().getFieldNames().get( pos ); int index = catalogTable.getColumnNames().indexOf( logicalName ); return MongoStore.getPhysicalColumnName( logicalName, catalogTable.fieldIds.get( index ) ); @@ -1395,7 +1395,7 @@ private BsonValue getBsonArray( RexCall el, PolyType type, GridFSBucket bucket ) private void handleDirectInsert( Implementor implementor, MongoValues values ) { List docs = new ArrayList<>(); - CatalogTable catalogTable = implementor.mongoEntity.getCatalogEntity().unwrap( CatalogTable.class ); + LogicalTable catalogTable = implementor.mongoEntity.getCatalogEntity().unwrap( LogicalTable.class ); GridFSBucket bucket = implementor.mongoEntity.getMongoSchema().getBucket(); AlgDataType valRowType = rowType; diff --git a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoScan.java b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoScan.java index b636ac93ff..3ed7c6b758 100644 --- a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoScan.java +++ b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoScan.java @@ -36,7 +36,7 @@ import java.util.List; import org.polypheny.db.algebra.AlgNode; -import org.polypheny.db.algebra.core.Scan; +import org.polypheny.db.algebra.core.relational.RelScan; import org.polypheny.db.algebra.metadata.AlgMetadataQuery; import 
org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgRecordType; @@ -53,7 +53,7 @@ * * Additional operations might be applied, using the "find" or "aggregate" methods.
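The "find" and "aggregate" methods this javadoc refers to are the two MongoDB Java driver entry points a scan can compile into; schematically (the collection name and filter values are placeholders):

    import java.util.Arrays;
    import org.bson.Document;
    import com.mongodb.client.MongoCollection;
    import com.mongodb.client.MongoDatabase;
    import com.mongodb.client.model.Aggregates;
    import com.mongodb.client.model.Filters;

    static void sketch( MongoDatabase db ) {
        MongoCollection<Document> coll = db.getCollection( "physicalName" );  // physical collection name is a placeholder
        coll.find( Filters.eq( "field", 42 ) );                               // the "find" path: a single filter document
        coll.aggregate( Arrays.asList( Aggregates.match( Filters.eq( "field", 42 ) ) ) ); // the "aggregate" path: a pipeline of stages
    }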

    */ -public class MongoScan extends Scan implements MongoAlg { +public class MongoScan extends RelScan implements MongoAlg { final MongoEntity mongoEntity; final AlgDataType projectRowType; diff --git a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoSchema.java b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoSchema.java index c9e07599dc..983b0b6062 100644 --- a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoSchema.java +++ b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoSchema.java @@ -53,7 +53,7 @@ import org.polypheny.db.catalog.entity.CatalogColumn; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; -import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.plan.Convention; import org.polypheny.db.schema.Entity; import org.polypheny.db.schema.Namespace.Schema; @@ -108,7 +108,7 @@ private String buildDatabaseName( CatalogColumn column ) { } - public MongoEntity createTable( CatalogTable catalogTable, List columnPlacementsOnStore, int storeId, CatalogPartitionPlacement partitionPlacement ) { + public MongoEntity createTable( LogicalTable catalogTable, List columnPlacementsOnStore, int storeId, CatalogPartitionPlacement partitionPlacement ) { final AlgDataTypeFactory typeFactory = new PolyTypeFactoryImpl( AlgDataTypeSystem.DEFAULT ); final AlgDataTypeFactory.Builder fieldInfo = typeFactory.builder(); diff --git a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoToEnumerableConverter.java b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoToEnumerableConverter.java index 1b7a7e11d8..da72e935c9 100644 --- a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoToEnumerableConverter.java +++ b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoToEnumerableConverter.java @@ -44,7 +44,7 @@ import org.polypheny.db.algebra.AbstractAlgNode; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.convert.ConverterImpl; -import org.polypheny.db.algebra.core.Modify.Operation; +import org.polypheny.db.algebra.core.common.Modify.Operation; import org.polypheny.db.algebra.metadata.AlgMetadataQuery; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeField; diff --git a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/util/MongoTypeUtil.java b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/util/MongoTypeUtil.java index 929a4aae43..8c03e9208f 100644 --- a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/util/MongoTypeUtil.java +++ b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/util/MongoTypeUtil.java @@ -21,7 +21,7 @@ import org.bson.BsonInt32; import org.bson.BsonString; import org.polypheny.db.adapter.mongodb.MongoPlugin.MongoStore; -import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.entity.logical.LogicalTable; /** @@ -30,7 +30,7 @@ public class MongoTypeUtil { - public static BsonDocument getPhysicalProjections( List logicalCols, CatalogTable catalogTable ) { + public static BsonDocument getPhysicalProjections( List logicalCols, LogicalTable catalogTable ) { BsonDocument projections = new BsonDocument(); List names = 
catalogTable.getColumnNames(); for ( String logicalCol : logicalCols ) { diff --git a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlRenameCollection.java b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlRenameCollection.java index 461703196c..e112167965 100644 --- a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlRenameCollection.java +++ b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlRenameCollection.java @@ -20,7 +20,7 @@ import java.util.Optional; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogSchema; -import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.exceptions.EntityAlreadyExistsException; import org.polypheny.db.catalog.exceptions.UnknownSchemaException; import org.polypheny.db.ddl.DdlManager; @@ -59,10 +59,10 @@ public void execute( Context context, Statement statement, QueryParameters param try { CatalogSchema schema = catalog.getSchema( Catalog.defaultDatabaseId, database ); - List tables = catalog.getTables( schema.id, null ); + List tables = catalog.getTables( schema.id, null ); if ( dropTarget ) { - Optional newTable = tables.stream() + Optional newTable = tables.stream() .filter( t -> t.name.equals( newName ) ) .findAny(); @@ -71,7 +71,7 @@ public void execute( Context context, Statement statement, QueryParameters param } } - Optional table = tables.stream() + Optional table = tables.stream() .filter( t -> t.name.equals( getCollection() ) ) .findAny(); diff --git a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql2alg/MqlToAlgConverter.java b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql2alg/MqlToAlgConverter.java index 3862ac5de8..fb6f51fe38 100644 --- a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql2alg/MqlToAlgConverter.java +++ b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql2alg/MqlToAlgConverter.java @@ -47,9 +47,9 @@ import org.polypheny.db.algebra.constant.Kind; import org.polypheny.db.algebra.core.AggregateCall; import org.polypheny.db.algebra.core.CorrelationId; -import org.polypheny.db.algebra.core.Modify.Operation; -import org.polypheny.db.algebra.core.Scan; +import org.polypheny.db.algebra.core.relational.RelScan; import org.polypheny.db.algebra.core.Values; +import org.polypheny.db.algebra.core.common.Modify; import org.polypheny.db.algebra.core.document.DocumentProject; import org.polypheny.db.algebra.fun.AggFunction; import org.polypheny.db.algebra.logical.document.LogicalDocumentAggregate; @@ -527,7 +527,7 @@ private void combineUpdate( Map>> me * @param key the left associated parent key * @param mergedUpdates collection, which combines all performed update steps according to the operation * @param rowType the default rowtype at this point - * @param node the transformed operation up to this step e.g. {@link Scan} or {@link LogicalDocumentAggregate} + * @param node the transformed operation up to this step e.g. 
{@link RelScan} or {@link LogicalDocumentAggregate} * @param table the active table * @return the unified UPDATE AlgNode */ @@ -586,7 +586,7 @@ private AlgNode finalizeUpdates( String key, Map se @Override - public Entity createTableSchema( CatalogTable catalogTable, List columnPlacementsOnStore, CatalogPartitionPlacement partitionPlacement ) { + public PhysicalTable createTableSchema( PhysicalTable boilerplate ) { return currentJdbcSchema.createJdbcTable( catalogTable, columnPlacementsOnStore, partitionPlacement ); } diff --git a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/Neo4jPlugin.java b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/Neo4jPlugin.java index 3a76668e25..d0ca652995 100644 --- a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/Neo4jPlugin.java +++ b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/Neo4jPlugin.java @@ -58,18 +58,18 @@ import org.polypheny.db.catalog.entity.CatalogColumn; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; import org.polypheny.db.catalog.entity.CatalogDefaultValue; -import org.polypheny.db.catalog.entity.CatalogGraphDatabase; import org.polypheny.db.catalog.entity.CatalogGraphPlacement; import org.polypheny.db.catalog.entity.CatalogIndex; import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; -import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.entity.logical.LogicalGraph; +import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.catalog.entity.physical.PhysicalGraph; +import org.polypheny.db.catalog.entity.physical.PhysicalTable; import org.polypheny.db.docker.DockerInstance; import org.polypheny.db.docker.DockerManager; import org.polypheny.db.docker.DockerManager.Container; import org.polypheny.db.docker.DockerManager.ContainerBuilder; import org.polypheny.db.prepare.Context; -import org.polypheny.db.schema.Entity; -import org.polypheny.db.schema.Namespace; import org.polypheny.db.schema.SchemaPlus; import org.polypheny.db.schema.Schemas; import org.polypheny.db.transaction.PolyXid; @@ -237,7 +237,7 @@ public List getSupportedSchemaType() { @Override - public void createTable( Context context, CatalogTable combinedTable, List partitionIds ) { + public void createTable( Context context, LogicalTable combinedTable, List partitionIds ) { Catalog catalog = Catalog.getInstance(); if ( this.currentSchema == null ) { @@ -286,7 +286,7 @@ public void executeDdlTrx( PolyXid session, String query ) { @Override - public void dropTable( Context context, CatalogTable combinedTable, List partitionIds ) { + public void dropTable( Context context, LogicalTable combinedTable, List partitionIds ) { Catalog catalog = Catalog.getInstance(); context.getStatement().getTransaction().registerInvolvedAdapter( this ); List partitionPlacements = partitionIds.stream() @@ -303,7 +303,7 @@ public void dropTable( Context context, CatalogTable combinedTable, List p @Override - public void addColumn( Context context, CatalogTable catalogTable, CatalogColumn catalogColumn ) { + public void addColumn( Context context, LogicalTable catalogTable, CatalogColumn catalogColumn ) { transactionProvider.commitAll(); context.getStatement().getTransaction().registerInvolvedAdapter( this ); Catalog catalog = Catalog.getInstance(); @@ -458,7 +458,7 @@ public AvailableIndexMethod getDefaultIndexMethod() { @Override - public List getFunctionalIndexes( CatalogTable catalogTable ) { + public List getFunctionalIndexes( 
CatalogTable catalogTable ) { + public List getFunctionalIndexes( LogicalTable catalogTable ) { return ImmutableList.of(); } @@ -495,13 +495,13 @@ public void createNewSchema( SchemaPlus rootSchema, String name ) { @Override - public Entity createTableSchema( CatalogTable combinedTable, List columnPlacementsOnStore, CatalogPartitionPlacement partitionPlacement ) { - return this.currentSchema.createTable( combinedTable, columnPlacementsOnStore, partitionPlacement ); + public PhysicalTable createTableSchema( PhysicalTable boilerplate ) { + return this.currentSchema.createTable( boilerplate ); } @Override - public void truncate( Context context, CatalogTable table ) { + public void truncate( Context context, LogicalTable table ) { transactionProvider.commitAll(); context.getStatement().getTransaction().registerInvolvedAdapter( this ); for ( CatalogPartitionPlacement partitionPlacement : catalog.getPartitionPlacementsByTableOnAdapter( getAdapterId(), table.id ) ) { @@ -513,7 +513,7 @@ public void truncate( Context context, CatalogTable table ) { @Override - public void createGraph( Context context, CatalogGraphDatabase graphDatabase ) { + public void createGraph( Context context, LogicalGraph graphDatabase ) { catalog.updateGraphPlacementPhysicalNames( graphDatabase.id, getAdapterId(), getPhysicalGraphName( graphDatabase.id ) ); } @@ -528,14 +528,8 @@ public void dropGraph( Context context, CatalogGraphPlacement graphPlacement ) { @Override - public void createGraphNamespace( SchemaPlus rootSchema, String name, long id ) { - this.currentGraph = new NeoGraph( name, this.transactionProvider, this.db, id, getMappingLabel( id ), this ); - } - - - @Override - public Namespace getCurrentGraphNamespace() { - return currentGraph; + public void createGraphNamespace( PhysicalGraph graph ) { + this.currentGraph = new NeoGraph( graph, this.transactionProvider, this.db, getMappingLabel( graph.id ), this ); } diff --git a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/NeoEntity.java b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/NeoEntity.java index 0b9a28e942..792f82ad5c 100644 --- a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/NeoEntity.java +++ b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/NeoEntity.java @@ -26,33 +26,30 @@ import org.apache.calcite.linq4j.AbstractEnumerable; import org.apache.calcite.linq4j.Enumerable; import org.apache.calcite.linq4j.Enumerator; -import org.apache.calcite.linq4j.Queryable; import org.apache.calcite.linq4j.function.Function1; import org.neo4j.driver.Record; import org.neo4j.driver.Result; import org.neo4j.driver.Transaction; import org.polypheny.db.adapter.DataContext; -import org.polypheny.db.adapter.java.AbstractQueryableEntity; import org.polypheny.db.adapter.neo4j.rules.relational.NeoScan; import org.polypheny.db.adapter.neo4j.util.NeoUtil; import org.polypheny.db.algebra.AlgNode; -import org.polypheny.db.algebra.core.Modify; -import org.polypheny.db.algebra.core.Modify.Operation; -import org.polypheny.db.algebra.logical.relational.LogicalModify; +import org.polypheny.db.algebra.core.relational.RelModify; +import org.polypheny.db.algebra.core.common.Modify.Operation; +import org.polypheny.db.algebra.core.common.Modify; +import org.polypheny.db.algebra.logical.relational.LogicalRelModify; import org.polypheny.db.algebra.type.AlgDataType; -import org.polypheny.db.algebra.type.AlgDataTypeFactory; -import org.polypheny.db.algebra.type.AlgProtoDataType; +import org.polypheny.db.catalog.entity.CatalogEntity; +import 
org.polypheny.db.catalog.entity.physical.PhysicalTable; +import org.polypheny.db.catalog.refactor.ModifiableEntity; +import org.polypheny.db.catalog.refactor.TranslatableEntity; import org.polypheny.db.plan.AlgOptCluster; -import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptEntity.ToAlgContext; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.plan.Convention; -import org.polypheny.db.prepare.Prepare.CatalogReader; import org.polypheny.db.rex.RexNode; -import org.polypheny.db.schema.ModifiableEntity; import org.polypheny.db.schema.QueryableEntity; import org.polypheny.db.schema.SchemaPlus; -import org.polypheny.db.schema.TranslatableEntity; import org.polypheny.db.schema.impl.AbstractTableQueryable; import org.polypheny.db.type.PolyType; import org.polypheny.db.util.Pair; @@ -60,68 +57,49 @@ /** * Relational Neo4j representation of a {@link org.polypheny.db.schema.PolyphenyDbSchema} entity */ -public class NeoEntity extends AbstractQueryableEntity implements TranslatableEntity, ModifiableEntity { +public class NeoEntity extends PhysicalTable implements TranslatableEntity, ModifiableEntity { - public final String physicalEntityName; - public final AlgProtoDataType rowType; - - protected NeoEntity( String physicalEntityName, AlgProtoDataType proto, long id, long partitionId, long adapterId ) { - super( Object[].class, id, partitionId, adapterId ); - this.physicalEntityName = physicalEntityName; - this.rowType = proto; - } + private final AlgDataType rowType; - @Override - public Queryable asQueryable( DataContext dataContext, SchemaPlus schema, String tableName ) { - return new NeoQueryable<>( dataContext, schema, this, tableName ); - } - - - @Override - public AlgDataType getRowType( AlgDataTypeFactory typeFactory ) { - return rowType.apply( getTypeFactory() ); + protected NeoEntity( PhysicalTable table ) { + super( table ); + this.rowType = getRowType(); } @Override - public AlgNode toAlg( ToAlgContext context, AlgOptEntity algOptEntity, AlgTraitSet traitSet ) { + public AlgNode toAlg( ToAlgContext context, AlgTraitSet traitSet ) { final AlgOptCluster cluster = context.getCluster(); - return new NeoScan( cluster, traitSet.replace( NeoConvention.INSTANCE ), algOptEntity, this ); + return new NeoScan( cluster, traitSet.replace( NeoConvention.INSTANCE ), this ); } /** - * Creates an {@link org.polypheny.db.algebra.core.Modify} algebra object, which is modifies this relational entity. + * Creates a {@link RelModify} algebra object, which modifies this relational entity. 
* * @param child child algebra nodes of the created algebra operation * @param operation the operation type - * @param updateColumnList the target elements of the modification - * @param sourceExpressionList the modify operation to create the new values - * @param flattened if the {@link Modify} is flattened */ @Override - public Modify toModificationAlg( + public Modify toModificationAlg( AlgOptCluster cluster, - AlgOptEntity table, - CatalogReader catalogReader, + AlgTraitSet traits, + CatalogEntity physical, AlgNode child, Operation operation, - List updateColumnList, - List sourceExpressionList, - boolean flattened ) { + List targets, + List sources ) { NeoConvention.INSTANCE.register( cluster.getPlanner() ); - return new LogicalModify( - cluster, - cluster.traitSetOf( Convention.NONE ), - table, - catalogReader, + return new LogicalRelModify( + traits.replace( Convention.NONE ), + physical, child, operation, - updateColumnList, - sourceExpressionList, - flattened ); + targets, + sources + ); } @@ -137,14 +115,14 @@ public NeoQueryable( DataContext dataContext, SchemaPlus schema, QueryableEntity super( dataContext, schema, table, tableName ); this.entity = (NeoEntity) table; this.namespace = schema.unwrap( NeoSchema.class ); - this.rowType = entity.rowType.apply( entity.getTypeFactory() ); + this.rowType = entity.rowType; } @Override public Enumerator enumerator() { return execute( - String.format( "MATCH (n:%s) RETURN %s", entity.physicalEntityName, buildAllQuery() ), + String.format( "MATCH (n:%s) RETURN %s", entity.name, buildAllQuery() ), getTypes(), getComponentType(), Map.of() ).enumerator(); diff --git a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/NeoGraph.java b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/NeoGraph.java index 4eb98f0666..8808a079a5 100644 --- a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/NeoGraph.java +++ b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/NeoGraph.java @@ -16,6 +16,7 @@ package org.polypheny.db.adapter.neo4j; +import java.io.Serializable; import java.lang.reflect.Type; import java.util.ArrayList; import java.util.Iterator; @@ -28,7 +29,6 @@ import org.apache.calcite.linq4j.Enumerator; import org.apache.calcite.linq4j.Linq4j; import org.apache.calcite.linq4j.QueryProvider; -import org.apache.calcite.linq4j.Queryable; import org.apache.calcite.linq4j.function.Function1; import org.apache.calcite.linq4j.tree.Expression; import org.neo4j.driver.Driver; @@ -40,29 +40,26 @@ import org.polypheny.db.adapter.neo4j.rules.graph.NeoLpgScan; import org.polypheny.db.adapter.neo4j.util.NeoUtil; import org.polypheny.db.algebra.AlgNode; -import org.polypheny.db.algebra.core.Modify.Operation; +import org.polypheny.db.algebra.core.relational.RelModify; +import org.polypheny.db.algebra.core.common.Modify.Operation; import org.polypheny.db.algebra.core.lpg.LpgModify; import org.polypheny.db.algebra.logical.lpg.LogicalLpgModify; import org.polypheny.db.algebra.type.AlgDataType; -import org.polypheny.db.algebra.type.AlgDataTypeFactory; -import org.polypheny.db.catalog.entity.CatalogGraphDatabase; +import org.polypheny.db.catalog.entity.CatalogEntity; +import org.polypheny.db.catalog.entity.physical.PhysicalGraph; +import org.polypheny.db.catalog.refactor.ModifiableEntity; +import org.polypheny.db.catalog.refactor.TranslatableEntity; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgOptEntity.ToAlgContext; import org.polypheny.db.plan.AlgTraitSet; import 
org.polypheny.db.plan.Convention; -import org.polypheny.db.prepare.PolyphenyDbCatalogReader; import org.polypheny.db.rex.RexNode; import org.polypheny.db.runtime.PolyCollections.PolyMap; import org.polypheny.db.schema.ModelTrait; -import org.polypheny.db.schema.SchemaPlus; -import org.polypheny.db.schema.Statistic; -import org.polypheny.db.schema.TranslatableGraph; -import org.polypheny.db.schema.graph.ModifiableGraph; import org.polypheny.db.schema.graph.PolyEdge; import org.polypheny.db.schema.graph.PolyGraph; import org.polypheny.db.schema.graph.PolyNode; import org.polypheny.db.schema.graph.QueryableGraph; -import org.polypheny.db.schema.impl.AbstractNamespace; import org.polypheny.db.type.PolyType; import org.polypheny.db.util.Pair; @@ -70,18 +67,18 @@ /** * Graph entity in the Neo4j representation. */ -public class NeoGraph extends AbstractNamespace implements ModifiableGraph, TranslatableGraph, QueryableGraph { +public class NeoGraph extends PhysicalGraph implements TranslatableEntity, ModifiableEntity { - public final String name; public final TransactionProvider transactionProvider; public final Driver db; public final String mappingLabel; public final Neo4jStore store; + public final PhysicalGraph allocation; - public NeoGraph( String name, TransactionProvider transactionProvider, Driver db, long id, String mappingLabel, Neo4jStore store ) { - super( id ); - this.name = name; + public NeoGraph( PhysicalGraph graph, TransactionProvider transactionProvider, Driver db, String mappingLabel, Neo4jStore store ) { + super( graph.id, graph.name, graph.entityType, graph.namespaceType ); + this.allocation = graph; this.transactionProvider = transactionProvider; this.db = db; this.mappingLabel = mappingLabel; @@ -90,69 +87,43 @@ public NeoGraph( String name, TransactionProvider transactionProvider, Driver db /** - * Creates an {@link org.polypheny.db.algebra.core.Modify} algebra object, which is modifies this graph. + * Creates a {@link RelModify} algebra object, which modifies this graph. 
* - * @param graph the {@link org.polypheny.db.schema.PolyphenyDbSchema} graph object - * @param input the child nodes of the created algebra node + * @param cluster + * @param child the child nodes of the created algebra node * @param operation the modify operation - * @param ids the ids, which are modified by the created algebra opertions - * @param operations the operations to perform */ @Override - public LpgModify toModificationAlg( + public LpgModify toModificationAlg( AlgOptCluster cluster, AlgTraitSet traits, - CatalogGraphDatabase graph, - PolyphenyDbCatalogReader catalogReader, - AlgNode input, + CatalogEntity physicalEntity, + AlgNode child, Operation operation, - List ids, List operations ) { + List targets, + List sources ) { NeoConvention.INSTANCE.register( cluster.getPlanner() ); return new LogicalLpgModify( cluster, traits.replace( Convention.NONE ), - graph, - input, + physicalEntity, + child, operation, - ids, - operations ); + targets, + sources ); } @Override - public Expression getExpression( SchemaPlus schema, String tableName, Class clazz ) { - return null; - } - - - @Override - public AlgDataType getRowType( AlgDataTypeFactory typeFactory ) { - return null; - } - - - @Override - public Statistic getStatistic() { - return null; - } - - - @Override - public C unwrap( Class aClass ) { - return null; - } - - - @Override - public AlgNode toAlg( ToAlgContext context, org.polypheny.db.schema.graph.Graph graph ) { + public AlgNode toAlg( ToAlgContext context, AlgTraitSet traitSet ) { final AlgOptCluster cluster = context.getCluster(); return new NeoLpgScan( cluster, cluster.traitSetOf( NeoConvention.INSTANCE ).replace( ModelTrait.GRAPH ), this ); } @Override - public Queryable asQueryable( DataContext root, QueryableGraph graph ) { - return new NeoQueryable<>( root, this ); + public Serializable[] getParameterArray() { + return new Serializable[0]; } @@ -163,7 +134,7 @@ public static class NeoQueryable extends AbstractQueryable { private final DataContext dataContext; - public NeoQueryable( DataContext dataContext, org.polypheny.db.schema.graph.Graph graph ) { + public NeoQueryable( DataContext dataContext, QueryableGraph graph ) { this.dataContext = dataContext; this.graph = (NeoGraph) graph; } @@ -271,4 +242,10 @@ public Enumerator enumerator() { } + + @Override + public AlgDataType getRowType() { + return null; + } + } diff --git a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/NeoGraphImplementor.java b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/NeoGraphImplementor.java index 27903eade7..4ca84fced0 100644 --- a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/NeoGraphImplementor.java +++ b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/NeoGraphImplementor.java @@ -19,6 +19,7 @@ import static org.polypheny.db.adapter.neo4j.util.NeoStatements.as_; import static org.polypheny.db.adapter.neo4j.util.NeoStatements.distinct_; import static org.polypheny.db.adapter.neo4j.util.NeoStatements.edge_; +import static org.polypheny.db.adapter.neo4j.util.NeoStatements.labels_; import static org.polypheny.db.adapter.neo4j.util.NeoStatements.literal_; import static org.polypheny.db.adapter.neo4j.util.NeoStatements.match_; import static org.polypheny.db.adapter.neo4j.util.NeoStatements.node_; @@ -44,7 +45,6 @@ import org.polypheny.db.algebra.core.lpg.LpgProject; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeField; -import 
org.polypheny.db.catalog.entity.CatalogGraphDatabase; import org.polypheny.db.rex.RexLiteral; import org.polypheny.db.type.PathType; import org.polypheny.db.util.Pair; @@ -66,7 +66,7 @@ public class NeoGraphImplementor extends AlgShuttleImpl { @Setter @Getter - private CatalogGraphDatabase graph; + private NeoGraph graph; @Setter @Getter diff --git a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/NeoRelationalImplementor.java b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/NeoRelationalImplementor.java index f2f5c49c5e..25596877c2 100644 --- a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/NeoRelationalImplementor.java +++ b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/NeoRelationalImplementor.java @@ -57,7 +57,6 @@ import org.polypheny.db.algebra.constant.Kind; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeField; -import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.rex.RexDynamicParam; import org.polypheny.db.rex.RexLiteral; import org.polypheny.db.rex.RexNode; @@ -83,9 +82,7 @@ public class NeoRelationalImplementor extends AlgShuttleImpl { private boolean isDml; @Getter - private AlgOptEntity table; - - @Getter + @Setter private NeoEntity entity; @Getter @@ -109,12 +106,6 @@ public void addAll( List statements ) { } - public void setTable( AlgOptEntity table ) { - this.table = table; - this.entity = (NeoEntity) table.getEntity(); - } - - public void addValues( ImmutableList> tuples ) { this.values = tuples; } @@ -184,7 +175,7 @@ public void addWith( NeoProject project ) { public static Pair createCreate( ImmutableList> values, NeoEntity entity ) { int nodeI = 0; List nodes = new ArrayList<>(); - AlgDataType rowType = entity.getRowType( entity.getTypeFactory() ); + AlgDataType rowType = entity.getRowType(); for ( ImmutableList row : values ) { int pos = 0; @@ -196,7 +187,7 @@ public static Pair createCreate( Immut props.add( property_( rowType.getFieldList().get( pos ).getPhysicalName(), literal_( NeoUtil.rexAsString( value, null, false ) ) ) ); pos++; } - String name = entity.physicalEntityName; + String name = entity.name; nodes.add( NeoStatements.node_( name + nodeI, NeoStatements.labels_( name ), props ) ); nodeI++; @@ -226,7 +217,7 @@ public static NeoStatements.OperatorStatement create( Function1 public static OperatorStatement createProjectValues( NeoProject last, NeoEntity entity, NeoRelationalImplementor implementor ) { List properties = new ArrayList<>(); - List fields = entity.getRowType( entity.getTypeFactory() ).getFieldList(); + List fields = entity.getRowType().getFieldList(); int i = 0; for ( RexNode project : last.getProjects() ) { @@ -241,7 +232,7 @@ public static OperatorStatement createProjectValues( NeoProject last, NeoEntity } i++; } - String name = entity.physicalEntityName; + String name = entity.name; return create_( node_( name, labels_( name ), properties ) ); } @@ -286,8 +277,8 @@ public void addFilter( NeoFilter filter ) { private Map getToPhysicalMapping( @Nullable AlgNode node ) { Map mapping = new HashMap<>(); - for ( AlgDataTypeField field : table.getRowType().getFieldList() ) { - mapping.put( field.getName(), entity.physicalEntityName + "." + field.getPhysicalName() ); + for ( AlgDataTypeField field : entity.getRowType().getFieldList() ) { + mapping.put( field.getName(), entity.name + "." 
+ field.getPhysicalName() ); } if ( node instanceof NeoProject ) { @@ -305,7 +296,7 @@ private Map getToPhysicalMapping( @Nullable AlgNode node ) { public void addDelete() { - add( delete_( false, literal_( entity.physicalEntityName ) ) ); + add( delete_( false, literal_( entity.name ) ) ); } diff --git a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/NeoSchema.java b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/NeoSchema.java index f132e242aa..09360892e0 100644 --- a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/NeoSchema.java +++ b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/NeoSchema.java @@ -16,24 +16,15 @@ package org.polypheny.db.adapter.neo4j; -import java.util.List; import org.apache.calcite.linq4j.tree.Expression; import org.neo4j.driver.Driver; import org.neo4j.driver.Session; import org.polypheny.db.adapter.neo4j.Neo4jPlugin.Neo4jStore; -import org.polypheny.db.algebra.type.AlgDataType; -import org.polypheny.db.algebra.type.AlgDataTypeFactory; -import org.polypheny.db.algebra.type.AlgDataTypeImpl; -import org.polypheny.db.algebra.type.AlgDataTypeSystem; -import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.entity.CatalogColumn; -import org.polypheny.db.catalog.entity.CatalogColumnPlacement; -import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; -import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.catalog.entity.physical.PhysicalTable; import org.polypheny.db.schema.Entity; import org.polypheny.db.schema.Namespace.Schema; import org.polypheny.db.schema.impl.AbstractNamespace; -import org.polypheny.db.type.PolyTypeFactoryImpl; public class NeoSchema extends AbstractNamespace implements Schema { @@ -64,29 +55,13 @@ public NeoSchema( Driver db, Expression expression, TransactionProvider transact /** - * Creates a new table according to the given {@link CatalogTable} + * Creates a new table according to the given {@link LogicalTable} * - * @param combinedTable the table according to which the table is created - * @param columnPlacementsOnStore the placements ofr the table on the store - * @param partitionPlacement reference to the partition * @return the created table */ - public Entity createTable( CatalogTable combinedTable, List columnPlacementsOnStore, CatalogPartitionPlacement partitionPlacement ) { - final AlgDataTypeFactory typeFactory = new PolyTypeFactoryImpl( AlgDataTypeSystem.DEFAULT ); - final AlgDataTypeFactory.Builder fieldInfo = typeFactory.builder(); + public NeoEntity createTable( PhysicalTable table ) { - for ( CatalogColumnPlacement placement : columnPlacementsOnStore ) { - CatalogColumn catalogColumn = Catalog.getInstance().getColumn( placement.columnId ); - AlgDataType sqlType = catalogColumn.getAlgDataType( typeFactory ); - fieldInfo.add( catalogColumn.name, Neo4jPlugin.getPhysicalFieldName( catalogColumn.id ), sqlType ).nullable( catalogColumn.nullable ); - } - - return new NeoEntity( - Neo4jPlugin.getPhysicalEntityName( combinedTable.namespaceId, combinedTable.id, partitionPlacement.partitionId ), - AlgDataTypeImpl.proto( fieldInfo.build() ), - combinedTable.id, - partitionPlacement.partitionId, - store.getAdapterId() ); + return new NeoEntity( table ); } diff --git a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/rules/NeoRules.java b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/rules/NeoRules.java index 
60aeae6601..889a9763e3 100644 --- a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/rules/NeoRules.java +++ b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/rules/NeoRules.java @@ -29,7 +29,7 @@ import org.polypheny.db.algebra.convert.ConverterRule; import org.polypheny.db.algebra.core.AlgFactories; import org.polypheny.db.algebra.core.Filter; -import org.polypheny.db.algebra.core.Modify; +import org.polypheny.db.algebra.core.relational.RelModify; import org.polypheny.db.algebra.core.Project; import org.polypheny.db.algebra.core.Values; import org.polypheny.db.plan.AlgOptRule; @@ -62,10 +62,10 @@ public NeoConverterRule( Class clazz, Predicate NeoModifyRule( Class clazz, Predicate @Override public AlgNode convert( AlgNode alg ) { - Modify modify = (Modify) alg; + RelModify modify = (RelModify) alg; return new NeoModify( modify.getCluster(), modify.getTraitSet().replace( NeoConvention.INSTANCE ), diff --git a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/rules/graph/NeoLpgModify.java b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/rules/graph/NeoLpgModify.java index 4e85b8a3e5..74f33e8c73 100644 --- a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/rules/graph/NeoLpgModify.java +++ b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/rules/graph/NeoLpgModify.java @@ -39,12 +39,11 @@ import org.polypheny.db.adapter.neo4j.util.Translator; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.constant.Kind; -import org.polypheny.db.algebra.core.Modify.Operation; import org.polypheny.db.algebra.core.lpg.LpgModify; import org.polypheny.db.algebra.core.lpg.LpgProject; import org.polypheny.db.algebra.core.lpg.LpgValues; import org.polypheny.db.algebra.operators.OperatorName; -import org.polypheny.db.catalog.entity.CatalogGraphDatabase; +import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgOptUtil; import org.polypheny.db.plan.AlgTraitSet; @@ -63,7 +62,7 @@ public class NeoLpgModify extends LpgModify implements NeoGraphAlg { * @param traits Traits active for this node, including {@link org.polypheny.db.schema.ModelTrait#GRAPH} * @param input Input algebraic expression */ - public NeoLpgModify( AlgOptCluster cluster, AlgTraitSet traits, CatalogGraphDatabase graph, AlgNode input, Operation operation, List ids, List operations ) { + public NeoLpgModify( AlgOptCluster cluster, AlgTraitSet traits, LogicalGraph graph, AlgNode input, Operation operation, List ids, List operations ) { super( cluster, traits, graph, input, operation, ids, operations, AlgOptUtil.createDmlRowType( Kind.INSERT, cluster.getTypeFactory() ) ); } diff --git a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/rules/graph/NeoLpgScan.java b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/rules/graph/NeoLpgScan.java index fcec0f02ec..55abfa7124 100644 --- a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/rules/graph/NeoLpgScan.java +++ b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/rules/graph/NeoLpgScan.java @@ -21,9 +21,11 @@ import org.polypheny.db.adapter.neo4j.rules.NeoGraphAlg; import org.polypheny.db.algebra.core.lpg.LpgScan; import org.polypheny.db.algebra.type.AlgDataTypeField; +import org.polypheny.db.catalog.entity.allocation.AllocationGraph; +import org.polypheny.db.catalog.entity.logical.LogicalGraph; +import 
org.polypheny.db.catalog.refactor.PhysicalEntity; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgTraitSet; -import org.polypheny.db.schema.TranslatableGraph; import org.polypheny.db.type.PolyType; public class NeoLpgScan extends LpgScan implements NeoGraphAlg { @@ -35,7 +37,7 @@ public class NeoLpgScan extends LpgScan implements NeoGraphAlg { * @param cluster Cluster this expression belongs to * @param traitSet Traits active for this node, including {@link org.polypheny.db.schema.ModelTrait#GRAPH} */ - public NeoLpgScan( AlgOptCluster cluster, AlgTraitSet traitSet, TranslatableGraph graph ) { + public NeoLpgScan( AlgOptCluster cluster, AlgTraitSet traitSet, NeoGraph graph ) { super( cluster, traitSet, graph ); } diff --git a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/rules/relational/NeoModify.java b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/rules/relational/NeoModify.java index 4025eb9d5e..f264259d52 100644 --- a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/rules/relational/NeoModify.java +++ b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/rules/relational/NeoModify.java @@ -17,17 +17,17 @@ package org.polypheny.db.adapter.neo4j.rules.relational; import java.util.List; +import org.polypheny.db.adapter.neo4j.NeoEntity; import org.polypheny.db.adapter.neo4j.NeoRelationalImplementor; import org.polypheny.db.adapter.neo4j.rules.NeoRelAlg; import org.polypheny.db.algebra.AlgNode; -import org.polypheny.db.algebra.core.Modify; +import org.polypheny.db.algebra.core.relational.RelModify; import org.polypheny.db.plan.AlgOptCluster; -import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.prepare.Prepare.CatalogReader; import org.polypheny.db.rex.RexNode; -public class NeoModify extends Modify implements NeoRelAlg { +public class NeoModify extends RelModify implements NeoRelAlg { /** * Creates a {@code Modify}. @@ -47,7 +47,7 @@ public class NeoModify extends Modify implements NeoRelAlg { * @param sourceExpressionList List of value expressions to be set (e.g. 
exp1, exp2); null if not UPDATE * @param flattened Whether set flattens the input row type */ - public NeoModify( AlgOptCluster cluster, AlgTraitSet traitSet, AlgOptEntity table, CatalogReader catalogReader, AlgNode input, Operation operation, List updateColumnList, List sourceExpressionList, boolean flattened ) { + public NeoModify( AlgOptCluster cluster, AlgTraitSet traitSet, NeoEntity table, CatalogReader catalogReader, AlgNode input, Operation operation, List updateColumnList, List sourceExpressionList, boolean flattened ) { super( cluster, traitSet, table, catalogReader, input, operation, updateColumnList, sourceExpressionList, flattened ); } @@ -55,7 +55,7 @@ public NeoModify( AlgOptCluster cluster, AlgTraitSet traitSet, AlgOptEntity tabl @Override public void implement( NeoRelationalImplementor implementor ) { assert getEntity() != null; - implementor.setTable( getEntity() ); + implementor.setEntity( entity ); implementor.setDml( true ); implementor.visitChild( 0, getInput() ); @@ -101,7 +101,7 @@ public AlgNode copy( AlgTraitSet traitSet, List inputs ) { return new NeoModify( inputs.get( 0 ).getCluster(), traitSet, - table, + entity, catalogReader, inputs.get( 0 ), getOperation(), diff --git a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/rules/relational/NeoScan.java b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/rules/relational/NeoScan.java index ae33d7a479..f3df76932b 100644 --- a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/rules/relational/NeoScan.java +++ b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/rules/relational/NeoScan.java @@ -32,39 +32,34 @@ import org.polypheny.db.adapter.neo4j.rules.NeoRelAlg; import org.polypheny.db.adapter.neo4j.util.NeoStatements.NeoStatement; import org.polypheny.db.algebra.AlgNode; -import org.polypheny.db.algebra.core.Scan; +import org.polypheny.db.algebra.core.relational.RelScan; import org.polypheny.db.plan.AlgOptCluster; -import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgTraitSet; -public class NeoScan extends Scan implements NeoRelAlg { +public class NeoScan extends RelScan implements NeoRelAlg { - private final NeoEntity neoEntity; - - - public NeoScan( AlgOptCluster cluster, AlgTraitSet traitSet, AlgOptEntity table, NeoEntity neoEntity ) { - super( cluster, traitSet, table ); - this.neoEntity = neoEntity; + public NeoScan( AlgOptCluster cluster, AlgTraitSet traitSet, NeoEntity neoEntity ) { + super( cluster, traitSet, neoEntity ); } @Override public void implement( NeoRelationalImplementor implementor ) { - if ( implementor.getTable() != null && !Objects.equals( table.getEntity().getId(), implementor.getTable().getEntity().getId() ) ) { + if ( implementor.getEntity() != null && !Objects.equals( entity.id, implementor.getEntity().id ) ) { handleInsertFromOther( implementor ); return; } - implementor.setTable( table ); + implementor.setEntity( entity ); - implementor.add( match_( node_( neoEntity.physicalEntityName, labels_( neoEntity.physicalEntityName ) ) ) ); + implementor.add( match_( node_( entity.name, labels_( entity.name ) ) ) ); if ( !implementor.isDml() ) { - List mapping = table + List mapping = entity .getRowType() .getFieldList() - .stream().map( f -> as_( literal_( neoEntity.physicalEntityName + "." + f.getPhysicalName() ), literal_( f.getName() ) ) ) + .stream().map( f -> as_( literal_( entity.name + "." 
+ f.getPhysicalName() ), literal_( f.getName() ) ) ) .collect( Collectors.toList() ); implementor.add( with_( list_( mapping ) ) ); @@ -73,13 +68,13 @@ public void implement( NeoRelationalImplementor implementor ) { private void handleInsertFromOther( NeoRelationalImplementor implementor ) { - implementor.selectFromTable = (NeoEntity) table.getEntity(); + implementor.selectFromTable = entity; } @Override public AlgNode copy( AlgTraitSet traitSet, List inputs ) { - return new NeoScan( getCluster(), traitSet, this.getEntity(), neoEntity ); + return new NeoScan( getCluster(), traitSet, entity ); } } diff --git a/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigAggregate.java b/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigAggregate.java index a61397440f..22fcd5fccd 100644 --- a/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigAggregate.java +++ b/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigAggregate.java @@ -43,8 +43,8 @@ import org.polypheny.db.algebra.core.Aggregate; import org.polypheny.db.algebra.core.AggregateCall; import org.polypheny.db.algebra.type.AlgDataTypeField; +import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.plan.AlgOptCluster; -import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.util.ImmutableBitSet; @@ -96,7 +96,7 @@ private String getPigAggregateStatement( Implementor implementor ) { * Override this method so it looks down the tree to find the table this node is acting on. */ @Override - public AlgOptEntity getEntity() { + public CatalogEntity getEntity() { return getInput().getEntity(); } diff --git a/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigAlgFactories.java b/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigAlgFactories.java index 530dca3ad9..b4202312c6 100644 --- a/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigAlgFactories.java +++ b/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigAlgFactories.java @@ -43,8 +43,8 @@ import org.polypheny.db.algebra.core.AlgFactories.ScanFactory; import org.polypheny.db.algebra.core.CorrelationId; import org.polypheny.db.algebra.core.JoinAlgType; +import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.plan.AlgOptCluster; -import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.Context; import org.polypheny.db.plan.Contexts; import org.polypheny.db.rex.RexNode; @@ -73,8 +73,8 @@ public static class PigScanFactory implements ScanFactory { @Override - public AlgNode createScan( AlgOptCluster cluster, AlgOptEntity table ) { - return new PigScan( cluster, cluster.traitSetOf( PigAlg.CONVENTION ), table ); + public AlgNode createScan( AlgOptCluster cluster, CatalogEntity entity ) { + return new PigScan( cluster, cluster.traitSetOf( PigAlg.CONVENTION ), entity ); } } diff --git a/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigFilter.java b/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigFilter.java index 62ef4111ab..a949c4b9e8 100644 --- a/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigFilter.java +++ b/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigFilter.java @@ -42,8 +42,8 @@ import java.util.List; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.core.Filter; +import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.plan.AlgOptCluster; -import 
org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptUtil; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.rex.RexCall; @@ -83,7 +83,7 @@ public void implement( Implementor implementor ) { * Override this method so it looks down the tree to find the table this node is acting on. */ @Override - public AlgOptEntity getEntity() { + public CatalogEntity getEntity() { return getInput().getEntity(); } diff --git a/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigJoin.java b/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigJoin.java index 6f527cd310..0aff75ed34 100644 --- a/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigJoin.java +++ b/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigJoin.java @@ -41,8 +41,8 @@ import org.polypheny.db.algebra.constant.Kind; import org.polypheny.db.algebra.core.Join; import org.polypheny.db.algebra.core.JoinAlgType; +import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.plan.AlgOptCluster; -import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptUtil; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.rex.RexCall; @@ -82,7 +82,7 @@ public void implement( Implementor implementor ) { * The Pig alias of the joined relation will have the same name as one from the left side of the join. */ @Override - public AlgOptEntity getEntity() { + public CatalogEntity getEntity() { return getLeft().getEntity(); } diff --git a/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigProject.java b/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigProject.java index cb72573c14..9c4ff1e4ed 100644 --- a/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigProject.java +++ b/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigProject.java @@ -38,8 +38,8 @@ import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.core.Project; import org.polypheny.db.algebra.type.AlgDataType; +import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.plan.AlgOptCluster; -import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.rex.RexNode; @@ -74,7 +74,7 @@ public void implement( Implementor implementor ) { * Override this method so it looks down the tree to find the table this node is acting on. */ @Override - public AlgOptEntity getEntity() { + public CatalogEntity getEntity() { return getInput().getEntity(); } diff --git a/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigScan.java b/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigScan.java index b32c85a40f..90283a10a4 100644 --- a/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigScan.java +++ b/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigScan.java @@ -38,7 +38,7 @@ import java.util.List; import org.apache.pig.data.DataType; import org.polypheny.db.adapter.enumerable.EnumerableRules; -import org.polypheny.db.algebra.core.Scan; +import org.polypheny.db.algebra.core.relational.RelScan; import org.polypheny.db.algebra.rules.AggregateExpandDistinctAggregatesRule; import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.plan.AlgOptCluster; @@ -50,9 +50,9 @@ /** - * Implementation of {@link Scan} in {@link PigAlg#CONVENTION Pig calling convention}. + * Implementation of {@link RelScan} in {@link PigAlg#CONVENTION Pig calling convention}. 
*/ -public class PigScan extends Scan implements PigAlg { +public class PigScan extends RelScan implements PigAlg { /** * Creates a PigScan. diff --git a/plugins/pig-language/src/main/java/org/polypheny/db/tools/PigAlgBuilder.java b/plugins/pig-language/src/main/java/org/polypheny/db/tools/PigAlgBuilder.java index 94327885b2..f7c77bc72d 100644 --- a/plugins/pig-language/src/main/java/org/polypheny/db/tools/PigAlgBuilder.java +++ b/plugins/pig-language/src/main/java/org/polypheny/db/tools/PigAlgBuilder.java @@ -40,16 +40,16 @@ import org.apache.calcite.linq4j.Ord; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.core.JoinAlgType; -import org.polypheny.db.algebra.core.Scan; +import org.polypheny.db.algebra.core.relational.RelScan; import org.polypheny.db.algebra.operators.OperatorName; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.languages.OperatorRegistry; import org.polypheny.db.plan.AlgOptCluster; -import org.polypheny.db.plan.AlgOptSchema; import org.polypheny.db.plan.Context; import org.polypheny.db.plan.Contexts; import org.polypheny.db.rex.RexBuilder; import org.polypheny.db.rex.RexNode; +import org.polypheny.db.schema.PolyphenyDbSchema; import org.polypheny.db.transaction.Statement; import org.polypheny.db.util.Util; @@ -62,8 +62,8 @@ public class PigAlgBuilder extends AlgBuilder { private String lastAlias; - private PigAlgBuilder( Context context, AlgOptCluster cluster, AlgOptSchema algOptSchema ) { - super( context, cluster, algOptSchema ); + private PigAlgBuilder( Context context, AlgOptCluster cluster, PolyphenyDbSchema schema ) { + super( context, cluster, schema ); } @@ -72,12 +72,12 @@ private PigAlgBuilder( Context context, AlgOptCluster cluster, AlgOptSchema algO */ public static PigAlgBuilder create( FrameworkConfig config ) { final AlgBuilder algBuilder = AlgBuilder.create( config ); - return new PigAlgBuilder( config.getContext(), algBuilder.cluster, algBuilder.algOptSchema ); + return new PigAlgBuilder( config.getContext(), algBuilder.cluster, algBuilder.schema ); } public static PigAlgBuilder create( Statement statement, AlgOptCluster cluster ) { - return new PigAlgBuilder( Contexts.EMPTY_CONTEXT, cluster, statement.getTransaction().getCatalogReader() ); + return new PigAlgBuilder( Contexts.EMPTY_CONTEXT, cluster, statement.getTransaction().getCatalogReader().getRootSchema() ); } @@ -203,8 +203,8 @@ String getAlias() { return lastAlias; } else { AlgNode top = peek(); - if ( top instanceof Scan ) { - return top.getEntity().getCatalogEntity().name; + if ( top instanceof RelScan ) { + return top.getEntity().name; } else { return null; } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java index 22a4e0294e..7b561e6427 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java @@ -30,7 +30,7 @@ import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.catalog.Catalog.NamespaceType; import org.polypheny.db.catalog.entities.CatalogUser; -import org.polypheny.db.catalog.entity.CatalogGraphDatabase; +import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.catalog.logical.document.DocumentCatalog; import org.polypheny.db.catalog.logical.graph.GraphCatalog; import org.polypheny.db.catalog.logical.relational.RelationalCatalog; @@ -199,7 +199,7 @@ public 
AlgOptEntity getCollection( List names ) { @Override - public CatalogGraphDatabase getGraph( String name ) { + public LogicalGraph getGraph( String name ) { return null; } diff --git a/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java b/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java index fea4951046..5ad5d8001e 100644 --- a/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java +++ b/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java @@ -29,10 +29,7 @@ import org.polypheny.db.adapter.DeployMode; import org.polypheny.db.adapter.jdbc.sources.AbstractJdbcSource; import org.polypheny.db.catalog.Adapter; -import org.polypheny.db.catalog.entity.CatalogColumnPlacement; -import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; -import org.polypheny.db.catalog.entity.CatalogTable; -import org.polypheny.db.schema.Entity; +import org.polypheny.db.catalog.entity.physical.PhysicalTable; import org.polypheny.db.schema.Namespace; import org.polypheny.db.sql.language.dialect.PostgresqlSqlDialect; @@ -85,7 +82,7 @@ public static void register() { @Override - public Entity createTableSchema( CatalogTable catalogTable, List columnPlacementsOnStore, CatalogPartitionPlacement partitionPlacement ) { + public PhysicalTable createTableSchema( PhysicalTable boilerplate ) { return currentJdbcSchema.createJdbcTable( catalogTable, columnPlacementsOnStore, partitionPlacement ); } diff --git a/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/store/PostgresqlStore.java b/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/store/PostgresqlStore.java index 1abf0cbfc4..ef46799fd5 100644 --- a/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/store/PostgresqlStore.java +++ b/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/store/PostgresqlStore.java @@ -42,13 +42,13 @@ import org.polypheny.db.catalog.entity.CatalogColumnPlacement; import org.polypheny.db.catalog.entity.CatalogIndex; import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; -import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.catalog.entity.physical.PhysicalTable; import org.polypheny.db.docker.DockerManager; import org.polypheny.db.docker.DockerManager.Container; import org.polypheny.db.docker.DockerManager.ContainerBuilder; import org.polypheny.db.plugins.PolyPluginManager; import org.polypheny.db.prepare.Context; -import org.polypheny.db.schema.Entity; import org.polypheny.db.schema.Namespace; import org.polypheny.db.sql.language.dialect.PostgresqlSqlDialect; import org.polypheny.db.transaction.PUID; @@ -195,7 +195,7 @@ public void updateColumnType( Context context, CatalogColumnPlacement columnPlac @Override - public Entity createTableSchema( CatalogTable catalogTable, List columnPlacementsOnStore, CatalogPartitionPlacement partitionPlacement ) { + public PhysicalTable createTableSchema( PhysicalTable boilerplate ) { return currentJdbcSchema.createJdbcTable( catalogTable, columnPlacementsOnStore, partitionPlacement ); } @@ -296,7 +296,7 @@ public AvailableIndexMethod getDefaultIndexMethod() { @Override - public List getFunctionalIndexes( CatalogTable catalogTable ) { + public List getFunctionalIndexes( LogicalTable catalogTable ) { return ImmutableList.of(); } 
diff --git a/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/RequestParser.java b/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/RequestParser.java index 4901cd85d3..4c061934e0 100644 --- a/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/RequestParser.java +++ b/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/RequestParser.java @@ -43,7 +43,7 @@ import org.polypheny.db.algebra.operators.OperatorName; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogColumn; -import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.entity.CatalogUser; import org.polypheny.db.catalog.exceptions.UnknownColumnException; import org.polypheny.db.catalog.exceptions.UnknownDatabaseException; @@ -139,7 +139,7 @@ static Pair decodeBasicAuthorization( String encodedAuthorizatio public ResourceGetRequest parseGetResourceRequest( HttpServletRequest req, String resourceName ) throws ParserException { - List<CatalogTable> tables = this.parseTables( resourceName ); + List<LogicalTable> tables = this.parseTables( resourceName ); List requestColumns = this.newParseProjectionsAndAggregations( getProjectionsValues( req ), tables ); Map nameMapping = this.newGenerateNameMapping( requestColumns ); @@ -158,7 +158,7 @@ public ResourceGetRequest parseGetResourceRequest( HttpServletRequest req, Strin public ResourcePostRequest parsePostResourceRequest( Context ctx, String resourceName, Gson gson ) throws ParserException { - List<CatalogTable> tables = this.parseTables( resourceName ); + List<LogicalTable> tables = this.parseTables( resourceName ); List requestColumns = this.newParseProjectionsAndAggregations( getProjectionsValues( ctx.req ), tables ); Map nameMapping = this.newGenerateNameMapping( requestColumns ); List>> values = this.parseValues( ctx, gson, nameMapping ); @@ -168,7 +168,7 @@ public ResourcePostRequest parsePostResourceRequest( Context ctx, String resourc public ResourcePostRequest parsePostMultipartRequest( String resourceName, String[] projections, List insertValues ) throws ParserException { - List<CatalogTable> tables = this.parseTables( resourceName ); + List<LogicalTable> tables = this.parseTables( resourceName ); List requestColumns = this.newParseProjectionsAndAggregations( projections, tables ); Map nameMapping = this.newGenerateNameMapping( requestColumns ); List>> values = parseInsertStatementBody( insertValues, nameMapping ); @@ -179,7 +179,7 @@ public ResourcePostRequest parsePostMultipartRequest( String resourceName, Strin public ResourcePatchRequest parsePatchResourceRequest( Context ctx, String resourceName, Gson gson ) throws ParserException { // TODO js: make sure it's only a single resource - List<CatalogTable> tables = this.parseTables( resourceName ); + List<LogicalTable> tables = this.parseTables( resourceName ); // TODO js: make sure there are no actual projections List requestColumns = this.newParseProjectionsAndAggregations( getProjectionsValues( ctx.req ), tables ); Map nameMapping = this.newGenerateNameMapping( requestColumns ); @@ -193,7 +193,7 @@ public ResourcePatchRequest parsePatchResourceRequest( Context ctx, String resou public ResourcePatchRequest parsePatchMultipartRequest( String resourceName, String[] projections, Map filterMap, List insertValues ) { - List<CatalogTable> tables = this.parseTables( resourceName ); + List<LogicalTable> tables = this.parseTables( resourceName ); List requestColumns = this.newParseProjectionsAndAggregations( projections, tables ); Map nameMapping = this.newGenerateNameMapping( requestColumns ); Filters 
filters = this.parseFilters( filterMap, nameMapping ); @@ -204,7 +204,7 @@ public ResourcePatchRequest parsePatchMultipartRequest( String resourceName, Str public ResourceDeleteRequest parseDeleteResourceRequest( HttpServletRequest request, String resourceName ) throws ParserException { // TODO js: make sure it's only a single resource - List<CatalogTable> tables = this.parseTables( resourceName ); + List<LogicalTable> tables = this.parseTables( resourceName ); List requestColumns = this.newParseProjectionsAndAggregations( getProjectionsValues( request ), tables ); @@ -224,16 +224,16 @@ public ResourceDeleteRequest parseDeleteResourceRequest( HttpServletRequest requ * @throws ParserException thrown if unable to parse table list */ @VisibleForTesting - List<CatalogTable> parseTables( String tableList ) throws ParserException { + List<LogicalTable> parseTables( String tableList ) throws ParserException { log.debug( "Starting to parse table list: {}", tableList ); if ( tableList == null ) { throw new ParserException( ParserErrorCode.TABLE_LIST_GENERIC, "null" ); } String[] tableNameList = tableList.split( "," ); - List<CatalogTable> tables = new ArrayList<>(); + List<LogicalTable> tables = new ArrayList<>(); for ( String tableName : tableNameList ) { - CatalogTable temp = this.parseCatalogTableName( tableName ); + LogicalTable temp = this.parseCatalogTableName( tableName ); tables.add( temp ); log.debug( "Added table \"{}\" to table list.", tableName ); } @@ -251,7 +251,7 @@ List parseTables( String tableList ) throws ParserException { * @throws ParserException thrown if unable to parse table name */ @VisibleForTesting - CatalogTable parseCatalogTableName( String tableName ) throws ParserException { + LogicalTable parseCatalogTableName( String tableName ) throws ParserException { String[] tableElements = tableName.split( "\\." ); if ( tableElements.length != 2 ) { log.warn( "Table name \"{}\" not possible to parse.", tableName ); @@ -259,7 +259,7 @@ CatalogTable parseCatalogTableName( String tableName ) throws ParserException { } try { - CatalogTable table = this.catalog.getTable( this.databaseName, tableElements[0], tableElements[1] ); + LogicalTable table = this.catalog.getTable( this.databaseName, tableElements[0], tableElements[1] ); if ( log.isDebugEnabled() ) { log.debug( "Finished parsing table \"{}\".", tableName ); } @@ -272,12 +272,12 @@ @VisibleForTesting - List newParseProjectionsAndAggregations( String[] possibleProjectionValues, List<CatalogTable> tables ) throws ParserException { + List newParseProjectionsAndAggregations( String[] possibleProjectionValues, List<LogicalTable> tables ) throws ParserException { // Helper structures & data Map tableOffsets = new HashMap<>(); Set validColumns = new HashSet<>(); int columnOffset = 0; - for ( CatalogTable table : tables ) { + for ( LogicalTable table : tables ) { tableOffsets.put( table.id, columnOffset ); validColumns.addAll( table.fieldIds ); columnOffset += table.fieldIds.size(); @@ -304,10 +304,10 @@ private String[] getProjectionsValues( HttpServletRequest request ) { @VisibleForTesting - List generateRequestColumnsWithoutProject( List<CatalogTable> tables, Map tableOffsets ) { + List generateRequestColumnsWithoutProject( List<LogicalTable> tables, Map tableOffsets ) { List columns = new ArrayList<>(); long internalPosition = 0L; - for ( CatalogTable table : tables ) { + for ( LogicalTable table : tables ) { for ( long columnId : table.fieldIds ) { CatalogColumn column = this.catalog.getColumn( columnId ); int calculatedPosition = tableOffsets.get( table.id ) + column.position - 1; @@ 
-744,9 +744,9 @@ private List> parseInsertStatementValues( Map rowVal } - public Map generateNameMapping( List<CatalogTable> tables ) { + public Map generateNameMapping( List<LogicalTable> tables ) { Map nameMapping = new HashMap<>(); - for ( CatalogTable table : tables ) { + for ( LogicalTable table : tables ) { for ( CatalogColumn column : this.catalog.getColumns( table.id ) ) { nameMapping.put( column.getSchemaName() + "." + column.getTableName() + "." + column.name, column ); } diff --git a/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/Rest.java b/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/Rest.java index 96cf751c64..fb538c7bd9 100644 --- a/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/Rest.java +++ b/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/Rest.java @@ -38,15 +38,16 @@ import org.polypheny.db.algebra.constant.Kind; import org.polypheny.db.algebra.core.AggregateCall; import org.polypheny.db.algebra.core.JoinAlgType; -import org.polypheny.db.algebra.core.Modify; +import org.polypheny.db.algebra.core.relational.RelModify; import org.polypheny.db.algebra.core.Sort; +import org.polypheny.db.algebra.core.common.Modify; import org.polypheny.db.algebra.fun.AggFunction; -import org.polypheny.db.algebra.logical.relational.LogicalModify; +import org.polypheny.db.algebra.logical.relational.LogicalRelModify; import org.polypheny.db.algebra.logical.relational.LogicalValues; import org.polypheny.db.algebra.operators.OperatorName; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeField; -import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.exceptions.UnknownDatabaseException; import org.polypheny.db.catalog.exceptions.UnknownSchemaException; import org.polypheny.db.catalog.exceptions.UnknownUserException; @@ -181,13 +182,13 @@ String processPatchResource( final ResourcePatchRequest resourcePatchRequest, fi List rexValues = this.valuesNode( statement, algBuilder, rexBuilder, resourcePatchRequest, tableRows, inputStreams ).get( 0 ); AlgNode algNode = algBuilder.build(); - Modify modify = new LogicalModify( + RelModify modify = new LogicalRelModify( cluster, algNode.getTraitSet(), table, catalogReader, algNode, - LogicalModify.Operation.UPDATE, + Modify.Operation.UPDATE, valueColumnNames, rexValues, false @@ -234,13 +235,13 @@ String processDeleteResource( final ResourceDeleteRequest resourceDeleteRequest, AlgOptCluster cluster = AlgOptCluster.create( planner, rexBuilder ); AlgNode algNode = algBuilder.build(); - Modify modify = new LogicalModify( + RelModify modify = new LogicalRelModify( cluster, algNode.getTraitSet(), table, catalogReader, algNode, - LogicalModify.Operation.DELETE, + Modify.Operation.DELETE, null, null, false @@ -286,13 +287,13 @@ String processPostResource( final ResourcePostRequest insertValueRequest, final // Table Modify AlgNode algNode = algBuilder.build(); - Modify modify = new LogicalModify( + RelModify modify = new LogicalRelModify( cluster, algNode.getTraitSet(), table, catalogReader, algNode, - LogicalModify.Operation.INSERT, + Modify.Operation.INSERT, null, null, false @@ -313,9 +314,9 @@ String processPostResource( final ResourcePostRequest insertValueRequest, final @VisibleForTesting - AlgBuilder tableScans( AlgBuilder algBuilder, RexBuilder rexBuilder, List<CatalogTable> tables ) { + AlgBuilder tableScans( AlgBuilder algBuilder, RexBuilder rexBuilder, List<LogicalTable> tables ) { boolean firstTable = true; - for 
( CatalogTable catalogTable : tables ) { + for ( LogicalTable catalogTable : tables ) { if ( firstTable ) { algBuilder = algBuilder.scan( catalogTable.getNamespaceName(), catalogTable.name ); firstTable = false; diff --git a/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/models/requests/ResourceDeleteRequest.java b/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/models/requests/ResourceDeleteRequest.java index e37e0db2dd..2f2bc7ff7a 100644 --- a/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/models/requests/ResourceDeleteRequest.java +++ b/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/models/requests/ResourceDeleteRequest.java @@ -20,7 +20,7 @@ import java.util.List; import java.util.Map; import lombok.AllArgsConstructor; -import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.restapi.RequestColumn; import org.polypheny.db.restapi.RequestParser.Filters; @@ -28,7 +28,7 @@ @AllArgsConstructor public class ResourceDeleteRequest { - public final List<CatalogTable> tables; + public final List<LogicalTable> tables; public final List requestColumns; public final Map nameMapping; public final Filters filters; diff --git a/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/models/requests/ResourceGetRequest.java b/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/models/requests/ResourceGetRequest.java index f04feff7e1..aabd6d5b74 100644 --- a/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/models/requests/ResourceGetRequest.java +++ b/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/models/requests/ResourceGetRequest.java @@ -20,7 +20,7 @@ import java.util.List; import java.util.Map; import lombok.AllArgsConstructor; -import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.restapi.RequestColumn; import org.polypheny.db.restapi.RequestParser.Filters; import org.polypheny.db.util.Pair; @@ -29,7 +29,7 @@ @AllArgsConstructor public class ResourceGetRequest { - public final List<CatalogTable> tables; + public final List<LogicalTable> tables; public final List requestColumns; public final Map nameMapping; // public final List> aggregates; diff --git a/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/models/requests/ResourcePatchRequest.java b/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/models/requests/ResourcePatchRequest.java index e1a9e64988..fbf2b01a39 100644 --- a/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/models/requests/ResourcePatchRequest.java +++ b/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/models/requests/ResourcePatchRequest.java @@ -19,7 +19,7 @@ import java.util.List; import java.util.Map; -import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.restapi.RequestColumn; import org.polypheny.db.restapi.RequestParser.Filters; import org.polypheny.db.util.Pair; @@ -31,7 +31,7 @@ public class ResourcePatchRequest extends ResourceValuesRequest { public final Filters filters; - public ResourcePatchRequest( List<CatalogTable> tables, List requestColumns, List>> values, Map nameMapping, Filters filters, boolean useDynamicParams ) { + public ResourcePatchRequest( List<LogicalTable> tables, List requestColumns, List>> values, Map nameMapping, Filters filters, boolean useDynamicParams ) { super( tables, requestColumns, values, useDynamicParams ); this.nameMapping = 
diff --git a/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/models/requests/ResourcePatchRequest.java b/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/models/requests/ResourcePatchRequest.java
index e1a9e64988..fbf2b01a39 100644
--- a/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/models/requests/ResourcePatchRequest.java
+++ b/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/models/requests/ResourcePatchRequest.java
@@ -19,7 +19,7 @@
 import java.util.List;
 import java.util.Map;
-import org.polypheny.db.catalog.entity.CatalogTable;
+import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.restapi.RequestColumn;
 import org.polypheny.db.restapi.RequestParser.Filters;
 import org.polypheny.db.util.Pair;
@@ -31,7 +31,7 @@ public class ResourcePatchRequest extends ResourceValuesRequest {
 
     public final Filters filters;
 
-    public ResourcePatchRequest( List<CatalogTable> tables, List<RequestColumn> requestColumns, List<List<Pair<RequestColumn, Object>>> values, Map<String, String> nameMapping, Filters filters, boolean useDynamicParams ) {
+    public ResourcePatchRequest( List<LogicalTable> tables, List<RequestColumn> requestColumns, List<List<Pair<RequestColumn, Object>>> values, Map<String, String> nameMapping, Filters filters, boolean useDynamicParams ) {
         super( tables, requestColumns, values, useDynamicParams );
         this.nameMapping = nameMapping;
         this.filters = filters;
diff --git a/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/models/requests/ResourcePostRequest.java b/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/models/requests/ResourcePostRequest.java
index ceec9215bb..177bde0fef 100644
--- a/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/models/requests/ResourcePostRequest.java
+++ b/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/models/requests/ResourcePostRequest.java
@@ -19,7 +19,7 @@
 import java.util.List;
 import java.util.Map;
-import org.polypheny.db.catalog.entity.CatalogTable;
+import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.restapi.RequestColumn;
 import org.polypheny.db.util.Pair;
@@ -29,7 +29,7 @@ public class ResourcePostRequest extends ResourceValuesRequest {
 
     public final Map<String, String> nameMapping;
 
-    public ResourcePostRequest( List<CatalogTable> tables, List<RequestColumn> requestColumns, Map<String, String> nameMapping, List<List<Pair<RequestColumn, Object>>> values, boolean useDynamicParams ) {
+    public ResourcePostRequest( List<LogicalTable> tables, List<RequestColumn> requestColumns, Map<String, String> nameMapping, List<List<Pair<RequestColumn, Object>>> values, boolean useDynamicParams ) {
         super( tables, requestColumns, values, useDynamicParams );
         this.nameMapping = nameMapping;
     }
diff --git a/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/models/requests/ResourceValuesRequest.java b/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/models/requests/ResourceValuesRequest.java
index 59152c2aa7..bbd35821e3 100644
--- a/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/models/requests/ResourceValuesRequest.java
+++ b/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/models/requests/ResourceValuesRequest.java
@@ -19,7 +19,7 @@
 import java.util.List;
 import lombok.AllArgsConstructor;
-import org.polypheny.db.catalog.entity.CatalogTable;
+import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.restapi.RequestColumn;
 import org.polypheny.db.util.Pair;
@@ -27,7 +27,7 @@
 @AllArgsConstructor
 public abstract class ResourceValuesRequest {
 
-    public final List<CatalogTable> tables;
+    public final List<LogicalTable> tables;
     public final List<RequestColumn> requestColumns;
     public final List<List<Pair<RequestColumn, Object>>> values;
     public boolean useDynamicParams;
diff --git a/plugins/rest-interface/src/test/java/org/polypheny/db/restapi/RequestParserTest.java b/plugins/rest-interface/src/test/java/org/polypheny/db/restapi/RequestParserTest.java
index f1ce56bb7e..eecc063b52 100644
--- a/plugins/rest-interface/src/test/java/org/polypheny/db/restapi/RequestParserTest.java
+++ b/plugins/rest-interface/src/test/java/org/polypheny/db/restapi/RequestParserTest.java
@@ -28,7 +28,7 @@
 import org.junit.rules.ExpectedException;
 import org.polypheny.db.algebra.operators.OperatorName;
 import org.polypheny.db.catalog.Catalog;
-import org.polypheny.db.catalog.entity.CatalogTable;
+import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.catalog.exceptions.UnknownDatabaseException;
 import org.polypheny.db.catalog.exceptions.UnknownSchemaException;
 import org.polypheny.db.catalog.exceptions.UnknownTableException;
@@ -72,7 +72,7 @@ public void testParseCatalogTableName() throws UnknownTableException, UnknownSch
                 null,
                 "username",
                 "testdb" );
-        CatalogTable table = requestParser.parseCatalogTableName( "schema1.table1." );
+        LogicalTable table = requestParser.parseCatalogTableName( "schema1.table1." );
         verify( mockedCatalog ).getTable( "testdb", "schema1", "table1" );
     }
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/SqlProcessorImpl.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/SqlProcessorImpl.java
index fbb1288f0a..bb4cfef196 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/SqlProcessorImpl.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/SqlProcessorImpl.java
@@ -40,7 +40,7 @@
 import org.polypheny.db.catalog.Catalog.NamespaceType;
 import org.polypheny.db.catalog.entity.CatalogColumn;
 import org.polypheny.db.catalog.entity.CatalogDefaultValue;
-import org.polypheny.db.catalog.entity.CatalogTable;
+import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.catalog.exceptions.UnknownDatabaseException;
 import org.polypheny.db.catalog.exceptions.UnknownSchemaException;
 import org.polypheny.db.catalog.exceptions.UnknownTableException;
@@ -251,7 +251,7 @@ private void addDefaultValues( Transaction transaction, SqlInsert insert ) {
         SqlNodeList oldColumnList = insert.getTargetColumnList();
 
         if ( oldColumnList != null ) {
-            CatalogTable catalogTable = getCatalogTable( transaction, (SqlIdentifier) insert.getTargetTable() );
+            LogicalTable catalogTable = getCatalogTable( transaction, (SqlIdentifier) insert.getTargetTable() );
             NamespaceType namespaceType = Catalog.getInstance().getSchema( catalogTable.namespaceId ).namespaceType;
 
             catalogTable = getCatalogTable( transaction, (SqlIdentifier) insert.getTargetTable() );
@@ -361,8 +361,8 @@ private void addDefaultValues( Transaction transaction, SqlInsert insert ) {
     }
 
 
-    private CatalogTable getCatalogTable( Transaction transaction, SqlIdentifier tableName ) {
-        CatalogTable catalogTable;
+    private LogicalTable getCatalogTable( Transaction transaction, SqlIdentifier tableName ) {
+        LogicalTable catalogTable;
         try {
             long schemaId;
             String tableOldName;
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlDdl.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlDdl.java
index 9d0fa334f0..6852e8040f 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlDdl.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlDdl.java
@@ -27,7 +27,7 @@
 import org.polypheny.db.algebra.constant.Kind;
 import org.polypheny.db.catalog.Catalog;
 import org.polypheny.db.catalog.entity.CatalogColumn;
-import org.polypheny.db.catalog.entity.CatalogTable;
+import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.catalog.exceptions.UnknownColumnException;
 import org.polypheny.db.catalog.exceptions.UnknownDatabaseException;
 import org.polypheny.db.catalog.exceptions.UnknownSchemaException;
@@ -66,8 +66,8 @@ public Operator getOperator() {
     }
 
 
-    protected CatalogTable getCatalogTable( Context context, SqlIdentifier tableName ) {
-        CatalogTable catalogTable;
+    protected LogicalTable getCatalogTable( Context context, SqlIdentifier tableName ) {
+        LogicalTable catalogTable;
         try {
             long schemaId;
             String tableOldName;
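All of the DDL statement classes that follow repeat one pattern against the retyped SqlDdl helper above: resolve the identifier to a LogicalTable, guard on its entity type, then delegate to the DdlManager. A condensed sketch of that pattern (the enclosing class is hypothetical; the guard and error message are taken verbatim from the hunks below):

    @Override
    public void execute( Context context, Statement statement, QueryParameters parameters ) {
        // getCatalogTable(...) now returns a LogicalTable instead of a CatalogTable.
        LogicalTable catalogTable = getCatalogTable( context, table );
        // Most ALTER TABLE statements are only valid on plain tables.
        if ( catalogTable.entityType != EntityType.ENTITY ) {
            throw new RuntimeException( "Not possible to use ALTER TABLE because " + catalogTable.name + " is not a table." );
        }
        // ... the statement-specific DdlManager call follows here.
    }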
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlDropMaterializedView.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlDropMaterializedView.java
index 71b17d809b..91adc8e436 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlDropMaterializedView.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlDropMaterializedView.java
@@ -20,7 +20,7 @@
 import org.polypheny.db.algebra.constant.Kind;
 import org.polypheny.db.catalog.Catalog.EntityType;
-import org.polypheny.db.catalog.entity.CatalogTable;
+import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.ddl.DdlManager;
 import org.polypheny.db.ddl.exception.DdlOnSourceException;
 import org.polypheny.db.languages.ParserPos;
@@ -49,7 +49,7 @@ public class SqlDropMaterializedView extends SqlDropObject {
 
     @Override
     public void execute( Context context, Statement statement, QueryParameters parameters ) {
-        final CatalogTable catalogTable;
+        final LogicalTable catalogTable;
         try {
             catalogTable = getCatalogTable( context, name );
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlDropTable.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlDropTable.java
index 2143c01165..450df2bbef 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlDropTable.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlDropTable.java
@@ -20,7 +20,7 @@
 import static org.polypheny.db.util.Static.RESOURCE;
 import org.polypheny.db.algebra.constant.Kind;
-import org.polypheny.db.catalog.entity.CatalogTable;
+import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.ddl.DdlManager;
 import org.polypheny.db.ddl.exception.DdlOnSourceException;
 import org.polypheny.db.languages.ParserPos;
@@ -52,7 +52,7 @@ public class SqlDropTable extends SqlDropObject {
 
     @Override
     public void execute( Context context, Statement statement, QueryParameters parameters ) {
-        final CatalogTable table;
+        final LogicalTable table;
         try {
             table = getCatalogTable( context, name );
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlDropView.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlDropView.java
index d8d5075463..e3567978e3 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlDropView.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlDropView.java
@@ -21,7 +21,7 @@
 import org.polypheny.db.algebra.constant.Kind;
 import org.polypheny.db.catalog.Catalog.EntityType;
-import org.polypheny.db.catalog.entity.CatalogTable;
+import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.ddl.DdlManager;
 import org.polypheny.db.ddl.exception.DdlOnSourceException;
 import org.polypheny.db.languages.ParserPos;
@@ -53,7 +53,7 @@ public class SqlDropView extends SqlDropObject {
 
     @Override
     public void execute( Context context, Statement statement, QueryParameters parameters ) {
-        final CatalogTable catalogTable;
+        final LogicalTable catalogTable;
         try {
             catalogTable = getCatalogTable( context, name );
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlTruncate.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlTruncate.java
index 4c2838ec85..08818d927a 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlTruncate.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlTruncate.java
@@ -20,7 +20,7 @@
 import com.google.common.collect.ImmutableList;
 import java.util.List;
 import org.polypheny.db.algebra.constant.Kind;
-import org.polypheny.db.catalog.entity.CatalogTable;
+import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.ddl.DdlManager;
 import org.polypheny.db.languages.ParserPos;
 import org.polypheny.db.languages.QueryParameters;
@@ -77,7 +77,7 @@ public void unparse( SqlWriter writer, int leftPrec, int rightPrec ) {
 
     @Override
     public void execute( Context context, Statement statement, QueryParameters parameters ) {
-        CatalogTable table = getCatalogTable( context, name );
+        LogicalTable table = getCatalogTable( context, name );
         DdlManager.getInstance().truncate( table, statement );
     }
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altermaterializedview/SqlAlterMaterializedViewAddIndex.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altermaterializedview/SqlAlterMaterializedViewAddIndex.java
index 3a2ef66786..ed06b2c822 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altermaterializedview/SqlAlterMaterializedViewAddIndex.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altermaterializedview/SqlAlterMaterializedViewAddIndex.java
@@ -23,7 +23,7 @@
 import java.util.stream.Collectors;
 import org.polypheny.db.adapter.DataStore;
 import org.polypheny.db.catalog.Catalog.EntityType;
-import org.polypheny.db.catalog.entity.CatalogTable;
+import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.catalog.exceptions.GenericCatalogException;
 import org.polypheny.db.catalog.exceptions.UnknownColumnException;
 import org.polypheny.db.catalog.exceptions.UnknownDatabaseException;
@@ -119,7 +119,7 @@ public void unparse( SqlWriter writer, int leftPrec, int rightPrec ) {
 
     @Override
     public void execute( Context context, Statement statement, QueryParameters parameters ) {
-        CatalogTable catalogTable = getCatalogTable( context, table );
+        LogicalTable catalogTable = getCatalogTable( context, table );
         if ( catalogTable.entityType != EntityType.MATERIALIZED_VIEW ) {
             throw new RuntimeException( "Not Possible to use ALTER MATERIALIZED VIEW because " + catalogTable.name + " is not a Materialized View." );
         }
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altermaterializedview/SqlAlterMaterializedViewDropIndex.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altermaterializedview/SqlAlterMaterializedViewDropIndex.java
index 1a8b6fd6be..66a2b77207 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altermaterializedview/SqlAlterMaterializedViewDropIndex.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altermaterializedview/SqlAlterMaterializedViewDropIndex.java
@@ -21,7 +21,7 @@
 import java.util.List;
 import java.util.Objects;
 import org.polypheny.db.catalog.Catalog.EntityType;
-import org.polypheny.db.catalog.entity.CatalogTable;
+import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.ddl.DdlManager;
 import org.polypheny.db.ddl.exception.DdlOnSourceException;
 import org.polypheny.db.languages.ParserPos;
@@ -75,7 +75,7 @@ public void unparse( SqlWriter writer, int leftPrec, int rightPrec ) {
 
     @Override
     public void execute( Context context, Statement statement, QueryParameters parameters ) {
-        CatalogTable catalogTable = getCatalogTable( context, table );
+        LogicalTable catalogTable = getCatalogTable( context, table );
         if ( catalogTable.entityType != EntityType.MATERIALIZED_VIEW ) {
             throw new RuntimeException( "Not Possible to use ALTER MATERIALIZED VIEW because " + catalogTable.name + " is not a Materialized View." );
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altermaterializedview/SqlAlterMaterializedViewFreshnessManual.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altermaterializedview/SqlAlterMaterializedViewFreshnessManual.java
index 68cbcd8f5e..f0e91cf3d0 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altermaterializedview/SqlAlterMaterializedViewFreshnessManual.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altermaterializedview/SqlAlterMaterializedViewFreshnessManual.java
@@ -18,7 +18,7 @@
 import java.util.List;
 import org.polypheny.db.catalog.Catalog.EntityType;
-import org.polypheny.db.catalog.entity.CatalogTable;
+import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.ddl.DdlManager;
 import org.polypheny.db.languages.ParserPos;
 import org.polypheny.db.languages.QueryParameters;
@@ -66,7 +66,7 @@ public void unparse( SqlWriter writer, int leftPrec, int rightPrec ) {
 
     @Override
     public void execute( Context context, Statement statement, QueryParameters parameters ) {
-        CatalogTable catalogTable = getCatalogTable( context, name );
+        LogicalTable catalogTable = getCatalogTable( context, name );
         if ( catalogTable.entityType != EntityType.MATERIALIZED_VIEW ) {
             throw new RuntimeException( "Not Possible to use ALTER MATERIALIZED VIEW because " + catalogTable.name + " is not a Materialized View." );
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altermaterializedview/SqlAlterMaterializedViewRename.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altermaterializedview/SqlAlterMaterializedViewRename.java
index 4f7eb5032b..2a2ab1e111 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altermaterializedview/SqlAlterMaterializedViewRename.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altermaterializedview/SqlAlterMaterializedViewRename.java
@@ -21,7 +21,7 @@
 import java.util.List;
 import java.util.Objects;
 import org.polypheny.db.catalog.Catalog.EntityType;
-import org.polypheny.db.catalog.entity.CatalogTable;
+import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.catalog.exceptions.EntityAlreadyExistsException;
 import org.polypheny.db.ddl.DdlManager;
 import org.polypheny.db.languages.ParserPos;
@@ -78,7 +78,7 @@ public void unparse( SqlWriter writer, int leftPrec, int rightPrec ) {
 
     @Override
     public void execute( Context context, Statement statement, QueryParameters parameters ) {
-        CatalogTable catalogTable = getCatalogTable( context, oldName );
+        LogicalTable catalogTable = getCatalogTable( context, oldName );
         if ( catalogTable.entityType != EntityType.MATERIALIZED_VIEW ) {
             throw new RuntimeException( "Not Possible to use ALTER MATERIALIZED VIEW because " + catalogTable.name + " is not a Materialized View." );
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altermaterializedview/SqlAlterMaterializedViewRenameColumn.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altermaterializedview/SqlAlterMaterializedViewRenameColumn.java
index a6eeee3170..c2f364ce4e 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altermaterializedview/SqlAlterMaterializedViewRenameColumn.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altermaterializedview/SqlAlterMaterializedViewRenameColumn.java
@@ -21,7 +21,7 @@
 import java.util.List;
 import java.util.Objects;
 import org.polypheny.db.catalog.Catalog.EntityType;
-import org.polypheny.db.catalog.entity.CatalogTable;
+import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.catalog.exceptions.ColumnAlreadyExistsException;
 import org.polypheny.db.ddl.DdlManager;
 import org.polypheny.db.ddl.exception.ColumnNotExistsException;
@@ -80,7 +80,7 @@ public void unparse( SqlWriter writer, int leftPrec, int rightPrec ) {
 
     @Override
     public void execute( Context context, Statement statement, QueryParameters parameters ) {
-        CatalogTable catalogTable = getCatalogTable( context, materializedView );
+        LogicalTable catalogTable = getCatalogTable( context, materializedView );
         if ( catalogTable.entityType != EntityType.MATERIALIZED_VIEW ) {
             throw new RuntimeException( "Not Possible to use ALTER MATERIALIZED VIEW because " + catalogTable.name + " is not a Materialized View." );
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterSourceTableAddColumn.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterSourceTableAddColumn.java
index 40667b8d99..8a4e8290e0 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterSourceTableAddColumn.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterSourceTableAddColumn.java
@@ -23,7 +23,7 @@
 import java.util.Objects;
 import lombok.extern.slf4j.Slf4j;
 import org.polypheny.db.catalog.Catalog.EntityType;
-import org.polypheny.db.catalog.entity.CatalogTable;
+import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.catalog.exceptions.ColumnAlreadyExistsException;
 import org.polypheny.db.ddl.DdlManager;
 import org.polypheny.db.ddl.exception.ColumnNotExistsException;
@@ -111,7 +111,7 @@ public void unparse( SqlWriter writer, int leftPrec, int rightPrec ) {
 
     @Override
     public void execute( Context context, Statement statement, QueryParameters parameters ) {
-        CatalogTable catalogTable = getCatalogTable( context, table );
+        LogicalTable catalogTable = getCatalogTable( context, table );
         if ( catalogTable.entityType != EntityType.SOURCE ) {
             throw new RuntimeException( "Not possible to use ALTER TABLE because " + catalogTable.name + " is not a source table." );
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddColumn.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddColumn.java
index ff2f66be3e..d0ab735a59 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddColumn.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddColumn.java
@@ -23,7 +23,7 @@
 import java.util.Objects;
 import lombok.extern.slf4j.Slf4j;
 import org.polypheny.db.catalog.Catalog.EntityType;
-import org.polypheny.db.catalog.entity.CatalogTable;
+import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.catalog.exceptions.ColumnAlreadyExistsException;
 import org.polypheny.db.ddl.DdlManager;
 import org.polypheny.db.ddl.DdlManager.ColumnTypeInformation;
@@ -120,7 +120,7 @@ public void unparse( SqlWriter writer, int leftPrec, int rightPrec ) {
 
     @Override
     public void execute( Context context, Statement statement, QueryParameters parameters ) {
-        CatalogTable catalogTable = getCatalogTable( context, table );
+        LogicalTable catalogTable = getCatalogTable( context, table );
         if ( catalogTable.entityType != EntityType.ENTITY ) {
             throw new RuntimeException( "Not possible to use ALTER TABLE because " + catalogTable.name + " is not a table." );
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddForeignKey.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddForeignKey.java
index bd3dae5dbb..5632d57454 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddForeignKey.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddForeignKey.java
@@ -24,7 +24,7 @@
 import java.util.stream.Collectors;
 import org.polypheny.db.catalog.Catalog.EntityType;
 import org.polypheny.db.catalog.Catalog.ForeignKeyOption;
-import org.polypheny.db.catalog.entity.CatalogTable;
+import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.catalog.exceptions.GenericCatalogException;
 import org.polypheny.db.catalog.exceptions.UnknownColumnException;
 import org.polypheny.db.catalog.exceptions.UnknownForeignKeyOptionException;
@@ -109,8 +109,8 @@ public void unparse( SqlWriter writer, int leftPrec, int rightPrec ) {
 
     @Override
     public void execute( Context context, Statement statement, QueryParameters parameters ) {
-        CatalogTable catalogTable = getCatalogTable( context, table );
-        CatalogTable refTable = getCatalogTable( context, referencesTable );
+        LogicalTable catalogTable = getCatalogTable( context, table );
+        LogicalTable refTable = getCatalogTable( context, referencesTable );
 
         // Make sure that this is a table of type TABLE (and not SOURCE)
         if ( catalogTable.entityType != EntityType.ENTITY ) {
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddIndex.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddIndex.java
index ae928f943f..e1aa64471f 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddIndex.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddIndex.java
@@ -24,7 +24,7 @@
 import java.util.stream.Collectors;
 import org.polypheny.db.adapter.DataStore;
 import org.polypheny.db.catalog.Catalog.EntityType;
-import org.polypheny.db.catalog.entity.CatalogTable;
+import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.catalog.exceptions.GenericCatalogException;
 import org.polypheny.db.catalog.exceptions.UnknownColumnException;
 import org.polypheny.db.catalog.exceptions.UnknownDatabaseException;
@@ -122,7 +122,7 @@ public void unparse( SqlWriter writer, int leftPrec, int rightPrec ) {
 
     @Override
     public void execute( Context context, Statement statement, QueryParameters parameters ) {
-        CatalogTable catalogTable = getCatalogTable( context, table );
+        LogicalTable catalogTable = getCatalogTable( context, table );
         if ( catalogTable.entityType != EntityType.ENTITY && catalogTable.entityType != EntityType.MATERIALIZED_VIEW ) {
             throw new RuntimeException( "Not possible to use ALTER TABLE ADD INDEX because " + catalogTable.name + " is not a table or materialized view." );
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddPartitions.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddPartitions.java
index 22a52e09c9..4ef1a32ce8 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddPartitions.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddPartitions.java
@@ -25,7 +25,7 @@
 import lombok.extern.slf4j.Slf4j;
 import org.polypheny.db.catalog.Catalog;
 import org.polypheny.db.catalog.Catalog.EntityType;
-import org.polypheny.db.catalog.entity.CatalogTable;
+import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.catalog.exceptions.GenericCatalogException;
 import org.polypheny.db.catalog.exceptions.UnknownColumnException;
 import org.polypheny.db.catalog.exceptions.UnknownDatabaseException;
@@ -146,7 +146,7 @@ public void unparse( SqlWriter writer, int leftPrec, int rightPrec ) {
 
     @Override
     public void execute( Context context, Statement statement, QueryParameters parameters ) {
-        CatalogTable catalogTable = getCatalogTable( context, table );
+        LogicalTable catalogTable = getCatalogTable( context, table );
         if ( catalogTable.entityType != EntityType.ENTITY ) {
             throw new RuntimeException( "Not possible to use ALTER TABLE because " + catalogTable.name + " is not a table." );
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddPlacement.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddPlacement.java
index dc9d2f4437..f8017eea06 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddPlacement.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddPlacement.java
@@ -27,7 +27,7 @@
 import org.polypheny.db.adapter.DataStore;
 import org.polypheny.db.catalog.Catalog.EntityType;
 import org.polypheny.db.catalog.entity.CatalogColumn;
-import org.polypheny.db.catalog.entity.CatalogTable;
+import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.ddl.DdlManager;
 import org.polypheny.db.ddl.exception.PlacementAlreadyExistsException;
 import org.polypheny.db.languages.ParserPos;
@@ -117,7 +117,7 @@ public void unparse( SqlWriter writer, int leftPrec, int rightPrec ) {
 
     @Override
     public void execute( Context context, Statement statement, QueryParameters parameters ) {
-        CatalogTable catalogTable = getCatalogTable( context, table );
+        LogicalTable catalogTable = getCatalogTable( context, table );
         DataStore storeInstance = getDataStoreInstance( storeName );
 
         if ( catalogTable.entityType != EntityType.ENTITY ) {
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddPrimaryKey.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddPrimaryKey.java
index dc0a4d2164..da4fd7479e 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddPrimaryKey.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddPrimaryKey.java
@@ -23,7 +23,7 @@
 import java.util.Objects;
 import java.util.stream.Collectors;
 import org.polypheny.db.catalog.Catalog.EntityType;
-import org.polypheny.db.catalog.entity.CatalogTable;
+import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.ddl.DdlManager;
 import org.polypheny.db.ddl.exception.DdlOnSourceException;
 import org.polypheny.db.languages.ParserPos;
@@ -82,7 +82,7 @@ public void unparse( SqlWriter writer, int leftPrec, int rightPrec ) {
 
     @Override
     public void execute( Context context, Statement statement, QueryParameters parameters ) {
-        CatalogTable catalogTable = getCatalogTable( context, table );
+        LogicalTable catalogTable = getCatalogTable( context, table );
         if ( catalogTable.entityType != EntityType.ENTITY ) {
             throw new RuntimeException( "Not possible to use ALTER TABLE because " + catalogTable.name + " is not a table." );
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddUniqueConstraint.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddUniqueConstraint.java
index 2a0b3dbab1..b684294035 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddUniqueConstraint.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddUniqueConstraint.java
@@ -23,7 +23,7 @@
 import java.util.Objects;
 import java.util.stream.Collectors;
 import org.polypheny.db.catalog.Catalog.EntityType;
-import org.polypheny.db.catalog.entity.CatalogTable;
+import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.ddl.DdlManager;
 import org.polypheny.db.ddl.exception.DdlOnSourceException;
 import org.polypheny.db.languages.ParserPos;
@@ -89,7 +89,7 @@ public void unparse( SqlWriter writer, int leftPrec, int rightPrec ) {
 
     @Override
     public void execute( Context context, Statement statement, QueryParameters parameters ) {
-        CatalogTable catalogTable = getCatalogTable( context, table );
+        LogicalTable catalogTable = getCatalogTable( context, table );
         if ( catalogTable.entityType != EntityType.ENTITY ) {
             throw new RuntimeException( "Not possible to use ALTER TABLE because " + catalogTable.name + " is not a table." );
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableDropColumn.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableDropColumn.java
index c3e0b8237a..ead565fdc6 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableDropColumn.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableDropColumn.java
@@ -22,7 +22,7 @@
 import java.util.List;
 import java.util.Objects;
 import org.polypheny.db.catalog.Catalog.EntityType;
-import org.polypheny.db.catalog.entity.CatalogTable;
+import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.ddl.DdlManager;
 import org.polypheny.db.ddl.exception.ColumnNotExistsException;
 import org.polypheny.db.languages.ParserPos;
@@ -79,7 +79,7 @@ public void unparse( SqlWriter writer, int leftPrec, int rightPrec ) {
 
     @Override
     public void execute( Context context, Statement statement, QueryParameters parameters ) {
-        CatalogTable catalogTable = getCatalogTable( context, table );
+        LogicalTable catalogTable = getCatalogTable( context, table );
         if ( catalogTable.entityType != EntityType.ENTITY && catalogTable.entityType != EntityType.SOURCE ) {
             throw new RuntimeException( "Not possible to use ALTER TABLE because " + catalogTable.name + " is not a table." );
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableDropConstraint.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableDropConstraint.java
index 27562b4e4c..b9b8a68bca 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableDropConstraint.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableDropConstraint.java
@@ -22,7 +22,7 @@
 import java.util.List;
 import java.util.Objects;
 import org.polypheny.db.catalog.Catalog.EntityType;
-import org.polypheny.db.catalog.entity.CatalogTable;
+import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.ddl.DdlManager;
 import org.polypheny.db.ddl.exception.DdlOnSourceException;
 import org.polypheny.db.languages.ParserPos;
@@ -79,7 +79,7 @@ public void unparse( SqlWriter writer, int leftPrec, int rightPrec ) {
 
     @Override
     public void execute( Context context, Statement statement, QueryParameters parameters ) {
-        CatalogTable catalogTable = getCatalogTable( context, table );
+        LogicalTable catalogTable = getCatalogTable( context, table );
         if ( catalogTable.entityType != EntityType.ENTITY ) {
             throw new RuntimeException( "Not possible to use ALTER TABLE because " + catalogTable.name + " is not a table." );
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableDropForeignKey.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableDropForeignKey.java
index 44e5dd8a71..c48cd21e05 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableDropForeignKey.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableDropForeignKey.java
@@ -22,7 +22,7 @@
 import java.util.List;
 import java.util.Objects;
 import org.polypheny.db.catalog.Catalog.EntityType;
-import org.polypheny.db.catalog.entity.CatalogTable;
+import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.ddl.DdlManager;
 import org.polypheny.db.ddl.exception.DdlOnSourceException;
 import org.polypheny.db.languages.ParserPos;
@@ -80,7 +80,7 @@ public void unparse( SqlWriter writer, int leftPrec, int rightPrec ) {
 
     @Override
     public void execute( Context context, Statement statement, QueryParameters parameters ) {
-        CatalogTable catalogTable = getCatalogTable( context, table );
+        LogicalTable catalogTable = getCatalogTable( context, table );
         if ( catalogTable.entityType != EntityType.ENTITY ) {
             throw new RuntimeException( "Not possible to use ALTER TABLE because " + catalogTable.name + " is not a table." );
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableDropIndex.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableDropIndex.java
index 20d599ca17..9bda8a231e 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableDropIndex.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableDropIndex.java
@@ -22,7 +22,7 @@
 import java.util.List;
 import java.util.Objects;
 import org.polypheny.db.catalog.Catalog.EntityType;
-import org.polypheny.db.catalog.entity.CatalogTable;
+import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.ddl.DdlManager;
 import org.polypheny.db.ddl.exception.DdlOnSourceException;
 import org.polypheny.db.languages.ParserPos;
@@ -79,7 +79,7 @@ public void unparse( SqlWriter writer, int leftPrec, int rightPrec ) {
 
     @Override
     public void execute( Context context, Statement statement, QueryParameters parameters ) {
-        CatalogTable catalogTable = getCatalogTable( context, table );
+        LogicalTable catalogTable = getCatalogTable( context, table );
         if ( catalogTable.entityType != EntityType.ENTITY && catalogTable.entityType != EntityType.MATERIALIZED_VIEW ) {
             throw new RuntimeException( "Not possible to use ALTER TABLE DROP INDEX because " + catalogTable.name + " is not a table or materialized view." );
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableDropPlacement.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableDropPlacement.java
index b513ee97cf..c40dc04fba 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableDropPlacement.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableDropPlacement.java
@@ -23,7 +23,7 @@
 import java.util.Objects;
 import org.polypheny.db.adapter.DataStore;
 import org.polypheny.db.catalog.Catalog.EntityType;
-import org.polypheny.db.catalog.entity.CatalogTable;
+import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.ddl.DdlManager;
 import org.polypheny.db.ddl.exception.LastPlacementException;
 import org.polypheny.db.ddl.exception.PlacementNotExistsException;
@@ -83,7 +83,7 @@ public void unparse( SqlWriter writer, int leftPrec, int rightPrec ) {
 
     @Override
     public void execute( Context context, Statement statement, QueryParameters parameters ) {
-        CatalogTable catalogTable = getCatalogTable( context, table );
+        LogicalTable catalogTable = getCatalogTable( context, table );
         DataStore storeInstance = getDataStoreInstance( storeName );
 
         if ( catalogTable.entityType != EntityType.ENTITY ) {
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableDropPrimaryKey.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableDropPrimaryKey.java
index 818ed7d507..c25dab2c6b 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableDropPrimaryKey.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableDropPrimaryKey.java
@@ -22,7 +22,7 @@
 import java.util.List;
 import java.util.Objects;
 import org.polypheny.db.catalog.Catalog.EntityType;
-import org.polypheny.db.catalog.entity.CatalogTable;
+import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.ddl.DdlManager;
 import org.polypheny.db.ddl.exception.DdlOnSourceException;
 import org.polypheny.db.languages.ParserPos;
@@ -77,7 +77,7 @@ public void unparse( SqlWriter writer, int leftPrec, int rightPrec ) {
 
     @Override
     public void execute( Context context, Statement statement, QueryParameters parameters ) {
-        CatalogTable catalogTable = getCatalogTable( context, table );
+        LogicalTable catalogTable = getCatalogTable( context, table );
         if ( catalogTable.entityType != EntityType.ENTITY ) {
             throw new RuntimeException( "Not possible to use ALTER TABLE because " + catalogTable.name + " is not a table." );
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableMergePartitions.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableMergePartitions.java
index f07a193629..9c3cfc4549 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableMergePartitions.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableMergePartitions.java
@@ -22,7 +22,7 @@
 import lombok.extern.slf4j.Slf4j;
 import org.polypheny.db.catalog.Catalog;
 import org.polypheny.db.catalog.Catalog.EntityType;
-import org.polypheny.db.catalog.entity.CatalogTable;
+import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.catalog.exceptions.GenericCatalogException;
 import org.polypheny.db.catalog.exceptions.UnknownDatabaseException;
 import org.polypheny.db.catalog.exceptions.UnknownKeyException;
@@ -82,7 +82,7 @@ public void unparse( SqlWriter writer, int leftPrec, int rightPrec ) {
 
     @Override
     public void execute( Context context, Statement statement, QueryParameters parameters ) {
-        CatalogTable catalogTable = getCatalogTable( context, table );
+        LogicalTable catalogTable = getCatalogTable( context, table );
         if ( catalogTable.entityType != EntityType.ENTITY ) {
             throw new RuntimeException( "Not possible to use ALTER TABLE because " + catalogTable.name + " is not a table." );
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyColumn.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyColumn.java
index 02b0c6bff7..d0b9d2373d 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyColumn.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyColumn.java
@@ -23,7 +23,7 @@
 import lombok.NonNull;
 import org.polypheny.db.catalog.Catalog.Collation;
 import org.polypheny.db.catalog.Catalog.EntityType;
-import org.polypheny.db.catalog.entity.CatalogTable;
+import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.catalog.exceptions.GenericCatalogException;
 import org.polypheny.db.catalog.exceptions.UnknownCollationException;
 import org.polypheny.db.ddl.DdlManager;
@@ -145,7 +145,7 @@ public void unparse( SqlWriter writer, int leftPrec, int rightPrec ) {
 
     @Override
     public void execute( Context context, Statement statement, QueryParameters parameters ) {
-        CatalogTable catalogTable = getCatalogTable( context, tableName );
+        LogicalTable catalogTable = getCatalogTable( context, tableName );
         if ( catalogTable.entityType != EntityType.ENTITY ) {
             throw new RuntimeException( "Not possible to use ALTER TABLE because " + catalogTable.name + " is not a table." );
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyPartitions.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyPartitions.java
index 04043007a7..3c1045c801 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyPartitions.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyPartitions.java
@@ -29,7 +29,7 @@
 import org.polypheny.db.catalog.Catalog;
 import org.polypheny.db.catalog.Catalog.EntityType;
 import org.polypheny.db.catalog.entity.CatalogPartitionGroup;
-import org.polypheny.db.catalog.entity.CatalogTable;
+import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.ddl.DdlManager;
 import org.polypheny.db.ddl.exception.LastPlacementException;
 import org.polypheny.db.languages.ParserPos;
@@ -99,7 +99,7 @@ public void unparse( SqlWriter writer, int leftPrec, int rightPrec ) {
     @Override
     public void execute( Context context, Statement statement, QueryParameters parameters ) {
         Catalog catalog = Catalog.getInstance();
-        CatalogTable catalogTable = getCatalogTable( context, table );
+        LogicalTable catalogTable = getCatalogTable( context, table );
         if ( catalogTable.entityType != EntityType.ENTITY ) {
             throw new RuntimeException( "Not possible to use ALTER TABLE because " + catalogTable.name + " is not a table." );
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyPlacement.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyPlacement.java
index 2124ed3f8c..e88ab7adcb 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyPlacement.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyPlacement.java
@@ -25,7 +25,7 @@
 import lombok.extern.slf4j.Slf4j;
 import org.polypheny.db.adapter.DataStore;
 import org.polypheny.db.catalog.Catalog.EntityType;
-import org.polypheny.db.catalog.entity.CatalogTable;
+import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.ddl.DdlManager;
 import org.polypheny.db.ddl.exception.IndexPreventsRemovalException;
 import org.polypheny.db.ddl.exception.LastPlacementException;
@@ -116,7 +116,7 @@ public void unparse( SqlWriter writer, int leftPrec, int rightPrec ) {
 
     @Override
     public void execute( Context context, Statement statement, QueryParameters parameters ) {
-        CatalogTable catalogTable = getCatalogTable( context, table );
+        LogicalTable catalogTable = getCatalogTable( context, table );
         if ( catalogTable.entityType != EntityType.ENTITY ) {
             throw new RuntimeException( "Not possible to use ALTER TABLE because " + catalogTable.name + " is not a table." );
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyPlacementAddColumn.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyPlacementAddColumn.java
index a7279fd033..a808b0c018 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyPlacementAddColumn.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyPlacementAddColumn.java
@@ -23,7 +23,7 @@
 import java.util.Objects;
 import org.polypheny.db.adapter.DataStore;
 import org.polypheny.db.catalog.Catalog.EntityType;
-import org.polypheny.db.catalog.entity.CatalogTable;
+import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.catalog.exceptions.UnknownAdapterException;
 import org.polypheny.db.ddl.DdlManager;
 import org.polypheny.db.ddl.exception.ColumnNotExistsException;
@@ -94,7 +94,7 @@ public void unparse( SqlWriter writer, int leftPrec, int rightPrec ) {
 
     @Override
     public void execute( Context context, Statement statement, QueryParameters parameters ) {
-        CatalogTable catalogTable = getCatalogTable( context, table );
+        LogicalTable catalogTable = getCatalogTable( context, table );
         DataStore storeInstance = getDataStoreInstance( storeName );
 
         if ( catalogTable.entityType != EntityType.ENTITY ) {
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyPlacementDropColumn.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyPlacementDropColumn.java
index 36ccca492a..46043f402c 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyPlacementDropColumn.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyPlacementDropColumn.java
@@ -23,7 +23,7 @@
 import java.util.Objects;
 import org.polypheny.db.adapter.DataStore;
 import org.polypheny.db.catalog.Catalog.EntityType;
-import org.polypheny.db.catalog.entity.CatalogTable;
+import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.catalog.exceptions.UnknownAdapterException;
 import org.polypheny.db.ddl.DdlManager;
 import org.polypheny.db.ddl.exception.ColumnNotExistsException;
@@ -92,7 +92,7 @@ public void unparse( SqlWriter writer, int leftPrec, int rightPrec ) {
 
     @Override
     public void execute( Context context, Statement statement, QueryParameters parameters ) {
-        CatalogTable catalogTable = getCatalogTable( context, table );
+        LogicalTable catalogTable = getCatalogTable( context, table );
         DataStore storeInstance = getDataStoreInstance( storeName );
 
         if ( catalogTable.entityType != EntityType.ENTITY ) {
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableOwner.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableOwner.java
index d68de89896..0e68086aff 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableOwner.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableOwner.java
@@ -22,7 +22,7 @@
 import java.util.List;
 import java.util.Objects;
 import org.polypheny.db.catalog.Catalog.EntityType;
-import org.polypheny.db.catalog.entity.CatalogTable;
+import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.catalog.exceptions.UnknownUserException;
 import org.polypheny.db.ddl.DdlManager;
 import org.polypheny.db.languages.ParserPos;
@@ -79,7 +79,7 @@ public void unparse( SqlWriter writer, int leftPrec, int rightPrec ) {
 
     @Override
     public void execute( Context context, Statement statement, QueryParameters parameters ) {
-        CatalogTable catalogTable = getCatalogTable( context, table );
+        LogicalTable catalogTable = getCatalogTable( context, table );
         if ( catalogTable.entityType != EntityType.ENTITY ) {
             throw new RuntimeException( "Not possible to use ALTER TABLE because " + catalogTable.name + " is not a table." );
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableRename.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableRename.java
index bbdf67894d..4a7bcf6832 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableRename.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableRename.java
@@ -21,7 +21,7 @@
 import java.util.List;
 import java.util.Objects;
-import org.polypheny.db.catalog.entity.CatalogTable;
+import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.catalog.exceptions.EntityAlreadyExistsException;
 import org.polypheny.db.ddl.DdlManager;
 import org.polypheny.db.languages.ParserPos;
@@ -78,7 +78,7 @@ public void unparse( SqlWriter writer, int leftPrec, int rightPrec ) {
 
     @Override
     public void execute( Context context, Statement statement, QueryParameters parameters ) {
-        CatalogTable table = getCatalogTable( context, oldName );
+        LogicalTable table = getCatalogTable( context, oldName );
 
         if ( newName.names.size() != 1 ) {
             throw new RuntimeException( "No FQDN allowed here: " + newName.toString() );
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableRenameColumn.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableRenameColumn.java
index 742dc0cb64..5a715c6c9c 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableRenameColumn.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableRenameColumn.java
@@ -21,7 +21,7 @@
 import java.util.List;
 import java.util.Objects;
-import org.polypheny.db.catalog.entity.CatalogTable;
+import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.catalog.exceptions.ColumnAlreadyExistsException;
 import org.polypheny.db.ddl.DdlManager;
 import org.polypheny.db.ddl.exception.ColumnNotExistsException;
@@ -83,7 +83,7 @@ public void unparse( SqlWriter writer, int leftPrec, int rightPrec ) {
 
     @Override
     public void execute( Context context, Statement statement, QueryParameters parameters ) {
-        CatalogTable catalogTable = getCatalogTable( context, table );
+        LogicalTable catalogTable = getCatalogTable( context, table );
         try {
             DdlManager.getInstance().renameColumn( catalogTable, columnOldName.getSimple(), columnNewName.getSimple(), statement );
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/alterview/SqlAlterViewRename.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/alterview/SqlAlterViewRename.java
index 0ae63097e6..a6b98cc8f9 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/alterview/SqlAlterViewRename.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/alterview/SqlAlterViewRename.java
@@ -21,7 +21,7 @@
 import java.util.List;
 import java.util.Objects;
 import org.polypheny.db.catalog.Catalog.EntityType;
-import org.polypheny.db.catalog.entity.CatalogTable;
+import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.catalog.exceptions.EntityAlreadyExistsException;
 import org.polypheny.db.ddl.DdlManager;
 import org.polypheny.db.languages.ParserPos;
@@ -80,7 +80,7 @@ public void unparse( SqlWriter writer, int leftPrec, int rightPrec ) {
 
     @Override
     public void execute( Context context, Statement statement, QueryParameters parameters ) {
-        CatalogTable catalogTable = getCatalogTable( context, oldName );
+        LogicalTable catalogTable = getCatalogTable( context, oldName );
         if ( catalogTable.entityType != EntityType.VIEW ) {
             throw new RuntimeException( "Not Possible to use ALTER VIEW because " + catalogTable.name + " is not a View." );
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/alterview/SqlAlterViewRenameColumn.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/alterview/SqlAlterViewRenameColumn.java
index 2f8068d67c..b253a712a9 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/alterview/SqlAlterViewRenameColumn.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/alterview/SqlAlterViewRenameColumn.java
@@ -21,7 +21,7 @@
 import java.util.List;
 import java.util.Objects;
 import org.polypheny.db.catalog.Catalog.EntityType;
-import org.polypheny.db.catalog.entity.CatalogTable;
+import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.catalog.exceptions.ColumnAlreadyExistsException;
 import org.polypheny.db.ddl.DdlManager;
 import org.polypheny.db.ddl.exception.ColumnNotExistsException;
@@ -79,7 +79,7 @@ public void unparse( SqlWriter writer, int leftPrec, int rightPrec ) {
 
     @Override
     public void execute( Context context, Statement statement, QueryParameters parameters ) {
-        CatalogTable catalogTable = getCatalogTable( context, view );
+        LogicalTable catalogTable = getCatalogTable( context, view );
         if ( catalogTable.entityType != EntityType.VIEW ) {
             throw new RuntimeException( "Not Possible to use ALTER VIEW because " + catalogTable.name + " is not a View." );
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/EmptyScope.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/EmptyScope.java
index 2f3a41f724..0780b9784a 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/EmptyScope.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/EmptyScope.java
@@ -24,7 +24,7 @@
 import java.util.Map;
 import org.polypheny.db.algebra.constant.Monotonicity;
 import org.polypheny.db.algebra.type.AlgDataType;
-import org.polypheny.db.catalog.entity.CatalogTable;
+import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.nodes.validate.ValidatorTable;
 import org.polypheny.db.schema.PolyphenyDbSchema;
 import org.polypheny.db.sql.language.SqlCall;
@@ -93,7 +93,7 @@ public void resolveTable( List names, NameMatcher nameMatcher, Path path
         final List<Resolve> resolves = ((ResolvedImpl) resolved).resolves;
 
         // Look in the default schema, then default catalog, then root schema.
-        CatalogTable table = validator.catalogReader.getRootSchema().getTable( names );
+        LogicalTable table = validator.catalogReader.getRootSchema().getTable( names );
         if ( table != null ) {
             resolves.add( new Resolve( validator.catalogReader.getRootSchema().getTable( names ) ) );
         }
@@ -104,7 +104,7 @@ public void resolveTable( List names, NameMatcher nameMatcher, Path path
 
     private void resolve_( final PolyphenyDbSchema rootSchema, List<String> names, List<String> schemaNames, NameMatcher nameMatcher, Path path, Resolved resolved ) {
         final List<String> concat = ImmutableList.<String>builder().addAll( schemaNames ).addAll( names ).build();
-        CatalogTable table = rootSchema.getTable( concat );
+        LogicalTable table = rootSchema.getTable( concat );
         if ( table != null ) {
             resolved.found( table );
             return;
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorUtil.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorUtil.java
index 8e4013a7f4..de77010509 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorUtil.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorUtil.java
@@ -39,7 +39,7 @@
 import org.polypheny.db.algebra.type.AlgDataTypeFieldImpl;
 import org.polypheny.db.algebra.type.AlgDataTypeSystem;
 import org.polypheny.db.catalog.entity.CatalogCollection;
-import org.polypheny.db.catalog.entity.CatalogGraphDatabase;
+import org.polypheny.db.catalog.entity.logical.LogicalGraph;
 import org.polypheny.db.languages.OperatorRegistry;
 import org.polypheny.db.languages.ParserPos;
 import org.polypheny.db.nodes.Node;
@@ -619,7 +619,7 @@ public static boolean isTableRelational( SqlValidatorImpl validator ) {
             return false;
         }
         SqlIdentifier id = ((SqlIdentifier) validator.getTableScope().getNode());
-        CatalogGraphDatabase graph = validator.getCatalogReader().getRootSchema().getGraph( id.names );
+        LogicalGraph graph = validator.getCatalogReader().getRootSchema().getGraph( id.names );
         if ( graph != null ) {
             return false;
         }
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/sql2alg/SqlToAlgConverter.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/sql2alg/SqlToAlgConverter.java
index 1f5f6e6dd0..9db06aa288 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/sql2alg/SqlToAlgConverter.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/sql2alg/SqlToAlgConverter.java
@@ -94,6 +94,7 @@
 import org.polypheny.db.algebra.core.Sort;
 import org.polypheny.db.algebra.core.Uncollect;
 import org.polypheny.db.algebra.core.Values;
+import org.polypheny.db.algebra.core.common.Modify;
 import org.polypheny.db.algebra.fun.AggFunction;
 import org.polypheny.db.algebra.logical.relational.LogicalAggregate;
 import org.polypheny.db.algebra.logical.relational.LogicalCorrelate;
@@ -102,7 +103,7 @@
 import org.polypheny.db.algebra.logical.relational.LogicalJoin;
 import org.polypheny.db.algebra.logical.relational.LogicalMatch;
 import org.polypheny.db.algebra.logical.relational.LogicalMinus;
-import org.polypheny.db.algebra.logical.relational.LogicalModify;
+import org.polypheny.db.algebra.logical.relational.LogicalRelModify;
 import org.polypheny.db.algebra.logical.relational.LogicalProject;
 import org.polypheny.db.algebra.logical.relational.LogicalRelScan;
 import org.polypheny.db.algebra.logical.relational.LogicalRelViewScan;
@@ -123,7 +124,7 @@
 import org.polypheny.db.catalog.Catalog;
 import org.polypheny.db.catalog.Catalog.EntityType;
 import org.polypheny.db.catalog.Catalog.NamespaceType;
-import org.polypheny.db.catalog.entity.CatalogTable;
+import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.languages.NodeToAlgConverter;
 import org.polypheny.db.languages.OperatorRegistry;
 import org.polypheny.db.languages.ParserPos;
@@ -2161,7 +2162,7 @@ protected void convertCollectionTable( Blackboard bb, SqlCall call ) {
         if ( operator instanceof SqlUserDefinedTableMacro ) {
             final SqlUserDefinedTableMacro udf = (SqlUserDefinedTableMacro) operator;
             final TranslatableEntity table = udf.getTable( typeFactory, callBinding.sqlOperands() );
-            final CatalogTable catalogTable = Catalog.getInstance().getTable( table.getId() );
+            final LogicalTable catalogTable = Catalog.getInstance().getTable( table.getId() );
             final AlgDataType rowType = table.getRowType( typeFactory );
             AlgOptEntity algOptEntity = AlgOptEntityImpl.create( null, rowType, table, catalogTable, null );
             AlgNode converted = toAlg( algOptEntity );
@@ -2886,16 +2887,15 @@ private AlgNode createModify( AlgOptEntity targetTable, AlgNode source ) {
                     targetTable,
                     catalogReader,
                     source,
-                    LogicalModify.Operation.INSERT,
+                    Modify.Operation.INSERT,
                     null,
                     null,
                     false );
         }
-        return LogicalModify.create(
+        return LogicalRelModify.create(
                 targetTable,
-                catalogReader,
                 source,
-                LogicalModify.Operation.INSERT,
+                Modify.Operation.INSERT,
                 null,
                 null,
                 false );
@@ -3159,11 +3159,10 @@ protected void collectInsertTargets( SqlInsert call, final RexNode sourceRef, fi
     private AlgNode convertDelete( SqlDelete call ) {
         AlgOptEntity targetTable = getTargetTable( call );
         AlgNode sourceRel = convertSelect( call.getSourceSelect(), false );
-        return LogicalModify.create(
+        return LogicalRelModify.create(
                 targetTable,
-                catalogReader,
                 sourceRel,
-                LogicalModify.Operation.DELETE,
+                Modify.Operation.DELETE,
                 null,
                 null,
                 false );
@@ -3194,11 +3193,10 @@ private AlgNode convertUpdate( SqlUpdate call ) {
 
         AlgNode sourceRel = convertSelect( call.getSourceSelect(), false );
 
-        return LogicalModify.create(
+        return LogicalRelModify.create(
                 targetTable,
-                catalogReader,
                 sourceRel,
-                LogicalModify.Operation.UPDATE,
+                Modify.Operation.UPDATE,
                 targetColumnNameList,
                 rexNodeSourceExpressionListBuilder.build(),
                 false );
@@ -3271,11 +3269,10 @@ private AlgNode convertMerge( SqlMerge call ) {
         algBuilder.push( join ).project( projects );
 
-        return LogicalModify.create(
+        return LogicalRelModify.create(
                 targetTable,
-                catalogReader,
                 algBuilder.build(),
-                LogicalModify.Operation.MERGE,
+                Modify.Operation.MERGE,
                 targetColumnNameList,
                 null,
                 false );
String exportTableDefinitionAsJson( @NonNull CatalogTable catalogTable, boolean exportPrimaryKey, boolean exportDefaultValues ) { + public static String exportTableDefinitionAsJson( @NonNull LogicalTable catalogTable, boolean exportPrimaryKey, boolean exportDefaultValues ) { List columns = new LinkedList<>(); for ( CatalogColumn catalogColumn : Catalog.getInstance().getColumns( catalogTable.id ) ) { String defaultValue = null; diff --git a/plugins/sql-language/src/test/java/org/polypheny/db/sql/FrameworksTest.java b/plugins/sql-language/src/test/java/org/polypheny/db/sql/FrameworksTest.java index 1a74ab3a7e..c6fd42c3c3 100644 --- a/plugins/sql-language/src/test/java/org/polypheny/db/sql/FrameworksTest.java +++ b/plugins/sql-language/src/test/java/org/polypheny/db/sql/FrameworksTest.java @@ -43,9 +43,9 @@ import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.constant.ExplainFormat; import org.polypheny.db.algebra.constant.ExplainLevel; -import org.polypheny.db.algebra.core.Modify; +import org.polypheny.db.algebra.core.relational.RelModify; import org.polypheny.db.algebra.logical.relational.LogicalFilter; -import org.polypheny.db.algebra.logical.relational.LogicalModify; +import org.polypheny.db.algebra.logical.relational.LogicalRelModify; import org.polypheny.db.algebra.operators.OperatorName; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; @@ -411,8 +411,8 @@ public Enumerable scan( DataContext root, List filters, int[] @Override - public Modify toModificationAlg( AlgOptCluster cluster, AlgOptEntity table, Prepare.CatalogReader catalogReader, AlgNode child, Modify.Operation operation, List updateColumnList, List sourceExpressionList, boolean flattened ) { - return LogicalModify.create( table, catalogReader, child, operation, updateColumnList, sourceExpressionList, flattened ); + public RelModify toModificationAlg( AlgOptCluster cluster, AlgOptEntity table, Prepare.CatalogReader catalogReader, AlgNode child, RelModify.Operation operation, List updateColumnList, List sourceExpressionList, boolean flattened ) { + return LogicalRelModify.create( table, child, operation, updateColumnList, sourceExpressionList, flattened ); } diff --git a/plugins/sql-language/src/test/java/org/polypheny/db/sql/language/SqlToRelConverterExtendedTest.java b/plugins/sql-language/src/test/java/org/polypheny/db/sql/language/SqlToRelConverterExtendedTest.java index 63ce37ccf6..f52f0869e6 100644 --- a/plugins/sql-language/src/test/java/org/polypheny/db/sql/language/SqlToRelConverterExtendedTest.java +++ b/plugins/sql-language/src/test/java/org/polypheny/db/sql/language/SqlToRelConverterExtendedTest.java @@ -22,7 +22,7 @@ import org.junit.Before; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.AlgShuttleImpl; -import org.polypheny.db.algebra.core.Scan; +import org.polypheny.db.algebra.core.relational.RelScan; import org.polypheny.db.algebra.externalize.AlgJsonReader; import org.polypheny.db.algebra.externalize.AlgJsonWriter; import org.polypheny.db.plan.AlgOptSchema; @@ -64,7 +64,7 @@ public static void foo( AlgNode alg ) { final AlgOptSchema[] schemas = { null }; alg.accept( new AlgShuttleImpl() { @Override - public AlgNode visit( Scan scan ) { + public AlgNode visit( RelScan scan ) { schemas[0] = scan.getEntity().getRelOptSchema(); return super.visit( scan ); } diff --git a/plugins/sql-language/src/test/java/org/polypheny/db/sql/map/NamespaceToJsonMapperTest.java 
b/plugins/sql-language/src/test/java/org/polypheny/db/sql/map/NamespaceToJsonMapperTest.java index 51a1395e2a..8ab408ef7b 100644 --- a/plugins/sql-language/src/test/java/org/polypheny/db/sql/map/NamespaceToJsonMapperTest.java +++ b/plugins/sql-language/src/test/java/org/polypheny/db/sql/map/NamespaceToJsonMapperTest.java @@ -33,7 +33,7 @@ import org.polypheny.db.catalog.entity.CatalogKey; import org.polypheny.db.catalog.entity.CatalogKey.EnforcementTime; import org.polypheny.db.catalog.entity.CatalogSchema; -import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.entity.CatalogUser; import org.polypheny.db.partition.properties.PartitionProperty; import org.polypheny.db.sql.SqlLanguageDependent; @@ -51,7 +51,7 @@ public class NamespaceToJsonMapperTest extends SqlLanguageDependent { @Ignore @Test public void exportTest() { - CatalogTable catalogTable = new CatalogTable( + LogicalTable catalogTable = new LogicalTable( 4, "stores", ImmutableList.of(), diff --git a/plugins/sql-language/src/test/java/org/polypheny/db/sql/volcano/TraitPropagationTest.java b/plugins/sql-language/src/test/java/org/polypheny/db/sql/volcano/TraitPropagationTest.java index 4abcf666d7..0c784989d1 100644 --- a/plugins/sql-language/src/test/java/org/polypheny/db/sql/volcano/TraitPropagationTest.java +++ b/plugins/sql-language/src/test/java/org/polypheny/db/sql/volcano/TraitPropagationTest.java @@ -67,7 +67,7 @@ import org.polypheny.db.algebra.rules.SortRemoveRule; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; -import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.config.RuntimeConfig; import org.polypheny.db.languages.OperatorRegistry; import org.polypheny.db.languages.PolyphenyDbServerStatement; @@ -159,7 +159,7 @@ public Statistic getStatistic() { final AlgOptAbstractEntity t1 = new AlgOptAbstractEntity( algOptSchema, "t1", entity.getRowType( typeFactory ) ) { @Override - public CatalogTable getCatalogTable() { + public LogicalTable getCatalogTable() { return null; } diff --git a/webui/src/main/java/org/polypheny/db/webui/Crud.java b/webui/src/main/java/org/polypheny/db/webui/Crud.java index dd43324659..27e4859cea 100644 --- a/webui/src/main/java/org/polypheny/db/webui/Crud.java +++ b/webui/src/main/java/org/polypheny/db/webui/Crud.java @@ -116,7 +116,7 @@ import org.polypheny.db.catalog.entity.CatalogMaterializedView; import org.polypheny.db.catalog.entity.CatalogPrimaryKey; import org.polypheny.db.catalog.entity.CatalogSchema; -import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.entity.CatalogView; import org.polypheny.db.catalog.entity.MaterializedCriteria; import org.polypheny.db.catalog.entity.MaterializedCriteria.CriteriaType; @@ -300,7 +300,7 @@ Result getTable( final UIRequest request ) { } // determine if it is a view or a table - CatalogTable catalogTable; + LogicalTable catalogTable; try { catalogTable = catalog.getTable( this.databaseId, t[0], t[1] ); result.setNamespaceType( catalogTable.getNamespaceType() ); @@ -384,8 +384,8 @@ void getSchemaTree( final Context ctx ) { ArrayList tableTree = new ArrayList<>(); ArrayList viewTree = new ArrayList<>(); ArrayList collectionTree = new ArrayList<>(); - List tables = catalog.getTables( schema.id, null ); - for ( CatalogTable table : 
tables ) { + List tables = catalog.getTables( schema.id, null ); + for ( LogicalTable table : tables ) { String icon = "fa fa-table"; if ( table.entityType == EntityType.SOURCE ) { icon = "fa fa-plug"; @@ -471,9 +471,9 @@ void getTables( final Context ctx ) { } } - List tables = catalog.getTables( databaseId, new Catalog.Pattern( requestedSchema ), null ); + List tables = catalog.getTables( databaseId, new Catalog.Pattern( requestedSchema ), null ); ArrayList result = new ArrayList<>(); - for ( CatalogTable t : tables ) { + for ( LogicalTable t : tables ) { result.add( new DbTable( t.name, t.getNamespaceName(), t.modifiable, t.entityType ) ); } ctx.json( result ); @@ -947,7 +947,7 @@ public static String uiValueToSql( final String value, final PolyType type, fina private String computeWherePK( final String tableName, final String columnName, final Map filter ) { StringJoiner joiner = new StringJoiner( " AND ", "", "" ); Map catalogColumns = getCatalogColumns( tableName, columnName ); - CatalogTable catalogTable; + LogicalTable catalogTable; try { catalogTable = catalog.getTable( databaseId, tableName, columnName ); CatalogPrimaryKey pk = catalog.getPrimaryKey( catalogTable.primaryKey ); @@ -1122,7 +1122,7 @@ void getColumns( final Context ctx ) { ArrayList cols = new ArrayList<>(); try { - CatalogTable catalogTable = catalog.getTable( databaseId, t[0], t[1] ); + LogicalTable catalogTable = catalog.getTable( databaseId, t[0], t[1] ); ArrayList primaryColumns; if ( catalogTable.primaryKey != null ) { CatalogPrimaryKey primaryKey = catalog.getPrimaryKey( catalogTable.primaryKey ); @@ -1167,7 +1167,7 @@ void getColumns( final Context ctx ) { void getDataSourceColumns( final Context ctx ) throws UnknownDatabaseException, UnknownTableException, UnknownSchemaException { UIRequest request = ctx.bodyAsClass( UIRequest.class ); - CatalogTable catalogTable = catalog.getTable( "APP", request.getSchemaName(), request.getTableName() ); + LogicalTable catalogTable = catalog.getTable( "APP", request.getSchemaName(), request.getTableName() ); if ( catalogTable.entityType == EntityType.VIEW ) { ImmutableMap> underlyingTable = ((CatalogView) catalogTable).getUnderlyingTables(); @@ -1225,7 +1225,7 @@ void getDataSourceColumns( final Context ctx ) throws UnknownDatabaseException, void getAvailableSourceColumns( final Context ctx ) throws UnknownDatabaseException, UnknownTableException, UnknownSchemaException { UIRequest request = ctx.bodyAsClass( UIRequest.class ); - CatalogTable table = catalog.getTable( "APP", request.getSchemaName(), request.getTableName() ); + LogicalTable table = catalog.getTable( "APP", request.getSchemaName(), request.getTableName() ); ImmutableMap> placements = catalog.getColumnPlacementsByAdapter( table.id ); Set adapterIds = placements.keySet(); if ( adapterIds.size() > 1 ) { @@ -1268,7 +1268,7 @@ void getAvailableSourceColumns( final Context ctx ) throws UnknownDatabaseExcept void getMaterializedInfo( final Context ctx ) throws UnknownDatabaseException, UnknownTableException, UnknownSchemaException { EditTableRequest request = ctx.bodyAsClass( EditTableRequest.class ); - CatalogTable catalogTable = catalog.getTable( databaseId, request.schema, request.table ); + LogicalTable catalogTable = catalog.getTable( databaseId, request.schema, request.table ); if ( catalogTable.entityType == EntityType.MATERIALIZED_VIEW ) { CatalogMaterializedView catalogMaterializedView = (CatalogMaterializedView) catalogTable; @@ -1592,7 +1592,7 @@ void getConstraints( final Context ctx ) { Map> temp = new 
HashMap<>(); try { - CatalogTable catalogTable = catalog.getTable( databaseId, t[0], t[1] ); + LogicalTable catalogTable = catalog.getTable( databaseId, t[0], t[1] ); // get primary key if ( catalogTable.primaryKey != null ) { @@ -1750,7 +1750,7 @@ void getIndexes( final Context ctx ) { EditTableRequest request = ctx.bodyAsClass( EditTableRequest.class ); Result result; try { - CatalogTable catalogTable = catalog.getTable( databaseId, request.schema, request.table ); + LogicalTable catalogTable = catalog.getTable( databaseId, request.schema, request.table ); List catalogIndexes = catalog.getIndexes( catalogTable.id, false ); DbColumn[] header = { @@ -1878,7 +1878,7 @@ void getUnderlyingTable( final Context ctx ) throws UnknownDatabaseException, Un UIRequest request = ctx.bodyAsClass( UIRequest.class ); - CatalogTable catalogTable = catalog.getTable( "APP", request.getSchemaName(), request.getTableName() ); + LogicalTable catalogTable = catalog.getTable( "APP", request.getSchemaName(), request.getTableName() ); if ( catalogTable.entityType == EntityType.VIEW ) { ImmutableMap> underlyingTableOriginal = ((CatalogView) catalogTable).getUnderlyingTables(); @@ -1910,7 +1910,7 @@ private Placement getPlacements( final Index index ) { String schemaName = index.getSchema(); String tableName = index.getTable(); try { - CatalogTable table = catalog.getTable( databaseId, schemaName, tableName ); + LogicalTable table = catalog.getTable( databaseId, schemaName, tableName ); Placement p = new Placement( table.partitionProperty.isPartitioned, catalog.getPartitionGroupNames( table.id ), table.entityType ); if ( table.entityType == EntityType.VIEW ) { @@ -2502,8 +2502,8 @@ void getUml( final Context ctx ) { ArrayList fKeys = new ArrayList<>(); ArrayList tables = new ArrayList<>(); - List catalogEntities = catalog.getTables( databaseId, new Catalog.Pattern( request.schema ), null ); - for ( CatalogTable catalogTable : catalogEntities ) { + List catalogEntities = catalog.getTables( databaseId, new Catalog.Pattern( request.schema ), null ); + for ( LogicalTable catalogTable : catalogEntities ) { if ( catalogTable.entityType == EntityType.ENTITY || catalogTable.entityType == EntityType.SOURCE ) { // get foreign keys List foreignKeys = catalog.getForeignKeys( catalogTable.id ); @@ -3178,7 +3178,7 @@ public static Result executeSqlSelect( final Statement statement, final UIReques } EntityType entityType = null; - CatalogTable catalogTable = null; + LogicalTable catalogTable = null; if ( request.tableId != null ) { String[] t = request.tableId.split( "\\." 
); try { @@ -3589,7 +3589,7 @@ public static Transaction getTransaction( boolean analyze, boolean useCache, Cru private Map getCatalogColumns( String schemaName, String tableName ) { Map dataTypes = new HashMap<>(); try { - CatalogTable table = catalog.getTable( this.databaseId, schemaName, tableName ); + LogicalTable table = catalog.getTable( this.databaseId, schemaName, tableName ); List catalogColumns = catalog.getColumns( table.id ); for ( CatalogColumn catalogColumn : catalogColumns ) { dataTypes.put( catalogColumn.name, catalogColumn ); diff --git a/webui/src/main/java/org/polypheny/db/webui/crud/LanguageCrud.java b/webui/src/main/java/org/polypheny/db/webui/crud/LanguageCrud.java index e8268e0775..4a675d3f55 100644 --- a/webui/src/main/java/org/polypheny/db/webui/crud/LanguageCrud.java +++ b/webui/src/main/java/org/polypheny/db/webui/crud/LanguageCrud.java @@ -40,10 +40,10 @@ import org.polypheny.db.catalog.entity.CatalogCollection; import org.polypheny.db.catalog.entity.CatalogCollectionPlacement; import org.polypheny.db.catalog.entity.CatalogColumn; -import org.polypheny.db.catalog.entity.CatalogGraphDatabase; +import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.catalog.entity.CatalogGraphPlacement; import org.polypheny.db.catalog.entity.CatalogSchema; -import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.exceptions.UnknownCollectionException; import org.polypheny.db.catalog.exceptions.UnknownColumnException; import org.polypheny.db.catalog.exceptions.UnknownDatabaseException; @@ -190,7 +190,7 @@ public static Result getResult( QueryLanguage language, Statement statement, Que boolean hasMoreRows = implementation.hasMoreRows(); - CatalogTable catalogTable = null; + LogicalTable catalogTable = null; if ( request.tableId != null ) { String[] t = request.tableId.split( "\\." ); try { @@ -324,12 +324,12 @@ public void getGraphPlacements( final Context ctx ) { private Placement getPlacements( final Index index ) { Catalog catalog = Catalog.getInstance(); String graphName = index.getSchema(); - List graphs = catalog.getGraphs( Catalog.defaultDatabaseId, new Pattern( graphName ) ); + List graphs = catalog.getGraphs( Catalog.defaultDatabaseId, new Pattern( graphName ) ); if ( graphs.size() != 1 ) { log.error( "The requested graph does not exist." ); return new Placement( new RuntimeException( "The requested graph does not exist." 
) ); } - CatalogGraphDatabase graph = graphs.get( 0 ); + LogicalGraph graph = graphs.get( 0 ); EntityType type = EntityType.ENTITY; Placement p = new Placement( false, List.of(), EntityType.ENTITY ); if ( type == EntityType.VIEW ) { From 8fd352ca9f78220017b28dac607ab8e10a99dae4 Mon Sep 17 00:00:00 2001 From: datomo Date: Sun, 26 Feb 2023 00:17:11 +0100 Subject: [PATCH 023/436] temporarily fixing catalog calls, adding boilerplate for snapshot functions --- .../org/polypheny/db/PolyImplementation.java | 2 +- .../org/polypheny/db/adapter/Adapter.java | 6 +- .../org/polypheny/db/adapter/DataStore.java | 12 +- .../db/adapter/enumerable/EnumerableScan.java | 59 +-- .../enumerable/EnumerableScanRule.java | 4 +- .../EnumerableTableModifyToStreamerRule.java | 4 +- .../adapter/java/AbstractQueryableEntity.java | 4 +- .../db/adapter/java/ReflectiveSchema.java | 5 +- .../db/algebra/AlgHomogeneousShuttle.java | 2 +- .../org/polypheny/db/algebra/AlgNode.java | 2 +- .../org/polypheny/db/algebra/AlgShuttle.java | 2 +- .../polypheny/db/algebra/AlgShuttleImpl.java | 2 +- .../algebra/UnsupportedFromInsertShuttle.java | 9 +- .../db/algebra/core/AlgFactories.java | 2 +- .../algebra/core/document/DocumentScan.java | 2 +- .../algebra/core/document/DocumentValues.java | 2 +- .../common/LogicalConstraintEnforcer.java | 11 +- .../logical/common/LogicalStreamer.java | 10 +- .../document/LogicalDocumentModify.java | 14 +- .../logical/document/LogicalDocumentScan.java | 7 +- .../document/LogicalDocumentValues.java | 2 +- .../db/algebra/logical/lpg/LogicalGraph.java | 3 +- .../algebra/logical/lpg/LogicalLpgModify.java | 9 +- .../algebra/logical/lpg/LogicalLpgScan.java | 4 +- .../algebra/logical/lpg/LogicalLpgValues.java | 2 +- .../logical/relational/LogicalRelScan.java | 1 - .../db/algebra/metadata/AlgColumnOrigin.java | 13 +- .../db/algebra/metadata/AlgMdCollation.java | 10 +- .../algebra/metadata/AlgMdColumnOrigins.java | 9 +- .../metadata/AlgMdColumnUniqueness.java | 2 +- .../algebra/metadata/AlgMdDistribution.java | 5 +- .../metadata/AlgMdTableReferences.java | 2 +- .../db/algebra/metadata/AlgMetadataQuery.java | 3 +- .../db/algebra/mutable/MutableAlgs.java | 1 - .../db/algebra/mutable/MutableScan.java | 8 +- .../algebra/mutable/MutableTableModify.java | 20 +- .../db/algebra/rules/FilterScanRule.java | 7 +- .../algebra/rules/LoptSemiJoinOptimizer.java | 14 +- .../db/algebra/rules/ProjectScanRule.java | 10 +- .../polypheny/db/algebra/rules/ScanRule.java | 5 +- .../db/algebra/stream/StreamRules.java | 19 +- .../org/polypheny/db/catalog/Catalog.java | 494 +----------------- .../db/catalog/entity/CatalogAdapter.java | 2 +- .../db/catalog/entity/CatalogColumn.java | 4 +- .../entity/CatalogColumnPlacement.java | 2 +- .../db/catalog/entity/CatalogConstraint.java | 2 +- .../catalog/entity/CatalogDataPlacement.java | 4 +- .../db/catalog/entity/CatalogEntity.java | 22 +- .../db/catalog/entity/CatalogForeignKey.java | 2 +- .../db/catalog/entity/CatalogIndex.java | 2 +- .../entity/CatalogMaterializedView.java | 2 +- .../db/catalog/entity/CatalogNamespace.java | 2 +- .../entity/CatalogPartitionPlacement.java | 4 +- .../db/catalog/entity/CatalogSchema.java | 4 +- .../db/catalog/entity/CatalogView.java | 2 +- ...Collection.java => LogicalCollection.java} | 21 +- .../allocation/AllocationCollection.java | 36 ++ .../entity/allocation/AllocationGraph.java | 2 + .../catalog/entity/logical/LogicalGraph.java | 6 +- .../catalog/entity/logical/LogicalTable.java | 19 +- .../db/catalog/entity/physical/Physical.java | 6 +-
.../entity/physical/PhysicalCollection.java | 36 ++ .../entity/physical/PhysicalGraph.java | 4 +- .../entity/physical/PhysicalTable.java | 4 +- .../db/catalog/logistic/Collation.java | 64 +++ .../db/catalog/logistic/ConstraintType.java | 56 ++ .../catalog/logistic/DataPlacementRole.java | 58 ++ .../db/catalog/logistic/EntityType.java | 76 +++ .../db/catalog/logistic/ForeignKeyOption.java | 68 +++ .../db/catalog/logistic/IndexType.java | 58 ++ .../catalog/{ => logistic}/NameGenerator.java | 4 +- .../db/catalog/logistic/NamespaceType.java | 84 +++ .../db/catalog/logistic/PartitionType.java | 62 +++ .../db/catalog/logistic/Pattern.java | 51 ++ .../db/catalog/logistic/PlacementType.java | 61 +++ .../db/catalog/refactor/Expressible.java | 25 + .../db/catalog/refactor/FilterableEntity.java | 21 + .../refactor/ProjectableFilterableEntity.java | 21 + .../db/catalog/refactor/QueryableEntity.java | 3 + .../db/catalog/refactor/ScannableEntity.java | 21 + .../java/org/polypheny/db/ddl/DdlManager.java | 16 +- .../polypheny/db/interpreter/ScanNode.java | 28 +- .../db/languages/LanguageManager.java | 2 +- .../polypheny/db/languages/QueryLanguage.java | 2 +- .../db/languages/QueryParameters.java | 2 +- .../db/partition/PartitionManagerFactory.java | 4 +- .../properties/PartitionProperty.java | 2 +- .../TemperaturePartitionProperty.java | 2 +- .../db/plan/AlgOptAbstractEntity.java | 2 +- .../org/polypheny/db/plan/AlgOptCluster.java | 24 +- .../org/polypheny/db/plan/AlgOptUtil.java | 7 +- .../db/prepare/AlgOptEntityImpl.java | 8 +- .../db/prepare/LixToAlgTranslator.java | 6 +- .../org/polypheny/db/prepare/PlannerImpl.java | 3 +- .../db/prepare/PolyphenyDbCatalogReader.java | 4 +- .../db/prepare/PolyphenyDbPrepareImpl.java | 14 +- .../db/prepare/QueryableAlgBuilder.java | 8 +- .../db/processing/DeepCopyShuttle.java | 2 +- .../processing/ExtendedQueryParameters.java | 2 +- .../processing/LogicalAlgAnalyzeShuttle.java | 27 +- .../polypheny/db/rex/RexTableInputRef.java | 15 +- .../java/org/polypheny/db/schema/Entity.java | 2 +- .../db/schema/LogicalCollection.java | 2 +- .../polypheny/db/schema/LogicalEntity.java | 55 +- .../polypheny/db/schema/LogicalRelView.java | 2 +- .../org/polypheny/db/schema/ModelTrait.java | 2 +- .../db/schema/PolySchemaBuilder.java | 10 +- .../db/schema/PolyphenyDbSchema.java | 74 ++- .../polypheny/db/schema/QueryableEntity.java | 4 +- .../org/polypheny/db/schema/SchemaPlus.java | 2 +- .../java/org/polypheny/db/schema/Schemas.java | 2 +- .../org/polypheny/db/tools/AlgBuilder.java | 2 +- .../polypheny/db/tools/RoutedAlgBuilder.java | 7 +- .../db/view/MaterializedViewManager.java | 4 +- .../org/polypheny/db/view/ViewManager.java | 17 +- .../org/polypheny/db/catalog/MockCatalog.java | 15 +- .../db/catalog/MockCatalogReader.java | 5 +- .../java/org/polypheny/db/PolyphenyDb.java | 2 +- .../org/polypheny/db/ddl/DdlManagerImpl.java | 46 +- .../db/partition/FrequencyMapImpl.java | 6 +- .../PartitionManagerFactoryImpl.java | 4 +- .../db/processing/AbstractQueryProcessor.java | 2 +- .../processing/ConstraintEnforceAttacher.java | 18 +- .../db/processing/DataMigratorImpl.java | 8 +- .../db/routing/routers/AbstractDqlRouter.java | 4 +- .../db/routing/routers/BaseRouter.java | 56 +- .../db/routing/routers/DmlRouterImpl.java | 120 ++--- .../db/transaction/EntityAccessMap.java | 2 +- .../db/view/MaterializedViewManagerImpl.java | 4 +- .../java/org/polypheny/db/cypher/DdlTest.java | 2 +- .../db/misc/HorizontalPartitioningTest.java | 4 +- .../db/misc/VerticalPartitioningTest.java | 2 +- 
.../java/org/polypheny/db/mql/DdlTest.java | 8 +- .../db/statistics/StatisticsTest.java | 2 +- .../statistics/DashboardInformation.java | 2 +- .../statistics/StatisticQueryProcessor.java | 4 +- .../monitoring/statistics/StatisticTable.java | 4 +- .../statistics/StatisticsManagerImpl.java | 4 +- .../org/polypheny/db/avatica/DbmsMeta.java | 8 +- .../db/avatica/PolyphenyDbSignature.java | 2 +- .../adapter/cottontail/CottontailEntity.java | 3 +- .../polypheny/db/cql/CqlLanguagePlugin.java | 2 +- .../db/adapter/csv/CsvTranslatableTable.java | 6 +- .../db/cypher/CypherLanguagePlugin.java | 2 +- .../db/cypher/CypherProcessorImpl.java | 2 +- .../admin/CypherAlterDatabaseAlias.java | 2 +- .../admin/CypherCreateDatabaseAlias.java | 2 +- .../db/cypher/admin/CypherDropAlias.java | 2 +- .../db/cypher/admin/CypherDropDatabase.java | 2 +- .../db/cypher/ddl/CypherAddPlacement.java | 2 +- .../db/cypher/ddl/CypherDropPlacement.java | 2 +- .../elasticsearch/ElasticsearchEntity.java | 3 +- .../adapter/file/FileTranslatableEntity.java | 3 +- .../db/adapter/geode/algebra/GeodeEntity.java | 3 +- .../db/hsqldb/stores/HsqldbStore.java | 2 +- .../polypheny/db/adapter/html/HtmlEntity.java | 4 +- .../polypheny/db/adapter/jdbc/JdbcEntity.java | 3 +- .../jdbc/rel2sql/AlgToSqlConverterTest.java | 2 +- .../db/adapter/jdbc/rel2sql/PlannerTest.java | 2 +- .../rel2sql/RelToSqlConverterStructsTest.java | 2 +- .../org/polypheny/db/catalog/CatalogImpl.java | 38 +- .../org/polypheny/db/test/CatalogTest.java | 12 +- .../db/adapter/mongodb/MongoAlg.java | 2 +- .../db/adapter/mongodb/MongoEntity.java | 9 +- .../db/adapter/mongodb/MongoPlugin.java | 10 +- .../db/adapter/mongodb/MongoRules.java | 6 +- .../db/adapter/mongodb/MongoSchema.java | 4 +- .../db/languages/MongoLanguagePlugin.java | 2 +- .../db/languages/MqlProcessorImpl.java | 2 +- .../db/languages/mql/MqlAddPlacement.java | 6 +- .../db/languages/mql/MqlCreateCollection.java | 2 +- .../db/languages/mql/MqlCreateView.java | 2 +- .../db/languages/mql/MqlDeletePlacement.java | 6 +- .../polypheny/db/languages/mql/MqlDrop.java | 6 +- .../db/languages/mql/MqlQueryParameters.java | 2 +- .../db/languages/mql/MqlUseDatabase.java | 2 +- .../languages/mql2alg/MqlToAlgConverter.java | 2 +- .../polypheny/db/mql/mql2alg/Mql2AlgTest.java | 4 +- .../db/mql/mql2alg/MqlMockCatalog.java | 1 + .../db/adapter/neo4j/Neo4jPlugin.java | 2 +- .../org/polypheny/db/PigLanguagePlugin.java | 2 +- .../org/polypheny/db/tools/PigAlgBuilder.java | 2 +- .../polypheny/db/catalog/CatalogPlugin.java | 2 +- .../org/polypheny/db/catalog/NCatalog.java | 3 +- .../org/polypheny/db/catalog/PolyCatalog.java | 2 +- .../catalog/allocation/AllocationCatalog.java | 2 +- .../logical/document/DocumentCatalog.java | 2 +- .../catalog/logical/graph/GraphCatalog.java | 2 +- .../logical/relational/RelationalCatalog.java | 2 +- .../logical/LogicalDocumentSnapshot.java | 2 +- .../snapshot/logical/LogicalFullSnapshot.java | 2 +- .../logical/LogicalGraphSnapshot.java | 2 +- .../logical/LogicalRelationalSnapshot.java | 2 +- .../snapshot/logical/LogicalSnapshot.java | 2 +- .../java/org/polypheny/db/restapi/Rest.java | 6 +- .../polypheny/db/sql/SqlLanguagePlugin.java | 2 +- .../polypheny/db/sql/SqlProcessorImpl.java | 4 +- .../polypheny/db/sql/language/SqlInsert.java | 2 +- .../language/ddl/SqlColumnDeclaration.java | 2 +- .../ddl/SqlCreateMaterializedView.java | 2 +- .../db/sql/language/ddl/SqlCreateSchema.java | 2 +- .../db/sql/language/ddl/SqlCreateTable.java | 2 +- .../db/sql/language/ddl/SqlCreateView.java | 2 +- 
.../db/sql/language/ddl/SqlDdlNodes.java | 2 +- .../language/ddl/SqlDropMaterializedView.java | 2 +- .../db/sql/language/ddl/SqlDropView.java | 2 +- .../db/sql/language/ddl/SqlKeyConstraint.java | 2 +- .../SqlAlterMaterializedViewAddIndex.java | 2 +- .../SqlAlterMaterializedViewDropIndex.java | 2 +- ...lAlterMaterializedViewFreshnessManual.java | 2 +- .../SqlAlterMaterializedViewRename.java | 2 +- .../SqlAlterMaterializedViewRenameColumn.java | 2 +- .../SqlAlterSourceTableAddColumn.java | 2 +- .../altertable/SqlAlterTableAddColumn.java | 2 +- .../SqlAlterTableAddForeignKey.java | 4 +- .../ddl/altertable/SqlAlterTableAddIndex.java | 2 +- .../SqlAlterTableAddPartitions.java | 6 +- .../altertable/SqlAlterTableAddPlacement.java | 2 +- .../SqlAlterTableAddPrimaryKey.java | 2 +- .../SqlAlterTableAddUniqueConstraint.java | 2 +- .../altertable/SqlAlterTableDropColumn.java | 2 +- .../SqlAlterTableDropConstraint.java | 2 +- .../SqlAlterTableDropForeignKey.java | 2 +- .../altertable/SqlAlterTableDropIndex.java | 2 +- .../SqlAlterTableDropPlacement.java | 2 +- .../SqlAlterTableDropPrimaryKey.java | 2 +- .../SqlAlterTableMergePartitions.java | 6 +- .../altertable/SqlAlterTableModifyColumn.java | 4 +- .../SqlAlterTableModifyPartitions.java | 2 +- .../SqlAlterTableModifyPlacement.java | 2 +- ...SqlAlterTableModifyPlacementAddColumn.java | 2 +- ...qlAlterTableModifyPlacementDropColumn.java | 2 +- .../ddl/altertable/SqlAlterTableOwner.java | 2 +- .../ddl/alterview/SqlAlterViewRename.java | 2 +- .../alterview/SqlAlterViewRenameColumn.java | 2 +- .../language/validate/SqlValidatorImpl.java | 2 +- .../language/validate/SqlValidatorUtil.java | 4 +- .../db/sql/sql2alg/SqlToAlgConverter.java | 4 +- .../org/polypheny/db/sql/AlgWriterTest.java | 2 +- .../org/polypheny/db/sql/FrameworksTest.java | 6 +- .../org/polypheny/db/sql/InterpreterTest.java | 2 +- .../org/polypheny/db/sql/PlannerTest.java | 2 +- .../polypheny/db/sql/SortRemoveRuleTest.java | 2 +- .../db/sql/language/SqlToAlgTestBase.java | 2 +- .../SqlToRelConverterExtendedTest.java | 2 +- .../validate/LexCaseSensitiveTest.java | 2 +- .../db/sql/map/NamespaceToJsonMapperTest.java | 4 +- .../db/sql/util/PlannerImplMock.java | 2 +- .../db/sql/volcano/PlannerTests.java | 2 +- .../db/sql/volcano/TraitPropagationTest.java | 2 +- .../java/org/polypheny/db/webui/Crud.java | 26 +- .../polypheny/db/webui/crud/LanguageCrud.java | 12 +- .../polypheny/db/webui/models/DbTable.java | 2 +- .../polypheny/db/webui/models/Placement.java | 6 +- .../org/polypheny/db/webui/models/Result.java | 2 +- .../org/polypheny/db/webui/models/Schema.java | 2 +- .../db/webui/models/SidebarElement.java | 2 +- .../models/requests/PartitioningRequest.java | 2 +- .../models/requests/SchemaTreeRequest.java | 2 +- 259 files changed, 1598 insertions(+), 1222 deletions(-) rename core/src/main/java/org/polypheny/db/catalog/entity/{CatalogCollection.java => LogicalCollection.java} (77%) create mode 100644 core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationCollection.java create mode 100644 core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalCollection.java create mode 100644 core/src/main/java/org/polypheny/db/catalog/logistic/Collation.java create mode 100644 core/src/main/java/org/polypheny/db/catalog/logistic/ConstraintType.java create mode 100644 core/src/main/java/org/polypheny/db/catalog/logistic/DataPlacementRole.java create mode 100644 core/src/main/java/org/polypheny/db/catalog/logistic/EntityType.java create mode 100644 
core/src/main/java/org/polypheny/db/catalog/logistic/ForeignKeyOption.java create mode 100644 core/src/main/java/org/polypheny/db/catalog/logistic/IndexType.java rename core/src/main/java/org/polypheny/db/catalog/{ => logistic}/NameGenerator.java (94%) create mode 100644 core/src/main/java/org/polypheny/db/catalog/logistic/NamespaceType.java create mode 100644 core/src/main/java/org/polypheny/db/catalog/logistic/PartitionType.java create mode 100644 core/src/main/java/org/polypheny/db/catalog/logistic/Pattern.java create mode 100644 core/src/main/java/org/polypheny/db/catalog/logistic/PlacementType.java create mode 100644 core/src/main/java/org/polypheny/db/catalog/refactor/Expressible.java create mode 100644 core/src/main/java/org/polypheny/db/catalog/refactor/FilterableEntity.java create mode 100644 core/src/main/java/org/polypheny/db/catalog/refactor/ProjectableFilterableEntity.java create mode 100644 core/src/main/java/org/polypheny/db/catalog/refactor/ScannableEntity.java diff --git a/core/src/main/java/org/polypheny/db/PolyImplementation.java b/core/src/main/java/org/polypheny/db/PolyImplementation.java index a7ff5fc30b..b56b107ba6 100644 --- a/core/src/main/java/org/polypheny/db/PolyImplementation.java +++ b/core/src/main/java/org/polypheny/db/PolyImplementation.java @@ -42,7 +42,7 @@ import org.polypheny.db.adapter.DataContext; import org.polypheny.db.algebra.constant.Kind; import org.polypheny.db.algebra.type.AlgDataType; -import org.polypheny.db.catalog.Catalog.NamespaceType; +import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.interpreter.BindableConvention; import org.polypheny.db.monitoring.events.StatementEvent; import org.polypheny.db.plan.AlgOptUtil; diff --git a/core/src/main/java/org/polypheny/db/adapter/Adapter.java b/core/src/main/java/org/polypheny/db/adapter/Adapter.java index 5759dd88fa..191d65561e 100644 --- a/core/src/main/java/org/polypheny/db/adapter/Adapter.java +++ b/core/src/main/java/org/polypheny/db/adapter/Adapter.java @@ -47,8 +47,8 @@ import lombok.experimental.Accessors; import org.polypheny.db.adapter.DeployMode.DeploySetting; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.Catalog.NamespaceType; -import org.polypheny.db.catalog.entity.CatalogCollection; +import org.polypheny.db.catalog.logistic.NamespaceType; +import org.polypheny.db.catalog.entity.LogicalCollection; import org.polypheny.db.catalog.entity.CatalogCollectionPlacement; import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; import org.polypheny.db.catalog.entity.logical.LogicalTable; @@ -333,7 +333,7 @@ public void createGraphNamespace( PhysicalGraph graph ) { } - public Entity createDocumentSchema( CatalogCollection catalogEntity, CatalogCollectionPlacement partitionPlacement ) { + public Entity createDocumentSchema( LogicalCollection catalogEntity, CatalogCollectionPlacement partitionPlacement ) { throw new UnsupportedOperationException( "It is not supported to create a document with this adapter." 
); } diff --git a/core/src/main/java/org/polypheny/db/adapter/DataStore.java b/core/src/main/java/org/polypheny/db/adapter/DataStore.java index 712d91c354..85487379e6 100644 --- a/core/src/main/java/org/polypheny/db/adapter/DataStore.java +++ b/core/src/main/java/org/polypheny/db/adapter/DataStore.java @@ -28,9 +28,9 @@ import lombok.extern.slf4j.Slf4j; import org.pf4j.ExtensionPoint; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.Catalog.NamespaceType; +import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.catalog.entity.CatalogAdapter.AdapterType; -import org.polypheny.db.catalog.entity.CatalogCollection; +import org.polypheny.db.catalog.entity.LogicalCollection; import org.polypheny.db.catalog.entity.CatalogCollectionMapping; import org.polypheny.db.catalog.entity.CatalogColumn; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; @@ -156,7 +156,7 @@ private void dropGraphSubstitution( Context context, CatalogGraphPlacement graph * It comes with a substitution method called by default and should be overwritten if the inheriting {@link DataStore} * supports the document data model natively. */ - public void createCollection( Context prepareContext, CatalogCollection catalogCollection, long adapterId ) { + public void createCollection( Context prepareContext, LogicalCollection catalogCollection, long adapterId ) { // overwrite this if the datastore supports the document model createCollectionSubstitution( prepareContext, catalogCollection ); } @@ -166,7 +166,7 @@ public void createCollection( Context prepareContext, CatalogCollection catalogC * Substitution method, which is used to handle the {@link DataStore} required operations * as if the data model were {@link NamespaceType#RELATIONAL}. */ - private void createCollectionSubstitution( Context prepareContext, CatalogCollection catalogCollection ) { + private void createCollectionSubstitution( Context prepareContext, LogicalCollection catalogCollection ) { Catalog catalog = Catalog.getInstance(); CatalogCollectionMapping mapping = catalog.getCollectionMapping( catalogCollection.id ); @@ -180,7 +180,7 @@ private void createCollectionSubstitution( Context prepareContext, CatalogCollec * It comes with a substitution method called by default and should be overwritten if the inheriting {@link DataStore} * supports the document data model natively. */ - public void dropCollection( Context prepareContext, CatalogCollection catalogCollection ) { + public void dropCollection( Context prepareContext, LogicalCollection catalogCollection ) { // overwrite this if the datastore supports the document model dropCollectionSubstitution( prepareContext, catalogCollection ); } @@ -190,7 +190,7 @@ public void dropCollection( Context prepareContext, CatalogCollectionCol * Substitution method, which is used to handle the {@link DataStore} required operations * as if the data model were {@link NamespaceType#RELATIONAL}.
*/ - private void dropCollectionSubstitution( Context prepareContext, CatalogCollection catalogCollection ) { + private void dropCollectionSubstitution( Context prepareContext, LogicalCollection catalogCollection ) { Catalog catalog = Catalog.getInstance(); CatalogCollectionMapping mapping = catalog.getCollectionMapping( catalogCollection.id ); diff --git a/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableScan.java b/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableScan.java index ca85e07181..417dd90784 100644 --- a/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableScan.java +++ b/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableScan.java @@ -34,7 +34,6 @@ package org.polypheny.db.adapter.enumerable; -import com.google.common.collect.ImmutableList; import java.lang.reflect.Type; import java.util.ArrayList; import java.util.List; @@ -56,18 +55,19 @@ import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.entity.CatalogEntity; +import org.polypheny.db.catalog.entity.physical.PhysicalTable; +import org.polypheny.db.catalog.refactor.FilterableEntity; +import org.polypheny.db.catalog.refactor.ProjectableFilterableEntity; +import org.polypheny.db.catalog.refactor.QueryableEntity; +import org.polypheny.db.catalog.refactor.ScannableEntity; import org.polypheny.db.interpreter.Row; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgOptCost; -import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptPlanner; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.plan.volcano.VolcanoCost; import org.polypheny.db.schema.Entity; -import org.polypheny.db.schema.FilterableEntity; -import org.polypheny.db.schema.ProjectableFilterableEntity; -import org.polypheny.db.schema.QueryableEntity; -import org.polypheny.db.schema.ScannableEntity; import org.polypheny.db.schema.StreamableEntity; import org.polypheny.db.util.BuiltInMethod; @@ -75,9 +75,9 @@ /** * Implementation of {@link RelScan} in {@link EnumerableConvention enumerable calling convention}. */ -public class EnumerableScan extends RelScan implements EnumerableAlg { +public class EnumerableScan extends RelScan implements EnumerableAlg { - private final Class elementType; + private final Class elementType; /** @@ -85,7 +85,7 @@ public class EnumerableScan extends RelScan implements EnumerableAlg { * * Use {@link #create} unless you know what you are doing. */ - public EnumerableScan( AlgOptCluster cluster, AlgTraitSet traitSet, AlgOptEntity table, Class elementType ) { + public EnumerableScan( AlgOptCluster cluster, AlgTraitSet traitSet, PhysicalTable table, Class elementType ) { super( cluster, traitSet, table ); assert getConvention() instanceof EnumerableConvention; this.elementType = elementType; @@ -95,18 +95,13 @@ public EnumerableScan( AlgOptCluster cluster, AlgTraitSet traitSet, AlgOptEntity /** * Creates an EnumerableScan. 
*/ - public static EnumerableScan create( AlgOptCluster cluster, AlgOptEntity algOptEntity ) { - final Entity entity = algOptEntity.unwrap( Entity.class ); - Class elementType = EnumerableScan.deduceElementType( entity ); + public static EnumerableScan create( AlgOptCluster cluster, CatalogEntity entity ) { + PhysicalTable physicalTable = entity.unwrap( PhysicalTable.class ); + Class elementType = EnumerableScan.deduceElementType( physicalTable ); final AlgTraitSet traitSet = cluster.traitSetOf( EnumerableConvention.INSTANCE ) - .replaceIfs( AlgCollationTraitDef.INSTANCE, () -> { - if ( entity != null ) { - return entity.getStatistic().getCollations(); - } - return ImmutableList.of(); - } ); - return new EnumerableScan( cluster, traitSet, algOptEntity, elementType ); + .replaceIfs( AlgCollationTraitDef.INSTANCE, entity::getCollations ); + return new EnumerableScan( cluster, traitSet, physicalTable, elementType ); } @@ -114,13 +109,13 @@ public static EnumerableScan create( AlgOptCluster cluster, AlgOptEntity algOptE public boolean equals( Object obj ) { return obj == this || obj instanceof EnumerableScan - && table.equals( ((EnumerableScan) obj).table ); + && entity.id == ((EnumerableScan) obj).getEntity().id; } @Override public int hashCode() { - return table.hashCode(); + return entity.hashCode(); } @@ -133,7 +128,7 @@ public static boolean canHandle( Entity entity ) { } - public static Class deduceElementType( Entity entity ) { + public static Class deduceElementType( PhysicalTable entity ) { if ( entity instanceof QueryableEntity ) { final QueryableEntity queryableTable = (QueryableEntity) entity; final Type type = queryableTable.getElementType(); @@ -153,16 +148,8 @@ public static Class deduceElementType( Entity entity ) { } - public static JavaRowFormat deduceFormat( AlgOptEntity table ) { - final Class elementType = deduceElementType( table.unwrap( Entity.class ) ); - return elementType == Object[].class - ? 
JavaRowFormat.ARRAY - : JavaRowFormat.CUSTOM; - } - - private Expression getExpression( PhysType physType ) { - final Expression expression = table.getExpression( Queryable.class ); + final Expression expression = entity.asExpression(); final Expression expression2 = toEnumerable( expression ); assert Types.isAssignableFrom( Enumerable.class, expression2.getType() ); return toRows( physType, expression2 ); @@ -190,9 +177,9 @@ private Expression toRows( PhysType physType, Expression expression ) { if ( physType.getFormat() == JavaRowFormat.SCALAR && Object[].class.isAssignableFrom( elementType ) && getRowType().getFieldCount() == 1 - && (table.unwrap( ScannableEntity.class ) != null - || table.unwrap( FilterableEntity.class ) != null - || table.unwrap( ProjectableFilterableEntity.class ) != null) ) { + && (entity.unwrap( ScannableEntity.class ) != null + || entity.unwrap( FilterableEntity.class ) != null + || entity.unwrap( ProjectableFilterableEntity.class ) != null) ) { return Expressions.call( BuiltInMethod.SLICE0.method, expression ); } JavaRowFormat oldFormat = format(); @@ -200,7 +187,7 @@ && getRowType().getFieldCount() == 1 return expression; } final ParameterExpression row_ = Expressions.parameter( elementType, "row" ); - final int fieldCount = table.getRowType().getFieldCount(); + final int fieldCount = entity.getRowType().getFieldCount(); List expressionList = new ArrayList<>( fieldCount ); for ( int i = 0; i < fieldCount; i++ ) { expressionList.add( fieldExpression( row_, i, physType, oldFormat ) ); @@ -266,7 +253,7 @@ private boolean hasCollectionField( AlgDataType rowType ) { @Override public AlgNode copy( AlgTraitSet traitSet, List inputs ) { - return new EnumerableScan( getCluster(), traitSet, table, elementType ); + return new EnumerableScan( getCluster(), traitSet, entity, elementType ); } diff --git a/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableScanRule.java b/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableScanRule.java index a04cd654a0..401639cabf 100644 --- a/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableScanRule.java +++ b/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableScanRule.java @@ -40,6 +40,7 @@ import org.polypheny.db.algebra.convert.ConverterRule; import org.polypheny.db.algebra.logical.relational.LogicalRelScan; import org.polypheny.db.algebra.logical.relational.LogicalTableFunctionScan; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.Convention; import org.polypheny.db.schema.Entity; @@ -64,8 +65,7 @@ public EnumerableScanRule( AlgBuilderFactory algBuilderFactory ) { @Override public AlgNode convert( AlgNode alg ) { LogicalRelScan scan = (LogicalRelScan) alg; - final AlgOptEntity algOptEntity = scan.getEntity(); - final Entity entity = algOptEntity.unwrap( Entity.class ); + final LogicalTable entity = scan.getEntity().unwrap( LogicalTable.class ); if ( !EnumerableScan.canHandle( entity ) ) { return null; } diff --git a/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableTableModifyToStreamerRule.java b/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableTableModifyToStreamerRule.java index 484070d7bf..e7838c2dca 100644 --- a/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableTableModifyToStreamerRule.java +++ b/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableTableModifyToStreamerRule.java @@ -38,11 +38,11 @@ public 
EnumerableTableModifyToStreamerRule() { @Override public void onMatch( AlgOptRuleCall call ) { - RelModify modify = call.alg( 0 ); + RelModify modify = call.alg( 0 ); LogicalStreamer streamer = LogicalStreamer.create( modify, - AlgFactories.LOGICAL_BUILDER.create( modify.getCluster(), modify.getCatalogReader() ) ); + AlgFactories.LOGICAL_BUILDER.create( modify.getCluster(), modify.getCluster().getRootSchema() ) ); if ( streamer != null ) { call.transformTo( streamer ); diff --git a/core/src/main/java/org/polypheny/db/adapter/java/AbstractQueryableEntity.java b/core/src/main/java/org/polypheny/db/adapter/java/AbstractQueryableEntity.java index 8a35d9c22f..b5684af75a 100644 --- a/core/src/main/java/org/polypheny/db/adapter/java/AbstractQueryableEntity.java +++ b/core/src/main/java/org/polypheny/db/adapter/java/AbstractQueryableEntity.java @@ -37,8 +37,8 @@ import java.lang.reflect.Type; import org.apache.calcite.linq4j.tree.Expression; import org.polypheny.db.schema.Entity; +import org.polypheny.db.schema.PolyphenyDbSchema; import org.polypheny.db.schema.QueryableEntity; -import org.polypheny.db.schema.SchemaPlus; import org.polypheny.db.schema.Schemas; import org.polypheny.db.schema.impl.AbstractEntity; @@ -64,7 +64,7 @@ public Type getElementType() { @Override - public Expression getExpression( SchemaPlus schema, String tableName, Class clazz ) { + public Expression getExpression( PolyphenyDbSchema schema, String tableName, Class clazz ) { return Schemas.tableExpression( schema, elementType, tableName, clazz ); } diff --git a/core/src/main/java/org/polypheny/db/adapter/java/ReflectiveSchema.java b/core/src/main/java/org/polypheny/db/adapter/java/ReflectiveSchema.java index f5249a7a9f..c25bc5b779 100644 --- a/core/src/main/java/org/polypheny/db/adapter/java/ReflectiveSchema.java +++ b/core/src/main/java/org/polypheny/db/adapter/java/ReflectiveSchema.java @@ -63,6 +63,7 @@ import org.polypheny.db.schema.Function; import org.polypheny.db.schema.Namespace; import org.polypheny.db.schema.Namespace.Schema; +import org.polypheny.db.schema.PolyphenyDbSchema; import org.polypheny.db.schema.ScannableEntity; import org.polypheny.db.schema.SchemaPlus; import org.polypheny.db.schema.Schemas; @@ -281,7 +282,7 @@ public Enumerable scan( DataContext root ) { @Override - public Queryable asQueryable( DataContext dataContext, SchemaPlus schema, String tableName ) { + public Queryable asQueryable( DataContext dataContext, PolyphenyDbSchema schema, String tableName ) { return new AbstractTableQueryable( dataContext, schema, this, tableName ) { @Override @SuppressWarnings("unchecked") @@ -362,7 +363,7 @@ public Statistic getStatistic() { @Override - public Expression getExpression( SchemaPlus schema, String tableName, Class clazz ) { + public Expression getExpression( PolyphenyDbSchema schema, String tableName, Class clazz ) { return Expressions.field( schema.unwrap( ReflectiveSchema.class ).getTargetExpression( schema.getParentSchema(), schema.getName() ), field ); } diff --git a/core/src/main/java/org/polypheny/db/algebra/AlgHomogeneousShuttle.java b/core/src/main/java/org/polypheny/db/algebra/AlgHomogeneousShuttle.java index 60e76c4fc5..12d9dc675f 100644 --- a/core/src/main/java/org/polypheny/db/algebra/AlgHomogeneousShuttle.java +++ b/core/src/main/java/org/polypheny/db/algebra/AlgHomogeneousShuttle.java @@ -68,7 +68,7 @@ public AlgNode visit( LogicalMatch match ) { @Override - public AlgNode visit( RelScan scan ) { + public AlgNode visit( RelScan scan ) { return visit( (AlgNode) scan ); } diff --git 
a/core/src/main/java/org/polypheny/db/algebra/AlgNode.java b/core/src/main/java/org/polypheny/db/algebra/AlgNode.java index 5b28b1045e..5e4fc3be50 100644 --- a/core/src/main/java/org/polypheny/db/algebra/AlgNode.java +++ b/core/src/main/java/org/polypheny/db/algebra/AlgNode.java @@ -43,7 +43,7 @@ import org.polypheny.db.algebra.metadata.AlgMetadataQuery; import org.polypheny.db.algebra.metadata.Metadata; import org.polypheny.db.algebra.type.AlgDataType; -import org.polypheny.db.catalog.Catalog.NamespaceType; +import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.plan.AlgImplementor; import org.polypheny.db.plan.AlgOptCost; diff --git a/core/src/main/java/org/polypheny/db/algebra/AlgShuttle.java b/core/src/main/java/org/polypheny/db/algebra/AlgShuttle.java index 0a15f7d59b..862526900a 100644 --- a/core/src/main/java/org/polypheny/db/algebra/AlgShuttle.java +++ b/core/src/main/java/org/polypheny/db/algebra/AlgShuttle.java @@ -76,7 +76,7 @@ */ public interface AlgShuttle { - AlgNode visit( RelScan scan ); + AlgNode visit( RelScan scan ); AlgNode visit( TableFunctionScan scan ); diff --git a/core/src/main/java/org/polypheny/db/algebra/AlgShuttleImpl.java b/core/src/main/java/org/polypheny/db/algebra/AlgShuttleImpl.java index aa028a831b..e873773e5b 100644 --- a/core/src/main/java/org/polypheny/db/algebra/AlgShuttleImpl.java +++ b/core/src/main/java/org/polypheny/db/algebra/AlgShuttleImpl.java @@ -127,7 +127,7 @@ public AlgNode visit( LogicalMatch match ) { @Override - public AlgNode visit( RelScan scan ) { + public AlgNode visit( RelScan scan ) { return scan; } diff --git a/core/src/main/java/org/polypheny/db/algebra/UnsupportedFromInsertShuttle.java b/core/src/main/java/org/polypheny/db/algebra/UnsupportedFromInsertShuttle.java index b006739491..d117971e5f 100644 --- a/core/src/main/java/org/polypheny/db/algebra/UnsupportedFromInsertShuttle.java +++ b/core/src/main/java/org/polypheny/db/algebra/UnsupportedFromInsertShuttle.java @@ -32,17 +32,16 @@ private UnsupportedFromInsertShuttle( Long tableId ) { } - public static boolean contains( RelModify modify ) { - long id = modify.getEntity().getCatalogEntity().id; - UnsupportedFromInsertShuttle shuttle = new UnsupportedFromInsertShuttle( id ); + public static boolean contains( RelModify modify ) { + UnsupportedFromInsertShuttle shuttle = new UnsupportedFromInsertShuttle( modify.entity.id ); modify.accept( shuttle ); return shuttle.containsOtherTableId; } @Override - public AlgNode visit( RelScan scan ) { - if ( !Objects.equals( scan.getEntity().getCatalogEntity().id, tableId ) ) { + public AlgNode visit( RelScan scan ) { + if ( !Objects.equals( scan.getEntity().id, tableId ) ) { containsOtherTableId = true; } return super.visit( scan ); diff --git a/core/src/main/java/org/polypheny/db/algebra/core/AlgFactories.java b/core/src/main/java/org/polypheny/db/algebra/core/AlgFactories.java index ea15080f55..a8ab171c55 100644 --- a/core/src/main/java/org/polypheny/db/algebra/core/AlgFactories.java +++ b/core/src/main/java/org/polypheny/db/algebra/core/AlgFactories.java @@ -67,7 +67,7 @@ import org.polypheny.db.algebra.logical.relational.LogicalUnion; import org.polypheny.db.algebra.logical.relational.LogicalValues; import org.polypheny.db.algebra.type.AlgDataType; -import org.polypheny.db.catalog.Catalog.EntityType; +import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.entity.CatalogEntity; import 
org.polypheny.db.catalog.refactor.TranslatableEntity; import org.polypheny.db.plan.AlgOptCluster; diff --git a/core/src/main/java/org/polypheny/db/algebra/core/document/DocumentScan.java b/core/src/main/java/org/polypheny/db/algebra/core/document/DocumentScan.java index 093a3d4926..478f2bd6bf 100644 --- a/core/src/main/java/org/polypheny/db/algebra/core/document/DocumentScan.java +++ b/core/src/main/java/org/polypheny/db/algebra/core/document/DocumentScan.java @@ -24,7 +24,7 @@ import org.polypheny.db.algebra.type.AlgDataTypeFieldImpl; import org.polypheny.db.algebra.type.AlgRecordType; import org.polypheny.db.algebra.type.DocumentType; -import org.polypheny.db.catalog.Catalog.NamespaceType; +import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgTraitSet; diff --git a/core/src/main/java/org/polypheny/db/algebra/core/document/DocumentValues.java b/core/src/main/java/org/polypheny/db/algebra/core/document/DocumentValues.java index 3a5e5854b9..49709b6a8d 100644 --- a/core/src/main/java/org/polypheny/db/algebra/core/document/DocumentValues.java +++ b/core/src/main/java/org/polypheny/db/algebra/core/document/DocumentValues.java @@ -124,7 +124,7 @@ public DocType getDocType() { public LogicalValues getRelationalEquivalent() { AlgTraitSet out = traitSet.replace( ModelTrait.RELATIONAL ); - AlgOptCluster cluster = AlgOptCluster.create( getCluster().getPlanner(), getCluster().getRexBuilder() ); + AlgOptCluster cluster = AlgOptCluster.create( getCluster().getPlanner(), getCluster().getRexBuilder(), traitSet, rootSchema ); return new LogicalValues( cluster, out, ((DocumentType) rowType).asRelational(), relationalize( documentTuples, cluster.getRexBuilder() ) ); } diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalConstraintEnforcer.java b/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalConstraintEnforcer.java index 811df67ad1..4a528eca47 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalConstraintEnforcer.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalConstraintEnforcer.java @@ -38,7 +38,7 @@ import org.polypheny.db.algebra.logical.relational.LogicalRelScan; import org.polypheny.db.algebra.operators.OperatorName; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.Catalog.ConstraintType; +import org.polypheny.db.catalog.logistic.ConstraintType; import org.polypheny.db.catalog.entity.CatalogConstraint; import org.polypheny.db.catalog.entity.CatalogForeignKey; import org.polypheny.db.catalog.entity.CatalogKey.EnforcementTime; @@ -84,13 +84,13 @@ public LogicalConstraintEnforcer( AlgOptCluster cluster, AlgTraitSet traitSet, A private static EnforcementInformation getControl( AlgNode node, Statement statement ) { ModifyExtractor extractor = new ModifyExtractor(); node.accept( extractor ); - RelModify modify = extractor.getModify(); + RelModify modify = extractor.getModify(); if ( modify == null ) { throw new RuntimeException( "The tree did not conform while generating the constraint enforcement query!"
); } - final LogicalTable table = getCatalogTable( modify ); + final LogicalTable table = modify.entity.unwrap( LogicalTable.class ); AlgBuilder builder = AlgBuilder.create( statement ); final RexBuilder rexBuilder = modify.getCluster().getRexBuilder(); @@ -162,9 +162,8 @@ private static EnforcementInformation getControl( AlgNode node, Statement statem if ( RuntimeConfig.FOREIGN_KEY_ENFORCEMENT.getBoolean() ) { for ( final CatalogForeignKey foreignKey : Stream.concat( foreignKeys.stream(), exportedKeys.stream() ).collect( Collectors.toList() ) ) { builder.clear(); - final AlgOptSchema algOptSchema = modify.getCatalogReader(); - final AlgOptEntity scanOptTable = algOptSchema.getTableForMember( Collections.singletonList( foreignKey.getTableName() ) ); - final AlgOptEntity refOptTable = algOptSchema.getTableForMember( Collections.singletonList( foreignKey.getReferencedKeyTableName() ) ); + final LogicalTable scanOptTable = statement.getDataContext().getRootSchema().getTable( foreignKey.tableId ); + final LogicalTable refOptTable = statement.getDataContext().getRootSchema().getTable( foreignKey.referencedKeyTableId ); final AlgNode scan = LogicalRelScan.create( modify.getCluster(), scanOptTable ); final LogicalRelScan ref = LogicalRelScan.create( modify.getCluster(), refOptTable ); diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalStreamer.java b/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalStreamer.java index 07185fd6e0..916f4c11ac 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalStreamer.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalStreamer.java @@ -63,7 +63,7 @@ public static LogicalStreamer create( AlgNode provider, AlgNode collector ) { } - public static LogicalStreamer create( RelModify modify, AlgBuilder algBuilder ) { + public static LogicalStreamer create( RelModify modify, AlgBuilder algBuilder ) { RexBuilder rexBuilder = algBuilder.getRexBuilder(); if ( !isModifyApplicable( modify ) ) { @@ -79,7 +79,7 @@ public static LogicalStreamer create( RelModify modify, AlgBuilder algBuilder ) } - private static LogicalStreamer getLogicalStreamer( RelModify modify, AlgBuilder algBuilder, RexBuilder rexBuilder, AlgNode input ) { + private static LogicalStreamer getLogicalStreamer( RelModify modify, AlgBuilder algBuilder, RexBuilder rexBuilder, AlgNode input ) { if ( input == null ) { throw new RuntimeException( "Error while creating Streamer." 
); } @@ -136,7 +136,7 @@ private static LogicalStreamer getLogicalStreamer( RelModify modify, AlgBuilder } - private static List createSourceList( RelModify modify, RexBuilder rexBuilder ) { + private static List createSourceList( RelModify modify, RexBuilder rexBuilder ) { return modify.getUpdateColumnList() .stream() .map( name -> { @@ -148,7 +148,7 @@ private static List createSourceList( RelModify modify, RexBuilder rexB } - private static void attachFilter( RelModify modify, AlgBuilder algBuilder, RexBuilder rexBuilder ) { + private static void attachFilter( RelModify modify, AlgBuilder algBuilder, RexBuilder rexBuilder ) { List fields = new ArrayList<>(); int i = 0; for ( AlgDataTypeField field : modify.getEntity().getRowType().getFieldList() ) { @@ -173,7 +173,7 @@ private static AlgNode getChild( AlgNode child ) { } - public static boolean isModifyApplicable( RelModify modify ) { + public static boolean isModifyApplicable( RelModify modify ) { // simple delete, which all stores should be able to handle by themselves if ( modify.isInsert() && modify.getInput() instanceof Values ) { diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/document/LogicalDocumentModify.java b/core/src/main/java/org/polypheny/db/algebra/logical/document/LogicalDocumentModify.java index 846ef690a8..a239f50c0a 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/document/LogicalDocumentModify.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/document/LogicalDocumentModify.java @@ -28,30 +28,30 @@ import org.polypheny.db.rex.RexNode; -public class LogicalDocumentModify extends DocumentModify implements RelationalTransformable { +public class LogicalDocumentModify extends DocumentModify implements RelationalTransformable { /** * Subclass of {@link DocumentModify} not targeted at any particular engine or calling convention.
*/ - public LogicalDocumentModify( AlgTraitSet traits, AlgOptEntity table, CatalogReader catalogReader, AlgNode input, Operation operation, List keys, List updates ) { - super( traits, table, catalogReader, input, operation, keys, updates ); + public LogicalDocumentModify( AlgTraitSet traits, CatalogEntity entity, CatalogReader catalogReader, AlgNode input, Operation operation, List keys, List updates ) { + super( traits, entity, catalogReader, input, operation, keys, updates ); } - public static LogicalDocumentModify create( AlgOptEntity table, AlgNode input, CatalogReader catalogReader, Operation operation, List keys, List updates ) { - return new LogicalDocumentModify( input.getTraitSet(), table, catalogReader, input, operation, keys, updates ); + public static LogicalDocumentModify create( CatalogEntity entity, AlgNode input, CatalogReader catalogReader, Operation operation, List keys, List updates ) { + return new LogicalDocumentModify( input.getTraitSet(), entity, catalogReader, input, operation, keys, updates ); } @Override public AlgNode copy( AlgTraitSet traitSet, List inputs ) { - return new LogicalDocumentModify( traitSet, getCollection(), getCatalogReader(), inputs.get( 0 ), operation, getKeys(), getUpdates() ); + return new LogicalDocumentModify( traitSet, entity, getCatalogReader(), inputs.get( 0 ), operation, getKeys(), getUpdates() ); } @Override public List getRelationalEquivalent( List values, List entities, CatalogReader catalogReader ) { - return List.of( RelationalTransformable.getModify( entities.get( 0 ), catalogReader, values.get( 0 ), operation ) ); + return List.of( RelationalTransformable.getModify( entities.get( 0 ), values.get( 0 ), operation ) ); } diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/document/LogicalDocumentScan.java b/core/src/main/java/org/polypheny/db/algebra/logical/document/LogicalDocumentScan.java index b7530355f2..e9d76cd001 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/document/LogicalDocumentScan.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/document/LogicalDocumentScan.java @@ -23,6 +23,7 @@ import org.polypheny.db.algebra.core.relational.RelationalTransformable; import org.polypheny.db.algebra.logical.relational.LogicalRelScan; import org.polypheny.db.catalog.entity.CatalogEntity; +import org.polypheny.db.catalog.entity.LogicalCollection; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptRule; @@ -31,17 +32,17 @@ import org.polypheny.db.schema.ModelTrait; -public class LogicalDocumentScan extends DocumentScan implements RelationalTransformable { +public class LogicalDocumentScan extends DocumentScan implements RelationalTransformable { /** * Subclass of {@link DocumentScan} not targeted at any particular engine or calling convention. 
*/ - public LogicalDocumentScan( AlgOptCluster cluster, AlgTraitSet traitSet, AlgOptEntity document ) { + public LogicalDocumentScan( AlgOptCluster cluster, AlgTraitSet traitSet, LogicalCollection document ) { super( cluster, traitSet.replace( ModelTrait.DOCUMENT ), document ); } - public static AlgNode create( AlgOptCluster cluster, AlgOptEntity collection ) { + public static AlgNode create( AlgOptCluster cluster, LogicalCollection collection ) { return new LogicalDocumentScan( cluster, cluster.traitSet(), collection ); } diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/document/LogicalDocumentValues.java b/core/src/main/java/org/polypheny/db/algebra/logical/document/LogicalDocumentValues.java index 524d4cbebe..bde3840010 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/document/LogicalDocumentValues.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/document/LogicalDocumentValues.java @@ -30,7 +30,7 @@ import org.polypheny.db.algebra.logical.relational.LogicalValues; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeField; -import org.polypheny.db.catalog.Catalog.NamespaceType; +import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.plan.Convention; diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalGraph.java b/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalGraph.java index b88e18ef13..db606e3aff 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalGraph.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalGraph.java @@ -26,6 +26,7 @@ import org.polypheny.db.algebra.core.relational.RelationalTransformable; import org.polypheny.db.algebra.type.AlgDataTypeImpl; import org.polypheny.db.algebra.type.AlgProtoDataType; +import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.plan.AlgOptEntity.ToAlgContext; import org.polypheny.db.prepare.Prepare.CatalogReader; @@ -47,7 +48,7 @@ public class LogicalGraph implements RelationalTransformable, Namespace, Graph, /** - * {@link org.polypheny.db.catalog.Catalog.NamespaceType#GRAPH} implementation of an entity, called graph + * {@link NamespaceType#GRAPH} implementation of an entity, called graph */ public LogicalGraph( long id ) { this.id = id; diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgModify.java b/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgModify.java index e8bb277847..8e803d907f 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgModify.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgModify.java @@ -21,6 +21,7 @@ import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.AlgShuttle; import org.polypheny.db.algebra.constant.Kind; +import org.polypheny.db.algebra.core.common.Modify; import org.polypheny.db.algebra.core.relational.RelModify; import org.polypheny.db.algebra.core.lpg.LpgModify; import org.polypheny.db.algebra.core.relational.RelationalTransformable; @@ -54,12 +55,12 @@ public List getRelationalEquivalent( List inputs, List modifies = new ArrayList<>(); // modify of nodes - RelModify nodeModify = RelationalTransformable.getModify( entities.get( 0 ), catalogReader, inputs.get( 0 ), operation ); + Modify nodeModify = RelationalTransformable.getModify( 
entities.get( 0 ), inputs.get( 0 ), operation ); modifies.add( nodeModify ); // modify of properties if ( inputs.get( 1 ) != null ) { - RelModify nodePropertyModify = RelationalTransformable.getModify( entities.get( 1 ), catalogReader, inputs.get( 1 ), operation ); + Modify nodePropertyModify = RelationalTransformable.getModify( entities.get( 1 ), inputs.get( 1 ), operation ); modifies.add( nodePropertyModify ); } @@ -68,12 +69,12 @@ public List getRelationalEquivalent( List inputs, List edgeModify = RelationalTransformable.getModify( entities.get( 2 ), catalogReader, inputs.get( 2 ), operation ); + Modify edgeModify = RelationalTransformable.getModify( entities.get( 2 ), inputs.get( 2 ), operation ); modifies.add( edgeModify ); // modify of edge properties if ( inputs.get( 3 ) != null ) { - RelModify edgePropertyModify = RelationalTransformable.getModify( entities.get( 3 ), catalogReader, inputs.get( 3 ), operation ); + Modify edgePropertyModify = RelationalTransformable.getModify( entities.get( 3 ), inputs.get( 3 ), operation ); modifies.add( edgePropertyModify ); } diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgScan.java b/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgScan.java index 15e54802f5..c7d4d04c83 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgScan.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgScan.java @@ -39,7 +39,7 @@ import org.polypheny.db.schema.ModelTrait; -public class LogicalLpgScan extends LpgScan implements RelationalTransformable { +public class LogicalLpgScan extends LpgScan implements RelationalTransformable { /** * Subclass of {@link LpgScan} not targeted at any particular engine or calling convention. @@ -86,7 +86,7 @@ public List getRelationalEquivalent( List inputs, List inputs ) { - return new LogicalLpgScan( inputs.get( 0 ).getCluster(), traitSet, graph, rowType ); + return new LogicalLpgScan( inputs.get( 0 ).getCluster(), traitSet, entity, rowType ); } diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgValues.java b/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgValues.java index d8f3e5ac8f..2907746b5a 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgValues.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgValues.java @@ -117,7 +117,7 @@ public static LogicalLpgValues create( public List getRelationalEquivalent( List values, List entities, CatalogReader catalogReader ) { AlgTraitSet out = traitSet.replace( ModelTrait.RELATIONAL ); - AlgOptCluster cluster = AlgOptCluster.create( getCluster().getPlanner(), getCluster().getRexBuilder() ); + AlgOptCluster cluster = AlgOptCluster.create( getCluster().getPlanner(), getCluster().getRexBuilder(), traitSet, rootSchema ); LogicalValues nodeValues = new LogicalValues( cluster, out, entities.get( 0 ).getRowType(), getNodeValues( nodes ) ); LogicalValues nodePropertyValues = new LogicalValues( cluster, out, entities.get( 1 ).getRowType(), getNodePropertyValues( nodes ) ); diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelScan.java b/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelScan.java index 8b3f0ea26e..1e3c32902d 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelScan.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelScan.java @@ -103,7 +103,6 @@ public AlgNode copy( 
AlgTraitSet traitSet, List inputs ) { * Creates a LogicalScan. * * @param cluster Cluster - * @param algOptEntity Table */ public static LogicalRelScan create( AlgOptCluster cluster, final CatalogEntity entity ) { diff --git a/core/src/main/java/org/polypheny/db/algebra/metadata/AlgColumnOrigin.java b/core/src/main/java/org/polypheny/db/algebra/metadata/AlgColumnOrigin.java index aa22c99fb0..d244a603c3 100644 --- a/core/src/main/java/org/polypheny/db/algebra/metadata/AlgColumnOrigin.java +++ b/core/src/main/java/org/polypheny/db/algebra/metadata/AlgColumnOrigin.java @@ -34,6 +34,7 @@ package org.polypheny.db.algebra.metadata; +import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.plan.AlgOptEntity; @@ -42,15 +43,15 @@ */ public class AlgColumnOrigin { - private final AlgOptEntity originTable; + private final CatalogEntity originTable; private final int iOriginColumn; private final boolean isDerived; - public AlgColumnOrigin( AlgOptEntity originTable, int iOriginColumn, boolean isDerived ) { - this.originTable = originTable; + public AlgColumnOrigin( CatalogEntity origin, int iOriginColumn, boolean isDerived ) { + this.originTable = origin; this.iOriginColumn = iOriginColumn; this.isDerived = isDerived; } @@ -59,7 +60,7 @@ public AlgColumnOrigin( AlgOptEntity originTable, int iOriginColumn, boolean isD /** * @return table of origin */ - public AlgOptEntity getOriginTable() { + public CatalogEntity getOriginTable() { return originTable; } @@ -88,7 +89,7 @@ public boolean equals( Object obj ) { return false; } AlgColumnOrigin other = (AlgColumnOrigin) obj; - return originTable.getCatalogEntity().id == other.originTable.getCatalogEntity().id + return originTable.id == other.originTable.id && (iOriginColumn == other.iOriginColumn) && (isDerived == other.isDerived); } @@ -96,7 +97,7 @@ public boolean equals( Object obj ) { // override Object public int hashCode() { - return originTable.getCatalogEntity().hashCode() + iOriginColumn + (isDerived ? 313 : 0); + return originTable.hashCode() + iOriginColumn + (isDerived ? 313 : 0); } } diff --git a/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdCollation.java b/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdCollation.java index 898bd09a2f..7bf51605fb 100644 --- a/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdCollation.java +++ b/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdCollation.java @@ -73,6 +73,8 @@ import org.polypheny.db.algebra.core.Window; import org.polypheny.db.algebra.core.relational.RelScan; import org.polypheny.db.algebra.type.AlgDataType; +import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.hep.HepAlgVertex; import org.polypheny.db.plan.volcano.AlgSubset; @@ -135,8 +137,8 @@ public ImmutableList collations( Filter alg, AlgMetadataQuery mq ) } - public ImmutableList collations( RelScan scan, AlgMetadataQuery mq ) { - return ImmutableList.copyOf( table( scan.getEntity() ) ); + public ImmutableList collations( RelScan scan, AlgMetadataQuery mq ) { + return ImmutableList.copyOf( table( scan.entity ) ); } @@ -204,8 +206,8 @@ public ImmutableList collations( AlgSubset alg, AlgMetadataQuery m /** * Helper method to determine a {@link RelScan}'s collation. 
*/ - public static List table( AlgOptEntity table ) { - return table.getCollationList(); + public static List table( CatalogEntity table ) { + return table.getCollations(); } diff --git a/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdColumnOrigins.java b/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdColumnOrigins.java index 461176afac..002a0cfe00 100644 --- a/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdColumnOrigins.java +++ b/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdColumnOrigins.java @@ -47,6 +47,7 @@ import org.polypheny.db.algebra.core.SetOp; import org.polypheny.db.algebra.core.Sort; import org.polypheny.db.algebra.core.TableFunctionScan; +import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.rex.RexInputRef; import org.polypheny.db.rex.RexNode; @@ -222,19 +223,19 @@ public Set getColumnOrigins( AlgNode alg, AlgMetadataQuery mq, final Set set = new HashSet<>(); - AlgOptEntity table = alg.getEntity(); - if ( table == null ) { + CatalogEntity entity = alg.getEntity(); + if ( entity == null ) { // Somebody is making column values up out of thin air, like a VALUES clause, so we return an empty set. return set; } // Detect the case where a physical table expression is performing projection, and say we don't know instead of making any assumptions. // (Theoretically we could try to map the projection using column names.) This detection assumes the table expression doesn't handle rename as well. - if ( table.getRowType() != alg.getRowType() ) { + if ( entity.getRowType() != alg.getRowType() ) { return null; } - set.add( new AlgColumnOrigin( table, iOutputColumn, false ) ); + set.add( new AlgColumnOrigin( entity, iOutputColumn, false ) ); return set; } diff --git a/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdColumnUniqueness.java b/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdColumnUniqueness.java index 51476f4a1f..ac0b2a9bdf 100644 --- a/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdColumnUniqueness.java +++ b/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdColumnUniqueness.java @@ -88,7 +88,7 @@ public MetadataDef getDef() { } - public Boolean areColumnsUnique( RelScan alg, AlgMetadataQuery mq, ImmutableBitSet columns, boolean ignoreNulls ) { + public Boolean areColumnsUnique( RelScan alg, AlgMetadataQuery mq, ImmutableBitSet columns, boolean ignoreNulls ) { return alg.getEntity().isKey( columns ); } diff --git a/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdDistribution.java b/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdDistribution.java index 736e012a64..748e93bf2a 100644 --- a/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdDistribution.java +++ b/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdDistribution.java @@ -49,6 +49,7 @@ import org.polypheny.db.algebra.core.Values; import org.polypheny.db.algebra.core.relational.RelScan; import org.polypheny.db.algebra.type.AlgDataType; +import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.hep.HepAlgVertex; import org.polypheny.db.rex.RexLiteral; @@ -102,7 +103,7 @@ public AlgDistribution distribution( SetOp alg, AlgMetadataQuery mq ) { } - public AlgDistribution distribution( RelScan scan, AlgMetadataQuery mq ) { + public AlgDistribution distribution( RelScan scan, AlgMetadataQuery mq ) { return table( scan.getEntity() ); } @@ -130,7 +131,7 @@ public 
AlgDistribution distribution( HepAlgVertex alg, AlgMetadataQuery mq ) { /** * Helper method to determine a {@link RelScan}'s distribution. */ - public static AlgDistribution table( AlgOptEntity table ) { + public static AlgDistribution table( CatalogEntity table ) { return table.getDistribution(); } diff --git a/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdTableReferences.java b/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdTableReferences.java index 00902338b3..fa566b6bf7 100644 --- a/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdTableReferences.java +++ b/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdTableReferences.java @@ -102,7 +102,7 @@ public Set getTableReferences( AlgSubset alg, AlgMetadataQuery mq ) /** * Scan table reference. */ - public Set getTableReferences( RelScan alg, AlgMetadataQuery mq ) { + public Set getTableReferences( RelScan alg, AlgMetadataQuery mq ) { return ImmutableSet.of( AlgTableRef.of( alg.getEntity(), 0 ) ); } diff --git a/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMetadataQuery.java b/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMetadataQuery.java index 765307b9c5..9624f378c4 100644 --- a/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMetadataQuery.java +++ b/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMetadataQuery.java @@ -48,6 +48,7 @@ import org.polypheny.db.algebra.AlgDistribution; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.constant.ExplainLevel; +import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.plan.AlgOptCost; import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptPredicateList; @@ -392,7 +393,7 @@ public Set getTableReferences( AlgNode alg ) { * @param alg the AlgNode * @return the table, if the {@link AlgNode} is a simple table; otherwise null */ - public AlgOptEntity getTableOrigin( AlgNode alg ) { + public CatalogEntity getTableOrigin( AlgNode alg ) { // Determine the simple origin of the first column in the AlgNode. If it's simple, then that means that the underlying table is also simple, even if the column itself is derived.
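// A minimal caller sketch (illustrative only, not part of this patch): `mq` and `node`
// stand for an AlgMetadataQuery and an AlgNode assumed to be in scope. It shows what the
// refactored signature above now hands back: a CatalogEntity whose id is read directly,
// where the old AlgOptEntity result first had to be unwrapped via getCatalogEntity().
//   CatalogEntity origin = mq.getTableOrigin( node );
//   if ( origin != null ) {
//       long originId = origin.id; // previously: origin.getCatalogEntity().id
//   }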
if ( alg.getRowType().getFieldCount() == 0 ) { return null; diff --git a/core/src/main/java/org/polypheny/db/algebra/mutable/MutableAlgs.java b/core/src/main/java/org/polypheny/db/algebra/mutable/MutableAlgs.java index fd23b533e4..9854f6f357 100644 --- a/core/src/main/java/org/polypheny/db/algebra/mutable/MutableAlgs.java +++ b/core/src/main/java/org/polypheny/db/algebra/mutable/MutableAlgs.java @@ -380,7 +380,6 @@ public static MutableAlg toMutable( AlgNode alg ) { modify.getRowType(), input, modify.getEntity(), - modify.getCatalogReader(), modify.getOperation(), modify.getUpdateColumnList(), modify.getSourceExpressionList(), diff --git a/core/src/main/java/org/polypheny/db/algebra/mutable/MutableScan.java b/core/src/main/java/org/polypheny/db/algebra/mutable/MutableScan.java index 4542a5433d..5aba72dd60 100644 --- a/core/src/main/java/org/polypheny/db/algebra/mutable/MutableScan.java +++ b/core/src/main/java/org/polypheny/db/algebra/mutable/MutableScan.java @@ -42,7 +42,7 @@ */ public class MutableScan extends MutableLeafAlg { - private MutableScan( RelScan alg ) { + private MutableScan( RelScan alg ) { super( MutableAlgType.TABLE_SCAN, alg ); } @@ -52,7 +52,7 @@ private MutableScan( RelScan alg ) { * * @param scan The underlying Scan object */ - public static MutableScan of( RelScan scan ) { + public static MutableScan of( RelScan scan ) { return new MutableScan( scan ); } @@ -73,13 +73,13 @@ public int hashCode() { @Override public StringBuilder digest( StringBuilder buf ) { - return buf.append( "Scan(table: " ).append( alg.getEntity().getCatalogEntity().name ).append( ")" ); + return buf.append( "Scan(table: " ).append( alg.getEntity().name ).append( ")" ); } @Override public MutableAlg clone() { - return MutableScan.of( (RelScan) alg ); + return MutableScan.of( (RelScan) alg ); } } diff --git a/core/src/main/java/org/polypheny/db/algebra/mutable/MutableTableModify.java b/core/src/main/java/org/polypheny/db/algebra/mutable/MutableTableModify.java index 4074d6f6d8..6f8ad5f481 100644 --- a/core/src/main/java/org/polypheny/db/algebra/mutable/MutableTableModify.java +++ b/core/src/main/java/org/polypheny/db/algebra/mutable/MutableTableModify.java @@ -39,6 +39,7 @@ import org.polypheny.db.algebra.core.relational.RelModify; import org.polypheny.db.algebra.core.common.Modify.Operation; import org.polypheny.db.algebra.type.AlgDataType; +import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.prepare.Prepare; import org.polypheny.db.rex.RexNode; @@ -49,18 +50,16 @@ */ public class MutableTableModify extends MutableSingleAlg { - public final Prepare.CatalogReader catalogReader; - public final AlgOptEntity table; + public final CatalogEntity table; public final Operation operation; public final List updateColumnList; public final List sourceExpressionList; public final boolean flattened; - private MutableTableModify( AlgDataType rowType, MutableAlg input, AlgOptEntity table, Prepare.CatalogReader catalogReader, Operation operation, List updateColumnList, List sourceExpressionList, boolean flattened ) { + private MutableTableModify( AlgDataType rowType, MutableAlg input, CatalogEntity table, Operation operation, List updateColumnList, List sourceExpressionList, boolean flattened ) { super( MutableAlgType.TABLE_MODIFY, rowType, input ); this.table = table; - this.catalogReader = catalogReader; this.operation = operation; this.updateColumnList = updateColumnList; this.sourceExpressionList = sourceExpressionList; @@ -74,14 +73,13 @@ 
private MutableTableModify( AlgDataType rowType, MutableAlg input, AlgOptEntity * @param rowType Row type * @param input Input relational expression * @param table Target table to modify - * @param catalogReader Accessor to the table metadata * @param operation Modify operation (INSERT, UPDATE, DELETE) * @param updateColumnList List of column identifiers to be updated (e.g. ident1, ident2); null if not UPDATE * @param sourceExpressionList List of value expressions to be set (e.g. exp1, exp2); null if not UPDATE * @param flattened Whether set flattens the input row type */ - public static MutableTableModify of( AlgDataType rowType, MutableAlg input, AlgOptEntity table, Prepare.CatalogReader catalogReader, Operation operation, List updateColumnList, List sourceExpressionList, boolean flattened ) { - return new MutableTableModify( rowType, input, table, catalogReader, operation, updateColumnList, sourceExpressionList, flattened ); + public static MutableTableModify of( AlgDataType rowType, MutableAlg input, CatalogEntity table, Operation operation, List updateColumnList, List sourceExpressionList, boolean flattened ) { + return new MutableTableModify( rowType, input, table, operation, updateColumnList, sourceExpressionList, flattened ); } @@ -89,7 +87,7 @@ public static MutableTableModify of( AlgDataType rowType, MutableAlg input, AlgO public boolean equals( Object obj ) { return obj == this || obj instanceof MutableTableModify - && table.getCatalogEntity().id == ((MutableTableModify) obj).table.getCatalogEntity().id + && table.id == ((MutableTableModify) obj).table.id && operation == ((MutableTableModify) obj).operation && Objects.equals( updateColumnList, ((MutableTableModify) obj).updateColumnList ) && PAIRWISE_STRING_EQUIVALENCE.equivalent( sourceExpressionList, ((MutableTableModify) obj).sourceExpressionList ) @@ -102,7 +100,7 @@ public boolean equals( Object obj ) { public int hashCode() { return Objects.hash( input, - table.getCatalogEntity().id, + table.id, operation, updateColumnList, PAIRWISE_STRING_EQUIVALENCE.hash( sourceExpressionList ), @@ -112,7 +110,7 @@ public int hashCode() { @Override public StringBuilder digest( StringBuilder buf ) { - buf.append( "Modify(table: " ).append( table.getCatalogEntity().name ).append( ", operation: " ).append( operation ); + buf.append( "Modify(table: " ).append( table.name ).append( ", operation: " ).append( operation ); if ( updateColumnList != null ) { buf.append( ", updateColumnList: " ).append( updateColumnList ); } @@ -125,7 +123,7 @@ public StringBuilder digest( StringBuilder buf ) { @Override public MutableAlg clone() { - return MutableTableModify.of( rowType, input.clone(), table, catalogReader, operation, updateColumnList, sourceExpressionList, flattened ); + return MutableTableModify.of( rowType, input.clone(), table, operation, updateColumnList, sourceExpressionList, flattened ); } } diff --git a/core/src/main/java/org/polypheny/db/algebra/rules/FilterScanRule.java b/core/src/main/java/org/polypheny/db/algebra/rules/FilterScanRule.java index 733dd1eca5..6d28948af5 100644 --- a/core/src/main/java/org/polypheny/db/algebra/rules/FilterScanRule.java +++ b/core/src/main/java/org/polypheny/db/algebra/rules/FilterScanRule.java @@ -115,14 +115,13 @@ protected FilterScanRule( AlgOptRuleOperand operand, AlgBuilderFactory algBuilde } - public static boolean test( RelScan scan ) { + public static boolean test( RelScan scan ) { // We can only push filters into a FilterableTable or ProjectableFilterableTable. 
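// A short sketch of the capability probe this rule performs after the change (names are
// taken from the replacement lines below; `scan` is the method parameter). unwrap(...)
// returns null when the entity does not implement the requested interface, so a filter
// is only pushed down when one of the two capabilities is present:
//   boolean pushable = scan.entity.unwrap( FilterableEntity.class ) != null
//           || scan.entity.unwrap( ProjectableFilterableEntity.class ) != null;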
- final AlgOptEntity table = scan.getEntity(); - return table.unwrap( FilterableEntity.class ) != null || table.unwrap( ProjectableFilterableEntity.class ) != null; + return scan.entity.unwrap( FilterableEntity.class ) != null || scan.entity.unwrap( ProjectableFilterableEntity.class ) != null; } - protected void apply( AlgOptRuleCall call, Filter filter, RelScan scan ) { + protected void apply( AlgOptRuleCall call, Filter filter, RelScan scan ) { final ImmutableIntList projects; final ImmutableList.Builder filters = ImmutableList.builder(); if ( scan instanceof BindableScan ) { diff --git a/core/src/main/java/org/polypheny/db/algebra/rules/LoptSemiJoinOptimizer.java b/core/src/main/java/org/polypheny/db/algebra/rules/LoptSemiJoinOptimizer.java index b77abe086d..e428051e31 100644 --- a/core/src/main/java/org/polypheny/db/algebra/rules/LoptSemiJoinOptimizer.java +++ b/core/src/main/java/org/polypheny/db/algebra/rules/LoptSemiJoinOptimizer.java @@ -51,6 +51,8 @@ import org.polypheny.db.algebra.metadata.AlgMdUtil; import org.polypheny.db.algebra.metadata.AlgMetadataQuery; import org.polypheny.db.algebra.operators.OperatorName; +import org.polypheny.db.catalog.entity.CatalogEntity; +import org.polypheny.db.catalog.refactor.TranslatableEntity; import org.polypheny.db.languages.OperatorRegistry; import org.polypheny.db.plan.AlgOptCost; import org.polypheny.db.plan.AlgOptEntity; @@ -243,7 +245,7 @@ private SemiJoin findSemiJoinIndexByCost( LoptMultiJoin multiJoin, List // Find the best index final List bestKeyOrder = new ArrayList<>(); - LcsScan tmpFactRel = (LcsScan) factTable.toAlg( factRel::getCluster, factRel.getTraitSet() ); + LcsScan tmpFactRel = (LcsScan) factTable.unwrap( TranslatableEntity.class ).toAlg( factRel::getCluster, factRel.getTraitSet() ); LcsIndexOptimizer indexOptimizer = new LcsIndexOptimizer( tmpFactRel ); FemLocalIndex bestIndex = @@ -339,7 +341,7 @@ private RexNode adjustSemiJoinCondition( LoptMultiJoin multiJoin, int leftAdjust */ private LcsEntity validateKeys( AlgNode factRel, List leftKeys, List rightKeys, List actualLeftKeys ) { int keyIdx = 0; - AlgOptEntity theTable = null; + CatalogEntity theTable = null; ListIterator keyIter = leftKeys.listIterator(); while ( keyIter.hasNext() ) { boolean removeKey = false; @@ -349,7 +351,7 @@ private LcsEntity validateKeys( AlgNode factRel, List leftKeys, List b ) { + super( b ); + } } diff --git a/core/src/main/java/org/polypheny/db/algebra/rules/ProjectScanRule.java b/core/src/main/java/org/polypheny/db/algebra/rules/ProjectScanRule.java index 631cef93ed..3c5bc9e15b 100644 --- a/core/src/main/java/org/polypheny/db/algebra/rules/ProjectScanRule.java +++ b/core/src/main/java/org/polypheny/db/algebra/rules/ProjectScanRule.java @@ -108,16 +108,14 @@ public ProjectScanRule( AlgOptRuleOperand operand, AlgBuilderFactory algBuilderF } - protected static boolean test( RelScan scan ) { + protected static boolean test( RelScan scan ) { // We can only push projects into a ProjectableFilterableTable. 
- final AlgOptEntity table = scan.getEntity(); - return table.unwrap( ProjectableFilterableEntity.class ) != null; + return scan.entity.unwrap( ProjectableFilterableEntity.class ) != null; } - protected void apply( AlgOptRuleCall call, Project project, RelScan scan ) { - final AlgOptEntity table = scan.getEntity(); - assert table.unwrap( ProjectableFilterableEntity.class ) != null; + protected void apply( AlgOptRuleCall call, Project project, RelScan scan ) { + assert scan.entity.unwrap( ProjectableFilterableEntity.class ) != null; final TargetMapping mapping = project.getMapping(); if ( mapping == null || Mappings.isIdentity( mapping ) ) { diff --git a/core/src/main/java/org/polypheny/db/algebra/rules/ScanRule.java b/core/src/main/java/org/polypheny/db/algebra/rules/ScanRule.java index f16d54bc4b..f4758177cd 100644 --- a/core/src/main/java/org/polypheny/db/algebra/rules/ScanRule.java +++ b/core/src/main/java/org/polypheny/db/algebra/rules/ScanRule.java @@ -20,7 +20,10 @@ import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.core.AlgFactories; import org.polypheny.db.algebra.logical.relational.LogicalRelScan; +import org.polypheny.db.catalog.refactor.TranslatableEntity; +import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgOptEntity; +import org.polypheny.db.plan.AlgOptEntity.ToAlgContext; import org.polypheny.db.plan.AlgOptRule; import org.polypheny.db.plan.AlgOptRuleCall; import org.polypheny.db.tools.AlgBuilderFactory; @@ -47,7 +50,7 @@ public ScanRule( AlgBuilderFactory algBuilderFactory ) { @Override public void onMatch( AlgOptRuleCall call ) { final LogicalRelScan oldAlg = call.alg( 0 ); - AlgNode newAlg = oldAlg.getEntity().toAlg( oldAlg::getCluster, oldAlg.getTraitSet() ); + AlgNode newAlg = oldAlg.getEntity().unwrap( TranslatableEntity.class ).toAlg( oldAlg::getCluster, oldAlg.getTraitSet() ); call.transformTo( newAlg ); } diff --git a/core/src/main/java/org/polypheny/db/algebra/stream/StreamRules.java b/core/src/main/java/org/polypheny/db/algebra/stream/StreamRules.java index 530637b51a..3e35c96b33 100644 --- a/core/src/main/java/org/polypheny/db/algebra/stream/StreamRules.java +++ b/core/src/main/java/org/polypheny/db/algebra/stream/StreamRules.java @@ -56,6 +56,7 @@ import org.polypheny.db.algebra.logical.relational.LogicalUnion; import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.catalog.entity.physical.PhysicalTable; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptRule; @@ -267,21 +268,20 @@ public DeltaScanRule( AlgBuilderFactory algBuilderFactory ) { @Override public void onMatch( AlgOptRuleCall call ) { final Delta delta = call.alg( 0 ); - final RelScan scan = call.alg( 1 ); + final RelScan scan = call.alg( 1 ); final AlgOptCluster cluster = delta.getCluster(); - final AlgOptEntity algOptEntity = scan.getEntity(); - final StreamableEntity streamableTable = algOptEntity.unwrap( StreamableEntity.class ); + final StreamableEntity streamableTable = scan.entity.unwrap( StreamableEntity.class ); if ( streamableTable != null ) { final Entity entity1 = streamableTable.stream(); - final LogicalTable catalogTable = algOptEntity.getCatalogEntity().unwrap( LogicalTable.class ); - final CatalogPartitionPlacement placement = algOptEntity.getPartitionPlacement().unwrap( CatalogPartitionPlacement.class ); + final LogicalTable catalogTable = scan.entity.unwrap( 
LogicalTable.class ); + /*final CatalogPartitionPlacement placement = scan.entity.unwrap( PhysicalTable.class ).getPartitionPlacement().unwrap( CatalogPartitionPlacement.class ); final AlgOptEntity algOptEntity2 = AlgOptEntityImpl.create( algOptEntity.getRelOptSchema(), algOptEntity.getRowType(), entity1, catalogTable, - placement ); - final LogicalRelScan newScan = LogicalRelScan.create( cluster, algOptEntity2 ); + placement );*/ + final LogicalRelScan newScan = LogicalRelScan.create( cluster, null ); call.transformTo( newScan ); } } @@ -309,9 +309,8 @@ public DeltaScanToEmptyRule( AlgBuilderFactory algBuilderFactory ) { @Override public void onMatch( AlgOptRuleCall call ) { final Delta delta = call.alg( 0 ); - final RelScan scan = call.alg( 1 ); - final AlgOptEntity algOptEntity = scan.getEntity(); - final StreamableEntity streamableTable = algOptEntity.unwrap( StreamableEntity.class ); + final RelScan scan = call.alg( 1 ); + final StreamableEntity streamableTable = scan.getEntity().unwrap( StreamableEntity.class ); final AlgBuilder builder = call.builder(); if ( streamableTable == null ) { call.transformTo( builder.values( delta.getRowType() ).build() ); diff --git a/core/src/main/java/org/polypheny/db/catalog/Catalog.java b/core/src/main/java/org/polypheny/db/catalog/Catalog.java index c2d254a094..1bca25de8f 100644 --- a/core/src/main/java/org/polypheny/db/catalog/Catalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/Catalog.java @@ -19,14 +19,10 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; -import com.google.gson.annotations.SerializedName; import java.beans.PropertyChangeListener; import java.beans.PropertyChangeSupport; -import java.util.ArrayList; import java.util.List; import java.util.Map; -import lombok.NonNull; -import lombok.RequiredArgsConstructor; import org.pf4j.ExtensionPoint; import org.polypheny.db.adapter.DataStore; import org.polypheny.db.algebra.AlgCollation; @@ -34,7 +30,7 @@ import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.catalog.entity.CatalogAdapter; import org.polypheny.db.catalog.entity.CatalogAdapter.AdapterType; -import org.polypheny.db.catalog.entity.CatalogCollection; +import org.polypheny.db.catalog.entity.LogicalCollection; import org.polypheny.db.catalog.entity.CatalogCollectionMapping; import org.polypheny.db.catalog.entity.CatalogCollectionPlacement; import org.polypheny.db.catalog.entity.CatalogColumn; @@ -61,38 +57,26 @@ import org.polypheny.db.catalog.exceptions.GenericCatalogException; import org.polypheny.db.catalog.exceptions.NoTablePrimaryKeyException; import org.polypheny.db.catalog.exceptions.UnknownAdapterException; -import org.polypheny.db.catalog.exceptions.UnknownCollationException; -import org.polypheny.db.catalog.exceptions.UnknownCollationIdRuntimeException; import org.polypheny.db.catalog.exceptions.UnknownColumnException; import org.polypheny.db.catalog.exceptions.UnknownConstraintException; -import org.polypheny.db.catalog.exceptions.UnknownConstraintTypeException; -import org.polypheny.db.catalog.exceptions.UnknownConstraintTypeRuntimeException; import org.polypheny.db.catalog.exceptions.UnknownDatabaseException; import org.polypheny.db.catalog.exceptions.UnknownForeignKeyException; -import org.polypheny.db.catalog.exceptions.UnknownForeignKeyOptionException; -import org.polypheny.db.catalog.exceptions.UnknownForeignKeyOptionRuntimeException; import org.polypheny.db.catalog.exceptions.UnknownIndexException; -import 
org.polypheny.db.catalog.exceptions.UnknownIndexTypeException; -import org.polypheny.db.catalog.exceptions.UnknownIndexTypeRuntimeException; -import org.polypheny.db.catalog.exceptions.UnknownPartitionTypeException; -import org.polypheny.db.catalog.exceptions.UnknownPartitionTypeRuntimeException; -import org.polypheny.db.catalog.exceptions.UnknownPlacementRoleException; -import org.polypheny.db.catalog.exceptions.UnknownPlacementRoleRuntimeException; -import org.polypheny.db.catalog.exceptions.UnknownPlacementTypeException; -import org.polypheny.db.catalog.exceptions.UnknownPlacementTypeRuntimeException; import org.polypheny.db.catalog.exceptions.UnknownQueryInterfaceException; import org.polypheny.db.catalog.exceptions.UnknownSchemaException; -import org.polypheny.db.catalog.exceptions.UnknownSchemaTypeException; -import org.polypheny.db.catalog.exceptions.UnknownSchemaTypeRuntimeException; import org.polypheny.db.catalog.exceptions.UnknownTableException; -import org.polypheny.db.catalog.exceptions.UnknownTableTypeException; -import org.polypheny.db.catalog.exceptions.UnknownTableTypeRuntimeException; import org.polypheny.db.catalog.exceptions.UnknownUserException; -import org.polypheny.db.config.RuntimeConfig; +import org.polypheny.db.catalog.logistic.Collation; +import org.polypheny.db.catalog.logistic.DataPlacementRole; +import org.polypheny.db.catalog.logistic.EntityType; +import org.polypheny.db.catalog.logistic.ForeignKeyOption; +import org.polypheny.db.catalog.logistic.IndexType; +import org.polypheny.db.catalog.logistic.NamespaceType; +import org.polypheny.db.catalog.logistic.PartitionType; +import org.polypheny.db.catalog.logistic.Pattern; +import org.polypheny.db.catalog.logistic.PlacementType; import org.polypheny.db.languages.QueryLanguage; import org.polypheny.db.partition.properties.PartitionProperty; -import org.polypheny.db.plan.AlgTrait; -import org.polypheny.db.schema.ModelTrait; import org.polypheny.db.transaction.Transaction; import org.polypheny.db.type.PolyType; @@ -1820,7 +1804,7 @@ protected final boolean isValidIdentifier( final String str ) { * @param collectionId The id of the graph * @return The requested collection */ - public abstract CatalogCollection getCollection( long collectionId ); + public abstract LogicalCollection getCollection( long collectionId ); /** * Get a collection of collections which match the given naming pattern. @@ -1829,7 +1813,7 @@ protected final boolean isValidIdentifier( final String str ) { * @param namePattern The naming pattern of the collection itself, null if all are matched * @return collection of collections matching conditions */ - public abstract List getCollections( long namespaceId, Pattern namePattern ); + public abstract List getCollections( long namespaceId, Pattern namePattern ); /** * Add a new collection with the given parameters. @@ -1924,458 +1908,4 @@ protected final boolean isValidIdentifier( final String str ) { public abstract void clear(); - public enum EntityType { - ENTITY( 1 ), - SOURCE( 2 ), - VIEW( 3 ), - MATERIALIZED_VIEW( 4 ); - // STREAM, ... 
- - private final int id; - - - EntityType( int id ) { - this.id = id; - } - - - public int getId() { - return id; - } - - - public static EntityType getById( final int id ) { - for ( EntityType t : values() ) { - if ( t.id == id ) { - return t; - } - } - throw new UnknownTableTypeRuntimeException( id ); - } - - - public static EntityType getByName( final String name ) throws UnknownTableTypeException { - for ( EntityType t : values() ) { - if ( t.name().equalsIgnoreCase( name ) ) { - return t; - } - } - throw new UnknownTableTypeException( name ); - } - - - // Used for creating ResultSets - public Object[] getParameterArray() { - return new Object[]{ name() }; - } - - - // Required for building JDBC result set - @RequiredArgsConstructor - public static class PrimitiveTableType { - - public final String tableType; - - } - } - - - public enum NamespaceType { - @SerializedName("relational") - RELATIONAL( 1 ), - @SerializedName("document") - DOCUMENT( 2 ), - @SerializedName("graph") - GRAPH( 3 ); - - // GRAPH, DOCUMENT, ... - - private final int id; - - - NamespaceType( int id ) { - this.id = id; - } - - - public int getId() { - return id; - } - - - public static NamespaceType getDefault() { - //return (NamespaceType) ConfigManager.getInstance().getConfig( "runtime/defaultSchemaModel" ).getEnum(); - return NamespaceType.RELATIONAL; - } - - - public static NamespaceType getById( final int id ) throws UnknownSchemaTypeException { - for ( NamespaceType t : values() ) { - if ( t.id == id ) { - return t; - } - } - throw new UnknownSchemaTypeRuntimeException( id ); - } - - - public static NamespaceType getByName( final String name ) throws UnknownSchemaTypeException { - for ( NamespaceType t : values() ) { - if ( t.name().equalsIgnoreCase( name ) ) { - return t; - } - } - throw new UnknownSchemaTypeException( name ); - } - - - public AlgTrait getModelTrait() { - if ( this == NamespaceType.RELATIONAL ) { - return ModelTrait.RELATIONAL; - } else if ( this == NamespaceType.DOCUMENT ) { - return ModelTrait.DOCUMENT; - } else if ( this == NamespaceType.GRAPH ) { - return ModelTrait.GRAPH; - } - throw new RuntimeException( "Not found a suitable NamespaceType." 
); - } - } - - - public enum Collation { - CASE_SENSITIVE( 1 ), - CASE_INSENSITIVE( 2 ); - - private final int id; - - - Collation( int id ) { - this.id = id; - } - - - public int getId() { - return id; - } - - - public static Collation getById( int id ) { - for ( Collation c : values() ) { - if ( c.id == id ) { - return c; - } - } - throw new UnknownCollationIdRuntimeException( id ); - } - - - public static Collation parse( @NonNull String str ) throws UnknownCollationException { - if ( str.equalsIgnoreCase( "CASE SENSITIVE" ) ) { - return Collation.CASE_SENSITIVE; - } else if ( str.equalsIgnoreCase( "CASE INSENSITIVE" ) ) { - return Collation.CASE_INSENSITIVE; - } - throw new UnknownCollationException( str ); - } - - - public static Collation getDefaultCollation() { - return getById( RuntimeConfig.DEFAULT_COLLATION.getInteger() ); - } - } - - - public enum IndexType { - MANUAL( 1 ), - AUTOMATIC( 2 ); - - private final int id; - - - IndexType( int id ) { - this.id = id; - } - - - public int getId() { - return id; - } - - - public static Catalog.IndexType getById( int id ) { - for ( Catalog.IndexType e : values() ) { - if ( e.id == id ) { - return e; - } - } - throw new UnknownIndexTypeRuntimeException( id ); - } - - - public static Catalog.IndexType parse( @NonNull String str ) throws UnknownIndexTypeException { - if ( str.equalsIgnoreCase( "MANUAL" ) ) { - return Catalog.IndexType.MANUAL; - } else if ( str.equalsIgnoreCase( "AUTOMATIC" ) ) { - return Catalog.IndexType.AUTOMATIC; - } - throw new UnknownIndexTypeException( str ); - } - } - - - public enum ConstraintType { - UNIQUE( 1 ), - PRIMARY( 2 ); - - private final int id; - - - ConstraintType( int id ) { - this.id = id; - } - - - public int getId() { - return id; - } - - - public static ConstraintType getById( int id ) { - for ( ConstraintType e : values() ) { - if ( e.id == id ) { - return e; - } - } - throw new UnknownConstraintTypeRuntimeException( id ); - } - - - public static ConstraintType parse( @NonNull String str ) throws UnknownConstraintTypeException { - if ( str.equalsIgnoreCase( "UNIQUE" ) ) { - return ConstraintType.UNIQUE; - } - throw new UnknownConstraintTypeException( str ); - } - } - - - public enum ForeignKeyOption { - NONE( -1 ), - // IDs according to JDBC standard - //CASCADE( 0 ), - RESTRICT( 1 ); - //SET_NULL( 2 ), - //SET_DEFAULT( 4 ); - - private final int id; - - - ForeignKeyOption( int id ) { - this.id = id; - } - - - public int getId() { - return id; - } - - - public static ForeignKeyOption getById( int id ) { - for ( ForeignKeyOption e : values() ) { - if ( e.id == id ) { - return e; - } - } - throw new UnknownForeignKeyOptionRuntimeException( id ); - } - - - public static ForeignKeyOption parse( @NonNull String str ) throws UnknownForeignKeyOptionException { - if ( str.equalsIgnoreCase( "NONE" ) ) { - return ForeignKeyOption.NONE; - } else if ( str.equalsIgnoreCase( "RESTRICT" ) ) { - return ForeignKeyOption.RESTRICT; - } /*else if ( str.equalsIgnoreCase( "CASCADE" ) ) { - return ForeignKeyOption.CASCADE; - } else if ( str.equalsIgnoreCase( "SET NULL" ) ) { - return ForeignKeyOption.SET_NULL; - } else if ( str.equalsIgnoreCase( "SET DEFAULT" ) ) { - return ForeignKeyOption.SET_DEFAULT; - }*/ - throw new UnknownForeignKeyOptionException( str ); - } - } - - - public enum PlacementType { - MANUAL( 1 ), - AUTOMATIC( 2 ), - STATIC( 3 ); - - private final int id; - - - PlacementType( int id ) { - this.id = id; - } - - - public int getId() { - return id; - } - - - public static PlacementType getById( int id ) { 
- for ( PlacementType e : values() ) { - if ( e.id == id ) { - return e; - } - } - throw new UnknownPlacementTypeRuntimeException( id ); - } - - - public static PlacementType parse( @NonNull String str ) throws UnknownPlacementTypeException { - if ( str.equalsIgnoreCase( "MANUAL" ) ) { - return PlacementType.MANUAL; - } else if ( str.equalsIgnoreCase( "AUTOMATIC" ) ) { - return PlacementType.AUTOMATIC; - } - throw new UnknownPlacementTypeException( str ); - } - - - } - - - public enum PartitionType { - NONE( 0 ), - RANGE( 1 ), - LIST( 2 ), - HASH( 3 ), - //TODO @HENNLO think about excluding "UDPF" here, these should only be used for internal Partition Functions - TEMPERATURE( 4 ); - - private final int id; - - - PartitionType( int id ) { - this.id = id; - } - - - public int getId() { - return id; - } - - - public static PartitionType getById( final int id ) { - for ( PartitionType t : values() ) { - if ( t.id == id ) { - return t; - } - } - throw new UnknownPartitionTypeRuntimeException( id ); - } - - - public static PartitionType getByName( final String name ) throws UnknownPartitionTypeException { - for ( PartitionType t : values() ) { - if ( t.name().equalsIgnoreCase( name ) ) { - return t; - } - } - throw new UnknownPartitionTypeException( name ); - } - - } - - - public enum DataPlacementRole { - UPTODATE( 0 ), - REFRESHABLE( 1 ); - - private final int id; - - - DataPlacementRole( int id ) { - this.id = id; - } - - - public int getId() { - return id; - } - - - public static DataPlacementRole getById( final int id ) { - for ( DataPlacementRole t : values() ) { - if ( t.id == id ) { - return t; - } - } - throw new UnknownPlacementRoleRuntimeException( id ); - } - - - public static DataPlacementRole getByName( final String name ) throws UnknownPlacementRoleException { - for ( DataPlacementRole t : values() ) { - if ( t.name().equalsIgnoreCase( name ) ) { - return t; - } - } - throw new UnknownPlacementRoleException( name ); - } - - } - - - public static class Pattern { - - public final String pattern; - public final boolean containsWildcards; - - - public Pattern( String pattern ) { - this.pattern = pattern; - containsWildcards = pattern.contains( "%" ) || pattern.contains( "_" ); - } - - - public Pattern toLowerCase() { - return new Pattern( pattern.toLowerCase() ); - } - - - public static Pattern of( String pattern ) { - return new Pattern( pattern ); - } - - - public String toRegex() { - return pattern.replace( "_", "(.)" ).replace( "%", "(.*)" ); - } - - - @Override - public String toString() { - return "Pattern[" + pattern + "]"; - } - - } - - - /* - * Helpers - */ - - - public static List convertTableTypeList( @NonNull final List stringTypeList ) throws UnknownTableTypeException { - final List typeList = new ArrayList<>( stringTypeList.size() ); - for ( String s : stringTypeList ) { - typeList.add( EntityType.getByName( s ) ); - } - return typeList; - } - } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogAdapter.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogAdapter.java index 5e0967ffdd..ff4ac29667 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogAdapter.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogAdapter.java @@ -25,7 +25,7 @@ import lombok.NonNull; import org.polypheny.db.adapter.Adapter.AdapterProperties; import org.polypheny.db.catalog.Adapter; -import org.polypheny.db.catalog.Catalog.NamespaceType; +import org.polypheny.db.catalog.logistic.NamespaceType; @EqualsAndHashCode public class 
CatalogAdapter implements CatalogObject { diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogColumn.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogColumn.java index a753212f08..a34d9666be 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogColumn.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogColumn.java @@ -24,8 +24,8 @@ import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.Catalog.Collation; -import org.polypheny.db.catalog.Catalog.NamespaceType; +import org.polypheny.db.catalog.logistic.Collation; +import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.type.PolyType; diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogColumnPlacement.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogColumnPlacement.java index bb1af8813e..92dfc5de30 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogColumnPlacement.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogColumnPlacement.java @@ -21,7 +21,7 @@ import lombok.NonNull; import lombok.SneakyThrows; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.Catalog.PlacementType; +import org.polypheny.db.catalog.logistic.PlacementType; @EqualsAndHashCode diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogConstraint.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogConstraint.java index fb0d8b3ed3..52a2b75f4b 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogConstraint.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogConstraint.java @@ -20,7 +20,7 @@ import java.io.Serializable; import lombok.EqualsAndHashCode; import lombok.NonNull; -import org.polypheny.db.catalog.Catalog.ConstraintType; +import org.polypheny.db.catalog.logistic.ConstraintType; @EqualsAndHashCode diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogDataPlacement.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogDataPlacement.java index 4852d85095..08bea250f4 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogDataPlacement.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogDataPlacement.java @@ -29,8 +29,8 @@ import lombok.Setter; import lombok.SneakyThrows; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.Catalog.DataPlacementRole; -import org.polypheny.db.catalog.Catalog.PlacementType; +import org.polypheny.db.catalog.logistic.DataPlacementRole; +import org.polypheny.db.catalog.logistic.PlacementType; /** diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogEntity.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogEntity.java index c52a5baa98..8e8aff2a5c 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogEntity.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogEntity.java @@ -18,16 +18,20 @@ import java.io.Serializable; import java.util.List; +import lombok.experimental.SuperBuilder; import org.polypheny.db.StatisticsManager; +import org.polypheny.db.algebra.AlgDistribution; import org.polypheny.db.algebra.type.AlgDataType; -import org.polypheny.db.catalog.Catalog.EntityType; -import org.polypheny.db.catalog.Catalog.NamespaceType; -import org.polypheny.db.catalog.entity.logical.Logical; +import 
org.polypheny.db.catalog.logistic.EntityType; +import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.catalog.refactor.CatalogType; +import org.polypheny.db.catalog.refactor.Expressible; import org.polypheny.db.plan.AlgMultipleTrait; import org.polypheny.db.schema.Wrapper; +import org.polypheny.db.util.ImmutableBitSet; -public abstract class CatalogEntity implements CatalogObject, Wrapper, Serializable, CatalogType, Logical { +@SuperBuilder(toBuilder = true) +public abstract class CatalogEntity implements CatalogObject, Wrapper, Serializable, CatalogType, Expressible { public final long id; public final EntityType entityType; @@ -73,4 +77,14 @@ public List getCollations() { return null; } + + public Boolean isKey( ImmutableBitSet columns ) { + return null; + } + + + public AlgDistribution getDistribution() { + return null; + } + } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogForeignKey.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogForeignKey.java index 0269e2ae8e..037a5e743c 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogForeignKey.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogForeignKey.java @@ -26,7 +26,7 @@ import lombok.RequiredArgsConstructor; import lombok.SneakyThrows; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.Catalog.ForeignKeyOption; +import org.polypheny.db.catalog.logistic.ForeignKeyOption; @EqualsAndHashCode(callSuper = true) diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogIndex.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogIndex.java index 30a963e040..081d51c381 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogIndex.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogIndex.java @@ -25,7 +25,7 @@ import lombok.NonNull; import lombok.RequiredArgsConstructor; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.Catalog.IndexType; +import org.polypheny.db.catalog.logistic.IndexType; @EqualsAndHashCode(callSuper = false) diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogMaterializedView.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogMaterializedView.java index 6fb5d507f9..dc262bd092 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogMaterializedView.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogMaterializedView.java @@ -23,7 +23,7 @@ import org.polypheny.db.algebra.AlgCollation; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.Catalog.EntityType; +import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.languages.QueryLanguage; import org.polypheny.db.partition.properties.PartitionProperty; diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogNamespace.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogNamespace.java index 29a313a10d..fbd6d35aa6 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogNamespace.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogNamespace.java @@ -17,7 +17,7 @@ package org.polypheny.db.catalog.entity; import java.io.Serializable; -import org.polypheny.db.catalog.Catalog.NamespaceType; +import org.polypheny.db.catalog.logistic.NamespaceType; public abstract class CatalogNamespace implements CatalogObject, 
Serializable { diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogPartitionPlacement.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogPartitionPlacement.java index 37bb908fb5..90ca316826 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogPartitionPlacement.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogPartitionPlacement.java @@ -19,8 +19,8 @@ import java.io.Serializable; import lombok.NonNull; -import org.polypheny.db.catalog.Catalog.DataPlacementRole; -import org.polypheny.db.catalog.Catalog.PlacementType; +import org.polypheny.db.catalog.logistic.DataPlacementRole; +import org.polypheny.db.catalog.logistic.PlacementType; /** diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogSchema.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogSchema.java index 35b1a431ca..a243204182 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogSchema.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogSchema.java @@ -24,7 +24,7 @@ import lombok.RequiredArgsConstructor; import lombok.SneakyThrows; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.Catalog.NamespaceType; +import org.polypheny.db.catalog.logistic.NamespaceType; @EqualsAndHashCode(callSuper = false) @@ -51,7 +51,7 @@ public CatalogSchema( final long databaseId, final int ownerId, @NonNull final String ownerName, - @NonNull final Catalog.NamespaceType namespaceType, + @NonNull final NamespaceType namespaceType, boolean caseSensitive ) { super( id, namespaceType ); this.id = id; diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogView.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogView.java index ba91903f88..32a5fe5fe4 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogView.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogView.java @@ -27,7 +27,7 @@ import org.polypheny.db.algebra.SingleAlg; import org.polypheny.db.algebra.logical.relational.LogicalRelViewScan; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.Catalog.EntityType; +import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.languages.QueryLanguage; import org.polypheny.db.partition.properties.PartitionProperty; diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogCollection.java b/core/src/main/java/org/polypheny/db/catalog/entity/LogicalCollection.java similarity index 77% rename from core/src/main/java/org/polypheny/db/catalog/entity/CatalogCollection.java rename to core/src/main/java/org/polypheny/db/catalog/entity/LogicalCollection.java index 3fe27fc346..5ea51014ec 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogCollection.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/LogicalCollection.java @@ -26,11 +26,12 @@ import lombok.NonNull; import lombok.SneakyThrows; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.Catalog.EntityType; -import org.polypheny.db.catalog.Catalog.NamespaceType; +import org.polypheny.db.catalog.logistic.EntityType; +import org.polypheny.db.catalog.logistic.NamespaceType; +import org.polypheny.db.catalog.entity.logical.Logical; -public class CatalogCollection extends CatalogEntity implements CatalogObject { +public class LogicalCollection extends CatalogEntity implements CatalogObject, Logical { private static final long 
serialVersionUID = -6490762948368178584L; @@ -44,7 +45,7 @@ public class CatalogCollection extends CatalogEntity implements CatalogObject { public final String physicalName; - public CatalogCollection( long databaseId, long namespaceId, long id, String name, @NonNull Collection placements, EntityType type, String physicalName ) { + public LogicalCollection( long databaseId, long namespaceId, long id, String name, @NonNull Collection placements, EntityType type, String physicalName ) { super( id, name, EntityType.ENTITY, NamespaceType.DOCUMENT ); this.id = id; this.databaseId = databaseId; @@ -62,16 +63,16 @@ public Serializable[] getParameterArray() { } - public CatalogCollection addPlacement( int adapterId ) { + public LogicalCollection addPlacement( int adapterId ) { List placements = new ArrayList<>( this.placements ); placements.add( adapterId ); - return new CatalogCollection( databaseId, namespaceId, id, name, placements, EntityType.ENTITY, physicalName ); + return new LogicalCollection( databaseId, namespaceId, id, name, placements, EntityType.ENTITY, physicalName ); } - public CatalogCollection removePlacement( int adapterId ) { + public LogicalCollection removePlacement( int adapterId ) { List placements = this.placements.stream().filter( id -> id != adapterId ).collect( Collectors.toList() ); - return new CatalogCollection( databaseId, namespaceId, id, name, placements, EntityType.ENTITY, physicalName ); + return new LogicalCollection( databaseId, namespaceId, id, name, placements, EntityType.ENTITY, physicalName ); } @@ -81,8 +82,8 @@ public String getNamespaceName() { } - public CatalogCollection setPhysicalName( String physicalCollectionName ) { - return new CatalogCollection( databaseId, namespaceId, id, name, placements, entityType, physicalCollectionName ); + public LogicalCollection setPhysicalName( String physicalCollectionName ) { + return new LogicalCollection( databaseId, namespaceId, id, name, placements, entityType, physicalCollectionName ); } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationCollection.java b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationCollection.java new file mode 100644 index 0000000000..a64cd4c3a9 --- /dev/null +++ b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationCollection.java @@ -0,0 +1,36 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.db.catalog.entity.allocation; + +import java.io.Serializable; +import org.polypheny.db.catalog.logistic.EntityType; +import org.polypheny.db.catalog.logistic.NamespaceType; +import org.polypheny.db.catalog.entity.CatalogEntity; + +public class AllocationCollection extends CatalogEntity implements Allocation { + + protected AllocationCollection( long id, String name, EntityType type, NamespaceType namespaceType ) { + super( id, name, type, namespaceType ); + } + + + @Override + public Serializable[] getParameterArray() { + return new Serializable[0]; + } + +} diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationGraph.java b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationGraph.java index d125dd4cd6..3a46062fd6 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationGraph.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationGraph.java @@ -39,4 +39,6 @@ public Serializable[] getParameterArray() { return new Serializable[0]; } + + } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalGraph.java b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalGraph.java index 4447386e1c..bd6ec67436 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalGraph.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalGraph.java @@ -26,14 +26,14 @@ import lombok.EqualsAndHashCode; import lombok.NonNull; import lombok.experimental.SuperBuilder; -import org.polypheny.db.catalog.Catalog.EntityType; -import org.polypheny.db.catalog.Catalog.NamespaceType; +import org.polypheny.db.catalog.logistic.EntityType; +import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.catalog.entity.CatalogObject; @SuperBuilder(toBuilder = true) @EqualsAndHashCode(callSuper = false) -public class LogicalGraph extends CatalogEntity implements CatalogObject, Comparable, Logical { +public class LogicalGraph extends CatalogEntity implements Comparable, Logical { private static final long serialVersionUID = 7343856827901459672L; diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalTable.java b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalTable.java index 8f3cf7ccae..a6c37e4b75 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalTable.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalTable.java @@ -26,16 +26,17 @@ import lombok.NonNull; import lombok.RequiredArgsConstructor; import lombok.SneakyThrows; +import org.apache.calcite.linq4j.tree.Expression; +import org.apache.calcite.linq4j.tree.Expressions; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.Catalog.EntityType; -import org.polypheny.db.catalog.Catalog.NamespaceType; import org.polypheny.db.catalog.entity.CatalogEntity; -import org.polypheny.db.catalog.entity.CatalogObject; +import org.polypheny.db.catalog.logistic.EntityType; +import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.partition.properties.PartitionProperty; @EqualsAndHashCode(callSuper = false) -public class LogicalTable extends CatalogEntity implements CatalogObject, Comparable, Logical { +public class LogicalTable extends CatalogEntity implements Comparable, Logical { private static final long serialVersionUID = 4653390333258552102L; @@ -65,7 +66,7 @@ public 
LogicalTable( final long namespaceId, final long databaseId, final int ownerId, - @NonNull final Catalog.EntityType type, + @NonNull final EntityType type, final Long primaryKey, @NonNull final ImmutableList dataPlacements, boolean modifiable, @@ -99,7 +100,7 @@ public LogicalTable( final long namespaceId, final long databaseId, final int ownerId, - @NonNull final Catalog.EntityType type, + @NonNull final EntityType type, final Long primaryKey, @NonNull final ImmutableList dataPlacements, boolean modifiable, @@ -263,6 +264,12 @@ public LogicalTable getTableWithColumns( ImmutableList newColumnIds ) { } + @Override + public Expression asExpression() { + return Expressions.call( Expressions.call( Catalog.class, "getInstance" ), "getTable", Expressions.constant( id ) ); + } + + @RequiredArgsConstructor public static class PrimitiveCatalogTable { diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/physical/Physical.java b/core/src/main/java/org/polypheny/db/catalog/entity/physical/Physical.java index 8761342ba2..64b8348c1a 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/physical/Physical.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/physical/Physical.java @@ -16,7 +16,11 @@ package org.polypheny.db.catalog.entity.physical; -public interface Physical { +import org.polypheny.db.catalog.refactor.CatalogType; +public interface Physical extends CatalogType { + default State getCatalogType() { + return State.PHYSICAL; + } } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalCollection.java b/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalCollection.java new file mode 100644 index 0000000000..1cfbc7cf9f --- /dev/null +++ b/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalCollection.java @@ -0,0 +1,36 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.db.catalog.entity.physical; + +import java.io.Serializable; +import org.polypheny.db.catalog.logistic.EntityType; +import org.polypheny.db.catalog.logistic.NamespaceType; +import org.polypheny.db.catalog.entity.CatalogEntity; + +public class PhysicalCollection extends CatalogEntity implements Physical { + + protected PhysicalCollection( long id, String name, EntityType type, NamespaceType namespaceType ) { + super( id, name, type, namespaceType ); + } + + + @Override + public Serializable[] getParameterArray() { + return new Serializable[0]; + } + +} diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalGraph.java b/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalGraph.java index e681c82beb..e31c5d0e4b 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalGraph.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalGraph.java @@ -17,8 +17,8 @@ package org.polypheny.db.catalog.entity.physical; import java.io.Serializable; -import org.polypheny.db.catalog.Catalog.EntityType; -import org.polypheny.db.catalog.Catalog.NamespaceType; +import org.polypheny.db.catalog.logistic.EntityType; +import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.catalog.entity.CatalogEntity; public class PhysicalGraph extends CatalogEntity implements Physical { diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalTable.java b/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalTable.java index 36c9625519..32a15664dd 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalTable.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalTable.java @@ -25,8 +25,8 @@ import org.polypheny.db.algebra.type.AlgDataTypeSystem; import org.polypheny.db.algebra.type.AlgProtoDataType; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.Catalog.EntityType; -import org.polypheny.db.catalog.Catalog.NamespaceType; +import org.polypheny.db.catalog.logistic.EntityType; +import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.catalog.entity.CatalogColumn; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; import org.polypheny.db.catalog.entity.CatalogEntity; diff --git a/core/src/main/java/org/polypheny/db/catalog/logistic/Collation.java b/core/src/main/java/org/polypheny/db/catalog/logistic/Collation.java new file mode 100644 index 0000000000..50cafcf52d --- /dev/null +++ b/core/src/main/java/org/polypheny/db/catalog/logistic/Collation.java @@ -0,0 +1,64 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.db.catalog.logistic; + +import lombok.NonNull; +import org.polypheny.db.catalog.exceptions.UnknownCollationException; +import org.polypheny.db.catalog.exceptions.UnknownCollationIdRuntimeException; +import org.polypheny.db.config.RuntimeConfig; + +public enum Collation { + CASE_SENSITIVE( 1 ), + CASE_INSENSITIVE( 2 ); + + private final int id; + + + Collation( int id ) { + this.id = id; + } + + + public int getId() { + return id; + } + + + public static Collation getById( int id ) { + for ( Collation c : values() ) { + if ( c.id == id ) { + return c; + } + } + throw new UnknownCollationIdRuntimeException( id ); + } + + + public static Collation parse( @NonNull String str ) throws UnknownCollationException { + if ( str.equalsIgnoreCase( "CASE SENSITIVE" ) ) { + return Collation.CASE_SENSITIVE; + } else if ( str.equalsIgnoreCase( "CASE INSENSITIVE" ) ) { + return Collation.CASE_INSENSITIVE; + } + throw new UnknownCollationException( str ); + } + + + public static Collation getDefaultCollation() { + return getById( RuntimeConfig.DEFAULT_COLLATION.getInteger() ); + } +} diff --git a/core/src/main/java/org/polypheny/db/catalog/logistic/ConstraintType.java b/core/src/main/java/org/polypheny/db/catalog/logistic/ConstraintType.java new file mode 100644 index 0000000000..fab48f8147 --- /dev/null +++ b/core/src/main/java/org/polypheny/db/catalog/logistic/ConstraintType.java @@ -0,0 +1,56 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.catalog.logistic; + +import lombok.NonNull; +import org.polypheny.db.catalog.exceptions.UnknownConstraintTypeException; +import org.polypheny.db.catalog.exceptions.UnknownConstraintTypeRuntimeException; + +public enum ConstraintType { + UNIQUE( 1 ), + PRIMARY( 2 ); + + private final int id; + + + ConstraintType( int id ) { + this.id = id; + } + + + public int getId() { + return id; + } + + + public static ConstraintType getById( int id ) { + for ( ConstraintType e : values() ) { + if ( e.id == id ) { + return e; + } + } + throw new UnknownConstraintTypeRuntimeException( id ); + } + + + public static ConstraintType parse( @NonNull String str ) throws UnknownConstraintTypeException { + if ( str.equalsIgnoreCase( "UNIQUE" ) ) { + return ConstraintType.UNIQUE; + } + throw new UnknownConstraintTypeException( str ); + } +} diff --git a/core/src/main/java/org/polypheny/db/catalog/logistic/DataPlacementRole.java b/core/src/main/java/org/polypheny/db/catalog/logistic/DataPlacementRole.java new file mode 100644 index 0000000000..5a3e1ce675 --- /dev/null +++ b/core/src/main/java/org/polypheny/db/catalog/logistic/DataPlacementRole.java @@ -0,0 +1,58 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.catalog.logistic; + +import org.polypheny.db.catalog.exceptions.UnknownPlacementRoleException; +import org.polypheny.db.catalog.exceptions.UnknownPlacementRoleRuntimeException; + +public enum DataPlacementRole { + UPTODATE( 0 ), + REFRESHABLE( 1 ); + + private final int id; + + + DataPlacementRole( int id ) { + this.id = id; + } + + + public int getId() { + return id; + } + + + public static DataPlacementRole getById( final int id ) { + for ( DataPlacementRole t : values() ) { + if ( t.id == id ) { + return t; + } + } + throw new UnknownPlacementRoleRuntimeException( id ); + } + + + public static DataPlacementRole getByName( final String name ) throws UnknownPlacementRoleException { + for ( DataPlacementRole t : values() ) { + if ( t.name().equalsIgnoreCase( name ) ) { + return t; + } + } + throw new UnknownPlacementRoleException( name ); + } + +} diff --git a/core/src/main/java/org/polypheny/db/catalog/logistic/EntityType.java b/core/src/main/java/org/polypheny/db/catalog/logistic/EntityType.java new file mode 100644 index 0000000000..4273d95793 --- /dev/null +++ b/core/src/main/java/org/polypheny/db/catalog/logistic/EntityType.java @@ -0,0 +1,76 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.catalog.logistic; + +import lombok.RequiredArgsConstructor; +import org.polypheny.db.catalog.exceptions.UnknownTableTypeException; +import org.polypheny.db.catalog.exceptions.UnknownTableTypeRuntimeException; + +public enum EntityType { + ENTITY( 1 ), + SOURCE( 2 ), + VIEW( 3 ), + MATERIALIZED_VIEW( 4 ); + // STREAM, ... 
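+ // The explicit numeric id keeps persisted catalog entries stable even if the constants are reordered; getById( int ) performs the reverse lookup.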
+ + private final int id; + + + EntityType( int id ) { + this.id = id; + } + + + public int getId() { + return id; + } + + + public static EntityType getById( final int id ) { + for ( EntityType t : values() ) { + if ( t.id == id ) { + return t; + } + } + throw new UnknownTableTypeRuntimeException( id ); + } + + + public static EntityType getByName( final String name ) throws UnknownTableTypeException { + for ( EntityType t : values() ) { + if ( t.name().equalsIgnoreCase( name ) ) { + return t; + } + } + throw new UnknownTableTypeException( name ); + } + + + // Used for creating ResultSets + public Object[] getParameterArray() { + return new Object[]{ name() }; + } + + + // Required for building JDBC result set + @RequiredArgsConstructor + public static class PrimitiveTableType { + + public final String tableType; + + } +} diff --git a/core/src/main/java/org/polypheny/db/catalog/logistic/ForeignKeyOption.java b/core/src/main/java/org/polypheny/db/catalog/logistic/ForeignKeyOption.java new file mode 100644 index 0000000000..eab7c4be4a --- /dev/null +++ b/core/src/main/java/org/polypheny/db/catalog/logistic/ForeignKeyOption.java @@ -0,0 +1,68 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.catalog.logistic; + +import lombok.NonNull; +import org.polypheny.db.catalog.exceptions.UnknownForeignKeyOptionException; +import org.polypheny.db.catalog.exceptions.UnknownForeignKeyOptionRuntimeException; + +public enum ForeignKeyOption { + NONE( -1 ), + // IDs according to JDBC standard + //CASCADE( 0 ), + RESTRICT( 1 ); + //SET_NULL( 2 ), + //SET_DEFAULT( 4 ); + + private final int id; + + + ForeignKeyOption( int id ) { + this.id = id; + } + + + public int getId() { + return id; + } + + + public static ForeignKeyOption getById( int id ) { + for ( ForeignKeyOption e : values() ) { + if ( e.id == id ) { + return e; + } + } + throw new UnknownForeignKeyOptionRuntimeException( id ); + } + + + public static ForeignKeyOption parse( @NonNull String str ) throws UnknownForeignKeyOptionException { + if ( str.equalsIgnoreCase( "NONE" ) ) { + return ForeignKeyOption.NONE; + } else if ( str.equalsIgnoreCase( "RESTRICT" ) ) { + return ForeignKeyOption.RESTRICT; + } /*else if ( str.equalsIgnoreCase( "CASCADE" ) ) { + return ForeignKeyOption.CASCADE; + } else if ( str.equalsIgnoreCase( "SET NULL" ) ) { + return ForeignKeyOption.SET_NULL; + } else if ( str.equalsIgnoreCase( "SET DEFAULT" ) ) { + return ForeignKeyOption.SET_DEFAULT; + }*/ + throw new UnknownForeignKeyOptionException( str ); + } +} diff --git a/core/src/main/java/org/polypheny/db/catalog/logistic/IndexType.java b/core/src/main/java/org/polypheny/db/catalog/logistic/IndexType.java new file mode 100644 index 0000000000..d54f952467 --- /dev/null +++ b/core/src/main/java/org/polypheny/db/catalog/logistic/IndexType.java @@ -0,0 +1,58 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you 
may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.catalog.logistic; + +import lombok.NonNull; +import org.polypheny.db.catalog.exceptions.UnknownIndexTypeException; +import org.polypheny.db.catalog.exceptions.UnknownIndexTypeRuntimeException; + +public enum IndexType { + MANUAL( 1 ), + AUTOMATIC( 2 ); + + private final int id; + + + IndexType( int id ) { + this.id = id; + } + + + public int getId() { + return id; + } + + + public static IndexType getById( int id ) { + for ( IndexType e : values() ) { + if ( e.id == id ) { + return e; + } + } + throw new UnknownIndexTypeRuntimeException( id ); + } + + + public static IndexType parse( @NonNull String str ) throws UnknownIndexTypeException { + if ( str.equalsIgnoreCase( "MANUAL" ) ) { + return IndexType.MANUAL; + } else if ( str.equalsIgnoreCase( "AUTOMATIC" ) ) { + return IndexType.AUTOMATIC; + } + throw new UnknownIndexTypeException( str ); + } +} diff --git a/core/src/main/java/org/polypheny/db/catalog/NameGenerator.java b/core/src/main/java/org/polypheny/db/catalog/logistic/NameGenerator.java similarity index 94% rename from core/src/main/java/org/polypheny/db/catalog/NameGenerator.java rename to core/src/main/java/org/polypheny/db/catalog/logistic/NameGenerator.java index cd86220250..9aa1fc2981 100644 --- a/core/src/main/java/org/polypheny/db/catalog/NameGenerator.java +++ b/core/src/main/java/org/polypheny/db/catalog/logistic/NameGenerator.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2020 The Polypheny Project + * Copyright 2019-2023 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -14,7 +14,7 @@ * limitations under the License. */ -package org.polypheny.db.catalog; +package org.polypheny.db.catalog.logistic; import java.util.concurrent.atomic.AtomicInteger; diff --git a/core/src/main/java/org/polypheny/db/catalog/logistic/NamespaceType.java b/core/src/main/java/org/polypheny/db/catalog/logistic/NamespaceType.java new file mode 100644 index 0000000000..6c4500bb1d --- /dev/null +++ b/core/src/main/java/org/polypheny/db/catalog/logistic/NamespaceType.java @@ -0,0 +1,84 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.db.catalog.logistic; + +import com.google.gson.annotations.SerializedName; +import org.polypheny.db.catalog.exceptions.UnknownSchemaTypeException; +import org.polypheny.db.catalog.exceptions.UnknownSchemaTypeRuntimeException; +import org.polypheny.db.plan.AlgTrait; +import org.polypheny.db.schema.ModelTrait; + +public enum NamespaceType { + @SerializedName("relational") + RELATIONAL( 1 ), + @SerializedName("document") + DOCUMENT( 2 ), + @SerializedName("graph") + GRAPH( 3 ); + + private final int id; + + + NamespaceType( int id ) { + this.id = id; + } + + + public int getId() { + return id; + } + + + public static NamespaceType getDefault() { + //return (NamespaceType) ConfigManager.getInstance().getConfig( "runtime/defaultSchemaModel" ).getEnum(); + return NamespaceType.RELATIONAL; + } + + + public static NamespaceType getById( final int id ) { + for ( NamespaceType t : values() ) { + if ( t.id == id ) { + return t; + } + } + throw new UnknownSchemaTypeRuntimeException( id ); + } + + + public static NamespaceType getByName( final String name ) throws UnknownSchemaTypeException { + for ( NamespaceType t : values() ) { + if ( t.name().equalsIgnoreCase( name ) ) { + return t; + } + } + throw new UnknownSchemaTypeException( name ); + } + + + public AlgTrait getModelTrait() { + if ( this == NamespaceType.RELATIONAL ) { + return ModelTrait.RELATIONAL; + } else if ( this == NamespaceType.DOCUMENT ) { + return ModelTrait.DOCUMENT; + } else if ( this == NamespaceType.GRAPH ) { + return ModelTrait.GRAPH; + } + throw new RuntimeException( "No suitable ModelTrait found for this NamespaceType." ); + } +} diff --git a/core/src/main/java/org/polypheny/db/catalog/logistic/PartitionType.java b/core/src/main/java/org/polypheny/db/catalog/logistic/PartitionType.java new file mode 100644 index 0000000000..d49c676e7a --- /dev/null +++ b/core/src/main/java/org/polypheny/db/catalog/logistic/PartitionType.java @@ -0,0 +1,62 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ + +package org.polypheny.db.catalog.logistic; + +import org.polypheny.db.catalog.exceptions.UnknownPartitionTypeException; +import org.polypheny.db.catalog.exceptions.UnknownPartitionTypeRuntimeException; + +public enum PartitionType { + NONE( 0 ), + RANGE( 1 ), + LIST( 2 ), + HASH( 3 ), + //TODO @HENNLO think about excluding "UDPF" here, these should only be used for internal Partition Functions + TEMPERATURE( 4 ); + + private final int id; + + + PartitionType( int id ) { + this.id = id; + } + + + public int getId() { + return id; + } + + + public static PartitionType getById( final int id ) { + for ( PartitionType t : values() ) { + if ( t.id == id ) { + return t; + } + } + throw new UnknownPartitionTypeRuntimeException( id ); + } + + + public static PartitionType getByName( final String name ) throws UnknownPartitionTypeException { + for ( PartitionType t : values() ) { + if ( t.name().equalsIgnoreCase( name ) ) { + return t; + } + } + throw new UnknownPartitionTypeException( name ); + } + +} diff --git a/core/src/main/java/org/polypheny/db/catalog/logistic/Pattern.java b/core/src/main/java/org/polypheny/db/catalog/logistic/Pattern.java new file mode 100644 index 0000000000..ccd3678f7a --- /dev/null +++ b/core/src/main/java/org/polypheny/db/catalog/logistic/Pattern.java @@ -0,0 +1,51 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.catalog.logistic; + +public class Pattern { + + public final String pattern; + public final boolean containsWildcards; + + + public Pattern( String pattern ) { + this.pattern = pattern; + containsWildcards = pattern.contains( "%" ) || pattern.contains( "_" ); + } + + + public Pattern toLowerCase() { + return new Pattern( pattern.toLowerCase() ); + } + + + public static Pattern of( String pattern ) { + return new Pattern( pattern ); + } + + + public String toRegex() { + return pattern.replace( "_", "(.)" ).replace( "%", "(.*)" ); + } + + + @Override + public String toString() { + return "Pattern[" + pattern + "]"; + } + +} diff --git a/core/src/main/java/org/polypheny/db/catalog/logistic/PlacementType.java b/core/src/main/java/org/polypheny/db/catalog/logistic/PlacementType.java new file mode 100644 index 0000000000..014a0bce0f --- /dev/null +++ b/core/src/main/java/org/polypheny/db/catalog/logistic/PlacementType.java @@ -0,0 +1,61 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.db.catalog.logistic; + +import lombok.NonNull; +import org.polypheny.db.catalog.exceptions.UnknownPlacementTypeException; +import org.polypheny.db.catalog.exceptions.UnknownPlacementTypeRuntimeException; + +public enum PlacementType { + MANUAL( 1 ), + AUTOMATIC( 2 ), + STATIC( 3 ); + + private final int id; + + + PlacementType( int id ) { + this.id = id; + } + + + public int getId() { + return id; + } + + + public static PlacementType getById( int id ) { + for ( PlacementType e : values() ) { + if ( e.id == id ) { + return e; + } + } + throw new UnknownPlacementTypeRuntimeException( id ); + } + + + public static PlacementType parse( @NonNull String str ) throws UnknownPlacementTypeException { + if ( str.equalsIgnoreCase( "MANUAL" ) ) { + return PlacementType.MANUAL; + } else if ( str.equalsIgnoreCase( "AUTOMATIC" ) ) { + return PlacementType.AUTOMATIC; + } + throw new UnknownPlacementTypeException( str ); + } + + +} diff --git a/core/src/main/java/org/polypheny/db/catalog/refactor/Expressible.java b/core/src/main/java/org/polypheny/db/catalog/refactor/Expressible.java new file mode 100644 index 0000000000..59644bc8bf --- /dev/null +++ b/core/src/main/java/org/polypheny/db/catalog/refactor/Expressible.java @@ -0,0 +1,25 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.catalog.refactor; + +import org.apache.calcite.linq4j.tree.Expression; + +public interface Expressible { + + Expression asExpression(); + +} diff --git a/core/src/main/java/org/polypheny/db/catalog/refactor/FilterableEntity.java b/core/src/main/java/org/polypheny/db/catalog/refactor/FilterableEntity.java new file mode 100644 index 0000000000..d9228549f9 --- /dev/null +++ b/core/src/main/java/org/polypheny/db/catalog/refactor/FilterableEntity.java @@ -0,0 +1,21 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.db.catalog.refactor; + +public interface FilterableEntity { + +} diff --git a/core/src/main/java/org/polypheny/db/catalog/refactor/ProjectableFilterableEntity.java b/core/src/main/java/org/polypheny/db/catalog/refactor/ProjectableFilterableEntity.java new file mode 100644 index 0000000000..f5343c85ef --- /dev/null +++ b/core/src/main/java/org/polypheny/db/catalog/refactor/ProjectableFilterableEntity.java @@ -0,0 +1,21 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.catalog.refactor; + +public interface ProjectableFilterableEntity { + +} diff --git a/core/src/main/java/org/polypheny/db/catalog/refactor/QueryableEntity.java b/core/src/main/java/org/polypheny/db/catalog/refactor/QueryableEntity.java index 5e968a202d..e16b44de0f 100644 --- a/core/src/main/java/org/polypheny/db/catalog/refactor/QueryableEntity.java +++ b/core/src/main/java/org/polypheny/db/catalog/refactor/QueryableEntity.java @@ -16,6 +16,7 @@ package org.polypheny.db.catalog.refactor; +import java.lang.reflect.Type; import org.apache.calcite.linq4j.Queryable; import org.polypheny.db.adapter.DataContext; import org.polypheny.db.schema.graph.QueryableGraph; @@ -27,4 +28,6 @@ public interface QueryableEntity { */ Queryable asQueryable( DataContext root, QueryableGraph graph ); + Type getElementType(); + } diff --git a/core/src/main/java/org/polypheny/db/catalog/refactor/ScannableEntity.java b/core/src/main/java/org/polypheny/db/catalog/refactor/ScannableEntity.java new file mode 100644 index 0000000000..4793a08261 --- /dev/null +++ b/core/src/main/java/org/polypheny/db/catalog/refactor/ScannableEntity.java @@ -0,0 +1,21 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.db.catalog.refactor; + +public interface ScannableEntity { + +} diff --git a/core/src/main/java/org/polypheny/db/ddl/DdlManager.java b/core/src/main/java/org/polypheny/db/ddl/DdlManager.java index 3c79bb21d5..67d8240142 100644 --- a/core/src/main/java/org/polypheny/db/ddl/DdlManager.java +++ b/core/src/main/java/org/polypheny/db/ddl/DdlManager.java @@ -25,13 +25,13 @@ import org.polypheny.db.algebra.AlgCollation; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.AlgRoot; -import org.polypheny.db.catalog.Catalog.Collation; -import org.polypheny.db.catalog.Catalog.ConstraintType; -import org.polypheny.db.catalog.Catalog.ForeignKeyOption; -import org.polypheny.db.catalog.Catalog.NamespaceType; -import org.polypheny.db.catalog.Catalog.PlacementType; +import org.polypheny.db.catalog.logistic.Collation; +import org.polypheny.db.catalog.logistic.ConstraintType; +import org.polypheny.db.catalog.logistic.ForeignKeyOption; +import org.polypheny.db.catalog.logistic.NamespaceType; +import org.polypheny.db.catalog.logistic.PlacementType; import org.polypheny.db.catalog.entity.CatalogAdapter.AdapterType; -import org.polypheny.db.catalog.entity.CatalogCollection; +import org.polypheny.db.catalog.entity.LogicalCollection; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.entity.MaterializedCriteria; import org.polypheny.db.catalog.exceptions.ColumnAlreadyExistsException; @@ -582,9 +582,9 @@ public static DdlManager getInstance() { public abstract void removeGraphDatabasePlacement( long graphId, DataStore dataStores, Statement statement ); - public abstract void dropCollection( CatalogCollection catalogCollection, Statement statement ); + public abstract void dropCollection( LogicalCollection catalogCollection, Statement statement ); - public abstract void dropCollectionPlacement( long namespaceId, CatalogCollection collection, List dataStores, Statement statement ); + public abstract void dropCollectionPlacement( long namespaceId, LogicalCollection collection, List dataStores, Statement statement ); /** diff --git a/core/src/main/java/org/polypheny/db/interpreter/ScanNode.java b/core/src/main/java/org/polypheny/db/interpreter/ScanNode.java index a812780b89..291463d4cc 100644 --- a/core/src/main/java/org/polypheny/db/interpreter/ScanNode.java +++ b/core/src/main/java/org/polypheny/db/interpreter/ScanNode.java @@ -90,50 +90,48 @@ public void run() { * * Tries various table SPIs, and negotiates with the table which filters and projects it can implement. Adds to the Enumerable implementations of any filters and projects that cannot be implemented by the table. 
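+ * The unwrap chain below probes the most capable SPI first: projectable-filterable, then filterable, then scannable, before falling back to the generic Enumerable and Queryable paths.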
*/ - static ScanNode create( Compiler compiler, RelScan alg, ImmutableList filters, ImmutableIntList projects ) { - final AlgOptEntity algOptEntity = alg.getEntity(); - final ProjectableFilterableEntity pfTable = algOptEntity.unwrap( ProjectableFilterableEntity.class ); + static ScanNode create( Compiler compiler, RelScan alg, ImmutableList filters, ImmutableIntList projects ) { + final ProjectableFilterableEntity pfTable = alg.entity.unwrap( ProjectableFilterableEntity.class ); if ( pfTable != null ) { return createProjectableFilterable( compiler, alg, filters, projects, pfTable ); } - final FilterableEntity filterableTable = algOptEntity.unwrap( FilterableEntity.class ); + final FilterableEntity filterableTable = alg.entity.unwrap( FilterableEntity.class ); if ( filterableTable != null ) { return createFilterable( compiler, alg, filters, projects, filterableTable ); } - final ScannableEntity scannableTable = algOptEntity.unwrap( ScannableEntity.class ); + final ScannableEntity scannableTable = alg.entity.unwrap( ScannableEntity.class ); if ( scannableTable != null ) { return createScannable( compiler, alg, filters, projects, scannableTable ); } //noinspection unchecked - final Enumerable enumerable = algOptEntity.unwrap( Enumerable.class ); + final Enumerable enumerable = alg.entity.unwrap( Enumerable.class ); if ( enumerable != null ) { return createEnumerable( compiler, alg, enumerable, null, filters, projects ); } - final QueryableEntity queryableTable = algOptEntity.unwrap( QueryableEntity.class ); + final QueryableEntity queryableTable = alg.entity.unwrap( QueryableEntity.class ); if ( queryableTable != null ) { return createQueryable( compiler, alg, filters, projects, queryableTable ); } - throw new AssertionError( "cannot convert table " + algOptEntity + " to enumerable" ); + throw new AssertionError( "cannot convert table " + alg.entity + " to enumerable" ); } - private static ScanNode createScannable( Compiler compiler, RelScan alg, ImmutableList filters, ImmutableIntList projects, ScannableEntity scannableTable ) { + private static ScanNode createScannable( Compiler compiler, RelScan alg, ImmutableList filters, ImmutableIntList projects, ScannableEntity scannableTable ) { final Enumerable rowEnumerable = Enumerables.toRow( scannableTable.scan( compiler.getDataContext() ) ); return createEnumerable( compiler, alg, rowEnumerable, null, filters, projects ); } - private static ScanNode createQueryable( Compiler compiler, RelScan alg, ImmutableList filters, ImmutableIntList projects, QueryableEntity queryableTable ) { + private static ScanNode createQueryable( Compiler compiler, RelScan alg, ImmutableList filters, ImmutableIntList projects, QueryableEntity queryableTable ) { final DataContext root = compiler.getDataContext(); - final AlgOptEntity algOptEntity = alg.getEntity(); final Type elementType = queryableTable.getElementType(); final Enumerable rowEnumerable; if ( elementType instanceof Class ) { //noinspection unchecked - final Queryable queryable = Schemas.queryable( root, (Class) elementType, List.of( algOptEntity.getCatalogEntity().unwrap( LogicalTable.class ).getNamespaceName(), algOptEntity.getCatalogEntity().name ) ); + final Queryable queryable = (Queryable) Schemas.queryable( root, (Class) elementType, List.of( alg.entity.unwrap( LogicalTable.class ).getNamespaceName(), alg.entity.name ) ); ImmutableList.Builder fieldBuilder = ImmutableList.builder(); - Class type = (Class) elementType; + Class type = (Class) elementType; for ( Field field : type.getFields() ) { if ( 
Modifier.isPublic( field.getModifiers() ) && !Modifier.isStatic( field.getModifiers() ) ) { fieldBuilder.add( field ); @@ -153,13 +151,13 @@ private static ScanNode createQueryable( Compiler compiler, RelScan alg, Immutab return new Row( values ); } ); } else { - rowEnumerable = Schemas.queryable( root, Row.class, List.of( algOptEntity.getCatalogEntity().unwrap( LogicalTable.class ).getNamespaceName(), algOptEntity.getCatalogEntity().name ) ); + rowEnumerable = Schemas.queryable( root, Row.class, List.of( alg.entity.unwrap( LogicalTable.class ).getNamespaceName(), alg.entity.name ) ); } return createEnumerable( compiler, alg, rowEnumerable, null, filters, projects ); } - private static ScanNode createFilterable( Compiler compiler, RelScan alg, ImmutableList filters, ImmutableIntList projects, FilterableEntity filterableTable ) { + private static ScanNode createFilterable( Compiler compiler, RelScan alg, ImmutableList filters, ImmutableIntList projects, FilterableEntity filterableTable ) { final DataContext root = compiler.getDataContext(); final List mutableFilters = Lists.newArrayList( filters ); final Enumerable enumerable = filterableTable.scan( root, mutableFilters ); diff --git a/core/src/main/java/org/polypheny/db/languages/LanguageManager.java b/core/src/main/java/org/polypheny/db/languages/LanguageManager.java index e0beb33276..d33f8e43af 100644 --- a/core/src/main/java/org/polypheny/db/languages/LanguageManager.java +++ b/core/src/main/java/org/polypheny/db/languages/LanguageManager.java @@ -23,7 +23,7 @@ import java.util.function.BiFunction; import java.util.function.Supplier; import lombok.Getter; -import org.polypheny.db.catalog.Catalog.NamespaceType; +import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.nodes.validate.Validator; import org.polypheny.db.prepare.Context; import org.polypheny.db.prepare.PolyphenyDbCatalogReader; diff --git a/core/src/main/java/org/polypheny/db/languages/QueryLanguage.java b/core/src/main/java/org/polypheny/db/languages/QueryLanguage.java index 2290cfe3a7..b0351f489c 100644 --- a/core/src/main/java/org/polypheny/db/languages/QueryLanguage.java +++ b/core/src/main/java/org/polypheny/db/languages/QueryLanguage.java @@ -26,7 +26,7 @@ import java.util.function.BiFunction; import java.util.function.Supplier; import lombok.Getter; -import org.polypheny.db.catalog.Catalog.NamespaceType; +import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.nodes.validate.Validator; import org.polypheny.db.prepare.Context; import org.polypheny.db.prepare.PolyphenyDbCatalogReader; diff --git a/core/src/main/java/org/polypheny/db/languages/QueryParameters.java b/core/src/main/java/org/polypheny/db/languages/QueryParameters.java index 5c70c58f67..5ea2e05611 100644 --- a/core/src/main/java/org/polypheny/db/languages/QueryParameters.java +++ b/core/src/main/java/org/polypheny/db/languages/QueryParameters.java @@ -17,7 +17,7 @@ package org.polypheny.db.languages; import lombok.Getter; -import org.polypheny.db.catalog.Catalog.NamespaceType; +import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.interpreter.Node; /** diff --git a/core/src/main/java/org/polypheny/db/partition/PartitionManagerFactory.java b/core/src/main/java/org/polypheny/db/partition/PartitionManagerFactory.java index f01e91028c..bf443003f4 100644 --- a/core/src/main/java/org/polypheny/db/partition/PartitionManagerFactory.java +++ b/core/src/main/java/org/polypheny/db/partition/PartitionManagerFactory.java @@
-17,7 +17,7 @@ package org.polypheny.db.partition; -import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.logistic.PartitionType; public abstract class PartitionManagerFactory { @@ -43,6 +43,6 @@ public static PartitionManagerFactory getInstance() { } - public abstract PartitionManager getPartitionManager( Catalog.PartitionType partitionType ); + public abstract PartitionManager getPartitionManager( PartitionType partitionType ); } diff --git a/core/src/main/java/org/polypheny/db/partition/properties/PartitionProperty.java b/core/src/main/java/org/polypheny/db/partition/properties/PartitionProperty.java index d06124d20f..935428a915 100644 --- a/core/src/main/java/org/polypheny/db/partition/properties/PartitionProperty.java +++ b/core/src/main/java/org/polypheny/db/partition/properties/PartitionProperty.java @@ -20,7 +20,7 @@ import java.io.Serializable; import lombok.Getter; import lombok.experimental.SuperBuilder; -import org.polypheny.db.catalog.Catalog.PartitionType; +import org.polypheny.db.catalog.logistic.PartitionType; @SuperBuilder diff --git a/core/src/main/java/org/polypheny/db/partition/properties/TemperaturePartitionProperty.java b/core/src/main/java/org/polypheny/db/partition/properties/TemperaturePartitionProperty.java index eaca6f7771..88c3240875 100644 --- a/core/src/main/java/org/polypheny/db/partition/properties/TemperaturePartitionProperty.java +++ b/core/src/main/java/org/polypheny/db/partition/properties/TemperaturePartitionProperty.java @@ -18,7 +18,7 @@ import lombok.Getter; import lombok.experimental.SuperBuilder; -import org.polypheny.db.catalog.Catalog.PartitionType; +import org.polypheny.db.catalog.logistic.PartitionType; @SuperBuilder diff --git a/core/src/main/java/org/polypheny/db/plan/AlgOptAbstractEntity.java b/core/src/main/java/org/polypheny/db/plan/AlgOptAbstractEntity.java index c44b1000a2..416477423f 100644 --- a/core/src/main/java/org/polypheny/db/plan/AlgOptAbstractEntity.java +++ b/core/src/main/java/org/polypheny/db/plan/AlgOptAbstractEntity.java @@ -127,7 +127,7 @@ public List getReferentialConstraints() { @Override public AlgNode toAlg( ToAlgContext context, AlgTraitSet traitSet ) { - return LogicalRelScan.create( context.getCluster(), this ); + return null;//return LogicalRelScan.create( context.getCluster(), this ); } diff --git a/core/src/main/java/org/polypheny/db/plan/AlgOptCluster.java b/core/src/main/java/org/polypheny/db/plan/AlgOptCluster.java index 3ac1713309..059156d780 100644 --- a/core/src/main/java/org/polypheny/db/plan/AlgOptCluster.java +++ b/core/src/main/java/org/polypheny/db/plan/AlgOptCluster.java @@ -50,6 +50,7 @@ import org.polypheny.db.rex.RexBuilder; import org.polypheny.db.rex.RexNode; import org.polypheny.db.schema.ModelTrait; +import org.polypheny.db.schema.PolyphenyDbSchema; /** @@ -69,13 +70,16 @@ public class AlgOptCluster { private final AlgTraitSet emptyTraitSet; private AlgMetadataQuery mq; + @Getter + private final PolyphenyDbSchema rootSchema; + /** * Creates a cluster. * * For use only from {@link #create} and {@link AlgOptQuery}. 
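+ * The cluster now also keeps the root {@link PolyphenyDbSchema} passed in through {@link #create}, exposed via the Lombok-generated getter.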
*/ - private AlgOptCluster( AlgOptPlanner planner, AlgDataTypeFactory typeFactory, RexBuilder rexBuilder, AlgTraitSet traitSet ) { + private AlgOptCluster( AlgOptPlanner planner, AlgDataTypeFactory typeFactory, RexBuilder rexBuilder, AlgTraitSet traitSet, PolyphenyDbSchema rootSchema ) { this.nextCorrel = new AtomicInteger( 0 ); this.mapCorrelToAlg = new HashMap<>(); this.planner = Objects.requireNonNull( planner ); @@ -87,33 +91,29 @@ private AlgOptCluster( AlgOptPlanner planner, AlgDataTypeFactory typeFactory, Re setMetadataProvider( DefaultAlgMetadataProvider.INSTANCE ); this.emptyTraitSet = traitSet; assert emptyTraitSet.size() == planner.getAlgTraitDefs().size(); + this.rootSchema = rootSchema; } /** * Creates a cluster. */ - public static AlgOptCluster create( AlgOptPlanner planner, RexBuilder rexBuilder ) { - return AlgOptCluster.create( planner, rexBuilder, planner.emptyTraitSet() ); + public static AlgOptCluster create( AlgOptPlanner planner, RexBuilder rexBuilder, AlgTraitSet traitSet, PolyphenyDbSchema rootSchema ) { + return new AlgOptCluster( planner, rexBuilder.getTypeFactory(), rexBuilder, traitSet, rootSchema ); } - public static AlgOptCluster createDocument( AlgOptPlanner planner, RexBuilder rexBuilder ) { + public static AlgOptCluster createDocument( AlgOptPlanner planner, RexBuilder rexBuilder, PolyphenyDbSchema rootSchema ) { AlgTraitSet traitSet = planner.emptyTraitSet().replace( ModelTrait.DOCUMENT ); - return AlgOptCluster.create( planner, rexBuilder, traitSet ); + return AlgOptCluster.create( planner, rexBuilder, traitSet, rootSchema ); } - public static AlgOptCluster createGraph( AlgOptPlanner planner, RexBuilder rexBuilder ) { + public static AlgOptCluster createGraph( AlgOptPlanner planner, RexBuilder rexBuilder, PolyphenyDbSchema rootSchema ) { AlgTraitSet traitSet = planner.emptyTraitSet().replace( ModelTrait.GRAPH ); - return AlgOptCluster.create( planner, rexBuilder, traitSet ); - } - - - private static AlgOptCluster create( AlgOptPlanner planner, RexBuilder rexBuilder, AlgTraitSet traitSet ) { - return new AlgOptCluster( planner, rexBuilder.getTypeFactory(), rexBuilder, traitSet ); + return AlgOptCluster.create( planner, rexBuilder, traitSet, rootSchema ); } diff --git a/core/src/main/java/org/polypheny/db/plan/AlgOptUtil.java b/core/src/main/java/org/polypheny/db/plan/AlgOptUtil.java index 5bee679029..3736b29392 100644 --- a/core/src/main/java/org/polypheny/db/plan/AlgOptUtil.java +++ b/core/src/main/java/org/polypheny/db/plan/AlgOptUtil.java @@ -100,6 +100,7 @@ import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.algebra.type.AlgDataTypeFieldImpl; import org.polypheny.db.algebra.type.AlgDataTypeSystem; +import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.languages.OperatorRegistry; import org.polypheny.db.nodes.Operator; import org.polypheny.db.rex.LogicVisitor; @@ -187,7 +188,7 @@ public static boolean isOrder( AlgNode alg ) { /** * Returns a set of tables used by this expression or its children */ - public static Set<AlgOptEntity> findTables( AlgNode alg ) { + public static Set<CatalogEntity> findTables( AlgNode alg ) { return new LinkedHashSet<>( findAllTables( alg ) ); } @@ -195,9 +196,9 @@ public static Set<AlgOptEntity> findTables( AlgNode alg ) { /** * Returns a list of all tables used by this expression or its children */ - public static List<AlgOptEntity> findAllTables( AlgNode alg ) { + public static List<CatalogEntity> findAllTables( AlgNode alg ) { final Multimap<Class<? extends AlgNode>, AlgNode> nodes = AlgMetadataQuery.instance().getNodeTypes( alg ); - final List<AlgOptEntity> usedTables = new ArrayList<>(); +
diff --git a/core/src/main/java/org/polypheny/db/plan/AlgOptUtil.java b/core/src/main/java/org/polypheny/db/plan/AlgOptUtil.java
index 5bee679029..3736b29392 100644
--- a/core/src/main/java/org/polypheny/db/plan/AlgOptUtil.java
+++ b/core/src/main/java/org/polypheny/db/plan/AlgOptUtil.java
@@ -100,6 +100,7 @@ import org.polypheny.db.algebra.type.AlgDataTypeField;
 import org.polypheny.db.algebra.type.AlgDataTypeFieldImpl;
 import org.polypheny.db.algebra.type.AlgDataTypeSystem;
+import org.polypheny.db.catalog.entity.CatalogEntity;
 import org.polypheny.db.languages.OperatorRegistry;
 import org.polypheny.db.nodes.Operator;
 import org.polypheny.db.rex.LogicVisitor;
@@ -187,7 +188,7 @@ public static boolean isOrder( AlgNode alg ) {
     /**
      * Returns a set of tables used by this expression or its children
      */
-    public static Set<AlgOptEntity> findTables( AlgNode alg ) {
+    public static Set<CatalogEntity> findTables( AlgNode alg ) {
         return new LinkedHashSet<>( findAllTables( alg ) );
     }

@@ -195,9 +196,9 @@ public static Set<CatalogEntity> findTables( AlgNode alg ) {
     /**
      * Returns a list of all tables used by this expression or its children
      */
-    public static List<AlgOptEntity> findAllTables( AlgNode alg ) {
+    public static List<CatalogEntity> findAllTables( AlgNode alg ) {
         final Multimap<Class<? extends AlgNode>, AlgNode> nodes = AlgMetadataQuery.instance().getNodeTypes( alg );
-        final List<AlgOptEntity> usedTables = new ArrayList<>();
+        final List<CatalogEntity> usedTables = new ArrayList<>();
         for ( Entry<Class<? extends AlgNode>, Collection<AlgNode>> e : nodes.asMap().entrySet() ) {
             if ( RelScan.class.isAssignableFrom( e.getKey() ) ) {
                 for ( AlgNode node : e.getValue() ) {
diff --git a/core/src/main/java/org/polypheny/db/prepare/AlgOptEntityImpl.java b/core/src/main/java/org/polypheny/db/prepare/AlgOptEntityImpl.java
index bb92ae9525..162f9c4ddc 100644
--- a/core/src/main/java/org/polypheny/db/prepare/AlgOptEntityImpl.java
+++ b/core/src/main/java/org/polypheny/db/prepare/AlgOptEntityImpl.java
@@ -287,18 +287,18 @@ public <T> T unwrap( Class<T> clazz ) {
         }
         final AlgOptCluster cluster = context.getCluster();
         if ( Hook.ENABLE_BINDABLE.get( false ) ) {
-            return LogicalRelScan.create( cluster, this );
+            //return LogicalRelScan.create( cluster, this );
         }
         if ( PolyphenyDbPrepareImpl.ENABLE_ENUMERABLE && entity instanceof QueryableEntity ) {
-            return EnumerableScan.create( cluster, this );
+            // return EnumerableScan.create( cluster, this );
         }
         if ( entity instanceof ScannableEntity || entity instanceof FilterableEntity || entity instanceof ProjectableFilterableEntity ) {
-            return LogicalRelScan.create( cluster, this );
+            //return LogicalRelScan.create( cluster, this );
         }
         if ( PolyphenyDbPrepareImpl.ENABLE_ENUMERABLE ) {
-            return EnumerableScan.create( cluster, this );
+            //return EnumerableScan.create( cluster, this );
         }
         throw new AssertionError();
     }
diff --git a/core/src/main/java/org/polypheny/db/prepare/LixToAlgTranslator.java b/core/src/main/java/org/polypheny/db/prepare/LixToAlgTranslator.java
index a910cbd4b0..8293f5bc9a 100644
--- a/core/src/main/java/org/polypheny/db/prepare/LixToAlgTranslator.java
+++ b/core/src/main/java/org/polypheny/db/prepare/LixToAlgTranslator.java
@@ -98,14 +98,14 @@ public AlgNode translate( Expression expression ) {
                 input = translate( call.targetExpression );
                 return LogicalProject.create(
                         input,
-                        toRex( input, (FunctionExpression) call.expressions.get( 0 ) ),
+                        toRex( input, (FunctionExpression) call.expressions.get( 0 ) ),
                         (List<String>) null );

             case WHERE:
                 input = translate( call.targetExpression );
                 return LogicalFilter.create(
                         input,
-                        toRex( (FunctionExpression) call.expressions.get( 0 ), input ) );
+                        toRex( (FunctionExpression) call.expressions.get( 0 ), input ) );

             case AS_QUERYABLE:
                 return LogicalRelScan.create(
@@ -131,7 +131,7 @@ public AlgNode translate( Expression expression ) {
     }


-    private List<RexNode> toRex( AlgNode child, FunctionExpression expression ) {
+    private List<RexNode> toRex( AlgNode child, FunctionExpression expression ) {
         RexBuilder rexBuilder = cluster.getRexBuilder();
         List<RexNode> list = Collections.singletonList( rexBuilder.makeRangeReference( child ) );
         PolyphenyDbPrepareImpl.ScalarTranslator translator =
diff --git a/core/src/main/java/org/polypheny/db/prepare/PlannerImpl.java b/core/src/main/java/org/polypheny/db/prepare/PlannerImpl.java
index 001dd7d7f7..9a3781e771 100644
--- a/core/src/main/java/org/polypheny/db/prepare/PlannerImpl.java
+++ b/core/src/main/java/org/polypheny/db/prepare/PlannerImpl.java
@@ -141,8 +141,7 @@ private void ready() {
         }
         ensure( State.STATE_1_RESET );
         Frameworks.withPlanner(
-                ( cluster, algOptSchema, rootSchema ) -> {
-                    Util.discard( rootSchema ); // use our own defaultSchema
+                ( cluster, rootSchema ) -> {
                     typeFactory = (JavaTypeFactory) cluster.getTypeFactory();
                     planner = cluster.getPlanner();
                     planner.setExecutor( executor );
diff --git a/core/src/main/java/org/polypheny/db/prepare/PolyphenyDbCatalogReader.java b/core/src/main/java/org/polypheny/db/prepare/PolyphenyDbCatalogReader.java
index a4fc8d45d1..e78e4d37e5 100644
--- a/core/src/main/java/org/polypheny/db/prepare/PolyphenyDbCatalogReader.java
+++ b/core/src/main/java/org/polypheny/db/prepare/PolyphenyDbCatalogReader.java
@@ -42,7 +42,7 @@ import org.polypheny.db.algebra.operators.OperatorTable;
 import org.polypheny.db.algebra.type.AlgDataType;
 import org.polypheny.db.algebra.type.AlgDataTypeFactory;
-import org.polypheny.db.catalog.entity.CatalogCollection;
+import org.polypheny.db.catalog.entity.LogicalCollection;
 import org.polypheny.db.catalog.entity.CatalogEntity;
 import org.polypheny.db.catalog.entity.logical.LogicalGraph;
 import org.polypheny.db.catalog.entity.logical.LogicalTable;
@@ -85,7 +85,7 @@ public PreparingEntity getTable( final List<String> names ) {
     @Override
     public AlgOptEntity getCollection( final List<String> names ) {
         // First look in the default schema, if any. If not found, look in the root schema.
-        CatalogCollection collection = rootSchema.getCollection( names );
+        LogicalCollection collection = rootSchema.getCollection( names );
         if ( collection != null ) {
             return AlgOptEntityImpl.create( this, collection.getRowType(), collection, null, null );
         }
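[Reviewer note, not part of the patch: collections now resolve to the catalog's LogicalCollection before being wrapped into an AlgOptEntity. A minimal lookup sketch under the signature above; the namespace and collection names are hypothetical:]

    PolyphenyDbCatalogReader reader = statement.getTransaction().getCatalogReader();
    // Looks in the default schema first, then falls back to the root schema:
    AlgOptEntity collection = reader.getCollection( List.of( "myNamespace", "myCollection" ) );
    AlgDataType rowType = collection.getRowType();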
diff --git a/core/src/main/java/org/polypheny/db/prepare/PolyphenyDbPrepareImpl.java b/core/src/main/java/org/polypheny/db/prepare/PolyphenyDbPrepareImpl.java
index ecbb032c61..5d256821d2 100644
--- a/core/src/main/java/org/polypheny/db/prepare/PolyphenyDbPrepareImpl.java
+++ b/core/src/main/java/org/polypheny/db/prepare/PolyphenyDbPrepareImpl.java
@@ -120,6 +120,7 @@ import org.polypheny.db.plan.AlgOptPlanner;
 import org.polypheny.db.plan.AlgOptRule;
 import org.polypheny.db.plan.AlgOptUtil;
+import org.polypheny.db.plan.AlgTraitSet;
 import org.polypheny.db.plan.Contexts;
 import org.polypheny.db.plan.Convention;
 import org.polypheny.db.plan.ConventionTraitDef;
@@ -262,8 +263,8 @@ public void executeDdl( Context context, Node node ) {
     /**
      * Factory method for cluster.
      */
-    protected AlgOptCluster createCluster( AlgOptPlanner planner, RexBuilder rexBuilder ) {
-        return AlgOptCluster.create( planner, rexBuilder );
+    protected AlgOptCluster createCluster( AlgOptPlanner planner, RexBuilder rexBuilder, AlgTraitSet traitSet, PolyphenyDbSchema rootSchema ) {
+        return AlgOptCluster.create( planner, rexBuilder, traitSet, rootSchema );
     }

     /**
@@ -453,15 +454,10 @@ private static String getTypeName( AlgDataType type ) {
     public <R> R perform( PrepareAction<R> action ) {
         final Context prepareContext = action.getConfig().getPrepareContext();
         final JavaTypeFactory typeFactory = prepareContext.getTypeFactory();
-        final PolyphenyDbSchema schema =
-                action.getConfig().getDefaultSchema() != null
-                        ? action.getConfig().getDefaultSchema()
-                        : prepareContext.getRootSchema();
-        PolyphenyDbCatalogReader catalogReader = new PolyphenyDbCatalogReader( schema, typeFactory );
         final RexBuilder rexBuilder = new RexBuilder( typeFactory );
         final AlgOptPlanner planner = createPlanner( prepareContext, action.getConfig().getContext(), action.getConfig().getCostFactory() );
-        final AlgOptCluster cluster = createCluster( planner, rexBuilder );
-        return action.apply( cluster, catalogReader, prepareContext.getRootSchema() );
+        final AlgOptCluster cluster = createCluster( planner, rexBuilder, planner.emptyTraitSet(), prepareContext.getRootSchema() );
+        return action.apply( cluster, prepareContext.getRootSchema() );
     }
diff --git a/core/src/main/java/org/polypheny/db/prepare/QueryableAlgBuilder.java b/core/src/main/java/org/polypheny/db/prepare/QueryableAlgBuilder.java
index 074460a5e7..33da94fada 100644
--- a/core/src/main/java/org/polypheny/db/prepare/QueryableAlgBuilder.java
+++ b/core/src/main/java/org/polypheny/db/prepare/QueryableAlgBuilder.java
@@ -103,15 +103,9 @@ AlgNode toAlg( Queryable<T> queryable ) {
             return alg;
         }
         if ( queryable instanceof AbstractTableQueryable ) {
-            final AbstractTableQueryable tableQueryable = (AbstractTableQueryable) queryable;
+            final AbstractTableQueryable tableQueryable = (AbstractTableQueryable) queryable;
             final QueryableEntity table = tableQueryable.table;
-            final AlgOptEntityImpl algOptTable = AlgOptEntityImpl.create(
-                    null,
-                    table.getRowType( translator.typeFactory ),
-                    table.getCatalogEntity(),
-                    table.getPartitionPlacement(),
-                    null );
             if ( table instanceof TranslatableEntity ) {
-                return ((TranslatableEntity) table).toAlg( translator.toAlgContext(), algOptTable, translator.cluster.traitSet() );
+                return ((TranslatableEntity) table).toAlg( translator.toAlgContext(), translator.cluster.traitSet() );
             } else {
diff --git a/core/src/main/java/org/polypheny/db/processing/DeepCopyShuttle.java b/core/src/main/java/org/polypheny/db/processing/DeepCopyShuttle.java
index 7ea4a851a7..6ad49cb193 100644
--- a/core/src/main/java/org/polypheny/db/processing/DeepCopyShuttle.java
+++ b/core/src/main/java/org/polypheny/db/processing/DeepCopyShuttle.java
@@ -50,7 +50,7 @@ private AlgTraitSet copy( final AlgTraitSet other ) {

     @Override
-    public AlgNode visit( RelScan scan ) {
+    public AlgNode visit( RelScan<?> scan ) {
         final AlgNode node = super.visit( scan );
         return new LogicalRelScan( node.getCluster(), copy( node.getTraitSet() ), node.getEntity() );
     }
diff --git a/core/src/main/java/org/polypheny/db/processing/ExtendedQueryParameters.java b/core/src/main/java/org/polypheny/db/processing/ExtendedQueryParameters.java
index d5c7db6652..38393c540a 100644
--- a/core/src/main/java/org/polypheny/db/processing/ExtendedQueryParameters.java
+++ b/core/src/main/java/org/polypheny/db/processing/ExtendedQueryParameters.java
@@ -20,7 +20,7 @@
 import java.util.List;
 import lombok.Getter;
 import lombok.Setter;
-import org.polypheny.db.catalog.Catalog.NamespaceType;
+import org.polypheny.db.catalog.logistic.NamespaceType;
 import org.polypheny.db.languages.QueryParameters;

diff --git a/core/src/main/java/org/polypheny/db/processing/LogicalAlgAnalyzeShuttle.java b/core/src/main/java/org/polypheny/db/processing/LogicalAlgAnalyzeShuttle.java
index 8c7a61294b..126e145b32 100644
--- a/core/src/main/java/org/polypheny/db/processing/LogicalAlgAnalyzeShuttle.java
+++ b/core/src/main/java/org/polypheny/db/processing/LogicalAlgAnalyzeShuttle.java
@@ -59,6 +59,7 @@ import org.polypheny.db.algebra.logical.relational.LogicalRelScan;
 import org.polypheny.db.algebra.logical.relational.LogicalSort;
 import
org.polypheny.db.algebra.logical.relational.LogicalUnion; +import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.prepare.AlgOptEntityImpl; import org.polypheny.db.transaction.Statement; @@ -149,7 +150,7 @@ public AlgNode visit( LogicalLpgModify modify ) { @Override public AlgNode visit( LogicalLpgScan scan ) { - hashBasis.add( scan.getClass().getSimpleName() + "#" + scan.getGraph().id ); + hashBasis.add( scan.getClass().getSimpleName() + "#" + scan.entity.id ); return super.visit( scan ); } @@ -250,7 +251,7 @@ public AlgNode visit( LogicalDocumentProject project ) { @Override public AlgNode visit( LogicalDocumentScan scan ) { - hashBasis.add( "LogicalDocumentScan#" + scan.getCollection().getCatalogEntity().id ); + hashBasis.add( "LogicalDocumentScan#" + scan.entity.id ); return super.visit( scan ); } @@ -278,14 +279,14 @@ public AlgNode visit( LogicalConstraintEnforcer enforcer ) { @Override public AlgNode visit( LogicalMatch match ) { - hashBasis.add( "LogicalMatch#" + match.getEntity().getCatalogEntity().id ); + hashBasis.add( "LogicalMatch#" + match.getEntity().id ); return visitChild( match, 0, match.getInput() ); } @Override - public AlgNode visit( RelScan scan ) { - hashBasis.add( "Scan#" + scan.getEntity().getCatalogEntity().id ); + public AlgNode visit( RelScan scan ) { + hashBasis.add( "Scan#" + scan.getEntity().id ); // get available columns for every table scan this.getAvailableColumns( scan ); @@ -324,7 +325,7 @@ public AlgNode visit( LogicalCorrelate correlate ) { @Override public AlgNode visit( LogicalJoin join ) { if ( join.getLeft() instanceof LogicalRelScan && join.getRight() instanceof LogicalRelScan ) { - hashBasis.add( "LogicalJoin#" + join.getLeft().getEntity().getCatalogEntity().id + "#" + join.getRight().getEntity().getCatalogEntity().id ); + hashBasis.add( "LogicalJoin#" + join.getLeft().getEntity().id + "#" + join.getRight().getEntity().id ); } super.visit( join ); @@ -389,8 +390,8 @@ public AlgNode visit( AlgNode other ) { private void getAvailableColumns( AlgNode scan ) { - this.entityId.add( scan.getEntity().getCatalogEntity().id ); - final LogicalTable table = (LogicalTable) scan.getEntity().getCatalogEntity(); + this.entityId.add( scan.getEntity().id ); + final LogicalTable table = scan.getEntity().unwrap( LogicalTable.class ); if ( table != null ) { final List ids = table.fieldIds; final List names = table.getColumnNames(); @@ -405,12 +406,12 @@ private void getAvailableColumns( AlgNode scan ) { private void getPartitioningInfo( LogicalFilter filter ) { - AlgOptEntityImpl table = (AlgOptEntityImpl) filter.getInput().getEntity(); + CatalogEntity table = filter.getInput().getEntity(); if ( table == null ) { return; } - handleIfPartitioned( filter, (LogicalTable) table.getCatalogEntity() ); + handleIfPartitioned( filter, table.unwrap( LogicalTable.class ) ); } @@ -440,12 +441,12 @@ private void handleIfPartitioned( AlgNode node, LogicalTable catalogTable ) { private void getPartitioningInfo( LogicalDocumentFilter filter ) { - AlgOptEntityImpl table = (AlgOptEntityImpl) filter.getInput().getEntity(); - if ( table == null ) { + CatalogEntity entity = filter.getInput().getEntity(); + if ( entity == null ) { return; } - handleIfPartitioned( filter, (LogicalTable) table.getCatalogEntity() ); + handleIfPartitioned( filter, (LogicalTable) entity ); } diff --git a/core/src/main/java/org/polypheny/db/rex/RexTableInputRef.java 
b/core/src/main/java/org/polypheny/db/rex/RexTableInputRef.java index 165a55a685..e3a9e87bcb 100644 --- a/core/src/main/java/org/polypheny/db/rex/RexTableInputRef.java +++ b/core/src/main/java/org/polypheny/db/rex/RexTableInputRef.java @@ -39,6 +39,7 @@ import org.polypheny.db.algebra.metadata.BuiltInMetadata.AllPredicates; import org.polypheny.db.algebra.metadata.BuiltInMetadata.ExpressionLineage; import org.polypheny.db.algebra.type.AlgDataType; +import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.plan.AlgOptEntity; @@ -130,15 +131,15 @@ public Kind getKind() { */ public static class AlgTableRef implements Comparable { - private final AlgOptEntity table; + private final CatalogEntity table; private final int entityNumber; private final String digest; - private AlgTableRef( AlgOptEntity table, int entityNumber ) { + private AlgTableRef( CatalogEntity table, int entityNumber ) { this.table = table; this.entityNumber = entityNumber; - this.digest = table.getCatalogEntity().id + ".#" + entityNumber; + this.digest = table.id + ".#" + entityNumber; } @@ -146,7 +147,7 @@ private AlgTableRef( AlgOptEntity table, int entityNumber ) { public boolean equals( Object obj ) { return this == obj || obj instanceof AlgTableRef - && table.getCatalogEntity().id == ((AlgTableRef) obj).getTable().getCatalogEntity().id + && table.id == ((AlgTableRef) obj).getTable().id && entityNumber == ((AlgTableRef) obj).entityNumber; } @@ -157,13 +158,13 @@ public int hashCode() { } - public AlgOptEntity getTable() { + public CatalogEntity getTable() { return table; } public List getQualifiedName() { - return List.of( table.getCatalogEntity().unwrap( LogicalTable.class ).getNamespaceName(), table.getCatalogEntity().name ); + return List.of( table.unwrap( LogicalTable.class ).getNamespaceName(), table.name ); } @@ -178,7 +179,7 @@ public String toString() { } - public static AlgTableRef of( AlgOptEntity table, int entityNumber ) { + public static AlgTableRef of( CatalogEntity table, int entityNumber ) { return new AlgTableRef( table, entityNumber ); } diff --git a/core/src/main/java/org/polypheny/db/schema/Entity.java b/core/src/main/java/org/polypheny/db/schema/Entity.java index 513fc7a0aa..8cb513dbd4 100644 --- a/core/src/main/java/org/polypheny/db/schema/Entity.java +++ b/core/src/main/java/org/polypheny/db/schema/Entity.java @@ -36,7 +36,7 @@ import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.Catalog.NamespaceType; +import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; import org.polypheny.db.nodes.Call; diff --git a/core/src/main/java/org/polypheny/db/schema/LogicalCollection.java b/core/src/main/java/org/polypheny/db/schema/LogicalCollection.java index 7ed1c30937..b3a5e82efe 100644 --- a/core/src/main/java/org/polypheny/db/schema/LogicalCollection.java +++ b/core/src/main/java/org/polypheny/db/schema/LogicalCollection.java @@ -18,7 +18,7 @@ import java.util.List; import org.polypheny.db.algebra.type.AlgProtoDataType; -import org.polypheny.db.catalog.Catalog.NamespaceType; +import org.polypheny.db.catalog.logistic.NamespaceType; public class LogicalCollection extends LogicalEntity { diff --git a/core/src/main/java/org/polypheny/db/schema/LogicalEntity.java 
b/core/src/main/java/org/polypheny/db/schema/LogicalEntity.java
index 230e849e89..d66f7a61d2 100644
--- a/core/src/main/java/org/polypheny/db/schema/LogicalEntity.java
+++ b/core/src/main/java/org/polypheny/db/schema/LogicalEntity.java
@@ -16,22 +16,28 @@
 package org.polypheny.db.schema;

+import java.io.Serializable;
 import java.util.List;
 import java.util.Set;
 import java.util.TreeSet;
 import lombok.Getter;
 import org.apache.calcite.linq4j.Enumerable;
 import org.apache.calcite.linq4j.Queryable;
+import org.apache.calcite.linq4j.tree.Expression;
 import org.polypheny.db.adapter.DataContext;
-import org.polypheny.db.adapter.java.AbstractQueryableEntity;
 import org.polypheny.db.algebra.AlgNode;
-import org.polypheny.db.algebra.core.relational.RelModify;
+import org.polypheny.db.algebra.core.common.Modify;
 import org.polypheny.db.algebra.core.common.Modify.Operation;
 import org.polypheny.db.algebra.logical.relational.LogicalRelModify;
 import org.polypheny.db.algebra.type.AlgDataType;
 import org.polypheny.db.algebra.type.AlgDataTypeFactory;
 import org.polypheny.db.algebra.type.AlgProtoDataType;
-import org.polypheny.db.catalog.Catalog.NamespaceType;
+import org.polypheny.db.catalog.entity.CatalogEntity;
+import org.polypheny.db.catalog.logistic.EntityType;
+import org.polypheny.db.catalog.logistic.NamespaceType;
+import org.polypheny.db.catalog.refactor.ModifiableEntity;
+import org.polypheny.db.catalog.refactor.ScannableEntity;
+import org.polypheny.db.catalog.refactor.TranslatableEntity;
 import org.polypheny.db.plan.AlgOptCluster;
 import org.polypheny.db.plan.AlgOptEntity;
 import org.polypheny.db.plan.AlgOptEntity.ToAlgContext;
@@ -41,7 +47,7 @@ import org.polypheny.db.rex.RexNode;


-public class LogicalEntity extends AbstractQueryableEntity implements TranslatableEntity, ScannableEntity, ModifiableEntity {
+public class LogicalEntity extends CatalogEntity implements TranslatableEntity, ScannableEntity, ModifiableEntity {

     private AlgProtoDataType protoRowType;

@@ -70,7 +76,7 @@ public LogicalEntity(
             List<String> logicalColumnNames,
             AlgProtoDataType protoRowType,
             NamespaceType namespaceType ) {
-        super( Object[].class, tableId, null, null );
+        super( tableId, logicalTableName, EntityType.ENTITY, namespaceType );
         this.logicalSchemaName = logicalSchemaName;
         this.logicalTableName = logicalTableName;
         this.columnIds = columnIds;
@@ -86,50 +92,39 @@ public String toString() {


     @Override
-    public RelModify toModificationAlg(
-            AlgOptCluster cluster,
-            AlgOptEntity table,
-            CatalogReader catalogReader,
-            AlgNode input,
-            Operation operation,
-            List<String> updateColumnList,
-            List<RexNode> sourceExpressionList,
-            boolean flattened ) {
+    public Modify toModificationAlg( AlgOptCluster cluster, AlgTraitSet traits, CatalogEntity entity, AlgNode child, Operation operation, List<String> targets, List<? extends RexNode> sources ) {
         return new LogicalRelModify(
-                cluster,
                 cluster.traitSetOf( Convention.NONE ),
-                table,
-                catalogReader,
-                input,
+                entity,
+                child,
                 operation,
-                updateColumnList,
-                sourceExpressionList,
-                flattened );
+                targets,
+                sources );
     }


     @Override
-    public AlgDataType getRowType( AlgDataTypeFactory typeFactory ) {
-        return protoRowType.apply( typeFactory );
+    public Serializable[] getParameterArray() {
+        return new Serializable[0];
     }
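[Reviewer note, not part of the patch: the reworked ModifiableEntity contract builds a Modify directly from a CatalogEntity, without the old AlgOptEntity/CatalogReader detour. A caller sketch under the new signature above; `cluster`, `entity`, and `input` are hypothetical placeholders:]

    ModifiableEntity modifiable = entity.unwrap( ModifiableEntity.class );
    Modify modify = modifiable.toModificationAlg(
            cluster,
            cluster.traitSet(),
            entity,
            input,
            Operation.INSERT,
            null,    // targets: updated column names, only set for UPDATE
            null );  // sources: update expressions, only set for UPDATE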
     @Override
-    public Queryable asQueryable( DataContext dataContext, SchemaPlus schema, String tableName ) {
-        throw new RuntimeException( "asQueryable() is not implemented for Logical Tables!" );
+    public State getCatalogType() {
+        return null;
     }


     @Override
-    public Enumerable<Object[]> scan( DataContext root ) {
-        throw new RuntimeException( "scan() is not implemented for Logical Tables!" );
+    public Expression asExpression() {
+        return null;
     }

+
     @Override
-    public AlgNode toAlg( ToAlgContext context, AlgOptEntity algOptEntity, AlgTraitSet traitSet ) {
-        throw new RuntimeException( "toAlg() is not implemented for Logical Tables!" );
+    public AlgNode toAlg( ToAlgContext context, AlgTraitSet traitSet ) {
+        return null;
     }

-
 }
diff --git a/core/src/main/java/org/polypheny/db/schema/LogicalRelView.java b/core/src/main/java/org/polypheny/db/schema/LogicalRelView.java
index 414a56256a..e2119f8f5f 100644
--- a/core/src/main/java/org/polypheny/db/schema/LogicalRelView.java
+++ b/core/src/main/java/org/polypheny/db/schema/LogicalRelView.java
@@ -18,7 +18,7 @@
 import java.util.List;
 import org.polypheny.db.algebra.type.AlgProtoDataType;
-import org.polypheny.db.catalog.Catalog.NamespaceType;
+import org.polypheny.db.catalog.logistic.NamespaceType;

 public class LogicalRelView extends LogicalEntity {
diff --git a/core/src/main/java/org/polypheny/db/schema/ModelTrait.java b/core/src/main/java/org/polypheny/db/schema/ModelTrait.java
index b9bdf1ba80..d2d404e23b 100644
--- a/core/src/main/java/org/polypheny/db/schema/ModelTrait.java
+++ b/core/src/main/java/org/polypheny/db/schema/ModelTrait.java
@@ -18,7 +18,7 @@
 import lombok.Getter;
 import org.polypheny.db.algebra.AlgNode;
-import org.polypheny.db.catalog.Catalog.NamespaceType;
+import org.polypheny.db.catalog.logistic.NamespaceType;
 import org.polypheny.db.plan.AlgOptPlanner;
 import org.polypheny.db.plan.AlgTrait;
 import org.polypheny.db.plan.AlgTraitDef;
diff --git a/core/src/main/java/org/polypheny/db/schema/PolySchemaBuilder.java b/core/src/main/java/org/polypheny/db/schema/PolySchemaBuilder.java
index fa8f909499..99c809344d 100644
--- a/core/src/main/java/org/polypheny/db/schema/PolySchemaBuilder.java
+++ b/core/src/main/java/org/polypheny/db/schema/PolySchemaBuilder.java
@@ -35,9 +35,9 @@
 import org.polypheny.db.algebra.type.AlgDataTypeFactory.Builder;
 import org.polypheny.db.algebra.type.AlgDataTypeImpl;
 import org.polypheny.db.catalog.Catalog;
-import org.polypheny.db.catalog.Catalog.NamespaceType;
+import org.polypheny.db.catalog.logistic.NamespaceType;
 import org.polypheny.db.catalog.entity.CatalogAdapter;
-import org.polypheny.db.catalog.entity.CatalogCollection;
+import org.polypheny.db.catalog.entity.LogicalCollection;
 import org.polypheny.db.catalog.entity.CatalogCollectionPlacement;
 import org.polypheny.db.catalog.entity.CatalogColumnPlacement;
 import org.polypheny.db.catalog.entity.CatalogDatabase;
@@ -141,7 +141,7 @@ private Map<Pair<Long, Long>, CatalogEntity> buildDocumentLogical( Catalog catal
             continue;
         }

-        for ( CatalogCollection catalogEntity : catalog.getCollections( catalogSchema.id, null ) ) {
+        for ( LogicalCollection catalogEntity : catalog.getCollections( catalogSchema.id, null ) ) {
             entities.put( Pair.of( catalogSchema.id, catalogEntity.id ), catalogEntity );
         }
     }
@@ -197,7 +197,7 @@ private Map<Pair<Long, Long>, CatalogEntityPlacement> buildPhysicalDocum
         //adapter.createNewSchema( rootSchema, schemaName, catalogSchema.id );

         for ( long collectionId : collectionIds ) {
-            CatalogCollection catalogCollection = catalog.getCollection( collectionId );
+            LogicalCollection catalogCollection = catalog.getCollection( collectionId );
             for ( CatalogCollectionPlacement partitionPlacement : catalogCollection.placements.stream().map( p -> catalog.getCollectionPlacement(
collectionId, adapter.getAdapterId() ) ).collect( Collectors.toList() ) ) { if ( catalogSchema.namespaceType != NamespaceType.DOCUMENT && catalogAdapter.getSupportedNamespaces().contains( catalogSchema.namespaceType ) ) { @@ -293,7 +293,7 @@ private void buildEntity( Catalog catalog, CatalogSchema catalogSchema, Map names ) { switch ( names.size() ) { case 3: @@ -47,7 +48,20 @@ default LogicalTable getTable( List names ) { } } - default CatalogCollection getCollection( List names ) { + default LogicalTable getTable( long id ) { + return Catalog.getInstance().getTable( id ); + } + + default AllocationTable getAllocTable( long id ){ + return null; + } + + default PhysicalTable getPhysicalTable( long id ){ + return null; + } + + + default LogicalCollection getCollection( List names ) { CatalogNamespace namespace; switch ( names.size() ) { case 3: @@ -65,33 +79,43 @@ default CatalogCollection getCollection( List names ) { } } - default LogicalGraph getGraph( List names ) { + default LogicalCollection getCollection( long id ) { + return Catalog.getInstance().getCollection( id ); + } + + default AllocationCollection getAllocCollection( long id ){ + return null; + } + default PhysicalCollection getPhysicalCollection( long id ){ + return null; + } + + default LogicalGraph getGraph( List names ) { if ( names.size() == 1 ) {// TODO add methods return Catalog.getInstance().getGraphs( Catalog.defaultDatabaseId, Pattern.of( names.get( 0 ) ) ).get( 0 ); } return null; } - default List getNamespaceNames() { - return Catalog.getInstance().getSchemas( Catalog.defaultDatabaseId, null ).stream().map( t -> t.name ).collect( Collectors.toList() ); + default LogicalGraph getGraph( long id ) { + return Catalog.getInstance().getGraph( id ); } - /** - * Schema that has no parents. - */ - class RootSchema extends AbstractNamespace implements Schema { - - RootSchema() { - super( -1L ); - } + default AllocationGraph getAllocGraph( long id ){ + return null; + } + default PhysicalGraph getPhysicalGraph( long id ){ + return null; + } - @Override - public Expression getExpression( SchemaPlus parentSchema, String name ) { - return Expressions.call( DataContext.ROOT, BuiltInMethod.DATA_CONTEXT_GET_ROOT_SCHEMA.method ); - } + default List getNamespaceNames() { + return Catalog.getInstance().getSchemas( Catalog.defaultDatabaseId, null ).stream().map( t -> t.name ).collect( Collectors.toList() ); + } + default boolean isPartitioned( long id ){ + return false; } } diff --git a/core/src/main/java/org/polypheny/db/schema/QueryableEntity.java b/core/src/main/java/org/polypheny/db/schema/QueryableEntity.java index 6735b54804..6317735597 100644 --- a/core/src/main/java/org/polypheny/db/schema/QueryableEntity.java +++ b/core/src/main/java/org/polypheny/db/schema/QueryableEntity.java @@ -48,7 +48,7 @@ public interface QueryableEntity extends Entity { /** * Converts this table into a {@link Queryable}. */ - Queryable asQueryable( DataContext dataContext, SchemaPlus schema, String tableName ); + Queryable asQueryable( DataContext dataContext, PolyphenyDbSchema schema, String tableName ); /** * Returns the element type of the collection that will implement this table. @@ -62,6 +62,6 @@ public interface QueryableEntity extends Entity { * @param tableName Table name (unique within schema) * @param clazz The desired collection class; for example {@code Queryable}. 
*/
-    Expression getExpression( SchemaPlus schema, String tableName, Class clazz );
+    Expression getExpression( PolyphenyDbSchema schema, String tableName, Class clazz );

 }
diff --git a/core/src/main/java/org/polypheny/db/schema/SchemaPlus.java b/core/src/main/java/org/polypheny/db/schema/SchemaPlus.java
index b25861942e..37ac21b55b 100644
--- a/core/src/main/java/org/polypheny/db/schema/SchemaPlus.java
+++ b/core/src/main/java/org/polypheny/db/schema/SchemaPlus.java
@@ -35,7 +35,7 @@
 import com.google.common.collect.ImmutableList;
 import org.polypheny.db.algebra.type.AlgProtoDataType;
-import org.polypheny.db.catalog.Catalog.NamespaceType;
+import org.polypheny.db.catalog.logistic.NamespaceType;
 import org.polypheny.db.schema.Namespace.Schema;

diff --git a/core/src/main/java/org/polypheny/db/schema/Schemas.java b/core/src/main/java/org/polypheny/db/schema/Schemas.java
index 0d62ae90e5..4e147691c0 100644
--- a/core/src/main/java/org/polypheny/db/schema/Schemas.java
+++ b/core/src/main/java/org/polypheny/db/schema/Schemas.java
@@ -204,7 +204,7 @@ public static <E> Queryable<E> queryable( DataContext root, Class<E> clazz, Iter
     public static <E> Queryable<E> queryable( DataContext root, PolyphenyDbSchema schema, Class<E> clazz, String tableName ) {
         //QueryableEntity table = (QueryableEntity) schema.getEntity( tableName );
         LogicalTable table = schema.getTable( List.of( tableName ) );
-        return table.asQueryable( root, schema, tableName );
+        return table.unwrap( QueryableEntity.class ).asQueryable( root, schema, tableName );
     }
diff --git a/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java b/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java
index 3a6e437fe7..20b1a5540e 100644
--- a/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java
+++ b/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java
@@ -290,7 +290,7 @@ public Void apply( AlgOptCluster cluster, PolyphenyDbSchema rootSchema ) {

     public static AlgBuilder create( Statement statement ) {
         final RexBuilder rexBuilder = new RexBuilder( statement.getTransaction().getTypeFactory() );
-        final AlgOptCluster cluster = AlgOptCluster.create( statement.getQueryProcessor().getPlanner(), rexBuilder );
+        final AlgOptPlanner planner = statement.getQueryProcessor().getPlanner();
+        final AlgOptCluster cluster = AlgOptCluster.create( planner, rexBuilder, planner.emptyTraitSet(), statement.getTransaction().getCatalogReader().getRootSchema() );
         return create( statement, cluster );
     }
diff --git a/core/src/main/java/org/polypheny/db/tools/RoutedAlgBuilder.java b/core/src/main/java/org/polypheny/db/tools/RoutedAlgBuilder.java
index cde4d2dbb5..1dde205b4e 100644
--- a/core/src/main/java/org/polypheny/db/tools/RoutedAlgBuilder.java
+++ b/core/src/main/java/org/polypheny/db/tools/RoutedAlgBuilder.java
@@ -33,6 +33,7 @@
 import org.polypheny.db.plan.Contexts;
 import org.polypheny.db.processing.DeepCopyShuttle;
 import org.polypheny.db.rex.RexLiteral;
+import org.polypheny.db.schema.PolyphenyDbSchema;
 import org.polypheny.db.transaction.Statement;
 import org.polypheny.db.util.Pair;
@@ -46,13 +47,13 @@ public class RoutedAlgBuilder extends AlgBuilder {

     protected Map<Long, List<Pair<Integer, Long>>> physicalPlacementsOfPartitions = new HashMap<>(); // PartitionId -> List<Pair<adapterId, columnPlacementId>>

-    public RoutedAlgBuilder( Context context, AlgOptCluster cluster, AlgOptSchema algOptSchema ) {
-        super( context, cluster, algOptSchema );
+    public RoutedAlgBuilder( Context context, AlgOptCluster cluster, PolyphenyDbSchema rootSchema ) {
+        super( context, cluster, rootSchema );
    }


     public static RoutedAlgBuilder create( Statement statement, AlgOptCluster cluster ) {
-        return new RoutedAlgBuilder( Contexts.EMPTY_CONTEXT, cluster,
statement.getTransaction().getCatalogReader() ); + return new RoutedAlgBuilder( Contexts.EMPTY_CONTEXT, cluster, statement.getTransaction().getCatalogReader().getRootSchema() ); } diff --git a/core/src/main/java/org/polypheny/db/view/MaterializedViewManager.java b/core/src/main/java/org/polypheny/db/view/MaterializedViewManager.java index 2bd14eb56b..2c849eefe5 100644 --- a/core/src/main/java/org/polypheny/db/view/MaterializedViewManager.java +++ b/core/src/main/java/org/polypheny/db/view/MaterializedViewManager.java @@ -91,8 +91,8 @@ public static class TableUpdateVisitor extends AlgShuttleImpl { @Override public AlgNode visit( LogicalRelModify modify ) { if ( modify.getOperation() != Modify.Operation.MERGE ) { - if ( (modify.getEntity().getCatalogEntity() != null) ) { - LogicalTable table = modify.getEntity().getCatalogEntity().unwrap( LogicalTable.class ); + if ( (modify.getEntity() != null) ) { + LogicalTable table = modify.getEntity().unwrap( LogicalTable.class ); names.add( table.getNamespaceName() ); names.add( table.name ); } diff --git a/core/src/main/java/org/polypheny/db/view/ViewManager.java b/core/src/main/java/org/polypheny/db/view/ViewManager.java index 0b4e20794b..f7f47ed8d3 100644 --- a/core/src/main/java/org/polypheny/db/view/ViewManager.java +++ b/core/src/main/java/org/polypheny/db/view/ViewManager.java @@ -27,8 +27,8 @@ import org.polypheny.db.algebra.BiAlg; import org.polypheny.db.algebra.SingleAlg; import org.polypheny.db.algebra.core.Project; -import org.polypheny.db.algebra.core.relational.RelScan; import org.polypheny.db.algebra.core.TableFunctionScan; +import org.polypheny.db.algebra.core.relational.RelScan; import org.polypheny.db.algebra.logical.common.LogicalConditionalExecute; import org.polypheny.db.algebra.logical.relational.LogicalAggregate; import org.polypheny.db.algebra.logical.relational.LogicalCorrelate; @@ -38,18 +38,17 @@ import org.polypheny.db.algebra.logical.relational.LogicalJoin; import org.polypheny.db.algebra.logical.relational.LogicalMatch; import org.polypheny.db.algebra.logical.relational.LogicalMinus; -import org.polypheny.db.algebra.logical.relational.LogicalRelModify; import org.polypheny.db.algebra.logical.relational.LogicalProject; +import org.polypheny.db.algebra.logical.relational.LogicalRelModify; import org.polypheny.db.algebra.logical.relational.LogicalRelScan; import org.polypheny.db.algebra.logical.relational.LogicalRelViewScan; import org.polypheny.db.algebra.logical.relational.LogicalSort; import org.polypheny.db.algebra.logical.relational.LogicalUnion; import org.polypheny.db.algebra.logical.relational.LogicalValues; import org.polypheny.db.algebra.type.AlgDataType; -import org.polypheny.db.catalog.Catalog.EntityType; import org.polypheny.db.catalog.entity.CatalogMaterializedView; import org.polypheny.db.catalog.entity.logical.LogicalTable; -import org.polypheny.db.prepare.AlgOptEntityImpl; +import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.rex.RexBuilder; import org.polypheny.db.rex.RexNode; @@ -116,7 +115,7 @@ public AlgNode visit( LogicalMatch match ) { @Override - public AlgNode visit( RelScan scan ) { + public AlgNode visit( RelScan scan ) { if ( depth == 0 ) { return checkNode( scan ); } @@ -252,11 +251,9 @@ public AlgNode checkNode( AlgNode other ) { if ( other instanceof LogicalRelViewScan ) { return expandViewNode( other ); } else if ( doesSubstituteOrderBy && other instanceof LogicalRelScan ) { - if ( other.getEntity() instanceof AlgOptEntityImpl ) { - LogicalTable catalogTable = 
other.getEntity().getCatalogEntity().unwrap( LogicalTable.class ); - if ( catalogTable.entityType == EntityType.MATERIALIZED_VIEW && ((CatalogMaterializedView) catalogTable).isOrdered() ) { - return orderMaterialized( other ); - } + LogicalTable catalogTable = other.getEntity().unwrap( LogicalTable.class ); + if ( catalogTable.entityType == EntityType.MATERIALIZED_VIEW && ((CatalogMaterializedView) catalogTable).isOrdered() ) { + return orderMaterialized( other ); } } handleNodeType( other ); diff --git a/core/src/test/java/org/polypheny/db/catalog/MockCatalog.java b/core/src/test/java/org/polypheny/db/catalog/MockCatalog.java index 1ec9aa900b..84d0b555e0 100644 --- a/core/src/test/java/org/polypheny/db/catalog/MockCatalog.java +++ b/core/src/test/java/org/polypheny/db/catalog/MockCatalog.java @@ -27,7 +27,7 @@ import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.catalog.entity.CatalogAdapter; import org.polypheny.db.catalog.entity.CatalogAdapter.AdapterType; -import org.polypheny.db.catalog.entity.CatalogCollection; +import org.polypheny.db.catalog.entity.LogicalCollection; import org.polypheny.db.catalog.entity.CatalogCollectionMapping; import org.polypheny.db.catalog.entity.CatalogCollectionPlacement; import org.polypheny.db.catalog.entity.CatalogColumn; @@ -63,6 +63,15 @@ import org.polypheny.db.catalog.exceptions.UnknownSchemaException; import org.polypheny.db.catalog.exceptions.UnknownTableException; import org.polypheny.db.catalog.exceptions.UnknownUserException; +import org.polypheny.db.catalog.logistic.Collation; +import org.polypheny.db.catalog.logistic.DataPlacementRole; +import org.polypheny.db.catalog.logistic.EntityType; +import org.polypheny.db.catalog.logistic.ForeignKeyOption; +import org.polypheny.db.catalog.logistic.IndexType; +import org.polypheny.db.catalog.logistic.NamespaceType; +import org.polypheny.db.catalog.logistic.PartitionType; +import org.polypheny.db.catalog.logistic.Pattern; +import org.polypheny.db.catalog.logistic.PlacementType; import org.polypheny.db.languages.QueryLanguage; import org.polypheny.db.partition.properties.PartitionProperty; import org.polypheny.db.transaction.Transaction; @@ -510,13 +519,13 @@ public void updateMaterializedViewRefreshTime( long materializedId ) { @Override - public CatalogCollection getCollection( long collectionId ) { + public LogicalCollection getCollection( long collectionId ) { throw new NotImplementedException(); } @Override - public List getCollections( long namespaceId, Pattern namePattern ) { + public List getCollections( long namespaceId, Pattern namePattern ) { throw new NotImplementedException(); } diff --git a/core/src/test/java/org/polypheny/db/catalog/MockCatalogReader.java b/core/src/test/java/org/polypheny/db/catalog/MockCatalogReader.java index 0aec5365e8..3a808b1939 100644 --- a/core/src/test/java/org/polypheny/db/catalog/MockCatalogReader.java +++ b/core/src/test/java/org/polypheny/db/catalog/MockCatalogReader.java @@ -78,7 +78,6 @@ import org.polypheny.db.schema.Entity; import org.polypheny.db.schema.ExtensibleEntity; import org.polypheny.db.schema.PolyphenyDbSchema; -import org.polypheny.db.schema.SchemaPlus; import org.polypheny.db.schema.Statistic; import org.polypheny.db.schema.StreamableEntity; import org.polypheny.db.schema.TableType; @@ -346,7 +345,7 @@ public AlgDataType getRowType( AlgDataTypeFactory typeFactory ) { @Override - public Queryable asQueryable( DataContext dataContext, SchemaPlus schema, String tableName ) { + public Queryable asQueryable( DataContext 
dataContext, PolyphenyDbSchema schema, String tableName ) { return null; } @@ -358,7 +357,7 @@ public Type getElementType() { @Override - public Expression getExpression( SchemaPlus schema, String tableName, Class clazz ) { + public Expression getExpression( PolyphenyDbSchema schema, String tableName, Class clazz ) { return null; } diff --git a/dbms/src/main/java/org/polypheny/db/PolyphenyDb.java b/dbms/src/main/java/org/polypheny/db/PolyphenyDb.java index 4bbd79e4a6..2eee906ecb 100644 --- a/dbms/src/main/java/org/polypheny/db/PolyphenyDb.java +++ b/dbms/src/main/java/org/polypheny/db/PolyphenyDb.java @@ -34,7 +34,7 @@ import org.polypheny.db.adapter.index.IndexManager; import org.polypheny.db.catalog.Adapter; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.Catalog.NamespaceType; +import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.catalog.entity.CatalogAdapter.AdapterType; import org.polypheny.db.catalog.exceptions.GenericCatalogException; import org.polypheny.db.catalog.exceptions.UnknownDatabaseException; diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java index 7205fc4dcf..26952e41b9 100644 --- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java @@ -50,19 +50,19 @@ import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.Catalog.Collation; -import org.polypheny.db.catalog.Catalog.ConstraintType; -import org.polypheny.db.catalog.Catalog.DataPlacementRole; -import org.polypheny.db.catalog.Catalog.EntityType; -import org.polypheny.db.catalog.Catalog.ForeignKeyOption; -import org.polypheny.db.catalog.Catalog.IndexType; -import org.polypheny.db.catalog.Catalog.NamespaceType; -import org.polypheny.db.catalog.Catalog.PartitionType; -import org.polypheny.db.catalog.Catalog.PlacementType; -import org.polypheny.db.catalog.NameGenerator; +import org.polypheny.db.catalog.logistic.Collation; +import org.polypheny.db.catalog.logistic.ConstraintType; +import org.polypheny.db.catalog.logistic.DataPlacementRole; +import org.polypheny.db.catalog.logistic.EntityType; +import org.polypheny.db.catalog.logistic.ForeignKeyOption; +import org.polypheny.db.catalog.logistic.IndexType; +import org.polypheny.db.catalog.logistic.NamespaceType; +import org.polypheny.db.catalog.logistic.PartitionType; +import org.polypheny.db.catalog.logistic.PlacementType; +import org.polypheny.db.catalog.logistic.NameGenerator; import org.polypheny.db.catalog.entity.CatalogAdapter; import org.polypheny.db.catalog.entity.CatalogAdapter.AdapterType; -import org.polypheny.db.catalog.entity.CatalogCollection; +import org.polypheny.db.catalog.entity.LogicalCollection; import org.polypheny.db.catalog.entity.CatalogCollectionMapping; import org.polypheny.db.catalog.entity.CatalogCollectionPlacement; import org.polypheny.db.catalog.entity.CatalogColumn; @@ -327,7 +327,7 @@ public void dropAdapter( String name, Statement statement ) throws UnknownAdapte } for ( long id : collectionsToDrop ) { - CatalogCollection collection = catalog.getCollection( id ); + LogicalCollection collection = catalog.getCollection( id ); // Make sure that there is only one adapter if ( collection.placements.size() != 1 ) { @@ -2140,10 +2140,10 @@ private List getColumnInformation( List projectedColum private Map> 
findUnderlyingTablesOfView( AlgNode algNode, Map> underlyingTables, AlgDataType fieldList ) { if ( algNode instanceof LogicalRelScan ) { List underlyingColumns = getUnderlyingColumns( algNode, fieldList ); - underlyingTables.put( algNode.getEntity().getCatalogEntity().id, underlyingColumns ); + underlyingTables.put( algNode.getEntity().id, underlyingColumns ); } else if ( algNode instanceof LogicalRelViewScan ) { List underlyingColumns = getUnderlyingColumns( algNode, fieldList ); - underlyingTables.put( algNode.getEntity().getCatalogEntity().id, underlyingColumns ); + underlyingTables.put( algNode.getEntity().id, underlyingColumns ); } if ( algNode instanceof BiAlg ) { findUnderlyingTablesOfView( ((BiAlg) algNode).getLeft(), underlyingTables, fieldList ); @@ -2156,7 +2156,7 @@ private Map> findUnderlyingTablesOfView( AlgNode algNode, Map getUnderlyingColumns( AlgNode algNode, AlgDataType fieldList ) { - LogicalTable table = algNode.getEntity().getCatalogEntity().unwrap( LogicalTable.class ); + LogicalTable table = algNode.getEntity().unwrap( LogicalTable.class ); List columnIds = table.fieldIds; List logicalColumnNames = table.getColumnNames(); List underlyingColumns = new ArrayList<>(); @@ -2280,7 +2280,7 @@ public void createCollection( long schemaId, String name, boolean ifNotExists, L true ); // Initially create DataPlacement containers on every store the table should be placed. - CatalogCollection catalogCollection = catalog.getCollection( collectionId ); + LogicalCollection catalogCollection = catalog.getCollection( collectionId ); // Trigger rebuild of schema; triggers schema creation on adapters PolySchemaBuilder.getInstance().getCurrent(); @@ -2315,7 +2315,7 @@ private boolean assertEntityExists( long namespaceId, String name, boolean ifNot @Override - public void dropCollection( CatalogCollection catalogCollection, Statement statement ) { + public void dropCollection( LogicalCollection catalogCollection, Statement statement ) { AdapterManager manager = AdapterManager.getInstance(); for ( Integer adapterId : catalogCollection.placements ) { @@ -2328,7 +2328,7 @@ public void dropCollection( CatalogCollection catalogCollection, Statement state } - public void removeDocumentLogistics( CatalogCollection catalogCollection, Statement statement ) { + public void removeDocumentLogistics( LogicalCollection catalogCollection, Statement statement ) { CatalogCollectionMapping mapping = catalog.getCollectionMapping( catalogCollection.id ); LogicalTable table = catalog.getTable( mapping.collectionId ); catalog.deleteTable( table.id ); @@ -2345,7 +2345,7 @@ public void addCollectionPlacement( long namespaceId, String name, List dataStores, Statement statement ) { + public void dropCollectionPlacement( long namespaceId, LogicalCollection collection, List dataStores, Statement statement ) { for ( DataStore store : dataStores ) { store.dropCollection( statement.getPrepareContext(), collection ); @@ -2378,7 +2378,7 @@ public void dropCollectionPlacement( long namespaceId, CatalogCollection collect } - private void removeDocumentPlacementLogistics( CatalogCollection collection, DataStore store, Statement statement ) { + private void removeDocumentPlacementLogistics( LogicalCollection collection, DataStore store, Statement statement ) { CatalogCollectionMapping mapping = catalog.getCollectionMapping( collection.id ); LogicalTable table = catalog.getTable( mapping.collectionId ); @@ -2923,8 +2923,8 @@ public void dropSchema( long databaseId, String schemaName, boolean ifExists, St CatalogSchema 
catalogSchema = catalog.getSchema( databaseId, schemaName ); // Drop all collections in this namespace - List collections = catalog.getCollections( catalogSchema.id, null ); - for ( CatalogCollection collection : collections ) { + List collections = catalog.getCollections( catalogSchema.id, null ); + for ( LogicalCollection collection : collections ) { dropCollection( collection, statement ); } diff --git a/dbms/src/main/java/org/polypheny/db/partition/FrequencyMapImpl.java b/dbms/src/main/java/org/polypheny/db/partition/FrequencyMapImpl.java index accc4cc8d3..40cdc83051 100644 --- a/dbms/src/main/java/org/polypheny/db/partition/FrequencyMapImpl.java +++ b/dbms/src/main/java/org/polypheny/db/partition/FrequencyMapImpl.java @@ -31,9 +31,9 @@ import org.polypheny.db.adapter.AdapterManager; import org.polypheny.db.adapter.DataStore; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.Catalog.DataPlacementRole; -import org.polypheny.db.catalog.Catalog.PartitionType; -import org.polypheny.db.catalog.Catalog.PlacementType; +import org.polypheny.db.catalog.logistic.DataPlacementRole; +import org.polypheny.db.catalog.logistic.PartitionType; +import org.polypheny.db.catalog.logistic.PlacementType; import org.polypheny.db.catalog.entity.CatalogAdapter; import org.polypheny.db.catalog.entity.CatalogColumn; import org.polypheny.db.catalog.entity.CatalogPartition; diff --git a/dbms/src/main/java/org/polypheny/db/partition/PartitionManagerFactoryImpl.java b/dbms/src/main/java/org/polypheny/db/partition/PartitionManagerFactoryImpl.java index 048a425065..28d475b544 100644 --- a/dbms/src/main/java/org/polypheny/db/partition/PartitionManagerFactoryImpl.java +++ b/dbms/src/main/java/org/polypheny/db/partition/PartitionManagerFactoryImpl.java @@ -15,13 +15,13 @@ */ package org.polypheny.db.partition; -import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.logistic.PartitionType; public class PartitionManagerFactoryImpl extends PartitionManagerFactory { @Override - public PartitionManager getPartitionManager( Catalog.PartitionType partitionType ) { + public PartitionManager getPartitionManager( PartitionType partitionType ) { switch ( partitionType ) { case HASH: return new HashPartitionManager(); diff --git a/dbms/src/main/java/org/polypheny/db/processing/AbstractQueryProcessor.java b/dbms/src/main/java/org/polypheny/db/processing/AbstractQueryProcessor.java index 3adbc61fe8..2552de2645 100644 --- a/dbms/src/main/java/org/polypheny/db/processing/AbstractQueryProcessor.java +++ b/dbms/src/main/java/org/polypheny/db/processing/AbstractQueryProcessor.java @@ -78,7 +78,7 @@ import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.Catalog.NamespaceType; +import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.catalog.entity.CatalogSchema; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.config.RuntimeConfig; diff --git a/dbms/src/main/java/org/polypheny/db/processing/ConstraintEnforceAttacher.java b/dbms/src/main/java/org/polypheny/db/processing/ConstraintEnforceAttacher.java index a5997279a6..264e6eb07b 100644 --- a/dbms/src/main/java/org/polypheny/db/processing/ConstraintEnforceAttacher.java +++ b/dbms/src/main/java/org/polypheny/db/processing/ConstraintEnforceAttacher.java @@ -52,9 +52,9 @@ import org.polypheny.db.algebra.logical.relational.LogicalValues; import 
org.polypheny.db.algebra.operators.OperatorName; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.Catalog.ConstraintType; -import org.polypheny.db.catalog.Catalog.EntityType; -import org.polypheny.db.catalog.Catalog.NamespaceType; +import org.polypheny.db.catalog.logistic.ConstraintType; +import org.polypheny.db.catalog.logistic.EntityType; +import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.catalog.entity.CatalogColumn; import org.polypheny.db.catalog.entity.CatalogConstraint; import org.polypheny.db.catalog.entity.CatalogForeignKey; @@ -128,13 +128,13 @@ public static AlgRoot attachOnQueryConstraints( AlgRoot root, Statement statemen public static void attachOnCommitConstraints( AlgNode node, Statement statement ) { ModifyExtractor extractor = new ModifyExtractor(); node.accept( extractor ); - RelModify modify = extractor.getModify(); + RelModify modify = extractor.getModify(); if ( modify == null ) { throw new RuntimeException( "The tree did no conform, while generating the constraint enforcement query!" ); } - statement.getTransaction().getCatalogTables().add( LogicalConstraintEnforcer.getCatalogTable( modify ) ); + statement.getTransaction().getCatalogTables().add( modify.entity.unwrap( LogicalTable.class ) ); } @@ -197,7 +197,7 @@ public static AlgRoot enforceConstraintBeforeQuery( AlgRoot logicalRoot, Stateme if ( !(logicalRoot.alg instanceof RelModify) ) { return logicalRoot; } - final RelModify root = (RelModify) logicalRoot.alg; + final RelModify root = (RelModify) logicalRoot.alg; final Catalog catalog = Catalog.getInstance(); final LogicalTable table; @@ -205,7 +205,7 @@ public static AlgRoot enforceConstraintBeforeQuery( AlgRoot logicalRoot, Stateme final List constraints; final List foreignKeys; final List exportedKeys; - table = root.getEntity().getCatalogEntity().unwrap( LogicalTable.class ); + table = root.getEntity().unwrap( LogicalTable.class ); primaryKey = catalog.getPrimaryKey( table.primaryKey ); constraints = new ArrayList<>( Catalog.getInstance().getConstraints( table.id ) ); foreignKeys = Catalog.getInstance().getForeignKeys( table.id ); @@ -332,8 +332,8 @@ public static AlgRoot enforceConstraintBeforeQuery( AlgRoot logicalRoot, Stateme final AlgNode input = root.getInput().accept( new DeepCopyShuttle() ); final RexBuilder rexBuilder = root.getCluster().getRexBuilder(); for ( final CatalogForeignKey foreignKey : foreignKeys ) { - final AlgOptSchema algOptSchema = root.getCatalogReader(); - final AlgOptEntity algOptEntity = algOptSchema.getTableForMember( Collections.singletonList( foreignKey.getReferencedKeyTableName() ) ); + + final LogicalTable algOptEntity = statement.getDataContext().getRootSchema().getTable( foreignKey.referencedKeyTableId); final LogicalRelScan scan = LogicalRelScan.create( root.getCluster(), algOptEntity ); RexNode joinCondition = rexBuilder.makeLiteral( true ); builder.push( input ); diff --git a/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java b/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java index 7e91705f79..477a1dc814 100644 --- a/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java +++ b/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java @@ -326,7 +326,7 @@ public AlgRoot buildDeleteStatement( Statement statement, List columnNames = new LinkedList<>(); @@ -368,7 +368,7 @@ public AlgRoot buildInsertStatement( Statement statement, List scans = new ArrayList<>(); @@ -474,7 +475,7 @@ private AlgNode 
handleGraphOnRelational( LogicalLpgScan alg, CatalogSchema names private AlgNode handleGraphOnDocument( LogicalLpgScan alg, CatalogSchema namespace, Statement statement, Integer placementId ) { AlgOptCluster cluster = alg.getCluster(); - List collections = catalog.getCollections( namespace.id, null ); + List collections = catalog.getCollections( namespace.id, null ); List> scans = collections.stream() .map( t -> { RoutedAlgBuilder algBuilder = RoutedAlgBuilder.create( statement, alg.getCluster() ); @@ -493,13 +494,12 @@ private AlgNode handleGraphOnDocument( LogicalLpgScan alg, CatalogSchema namespa public AlgNode getRelationalScan( LogicalLpgScan alg, int adapterId, Statement statement ) { - CatalogGraphMapping mapping = Catalog.getInstance().getGraphMapping( alg.getGraph().id ); + CatalogGraphMapping mapping = Catalog.getInstance().getGraphMapping( alg.entity.id ); - PreparingEntity nodesTable = getSubstitutionTable( statement, mapping.nodesId, mapping.idNodeId, adapterId ); - PreparingEntity nodePropertiesTable = getSubstitutionTable( statement, mapping.nodesPropertyId, mapping.idNodesPropertyId, adapterId ); - PreparingEntity edgesTable = getSubstitutionTable( statement, mapping.edgesId, mapping.idEdgeId, adapterId ); - - PreparingEntity edgePropertiesTable = getSubstitutionTable( statement, mapping.edgesPropertyId, mapping.idEdgesPropertyId, adapterId ); + PhysicalTable nodesTable = statement.getDataContext().getRootSchema().getTable( mapping.nodesId ).unwrap( PhysicalTable.class ); + PhysicalTable nodePropertiesTable = statement.getDataContext().getRootSchema().getTable( mapping.nodesPropertyId ).unwrap( PhysicalTable.class ); + PhysicalTable edgesTable = statement.getDataContext().getRootSchema().getTable( mapping.edgesId ).unwrap( PhysicalTable.class ); + PhysicalTable edgePropertiesTable = statement.getDataContext().getRootSchema().getTable( mapping.edgesPropertyId ).unwrap( PhysicalTable.class ); AlgNode node = buildSubstitutionJoin( alg, nodesTable, nodePropertiesTable ); @@ -510,7 +510,7 @@ public AlgNode getRelationalScan( LogicalLpgScan alg, int adapterId, Statement s } - protected PreparingEntity getSubstitutionTable( Statement statement, long tableId, long columnId, int adapterId ) { + protected CatalogEntity getSubstitutionTable( Statement statement, long tableId, long columnId, int adapterId ) { LogicalTable nodes = Catalog.getInstance().getTable( tableId ); CatalogColumnPlacement placement = Catalog.getInstance().getColumnPlacement( adapterId, columnId ); List qualifiedTableName = ImmutableList.of( @@ -521,11 +521,11 @@ protected PreparingEntity getSubstitutionTable( Statement statement, long tableI ), nodes.name + "_" + nodes.partitionProperty.partitionIds.get( 0 ) ); - return statement.getTransaction().getCatalogReader().getTableForMember( qualifiedTableName ); + return statement.getDataContext().getRootSchema().getTable( qualifiedTableName ); } - protected AlgNode buildSubstitutionJoin( AlgNode alg, PreparingEntity nodesTable, PreparingEntity nodePropertiesTable ) { + protected AlgNode buildSubstitutionJoin( AlgNode alg, CatalogEntity nodesTable, CatalogEntity nodePropertiesTable ) { AlgTraitSet out = alg.getTraitSet().replace( ModelTrait.RELATIONAL ); LogicalRelScan nodes = new LogicalRelScan( alg.getCluster(), out, nodesTable ); LogicalRelScan nodesProperty = new LogicalRelScan( alg.getCluster(), out, nodePropertiesTable ); @@ -541,19 +541,19 @@ protected AlgNode buildSubstitutionJoin( AlgNode alg, PreparingEntity nodesTable } - protected RoutedAlgBuilder 
handleDocumentScan( DocumentScan alg, Statement statement, RoutedAlgBuilder builder, Integer adapterId ) { + protected RoutedAlgBuilder handleDocumentScan( DocumentScan alg, Statement statement, RoutedAlgBuilder builder, Integer adapterId ) { Catalog catalog = Catalog.getInstance(); PolyphenyDbCatalogReader reader = statement.getTransaction().getCatalogReader(); - if ( alg.getCollection().getCatalogEntity().namespaceType != NamespaceType.DOCUMENT ) { - if ( alg.getCollection().getCatalogEntity().namespaceType == NamespaceType.GRAPH ) { + if ( alg.entity.namespaceType != NamespaceType.DOCUMENT ) { + if ( alg.entity.namespaceType == NamespaceType.GRAPH ) { return handleDocumentOnGraph( alg, statement, builder ); } return handleTransformerDocScan( alg, statement, builder ); } - CatalogCollection collection = alg.getCollection().getCatalogEntity().unwrap( CatalogCollection.class ); + LogicalCollection collection = alg.entity.unwrap( LogicalCollection.class ); List scans = new ArrayList<>(); @@ -589,8 +589,8 @@ protected RoutedAlgBuilder handleDocumentScan( DocumentScan alg, Statement state } - private RoutedAlgBuilder handleTransformerDocScan( DocumentScan alg, Statement statement, RoutedAlgBuilder builder ) { - AlgNode scan = buildJoinedScan( statement, alg.getCluster(), selectPlacement( catalog.getTable( alg.getCollection().getCatalogEntity().id ) ) ); + private RoutedAlgBuilder handleTransformerDocScan( DocumentScan alg, Statement statement, RoutedAlgBuilder builder ) { + AlgNode scan = buildJoinedScan( statement, alg.getCluster(), selectPlacement( catalog.getTable( alg.entity.id ) ) ); builder.push( scan ); AlgTraitSet out = alg.getTraitSet().replace( ModelTrait.RELATIONAL ); @@ -600,10 +600,10 @@ private RoutedAlgBuilder handleTransformerDocScan( DocumentScan alg, Statement s @NotNull - private RoutedAlgBuilder handleDocumentOnRelational( DocumentScan node, Integer adapterId, Statement statement, RoutedAlgBuilder builder ) { - List columns = catalog.getColumns( node.getCollection().getCatalogEntity().id ); + private RoutedAlgBuilder handleDocumentOnRelational( DocumentScan node, Integer adapterId, Statement statement, RoutedAlgBuilder builder ) { + List columns = catalog.getColumns( node.entity.id ); AlgTraitSet out = node.getTraitSet().replace( ModelTrait.RELATIONAL ); - PreparingEntity subTable = getSubstitutionTable( statement, node.getCollection().getCatalogEntity().id, columns.get( 0 ).id, adapterId ); + CatalogEntity subTable = getSubstitutionTable( statement, node.entity.id, columns.get( 0 ).id, adapterId ); builder.scan( subTable ); builder.project( node.getCluster().getRexBuilder().makeInputRef( subTable.getRowType().getFieldList().get( 1 ).getType(), 1 ) ); builder.push( new LogicalTransformer( builder.getCluster(), List.of( builder.build() ), null, out.replace( ModelTrait.DOCUMENT ), ModelTrait.RELATIONAL, ModelTrait.DOCUMENT, node.getRowType(), false ) ); @@ -611,10 +611,10 @@ private RoutedAlgBuilder handleDocumentOnRelational( DocumentScan node, Integer } - private RoutedAlgBuilder handleDocumentOnGraph( DocumentScan alg, Statement statement, RoutedAlgBuilder builder ) { + private RoutedAlgBuilder handleDocumentOnGraph( DocumentScan alg, Statement statement, RoutedAlgBuilder builder ) { AlgTraitSet out = alg.getTraitSet().replace( ModelTrait.GRAPH ); - builder.lpgScan( alg.getCollection().getCatalogEntity().id ); - builder.lpgMatch( List.of( builder.lpgNodeMatch( List.of( alg.getCollection().getCatalogEntity().name ) ) ), List.of( "n" ) ); + builder.lpgScan( alg.entity.id ); 
+ builder.lpgMatch( List.of( builder.lpgNodeMatch( List.of( alg.entity.name ) ) ), List.of( "n" ) ); AlgNode unrouted = builder.build(); builder.push( new LogicalTransformer( builder.getCluster(), List.of( routeGraph( builder, (AlgNode & LpgAlg) unrouted, statement ) ), null, out.replace( ModelTrait.DOCUMENT ), ModelTrait.GRAPH, ModelTrait.DOCUMENT, alg.getRowType(), true ) ); return builder; diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java b/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java index 31202b82d9..bfb98ffd42 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java @@ -78,28 +78,30 @@ import org.polypheny.db.algebra.type.AlgDataTypeFieldImpl; import org.polypheny.db.algebra.type.AlgRecordType; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.Catalog.EntityType; -import org.polypheny.db.catalog.Catalog.NamespaceType; +import org.polypheny.db.catalog.logistic.EntityType; +import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.catalog.entity.CatalogAdapter; -import org.polypheny.db.catalog.entity.CatalogCollection; +import org.polypheny.db.catalog.entity.LogicalCollection; import org.polypheny.db.catalog.entity.CatalogCollectionMapping; import org.polypheny.db.catalog.entity.CatalogCollectionPlacement; import org.polypheny.db.catalog.entity.CatalogColumn; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; +import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.catalog.entity.CatalogGraphMapping; import org.polypheny.db.catalog.entity.CatalogGraphPlacement; import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.catalog.entity.physical.PhysicalCollection; +import org.polypheny.db.catalog.entity.physical.PhysicalTable; import org.polypheny.db.catalog.exceptions.UnknownColumnException; +import org.polypheny.db.catalog.refactor.ModifiableEntity; import org.polypheny.db.partition.PartitionManager; import org.polypheny.db.partition.PartitionManagerFactory; import org.polypheny.db.plan.AlgOptCluster; -import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.prepare.AlgOptEntityImpl; import org.polypheny.db.prepare.PolyphenyDbCatalogReader; import org.polypheny.db.prepare.Prepare.CatalogReader; -import org.polypheny.db.prepare.Prepare.PreparingEntity; import org.polypheny.db.processing.WhereClauseVisitor; import org.polypheny.db.rex.RexBuilder; import org.polypheny.db.rex.RexCall; @@ -112,8 +114,6 @@ import org.polypheny.db.routing.RoutingManager; import org.polypheny.db.schema.Entity; import org.polypheny.db.schema.ModelTrait; -import org.polypheny.db.schema.ModifiableCollection; -import org.polypheny.db.schema.ModifiableEntity; import org.polypheny.db.schema.PolySchemaBuilder; import org.polypheny.db.schema.graph.ModifiableGraph; import org.polypheny.db.tools.AlgBuilder; @@ -135,10 +135,9 @@ public AlgNode routeDml( LogicalRelModify modify, Statement statement ) { throw new RuntimeException( "Unexpected operator!" 
); } - AlgOptEntityImpl table = (AlgOptEntityImpl) modify.getEntity(); + LogicalTable catalogTable = modify.getEntity().unwrap( LogicalTable.class ); // Get placements of this table - LogicalTable catalogTable = table.getCatalogEntity().unwrap( LogicalTable.class ); // Make sure that this table can be modified if ( !catalogTable.modifiable ) { @@ -412,19 +411,18 @@ else if ( identifiedPartitionForSetValue != -1 ) { pkPlacement.physicalSchemaName ), catalogTable.name + "_" + currentPartitionId ); - AlgOptEntity physical = catalogReader.getTableForMember( qualifiedTableName ); + PhysicalTable physical = catalogReader.getRootSchema().getTable( qualifiedTableName ).unwrap( PhysicalTable.class ); ModifiableEntity modifiableTable = physical.unwrap( ModifiableEntity.class ); // Build DML - RelModify adjustedModify = modifiableTable.toModificationAlg( + Modify adjustedModify = modifiableTable.toModificationAlg( cluster, + cluster.traitSet(), physical, - catalogReader, input, modify.getOperation(), updateColumnList, - sourceExpressionList, - modify.isFlattened() ); + sourceExpressionList ); modifies.add( adjustedModify ); @@ -503,19 +501,18 @@ else if ( identifiedPartitionForSetValue != -1 ) { pkPlacement.physicalSchemaName ), catalogTable.name + "_" + entry.getKey() ); - AlgOptEntity physical = catalogReader.getTableForMember( qualifiedTableName ); + PhysicalTable physical = catalogReader.getRootSchema().getTable( qualifiedTableName ).unwrap( PhysicalTable.class ); ModifiableEntity modifiableTable = physical.unwrap( ModifiableEntity.class ); // Build DML - RelModify adjustedModify = modifiableTable.toModificationAlg( + Modify adjustedModify = modifiableTable.toModificationAlg( cluster, + modify.getTraitSet(), physical, - catalogReader, input, modify.getOperation(), updateColumnList, - sourceExpressionList, - modify.isFlattened() ); + sourceExpressionList ); statement.getDataContext().addContext(); modifies.add( new LogicalContextSwitcher( adjustedModify ) ); @@ -562,7 +559,7 @@ else if ( identifiedPartitionForSetValue != -1 ) { if ( worstCaseRouting ) { log.debug( "PartitionColumnID was not an explicit part of statement, partition routing will therefore assume worst-case: Routing to ALL PARTITIONS" ); - accessedPartitionList = catalogTable.partitionProperty.partitionIds.stream().collect( Collectors.toSet() ); + accessedPartitionList = new HashSet<>( catalogTable.partitionProperty.partitionIds ); } } else { // un-partitioned tables only have one partition anyway @@ -592,10 +589,10 @@ else if ( identifiedPartitionForSetValue != -1 ) { pkPlacement.physicalSchemaName ), catalogTable.name + "_" + partitionId ); - AlgOptEntity physical = catalogReader.getTableForMember( qualifiedTableName ); + PhysicalTable physical = catalogReader.getRootSchema().getTable( qualifiedTableName ).unwrap( PhysicalTable.class ); // Build DML - RelModify adjustedModify; + Modify adjustedModify; AlgNode input = buildDml( super.recursiveCopy( modify.getInput( 0 ) ), RoutedAlgBuilder.create( statement, cluster ), @@ -612,13 +609,12 @@ else if ( identifiedPartitionForSetValue != -1 ) { if ( modifiableTable != null && modifiableTable == physical.unwrap( Entity.class ) ) { adjustedModify = modifiableTable.toModificationAlg( cluster, + input.getTraitSet(), physical, - catalogReader, input, modify.getOperation(), updateColumnList, - sourceExpressionList, - modify.isFlattened() + sourceExpressionList ); } else { adjustedModify = LogicalRelModify.create( @@ -719,7 +715,7 @@ public AlgNode handleBatchIterator( AlgNode alg, Statement 
statement, LogicalQue public AlgNode routeDocumentDml( LogicalDocumentModify alg, Statement statement, LogicalQueryInformation queryInformation, Integer adapterId ) { PolyphenyDbCatalogReader reader = statement.getTransaction().getCatalogReader(); - CatalogCollection collection = alg.getEntity().getCatalogEntity().unwrap( CatalogCollection.class ); + LogicalCollection collection = alg.entity.unwrap( LogicalCollection.class ); List modifies = new ArrayList<>(); @@ -735,17 +731,17 @@ public AlgNode routeDocumentDml( LogicalDocumentModify alg, Statement statement, String collectionName = collection.name + "_" + placement.id; - AlgOptEntity document = reader.getCollection( List.of( namespaceName, collectionName ) ); + PhysicalCollection document = reader.getRootSchema().getCollection( List.of( namespaceName, collectionName ) ).unwrap( PhysicalCollection.class ); if ( !adapter.getSupportedNamespaces().contains( NamespaceType.DOCUMENT ) ) { // move "slower" updates in front modifies.add( 0, attachRelationalModify( alg, statement, placementId, queryInformation ) ); continue; } - modifies.add( ((ModifiableCollection) document.getEntity()).toModificationAlg( + modifies.add( document.unwrap( ModifiableEntity.class ).toModificationAlg( alg.getCluster(), + alg.getTraitSet(), document, - statement.getTransaction().getCatalogReader(), buildDocumentDml( alg.getInput(), statement, queryInformation ), alg.operation, alg.getKeys(), @@ -762,16 +758,13 @@ public AlgNode routeDocumentDml( LogicalDocumentModify alg, Statement statement, @Override public AlgNode routeGraphDml( LogicalLpgModify alg, Statement statement ) { - LogicalGraph catalogGraph = alg.getGraph(); + LogicalGraph catalogGraph = alg.entity.unwrap( LogicalGraph.class ); return routeGraphDml( alg, statement, catalogGraph, catalogGraph.placements ); } @Override public AlgNode routeGraphDml( LogicalLpgModify alg, Statement statement, LogicalGraph catalogGraph, List placements ) { - if ( alg.getGraph() == null ) { - throw new RuntimeException( "Error while routing graph" ); - } PolyphenyDbCatalogReader reader = statement.getTransaction().getCatalogReader(); @@ -855,16 +848,14 @@ private AlgNode buildGraphDml( AlgNode node, Statement statement, int adapterId private AlgNode attachRelationalModify( LogicalDocumentModify alg, Statement statement, int adapterId, LogicalQueryInformation queryInformation ) { - CatalogCollectionMapping mapping = Catalog.getInstance().getCollectionMapping( alg.getCollection().getCatalogEntity().id ); - - PreparingEntity collectionTable = getSubstitutionTable( statement, mapping.collectionId, mapping.idId, adapterId ); + CatalogCollectionMapping mapping = Catalog.getInstance().getCollectionMapping( alg.entity.id ); switch ( alg.operation ) { case INSERT: - return attachRelationalDocInsert( alg, statement, collectionTable, queryInformation, adapterId ).get( 0 ); + return attachRelationalDocInsert( alg, statement, alg.entity, queryInformation, adapterId ).get( 0 ); case UPDATE: case DELETE: - return attachRelationalDoc( alg, statement, collectionTable, queryInformation, adapterId ).get( 0 ); + return attachRelationalDoc( alg, statement, alg.entity, queryInformation, adapterId ).get( 0 ); case MERGE: throw new RuntimeException( "MERGE is not supported." 
); default: @@ -874,7 +865,7 @@ private AlgNode attachRelationalModify( LogicalDocumentModify alg, Statement sta } - private List attachRelationalDoc( LogicalDocumentModify alg, Statement statement, PreparingEntity collectionTable, LogicalQueryInformation queryInformation, int adapterId ) { + private List attachRelationalDoc( LogicalDocumentModify alg, Statement statement, CatalogEntity collectionTable, LogicalQueryInformation queryInformation, int adapterId ) { RoutedAlgBuilder builder = attachDocUpdate( alg.getInput(), statement, collectionTable, RoutedAlgBuilder.create( statement, alg.getCluster() ), queryInformation, adapterId ); RexBuilder rexBuilder = alg.getCluster().getRexBuilder(); AlgBuilder algBuilder = AlgBuilder.create( statement ); @@ -918,11 +909,11 @@ private AlgNode createDocumentTransform( AlgNode query, RexBuilder rexBuilder ) } - private RoutedAlgBuilder attachDocUpdate( AlgNode alg, Statement statement, PreparingEntity collectionTable, RoutedAlgBuilder builder, LogicalQueryInformation information, int adapterId ) { + private RoutedAlgBuilder attachDocUpdate( AlgNode alg, Statement statement, CatalogEntity collectionTable, RoutedAlgBuilder builder, LogicalQueryInformation information, int adapterId ) { switch ( ((DocumentAlg) alg).getDocType() ) { case SCAN: - handleDocumentScan( (DocumentScan) alg, statement, builder, adapterId ); + handleDocumentScan( (DocumentScan) alg, statement, builder, adapterId ); break; case VALUES: builder.push( LogicalDocumentValues.create( alg.getCluster(), ((DocumentValues) alg).documentTuples ) ); @@ -955,7 +946,7 @@ private RoutedAlgBuilder attachDocUpdate( AlgNode alg, Statement statement, Prep } - private List attachRelationalDocInsert( LogicalDocumentModify alg, Statement statement, PreparingEntity collectionTable, LogicalQueryInformation queryInformation, int adapterId ) { + private List attachRelationalDocInsert( LogicalDocumentModify alg, Statement statement, CatalogEntity collectionTable, LogicalQueryInformation queryInformation, int adapterId ) { if ( alg.getInput() instanceof DocumentValues ) { // simple value insert AlgNode values = ((LogicalDocumentValues) alg.getInput()).getRelationalEquivalent( List.of(), List.of( collectionTable ), statement.getTransaction().getCatalogReader() ).get( 0 ); @@ -969,10 +960,10 @@ private List attachRelationalDocInsert( LogicalDocumentModify alg, Stat private AlgNode attachRelationalModify( LogicalLpgModify alg, int adapterId, Statement statement ) { CatalogGraphMapping mapping = Catalog.getInstance().getGraphMapping( alg.entity.id ); - PreparingEntity nodesTable = getSubstitutionTable( statement, mapping.nodesId, mapping.idNodeId, adapterId ); - PreparingEntity nodePropertiesTable = getSubstitutionTable( statement, mapping.nodesPropertyId, mapping.idNodesPropertyId, adapterId ); - PreparingEntity edgesTable = getSubstitutionTable( statement, mapping.edgesId, mapping.idEdgeId, adapterId ); - PreparingEntity edgePropertiesTable = getSubstitutionTable( statement, mapping.edgesPropertyId, mapping.idEdgesPropertyId, adapterId ); + PhysicalTable nodesTable = getSubstitutionTable( statement, mapping.nodesId, mapping.idNodeId, adapterId ).unwrap( PhysicalTable.class ); + PhysicalTable nodePropertiesTable = getSubstitutionTable( statement, mapping.nodesPropertyId, mapping.idNodesPropertyId, adapterId ).unwrap( PhysicalTable.class ); + PhysicalTable edgesTable = getSubstitutionTable( statement, mapping.edgesId, mapping.idEdgeId, adapterId ).unwrap( PhysicalTable.class ); + PhysicalTable 
edgePropertiesTable = getSubstitutionTable( statement, mapping.edgesPropertyId, mapping.idEdgesPropertyId, adapterId ).unwrap( PhysicalTable.class ); List inputs = new ArrayList<>(); switch ( alg.operation ) { @@ -1018,7 +1009,7 @@ private AlgNode attachRelationalModify( LogicalLpgModify alg, int adapterId, Sta } - private AlgNode attachRelationalGraphUpdate( LogicalLpgModify alg, Statement statement, PreparingEntity nodesTable, PreparingEntity nodePropertiesTable, PreparingEntity edgesTable, PreparingEntity edgePropertiesTable, int adapterId ) { + private AlgNode attachRelationalGraphUpdate( LogicalLpgModify alg, Statement statement, CatalogEntity nodesTable, CatalogEntity nodePropertiesTable, CatalogEntity edgesTable, CatalogEntity edgePropertiesTable, int adapterId ) { AlgNode project = new LogicalLpgProject( alg.getCluster(), alg.getTraitSet(), buildGraphDml( alg.getInput(), statement, adapterId ), alg.operations, alg.ids ); List inputs = new ArrayList<>(); @@ -1042,7 +1033,7 @@ private AlgNode attachRelationalGraphUpdate( LogicalLpgModify alg, Statement sta } - private AlgNode attachRelationalGraphDelete( LogicalLpgModify alg, Statement statement, PreparingEntity nodesTable, PreparingEntity nodePropertiesTable, PreparingEntity edgesTable, PreparingEntity edgePropertiesTable, int adapterId ) { + private AlgNode attachRelationalGraphDelete( LogicalLpgModify alg, Statement statement, CatalogEntity nodesTable, CatalogEntity nodePropertiesTable, CatalogEntity edgesTable, CatalogEntity edgePropertiesTable, int adapterId ) { AlgNode project = new LogicalLpgProject( alg.getCluster(), alg.getTraitSet(), buildGraphDml( alg.getInput(), statement, adapterId ), alg.operations, alg.ids ); List inputs = new ArrayList<>(); @@ -1064,7 +1055,7 @@ private AlgNode attachRelationalGraphDelete( LogicalLpgModify alg, Statement sta } - private List attachPreparedGraphNodeModifyDelete( AlgOptCluster cluster, PreparingEntity nodesTable, PreparingEntity nodePropertiesTable, Statement statement ) { + private List attachPreparedGraphNodeModifyDelete( AlgOptCluster cluster, CatalogEntity nodesTable, CatalogEntity nodePropertiesTable, Statement statement ) { AlgBuilder algBuilder = AlgBuilder.create( statement ); RexBuilder rexBuilder = algBuilder.getRexBuilder(); AlgDataTypeFactory typeFactory = rexBuilder.getTypeFactory(); @@ -1095,7 +1086,7 @@ private List attachPreparedGraphNodeModifyDelete( AlgOptCluster cluster } - private AlgNode attachRelationalRelatedInsert( LogicalLpgModify alg, Statement statement, PreparingEntity nodesTable, PreparingEntity nodePropertiesTable, PreparingEntity edgesTable, PreparingEntity edgePropertiesTable, int adapterId ) { + private AlgNode attachRelationalRelatedInsert( LogicalLpgModify alg, Statement statement, CatalogEntity nodesTable, CatalogEntity nodePropertiesTable, CatalogEntity edgesTable, CatalogEntity edgePropertiesTable, int adapterId ) { AlgNode project = buildGraphDml( alg.getInput(), statement, adapterId ); List inputs = new ArrayList<>(); @@ -1116,7 +1107,7 @@ private AlgNode attachRelationalRelatedInsert( LogicalLpgModify alg, Statement s } - private List attachPreparedGraphNodeModifyInsert( AlgOptCluster cluster, PreparingEntity nodesTable, PreparingEntity nodePropertiesTable, Statement statement ) { + private List attachPreparedGraphNodeModifyInsert( AlgOptCluster cluster, CatalogEntity nodesTable, CatalogEntity nodePropertiesTable, Statement statement ) { AlgBuilder algBuilder = AlgBuilder.create( statement ); RexBuilder rexBuilder = algBuilder.getRexBuilder(); 
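// Likely builds one prepared relational insert per substitution table (nodes, then node properties); the RexBuilder and type factory below create the parameter expressions.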
AlgDataTypeFactory typeFactory = rexBuilder.getTypeFactory(); @@ -1145,7 +1136,7 @@ private List attachPreparedGraphNodeModifyInsert( AlgOptCluster cluster } - private List attachPreparedGraphEdgeModifyDelete( AlgOptCluster cluster, PreparingEntity edgesTable, PreparingEntity edgePropertiesTable, Statement statement ) { + private List attachPreparedGraphEdgeModifyDelete( AlgOptCluster cluster, CatalogEntity edgesTable, CatalogEntity edgePropertiesTable, Statement statement ) { AlgBuilder algBuilder = AlgBuilder.create( statement ); RexBuilder rexBuilder = algBuilder.getRexBuilder(); AlgDataTypeFactory typeFactory = rexBuilder.getTypeFactory(); @@ -1173,7 +1164,7 @@ private List attachPreparedGraphEdgeModifyDelete( AlgOptCluster cluster } - private List attachPreparedGraphEdgeModifyInsert( AlgOptCluster cluster, PreparingEntity edgesTable, PreparingEntity edgePropertiesTable, Statement statement ) { + private List attachPreparedGraphEdgeModifyInsert( AlgOptCluster cluster, CatalogEntity edgesTable, CatalogEntity edgePropertiesTable, Statement statement ) { AlgBuilder algBuilder = AlgBuilder.create( statement ); RexBuilder rexBuilder = algBuilder.getRexBuilder(); AlgDataTypeFactory typeFactory = rexBuilder.getTypeFactory(); @@ -1210,8 +1201,8 @@ private AlgNode switchContext( AlgNode node ) { } - private RelModify getModify( AlgOptEntity table, AlgNode input, Statement statement, Operation operation, List updateList, List sourceList ) { - return table.unwrap( ModifiableEntity.class ).toModificationAlg( input.getCluster(), table, statement.getTransaction().getCatalogReader(), input, operation, updateList, sourceList, true ); + private Modify getModify( CatalogEntity table, AlgNode input, Statement statement, Operation operation, List updateList, List sourceList ) { + return table.unwrap( org.polypheny.db.catalog.refactor.ModifiableEntity.class ).toModificationAlg( input.getCluster(), input.getTraitSet(), table, input, operation, updateList, sourceList ); } @@ -1252,11 +1243,11 @@ private AlgBuilder buildDml( builder.push( scan.copy( scan.getTraitSet().replace( ModelTrait.DOCUMENT ), scan.getInputs() ) ); return builder; } else if ( node instanceof LogicalRelScan && node.getEntity() != null ) { - AlgOptEntityImpl table = (AlgOptEntityImpl) node.getEntity(); + // Special handling for INSERT INTO foo SELECT * FROM foo2 - if ( table.getCatalogEntity().id != catalogTable.id ) { - return handleSelectFromOtherTable( builder, catalogTable, statement, table ); + if ( node.getEntity().unwrap( LogicalTable.class ).id != catalogTable.id ) { + return handleSelectFromOtherTable( builder, node.getEntity().unwrap( LogicalTable.class ), statement ); } builder = super.handleScan( @@ -1341,12 +1332,10 @@ private AlgBuilder buildDml( } - private AlgBuilder handleSelectFromOtherTable( RoutedAlgBuilder builder, LogicalTable catalogTable, Statement statement, AlgOptEntityImpl table ) { - LogicalTable fromTable; + private AlgBuilder handleSelectFromOtherTable( RoutedAlgBuilder builder, LogicalTable catalogTable, Statement statement ) { + LogicalTable fromTable = catalogTable; // Select from other table - fromTable = table.getCatalogEntity().unwrap( LogicalTable.class ); - - if ( fromTable.partitionProperty.isPartitioned ) { + if ( statement.getDataContext().getRootSchema().isPartitioned( fromTable.id ) ) { throw new UnsupportedOperationException( "DMLs from other partitioned tables are not supported" ); } @@ -1358,8 +1347,7 @@ private AlgBuilder handleSelectFromOtherTable( RoutedAlgBuilder builder, Logical List nodes = new ArrayList<>(); for ( CatalogColumnPlacement pkPlacement : pkPlacements ) { -
catalog.getColumnPlacementsOnAdapterPerTable( pkPlacement.adapterId, catalogTable.id ); - fromTable = table.getCatalogEntity().unwrap( LogicalTable.class ); + catalog.getColumnPlacementsOnAdapterPerTable( pkPlacement.adapterId, fromTable.id ); CatalogPartitionPlacement partition = catalog.getPartitionPlacement( pkPlacement.adapterId, fromTable.partitionProperty.partitionIds.get( 0 ) ); @@ -1373,7 +1361,7 @@ private AlgBuilder handleSelectFromOtherTable( RoutedAlgBuilder builder, Logical pkPlacements.get( 0 ).physicalSchemaName, partition.physicalTableName, partition.partitionId, - fromTable.getNamespaceType() ).build() ); + fromTable.namespaceType ).build() ); } diff --git a/dbms/src/main/java/org/polypheny/db/transaction/EntityAccessMap.java b/dbms/src/main/java/org/polypheny/db/transaction/EntityAccessMap.java index d64ffcf958..2e62ae402a 100644 --- a/dbms/src/main/java/org/polypheny/db/transaction/EntityAccessMap.java +++ b/dbms/src/main/java/org/polypheny/db/transaction/EntityAccessMap.java @@ -39,7 +39,7 @@ import org.polypheny.db.algebra.core.lpg.LpgAlg; import org.polypheny.db.algebra.core.lpg.LpgModify; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.Catalog.NamespaceType; +import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.config.RuntimeConfig; import org.polypheny.db.plan.AlgOptEntity; diff --git a/dbms/src/main/java/org/polypheny/db/view/MaterializedViewManagerImpl.java b/dbms/src/main/java/org/polypheny/db/view/MaterializedViewManagerImpl.java index 6e6634209b..ff1df6a1f6 100644 --- a/dbms/src/main/java/org/polypheny/db/view/MaterializedViewManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/view/MaterializedViewManagerImpl.java @@ -41,7 +41,7 @@ import org.polypheny.db.algebra.constant.Kind; import org.polypheny.db.algebra.logical.relational.LogicalRelViewScan; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.Catalog.EntityType; +import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.entity.CatalogColumn; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; import org.polypheny.db.catalog.entity.CatalogMaterializedView; @@ -433,7 +433,7 @@ public void commitTransaction( Transaction transaction ) { private void prepareSourceRel( Statement sourceStatement, AlgCollation algCollation, AlgNode sourceRel ) { AlgOptCluster cluster = AlgOptCluster.create( sourceStatement.getQueryProcessor().getPlanner(), - new RexBuilder( sourceStatement.getTransaction().getTypeFactory() ) ); + new RexBuilder( sourceStatement.getTransaction().getTypeFactory() ), traitSet, rootSchema ); prepareNode( sourceRel, cluster, algCollation ); } diff --git a/dbms/src/test/java/org/polypheny/db/cypher/DdlTest.java b/dbms/src/test/java/org/polypheny/db/cypher/DdlTest.java index c1e43d4e99..38841f10d9 100644 --- a/dbms/src/test/java/org/polypheny/db/cypher/DdlTest.java +++ b/dbms/src/test/java/org/polypheny/db/cypher/DdlTest.java @@ -26,7 +26,7 @@ import org.polypheny.db.AdapterTestSuite; import org.polypheny.db.TestHelper.JdbcConnection; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.Catalog.Pattern; +import org.polypheny.db.catalog.logistic.Pattern; import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.excluded.CassandraExcluded; import org.polypheny.db.webui.models.Result; diff --git a/dbms/src/test/java/org/polypheny/db/misc/HorizontalPartitioningTest.java 
b/dbms/src/test/java/org/polypheny/db/misc/HorizontalPartitioningTest.java index 89892a3437..0e7f5a9d70 100644 --- a/dbms/src/test/java/org/polypheny/db/misc/HorizontalPartitioningTest.java +++ b/dbms/src/test/java/org/polypheny/db/misc/HorizontalPartitioningTest.java @@ -34,8 +34,8 @@ import org.polypheny.db.TestHelper; import org.polypheny.db.TestHelper.JdbcConnection; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.Catalog.PartitionType; -import org.polypheny.db.catalog.Catalog.Pattern; +import org.polypheny.db.catalog.logistic.PartitionType; +import org.polypheny.db.catalog.logistic.Pattern; import org.polypheny.db.catalog.entity.CatalogDataPlacement; import org.polypheny.db.catalog.entity.CatalogPartition; import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; diff --git a/dbms/src/test/java/org/polypheny/db/misc/VerticalPartitioningTest.java b/dbms/src/test/java/org/polypheny/db/misc/VerticalPartitioningTest.java index cc39db093b..d63185b2ae 100644 --- a/dbms/src/test/java/org/polypheny/db/misc/VerticalPartitioningTest.java +++ b/dbms/src/test/java/org/polypheny/db/misc/VerticalPartitioningTest.java @@ -31,7 +31,7 @@ import org.polypheny.db.TestHelper; import org.polypheny.db.TestHelper.JdbcConnection; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.Catalog.Pattern; +import org.polypheny.db.catalog.logistic.Pattern; import org.polypheny.db.catalog.entity.CatalogDataPlacement; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.excluded.CassandraExcluded; diff --git a/dbms/src/test/java/org/polypheny/db/mql/DdlTest.java b/dbms/src/test/java/org/polypheny/db/mql/DdlTest.java index 869965dcef..3b366d8417 100644 --- a/dbms/src/test/java/org/polypheny/db/mql/DdlTest.java +++ b/dbms/src/test/java/org/polypheny/db/mql/DdlTest.java @@ -32,8 +32,8 @@ import org.polypheny.db.TestHelper.JdbcConnection; import org.polypheny.db.TestHelper.MongoConnection; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.Catalog.Pattern; -import org.polypheny.db.catalog.entity.CatalogCollection; +import org.polypheny.db.catalog.logistic.Pattern; +import org.polypheny.db.catalog.entity.LogicalCollection; import org.polypheny.db.catalog.entity.CatalogSchema; import org.polypheny.db.catalog.exceptions.UnknownSchemaException; import org.polypheny.db.excluded.CassandraExcluded; @@ -90,7 +90,7 @@ public void addPlacementTest() throws UnknownSchemaException, SQLException { execute( "db.createCollection(\"" + collectionName + "\")" ); - CatalogCollection collection = catalog.getCollections( namespace.id, new Pattern( collectionName ) ).get( 0 ); + LogicalCollection collection = catalog.getCollections( namespace.id, new Pattern( collectionName ) ).get( 0 ); assertEquals( collection.placements.size(), 1 ); @@ -121,7 +121,7 @@ public void deletePlacementTest() throws UnknownSchemaException, SQLException { CatalogSchema namespace = catalog.getSchema( Catalog.defaultDatabaseId, database ); - CatalogCollection collection = catalog.getCollections( namespace.id, new Pattern( collectionName ) ).get( 0 ); + LogicalCollection collection = catalog.getCollections( namespace.id, new Pattern( collectionName ) ).get( 0 ); assertEquals( collection.placements.size(), 1 ); diff --git a/dbms/src/test/java/org/polypheny/db/statistics/StatisticsTest.java b/dbms/src/test/java/org/polypheny/db/statistics/StatisticsTest.java index ae4266cbde..0c3b4a9e59 100644 --- 
a/dbms/src/test/java/org/polypheny/db/statistics/StatisticsTest.java +++ b/dbms/src/test/java/org/polypheny/db/statistics/StatisticsTest.java @@ -31,7 +31,7 @@ import org.polypheny.db.TestHelper; import org.polypheny.db.TestHelper.JdbcConnection; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.Catalog.Pattern; +import org.polypheny.db.catalog.logistic.Pattern; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.exceptions.UnknownDatabaseException; import org.polypheny.db.catalog.exceptions.UnknownSchemaException; diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/DashboardInformation.java b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/DashboardInformation.java index df8af62994..ae56932a4a 100644 --- a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/DashboardInformation.java +++ b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/DashboardInformation.java @@ -21,7 +21,7 @@ import lombok.Getter; import lombok.Setter; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.Catalog.NamespaceType; +import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.catalog.entity.CatalogAdapter.AdapterType; import org.polypheny.db.monitoring.core.MonitoringServiceProvider; import org.polypheny.db.monitoring.events.metrics.DmlDataPoint; diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticQueryProcessor.java b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticQueryProcessor.java index 5853a13bc9..203e35a336 100644 --- a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticQueryProcessor.java +++ b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticQueryProcessor.java @@ -27,8 +27,8 @@ import org.polypheny.db.algebra.AlgRoot; import org.polypheny.db.algebra.constant.Kind; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.Catalog.EntityType; -import org.polypheny.db.catalog.Catalog.NamespaceType; +import org.polypheny.db.catalog.logistic.EntityType; +import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.catalog.entity.CatalogColumn; import org.polypheny.db.catalog.entity.CatalogSchema; import org.polypheny.db.catalog.entity.logical.LogicalTable; diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticTable.java b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticTable.java index 20841637a2..0dd8f89e59 100644 --- a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticTable.java +++ b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticTable.java @@ -23,8 +23,8 @@ import lombok.Getter; import lombok.Setter; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.Catalog.EntityType; -import org.polypheny.db.catalog.Catalog.NamespaceType; +import org.polypheny.db.catalog.logistic.EntityType; +import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.catalog.entity.logical.LogicalTable; diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticsManagerImpl.java b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticsManagerImpl.java index dd02e7380f..f7004f88c4 100644 --- a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticsManagerImpl.java +++ 
b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticsManagerImpl.java @@ -53,7 +53,7 @@ import org.polypheny.db.algebra.operators.OperatorName; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.Catalog.EntityType; +import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.entity.CatalogColumn; import org.polypheny.db.catalog.entity.CatalogSchema; import org.polypheny.db.catalog.entity.logical.LogicalTable; @@ -536,7 +536,7 @@ private AlgNode getQueryNode( QueryResult queryResult, NodeType nodeType ) { PolyphenyDbCatalogReader reader = statement.getTransaction().getCatalogReader(); AlgBuilder relBuilder = AlgBuilder.create( statement ); final RexBuilder rexBuilder = relBuilder.getRexBuilder(); - final AlgOptCluster cluster = AlgOptCluster.create( statement.getQueryProcessor().getPlanner(), rexBuilder ); + final AlgOptCluster cluster = AlgOptCluster.create( statement.getQueryProcessor().getPlanner(), rexBuilder, traitSet, rootSchema ); AlgNode queryNode; LogicalRelScan tableScan = getLogicalScan( queryResult.getSchema(), queryResult.getTable(), reader, cluster ); diff --git a/plugins/avatica-interface/src/main/java/org/polypheny/db/avatica/DbmsMeta.java b/plugins/avatica-interface/src/main/java/org/polypheny/db/avatica/DbmsMeta.java index a9107aebff..84921db7cd 100644 --- a/plugins/avatica-interface/src/main/java/org/polypheny/db/avatica/DbmsMeta.java +++ b/plugins/avatica-interface/src/main/java/org/polypheny/db/avatica/DbmsMeta.java @@ -67,10 +67,10 @@ import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.algebra.type.AlgDataTypeSystem; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.Catalog.EntityType; -import org.polypheny.db.catalog.Catalog.EntityType.PrimitiveTableType; -import org.polypheny.db.catalog.Catalog.NamespaceType; -import org.polypheny.db.catalog.Catalog.Pattern; +import org.polypheny.db.catalog.logistic.EntityType; +import org.polypheny.db.catalog.logistic.EntityType.PrimitiveTableType; +import org.polypheny.db.catalog.logistic.NamespaceType; +import org.polypheny.db.catalog.logistic.Pattern; import org.polypheny.db.catalog.entity.CatalogColumn; import org.polypheny.db.catalog.entity.CatalogColumn.PrimitiveCatalogColumn; import org.polypheny.db.catalog.entity.CatalogDatabase; diff --git a/plugins/avatica-interface/src/main/java/org/polypheny/db/avatica/PolyphenyDbSignature.java b/plugins/avatica-interface/src/main/java/org/polypheny/db/avatica/PolyphenyDbSignature.java index 74ff2c1add..9c853e0099 100644 --- a/plugins/avatica-interface/src/main/java/org/polypheny/db/avatica/PolyphenyDbSignature.java +++ b/plugins/avatica-interface/src/main/java/org/polypheny/db/avatica/PolyphenyDbSignature.java @@ -35,7 +35,7 @@ import org.polypheny.db.algebra.AlgCollation; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeField; -import org.polypheny.db.catalog.Catalog.NamespaceType; +import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.routing.ExecutionTimeMonitor; import org.polypheny.db.runtime.Bindable; import org.polypheny.db.schema.PolyphenyDbSchema; diff --git a/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/CottontailEntity.java b/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/CottontailEntity.java index f4a3811a09..9c24add7ef 100644 --- 
a/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/CottontailEntity.java +++ b/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/CottontailEntity.java @@ -40,6 +40,7 @@ import org.polypheny.db.prepare.Prepare.CatalogReader; import org.polypheny.db.rex.RexNode; import org.polypheny.db.schema.ModifiableEntity; +import org.polypheny.db.schema.PolyphenyDbSchema; import org.polypheny.db.schema.SchemaPlus; import org.polypheny.db.schema.TranslatableEntity; import org.polypheny.db.schema.impl.AbstractTableQueryable; @@ -133,7 +134,7 @@ public RelModify toModificationAlg( @Override - public Queryable asQueryable( DataContext dataContext, SchemaPlus schema, String tableName ) { + public Queryable asQueryable( DataContext dataContext, PolyphenyDbSchema schema, String tableName ) { return new CottontailTableQueryable( dataContext, schema, tableName ); } diff --git a/plugins/cql-language/src/main/java/org/polypheny/db/cql/CqlLanguagePlugin.java b/plugins/cql-language/src/main/java/org/polypheny/db/cql/CqlLanguagePlugin.java index 95286ebd92..35e78e81b9 100644 --- a/plugins/cql-language/src/main/java/org/polypheny/db/cql/CqlLanguagePlugin.java +++ b/plugins/cql-language/src/main/java/org/polypheny/db/cql/CqlLanguagePlugin.java @@ -25,7 +25,7 @@ import org.polypheny.db.PolyImplementation; import org.polypheny.db.adapter.java.JavaTypeFactory; import org.polypheny.db.algebra.AlgRoot; -import org.polypheny.db.catalog.Catalog.NamespaceType; +import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.cql.parser.CqlParser; import org.polypheny.db.information.InformationManager; import org.polypheny.db.information.InformationObserver; diff --git a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvTranslatableTable.java b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvTranslatableTable.java index 67748ef8cf..05a6c40bde 100644 --- a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvTranslatableTable.java +++ b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvTranslatableTable.java @@ -47,8 +47,8 @@ import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptEntity.ToAlgContext; import org.polypheny.db.plan.AlgTraitSet; +import org.polypheny.db.schema.PolyphenyDbSchema; import org.polypheny.db.schema.QueryableEntity; -import org.polypheny.db.schema.SchemaPlus; import org.polypheny.db.schema.Schemas; import org.polypheny.db.schema.TranslatableEntity; import org.polypheny.db.util.Source; @@ -90,7 +90,7 @@ public Enumerator enumerator() { @Override - public Expression getExpression( SchemaPlus schema, String tableName, Class clazz ) { + public Expression getExpression( PolyphenyDbSchema schema, String tableName, Class clazz ) { return Schemas.tableExpression( schema, getElementType(), tableName, clazz ); } @@ -102,7 +102,7 @@ public Type getElementType() { @Override - public Queryable asQueryable( DataContext dataContext, SchemaPlus schema, String tableName ) { + public Queryable asQueryable( DataContext dataContext, PolyphenyDbSchema schema, String tableName ) { throw new UnsupportedOperationException(); } diff --git a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/CypherLanguagePlugin.java b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/CypherLanguagePlugin.java index 557f91f7d5..8d77388063 100644 --- a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/CypherLanguagePlugin.java +++ 
b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/CypherLanguagePlugin.java @@ -23,7 +23,7 @@ import org.pf4j.PluginWrapper; import org.polypheny.db.PolyImplementation; import org.polypheny.db.algebra.AlgRoot; -import org.polypheny.db.catalog.Catalog.NamespaceType; +import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.cypher.parser.CypherParserImpl; import org.polypheny.db.information.InformationManager; import org.polypheny.db.languages.LanguageManager; diff --git a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/CypherProcessorImpl.java b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/CypherProcessorImpl.java index 784b6f14d3..3d75e9c057 100644 --- a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/CypherProcessorImpl.java +++ b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/CypherProcessorImpl.java @@ -106,7 +106,7 @@ public AlgRoot translate( Statement statement, Node query, QueryParameters param final AlgBuilder builder = AlgBuilder.create( statement ); final RexBuilder rexBuilder = new RexBuilder( statement.getTransaction().getTypeFactory() ); - final AlgOptCluster cluster = AlgOptCluster.createGraph( statement.getQueryProcessor().getPlanner(), rexBuilder ); + final AlgOptCluster cluster = AlgOptCluster.createGraph( statement.getQueryProcessor().getPlanner(), rexBuilder, statement.getDataContext().getRootSchema() ); final CypherToAlgConverter cypherToAlgConverter = new CypherToAlgConverter( statement, builder, rexBuilder, cluster ); diff --git a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherAlterDatabaseAlias.java b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherAlterDatabaseAlias.java index 679dc90110..e68e161711 100644 --- a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherAlterDatabaseAlias.java +++ b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherAlterDatabaseAlias.java @@ -19,7 +19,7 @@ import java.util.List; import lombok.Getter; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.Catalog.Pattern; +import org.polypheny.db.catalog.logistic.Pattern; import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.cypher.CypherParameter; import org.polypheny.db.cypher.CypherSimpleEither; diff --git a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherCreateDatabaseAlias.java b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherCreateDatabaseAlias.java index 4664777497..d6d2330032 100644 --- a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherCreateDatabaseAlias.java +++ b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherCreateDatabaseAlias.java @@ -19,7 +19,7 @@ import java.util.List; import lombok.Getter; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.Catalog.Pattern; +import org.polypheny.db.catalog.logistic.Pattern; import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.cypher.CypherParameter; import org.polypheny.db.cypher.CypherSimpleEither; diff --git a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherDropAlias.java b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherDropAlias.java index 2978a7f35a..524417e137 100644 --- a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherDropAlias.java +++ 
b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherDropAlias.java @@ -19,7 +19,7 @@ import java.util.List; import lombok.Getter; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.Catalog.Pattern; +import org.polypheny.db.catalog.logistic.Pattern; import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.cypher.CypherParameter; import org.polypheny.db.cypher.CypherSimpleEither; diff --git a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherDropDatabase.java b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherDropDatabase.java index 570ef9b9a3..23161a3a7c 100644 --- a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherDropDatabase.java +++ b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherDropDatabase.java @@ -19,7 +19,7 @@ import java.util.List; import java.util.concurrent.TimeUnit; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.Catalog.Pattern; +import org.polypheny.db.catalog.logistic.Pattern; import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.cypher.CypherParameter; import org.polypheny.db.cypher.CypherSimpleEither; diff --git a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/ddl/CypherAddPlacement.java b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/ddl/CypherAddPlacement.java index f58e12a3db..071d42b1b5 100644 --- a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/ddl/CypherAddPlacement.java +++ b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/ddl/CypherAddPlacement.java @@ -23,7 +23,7 @@ import org.polypheny.db.adapter.AdapterManager; import org.polypheny.db.adapter.DataStore; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.Catalog.Pattern; +import org.polypheny.db.catalog.logistic.Pattern; import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.cypher.CypherParameter; import org.polypheny.db.cypher.CypherSimpleEither; diff --git a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/ddl/CypherDropPlacement.java b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/ddl/CypherDropPlacement.java index 174ab09ac7..e7d6a21d24 100644 --- a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/ddl/CypherDropPlacement.java +++ b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/ddl/CypherDropPlacement.java @@ -22,7 +22,7 @@ import org.polypheny.db.adapter.AdapterManager; import org.polypheny.db.adapter.DataStore; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.Catalog.Pattern; +import org.polypheny.db.catalog.logistic.Pattern; import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.cypher.CypherParameter; import org.polypheny.db.cypher.CypherSimpleEither; diff --git a/plugins/elasticsearch-adapter/src/main/java/org/polypheny/db/adapter/elasticsearch/ElasticsearchEntity.java b/plugins/elasticsearch-adapter/src/main/java/org/polypheny/db/adapter/elasticsearch/ElasticsearchEntity.java index a6e3efb43f..6bc6cd75f2 100644 --- a/plugins/elasticsearch-adapter/src/main/java/org/polypheny/db/adapter/elasticsearch/ElasticsearchEntity.java +++ b/plugins/elasticsearch-adapter/src/main/java/org/polypheny/db/adapter/elasticsearch/ElasticsearchEntity.java @@ -69,6 +69,7 @@ import org.polypheny.db.plan.AlgOptEntity.ToAlgContext; import org.polypheny.db.plan.AlgTraitSet; 
import org.polypheny.db.schema.ModelTraitDef; +import org.polypheny.db.schema.PolyphenyDbSchema; import org.polypheny.db.schema.SchemaPlus; import org.polypheny.db.schema.TranslatableEntity; import org.polypheny.db.schema.impl.AbstractTableQueryable; @@ -329,7 +330,7 @@ public String toString() { @Override - public Queryable asQueryable( DataContext dataContext, SchemaPlus schema, String tableName ) { + public Queryable asQueryable( DataContext dataContext, PolyphenyDbSchema schema, String tableName ) { return new ElasticsearchQueryable<>( dataContext, schema, this, tableName ); } diff --git a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/FileTranslatableEntity.java b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/FileTranslatableEntity.java index 1ea7dd33e0..e9bc3e4043 100644 --- a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/FileTranslatableEntity.java +++ b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/FileTranslatableEntity.java @@ -43,6 +43,7 @@ import org.polypheny.db.prepare.Prepare.CatalogReader; import org.polypheny.db.rex.RexNode; import org.polypheny.db.schema.ModifiableEntity; +import org.polypheny.db.schema.PolyphenyDbSchema; import org.polypheny.db.schema.SchemaPlus; import org.polypheny.db.schema.TranslatableEntity; import org.polypheny.db.schema.impl.AbstractTableQueryable; @@ -140,7 +141,7 @@ public RelModify toModificationAlg( @Override - public Queryable asQueryable( DataContext dataContext, SchemaPlus schema, String tableName ) { + public Queryable asQueryable( DataContext dataContext, PolyphenyDbSchema schema, String tableName ) { throw new UnsupportedOperationException(); //System.out.println("as Queryable"); //fileSchema.getConvention().register( dataContext.getStatement().getQueryProcessor().getPlanner() ); diff --git a/plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/algebra/GeodeEntity.java b/plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/algebra/GeodeEntity.java index 6aa34ae642..acbeacb56b 100644 --- a/plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/algebra/GeodeEntity.java +++ b/plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/algebra/GeodeEntity.java @@ -63,6 +63,7 @@ import org.polypheny.db.plan.AlgOptEntity.ToAlgContext; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.runtime.Hook; +import org.polypheny.db.schema.PolyphenyDbSchema; import org.polypheny.db.schema.SchemaPlus; import org.polypheny.db.schema.TranslatableEntity; import org.polypheny.db.schema.impl.AbstractTableQueryable; @@ -215,7 +216,7 @@ public Enumerator enumerator() { @Override - public Queryable asQueryable( DataContext dataContext, SchemaPlus schema, String tableName ) { + public Queryable asQueryable( DataContext dataContext, PolyphenyDbSchema schema, String tableName ) { return new GeodeQueryable<>( dataContext, schema, this, tableName ); } diff --git a/plugins/hsqldb-adapter/src/main/java/org/polypheny/db/hsqldb/stores/HsqldbStore.java b/plugins/hsqldb-adapter/src/main/java/org/polypheny/db/hsqldb/stores/HsqldbStore.java index 519be5e95c..1527d09c99 100644 --- a/plugins/hsqldb-adapter/src/main/java/org/polypheny/db/hsqldb/stores/HsqldbStore.java +++ b/plugins/hsqldb-adapter/src/main/java/org/polypheny/db/hsqldb/stores/HsqldbStore.java @@ -35,7 +35,7 @@ import org.polypheny.db.adapter.jdbc.connection.TransactionalConnectionFactory; import org.polypheny.db.adapter.jdbc.stores.AbstractJdbcStore; import 
org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.Catalog.NamespaceType; +import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; import org.polypheny.db.catalog.entity.CatalogIndex; import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; diff --git a/plugins/html-adapter/src/main/java/org/polypheny/db/adapter/html/HtmlEntity.java b/plugins/html-adapter/src/main/java/org/polypheny/db/adapter/html/HtmlEntity.java index 33dbda1f0b..d51df11b9d 100644 --- a/plugins/html-adapter/src/main/java/org/polypheny/db/adapter/html/HtmlEntity.java +++ b/plugins/html-adapter/src/main/java/org/polypheny/db/adapter/html/HtmlEntity.java @@ -52,7 +52,7 @@ import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptEntity.ToAlgContext; import org.polypheny.db.plan.AlgTraitSet; -import org.polypheny.db.schema.SchemaPlus; +import org.polypheny.db.schema.PolyphenyDbSchema; import org.polypheny.db.schema.Statistic; import org.polypheny.db.schema.Statistics; import org.polypheny.db.schema.TranslatableEntity; @@ -114,7 +114,7 @@ public AlgDataType getRowType( AlgDataTypeFactory typeFactory ) { @Override - public Queryable asQueryable( DataContext dataContext, SchemaPlus schema, String tableName ) { + public Queryable asQueryable( DataContext dataContext, PolyphenyDbSchema schema, String tableName ) { return new AbstractTableQueryable( dataContext, schema, this, tableName ) { @Override public Enumerator enumerator() { diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcEntity.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcEntity.java index 48e0fb4341..2dabf4a89b 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcEntity.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcEntity.java @@ -65,6 +65,7 @@ import org.polypheny.db.prepare.Prepare.CatalogReader; import org.polypheny.db.rex.RexNode; import org.polypheny.db.schema.ModifiableEntity; +import org.polypheny.db.schema.PolyphenyDbSchema; import org.polypheny.db.schema.ScannableEntity; import org.polypheny.db.schema.SchemaPlus; import org.polypheny.db.schema.TableType; @@ -218,7 +219,7 @@ public AlgNode toAlg( ToAlgContext context, AlgOptEntity algOptEntity, AlgTraitS @Override - public Queryable asQueryable( DataContext dataContext, SchemaPlus schema, String tableName ) { + public Queryable asQueryable( DataContext dataContext, PolyphenyDbSchema schema, String tableName ) { return new JdbcTableQueryable<>( dataContext, schema, tableName ); } diff --git a/plugins/jdbc-adapter-framework/src/test/java/org/polypheny/db/adapter/jdbc/rel2sql/AlgToSqlConverterTest.java b/plugins/jdbc-adapter-framework/src/test/java/org/polypheny/db/adapter/jdbc/rel2sql/AlgToSqlConverterTest.java index 10345f3c1d..4377ba165f 100644 --- a/plugins/jdbc-adapter-framework/src/test/java/org/polypheny/db/adapter/jdbc/rel2sql/AlgToSqlConverterTest.java +++ b/plugins/jdbc-adapter-framework/src/test/java/org/polypheny/db/adapter/jdbc/rel2sql/AlgToSqlConverterTest.java @@ -51,7 +51,7 @@ import org.polypheny.db.algebra.operators.OperatorName; import org.polypheny.db.algebra.rules.UnionMergeRule; import org.polypheny.db.algebra.type.AlgDataTypeSystemImpl; -import org.polypheny.db.catalog.Catalog.NamespaceType; +import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.languages.NodeToAlgConverter; import 
org.polypheny.db.languages.NodeToAlgConverter.Config;
 import org.polypheny.db.languages.OperatorRegistry;
diff --git a/plugins/jdbc-adapter-framework/src/test/java/org/polypheny/db/adapter/jdbc/rel2sql/PlannerTest.java b/plugins/jdbc-adapter-framework/src/test/java/org/polypheny/db/adapter/jdbc/rel2sql/PlannerTest.java
index 4aed2ec0e8..44769fd038 100644
--- a/plugins/jdbc-adapter-framework/src/test/java/org/polypheny/db/adapter/jdbc/rel2sql/PlannerTest.java
+++ b/plugins/jdbc-adapter-framework/src/test/java/org/polypheny/db/adapter/jdbc/rel2sql/PlannerTest.java
@@ -38,7 +38,7 @@
 import org.polypheny.db.algebra.convert.ConverterRule;
 import org.polypheny.db.algebra.core.relational.RelScan;
 import org.polypheny.db.algebra.rules.FilterMergeRule;
-import org.polypheny.db.catalog.Catalog.NamespaceType;
+import org.polypheny.db.catalog.logistic.NamespaceType;
 import org.polypheny.db.languages.Parser;
 import org.polypheny.db.languages.Parser.ParserConfig;
 import org.polypheny.db.nodes.Node;
diff --git a/plugins/jdbc-adapter-framework/src/test/java/org/polypheny/db/adapter/jdbc/rel2sql/RelToSqlConverterStructsTest.java b/plugins/jdbc-adapter-framework/src/test/java/org/polypheny/db/adapter/jdbc/rel2sql/RelToSqlConverterStructsTest.java
index 1c4673bf3c..93de573419 100644
--- a/plugins/jdbc-adapter-framework/src/test/java/org/polypheny/db/adapter/jdbc/rel2sql/RelToSqlConverterStructsTest.java
+++ b/plugins/jdbc-adapter-framework/src/test/java/org/polypheny/db/adapter/jdbc/rel2sql/RelToSqlConverterStructsTest.java
@@ -47,7 +47,7 @@
 import org.polypheny.db.algebra.type.AlgDataType;
 import org.polypheny.db.algebra.type.AlgDataTypeFactory;
 import org.polypheny.db.algebra.type.AlgProtoDataType;
-import org.polypheny.db.catalog.Catalog.NamespaceType;
+import org.polypheny.db.catalog.logistic.NamespaceType;
 import org.polypheny.db.nodes.Call;
 import org.polypheny.db.nodes.Node;
 import org.polypheny.db.schema.AbstractPolyphenyDbSchema;
diff --git a/plugins/mapdb-catalog/src/main/java/org/polypheny/db/catalog/CatalogImpl.java b/plugins/mapdb-catalog/src/main/java/org/polypheny/db/catalog/CatalogImpl.java
index 50d2670e9e..f58f33cf44 100644
--- a/plugins/mapdb-catalog/src/main/java/org/polypheny/db/catalog/CatalogImpl.java
+++ b/plugins/mapdb-catalog/src/main/java/org/polypheny/db/catalog/CatalogImpl.java
@@ -62,7 +62,7 @@
 import org.polypheny.db.algebra.type.AlgDataType;
 import org.polypheny.db.catalog.entity.CatalogAdapter;
 import org.polypheny.db.catalog.entity.CatalogAdapter.AdapterType;
-import org.polypheny.db.catalog.entity.CatalogCollection;
+import org.polypheny.db.catalog.entity.LogicalCollection;
 import org.polypheny.db.catalog.entity.CatalogCollectionMapping;
 import org.polypheny.db.catalog.entity.CatalogCollectionPlacement;
 import org.polypheny.db.catalog.entity.CatalogColumn;
@@ -118,6 +118,16 @@
 import org.polypheny.db.catalog.exceptions.UnknownTableIdRuntimeException;
 import org.polypheny.db.catalog.exceptions.UnknownUserException;
 import org.polypheny.db.catalog.exceptions.UnknownUserIdRuntimeException;
+import org.polypheny.db.catalog.logistic.Collation;
+import org.polypheny.db.catalog.logistic.ConstraintType;
+import org.polypheny.db.catalog.logistic.DataPlacementRole;
+import org.polypheny.db.catalog.logistic.EntityType;
+import org.polypheny.db.catalog.logistic.ForeignKeyOption;
+import org.polypheny.db.catalog.logistic.IndexType;
+import org.polypheny.db.catalog.logistic.NamespaceType;
+import org.polypheny.db.catalog.logistic.PartitionType;
+import org.polypheny.db.catalog.logistic.Pattern;
+import org.polypheny.db.catalog.logistic.PlacementType;
 import org.polypheny.db.config.RuntimeConfig;
 import org.polypheny.db.iface.QueryInterfaceManager;
 import org.polypheny.db.languages.QueryLanguage;
@@ -158,8 +168,8 @@ public class CatalogImpl extends Catalog {
     private static BTreeMap<Object[], CatalogTable> tableNames;
     private static HTreeMap<Long, ImmutableList<Long>> tableChildren;

-    private static BTreeMap<Long, CatalogCollection> collections;
-    private static BTreeMap<Object[], CatalogCollection> collectionNames;
+    private static BTreeMap<Long, LogicalCollection> collections;
+    private static BTreeMap<Object[], LogicalCollection> collectionNames;

     private static BTreeMap<Object[], CatalogCollectionPlacement> collectionPlacements;
@@ -2376,7 +2386,7 @@ public void updateMaterializedViewRefreshTime( long materializedViewId ) {
      * {@inheritDoc}
      */
     @Override
-    public CatalogCollection getCollection( long id ) {
+    public LogicalCollection getCollection( long id ) {
         if ( !collections.containsKey( id ) ) {
             throw new UnknownTableIdRuntimeException( id );
         }
@@ -2388,11 +2398,11 @@ public CatalogCollection getCollection( long id ) {
      * {@inheritDoc}
      */
     @Override
-    public List<CatalogCollection> getCollections( long namespaceId, Pattern namePattern ) {
+    public List<LogicalCollection> getCollections( long namespaceId, Pattern namePattern ) {
         if ( schemas.containsKey( namespaceId ) ) {
             CatalogSchema schema = Objects.requireNonNull( schemas.get( namespaceId ) );
             if ( namePattern != null ) {
-                CatalogCollection collection = collectionNames.get( new Object[]{ schema.databaseId, namespaceId, namePattern.pattern } );
+                LogicalCollection collection = collectionNames.get( new Object[]{ schema.databaseId, namespaceId, namePattern.pattern } );
                 if ( collection == null ) {
                     return new ArrayList<>();
                 }
@@ -2416,7 +2426,7 @@ public long addCollection( Long id, String name, long schemaId, int currentUserI
         }
         CatalogSchema namespace = getSchema( schemaId );
-        CatalogCollection collection = new CatalogCollection(
+        LogicalCollection collection = new LogicalCollection(
                 Catalog.defaultDatabaseId,
                 schemaId,
                 collectionId,
@@ -2442,12 +2452,12 @@ public long addCollectionPlacement( long namespaceId, int adapterId, long collec
     public long addCollectionPlacement( long namespaceId, int adapterId, long collectionId, PlacementType placementType ) {
         long id = partitionIdBuilder.getAndIncrement();
         CatalogCollectionPlacement placement = new CatalogCollectionPlacement( namespaceId, adapterId, collectionId, null, null, id );
-        CatalogCollection old = collections.get( collectionId );
+        LogicalCollection old = collections.get( collectionId );
         if ( old == null ) {
             throw new UnknownCollectionException( collectionId );
         }
-        CatalogCollection collection = old.addPlacement( adapterId );
+        LogicalCollection collection = old.addPlacement( adapterId );

         synchronized ( this ) {
             collectionPlacements.put( new Object[]{ collectionId, adapterId }, placement );
@@ -2464,13 +2474,13 @@ public long addCollectionPlacement( long namespaceId, int adapterId, long collec
      */
     @Override
     public void updateCollectionPartitionPhysicalNames( long namespaceId, long collectionId, int adapterId, String physicalNamespaceName, String namespaceName, String physicalCollectionName ) {
-        CatalogCollection old = getCollection( collectionId );
+        LogicalCollection old = getCollection( collectionId );
         if ( old == null ) {
             throw new UnknownCollectionException( collectionId );
         }
         CatalogCollectionPlacement placement = new CatalogCollectionPlacement( namespaceId, adapterId, collectionId, physicalCollectionName, physicalNamespaceName, old.id );
-        CatalogCollection collection = old.setPhysicalName( physicalCollectionName );
+        LogicalCollection collection = old.setPhysicalName( physicalCollectionName );
         synchronized ( this ) {
             collections.replace( collectionId, collection );
             collectionNames.replace( new Object[]{ collection.databaseId, collection.namespaceId, collection.name }, collection );
@@ -2560,7 +2570,7 @@ public long addCollectionLogistics( long schemaId, String name, List
      */
     @Override
     public void deleteCollection( long id ) {
-        CatalogCollection collection = getCollection( id );
+        LogicalCollection collection = getCollection( id );

         synchronized ( this ) {
             collections.remove( collection.namespaceId );
@@ -2576,8 +2586,8 @@ public void deleteCollection( long id ) {
      */
     @Override
     public void dropCollectionPlacement( long id, int adapterId ) {
-        CatalogCollection oldCollection = Objects.requireNonNull( collections.get( id ) );
-        CatalogCollection collection = oldCollection.removePlacement( adapterId );
+        LogicalCollection oldCollection = Objects.requireNonNull( collections.get( id ) );
+        LogicalCollection collection = oldCollection.removePlacement( adapterId );

         synchronized ( this ) {
             collectionPlacements.remove( new Object[]{ id, adapterId } );
diff --git a/plugins/mapdb-catalog/src/test/java/org/polypheny/db/test/CatalogTest.java b/plugins/mapdb-catalog/src/test/java/org/polypheny/db/test/CatalogTest.java
index 6f15cb73c7..ccbed9b37e 100644
--- a/plugins/mapdb-catalog/src/test/java/org/polypheny/db/test/CatalogTest.java
+++ b/plugins/mapdb-catalog/src/test/java/org/polypheny/db/test/CatalogTest.java
@@ -33,12 +33,12 @@
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
-import org.polypheny.db.catalog.Catalog.Collation;
-import org.polypheny.db.catalog.Catalog.EntityType;
-import org.polypheny.db.catalog.Catalog.ForeignKeyOption;
-import org.polypheny.db.catalog.Catalog.IndexType;
-import org.polypheny.db.catalog.Catalog.NamespaceType;
-import org.polypheny.db.catalog.Catalog.PlacementType;
+import org.polypheny.db.catalog.logistic.Collation;
+import org.polypheny.db.catalog.logistic.EntityType;
+import org.polypheny.db.catalog.logistic.ForeignKeyOption;
+import org.polypheny.db.catalog.logistic.IndexType;
+import org.polypheny.db.catalog.logistic.NamespaceType;
+import org.polypheny.db.catalog.logistic.PlacementType;
 import org.polypheny.db.catalog.CatalogImpl;
 import org.polypheny.db.catalog.entity.CatalogAdapter;
 import org.polypheny.db.catalog.entity.CatalogAdapter.AdapterType;
diff --git a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoAlg.java b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoAlg.java
index c28d464d4b..c58e1417c6 100644
--- a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoAlg.java
+++ b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoAlg.java
@@ -195,7 +195,7 @@ public AlgNode visit( LogicalProject project ) {

         @Override
-        public AlgNode visit( RelScan scan ) {
+        public AlgNode visit( RelScan<?> scan ) {
             super.visit( scan );
             return scan;
diff --git a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoEntity.java b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoEntity.java
index 8b2aa7b2af..61e920f591 100644
--- a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoEntity.java
+++ b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoEntity.java
@@ -80,7 +80,7 @@
 import org.polypheny.db.algebra.type.AlgDataType;
 import org.polypheny.db.algebra.type.AlgDataTypeFactory;
 import org.polypheny.db.algebra.type.AlgProtoDataType;
-import org.polypheny.db.catalog.entity.CatalogCollection;
+import org.polypheny.db.catalog.entity.LogicalCollection;
 import org.polypheny.db.catalog.entity.CatalogCollectionPlacement;
 import org.polypheny.db.catalog.entity.CatalogPartitionPlacement;
 import org.polypheny.db.catalog.entity.logical.LogicalTable;
@@ -93,6 +93,7 @@
 import org.polypheny.db.rex.RexNode;
 import org.polypheny.db.schema.ModifiableCollection;
 import org.polypheny.db.schema.ModifiableEntity;
+import org.polypheny.db.schema.PolyphenyDbSchema;
 import org.polypheny.db.schema.SchemaPlus;
 import org.polypheny.db.schema.TranslatableEntity;
 import org.polypheny.db.schema.impl.AbstractTableQueryable;
@@ -119,7 +120,7 @@ public class MongoEntity extends AbstractQueryableEntity implements Translatable
     private final LogicalTable catalogTable;
     @Getter
-    private final CatalogCollection catalogCollection;
+    private final LogicalCollection catalogCollection;
     @Getter
     private final TransactionProvider transactionProvider;
     @Getter
@@ -142,7 +143,7 @@ public class MongoEntity extends AbstractQueryableEntity implements Translatable
     }

-    public MongoEntity( CatalogCollection catalogEntity, MongoSchema schema, AlgProtoDataType proto, TransactionProvider transactionProvider, long adapter, CatalogCollectionPlacement partitionPlacement ) {
+    public MongoEntity( LogicalCollection catalogEntity, MongoSchema schema, AlgProtoDataType proto, TransactionProvider transactionProvider, long adapter, CatalogCollectionPlacement partitionPlacement ) {
         super( Object[].class, catalogEntity.id, partitionPlacement.id, adapter );
         this.collectionName = MongoStore.getPhysicalTableName( catalogEntity.id, partitionPlacement.id );
         this.transactionProvider = transactionProvider;
@@ -167,7 +168,7 @@ public AlgDataType getRowType( AlgDataTypeFactory typeFactory ) {

     @Override
-    public Queryable asQueryable( DataContext dataContext, SchemaPlus schema, String tableName ) {
+    public Queryable asQueryable( DataContext dataContext, PolyphenyDbSchema schema, String tableName ) {
         return new MongoQueryable<>( dataContext, schema, this, tableName );
     }
diff --git a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoPlugin.java b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoPlugin.java
index 929b3fd8ef..59f3f49186 100644
--- a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoPlugin.java
+++ b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoPlugin.java
@@ -59,9 +59,9 @@
 import org.polypheny.db.adapter.DeployMode.DeploySetting;
 import org.polypheny.db.catalog.Adapter;
 import org.polypheny.db.catalog.Catalog;
-import org.polypheny.db.catalog.Catalog.NamespaceType;
+import org.polypheny.db.catalog.logistic.NamespaceType;
 import org.polypheny.db.catalog.entity.CatalogAdapter.AdapterType;
-import org.polypheny.db.catalog.entity.CatalogCollection;
+import org.polypheny.db.catalog.entity.LogicalCollection;
 import org.polypheny.db.catalog.entity.CatalogCollectionMapping;
 import org.polypheny.db.catalog.entity.CatalogCollectionPlacement;
 import org.polypheny.db.catalog.entity.CatalogColumn;
@@ -262,7 +262,7 @@ public void truncate( Context context, LogicalTable table ) {

         @Override
-        public Entity createDocumentSchema( CatalogCollection catalogEntity, CatalogCollectionPlacement partitionPlacement ) {
+        public Entity createDocumentSchema( LogicalCollection catalogEntity, CatalogCollectionPlacement partitionPlacement ) {
             return this.currentSchema.createCollection( catalogEntity, partitionPlacement );
         }
@@ -336,7 +336,7 @@ public void createTable( Context context, LogicalTable catalogTable, List

         @Override
-        public void createCollection( Context prepareContext, CatalogCollection catalogCollection, long adapterId ) {
+        public void createCollection( Context prepareContext, LogicalCollection catalogCollection, long adapterId ) {
             Catalog catalog = Catalog.getInstance();
             commitAll();
@@ -367,7 +367,7 @@ public void createCollection( Context prepareContext, CatalogCollection catalogC

         @Override
-        public void dropCollection( Context context, CatalogCollection catalogCollection ) {
+        public void dropCollection( Context context, LogicalCollection catalogCollection ) {
             Catalog catalog = Catalog.getInstance();
             commitAll();
             context.getStatement().getTransaction().registerInvolvedAdapter( this );
diff --git a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoRules.java b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoRules.java
index 925d5a5f5f..df78183694 100644
--- a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoRules.java
+++ b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoRules.java
@@ -83,7 +83,7 @@
 import org.polypheny.db.algebra.type.AlgDataType;
 import org.polypheny.db.algebra.type.AlgDataTypeField;
 import org.polypheny.db.algebra.type.AlgRecordType;
-import org.polypheny.db.catalog.entity.CatalogCollection;
+import org.polypheny.db.catalog.entity.LogicalCollection;
 import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.languages.OperatorRegistry;
 import org.polypheny.db.nodes.Operator;
@@ -894,7 +894,7 @@ private static class ScanChecker extends AlgShuttleImpl {

         @Override
-        public AlgNode visit( RelScan scan ) {
+        public AlgNode visit( RelScan<?> scan ) {
             supported = false;
             return super.visit( scan );
         }
@@ -1349,7 +1349,7 @@ private void handlePreparedInsert( Implementor implementor, MongoProject input )
         }

-        private Map<Integer, String> getPhysicalMap( List<AlgDataTypeField> fieldList, CatalogCollection catalogCollection ) {
+        private Map<Integer, String> getPhysicalMap( List<AlgDataTypeField> fieldList, LogicalCollection catalogCollection ) {
             Map<Integer, String> map = new HashMap<>();
             map.put( 0, "d" );
             return map;
diff --git a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoSchema.java b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoSchema.java
index 983b0b6062..fd957e00e8 100644
--- a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoSchema.java
+++ b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoSchema.java
@@ -48,7 +48,7 @@
 import org.polypheny.db.algebra.type.AlgDataTypeImpl;
 import org.polypheny.db.algebra.type.AlgDataTypeSystem;
 import org.polypheny.db.catalog.Catalog;
-import org.polypheny.db.catalog.entity.CatalogCollection;
+import org.polypheny.db.catalog.entity.LogicalCollection;
 import org.polypheny.db.catalog.entity.CatalogCollectionPlacement;
 import org.polypheny.db.catalog.entity.CatalogColumn;
 import org.polypheny.db.catalog.entity.CatalogColumnPlacement;
@@ -124,7 +124,7 @@ public MongoEntity createTable( LogicalTable catalogTable, List
-        List<CatalogCollection> collections = catalog.getCollections( namespaceId, new Pattern( getCollection() ) );
+        List<LogicalCollection> collections = catalog.getCollections( namespaceId, new Pattern( getCollection() ) );
         if ( collections.size() != 1 ) {
             throw new RuntimeException( "Error while adding new collection placement, collection not found." );
diff --git a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlCreateCollection.java b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlCreateCollection.java
index 9249c0e5de..0ca58622b9 100644
--- a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlCreateCollection.java
+++ b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlCreateCollection.java
@@ -22,7 +22,7 @@
 import org.polypheny.db.adapter.AdapterManager;
 import org.polypheny.db.adapter.DataStore;
 import org.polypheny.db.catalog.Catalog;
-import org.polypheny.db.catalog.Catalog.PlacementType;
+import org.polypheny.db.catalog.logistic.PlacementType;
 import org.polypheny.db.catalog.exceptions.EntityAlreadyExistsException;
 import org.polypheny.db.catalog.exceptions.UnknownSchemaException;
 import org.polypheny.db.ddl.DdlManager;
diff --git a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlCreateView.java b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlCreateView.java
index c3387a1693..fba45b0963 100644
--- a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlCreateView.java
+++ b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlCreateView.java
@@ -22,7 +22,7 @@
 import org.polypheny.db.algebra.AlgNode;
 import org.polypheny.db.algebra.AlgRoot;
 import org.polypheny.db.catalog.Catalog;
-import org.polypheny.db.catalog.Catalog.PlacementType;
+import org.polypheny.db.catalog.logistic.PlacementType;
 import org.polypheny.db.catalog.exceptions.EntityAlreadyExistsException;
 import org.polypheny.db.catalog.exceptions.GenericCatalogException;
 import org.polypheny.db.catalog.exceptions.UnknownColumnException;
diff --git a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlDeletePlacement.java b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlDeletePlacement.java
index 90eb3e91b6..a8dd0b52fc 100644
--- a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlDeletePlacement.java
+++ b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlDeletePlacement.java
@@ -22,8 +22,8 @@
 import org.polypheny.db.adapter.AdapterManager;
 import org.polypheny.db.adapter.DataStore;
 import org.polypheny.db.catalog.Catalog;
-import org.polypheny.db.catalog.Catalog.Pattern;
-import org.polypheny.db.catalog.entity.CatalogCollection;
+import org.polypheny.db.catalog.logistic.Pattern;
+import org.polypheny.db.catalog.entity.LogicalCollection;
 import org.polypheny.db.catalog.exceptions.UnknownSchemaException;
 import org.polypheny.db.ddl.DdlManager;
 import org.polypheny.db.languages.ParserPos;
@@ -53,7 +53,7 @@ public void execute( Context context, Statement statement, QueryParameters param
             throw new RuntimeException( "The used document database (Polypheny Schema) is not available." );
         }

-        List<CatalogCollection> collections = catalog.getCollections( namespaceId, new Pattern( getCollection() ) );
+        List<LogicalCollection> collections = catalog.getCollections( namespaceId, new Pattern( getCollection() ) );
         if ( collections.size() != 1 ) {
             throw new RuntimeException( "Error while adding new collection placement, collection not found." );
diff --git a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlDrop.java b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlDrop.java
index bdf12df518..31080b35b9 100644
--- a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlDrop.java
+++ b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlDrop.java
@@ -18,8 +18,8 @@
 import java.util.List;
 import org.polypheny.db.catalog.Catalog;
-import org.polypheny.db.catalog.Catalog.Pattern;
-import org.polypheny.db.catalog.entity.CatalogCollection;
+import org.polypheny.db.catalog.logistic.Pattern;
+import org.polypheny.db.catalog.entity.LogicalCollection;
 import org.polypheny.db.catalog.entity.CatalogSchema;
 import org.polypheny.db.ddl.DdlManager;
 import org.polypheny.db.languages.ParserPos;
@@ -55,7 +55,7 @@ public void execute( Context context, Statement statement, QueryParameters param
         }
         CatalogSchema namespace = catalog.getSchemas( Catalog.defaultDatabaseId, new Pattern( database ) ).get( 0 );

-        List<CatalogCollection> collections = catalog.getCollections( namespace.id, new Pattern( getCollection() ) );
+        List<LogicalCollection> collections = catalog.getCollections( namespace.id, new Pattern( getCollection() ) );
         if ( collections.size() != 1 ) {
             // dropping a collection, which does not exist, which is a no-op
             return;
diff --git a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlQueryParameters.java b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlQueryParameters.java
index e34a9c8ae7..504f82a51f 100644
--- a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlQueryParameters.java
+++ b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlQueryParameters.java
@@ -17,7 +17,7 @@
 package org.polypheny.db.languages.mql;

 import lombok.Getter;
-import org.polypheny.db.catalog.Catalog.NamespaceType;
+import org.polypheny.db.catalog.logistic.NamespaceType;
 import org.polypheny.db.languages.QueryParameters;

 public class MqlQueryParameters extends QueryParameters {
diff --git a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlUseDatabase.java b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlUseDatabase.java
index 0f04d02a7d..e2df3ec3bf 100644
--- a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlUseDatabase.java
+++ b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlUseDatabase.java
@@ -18,7 +18,7 @@
 import lombok.Getter;
 import org.polypheny.db.catalog.Catalog;
-import org.polypheny.db.catalog.Catalog.NamespaceType;
+import org.polypheny.db.catalog.logistic.NamespaceType;
 import org.polypheny.db.catalog.exceptions.NamespaceAlreadyExistsException;
 import org.polypheny.db.ddl.DdlManager;
 import org.polypheny.db.languages.ParserPos;
diff --git a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql2alg/MqlToAlgConverter.java b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql2alg/MqlToAlgConverter.java
index fb6f51fe38..082f7e166d 100644
--- a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql2alg/MqlToAlgConverter.java
+++ b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql2alg/MqlToAlgConverter.java
@@ -66,7 +66,7 @@
 import org.polypheny.db.algebra.type.AlgDataTypeField;
 import org.polypheny.db.algebra.type.AlgDataTypeFieldImpl;
 import org.polypheny.db.algebra.type.AlgDataTypeSystem;
-import org.polypheny.db.catalog.Catalog.NamespaceType;
+import org.polypheny.db.catalog.logistic.NamespaceType;
 import org.polypheny.db.catalog.entity.CatalogPartitionPlacement;
 import org.polypheny.db.languages.OperatorRegistry;
 import org.polypheny.db.languages.QueryLanguage;
diff --git a/plugins/mql-language/src/test/java/org/polypheny/db/mql/mql2alg/Mql2AlgTest.java b/plugins/mql-language/src/test/java/org/polypheny/db/mql/mql2alg/Mql2AlgTest.java
index 1c99ff54ff..426550d6b8 100644
--- a/plugins/mql-language/src/test/java/org/polypheny/db/mql/mql2alg/Mql2AlgTest.java
+++ b/plugins/mql-language/src/test/java/org/polypheny/db/mql/mql2alg/Mql2AlgTest.java
@@ -19,7 +19,7 @@
 import org.polypheny.db.algebra.AlgRoot;
 import org.polypheny.db.algebra.type.AlgDataTypeFactory;
 import org.polypheny.db.algebra.type.AlgDataTypeSystem;
-import org.polypheny.db.catalog.Catalog.NamespaceType;
+import org.polypheny.db.catalog.logistic.NamespaceType;
 import org.polypheny.db.catalog.MockCatalogReader;
 import org.polypheny.db.catalog.MockCatalogReaderDocument;
 import org.polypheny.db.languages.mql.MqlQueryParameters;
@@ -43,7 +43,7 @@ public abstract class Mql2AlgTest extends MqlTest {

     static {
         factory = new PolyTypeFactoryImpl( AlgDataTypeSystem.DEFAULT );
-        cluster = AlgOptCluster.create( new MockRelOptPlanner( Contexts.empty() ), new RexBuilder( factory ) );
+        cluster = AlgOptCluster.create( new MockRelOptPlanner( Contexts.empty() ), new RexBuilder( factory ), traitSet, rootSchema );
         reader = new MockCatalogReaderDocument( factory, false );
         reader.init();
         MQL_TO_ALG_CONVERTER = new MqlToAlgConverter( null, reader, cluster );
diff --git a/plugins/mql-language/src/test/java/org/polypheny/db/mql/mql2alg/MqlMockCatalog.java b/plugins/mql-language/src/test/java/org/polypheny/db/mql/mql2alg/MqlMockCatalog.java
index cd1a929a23..ad8fa91405 100644
--- a/plugins/mql-language/src/test/java/org/polypheny/db/mql/mql2alg/MqlMockCatalog.java
+++ b/plugins/mql-language/src/test/java/org/polypheny/db/mql/mql2alg/MqlMockCatalog.java
@@ -17,6 +17,7 @@
 package org.polypheny.db.mql.mql2alg;

 import org.polypheny.db.catalog.MockCatalog;
+import org.polypheny.db.catalog.logistic.NamespaceType;
 import org.polypheny.db.catalog.entity.CatalogSchema;
 import org.polypheny.db.catalog.entity.CatalogUser;
diff --git a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/Neo4jPlugin.java b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/Neo4jPlugin.java
index d0ca652995..9bea72a19b 100644
--- a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/Neo4jPlugin.java
+++ b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/Neo4jPlugin.java
@@ -53,7 +53,7 @@
 import org.polypheny.db.adapter.DeployMode;
 import org.polypheny.db.catalog.Adapter;
 import org.polypheny.db.catalog.Catalog;
-import org.polypheny.db.catalog.Catalog.NamespaceType;
+import org.polypheny.db.catalog.logistic.NamespaceType;
 import org.polypheny.db.catalog.Catalog.Pattern;
 import org.polypheny.db.catalog.entity.CatalogColumn;
 import org.polypheny.db.catalog.entity.CatalogColumnPlacement;
diff --git a/plugins/pig-language/src/main/java/org/polypheny/db/PigLanguagePlugin.java b/plugins/pig-language/src/main/java/org/polypheny/db/PigLanguagePlugin.java
index c60b68c7bf..ba8c3da8a2 100644
--- a/plugins/pig-language/src/main/java/org/polypheny/db/PigLanguagePlugin.java
+++ b/plugins/pig-language/src/main/java/org/polypheny/db/PigLanguagePlugin.java
@@ -23,7 +23,7 @@
 import org.pf4j.Plugin;
 import org.pf4j.PluginWrapper;
 import org.polypheny.db.algebra.AlgRoot;
-import org.polypheny.db.catalog.Catalog.NamespaceType;
+import org.polypheny.db.catalog.logistic.NamespaceType;
 import org.polypheny.db.information.InformationManager;
 import org.polypheny.db.languages.LanguageManager;
 import org.polypheny.db.languages.QueryLanguage;
diff --git a/plugins/pig-language/src/main/java/org/polypheny/db/tools/PigAlgBuilder.java b/plugins/pig-language/src/main/java/org/polypheny/db/tools/PigAlgBuilder.java
index f7c77bc72d..ec17dee21d 100644
--- a/plugins/pig-language/src/main/java/org/polypheny/db/tools/PigAlgBuilder.java
+++ b/plugins/pig-language/src/main/java/org/polypheny/db/tools/PigAlgBuilder.java
@@ -83,7 +83,7 @@ public static PigAlgBuilder create( Statement statement, AlgOptCluster cluster )
     public static PigAlgBuilder create( Statement statement ) {
         final RexBuilder rexBuilder = new RexBuilder( statement.getTransaction().getTypeFactory() );
-        final AlgOptCluster cluster = AlgOptCluster.create( statement.getQueryProcessor().getPlanner(), rexBuilder );
+        final AlgOptCluster cluster = AlgOptCluster.create( statement.getQueryProcessor().getPlanner(), rexBuilder, traitSet, rootSchema );
         return create( statement, cluster );
     }
diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/CatalogPlugin.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/CatalogPlugin.java
index 5df57311cb..3e9d6452bd 100644
--- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/CatalogPlugin.java
+++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/CatalogPlugin.java
@@ -18,7 +18,7 @@
 import org.pf4j.Plugin;
 import org.pf4j.PluginWrapper;
-import org.polypheny.db.catalog.Catalog.NamespaceType;
+import org.polypheny.db.catalog.logistic.NamespaceType;

 public class CatalogPlugin extends Plugin {
diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/NCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/NCatalog.java
index d33d670973..756a40ad66 100644
--- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/NCatalog.java
+++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/NCatalog.java
@@ -20,6 +20,7 @@
 import org.polypheny.db.catalog.logical.document.DocumentCatalog;
 import org.polypheny.db.catalog.logical.graph.GraphCatalog;
 import org.polypheny.db.catalog.logical.relational.RelationalCatalog;
+import org.polypheny.db.catalog.logistic.NamespaceType;

 @SerializeClass(subclasses = { GraphCatalog.class, RelationalCatalog.class, DocumentCatalog.class }) // required for deserialization
 public interface NCatalog {
@@ -30,7 +31,7 @@ public interface NCatalog {

     boolean hasUncommittedChanges();

-    Catalog.NamespaceType getType();
+    NamespaceType getType();

     default RelationalCatalog asRelational() {
         return unwrap( RelationalCatalog.class );
diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java
index 7b561e6427..09d76b9425 100644
--- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java
+++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java
@@ -28,12 +28,12 @@
 import org.polypheny.db.algebra.constant.FunctionCategory;
 import org.polypheny.db.algebra.constant.Syntax;
 import org.polypheny.db.algebra.type.AlgDataType;
-import org.polypheny.db.catalog.Catalog.NamespaceType;
 import org.polypheny.db.catalog.entities.CatalogUser;
 import org.polypheny.db.catalog.entity.logical.LogicalGraph;
 import org.polypheny.db.catalog.logical.document.DocumentCatalog;
 import org.polypheny.db.catalog.logical.graph.GraphCatalog;
 import org.polypheny.db.catalog.logical.relational.RelationalCatalog;
+import org.polypheny.db.catalog.logistic.NamespaceType;
 import org.polypheny.db.catalog.snapshot.logical.LogicalFullSnapshot;
 import org.polypheny.db.nodes.Identifier;
 import org.polypheny.db.nodes.Operator;
diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/AllocationCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/AllocationCatalog.java
index 4511d95914..b37e7c9044 100644
--- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/AllocationCatalog.java
+++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/AllocationCatalog.java
@@ -23,7 +23,7 @@
 import java.util.Map;
 import java.util.concurrent.ConcurrentHashMap;
 import lombok.Getter;
-import org.polypheny.db.catalog.Catalog.NamespaceType;
+import org.polypheny.db.catalog.logistic.NamespaceType;
 import org.polypheny.db.catalog.IdBuilder;
 import org.polypheny.db.catalog.NCatalog;
 import org.polypheny.db.catalog.Serializable;
diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/document/DocumentCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/document/DocumentCatalog.java
index 67f90735d7..e1433751f5 100644
--- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/document/DocumentCatalog.java
+++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/document/DocumentCatalog.java
@@ -24,7 +24,7 @@
 import lombok.Getter;
 import lombok.Value;
 import lombok.experimental.NonFinal;
-import org.polypheny.db.catalog.Catalog.NamespaceType;
+import org.polypheny.db.catalog.logistic.NamespaceType;
 import org.polypheny.db.catalog.NCatalog;
 import org.polypheny.db.catalog.Serializable;
diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/graph/GraphCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/graph/GraphCatalog.java
index 5c8926e3b6..db4ffaf91e 100644
--- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/graph/GraphCatalog.java
+++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/graph/GraphCatalog.java
@@ -22,7 +22,7 @@
 import lombok.Getter;
 import lombok.Value;
 import lombok.experimental.NonFinal;
-import org.polypheny.db.catalog.Catalog.NamespaceType;
+import org.polypheny.db.catalog.logistic.NamespaceType;
 import org.polypheny.db.catalog.NCatalog;
 import org.polypheny.db.catalog.Serializable;
diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/relational/RelationalCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/relational/RelationalCatalog.java
index 04627ec839..5e51221d61 100644
--- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/relational/RelationalCatalog.java
+++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/relational/RelationalCatalog.java
@@ -24,7 +24,7 @@
 import lombok.Getter;
 import lombok.Value;
 import lombok.experimental.NonFinal;
-import org.polypheny.db.catalog.Catalog.NamespaceType;
+import org.polypheny.db.catalog.logistic.NamespaceType;
 import org.polypheny.db.catalog.NCatalog;
 import org.polypheny.db.catalog.Serializable;
diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/logical/LogicalDocumentSnapshot.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/logical/LogicalDocumentSnapshot.java
index 1979093c2a..e89fb7f6b9 100644
--- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/logical/LogicalDocumentSnapshot.java
+++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/logical/LogicalDocumentSnapshot.java
@@ -22,7 +22,7 @@
 import java.util.Map;
 import java.util.stream.Collectors;
 import lombok.Value;
-import org.polypheny.db.catalog.Catalog.NamespaceType;
+import org.polypheny.db.catalog.logistic.NamespaceType;
 import org.polypheny.db.catalog.logical.document.CatalogCollection;
 import org.polypheny.db.catalog.logical.document.CatalogDatabase;
 import org.polypheny.db.catalog.logical.document.DocumentCatalog;
diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/logical/LogicalFullSnapshot.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/logical/LogicalFullSnapshot.java
index f44ddd261e..7a8066762c 100644
--- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/logical/LogicalFullSnapshot.java
+++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/logical/LogicalFullSnapshot.java
@@ -21,7 +21,7 @@
 import java.util.List;
 import java.util.Map;
 import java.util.stream.Collectors;
-import org.polypheny.db.catalog.Catalog.NamespaceType;
+import org.polypheny.db.catalog.logistic.NamespaceType;
 import org.polypheny.db.catalog.NCatalog;
 import org.polypheny.db.catalog.logical.document.DocumentCatalog;
 import org.polypheny.db.catalog.logical.graph.GraphCatalog;
diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/logical/LogicalGraphSnapshot.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/logical/LogicalGraphSnapshot.java
index bd90a7997a..d4724ad4b1 100644
--- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/logical/LogicalGraphSnapshot.java
+++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/logical/LogicalGraphSnapshot.java
@@ -22,7 +22,7 @@
 import java.util.Map;
 import java.util.stream.Collectors;
 import lombok.Value;
-import org.polypheny.db.catalog.Catalog.NamespaceType;
+import org.polypheny.db.catalog.logistic.NamespaceType;
 import org.polypheny.db.catalog.logical.graph.CatalogGraph;
 import org.polypheny.db.catalog.logical.graph.GraphCatalog;
diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/logical/LogicalRelationalSnapshot.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/logical/LogicalRelationalSnapshot.java
index ea083643d4..e6f60c8315 100644
--- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/logical/LogicalRelationalSnapshot.java
+++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/logical/LogicalRelationalSnapshot.java
@@ -22,7 +22,7 @@
 import java.util.Map;
 import java.util.stream.Collectors;
 import lombok.Value;
-import org.polypheny.db.catalog.Catalog.NamespaceType;
+import org.polypheny.db.catalog.logistic.NamespaceType;
 import org.polypheny.db.catalog.logical.relational.CatalogColumn;
 import org.polypheny.db.catalog.logical.relational.CatalogSchema;
 import org.polypheny.db.catalog.logical.relational.CatalogTable;
diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/logical/LogicalSnapshot.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/logical/LogicalSnapshot.java
index 322fc7378b..259f82bd90 100644
--- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/logical/LogicalSnapshot.java
+++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/logical/LogicalSnapshot.java
@@ -16,7 +16,7 @@
 package org.polypheny.db.catalog.snapshot.logical;

-import org.polypheny.db.catalog.Catalog.NamespaceType;
+import org.polypheny.db.catalog.logistic.NamespaceType;
 import org.polypheny.db.catalog.snapshot.Snapshot;

 public interface LogicalSnapshot extends Snapshot {
diff --git a/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/Rest.java b/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/Rest.java
index fb538c7bd9..462d93ae22 100644
--- a/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/Rest.java
+++ b/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/Rest.java
@@ -173,7 +173,7 @@ String processPatchResource( final ResourcePatchRequest resourcePatchRequest, fi
         // Table Modify
         AlgOptPlanner planner = statement.getQueryProcessor().getPlanner();
-        AlgOptCluster cluster = AlgOptCluster.create( planner, rexBuilder );
+        AlgOptCluster cluster = AlgOptCluster.create( planner, rexBuilder, traitSet, rootSchema );

         // Values
         AlgDataType tableRowType = table.getRowType();
@@ -232,7 +232,7 @@ String processDeleteResource( final ResourceDeleteRequest resourceDeleteRequest,
         // Table Modify
         AlgOptPlanner planner = statement.getQueryProcessor().getPlanner();
-        AlgOptCluster cluster = AlgOptCluster.create( planner, rexBuilder );
+        AlgOptCluster cluster = AlgOptCluster.create( planner, rexBuilder, traitSet, rootSchema );

         AlgNode algNode = algBuilder.build();
         RelModify modify = new LogicalRelModify(
@@ -278,7 +278,7 @@ String processPostResource( final ResourcePostRequest insertValueRequest, final
         // List valueColumnNames = this.valuesColumnNames( updateResourceRequest.values );
         AlgOptPlanner planner = statement.getQueryProcessor().getPlanner();
-        AlgOptCluster cluster = AlgOptCluster.create( planner, rexBuilder );
+        AlgOptCluster cluster = AlgOptCluster.create( planner, rexBuilder, traitSet, rootSchema );

         List valueColumnNames = this.valuesColumnNames( insertValueRequest.values );
         List rexValues = this.valuesNode( statement, algBuilder, rexBuilder, insertValueRequest, tableRows, inputStreams ).get( 0 );
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/SqlLanguagePlugin.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/SqlLanguagePlugin.java
index d51c842384..81bb7527f1 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/SqlLanguagePlugin.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/SqlLanguagePlugin.java
@@ -35,7 +35,7 @@
 import org.polypheny.db.algebra.operators.ChainedOperatorTable;
 import org.polypheny.db.algebra.operators.OperatorName;
 import org.polypheny.db.algebra.operators.OperatorTable;
-import org.polypheny.db.catalog.Catalog.NamespaceType;
+import org.polypheny.db.catalog.logistic.NamespaceType;
 import org.polypheny.db.config.PolyphenyDbConnectionProperty;
 import org.polypheny.db.languages.LanguageManager;
 import org.polypheny.db.languages.OperatorRegistry;
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/SqlProcessorImpl.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/SqlProcessorImpl.java
index bb4cfef196..1fd44b6534 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/SqlProcessorImpl.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/SqlProcessorImpl.java
@@ -37,7 +37,7 @@
 import org.polypheny.db.algebra.constant.Kind;
 import org.polypheny.db.algebra.type.AlgDataType;
 import org.polypheny.db.catalog.Catalog;
-import org.polypheny.db.catalog.Catalog.NamespaceType;
+import org.polypheny.db.catalog.logistic.NamespaceType;
 import org.polypheny.db.catalog.entity.CatalogColumn;
 import org.polypheny.db.catalog.entity.CatalogDefaultValue;
 import org.polypheny.db.catalog.entity.logical.LogicalTable;
@@ -191,7 +191,7 @@ public AlgRoot translate( Statement statement, Node query, QueryParameters param
         Config sqlToAlgConfig = NodeToAlgConverter.configBuilder().build();
         final RexBuilder rexBuilder = new RexBuilder( statement.getTransaction().getTypeFactory() );
-        final AlgOptCluster cluster = AlgOptCluster.create( statement.getQueryProcessor().getPlanner(), rexBuilder );
+        final AlgOptCluster cluster = AlgOptCluster.create( statement.getQueryProcessor().getPlanner(), rexBuilder, traitSet, rootSchema );

         final Config config = NodeToAlgConverter.configBuilder()
                 .config( sqlToAlgConfig )
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlInsert.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlInsert.java
index 8baa89bb84..65581b49c7 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlInsert.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlInsert.java
@@ -19,7 +19,7 @@
 import java.util.List;
 import lombok.Setter;
 import org.polypheny.db.algebra.constant.Kind;
-import org.polypheny.db.catalog.Catalog.NamespaceType;
+import org.polypheny.db.catalog.logistic.NamespaceType;
 import org.polypheny.db.languages.ParserPos;
 import org.polypheny.db.nodes.Node;
 import org.polypheny.db.nodes.Operator;
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlColumnDeclaration.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlColumnDeclaration.java
index 0a94edd256..9deba62186 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlColumnDeclaration.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlColumnDeclaration.java
@@ -21,7 +21,7 @@
 import java.util.List;
 import lombok.Getter;
 import org.polypheny.db.algebra.constant.Kind;
-import org.polypheny.db.catalog.Catalog.Collation;
+import org.polypheny.db.catalog.logistic.Collation;
 import org.polypheny.db.catalog.exceptions.UnknownCollationException;
 import org.polypheny.db.languages.ParserPos;
 import org.polypheny.db.nodes.Node;
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateMaterializedView.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateMaterializedView.java
index 773790cad6..91edaba84c 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateMaterializedView.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateMaterializedView.java
@@ -29,7 +29,7 @@
 import org.polypheny.db.algebra.AlgRoot;
 import org.polypheny.db.algebra.constant.Kind;
 import org.polypheny.db.catalog.Catalog;
-import org.polypheny.db.catalog.Catalog.PlacementType;
+import org.polypheny.db.catalog.logistic.PlacementType;
 import org.polypheny.db.catalog.entity.MaterializedCriteria;
 import org.polypheny.db.catalog.entity.MaterializedCriteria.CriteriaType;
 import org.polypheny.db.catalog.exceptions.ColumnAlreadyExistsException;
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateSchema.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateSchema.java
index 07a8321354..bec6b70891 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateSchema.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateSchema.java
@@ -22,7 +22,7 @@
 import java.util.List;
 import java.util.Objects;
 import org.polypheny.db.algebra.constant.Kind;
-import org.polypheny.db.catalog.Catalog.NamespaceType;
+import org.polypheny.db.catalog.logistic.NamespaceType;
 import org.polypheny.db.catalog.exceptions.NamespaceAlreadyExistsException;
 import org.polypheny.db.ddl.DdlManager;
 import org.polypheny.db.languages.ParserPos;
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateTable.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateTable.java
index 08efa10ed4..dd1466c6ed 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateTable.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateTable.java
@@ -29,7 +29,7 @@
 import org.polypheny.db.adapter.DataStore;
 import org.polypheny.db.algebra.constant.Kind;
 import org.polypheny.db.catalog.Catalog;
-import org.polypheny.db.catalog.Catalog.PlacementType;
+import org.polypheny.db.catalog.logistic.PlacementType;
 import org.polypheny.db.catalog.exceptions.EntityAlreadyExistsException;
 import org.polypheny.db.catalog.exceptions.GenericCatalogException;
 import org.polypheny.db.catalog.exceptions.UnknownColumnException;
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateView.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateView.java
index ac215d6d8c..4b4410746d 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateView.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateView.java
@@ -29,7 +29,7 @@
 import org.polypheny.db.algebra.AlgRoot;
 import org.polypheny.db.algebra.constant.Kind;
 import org.polypheny.db.catalog.Catalog;
-import org.polypheny.db.catalog.Catalog.PlacementType;
+import org.polypheny.db.catalog.logistic.PlacementType;
 import org.polypheny.db.catalog.exceptions.EntityAlreadyExistsException;
 import org.polypheny.db.catalog.exceptions.GenericCatalogException;
 import org.polypheny.db.catalog.exceptions.UnknownColumnException;
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlDdlNodes.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlDdlNodes.java
index d93e0aad8c..c040dac094 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlDdlNodes.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlDdlNodes.java
@@ -18,7 +18,7 @@
 import java.util.List;

-import org.polypheny.db.catalog.Catalog.NamespaceType;
+import org.polypheny.db.catalog.logistic.NamespaceType;
 import org.polypheny.db.languages.ParserPos;
 import org.polypheny.db.nodes.Operator;
 import org.polypheny.db.partition.raw.RawPartitionInformation;
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlDropMaterializedView.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlDropMaterializedView.java
index 91adc8e436..a200758f67 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlDropMaterializedView.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlDropMaterializedView.java
@@ -19,7 +19,7 @@
 import static org.polypheny.db.util.Static.RESOURCE;

 import org.polypheny.db.algebra.constant.Kind;
-import org.polypheny.db.catalog.Catalog.EntityType;
+import org.polypheny.db.catalog.logistic.EntityType;
 import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.ddl.DdlManager;
 import org.polypheny.db.ddl.exception.DdlOnSourceException;
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlDropView.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlDropView.java
index e3567978e3..733a98cf55 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlDropView.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlDropView.java
@@ -20,7 +20,7 @@
 import static org.polypheny.db.util.Static.RESOURCE;

 import org.polypheny.db.algebra.constant.Kind;
-import org.polypheny.db.catalog.Catalog.EntityType;
+import org.polypheny.db.catalog.logistic.EntityType;
 import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.ddl.DdlManager;
 import org.polypheny.db.ddl.exception.DdlOnSourceException;
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlKeyConstraint.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlKeyConstraint.java
index 35cc69395f..eea67d59cd 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlKeyConstraint.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlKeyConstraint.java
@@ -20,7 +20,7 @@
 import java.util.List;
 import lombok.Getter;
 import org.polypheny.db.algebra.constant.Kind;
-import org.polypheny.db.catalog.Catalog.ConstraintType;
+import org.polypheny.db.catalog.logistic.ConstraintType;
 import org.polypheny.db.languages.ParserPos;
 import org.polypheny.db.nodes.Node;
 import org.polypheny.db.nodes.Operator;
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altermaterializedview/SqlAlterMaterializedViewAddIndex.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altermaterializedview/SqlAlterMaterializedViewAddIndex.java
index ed06b2c822..b8b49abbed 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altermaterializedview/SqlAlterMaterializedViewAddIndex.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altermaterializedview/SqlAlterMaterializedViewAddIndex.java
@@ -22,7 +22,7 @@
 import java.util.Objects;
 import java.util.stream.Collectors;
 import org.polypheny.db.adapter.DataStore;
-import org.polypheny.db.catalog.Catalog.EntityType;
+import org.polypheny.db.catalog.logistic.EntityType;
 import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.catalog.exceptions.GenericCatalogException;
 import org.polypheny.db.catalog.exceptions.UnknownColumnException;
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altermaterializedview/SqlAlterMaterializedViewDropIndex.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altermaterializedview/SqlAlterMaterializedViewDropIndex.java
index 66a2b77207..d709e70089 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altermaterializedview/SqlAlterMaterializedViewDropIndex.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altermaterializedview/SqlAlterMaterializedViewDropIndex.java
@@ -20,7 +20,7 @@
 import java.util.List;
 import java.util.Objects;

-import org.polypheny.db.catalog.Catalog.EntityType;
+import org.polypheny.db.catalog.logistic.EntityType;
 import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.ddl.DdlManager;
 import org.polypheny.db.ddl.exception.DdlOnSourceException;
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altermaterializedview/SqlAlterMaterializedViewFreshnessManual.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altermaterializedview/SqlAlterMaterializedViewFreshnessManual.java
index f0e91cf3d0..c91eb99762 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altermaterializedview/SqlAlterMaterializedViewFreshnessManual.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altermaterializedview/SqlAlterMaterializedViewFreshnessManual.java
@@ -17,7 +17,7 @@
 package org.polypheny.db.sql.language.ddl.altermaterializedview;

 import java.util.List;
-import org.polypheny.db.catalog.Catalog.EntityType;
+import org.polypheny.db.catalog.logistic.EntityType;
 import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.ddl.DdlManager;
 import org.polypheny.db.languages.ParserPos;
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altermaterializedview/SqlAlterMaterializedViewRename.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altermaterializedview/SqlAlterMaterializedViewRename.java
index 2a2ab1e111..15a95a7d5c 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altermaterializedview/SqlAlterMaterializedViewRename.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altermaterializedview/SqlAlterMaterializedViewRename.java
@@ -20,7 +20,7 @@
 import java.util.List;
 import java.util.Objects;

-import org.polypheny.db.catalog.Catalog.EntityType;
+import org.polypheny.db.catalog.logistic.EntityType;
 import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.catalog.exceptions.EntityAlreadyExistsException;
 import org.polypheny.db.ddl.DdlManager;
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altermaterializedview/SqlAlterMaterializedViewRenameColumn.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altermaterializedview/SqlAlterMaterializedViewRenameColumn.java
index c2f364ce4e..b24fb5cfe4 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altermaterializedview/SqlAlterMaterializedViewRenameColumn.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altermaterializedview/SqlAlterMaterializedViewRenameColumn.java
@@ -20,7 +20,7 @@
 import java.util.List;
 import java.util.Objects;

-import org.polypheny.db.catalog.Catalog.EntityType;
+import org.polypheny.db.catalog.logistic.EntityType;
 import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.catalog.exceptions.ColumnAlreadyExistsException;
 import org.polypheny.db.ddl.DdlManager;
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterSourceTableAddColumn.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterSourceTableAddColumn.java
index 8a4e8290e0..b6c381a45a 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterSourceTableAddColumn.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterSourceTableAddColumn.java
@@ -22,7 +22,7 @@
 import java.util.List;
 import java.util.Objects;
 import lombok.extern.slf4j.Slf4j;
-import org.polypheny.db.catalog.Catalog.EntityType;
+import org.polypheny.db.catalog.logistic.EntityType;
 import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.catalog.exceptions.ColumnAlreadyExistsException;
 import org.polypheny.db.ddl.DdlManager;
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddColumn.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddColumn.java
index d0ab735a59..bd397af371 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddColumn.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddColumn.java
@@ -22,7 +22,7 @@
 import java.util.List;
 import java.util.Objects;
 import lombok.extern.slf4j.Slf4j;
-import org.polypheny.db.catalog.Catalog.EntityType;
+import org.polypheny.db.catalog.logistic.EntityType;
 import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.catalog.exceptions.ColumnAlreadyExistsException;
 import org.polypheny.db.ddl.DdlManager;
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddForeignKey.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddForeignKey.java
index 5632d57454..63d8a2880c 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddForeignKey.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddForeignKey.java
@@ -22,8 +22,8 @@
 import java.util.List;
 import java.util.Objects;
 import java.util.stream.Collectors;
-import org.polypheny.db.catalog.Catalog.EntityType;
-import org.polypheny.db.catalog.Catalog.ForeignKeyOption;
+import org.polypheny.db.catalog.logistic.EntityType;
+import org.polypheny.db.catalog.logistic.ForeignKeyOption;
 import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.catalog.exceptions.GenericCatalogException;
 import org.polypheny.db.catalog.exceptions.UnknownColumnException;
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddIndex.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddIndex.java
index e1aa64471f..689d84b757 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddIndex.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddIndex.java
@@ -23,7 +23,7 @@
 import java.util.Objects;
 import java.util.stream.Collectors;
 import org.polypheny.db.adapter.DataStore;
-import org.polypheny.db.catalog.Catalog.EntityType;
+import org.polypheny.db.catalog.logistic.EntityType;
 import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.catalog.exceptions.GenericCatalogException;
 import org.polypheny.db.catalog.exceptions.UnknownColumnException;
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddPartitions.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddPartitions.java
index 4ef1a32ce8..a62751daed 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddPartitions.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddPartitions.java
@@ -23,8 +23,8 @@
 import java.util.Objects;
 import java.util.stream.Collectors;
 import lombok.extern.slf4j.Slf4j;
-import org.polypheny.db.catalog.Catalog;
-import org.polypheny.db.catalog.Catalog.EntityType;
+import org.polypheny.db.catalog.logistic.EntityType;
+import org.polypheny.db.catalog.logistic.PartitionType;
 import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.catalog.exceptions.GenericCatalogException;
 import org.polypheny.db.catalog.exceptions.UnknownColumnException;
@@ -154,7 +154,7 @@ public void execute( Context context, Statement statement, QueryParameters param
         try {
             // Check if table is already partitioned
-            if ( catalogTable.partitionProperty.partitionType == Catalog.PartitionType.NONE ) {
+            if ( catalogTable.partitionProperty.partitionType == PartitionType.NONE ) {
                 DdlManager.getInstance().addPartitioning(
                         PartitionInformation.fromNodeLists(
                                 catalogTable,
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddPlacement.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddPlacement.java
index f8017eea06..8619003e09 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddPlacement.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddPlacement.java
@@ -25,7 +25,7 @@
 import java.util.stream.Collectors;
 import lombok.extern.slf4j.Slf4j;
 import org.polypheny.db.adapter.DataStore;
-import org.polypheny.db.catalog.Catalog.EntityType;
+import org.polypheny.db.catalog.logistic.EntityType;
 import org.polypheny.db.catalog.entity.CatalogColumn;
 import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.ddl.DdlManager;
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddPrimaryKey.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddPrimaryKey.java
index da4fd7479e..645ab57138 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddPrimaryKey.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddPrimaryKey.java
@@ -22,7 +22,7 @@
 import java.util.List;
 import java.util.Objects;
 import java.util.stream.Collectors;
-import org.polypheny.db.catalog.Catalog.EntityType;
+import org.polypheny.db.catalog.logistic.EntityType;
 import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.ddl.DdlManager;
 import org.polypheny.db.ddl.exception.DdlOnSourceException;
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddUniqueConstraint.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddUniqueConstraint.java
index b684294035..7def65e4d9 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddUniqueConstraint.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddUniqueConstraint.java
@@ -22,7 +22,7 @@
 import java.util.List;
 import java.util.Objects;
 import java.util.stream.Collectors;
-import org.polypheny.db.catalog.Catalog.EntityType;
+import org.polypheny.db.catalog.logistic.EntityType;
 import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.ddl.DdlManager;
 import org.polypheny.db.ddl.exception.DdlOnSourceException;
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableDropColumn.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableDropColumn.java
index ead565fdc6..75263c66ad 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableDropColumn.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableDropColumn.java
@@ -21,7 +21,7 @@
 import java.util.List;
 import java.util.Objects;

-import org.polypheny.db.catalog.Catalog.EntityType;
+import org.polypheny.db.catalog.logistic.EntityType;
 import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.ddl.DdlManager;
 import org.polypheny.db.ddl.exception.ColumnNotExistsException;
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableDropConstraint.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableDropConstraint.java
index b9b8a68bca..866db5fb3a 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableDropConstraint.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableDropConstraint.java
@@ -21,7 +21,7 @@
 import java.util.List;
 import java.util.Objects;

-import org.polypheny.db.catalog.Catalog.EntityType;
+import org.polypheny.db.catalog.logistic.EntityType;
 import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.ddl.DdlManager;
 import org.polypheny.db.ddl.exception.DdlOnSourceException;
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableDropForeignKey.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableDropForeignKey.java
index c48cd21e05..3e416b852c 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableDropForeignKey.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableDropForeignKey.java
@@ -21,7 +21,7 @@
 import java.util.List;
 import java.util.Objects;

-import org.polypheny.db.catalog.Catalog.EntityType;
+import org.polypheny.db.catalog.logistic.EntityType;
 import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.ddl.DdlManager;
 import org.polypheny.db.ddl.exception.DdlOnSourceException;
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableDropIndex.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableDropIndex.java
index 9bda8a231e..acb33df836 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableDropIndex.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableDropIndex.java
@@ -21,7 +21,7 @@
 import java.util.List;
 import java.util.Objects;

-import org.polypheny.db.catalog.Catalog.EntityType;
+import org.polypheny.db.catalog.logistic.EntityType;
 import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.ddl.DdlManager;
 import org.polypheny.db.ddl.exception.DdlOnSourceException;
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableDropPlacement.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableDropPlacement.java
index c40dc04fba..32bddb4ae7 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableDropPlacement.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableDropPlacement.java
@@ -22,7 +22,7 @@
 import java.util.List;
 import java.util.Objects;
 import org.polypheny.db.adapter.DataStore;
-import org.polypheny.db.catalog.Catalog.EntityType;
+import org.polypheny.db.catalog.logistic.EntityType;
 import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.ddl.DdlManager;
 import org.polypheny.db.ddl.exception.LastPlacementException;
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableDropPrimaryKey.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableDropPrimaryKey.java
index c25dab2c6b..c704c107fb 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableDropPrimaryKey.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableDropPrimaryKey.java
@@ -21,7 +21,7 @@
 import java.util.List;
 import java.util.Objects;

-import org.polypheny.db.catalog.Catalog.EntityType;
+import org.polypheny.db.catalog.logistic.EntityType;
 import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.ddl.DdlManager;
 import org.polypheny.db.ddl.exception.DdlOnSourceException;
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableMergePartitions.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableMergePartitions.java
index 9c3cfc4549..2d62337b64 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableMergePartitions.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableMergePartitions.java
@@ -20,8 +20,8 @@
 import java.util.List;
 import java.util.Objects;
 import lombok.extern.slf4j.Slf4j;
-import org.polypheny.db.catalog.Catalog;
-import org.polypheny.db.catalog.Catalog.EntityType;
+import org.polypheny.db.catalog.logistic.EntityType;
+import org.polypheny.db.catalog.logistic.PartitionType;
 import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.catalog.exceptions.GenericCatalogException;
 import org.polypheny.db.catalog.exceptions.UnknownDatabaseException;
@@ -89,7 +89,7 @@ public void execute( Context context, Statement statement, QueryParameters param
         }

         // Check if table is even partitioned
-        if ( catalogTable.partitionProperty.partitionType != Catalog.PartitionType.NONE ) {
+        if ( catalogTable.partitionProperty.partitionType != PartitionType.NONE ) {
             if ( log.isDebugEnabled() ) {
                 log.debug( "Merging partitions for table: {} with id {} on schema: {}",
                         catalogTable.name, catalogTable.id, catalogTable.getNamespaceName() );
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyColumn.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyColumn.java
index d0b9d2373d..12c0f7e4bf 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyColumn.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyColumn.java
@@ -21,8 +21,8 @@
 import java.util.List;

 import lombok.NonNull;
-import org.polypheny.db.catalog.Catalog.Collation;
-import org.polypheny.db.catalog.Catalog.EntityType;
+import org.polypheny.db.catalog.logistic.Collation;
+import org.polypheny.db.catalog.logistic.EntityType;
 import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.catalog.exceptions.GenericCatalogException;
 import org.polypheny.db.catalog.exceptions.UnknownCollationException;
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyPartitions.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyPartitions.java
index 3c1045c801..3c7dc0e3c8 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyPartitions.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyPartitions.java
@@ -27,7 +27,7 @@
 import org.polypheny.db.adapter.AdapterManager;
 import org.polypheny.db.adapter.DataStore;
 import org.polypheny.db.catalog.Catalog;
-import org.polypheny.db.catalog.Catalog.EntityType;
+import org.polypheny.db.catalog.logistic.EntityType;
 import org.polypheny.db.catalog.entity.CatalogPartitionGroup;
 import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.ddl.DdlManager;
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyPlacement.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyPlacement.java
index e88ab7adcb..b0a3ff80b8 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyPlacement.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyPlacement.java
@@ -24,7 +24,7 @@
 import java.util.stream.Collectors;
 import lombok.extern.slf4j.Slf4j;
 import org.polypheny.db.adapter.DataStore;
-import org.polypheny.db.catalog.Catalog.EntityType;
+import org.polypheny.db.catalog.logistic.EntityType;
 import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.ddl.DdlManager;
 import org.polypheny.db.ddl.exception.IndexPreventsRemovalException;
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyPlacementAddColumn.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyPlacementAddColumn.java
index a808b0c018..ad86afa64f 100644
---
a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyPlacementAddColumn.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyPlacementAddColumn.java @@ -22,7 +22,7 @@ import java.util.List; import java.util.Objects; import org.polypheny.db.adapter.DataStore; -import org.polypheny.db.catalog.Catalog.EntityType; +import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.exceptions.UnknownAdapterException; import org.polypheny.db.ddl.DdlManager; diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyPlacementDropColumn.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyPlacementDropColumn.java index 46043f402c..9477a13d83 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyPlacementDropColumn.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyPlacementDropColumn.java @@ -22,7 +22,7 @@ import java.util.List; import java.util.Objects; import org.polypheny.db.adapter.DataStore; -import org.polypheny.db.catalog.Catalog.EntityType; +import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.exceptions.UnknownAdapterException; import org.polypheny.db.ddl.DdlManager; diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableOwner.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableOwner.java index 0e68086aff..1b91a26760 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableOwner.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableOwner.java @@ -21,7 +21,7 @@ import java.util.List; import java.util.Objects; -import org.polypheny.db.catalog.Catalog.EntityType; +import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.exceptions.UnknownUserException; import org.polypheny.db.ddl.DdlManager; diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/alterview/SqlAlterViewRename.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/alterview/SqlAlterViewRename.java index a6b98cc8f9..17ab88d7ca 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/alterview/SqlAlterViewRename.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/alterview/SqlAlterViewRename.java @@ -20,7 +20,7 @@ import java.util.List; import java.util.Objects; -import org.polypheny.db.catalog.Catalog.EntityType; +import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.exceptions.EntityAlreadyExistsException; import org.polypheny.db.ddl.DdlManager; diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/alterview/SqlAlterViewRenameColumn.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/alterview/SqlAlterViewRenameColumn.java index b253a712a9..7ee7dd3cf2 100644 --- 
a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/alterview/SqlAlterViewRenameColumn.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/alterview/SqlAlterViewRenameColumn.java @@ -20,7 +20,7 @@ import java.util.List; import java.util.Objects; -import org.polypheny.db.catalog.Catalog.EntityType; +import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.exceptions.ColumnAlreadyExistsException; import org.polypheny.db.ddl.DdlManager; diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorImpl.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorImpl.java index ac1c498705..e7c436b35c 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorImpl.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorImpl.java @@ -69,7 +69,7 @@ import org.polypheny.db.algebra.type.AlgDataTypeSystem; import org.polypheny.db.algebra.type.AlgRecordType; import org.polypheny.db.algebra.type.DynamicRecordType; -import org.polypheny.db.catalog.Catalog.NamespaceType; +import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.languages.OperatorRegistry; import org.polypheny.db.languages.ParserPos; diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorUtil.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorUtil.java index de77010509..b8d1713840 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorUtil.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorUtil.java @@ -38,7 +38,7 @@ import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.algebra.type.AlgDataTypeFieldImpl; import org.polypheny.db.algebra.type.AlgDataTypeSystem; -import org.polypheny.db.catalog.entity.CatalogCollection; +import org.polypheny.db.catalog.entity.LogicalCollection; import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.languages.OperatorRegistry; import org.polypheny.db.languages.ParserPos; @@ -623,7 +623,7 @@ public static boolean isTableRelational( SqlValidatorImpl validator ) { if ( graph != null ) { return false; } - CatalogCollection collection = validator.getCatalogReader().getRootSchema().getCollection( id.names ); + LogicalCollection collection = validator.getCatalogReader().getRootSchema().getCollection( id.names ); if ( collection != null ) { return false; } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/sql2alg/SqlToAlgConverter.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/sql2alg/SqlToAlgConverter.java index 9db06aa288..5d7dd1b043 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/sql2alg/SqlToAlgConverter.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/sql2alg/SqlToAlgConverter.java @@ -122,8 +122,8 @@ import org.polypheny.db.algebra.type.AlgDataTypeFactory; import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.Catalog.EntityType; -import org.polypheny.db.catalog.Catalog.NamespaceType; +import org.polypheny.db.catalog.logistic.EntityType; +import 
org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.languages.NodeToAlgConverter; import org.polypheny.db.languages.OperatorRegistry; diff --git a/plugins/sql-language/src/test/java/org/polypheny/db/sql/AlgWriterTest.java b/plugins/sql-language/src/test/java/org/polypheny/db/sql/AlgWriterTest.java index 050c5d683e..07a38a95e1 100644 --- a/plugins/sql-language/src/test/java/org/polypheny/db/sql/AlgWriterTest.java +++ b/plugins/sql-language/src/test/java/org/polypheny/db/sql/AlgWriterTest.java @@ -40,7 +40,7 @@ import org.polypheny.db.algebra.logical.relational.LogicalRelScan; import org.polypheny.db.algebra.operators.OperatorName; import org.polypheny.db.algebra.type.AlgDataType; -import org.polypheny.db.catalog.Catalog.NamespaceType; +import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.languages.OperatorRegistry; import org.polypheny.db.plan.AlgOptUtil; import org.polypheny.db.rex.RexBuilder; diff --git a/plugins/sql-language/src/test/java/org/polypheny/db/sql/FrameworksTest.java b/plugins/sql-language/src/test/java/org/polypheny/db/sql/FrameworksTest.java index c6fd42c3c3..12a6813203 100644 --- a/plugins/sql-language/src/test/java/org/polypheny/db/sql/FrameworksTest.java +++ b/plugins/sql-language/src/test/java/org/polypheny/db/sql/FrameworksTest.java @@ -51,7 +51,7 @@ import org.polypheny.db.algebra.type.AlgDataTypeFactory; import org.polypheny.db.algebra.type.AlgDataTypeSystem; import org.polypheny.db.algebra.type.AlgDataTypeSystemImpl; -import org.polypheny.db.catalog.Catalog.NamespaceType; +import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.config.RuntimeConfig; import org.polypheny.db.languages.NodeParseException; @@ -417,7 +417,7 @@ public RelModify toModificationAlg( AlgOptCluster cluster, AlgOptEntity table, P @Override - public Queryable asQueryable( DataContext dataContext, SchemaPlus schema, String tableName ) { + public Queryable asQueryable( DataContext dataContext, PolyphenyDbSchema schema, String tableName ) { throw new UnsupportedOperationException(); } @@ -429,7 +429,7 @@ public Type getElementType() { @Override - public Expression getExpression( SchemaPlus schema, String tableName, Class clazz ) { + public Expression getExpression( PolyphenyDbSchema schema, String tableName, Class clazz ) { throw new UnsupportedOperationException(); } diff --git a/plugins/sql-language/src/test/java/org/polypheny/db/sql/InterpreterTest.java b/plugins/sql-language/src/test/java/org/polypheny/db/sql/InterpreterTest.java index 4d92d32364..b45c48f1a4 100644 --- a/plugins/sql-language/src/test/java/org/polypheny/db/sql/InterpreterTest.java +++ b/plugins/sql-language/src/test/java/org/polypheny/db/sql/InterpreterTest.java @@ -35,7 +35,7 @@ import org.polypheny.db.adapter.java.ReflectiveSchema; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.type.AlgDataType; -import org.polypheny.db.catalog.Catalog.NamespaceType; +import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.interpreter.Interpreter; import org.polypheny.db.languages.Parser.ParserConfig; import org.polypheny.db.nodes.Node; diff --git a/plugins/sql-language/src/test/java/org/polypheny/db/sql/PlannerTest.java b/plugins/sql-language/src/test/java/org/polypheny/db/sql/PlannerTest.java index 8ed09f65af..984385f272 100644 --- a/plugins/sql-language/src/test/java/org/polypheny/db/sql/PlannerTest.java +++ 
b/plugins/sql-language/src/test/java/org/polypheny/db/sql/PlannerTest.java @@ -78,7 +78,7 @@ import org.polypheny.db.algebra.rules.SortRemoveRule; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.Catalog.NamespaceType; +import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.languages.NodeParseException; import org.polypheny.db.languages.Parser; import org.polypheny.db.languages.Parser.ParserConfig; diff --git a/plugins/sql-language/src/test/java/org/polypheny/db/sql/SortRemoveRuleTest.java b/plugins/sql-language/src/test/java/org/polypheny/db/sql/SortRemoveRuleTest.java index e44c9bf4d9..985bbc36e3 100644 --- a/plugins/sql-language/src/test/java/org/polypheny/db/sql/SortRemoveRuleTest.java +++ b/plugins/sql-language/src/test/java/org/polypheny/db/sql/SortRemoveRuleTest.java @@ -38,7 +38,7 @@ import org.polypheny.db.algebra.rules.SemiJoinRules; import org.polypheny.db.algebra.rules.SortProjectTransposeRule; import org.polypheny.db.algebra.rules.SortRemoveRule; -import org.polypheny.db.catalog.Catalog.NamespaceType; +import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.languages.Parser.ParserConfig; import org.polypheny.db.nodes.Node; import org.polypheny.db.plan.AlgOptUtil; diff --git a/plugins/sql-language/src/test/java/org/polypheny/db/sql/language/SqlToAlgTestBase.java b/plugins/sql-language/src/test/java/org/polypheny/db/sql/language/SqlToAlgTestBase.java index 04e1d90089..e602662c3d 100644 --- a/plugins/sql-language/src/test/java/org/polypheny/db/sql/language/SqlToAlgTestBase.java +++ b/plugins/sql-language/src/test/java/org/polypheny/db/sql/language/SqlToAlgTestBase.java @@ -668,7 +668,7 @@ public AlgRoot convertSqlToRel( String sql ) { protected SqlToAlgConverter createSqlToRelConverter( final SqlValidator validator, final Prepare.CatalogReader catalogReader, final AlgDataTypeFactory typeFactory, final Config config ) { final RexBuilder rexBuilder = new RexBuilder( typeFactory ); - AlgOptCluster cluster = AlgOptCluster.create( getPlanner(), rexBuilder ); + AlgOptCluster cluster = AlgOptCluster.create( getPlanner(), rexBuilder, traitSet, rootSchema ); if ( clusterFactory != null ) { cluster = clusterFactory.apply( cluster ); } diff --git a/plugins/sql-language/src/test/java/org/polypheny/db/sql/language/SqlToRelConverterExtendedTest.java b/plugins/sql-language/src/test/java/org/polypheny/db/sql/language/SqlToRelConverterExtendedTest.java index f52f0869e6..f333981840 100644 --- a/plugins/sql-language/src/test/java/org/polypheny/db/sql/language/SqlToRelConverterExtendedTest.java +++ b/plugins/sql-language/src/test/java/org/polypheny/db/sql/language/SqlToRelConverterExtendedTest.java @@ -64,7 +64,7 @@ public static void foo( AlgNode alg ) { final AlgOptSchema[] schemas = { null }; alg.accept( new AlgShuttleImpl() { @Override - public AlgNode visit( RelScan scan ) { + public AlgNode visit( RelScan scan ) { schemas[0] = scan.getEntity().getRelOptSchema(); return super.visit( scan ); } diff --git a/plugins/sql-language/src/test/java/org/polypheny/db/sql/language/validate/LexCaseSensitiveTest.java b/plugins/sql-language/src/test/java/org/polypheny/db/sql/language/validate/LexCaseSensitiveTest.java index bf91c26e49..add01a33c7 100644 --- a/plugins/sql-language/src/test/java/org/polypheny/db/sql/language/validate/LexCaseSensitiveTest.java +++ b/plugins/sql-language/src/test/java/org/polypheny/db/sql/language/validate/LexCaseSensitiveTest.java @@ -31,7 +31,7 @@ import 
org.polypheny.db.adapter.java.ReflectiveSchema; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.constant.Lex; -import org.polypheny.db.catalog.Catalog.NamespaceType; +import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.config.RuntimeConfig; import org.polypheny.db.languages.NodeParseException; import org.polypheny.db.languages.Parser; diff --git a/plugins/sql-language/src/test/java/org/polypheny/db/sql/map/NamespaceToJsonMapperTest.java b/plugins/sql-language/src/test/java/org/polypheny/db/sql/map/NamespaceToJsonMapperTest.java index 8ab408ef7b..19d9089bac 100644 --- a/plugins/sql-language/src/test/java/org/polypheny/db/sql/map/NamespaceToJsonMapperTest.java +++ b/plugins/sql-language/src/test/java/org/polypheny/db/sql/map/NamespaceToJsonMapperTest.java @@ -25,8 +25,8 @@ import org.junit.Ignore; import org.junit.Test; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.Catalog.EntityType; -import org.polypheny.db.catalog.Catalog.NamespaceType; +import org.polypheny.db.catalog.logistic.EntityType; +import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.catalog.entity.CatalogColumn; import org.polypheny.db.catalog.entity.CatalogDatabase; import org.polypheny.db.catalog.entity.CatalogDefaultValue; diff --git a/plugins/sql-language/src/test/java/org/polypheny/db/sql/util/PlannerImplMock.java b/plugins/sql-language/src/test/java/org/polypheny/db/sql/util/PlannerImplMock.java index 20e0c2431e..b26991a78f 100644 --- a/plugins/sql-language/src/test/java/org/polypheny/db/sql/util/PlannerImplMock.java +++ b/plugins/sql-language/src/test/java/org/polypheny/db/sql/util/PlannerImplMock.java @@ -255,7 +255,7 @@ public AlgRoot alg( Node sql ) throws AlgConversionException { ensure( State.STATE_4_VALIDATED ); assert validatedSqlNode != null; final RexBuilder rexBuilder = createRexBuilder(); - final AlgOptCluster cluster = AlgOptCluster.create( planner, rexBuilder ); + final AlgOptCluster cluster = AlgOptCluster.create( planner, rexBuilder, traitSet, rootSchema ); final NodeToAlgConverter.Config config = new NodeToAlgConverter.ConfigBuilder() .config( sqlToRelConverterConfig ) diff --git a/plugins/sql-language/src/test/java/org/polypheny/db/sql/volcano/PlannerTests.java b/plugins/sql-language/src/test/java/org/polypheny/db/sql/volcano/PlannerTests.java index 54a092df93..eda2d6e03e 100644 --- a/plugins/sql-language/src/test/java/org/polypheny/db/sql/volcano/PlannerTests.java +++ b/plugins/sql-language/src/test/java/org/polypheny/db/sql/volcano/PlannerTests.java @@ -66,7 +66,7 @@ public boolean useAbstractConvertersForConversion( AlgTraitSet fromTraits, AlgTr static AlgOptCluster newCluster( VolcanoPlanner planner ) { final AlgDataTypeFactory typeFactory = new PolyTypeFactoryImpl( AlgDataTypeSystem.DEFAULT ); - return AlgOptCluster.create( planner, new RexBuilder( typeFactory ) ); + return AlgOptCluster.create( planner, new RexBuilder( typeFactory ), traitSet, rootSchema ); } diff --git a/plugins/sql-language/src/test/java/org/polypheny/db/sql/volcano/TraitPropagationTest.java b/plugins/sql-language/src/test/java/org/polypheny/db/sql/volcano/TraitPropagationTest.java index 0c784989d1..ef273e8501 100644 --- a/plugins/sql-language/src/test/java/org/polypheny/db/sql/volcano/TraitPropagationTest.java +++ b/plugins/sql-language/src/test/java/org/polypheny/db/sql/volcano/TraitPropagationTest.java @@ -465,7 +465,7 @@ private static AlgNode run( PropAction action, RuleSet rules ) throws Exception planner.addRule( r ); 
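The hunks through this stretch apply two mechanical refactorings: the catalog enums (NamespaceType, EntityType, and friends) move from inner types of Catalog to the org.polypheny.db.catalog.logistic package, and AlgOptCluster.create grows from two arguments to four. A minimal sketch of the new cluster construction as the change just below uses it, assuming traitSet and rootSchema fixtures supplied by the surrounding test class (package names are taken from imports visible in these hunks where possible and are otherwise assumptions):

import org.polypheny.db.algebra.type.AlgDataTypeFactory;
import org.polypheny.db.algebra.type.AlgDataTypeSystem;
import org.polypheny.db.plan.AlgOptCluster;
import org.polypheny.db.plan.AlgTraitSet;
import org.polypheny.db.plan.volcano.VolcanoPlanner;
import org.polypheny.db.rex.RexBuilder;
import org.polypheny.db.schema.PolyphenyDbSchema;
import org.polypheny.db.type.PolyTypeFactoryImpl;

class ClusterFixture {

    // The trait set and the root schema are now handed to the cluster up front,
    // replacing the old two-argument AlgOptCluster.create( planner, rexBuilder ).
    static AlgOptCluster newCluster( VolcanoPlanner planner, AlgTraitSet traitSet, PolyphenyDbSchema rootSchema ) {
        AlgDataTypeFactory typeFactory = new PolyTypeFactoryImpl( AlgDataTypeSystem.DEFAULT );
        return AlgOptCluster.create( planner, new RexBuilder( typeFactory ), traitSet, rootSchema );
    }

}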
} - final AlgOptCluster cluster = AlgOptCluster.create( planner, rexBuilder ); + final AlgOptCluster cluster = AlgOptCluster.create( planner, rexBuilder, traitSet, rootSchema ); return action.apply( cluster, catalogReader, prepareContext.getRootSchema().plus() ); } diff --git a/webui/src/main/java/org/polypheny/db/webui/Crud.java b/webui/src/main/java/org/polypheny/db/webui/Crud.java index 27e4859cea..8943911508 100644 --- a/webui/src/main/java/org/polypheny/db/webui/Crud.java +++ b/webui/src/main/java/org/polypheny/db/webui/Crud.java @@ -100,13 +100,13 @@ import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.Catalog.ConstraintType; -import org.polypheny.db.catalog.Catalog.EntityType; -import org.polypheny.db.catalog.Catalog.ForeignKeyOption; -import org.polypheny.db.catalog.Catalog.NamespaceType; -import org.polypheny.db.catalog.Catalog.PartitionType; -import org.polypheny.db.catalog.Catalog.PlacementType; -import org.polypheny.db.catalog.NameGenerator; +import org.polypheny.db.catalog.logistic.ConstraintType; +import org.polypheny.db.catalog.logistic.EntityType; +import org.polypheny.db.catalog.logistic.ForeignKeyOption; +import org.polypheny.db.catalog.logistic.NamespaceType; +import org.polypheny.db.catalog.logistic.PartitionType; +import org.polypheny.db.catalog.logistic.PlacementType; +import org.polypheny.db.catalog.logistic.NameGenerator; import org.polypheny.db.catalog.entity.CatalogAdapter.AdapterType; import org.polypheny.db.catalog.entity.CatalogColumn; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; @@ -471,7 +471,7 @@ void getTables( final Context ctx ) { } } - List tables = catalog.getTables( databaseId, new Catalog.Pattern( requestedSchema ), null ); + List tables = catalog.getTables( databaseId, new org.polypheny.db.catalog.logistic.Pattern( requestedSchema ), null ); ArrayList result = new ArrayList<>(); for ( LogicalTable t : tables ) { result.add( new DbTable( t.name, t.getNamespaceName(), t.modifiable, t.entityType ) ); @@ -657,7 +657,7 @@ void insertRow( final Context ctx ) { StringJoiner columns = new StringJoiner( ",", "(", ")" ); StringJoiner values = new StringJoiner( ",", "(", ")" ); - List catalogColumns = catalog.getColumns( new Catalog.Pattern( "APP" ), new Catalog.Pattern( split[0] ), new Catalog.Pattern( split[1] ), null ); + List catalogColumns = catalog.getColumns( new org.polypheny.db.catalog.logistic.Pattern( "APP" ), new org.polypheny.db.catalog.logistic.Pattern( split[0] ), new org.polypheny.db.catalog.logistic.Pattern( split[1] ), null ); try { int i = 0; for ( CatalogColumn catalogColumn : catalogColumns ) { @@ -1025,7 +1025,7 @@ void updateRow( final Context ctx ) throws ServletException, IOException { Statement statement = transaction.createStatement(); StringJoiner setStatements = new StringJoiner( ",", "", "" ); - List catalogColumns = catalog.getColumns( new Catalog.Pattern( "APP" ), new Catalog.Pattern( split[0] ), new Catalog.Pattern( split[1] ), null ); + List catalogColumns = catalog.getColumns( new org.polypheny.db.catalog.logistic.Pattern( "APP" ), new org.polypheny.db.catalog.logistic.Pattern( split[0] ), new org.polypheny.db.catalog.logistic.Pattern( split[1] ), null ); int i = 0; for ( CatalogColumn catalogColumn : catalogColumns ) { @@ -2502,7 +2502,7 @@ void getUml( final Context ctx ) { ArrayList fKeys = new ArrayList<>(); ArrayList tables = new ArrayList<>(); - List catalogEntities = 
catalog.getTables( databaseId, new Catalog.Pattern( request.schema ), null ); + List catalogEntities = catalog.getTables( databaseId, new org.polypheny.db.catalog.logistic.Pattern( request.schema ), null ); for ( LogicalTable catalogTable : catalogEntities ) { if ( catalogTable.entityType == EntityType.ENTITY || catalogTable.entityType == EntityType.SOURCE ) { // get foreign keys @@ -2884,7 +2884,7 @@ void schemaRequest( final Context ctx ) { // drop schema else if ( !schema.isCreate() && schema.isDrop() ) { if ( type == null ) { - List namespaces = catalog.getSchemas( Catalog.defaultDatabaseId, new Catalog.Pattern( schema.getName() ) ); + List namespaces = catalog.getSchemas( Catalog.defaultDatabaseId, new org.polypheny.db.catalog.logistic.Pattern( schema.getName() ) ); assert namespaces.size() == 1; type = namespaces.get( 0 ).namespaceType; @@ -2981,7 +2981,7 @@ public void getTypeInfo( final Context ctx ) { * Get available actions for foreign key constraints */ void getForeignKeyActions( final Context ctx ) { - ForeignKeyOption[] options = Catalog.ForeignKeyOption.values(); + ForeignKeyOption[] options = ForeignKeyOption.values(); String[] arr = new String[options.length]; for ( int i = 0; i < options.length; i++ ) { arr[i] = options[i].name(); diff --git a/webui/src/main/java/org/polypheny/db/webui/crud/LanguageCrud.java b/webui/src/main/java/org/polypheny/db/webui/crud/LanguageCrud.java index 4a675d3f55..161330d48d 100644 --- a/webui/src/main/java/org/polypheny/db/webui/crud/LanguageCrud.java +++ b/webui/src/main/java/org/polypheny/db/webui/crud/LanguageCrud.java @@ -34,10 +34,10 @@ import org.polypheny.db.algebra.AlgRoot; import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.Catalog.EntityType; -import org.polypheny.db.catalog.Catalog.NamespaceType; -import org.polypheny.db.catalog.Catalog.Pattern; -import org.polypheny.db.catalog.entity.CatalogCollection; +import org.polypheny.db.catalog.logistic.EntityType; +import org.polypheny.db.catalog.logistic.NamespaceType; +import org.polypheny.db.catalog.logistic.Pattern; +import org.polypheny.db.catalog.entity.LogicalCollection; import org.polypheny.db.catalog.entity.CatalogCollectionPlacement; import org.polypheny.db.catalog.entity.CatalogColumn; import org.polypheny.db.catalog.entity.logical.LogicalGraph; @@ -375,14 +375,14 @@ public void getCollectionPlacements( Context context ) { context.json( new Placement( e ) ); return; } - List collections = catalog.getCollections( namespaceId, new Pattern( collectionName ) ); + List collections = catalog.getCollections( namespaceId, new Pattern( collectionName ) ); if ( collections.size() != 1 ) { context.json( new Placement( new UnknownCollectionException( 0 ) ) ); return; } - CatalogCollection collection = catalog.getCollection( collections.get( 0 ).id ); + LogicalCollection collection = catalog.getCollection( collections.get( 0 ).id ); Placement placement = new Placement( false, List.of(), EntityType.ENTITY ); diff --git a/webui/src/main/java/org/polypheny/db/webui/models/DbTable.java b/webui/src/main/java/org/polypheny/db/webui/models/DbTable.java index 1c7ad02455..95bc04fd21 100644 --- a/webui/src/main/java/org/polypheny/db/webui/models/DbTable.java +++ b/webui/src/main/java/org/polypheny/db/webui/models/DbTable.java @@ -19,7 +19,7 @@ import java.util.ArrayList; import lombok.Getter; -import org.polypheny.db.catalog.Catalog.EntityType; +import org.polypheny.db.catalog.logistic.EntityType; /** diff --git 
a/webui/src/main/java/org/polypheny/db/webui/models/Placement.java b/webui/src/main/java/org/polypheny/db/webui/models/Placement.java index 45dd63a5b9..b5bf330629 100644 --- a/webui/src/main/java/org/polypheny/db/webui/models/Placement.java +++ b/webui/src/main/java/org/polypheny/db/webui/models/Placement.java @@ -20,9 +20,9 @@ import java.util.ArrayList; import java.util.List; import java.util.stream.Collectors; -import org.polypheny.db.catalog.Catalog.EntityType; -import org.polypheny.db.catalog.Catalog.PartitionType; -import org.polypheny.db.catalog.Catalog.PlacementType; +import org.polypheny.db.catalog.logistic.EntityType; +import org.polypheny.db.catalog.logistic.PartitionType; +import org.polypheny.db.catalog.logistic.PlacementType; import org.polypheny.db.catalog.entity.CatalogCollectionPlacement; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; import org.polypheny.db.catalog.entity.CatalogGraphPlacement; diff --git a/webui/src/main/java/org/polypheny/db/webui/models/Result.java b/webui/src/main/java/org/polypheny/db/webui/models/Result.java index 8cd6b38a71..d04259ed21 100644 --- a/webui/src/main/java/org/polypheny/db/webui/models/Result.java +++ b/webui/src/main/java/org/polypheny/db/webui/models/Result.java @@ -30,7 +30,7 @@ import lombok.Setter; import lombok.experimental.Accessors; import org.jetbrains.annotations.NotNull; -import org.polypheny.db.catalog.Catalog.NamespaceType; +import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.languages.QueryLanguage; import org.polypheny.db.webui.HttpServer; import org.polypheny.db.webui.models.requests.UIRequest; diff --git a/webui/src/main/java/org/polypheny/db/webui/models/Schema.java b/webui/src/main/java/org/polypheny/db/webui/models/Schema.java index 728dd9222d..20e133b5e6 100644 --- a/webui/src/main/java/org/polypheny/db/webui/models/Schema.java +++ b/webui/src/main/java/org/polypheny/db/webui/models/Schema.java @@ -19,7 +19,7 @@ import javax.annotation.Nullable; import lombok.Getter; -import org.polypheny.db.catalog.Catalog.NamespaceType; +import org.polypheny.db.catalog.logistic.NamespaceType; /** diff --git a/webui/src/main/java/org/polypheny/db/webui/models/SidebarElement.java b/webui/src/main/java/org/polypheny/db/webui/models/SidebarElement.java index d318365d0a..b4931184ed 100644 --- a/webui/src/main/java/org/polypheny/db/webui/models/SidebarElement.java +++ b/webui/src/main/java/org/polypheny/db/webui/models/SidebarElement.java @@ -20,7 +20,7 @@ import java.util.ArrayList; import lombok.Setter; import lombok.experimental.Accessors; -import org.polypheny.db.catalog.Catalog.NamespaceType; +import org.polypheny.db.catalog.logistic.NamespaceType; /** diff --git a/webui/src/main/java/org/polypheny/db/webui/models/requests/PartitioningRequest.java b/webui/src/main/java/org/polypheny/db/webui/models/requests/PartitioningRequest.java index 8b9c9524ee..80d919923f 100644 --- a/webui/src/main/java/org/polypheny/db/webui/models/requests/PartitioningRequest.java +++ b/webui/src/main/java/org/polypheny/db/webui/models/requests/PartitioningRequest.java @@ -17,7 +17,7 @@ package org.polypheny.db.webui.models.requests; -import org.polypheny.db.catalog.Catalog.PartitionType; +import org.polypheny.db.catalog.logistic.PartitionType; public class PartitioningRequest { diff --git a/webui/src/main/java/org/polypheny/db/webui/models/requests/SchemaTreeRequest.java b/webui/src/main/java/org/polypheny/db/webui/models/requests/SchemaTreeRequest.java index acd86320c6..3a6cd6f020 100644 --- 
a/webui/src/main/java/org/polypheny/db/webui/models/requests/SchemaTreeRequest.java +++ b/webui/src/main/java/org/polypheny/db/webui/models/requests/SchemaTreeRequest.java @@ -18,7 +18,7 @@ import java.util.List; -import org.polypheny.db.catalog.Catalog.NamespaceType; +import org.polypheny.db.catalog.logistic.NamespaceType; public class SchemaTreeRequest extends UIRequest { From 69ea89c532ee0323ba5a1ab489839f0c2efd66ef Mon Sep 17 00:00:00 2001 From: datomo Date: Sun, 26 Feb 2023 20:47:10 +0100 Subject: [PATCH 024/436] temporarily added Expression logic and statisticManager adjustment --- .../db/adapter/enumerable/EnumerableScan.java | 3 +- .../enumerable/EnumerableScanRule.java | 6 +-- .../db/adapter/java/ReflectiveSchema.java | 41 +++++++++---------- .../algebra/core/document/DocumentValues.java | 2 +- .../common/LogicalConstraintEnforcer.java | 10 ++--- .../db/algebra/logical/lpg/LogicalGraph.java | 9 ++-- .../algebra/logical/lpg/LogicalLpgValues.java | 2 +- .../algebra/rules/LoptOptimizeJoinRule.java | 20 ++++----- .../org/polypheny/db/catalog/Catalog.java | 6 ++- .../db/catalog/entity/LogicalCollection.java | 9 +++- .../allocation/AllocationCollection.java | 11 ++++- .../entity/allocation/AllocationGraph.java | 8 ++++ .../entity/allocation/AllocationTable.java | 9 ++++ .../catalog/entity/logical/LogicalGraph.java | 11 ++++- .../entity/physical/PhysicalCollection.java | 11 ++++- .../entity/physical/PhysicalGraph.java | 11 ++++- .../entity/physical/PhysicalTable.java | 12 +++++- .../polypheny/db/interpreter/ScanNode.java | 3 +- .../db/prepare/LixToAlgTranslator.java | 17 +++----- .../db/prepare/QueryableAlgBuilder.java | 4 +- .../polypheny/db/schema/LogicalSchema.java | 5 ++- .../org/polypheny/db/schema/Namespace.java | 5 ++- .../db/schema/PolySchemaBuilder.java | 36 +++------------- .../java/org/polypheny/db/schema/Schemas.java | 8 ++-- .../db/schema/impl/AbstractNamespace.java | 10 ++--- .../schema/impl/AbstractTableQueryable.java | 6 +-- .../db/schema/impl/DelegatingNamespace.java | 8 ++-- .../org/polypheny/db/tools/AlgBuilder.java | 6 +-- .../db/monitoring/statistics/QueryResult.java | 17 ++------ .../statistics/StatisticsManagerImpl.java | 20 ++++----- .../adapter/cottontail/CottontailSchema.java | 3 +- .../polypheny/db/adapter/jdbc/JdbcSchema.java | 3 +- .../rel2sql/RelToSqlConverterStructsTest.java | 3 +- 33 files changed, 179 insertions(+), 156 deletions(-) diff --git a/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableScan.java b/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableScan.java index 417dd90784..adfd95cda9 100644 --- a/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableScan.java +++ b/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableScan.java @@ -67,7 +67,6 @@ import org.polypheny.db.plan.AlgOptPlanner; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.plan.volcano.VolcanoCost; -import org.polypheny.db.schema.Entity; import org.polypheny.db.schema.StreamableEntity; import org.polypheny.db.util.BuiltInMethod; @@ -122,7 +121,7 @@ public int hashCode() { /** * Returns whether EnumerableScan can generate code to handle a particular variant of the Table SPI. */ - public static boolean canHandle( Entity entity ) { + public static boolean canHandle( CatalogEntity entity ) { // FilterableTable and ProjectableFilterableTable cannot be handled in enumerable convention because they might reject filters and those filters would need to be handled dynamically.
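The theme of this patch is letting catalog entities stand in for the old schema SPI objects during code generation: canHandle now inspects a CatalogEntity, and the scan rule asks the entity for its own linq4j Expression instead of going through AlgOptEntity.getExpression( Object.class ). Condensed from the EnumerableScan and EnumerableScanRule hunks around this point, the converted rule reads roughly as below; scan.getEntity() is an assumed accessor (the hunks only show the entity variable in use), and later hunks in this patch implement asExpression() per entity kind as Expressions.call( Catalog.CATALOG_EXPRESSION, "get...", Expressions.constant( id ) ):

public AlgNode convert( AlgNode alg ) {
    LogicalRelScan scan = (LogicalRelScan) alg;
    CatalogEntity entity = scan.getEntity();   // assumed accessor for the scanned catalog entity
    if ( !EnumerableScan.canHandle( entity ) ) {
        return null;   // only queryable or scannable entities; filterable ones may reject filters at runtime
    }
    Expression expression = entity.asExpression();   // the entity now produces its own expression
    if ( expression == null ) {
        return null;
    }
    return EnumerableScan.create( scan.getCluster(), entity );
}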
return entity instanceof QueryableEntity || entity instanceof ScannableEntity; } diff --git a/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableScanRule.java b/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableScanRule.java index 401639cabf..8371716d65 100644 --- a/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableScanRule.java +++ b/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableScanRule.java @@ -41,9 +41,7 @@ import org.polypheny.db.algebra.logical.relational.LogicalRelScan; import org.polypheny.db.algebra.logical.relational.LogicalTableFunctionScan; import org.polypheny.db.catalog.entity.logical.LogicalTable; -import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.Convention; -import org.polypheny.db.schema.Entity; import org.polypheny.db.tools.AlgBuilderFactory; @@ -69,11 +67,11 @@ public AlgNode convert( AlgNode alg ) { if ( !EnumerableScan.canHandle( entity ) ) { return null; } - final Expression expression = algOptEntity.getExpression( Object.class ); + final Expression expression = entity.asExpression(); if ( expression == null ) { return null; } - return EnumerableScan.create( scan.getCluster(), algOptEntity ); + return EnumerableScan.create( scan.getCluster(), entity ); } } diff --git a/core/src/main/java/org/polypheny/db/adapter/java/ReflectiveSchema.java b/core/src/main/java/org/polypheny/db/adapter/java/ReflectiveSchema.java index c25bc5b779..6c501d1ae6 100644 --- a/core/src/main/java/org/polypheny/db/adapter/java/ReflectiveSchema.java +++ b/core/src/main/java/org/polypheny/db/adapter/java/ReflectiveSchema.java @@ -34,16 +34,13 @@ package org.polypheny.db.adapter.java; -import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableMultimap; -import com.google.common.collect.Iterables; import com.google.common.collect.Multimap; import java.lang.reflect.Field; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.lang.reflect.Type; -import java.util.Collections; import java.util.List; import java.util.Map; import org.apache.calcite.linq4j.Enumerable; @@ -59,13 +56,13 @@ import org.polypheny.db.algebra.AlgReferentialConstraint; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; +import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.schema.Entity; import org.polypheny.db.schema.Function; import org.polypheny.db.schema.Namespace; import org.polypheny.db.schema.Namespace.Schema; import org.polypheny.db.schema.PolyphenyDbSchema; import org.polypheny.db.schema.ScannableEntity; -import org.polypheny.db.schema.SchemaPlus; import org.polypheny.db.schema.Schemas; import org.polypheny.db.schema.Statistic; import org.polypheny.db.schema.Statistics; @@ -75,7 +72,6 @@ import org.polypheny.db.schema.impl.AbstractTableQueryable; import org.polypheny.db.schema.impl.ReflectiveFunctionBase; import org.polypheny.db.util.BuiltInMethod; -import org.polypheny.db.util.Util; /** @@ -85,7 +81,7 @@ public class ReflectiveSchema extends AbstractNamespace implements Schema { private final Class clazz; private Object target; - private Map tableMap; + private Map tableMap; private Multimap functionMap; @@ -119,7 +115,7 @@ public Object getTarget() { @Override - public Map getTableMap() { + public Map getTableMap() { if ( tableMap == null ) { tableMap = createTableMap(); } @@ -127,17 +123,17 @@ public Map getTableMap() { } - private Map 
createTableMap() { - final ImmutableMap.Builder builder = ImmutableMap.builder(); + private Map createTableMap() { + final ImmutableMap.Builder builder = ImmutableMap.builder(); for ( Field field : clazz.getFields() ) { final String fieldName = field.getName(); - final Entity entity = fieldRelation( field ); + final CatalogEntity entity = fieldRelation( field ); if ( entity == null ) { continue; } builder.put( fieldName, entity ); } - Map tableMap = builder.build(); + Map tableMap = builder.build(); // Unique-Key - Foreign-Key for ( Field field : clazz.getFields() ) { if ( AlgReferentialConstraint.class.isAssignableFrom( field.getType() ) ) { @@ -147,9 +143,10 @@ private Map createTableMap() { } catch ( IllegalAccessException e ) { throw new RuntimeException( "Error while accessing field " + field, e ); } - FieldEntity table = (FieldEntity) tableMap.get( Util.last( rc.getSourceQualifiedName() ) ); - assert table != null; - table.statistic = Statistics.of( ImmutableList.copyOf( Iterables.concat( table.getStatistic().getReferentialConstraints(), Collections.singleton( rc ) ) ) ); + // CatalogEntity table = (FieldEntity) tableMap.get( Util.last( rc.getSourceQualifiedName() ) ); + // assert table != null; + // table.statistic = Statistics.of( ImmutableList.copyOf( Iterables.concat( table.getStatistic().getReferentialConstraints(), Collections.singleton( rc ) ) ) ); + // todo dl; } } return tableMap; @@ -184,7 +181,7 @@ private Multimap createFunctionMap() { /** * Returns an expression for the object wrapped by this schema (not the schema itself). */ - Expression getTargetExpression( SchemaPlus parentSchema, String name ) { + Expression getTargetExpression( PolyphenyDbSchema parentSchema, String name ) { return Types.castIfNecessary( target.getClass(), Expressions.call( @@ -196,7 +193,7 @@ Expression getTargetExpression( SchemaPlus parentSchema, String name ) { /** * Returns a table based on a particular field of this schema. If the field is not of the right type to be a relation, returns null. 
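ReflectiveSchema's table map now resolves field names to catalog entities rather than schema Entity objects, with the old FieldEntity statistics wiring for referential constraints parked behind the author's todo. Condensed, and with the generic parameters spelled out as the surrounding field and builder declarations imply, the rewritten map construction amounts to this sketch:

private Map<String, CatalogEntity> createTableMap() {
    ImmutableMap.Builder<String, CatalogEntity> builder = ImmutableMap.builder();
    for ( Field field : clazz.getFields() ) {
        CatalogEntity entity = fieldRelation( field );   // null when the field is not relation-shaped
        if ( entity != null ) {
            builder.put( field.getName(), entity );
        }
    }
    return builder.build();
}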
*/ - private Entity fieldRelation( final Field field ) { + private CatalogEntity fieldRelation( final Field field ) { final Type elementType = getElementType( field.getType() ); if ( elementType == null ) { return null; @@ -207,15 +204,16 @@ private Entity fieldRelation( final Field field ) { } catch ( IllegalAccessException e ) { throw new RuntimeException( "Error while accessing field " + field, e ); } - @SuppressWarnings("unchecked") final Enumerable enumerable = toEnumerable( o ); - return new FieldEntity<>( field, elementType, enumerable, null, null, null ); + final Enumerable enumerable = (Enumerable) toEnumerable( o ); + return null; + // return new FieldEntity<>( field, elementType, enumerable, null, null, null ); todo dl } /** * Deduces the element type of a collection; same logic as {@link #toEnumerable} */ - private static Type getElementType( Class clazz ) { + private static Type getElementType( Class clazz ) { if ( clazz.isArray() ) { return clazz.getComponentType(); } @@ -226,7 +224,7 @@ private static Type getElementType( Class clazz ) { } - private static Enumerable toEnumerable( final Object o ) { + private static Enumerable toEnumerable( final Object o ) { if ( o.getClass().isArray() ) { if ( o instanceof Object[] ) { return Linq4j.asEnumerable( (Object[]) o ); @@ -364,7 +362,8 @@ public Statistic getStatistic() { @Override public Expression getExpression( PolyphenyDbSchema schema, String tableName, Class clazz ) { - return Expressions.field( schema.unwrap( ReflectiveSchema.class ).getTargetExpression( schema.getParentSchema(), schema.getName() ), field ); + return null; // todo dl + // return Expressions.field( schema.unwrap( ReflectiveSchema.class ).getTargetExpression( schema.getParentSchema(), schema.getName() ), field ); } } diff --git a/core/src/main/java/org/polypheny/db/algebra/core/document/DocumentValues.java b/core/src/main/java/org/polypheny/db/algebra/core/document/DocumentValues.java index 49709b6a8d..e488a9b43e 100644 --- a/core/src/main/java/org/polypheny/db/algebra/core/document/DocumentValues.java +++ b/core/src/main/java/org/polypheny/db/algebra/core/document/DocumentValues.java @@ -124,7 +124,7 @@ public DocType getDocType() { public LogicalValues getRelationalEquivalent() { AlgTraitSet out = traitSet.replace( ModelTrait.RELATIONAL ); - AlgOptCluster cluster = AlgOptCluster.create( getCluster().getPlanner(), getCluster().getRexBuilder(), traitSet, rootSchema ); + AlgOptCluster cluster = AlgOptCluster.create( getCluster().getPlanner(), getCluster().getRexBuilder(), traitSet, getCluster().getRootSchema() ); return new LogicalValues( cluster, out, ((DocumentType) rowType).asRelational(), relationalize( documentTuples, cluster.getRexBuilder() ) ); } diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalConstraintEnforcer.java b/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalConstraintEnforcer.java index 4a528eca47..d960924783 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalConstraintEnforcer.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalConstraintEnforcer.java @@ -30,25 +30,23 @@ import org.polypheny.db.algebra.AlgShuttle; import org.polypheny.db.algebra.AlgShuttleImpl; import org.polypheny.db.algebra.core.JoinAlgType; -import org.polypheny.db.algebra.core.relational.RelModify; import org.polypheny.db.algebra.core.common.ConstraintEnforcer; +import org.polypheny.db.algebra.core.relational.RelModify; import 
org.polypheny.db.algebra.exceptions.ConstraintViolationException; import org.polypheny.db.algebra.logical.relational.LogicalFilter; import org.polypheny.db.algebra.logical.relational.LogicalRelModify; import org.polypheny.db.algebra.logical.relational.LogicalRelScan; import org.polypheny.db.algebra.operators.OperatorName; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.logistic.ConstraintType; import org.polypheny.db.catalog.entity.CatalogConstraint; import org.polypheny.db.catalog.entity.CatalogForeignKey; import org.polypheny.db.catalog.entity.CatalogKey.EnforcementTime; import org.polypheny.db.catalog.entity.CatalogPrimaryKey; import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.catalog.logistic.ConstraintType; import org.polypheny.db.config.RuntimeConfig; import org.polypheny.db.languages.OperatorRegistry; import org.polypheny.db.plan.AlgOptCluster; -import org.polypheny.db.plan.AlgOptEntity; -import org.polypheny.db.plan.AlgOptSchema; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.rex.RexBuilder; import org.polypheny.db.rex.RexInputRef; @@ -397,12 +395,12 @@ public AlgNode accept( AlgShuttle shuttle ) { } - public static LogicalTable getCatalogTable( RelModify modify ) { + public static LogicalTable getCatalogTable( RelModify modify ) { if ( modify.getEntity() == null ) { throw new RuntimeException( "The table was not found in the catalog!" ); } - return (LogicalTable) modify.getEntity().getCatalogEntity(); + return (LogicalTable) modify.getEntity(); } diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalGraph.java b/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalGraph.java index db606e3aff..017726135b 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalGraph.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalGraph.java @@ -26,14 +26,13 @@ import org.polypheny.db.algebra.core.relational.RelationalTransformable; import org.polypheny.db.algebra.type.AlgDataTypeImpl; import org.polypheny.db.algebra.type.AlgProtoDataType; -import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.catalog.entity.CatalogEntity; +import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.plan.AlgOptEntity.ToAlgContext; import org.polypheny.db.prepare.Prepare.CatalogReader; -import org.polypheny.db.schema.Entity; import org.polypheny.db.schema.Function; import org.polypheny.db.schema.Namespace; -import org.polypheny.db.schema.SchemaPlus; +import org.polypheny.db.schema.PolyphenyDbSchema; import org.polypheny.db.schema.SchemaVersion; import org.polypheny.db.schema.Schemas; import org.polypheny.db.schema.TranslatableGraph; @@ -62,7 +61,7 @@ public List getRelationalEquivalent( List values, List getSubNamespaceNames() { @Override - public Expression getExpression( SchemaPlus parentSchema, String name ) { + public Expression getExpression( PolyphenyDbSchema parentSchema, String name ) { return Schemas.subSchemaExpression( parentSchema, name, LogicalGraph.class ); } diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgValues.java b/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgValues.java index 2907746b5a..104be93273 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgValues.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgValues.java @@ -117,7 +117,7 @@ public static LogicalLpgValues create( public List 
getRelationalEquivalent( List values, List entities, CatalogReader catalogReader ) { AlgTraitSet out = traitSet.replace( ModelTrait.RELATIONAL ); - AlgOptCluster cluster = AlgOptCluster.create( getCluster().getPlanner(), getCluster().getRexBuilder(), traitSet, rootSchema ); + AlgOptCluster cluster = AlgOptCluster.create( getCluster().getPlanner(), getCluster().getRexBuilder(), out, catalogReader.getRootSchema() ); LogicalValues nodeValues = new LogicalValues( cluster, out, entities.get( 0 ).getRowType(), getNodeValues( nodes ) ); LogicalValues nodePropertyValues = new LogicalValues( cluster, out, entities.get( 1 ).getRowType(), getNodePropertyValues( nodes ) ); diff --git a/core/src/main/java/org/polypheny/db/algebra/rules/LoptOptimizeJoinRule.java b/core/src/main/java/org/polypheny/db/algebra/rules/LoptOptimizeJoinRule.java index 8df4138fb7..23ce94f525 100644 --- a/core/src/main/java/org/polypheny/db/algebra/rules/LoptOptimizeJoinRule.java +++ b/core/src/main/java/org/polypheny/db/algebra/rules/LoptOptimizeJoinRule.java @@ -57,9 +57,9 @@ import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; import org.polypheny.db.algebra.type.AlgDataTypeField; +import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.languages.OperatorRegistry; import org.polypheny.db.plan.AlgOptCost; -import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptRule; import org.polypheny.db.plan.AlgOptRuleCall; import org.polypheny.db.plan.AlgOptUtil; @@ -252,10 +252,10 @@ private void setJoinKey( ImmutableBitSet.Builder joinKeys, ImmutableBitSet.Build */ private void findRemovableSelfJoins( AlgMetadataQuery mq, LoptMultiJoin multiJoin ) { // Candidates for self-joins must be simple factors - Map simpleFactors = getSimpleFactors( mq, multiJoin ); + Map simpleFactors = getSimpleFactors( mq, multiJoin ); // See if a simple factor is repeated and therefore potentially is part of a self-join. Restrict each factor to at most one self-join. - final List repeatedTables = new ArrayList<>(); + final List repeatedTables = new ArrayList<>(); final TreeSet sortedFactors = new TreeSet<>(); sortedFactors.addAll( simpleFactors.keySet() ); final Map selfJoinPairs = new HashMap<>(); @@ -267,7 +267,7 @@ private void findRemovableSelfJoins( AlgMetadataQuery mq, LoptMultiJoin multiJoi for ( int j = i + 1; j < factors.length; j++ ) { int leftFactor = factors[i]; int rightFactor = factors[j]; - if ( simpleFactors.get( leftFactor ).getCatalogEntity().id == simpleFactors.get( rightFactor ).getCatalogEntity().id ) { + if ( simpleFactors.get( leftFactor ).id == simpleFactors.get( rightFactor ).id ) { selfJoinPairs.put( leftFactor, rightFactor ); repeatedTables.add( simpleFactors.get( leftFactor ) ); break; @@ -298,8 +298,8 @@ private void findRemovableSelfJoins( AlgMetadataQuery mq, LoptMultiJoin multiJoi * @param multiJoin join factors being optimized * @return map consisting of the simple factors and the tables they correspond */ - private Map getSimpleFactors( AlgMetadataQuery mq, LoptMultiJoin multiJoin ) { - final Map returnList = new HashMap<>(); + private Map getSimpleFactors( AlgMetadataQuery mq, LoptMultiJoin multiJoin ) { + final Map returnList = new HashMap<>(); // Loop through all join factors and locate the ones where each column referenced from the factor is not derived and originates from the same underlying table. Also, discard factors that // are null-generating or will be removed because of semijoins. 
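With table origins reported as CatalogEntity, the join-ordering rule can decide factor identity by id alone. A sketch of the self-join test, condensed from the getSimpleFactors and isRemovableSelfJoin hunks that follow (left and right are the join inputs, as in the original method):

final AlgMetadataQuery mq = joinRel.getCluster().getMetadataQuery();
final CatalogEntity leftTable = mq.getTableOrigin( left );
final CatalogEntity rightTable = mq.getTableOrigin( right );
// Identity is now a plain id comparison, with no detour through
// the former AlgOptEntity.getCatalogEntity() indirection.
if ( leftTable == null || rightTable == null || leftTable.id != rightTable.id ) {
    return false;   // not a removable self-join
}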
@@ -311,7 +311,7 @@ private Map getSimpleFactors( AlgMetadataQuery mq, LoptMu continue; } final AlgNode alg = multiJoin.getJoinFactor( factIdx ); - final AlgOptEntity table = mq.getTableOrigin( alg ); + final CatalogEntity table = mq.getTableOrigin( alg ); if ( table != null ) { returnList.put( factIdx, table ); } @@ -1509,15 +1509,15 @@ public static boolean isRemovableSelfJoin( Join joinRel ) { // Make sure the join is between the same simple factor final AlgMetadataQuery mq = joinRel.getCluster().getMetadataQuery(); - final AlgOptEntity leftTable = mq.getTableOrigin( left ); + final CatalogEntity leftTable = mq.getTableOrigin( left ); if ( leftTable == null ) { return false; } - final AlgOptEntity rightTable = mq.getTableOrigin( right ); + final CatalogEntity rightTable = mq.getTableOrigin( right ); if ( rightTable == null ) { return false; } - if ( leftTable.getCatalogEntity().id != rightTable.getCatalogEntity().id ) { + if ( leftTable.id != rightTable.id ) { return false; } diff --git a/core/src/main/java/org/polypheny/db/catalog/Catalog.java b/core/src/main/java/org/polypheny/db/catalog/Catalog.java index 1bca25de8f..e5ba93e21b 100644 --- a/core/src/main/java/org/polypheny/db/catalog/Catalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/Catalog.java @@ -23,6 +23,8 @@ import java.beans.PropertyChangeSupport; import java.util.List; import java.util.Map; +import org.apache.calcite.linq4j.tree.Expression; +import org.apache.calcite.linq4j.tree.Expressions; import org.pf4j.ExtensionPoint; import org.polypheny.db.adapter.DataStore; import org.polypheny.db.algebra.AlgCollation; @@ -30,7 +32,6 @@ import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.catalog.entity.CatalogAdapter; import org.polypheny.db.catalog.entity.CatalogAdapter.AdapterType; -import org.polypheny.db.catalog.entity.LogicalCollection; import org.polypheny.db.catalog.entity.CatalogCollectionMapping; import org.polypheny.db.catalog.entity.CatalogCollectionPlacement; import org.polypheny.db.catalog.entity.CatalogColumn; @@ -51,6 +52,7 @@ import org.polypheny.db.catalog.entity.CatalogSchema; import org.polypheny.db.catalog.entity.CatalogUser; import org.polypheny.db.catalog.entity.CatalogView; +import org.polypheny.db.catalog.entity.LogicalCollection; import org.polypheny.db.catalog.entity.MaterializedCriteria; import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.catalog.entity.logical.LogicalTable; @@ -94,6 +96,8 @@ public abstract class Catalog implements ExtensionPoint { public static boolean memoryCatalog; public static boolean testMode; + public static final Expression CATALOG_EXPRESSION = Expressions.call( Catalog.class, "getInstance" ); + public static Catalog setAndGetInstance( Catalog catalog ) { if ( INSTANCE != null ) { diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/LogicalCollection.java b/core/src/main/java/org/polypheny/db/catalog/entity/LogicalCollection.java index 5ea51014ec..56c23f931f 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/LogicalCollection.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/LogicalCollection.java @@ -25,10 +25,12 @@ import lombok.Getter; import lombok.NonNull; import lombok.SneakyThrows; +import org.apache.calcite.linq4j.tree.Expression; +import org.apache.calcite.linq4j.tree.Expressions; import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.entity.logical.Logical; import org.polypheny.db.catalog.logistic.EntityType; import 
org.polypheny.db.catalog.logistic.NamespaceType; -import org.polypheny.db.catalog.entity.logical.Logical; public class LogicalCollection extends CatalogEntity implements CatalogObject, Logical { @@ -87,4 +89,9 @@ public LogicalCollection setPhysicalName( String physicalCollectionName ) { } + @Override + public Expression asExpression() { + return Expressions.call( Catalog.CATALOG_EXPRESSION, "getCollection", Expressions.constant( id ) ); + } + } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationCollection.java b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationCollection.java index a64cd4c3a9..db2a5b26ed 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationCollection.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationCollection.java @@ -17,9 +17,12 @@ package org.polypheny.db.catalog.entity.allocation; import java.io.Serializable; +import org.apache.calcite.linq4j.tree.Expression; +import org.apache.calcite.linq4j.tree.Expressions; +import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.logistic.NamespaceType; -import org.polypheny.db.catalog.entity.CatalogEntity; public class AllocationCollection extends CatalogEntity implements Allocation { @@ -33,4 +36,10 @@ public Serializable[] getParameterArray() { return new Serializable[0]; } + + @Override + public Expression asExpression() { + return Expressions.call( Catalog.CATALOG_EXPRESSION, "getAllocCollection", Expressions.constant( id ) ); + } + } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationGraph.java b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationGraph.java index 3a46062fd6..0611d31f0a 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationGraph.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationGraph.java @@ -17,6 +17,9 @@ package org.polypheny.db.catalog.entity.allocation; import java.io.Serializable; +import org.apache.calcite.linq4j.tree.Expression; +import org.apache.calcite.linq4j.tree.Expressions; +import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.catalog.entity.logical.LogicalGraph; @@ -40,5 +43,10 @@ public Serializable[] getParameterArray() { } + @Override + public Expression asExpression() { + return Expressions.call( Catalog.CATALOG_EXPRESSION, "getAllocGraph", Expressions.constant( id ) ); + } + } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationTable.java b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationTable.java index cc48e1cfcd..a7b90dcb59 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationTable.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationTable.java @@ -17,6 +17,9 @@ package org.polypheny.db.catalog.entity.allocation; import java.io.Serializable; +import org.apache.calcite.linq4j.tree.Expression; +import org.apache.calcite.linq4j.tree.Expressions; +import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.catalog.entity.logical.LogicalGraph; @@ -32,4 +35,10 @@ public Serializable[] getParameterArray() { return new Serializable[0]; } + + @Override + public Expression asExpression() { + return 
Expressions.call( Catalog.CATALOG_EXPRESSION, "getAllocTable", Expressions.constant( id ) ); + } + } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalGraph.java b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalGraph.java index bd6ec67436..42e8358767 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalGraph.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalGraph.java @@ -26,10 +26,12 @@ import lombok.EqualsAndHashCode; import lombok.NonNull; import lombok.experimental.SuperBuilder; +import org.apache.calcite.linq4j.tree.Expression; +import org.apache.calcite.linq4j.tree.Expressions; +import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.logistic.NamespaceType; -import org.polypheny.db.catalog.entity.CatalogEntity; -import org.polypheny.db.catalog.entity.CatalogObject; @SuperBuilder(toBuilder = true) @EqualsAndHashCode(callSuper = false) @@ -87,4 +89,9 @@ public LogicalGraph removePlacement( int adapterId ) { } + @Override + public Expression asExpression() { + return Expressions.call( Catalog.CATALOG_EXPRESSION, "getGraph", Expressions.constant( id ) ); + } + } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalCollection.java b/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalCollection.java index 1cfbc7cf9f..6d93d00a7a 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalCollection.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalCollection.java @@ -17,9 +17,12 @@ package org.polypheny.db.catalog.entity.physical; import java.io.Serializable; +import org.apache.calcite.linq4j.tree.Expression; +import org.apache.calcite.linq4j.tree.Expressions; +import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.logistic.NamespaceType; -import org.polypheny.db.catalog.entity.CatalogEntity; public class PhysicalCollection extends CatalogEntity implements Physical { @@ -33,4 +36,10 @@ public Serializable[] getParameterArray() { return new Serializable[0]; } + + @Override + public Expression asExpression() { + return Expressions.call( Catalog.CATALOG_EXPRESSION, "getPhysicalCollection", Expressions.constant( id ) ); + } + } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalGraph.java b/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalGraph.java index e31c5d0e4b..e69de629b7 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalGraph.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalGraph.java @@ -17,9 +17,12 @@ package org.polypheny.db.catalog.entity.physical; import java.io.Serializable; +import org.apache.calcite.linq4j.tree.Expression; +import org.apache.calcite.linq4j.tree.Expressions; +import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.logistic.NamespaceType; -import org.polypheny.db.catalog.entity.CatalogEntity; public class PhysicalGraph extends CatalogEntity implements Physical { @@ -33,4 +36,10 @@ public Serializable[] getParameterArray() { return new Serializable[0]; } + + @Override + public Expression 
asExpression() { + return Expressions.call( Catalog.CATALOG_EXPRESSION, "getPhysicalGraph", Expressions.constant( id ) ); + } + } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalTable.java b/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalTable.java index 32a15664dd..1159556069 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalTable.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalTable.java @@ -19,17 +19,19 @@ import com.google.common.collect.ImmutableList; import java.io.Serializable; import java.util.List; +import org.apache.calcite.linq4j.tree.Expression; +import org.apache.calcite.linq4j.tree.Expressions; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; import org.polypheny.db.algebra.type.AlgDataTypeImpl; import org.polypheny.db.algebra.type.AlgDataTypeSystem; import org.polypheny.db.algebra.type.AlgProtoDataType; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.logistic.EntityType; -import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.catalog.entity.CatalogColumn; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; import org.polypheny.db.catalog.entity.CatalogEntity; +import org.polypheny.db.catalog.logistic.EntityType; +import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.type.PolyTypeFactoryImpl; public class PhysicalTable extends CatalogEntity implements Physical { @@ -67,4 +69,10 @@ public Serializable[] getParameterArray() { return new Serializable[0]; } + + @Override + public Expression asExpression() { + return Expressions.call( Catalog.CATALOG_EXPRESSION, "getPhysicalTable", Expressions.constant( id ) ); + } + } diff --git a/core/src/main/java/org/polypheny/db/interpreter/ScanNode.java b/core/src/main/java/org/polypheny/db/interpreter/ScanNode.java index 291463d4cc..f37ebd9f5a 100644 --- a/core/src/main/java/org/polypheny/db/interpreter/ScanNode.java +++ b/core/src/main/java/org/polypheny/db/interpreter/ScanNode.java @@ -53,7 +53,6 @@ import org.polypheny.db.algebra.type.AlgDataTypeFactory; import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.catalog.entity.logical.LogicalTable; -import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptUtil; import org.polypheny.db.rex.RexNode; import org.polypheny.db.rex.RexUtil; @@ -151,7 +150,7 @@ private static ScanNode createQueryable( Compiler compiler, RelScan alg, Immu return new Row( values ); } ); } else { - rowEnumerable = Schemas.queryable( root, Row.class, List.of( alg.entity.unwrap( LogicalTable.class ).getNamespaceName(), algOptEntity.getCatalogEntity().name ) ); + rowEnumerable = Schemas.queryable( root, Row.class, List.of( alg.entity.unwrap( LogicalTable.class ).getNamespaceName(), alg.getEntity().name ) ); } return createEnumerable( compiler, alg, rowEnumerable, null, filters, projects ); } diff --git a/core/src/main/java/org/polypheny/db/prepare/LixToAlgTranslator.java b/core/src/main/java/org/polypheny/db/prepare/LixToAlgTranslator.java index 8293f5bc9a..16e9ee3f84 100644 --- a/core/src/main/java/org/polypheny/db/prepare/LixToAlgTranslator.java +++ b/core/src/main/java/org/polypheny/db/prepare/LixToAlgTranslator.java @@ -39,12 +39,10 @@ import java.util.List; import org.apache.calcite.linq4j.Queryable; import org.apache.calcite.linq4j.tree.Blocks; -import org.apache.calcite.linq4j.tree.ConstantExpression; import 
org.apache.calcite.linq4j.tree.Expression; import org.apache.calcite.linq4j.tree.FunctionExpression; import org.apache.calcite.linq4j.tree.MethodCallExpression; import org.apache.calcite.linq4j.tree.NewExpression; -import org.apache.calcite.linq4j.tree.Types; import org.polypheny.db.adapter.java.JavaTypeFactory; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.logical.relational.LogicalFilter; @@ -79,11 +77,6 @@ ToAlgContext toAlgContext() { } - public AlgNode translate( Queryable queryable ) { - QueryableAlgBuilder translatorQueryable = new QueryableAlgBuilder<>( this ); - return translatorQueryable.toAlg( queryable ); - } - public AlgNode translate( Expression expression ) { if ( expression instanceof MethodCallExpression ) { @@ -110,18 +103,20 @@ public AlgNode translate( Expression expression ) { case AS_QUERYABLE: return LogicalRelScan.create( cluster, - AlgOptEntityImpl.create( + null + /*AlgOptEntityImpl.create( null, typeFactory.createJavaType( Types.toClass( Types.getElementType( call.targetExpression.getType() ) ) ) - ) ); + )*/ ); case SCHEMA_GET_TABLE: return LogicalRelScan.create( cluster, - AlgOptEntityImpl.create( + null + /*AlgOptEntityImpl.create( null, typeFactory.createJavaType( (Class) ((ConstantExpression) call.expressions.get( 1 )).value ) - ) ); + )*/ ); default: throw new UnsupportedOperationException( "unknown method " + call.method ); diff --git a/core/src/main/java/org/polypheny/db/prepare/QueryableAlgBuilder.java b/core/src/main/java/org/polypheny/db/prepare/QueryableAlgBuilder.java index 33da94fada..f1c712042d 100644 --- a/core/src/main/java/org/polypheny/db/prepare/QueryableAlgBuilder.java +++ b/core/src/main/java/org/polypheny/db/prepare/QueryableAlgBuilder.java @@ -107,9 +107,9 @@ AlgNode toAlg( Queryable queryable ) { final QueryableEntity table = tableQueryable.table; if ( table instanceof TranslatableEntity ) { - return ((TranslatableEntity) table).toAlg( translator.toAlgContext(), algOptTable, translator.cluster.traitSet() ); + return ((TranslatableEntity) table).toAlg( translator.toAlgContext(), null, translator.cluster.traitSet() ); } else { - return LogicalRelScan.create( translator.cluster, algOptTable ); + return LogicalRelScan.create( translator.cluster, null ); } } return translator.translate( queryable.getExpression() ); diff --git a/core/src/main/java/org/polypheny/db/schema/LogicalSchema.java b/core/src/main/java/org/polypheny/db/schema/LogicalSchema.java index 1b8225a824..e1ea954eed 100644 --- a/core/src/main/java/org/polypheny/db/schema/LogicalSchema.java +++ b/core/src/main/java/org/polypheny/db/schema/LogicalSchema.java @@ -23,6 +23,7 @@ import lombok.Getter; import org.apache.calcite.linq4j.tree.Expression; import org.polypheny.db.algebra.type.AlgProtoDataType; +import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.schema.Namespace.Schema; @@ -47,7 +48,7 @@ public LogicalSchema( long id, String schemaName, Map tab @Override - public Entity getEntity( String name ) { + public CatalogEntity getEntity( String name ) { return tableMap.get( name ); } @@ -95,7 +96,7 @@ public Set getSubNamespaceNames() { @Override - public Expression getExpression( SchemaPlus parentSchema, String name ) { + public Expression getExpression( PolyphenyDbSchema parentSchema, String name ) { return Schemas.subSchemaExpression( parentSchema, name, LogicalSchema.class ); } diff --git a/core/src/main/java/org/polypheny/db/schema/Namespace.java b/core/src/main/java/org/polypheny/db/schema/Namespace.java index 
75beb5dc1b..fcf452c8f4 100644 --- a/core/src/main/java/org/polypheny/db/schema/Namespace.java +++ b/core/src/main/java/org/polypheny/db/schema/Namespace.java @@ -38,6 +38,7 @@ import java.util.Set; import org.apache.calcite.linq4j.tree.Expression; import org.polypheny.db.algebra.type.AlgProtoDataType; +import org.polypheny.db.catalog.entity.CatalogEntity; /** @@ -87,7 +88,7 @@ public interface Namespace { * @param name Table name * @return Table, or null */ - Entity getEntity( String name ); + CatalogEntity getEntity( String name ); /** * Returns the names of the tables in this schema. @@ -133,7 +134,7 @@ public interface Namespace { * @param name Name of this schema * @return Expression by which this schema can be referenced in generated code */ - Expression getExpression( SchemaPlus parentSchema, String name ); + Expression getExpression( PolyphenyDbSchema parentSchema, String name ); /** * Returns whether the user is allowed to create new tables, functions and sub-schemas in this schema, in addition to diff --git a/core/src/main/java/org/polypheny/db/schema/PolySchemaBuilder.java b/core/src/main/java/org/polypheny/db/schema/PolySchemaBuilder.java index 99c809344d..8e33f34833 100644 --- a/core/src/main/java/org/polypheny/db/schema/PolySchemaBuilder.java +++ b/core/src/main/java/org/polypheny/db/schema/PolySchemaBuilder.java @@ -26,33 +26,27 @@ import java.util.Map; import java.util.Set; import java.util.stream.Collectors; -import org.apache.calcite.linq4j.tree.Expression; -import org.apache.calcite.linq4j.tree.Expressions; import org.polypheny.db.adapter.Adapter; import org.polypheny.db.adapter.AdapterManager; -import org.polypheny.db.adapter.DataContext; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory.Builder; import org.polypheny.db.algebra.type.AlgDataTypeImpl; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.catalog.entity.CatalogAdapter; -import org.polypheny.db.catalog.entity.LogicalCollection; import org.polypheny.db.catalog.entity.CatalogCollectionPlacement; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; import org.polypheny.db.catalog.entity.CatalogDatabase; import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.catalog.entity.CatalogEntityPlacement; -import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.catalog.entity.CatalogGraphPlacement; import org.polypheny.db.catalog.entity.CatalogKey.EnforcementTime; import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; import org.polypheny.db.catalog.entity.CatalogSchema; +import org.polypheny.db.catalog.entity.LogicalCollection; +import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.config.RuntimeConfig; -import org.polypheny.db.schema.Namespace.Schema; -import org.polypheny.db.schema.impl.AbstractNamespace; -import org.polypheny.db.util.BuiltInMethod; import org.polypheny.db.util.Pair; import org.polypheny.db.util.Triple; @@ -256,7 +250,7 @@ private Map, CatalogEntityPlacement> buildPhysicalTable } - private void buildView( Map tableMap, SchemaPlus s, LogicalTable catalogTable, List columnNames, Builder fieldInfo, List columnIds ) { + private void buildView( Map tableMap, PolyphenyDbSchema s, LogicalTable catalogTable, List columnNames, Builder fieldInfo, List columnIds ) { 
LogicalRelView view = new LogicalRelView( catalogTable.id, catalogTable.getNamespaceName(), @@ -264,7 +258,7 @@ private void buildView( Map tableMap, SchemaPlus s, Logic columnIds, columnNames, AlgDataTypeImpl.proto( fieldInfo.build() ) ); - s.add( catalogTable.name, view ); + //s.add( catalogTable.name, view ); tableMap.put( catalogTable.name, view ); } @@ -303,7 +297,7 @@ private void buildEntity( Catalog catalog, CatalogSchema catalogSchema, Map type ) { // (Type) schemaExpression.getSubSchema("name") final Expression schemaExpression = expression( schema ); Expression call = @@ -137,7 +137,7 @@ public static Expression unwrap( Expression call, Class type ) { /** * Returns the expression to access a table within a schema. */ - public static Expression tableExpression( SchemaPlus schema, Type elementType, String tableName, Class clazz ) { + public static Expression tableExpression( PolyphenyDbSchema schema, Type elementType, String tableName, Class clazz ) { final MethodCallExpression expression; if ( Entity.class.isAssignableFrom( clazz ) ) { expression = Expressions.call( diff --git a/core/src/main/java/org/polypheny/db/schema/impl/AbstractNamespace.java b/core/src/main/java/org/polypheny/db/schema/impl/AbstractNamespace.java index 659db3a713..3be40ad7d2 100644 --- a/core/src/main/java/org/polypheny/db/schema/impl/AbstractNamespace.java +++ b/core/src/main/java/org/polypheny/db/schema/impl/AbstractNamespace.java @@ -43,10 +43,10 @@ import lombok.Getter; import org.apache.calcite.linq4j.tree.Expression; import org.polypheny.db.algebra.type.AlgProtoDataType; -import org.polypheny.db.schema.Entity; +import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.schema.Function; import org.polypheny.db.schema.Namespace; -import org.polypheny.db.schema.SchemaPlus; +import org.polypheny.db.schema.PolyphenyDbSchema; import org.polypheny.db.schema.SchemaVersion; import org.polypheny.db.schema.Schemas; @@ -87,7 +87,7 @@ public Namespace snapshot( SchemaVersion version ) { @Override - public Expression getExpression( SchemaPlus parentSchema, String name ) { + public Expression getExpression( PolyphenyDbSchema parentSchema, String name ) { return Schemas.subSchemaExpression( parentSchema, name, getClass() ); } @@ -101,7 +101,7 @@ public Expression getExpression( SchemaPlus parentSchema, String name ) { * * @return Map of tables in this schema by name */ - protected Map getTableMap() { + protected Map getTableMap() { return ImmutableMap.of(); } @@ -113,7 +113,7 @@ public final Set getEntityNames() { @Override - public final Entity getEntity( String name ) { + public final CatalogEntity getEntity( String name ) { return getTableMap().get( name ); } diff --git a/core/src/main/java/org/polypheny/db/schema/impl/AbstractTableQueryable.java b/core/src/main/java/org/polypheny/db/schema/impl/AbstractTableQueryable.java index d5b00b5cfe..c8fc0b3e20 100644 --- a/core/src/main/java/org/polypheny/db/schema/impl/AbstractTableQueryable.java +++ b/core/src/main/java/org/polypheny/db/schema/impl/AbstractTableQueryable.java @@ -43,8 +43,8 @@ import org.apache.calcite.linq4j.tree.Expression; import org.polypheny.db.adapter.DataContext; import org.polypheny.db.adapter.java.AbstractQueryableEntity; +import org.polypheny.db.schema.PolyphenyDbSchema; import org.polypheny.db.schema.QueryableEntity; -import org.polypheny.db.schema.SchemaPlus; /** @@ -57,12 +57,12 @@ public abstract class AbstractTableQueryable extends AbstractQueryable { public final DataContext dataContext; - public final SchemaPlus 
schema; + public final PolyphenyDbSchema schema; public final QueryableEntity table; public final String tableName; - public AbstractTableQueryable( DataContext dataContext, SchemaPlus schema, QueryableEntity table, String tableName ) { + public AbstractTableQueryable( DataContext dataContext, PolyphenyDbSchema schema, QueryableEntity table, String tableName ) { this.dataContext = dataContext; this.schema = schema; this.table = table; diff --git a/core/src/main/java/org/polypheny/db/schema/impl/DelegatingNamespace.java b/core/src/main/java/org/polypheny/db/schema/impl/DelegatingNamespace.java index bc23b9c4c9..3afdfc4171 100644 --- a/core/src/main/java/org/polypheny/db/schema/impl/DelegatingNamespace.java +++ b/core/src/main/java/org/polypheny/db/schema/impl/DelegatingNamespace.java @@ -38,10 +38,10 @@ import java.util.Set; import org.apache.calcite.linq4j.tree.Expression; import org.polypheny.db.algebra.type.AlgProtoDataType; -import org.polypheny.db.schema.Entity; +import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.schema.Function; import org.polypheny.db.schema.Namespace; -import org.polypheny.db.schema.SchemaPlus; +import org.polypheny.db.schema.PolyphenyDbSchema; import org.polypheny.db.schema.SchemaVersion; @@ -82,13 +82,13 @@ public Namespace snapshot( SchemaVersion version ) { @Override - public Expression getExpression( SchemaPlus parentSchema, String name ) { + public Expression getExpression( PolyphenyDbSchema parentSchema, String name ) { return namespace.getExpression( parentSchema, name ); } @Override - public Entity getEntity( String name ) { + public CatalogEntity getEntity( String name ) { return namespace.getEntity( name ); } diff --git a/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java b/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java index 20b1a5540e..b7314bd217 100644 --- a/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java +++ b/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java @@ -104,12 +104,12 @@ import org.polypheny.db.algebra.type.StructKind; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogEntity; +import org.polypheny.db.catalog.entity.LogicalCollection; import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.languages.OperatorRegistry; import org.polypheny.db.languages.QueryLanguage; import org.polypheny.db.nodes.Operator; import org.polypheny.db.plan.AlgOptCluster; -import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptPredicateList; import org.polypheny.db.plan.AlgOptSchema; import org.polypheny.db.plan.AlgOptUtil; @@ -290,7 +290,7 @@ public Void apply( AlgOptCluster cluster, PolyphenyDbSchema rootSchema ) { public static AlgBuilder create( Statement statement ) { final RexBuilder rexBuilder = new RexBuilder( statement.getTransaction().getTypeFactory() ); - final AlgOptCluster cluster = AlgOptCluster.create( statement.getQueryProcessor().getPlanner(), rexBuilder, traitSet, rootSchema ); + final AlgOptCluster cluster = AlgOptCluster.create( statement.getQueryProcessor().getPlanner(), rexBuilder, null, statement.getDataContext().getRootSchema() ); return create( statement, cluster ); } @@ -1361,7 +1361,7 @@ public AlgBuilder scan( String... 
tableNames ) { } - public AlgBuilder documentScan( AlgOptEntity collection ) { + public AlgBuilder documentScan( LogicalCollection collection ) { stack.add( new Frame( new LogicalDocumentScan( cluster, cluster.traitSet().replace( ModelTrait.DOCUMENT ), collection ) ) ); return this; } diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/QueryResult.java b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/QueryResult.java index 30676e5a98..ef651c76f6 100644 --- a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/QueryResult.java +++ b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/QueryResult.java @@ -29,15 +29,6 @@ @Slf4j class QueryResult { - @Getter - private String schema; - - @Getter - private String table; - - @Getter - private String column; - @Getter private final long schemaId; @@ -45,7 +36,7 @@ class QueryResult { private final long tableId; @Getter - private final Long columnId; + private final long columnId; @Getter private final PolyType type; @@ -59,10 +50,10 @@ class QueryResult { Catalog catalog = Catalog.getInstance(); if ( catalog.checkIfExistsEntity( tableId ) ) { - this.schema = catalog.getSchema( schemaId ).name; - this.table = catalog.getTable( tableId ).name; + this.schema = catalog.getSchema( schemaId ).id; + this.tableId = catalog.getTable( tableId ).id; if ( columnId != null ) { - this.column = catalog.getColumn( columnId ).name; + this.column = catalog.getColumn( columnId ).id; } } } diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticsManagerImpl.java b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticsManagerImpl.java index f7004f88c4..deeb7b0dab 100644 --- a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticsManagerImpl.java +++ b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticsManagerImpl.java @@ -53,7 +53,6 @@ import org.polypheny.db.algebra.operators.OperatorName; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.entity.CatalogColumn; import org.polypheny.db.catalog.entity.CatalogSchema; import org.polypheny.db.catalog.entity.logical.LogicalTable; @@ -61,6 +60,7 @@ import org.polypheny.db.catalog.exceptions.UnknownDatabaseException; import org.polypheny.db.catalog.exceptions.UnknownSchemaException; import org.polypheny.db.catalog.exceptions.UnknownUserException; +import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.config.Config; import org.polypheny.db.config.Config.ConfigListener; import org.polypheny.db.config.RuntimeConfig; @@ -72,7 +72,6 @@ import org.polypheny.db.information.InformationTable; import org.polypheny.db.languages.OperatorRegistry; import org.polypheny.db.plan.AlgOptCluster; -import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.prepare.PolyphenyDbCatalogReader; import org.polypheny.db.prepare.Prepare.CatalogReader; import org.polypheny.db.rex.RexBuilder; @@ -338,10 +337,6 @@ private synchronized void replaceStatistics( Map reevaluateColumn( QueryResult column ) { - if ( !Catalog.getInstance().checkIfExistsEntity( column.getTableId() ) - && !Catalog.getInstance().checkIfExistsColumn( column.getTableId(), column.getColumn() ) ) { - return null; - } if ( column.getType().getFamily() == PolyTypeFamily.NUMERIC ) { return this.reevaluateNumericalColumn( column ); @@ -536,10 +531,10 @@ private AlgNode 
getQueryNode( QueryResult queryResult, NodeType nodeType ) { PolyphenyDbCatalogReader reader = statement.getTransaction().getCatalogReader(); AlgBuilder relBuilder = AlgBuilder.create( statement ); final RexBuilder rexBuilder = relBuilder.getRexBuilder(); - final AlgOptCluster cluster = AlgOptCluster.create( statement.getQueryProcessor().getPlanner(), rexBuilder, traitSet, rootSchema ); + final AlgOptCluster cluster = AlgOptCluster.create( statement.getQueryProcessor().getPlanner(), rexBuilder, null, statement.getDataContext().getRootSchema() ); AlgNode queryNode; - LogicalRelScan tableScan = getLogicalScan( queryResult.getSchema(), queryResult.getTable(), reader, cluster ); + LogicalRelScan tableScan = getLogicalScan( queryResult.getTableId(), reader, cluster ); switch ( nodeType ) { case MIN: case MAX: @@ -564,18 +559,17 @@ private AlgNode getQueryNode( QueryResult queryResult, NodeType nodeType ) { /** * Gets a tableScan for a given table. */ - private LogicalRelScan getLogicalScan( String schema, String table, CatalogReader reader, AlgOptCluster cluster ) { - AlgOptEntity relOptTable = reader.getTable( Arrays.asList( schema, table ) ); - return LogicalRelScan.create( cluster, relOptTable ); + private LogicalRelScan getLogicalScan( long tableId, CatalogReader reader, AlgOptCluster cluster ) { + return LogicalRelScan.create( cluster, reader.getRootSchema().getTable( tableId ) ); } /** * Queries the database with an aggregate query, to get the min value or max value. */ - private AlgNode getAggregateColumn( QueryResult queryResult, NodeType nodeType, RelScan tableScan, RexBuilder rexBuilder, AlgOptCluster cluster ) { + private AlgNode getAggregateColumn( QueryResult queryResult, NodeType nodeType, RelScan tableScan, RexBuilder rexBuilder, AlgOptCluster cluster ) { for ( int i = 0; i < tableScan.getRowType().getFieldNames().size(); i++ ) { - if ( queryResult.getColumn() != null && tableScan.getRowType().getFieldNames().get( i ).equals( queryResult.getColumn() ) ) { + if ( tableScan.getRowType().getFieldNames().get( i ).equals( queryResult.getColumnId() ) ) { LogicalProject logicalProject = LogicalProject.create( tableScan, Collections.singletonList( rexBuilder.makeInputRef( tableScan, i ) ), diff --git a/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/CottontailSchema.java b/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/CottontailSchema.java index 60c4178ac9..020a7f2da9 100644 --- a/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/CottontailSchema.java +++ b/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/CottontailSchema.java @@ -28,6 +28,7 @@ import org.polypheny.db.schema.Entity; import org.polypheny.db.schema.Namespace; import org.polypheny.db.schema.Namespace.Schema; +import org.polypheny.db.schema.PolyphenyDbSchema; import org.polypheny.db.schema.SchemaPlus; import org.polypheny.db.schema.SchemaVersion; import org.polypheny.db.schema.Schemas; @@ -128,7 +129,7 @@ public Namespace snapshot( SchemaVersion version ) { @Override - public Expression getExpression( SchemaPlus parentSchema, String name ) { + public Expression getExpression( PolyphenyDbSchema parentSchema, String name ) { return Schemas.subSchemaExpression( parentSchema, name, CottontailSchema.class ); } diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcSchema.java 
b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcSchema.java index 6b649bcdc4..be72c59cc9 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcSchema.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcSchema.java @@ -67,6 +67,7 @@ import org.polypheny.db.schema.Function; import org.polypheny.db.schema.Namespace; import org.polypheny.db.schema.Namespace.Schema; +import org.polypheny.db.schema.PolyphenyDbSchema; import org.polypheny.db.schema.SchemaPlus; import org.polypheny.db.schema.SchemaVersion; import org.polypheny.db.schema.Schemas; @@ -238,7 +239,7 @@ public ConnectionHandler getConnectionHandler( DataContext dataContext ) { @Override - public Expression getExpression( SchemaPlus parentSchema, String name ) { + public Expression getExpression( PolyphenyDbSchema parentSchema, String name ) { return Schemas.subSchemaExpression( parentSchema, name, JdbcSchema.class ); } diff --git a/plugins/jdbc-adapter-framework/src/test/java/org/polypheny/db/adapter/jdbc/rel2sql/RelToSqlConverterStructsTest.java b/plugins/jdbc-adapter-framework/src/test/java/org/polypheny/db/adapter/jdbc/rel2sql/RelToSqlConverterStructsTest.java index 93de573419..981e996dba 100644 --- a/plugins/jdbc-adapter-framework/src/test/java/org/polypheny/db/adapter/jdbc/rel2sql/RelToSqlConverterStructsTest.java +++ b/plugins/jdbc-adapter-framework/src/test/java/org/polypheny/db/adapter/jdbc/rel2sql/RelToSqlConverterStructsTest.java @@ -54,6 +54,7 @@ import org.polypheny.db.schema.Entity; import org.polypheny.db.schema.Function; import org.polypheny.db.schema.Namespace; +import org.polypheny.db.schema.PolyphenyDbSchema; import org.polypheny.db.schema.SchemaPlus; import org.polypheny.db.schema.SchemaVersion; import org.polypheny.db.schema.Statistic; @@ -118,7 +119,7 @@ public Set getSubNamespaceNames() { @Override - public Expression getExpression( SchemaPlus parentSchema, String name ) { + public Expression getExpression( PolyphenyDbSchema parentSchema, String name ) { return null; } From cf84e9c96741cdedfecf6d6be606df9c4dfd55d8 Mon Sep 17 00:00:00 2001 From: datomo Date: Sun, 26 Feb 2023 23:48:11 +0100 Subject: [PATCH 025/436] changes to cottontail and sqlValidator --- .../org/polypheny/db/adapter/Adapter.java | 7 +- .../entity/allocation/AllocationTable.java | 5 + .../catalog/entity/logical/LogicalTable.java | 6 ++ .../entity/physical/PhysicalTable.java | 3 +- .../db/catalog/refactor/QueryableEntity.java | 4 +- .../db/plan/AlgOptAbstractEntity.java | 5 +- .../org/polypheny/db/plan/AlgOptCluster.java | 2 +- .../db/prepare/AlgOptEntityImpl.java | 10 +- .../org/polypheny/db/prepare/Prepare.java | 5 +- .../db/util/InitializerExpressionFactory.java | 5 +- .../NullInitializerExpressionFactory.java | 6 +- .../polypheny/db/catalog/CountingFactory.java | 6 +- .../EmpInitializerExpressionFactory.java | 6 +- .../db/processing/AbstractQueryProcessor.java | 14 +-- .../db/processing/DataMigratorImpl.java | 10 +- .../shuttles/QueryParameterizer.java | 6 +- .../db/routing/routers/AbstractDqlRouter.java | 15 ++- .../db/routing/routers/BaseRouter.java | 13 ++- .../db/routing/routers/CachedPlanRouter.java | 4 +- .../db/transaction/EntityAccessMap.java | 36 +++----- .../db/view/MaterializedViewManagerImpl.java | 6 +- .../statistics/AlphabeticStatisticColumn.java | 2 +- .../statistics/NumericalStatisticColumn.java | 2 +- .../db/monitoring/statistics/QueryResult.java | 36 ++------ .../statistics/StatisticQueryProcessor.java | 6 +- 
.../statistics/StatisticQueryResult.java | 2 +- .../statistics/StatisticsManagerImpl.java | 36 ++++---- .../statistics/TemporalStatisticColumn.java | 4 +- .../adapter/cottontail/CottontailEntity.java | 91 +++++++++---------- .../adapter/cottontail/CottontailPlugin.java | 45 +-------- .../adapter/cottontail/CottontailSchema.java | 6 +- .../cottontail/algebra/CottontailAlg.java | 3 +- .../cottontail/algebra/CottontailScan.java | 9 +- .../algebra/CottontailTableModify.java | 21 ++--- .../CottontailTableModificationRule.java | 6 +- .../polypheny/db/adapter/csv/CsvSource.java | 3 +- .../polypheny/db/adapter/file/FilePlugin.java | 3 +- .../polypheny/db/adapter/file/source/Qfs.java | 3 +- .../db/hsqldb/stores/HsqldbStore.java | 5 +- .../monetdb/sources/MonetdbSource.java | 4 +- .../adapter/monetdb/stores/MonetdbStore.java | 3 +- .../db/adapter/mongodb/MongoPlugin.java | 7 +- .../db/adapter/jdbc/MysqlSourcePlugin.java | 4 +- .../db/adapter/neo4j/Neo4jPlugin.java | 6 +- .../postgres/source/PostgresqlSource.java | 4 +- .../postgres/store/PostgresqlStore.java | 3 +- .../src/main/codegen/templates/Parser.jj | 2 +- .../polypheny/db/sql/SqlProcessorImpl.java | 4 +- .../language/validate/SqlValidatorImpl.java | 8 +- .../language/validate/SqlValidatorUtil.java | 13 ++- .../db/sql/sql2alg/SqlToAlgConverter.java | 62 ++++++------- 51 files changed, 257 insertions(+), 320 deletions(-) diff --git a/core/src/main/java/org/polypheny/db/adapter/Adapter.java b/core/src/main/java/org/polypheny/db/adapter/Adapter.java index 191d65561e..1099f83a8b 100644 --- a/core/src/main/java/org/polypheny/db/adapter/Adapter.java +++ b/core/src/main/java/org/polypheny/db/adapter/Adapter.java @@ -47,13 +47,14 @@ import lombok.experimental.Accessors; import org.polypheny.db.adapter.DeployMode.DeploySetting; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.logistic.NamespaceType; -import org.polypheny.db.catalog.entity.LogicalCollection; import org.polypheny.db.catalog.entity.CatalogCollectionPlacement; import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; +import org.polypheny.db.catalog.entity.LogicalCollection; +import org.polypheny.db.catalog.entity.allocation.AllocationTable; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.entity.physical.PhysicalGraph; import org.polypheny.db.catalog.entity.physical.PhysicalTable; +import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.config.Config; import org.polypheny.db.config.Config.ConfigListener; import org.polypheny.db.config.ConfigDocker; @@ -323,7 +324,7 @@ public Adapter( int adapterId, String uniqueName, Map settings ) public abstract void createNewSchema( SchemaPlus rootSchema, String name, Long id ); - public abstract PhysicalTable createTableSchema( PhysicalTable boilerplate ); + public abstract PhysicalTable createTableSchema( LogicalTable logical, AllocationTable allocationTable ); public abstract Namespace getCurrentSchema(); diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationTable.java b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationTable.java index a7b90dcb59..a7e0eeed77 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationTable.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationTable.java @@ -17,14 +17,19 @@ package org.polypheny.db.catalog.entity.allocation; import java.io.Serializable; +import java.util.List; import 
org.apache.calcite.linq4j.tree.Expression; import org.apache.calcite.linq4j.tree.Expressions; import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.entity.CatalogColumnPlacement; import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.catalog.entity.logical.LogicalGraph; public class AllocationTable extends CatalogEntity implements Allocation { + public List placements; + + protected AllocationTable( long id, LogicalGraph graph ) { super( id, graph.name, graph.entityType, graph.namespaceType ); } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalTable.java b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalTable.java index a6c37e4b75..9903063ddc 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalTable.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalTable.java @@ -33,6 +33,7 @@ import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.partition.properties.PartitionProperty; +import org.polypheny.db.schema.ColumnStrategy; @EqualsAndHashCode(callSuper = false) @@ -270,6 +271,11 @@ public Expression asExpression() { } + public List getColumnStrategies() { + return null; + } + + @RequiredArgsConstructor public static class PrimitiveCatalogTable { diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalTable.java b/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalTable.java index 1159556069..6aa123690b 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalTable.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalTable.java @@ -30,6 +30,7 @@ import org.polypheny.db.catalog.entity.CatalogColumn; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; import org.polypheny.db.catalog.entity.CatalogEntity; +import org.polypheny.db.catalog.entity.allocation.AllocationTable; import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.type.PolyTypeFactoryImpl; @@ -45,7 +46,7 @@ protected PhysicalTable( long id, String name, EntityType type, NamespaceType na } - public PhysicalTable( PhysicalTable table ) { + public PhysicalTable( AllocationTable table ) { this( table.id, table.name, table.entityType, table.namespaceType, table.placements ); } diff --git a/core/src/main/java/org/polypheny/db/catalog/refactor/QueryableEntity.java b/core/src/main/java/org/polypheny/db/catalog/refactor/QueryableEntity.java index e16b44de0f..0ea37177d0 100644 --- a/core/src/main/java/org/polypheny/db/catalog/refactor/QueryableEntity.java +++ b/core/src/main/java/org/polypheny/db/catalog/refactor/QueryableEntity.java @@ -19,14 +19,14 @@ import java.lang.reflect.Type; import org.apache.calcite.linq4j.Queryable; import org.polypheny.db.adapter.DataContext; -import org.polypheny.db.schema.graph.QueryableGraph; +import org.polypheny.db.schema.PolyphenyDbSchema; public interface QueryableEntity { /** * Converts this table into a {@link Queryable}. 
*/ - Queryable asQueryable( DataContext root, QueryableGraph graph ); + Queryable asQueryable( DataContext dataContext, PolyphenyDbSchema schema, long entityId ); Type getElementType(); diff --git a/core/src/main/java/org/polypheny/db/plan/AlgOptAbstractEntity.java b/core/src/main/java/org/polypheny/db/plan/AlgOptAbstractEntity.java index 416477423f..8d75c9ee9d 100644 --- a/core/src/main/java/org/polypheny/db/plan/AlgOptAbstractEntity.java +++ b/core/src/main/java/org/polypheny/db/plan/AlgOptAbstractEntity.java @@ -42,10 +42,8 @@ import org.polypheny.db.algebra.AlgDistributions; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.AlgReferentialConstraint; -import org.polypheny.db.algebra.logical.relational.LogicalRelScan; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeField; -import org.polypheny.db.prepare.AlgOptEntityImpl; import org.polypheny.db.schema.ColumnStrategy; import org.polypheny.db.util.ImmutableBitSet; @@ -145,7 +143,8 @@ public AlgOptEntity extend( List extendedFields ) { @Override public List getColumnStrategies() { - return AlgOptEntityImpl.columnStrategies( this ); + return null; + //return AlgOptEntityImpl.columnStrategies( this ); } } diff --git a/core/src/main/java/org/polypheny/db/plan/AlgOptCluster.java b/core/src/main/java/org/polypheny/db/plan/AlgOptCluster.java index 059156d780..9937be6974 100644 --- a/core/src/main/java/org/polypheny/db/plan/AlgOptCluster.java +++ b/core/src/main/java/org/polypheny/db/plan/AlgOptCluster.java @@ -89,7 +89,7 @@ private AlgOptCluster( AlgOptPlanner planner, AlgDataTypeFactory typeFactory, Re // set up a default alg metadata provider, giving the planner first crack at everything setMetadataProvider( DefaultAlgMetadataProvider.INSTANCE ); - this.emptyTraitSet = traitSet; + this.emptyTraitSet = traitSet == null ? AlgTraitSet.createEmpty() : traitSet; assert emptyTraitSet.size() == planner.getAlgTraitDefs().size(); this.rootSchema = rootSchema; } diff --git a/core/src/main/java/org/polypheny/db/prepare/AlgOptEntityImpl.java b/core/src/main/java/org/polypheny/db/prepare/AlgOptEntityImpl.java index 162f9c4ddc..bf2cd7fa1a 100644 --- a/core/src/main/java/org/polypheny/db/prepare/AlgOptEntityImpl.java +++ b/core/src/main/java/org/polypheny/db/prepare/AlgOptEntityImpl.java @@ -43,7 +43,6 @@ import org.apache.calcite.linq4j.tree.Expression; import org.apache.calcite.linq4j.tree.Expressions; import org.polypheny.db.StatisticsManager; -import org.polypheny.db.adapter.enumerable.EnumerableScan; import org.polypheny.db.algebra.AlgCollation; import org.polypheny.db.algebra.AlgDistribution; import org.polypheny.db.algebra.AlgDistributionTraitDef; @@ -52,7 +51,6 @@ import org.polypheny.db.algebra.AlgReferentialConstraint; import org.polypheny.db.algebra.constant.Modality; import org.polypheny.db.algebra.constant.Monotonicity; -import org.polypheny.db.algebra.logical.relational.LogicalRelScan; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; import org.polypheny.db.algebra.type.AlgDataTypeFactoryImpl; @@ -385,7 +383,7 @@ public AccessType getAllowedAccess() { /** * Helper for {@link #getColumnStrategies()}. 
*/ - public static List columnStrategies( final AlgOptEntity table ) { + public static List columnStrategies( final CatalogEntity table ) { final int fieldCount = table.getRowType().getFieldCount(); final InitializerExpressionFactory ief = Util.first( table.unwrap( InitializerExpressionFactory.class ), @@ -409,8 +407,8 @@ public ColumnStrategy get( int index ) { * Converts the ordinal of a field into the ordinal of a stored field. * That is, it subtracts the number of virtual fields that come before it. */ - public static int realOrdinal( final AlgOptEntity table, int i ) { - List strategies = table.getColumnStrategies(); + public static int realOrdinal( final CatalogEntity table, int i ) { + List strategies = table.unwrap( LogicalTable.class ).getColumnStrategies(); int n = 0; for ( int j = 0; j < i; j++ ) { switch ( strategies.get( j ) ) { @@ -426,7 +424,7 @@ public static int realOrdinal( final AlgOptEntity table, int i ) { * Returns the row type of a table after any {@link ColumnStrategy#VIRTUAL} columns have been removed. This is the type * of the records that are actually stored. */ - public static AlgDataType realRowType( AlgOptEntity table ) { + public static AlgDataType realRowType( CatalogEntity table ) { final AlgDataType rowType = table.getRowType(); final List strategies = columnStrategies( table ); if ( !strategies.contains( ColumnStrategy.VIRTUAL ) ) { diff --git a/core/src/main/java/org/polypheny/db/prepare/Prepare.java b/core/src/main/java/org/polypheny/db/prepare/Prepare.java index 81920d9f31..33c1cebf3b 100644 --- a/core/src/main/java/org/polypheny/db/prepare/Prepare.java +++ b/core/src/main/java/org/polypheny/db/prepare/Prepare.java @@ -48,8 +48,8 @@ import org.polypheny.db.algebra.constant.ExplainFormat; import org.polypheny.db.algebra.constant.ExplainLevel; import org.polypheny.db.algebra.constant.Kind; -import org.polypheny.db.algebra.core.relational.RelScan; import org.polypheny.db.algebra.core.common.Modify; +import org.polypheny.db.algebra.core.relational.RelScan; import org.polypheny.db.algebra.logical.relational.LogicalRelModify; import org.polypheny.db.algebra.operators.OperatorTable; import org.polypheny.db.algebra.type.AlgDataType; @@ -293,7 +293,8 @@ public final AlgOptEntity extend( List extendedFields ) { @Override public List getColumnStrategies() { - return AlgOptEntityImpl.columnStrategies( AbstractPreparingEntity.this ); + return null; + //return AlgOptEntityImpl.columnStrategies( AbstractPreparingEntity.this ); } } diff --git a/core/src/main/java/org/polypheny/db/util/InitializerExpressionFactory.java b/core/src/main/java/org/polypheny/db/util/InitializerExpressionFactory.java index 296f380343..df2c80260e 100644 --- a/core/src/main/java/org/polypheny/db/util/InitializerExpressionFactory.java +++ b/core/src/main/java/org/polypheny/db/util/InitializerExpressionFactory.java @@ -19,6 +19,7 @@ import java.util.List; import org.polypheny.db.algebra.type.AlgDataType; +import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.nodes.Operator; import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.rex.RexNode; @@ -38,7 +39,7 @@ public interface InitializerExpressionFactory { * @return generation strategy, never null * @see AlgOptEntity#getColumnStrategies() */ - ColumnStrategy generationStrategy( AlgOptEntity table, int iColumn ); + ColumnStrategy generationStrategy( CatalogEntity table, int iColumn ); /** * Creates an expression which evaluates to the default value for a particular column. 
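The strategy helpers above now take the CatalogEntity itself, and realOrdinal uses the resulting strategy list to map a logical field ordinal to a stored one by skipping VIRTUAL columns, which exist only in the row type and are never stored. A self-contained sketch of that mapping over a plain strategy list (ColumnStrategy is the enum from org.polypheny.db.schema used throughout these hunks):

    import java.util.List;
    import org.polypheny.db.schema.ColumnStrategy;

    final class Ordinals {

        // Logical ordinal -> stored ordinal: subtract the number of VIRTUAL
        // columns that precede position i, since they have no stored field.
        static int realOrdinal( List<ColumnStrategy> strategies, int i ) {
            int virtualBefore = 0;
            for ( int j = 0; j < i; j++ ) {
                if ( strategies.get( j ) == ColumnStrategy.VIRTUAL ) {
                    virtualBefore++;
                }
            }
            return i - virtualBefore;
        }
    }

For example, with strategies [NOT_NULLABLE, VIRTUAL, NULLABLE], logical ordinal 2 maps to stored ordinal 1.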
@@ -48,7 +49,7 @@ public interface InitializerExpressionFactory { * @param context Context for creating the expression * @return default value expression */ - RexNode newColumnDefaultValue( AlgOptEntity table, int iColumn, InitializerContext context ); + RexNode newColumnDefaultValue( CatalogEntity table, int iColumn, InitializerContext context ); /** * Creates an expression which evaluates to the initializer expression for a particular attribute of a structured type. diff --git a/core/src/main/java/org/polypheny/db/util/NullInitializerExpressionFactory.java b/core/src/main/java/org/polypheny/db/util/NullInitializerExpressionFactory.java index 2626a1d0c7..50520663ca 100644 --- a/core/src/main/java/org/polypheny/db/util/NullInitializerExpressionFactory.java +++ b/core/src/main/java/org/polypheny/db/util/NullInitializerExpressionFactory.java @@ -19,8 +19,8 @@ import java.util.List; import org.polypheny.db.algebra.type.AlgDataType; +import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.nodes.Operator; -import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.rex.RexNode; import org.polypheny.db.schema.ColumnStrategy; @@ -38,7 +38,7 @@ public NullInitializerExpressionFactory() { @Override - public ColumnStrategy generationStrategy( AlgOptEntity table, int iColumn ) { + public ColumnStrategy generationStrategy( CatalogEntity table, int iColumn ) { return table.getRowType().getFieldList().get( iColumn ).getType().isNullable() ? ColumnStrategy.NULLABLE : ColumnStrategy.NOT_NULLABLE; @@ -46,7 +46,7 @@ public ColumnStrategy generationStrategy( AlgOptEntity table, int iColumn ) { @Override - public RexNode newColumnDefaultValue( AlgOptEntity table, int iColumn, InitializerContext context ) { + public RexNode newColumnDefaultValue( CatalogEntity table, int iColumn, InitializerContext context ) { return context.getRexBuilder().constantNull(); } diff --git a/core/src/test/java/org/polypheny/db/catalog/CountingFactory.java b/core/src/test/java/org/polypheny/db/catalog/CountingFactory.java index 932c475d32..7b1076f9bd 100644 --- a/core/src/test/java/org/polypheny/db/catalog/CountingFactory.java +++ b/core/src/test/java/org/polypheny/db/catalog/CountingFactory.java @@ -23,8 +23,8 @@ import java.util.concurrent.atomic.AtomicInteger; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeField; +import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.nodes.Operator; -import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.rex.RexBuilder; import org.polypheny.db.rex.RexNode; import org.polypheny.db.schema.ColumnStrategy; @@ -51,7 +51,7 @@ public class CountingFactory extends NullInitializerExpressionFactory { @Override - public ColumnStrategy generationStrategy( AlgOptEntity table, int iColumn ) { + public ColumnStrategy generationStrategy( CatalogEntity table, int iColumn ) { final AlgDataTypeField field = table.getRowType().getFieldList().get( iColumn ); if ( defaultColumns.contains( field.getName() ) ) { return ColumnStrategy.DEFAULT; @@ -61,7 +61,7 @@ public ColumnStrategy generationStrategy( AlgOptEntity table, int iColumn ) { @Override - public RexNode newColumnDefaultValue( AlgOptEntity table, int iColumn, InitializerContext context ) { + public RexNode newColumnDefaultValue( CatalogEntity table, int iColumn, InitializerContext context ) { THREAD_CALL_COUNT.get().incrementAndGet(); final AlgDataTypeField field = table.getRowType().getFieldList().get( iColumn ); if ( defaultColumns.contains( 
field.getName() ) ) { diff --git a/core/src/test/java/org/polypheny/db/catalog/EmpInitializerExpressionFactory.java b/core/src/test/java/org/polypheny/db/catalog/EmpInitializerExpressionFactory.java index 6195184676..5f6fc8a8e0 100644 --- a/core/src/test/java/org/polypheny/db/catalog/EmpInitializerExpressionFactory.java +++ b/core/src/test/java/org/polypheny/db/catalog/EmpInitializerExpressionFactory.java @@ -19,7 +19,7 @@ import java.math.BigDecimal; import org.polypheny.db.algebra.type.AlgDataTypeFactory; -import org.polypheny.db.plan.AlgOptEntity; +import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.rex.RexBuilder; import org.polypheny.db.rex.RexNode; import org.polypheny.db.schema.ColumnStrategy; @@ -34,7 +34,7 @@ class EmpInitializerExpressionFactory extends NullInitializerExpressionFactory { @Override - public ColumnStrategy generationStrategy( AlgOptEntity table, int iColumn ) { + public ColumnStrategy generationStrategy( CatalogEntity table, int iColumn ) { switch ( iColumn ) { case 0: case 1: @@ -47,7 +47,7 @@ public ColumnStrategy generationStrategy( AlgOptEntity table, int iColumn ) { @Override - public RexNode newColumnDefaultValue( AlgOptEntity table, int iColumn, InitializerContext context ) { + public RexNode newColumnDefaultValue( CatalogEntity table, int iColumn, InitializerContext context ) { final RexBuilder rexBuilder = context.getRexBuilder(); final AlgDataTypeFactory typeFactory = rexBuilder.getTypeFactory(); switch ( iColumn ) { diff --git a/dbms/src/main/java/org/polypheny/db/processing/AbstractQueryProcessor.java b/dbms/src/main/java/org/polypheny/db/processing/AbstractQueryProcessor.java index 2552de2645..d922ac19e1 100644 --- a/dbms/src/main/java/org/polypheny/db/processing/AbstractQueryProcessor.java +++ b/dbms/src/main/java/org/polypheny/db/processing/AbstractQueryProcessor.java @@ -78,9 +78,10 @@ import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.logistic.NamespaceType; +import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.catalog.entity.CatalogSchema; import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.config.RuntimeConfig; import org.polypheny.db.information.InformationCode; import org.polypheny.db.information.InformationGroup; @@ -97,7 +98,6 @@ import org.polypheny.db.plan.AlgOptUtil; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.plan.Convention; -import org.polypheny.db.prepare.AlgOptEntityImpl; import org.polypheny.db.prepare.Prepare.CatalogReader; import org.polypheny.db.prepare.Prepare.PreparedResult; import org.polypheny.db.prepare.Prepare.PreparedResultImpl; @@ -616,7 +616,7 @@ public AlgNode visit( AlgNode node ) { if ( node instanceof LogicalRelModify ) { final Catalog catalog = Catalog.getInstance(); final LogicalRelModify ltm = (LogicalRelModify) node; - final LogicalTable table = ltm.getEntity().getCatalogEntity().unwrap( LogicalTable.class ); + final LogicalTable table = ltm.getEntity().unwrap( LogicalTable.class ); final CatalogSchema schema = catalog.getSchema( table.namespaceId ); final List indices = IndexManager.getInstance().getIndices( schema, table ); @@ -911,7 +911,7 @@ public AlgNode visit( LogicalProject project ) { } // Retrieve the catalog schema and database representations required for index lookup final CatalogSchema schema = 
statement.getTransaction().getDefaultSchema(); - final LogicalTable ctable = scan.getEntity().getCatalogEntity().unwrap( LogicalTable.class ); + final LogicalTable ctable = scan.getEntity().unwrap( LogicalTable.class ); // Retrieve any index and use for simplification final Index idx = IndexManager.getInstance().getIndex( schema, ctable, columns ); if ( idx == null ) { @@ -1306,17 +1306,17 @@ private Map> getAccessedPartitionsPerScan( AlgNode alg, Map< } else { boolean fallback = false; if ( alg.getEntity() != null ) { - AlgOptEntityImpl table = (AlgOptEntityImpl) alg.getEntity(); + CatalogEntity table = alg.getEntity(); int scanId = alg.getId(); - if ( table.getCatalogEntity() == null ) { + if ( table == null ) { // todo dl: remove after RowType refactor return accessedPartitionList; } // Get placements of this table - LogicalTable catalogTable = table.getCatalogEntity().unwrap( LogicalTable.class ); + LogicalTable catalogTable = table.unwrap( LogicalTable.class ); if ( aggregatedPartitionValues.containsKey( scanId ) ) { if ( aggregatedPartitionValues.get( scanId ) != null ) { diff --git a/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java b/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java index 477a1dc814..ad34bb4381 100644 --- a/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java +++ b/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java @@ -51,8 +51,8 @@ import org.polypheny.db.catalog.entity.CatalogAdapter; import org.polypheny.db.catalog.entity.CatalogColumn; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; -import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.catalog.entity.CatalogPrimaryKey; +import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.config.RuntimeConfig; import org.polypheny.db.partition.PartitionManager; @@ -326,7 +326,7 @@ public AlgRoot buildDeleteStatement( Statement statement, List columnNames = new LinkedList<>(); @@ -368,7 +368,7 @@ public AlgRoot buildInsertStatement( Statement statement, List buildSelect( AlgNode node, List handleRelationalOnGraphScan( AlgNode node, Statem node.getTraitSet().replace( ModelTrait.RELATIONAL ), ModelTrait.GRAPH, ModelTrait.RELATIONAL, - logicalTable.getRowType( algBuilder.getTypeFactory() ), false ) ); + logicalTable.getRowType(), false ) ); return builders; } diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java b/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java index ce81aa305b..ec94605d7d 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java @@ -54,26 +54,25 @@ import org.polypheny.db.algebra.type.AlgDataTypeSystem; import org.polypheny.db.algebra.type.AlgRecordType; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.logistic.NamespaceType; -import org.polypheny.db.catalog.logistic.Pattern; import org.polypheny.db.catalog.entity.CatalogAdapter; -import org.polypheny.db.catalog.entity.CatalogEntity; -import org.polypheny.db.catalog.entity.LogicalCollection; import org.polypheny.db.catalog.entity.CatalogCollectionPlacement; import org.polypheny.db.catalog.entity.CatalogColumn; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; +import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.catalog.entity.CatalogGraphMapping; 
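The BaseRouter hunks below replace AlgOptEntity lookups with LogicalCollection resolved through the catalog reader's root schema, keyed by an adapter-qualified namespace name plus a placement-suffixed collection name. A sketch of how such a lookup key could be assembled, following the handleDocumentScan hunk further down; the exact separator produced by PolySchemaBuilder.buildAdapterSchemaName is an assumption:

    import java.util.List;

    final class CollectionKeys {

        // Physical lookup key for a placed collection: the adapter-qualified
        // namespace, and the collection name suffixed with its placement id.
        static List<String> of( String adapterUniqueName, String logicalNamespace, String physicalNamespace, String collectionName, long placementId ) {
            String namespaceName = adapterUniqueName + "_" + logicalNamespace + "_" + physicalNamespace; // assumed separator
            return List.of( namespaceName, collectionName + "_" + placementId );
        }
    }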
import org.polypheny.db.catalog.entity.CatalogGraphPlacement; import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; import org.polypheny.db.catalog.entity.CatalogSchema; +import org.polypheny.db.catalog.entity.LogicalCollection; import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.entity.physical.PhysicalTable; +import org.polypheny.db.catalog.logistic.NamespaceType; +import org.polypheny.db.catalog.logistic.Pattern; import org.polypheny.db.config.RuntimeConfig; import org.polypheny.db.languages.OperatorRegistry; import org.polypheny.db.languages.QueryLanguage; import org.polypheny.db.plan.AlgOptCluster; -import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.prepare.PolyphenyDbCatalogReader; import org.polypheny.db.rex.RexBuilder; @@ -479,7 +478,7 @@ private AlgNode handleGraphOnDocument( LogicalLpgScan alg, CatalogSchema namespa List> scans = collections.stream() .map( t -> { RoutedAlgBuilder algBuilder = RoutedAlgBuilder.create( statement, alg.getCluster() ); - AlgOptEntity collection = statement.getTransaction().getCatalogReader().getCollection( List.of( t.getNamespaceName(), t.name ) ); + LogicalCollection collection = statement.getTransaction().getCatalogReader().getRootSchema().getCollection( List.of( t.getNamespaceName(), t.name ) ); AlgNode scan = algBuilder.documentScan( collection ).build(); routeDocument( algBuilder, (AlgNode & DocumentAlg) scan, statement ); return Pair.of( t.name, algBuilder.build() ); @@ -574,7 +573,7 @@ protected RoutedAlgBuilder handleDocumentScan( DocumentScan alg, Statement st CatalogCollectionPlacement placement = catalog.getCollectionPlacement( collection.id, placementId ); String namespaceName = PolySchemaBuilder.buildAdapterSchemaName( adapter.uniqueName, collection.getNamespaceName(), placement.physicalNamespaceName ); String collectionName = collection.name + "_" + placement.id; - AlgOptEntity collectionTable = reader.getCollection( List.of( namespaceName, collectionName ) ); + LogicalCollection collectionTable = reader.getRootSchema().getCollection( List.of( namespaceName, collectionName ) ); // we might previously have pushed the non-native transformer builder.clear(); return builder.push( LogicalDocumentScan.create( alg.getCluster(), collectionTable ) ); diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/CachedPlanRouter.java b/dbms/src/main/java/org/polypheny/db/routing/routers/CachedPlanRouter.java index 3aeaaea795..b9653e87fe 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/CachedPlanRouter.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/CachedPlanRouter.java @@ -30,7 +30,6 @@ import org.polypheny.db.catalog.entity.CatalogColumnPlacement; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.plan.AlgOptCluster; -import org.polypheny.db.prepare.AlgOptEntityImpl; import org.polypheny.db.routing.LogicalQueryInformation; import org.polypheny.db.routing.dto.CachedProposedRoutingPlan; import org.polypheny.db.tools.RoutedAlgBuilder; @@ -62,8 +61,7 @@ private RoutedAlgBuilder buildCachedSelect( AlgNode node, RoutedAlgBuilder build } if ( node instanceof LogicalRelScan && node.getEntity() != null ) { - AlgOptEntityImpl table = (AlgOptEntityImpl) node.getEntity(); - LogicalTable catalogTable = table.getCatalogEntity().unwrap( LogicalTable.class ); + LogicalTable catalogTable = node.getEntity().unwrap( 
LogicalTable.class ); List partitionIds = catalogTable.partitionProperty.partitionIds; Map> placement = new HashMap<>(); diff --git a/dbms/src/main/java/org/polypheny/db/transaction/EntityAccessMap.java b/dbms/src/main/java/org/polypheny/db/transaction/EntityAccessMap.java index 2e62ae402a..60a8c67461 100644 --- a/dbms/src/main/java/org/polypheny/db/transaction/EntityAccessMap.java +++ b/dbms/src/main/java/org/polypheny/db/transaction/EntityAccessMap.java @@ -33,18 +33,17 @@ import org.jetbrains.annotations.NotNull; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.AlgVisitor; -import org.polypheny.db.algebra.core.relational.RelModify; import org.polypheny.db.algebra.core.document.DocumentAlg; import org.polypheny.db.algebra.core.document.DocumentModify; import org.polypheny.db.algebra.core.lpg.LpgAlg; import org.polypheny.db.algebra.core.lpg.LpgModify; +import org.polypheny.db.algebra.core.relational.RelModify; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.logistic.NamespaceType; +import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.config.RuntimeConfig; -import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptUtil; -import org.polypheny.db.prepare.AlgOptEntityImpl; import org.polypheny.db.schema.LogicalEntity; import org.polypheny.db.transaction.EntityAccessMap.EntityIdentifier.NamespaceLevel; import org.polypheny.db.transaction.Lock.LockMode; @@ -223,11 +222,8 @@ public Mode getEntityAccessMode( @NonNull EntityAccessMap.EntityIdentifier entit * @param table table of interest * @return qualified name */ - public EntityIdentifier getQualifiedName( AlgOptEntity table, long partitionId ) { - if ( !(table instanceof AlgOptEntityImpl) ) { - throw new RuntimeException( "Unexpected table type: " + table.getClass() ); - } - return new EntityIdentifier( table.getCatalogEntity().id, partitionId, NamespaceLevel.ENTITY_LEVEL ); + public EntityIdentifier getQualifiedName( CatalogEntity table, long partitionId ) { + return new EntityIdentifier( table.id, partitionId, NamespaceLevel.ENTITY_LEVEL ); } @@ -239,7 +235,7 @@ private class TableRelVisitor extends AlgVisitor { @Override public void visit( AlgNode p, int ordinal, AlgNode parent ) { super.visit( p, ordinal, parent ); - AlgOptEntity table = p.getEntity(); + CatalogEntity table = p.getEntity(); if ( table == null ) { if ( p instanceof LpgAlg ) { attachGraph( (AlgNode & LpgAlg) p ); @@ -258,7 +254,7 @@ public void visit( AlgNode p, int ordinal, AlgNode parent ) { if ( p instanceof RelModify ) { newAccess = Mode.WRITE_ACCESS; if ( RuntimeConfig.FOREIGN_KEY_ENFORCEMENT.getBoolean() ) { - extractWriteConstraints( (LogicalEntity) table.getEntity() ); + extractWriteConstraints( table.unwrap( LogicalEntity.class ) ); } } else { newAccess = Mode.READ_ACCESS; @@ -270,9 +266,9 @@ public void visit( AlgNode p, int ordinal, AlgNode parent ) { List relevantPartitions; if ( accessedPartitions.containsKey( p.getId() ) ) { relevantPartitions = accessedPartitions.get( p.getId() ); - } else if ( table.getCatalogEntity() != null ) { - if ( table.getCatalogEntity().namespaceType == NamespaceType.RELATIONAL ) { - relevantPartitions = table.getCatalogEntity().unwrap( LogicalTable.class ).partitionProperty.partitionIds; + } else if ( table != null ) { + if ( table.namespaceType == NamespaceType.RELATIONAL ) { + relevantPartitions = table.unwrap( 
LogicalTable.class ).partitionProperty.partitionIds; } else { relevantPartitions = List.of(); } @@ -294,9 +290,6 @@ public void visit( AlgNode p, int ordinal, AlgNode parent ) { private void attachDocument( T p ) { - if ( p.getCollection() == null ) { - return; - } Mode newAccess; if ( p instanceof DocumentModify ) { @@ -305,15 +298,12 @@ private void attachDocument( T p ) { newAccess = Mode.READ_ACCESS; } // as documents are using the same id space as tables this will work - EntityIdentifier key = new EntityIdentifier( p.getCollection().getCatalogEntity().id, 0, NamespaceLevel.ENTITY_LEVEL ); + EntityIdentifier key = new EntityIdentifier( p.getEntity().id, 0, NamespaceLevel.ENTITY_LEVEL ); accessMap.put( key, newAccess ); } - private void attachGraph( LpgAlg p ) { - if ( p.getGraph() == null ) { - return; - } + private void attachGraph( AlgNode p ) { Mode newAccess; if ( p instanceof LpgModify ) { @@ -322,7 +312,7 @@ private void attachGraph( LpgAlg p ) { newAccess = Mode.READ_ACCESS; } // as graph is on the namespace level in the full polyschema it is unique and can be used like this - EntityIdentifier key = new EntityIdentifier( p.getGraph().id, 0, NamespaceLevel.NAMESPACE_LEVEL ); + EntityIdentifier key = new EntityIdentifier( p.getEntity().id, 0, NamespaceLevel.NAMESPACE_LEVEL ); accessMap.put( key, newAccess ); } diff --git a/dbms/src/main/java/org/polypheny/db/view/MaterializedViewManagerImpl.java b/dbms/src/main/java/org/polypheny/db/view/MaterializedViewManagerImpl.java index ff1df6a1f6..d08741e2cc 100644 --- a/dbms/src/main/java/org/polypheny/db/view/MaterializedViewManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/view/MaterializedViewManagerImpl.java @@ -41,18 +41,18 @@ import org.polypheny.db.algebra.constant.Kind; import org.polypheny.db.algebra.logical.relational.LogicalRelViewScan; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.entity.CatalogColumn; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; import org.polypheny.db.catalog.entity.CatalogMaterializedView; -import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.entity.MaterializedCriteria; import org.polypheny.db.catalog.entity.MaterializedCriteria.CriteriaType; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.exceptions.GenericCatalogException; import org.polypheny.db.catalog.exceptions.UnknownDatabaseException; import org.polypheny.db.catalog.exceptions.UnknownSchemaException; import org.polypheny.db.catalog.exceptions.UnknownTableException; import org.polypheny.db.catalog.exceptions.UnknownUserException; +import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.config.RuntimeConfig; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgTraitSet; @@ -433,7 +433,7 @@ public void commitTransaction( Transaction transaction ) { private void prepareSourceRel( Statement sourceStatement, AlgCollation algCollation, AlgNode sourceRel ) { AlgOptCluster cluster = AlgOptCluster.create( sourceStatement.getQueryProcessor().getPlanner(), - new RexBuilder( sourceStatement.getTransaction().getTypeFactory() ), traitSet, rootSchema ); + new RexBuilder( sourceStatement.getTransaction().getTypeFactory() ), null, sourceStatement.getDataContext().getRootSchema() ); prepareNode( sourceRel, cluster, algCollation ); } diff --git 
a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/AlphabeticStatisticColumn.java b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/AlphabeticStatisticColumn.java index 51daea8bf1..9e49ed2129 100644 --- a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/AlphabeticStatisticColumn.java +++ b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/AlphabeticStatisticColumn.java @@ -37,7 +37,7 @@ public class AlphabeticStatisticColumn> extends Statisti public AlphabeticStatisticColumn( QueryResult column ) { - super( column.getSchemaId(), column.getTableId(), column.getColumnId(), column.getType(), StatisticType.ALPHABETICAL ); + super( column.getColumn().schemaId, column.getColumn().tableId, column.getColumn().id, column.getColumn().type, StatisticType.ALPHABETICAL ); } diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/NumericalStatisticColumn.java b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/NumericalStatisticColumn.java index e04e29bc43..4709a5a3da 100644 --- a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/NumericalStatisticColumn.java +++ b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/NumericalStatisticColumn.java @@ -51,7 +51,7 @@ public class NumericalStatisticColumn extends StatisticColumn { public NumericalStatisticColumn( QueryResult column ) { - super( column.getSchemaId(), column.getTableId(), column.getColumnId(), column.getType(), StatisticType.NUMERICAL ); + super( column.getColumn().schemaId, column.getEntity().id, column.getColumn().id, column.getColumn().type, StatisticType.NUMERICAL ); } diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/QueryResult.java b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/QueryResult.java index ef651c76f6..acf3d71997 100644 --- a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/QueryResult.java +++ b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/QueryResult.java @@ -16,51 +16,33 @@ package org.polypheny.db.monitoring.statistics; +import lombok.Data; import lombok.Getter; -import lombok.extern.slf4j.Slf4j; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogColumn; -import org.polypheny.db.type.PolyType; +import org.polypheny.db.catalog.entity.CatalogEntity; /** * Boilerplate of a column to guide the handling and pattern of a column */ -@Slf4j +@Data class QueryResult { @Getter - private final long schemaId; - - @Getter - private final long tableId; - + private final CatalogEntity entity; @Getter - private final long columnId; - - @Getter - private final PolyType type; - + private final CatalogColumn column; - QueryResult( long schemaId, long tableId, Long columnId, PolyType type ) { - this.schemaId = schemaId; - this.tableId = tableId; - this.columnId = columnId; - this.type = type; - Catalog catalog = Catalog.getInstance(); - if ( catalog.checkIfExistsEntity( tableId ) ) { - this.schema = catalog.getSchema( schemaId ).id; - this.tableId = catalog.getTable( tableId ).id; - if ( columnId != null ) { - this.column = catalog.getColumn( columnId ).id; - } - } + QueryResult( CatalogEntity entity, CatalogColumn column ) { + this.entity = entity; + this.column = column; } public static QueryResult fromCatalogColumn( CatalogColumn column ) { - return new QueryResult( column.schemaId, column.tableId, column.id, column.type ); + return new QueryResult( Catalog.getInstance().getTable( column.tableId ), column 
); } } diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticQueryProcessor.java b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticQueryProcessor.java index 203e35a336..8bfe878967 100644 --- a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticQueryProcessor.java +++ b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticQueryProcessor.java @@ -27,14 +27,14 @@ import org.polypheny.db.algebra.AlgRoot; import org.polypheny.db.algebra.constant.Kind; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.logistic.EntityType; -import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.catalog.entity.CatalogColumn; import org.polypheny.db.catalog.entity.CatalogSchema; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.exceptions.UnknownDatabaseException; import org.polypheny.db.catalog.exceptions.UnknownSchemaException; import org.polypheny.db.catalog.exceptions.UnknownUserException; +import org.polypheny.db.catalog.logistic.EntityType; +import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.config.RuntimeConfig; import org.polypheny.db.iface.Authenticator; import org.polypheny.db.transaction.Statement; @@ -130,7 +130,7 @@ public List getAllColumns() { for ( CatalogColumn catalogColumn : catalogColumns ) { if ( catalog.getTable( catalogColumn.tableId ).entityType != EntityType.VIEW ) { - allColumns.add( new QueryResult( catalogColumn.schemaId, catalogColumn.tableId, catalogColumn.id, catalogColumn.type ) ); + allColumns.add( QueryResult.fromCatalogColumn( catalogColumn ) ); } } return allColumns; diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticQueryResult.java b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticQueryResult.java index e449e9b665..d8794a2c4b 100644 --- a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticQueryResult.java +++ b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticQueryResult.java @@ -38,7 +38,7 @@ public class StatisticQueryResult extends QueryResult { * @param data map consisting of different values to a given statistic */ public StatisticQueryResult( QueryResult queryResult, final Comparable[] data ) { - super( queryResult.getSchemaId(), queryResult.getTableId(), queryResult.getColumnId(), queryResult.getType() ); + super( queryResult.getEntity(), queryResult.getColumn() ); this.data = data; } diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticsManagerImpl.java b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticsManagerImpl.java index deeb7b0dab..0d2fa7f4f5 100644 --- a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticsManagerImpl.java +++ b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticsManagerImpl.java @@ -241,7 +241,7 @@ public void restart( Config c ) { private void resetAllIsFull() { this.statisticSchemaMap.values().forEach( s -> s.values().forEach( t -> t.values().forEach( c -> { - assignUnique( c, this.prepareNode( new QueryResult( c.getSchemaId(), c.getTableId(), c.getColumnId(), c.getType() ), NodeType.UNIQUE_VALUE ) ); + assignUnique( c, this.prepareNode( QueryResult.fromCatalogColumn( Catalog.getInstance().getColumn( c.getColumnId() ) ), NodeType.UNIQUE_VALUE ) ); } ) ) ); } @@ -282,7 +282,7 @@ private void reevaluateRowCount() { log.debug( 
"Reevaluate Row Count." ); statisticQueryInterface.getAllTable().forEach( table -> { - int rowCount = getNumberColumnCount( this.prepareNode( new QueryResult( table.namespaceId, table.id, null, null ), NodeType.ROW_COUNT_TABLE ) ); + int rowCount = getNumberColumnCount( this.prepareNode( new QueryResult( Catalog.getInstance().getTable( table.id ), null ), NodeType.ROW_COUNT_TABLE ) ); updateRowCountPerTable( table.id, rowCount, "SET-ROW-COUNT" ); } ); } @@ -338,11 +338,11 @@ private synchronized void replaceStatistics( Map reevaluateColumn( QueryResult column ) { - if ( column.getType().getFamily() == PolyTypeFamily.NUMERIC ) { + if ( column.getColumn().type.getFamily() == PolyTypeFamily.NUMERIC ) { return this.reevaluateNumericalColumn( column ); - } else if ( column.getType().getFamily() == PolyTypeFamily.CHARACTER ) { + } else if ( column.getColumn().type.getFamily() == PolyTypeFamily.CHARACTER ) { return this.reevaluateAlphabeticalColumn( column ); - } else if ( PolyType.DATETIME_TYPES.contains( column.getType() ) ) { + } else if ( PolyType.DATETIME_TYPES.contains( column.getColumn().type ) ) { return this.reevaluateTemporalColumn( column ); } return null; @@ -405,7 +405,7 @@ private > StatisticColumn reevaluateTemporalColumn( Q StatisticQueryResult unique = this.prepareNode( column, NodeType.UNIQUE_VALUE ); if ( unique != null ) { for ( int idx = 0; idx < unique.getData().length; idx++ ) { - unique.getData()[idx] = DateTimeStringUtils.longToAdjustedString( (Number) unique.getData()[idx], column.getType() ); + unique.getData()[idx] = DateTimeStringUtils.longToAdjustedString( (Number) unique.getData()[idx], column.getColumn().type ); } } @@ -460,7 +460,7 @@ private Integer getNumberColumnCount( StatisticQueryResult countColumn ) { try { return Integer.parseInt( value.toString() ); } catch ( NumberFormatException e ) { - log.error( "Count could not be parsed for column {}.", countColumn.getColumn(), e ); + log.error( "Count could not be parsed for column {}.", countColumn/*.getColumn()*/, e ); } } } @@ -471,9 +471,7 @@ private Integer getNumberColumnCount( StatisticQueryResult countColumn ) { private void put( QueryResult queryResult, StatisticColumn statisticColumn ) { put( this.statisticSchemaMap, - queryResult.getSchemaId(), - queryResult.getTableId(), - queryResult.getColumnId(), + queryResult, statisticColumn ); } @@ -484,9 +482,7 @@ private void put( StatisticColumn statisticColumn ) { put( statisticSchemaMapCopy, - queryResult.getSchemaId(), - queryResult.getTableId(), - queryResult.getColumnId(), + queryResult, statisticColumn ); } @@ -517,7 +513,7 @@ private void put( private StatisticQueryResult prepareNode( QueryResult queryResult, NodeType nodeType ) { StatisticQueryResult statisticQueryColumn = null; - if ( Catalog.getInstance().checkIfExistsEntity( queryResult.getTableId() ) ) { + if ( Catalog.getInstance().checkIfExistsEntity( queryResult.getEntity().id ) ) { AlgNode queryNode = getQueryNode( queryResult, nodeType ); //queryNode = getQueryNode( queryResult, nodeType ); statisticQueryColumn = statisticQueryInterface.selectOneColumnStat( queryNode, transaction, statement, queryResult ); @@ -534,7 +530,7 @@ private AlgNode getQueryNode( QueryResult queryResult, NodeType nodeType ) { final AlgOptCluster cluster = AlgOptCluster.create( statement.getQueryProcessor().getPlanner(), rexBuilder, null, statement.getDataContext().getRootSchema() ); AlgNode queryNode; - LogicalRelScan tableScan = getLogicalScan( queryResult.getSchemaId(), queryResult.getTableId(), reader, cluster ); + 
LogicalRelScan tableScan = getLogicalScan( queryResult.getEntity().id, reader, cluster ); switch ( nodeType ) { case MIN: case MAX: @@ -569,7 +565,7 @@ private LogicalRelScan getLogicalScan( long tableId, CatalogReader reader, AlgOp */ private AlgNode getAggregateColumn( QueryResult queryResult, NodeType nodeType, RelScan tableScan, RexBuilder rexBuilder, AlgOptCluster cluster ) { for ( int i = 0; i < tableScan.getRowType().getFieldNames().size(); i++ ) { - if ( tableScan.getRowType().getFieldNames().get( i ).equals( queryResult.getColumnId() ) ) { + if ( tableScan.getRowType().getFieldNames().get( i ).equals( queryResult.getColumn().name ) ) { LogicalProject logicalProject = LogicalProject.create( tableScan, Collections.singletonList( rexBuilder.makeInputRef( tableScan, i ) ), @@ -619,7 +615,7 @@ private AlgNode getAggregateColumn( QueryResult queryResult, NodeType nodeType, private AlgNode getUniqueValues( QueryResult queryResult, RelScan tableScan, RexBuilder rexBuilder ) { for ( int i = 0; i < tableScan.getRowType().getFieldNames().size(); i++ ) { - if ( queryResult.getColumn() != null && tableScan.getRowType().getFieldNames().get( i ).equals( queryResult.getColumn() ) ) { + if ( tableScan.getRowType().getFieldNames().get( i ).equals( queryResult.getColumn() ) ) { LogicalProject logicalProject = LogicalProject.create( tableScan, Collections.singletonList( rexBuilder.makeInputRef( tableScan, i ) ), @@ -906,7 +902,7 @@ private void handleTruncate( long tableId, long schemaId, Catalog catalog ) { LogicalTable catalogTable = catalog.getTable( tableId ); for ( int i = 0; i < catalogTable.fieldIds.size(); i++ ) { PolyType polyType = catalog.getColumn( catalogTable.fieldIds.get( i ) ).type; - QueryResult queryResult = new QueryResult( schemaId, catalogTable.id, catalogTable.fieldIds.get( i ), polyType ); + QueryResult queryResult = new QueryResult( catalogTable, Catalog.getInstance().getColumn( catalogTable.fieldIds.get( i ) ) ); if ( this.statisticSchemaMap.get( schemaId ).get( tableId ).get( catalogTable.fieldIds.get( i ) ) != null ) { StatisticColumn statisticColumn = createNewStatisticColumns( polyType, queryResult ); if ( statisticColumn != null ) { @@ -937,7 +933,7 @@ private void handleInsert( long tableId, Map> changedValues, if ( this.statisticSchemaMap.get( schemaId ).get( tableId ) != null ) { for ( int i = 0; i < columns.size(); i++ ) { PolyType polyType = catalog.getColumn( columns.get( i ) ).type; - QueryResult queryResult = new QueryResult( schemaId, catalogTable.id, columns.get( i ), polyType ); + QueryResult queryResult = new QueryResult( catalogTable, Catalog.getInstance().getColumn( columns.get( i ) ) ); if ( this.statisticSchemaMap.get( schemaId ).get( tableId ).get( columns.get( i ) ) != null && changedValues.get( (long) i ) != null ) { handleInsertColumn( tableId, changedValues, schemaId, columns, i, queryResult ); } else { @@ -959,7 +955,7 @@ private void handleInsert( long tableId, Map> changedValues, private void addInserts( Map> changedValues, Catalog catalog, LogicalTable catalogTable, List columns ) { for ( int i = 0; i < columns.size(); i++ ) { PolyType polyType = catalog.getColumn( columns.get( i ) ).type; - QueryResult queryResult = new QueryResult( catalogTable.namespaceId, catalogTable.id, columns.get( i ), polyType ); + QueryResult queryResult = new QueryResult( catalogTable, Catalog.getInstance().getColumn( columns.get( i ) ) ); addNewColumnStatistics( changedValues, i, polyType, queryResult ); } } diff --git 
a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/TemporalStatisticColumn.java b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/TemporalStatisticColumn.java index 4a7e7e492f..354ea887db 100644 --- a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/TemporalStatisticColumn.java +++ b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/TemporalStatisticColumn.java @@ -74,8 +74,8 @@ public void setMax( T max ) { public TemporalStatisticColumn( QueryResult column ) { - super( column.getSchemaId(), column.getTableId(), column.getColumnId(), column.getType(), StatisticType.TEMPORAL ); - temporalType = column.getType().getFamily().name(); + super( column.getColumn().schemaId, column.getEntity().id, column.getColumn().id, column.getColumn().type, StatisticType.TEMPORAL ); + temporalType = column.getColumn().type.getFamily().name(); } diff --git a/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/CottontailEntity.java b/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/CottontailEntity.java index 9c24add7ef..08cc766591 100644 --- a/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/CottontailEntity.java +++ b/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/CottontailEntity.java @@ -16,33 +16,35 @@ package org.polypheny.db.adapter.cottontail; +import java.lang.reflect.Type; import java.util.List; +import java.util.stream.Collectors; import lombok.Getter; import org.apache.calcite.linq4j.Enumerator; import org.apache.calcite.linq4j.Queryable; import org.polypheny.db.adapter.DataContext; import org.polypheny.db.adapter.cottontail.algebra.CottontailScan; import org.polypheny.db.adapter.cottontail.enumberable.CottontailQueryEnumerable; -import org.polypheny.db.adapter.java.AbstractQueryableEntity; +import org.polypheny.db.adapter.cottontail.util.CottontailNameUtil; import org.polypheny.db.adapter.java.JavaTypeFactory; import org.polypheny.db.algebra.AlgNode; -import org.polypheny.db.algebra.core.relational.RelModify; +import org.polypheny.db.algebra.core.common.Modify; import org.polypheny.db.algebra.core.common.Modify.Operation; import org.polypheny.db.algebra.logical.relational.LogicalRelModify; -import org.polypheny.db.algebra.type.AlgDataType; -import org.polypheny.db.algebra.type.AlgDataTypeFactory; import org.polypheny.db.algebra.type.AlgProtoDataType; +import org.polypheny.db.catalog.entity.CatalogEntity; +import org.polypheny.db.catalog.entity.allocation.AllocationTable; +import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.catalog.entity.physical.PhysicalTable; +import org.polypheny.db.catalog.refactor.ModifiableEntity; +import org.polypheny.db.catalog.refactor.QueryableEntity; +import org.polypheny.db.catalog.refactor.TranslatableEntity; import org.polypheny.db.plan.AlgOptCluster; -import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptEntity.ToAlgContext; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.plan.Convention; -import org.polypheny.db.prepare.Prepare.CatalogReader; import org.polypheny.db.rex.RexNode; -import org.polypheny.db.schema.ModifiableEntity; import org.polypheny.db.schema.PolyphenyDbSchema; -import org.polypheny.db.schema.SchemaPlus; -import org.polypheny.db.schema.TranslatableEntity; import org.polypheny.db.schema.impl.AbstractTableQueryable; import org.vitrivr.cottontail.grpc.CottontailGrpc.EntityName; import 
org.vitrivr.cottontail.grpc.CottontailGrpc.From; @@ -53,13 +55,15 @@ import org.vitrivr.cottontail.grpc.CottontailGrpc.SchemaName; -public class CottontailEntity extends AbstractQueryableEntity implements TranslatableEntity, ModifiableEntity { +public class CottontailEntity extends PhysicalTable implements TranslatableEntity, ModifiableEntity, QueryableEntity { + private final LogicalTable logical; + private final AllocationTable allocation; private AlgProtoDataType protoRowType; private CottontailSchema cottontailSchema; @Getter - private EntityName entity; + private EntityName entityName; @Getter private final String physicalSchemaName; @@ -72,26 +76,22 @@ public class CottontailEntity extends AbstractQueryableEntity implements Transla protected CottontailEntity( CottontailSchema cottontailSchema, - String logicalSchemaName, - String logicalTableName, - List logicalColumnNames, - AlgProtoDataType protoRowType, String physicalSchemaName, - String physicalTableName, - List physicalColumnNames, - Long tableId, - long partitionId, long adapterId ) { - super( Object[].class, tableId, partitionId, adapterId ); + LogicalTable logical, + AllocationTable allocation ) { + super( allocation ); this.cottontailSchema = cottontailSchema; - this.protoRowType = protoRowType; - this.logicalColumnNames = logicalColumnNames; + this.logicalColumnNames = logical.getColumnNames(); this.physicalSchemaName = physicalSchemaName; - this.physicalTableName = physicalTableName; - this.physicalColumnNames = physicalColumnNames; + this.physicalTableName = CottontailNameUtil.createPhysicalTableName( logical.id, allocation.id ); + this.physicalColumnNames = allocation.placements.stream().map( p -> CottontailNameUtil.createPhysicalColumnName( p.columnId ) ).collect( Collectors.toList() ); - this.entity = EntityName.newBuilder() + this.logical = logical; + this.allocation = allocation; + + this.entityName = EntityName.newBuilder() .setName( this.physicalTableName ) .setSchema( SchemaName.newBuilder().setName( physicalSchemaName ).build() ) .build(); @@ -110,56 +110,53 @@ public String toString() { @Override - public RelModify toModificationAlg( + public Modify toModificationAlg( AlgOptCluster cluster, - AlgOptEntity table, - CatalogReader catalogReader, + AlgTraitSet traitSet, + CatalogEntity table, AlgNode input, Operation operation, List updateColumnList, - List sourceExpressionList, - boolean flattened ) { + List sourceExpressionList + ) { this.cottontailSchema.getConvention().register( cluster.getPlanner() ); return new LogicalRelModify( - cluster, cluster.traitSetOf( Convention.NONE ), table, - catalogReader, input, operation, updateColumnList, - sourceExpressionList, - flattened ); + sourceExpressionList ); } @Override - public Queryable asQueryable( DataContext dataContext, PolyphenyDbSchema schema, String tableName ) { - return new CottontailTableQueryable( dataContext, schema, tableName ); + public Queryable asQueryable( DataContext dataContext, PolyphenyDbSchema schema, long entityId ) { + return new CottontailTableQueryable( dataContext, schema, this ); } @Override - public AlgNode toAlg( ToAlgContext context, AlgOptEntity algOptEntity, AlgTraitSet traitSet ) { - return new CottontailScan( context.getCluster(), algOptEntity, this, traitSet, this.cottontailSchema.getConvention() ); + public AlgNode toAlg( ToAlgContext context, AlgTraitSet traitSet ) { + return new CottontailScan( context.getCluster(), this, traitSet, this.cottontailSchema.getConvention() ); } - @Override - public AlgDataType getRowType( 
AlgDataTypeFactory typeFactory ) { - return protoRowType.apply( typeFactory ); + public CottontailConvention getUnderlyingConvention() { + return this.cottontailSchema.getConvention(); } - public CottontailConvention getUnderlyingConvention() { - return this.cottontailSchema.getConvention(); + @Override + public Type getElementType() { + return Object[].class; } private class CottontailTableQueryable extends AbstractTableQueryable { - public CottontailTableQueryable( DataContext dataContext, SchemaPlus schema, String tableName ) { - super( dataContext, schema, CottontailEntity.this, tableName ); + public CottontailTableQueryable( DataContext dataContext, PolyphenyDbSchema schema, PhysicalTable physicalTable ) { + super( dataContext, schema, null, physicalTable.name ); } @@ -169,7 +166,7 @@ public Enumerator enumerator() { final CottontailEntity cottontailTable = (CottontailEntity) this.table; final long txId = cottontailTable.cottontailSchema.getWrapper().beginOrContinue( this.dataContext.getStatement().getTransaction() ); final Query query = Query.newBuilder() - .setFrom( From.newBuilder().setScan( Scan.newBuilder().setEntity( cottontailTable.entity ) ).build() ) + .setFrom( From.newBuilder().setScan( Scan.newBuilder().setEntity( cottontailTable.entityName ) ).build() ) .build(); final QueryMessage queryMessage = QueryMessage.newBuilder() .setMetadata( Metadata.newBuilder().setTransactionId( txId ) ) @@ -177,7 +174,7 @@ public Enumerator enumerator() { .build(); return new CottontailQueryEnumerable( cottontailTable.cottontailSchema.getWrapper().query( queryMessage ), - new CottontailQueryEnumerable.RowTypeParser( cottontailTable.getRowType( typeFactory ), cottontailTable.physicalColumnNames ) + new CottontailQueryEnumerable.RowTypeParser( cottontailTable.getRowType(), cottontailTable.physicalColumnNames ) ).enumerator(); } diff --git a/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/CottontailPlugin.java b/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/CottontailPlugin.java index 4f25288730..9fccdacf80 100644 --- a/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/CottontailPlugin.java +++ b/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/CottontailPlugin.java @@ -40,16 +40,13 @@ import org.polypheny.db.adapter.DeployMode; import org.polypheny.db.adapter.cottontail.util.CottontailNameUtil; import org.polypheny.db.adapter.cottontail.util.CottontailTypeUtil; -import org.polypheny.db.algebra.type.AlgDataType; -import org.polypheny.db.algebra.type.AlgDataTypeFactory; -import org.polypheny.db.algebra.type.AlgDataTypeImpl; -import org.polypheny.db.algebra.type.AlgDataTypeSystem; import org.polypheny.db.catalog.Adapter; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogColumn; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; import org.polypheny.db.catalog.entity.CatalogIndex; import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; +import org.polypheny.db.catalog.entity.allocation.AllocationTable; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.entity.physical.PhysicalTable; import org.polypheny.db.prepare.Context; @@ -57,7 +54,6 @@ import org.polypheny.db.schema.SchemaPlus; import org.polypheny.db.transaction.PolyXid; import org.polypheny.db.type.PolyType; -import org.polypheny.db.type.PolyTypeFactoryImpl; import org.polypheny.db.util.PolyphenyHomeDirManager; import 
org.vitrivr.cottontail.CottontailKt; import org.vitrivr.cottontail.client.iterators.TupleIterator; @@ -213,43 +209,12 @@ public void createNewSchema( SchemaPlus rootSchema, String name, Long id ) { @Override - public PhysicalTable createTableSchema( PhysicalTable boilerplate ) { - final AlgDataTypeFactory typeFactory = new PolyTypeFactoryImpl( AlgDataTypeSystem.DEFAULT ); - final AlgDataTypeFactory.Builder fieldInfo = typeFactory.builder(); - List logicalColumnNames = new LinkedList<>(); - List physicalColumnNames = new LinkedList<>(); - Long tableId = combinedTable.id; - - String physicalSchemaName = partitionPlacement.physicalTableName != null - ? partitionPlacement.physicalSchemaName - : this.dbName; - String physicalTableName = partitionPlacement.physicalTableName != null - ? partitionPlacement.physicalTableName - : CottontailNameUtil.createPhysicalTableName( combinedTable.id, partitionPlacement.partitionId ); - - for ( CatalogColumnPlacement placement : columnPlacementsOnStore ) { - CatalogColumn catalogColumn = Catalog.getInstance().getColumn( placement.columnId ); - - AlgDataType sqlType = catalogColumn.getAlgDataType( typeFactory ); - fieldInfo.add( catalogColumn.name, placement.physicalColumnName, sqlType ).nullable( catalogColumn.nullable ); - logicalColumnNames.add( catalogColumn.name ); - physicalColumnNames.add( placement.physicalColumnName != null - ? placement.physicalColumnName - : CottontailNameUtil.createPhysicalColumnName( placement.columnId ) ); - } - + public PhysicalTable createTableSchema( LogicalTable logical, AllocationTable allocationTable ) { return new CottontailEntity( this.currentSchema, - combinedTable.getNamespaceName(), - combinedTable.name, - logicalColumnNames, - AlgDataTypeImpl.proto( fieldInfo.build() ), - physicalSchemaName, - physicalTableName, - physicalColumnNames, - tableId, - partitionPlacement.partitionId, - getAdapterId() + this.dbName, + logical, + allocationTable ); } diff --git a/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/CottontailSchema.java b/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/CottontailSchema.java index 020a7f2da9..5e748733ab 100644 --- a/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/CottontailSchema.java +++ b/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/CottontailSchema.java @@ -25,7 +25,7 @@ import org.apache.calcite.linq4j.tree.Expression; import org.polypheny.db.adapter.DataContext; import org.polypheny.db.adapter.cottontail.CottontailPlugin.CottontailStore; -import org.polypheny.db.schema.Entity; +import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.schema.Namespace; import org.polypheny.db.schema.Namespace.Schema; import org.polypheny.db.schema.PolyphenyDbSchema; @@ -98,7 +98,7 @@ public static CottontailSchema create( CottontailWrapper wrapper, CottontailStore cottontailStore ) { - final Expression expression = Schemas.subSchemaExpression( parentSchema, name, CottontailSchema.class ); + final Expression expression = Schemas.subSchemaExpression( parentSchema.polyphenyDbSchema(), name, CottontailSchema.class ); final CottontailConvention convention = CottontailConvention.of( name, expression ); return new CottontailSchema( id, wrapper, convention, cottontailStore, name ); } @@ -135,7 +135,7 @@ public Expression getExpression( PolyphenyDbSchema parentSchema, String name ) { @Override - protected Map getTableMap() { + protected Map getTableMap() { return 
ImmutableMap.copyOf( this.tableMap ); } diff --git a/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/algebra/CottontailAlg.java b/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/algebra/CottontailAlg.java index 97c12a4aba..b17b7bd867 100644 --- a/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/algebra/CottontailAlg.java +++ b/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/algebra/CottontailAlg.java @@ -21,7 +21,6 @@ import org.apache.calcite.linq4j.tree.ParameterExpression; import org.polypheny.db.adapter.cottontail.CottontailEntity; import org.polypheny.db.algebra.AlgNode; -import org.polypheny.db.plan.AlgOptEntity; public interface CottontailAlg extends AlgNode { @@ -49,7 +48,7 @@ class CottontailImplementContext { public String tableName; - public AlgOptEntity table; + public CottontailEntity table; public CottontailEntity cottontailTable; public Expression filterBuilder; diff --git a/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/algebra/CottontailScan.java b/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/algebra/CottontailScan.java index 2fe1b757e0..dc58cc9e66 100644 --- a/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/algebra/CottontailScan.java +++ b/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/algebra/CottontailScan.java @@ -26,18 +26,17 @@ import org.polypheny.db.algebra.metadata.AlgMetadataQuery; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgOptCost; -import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptPlanner; import org.polypheny.db.plan.AlgTraitSet; -public class CottontailScan extends RelScan implements CottontailAlg { +public class CottontailScan extends RelScan implements CottontailAlg { protected final CottontailEntity cottontailTable; - public CottontailScan( AlgOptCluster cluster, AlgOptEntity table, CottontailEntity cottontailTable, AlgTraitSet traitSet, CottontailConvention cottontailConvention ) { - super( cluster, traitSet.replace( cottontailConvention ), table ); + public CottontailScan( AlgOptCluster cluster, CottontailEntity cottontailTable, AlgTraitSet traitSet, CottontailConvention cottontailConvention ) { + super( cluster, traitSet.replace( cottontailConvention ), cottontailTable ); this.cottontailTable = cottontailTable; } @@ -45,7 +44,7 @@ public CottontailScan( AlgOptCluster cluster, AlgOptEntity table, CottontailEnti @Override public AlgNode copy( AlgTraitSet traitSet, List inputs ) { assert inputs.isEmpty(); - return new CottontailScan( getCluster(), this.table, this.cottontailTable, traitSet, (CottontailConvention) this.getConvention() ); + return new CottontailScan( getCluster(), this.cottontailTable, traitSet, (CottontailConvention) this.getConvention() ); } diff --git a/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/algebra/CottontailTableModify.java b/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/algebra/CottontailTableModify.java index df5d4fc22f..b872b0850b 100644 --- a/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/algebra/CottontailTableModify.java +++ b/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/algebra/CottontailTableModify.java @@ -35,12 +35,9 @@ import org.polypheny.db.algebra.core.relational.RelModify; import 
org.polypheny.db.algebra.metadata.AlgMetadataQuery; import org.polypheny.db.algebra.type.AlgDataTypeField; -import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgOptCost; -import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptPlanner; import org.polypheny.db.plan.AlgTraitSet; -import org.polypheny.db.prepare.Prepare.CatalogReader; import org.polypheny.db.rex.RexCall; import org.polypheny.db.rex.RexDynamicParam; import org.polypheny.db.rex.RexLiteral; @@ -50,7 +47,7 @@ import org.polypheny.db.util.BuiltInMethod; -public class CottontailTableModify extends RelModify implements CottontailAlg { +public class CottontailTableModify extends RelModify implements CottontailAlg { public final CottontailEntity cottontailTable; @@ -63,10 +60,8 @@ public class CottontailTableModify extends RelModify implements CottontailAlg { *
    UPDATE table SET ident1 = exp1, ident2 = exp2  WHERE condition
    * * - * @param cluster Cluster this relational expression belongs to * @param traitSet Traits of this relational expression * @param table Target table to modify - * @param catalogReader accessor to the table metadata. * @param input Sub-query or filter condition * @param operation Modify operation (INSERT, UPDATE, DELETE) * @param updateColumnList List of column identifiers to be updated (e.g. ident1, ident2); null if not UPDATE @@ -74,16 +69,14 @@ public class CottontailTableModify extends RelModify implements CottontailAlg { * @param flattened Whether set flattens the input row type */ public CottontailTableModify( - AlgOptCluster cluster, AlgTraitSet traitSet, - AlgOptEntity table, - CatalogReader catalogReader, + CottontailEntity table, AlgNode input, Operation operation, List updateColumnList, List sourceExpressionList, boolean flattened ) { - super( cluster, traitSet, table, catalogReader, input, operation, updateColumnList, sourceExpressionList, flattened ); + super( input.getCluster(), traitSet, table, input, operation, updateColumnList, sourceExpressionList, flattened ); this.cottontailTable = table.unwrap( CottontailEntity.class ); } @@ -91,10 +84,8 @@ public CottontailTableModify( @Override public AlgNode copy( AlgTraitSet traitSet, List inputs ) { return new CottontailTableModify( - getCluster(), traitSet, - getEntity(), - getCatalogReader(), + entity, AbstractAlgNode.sole( inputs ), getOperation(), getUpdateColumnList(), @@ -118,7 +109,7 @@ public AlgOptCost computeSelfCost( AlgOptPlanner planner, AlgMetadataQuery mq ) @Override public void implement( CottontailImplementContext context ) { context.cottontailTable = this.cottontailTable; - context.table = this.table; + context.table = this.entity; context.schemaName = this.cottontailTable.getPhysicalSchemaName(); context.tableName = this.cottontailTable.getPhysicalTableName(); context.visitChild( 0, getInput() ); @@ -155,7 +146,7 @@ private Expression buildUpdateTupleBuilder( CottontailImplementContext context ) final List physicalColumnNames = new ArrayList<>(); final List logicalColumnNames = new ArrayList<>(); final List columnTypes = new ArrayList<>(); - for ( AlgDataTypeField field : context.cottontailTable.getRowType( getCluster().getTypeFactory() ).getFieldList() ) { + for ( AlgDataTypeField field : context.cottontailTable.getRowType().getFieldList() ) { physicalColumnNames.add( context.cottontailTable.getPhysicalColumnName( field.getName() ) ); logicalColumnNames.add( field.getName() ); columnTypes.add( field.getType().getPolyType() ); diff --git a/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/rules/CottontailTableModificationRule.java b/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/rules/CottontailTableModificationRule.java index bcfe8d3d65..351ba47d3c 100644 --- a/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/rules/CottontailTableModificationRule.java +++ b/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/rules/CottontailTableModificationRule.java @@ -22,8 +22,8 @@ import org.polypheny.db.adapter.cottontail.algebra.CottontailTableModify; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.UnsupportedFromInsertShuttle; -import org.polypheny.db.algebra.core.relational.RelModify; import org.polypheny.db.algebra.core.common.Modify; +import org.polypheny.db.algebra.core.relational.RelModify; import org.polypheny.db.plan.AlgOptRule; import org.polypheny.db.plan.AlgOptRuleCall; 
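// A usage sketch for the slimmed-down constructor documented above: the cluster
// argument is gone (super() now reads input.getCluster()) and the CatalogReader
// parameter has been dropped. `traitSet`, `table`, `input`, `updateColumns` and
// `sourceExpressions` are placeholder variables for illustration:
//
//     CottontailTableModify modify = new CottontailTableModify(
//         traitSet,             // trait set carrying the Cottontail convention
//         table,                // CottontailEntity target, no CatalogReader
//         input,                // sub-query or filter condition
//         Operation.UPDATE,
//         updateColumns,        // e.g. ident1, ident2
//         sourceExpressions,    // e.g. exp1, exp2
//         false );              // flattened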
import org.polypheny.db.plan.AlgTraitSet; @@ -74,10 +74,8 @@ public AlgNode convert( AlgNode alg ) { final AlgTraitSet traitSet = modify.getTraitSet().replace( out ); return new CottontailTableModify( - modify.getCluster(), traitSet, - modify.getEntity(), - modify.getCatalogReader(), + modify.getEntity().unwrap( CottontailEntity.class ), AlgOptRule.convert( modify.getInput(), traitSet ), modify.getOperation(), modify.getUpdateColumnList(), diff --git a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java index 167453fc01..fabaed9465 100644 --- a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java +++ b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java @@ -42,6 +42,7 @@ import org.polypheny.db.adapter.csv.CsvTable.Flavor; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; +import org.polypheny.db.catalog.entity.allocation.AllocationTable; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.entity.physical.PhysicalTable; import org.polypheny.db.information.InformationGroup; @@ -132,7 +133,7 @@ public void createNewSchema( SchemaPlus rootSchema, String name, Long id ) { @Override - public PhysicalTable createTableSchema( PhysicalTable boilerplate ) { + public PhysicalTable createTableSchema( LogicalTable logical, AllocationTable allocationTable ) { return currentSchema.createCsvTable( catalogTable, columnPlacementsOnStore, this, partitionPlacement ); } diff --git a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/FilePlugin.java b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/FilePlugin.java index 68a53af98b..cdb8dfc024 100644 --- a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/FilePlugin.java +++ b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/FilePlugin.java @@ -53,6 +53,7 @@ import org.polypheny.db.catalog.entity.CatalogIndex; import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; import org.polypheny.db.catalog.entity.CatalogPrimaryKey; +import org.polypheny.db.catalog.entity.allocation.AllocationTable; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.entity.physical.PhysicalTable; import org.polypheny.db.information.InformationGraph; @@ -183,7 +184,7 @@ public void createNewSchema( SchemaPlus rootSchema, String name, Long id ) { @Override - public PhysicalTable createTableSchema( PhysicalTable boilerplate ) { + public PhysicalTable createTableSchema( LogicalTable logical, AllocationTable allocationTable ) { return currentSchema.createFileTable( catalogTable, columnPlacementsOnStore, partitionPlacement ); } diff --git a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/source/Qfs.java b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/source/Qfs.java index fbaf787a4e..ebea9ee43d 100644 --- a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/source/Qfs.java +++ b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/source/Qfs.java @@ -36,6 +36,7 @@ import org.polypheny.db.adapter.Adapter.AdapterSettingString; import org.polypheny.db.adapter.DataSource; import org.polypheny.db.adapter.DeployMode; +import org.polypheny.db.catalog.entity.allocation.AllocationTable; import org.polypheny.db.catalog.entity.logical.LogicalTable; import 
org.polypheny.db.catalog.entity.physical.PhysicalTable;
 import org.polypheny.db.information.InformationGroup;
@@ -88,7 +89,7 @@ public void createNewSchema( SchemaPlus rootSchema, String name, Long id ) {

     @Override
-    public PhysicalTable createTableSchema( PhysicalTable boilerplate ) {
+    public PhysicalTable createTableSchema( LogicalTable logical, AllocationTable allocationTable ) {
         return currentSchema.createFileTable( combinedTable, columnPlacementsOnStore, partitionPlacement );
     }

diff --git a/plugins/hsqldb-adapter/src/main/java/org/polypheny/db/hsqldb/stores/HsqldbStore.java b/plugins/hsqldb-adapter/src/main/java/org/polypheny/db/hsqldb/stores/HsqldbStore.java
index 1527d09c99..aaff9c9cf7 100644
--- a/plugins/hsqldb-adapter/src/main/java/org/polypheny/db/hsqldb/stores/HsqldbStore.java
+++ b/plugins/hsqldb-adapter/src/main/java/org/polypheny/db/hsqldb/stores/HsqldbStore.java
@@ -35,12 +35,13 @@ import org.polypheny.db.adapter.jdbc.connection.TransactionalConnectionFactory;
 import org.polypheny.db.adapter.jdbc.stores.AbstractJdbcStore;
 import org.polypheny.db.catalog.Catalog;
-import org.polypheny.db.catalog.logistic.NamespaceType;
 import org.polypheny.db.catalog.entity.CatalogColumnPlacement;
 import org.polypheny.db.catalog.entity.CatalogIndex;
 import org.polypheny.db.catalog.entity.CatalogPartitionPlacement;
+import org.polypheny.db.catalog.entity.allocation.AllocationTable;
 import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.catalog.entity.physical.PhysicalTable;
+import org.polypheny.db.catalog.logistic.NamespaceType;
 import org.polypheny.db.config.RuntimeConfig;
 import org.polypheny.db.plugins.PolyPluginManager;
 import org.polypheny.db.prepare.Context;
@@ -105,7 +106,7 @@ protected ConnectionFactory deployEmbedded() {

     @Override
-    public PhysicalTable createTableSchema( PhysicalTable boilerplate ) {
+    public PhysicalTable createTableSchema( LogicalTable logical, AllocationTable allocationTable ) {
         return currentJdbcSchema.createJdbcTable( catalogTable, columnPlacementsOnStore, partitionPlacement );
     }

diff --git a/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/sources/MonetdbSource.java b/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/sources/MonetdbSource.java
index 04fb7b26d1..8fb2ae0e4d 100644
--- a/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/sources/MonetdbSource.java
+++ b/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/sources/MonetdbSource.java
@@ -31,6 +31,8 @@ import org.polypheny.db.adapter.jdbc.connection.TransactionalConnectionFactory;
 import org.polypheny.db.adapter.jdbc.sources.AbstractJdbcSource;
 import org.polypheny.db.catalog.Adapter;
+import org.polypheny.db.catalog.entity.allocation.AllocationTable;
+import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.catalog.entity.physical.PhysicalTable;
 import org.polypheny.db.schema.Namespace;
 import org.polypheny.db.sql.language.SqlDialect;
@@ -91,7 +93,7 @@ protected ConnectionFactory createConnectionFactory( final Map s

     @Override
-    public PhysicalTable createTableSchema( PhysicalTable boilerplate ) {
+    public PhysicalTable createTableSchema( LogicalTable logical, AllocationTable allocationTable ) {
         return currentJdbcSchema.createJdbcTable( catalogTable, columnPlacementsOnStore, partitionPlacement );
     }

diff --git a/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/stores/MonetdbStore.java b/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/stores/MonetdbStore.java
index 007fd0cf4f..42f8e813b6 100644
--- a/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/stores/MonetdbStore.java
+++ b/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/stores/MonetdbStore.java
@@ -40,6 +40,7 @@ import org.polypheny.db.catalog.entity.CatalogColumnPlacement;
 import org.polypheny.db.catalog.entity.CatalogIndex;
 import org.polypheny.db.catalog.entity.CatalogPartitionPlacement;
+import org.polypheny.db.catalog.entity.allocation.AllocationTable;
 import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.catalog.entity.physical.PhysicalTable;
 import org.polypheny.db.docker.DockerManager;
@@ -229,7 +230,7 @@ public void updateColumnType( Context context, CatalogColumnPlacement columnPlac

     @Override
-    public PhysicalTable createTableSchema( PhysicalTable boilerplate ) {
+    public PhysicalTable createTableSchema( LogicalTable logical, AllocationTable allocationTable ) {
         return currentJdbcSchema.createJdbcTable( catalogTable, columnPlacementsOnStore, partitionPlacement );
     }

diff --git a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoPlugin.java b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoPlugin.java
index 59f3f49186..b475c560f2 100644
--- a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoPlugin.java
+++ b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoPlugin.java
@@ -59,9 +59,7 @@ import org.polypheny.db.adapter.DeployMode.DeploySetting;
 import org.polypheny.db.catalog.Adapter;
 import org.polypheny.db.catalog.Catalog;
-import org.polypheny.db.catalog.logistic.NamespaceType;
 import org.polypheny.db.catalog.entity.CatalogAdapter.AdapterType;
-import org.polypheny.db.catalog.entity.LogicalCollection;
 import org.polypheny.db.catalog.entity.CatalogCollectionMapping;
 import org.polypheny.db.catalog.entity.CatalogCollectionPlacement;
 import org.polypheny.db.catalog.entity.CatalogColumn;
@@ -69,8 +67,11 @@ import org.polypheny.db.catalog.entity.CatalogDefaultValue;
 import org.polypheny.db.catalog.entity.CatalogIndex;
 import org.polypheny.db.catalog.entity.CatalogPartitionPlacement;
+import org.polypheny.db.catalog.entity.LogicalCollection;
+import org.polypheny.db.catalog.entity.allocation.AllocationTable;
 import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.catalog.entity.physical.PhysicalTable;
+import org.polypheny.db.catalog.logistic.NamespaceType;
 import org.polypheny.db.config.ConfigDocker;
 import org.polypheny.db.config.RuntimeConfig;
 import org.polypheny.db.docker.DockerInstance;
@@ -239,7 +240,7 @@ public void createNewSchema( SchemaPlus rootSchema, String name, Long id ) {

     @Override
-    public PhysicalTable createTableSchema( PhysicalTable boilerplate ) {
+    public PhysicalTable createTableSchema( LogicalTable logical, AllocationTable allocationTable ) {
         return currentSchema.createTable( combinedTable, columnPlacementsOnStore, getAdapterId(), partitionPlacement );
     }

diff --git a/plugins/mysql-adapter/src/main/java/org/polypheny/db/adapter/jdbc/MysqlSourcePlugin.java b/plugins/mysql-adapter/src/main/java/org/polypheny/db/adapter/jdbc/MysqlSourcePlugin.java
index 2369414f0c..48fcd0084b 100644
--- a/plugins/mysql-adapter/src/main/java/org/polypheny/db/adapter/jdbc/MysqlSourcePlugin.java
+++ b/plugins/mysql-adapter/src/main/java/org/polypheny/db/adapter/jdbc/MysqlSourcePlugin.java
@@ -31,6 +31,8 @@ import org.polypheny.db.adapter.DeployMode;
 import org.polypheny.db.adapter.jdbc.sources.AbstractJdbcSource;
 import org.polypheny.db.catalog.Adapter;
+import org.polypheny.db.catalog.entity.allocation.AllocationTable;
+import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.catalog.entity.physical.PhysicalTable;
 import org.polypheny.db.schema.Namespace;
 import org.polypheny.db.sql.language.dialect.MysqlSqlDialect;
@@ -93,7 +95,7 @@ public MysqlSource( int storeId, String uniqueName, final Map se

     @Override
-    public PhysicalTable createTableSchema( PhysicalTable boilerplate ) {
+    public PhysicalTable createTableSchema( LogicalTable logical, AllocationTable allocationTable ) {
         return currentJdbcSchema.createJdbcTable( catalogTable, columnPlacementsOnStore, partitionPlacement );
     }

diff --git a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/Neo4jPlugin.java b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/Neo4jPlugin.java
index 9bea72a19b..663a2f5e7a 100644
--- a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/Neo4jPlugin.java
+++ b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/Neo4jPlugin.java
@@ -61,10 +61,12 @@ import org.polypheny.db.catalog.entity.CatalogGraphPlacement;
 import org.polypheny.db.catalog.entity.CatalogIndex;
 import org.polypheny.db.catalog.entity.CatalogPartitionPlacement;
+import org.polypheny.db.catalog.entity.allocation.AllocationTable;
 import org.polypheny.db.catalog.entity.logical.LogicalGraph;
 import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.catalog.entity.physical.PhysicalGraph;
 import org.polypheny.db.catalog.entity.physical.PhysicalTable;
+import org.polypheny.db.catalog.logistic.NamespaceType;
 import org.polypheny.db.docker.DockerInstance;
 import org.polypheny.db.docker.DockerManager;
 import org.polypheny.db.docker.DockerManager.Container;
@@ -495,8 +497,8 @@ public void createNewSchema( SchemaPlus rootSchema, String name ) {

     @Override
-    public PhysicalTable createTableSchema( PhysicalTable boilerplate ) {
-        return this.currentSchema.createTable( boilerplate );
+    public PhysicalTable createTableSchema( LogicalTable logical, AllocationTable allocationTable ) {
+        return this.currentSchema.createTable( allocationTable );
     }

diff --git a/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java b/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java
index 5ad5d8001e..9ee87fdae3 100644
--- a/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java
+++ b/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java
@@ -29,6 +29,8 @@ import org.polypheny.db.adapter.DeployMode;
 import org.polypheny.db.adapter.jdbc.sources.AbstractJdbcSource;
 import org.polypheny.db.catalog.Adapter;
+import org.polypheny.db.catalog.entity.allocation.AllocationTable;
+import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.catalog.entity.physical.PhysicalTable;
 import org.polypheny.db.schema.Namespace;
 import org.polypheny.db.sql.language.dialect.PostgresqlSqlDialect;
@@ -82,7 +84,7 @@ public static void register() {

     @Override
-    public PhysicalTable createTableSchema( PhysicalTable boilerplate ) {
+    public PhysicalTable createTableSchema( LogicalTable logical, AllocationTable allocationTable ) {
         return currentJdbcSchema.createJdbcTable( catalogTable, columnPlacementsOnStore, partitionPlacement );
     }

diff --git a/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/store/PostgresqlStore.java b/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/store/PostgresqlStore.java
index ef46799fd5..13dbf5b19f 100644
--- a/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/store/PostgresqlStore.java
+++ b/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/store/PostgresqlStore.java
@@ -42,6 +42,7 @@ import org.polypheny.db.catalog.entity.CatalogColumnPlacement;
 import org.polypheny.db.catalog.entity.CatalogIndex;
 import org.polypheny.db.catalog.entity.CatalogPartitionPlacement;
+import org.polypheny.db.catalog.entity.allocation.AllocationTable;
 import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.catalog.entity.physical.PhysicalTable;
 import org.polypheny.db.docker.DockerManager;
@@ -195,7 +196,7 @@ public void updateColumnType( Context context, CatalogColumnPlacement columnPlac

     @Override
-    public PhysicalTable createTableSchema( PhysicalTable boilerplate ) {
+    public PhysicalTable createTableSchema( LogicalTable logical, AllocationTable allocationTable ) {
         return currentJdbcSchema.createJdbcTable( catalogTable, columnPlacementsOnStore, partitionPlacement );
     }
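
Note: every store and source above now receives the catalog's logical and allocation entities instead of a prefabricated PhysicalTable. A minimal sketch of the reworked hook, assuming a JDBC-style adapter; the field names (currentJdbcSchema, catalogTable, columnPlacementsOnStore, partitionPlacement) come from the hunks above, everything else is illustrative:

    // Illustration only -- not part of the patch. The adapter derives the
    // physical table from the logical definition plus the allocation it is
    // responsible for; the bodies in this commit still read the old fields.
    @Override
    public PhysicalTable createTableSchema( LogicalTable logical, AllocationTable allocationTable ) {
        return currentJdbcSchema.createJdbcTable( catalogTable, columnPlacementsOnStore, partitionPlacement );
    }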
diff --git a/plugins/sql-language/src/main/codegen/templates/Parser.jj b/plugins/sql-language/src/main/codegen/templates/Parser.jj
index 4a3ae39044..ae51e67022 100644
--- a/plugins/sql-language/src/main/codegen/templates/Parser.jj
+++ b/plugins/sql-language/src/main/codegen/templates/Parser.jj
@@ -50,7 +50,7 @@ import ${importStr};

-import org.polypheny.db.catalog.Catalog.NamespaceType;
+import org.polypheny.db.catalog.logistic.NamespaceType;
 import org.polypheny.db.languages.ParserFactory;
 import org.polypheny.db.util.Conformance;
 import org.polypheny.db.languages.NodeParseException;

diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/SqlProcessorImpl.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/SqlProcessorImpl.java
index 1fd44b6534..6b56cf31b6 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/SqlProcessorImpl.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/SqlProcessorImpl.java
@@ -37,13 +37,13 @@ import org.polypheny.db.algebra.constant.Kind;
 import org.polypheny.db.algebra.type.AlgDataType;
 import org.polypheny.db.catalog.Catalog;
-import org.polypheny.db.catalog.logistic.NamespaceType;
 import org.polypheny.db.catalog.entity.CatalogColumn;
 import org.polypheny.db.catalog.entity.CatalogDefaultValue;
 import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.catalog.exceptions.UnknownDatabaseException;
 import org.polypheny.db.catalog.exceptions.UnknownSchemaException;
 import org.polypheny.db.catalog.exceptions.UnknownTableException;
+import org.polypheny.db.catalog.logistic.NamespaceType;
 import org.polypheny.db.config.RuntimeConfig;
 import org.polypheny.db.languages.NodeParseException;
 import org.polypheny.db.languages.NodeToAlgConverter;
@@ -191,7 +191,7 @@ public AlgRoot translate( Statement statement, Node query, QueryParameters param
         Config sqlToAlgConfig = NodeToAlgConverter.configBuilder().build();
         final RexBuilder rexBuilder = new RexBuilder( statement.getTransaction().getTypeFactory() );

-        final AlgOptCluster cluster = AlgOptCluster.create( statement.getQueryProcessor().getPlanner(), rexBuilder, traitSet, rootSchema );
+        final AlgOptCluster cluster = AlgOptCluster.create( statement.getQueryProcessor().getPlanner(), rexBuilder, null, statement.getDataContext().getRootSchema() );

         final Config config = NodeToAlgConverter.configBuilder()
                 .config( sqlToAlgConfig )

diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorImpl.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorImpl.java
index e7c436b35c..18f9612b85 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorImpl.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorImpl.java
@@ -69,8 +69,8 @@ import org.polypheny.db.algebra.type.AlgDataTypeSystem;
 import org.polypheny.db.algebra.type.AlgRecordType;
 import org.polypheny.db.algebra.type.DynamicRecordType;
-import org.polypheny.db.catalog.logistic.NamespaceType;
 import org.polypheny.db.catalog.entity.CatalogEntity;
+import org.polypheny.db.catalog.logistic.NamespaceType;
 import org.polypheny.db.languages.OperatorRegistry;
 import org.polypheny.db.languages.ParserPos;
 import org.polypheny.db.languages.QueryLanguage;
@@ -4026,7 +4026,7 @@ protected AlgDataType createTargetRowType( ValidatorTable table, SqlNodeList tar
             }
         }
         final Set assignedFields = new HashSet<>();
-        final AlgOptEntity algOptEntity = table instanceof AlgOptEntity ? ((AlgOptEntity) table) : null;
+        final CatalogEntity algOptEntity = null; //table instanceof AlgOptEntity ? ((AlgOptEntity) table) : null; todo dl
         for ( Node node : targetColumnList ) {
             SqlIdentifier id = (SqlIdentifier) node;
             AlgDataTypeField targetField =
@@ -4057,7 +4057,7 @@ protected AlgDataType createTargetRowType( ValidatorTable table, SqlNodeList tar
     public void validateInsert( SqlInsert insert ) {
         final SqlValidatorNamespace targetNamespace = getSqlNamespace( insert );
         validateNamespace( targetNamespace, unknownType );
-        final AlgOptEntity algOptEntity =
+        final CatalogEntity algOptEntity =
                 SqlValidatorUtil.getAlgOptTable(
                         targetNamespace,
                         catalogReader.unwrap( Prepare.CatalogReader.class ),
@@ -4341,7 +4341,7 @@ public void validateDelete( SqlDelete call ) {
     public void validateUpdate( SqlUpdate call ) {
         final SqlValidatorNamespace targetNamespace = getSqlNamespace( call );
         validateNamespace( targetNamespace, unknownType );
-        final AlgOptEntity algOptEntity =
+        final CatalogEntity algOptEntity =
                 SqlValidatorUtil.getAlgOptTable(
                         targetNamespace,
                         catalogReader.unwrap( Prepare.CatalogReader.class ),

diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorUtil.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorUtil.java
index b8d1713840..50c855e4a3 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorUtil.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorUtil.java
@@ -38,6 +38,7 @@ import org.polypheny.db.algebra.type.AlgDataTypeField;
 import org.polypheny.db.algebra.type.AlgDataTypeFieldImpl;
 import org.polypheny.db.algebra.type.AlgDataTypeSystem;
+import org.polypheny.db.catalog.entity.CatalogEntity;
 import org.polypheny.db.catalog.entity.LogicalCollection;
 import org.polypheny.db.catalog.entity.logical.LogicalGraph;
 import org.polypheny.db.languages.OperatorRegistry;
@@ -88,8 +89,10 @@ private SqlValidatorUtil() {
      * @param datasetName Name of sample dataset to substitute, or null to use the regular table
      * @param usedDataset Output parameter which is set to true if a sample dataset is found; may be null
      */
-    public static AlgOptEntity getAlgOptTable( SqlValidatorNamespace namespace, Prepare.CatalogReader catalogReader, String datasetName, boolean[] usedDataset ) {
-        if ( namespace.isWrapperFor( TableNamespace.class ) ) {
+    public static CatalogEntity getAlgOptTable( SqlValidatorNamespace namespace, Prepare.CatalogReader catalogReader, String datasetName, boolean[] usedDataset ) {
+        final TableNamespace tableNamespace = namespace.unwrap( TableNamespace.class );
+        return catalogReader.getRootSchema().getTable( tableNamespace.getTable().getQualifiedName() );
+        /*if ( namespace.isWrapperFor( TableNamespace.class ) ) {
             final TableNamespace tableNamespace = namespace.unwrap( TableNamespace.class );
             return getAlgOptTable( tableNamespace, catalogReader, datasetName, usedDataset, tableNamespace.extendedFields );
         } else if ( namespace.isWrapperFor( SqlValidatorImpl.DmlNamespace.class ) ) {
@@ -106,7 +109,7 @@ public static AlgOptEntity getAlgOptTable( SqlValidatorNamespace namespace, Prep
             return getAlgOptTable( tableNamespace, catalogReader, datasetName, usedDataset, extendedFields );
         }
     }
-        return null;
+        return null;*/
 }

@@ -266,7 +269,7 @@ public static SqlValidatorWithHints newValidator( OperatorTable opTab, Validator
     }

-    public static AlgDataTypeField getTargetField( AlgDataType rowType, AlgDataTypeFactory typeFactory, SqlIdentifier id, ValidatorCatalogReader catalogReader, AlgOptEntity table ) {
+    public static AlgDataTypeField getTargetField( AlgDataType rowType, AlgDataTypeFactory typeFactory, SqlIdentifier id, ValidatorCatalogReader catalogReader, CatalogEntity table ) {
         return getTargetField( rowType, typeFactory, id, catalogReader, table, false );
     }

@@ -279,7 +282,7 @@ public static AlgDataTypeField getTargetField( AlgDataType rowType, AlgDataTypeF
      * @param table the target table or null if it is not a RelOptTable instance
      * @return the target field or null if the name cannot be resolved
      */
-    public static AlgDataTypeField getTargetField( AlgDataType rowType, AlgDataTypeFactory typeFactory, SqlIdentifier id, ValidatorCatalogReader catalogReader, AlgOptEntity table, boolean isDocument ) {
+    public static AlgDataTypeField getTargetField( AlgDataType rowType, AlgDataTypeFactory typeFactory, SqlIdentifier id, ValidatorCatalogReader catalogReader, CatalogEntity table, boolean isDocument ) {
         final Entity t = table == null ? null : table.unwrap( Entity.class );

         if ( !(t instanceof CustomColumnResolvingEntity) ) {
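
Note: the validator now returns a plain CatalogEntity and narrows it only where a concrete facet is needed. A hypothetical call site, with invented variable names, assuming unwrap returns null when the requested view is unavailable:

    // Sketch (not in the patch): the unwrap idiom the rewritten
    // getAlgOptTable() and getTargetField() rely on.
    CatalogEntity table = SqlValidatorUtil.getAlgOptTable( namespace, catalogReader, datasetName, usedDataset );
    Entity entity = table == null ? null : table.unwrap( Entity.class );
    if ( entity instanceof CustomColumnResolvingEntity ) {
        // delegate column resolution to the entity, as getTargetField() does
    }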
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/sql2alg/SqlToAlgConverter.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/sql2alg/SqlToAlgConverter.java
index 5d7dd1b043..f45f28f82c 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/sql2alg/SqlToAlgConverter.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/sql2alg/SqlToAlgConverter.java
@@ -103,8 +103,8 @@ import org.polypheny.db.algebra.logical.relational.LogicalJoin;
 import org.polypheny.db.algebra.logical.relational.LogicalMatch;
 import org.polypheny.db.algebra.logical.relational.LogicalMinus;
-import org.polypheny.db.algebra.logical.relational.LogicalRelModify;
 import org.polypheny.db.algebra.logical.relational.LogicalProject;
+import org.polypheny.db.algebra.logical.relational.LogicalRelModify;
 import org.polypheny.db.algebra.logical.relational.LogicalRelScan;
 import org.polypheny.db.algebra.logical.relational.LogicalRelViewScan;
 import org.polypheny.db.algebra.logical.relational.LogicalSort;
@@ -121,10 +121,12 @@ import org.polypheny.db.algebra.type.AlgDataType;
 import org.polypheny.db.algebra.type.AlgDataTypeFactory;
 import org.polypheny.db.algebra.type.AlgDataTypeField;
-import org.polypheny.db.catalog.Catalog;
+import org.polypheny.db.catalog.entity.CatalogEntity;
+import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.catalog.logistic.EntityType;
 import org.polypheny.db.catalog.logistic.NamespaceType;
-import org.polypheny.db.catalog.entity.logical.LogicalTable;
+import org.polypheny.db.catalog.refactor.ModifiableEntity;
+import org.polypheny.db.catalog.refactor.TranslatableEntity;
 import org.polypheny.db.languages.NodeToAlgConverter;
 import org.polypheny.db.languages.OperatorRegistry;
 import org.polypheny.db.languages.ParserPos;
@@ -141,7 +143,6 @@ import org.polypheny.db.nodes.Operator;
 import org.polypheny.db.nodes.validate.ValidatorTable;
 import org.polypheny.db.plan.AlgOptCluster;
-import org.polypheny.db.plan.AlgOptEntity;
 import org.polypheny.db.plan.AlgOptEntity.ToAlgContext;
 import org.polypheny.db.plan.AlgOptSamplingParameters;
 import org.polypheny.db.plan.AlgOptUtil;
@@ -167,8 +168,6 @@ import org.polypheny.db.rex.RexWindowBound;
 import org.polypheny.db.schema.ColumnStrategy;
 import org.polypheny.db.schema.Entity;
-import org.polypheny.db.schema.ModifiableEntity;
-import org.polypheny.db.schema.TranslatableEntity;
 import org.polypheny.db.schema.Wrapper;
 import org.polypheny.db.sql.language.SqlAggFunction;
 import org.polypheny.db.sql.language.SqlBasicCall;
@@ -2120,17 +2119,17 @@ private void convertIdentifier( Blackboard bb, SqlIdentifier id, SqlNodeList ext
         }
         final String datasetName = datasetStack.isEmpty() ? null : datasetStack.peek();
         final boolean[] usedDataset = { false };
-        AlgOptEntity table = SqlValidatorUtil.getAlgOptTable( fromNamespace, catalogReader, datasetName, usedDataset );
+        CatalogEntity table = SqlValidatorUtil.getAlgOptTable( fromNamespace, catalogReader, datasetName, usedDataset );
         if ( extendedColumns != null && extendedColumns.size() > 0 ) {
             assert table != null;
             final ValidatorTable validatorTable = table.unwrap( ValidatorTable.class );
             final List extendedFields = SqlValidatorUtil.getExtendedColumns( validator.getTypeFactory(), validatorTable, extendedColumns );
-            table = table.extend( extendedFields );
+            table = table; // table.extend( extendedFields ); todo dl
         }
         final AlgNode tableRel;
         if ( config.isConvertTableAccess() ) {
             tableRel = toAlg( table );
-        } else if ( table instanceof AlgOptEntityImpl && table.getCatalogEntity() != null && table.getCatalogEntity().entityType == EntityType.VIEW ) {
+        } else if ( table.entityType == EntityType.VIEW ) {
             tableRel = LogicalRelViewScan.create( cluster, table );
         } else {
             tableRel = LogicalRelScan.create( cluster, table );
@@ -2161,12 +2160,12 @@ protected void convertCollectionTable( Blackboard bb, SqlCall call ) {
         final SqlCallBinding callBinding = new SqlCallBinding( bb.scope.getValidator(), bb.scope, call );
         if ( operator instanceof SqlUserDefinedTableMacro ) {
             final SqlUserDefinedTableMacro udf = (SqlUserDefinedTableMacro) operator;
-            final TranslatableEntity table = udf.getTable( typeFactory, callBinding.sqlOperands() );
-            final LogicalTable catalogTable = Catalog.getInstance().getTable( table.getId() );
-            final AlgDataType rowType = table.getRowType( typeFactory );
-            AlgOptEntity algOptEntity = AlgOptEntityImpl.create( null, rowType, table, catalogTable, null );
-            AlgNode converted = toAlg( algOptEntity );
-            bb.setRoot( converted, true );
+            //final TranslatableEntity table = udf.getTable( typeFactory, callBinding.sqlOperands() );
+            //final LogicalTable catalogTable = Catalog.getInstance().getTable( table.getId() );
+            //final AlgDataType rowType = table.getRowType( typeFactory );
+            //AlgOptEntity algOptEntity = AlgOptEntityImpl.create( null, rowType, table, catalogTable, null );
+            //AlgNode converted = toAlg( algOptEntity );
+            //bb.setRoot( converted, true );
             return;
         }

@@ -2865,7 +2864,7 @@ private boolean all( SqlCall call ) {

     protected AlgNode convertInsert( SqlInsert call ) {
-        AlgOptEntity targetTable = getTargetTable( call );
+        CatalogEntity targetTable = getTargetTable( call );

         final AlgDataType targetRowType = validator.getValidatedNodeType( call );
         assert targetRowType != null;
@@ -2879,18 +2878,17 @@ protected AlgNode convertInsert( SqlInsert call ) {

     /**
      * Creates a relational expression to modify a table or modifiable view.
      */
-    private AlgNode createModify( AlgOptEntity targetTable, AlgNode source ) {
+    private AlgNode createModify( CatalogEntity targetTable, AlgNode source ) {
         final ModifiableEntity modifiableTable = targetTable.unwrap( ModifiableEntity.class );
         if ( modifiableTable != null && modifiableTable == targetTable.unwrap( Entity.class ) ) {
             return modifiableTable.toModificationAlg(
                     cluster,
+                    source.getTraitSet(),
                     targetTable,
-                    catalogReader,
                     source,
                     Modify.Operation.INSERT,
                     null,
-                    null,
-                    false );
+                    null );
         }
         return LogicalRelModify.create(
                 targetTable,
@@ -2907,8 +2905,8 @@ private ToAlgContext createToRelContext() {
     }

-    public AlgNode toAlg( final AlgOptEntity table ) {
-        final AlgNode scan = table.toAlg( createToRelContext(), cluster.traitSet() );
+    public AlgNode toAlg( final CatalogEntity table ) {
+        final AlgNode scan = table.unwrap( TranslatableEntity.class ).toAlg( createToRelContext(), cluster.traitSet() );

         final InitializerExpressionFactory ief =
                 Util.first(
@@ -2943,7 +2941,7 @@ public AlgNode toAlg( final AlgOptEntity table ) {
     }

-    protected AlgOptEntity getTargetTable( SqlNode call ) {
+    protected CatalogEntity getTargetTable( SqlNode call ) {
         final SqlValidatorNamespace targetNs = validator.getSqlNamespace( call );
         if ( targetNs.isWrapperFor( SqlValidatorImpl.DmlNamespace.class ) ) {
             final SqlValidatorImpl.DmlNamespace dmlNamespace = targetNs.unwrap( SqlValidatorImpl.DmlNamespace.class );
@@ -2974,8 +2972,8 @@ protected AlgNode convertColumnList( final SqlInsert call, AlgNode source ) {
         final List columnExprs = new ArrayList<>();
         collectInsertTargets( call, sourceRef, targetColumnNames, columnExprs );

-        final AlgOptEntity targetTable = getTargetTable( call );
-        final AlgDataType targetRowType = AlgOptEntityImpl.realRowType( targetTable );
+        final CatalogEntity targetTable = getTargetTable( call );
+        final AlgDataType targetRowType = targetTable.getRowType();//AlgOptEntityImpl.realRowType( targetTable );
         final List targetFields = targetRowType.getFieldList();
         boolean isDocument = call.getSchemaType() == NamespaceType.DOCUMENT;

@@ -3025,12 +3023,12 @@ protected AlgNode convertColumnList( final SqlInsert call, AlgNode source ) {
     /**
     * Creates a blackboard for translating the expressions of generated columns in an INSERT statement.
     */
-    private Blackboard createInsertBlackboard( AlgOptEntity targetTable, RexNode sourceRef, List targetColumnNames ) {
+    private Blackboard createInsertBlackboard( CatalogEntity targetTable, RexNode sourceRef, List targetColumnNames ) {
         final Map nameToNodeMap = new HashMap<>();
         int j = 0;

         // Assign expressions for non-generated columns.
-        final List strategies = targetTable.getColumnStrategies();
+        final List strategies = targetTable.unwrap( LogicalTable.class ).getColumnStrategies();
         final List targetFields = targetTable.getRowType().getFieldNames();
         for ( String targetColumnName : targetColumnNames ) {
             final int i = targetFields.indexOf( targetColumnName );
@@ -3084,7 +3082,7 @@ private RexNode castNullLiteralIfNeeded( RexNode node, AlgDataType type ) {
      * @param columnExprs List of expressions, to be populated
      */
     protected void collectInsertTargets( SqlInsert call, final RexNode sourceRef, final List targetColumnNames, List columnExprs ) {
-        final AlgOptEntity targetTable = getTargetTable( call );
+        final CatalogEntity targetTable = getTargetTable( call );
         final AlgDataType tableRowType = targetTable.getRowType();
         SqlNodeList targetColumnList = call.getTargetColumnList();
         if ( targetColumnList == null ) {
@@ -3123,7 +3121,7 @@ protected void collectInsertTargets( SqlInsert call, final RexNode sourceRef, fi
         final Blackboard bb = createInsertBlackboard( targetTable, sourceRef, targetColumnNames );

         // Next, assign expressions for generated columns.
-        final List strategies = targetTable.getColumnStrategies();
+        final List strategies = targetTable.unwrap( LogicalTable.class ).getColumnStrategies();
         for ( String columnName : targetColumnNames ) {
             final int i = tableRowType.getFieldNames().indexOf( columnName );
             final RexNode expr;
@@ -3157,7 +3155,7 @@ protected void collectInsertTargets( SqlInsert call, final RexNode sourceRef, fi

     private AlgNode convertDelete( SqlDelete call ) {
-        AlgOptEntity targetTable = getTargetTable( call );
+        CatalogEntity targetTable = getTargetTable( call );
         AlgNode sourceRel = convertSelect( call.getSourceSelect(), false );
         return LogicalRelModify.create(
                 targetTable,
@@ -3179,7 +3177,7 @@ private AlgNode convertUpdate( SqlUpdate call ) {
             rexNodeSourceExpressionListBuilder.add( rn );
         }

-        AlgOptEntity targetTable = getTargetTable( call );
+        CatalogEntity targetTable = getTargetTable( call );

         // convert update column list from SqlIdentifier to String
         final List targetColumnNameList = new ArrayList<>();
@@ -3204,7 +3202,7 @@ private AlgNode convertUpdate( SqlUpdate call ) {

     private AlgNode convertMerge( SqlMerge call ) {
-        AlgOptEntity targetTable = getTargetTable( call );
+        CatalogEntity targetTable = getTargetTable( call );

         // convert update column list from SqlIdentifier to String
         final List targetColumnNameList = new ArrayList<>();
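
Note: taken together, the SqlToAlgConverter hunks replace AlgOptEntity with CatalogEntity along the whole DML path. A condensed sketch of the resulting flow, using only calls that appear in the hunks above (variable names invented):

    // Sketch: how an INSERT target is turned into a modification node now.
    CatalogEntity targetTable = getTargetTable( call );
    ModifiableEntity modifiable = targetTable.unwrap( ModifiableEntity.class );
    if ( modifiable != null ) {
        // traits now come from the source node; no catalogReader is threaded through
        return modifiable.toModificationAlg( cluster, source.getTraitSet(), targetTable, source, Modify.Operation.INSERT, null, null );
    }
    // otherwise the generic LogicalRelModify.create path shown in the hunk above is taken
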
From a27a6f7a7ddcbbe0d27dde94f0f6041cb3df0882 Mon Sep 17 00:00:00 2001
From: datomo
Date: Mon, 27 Feb 2023 23:26:28 +0100
Subject: [PATCH 026/436] boilerplate for interface of schemaSnapshot

---
 .../org/polypheny/db/adapter/DataContext.java |   6 +-
 .../common/LogicalConstraintEnforcer.java     |   4 +-
 .../org/polypheny/db/catalog/Catalog.java     |   2 +
 .../org/polypheny/db/catalog/Snapshot.java    |  72 ++++++++++
 .../polypheny/db/plan/VisitorDataContext.java |   4 +-
 .../db/schema/PolySchemaBuilder.java          |  10 +-
 .../java/org/polypheny/db/schema/Schemas.java |   7 +-
 .../org/polypheny/db/tools/AlgBuilder.java    |   4 +-
 .../polypheny/db/tools/RoutedAlgBuilder.java  |   3 +-
 .../polypheny/db/transaction/Transaction.java |   7 +-
 .../db/test/RexProgramBuilderBase.java        |   4 +-
 .../db/processing/AbstractQueryProcessor.java |   2 +-
 .../processing/ConstraintEnforceAttacher.java |  13 +-
 .../db/processing/DataContextImpl.java        |  22 ++-
 .../db/processing/DataMigratorImpl.java       |  20 +--
 .../db/routing/routers/BaseRouter.java        |  16 +--
 .../db/routing/routers/DmlRouterImpl.java     |  29 ++--
 .../db/transaction/StatementImpl.java         |   4 +-
 .../db/transaction/TransactionImpl.java       |  19 +--
 .../db/view/MaterializedViewManagerImpl.java  |   2 +-
 .../org/polypheny/db/misc/AlgBuilderTest.java |   2 +-
 .../statistics/StatisticsManagerImpl.java     |   4 +-
 .../db/cypher/CypherProcessorImpl.java        |   2 +-
 .../cypher2alg/CypherToAlgConverter.java      |   2 +-
 .../db/languages/MqlProcessorImpl.java        |   2 +-
 .../org/polypheny/db/tools/PigAlgBuilder.java |   4 +-
 .../polypheny/db/test/PigRelBuilderTest.java  |   2 +-
 .../org/polypheny/db/catalog/PolyCatalog.java |   6 +-
 .../{Snapshot.java => FullSnapshot.java}      |  12 +-
 .../allocation/AllocationSnapshot.java        |  23 ----
 .../logical/LogicalDocumentSnapshot.java      |  94 -------------
 .../snapshot/logical/LogicalFullSnapshot.java |  81 -----------
 .../logical/LogicalGraphSnapshot.java         |  69 ----------
 .../logical/LogicalRelationalSnapshot.java    | 126 ------------------
 .../snapshot/logical/LogicalSnapshot.java     |  27 ----
 .../physical/DocumentOnlySnapshot.java        |  21 ---
 .../snapshot/physical/GraphOnlySnapshot.java  |  21 ---
 .../snapshot/physical/PhysicalSnapshot.java   |  23 ----
 .../physical/RelationalOnlySnapshot.java      |  21 ---
 .../java/org/polypheny/db/restapi/Rest.java   |   8 +-
 .../polypheny/db/sql/SqlProcessorImpl.java    |   6 +-
 .../org/polypheny/db/sql/InterpreterTest.java |   3 +-
 .../org/polypheny/db/sql/RexExecutorTest.java |   3 +-
 43 files changed, 186 insertions(+), 626 deletions(-)
 create mode 100644 core/src/main/java/org/polypheny/db/catalog/Snapshot.java
 rename plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/{Snapshot.java => FullSnapshot.java} (74%)
 delete mode 100644 plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/allocation/AllocationSnapshot.java
 delete mode 100644 plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/logical/LogicalDocumentSnapshot.java
 delete mode 100644 plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/logical/LogicalFullSnapshot.java
 delete mode 100644 plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/logical/LogicalGraphSnapshot.java
 delete mode 100644 plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/logical/LogicalRelationalSnapshot.java
 delete mode 100644 plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/logical/LogicalSnapshot.java
 delete mode 100644 plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/physical/DocumentOnlySnapshot.java
 delete mode 100644 plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/physical/GraphOnlySnapshot.java
 delete mode 100644 plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/physical/PhysicalSnapshot.java
 delete mode 100644 plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/physical/RelationalOnlySnapshot.java

diff --git a/core/src/main/java/org/polypheny/db/adapter/DataContext.java b/core/src/main/java/org/polypheny/db/adapter/DataContext.java
index bb0082faaf..9a0139dc96 100644
--- a/core/src/main/java/org/polypheny/db/adapter/DataContext.java
+++ b/core/src/main/java/org/polypheny/db/adapter/DataContext.java
@@ -32,7 +32,7 @@ import org.apache.calcite.linq4j.tree.ParameterExpression;
 import org.polypheny.db.adapter.java.JavaTypeFactory;
 import org.polypheny.db.algebra.type.AlgDataType;
-import org.polypheny.db.schema.PolyphenyDbSchema;
+import org.polypheny.db.catalog.Snapshot;
 import org.polypheny.db.transaction.Statement;
 import org.polypheny.db.util.Advisor;
@@ -49,7 +49,7 @@ public interface DataContext {

    /**
     * Returns a sub-schema with a given name, or null.
     */
-    PolyphenyDbSchema getRootSchema();
+    Snapshot getSnapshot();

    /**
     * Returns the type factory.
@@ -209,7 +209,7 @@ public T get( DataContext dataContext ) {
    class SlimDataContext implements DataContext, Serializable {

        @Override
-        public PolyphenyDbSchema getRootSchema() {
+        public Snapshot getSnapshot() {
            return null;
        }

diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalConstraintEnforcer.java b/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalConstraintEnforcer.java
index d960924783..1d15062181 100644
--- a/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalConstraintEnforcer.java
+++ b/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalConstraintEnforcer.java
@@ -160,8 +160,8 @@ private static EnforcementInformation getControl( AlgNode node, Statement statem
         if ( RuntimeConfig.FOREIGN_KEY_ENFORCEMENT.getBoolean() ) {
             for ( final CatalogForeignKey foreignKey : Stream.concat( foreignKeys.stream(), exportedKeys.stream() ).collect( Collectors.toList() ) ) {
                 builder.clear();
-                final LogicalTable scanOptTable = statement.getDataContext().getRootSchema().getTable( foreignKey.tableId );
-                final LogicalTable refOptTable = statement.getDataContext().getRootSchema().getTable( foreignKey.referencedKeyTableId );
+                final LogicalTable scanOptTable = statement.getDataContext().getSnapshot().getTable( foreignKey.tableId );
+                final LogicalTable refOptTable = statement.getDataContext().getSnapshot().getTable( foreignKey.referencedKeyTableId );
                 final AlgNode scan = LogicalRelScan.create( modify.getCluster(), scanOptTable );
                 final LogicalRelScan ref = LogicalRelScan.create( modify.getCluster(), refOptTable );

diff --git a/core/src/main/java/org/polypheny/db/catalog/Catalog.java b/core/src/main/java/org/polypheny/db/catalog/Catalog.java
index e5ba93e21b..37fc4ab089 100644
--- a/core/src/main/java/org/polypheny/db/catalog/Catalog.java
+++ b/core/src/main/java/org/polypheny/db/catalog/Catalog.java
@@ -1912,4 +1912,6 @@ protected final boolean isValidIdentifier( final String str ) {

     public abstract void clear();

+    public abstract Snapshot getSnapshot( long id );
+
 }

diff --git a/core/src/main/java/org/polypheny/db/catalog/Snapshot.java b/core/src/main/java/org/polypheny/db/catalog/Snapshot.java
new file mode 100644
index 0000000000..c9afc490df
--- /dev/null
+++ b/core/src/main/java/org/polypheny/db/catalog/Snapshot.java
@@ -0,0 +1,72 @@
+/*
+ * Copyright 2019-2023 The Polypheny Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.polypheny.db.catalog;
+
+import java.util.List;
+import org.polypheny.db.catalog.entity.CatalogNamespace;
+import org.polypheny.db.catalog.entity.LogicalCollection;
+import org.polypheny.db.catalog.entity.allocation.AllocationCollection;
+import org.polypheny.db.catalog.entity.allocation.AllocationGraph;
+import org.polypheny.db.catalog.entity.allocation.AllocationTable;
+import org.polypheny.db.catalog.entity.logical.LogicalGraph;
+import org.polypheny.db.catalog.entity.logical.LogicalTable;
+import org.polypheny.db.catalog.entity.physical.PhysicalCollection;
+import org.polypheny.db.catalog.entity.physical.PhysicalGraph;
+import org.polypheny.db.catalog.entity.physical.PhysicalTable;
+import org.polypheny.db.catalog.logistic.Pattern;
+
+public interface Snapshot {
+
+    CatalogNamespace getNamespace( long id );
+
+    CatalogNamespace getNamespace( String name );
+
+    List getNamespaces( Pattern name );
+
+    //// ENTITIES
+
+    LogicalTable getLogicalTable( long id );
+
+    LogicalTable getLogicalTable( long namespaceId, String name );
+
+    List getLogicalTables( long namespaceId, Pattern name );
+
+    LogicalCollection getLogicalCollection( long id );
+
+    LogicalCollection getLogicalCollection( long namespaceId, String name );
+
+    List getLogicalCollections( long namespaceId, Pattern name );
+
+    LogicalGraph getLogicalGraph( long id );
+
+    LogicalGraph getLogicalGraph( long namespaceId, String name );
+
+    List getLogicalGraphs( long namespaceId, Pattern name );
+
+    AllocationTable getAllocTable( long id );
+
+    AllocationCollection getAllocCollection( long id );
+
+    AllocationGraph getAllocGraph( long id );
+
+    PhysicalTable getPhysicalTable( long id );
+
+    PhysicalCollection getPhysicalCollection( long id );
+
+    PhysicalGraph getPhysicalGraph( long id );
+
+}
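
Note: the new Snapshot interface gives read-only access to one consistent catalog state across all three data models. A hypothetical consumer, assuming the id-based lookup added to Catalog in this commit; the namespace id and the table name are invented:

    // Illustration only: resolving entities against a fixed snapshot instead
    // of the live catalog. The transaction implementation later in this patch
    // obtains its snapshot the same way, keyed by the transaction id.
    Snapshot snapshot = Catalog.getInstance().getSnapshot( transactionId );
    LogicalTable logical = snapshot.getLogicalTable( namespaceId, "customers" ); // hypothetical name
    PhysicalTable physical = snapshot.getPhysicalTable( physicalId );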
diff --git a/core/src/main/java/org/polypheny/db/plan/VisitorDataContext.java b/core/src/main/java/org/polypheny/db/plan/VisitorDataContext.java
index 4f0555f212..4733aa8471 100644
--- a/core/src/main/java/org/polypheny/db/plan/VisitorDataContext.java
+++ b/core/src/main/java/org/polypheny/db/plan/VisitorDataContext.java
@@ -44,6 +44,7 @@ import org.polypheny.db.algebra.AlgNode;
 import org.polypheny.db.algebra.logical.relational.LogicalFilter;
 import org.polypheny.db.algebra.type.AlgDataType;
+import org.polypheny.db.catalog.Snapshot;
 import org.polypheny.db.nodes.Function;
 import org.polypheny.db.nodes.Function.FunctionType;
 import org.polypheny.db.nodes.Operator;
@@ -51,7 +52,6 @@ import org.polypheny.db.rex.RexInputRef;
 import org.polypheny.db.rex.RexLiteral;
 import org.polypheny.db.rex.RexNode;
-import org.polypheny.db.schema.PolyphenyDbSchema;
 import org.polypheny.db.transaction.Statement;
 import org.polypheny.db.util.NlsString;
 import org.polypheny.db.util.Pair;
@@ -72,7 +72,7 @@ public VisitorDataContext( Object[] values ) {

     @Override
-    public PolyphenyDbSchema getRootSchema() {
+    public Snapshot getSnapshot() {
         throw new UnsupportedOperationException( "This operation is not supported for " + getClass().getSimpleName() );
     }

diff --git a/core/src/main/java/org/polypheny/db/schema/PolySchemaBuilder.java b/core/src/main/java/org/polypheny/db/schema/PolySchemaBuilder.java
index 8e33f34833..6f2981b78c 100644
--- a/core/src/main/java/org/polypheny/db/schema/PolySchemaBuilder.java
+++ b/core/src/main/java/org/polypheny/db/schema/PolySchemaBuilder.java
@@ -32,6 +32,7 @@ import org.polypheny.db.algebra.type.AlgDataTypeFactory.Builder;
 import org.polypheny.db.algebra.type.AlgDataTypeImpl;
 import org.polypheny.db.catalog.Catalog;
+import org.polypheny.db.catalog.Snapshot;
 import org.polypheny.db.catalog.entity.CatalogAdapter;
 import org.polypheny.db.catalog.entity.CatalogCollectionPlacement;
 import org.polypheny.db.catalog.entity.CatalogColumnPlacement;
@@ -55,7 +56,7 @@ public class PolySchemaBuilder implements PropertyChangeListener {

     private final static PolySchemaBuilder INSTANCE = new PolySchemaBuilder();

-    private AbstractPolyphenyDbSchema current;
+    private Snapshot current;

     private boolean isOutdated = true;

@@ -69,7 +70,7 @@ public static PolySchemaBuilder getInstance() {
     }


-    public AbstractPolyphenyDbSchema getCurrent() {
+    public Snapshot getCurrent() {
         if ( !RuntimeConfig.SCHEMA_CACHING.getBoolean() ) {
             return buildSchema();
         }
@@ -80,7 +81,7 @@ public AbstractPolyphenyDbSchema getCurrent() {
     }


-    private synchronized AbstractPolyphenyDbSchema buildSchema() {
+    private synchronized Snapshot buildSchema() {
         Catalog catalog = Catalog.getInstance();
         CatalogDatabase catalogDatabase = catalog.getDatabase( Catalog.defaultDatabaseId );
@@ -104,7 +105,8 @@ private synchronized AbstractPolyphenyDbSchema buildSchema() {
         Map, CatalogEntityPlacement> physicalGraph = buildPhysicalGraphs( catalog, catalogDatabase );

         isOutdated = false;
-        return new SimplePolyphenyDbSchema( logicalRelational, logicalDocument, logicalGraph, physicalRelational, physicalDocument, physicalGraph );
+        return null;
+        //return new SimplePolyphenyDbSchema( logicalRelational, logicalDocument, logicalGraph, physicalRelational, physicalDocument, physicalGraph );
     }

diff --git a/core/src/main/java/org/polypheny/db/schema/Schemas.java b/core/src/main/java/org/polypheny/db/schema/Schemas.java
index e026ecea1e..1f0d8c2ddb 100644
--- a/core/src/main/java/org/polypheny/db/schema/Schemas.java
+++ b/core/src/main/java/org/polypheny/db/schema/Schemas.java
@@ -54,6 +54,7 @@ import org.polypheny.db.algebra.type.AlgDataType;
 import org.polypheny.db.algebra.type.AlgDataTypeFactory;
 import org.polypheny.db.algebra.type.AlgProtoDataType;
+import org.polypheny.db.catalog.Snapshot;
 import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.config.PolyphenyDbConnectionConfig;
 import org.polypheny.db.config.PolyphenyDbConnectionConfigImpl;
@@ -191,7 +192,7 @@ public static Queryable queryable( DataContext root, Class clazz, Stri
      * Returns a {@link Queryable}, given a fully-qualified table name as an iterable.
      */
     public static Queryable queryable( DataContext root, Class clazz, Iterable names ) {
-        PolyphenyDbSchema schema = root.getRootSchema();
+        PolyphenyDbSchema schema = root.getSnapshot();
         return queryable( root, schema, clazz, names.iterator().next() );
@@ -245,7 +246,7 @@ private static int[] identity( int count ) {
      * Returns an {@link org.apache.calcite.linq4j.Enumerable} over object arrays, given a fully-qualified table name which leads to a {@link ScannableEntity}.
      */
     public static LogicalTable table( DataContext root, String... names ) {
-        PolyphenyDbSchema schema = root.getRootSchema();
+        PolyphenyDbSchema schema = root.getSnapshot();
         return schema.getTable( List.of( names ) );
     }
@@ -441,7 +442,7 @@ private static class DummyDataContext implements DataContext {

         @Override
-        public PolyphenyDbSchema getRootSchema() {
+        public Snapshot getSnapshot() {
             return rootSchema;
         }

diff --git a/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java b/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java
index b7314bd217..d5512f0d00 100644
--- a/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java
+++ b/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java
@@ -290,13 +290,13 @@ public Void apply( AlgOptCluster cluster, PolyphenyDbSchema rootSchema ) {

     public static AlgBuilder create( Statement statement ) {
         final RexBuilder rexBuilder = new RexBuilder( statement.getTransaction().getTypeFactory() );
-        final AlgOptCluster cluster = AlgOptCluster.create( statement.getQueryProcessor().getPlanner(), rexBuilder, null, statement.getDataContext().getRootSchema() );
+        final AlgOptCluster cluster = AlgOptCluster.create( statement.getQueryProcessor().getPlanner(), rexBuilder, null, statement.getTransaction().getSnapshot() );
         return create( statement, cluster );
     }


     public static AlgBuilder create( Statement statement, AlgOptCluster cluster ) {
-        return new AlgBuilder( Contexts.EMPTY_CONTEXT, cluster, statement.getTransaction().getCatalogReader().getRootSchema() );
+        return new AlgBuilder( Contexts.EMPTY_CONTEXT, cluster, statement.getTransaction().getSnapshot() );
     }

diff --git a/core/src/main/java/org/polypheny/db/tools/RoutedAlgBuilder.java b/core/src/main/java/org/polypheny/db/tools/RoutedAlgBuilder.java
index 1dde205b4e..635b86aee7 100644
--- a/core/src/main/java/org/polypheny/db/tools/RoutedAlgBuilder.java
+++ b/core/src/main/java/org/polypheny/db/tools/RoutedAlgBuilder.java
@@ -28,7 +28,6 @@ import org.polypheny.db.algebra.type.AlgDataType;
 import org.polypheny.db.catalog.entity.CatalogColumnPlacement;
 import org.polypheny.db.plan.AlgOptCluster;
-import org.polypheny.db.plan.AlgOptSchema;
 import org.polypheny.db.plan.Context;
 import org.polypheny.db.plan.Contexts;
 import org.polypheny.db.processing.DeepCopyShuttle;
@@ -53,7 +52,7 @@ public RoutedAlgBuilder( Context context, AlgOptCluster cluster, PolyphenyDbSche

     public static RoutedAlgBuilder create( Statement statement, AlgOptCluster cluster ) {
-        return new RoutedAlgBuilder( Contexts.EMPTY_CONTEXT, cluster, statement.getTransaction().getCatalogReader().getRootSchema() );
+        return new RoutedAlgBuilder( Contexts.EMPTY_CONTEXT, cluster, statement.getTransaction().getSnapshot() );
     }

diff --git a/core/src/main/java/org/polypheny/db/transaction/Transaction.java b/core/src/main/java/org/polypheny/db/transaction/Transaction.java
index 2800ea35cc..ab811f7d0d 100644
--- a/core/src/main/java/org/polypheny/db/transaction/Transaction.java
+++ b/core/src/main/java/org/polypheny/db/transaction/Transaction.java
@@ -22,14 +22,13 @@ import java.util.concurrent.atomic.AtomicBoolean;
 import org.polypheny.db.adapter.Adapter;
 import org.polypheny.db.adapter.java.JavaTypeFactory;
+import org.polypheny.db.catalog.Snapshot;
 import org.polypheny.db.catalog.entity.CatalogSchema;
 import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.information.InformationManager;
 import org.polypheny.db.languages.QueryLanguage;
-import org.polypheny.db.prepare.PolyphenyDbCatalogReader;
 import org.polypheny.db.processing.DataMigrator;
 import org.polypheny.db.processing.Processor;
-import org.polypheny.db.schema.PolyphenyDbSchema;

 public interface Transaction {
@@ -48,14 +47,12 @@ public interface Transaction {

     List getInvolvedAdapters();

-    PolyphenyDbSchema getSchema();
+    Snapshot getSnapshot();

     boolean isActive();

     JavaTypeFactory getTypeFactory();

-    PolyphenyDbCatalogReader getCatalogReader();
-
     Processor getProcessor( QueryLanguage language );

     boolean isAnalyze();
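
Note: with getSchema() and getCatalogReader() removed, a transaction exposes exactly one entry point into catalog state. The mechanical migration this commit applies everywhere, shown once as a sketch (rexBuilder and planner as in the AlgBuilder hunk above):

    // before: statement.getTransaction().getCatalogReader().getRootSchema()
    // after:
    Snapshot snapshot = statement.getTransaction().getSnapshot();
    AlgOptCluster cluster = AlgOptCluster.create( statement.getQueryProcessor().getPlanner(), rexBuilder, null, snapshot );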
org.polypheny.db.algebra.logical.common.LogicalConstraintEnforcer; @@ -52,9 +51,6 @@ import org.polypheny.db.algebra.logical.relational.LogicalValues; import org.polypheny.db.algebra.operators.OperatorName; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.logistic.ConstraintType; -import org.polypheny.db.catalog.logistic.EntityType; -import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.catalog.entity.CatalogColumn; import org.polypheny.db.catalog.entity.CatalogConstraint; import org.polypheny.db.catalog.entity.CatalogForeignKey; @@ -66,6 +62,9 @@ import org.polypheny.db.catalog.exceptions.UnknownDatabaseException; import org.polypheny.db.catalog.exceptions.UnknownSchemaException; import org.polypheny.db.catalog.exceptions.UnknownUserException; +import org.polypheny.db.catalog.logistic.ConstraintType; +import org.polypheny.db.catalog.logistic.EntityType; +import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.config.Config; import org.polypheny.db.config.Config.ConfigListener; import org.polypheny.db.config.RuntimeConfig; @@ -74,8 +73,6 @@ import org.polypheny.db.information.InformationPage; import org.polypheny.db.information.InformationQueryPlan; import org.polypheny.db.languages.OperatorRegistry; -import org.polypheny.db.plan.AlgOptEntity; -import org.polypheny.db.plan.AlgOptSchema; import org.polypheny.db.plan.AlgOptUtil; import org.polypheny.db.rex.RexBuilder; import org.polypheny.db.rex.RexDynamicParam; @@ -333,7 +330,7 @@ public static AlgRoot enforceConstraintBeforeQuery( AlgRoot logicalRoot, Stateme final RexBuilder rexBuilder = root.getCluster().getRexBuilder(); for ( final CatalogForeignKey foreignKey : foreignKeys ) { - final LogicalTable algOptEntity = statement.getDataContext().getRootSchema().getTable( foreignKey.referencedKeyTableId); + final LogicalTable algOptEntity = statement.getDataContext().getSnapshot().getTable( foreignKey.referencedKeyTableId ); final LogicalRelScan scan = LogicalRelScan.create( root.getCluster(), algOptEntity ); RexNode joinCondition = rexBuilder.makeLiteral( true ); builder.push( input ); diff --git a/dbms/src/main/java/org/polypheny/db/processing/DataContextImpl.java b/dbms/src/main/java/org/polypheny/db/processing/DataContextImpl.java index bf665cbfb2..1a28646ba0 100644 --- a/dbms/src/main/java/org/polypheny/db/processing/DataContextImpl.java +++ b/dbms/src/main/java/org/polypheny/db/processing/DataContextImpl.java @@ -30,8 +30,8 @@ import org.polypheny.db.adapter.DataContext; import org.polypheny.db.adapter.java.JavaTypeFactory; import org.polypheny.db.algebra.type.AlgDataType; +import org.polypheny.db.catalog.Snapshot; import org.polypheny.db.runtime.Hook; -import org.polypheny.db.schema.PolyphenyDbSchema; import org.polypheny.db.transaction.Statement; import org.polypheny.db.util.Holder; @@ -42,7 +42,9 @@ public class DataContextImpl implements DataContext { private final Map map; - private final PolyphenyDbSchema rootSchema; + + @Getter + private final Snapshot snapshot; @Getter private final QueryProvider queryProvider; @Getter @@ -67,10 +69,10 @@ public class DataContextImpl implements DataContext { private boolean isMixedModel = false; - private DataContextImpl( QueryProvider queryProvider, Map parameters, PolyphenyDbSchema rootSchema, JavaTypeFactory typeFactory, Statement statement, Map parameterTypes, List> parameterValues ) { + private DataContextImpl( QueryProvider queryProvider, Map parameters, Snapshot snapshot, JavaTypeFactory typeFactory, Statement 
statement, Map parameterTypes, List> parameterValues ) { this.queryProvider = queryProvider; this.typeFactory = typeFactory; - this.rootSchema = rootSchema; + this.snapshot = snapshot; this.statement = statement; this.map = getMedaInfo( parameters ); this.parameterTypes = parameterTypes; @@ -79,8 +81,8 @@ private DataContextImpl( QueryProvider queryProvider, Map parame } - public DataContextImpl( QueryProvider queryProvider, Map parameters, PolyphenyDbSchema rootSchema, JavaTypeFactory typeFactory, Statement statement ) { - this( queryProvider, parameters, rootSchema, typeFactory, statement, new HashMap<>(), new LinkedList<>() ); + public DataContextImpl( QueryProvider queryProvider, Map parameters, Snapshot snapshot, JavaTypeFactory typeFactory, Statement statement ) { + this( queryProvider, parameters, snapshot, typeFactory, statement, new HashMap<>(), new LinkedList<>() ); } @@ -174,7 +176,7 @@ public void resetParameterValues() { @Override public DataContext switchContext() { if ( otherParameterValues.containsKey( i ) ) { - return new DataContextImpl( queryProvider, map, rootSchema, typeFactory, statement, parameterTypes, otherParameterValues.get( i++ ) ); + return new DataContextImpl( queryProvider, map, snapshot, typeFactory, statement, parameterTypes, otherParameterValues.get( i++ ) ); } return this; } @@ -198,10 +200,4 @@ public void resetContext() { } - @Override - public PolyphenyDbSchema getRootSchema() { - return rootSchema; - } - - } diff --git a/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java b/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java index ad34bb4381..558ff95101 100644 --- a/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java +++ b/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java @@ -321,12 +321,12 @@ public AlgRoot buildDeleteStatement( Statement statement, List columnNames = new LinkedList<>(); @@ -343,7 +343,7 @@ public AlgRoot buildDeleteStatement( Statement statement, List> scans = collections.stream() .map( t -> { RoutedAlgBuilder algBuilder = RoutedAlgBuilder.create( statement, alg.getCluster() ); - LogicalCollection collection = statement.getTransaction().getCatalogReader().getRootSchema().getCollection( List.of( t.getNamespaceName(), t.name ) ); + LogicalCollection collection = statement.getTransaction().getSnapshot().getCollection( List.of( t.getNamespaceName(), t.name ) ); AlgNode scan = algBuilder.documentScan( collection ).build(); routeDocument( algBuilder, (AlgNode & DocumentAlg) scan, statement ); return Pair.of( t.name, algBuilder.build() ); @@ -495,10 +495,10 @@ private AlgNode handleGraphOnDocument( LogicalLpgScan alg, CatalogSchema namespa public AlgNode getRelationalScan( LogicalLpgScan alg, int adapterId, Statement statement ) { CatalogGraphMapping mapping = Catalog.getInstance().getGraphMapping( alg.entity.id ); - PhysicalTable nodesTable = statement.getDataContext().getRootSchema().getTable( mapping.nodesId ).unwrap( PhysicalTable.class ); - PhysicalTable nodePropertiesTable = statement.getDataContext().getRootSchema().getTable( mapping.nodesPropertyId ).unwrap( PhysicalTable.class ); - PhysicalTable edgesTable = statement.getDataContext().getRootSchema().getTable( mapping.edgesId ).unwrap( PhysicalTable.class ); - PhysicalTable edgePropertiesTable = statement.getDataContext().getRootSchema().getTable( mapping.edgesPropertyId ).unwrap( PhysicalTable.class ); + PhysicalTable nodesTable = statement.getDataContext().getSnapshot().getTable( mapping.nodesId ).unwrap( 
PhysicalTable.class ); + PhysicalTable nodePropertiesTable = statement.getDataContext().getSnapshot().getTable( mapping.nodesPropertyId ).unwrap( PhysicalTable.class ); + PhysicalTable edgesTable = statement.getDataContext().getSnapshot().getTable( mapping.edgesId ).unwrap( PhysicalTable.class ); + PhysicalTable edgePropertiesTable = statement.getDataContext().getSnapshot().getTable( mapping.edgesPropertyId ).unwrap( PhysicalTable.class ); AlgNode node = buildSubstitutionJoin( alg, nodesTable, nodePropertiesTable ); @@ -520,7 +520,7 @@ protected CatalogEntity getSubstitutionTable( Statement statement, long tableId, ), nodes.name + "_" + nodes.partitionProperty.partitionIds.get( 0 ) ); - return statement.getDataContext().getRootSchema().getTable( qualifiedTableName ); + return statement.getDataContext().getSnapshot().getTable( qualifiedTableName ); } @@ -542,7 +542,7 @@ protected AlgNode buildSubstitutionJoin( AlgNode alg, CatalogEntity nodesTable, protected RoutedAlgBuilder handleDocumentScan( DocumentScan alg, Statement statement, RoutedAlgBuilder builder, Integer adapterId ) { Catalog catalog = Catalog.getInstance(); - PolyphenyDbCatalogReader reader = statement.getTransaction().getCatalogReader(); + PolyphenyDbCatalogReader reader = statement.getTransaction().getSnapshot(); if ( alg.entity.namespaceType != NamespaceType.DOCUMENT ) { if ( alg.entity.namespaceType == NamespaceType.GRAPH ) { diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java b/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java index bfb98ffd42..6be776ddb6 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java @@ -33,14 +33,13 @@ import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.AlgShuttleImpl; import org.polypheny.db.algebra.constant.Kind; -import org.polypheny.db.algebra.core.relational.RelModify; -import org.polypheny.db.algebra.core.common.Modify.Operation; import org.polypheny.db.algebra.core.ModifyCollect; import org.polypheny.db.algebra.core.Values; import org.polypheny.db.algebra.core.common.BatchIterator; import org.polypheny.db.algebra.core.common.ConditionalExecute; import org.polypheny.db.algebra.core.common.ConstraintEnforcer; import org.polypheny.db.algebra.core.common.Modify; +import org.polypheny.db.algebra.core.common.Modify.Operation; import org.polypheny.db.algebra.core.document.DocumentAlg; import org.polypheny.db.algebra.core.document.DocumentProject; import org.polypheny.db.algebra.core.document.DocumentScan; @@ -48,6 +47,7 @@ import org.polypheny.db.algebra.core.lpg.LpgProject; import org.polypheny.db.algebra.core.lpg.LpgScan; import org.polypheny.db.algebra.core.lpg.LpgValues; +import org.polypheny.db.algebra.core.relational.RelModify; import org.polypheny.db.algebra.logical.common.LogicalBatchIterator; import org.polypheny.db.algebra.logical.common.LogicalConditionalExecute; import org.polypheny.db.algebra.logical.common.LogicalConstraintEnforcer; @@ -67,9 +67,9 @@ import org.polypheny.db.algebra.logical.lpg.LogicalLpgTransformer; import org.polypheny.db.algebra.logical.lpg.LogicalLpgValues; import org.polypheny.db.algebra.logical.relational.LogicalFilter; -import org.polypheny.db.algebra.logical.relational.LogicalRelModify; import org.polypheny.db.algebra.logical.relational.LogicalModifyCollect; import org.polypheny.db.algebra.logical.relational.LogicalProject; +import 
org.polypheny.db.algebra.logical.relational.LogicalRelModify; import org.polypheny.db.algebra.logical.relational.LogicalRelScan; import org.polypheny.db.algebra.logical.relational.LogicalValues; import org.polypheny.db.algebra.type.AlgDataType; @@ -78,28 +78,27 @@ import org.polypheny.db.algebra.type.AlgDataTypeFieldImpl; import org.polypheny.db.algebra.type.AlgRecordType; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.logistic.EntityType; -import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.catalog.entity.CatalogAdapter; -import org.polypheny.db.catalog.entity.LogicalCollection; import org.polypheny.db.catalog.entity.CatalogCollectionMapping; import org.polypheny.db.catalog.entity.CatalogCollectionPlacement; import org.polypheny.db.catalog.entity.CatalogColumn; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; import org.polypheny.db.catalog.entity.CatalogEntity; -import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.catalog.entity.CatalogGraphMapping; import org.polypheny.db.catalog.entity.CatalogGraphPlacement; import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; +import org.polypheny.db.catalog.entity.LogicalCollection; +import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.entity.physical.PhysicalCollection; import org.polypheny.db.catalog.entity.physical.PhysicalTable; import org.polypheny.db.catalog.exceptions.UnknownColumnException; +import org.polypheny.db.catalog.logistic.EntityType; +import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.catalog.refactor.ModifiableEntity; import org.polypheny.db.partition.PartitionManager; import org.polypheny.db.partition.PartitionManagerFactory; import org.polypheny.db.plan.AlgOptCluster; -import org.polypheny.db.prepare.AlgOptEntityImpl; import org.polypheny.db.prepare.PolyphenyDbCatalogReader; import org.polypheny.db.prepare.Prepare.CatalogReader; import org.polypheny.db.processing.WhereClauseVisitor; @@ -183,7 +182,7 @@ public AlgNode routeDml( LogicalRelModify modify, Statement statement ) { Map newParameterValues = new HashMap<>(); for ( CatalogColumnPlacement pkPlacement : pkPlacements ) { - CatalogReader catalogReader = statement.getTransaction().getCatalogReader(); + CatalogReader catalogReader = statement.getTransaction().getSnapshot(); // Get placements on store List placementsOnAdapter = catalog.getColumnPlacementsOnAdapterPerTable( pkPlacement.adapterId, catalogTable.id ); @@ -713,7 +712,7 @@ public AlgNode handleBatchIterator( AlgNode alg, Statement statement, LogicalQue @Override public AlgNode routeDocumentDml( LogicalDocumentModify alg, Statement statement, LogicalQueryInformation queryInformation, Integer adapterId ) { - PolyphenyDbCatalogReader reader = statement.getTransaction().getCatalogReader(); + PolyphenyDbCatalogReader reader = statement.getTransaction().getSnapshot(); LogicalCollection collection = alg.entity.unwrap( LogicalCollection.class ); @@ -766,7 +765,7 @@ public AlgNode routeGraphDml( LogicalLpgModify alg, Statement statement ) { @Override public AlgNode routeGraphDml( LogicalLpgModify alg, Statement statement, LogicalGraph catalogGraph, List placements ) { - PolyphenyDbCatalogReader reader = statement.getTransaction().getCatalogReader(); + PolyphenyDbCatalogReader reader = statement.getTransaction().getSnapshot(); List modifies = new ArrayList<>(); boolean usedSubstitution = 
false; @@ -792,7 +791,7 @@ public AlgNode routeGraphDml( LogicalLpgModify alg, Statement statement, Logical alg.getCluster(), alg.getTraitSet(), graph, - statement.getTransaction().getCatalogReader(), + statement.getTransaction().getSnapshot(), buildGraphDml( alg.getInput(), statement, adapterId ), alg.operation, alg.ids, @@ -949,7 +948,7 @@ private RoutedAlgBuilder attachDocUpdate( AlgNode alg, Statement statement, Cata private List attachRelationalDocInsert( LogicalDocumentModify alg, Statement statement, CatalogEntity collectionTable, LogicalQueryInformation queryInformation, int adapterId ) { if ( alg.getInput() instanceof DocumentValues ) { // simple value insert - AlgNode values = ((LogicalDocumentValues) alg.getInput()).getRelationalEquivalent( List.of(), List.of( collectionTable ), statement.getTransaction().getCatalogReader() ).get( 0 ); + AlgNode values = ((LogicalDocumentValues) alg.getInput()).getRelationalEquivalent( List.of(), List.of( collectionTable ), statement.getTransaction().getSnapshot() ).get( 0 ); return List.of( getModify( collectionTable, values, statement, alg.operation, null, null ) ); } @@ -970,7 +969,7 @@ private AlgNode attachRelationalModify( LogicalLpgModify alg, int adapterId, Sta case INSERT: if ( alg.getInput() instanceof LpgValues ) { // simple value insert - inputs.addAll( ((LogicalLpgValues) alg.getInput()).getRelationalEquivalent( List.of(), List.of( nodesTable, nodePropertiesTable, edgesTable, edgePropertiesTable ), statement.getTransaction().getCatalogReader() ) ); + inputs.addAll( ((LogicalLpgValues) alg.getInput()).getRelationalEquivalent( List.of(), List.of( nodesTable, nodePropertiesTable, edgesTable, edgePropertiesTable ), statement.getTransaction().getSnapshot() ) ); } if ( alg.getInput() instanceof LpgProject ) { return attachRelationalRelatedInsert( alg, statement, nodesTable, nodePropertiesTable, edgesTable, edgePropertiesTable, adapterId ); @@ -1335,7 +1334,7 @@ private AlgBuilder buildDml( private AlgBuilder handleSelectFromOtherTable( RoutedAlgBuilder builder, LogicalTable catalogTable, Statement statement ) { LogicalTable fromTable = catalogTable; // Select from other table - if ( statement.getDataContext().getRootSchema().isPartitioned( fromTable.id )) { + if ( statement.getDataContext().getSnapshot().isPartitioned( fromTable.id ) ) { throw new UnsupportedOperationException( "DMLs from other partitioned tables are not supported" ); } diff --git a/dbms/src/main/java/org/polypheny/db/transaction/StatementImpl.java b/dbms/src/main/java/org/polypheny/db/transaction/StatementImpl.java index 4ff6444b05..f3e9415b7a 100644 --- a/dbms/src/main/java/org/polypheny/db/transaction/StatementImpl.java +++ b/dbms/src/main/java/org/polypheny/db/transaction/StatementImpl.java @@ -89,7 +89,7 @@ public DataContext getDataContext() { dataContext = new DataContextImpl( new QueryProviderImpl(), map, - transaction.getSchema(), + transaction.getSnapshot(), transaction.getTypeFactory(), this ); } @@ -101,7 +101,7 @@ public DataContext getDataContext() { public ContextImpl getPrepareContext() { if ( prepareContext == null ) { prepareContext = new ContextImpl( - transaction.getSchema(), + transaction.getSnapshot(), getDataContext(), transaction.getDefaultSchema().name, transaction.getDatabase().id, diff --git a/dbms/src/main/java/org/polypheny/db/transaction/TransactionImpl.java b/dbms/src/main/java/org/polypheny/db/transaction/TransactionImpl.java index 4af7f4f851..722bdefb17 100644 --- a/dbms/src/main/java/org/polypheny/db/transaction/TransactionImpl.java +++
b/dbms/src/main/java/org/polypheny/db/transaction/TransactionImpl.java @@ -39,26 +39,23 @@ import org.polypheny.db.algebra.constant.Kind; import org.polypheny.db.algebra.logical.common.LogicalConstraintEnforcer.EnforcementInformation; import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.Snapshot; import org.polypheny.db.catalog.entity.CatalogDatabase; import org.polypheny.db.catalog.entity.CatalogKey.EnforcementTime; import org.polypheny.db.catalog.entity.CatalogSchema; -import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.entity.CatalogUser; -import org.polypheny.db.catalog.exceptions.NoTablePrimaryKeyException; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.config.RuntimeConfig; import org.polypheny.db.information.InformationManager; import org.polypheny.db.languages.QueryLanguage; import org.polypheny.db.monitoring.core.MonitoringServiceProvider; import org.polypheny.db.monitoring.events.StatementEvent; import org.polypheny.db.prepare.JavaTypeFactoryImpl; -import org.polypheny.db.prepare.PolyphenyDbCatalogReader; import org.polypheny.db.processing.ConstraintEnforceAttacher; import org.polypheny.db.processing.DataMigrator; import org.polypheny.db.processing.DataMigratorImpl; import org.polypheny.db.processing.Processor; import org.polypheny.db.processing.QueryProcessor; -import org.polypheny.db.schema.PolySchemaBuilder; -import org.polypheny.db.schema.PolyphenyDbSchema; import org.polypheny.db.view.MaterializedViewManager; @@ -138,8 +135,8 @@ public class TransactionImpl implements Transaction, Comparable { @Override - public PolyphenyDbSchema getSchema() { - return PolySchemaBuilder.getInstance().getCurrent(); + public Snapshot getSnapshot() { + return Catalog.getInstance().getSnapshot( getId() ); } @@ -262,14 +259,6 @@ public boolean isActive() { } - @Override - public PolyphenyDbCatalogReader getCatalogReader() { - return new PolyphenyDbCatalogReader( - getSchema(), - getTypeFactory() ); - } - - @Override public Processor getProcessor( QueryLanguage language ) { // note dl, while caching the processors works in most cases, diff --git a/dbms/src/main/java/org/polypheny/db/view/MaterializedViewManagerImpl.java b/dbms/src/main/java/org/polypheny/db/view/MaterializedViewManagerImpl.java index d08741e2cc..5de4ea676f 100644 --- a/dbms/src/main/java/org/polypheny/db/view/MaterializedViewManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/view/MaterializedViewManagerImpl.java @@ -433,7 +433,7 @@ public void commitTransaction( Transaction transaction ) { private void prepareSourceRel( Statement sourceStatement, AlgCollation algCollation, AlgNode sourceRel ) { AlgOptCluster cluster = AlgOptCluster.create( sourceStatement.getQueryProcessor().getPlanner(), - new RexBuilder( sourceStatement.getTransaction().getTypeFactory() ), null, sourceStatement.getDataContext().getRootSchema() ); + new RexBuilder( sourceStatement.getTransaction().getTypeFactory() ), null, sourceStatement.getDataContext().getSnapshot() ); prepareNode( sourceRel, cluster, algCollation ); } diff --git a/dbms/src/test/java/org/polypheny/db/misc/AlgBuilderTest.java b/dbms/src/test/java/org/polypheny/db/misc/AlgBuilderTest.java index 081d8eb4d7..656bd00917 100644 --- a/dbms/src/test/java/org/polypheny/db/misc/AlgBuilderTest.java +++ b/dbms/src/test/java/org/polypheny/db/misc/AlgBuilderTest.java @@ -149,7 +149,7 @@ private static void dropTestSchema() throws SQLException { private AlgBuilder createAlgBuilder() { - final 
PolyphenyDbSchema rootSchema = transaction.getSchema(); + final PolyphenyDbSchema rootSchema = transaction.getSnapshot(); FrameworkConfig config = Frameworks.newConfigBuilder() .parserConfig( Parser.ParserConfig.DEFAULT ) .defaultSchema( rootSchema ) diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticsManagerImpl.java b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticsManagerImpl.java index 0d2fa7f4f5..d74e1f4974 100644 --- a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticsManagerImpl.java +++ b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticsManagerImpl.java @@ -524,10 +524,10 @@ private StatisticQueryResult prepareNode( QueryResult queryResult, NodeType node @Nullable private AlgNode getQueryNode( QueryResult queryResult, NodeType nodeType ) { - PolyphenyDbCatalogReader reader = statement.getTransaction().getCatalogReader(); + PolyphenyDbCatalogReader reader = statement.getTransaction().getSnapshot(); AlgBuilder relBuilder = AlgBuilder.create( statement ); final RexBuilder rexBuilder = relBuilder.getRexBuilder(); - final AlgOptCluster cluster = AlgOptCluster.create( statement.getQueryProcessor().getPlanner(), rexBuilder, null, statement.getDataContext().getRootSchema() ); + final AlgOptCluster cluster = AlgOptCluster.create( statement.getQueryProcessor().getPlanner(), rexBuilder, null, statement.getDataContext().getSnapshot() ); AlgNode queryNode; LogicalRelScan tableScan = getLogicalScan( queryResult.getEntity().id, reader, cluster ); diff --git a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/CypherProcessorImpl.java b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/CypherProcessorImpl.java index 3d75e9c057..f291ae6526 100644 --- a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/CypherProcessorImpl.java +++ b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/CypherProcessorImpl.java @@ -106,7 +106,7 @@ public AlgRoot translate( Statement statement, Node query, QueryParameters param final AlgBuilder builder = AlgBuilder.create( statement ); final RexBuilder rexBuilder = new RexBuilder( statement.getTransaction().getTypeFactory() ); - final AlgOptCluster cluster = AlgOptCluster.createGraph( statement.getQueryProcessor().getPlanner(), rexBuilder, statement.getDataContext().getRootSchema() ); + final AlgOptCluster cluster = AlgOptCluster.createGraph( statement.getQueryProcessor().getPlanner(), rexBuilder, statement.getDataContext().getSnapshot() ); final CypherToAlgConverter cypherToAlgConverter = new CypherToAlgConverter( statement, builder, rexBuilder, cluster ); diff --git a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/cypher2alg/CypherToAlgConverter.java b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/cypher2alg/CypherToAlgConverter.java index 71afa08191..c54c8caf19 100644 --- a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/cypher2alg/CypherToAlgConverter.java +++ b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/cypher2alg/CypherToAlgConverter.java @@ -93,7 +93,7 @@ public class CypherToAlgConverter { public CypherToAlgConverter( Statement statement, AlgBuilder builder, RexBuilder rexBuilder, AlgOptCluster cluster ) { - this.catalogReader = statement.getTransaction().getCatalogReader(); + this.catalogReader = statement.getTransaction().getSnapshot(); this.statement = statement; this.algBuilder = builder; this.rexBuilder = rexBuilder; diff --git 
a/plugins/mql-language/src/main/java/org/polypheny/db/languages/MqlProcessorImpl.java b/plugins/mql-language/src/main/java/org/polypheny/db/languages/MqlProcessorImpl.java index 70ee4ff9cb..2633263c52 100644 --- a/plugins/mql-language/src/main/java/org/polypheny/db/languages/MqlProcessorImpl.java +++ b/plugins/mql-language/src/main/java/org/polypheny/db/languages/MqlProcessorImpl.java @@ -147,7 +147,7 @@ public AlgRoot translate( Statement statement, Node mql, QueryParameters paramet final RexBuilder rexBuilder = new RexBuilder( statement.getTransaction().getTypeFactory() ); final AlgOptCluster cluster = AlgOptCluster.createDocument( statement.getQueryProcessor().getPlanner(), rexBuilder ); - final MqlToAlgConverter mqlToAlgConverter = new MqlToAlgConverter( this, statement.getTransaction().getCatalogReader(), cluster ); + final MqlToAlgConverter mqlToAlgConverter = new MqlToAlgConverter( this, statement.getTransaction().getSnapshot(), cluster ); AlgRoot logicalRoot = mqlToAlgConverter.convert( mql, parameters ); // Decorrelate diff --git a/plugins/pig-language/src/main/java/org/polypheny/db/tools/PigAlgBuilder.java b/plugins/pig-language/src/main/java/org/polypheny/db/tools/PigAlgBuilder.java index ec17dee21d..788ad7bfd7 100644 --- a/plugins/pig-language/src/main/java/org/polypheny/db/tools/PigAlgBuilder.java +++ b/plugins/pig-language/src/main/java/org/polypheny/db/tools/PigAlgBuilder.java @@ -77,13 +77,13 @@ public static PigAlgBuilder create( FrameworkConfig config ) { public static PigAlgBuilder create( Statement statement, AlgOptCluster cluster ) { - return new PigAlgBuilder( Contexts.EMPTY_CONTEXT, cluster, statement.getTransaction().getCatalogReader().getRootSchema() ); + return new PigAlgBuilder( Contexts.EMPTY_CONTEXT, cluster, statement.getTransaction().getSnapshot() ); } public static PigAlgBuilder create( Statement statement ) { final RexBuilder rexBuilder = new RexBuilder( statement.getTransaction().getTypeFactory() ); - final AlgOptCluster cluster = AlgOptCluster.create( statement.getQueryProcessor().getPlanner(), rexBuilder, traitSet, rootSchema ); + final AlgOptCluster cluster = AlgOptCluster.create( statement.getQueryProcessor().getPlanner(), rexBuilder, null, statement.getTransaction().getSnapshot() ); return create( statement, cluster ); } diff --git a/plugins/pig-language/src/test/java/org/polypheny/db/test/PigRelBuilderTest.java b/plugins/pig-language/src/test/java/org/polypheny/db/test/PigRelBuilderTest.java index ab1bb6cbf6..5004755324 100644 --- a/plugins/pig-language/src/test/java/org/polypheny/db/test/PigRelBuilderTest.java +++ b/plugins/pig-language/src/test/java/org/polypheny/db/test/PigRelBuilderTest.java @@ -52,7 +52,7 @@ public class PigRelBuilderTest { public static Frameworks.ConfigBuilder config() { Transaction transaction = null; // TODO MV: FIX - final SchemaPlus rootSchema = transaction.getSchema().plus(); + final SchemaPlus rootSchema = transaction.getSnapshot().plus(); Frameworks.ConfigBuilder configBuilder = Frameworks.newConfigBuilder() .parserConfig( ParserConfig.DEFAULT ) .defaultSchema( rootSchema.getSubNamespace( transaction.getDefaultSchema().name ) ) diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java index 09d76b9425..31499caeb6 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java @@ -34,7 +34,7 @@ import 
org.polypheny.db.catalog.logical.graph.GraphCatalog; import org.polypheny.db.catalog.logical.relational.RelationalCatalog; import org.polypheny.db.catalog.logistic.NamespaceType; -import org.polypheny.db.catalog.snapshot.logical.LogicalFullSnapshot; +import org.polypheny.db.catalog.snapshot.FullSnapshot; import org.polypheny.db.nodes.Identifier; import org.polypheny.db.nodes.Operator; import org.polypheny.db.plan.AlgOptEntity; @@ -64,7 +64,7 @@ public class PolyCatalog implements Serializable, CatalogReader { public final Map users; private final IdBuilder idBuilder = new IdBuilder(); - private LogicalFullSnapshot logicalFullSnapshot; + private FullSnapshot fullSnapshot; public PolyCatalog() { @@ -83,7 +83,7 @@ public PolyCatalog( private void updateSnapshot() { - this.logicalFullSnapshot = new LogicalFullSnapshot( catalogs ); + this.fullSnapshot = new FullSnapshot( catalogs ); } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/Snapshot.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/FullSnapshot.java similarity index 74% rename from plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/Snapshot.java rename to plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/FullSnapshot.java index 9f57bfdaf4..ac6c2149bb 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/Snapshot.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/FullSnapshot.java @@ -16,6 +16,16 @@ package org.polypheny.db.catalog.snapshot; -public interface Snapshot { +import java.util.Map; +import org.polypheny.db.catalog.NCatalog; +import org.polypheny.db.catalog.Snapshot; + +public class FullSnapshot implements Snapshot { + + + public FullSnapshot( Map catalogs ) { + + } + } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/allocation/AllocationSnapshot.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/allocation/AllocationSnapshot.java deleted file mode 100644 index e125fad59e..0000000000 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/allocation/AllocationSnapshot.java +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Copyright 2019-2023 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.polypheny.db.catalog.snapshot.allocation; - -import org.polypheny.db.catalog.snapshot.Snapshot; - -public class AllocationSnapshot implements Snapshot { - -} diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/logical/LogicalDocumentSnapshot.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/logical/LogicalDocumentSnapshot.java deleted file mode 100644 index e89fb7f6b9..0000000000 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/logical/LogicalDocumentSnapshot.java +++ /dev/null @@ -1,94 +0,0 @@ -/* - * Copyright 2019-2023 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.catalog.snapshot.logical; - -import com.google.common.collect.ImmutableList; -import com.google.common.collect.ImmutableMap; -import java.util.List; -import java.util.Map; -import java.util.stream.Collectors; -import lombok.Value; -import org.polypheny.db.catalog.logistic.NamespaceType; -import org.polypheny.db.catalog.logical.document.CatalogCollection; -import org.polypheny.db.catalog.logical.document.CatalogDatabase; -import org.polypheny.db.catalog.logical.document.DocumentCatalog; - -@Value -public class LogicalDocumentSnapshot implements LogicalSnapshot { - - ImmutableList catalogs; - public ImmutableList databases; - public ImmutableMap databaseIds; - public ImmutableMap databaseNames; - public ImmutableList collections; - public ImmutableMap collectionIds; - public ImmutableMap collectionNames; - - - public LogicalDocumentSnapshot( final List catalogs ) { - this.catalogs = ImmutableList.copyOf( catalogs.stream().map( DocumentCatalog::copy ).collect( Collectors.toList() ) ); - - this.databases = ImmutableList.copyOf( buildDatabases() ); - this.databaseIds = ImmutableMap.copyOf( buildDatabaseIds() ); - this.databaseNames = ImmutableMap.copyOf( buildDatabaseNames() ); - - this.collections = ImmutableList.copyOf( buildCollections() ); - this.collectionIds = ImmutableMap.copyOf( buildCollectionIds() ); - this.collectionNames = ImmutableMap.copyOf( buildCollectionNames() ); - } - - - private Map buildCollectionNames() { - return this.collections.stream().collect( Collectors.toMap( c -> c.name, c -> c ) ); - } - - - private Map buildCollectionIds() { - return this.collections.stream().collect( Collectors.toMap( c -> c.id, c -> c ) ); - } - - - private List buildCollections() { - return this.databases.stream().flatMap( d -> d.collections.values().stream() ).collect( Collectors.toList() ); - } - - /////////////////////////// - ///// Database //////////// - /////////////////////////// - - - private Map buildDatabaseNames() { - return this.databases.stream().collect( Collectors.toMap( d -> d.name, d -> d ) ); - } - - - private Map buildDatabaseIds() { - return this.databases.stream().collect( Collectors.toMap( d -> d.id, d -> d ) ); - } - - - private List buildDatabases() { - return catalogs.stream().map( c -> new CatalogDatabase( c.id, c.name, c.collections ) 
).collect( Collectors.toList() ); - } - - - @Override - public NamespaceType getType() { - return NamespaceType.DOCUMENT; - } - -} diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/logical/LogicalFullSnapshot.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/logical/LogicalFullSnapshot.java deleted file mode 100644 index 7a8066762c..0000000000 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/logical/LogicalFullSnapshot.java +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Copyright 2019-2023 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.catalog.snapshot.logical; - -import com.google.common.collect.ImmutableMap; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.stream.Collectors; -import org.polypheny.db.catalog.logistic.NamespaceType; -import org.polypheny.db.catalog.NCatalog; -import org.polypheny.db.catalog.logical.document.DocumentCatalog; -import org.polypheny.db.catalog.logical.graph.GraphCatalog; -import org.polypheny.db.catalog.logical.relational.RelationalCatalog; - -public class LogicalFullSnapshot implements LogicalSnapshot { - - private final ImmutableMap catalogs; - private final LogicalRelationalSnapshot relationalPeek; - private final LogicalGraphSnapshot graphPeek; - private final LogicalDocumentSnapshot documentPeek; - private final ImmutableMap ids; - private final ImmutableMap names; - - - public LogicalFullSnapshot( Map catalogs ) { - this.catalogs = ImmutableMap.copyOf( catalogs ); - - List relational = catalogs.values().stream().filter( c -> c.getType() == NamespaceType.RELATIONAL ).map( NCatalog::asRelational ).collect( Collectors.toList() ); - List graph = catalogs.values().stream().filter( c -> c.getType() == NamespaceType.GRAPH ).map( NCatalog::asGraph ).collect( Collectors.toList() ); - List document = catalogs.values().stream().filter( c -> c.getType() == NamespaceType.DOCUMENT ).map( NCatalog::asDocument ).collect( Collectors.toList() ); - - this.relationalPeek = new LogicalRelationalSnapshot( relational ); - this.graphPeek = new LogicalGraphSnapshot( graph ); - this.documentPeek = new LogicalDocumentSnapshot( document ); - - this.ids = buildIds(); - this.names = buildNames(); - } - - - private ImmutableMap buildIds() { - Map ids = new HashMap<>(); - this.relationalPeek.schemaIds.keySet().forEach( id -> ids.put( id, relationalPeek ) ); - this.graphPeek.graphIds.keySet().forEach( id -> ids.put( id, graphPeek ) ); - this.documentPeek.databaseIds.keySet().forEach( id -> ids.put( id, documentPeek ) ); - - return ImmutableMap.copyOf( ids ); - } - - - private ImmutableMap buildNames() { - Map names = new HashMap<>(); - this.relationalPeek.schemaNames.keySet().forEach( name -> names.put( name, relationalPeek ) ); - this.graphPeek.graphNames.keySet().forEach( name -> names.put( name, graphPeek ) ); - this.documentPeek.databaseNames.keySet().forEach( name -> names.put( name, documentPeek ) ); 
- - return ImmutableMap.copyOf( names ); - } - - - @Override - public NamespaceType getType() { - return null; - } - -} diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/logical/LogicalGraphSnapshot.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/logical/LogicalGraphSnapshot.java deleted file mode 100644 index d4724ad4b1..0000000000 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/logical/LogicalGraphSnapshot.java +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Copyright 2019-2023 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.catalog.snapshot.logical; - -import com.google.common.collect.ImmutableList; -import com.google.common.collect.ImmutableMap; -import java.util.List; -import java.util.Map; -import java.util.stream.Collectors; -import lombok.Value; -import org.polypheny.db.catalog.logistic.NamespaceType; -import org.polypheny.db.catalog.logical.graph.CatalogGraph; -import org.polypheny.db.catalog.logical.graph.GraphCatalog; - -@Value -public class LogicalGraphSnapshot implements LogicalSnapshot { - - ImmutableList catalogs; - public ImmutableList graphs; - - public ImmutableMap graphIds; - public ImmutableMap graphNames; - - - public LogicalGraphSnapshot( final List catalogs ) { - this.catalogs = ImmutableList.copyOf( catalogs.stream().map( GraphCatalog::copy ).collect( Collectors.toList() ) ); - - this.graphs = ImmutableList.copyOf( buildGraphs() ); - this.graphIds = ImmutableMap.copyOf( buildGraphIds() ); - this.graphNames = ImmutableMap.copyOf( buildGraphNames() ); - - } - - - private List buildGraphs() { - return catalogs.stream().map( c -> new CatalogGraph( c.id, c.name ) ).collect( Collectors.toList() ); - } - - - private Map buildGraphIds() { - return graphs.stream().collect( Collectors.toMap( g -> g.id, g -> g ) ); - } - - - private Map buildGraphNames() { - return graphs.stream().collect( Collectors.toMap( g -> g.name, g -> g ) ); - } - - - @Override - public NamespaceType getType() { - return NamespaceType.GRAPH; - } - -} diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/logical/LogicalRelationalSnapshot.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/logical/LogicalRelationalSnapshot.java deleted file mode 100644 index e6f60c8315..0000000000 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/logical/LogicalRelationalSnapshot.java +++ /dev/null @@ -1,126 +0,0 @@ -/* - * Copyright 2019-2023 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.catalog.snapshot.logical; - -import com.google.common.collect.ImmutableList; -import com.google.common.collect.ImmutableMap; -import java.util.List; -import java.util.Map; -import java.util.stream.Collectors; -import lombok.Value; -import org.polypheny.db.catalog.logistic.NamespaceType; -import org.polypheny.db.catalog.logical.relational.CatalogColumn; -import org.polypheny.db.catalog.logical.relational.CatalogSchema; -import org.polypheny.db.catalog.logical.relational.CatalogTable; -import org.polypheny.db.catalog.logical.relational.RelationalCatalog; - -@Value -public class LogicalRelationalSnapshot implements LogicalSnapshot { - - ImmutableList catalogs; - - public ImmutableList schemas; - public ImmutableMap schemaIds; - public ImmutableMap schemaNames; - public ImmutableList tables; - public ImmutableMap tableIds; - public ImmutableMap tableNames; - public ImmutableList columns; - public ImmutableMap columnIds; - public ImmutableMap columnNames; - - - public LogicalRelationalSnapshot( List catalogs ) { - this.catalogs = ImmutableList.copyOf( catalogs.stream().map( RelationalCatalog::copy ).collect( Collectors.toList() ) ); - - this.schemas = ImmutableList.copyOf( buildSchemas() ); - this.schemaIds = ImmutableMap.copyOf( buildSchemaIds() ); - this.schemaNames = ImmutableMap.copyOf( buildSchemaNames() ); - - this.tables = ImmutableList.copyOf( buildTables() ); - this.tableIds = ImmutableMap.copyOf( buildTableIds() ); - this.tableNames = ImmutableMap.copyOf( buildTableNames() ); - - this.columns = ImmutableList.copyOf( buildColumns() ); - this.columnIds = ImmutableMap.copyOf( buildColumnIds() ); - this.columnNames = ImmutableMap.copyOf( buildColumnNames() ); - } - - /////////////////////////// - ///// Columns ///////////// - /////////////////////////// - - - private List buildColumns() { - return tables.stream().flatMap( t -> t.columns.values().stream() ).collect( Collectors.toList() ); - } - - - private Map buildColumnIds() { - return columns.stream().collect( Collectors.toMap( c -> c.id, c -> c ) ); - } - - - private Map buildColumnNames() { - return columns.stream().collect( Collectors.toMap( c -> c.name, c -> c ) ); - } - - /////////////////////////// - ///// Tables ////////////// - /////////////////////////// - - - private List buildTables() { - return catalogs.stream().flatMap( c -> c.tables.values().stream() ).collect( Collectors.toList() ); - } - - - private Map buildTableIds() { - return tables.stream().collect( Collectors.toMap( c -> c.id, c -> c ) ); - } - - - private Map buildTableNames() { - return tables.stream().collect( Collectors.toMap( c -> c.name, c -> c ) ); - } - - /////////////////////////// - ///// Schema ////////////// - /////////////////////////// - - - private List buildSchemas() { - return catalogs.stream().map( c -> new CatalogSchema( c.id, c.name ) ).collect( Collectors.toList() ); - } - - - private Map buildSchemaIds() { - return schemas.stream().collect( Collectors.toMap( c -> c.id, c -> c ) ); - } - - - private Map buildSchemaNames() { - return schemas.stream().collect( Collectors.toMap( c -> c.name, c -> c 
) ); - } - - - @Override - public NamespaceType getType() { - return NamespaceType.RELATIONAL; - } - -} diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/logical/LogicalSnapshot.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/logical/LogicalSnapshot.java deleted file mode 100644 index 259f82bd90..0000000000 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/logical/LogicalSnapshot.java +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Copyright 2019-2023 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.catalog.snapshot.logical; - -import org.polypheny.db.catalog.logistic.NamespaceType; -import org.polypheny.db.catalog.snapshot.Snapshot; - -public interface LogicalSnapshot extends Snapshot { - - NamespaceType getType(); - - -} diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/physical/DocumentOnlySnapshot.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/physical/DocumentOnlySnapshot.java deleted file mode 100644 index c69cd59403..0000000000 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/physical/DocumentOnlySnapshot.java +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright 2019-2023 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.catalog.snapshot.physical; - -public class DocumentOnlySnapshot implements PhysicalSnapshot { - -} diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/physical/GraphOnlySnapshot.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/physical/GraphOnlySnapshot.java deleted file mode 100644 index c784f83336..0000000000 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/physical/GraphOnlySnapshot.java +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright 2019-2023 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.catalog.snapshot.physical; - -public class GraphOnlySnapshot implements PhysicalSnapshot { - -} diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/physical/PhysicalSnapshot.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/physical/PhysicalSnapshot.java deleted file mode 100644 index 3e5f8fe752..0000000000 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/physical/PhysicalSnapshot.java +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Copyright 2019-2023 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.catalog.snapshot.physical; - -import org.polypheny.db.catalog.snapshot.Snapshot; - -public interface PhysicalSnapshot extends Snapshot { - -} diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/physical/RelationalOnlySnapshot.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/physical/RelationalOnlySnapshot.java deleted file mode 100644 index 99cdceaa61..0000000000 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/physical/RelationalOnlySnapshot.java +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright 2019-2023 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.polypheny.db.catalog.snapshot.physical; - -public class RelationalOnlySnapshot implements PhysicalSnapshot { - -} diff --git a/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/Rest.java b/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/Rest.java index 462d93ae22..9f98639989 100644 --- a/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/Rest.java +++ b/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/Rest.java @@ -38,9 +38,9 @@ import org.polypheny.db.algebra.constant.Kind; import org.polypheny.db.algebra.core.AggregateCall; import org.polypheny.db.algebra.core.JoinAlgType; -import org.polypheny.db.algebra.core.relational.RelModify; import org.polypheny.db.algebra.core.Sort; import org.polypheny.db.algebra.core.common.Modify; +import org.polypheny.db.algebra.core.relational.RelModify; import org.polypheny.db.algebra.fun.AggFunction; import org.polypheny.db.algebra.logical.relational.LogicalRelModify; import org.polypheny.db.algebra.logical.relational.LogicalValues; @@ -156,7 +156,7 @@ String processPatchResource( final ResourcePatchRequest resourcePatchRequest, fi JavaTypeFactory typeFactory = transaction.getTypeFactory(); RexBuilder rexBuilder = new RexBuilder( typeFactory ); - PolyphenyDbCatalogReader catalogReader = statement.getTransaction().getCatalogReader(); + PolyphenyDbCatalogReader catalogReader = statement.getTransaction().getSnapshot(); PreparingEntity table = catalogReader.getTable( Arrays.asList( resourcePatchRequest.tables.get( 0 ).getNamespaceName(), resourcePatchRequest.tables.get( 0 ).name ) ); // Table Scans @@ -215,7 +215,7 @@ String processDeleteResource( final ResourceDeleteRequest resourceDeleteRequest, JavaTypeFactory typeFactory = transaction.getTypeFactory(); RexBuilder rexBuilder = new RexBuilder( typeFactory ); - PolyphenyDbCatalogReader catalogReader = statement.getTransaction().getCatalogReader(); + PolyphenyDbCatalogReader catalogReader = statement.getTransaction().getSnapshot(); PreparingEntity table = catalogReader.getTable( Arrays.asList( resourceDeleteRequest.tables.get( 0 ).getNamespaceName(), resourceDeleteRequest.tables.get( 0 ).name ) ); // Table Scans @@ -268,7 +268,7 @@ String processPostResource( final ResourcePostRequest insertValueRequest, final JavaTypeFactory typeFactory = transaction.getTypeFactory(); RexBuilder rexBuilder = new RexBuilder( typeFactory ); - PolyphenyDbCatalogReader catalogReader = statement.getTransaction().getCatalogReader(); + PolyphenyDbCatalogReader catalogReader = statement.getTransaction().getSnapshot(); PreparingEntity table = catalogReader.getTable( Arrays.asList( insertValueRequest.tables.get( 0 ).getNamespaceName(), insertValueRequest.tables.get( 0 ).name ) ); // Values diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/SqlProcessorImpl.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/SqlProcessorImpl.java index 6b56cf31b6..aece9af301 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/SqlProcessorImpl.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/SqlProcessorImpl.java @@ -154,7 +154,7 @@ public Pair validate( Transaction transaction, Node parsed, b } final Conformance conformance = parserConfig.conformance(); - final PolyphenyDbCatalogReader catalogReader = transaction.getCatalogReader(); + final PolyphenyDbCatalogReader catalogReader = transaction.getSnapshot(); validator = new PolyphenyDbSqlValidator( SqlStdOperatorTable.instance(), catalogReader, 
transaction.getTypeFactory(), conformance ); validator.setIdentifierExpansion( true ); @@ -191,14 +191,14 @@ public AlgRoot translate( Statement statement, Node query, QueryParameters param Config sqlToAlgConfig = NodeToAlgConverter.configBuilder().build(); final RexBuilder rexBuilder = new RexBuilder( statement.getTransaction().getTypeFactory() ); - final AlgOptCluster cluster = AlgOptCluster.create( statement.getQueryProcessor().getPlanner(), rexBuilder, null, statement.getDataContext().getRootSchema() ); + final AlgOptCluster cluster = AlgOptCluster.create( statement.getQueryProcessor().getPlanner(), rexBuilder, null, statement.getDataContext().getSnapshot() ); final Config config = NodeToAlgConverter.configBuilder() .config( sqlToAlgConfig ) .trimUnusedFields( false ) .convertTableAccess( false ) .build(); - final SqlToAlgConverter sqlToAlgConverter = new SqlToAlgConverter( validator, statement.getTransaction().getCatalogReader(), cluster, StandardConvertletTable.INSTANCE, config ); + final SqlToAlgConverter sqlToAlgConverter = new SqlToAlgConverter( validator, statement.getTransaction().getSnapshot(), cluster, StandardConvertletTable.INSTANCE, config ); AlgRoot logicalRoot = sqlToAlgConverter.convertQuery( query, false, true ); // Decorrelate diff --git a/plugins/sql-language/src/test/java/org/polypheny/db/sql/InterpreterTest.java b/plugins/sql-language/src/test/java/org/polypheny/db/sql/InterpreterTest.java index b45c48f1a4..29d978bfcc 100644 --- a/plugins/sql-language/src/test/java/org/polypheny/db/sql/InterpreterTest.java +++ b/plugins/sql-language/src/test/java/org/polypheny/db/sql/InterpreterTest.java @@ -35,6 +35,7 @@ import org.polypheny.db.adapter.java.ReflectiveSchema; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.type.AlgDataType; +import org.polypheny.db.catalog.Snapshot; import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.interpreter.Interpreter; import org.polypheny.db.languages.Parser.ParserConfig; @@ -78,7 +79,7 @@ private class MyDataContext implements DataContext { @Override - public SchemaPlus getRootSchema() { + public Snapshot getSnapshot() { return rootSchema; } diff --git a/plugins/sql-language/src/test/java/org/polypheny/db/sql/RexExecutorTest.java b/plugins/sql-language/src/test/java/org/polypheny/db/sql/RexExecutorTest.java index 877070e78b..974cf1381c 100644 --- a/plugins/sql-language/src/test/java/org/polypheny/db/sql/RexExecutorTest.java +++ b/plugins/sql-language/src/test/java/org/polypheny/db/sql/RexExecutorTest.java @@ -42,6 +42,7 @@ import org.polypheny.db.algebra.operators.OperatorName; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; +import org.polypheny.db.catalog.Snapshot; import org.polypheny.db.languages.OperatorRegistry; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgOptSchema; @@ -350,7 +351,7 @@ public TestDataContext( Object[] values ) { @Override - public SchemaPlus getRootSchema() { + public Snapshot getSnapshot() { throw new RuntimeException( "Unsupported" ); } From 63ec4fc0d1fe8ebb24c09b3bbf5aa3dda9f0c604 Mon Sep 17 00:00:00 2001 From: datomo Date: Tue, 28 Feb 2023 17:10:29 +0100 Subject: [PATCH 027/436] adjusted creation of relational entities; changed the cottontail, file, and mongodb adapters --- .../org/polypheny/db/adapter/Adapter.java | 8 +- .../org/polypheny/db/adapter/DataStore.java | 20 +- .../EnumerableTableModifyToStreamerRule.java | 2 +- .../adapter/java/AbstractQueryableEntity.java | 6 +-
.../db/adapter/java/ReflectiveSchema.java | 39 ++-- .../algebra/core/document/DocumentValues.java | 6 +- .../db/algebra/core/relational/RelModify.java | 5 +- .../relational/RelationalTransformable.java | 3 +- .../common/LogicalConstraintEnforcer.java | 6 +- .../document/LogicalDocumentModify.java | 4 +- .../logical/document/LogicalDocumentScan.java | 14 +- .../db/algebra/logical/lpg/LogicalGraph.java | 9 +- .../algebra/logical/lpg/LogicalLpgModify.java | 5 +- .../algebra/logical/lpg/LogicalLpgScan.java | 9 +- .../algebra/logical/lpg/LogicalLpgValues.java | 6 +- .../logical/relational/LogicalRelModify.java | 6 +- .../polypheny/db/algebra/type/GraphType.java | 195 ++++++++++++++++++ .../org/polypheny/db/catalog/Catalog.java | 2 +- .../org/polypheny/db/catalog/Snapshot.java | 69 ++++++- .../db/catalog/entity/CatalogEntity.java | 19 +- .../db/catalog/entity/CatalogNamespace.java | 4 +- .../db/catalog/entity/CatalogSchema.java | 2 +- .../entity/allocation/AllocationTable.java | 33 ++- .../{ => logical}/LogicalCollection.java | 7 +- .../catalog/entity/logical/LogicalTable.java | 20 +- .../entity/physical/PhysicalTable.java | 24 ++- .../db/catalog/refactor/Expressible.java | 5 + .../db/catalog/refactor/FilterableEntity.java | 10 + .../db/catalog/refactor/ModifiableEntity.java | 2 +- .../db/catalog/refactor/QueryableEntity.java | 8 +- .../db/catalog/refactor/ScannableEntity.java | 8 + .../java/org/polypheny/db/ddl/DdlManager.java | 2 +- .../db/languages/LanguageManager.java | 3 +- .../polypheny/db/languages/QueryLanguage.java | 5 +- .../validate/ValidatorCatalogReader.java | 6 +- .../db/nodes/validate/ValidatorTable.java | 7 - .../org/polypheny/db/plan/AlgOptCluster.java | 20 +- .../org/polypheny/db/plan/AlgOptSchema.java | 3 +- .../db/prepare/AlgOptEntityImpl.java | 24 +-- .../org/polypheny/db/prepare/Context.java | 3 +- .../org/polypheny/db/prepare/ContextImpl.java | 7 +- .../org/polypheny/db/prepare/PlannerImpl.java | 8 +- .../db/prepare/PolyphenyDbCatalogReader.java | 50 ++--- .../db/prepare/PolyphenyDbPrepareImpl.java | 21 +- .../org/polypheny/db/prepare/Prepare.java | 17 +- .../db/prepare/QueryableAlgBuilder.java | 10 +- .../db/schema/AbstractPolyphenyDbSchema.java | 3 +- .../polypheny/db/schema/LogicalEntity.java | 18 +- .../polypheny/db/schema/LogicalSchema.java | 5 +- .../org/polypheny/db/schema/Namespace.java | 5 +- .../db/schema/PolySchemaBuilder.java | 4 +- .../db/schema/PolyphenyDbSchema.java | 2 +- .../polypheny/db/schema/QueryableEntity.java | 5 +- .../java/org/polypheny/db/schema/Schemas.java | 101 +++------ .../db/schema/impl/AbstractNamespace.java | 14 +- .../schema/impl/AbstractTableQueryable.java | 21 +- .../db/schema/impl/DelegatingNamespace.java | 6 +- .../org/polypheny/db/tools/AlgBuilder.java | 33 +-- .../polypheny/db/tools/AlgBuilderFactory.java | 3 +- .../polypheny/db/tools/FrameworkConfig.java | 5 +- .../org/polypheny/db/tools/Frameworks.java | 50 ++--- .../polypheny/db/tools/RoutedAlgBuilder.java | 8 +- .../org/polypheny/db/util/BuiltInMethod.java | 2 +- .../main/java/org/polypheny/db/util/Util.java | 21 ++ .../org/polypheny/db/util/ValidatorUtil.java | 6 +- .../org/polypheny/db/catalog/MockCatalog.java | 2 +- .../db/catalog/MockCatalogReader.java | 159 +++----------- .../db/docker/MockCatalogDocker.java | 7 + .../org/polypheny/db/plan/RelOptUtilTest.java | 10 +- .../db/schemas/HrClusteredSchema.java | 24 +-- .../java/org/polypheny/db/test/JdbcTest.java | 11 +- .../db/test/ScannableEntityTest.java | 2 +- .../org/polypheny/db/ddl/DdlManagerImpl.java | 16 +- 
.../db/partition/FrequencyMapImpl.java | 4 +- .../db/processing/AbstractQueryProcessor.java | 2 +- .../processing/ConstraintEnforceAttacher.java | 4 +- .../db/processing/DataMigratorImpl.java | 25 +-- .../db/routing/routers/BaseRouter.java | 41 ++-- .../db/routing/routers/DmlRouterImpl.java | 27 +-- .../db/view/MaterializedViewManagerImpl.java | 2 +- .../org/polypheny/db/misc/AlgBuilderTest.java | 9 +- .../java/org/polypheny/db/mql/DdlTest.java | 2 +- .../statistics/StatisticsManagerImpl.java | 9 +- .../adapter/cottontail/CottontailEntity.java | 14 +- .../adapter/cottontail/CottontailPlugin.java | 64 +++--- .../adapter/cottontail/CottontailSchema.java | 13 +- .../cottontail/algebra/CottontailProject.java | 2 +- .../cottontail/algebra/CottontailScan.java | 2 +- .../algebra/CottontailTableModify.java | 2 +- .../db/adapter/csv/CsvFilterableTable.java | 10 +- .../db/adapter/csv/CsvProjectScanRule.java | 5 +- .../org/polypheny/db/adapter/csv/CsvScan.java | 10 +- .../db/adapter/csv/CsvScannableTable.java | 7 +- .../polypheny/db/adapter/csv/CsvSchema.java | 35 ++-- .../polypheny/db/adapter/csv/CsvSource.java | 8 +- .../polypheny/db/adapter/csv/CsvTable.java | 14 +- .../db/adapter/csv/CsvTranslatableTable.java | 37 +--- .../cypher2alg/CypherToAlgConverter.java | 13 +- .../db/adapter/druid/DruidEntity.java | 9 +- .../db/adapter/druid/DruidQuery.java | 14 +- .../db/adapter/druid/DruidSchema.java | 10 +- .../org/polypheny/db/test/DruidAdapterIT.java | 2 +- .../polypheny/db/test/DruidAdapterIT2.java | 2 +- .../elasticsearch/ElasticsearchEntity.java | 87 +++++--- .../elasticsearch/ElasticsearchRel.java | 3 +- .../elasticsearch/ElasticsearchScan.java | 5 +- .../elasticsearch/ElasticsearchSchema.java | 11 +- .../ElasticsearchToEnumerableConverter.java | 2 +- .../adapter/elasticsearch/ScrollingTest.java | 2 +- .../polypheny/db/adapter/file/FileAlg.java | 14 +- .../polypheny/db/adapter/file/FilePlugin.java | 16 +- .../db/adapter/file/FileStoreSchema.java | 64 ++---- .../adapter/file/FileTranslatableEntity.java | 116 +++-------- .../org/polypheny/db/adapter/file/Value.java | 6 +- .../db/adapter/file/algebra/FileRules.java | 12 +- .../db/adapter/file/algebra/FileScan.java | 8 +- .../adapter/file/algebra/FileTableModify.java | 18 +- .../algebra/FileToEnumerableConverter.java | 10 +- .../polypheny/db/adapter/file/source/Qfs.java | 10 +- .../db/adapter/file/source/QfsSchema.java | 64 ++---- .../db/adapter/geode/algebra/GeodeAlg.java | 3 +- .../db/adapter/geode/algebra/GeodeEntity.java | 41 ++-- .../db/adapter/geode/algebra/GeodeScan.java | 5 +- .../db/adapter/geode/algebra/GeodeSchema.java | 9 +- .../algebra/GeodeToEnumerableConverter.java | 2 +- .../simple/GeodeSimpleScannableEntity.java | 20 +- .../geode/simple/GeodeSimpleSchema.java | 9 +- .../db/adapter/geode/util/GeodeUtils.java | 6 +- .../db/hsqldb/stores/HsqldbStore.java | 5 +- .../polypheny/db/adapter/html/HtmlSchema.java | 2 +- .../polypheny/db/adapter/jdbc/JdbcEntity.java | 87 +++----- .../polypheny/db/adapter/jdbc/JdbcSchema.java | 65 ++---- .../jdbc/sources/AbstractJdbcSource.java | 6 +- .../jdbc/stores/AbstractJdbcStore.java | 60 ++---- .../jdbc/rel2sql/AlgToSqlConverterTest.java | 6 +- .../db/adapter/jdbc/rel2sql/PlannerTest.java | 2 +- .../rel2sql/RelToSqlConverterStructsTest.java | 6 +- .../org/polypheny/db/catalog/CatalogImpl.java | 10 +- .../org/polypheny/db/test/CatalogTest.java | 2 +- .../monetdb/sources/MonetdbSource.java | 2 +- .../adapter/monetdb/stores/MonetdbStore.java | 2 +- .../db/adapter/mongodb/MongoAlg.java | 7 +- 
.../db/adapter/mongodb/MongoEntity.java | 110 +++++----- .../db/adapter/mongodb/MongoPlugin.java | 36 +--- .../db/adapter/mongodb/MongoRules.java | 2 +- .../db/adapter/mongodb/MongoScan.java | 12 +- .../db/adapter/mongodb/MongoSchema.java | 28 +-- .../db/languages/mql/MqlAddPlacement.java | 2 +- .../db/languages/mql/MqlDeletePlacement.java | 2 +- .../polypheny/db/languages/mql/MqlDrop.java | 2 +- .../db/adapter/jdbc/MysqlSourcePlugin.java | 2 +- .../db/adapter/neo4j/Neo4jPlugin.java | 6 +- .../polypheny/db/adapter/neo4j/NeoEntity.java | 2 +- .../polypheny/db/adapter/neo4j/NeoGraph.java | 2 +- .../polypheny/db/adapter/pig/PigSchema.java | 2 +- .../db/test/PigAlgBuilderStyleTest.java | 2 +- .../org/polypheny/db/catalog/PolyCatalog.java | 9 +- .../postgres/source/PostgresqlSource.java | 2 +- .../postgres/store/PostgresqlStore.java | 2 +- .../polypheny/db/sql/SqlLanguagePlugin.java | 7 +- .../polypheny/db/sql/SqlProcessorImpl.java | 5 +- .../polypheny/db/sql/language/SqlUtil.java | 25 +++ .../language/advise/SqlAdvisorValidator.java | 6 +- .../db/sql/language/ddl/SqlCreateTable.java | 2 +- .../db/sql/language/util/SqlTypeUtil.java | 13 +- .../language/validate/AbstractNamespace.java | 6 +- .../validate/DelegatingNamespace.java | 4 +- .../language/validate/DelegatingScope.java | 3 +- .../DelegatingSqlValidatorCatalogReader.java | 4 +- .../validate/DelegatingSqlValidatorTable.java | 6 - .../db/sql/language/validate/EmptyScope.java | 10 +- ...bleNamespace.java => EntityNamespace.java} | 32 +-- .../validate/IdentifierNamespace.java | 29 +-- .../db/sql/language/validate/ListScope.java | 8 +- .../validate/MatchRecognizeScope.java | 2 +- .../sql/language/validate/OrderByScope.java | 4 +- .../validate/PolyphenyDbSqlValidator.java | 5 +- .../language/validate/SchemaNamespace.java | 12 +- .../sql/language/validate/SqlValidator.java | 3 +- .../language/validate/SqlValidatorImpl.java | 120 +++++------ .../validate/SqlValidatorNamespace.java | 7 +- .../language/validate/SqlValidatorScope.java | 2 +- .../language/validate/SqlValidatorUtil.java | 57 +++-- .../language/validate/UnnestNamespace.java | 4 +- .../db/sql/language/validate/WithScope.java | 4 +- .../db/sql/sql2alg/SqlToAlgConverter.java | 40 ++-- .../org/polypheny/db/sql/FrameworksTest.java | 19 +- .../org/polypheny/db/sql/InterpreterTest.java | 2 +- .../org/polypheny/db/sql/PlannerTest.java | 12 +- .../org/polypheny/db/sql/RexExecutorTest.java | 2 +- .../polypheny/db/sql/SortRemoveRuleTest.java | 2 +- .../org/polypheny/db/sql/TestFixture.java | 2 +- .../db/sql/language/SqlToAlgTestBase.java | 3 +- .../validate/LexCaseSensitiveTest.java | 2 +- .../db/sql/util/PlannerImplMock.java | 2 +- .../org/polypheny/db/webui/HttpServer.java | 2 +- .../polypheny/db/webui/crud/LanguageCrud.java | 2 +- 197 files changed, 1584 insertions(+), 1597 deletions(-) create mode 100644 core/src/main/java/org/polypheny/db/algebra/type/GraphType.java rename core/src/main/java/org/polypheny/db/catalog/entity/{ => logical}/LogicalCollection.java (94%) rename plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/{TableNamespace.java => EntityNamespace.java} (85%) diff --git a/core/src/main/java/org/polypheny/db/adapter/Adapter.java b/core/src/main/java/org/polypheny/db/adapter/Adapter.java index 1099f83a8b..7508094237 100644 --- a/core/src/main/java/org/polypheny/db/adapter/Adapter.java +++ b/core/src/main/java/org/polypheny/db/adapter/Adapter.java @@ -47,10 +47,11 @@ import lombok.experimental.Accessors; import 
org.polypheny.db.adapter.DeployMode.DeploySetting; import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.Snapshot; import org.polypheny.db.catalog.entity.CatalogCollectionPlacement; import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; -import org.polypheny.db.catalog.entity.LogicalCollection; import org.polypheny.db.catalog.entity.allocation.AllocationTable; +import org.polypheny.db.catalog.entity.logical.LogicalCollection; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.entity.physical.PhysicalGraph; import org.polypheny.db.catalog.entity.physical.PhysicalTable; @@ -69,7 +70,6 @@ import org.polypheny.db.prepare.Context; import org.polypheny.db.schema.Entity; import org.polypheny.db.schema.Namespace; -import org.polypheny.db.schema.SchemaPlus; import org.polypheny.db.transaction.PolyXid; @Getter @@ -322,9 +322,9 @@ public Adapter( int adapterId, String uniqueName, Map settings ) } - public abstract void createNewSchema( SchemaPlus rootSchema, String name, Long id ); + public abstract void createNewSchema( Snapshot snapshot, String name, long id ); - public abstract PhysicalTable createTableSchema( LogicalTable logical, AllocationTable allocationTable ); + public abstract PhysicalTable createAdapterTable( LogicalTable logical, AllocationTable allocationTable, PhysicalTable physicalTable ); public abstract Namespace getCurrentSchema(); diff --git a/core/src/main/java/org/polypheny/db/adapter/DataStore.java b/core/src/main/java/org/polypheny/db/adapter/DataStore.java index 85487379e6..458505aae1 100644 --- a/core/src/main/java/org/polypheny/db/adapter/DataStore.java +++ b/core/src/main/java/org/polypheny/db/adapter/DataStore.java @@ -28,17 +28,19 @@ import lombok.extern.slf4j.Slf4j; import org.pf4j.ExtensionPoint; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.catalog.entity.CatalogAdapter.AdapterType; -import org.polypheny.db.catalog.entity.LogicalCollection; import org.polypheny.db.catalog.entity.CatalogCollectionMapping; import org.polypheny.db.catalog.entity.CatalogColumn; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; -import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.catalog.entity.CatalogGraphMapping; import org.polypheny.db.catalog.entity.CatalogGraphPlacement; import org.polypheny.db.catalog.entity.CatalogIndex; +import org.polypheny.db.catalog.entity.allocation.AllocationTable; +import org.polypheny.db.catalog.entity.logical.LogicalCollection; +import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.catalog.entity.physical.PhysicalTable; +import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.prepare.Context; import org.polypheny.db.type.PolyType; @@ -65,7 +67,7 @@ public List getSupportedSchemaType() { } - public abstract void createTable( Context context, LogicalTable combinedTable, List partitionIds ); + public abstract PhysicalTable createPhysicalTable( Context context, LogicalTable combinedTable, AllocationTable allocationTable ); public abstract void dropTable( Context context, LogicalTable combinedTable, List partitionIds ); @@ -116,16 +118,16 @@ private void createGraphSubstitution( Context context, LogicalGraph graphDatabas CatalogGraphMapping mapping = Catalog.getInstance().getGraphMapping( graphDatabase.id ); LogicalTable nodes = 
Catalog.getInstance().getTable( mapping.nodesId ); - createTable( context, nodes, nodes.partitionProperty.partitionIds ); + createPhysicalTable( context, nodes, null ); LogicalTable nodeProperty = Catalog.getInstance().getTable( mapping.nodesPropertyId ); - createTable( context, nodeProperty, nodeProperty.partitionProperty.partitionIds ); + createPhysicalTable( context, nodeProperty, null ); LogicalTable edges = Catalog.getInstance().getTable( mapping.edgesId ); - createTable( context, edges, edges.partitionProperty.partitionIds ); + createPhysicalTable( context, edges, null ); LogicalTable edgeProperty = Catalog.getInstance().getTable( mapping.edgesPropertyId ); - createTable( context, edgeProperty, edgeProperty.partitionProperty.partitionIds ); + createPhysicalTable( context, edgeProperty, null ); } @@ -171,7 +173,7 @@ private void createCollectionSubstitution( Context prepareContext, LogicalCollec CatalogCollectionMapping mapping = catalog.getCollectionMapping( catalogCollection.id ); LogicalTable collectionEntity = catalog.getTable( mapping.collectionId ); - createTable( prepareContext, collectionEntity, collectionEntity.partitionProperty.partitionIds ); + createPhysicalTable( prepareContext, collectionEntity, null ); } diff --git a/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableTableModifyToStreamerRule.java b/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableTableModifyToStreamerRule.java index e7838c2dca..5f6d72d942 100644 --- a/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableTableModifyToStreamerRule.java +++ b/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableTableModifyToStreamerRule.java @@ -42,7 +42,7 @@ public void onMatch( AlgOptRuleCall call ) { LogicalStreamer streamer = LogicalStreamer.create( modify, - AlgFactories.LOGICAL_BUILDER.create( modify.getCluster(), modify.getCluster().getRootSchema() ) ); + AlgFactories.LOGICAL_BUILDER.create( modify.getCluster(), modify.getCluster().getSnapshot() ) ); if ( streamer != null ) { call.transformTo( streamer ); diff --git a/core/src/main/java/org/polypheny/db/adapter/java/AbstractQueryableEntity.java b/core/src/main/java/org/polypheny/db/adapter/java/AbstractQueryableEntity.java index b5684af75a..dc13bca9c7 100644 --- a/core/src/main/java/org/polypheny/db/adapter/java/AbstractQueryableEntity.java +++ b/core/src/main/java/org/polypheny/db/adapter/java/AbstractQueryableEntity.java @@ -36,8 +36,8 @@ import java.lang.reflect.Type; import org.apache.calcite.linq4j.tree.Expression; +import org.polypheny.db.catalog.Snapshot; import org.polypheny.db.schema.Entity; -import org.polypheny.db.schema.PolyphenyDbSchema; import org.polypheny.db.schema.QueryableEntity; import org.polypheny.db.schema.Schemas; import org.polypheny.db.schema.impl.AbstractEntity; @@ -64,8 +64,8 @@ public Type getElementType() { @Override - public Expression getExpression( PolyphenyDbSchema schema, String tableName, Class clazz ) { - return Schemas.tableExpression( schema, elementType, tableName, clazz ); + public Expression getExpression( Snapshot snapshot, String tableName, Class clazz ) { + return Schemas.tableExpression( snapshot, elementType, tableName, clazz ); } } diff --git a/core/src/main/java/org/polypheny/db/adapter/java/ReflectiveSchema.java b/core/src/main/java/org/polypheny/db/adapter/java/ReflectiveSchema.java index 6c501d1ae6..31c8533276 100644 --- a/core/src/main/java/org/polypheny/db/adapter/java/ReflectiveSchema.java +++ 
b/core/src/main/java/org/polypheny/db/adapter/java/ReflectiveSchema.java @@ -34,6 +34,7 @@ package org.polypheny.db.adapter.java; +import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableMultimap; import com.google.common.collect.Multimap; @@ -56,13 +57,16 @@ import org.polypheny.db.algebra.AlgReferentialConstraint; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; +import org.polypheny.db.catalog.Snapshot; import org.polypheny.db.catalog.entity.CatalogEntity; +import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.catalog.logistic.EntityType; +import org.polypheny.db.catalog.refactor.ScannableEntity; import org.polypheny.db.schema.Entity; import org.polypheny.db.schema.Function; import org.polypheny.db.schema.Namespace; import org.polypheny.db.schema.Namespace.Schema; import org.polypheny.db.schema.PolyphenyDbSchema; -import org.polypheny.db.schema.ScannableEntity; import org.polypheny.db.schema.Schemas; import org.polypheny.db.schema.Statistic; import org.polypheny.db.schema.Statistics; @@ -115,7 +119,7 @@ public Object getTarget() { @Override - public Map getTableMap() { + public Map getTables() { if ( tableMap == null ) { tableMap = createTableMap(); } @@ -181,11 +185,11 @@ private Multimap createFunctionMap() { /** * Returns an expression for the object wrapped by this schema (not the schema itself). */ - Expression getTargetExpression( PolyphenyDbSchema parentSchema, String name ) { + Expression getTargetExpression( Snapshot snapshot, String name ) { return Types.castIfNecessary( target.getClass(), Expressions.call( - Schemas.unwrap( getExpression( parentSchema, name ), ReflectiveSchema.class ), + Schemas.unwrap( getExpression( snapshot, name ), ReflectiveSchema.class ), BuiltInMethod.REFLECTIVE_SCHEMA_GET_TARGET.method ) ); } @@ -242,25 +246,20 @@ private static Enumerable toEnumerable( final Object o ) { /** * Table that is implemented by reading from a Java object. 
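Illustrative caller sketch (not from this patch) for the getTableMap() -> getTables() rename above; it assumes ReflectiveSchema and the accessor stay publicly reachable and that the map's value type is Entity, which the flattened diff no longer shows explicitly:

import java.util.Map;
import org.polypheny.db.adapter.java.ReflectiveSchema;
import org.polypheny.db.schema.Entity;

class ReflectiveSchemaLookupSketch {
    // Resolves a table by name through the renamed accessor; was getTableMap().
    static Entity resolve( ReflectiveSchema schema, String name ) {
        Map<String, Entity> tables = schema.getTables();
        return tables.get( name );
    }
}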
*/ - private static class ReflectiveEntity extends AbstractQueryableEntity implements Entity, ScannableEntity { + private static class ReflectiveEntity extends LogicalTable implements ScannableEntity { private final Type elementType; private final Enumerable enumerable; ReflectiveEntity( Type elementType, Enumerable enumerable, Long id, Long partitionId, Long adapterId ) { - super( elementType, id, partitionId, adapterId ); + //super( elementType, id, partitionId, adapterId ); + super( id, "test", null, -1, -1, -1, EntityType.ENTITY, null, ImmutableList.of(), false, null ); this.elementType = elementType; this.enumerable = enumerable; } - @Override - public AlgDataType getRowType( AlgDataTypeFactory typeFactory ) { - return ((JavaTypeFactory) typeFactory).createType( elementType ); - } - - @Override public Statistic getStatistic() { return Statistics.UNKNOWN; @@ -274,21 +273,21 @@ public Enumerable scan( DataContext root ) { return enumerable; } else { //noinspection unchecked - return enumerable.select( new FieldSelector( (Class) elementType ) ); + return enumerable.select( new FieldSelector( (Class) elementType ) ); } } - @Override - public Queryable asQueryable( DataContext dataContext, PolyphenyDbSchema schema, String tableName ) { - return new AbstractTableQueryable( dataContext, schema, this, tableName ) { + /*@Override + public Queryable asQueryable( DataContext dataContext, Snapshot snapshot, String tableName ) { + return new AbstractTableQueryable( dataContext, snapshot, this, tableName ) { @Override @SuppressWarnings("unchecked") public Enumerator enumerator() { return (Enumerator) enumerable.enumerator(); } }; - } + }*/ } @@ -360,12 +359,6 @@ public Statistic getStatistic() { } - @Override - public Expression getExpression( PolyphenyDbSchema schema, String tableName, Class clazz ) { - return null; // todo dl - // return Expressions.field( schema.unwrap( ReflectiveSchema.class ).getTargetExpression( schema.getParentSchema(), schema.getName() ), field ); - } - } diff --git a/core/src/main/java/org/polypheny/db/algebra/core/document/DocumentValues.java b/core/src/main/java/org/polypheny/db/algebra/core/document/DocumentValues.java index e488a9b43e..7861ed4bce 100644 --- a/core/src/main/java/org/polypheny/db/algebra/core/document/DocumentValues.java +++ b/core/src/main/java/org/polypheny/db/algebra/core/document/DocumentValues.java @@ -30,10 +30,10 @@ import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.logical.relational.LogicalValues; import org.polypheny.db.algebra.type.DocumentType; +import org.polypheny.db.catalog.Snapshot; import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgTraitSet; -import org.polypheny.db.prepare.Prepare.CatalogReader; import org.polypheny.db.rex.RexBuilder; import org.polypheny.db.rex.RexLiteral; import org.polypheny.db.schema.ModelTrait; @@ -124,13 +124,13 @@ public DocType getDocType() { public LogicalValues getRelationalEquivalent() { AlgTraitSet out = traitSet.replace( ModelTrait.RELATIONAL ); - AlgOptCluster cluster = AlgOptCluster.create( getCluster().getPlanner(), getCluster().getRexBuilder(), traitSet, getCluster().getRootSchema() ); + AlgOptCluster cluster = AlgOptCluster.create( getCluster().getPlanner(), getCluster().getRexBuilder(), traitSet, getCluster().getSnapshot() ); return new LogicalValues( cluster, out, ((DocumentType) rowType).asRelational(), relationalize( documentTuples, cluster.getRexBuilder() ) ); } - public List 
getRelationalEquivalent( List values, List entities, CatalogReader catalogReader ) { + public List getRelationalEquivalent( List values, List entities, Snapshot snapshot ) { return List.of( getRelationalEquivalent() ); } diff --git a/core/src/main/java/org/polypheny/db/algebra/core/relational/RelModify.java b/core/src/main/java/org/polypheny/db/algebra/core/relational/RelModify.java index d07f29ad05..c081f706eb 100644 --- a/core/src/main/java/org/polypheny/db/algebra/core/relational/RelModify.java +++ b/core/src/main/java/org/polypheny/db/algebra/core/relational/RelModify.java @@ -68,7 +68,7 @@ public abstract class RelModify extends Modify { @Getter private final List updateColumnList; @Getter - private final List sourceExpressionList; + private final List sourceExpressionList; private AlgDataType inputRowType; @Getter private final boolean flattened; @@ -91,7 +91,7 @@ public abstract class RelModify extends Modify { * @param sourceExpressionList List of value expressions to be set (e.g. exp1, exp2); null if not UPDATE * @param flattened Whether set flattens the input row type */ - protected RelModify( AlgOptCluster cluster, AlgTraitSet traitSet, E table, AlgNode input, Operation operation, List updateColumnList, List sourceExpressionList, boolean flattened ) { + protected RelModify( AlgOptCluster cluster, AlgTraitSet traitSet, E table, AlgNode input, Operation operation, List updateColumnList, List sourceExpressionList, boolean flattened ) { super( cluster, traitSet, table, input ); this.operation = operation; this.updateColumnList = updateColumnList; @@ -108,7 +108,6 @@ protected RelModify( AlgOptCluster cluster, AlgTraitSet traitSet, E table, AlgNo } - public boolean isInsert() { return operation == Operation.INSERT; } diff --git a/core/src/main/java/org/polypheny/db/algebra/core/relational/RelationalTransformable.java b/core/src/main/java/org/polypheny/db/algebra/core/relational/RelationalTransformable.java index b5bf0fc617..ac60f1511e 100644 --- a/core/src/main/java/org/polypheny/db/algebra/core/relational/RelationalTransformable.java +++ b/core/src/main/java/org/polypheny/db/algebra/core/relational/RelationalTransformable.java @@ -20,6 +20,7 @@ import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.core.common.Modify; import org.polypheny.db.algebra.core.common.Modify.Operation; +import org.polypheny.db.catalog.Snapshot; import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.catalog.refactor.ModifiableEntity; import org.polypheny.db.prepare.Prepare.CatalogReader; @@ -35,7 +36,7 @@ default CatalogReader getCatalogReader() { } - List getRelationalEquivalent( List values, List entities, CatalogReader catalogReader ); + List getRelationalEquivalent( List values, List entities, Snapshot snapshot ); static Modify getModify( CatalogEntity entity, AlgNode alg, Operation operation ) { diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalConstraintEnforcer.java b/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalConstraintEnforcer.java index 1d15062181..7446fbfca1 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalConstraintEnforcer.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalConstraintEnforcer.java @@ -160,8 +160,8 @@ private static EnforcementInformation getControl( AlgNode node, Statement statem if ( RuntimeConfig.FOREIGN_KEY_ENFORCEMENT.getBoolean() ) { for ( final CatalogForeignKey foreignKey : Stream.concat( foreignKeys.stream(), 
exportedKeys.stream() ).collect( Collectors.toList() ) ) { builder.clear(); - final LogicalTable scanOptTable = statement.getDataContext().getSnapshot().getTable( foreignKey.tableId ); - final LogicalTable refOptTable = statement.getDataContext().getSnapshot().getTable( foreignKey.referencedKeyTableId ); + final LogicalTable scanOptTable = statement.getDataContext().getSnapshot().getLogicalTable( foreignKey.tableId ); + final LogicalTable refOptTable = statement.getDataContext().getSnapshot().getLogicalTable( foreignKey.referencedKeyTableId ); final AlgNode scan = LogicalRelScan.create( modify.getCluster(), scanOptTable ); final LogicalRelScan ref = LogicalRelScan.create( modify.getCluster(), refOptTable ); @@ -255,7 +255,7 @@ public static EnforcementInformation getControl( LogicalTable table, Statement s //builder.scan( table.getSchemaName(), table.name ); for ( CatalogConstraint constraint : constraints ) { builder.clear(); - builder.scan( table.getNamespaceName(), table.name );//LogicalTableScan.create( modify.getCluster(), modify.getTable() ); + builder.scan( table.getNamespaceName(), table.name );//LogicalTableScan.create( modify.getCluster(), modify.getLogicalTable() ); // Enforce uniqueness between the already existing values and the new values List keys = constraint.key .getColumnNames() diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/document/LogicalDocumentModify.java b/core/src/main/java/org/polypheny/db/algebra/logical/document/LogicalDocumentModify.java index a239f50c0a..02a5d21ab3 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/document/LogicalDocumentModify.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/document/LogicalDocumentModify.java @@ -21,8 +21,8 @@ import org.polypheny.db.algebra.AlgShuttle; import org.polypheny.db.algebra.core.document.DocumentModify; import org.polypheny.db.algebra.core.relational.RelationalTransformable; +import org.polypheny.db.catalog.Snapshot; import org.polypheny.db.catalog.entity.CatalogEntity; -import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.prepare.Prepare.CatalogReader; import org.polypheny.db.rex.RexNode; @@ -50,7 +50,7 @@ public AlgNode copy( AlgTraitSet traitSet, List inputs ) { @Override - public List getRelationalEquivalent( List values, List entities, CatalogReader catalogReader ) { + public List getRelationalEquivalent( List values, List entities, Snapshot snapshot ) { return List.of( RelationalTransformable.getModify( entities.get( 0 ), values.get( 0 ), operation ) ); } diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/document/LogicalDocumentScan.java b/core/src/main/java/org/polypheny/db/algebra/logical/document/LogicalDocumentScan.java index e9d76cd001..fbda351c46 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/document/LogicalDocumentScan.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/document/LogicalDocumentScan.java @@ -22,33 +22,33 @@ import org.polypheny.db.algebra.core.document.DocumentScan; import org.polypheny.db.algebra.core.relational.RelationalTransformable; import org.polypheny.db.algebra.logical.relational.LogicalRelScan; +import org.polypheny.db.catalog.Snapshot; import org.polypheny.db.catalog.entity.CatalogEntity; -import org.polypheny.db.catalog.entity.LogicalCollection; +import org.polypheny.db.catalog.entity.logical.LogicalCollection; import org.polypheny.db.plan.AlgOptCluster; -import org.polypheny.db.plan.AlgOptEntity; import 
org.polypheny.db.plan.AlgOptRule; import org.polypheny.db.plan.AlgTraitSet; -import org.polypheny.db.prepare.Prepare.CatalogReader; +import org.polypheny.db.schema.LogicalEntity; import org.polypheny.db.schema.ModelTrait; -public class LogicalDocumentScan extends DocumentScan implements RelationalTransformable { +public class LogicalDocumentScan extends DocumentScan implements RelationalTransformable { /** * Subclass of {@link DocumentScan} not targeted at any particular engine or calling convention. */ - public LogicalDocumentScan( AlgOptCluster cluster, AlgTraitSet traitSet, LogicalCollection document ) { + public LogicalDocumentScan( AlgOptCluster cluster, AlgTraitSet traitSet, CatalogEntity document ) { super( cluster, traitSet.replace( ModelTrait.DOCUMENT ), document ); } - public static AlgNode create( AlgOptCluster cluster, LogicalCollection collection ) { + public static AlgNode create( AlgOptCluster cluster, CatalogEntity collection ) { return new LogicalDocumentScan( cluster, cluster.traitSet(), collection ); } @Override - public List getRelationalEquivalent( List values, List entities, CatalogReader catalogReader ) { + public List getRelationalEquivalent( List values, List entities, Snapshot snapshot ) { return List.of( AlgOptRule.convert( LogicalRelScan.create( getCluster(), entities.get( 0 ) ), ModelTrait.RELATIONAL ) ); } diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalGraph.java b/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalGraph.java index 017726135b..e15734ab50 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalGraph.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalGraph.java @@ -26,13 +26,12 @@ import org.polypheny.db.algebra.core.relational.RelationalTransformable; import org.polypheny.db.algebra.type.AlgDataTypeImpl; import org.polypheny.db.algebra.type.AlgProtoDataType; +import org.polypheny.db.catalog.Snapshot; import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.plan.AlgOptEntity.ToAlgContext; -import org.polypheny.db.prepare.Prepare.CatalogReader; import org.polypheny.db.schema.Function; import org.polypheny.db.schema.Namespace; -import org.polypheny.db.schema.PolyphenyDbSchema; import org.polypheny.db.schema.SchemaVersion; import org.polypheny.db.schema.Schemas; import org.polypheny.db.schema.TranslatableGraph; @@ -55,7 +54,7 @@ public LogicalGraph( long id ) { @Override - public List getRelationalEquivalent( List values, List entities, CatalogReader catalogReader ) { + public List getRelationalEquivalent( List values, List entities, Snapshot snapshot ) { return null; } @@ -109,8 +108,8 @@ public Set getSubNamespaceNames() { @Override - public Expression getExpression( PolyphenyDbSchema parentSchema, String name ) { - return Schemas.subSchemaExpression( parentSchema, name, LogicalGraph.class ); + public Expression getExpression( Snapshot snapshot, String name ) { + return Schemas.subSchemaExpression( snapshot, name, LogicalGraph.class ); } diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgModify.java b/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgModify.java index 8e803d907f..2a104af77c 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgModify.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgModify.java @@ -22,14 +22,13 @@ import org.polypheny.db.algebra.AlgShuttle; import 
org.polypheny.db.algebra.constant.Kind; import org.polypheny.db.algebra.core.common.Modify; -import org.polypheny.db.algebra.core.relational.RelModify; import org.polypheny.db.algebra.core.lpg.LpgModify; import org.polypheny.db.algebra.core.relational.RelationalTransformable; +import org.polypheny.db.catalog.Snapshot; import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgOptUtil; import org.polypheny.db.plan.AlgTraitSet; -import org.polypheny.db.prepare.Prepare.CatalogReader; import org.polypheny.db.rex.RexNode; @@ -51,7 +50,7 @@ public AlgNode copy( AlgTraitSet traitSet, List inputs ) { @Override - public List getRelationalEquivalent( List inputs, List entities, CatalogReader catalogReader ) { + public List getRelationalEquivalent( List inputs, List entities, Snapshot snapshot ) { List modifies = new ArrayList<>(); // modify of nodes diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgScan.java b/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgScan.java index c7d4d04c83..2713dcb8a8 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgScan.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgScan.java @@ -28,30 +28,31 @@ import org.polypheny.db.algebra.logical.relational.LogicalRelScan; import org.polypheny.db.algebra.operators.OperatorName; import org.polypheny.db.algebra.type.AlgDataType; +import org.polypheny.db.catalog.Snapshot; import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.languages.OperatorRegistry; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgTraitSet; -import org.polypheny.db.prepare.Prepare.CatalogReader; import org.polypheny.db.rex.RexBuilder; import org.polypheny.db.rex.RexNode; +import org.polypheny.db.schema.LogicalEntity; import org.polypheny.db.schema.ModelTrait; -public class LogicalLpgScan extends LpgScan implements RelationalTransformable { +public class LogicalLpgScan extends LpgScan implements RelationalTransformable { /** * Subclass of {@link LpgScan} not targeted at any particular engine or calling convention. 
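A hedged sketch (not part of this patch) of how callers thread the new Snapshot parameter through getRelationalEquivalent; the helper name is hypothetical and the list element types AlgNode/CatalogEntity are assumed from the surrounding implementations:

import java.util.List;
import org.polypheny.db.algebra.AlgNode;
import org.polypheny.db.algebra.core.relational.RelationalTransformable;
import org.polypheny.db.catalog.Snapshot;
import org.polypheny.db.catalog.entity.CatalogEntity;

class RelationalTransformSketch {
    // The former Prepare.CatalogReader argument is replaced by a catalog Snapshot.
    static List<AlgNode> toRelational( RelationalTransformable node, List<AlgNode> values, List<CatalogEntity> entities, Snapshot snapshot ) {
        return node.getRelationalEquivalent( values, entities, snapshot );
    }
}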
*/ - public LogicalLpgScan( AlgOptCluster cluster, AlgTraitSet traitSet, LogicalGraph graph, AlgDataType rowType ) { + public LogicalLpgScan( AlgOptCluster cluster, AlgTraitSet traitSet, CatalogEntity graph, AlgDataType rowType ) { super( cluster, traitSet.replace( ModelTrait.GRAPH ), graph ); this.rowType = rowType; } @Override - public List getRelationalEquivalent( List inputs, List entities, CatalogReader catalogReader ) { + public List getRelationalEquivalent( List inputs, List entities, Snapshot snapshot ) { assert !entities.isEmpty(); AlgTraitSet out = getTraitSet().replace( ModelTrait.RELATIONAL ); LogicalRelScan nodes = new LogicalRelScan( getCluster(), out, entities.get( 0 ) ); diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgValues.java b/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgValues.java index 104be93273..846bf34140 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgValues.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgValues.java @@ -36,10 +36,10 @@ import org.polypheny.db.algebra.type.AlgDataTypeFieldImpl; import org.polypheny.db.algebra.type.AlgDataTypeSystem; import org.polypheny.db.algebra.type.AlgRecordType; +import org.polypheny.db.catalog.Snapshot; import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgTraitSet; -import org.polypheny.db.prepare.Prepare.CatalogReader; import org.polypheny.db.rex.RexLiteral; import org.polypheny.db.schema.ModelTrait; import org.polypheny.db.schema.graph.PolyEdge; @@ -114,10 +114,10 @@ public static LogicalLpgValues create( @Override - public List getRelationalEquivalent( List values, List entities, CatalogReader catalogReader ) { + public List getRelationalEquivalent( List values, List entities, Snapshot snapshot ) { AlgTraitSet out = traitSet.replace( ModelTrait.RELATIONAL ); - AlgOptCluster cluster = AlgOptCluster.create( getCluster().getPlanner(), getCluster().getRexBuilder(), out, catalogReader.getRootSchema() ); + AlgOptCluster cluster = AlgOptCluster.create( getCluster().getPlanner(), getCluster().getRexBuilder(), out, snapshot ); LogicalValues nodeValues = new LogicalValues( cluster, out, entities.get( 0 ).getRowType(), getNodeValues( nodes ) ); LogicalValues nodePropertyValues = new LogicalValues( cluster, out, entities.get( 1 ).getRowType(), getNodePropertyValues( nodes ) ); diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelModify.java b/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelModify.java index 4c70342ceb..c6ebd265e8 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelModify.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelModify.java @@ -47,12 +47,12 @@ public final class LogicalRelModify extends RelModify { * * Use {@link #create} unless you know what you're doing. 
*/ - public LogicalRelModify( AlgOptCluster cluster, AlgTraitSet traitSet, CatalogEntity table, AlgNode input, Operation operation, List updateColumnList, List sourceExpressionList, boolean flattened ) { + public LogicalRelModify( AlgOptCluster cluster, AlgTraitSet traitSet, CatalogEntity table, AlgNode input, Operation operation, List updateColumnList, List sourceExpressionList, boolean flattened ) { super( cluster, traitSet.replace( ModelTrait.RELATIONAL ), table, input, operation, updateColumnList, sourceExpressionList, flattened ); } - public LogicalRelModify( AlgTraitSet traits, CatalogEntity table, AlgNode child, Operation operation, List targets, List sources ) { + public LogicalRelModify( AlgTraitSet traits, CatalogEntity table, AlgNode child, Operation operation, List targets, List sources ) { super( child.getCluster(), traits, table, child, operation, targets, sources, false ); } @@ -60,7 +60,7 @@ public LogicalRelModify( AlgTraitSet traits, CatalogEntity table, AlgNode child, /** * Creates a LogicalModify. */ - public static LogicalRelModify create( CatalogEntity table, AlgNode input, Operation operation, List updateColumnList, List sourceExpressionList, boolean flattened ) { + public static LogicalRelModify create( CatalogEntity table, AlgNode input, Operation operation, List updateColumnList, List sourceExpressionList, boolean flattened ) { final AlgOptCluster cluster = input.getCluster(); final AlgTraitSet traitSet = cluster.traitSetOf( Convention.NONE ); return new LogicalRelModify( cluster, traitSet, table, input, operation, updateColumnList, sourceExpressionList, flattened ); diff --git a/core/src/main/java/org/polypheny/db/algebra/type/GraphType.java b/core/src/main/java/org/polypheny/db/algebra/type/GraphType.java new file mode 100644 index 0000000000..418cbc75bd --- /dev/null +++ b/core/src/main/java/org/polypheny/db/algebra/type/GraphType.java @@ -0,0 +1,195 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.db.algebra.type; + +import java.io.Serializable; +import java.nio.charset.Charset; +import java.util.List; +import org.polypheny.db.nodes.IntervalQualifier; +import org.polypheny.db.type.PolyType; +import org.polypheny.db.util.Collation; + +public class GraphType implements Serializable, AlgDataType, AlgDataTypeFamily, AlgDataTypeField { + + @Override + public String getKey() { + return null; + } + + + @Override + public AlgDataType getValue() { + return null; + } + + + @Override + public AlgDataType setValue( AlgDataType value ) { + return null; + } + + + @Override + public boolean isStruct() { + return false; + } + + + @Override + public List getFieldList() { + return null; + } + + + @Override + public List getFieldNames() { + return null; + } + + + @Override + public int getFieldCount() { + return 0; + } + + + @Override + public StructKind getStructKind() { + return null; + } + + + @Override + public AlgDataTypeField getField( String fieldName, boolean caseSensitive, boolean elideRecord ) { + return null; + } + + + @Override + public boolean isNullable() { + return false; + } + + + @Override + public AlgDataType getComponentType() { + return null; + } + + + @Override + public Charset getCharset() { + return null; + } + + + @Override + public Collation getCollation() { + return null; + } + + + @Override + public IntervalQualifier getIntervalQualifier() { + return null; + } + + + @Override + public int getPrecision() { + return 0; + } + + + @Override + public int getRawPrecision() { + return 0; + } + + + @Override + public int getScale() { + return 0; + } + + + @Override + public PolyType getPolyType() { + return null; + } + + + @Override + public String getFullTypeString() { + return null; + } + + + @Override + public AlgDataTypeFamily getFamily() { + return null; + } + + + @Override + public AlgDataTypePrecedenceList getPrecedenceList() { + return null; + } + + + @Override + public AlgDataTypeComparability getComparability() { + return null; + } + + + @Override + public boolean isDynamicStruct() { + return false; + } + + + @Override + public String getName() { + return null; + } + + + @Override + public String getPhysicalName() { + return null; + } + + + @Override + public int getIndex() { + return 0; + } + + + @Override + public AlgDataType getType() { + return null; + } + + + @Override + public boolean isDynamicStar() { + return false; + } + +} diff --git a/core/src/main/java/org/polypheny/db/catalog/Catalog.java b/core/src/main/java/org/polypheny/db/catalog/Catalog.java index 37fc4ab089..75e5321aaf 100644 --- a/core/src/main/java/org/polypheny/db/catalog/Catalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/Catalog.java @@ -52,7 +52,7 @@ import org.polypheny.db.catalog.entity.CatalogSchema; import org.polypheny.db.catalog.entity.CatalogUser; import org.polypheny.db.catalog.entity.CatalogView; -import org.polypheny.db.catalog.entity.LogicalCollection; +import org.polypheny.db.catalog.entity.logical.LogicalCollection; import org.polypheny.db.catalog.entity.MaterializedCriteria; import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.catalog.entity.logical.LogicalTable; diff --git a/core/src/main/java/org/polypheny/db/catalog/Snapshot.java b/core/src/main/java/org/polypheny/db/catalog/Snapshot.java index c9afc490df..6cfc3a9fdc 100644 --- a/core/src/main/java/org/polypheny/db/catalog/Snapshot.java +++ b/core/src/main/java/org/polypheny/db/catalog/Snapshot.java @@ -17,19 +17,40 @@ package org.polypheny.db.catalog; import 
java.util.List; +import org.apache.calcite.linq4j.tree.Expression; +import org.apache.calcite.linq4j.tree.Expressions; +import org.polypheny.db.algebra.constant.FunctionCategory; +import org.polypheny.db.algebra.constant.Syntax; +import org.polypheny.db.algebra.operators.OperatorTable; +import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.catalog.entity.CatalogNamespace; -import org.polypheny.db.catalog.entity.LogicalCollection; import org.polypheny.db.catalog.entity.allocation.AllocationCollection; import org.polypheny.db.catalog.entity.allocation.AllocationGraph; import org.polypheny.db.catalog.entity.allocation.AllocationTable; +import org.polypheny.db.catalog.entity.logical.LogicalCollection; import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.entity.physical.PhysicalCollection; import org.polypheny.db.catalog.entity.physical.PhysicalGraph; import org.polypheny.db.catalog.entity.physical.PhysicalTable; import org.polypheny.db.catalog.logistic.Pattern; +import org.polypheny.db.config.RuntimeConfig; +import org.polypheny.db.nodes.Identifier; +import org.polypheny.db.nodes.Operator; +import org.polypheny.db.util.NameMatcher; +import org.polypheny.db.util.NameMatchers; -public interface Snapshot { +public interface Snapshot extends OperatorTable { + + NameMatcher nameMatcher = NameMatchers.withCaseSensitive( RuntimeConfig.RELATIONAL_CASE_SENSITIVE.getBoolean() ); + + long getId(); + + default Expression getSnapshotExpression( long id ) { + return Expressions.call( Catalog.CATALOG_EXPRESSION, "getSnapshot", Expressions.constant( id ) ); + } + + //// NAMESPACES CatalogNamespace getNamespace( long id ); @@ -39,6 +60,23 @@ public interface Snapshot { //// ENTITIES + + CatalogEntity getEntity( long id ); + + CatalogEntity getEntity( long namespaceId, String name ); + + CatalogEntity getEntity( long namespaceId, Pattern name ); + + //// LOGICAL ENTITIES + @Deprecated + LogicalTable getLogicalTable( List names ); + + @Deprecated + LogicalCollection getLogicalCollection( List names ); + + @Deprecated + LogicalGraph getLogicalGraph( List names ); + LogicalTable getLogicalTable( long id ); LogicalTable getLogicalTable( long namespaceId, String name ); @@ -57,16 +95,43 @@ public interface Snapshot { List getLogicalGraphs( long namespaceId, Pattern name ); + //// ALLOCATION ENTITIES + AllocationTable getAllocTable( long id ); AllocationCollection getAllocCollection( long id ); AllocationGraph getAllocGraph( long id ); + //// PHYSICAL ENTITIES + PhysicalTable getPhysicalTable( long id ); + PhysicalTable getPhysicalTable( long logicalId, long adapterId ); + PhysicalCollection getPhysicalCollection( long id ); + PhysicalCollection getPhysicalCollection( long logicalId, long adapterId ); + + PhysicalGraph getPhysicalGraph( long id ); + PhysicalGraph getPhysicalGraph( long logicalId, long adapterId ); + + //// LOGISTICS + + boolean isPartitioned( long id ); + + //// OTHERS + + @Override + default void lookupOperatorOverloads( Identifier opName, FunctionCategory category, Syntax syntax, List operatorList ) { + + } + + @Override + default List getOperatorList() { + return null; + } + } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogEntity.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogEntity.java index 8e8aff2a5c..a46da60dc8 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogEntity.java +++ 
b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogEntity.java @@ -22,11 +22,15 @@ import org.polypheny.db.StatisticsManager; import org.polypheny.db.algebra.AlgDistribution; import org.polypheny.db.algebra.type.AlgDataType; +import org.polypheny.db.algebra.type.DocumentType; +import org.polypheny.db.algebra.type.GraphType; import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.catalog.refactor.CatalogType; import org.polypheny.db.catalog.refactor.Expressible; import org.polypheny.db.plan.AlgMultipleTrait; +import org.polypheny.db.schema.Statistic; +import org.polypheny.db.schema.Statistics; import org.polypheny.db.schema.Wrapper; import org.polypheny.db.util.ImmutableBitSet; @@ -48,7 +52,15 @@ protected CatalogEntity( long id, String name, EntityType type, NamespaceType na public AlgDataType getRowType() { - return null; + switch ( namespaceType ) { + case RELATIONAL: + throw new UnsupportedOperationException( "Should be overwritten by child" ); + case DOCUMENT: + return new DocumentType(); + case GRAPH: + return new GraphType(); + } + throw new RuntimeException( "Error while generating the RowType" ); } @@ -87,4 +99,9 @@ public AlgDistribution getDistribution() { return null; } + + public Statistic getStatistic() { + return null; + } + } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogNamespace.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogNamespace.java index fbd6d35aa6..f82532991f 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogNamespace.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogNamespace.java @@ -23,11 +23,13 @@ public abstract class CatalogNamespace implements CatalogObject, Serializable { public final long id; public final NamespaceType namespaceType; + public final String name; - public CatalogNamespace( long id, NamespaceType type ) { + public CatalogNamespace( long id, String name, NamespaceType type ) { this.id = id; this.namespaceType = type; + this.name = name; } } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogSchema.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogSchema.java index a243204182..db979c6d4d 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogSchema.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogSchema.java @@ -53,7 +53,7 @@ public CatalogSchema( @NonNull final String ownerName, @NonNull final NamespaceType namespaceType, boolean caseSensitive ) { - super( id, namespaceType ); + super( id, name, namespaceType ); this.id = id; this.name = name; this.databaseId = databaseId; diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationTable.java b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationTable.java index a7e0eeed77..7fdeeec877 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationTable.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationTable.java @@ -18,20 +18,32 @@ import java.io.Serializable; import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.stream.Collectors; import org.apache.calcite.linq4j.tree.Expression; import org.apache.calcite.linq4j.tree.Expressions; +import org.polypheny.db.PolyImplementation; import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.entity.CatalogColumn; import 
org.polypheny.db.catalog.entity.CatalogColumnPlacement; import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.catalog.entity.logical.LogicalGraph; +import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.catalog.logistic.EntityType; +import org.polypheny.db.catalog.logistic.NamespaceType; +import org.polypheny.db.type.PolyType; public class AllocationTable extends CatalogEntity implements Allocation { - public List placements; + public final List placements; + public final long adapterId; - protected AllocationTable( long id, LogicalGraph graph ) { - super( id, graph.name, graph.entityType, graph.namespaceType ); + protected AllocationTable( long id, String name, long adapterId, List placements ) { + super( id, name, EntityType.ENTITY, NamespaceType.RELATIONAL ); + this.adapterId = adapterId; + this.placements = placements; } @@ -46,4 +58,19 @@ public Expression asExpression() { return Expressions.call( Catalog.CATALOG_EXPRESSION, "getAllocTable", Expressions.constant( id ) ); } + + public Map getColumnNames() { + return null; + } + + + public Map getColumns() { + return null; + } + + + public Map getColumnNamesIds() { + return getColumnNames().entrySet().stream().collect( Collectors.toMap( Entry::getValue, Entry::getKey ) ); + } + } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/LogicalCollection.java b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalCollection.java similarity index 94% rename from core/src/main/java/org/polypheny/db/catalog/entity/LogicalCollection.java rename to core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalCollection.java index 56c23f931f..fd9b6bb6d9 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/LogicalCollection.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalCollection.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2022 The Polypheny Project + * Copyright 2019-2023 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -14,7 +14,7 @@ * limitations under the License. 
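A standalone sketch of the map inversion used by AllocationTable.getColumnNamesIds() above; the Map<Long, String> key/value types are an assumption, since getColumnNames() is still a stub in this commit:

import java.util.Map;
import java.util.Map.Entry;
import java.util.stream.Collectors;

class InvertColumnMapSketch {
    // Flips (columnId -> columnName) into (columnName -> columnId), mirroring
    // the Collectors.toMap( Entry::getValue, Entry::getKey ) call above.
    static Map<String, Long> invert( Map<Long, String> columnNames ) {
        return columnNames.entrySet().stream()
                .collect( Collectors.toMap( Entry::getValue, Entry::getKey ) );
    }
}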
*/ -package org.polypheny.db.catalog.entity; +package org.polypheny.db.catalog.entity.logical; import com.google.common.collect.ImmutableList; import java.io.Serializable; @@ -28,7 +28,8 @@ import org.apache.calcite.linq4j.tree.Expression; import org.apache.calcite.linq4j.tree.Expressions; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.entity.logical.Logical; +import org.polypheny.db.catalog.entity.CatalogEntity; +import org.polypheny.db.catalog.entity.CatalogObject; import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.logistic.NamespaceType; diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalTable.java b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalTable.java index 9903063ddc..2d364fd1ee 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalTable.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalTable.java @@ -28,7 +28,11 @@ import lombok.SneakyThrows; import org.apache.calcite.linq4j.tree.Expression; import org.apache.calcite.linq4j.tree.Expressions; +import org.polypheny.db.algebra.type.AlgDataType; +import org.polypheny.db.algebra.type.AlgDataTypeFactory; +import org.polypheny.db.algebra.type.AlgDataTypeImpl; import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.entity.CatalogColumn; import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.logistic.NamespaceType; @@ -231,6 +235,20 @@ public LogicalTable getRenamed( String newName ) { } + @Override + public AlgDataType getRowType() { + final AlgDataTypeFactory.Builder fieldInfo = AlgDataTypeFactory.DEFAULT.builder(); + + for ( Long id : fieldIds ) { + CatalogColumn catalogColumn = Catalog.getInstance().getColumn( id ); + AlgDataType sqlType = catalogColumn.getAlgDataType( AlgDataTypeFactory.DEFAULT ); + fieldInfo.add( catalogColumn.name, null, sqlType ).nullable( catalogColumn.nullable ); + } + + return AlgDataTypeImpl.proto( fieldInfo.build() ).apply( AlgDataTypeFactory.DEFAULT ); + } + + public LogicalTable getConnectedViews( ImmutableList newConnectedViews ) { return new LogicalTable( id, @@ -267,7 +285,7 @@ public LogicalTable getTableWithColumns( ImmutableList newColumnIds ) { @Override public Expression asExpression() { - return Expressions.call( Expressions.call( Catalog.class, "getInstance" ), "getTable", Expressions.constant( id ) ); + return Expressions.call( Expressions.call( Catalog.class, "getInstance" ), "getLogicalTable", Expressions.constant( id ) ); } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalTable.java b/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalTable.java index 6aa123690b..bffc845c88 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalTable.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalTable.java @@ -19,6 +19,7 @@ import com.google.common.collect.ImmutableList; import java.io.Serializable; import java.util.List; +import java.util.stream.Collectors; import org.apache.calcite.linq4j.tree.Expression; import org.apache.calcite.linq4j.tree.Expressions; import org.polypheny.db.algebra.type.AlgDataType; @@ -38,16 +39,33 @@ public class PhysicalTable extends CatalogEntity implements Physical { public final ImmutableList placements; + public final ImmutableList columnIds; + public final ImmutableList columnNames; + public final String 
namespaceName; - protected PhysicalTable( long id, String name, EntityType type, NamespaceType namespaceType, List placements ) { + protected PhysicalTable( long id, String name, String namespaceName, EntityType type, NamespaceType namespaceType, List placements, List columnNames ) { super( id, name, type, namespaceType ); + this.namespaceName = namespaceName; this.placements = ImmutableList.copyOf( placements ); + this.columnIds = ImmutableList.copyOf( placements.stream().map( p -> p.columnId ).collect( Collectors.toList() ) ); + this.columnNames = ImmutableList.copyOf( columnNames ); } - public PhysicalTable( AllocationTable table ) { - this( table.id, table.name, table.entityType, table.namespaceType, table.placements ); + public PhysicalTable( AllocationTable table, String name, String namespaceName, List columnNames ) { + this( table.id, name, namespaceName, table.entityType, table.namespaceType, table.placements, columnNames ); + } + + + public PhysicalTable( PhysicalTable physicalTable ) { + this( physicalTable.id, physicalTable.name, physicalTable.namespaceName, physicalTable.entityType, physicalTable.namespaceType, physicalTable.placements, physicalTable.columnNames ); + } + + + @Override + public AlgDataType getRowType() { + return buildProto().apply( AlgDataTypeFactory.DEFAULT ); } diff --git a/core/src/main/java/org/polypheny/db/catalog/refactor/Expressible.java b/core/src/main/java/org/polypheny/db/catalog/refactor/Expressible.java index 59644bc8bf..e036044501 100644 --- a/core/src/main/java/org/polypheny/db/catalog/refactor/Expressible.java +++ b/core/src/main/java/org/polypheny/db/catalog/refactor/Expressible.java @@ -17,9 +17,14 @@ package org.polypheny.db.catalog.refactor; import org.apache.calcite.linq4j.tree.Expression; +import org.apache.calcite.linq4j.tree.Expressions; public interface Expressible { Expression asExpression(); + default Expression asExpression( Class clazz ) { + return Expressions.typeAs( asExpression(), clazz ); + } + } diff --git a/core/src/main/java/org/polypheny/db/catalog/refactor/FilterableEntity.java b/core/src/main/java/org/polypheny/db/catalog/refactor/FilterableEntity.java index d9228549f9..5aeca50f2f 100644 --- a/core/src/main/java/org/polypheny/db/catalog/refactor/FilterableEntity.java +++ b/core/src/main/java/org/polypheny/db/catalog/refactor/FilterableEntity.java @@ -16,6 +16,16 @@ package org.polypheny.db.catalog.refactor; +import java.util.List; +import java.util.concurrent.atomic.AtomicBoolean; +import org.apache.calcite.linq4j.AbstractEnumerable; +import org.apache.calcite.linq4j.Enumerable; +import org.apache.calcite.linq4j.Enumerator; +import org.polypheny.db.adapter.DataContext; +import org.polypheny.db.rex.RexNode; + public interface FilterableEntity { + Enumerable scan( DataContext dataContext, List filters ); + } diff --git a/core/src/main/java/org/polypheny/db/catalog/refactor/ModifiableEntity.java b/core/src/main/java/org/polypheny/db/catalog/refactor/ModifiableEntity.java index 39c931555d..08557d105b 100644 --- a/core/src/main/java/org/polypheny/db/catalog/refactor/ModifiableEntity.java +++ b/core/src/main/java/org/polypheny/db/catalog/refactor/ModifiableEntity.java @@ -34,7 +34,7 @@ Modify toModificationAlg( AlgNode child, Operation operation, List targets, - List sources ); + List sources ); } diff --git a/core/src/main/java/org/polypheny/db/catalog/refactor/QueryableEntity.java b/core/src/main/java/org/polypheny/db/catalog/refactor/QueryableEntity.java index 0ea37177d0..8ec1db2f1b 100644 --- 
a/core/src/main/java/org/polypheny/db/catalog/refactor/QueryableEntity.java +++ b/core/src/main/java/org/polypheny/db/catalog/refactor/QueryableEntity.java @@ -19,15 +19,17 @@ import java.lang.reflect.Type; import org.apache.calcite.linq4j.Queryable; import org.polypheny.db.adapter.DataContext; -import org.polypheny.db.schema.PolyphenyDbSchema; +import org.polypheny.db.catalog.Snapshot; public interface QueryableEntity { /** * Converts this table into a {@link Queryable}. */ - Queryable asQueryable( DataContext dataContext, PolyphenyDbSchema schema, long entityId ); + Queryable asQueryable( DataContext dataContext, Snapshot snapshot, long entityId ); - Type getElementType(); + default Type getElementType() { + return Object[].class; + } } diff --git a/core/src/main/java/org/polypheny/db/catalog/refactor/ScannableEntity.java b/core/src/main/java/org/polypheny/db/catalog/refactor/ScannableEntity.java index 4793a08261..8b3abfacb1 100644 --- a/core/src/main/java/org/polypheny/db/catalog/refactor/ScannableEntity.java +++ b/core/src/main/java/org/polypheny/db/catalog/refactor/ScannableEntity.java @@ -16,6 +16,14 @@ package org.polypheny.db.catalog.refactor; +import java.util.concurrent.atomic.AtomicBoolean; +import org.apache.calcite.linq4j.AbstractEnumerable; +import org.apache.calcite.linq4j.Enumerable; +import org.apache.calcite.linq4j.Enumerator; +import org.polypheny.db.adapter.DataContext; + public interface ScannableEntity { + Enumerable scan( DataContext dataContext ); + } diff --git a/core/src/main/java/org/polypheny/db/ddl/DdlManager.java b/core/src/main/java/org/polypheny/db/ddl/DdlManager.java index 67d8240142..80a2136a25 100644 --- a/core/src/main/java/org/polypheny/db/ddl/DdlManager.java +++ b/core/src/main/java/org/polypheny/db/ddl/DdlManager.java @@ -31,7 +31,7 @@ import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.catalog.logistic.PlacementType; import org.polypheny.db.catalog.entity.CatalogAdapter.AdapterType; -import org.polypheny.db.catalog.entity.LogicalCollection; +import org.polypheny.db.catalog.entity.logical.LogicalCollection; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.entity.MaterializedCriteria; import org.polypheny.db.catalog.exceptions.ColumnAlreadyExistsException; diff --git a/core/src/main/java/org/polypheny/db/languages/LanguageManager.java b/core/src/main/java/org/polypheny/db/languages/LanguageManager.java index d33f8e43af..e2902f3472 100644 --- a/core/src/main/java/org/polypheny/db/languages/LanguageManager.java +++ b/core/src/main/java/org/polypheny/db/languages/LanguageManager.java @@ -23,6 +23,7 @@ import java.util.function.BiFunction; import java.util.function.Supplier; import lombok.Getter; +import org.polypheny.db.catalog.Snapshot; import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.nodes.validate.Validator; import org.polypheny.db.prepare.Context; @@ -59,7 +60,7 @@ public void removeObserver( PropertyChangeListener listener ) { } - public void addQueryLanguage( NamespaceType namespaceType, String serializedName, List otherNames, ParserFactory factory, Supplier processorSupplier, BiFunction validatorSupplier ) { + public void addQueryLanguage( NamespaceType namespaceType, String serializedName, List otherNames, ParserFactory factory, Supplier processorSupplier, BiFunction validatorSupplier ) { QueryLanguage language = new QueryLanguage( namespaceType, serializedName, otherNames, factory, processorSupplier, validatorSupplier ); REGISTER.add( language 
); listeners.firePropertyChange( "language", null, language ); diff --git a/core/src/main/java/org/polypheny/db/languages/QueryLanguage.java b/core/src/main/java/org/polypheny/db/languages/QueryLanguage.java index b0351f489c..774ef04f88 100644 --- a/core/src/main/java/org/polypheny/db/languages/QueryLanguage.java +++ b/core/src/main/java/org/polypheny/db/languages/QueryLanguage.java @@ -26,6 +26,7 @@ import java.util.function.BiFunction; import java.util.function.Supplier; import lombok.Getter; +import org.polypheny.db.catalog.Snapshot; import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.nodes.validate.Validator; import org.polypheny.db.prepare.Context; @@ -43,12 +44,12 @@ public class QueryLanguage { @Getter private final Supplier processorSupplier; @Getter - private final BiFunction validatorSupplier; + private final BiFunction validatorSupplier; @Getter private final List otherNames; - public QueryLanguage( NamespaceType namespaceType, String serializedName, List otherNames, ParserFactory factory, Supplier processorSupplier, BiFunction validatorSupplier ) { + public QueryLanguage( NamespaceType namespaceType, String serializedName, List otherNames, ParserFactory factory, Supplier processorSupplier, BiFunction validatorSupplier ) { this.namespaceType = namespaceType; this.serializedName = serializedName; this.factory = factory; diff --git a/core/src/main/java/org/polypheny/db/nodes/validate/ValidatorCatalogReader.java b/core/src/main/java/org/polypheny/db/nodes/validate/ValidatorCatalogReader.java index b7cce1e5b6..4938c1515e 100644 --- a/core/src/main/java/org/polypheny/db/nodes/validate/ValidatorCatalogReader.java +++ b/core/src/main/java/org/polypheny/db/nodes/validate/ValidatorCatalogReader.java @@ -19,6 +19,8 @@ import java.util.List; import org.polypheny.db.algebra.type.AlgDataType; +import org.polypheny.db.catalog.Snapshot; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.nodes.Identifier; import org.polypheny.db.schema.PolyphenyDbSchema; import org.polypheny.db.schema.Wrapper; @@ -45,7 +47,7 @@ public interface ValidatorCatalogReader extends Wrapper { * @param names Name of table, may be qualified or fully-qualified * @return Table with the given name, or null */ - ValidatorTable getTable( List names ); + LogicalTable getTable( List names ); /** * Finds a user-defined type with the given name, possibly qualified. @@ -72,6 +74,6 @@ public interface ValidatorCatalogReader extends Wrapper { /** * Returns the root namespace for name resolution. 
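For context, a hedged sketch of the validator-factory shape implied by the new QueryLanguage and addQueryLanguage signatures; the generic parameters Context, Snapshot, Validator are an assumption inferred from the imports swapped above, and the wrapper is hypothetical:

import java.util.function.BiFunction;
import org.polypheny.db.catalog.Snapshot;
import org.polypheny.db.nodes.validate.Validator;
import org.polypheny.db.prepare.Context;

class ValidatorSupplierSketch {
    // Wraps an existing Validator; real suppliers would construct a fresh
    // validator from the given context and snapshot.
    static BiFunction<Context, Snapshot, Validator> constant( Validator validator ) {
        return ( context, snapshot ) -> validator;
    }
}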
*/ - PolyphenyDbSchema getRootSchema(); + Snapshot getSnapshot(); } diff --git a/core/src/main/java/org/polypheny/db/nodes/validate/ValidatorTable.java b/core/src/main/java/org/polypheny/db/nodes/validate/ValidatorTable.java index 35e2486ca9..54f274f8a5 100644 --- a/core/src/main/java/org/polypheny/db/nodes/validate/ValidatorTable.java +++ b/core/src/main/java/org/polypheny/db/nodes/validate/ValidatorTable.java @@ -42,12 +42,5 @@ public interface ValidatorTable extends Wrapper { */ Monotonicity getMonotonicity( String columnName ); - /** - * Returns the access type of the table - */ - AccessType getAllowedAccess(); - - boolean supportsModality( Modality modality ); - } diff --git a/core/src/main/java/org/polypheny/db/plan/AlgOptCluster.java b/core/src/main/java/org/polypheny/db/plan/AlgOptCluster.java index 9937be6974..0f38e8d662 100644 --- a/core/src/main/java/org/polypheny/db/plan/AlgOptCluster.java +++ b/core/src/main/java/org/polypheny/db/plan/AlgOptCluster.java @@ -34,6 +34,7 @@ package org.polypheny.db.plan; +import com.fasterxml.jackson.databind.type.TypeFactory; import java.util.HashMap; import java.util.Map; import java.util.Objects; @@ -47,6 +48,7 @@ import org.polypheny.db.algebra.metadata.MetadataFactory; import org.polypheny.db.algebra.metadata.MetadataFactoryImpl; import org.polypheny.db.algebra.type.AlgDataTypeFactory; +import org.polypheny.db.catalog.Snapshot; import org.polypheny.db.rex.RexBuilder; import org.polypheny.db.rex.RexNode; import org.polypheny.db.schema.ModelTrait; @@ -71,7 +73,7 @@ public class AlgOptCluster { private AlgMetadataQuery mq; @Getter - private final PolyphenyDbSchema rootSchema; + private final Snapshot snapshot; /** @@ -79,7 +81,7 @@ public class AlgOptCluster { * * For use only from {@link #create} and {@link AlgOptQuery}. */ - private AlgOptCluster( AlgOptPlanner planner, AlgDataTypeFactory typeFactory, RexBuilder rexBuilder, AlgTraitSet traitSet, PolyphenyDbSchema rootSchema ) { + private AlgOptCluster( AlgOptPlanner planner, AlgDataTypeFactory typeFactory, RexBuilder rexBuilder, AlgTraitSet traitSet, Snapshot snapshot ) { this.nextCorrel = new AtomicInteger( 0 ); this.mapCorrelToAlg = new HashMap<>(); this.planner = Objects.requireNonNull( planner ); @@ -91,29 +93,29 @@ private AlgOptCluster( AlgOptPlanner planner, AlgDataTypeFactory typeFactory, Re setMetadataProvider( DefaultAlgMetadataProvider.INSTANCE ); this.emptyTraitSet = traitSet == null ? AlgTraitSet.createEmpty() : traitSet; assert emptyTraitSet.size() == planner.getAlgTraitDefs().size(); - this.rootSchema = rootSchema; + this.snapshot = snapshot; } /** * Creates a cluster. 
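A usage sketch (not part of this patch) of the Snapshot-based cluster factories defined just below; planner, rexBuilder, and snapshot are assumed to come from the calling context:

import org.polypheny.db.catalog.Snapshot;
import org.polypheny.db.plan.AlgOptCluster;
import org.polypheny.db.plan.AlgOptPlanner;
import org.polypheny.db.rex.RexBuilder;

class ClusterFactorySketch {
    // Both convenience factories delegate to create(...) with a model trait set.
    static AlgOptCluster documentCluster( AlgOptPlanner planner, RexBuilder rexBuilder, Snapshot snapshot ) {
        return AlgOptCluster.createDocument( planner, rexBuilder, snapshot );
    }

    static AlgOptCluster graphCluster( AlgOptPlanner planner, RexBuilder rexBuilder, Snapshot snapshot ) {
        return AlgOptCluster.createGraph( planner, rexBuilder, snapshot );
    }
}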
*/ - public static AlgOptCluster create( AlgOptPlanner planner, RexBuilder rexBuilder, AlgTraitSet traitSet, PolyphenyDbSchema rootSchema ) { - return AlgOptCluster.create( planner, rexBuilder, planner.emptyTraitSet(), rootSchema ); + public static AlgOptCluster create( AlgOptPlanner planner, RexBuilder rexBuilder, AlgTraitSet traitSet, Snapshot snapshot ) { + return new AlgOptCluster( planner, AlgDataTypeFactory.DEFAULT, rexBuilder, traitSet, snapshot ); } - public static AlgOptCluster createDocument( AlgOptPlanner planner, RexBuilder rexBuilder, PolyphenyDbSchema rootSchema ) { + public static AlgOptCluster createDocument( AlgOptPlanner planner, RexBuilder rexBuilder, Snapshot snapshot ) { AlgTraitSet traitSet = planner.emptyTraitSet().replace( ModelTrait.DOCUMENT ); - return AlgOptCluster.create( planner, rexBuilder, traitSet, rootSchema ); + return AlgOptCluster.create( planner, rexBuilder, traitSet, snapshot ); } - public static AlgOptCluster createGraph( AlgOptPlanner planner, RexBuilder rexBuilder, PolyphenyDbSchema rootSchema ) { + public static AlgOptCluster createGraph( AlgOptPlanner planner, RexBuilder rexBuilder, Snapshot snapshot ) { AlgTraitSet traitSet = planner.emptyTraitSet().replace( ModelTrait.GRAPH ); - return AlgOptCluster.create( planner, rexBuilder, traitSet, rootSchema ); + return AlgOptCluster.create( planner, rexBuilder, traitSet, snapshot ); } diff --git a/core/src/main/java/org/polypheny/db/plan/AlgOptSchema.java b/core/src/main/java/org/polypheny/db/plan/AlgOptSchema.java index 68ec723cd7..31ced7dbd2 100644 --- a/core/src/main/java/org/polypheny/db/plan/AlgOptSchema.java +++ b/core/src/main/java/org/polypheny/db/plan/AlgOptSchema.java @@ -35,6 +35,7 @@ import java.util.List; +import org.polypheny.db.catalog.entity.logical.LogicalTable; /** @@ -51,6 +52,6 @@ public interface AlgOptSchema { * * @param names Qualified name */ - AlgOptEntity getTableForMember( List names ); + LogicalTable getTableForMember( List names ); } diff --git a/core/src/main/java/org/polypheny/db/prepare/AlgOptEntityImpl.java b/core/src/main/java/org/polypheny/db/prepare/AlgOptEntityImpl.java index bf2cd7fa1a..b45467d50e 100644 --- a/core/src/main/java/org/polypheny/db/prepare/AlgOptEntityImpl.java +++ b/core/src/main/java/org/polypheny/db/prepare/AlgOptEntityImpl.java @@ -49,7 +49,6 @@ import org.polypheny.db.algebra.AlgFieldCollation; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.AlgReferentialConstraint; -import org.polypheny.db.algebra.constant.Modality; import org.polypheny.db.algebra.constant.Monotonicity; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; @@ -75,10 +74,8 @@ import org.polypheny.db.schema.QueryableEntity; import org.polypheny.db.schema.ScannableEntity; import org.polypheny.db.schema.Schemas; -import org.polypheny.db.schema.StreamableEntity; import org.polypheny.db.schema.TranslatableEntity; import org.polypheny.db.schema.Wrapper; -import org.polypheny.db.util.AccessType; import org.polypheny.db.util.ImmutableBitSet; import org.polypheny.db.util.InitializerExpressionFactory; import org.polypheny.db.util.NullInitializerExpressionFactory; @@ -175,7 +172,7 @@ public T unwrap( Class clazz ) { } } if ( clazz == PolyphenyDbSchema.class ) { - return clazz.cast( Schemas.subSchema( ((PolyphenyDbCatalogReader) schema).rootSchema, List.of( catalogEntity.unwrap( LogicalTable.class ).getNamespaceName(), catalogEntity.name ) ) ); + return clazz.cast( Schemas.subSchema( ((PolyphenyDbCatalogReader) 
schema).snapshot, List.of( catalogEntity.unwrap( LogicalTable.class ).getNamespaceName(), catalogEntity.name ) ) ); } return null; } @@ -192,7 +189,7 @@ public Expression getExpression( Class clazz ) { } else if ( catalogEntity != null ) { return Expressions.call( Expressions.call( Catalog.class, "getInstance" ), - "getTable", + "getLogicalTable", Expressions.constant( catalogEntity.id ) ); } @@ -344,17 +341,6 @@ public AlgDataType getRowType() { } - @Override - public boolean supportsModality( Modality modality ) { - switch ( modality ) { - case STREAM: - return entity instanceof StreamableEntity; - default: - return !(entity instanceof StreamableEntity); - } - } - - @Override public List getQualifiedName() { return List.of( catalogEntity.unwrap( LogicalTable.class ).getNamespaceName(), catalogEntity.name ); @@ -374,12 +360,6 @@ public Monotonicity getMonotonicity( String columnName ) { } - @Override - public AccessType getAllowedAccess() { - return AccessType.ALL; - } - - /** * Helper for {@link #getColumnStrategies()}. */ diff --git a/core/src/main/java/org/polypheny/db/prepare/Context.java b/core/src/main/java/org/polypheny/db/prepare/Context.java index 8a06985a40..25929df707 100644 --- a/core/src/main/java/org/polypheny/db/prepare/Context.java +++ b/core/src/main/java/org/polypheny/db/prepare/Context.java @@ -20,6 +20,7 @@ import java.util.List; import org.polypheny.db.adapter.DataContext; import org.polypheny.db.adapter.java.JavaTypeFactory; +import org.polypheny.db.catalog.Snapshot; import org.polypheny.db.config.PolyphenyDbConnectionConfig; import org.polypheny.db.schema.PolyphenyDbSchema; import org.polypheny.db.transaction.Statement; @@ -35,7 +36,7 @@ public interface Context { /** * Returns the root schema */ - PolyphenyDbSchema getRootSchema(); + Snapshot getSnapshot(); String getDefaultSchemaName(); diff --git a/core/src/main/java/org/polypheny/db/prepare/ContextImpl.java b/core/src/main/java/org/polypheny/db/prepare/ContextImpl.java index c90d756fcc..a76d45b745 100644 --- a/core/src/main/java/org/polypheny/db/prepare/ContextImpl.java +++ b/core/src/main/java/org/polypheny/db/prepare/ContextImpl.java @@ -23,6 +23,7 @@ import lombok.Getter; import org.polypheny.db.adapter.DataContext; import org.polypheny.db.adapter.java.JavaTypeFactory; +import org.polypheny.db.catalog.Snapshot; import org.polypheny.db.config.PolyphenyDbConnectionConfig; import org.polypheny.db.config.PolyphenyDbConnectionConfigImpl; import org.polypheny.db.schema.PolyphenyDbSchema; @@ -32,7 +33,7 @@ public class ContextImpl implements Context { @Getter - private final PolyphenyDbSchema rootSchema; + private final Snapshot snapshot; @Getter private final JavaTypeFactory typeFactory; @Getter @@ -47,8 +48,8 @@ public class ContextImpl implements Context { private final int currentUserId; - public ContextImpl( PolyphenyDbSchema rootSchema, DataContext dataContext, String defaultSchemaName, long databaseId, int currentUserId, Statement statement ) { - this.rootSchema = rootSchema; + public ContextImpl( Snapshot snapshot, DataContext dataContext, String defaultSchemaName, long databaseId, int currentUserId, Statement statement ) { + this.snapshot = snapshot; this.typeFactory = dataContext.getTypeFactory(); this.dataContext = dataContext; this.defaultSchemaName = defaultSchemaName; diff --git a/core/src/main/java/org/polypheny/db/prepare/PlannerImpl.java b/core/src/main/java/org/polypheny/db/prepare/PlannerImpl.java index 9a3781e771..ec4b42896d 100644 --- 
a/core/src/main/java/org/polypheny/db/prepare/PlannerImpl.java +++ b/core/src/main/java/org/polypheny/db/prepare/PlannerImpl.java @@ -40,6 +40,7 @@ import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.AlgRoot; import org.polypheny.db.algebra.metadata.CachingAlgMetadataProvider; +import org.polypheny.db.catalog.Snapshot; import org.polypheny.db.languages.NodeParseException; import org.polypheny.db.nodes.Node; import org.polypheny.db.plan.AlgOptPlanner; @@ -53,7 +54,6 @@ import org.polypheny.db.tools.Planner; import org.polypheny.db.tools.Program; import org.polypheny.db.tools.ValidationException; -import org.polypheny.db.util.Util; /** @@ -68,7 +68,7 @@ public class PlannerImpl implements Planner { /** * Holds the trait definitions to be registered with planner. May be null. */ - private final ImmutableList traitDefs; + private final ImmutableList> traitDefs; private State state; @@ -76,7 +76,7 @@ public class PlannerImpl implements Planner { private boolean open; // set in STATE_2_READY - private PolyphenyDbSchema defaultSchema; + private Snapshot snapshot; private JavaTypeFactory typeFactory; private AlgOptPlanner planner; private RexExecutor executor; @@ -89,7 +89,7 @@ public class PlannerImpl implements Planner { */ public PlannerImpl( FrameworkConfig config ) { this.config = config; - this.defaultSchema = config.getDefaultSchema(); + this.snapshot = config.getSnapshot(); this.programs = config.getPrograms(); this.state = State.STATE_0_CLOSED; this.traitDefs = config.getTraitDefs(); diff --git a/core/src/main/java/org/polypheny/db/prepare/PolyphenyDbCatalogReader.java b/core/src/main/java/org/polypheny/db/prepare/PolyphenyDbCatalogReader.java index e78e4d37e5..8a17c738b5 100644 --- a/core/src/main/java/org/polypheny/db/prepare/PolyphenyDbCatalogReader.java +++ b/core/src/main/java/org/polypheny/db/prepare/PolyphenyDbCatalogReader.java @@ -35,22 +35,20 @@ import java.util.ArrayList; import java.util.List; -import java.util.Objects; +import java.util.stream.Collectors; +import lombok.NonNull; import org.polypheny.db.algebra.constant.FunctionCategory; import org.polypheny.db.algebra.constant.MonikerType; import org.polypheny.db.algebra.constant.Syntax; import org.polypheny.db.algebra.operators.OperatorTable; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; -import org.polypheny.db.catalog.entity.LogicalCollection; -import org.polypheny.db.catalog.entity.CatalogEntity; +import org.polypheny.db.catalog.Snapshot; +import org.polypheny.db.catalog.entity.logical.LogicalCollection; import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.nodes.Identifier; import org.polypheny.db.nodes.Operator; -import org.polypheny.db.plan.AlgOptEntity; -import org.polypheny.db.prepare.Prepare.PreparingEntity; -import org.polypheny.db.schema.PolyphenyDbSchema; import org.polypheny.db.schema.Wrapper; import org.polypheny.db.util.Moniker; import org.polypheny.db.util.MonikerImpl; @@ -63,45 +61,37 @@ */ public class PolyphenyDbCatalogReader implements Prepare.CatalogReader { - protected final PolyphenyDbSchema rootSchema; + protected final Snapshot snapshot; protected final AlgDataTypeFactory typeFactory; - public PolyphenyDbCatalogReader( PolyphenyDbSchema rootSchema, AlgDataTypeFactory typeFactory ) { - this.rootSchema = Objects.requireNonNull( rootSchema ); + public PolyphenyDbCatalogReader( @NonNull Snapshot snapshot, AlgDataTypeFactory 
typeFactory ) { + this.snapshot = snapshot; this.typeFactory = typeFactory; } @Override - public PreparingEntity getTable( final List names ) { - // First look in the default schema, if any. If not found, look in the root schema. - CatalogEntity entity = rootSchema.getTable( names ); - return AlgOptEntityImpl.create( this, entity.getRowType(), entity, null, null ); - + public LogicalTable getTable( final List names ) { + return snapshot.getLogicalTable( names ); } @Override - public AlgOptEntity getCollection( final List names ) { - // First look in the default schema, if any. If not found, look in the root schema. - LogicalCollection collection = rootSchema.getCollection( names ); - if ( collection != null ) { - return AlgOptEntityImpl.create( this, collection.getRowType(), collection, null, null ); - } - return null; + public LogicalCollection getCollection( final List names ) { + return snapshot.getLogicalCollection( names ); } @Override public LogicalGraph getGraph( final String name ) { - return rootSchema.getGraph( List.of( name ) ); + return snapshot.getLogicalGraph( List.of( name ) ); } @Override public AlgDataType getNamedType( Identifier typeName ) { - LogicalTable table = rootSchema.getTable( typeName.getNames() ); + LogicalTable table = snapshot.getLogicalTable( typeName.getNames() ); if ( table != null ) { return table.getRowType(); } else { @@ -112,18 +102,16 @@ public AlgDataType getNamedType( Identifier typeName ) { @Override public List getAllSchemaObjectNames( List names ) { - final PolyphenyDbSchema schema = ValidatorUtil.getSchema( rootSchema, names, Wrapper.nameMatcher ); final List result = new ArrayList<>(); - - for ( String subSchema : rootSchema.getNamespaceNames() ) { - result.add( moniker( schema, subSchema, MonikerType.SCHEMA ) ); + for ( String subSchema : snapshot.getNamespaces( null ).stream().map( n -> n.name ).collect( Collectors.toList() ) ) { + result.add( moniker( subSchema, MonikerType.SCHEMA ) ); } return result; } - private Moniker moniker( PolyphenyDbSchema schema, String name, MonikerType type ) { + private Moniker moniker( String name, MonikerType type ) { /*final List path = schema.path( name ); if ( path.size() == 1 && !schema.root().getName().equals( "" ) && type == MonikerType.SCHEMA ) { type = MonikerType.CATALOG; @@ -133,7 +121,7 @@ private Moniker moniker( PolyphenyDbSchema schema, String name, MonikerType type @Override - public PreparingEntity getTableForMember( List names ) { + public LogicalTable getTableForMember( List names ) { return getTable( names ); } @@ -157,8 +145,8 @@ public List getOperatorList() { @Override - public PolyphenyDbSchema getRootSchema() { - return rootSchema; + public Snapshot getSnapshot() { + return snapshot; } diff --git a/core/src/main/java/org/polypheny/db/prepare/PolyphenyDbPrepareImpl.java b/core/src/main/java/org/polypheny/db/prepare/PolyphenyDbPrepareImpl.java index 5d256821d2..34ce4e5ca6 100644 --- a/core/src/main/java/org/polypheny/db/prepare/PolyphenyDbPrepareImpl.java +++ b/core/src/main/java/org/polypheny/db/prepare/PolyphenyDbPrepareImpl.java @@ -102,6 +102,7 @@ import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; import org.polypheny.db.algebra.type.AlgDataTypeField; +import org.polypheny.db.catalog.Snapshot; import org.polypheny.db.config.RuntimeConfig; import org.polypheny.db.interpreter.BindableConvention; import org.polypheny.db.interpreter.Bindables; @@ -263,8 +264,8 @@ public void executeDdl( Context context, Node node ) { /** * Factory 
method for cluster. */ - protected AlgOptCluster createCluster( AlgOptPlanner planner, RexBuilder rexBuilder, AlgTraitSet traitSet, PolyphenyDbSchema rootSchema ) { - return AlgOptCluster.create( planner, rexBuilder, traitSet, rootSchema ); + protected AlgOptCluster createCluster( AlgOptPlanner planner, RexBuilder rexBuilder, AlgTraitSet traitSet, Snapshot snapshot ) { + return AlgOptCluster.create( planner, rexBuilder, traitSet, snapshot ); } /** @@ -456,8 +457,8 @@ public R perform( PrepareAction action ) { final JavaTypeFactory typeFactory = prepareContext.getTypeFactory(); final RexBuilder rexBuilder = new RexBuilder( typeFactory ); final AlgOptPlanner planner = createPlanner( prepareContext, action.getConfig().getContext(), action.getConfig().getCostFactory() ); - final AlgOptCluster cluster = createCluster( planner, rexBuilder, null, prepareContext.getRootSchema() ); - return action.apply( cluster, prepareContext.getRootSchema() ); + final AlgOptCluster cluster = createCluster( planner, rexBuilder, null, prepareContext.getDataContext().getSnapshot() ); + return action.apply( cluster, prepareContext.getDataContext().getSnapshot() ); } @@ -481,14 +482,14 @@ public static class PolyphenyDbPreparingStmt extends Prepare { PolyphenyDbPreparingStmt( PolyphenyDbPrepareImpl prepare, Context context, - CatalogReader catalogReader, + Snapshot snapshot, AlgDataTypeFactory typeFactory, PolyphenyDbSchema schema, Prefer prefer, AlgOptPlanner planner, Convention resultConvention, RexConvertletTable convertletTable ) { - super( context, catalogReader, resultConvention ); + super( context, snapshot, resultConvention ); this.prepare = prepare; this.schema = schema; this.prefer = prefer; @@ -516,15 +517,15 @@ protected AlgNode decorrelate( NodeToAlgConverter sqlToRelConverter, Node query, } - protected Validator createSqlValidator( CatalogReader catalogReader ) { - return QueryLanguage.from( "sql" ).getValidatorSupplier().apply( context, (PolyphenyDbCatalogReader) catalogReader ); + protected Validator createSqlValidator( Snapshot snapshot ) { + return QueryLanguage.from( "sql" ).getValidatorSupplier().apply( context, snapshot ); } @Override protected Validator getSqlValidator() { if ( sqlValidator == null ) { - sqlValidator = createSqlValidator( catalogReader ); + sqlValidator = createSqlValidator( snapshot ); } return sqlValidator; } @@ -563,7 +564,7 @@ protected PreparedResult implement( AlgRoot root ) { } try { - CatalogReader.THREAD_LOCAL.set( catalogReader ); + CatalogReader.THREAD_LOCAL.set( snapshot ); final Conformance conformance = context.config().conformance(); internalParameters.put( "_conformance", conformance ); Pair, String> implementationPair = EnumerableInterpretable.toBindable( diff --git a/core/src/main/java/org/polypheny/db/prepare/Prepare.java b/core/src/main/java/org/polypheny/db/prepare/Prepare.java index 33c1cebf3b..71e3d7af4e 100644 --- a/core/src/main/java/org/polypheny/db/prepare/Prepare.java +++ b/core/src/main/java/org/polypheny/db/prepare/Prepare.java @@ -54,7 +54,10 @@ import org.polypheny.db.algebra.operators.OperatorTable; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeField; +import org.polypheny.db.catalog.Snapshot; +import org.polypheny.db.catalog.entity.logical.LogicalCollection; import org.polypheny.db.catalog.entity.logical.LogicalGraph; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.refactor.TranslatableEntity; import org.polypheny.db.languages.NodeToAlgConverter; 
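// Editorial sketch, not part of the patch: with this refactoring, name
// resolution goes through the immutable catalog Snapshot instead of walking a
// schema tree. The namespace/table names "public"/"emp" and the passed-in
// arguments are hypothetical; the constructor and getTable(...) are the
// signatures introduced in the PolyphenyDbCatalogReader hunks above.
import java.util.List;
import org.polypheny.db.algebra.type.AlgDataTypeFactory;
import org.polypheny.db.catalog.Snapshot;
import org.polypheny.db.catalog.entity.logical.LogicalTable;
import org.polypheny.db.prepare.PolyphenyDbCatalogReader;

class SnapshotLookupSketch {

    static LogicalTable resolveEmp( Snapshot snapshot, AlgDataTypeFactory typeFactory ) {
        PolyphenyDbCatalogReader reader = new PolyphenyDbCatalogReader( snapshot, typeFactory );
        // getTable(...) now delegates directly to snapshot.getLogicalTable(...)
        // rather than probing a default schema and then the root schema.
        return reader.getTable( List.of( "public", "emp" ) );
    }

}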
import org.polypheny.db.nodes.Node; @@ -93,7 +96,7 @@ public abstract class Prepare { protected static final Logger LOGGER = PolyphenyDbTrace.getStatementTracer(); protected final Context context; - protected final CatalogReader catalogReader; + protected final Snapshot snapshot; /** * Convention via which results should be returned by execution. */ @@ -114,10 +117,10 @@ public abstract class Prepare { public static final TryThreadLocal THREAD_EXPAND = TryThreadLocal.of( false ); - public Prepare( Context context, CatalogReader catalogReader, Convention resultConvention ) { + public Prepare( Context context, Snapshot snapshot, Convention resultConvention ) { assert context != null; this.context = context; - this.catalogReader = catalogReader; + this.snapshot = snapshot; this.resultConvention = resultConvention; } @@ -241,16 +244,16 @@ protected LogicalRelModify.Operation mapTableModOp( boolean isDml, Kind Kind ) { public interface CatalogReader extends AlgOptSchema, ValidatorCatalogReader, OperatorTable { @Override - PreparingEntity getTableForMember( List names ); + LogicalTable getTableForMember( List names ); @Override - PreparingEntity getTable( List names ); + LogicalTable getTable( List names ); - AlgOptEntity getCollection( List names ); + LogicalCollection getCollection( List names ); LogicalGraph getGraph( String name ); - ThreadLocal THREAD_LOCAL = new ThreadLocal<>(); + ThreadLocal THREAD_LOCAL = new ThreadLocal<>(); } diff --git a/core/src/main/java/org/polypheny/db/prepare/QueryableAlgBuilder.java b/core/src/main/java/org/polypheny/db/prepare/QueryableAlgBuilder.java index f1c712042d..9d75ade3c5 100644 --- a/core/src/main/java/org/polypheny/db/prepare/QueryableAlgBuilder.java +++ b/core/src/main/java/org/polypheny/db/prepare/QueryableAlgBuilder.java @@ -64,9 +64,9 @@ import org.polypheny.db.algebra.logical.relational.LogicalFilter; import org.polypheny.db.algebra.logical.relational.LogicalProject; import org.polypheny.db.algebra.logical.relational.LogicalRelScan; +import org.polypheny.db.catalog.refactor.QueryableEntity; +import org.polypheny.db.catalog.refactor.TranslatableEntity; import org.polypheny.db.rex.RexNode; -import org.polypheny.db.schema.QueryableEntity; -import org.polypheny.db.schema.TranslatableEntity; import org.polypheny.db.schema.impl.AbstractTableQueryable; @@ -103,11 +103,11 @@ AlgNode toAlg( Queryable queryable ) { return alg; } if ( queryable instanceof AbstractTableQueryable ) { - final AbstractTableQueryable tableQueryable = (AbstractTableQueryable) queryable; - final QueryableEntity table = tableQueryable.table; + final AbstractTableQueryable tableQueryable = (AbstractTableQueryable) queryable; + final QueryableEntity table = tableQueryable.table.unwrap( QueryableEntity.class ); if ( table instanceof TranslatableEntity ) { - return ((TranslatableEntity) table).toAlg( translator.toAlgContext(), null, translator.cluster.traitSet() ); + return ((TranslatableEntity) table).toAlg( translator.toAlgContext(), translator.cluster.traitSet() ); } else { return LogicalRelScan.create( translator.cluster, null ); } diff --git a/core/src/main/java/org/polypheny/db/schema/AbstractPolyphenyDbSchema.java b/core/src/main/java/org/polypheny/db/schema/AbstractPolyphenyDbSchema.java index e70b3f5261..b075112221 100644 --- a/core/src/main/java/org/polypheny/db/schema/AbstractPolyphenyDbSchema.java +++ b/core/src/main/java/org/polypheny/db/schema/AbstractPolyphenyDbSchema.java @@ -37,6 +37,7 @@ import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import 
lombok.Getter; +import org.polypheny.db.catalog.Snapshot; import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.catalog.entity.CatalogEntityPlacement; import org.polypheny.db.util.Pair; @@ -80,7 +81,7 @@ public AbstractPolyphenyDbSchema( /** * Creates a root schema. */ - public static PolyphenyDbSchema createRootSchema() { + public static Snapshot createSnapshot() { return PolySchemaBuilder.getInstance().getCurrent(); } diff --git a/core/src/main/java/org/polypheny/db/schema/LogicalEntity.java b/core/src/main/java/org/polypheny/db/schema/LogicalEntity.java index d66f7a61d2..1bbf515605 100644 --- a/core/src/main/java/org/polypheny/db/schema/LogicalEntity.java +++ b/core/src/main/java/org/polypheny/db/schema/LogicalEntity.java @@ -22,15 +22,12 @@ import java.util.TreeSet; import lombok.Getter; import org.apache.calcite.linq4j.Enumerable; -import org.apache.calcite.linq4j.Queryable; import org.apache.calcite.linq4j.tree.Expression; import org.polypheny.db.adapter.DataContext; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.core.common.Modify; import org.polypheny.db.algebra.core.common.Modify.Operation; import org.polypheny.db.algebra.logical.relational.LogicalRelModify; -import org.polypheny.db.algebra.type.AlgDataType; -import org.polypheny.db.algebra.type.AlgDataTypeFactory; import org.polypheny.db.algebra.type.AlgProtoDataType; import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.catalog.logistic.EntityType; @@ -39,14 +36,12 @@ import org.polypheny.db.catalog.refactor.ScannableEntity; import org.polypheny.db.catalog.refactor.TranslatableEntity; import org.polypheny.db.plan.AlgOptCluster; -import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptEntity.ToAlgContext; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.plan.Convention; -import org.polypheny.db.prepare.Prepare.CatalogReader; import org.polypheny.db.rex.RexNode; - +@Deprecated public class LogicalEntity extends CatalogEntity implements TranslatableEntity, ScannableEntity, ModifiableEntity { private AlgProtoDataType protoRowType; @@ -92,14 +87,14 @@ public String toString() { @Override - public Modify toModificationAlg( AlgOptCluster cluster, AlgTraitSet traits, CatalogEntity entity, AlgNode child, Operation operation, List targets, List sources ) { + public Modify toModificationAlg( AlgOptCluster cluster, AlgTraitSet traits, CatalogEntity entity, AlgNode child, Operation operation, List targets, List sources ) { return new LogicalRelModify( cluster.traitSetOf( Convention.NONE ), entity, child, operation, targets, - sources); + sources ); } @@ -121,10 +116,15 @@ public Expression asExpression() { } - @Override public AlgNode toAlg( ToAlgContext context, AlgTraitSet traitSet ) { return null; } + + @Override + public Enumerable scan( DataContext dataContext ) { + return null; + } + } diff --git a/core/src/main/java/org/polypheny/db/schema/LogicalSchema.java b/core/src/main/java/org/polypheny/db/schema/LogicalSchema.java index e1ea954eed..1217a954db 100644 --- a/core/src/main/java/org/polypheny/db/schema/LogicalSchema.java +++ b/core/src/main/java/org/polypheny/db/schema/LogicalSchema.java @@ -23,6 +23,7 @@ import lombok.Getter; import org.apache.calcite.linq4j.tree.Expression; import org.polypheny.db.algebra.type.AlgProtoDataType; +import org.polypheny.db.catalog.Snapshot; import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.schema.Namespace.Schema; @@ -96,8 +97,8 @@ public Set 
getSubNamespaceNames() { @Override - public Expression getExpression( PolyphenyDbSchema parentSchema, String name ) { - return Schemas.subSchemaExpression( parentSchema, name, LogicalSchema.class ); + public Expression getExpression( Snapshot snapshot, String name ) { + return Schemas.subSchemaExpression( snapshot, name, LogicalSchema.class ); } diff --git a/core/src/main/java/org/polypheny/db/schema/Namespace.java b/core/src/main/java/org/polypheny/db/schema/Namespace.java index fcf452c8f4..a540c2d6ba 100644 --- a/core/src/main/java/org/polypheny/db/schema/Namespace.java +++ b/core/src/main/java/org/polypheny/db/schema/Namespace.java @@ -38,6 +38,7 @@ import java.util.Set; import org.apache.calcite.linq4j.tree.Expression; import org.polypheny.db.algebra.type.AlgProtoDataType; +import org.polypheny.db.catalog.Snapshot; import org.polypheny.db.catalog.entity.CatalogEntity; @@ -130,11 +131,11 @@ public interface Namespace { /** * Returns the expression by which this schema can be referenced in generated code. * - * @param parentSchema Parent schema + * @param snapshot Parent schema * @param name Name of this schema * @return Expression by which this schema can be referenced in generated code */ - Expression getExpression( PolyphenyDbSchema parentSchema, String name ); + Expression getExpression( Snapshot snapshot, String name ); /** * Returns whether the user is allowed to create new tables, functions and sub-schemas in this schema, in addition to diff --git a/core/src/main/java/org/polypheny/db/schema/PolySchemaBuilder.java b/core/src/main/java/org/polypheny/db/schema/PolySchemaBuilder.java index 6f2981b78c..2bc4e1b7fa 100644 --- a/core/src/main/java/org/polypheny/db/schema/PolySchemaBuilder.java +++ b/core/src/main/java/org/polypheny/db/schema/PolySchemaBuilder.java @@ -43,7 +43,7 @@ import org.polypheny.db.catalog.entity.CatalogKey.EnforcementTime; import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; import org.polypheny.db.catalog.entity.CatalogSchema; -import org.polypheny.db.catalog.entity.LogicalCollection; +import org.polypheny.db.catalog.entity.logical.LogicalCollection; import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.logistic.NamespaceType; @@ -235,7 +235,7 @@ private Map, CatalogEntityPlacement> buildPhysicalTable } /* - Entity entity = adapter.createTableSchema( + Entity entity = adapter.createAdapterTable( catalogTable, Catalog.getInstance().getColumnPlacementsOnAdapterSortedByPhysicalPosition( adapter.getAdapterId(), catalogTable.id ), partitionPlacement ); diff --git a/core/src/main/java/org/polypheny/db/schema/PolyphenyDbSchema.java b/core/src/main/java/org/polypheny/db/schema/PolyphenyDbSchema.java index 3fab5f9331..235bb3ec89 100644 --- a/core/src/main/java/org/polypheny/db/schema/PolyphenyDbSchema.java +++ b/core/src/main/java/org/polypheny/db/schema/PolyphenyDbSchema.java @@ -20,7 +20,7 @@ import java.util.stream.Collectors; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogNamespace; -import org.polypheny.db.catalog.entity.LogicalCollection; +import org.polypheny.db.catalog.entity.logical.LogicalCollection; import org.polypheny.db.catalog.entity.allocation.AllocationCollection; import org.polypheny.db.catalog.entity.allocation.AllocationGraph; import org.polypheny.db.catalog.entity.allocation.AllocationTable; diff --git a/core/src/main/java/org/polypheny/db/schema/QueryableEntity.java 
b/core/src/main/java/org/polypheny/db/schema/QueryableEntity.java index 6317735597..c07577a0d2 100644 --- a/core/src/main/java/org/polypheny/db/schema/QueryableEntity.java +++ b/core/src/main/java/org/polypheny/db/schema/QueryableEntity.java @@ -38,6 +38,7 @@ import org.apache.calcite.linq4j.Queryable; import org.apache.calcite.linq4j.tree.Expression; import org.polypheny.db.adapter.DataContext; +import org.polypheny.db.catalog.Snapshot; /** @@ -58,10 +59,10 @@ public interface QueryableEntity extends Entity { /** * Generates an expression with which this table can be referenced in generated code. * - * @param schema Schema + * @param snapshot Schema * @param tableName Table name (unique within schema) * @param clazz The desired collection class; for example {@code Queryable}. */ - Expression getExpression( PolyphenyDbSchema schema, String tableName, Class clazz ); + Expression getExpression( Snapshot snapshot, String tableName, Class clazz ); } diff --git a/core/src/main/java/org/polypheny/db/schema/Schemas.java b/core/src/main/java/org/polypheny/db/schema/Schemas.java index 1f0d8c2ddb..cd9019a45b 100644 --- a/core/src/main/java/org/polypheny/db/schema/Schemas.java +++ b/core/src/main/java/org/polypheny/db/schema/Schemas.java @@ -42,6 +42,7 @@ import java.util.Arrays; import java.util.List; import java.util.Map; +import lombok.Getter; import org.apache.calcite.linq4j.Enumerable; import org.apache.calcite.linq4j.QueryProvider; import org.apache.calcite.linq4j.Queryable; @@ -56,6 +57,7 @@ import org.polypheny.db.algebra.type.AlgProtoDataType; import org.polypheny.db.catalog.Snapshot; import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.catalog.refactor.QueryableEntity; import org.polypheny.db.config.PolyphenyDbConnectionConfig; import org.polypheny.db.config.PolyphenyDbConnectionConfigImpl; import org.polypheny.db.config.PolyphenyDbConnectionProperty; @@ -102,17 +104,17 @@ private static boolean canConvert( AlgDataType fromType, AlgDataType toType ) { /** * Returns the expression for a schema. */ - public static Expression expression( PolyphenyDbSchema schema ) { - return null;// return schema.getExpression( schema, schema.getName() ); todo dl + public static Expression expression( Snapshot snapshot ) { + return snapshot.getSnapshotExpression( snapshot.getId() ); } /** * Returns the expression for a sub-schema. */ - public static Expression subSchemaExpression( PolyphenyDbSchema schema, String name, Class type ) { + public static Expression subSchemaExpression( Snapshot snapshot, String name, Class type ) { // (Type) schemaExpression.getSubSchema("name") - final Expression schemaExpression = expression( schema ); + final Expression schemaExpression = expression( snapshot ); Expression call = Expressions.call( schemaExpression, @@ -130,7 +132,7 @@ public static Expression subSchemaExpression( PolyphenyDbSchema schema, String n /** * Converts a schema expression to a given type by calling the {@link SchemaPlus#unwrap(Class)} method. */ - public static Expression unwrap( Expression call, Class type ) { + public static Expression unwrap( Expression call, Class type ) { return Expressions.convert_( Expressions.call( call, BuiltInMethod.SCHEMA_PLUS_UNWRAP.method, Expressions.constant( type ) ), type ); } @@ -138,11 +140,11 @@ public static Expression unwrap( Expression call, Class type ) { /** * Returns the expression to access a table within a schema. 
*/ - public static Expression tableExpression( PolyphenyDbSchema schema, Type elementType, String tableName, Class clazz ) { + public static Expression tableExpression( Snapshot snapshot, Type elementType, String tableName, Class clazz ) { final MethodCallExpression expression; if ( Entity.class.isAssignableFrom( clazz ) ) { expression = Expressions.call( - expression( schema ), + expression( snapshot ), BuiltInMethod.SCHEMA_GET_TABLE.method, Expressions.constant( tableName ) ); if ( ScannableEntity.class.isAssignableFrom( clazz ) ) { @@ -167,7 +169,7 @@ public static Expression tableExpression( PolyphenyDbSchema schema, Type element expression = Expressions.call( BuiltInMethod.SCHEMAS_QUERYABLE.method, DataContext.ROOT, - expression( schema ), + expression( snapshot ), Expressions.constant( elementType ), Expressions.constant( tableName ) ); } @@ -175,8 +177,8 @@ public static Expression tableExpression( PolyphenyDbSchema schema, Type element } - public static DataContext createDataContext( PolyphenyDbSchema rootSchema ) { - return new DummyDataContext( rootSchema ); + public static DataContext createDataContext( Snapshot snapshot ) { + return new DummyDataContext( snapshot ); } @@ -192,9 +194,9 @@ public static Queryable queryable( DataContext root, Class clazz, Stri * Returns a {@link Queryable}, given a fully-qualified table name as an iterable. */ public static Queryable queryable( DataContext root, Class clazz, Iterable names ) { - PolyphenyDbSchema schema = root.getSnapshot(); + Snapshot snapshot = root.getSnapshot(); - return queryable( root, schema, clazz, names.iterator().next() ); + return queryable( root, snapshot, clazz, names.iterator().next() ); } @@ -202,10 +204,10 @@ public static Queryable queryable( DataContext root, Class clazz, Iter /** * Returns a {@link Queryable}, given a schema and table name. */ - public static Queryable queryable( DataContext root, PolyphenyDbSchema schema, Class clazz, String tableName ) { + public static Queryable queryable( DataContext root, Snapshot snapshot, Class clazz, String tableName ) { //QueryableEntity table = (QueryableEntity) schema.getEntity( tableName ); - LogicalTable table = schema.getTable( List.of( tableName ) ); - return table.unwrap( QueryableEntity.class ).asQueryable( root, schema, tableName ); + LogicalTable table = snapshot.getLogicalTable( List.of( tableName ) ); + return table.unwrap( QueryableEntity.class ).asQueryable( root, snapshot, table.id ); } @@ -242,58 +244,18 @@ private static int[] identity( int count ) { } - /** - * Returns an {@link org.apache.calcite.linq4j.Enumerable} over object arrays, given a fully-qualified table name which leads to a {@link ScannableEntity}. - */ - public static LogicalTable table( DataContext root, String... names ) { - PolyphenyDbSchema schema = root.getSnapshot(); - return schema.getTable( List.of( names ) ); - } - - /** - * Parses and validates a SQL query. For use within Polypheny-DB only. - */ - /*public static ParseResult parse( final PolyphenyDbSchema schema, final List schemaPath, final String sql ) { - final PolyphenyDbPrepare prepare = PolyphenyDbPrepare.DEFAULT_FACTORY.apply(); - final ImmutableMap propValues = ImmutableMap.of(); - final Context context = makeContext( schema, schemaPath, null, propValues ); - PolyphenyDbPrepare.Dummy.push( context ); - try { - return prepare.parse( context, sql ); - } finally { - PolyphenyDbPrepare.Dummy.pop( context ); - } - }*/ - - /** - * Parses and validates a SQL query and converts to relational algebra. 
For use within Polypheny-DB only. - */ - /*public static PolyphenyDbPrepare.ConvertResult convert( final PolyphenyDbSchema schema, final List schemaPath, final String sql ) { - final PolyphenyDbPrepare prepare = PolyphenyDbPrepare.DEFAULT_FACTORY.apply(); - final ImmutableMap propValues = ImmutableMap.of(); - final Context context = makeContext( schema, schemaPath, null, propValues ); - PolyphenyDbPrepare.Dummy.push( context ); - try { - return prepare.convert( context, sql ); - } finally { - PolyphenyDbPrepare.Dummy.pop( context ); - } - }*/ - - /** * Creates a context for the purposes of preparing a statement. * - * @param schema Schema * @param schemaPath Path wherein to look for functions * @param objectPath Path of the object being analyzed (usually a view), or null * @param propValues Connection properties * @return Context */ - private static Context makeContext( PolyphenyDbSchema schema, List schemaPath, List objectPath, final ImmutableMap propValues ) { + private static Context makeContext( Snapshot snapshot, List schemaPath, List objectPath, final ImmutableMap propValues ) { final Context context0 = PolyphenyDbPrepare.Dummy.peek(); final PolyphenyDbConnectionConfig config = mutate( context0.config(), propValues ); - return makeContext( config, context0.getTypeFactory(), context0.getDataContext(), schema, schemaPath, objectPath ); + return makeContext( config, context0.getTypeFactory(), context0.getDataContext(), snapshot, schemaPath, objectPath ); } @@ -309,7 +271,7 @@ private static Context makeContext( final PolyphenyDbConnectionConfig connectionConfig, final JavaTypeFactory typeFactory, final DataContext dataContext, - final PolyphenyDbSchema schema, + final Snapshot snapshot, final List schemaPath, final List objectPath_ ) { final ImmutableList objectPath = objectPath_ == null ? null : ImmutableList.copyOf( objectPath_ ); @@ -321,8 +283,8 @@ public JavaTypeFactory getTypeFactory() { @Override - public PolyphenyDbSchema getRootSchema() { - return schema; + public Snapshot getSnapshot() { + return snapshot; } @@ -399,13 +361,13 @@ public static AlgProtoDataType proto( final ScalarFunction function ) { * * The result is null if the initial schema is null or any sub-schema does not exist. 
*/ - public static PolyphenyDbSchema subSchema( PolyphenyDbSchema schema, Iterable names ) { + public static Snapshot subSchema( Snapshot snapshot, Iterable names ) { for ( String string : names ) { - if ( schema == null ) { + if ( snapshot == null ) { return null; } } - return schema; + return snapshot; } @@ -431,22 +393,17 @@ public static Path path( SchemaPlus schema ) { */ private static class DummyDataContext implements DataContext { - private final PolyphenyDbSchema rootSchema; + @Getter + private final Snapshot snapshot; private final ImmutableMap map; - DummyDataContext( PolyphenyDbSchema rootSchema ) { - this.rootSchema = rootSchema; + DummyDataContext( Snapshot snapshot ) { + this.snapshot = snapshot; this.map = ImmutableMap.of(); } - @Override - public Snapshot getSnapshot() { - return rootSchema; - } - - @Override public JavaTypeFactory getTypeFactory() { //return connection.getTypeFactory(); diff --git a/core/src/main/java/org/polypheny/db/schema/impl/AbstractNamespace.java b/core/src/main/java/org/polypheny/db/schema/impl/AbstractNamespace.java index 3be40ad7d2..7093eb682c 100644 --- a/core/src/main/java/org/polypheny/db/schema/impl/AbstractNamespace.java +++ b/core/src/main/java/org/polypheny/db/schema/impl/AbstractNamespace.java @@ -43,10 +43,10 @@ import lombok.Getter; import org.apache.calcite.linq4j.tree.Expression; import org.polypheny.db.algebra.type.AlgProtoDataType; +import org.polypheny.db.catalog.Snapshot; import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.schema.Function; import org.polypheny.db.schema.Namespace; -import org.polypheny.db.schema.PolyphenyDbSchema; import org.polypheny.db.schema.SchemaVersion; import org.polypheny.db.schema.Schemas; @@ -56,7 +56,7 @@ * *

 * Behavior is as follows:
 *
 * <ul>
- * <li>The schema has no tables unless you override {@link #getTableMap()}.</li>
+ * <li>The schema has no tables unless you override {@link #getTables()}.</li>
 * <li>The schema has no functions unless you override {@link #getFunctionMultimap()}.</li>
 * <li>The schema has no sub-schemas unless you override {@link #getSubSchemaMap()}.</li>
 * <li>The schema is mutable unless you override {@link #isMutable()}.</li>
 * </ul>
@@ -87,8 +87,8 @@ public Namespace snapshot( SchemaVersion version ) { @Override - public Expression getExpression( PolyphenyDbSchema parentSchema, String name ) { - return Schemas.subSchemaExpression( parentSchema, name, getClass() ); + public Expression getExpression( Snapshot snapshot, String name ) { + return Schemas.subSchemaExpression( snapshot, name, getClass() ); } @@ -101,20 +101,20 @@ public Expression getExpression( PolyphenyDbSchema parentSchema, String name ) { * * @return Map of tables in this schema by name */ - protected Map getTableMap() { + protected Map getTables() { return ImmutableMap.of(); } @Override public final Set getEntityNames() { - return getTableMap().keySet(); + return getTables().keySet(); } @Override public final CatalogEntity getEntity( String name ) { - return getTableMap().get( name ); + return getTables().get( name ); } diff --git a/core/src/main/java/org/polypheny/db/schema/impl/AbstractTableQueryable.java b/core/src/main/java/org/polypheny/db/schema/impl/AbstractTableQueryable.java index c8fc0b3e20..ca85b5f11d 100644 --- a/core/src/main/java/org/polypheny/db/schema/impl/AbstractTableQueryable.java +++ b/core/src/main/java/org/polypheny/db/schema/impl/AbstractTableQueryable.java @@ -43,8 +43,10 @@ import org.apache.calcite.linq4j.tree.Expression; import org.polypheny.db.adapter.DataContext; import org.polypheny.db.adapter.java.AbstractQueryableEntity; -import org.polypheny.db.schema.PolyphenyDbSchema; -import org.polypheny.db.schema.QueryableEntity; +import org.polypheny.db.catalog.Snapshot; +import org.polypheny.db.catalog.entity.CatalogEntity; +import org.polypheny.db.catalog.entity.physical.PhysicalTable; +import org.polypheny.db.catalog.refactor.QueryableEntity; /** @@ -54,25 +56,24 @@ * * @param element type */ -public abstract class AbstractTableQueryable extends AbstractQueryable { +public abstract class AbstractTableQueryable extends AbstractQueryable { public final DataContext dataContext; - public final PolyphenyDbSchema schema; - public final QueryableEntity table; - public final String tableName; + public final Snapshot snapshot; + public final K table; - public AbstractTableQueryable( DataContext dataContext, PolyphenyDbSchema schema, QueryableEntity table, String tableName ) { + public AbstractTableQueryable( DataContext dataContext, Snapshot snapshot, K table ) { this.dataContext = dataContext; - this.schema = schema; + this.snapshot = snapshot; + assert table.unwrap( QueryableEntity.class ) != null; this.table = table; - this.tableName = tableName; } @Override public Expression getExpression() { - return table.getExpression( schema, tableName, Queryable.class ); + return table.asExpression(); } diff --git a/core/src/main/java/org/polypheny/db/schema/impl/DelegatingNamespace.java b/core/src/main/java/org/polypheny/db/schema/impl/DelegatingNamespace.java index 3afdfc4171..47a8fe2969 100644 --- a/core/src/main/java/org/polypheny/db/schema/impl/DelegatingNamespace.java +++ b/core/src/main/java/org/polypheny/db/schema/impl/DelegatingNamespace.java @@ -38,10 +38,10 @@ import java.util.Set; import org.apache.calcite.linq4j.tree.Expression; import org.polypheny.db.algebra.type.AlgProtoDataType; +import org.polypheny.db.catalog.Snapshot; import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.schema.Function; import org.polypheny.db.schema.Namespace; -import org.polypheny.db.schema.PolyphenyDbSchema; import org.polypheny.db.schema.SchemaVersion; @@ -82,8 +82,8 @@ public Namespace snapshot( SchemaVersion version ) {
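// Editorial sketch, not part of the patch: the pattern encoded by the
// AbstractNamespace hunk above — both getEntityNames() and getEntity(name) are
// derived from the single getTables() hook (renamed from getTableMap()). Plain
// JDK/Guava types stand in for the catalog entity classes so the sketch is
// self-contained; everything else here is hypothetical.
import com.google.common.collect.ImmutableMap;
import java.util.Map;
import java.util.Set;

class TableHookSketch {

    // Subclasses override this one hook; by default the namespace is empty.
    protected Map<String, Object> getTables() {
        return ImmutableMap.of();
    }

    public final Set<String> getEntityNames() {
        return getTables().keySet();
    }

    public final Object getEntity( String name ) {
        return getTables().get( name );
    }

}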
@Override - public Expression getExpression( PolyphenyDbSchema parentSchema, String name ) { - return namespace.getExpression( parentSchema, name ); + public Expression getExpression( Snapshot snapshot, String name ) { + return namespace.getExpression( snapshot, name ); } diff --git a/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java b/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java index d5512f0d00..d49457b777 100644 --- a/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java +++ b/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java @@ -103,9 +103,11 @@ import org.polypheny.db.algebra.type.AlgDataTypeFieldImpl; import org.polypheny.db.algebra.type.StructKind; import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.Snapshot; import org.polypheny.db.catalog.entity.CatalogEntity; -import org.polypheny.db.catalog.entity.LogicalCollection; +import org.polypheny.db.catalog.entity.logical.LogicalCollection; import org.polypheny.db.catalog.entity.logical.LogicalGraph; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.languages.OperatorRegistry; import org.polypheny.db.languages.QueryLanguage; import org.polypheny.db.nodes.Operator; @@ -128,7 +130,6 @@ import org.polypheny.db.runtime.Hook; import org.polypheny.db.runtime.PolyCollections.PolyDictionary; import org.polypheny.db.schema.ModelTrait; -import org.polypheny.db.schema.PolyphenyDbSchema; import org.polypheny.db.schema.graph.PolyNode; import org.polypheny.db.transaction.Statement; import org.polypheny.db.type.PolyType; @@ -165,7 +166,7 @@ public class AlgBuilder { @Getter protected final AlgOptCluster cluster; - protected final PolyphenyDbSchema schema; + protected final Snapshot snapshot; private final AlgFactories.FilterFactory filterFactory; private final AlgFactories.ProjectFactory projectFactory; private final AlgFactories.AggregateFactory aggregateFactory; @@ -185,9 +186,9 @@ public class AlgBuilder { private final RexSimplify simplifier; - protected AlgBuilder( Context context, AlgOptCluster cluster, PolyphenyDbSchema schema ) { + protected AlgBuilder( Context context, AlgOptCluster cluster, Snapshot snapshot ) { this.cluster = cluster; - this.schema = schema; + this.snapshot = snapshot; if ( context == null ) { context = Contexts.EMPTY_CONTEXT; } @@ -273,18 +274,18 @@ public int stackSize() { * Creates a AlgBuilder. 
*/ public static AlgBuilder create( FrameworkConfig config ) { - final AlgOptCluster[] clusters = { null }; - final PolyphenyDbSchema[] schemas = { null }; + final AlgOptCluster[] cluster = new AlgOptCluster[1]; + final Snapshot[] snapshot = new Snapshot[1]; Frameworks.withPrepare( new Frameworks.PrepareAction( config ) { @Override - public Void apply( AlgOptCluster cluster, PolyphenyDbSchema rootSchema ) { - clusters[0] = cluster; - schemas[0] = rootSchema; + public Void apply( AlgOptCluster c, Snapshot s ) { + cluster[0] = c; + snapshot[0] = s; return null; } } ); - return new AlgBuilder( config.getContext(), clusters[0], schemas[0] ); + return new AlgBuilder( config.getContext(), cluster[0], snapshot[0] ); } @@ -332,7 +333,7 @@ public RexBuilder getRexBuilder() { * Just add a {@link AlgOptCluster} and a {@link AlgOptSchema} */ public static AlgBuilderFactory proto( final Context context ) { - return ( cluster, schema ) -> new AlgBuilder( context, cluster, schema ); + return ( cluster, snapshot ) -> new AlgBuilder( context, cluster, snapshot ); } @@ -1328,13 +1329,13 @@ public RexNode patternExclude( RexNode node ) { */ public AlgBuilder scan( Iterable tableNames ) { final List names = ImmutableList.copyOf( tableNames ); - final CatalogEntity algOptEntity = schema.getTable( names ); - if ( algOptEntity == null ) { + final LogicalTable entity = snapshot.getLogicalTable( names ); + if ( entity == null ) { throw RESOURCE.tableNotFound( String.join( ".", names ) ).ex(); } - final AlgNode scan = scanFactory.createScan( cluster, algOptEntity ); + final AlgNode scan = scanFactory.createScan( cluster, entity ); push( scan ); - rename( algOptEntity.getRowType().getFieldNames() ); + rename( entity.getRowType().getFieldNames() ); return this; } diff --git a/core/src/main/java/org/polypheny/db/tools/AlgBuilderFactory.java b/core/src/main/java/org/polypheny/db/tools/AlgBuilderFactory.java index 66303acd53..44a51c9ca5 100644 --- a/core/src/main/java/org/polypheny/db/tools/AlgBuilderFactory.java +++ b/core/src/main/java/org/polypheny/db/tools/AlgBuilderFactory.java @@ -35,6 +35,7 @@ import org.polypheny.db.algebra.core.AlgFactories; +import org.polypheny.db.catalog.Snapshot; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgOptRule; import org.polypheny.db.schema.PolyphenyDbSchema; @@ -56,7 +57,7 @@ public interface AlgBuilderFactory { /** * Creates a AlgBuilder. */ - AlgBuilder create( AlgOptCluster cluster, PolyphenyDbSchema schema ); + AlgBuilder create( AlgOptCluster cluster, Snapshot snapshot ); } diff --git a/core/src/main/java/org/polypheny/db/tools/FrameworkConfig.java b/core/src/main/java/org/polypheny/db/tools/FrameworkConfig.java index c8881182d8..2b3077ee1a 100644 --- a/core/src/main/java/org/polypheny/db/tools/FrameworkConfig.java +++ b/core/src/main/java/org/polypheny/db/tools/FrameworkConfig.java @@ -37,6 +37,7 @@ import com.google.common.collect.ImmutableList; import org.polypheny.db.algebra.operators.OperatorTable; import org.polypheny.db.algebra.type.AlgDataTypeSystem; +import org.polypheny.db.catalog.Snapshot; import org.polypheny.db.languages.NodeToAlgConverter; import org.polypheny.db.languages.Parser.ParserConfig; import org.polypheny.db.plan.AlgOptCostFactory; @@ -68,7 +69,7 @@ public interface FrameworkConfig { * Returns the default schema that should be checked before looking at the root schema. * Returns null to only consult the root schema. 
*/ - PolyphenyDbSchema getDefaultSchema(); + Snapshot getSnapshot(); /** * Returns the executor used to evaluate constant expressions. @@ -106,7 +107,7 @@ public interface FrameworkConfig { * {@link AlgTraitDef#convert} in the order of this list. The most important trait comes first in the list, * followed by the second most important one, etc. */ - ImmutableList getTraitDefs(); + ImmutableList> getTraitDefs(); /** * Returns the convertlet table that should be used when converting from SQL to row expressions diff --git a/core/src/main/java/org/polypheny/db/tools/Frameworks.java b/core/src/main/java/org/polypheny/db/tools/Frameworks.java index 69eb4595e2..d9599dfb66 100644 --- a/core/src/main/java/org/polypheny/db/tools/Frameworks.java +++ b/core/src/main/java/org/polypheny/db/tools/Frameworks.java @@ -39,17 +39,18 @@ import java.util.Objects; import java.util.Properties; import lombok.Getter; +import lombok.Setter; import org.polypheny.db.adapter.DataContext; import org.polypheny.db.adapter.DataContext.SlimDataContext; import org.polypheny.db.adapter.java.JavaTypeFactory; import org.polypheny.db.algebra.operators.OperatorTable; import org.polypheny.db.algebra.type.AlgDataTypeSystem; +import org.polypheny.db.catalog.Snapshot; import org.polypheny.db.config.PolyphenyDbConnectionProperty; import org.polypheny.db.languages.NodeToAlgConverter; import org.polypheny.db.languages.Parser.ParserConfig; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgOptCostFactory; -import org.polypheny.db.plan.AlgOptSchema; import org.polypheny.db.plan.AlgTraitDef; import org.polypheny.db.plan.Context; import org.polypheny.db.prepare.ContextImpl; @@ -58,7 +59,6 @@ import org.polypheny.db.prepare.PolyphenyDbPrepareImpl; import org.polypheny.db.rex.RexExecutor; import org.polypheny.db.schema.AbstractPolyphenyDbSchema; -import org.polypheny.db.schema.PolyphenyDbSchema; /** @@ -89,7 +89,7 @@ public static Planner getPlanner( FrameworkConfig config ) { */ public interface PlannerAction { - R apply( AlgOptCluster cluster, PolyphenyDbSchema rootSchema ); + R apply( AlgOptCluster cluster, Snapshot snapshot ); } @@ -118,7 +118,7 @@ public FrameworkConfig getConfig() { public abstract R apply( AlgOptCluster cluster, - PolyphenyDbSchema rootSchema ); + Snapshot snapshot ); } @@ -134,8 +134,8 @@ public static R withPlanner( final PlannerAction action, final FrameworkC return withPrepare( new Frameworks.PrepareAction<>( config ) { @Override - public R apply( AlgOptCluster cluster, PolyphenyDbSchema rootSchema ) { - return action.apply( cluster, rootSchema ); + public R apply( AlgOptCluster cluster, Snapshot snapshot ) { + return action.apply( cluster, snapshot ); } } ); } @@ -148,11 +148,11 @@ public R apply( AlgOptCluster cluster, PolyphenyDbSchema rootSchema ) { * @return Return value from action */ public static R withPlanner( final PlannerAction action ) { - PolyphenyDbSchema rootSchema = Frameworks.createRootSchema( true ); + Snapshot snapshot = Frameworks.createSnapshot( true ); FrameworkConfig config = newConfigBuilder() - .defaultSchema( rootSchema ) + .defaultSchema( snapshot ) .prepareContext( new ContextImpl( - rootSchema, + snapshot, new SlimDataContext() { @Override public JavaTypeFactory getTypeFactory() { @@ -195,8 +195,8 @@ public static R withPrepare( PrepareAction action ) { * * @param cache Whether to create a caching schema. 
*/ - public static PolyphenyDbSchema createRootSchema( boolean cache ) { - return AbstractPolyphenyDbSchema.createRootSchema(); + public static Snapshot createSnapshot( boolean cache ) { + return AbstractPolyphenyDbSchema.createSnapshot(); } @@ -227,10 +227,12 @@ public static class ConfigBuilder { private OperatorTable operatorTable; private ImmutableList programs; private Context context; - private ImmutableList traitDefs; + private ImmutableList> traitDefs; private ParserConfig parserConfig; private NodeToAlgConverter.Config sqlToRelConverterConfig; - private PolyphenyDbSchema defaultSchema; + @Getter + @Setter + private Snapshot snapshot; private RexExecutor executor; private AlgOptCostFactory costFactory; private AlgDataTypeSystem typeSystem; @@ -257,7 +259,7 @@ public ConfigBuilder( FrameworkConfig config ) { traitDefs = config.getTraitDefs(); parserConfig = config.getParserConfig(); sqlToRelConverterConfig = config.getSqlToRelConverterConfig(); - defaultSchema = config.getDefaultSchema(); + snapshot = config.getSnapshot(); executor = config.getExecutor(); costFactory = config.getCostFactory(); typeSystem = config.getTypeSystem(); @@ -274,7 +276,7 @@ public FrameworkConfig build() { traitDefs, parserConfig, sqlToRelConverterConfig, - defaultSchema, + snapshot, costFactory, typeSystem, executor, @@ -300,7 +302,7 @@ public ConfigBuilder operatorTable( OperatorTable operatorTable ) { } - public ConfigBuilder traitDefs( List traitDefs ) { + public ConfigBuilder traitDefs( List> traitDefs ) { if ( traitDefs == null ) { this.traitDefs = null; } else { @@ -310,7 +312,7 @@ public ConfigBuilder traitDefs( List traitDefs ) { } - public ConfigBuilder traitDefs( AlgTraitDef... traitDefs ) { + public ConfigBuilder traitDefs( AlgTraitDef... traitDefs ) { this.traitDefs = ImmutableList.copyOf( traitDefs ); return this; } @@ -328,8 +330,8 @@ public ConfigBuilder sqlToRelConverterConfig( NodeToAlgConverter.Config sqlToRel } - public ConfigBuilder defaultSchema( PolyphenyDbSchema defaultSchema ) { - this.defaultSchema = defaultSchema; + public ConfigBuilder defaultSchema( Snapshot snapshot ) { + this.snapshot = snapshot; return this; } @@ -386,13 +388,13 @@ public static class StdFrameworkConfig implements FrameworkConfig { private final OperatorTable operatorTable; private final ImmutableList programs; - private final ImmutableList traitDefs; + private final ImmutableList> traitDefs; private final ParserConfig parserConfig; private final NodeToAlgConverter.Config sqlToRelConverterConfig; - private final PolyphenyDbSchema defaultSchema; + private final Snapshot snapshot; private final AlgOptCostFactory costFactory; @@ -407,10 +409,10 @@ public StdFrameworkConfig( Context context, OperatorTable operatorTable, ImmutableList programs, - ImmutableList traitDefs, + ImmutableList> traitDefs, ParserConfig parserConfig, NodeToAlgConverter.Config nodeToRelConverterConfig, - PolyphenyDbSchema defaultSchema, + Snapshot snapshot, AlgOptCostFactory costFactory, AlgDataTypeSystem typeSystem, RexExecutor executor, @@ -421,7 +423,7 @@ public StdFrameworkConfig( this.traitDefs = traitDefs; this.parserConfig = parserConfig; this.sqlToRelConverterConfig = nodeToRelConverterConfig; - this.defaultSchema = defaultSchema; + this.snapshot = snapshot; this.costFactory = costFactory; this.typeSystem = typeSystem; this.executor = executor; diff --git a/core/src/main/java/org/polypheny/db/tools/RoutedAlgBuilder.java b/core/src/main/java/org/polypheny/db/tools/RoutedAlgBuilder.java index 635b86aee7..5fb666dca2 100644 --- 
a/core/src/main/java/org/polypheny/db/tools/RoutedAlgBuilder.java +++ b/core/src/main/java/org/polypheny/db/tools/RoutedAlgBuilder.java @@ -26,13 +26,13 @@ import org.bson.BsonValue; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.type.AlgDataType; +import org.polypheny.db.catalog.Snapshot; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.Context; import org.polypheny.db.plan.Contexts; import org.polypheny.db.processing.DeepCopyShuttle; import org.polypheny.db.rex.RexLiteral; -import org.polypheny.db.schema.PolyphenyDbSchema; import org.polypheny.db.transaction.Statement; import org.polypheny.db.util.Pair; @@ -46,8 +46,8 @@ public class RoutedAlgBuilder extends AlgBuilder { protected Map>> physicalPlacementsOfPartitions = new HashMap<>(); // PartitionId -> List - public RoutedAlgBuilder( Context context, AlgOptCluster cluster, PolyphenyDbSchema rootSchema ) { - super( context, cluster, rootSchema ); + public RoutedAlgBuilder( Context context, AlgOptCluster cluster, Snapshot snapshot ) { + super( context, cluster, snapshot ); } @@ -109,4 +109,4 @@ private List> map( List catalogCols } - } +} diff --git a/core/src/main/java/org/polypheny/db/util/BuiltInMethod.java b/core/src/main/java/org/polypheny/db/util/BuiltInMethod.java index 645bac35ac..417fb1f70e 100644 --- a/core/src/main/java/org/polypheny/db/util/BuiltInMethod.java +++ b/core/src/main/java/org/polypheny/db/util/BuiltInMethod.java @@ -162,7 +162,7 @@ public enum BuiltInMethod { INTO( ExtendedEnumerable.class, "into", Collection.class ), REMOVE_ALL( ExtendedEnumerable.class, "removeAll", Collection.class ), SCHEMA_GET_SUB_SCHEMA( Namespace.class, "getSubNamespace", String.class ), - SCHEMA_GET_TABLE( Namespace.class, "getTable", String.class ), + SCHEMA_GET_TABLE( Namespace.class, "getLogicalTable", String.class ), SCHEMA_PLUS_UNWRAP( SchemaPlus.class, "unwrap", Class.class ), SCHEMAS_ENUMERABLE_SCANNABLE( Schemas.class, "enumerable", ScannableEntity.class, DataContext.class ), SCHEMAS_ENUMERABLE_FILTERABLE( Schemas.class, "enumerable", FilterableEntity.class, DataContext.class ), diff --git a/core/src/main/java/org/polypheny/db/util/Util.java b/core/src/main/java/org/polypheny/db/util/Util.java index 21f6c0dc29..f82aba4ac9 100644 --- a/core/src/main/java/org/polypheny/db/util/Util.java +++ b/core/src/main/java/org/polypheny/db/util/Util.java @@ -96,8 +96,13 @@ import org.apache.calcite.avatica.util.DateTimeUtils; import org.apache.calcite.avatica.util.Spaces; import org.apache.calcite.linq4j.Ord; +import org.polypheny.db.algebra.AlgCollation; +import org.polypheny.db.algebra.AlgFieldCollation; import org.polypheny.db.algebra.constant.Kind; +import org.polypheny.db.algebra.constant.Monotonicity; import org.polypheny.db.algebra.fun.AggFunction; +import org.polypheny.db.catalog.entity.CatalogEntity; +import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.nodes.BasicNodeVisitor; import org.polypheny.db.nodes.Call; import org.polypheny.db.nodes.Literal; @@ -2015,6 +2020,22 @@ public static Iterator filter( Iterator iterator, Predicate predica } + public static Monotonicity getMonotonicity( CatalogEntity entity, String columnName ) { + if ( entity.namespaceType != NamespaceType.RELATIONAL ) { + return Monotonicity.NOT_MONOTONIC; + } + + for ( AlgCollation collation : entity.getStatistic().getCollations() ) { + final AlgFieldCollation fieldCollation = collation.getFieldCollations().get( 0 ); + final int 
fieldIndex = fieldCollation.getFieldIndex(); + if ( fieldIndex < entity.getRowType().getFieldCount() && entity.getRowType().getFieldNames().get( fieldIndex ).equals( columnName ) ) { + return fieldCollation.direction.monotonicity(); + } + } + return Monotonicity.NOT_MONOTONIC; + } + + /** * Exception used to interrupt a tree walk of any kind. */ diff --git a/core/src/main/java/org/polypheny/db/util/ValidatorUtil.java b/core/src/main/java/org/polypheny/db/util/ValidatorUtil.java index ebce3f39ef..5ff7c7d9ae 100644 --- a/core/src/main/java/org/polypheny/db/util/ValidatorUtil.java +++ b/core/src/main/java/org/polypheny/db/util/ValidatorUtil.java @@ -34,6 +34,7 @@ import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; import org.polypheny.db.algebra.type.AlgDataTypeField; +import org.polypheny.db.catalog.Snapshot; import org.polypheny.db.schema.PolyphenyDbSchema; import org.polypheny.db.type.PolyTypeUtil; @@ -257,14 +258,13 @@ public static void checkCharsetAndCollateConsistentIfCharType( AlgDataType type * * If not found, returns null. * - * @param rootSchema root schema * @param schemaPath full schema path of required schema * @param nameMatcher name matcher * @return PolyphenyDbSchema that corresponds specified schemaPath */ - public static PolyphenyDbSchema getSchema( PolyphenyDbSchema rootSchema, Iterable schemaPath, NameMatcher nameMatcher ) { + public static Snapshot getSchema( Snapshot snapshot, Iterable schemaPath, NameMatcher nameMatcher ) { - return rootSchema; + return snapshot; } diff --git a/core/src/test/java/org/polypheny/db/catalog/MockCatalog.java b/core/src/test/java/org/polypheny/db/catalog/MockCatalog.java index 84d0b555e0..4bec842376 100644 --- a/core/src/test/java/org/polypheny/db/catalog/MockCatalog.java +++ b/core/src/test/java/org/polypheny/db/catalog/MockCatalog.java @@ -27,7 +27,7 @@ import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.catalog.entity.CatalogAdapter; import org.polypheny.db.catalog.entity.CatalogAdapter.AdapterType; -import org.polypheny.db.catalog.entity.LogicalCollection; +import org.polypheny.db.catalog.entity.logical.LogicalCollection; import org.polypheny.db.catalog.entity.CatalogCollectionMapping; import org.polypheny.db.catalog.entity.CatalogCollectionPlacement; import org.polypheny.db.catalog.entity.CatalogColumn; diff --git a/core/src/test/java/org/polypheny/db/catalog/MockCatalogReader.java b/core/src/test/java/org/polypheny/db/catalog/MockCatalogReader.java index 3a808b1939..37d5b86422 100644 --- a/core/src/test/java/org/polypheny/db/catalog/MockCatalogReader.java +++ b/core/src/test/java/org/polypheny/db/catalog/MockCatalogReader.java @@ -35,16 +35,12 @@ import com.google.common.collect.ImmutableList; -import com.google.common.collect.Iterables; -import java.lang.reflect.Type; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; -import org.apache.calcite.linq4j.Queryable; import org.apache.calcite.linq4j.tree.Expression; -import org.polypheny.db.adapter.DataContext; import org.polypheny.db.algebra.AlgCollation; import org.polypheny.db.algebra.AlgCollations; import org.polypheny.db.algebra.AlgDistribution; @@ -53,9 +49,7 @@ import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.AlgReferentialConstraint; import org.polypheny.db.algebra.constant.Kind; -import org.polypheny.db.algebra.constant.Modality; import org.polypheny.db.algebra.constant.Monotonicity; -import 
org.polypheny.db.algebra.logical.relational.LogicalRelScan; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; import org.polypheny.db.algebra.type.AlgDataTypeField; @@ -65,32 +59,25 @@ import org.polypheny.db.algebra.type.StructKind; import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; +import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.nodes.Call; import org.polypheny.db.nodes.Node; -import org.polypheny.db.plan.AlgOptEntity; -import org.polypheny.db.plan.AlgOptSchema; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.prepare.PolyphenyDbCatalogReader; -import org.polypheny.db.prepare.Prepare.AbstractPreparingEntity; import org.polypheny.db.prepare.Prepare.PreparingEntity; import org.polypheny.db.schema.AbstractPolyphenyDbSchema; import org.polypheny.db.schema.CustomColumnResolvingEntity; import org.polypheny.db.schema.Entity; -import org.polypheny.db.schema.ExtensibleEntity; -import org.polypheny.db.schema.PolyphenyDbSchema; import org.polypheny.db.schema.Statistic; import org.polypheny.db.schema.StreamableEntity; import org.polypheny.db.schema.TableType; import org.polypheny.db.schema.Wrapper; -import org.polypheny.db.test.JdbcTest.AbstractModifiableEntity; -import org.polypheny.db.util.AccessType; import org.polypheny.db.util.ImmutableBitSet; import org.polypheny.db.util.InitializerExpressionFactory; -import org.polypheny.db.util.NameMatchers; import org.polypheny.db.util.NullInitializerExpressionFactory; import org.polypheny.db.util.Pair; import org.polypheny.db.util.Util; -import org.polypheny.db.util.ValidatorUtil; /** @@ -113,7 +100,7 @@ public abstract class MockCatalogReader extends PolyphenyDbCatalogReader { */ public MockCatalogReader( AlgDataTypeFactory typeFactory, boolean caseSensitive ) { super( - AbstractPolyphenyDbSchema.createRootSchema(),//DEFAULT_CATALOG ), + AbstractPolyphenyDbSchema.createSnapshot(),//DEFAULT_CATALOG ), typeFactory ); } @@ -160,7 +147,7 @@ protected void registerTablesWithRollUp( MockSchema schema, Fixture f ) { protected void registerType( final List names, final AlgProtoDataType algProtoDataType ) { assert names.get( 0 ).equals( DEFAULT_CATALOG ); final List schemaPath = Util.skipLast( names ); - final PolyphenyDbSchema schema = ValidatorUtil.getSchema( rootSchema, schemaPath, NameMatchers.withCaseSensitive( true ) ); + //final PolyphenyDbSchema schema = ValidatorUtil.getSchema( snapshot, schemaPath, NameMatchers.withCaseSensitive( true ) ); //schema.add( Util.last( names ), algProtoDataType ); } @@ -187,7 +174,7 @@ private void registerTable( final List names, final Entity entity ) { assert names.get( 0 ).equals( DEFAULT_CATALOG ); final List schemaPath = Util.skipLast( names ); final String tableName = Util.last( names ); - final PolyphenyDbSchema schema = ValidatorUtil.getSchema( rootSchema, schemaPath, NameMatchers.withCaseSensitive( true ) ); + //final PolyphenyDbSchema schema = ValidatorUtil.getSchema( snapshot, schemaPath, NameMatchers.withCaseSensitive( true ) ); //schema.add( tableName, entity ); } @@ -202,14 +189,14 @@ private void registerNestedSchema( MockSchema parentSchema, MockSchema schema, l } - private static List deduceMonotonicity( PreparingEntity table ) { + private static List deduceMonotonicity( CatalogEntity table ) { final List collationList = new ArrayList<>(); // Deduce which fields the table is 
sorted on. int i = -1; for ( AlgDataTypeField field : table.getRowType().getFieldList() ) { ++i; - final Monotonicity monotonicity = table.getMonotonicity( field.getName() ); + final Monotonicity monotonicity = Util.getMonotonicity( table, field.getName() ); if ( monotonicity != Monotonicity.NOT_MONOTONIC ) { final AlgFieldCollation.Direction direction = monotonicity.isDecreasing() @@ -267,7 +254,7 @@ public String getName() { * Mock implementation of * {@link PreparingEntity}. */ - public static class MockEntity extends AbstractPreparingEntity { + public static class MockEntity extends LogicalTable { protected final MockCatalogReader catalogReader; protected final boolean stream; @@ -298,6 +285,7 @@ public void registerRolledUpColumn( String columnName ) { private MockEntity( MockCatalogReader catalogReader, List names, boolean stream, double rowCount, ColumnResolver resolver, InitializerExpressionFactory initializerFactory ) { + super( -1, Util.last( names ), null, -1, -1, -1, EntityType.ENTITY, null, ImmutableList.of(), true, null ); this.catalogReader = catalogReader; this.stream = stream; this.rowCount = rowCount; @@ -313,6 +301,7 @@ private MockEntity( MockCatalogReader catalogReader, List names, boolean protected MockEntity( MockCatalogReader catalogReader, boolean stream, double rowCount, List> columnList, List keyList, AlgDataType rowType, List collationList, List names, Set monotonicColumnSet, StructKind kind, ColumnResolver resolver, InitializerExpressionFactory initializerFactory ) { + super( -1, Util.last( names ), null, -1, -1, -1, EntityType.ENTITY, null, ImmutableList.of(), true, null ); this.catalogReader = catalogReader; this.stream = stream; this.rowCount = rowCount; @@ -331,34 +320,11 @@ protected MockEntity( /** * Implementation of AbstractModifiableTable. 
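* In this patch it is rebased onto LogicalTable (the ExtensibleEntity parts are commented out below).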
*/ - private class ModifiableEntity extends AbstractModifiableEntity implements ExtensibleEntity, Wrapper { + private class ModifiableEntity extends LogicalTable implements Wrapper { protected ModifiableEntity( String tableName ) { - super( tableName ); - } - - - @Override - public AlgDataType getRowType( AlgDataTypeFactory typeFactory ) { - return typeFactory.createStructType( MockEntity.this.getRowType().getFieldList() ); - } - - - @Override - public Queryable asQueryable( DataContext dataContext, PolyphenyDbSchema schema, String tableName ) { - return null; - } + super( -1, tableName, null, -1, -1, -1, EntityType.ENTITY, null, ImmutableList.of(), false, null ); - - @Override - public Type getElementType() { - return null; - } - - - @Override - public Expression getExpression( PolyphenyDbSchema schema, String tableName, Class clazz ) { - return null; } @@ -373,7 +339,7 @@ public C unwrap( Class aClass ) { } - @Override + /*@Override public Entity extend( final List fields ) { return new ModifiableEntity( Util.last( names ) ) { @Override @@ -382,18 +348,13 @@ public AlgDataType getRowType( AlgDataTypeFactory typeFactory ) { return typeFactory.createStructType( allFields ); } }; - } + }*/ - @Override - public int getExtendedColumnOffset() { - return rowType.getFieldCount(); - } - } - @Override + /*@Override protected AlgOptEntity extend( final Entity extendedEntity ) { return new MockEntity( catalogReader, names, stream, rowCount, resolver, initializerFactory ) { @Override @@ -401,7 +362,7 @@ public AlgDataType getRowType() { return extendedEntity.getRowType( catalogReader.typeFactory ); } }; - } + }*/ public static MockEntity create( MockCatalogReader catalogReader, MockSchema schema, String name, boolean stream, double rowCount ) { @@ -439,7 +400,7 @@ public T unwrap( Class clazz ) { return clazz.cast( initializerFactory ); } if ( clazz.isAssignableFrom( Entity.class ) ) { - final Entity entity = resolver == null + final CatalogEntity entity = resolver == null ? new ModifiableEntity( Util.last( names ) ) : new ModifiableEntityWithCustomColumnResolving( Util.last( names ) ); return clazz.cast( entity ); @@ -454,97 +415,24 @@ public double getRowCount() { } - @Override - public AlgOptSchema getRelOptSchema() { - return catalogReader; - } - - - @Override - public AlgNode toAlg( ToAlgContext context, AlgTraitSet traitSet ) { - return LogicalRelScan.create( context.getCluster(), this ); - } - - - @Override - public List getCollationList() { - return collationList; - } - - @Override public AlgDistribution getDistribution() { return AlgDistributions.BROADCAST_DISTRIBUTED; } - @Override - public boolean isKey( ImmutableBitSet columns ) { - return !keyList.isEmpty() && columns.contains( ImmutableBitSet.of( keyList ) ); - } - - - @Override - public List getReferentialConstraints() { - return referentialConstraints; - } - - @Override public AlgDataType getRowType() { return rowType; } - @Override - public boolean supportsModality( Modality modality ) { - return modality == (stream ? Modality.STREAM : Modality.RELATION); - } - - public void onRegister( AlgDataTypeFactory typeFactory ) { rowType = typeFactory.createStructType( kind, Pair.right( columnList ), Pair.left( columnList ) ); collationList = deduceMonotonicity( this ); } - @Override - public List getQualifiedName() { - return names; - } - - - @Override - public Monotonicity getMonotonicity( String columnName ) { - return monotonicColumnSet.contains( columnName ) - ? 
Monotonicity.INCREASING - : Monotonicity.NOT_MONOTONIC; - } - - - @Override - public AccessType getAllowedAccess() { - return AccessType.ALL; - } - - - @Override - public Expression getExpression( Class clazz ) { - throw new UnsupportedOperationException(); - } - - - @Override - public CatalogEntity getCatalogEntity() { - return null; - } - - - @Override - public CatalogPartitionPlacement getPartitionPlacement() { - return null; - } - public void addColumn( String name, AlgDataType type ) { addColumn( name, type, false ); @@ -578,17 +466,17 @@ public StructKind getKind() { /** * Subclass of {@link ModifiableEntity} that also implements {@link CustomColumnResolvingEntity}. */ - private class ModifiableEntityWithCustomColumnResolving extends ModifiableEntity implements CustomColumnResolvingEntity, Wrapper { + private class ModifiableEntityWithCustomColumnResolving extends ModifiableEntity implements Wrapper { ModifiableEntityWithCustomColumnResolving( String tableName ) { super( tableName ); } - @Override + /*@Override public List>> resolveColumn( AlgDataType rowType, AlgDataTypeFactory typeFactory, List names ) { return resolver.resolveColumn( rowType, typeFactory, names ); - } + }*/ } @@ -614,13 +502,13 @@ public void onRegister( AlgDataTypeFactory typeFactory ) { /** * Recreates an immutable rowType, if the table has Dynamic Record Type, when converts table to Rel. */ - @Override + /*@Override public AlgNode toAlg( ToAlgContext context, AlgTraitSet traitSet ) { if ( rowType.isDynamicStruct() ) { rowType = new AlgRecordType( rowType.getFieldList() ); } return super.toAlg( context, traitSet ); - } + }*/ } @@ -671,7 +559,8 @@ public boolean isKey( ImmutableBitSet columns ) { @Override public List getReferentialConstraints() { - return table.getReferentialConstraints(); + return List.of(); + //return table.getReferentialConstraints(); } diff --git a/core/src/test/java/org/polypheny/db/docker/MockCatalogDocker.java b/core/src/test/java/org/polypheny/db/docker/MockCatalogDocker.java index f42f8f8740..d842de50e6 100644 --- a/core/src/test/java/org/polypheny/db/docker/MockCatalogDocker.java +++ b/core/src/test/java/org/polypheny/db/docker/MockCatalogDocker.java @@ -20,6 +20,7 @@ import java.util.HashMap; import java.util.Map; import org.polypheny.db.catalog.MockCatalog; +import org.polypheny.db.catalog.Snapshot; import org.polypheny.db.catalog.entity.CatalogAdapter; import org.polypheny.db.catalog.entity.CatalogAdapter.AdapterType; @@ -41,6 +42,12 @@ public int addAdapter( String uniqueName, String clazz, AdapterType type, Map tables; + private final ImmutableMap tables; public HrClusteredSchema( long id ) { super( id ); - tables = ImmutableMap.builder() + tables = ImmutableMap.builder() .put( "emps", new PkClusteredEntity( @@ -105,7 +107,7 @@ public HrClusteredSchema( long id ) { @Override - protected Map getTableMap() { + protected Map getTables() { return tables; } @@ -113,7 +115,7 @@ protected Map getTableMap() { /** * A table sorted (ascending direction and nulls last) on the primary key. 
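* For example, with the primary key on field 0, getStatistic() reports an ascending, nulls-last collation on that field, which the planner can use for ordered access.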
*/ - private static class PkClusteredEntity extends AbstractEntity implements ScannableEntity { + private static class PkClusteredEntity extends LogicalTable implements ScannableEntity { private final ImmutableBitSet pkColumns; private final List data; @@ -121,7 +123,7 @@ private static class PkClusteredEntity extends AbstractEntity implements Scannab PkClusteredEntity( Function dataTypeBuilder, ImmutableBitSet pkColumns, List data ) { - super( null, null, null ); + super( -1, "", null, -1, -1, -1, EntityType.ENTITY, null, ImmutableList.of(), false, null ); this.data = data; this.typeBuilder = dataTypeBuilder; this.pkColumns = pkColumns; @@ -137,18 +139,10 @@ public Statistic getStatistic() { return Statistics.of( (double) data.size(), ImmutableList.of( pkColumns ), ImmutableList.of( AlgCollations.of( collationFields ) ) ); } - - @Override - public AlgDataType getRowType( final AlgDataTypeFactory typeFactory ) { - return typeBuilder.apply( typeFactory ); - } - - @Override public Enumerable scan( final DataContext root ) { return Linq4j.asEnumerable( data ); } - } } diff --git a/core/src/test/java/org/polypheny/db/test/JdbcTest.java b/core/src/test/java/org/polypheny/db/test/JdbcTest.java index 4ca1238649..ae1e6a424d 100644 --- a/core/src/test/java/org/polypheny/db/test/JdbcTest.java +++ b/core/src/test/java/org/polypheny/db/test/JdbcTest.java @@ -21,11 +21,12 @@ import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.core.relational.RelModify; import org.polypheny.db.algebra.logical.relational.LogicalRelModify; +import org.polypheny.db.catalog.entity.CatalogEntity; +import org.polypheny.db.catalog.refactor.ModifiableEntity; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.prepare.Prepare; import org.polypheny.db.rex.RexNode; -import org.polypheny.db.schema.ModifiableEntity; import org.polypheny.db.schema.impl.AbstractEntity; @@ -44,10 +45,10 @@ protected AbstractModifiableEntity( String tableName ) { } - @Override - public RelModify toModificationAlg( + //@Override + public RelModify toModificationAlg( AlgOptCluster cluster, - AlgOptEntity table, + CatalogEntity entity, Prepare.CatalogReader catalogReader, AlgNode child, RelModify.Operation operation, @@ -55,7 +56,7 @@ public RelModify toModificationAlg( List sourceExpressionList, boolean flattened ) { return LogicalRelModify.create( - table, + entity, child, operation, updateColumnList, diff --git a/core/src/test/java/org/polypheny/db/test/ScannableEntityTest.java b/core/src/test/java/org/polypheny/db/test/ScannableEntityTest.java index b71655cd28..a64f4d8d1d 100644 --- a/core/src/test/java/org/polypheny/db/test/ScannableEntityTest.java +++ b/core/src/test/java/org/polypheny/db/test/ScannableEntityTest.java @@ -345,7 +345,7 @@ public void testTens() throws SQLException { // final Schema schema = // new AbstractSchema() { // @Override -// protected Map getTableMap() { +// protected Map getTables() { // return ImmutableMap.of( "TENS", // new SimpleTable() { // private Enumerable superScan( DataContext root ) { diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java index 26952e41b9..f633f01c55 100644 --- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java @@ -62,7 +62,7 @@ import org.polypheny.db.catalog.logistic.NameGenerator; import org.polypheny.db.catalog.entity.CatalogAdapter; import 
org.polypheny.db.catalog.entity.CatalogAdapter.AdapterType; -import org.polypheny.db.catalog.entity.LogicalCollection; +import org.polypheny.db.catalog.entity.logical.LogicalCollection; import org.polypheny.db.catalog.entity.CatalogCollectionMapping; import org.polypheny.db.catalog.entity.CatalogCollectionPlacement; import org.polypheny.db.catalog.entity.CatalogColumn; @@ -910,7 +910,7 @@ else if ( partitionGroupIds.isEmpty() && partitionGroupNames.isEmpty() ) { PolySchemaBuilder.getInstance().getCurrent(); // Create table on store - dataStore.createTable( statement.getPrepareContext(), catalogTable, catalogTable.partitionProperty.partitionIds ); + dataStore.createPhysicalTable( statement.getPrepareContext(), catalogTable, catalogTable.partitionProperty.partitionIds ); // Copy data to the newly added placements DataMigrator dataMigrator = statement.getTransaction().getDataMigrator(); dataMigrator.copyData( statement.getTransaction(), catalog.getAdapter( dataStore.getAdapterId() ), addedColumns, partitionIds ); @@ -1468,7 +1468,7 @@ else if ( !partitionGroupNames.isEmpty() && partitionGroupIds.isEmpty() ) { null, DataPlacementRole.UPTODATE ) ); - storeInstance.createTable( statement.getPrepareContext(), catalogTable, newPartitionIdsOnDataPlacement ); + storeInstance.createPhysicalTable( statement.getPrepareContext(), catalogTable, newPartitionIdsOnDataPlacement ); } // Copy the data to the newly added column placements @@ -1523,7 +1523,7 @@ public void modifyPartitionPlacement( LogicalTable catalogTable, List part DataPlacementRole.UPTODATE ); } - storeInstance.createTable( statement.getPrepareContext(), catalogTable, newPartitions ); + storeInstance.createPhysicalTable( statement.getPrepareContext(), catalogTable, newPartitions ); // Get only columns that are actually on that store List necessaryColumns = new LinkedList<>(); @@ -1868,7 +1868,7 @@ public void createMaterializedView( String viewName, long schemaId, AlgRoot algR null, DataPlacementRole.UPTODATE ); - store.createTable( statement.getPrepareContext(), catalogMaterializedView, catalogMaterializedView.partitionProperty.partitionIds ); + store.createPhysicalTable( statement.getPrepareContext(), catalogMaterializedView, catalogMaterializedView.partitionProperty.partitionIds ); } // Selected data from tables is added into the newly crated materialized view @@ -2240,7 +2240,7 @@ public void createTable( long schemaId, String name, List fiel null, DataPlacementRole.UPTODATE ); - store.createTable( statement.getPrepareContext(), catalogTable, catalogTable.partitionProperty.partitionIds ); + store.createPhysicalTable( statement.getPrepareContext(), catalogTable, catalogTable.partitionProperty.partitionIds ); } } catch ( GenericCatalogException | UnknownColumnException | UnknownCollationException e ) { @@ -2682,7 +2682,7 @@ public void addPartitioning( PartitionInformation partitionInfo, List } // First create new tables - store.createTable( statement.getPrepareContext(), partitionedTable, partitionedTable.partitionProperty.partitionIds ); + store.createPhysicalTable( statement.getPrepareContext(), partitionedTable, partitionedTable.partitionProperty.partitionIds ); // Copy data from unpartitioned to partitioned // Get only columns that are actually on that store @@ -2792,7 +2792,7 @@ public void removePartitioning( LogicalTable partitionedTable, Statement stateme DataPlacementRole.UPTODATE ); // First create new tables - store.createTable( statement.getPrepareContext(), mergedTable, mergedTable.partitionProperty.partitionIds ); + store.createPhysicalTable( statement.getPrepareContext(), mergedTable, mergedTable.partitionProperty.partitionIds ); // Get only columns that are actually on that store List necessaryColumns = new LinkedList<>(); diff --git a/dbms/src/main/java/org/polypheny/db/partition/FrequencyMapImpl.java b/dbms/src/main/java/org/polypheny/db/partition/FrequencyMapImpl.java index 40cdc83051..14901c9f2c 100644 --- a/dbms/src/main/java/org/polypheny/db/partition/FrequencyMapImpl.java +++ b/dbms/src/main/java/org/polypheny/db/partition/FrequencyMapImpl.java @@ -304,7 +304,7 @@ private void redistributePartitions( LogicalTable table, List partitionsFr DataPlacementRole.UPTODATE ); } - store.createTable( statement.getPrepareContext(), table, hotPartitionsToCreate ); + store.createPhysicalTable( statement.getPrepareContext(), table, hotPartitionsToCreate ); List catalogColumns = new ArrayList<>(); catalog.getColumnPlacementsOnAdapterPerTable( store.getAdapterId(), table.id ).forEach( cp -> catalogColumns.add( catalog.getColumn( cp.columnId ) ) ); @@ -352,7 +352,7 @@ private void redistributePartitions( LogicalTable table, List partitionsFr null, null, DataPlacementRole.UPTODATE ); } - store.createTable( statement.getPrepareContext(), table, coldPartitionsToCreate ); + store.createPhysicalTable( statement.getPrepareContext(), table, coldPartitionsToCreate ); List catalogColumns = new ArrayList<>(); catalog.getColumnPlacementsOnAdapterPerTable( store.getAdapterId(), table.id ).forEach( cp -> catalogColumns.add( catalog.getColumn( cp.columnId ) ) ); diff --git a/dbms/src/main/java/org/polypheny/db/processing/AbstractQueryProcessor.java b/dbms/src/main/java/org/polypheny/db/processing/AbstractQueryProcessor.java index da93bd68f1..58f128f8a0 100644 --- a/dbms/src/main/java/org/polypheny/db/processing/AbstractQueryProcessor.java +++ b/dbms/src/main/java/org/polypheny/db/processing/AbstractQueryProcessor.java @@ -768,7 +768,7 @@ public AlgNode visit( AlgNode node ) { // .collect( Collectors.toList() ); // } // final {@link AlgNode} replacement = LogicalModify.create( -// ltm.getTable(), +// ltm.getLogicalTable(), // transaction.getCatalogReader(), // newProject, // ltm.getOperation(), diff --git a/dbms/src/main/java/org/polypheny/db/processing/ConstraintEnforceAttacher.java b/dbms/src/main/java/org/polypheny/db/processing/ConstraintEnforceAttacher.java index fa10de5965..a3409ef187 100644 --- a/dbms/src/main/java/org/polypheny/db/processing/ConstraintEnforceAttacher.java +++ b/dbms/src/main/java/org/polypheny/db/processing/ConstraintEnforceAttacher.java @@ -330,8 +330,8 @@ public static AlgRoot enforceConstraintBeforeQuery( AlgRoot logicalRoot, Stateme final RexBuilder rexBuilder = root.getCluster().getRexBuilder(); for ( final CatalogForeignKey foreignKey : foreignKeys ) { - final LogicalTable algOptEntity = statement.getDataContext().getSnapshot().getTable( foreignKey.referencedKeyTableId ); - final LogicalRelScan scan = LogicalRelScan.create( root.getCluster(), algOptEntity ); + final LogicalTable entity = statement.getDataContext().getSnapshot().getLogicalTable( foreignKey.referencedKeyTableId ); + final LogicalRelScan scan = LogicalRelScan.create( root.getCluster(), entity ); RexNode joinCondition = rexBuilder.makeLiteral( true ); builder.push( input ); builder.project( foreignKey.getColumnNames().stream().map( builder::field ).collect( Collectors.toList() ) );
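A note on the createPhysicalTable call sites above: the DDL manager and the frequency map still hand over partition id lists, while the adapter implementations later in this patch (for example the Cottontail store) already use an allocation-based variant that returns the created PhysicalTable. A minimal sketch of the contract these call sites appear to converge on, assuming the signature visible in the Cottontail implementation; the interface shown here is illustrative and not part of the patch:

import org.polypheny.db.catalog.entity.allocation.AllocationTable;
import org.polypheny.db.catalog.entity.logical.LogicalTable;
import org.polypheny.db.catalog.entity.physical.PhysicalTable;
import org.polypheny.db.prepare.Context;

public interface DataStore {

    // Sketch of the assumed target contract: one physical table is created per
    // allocation, and the returned PhysicalTable carries the physical schema and
    // column names so they can be registered in the catalog.
    PhysicalTable createPhysicalTable( Context context, LogicalTable logical, AllocationTable allocation );

}

diff --git a/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java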
b/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java index 558ff95101..0951eb635e 100644 --- a/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java +++ b/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java @@ -54,17 +54,17 @@ import org.polypheny.db.catalog.entity.CatalogPrimaryKey; import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.catalog.entity.physical.PhysicalTable; +import org.polypheny.db.catalog.refactor.ModifiableEntity; import org.polypheny.db.config.RuntimeConfig; import org.polypheny.db.partition.PartitionManager; import org.polypheny.db.partition.PartitionManagerFactory; import org.polypheny.db.plan.AlgOptCluster; -import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.rex.RexBuilder; import org.polypheny.db.rex.RexDynamicParam; import org.polypheny.db.rex.RexNode; import org.polypheny.db.routing.RoutingManager; import org.polypheny.db.schema.ModelTrait; -import org.polypheny.db.schema.ModifiableEntity; import org.polypheny.db.schema.PolySchemaBuilder; import org.polypheny.db.schema.graph.PolyGraph; import org.polypheny.db.tools.AlgBuilder; @@ -321,7 +321,7 @@ public AlgRoot buildDeleteStatement( Statement statement, List tables = catalog.getTables( Catalog.defaultDatabaseId, new Pattern( namespace.name ), null ); List> scans = tables.stream() @@ -472,14 +471,13 @@ private AlgNode handleGraphOnRelational( LogicalLpgScan alg, CatalogSchema names } - private AlgNode handleGraphOnDocument( LogicalLpgScan alg, CatalogSchema namespace, Statement statement, Integer placementId ) { + private AlgNode handleGraphOnDocument( LogicalLpgScan alg, CatalogNamespace namespace, Statement statement, Integer placementId ) { AlgOptCluster cluster = alg.getCluster(); List collections = catalog.getCollections( namespace.id, null ); List> scans = collections.stream() .map( t -> { RoutedAlgBuilder algBuilder = RoutedAlgBuilder.create( statement, alg.getCluster() ); - LogicalCollection collection = statement.getTransaction().getSnapshot().getCollection( List.of( t.getNamespaceName(), t.name ) ); - AlgNode scan = algBuilder.documentScan( collection ).build(); + AlgNode scan = algBuilder.documentScan( t ).build(); routeDocument( algBuilder, (AlgNode & DocumentAlg) scan, statement ); return Pair.of( t.name, algBuilder.build() ); } ) @@ -495,10 +493,10 @@ private AlgNode handleGraphOnDocument( LogicalLpgScan alg, CatalogSchema namespa public AlgNode getRelationalScan( LogicalLpgScan alg, int adapterId, Statement statement ) { CatalogGraphMapping mapping = Catalog.getInstance().getGraphMapping( alg.entity.id ); - PhysicalTable nodesTable = statement.getDataContext().getSnapshot().getTable( mapping.nodesId ).unwrap( PhysicalTable.class ); - PhysicalTable nodePropertiesTable = statement.getDataContext().getSnapshot().getTable( mapping.nodesPropertyId ).unwrap( PhysicalTable.class ); - PhysicalTable edgesTable = statement.getDataContext().getSnapshot().getTable( mapping.edgesId ).unwrap( PhysicalTable.class ); - PhysicalTable edgePropertiesTable = statement.getDataContext().getSnapshot().getTable( mapping.edgesPropertyId ).unwrap( PhysicalTable.class ); + PhysicalTable nodesTable = statement.getDataContext().getSnapshot().getLogicalTable( mapping.nodesId ).unwrap( PhysicalTable.class ); + PhysicalTable nodePropertiesTable = statement.getDataContext().getSnapshot().getLogicalTable( mapping.nodesPropertyId ).unwrap( PhysicalTable.class ); + PhysicalTable 
edgesTable = statement.getDataContext().getSnapshot().getLogicalTable( mapping.edgesId ).unwrap( PhysicalTable.class ); + PhysicalTable edgePropertiesTable = statement.getDataContext().getSnapshot().getLogicalTable( mapping.edgesPropertyId ).unwrap( PhysicalTable.class ); AlgNode node = buildSubstitutionJoin( alg, nodesTable, nodePropertiesTable ); @@ -520,7 +518,7 @@ protected CatalogEntity getSubstitutionTable( Statement statement, long tableId, ), nodes.name + "_" + nodes.partitionProperty.partitionIds.get( 0 ) ); - return statement.getDataContext().getSnapshot().getTable( qualifiedTableName ); + return statement.getDataContext().getSnapshot().getLogicalTable( qualifiedTableName ); } @@ -541,8 +539,7 @@ protected AlgNode buildSubstitutionJoin( AlgNode alg, CatalogEntity nodesTable, protected RoutedAlgBuilder handleDocumentScan( DocumentScan alg, Statement statement, RoutedAlgBuilder builder, Integer adapterId ) { - Catalog catalog = Catalog.getInstance(); - PolyphenyDbCatalogReader reader = statement.getTransaction().getSnapshot(); + Snapshot snapshot = statement.getTransaction().getSnapshot(); if ( alg.entity.namespaceType != NamespaceType.DOCUMENT ) { if ( alg.entity.namespaceType == NamespaceType.GRAPH ) { @@ -573,7 +570,7 @@ protected RoutedAlgBuilder handleDocumentScan( DocumentScan alg, Statement st CatalogCollectionPlacement placement = catalog.getCollectionPlacement( collection.id, placementId ); String namespaceName = PolySchemaBuilder.buildAdapterSchemaName( adapter.uniqueName, collection.getNamespaceName(), placement.physicalNamespaceName ); String collectionName = collection.name + "_" + placement.id; - LogicalCollection collectionTable = reader.getRootSchema().getCollection( List.of( namespaceName, collectionName ) ); + PhysicalTable collectionTable = snapshot.getPhysicalTable( collection.id, adapterId ); // we might previously have pushed the non-native transformer builder.clear(); return builder.push( LogicalDocumentScan.create( alg.getCluster(), collectionTable ) ); diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java b/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java index 6be776ddb6..fafa3d8794 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java @@ -78,6 +78,7 @@ import org.polypheny.db.algebra.type.AlgDataTypeFieldImpl; import org.polypheny.db.algebra.type.AlgRecordType; import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.Snapshot; import org.polypheny.db.catalog.entity.CatalogAdapter; import org.polypheny.db.catalog.entity.CatalogCollectionMapping; import org.polypheny.db.catalog.entity.CatalogCollectionPlacement; @@ -87,10 +88,11 @@ import org.polypheny.db.catalog.entity.CatalogGraphMapping; import org.polypheny.db.catalog.entity.CatalogGraphPlacement; import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; -import org.polypheny.db.catalog.entity.LogicalCollection; +import org.polypheny.db.catalog.entity.logical.LogicalCollection; import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.entity.physical.PhysicalCollection; +import org.polypheny.db.catalog.entity.physical.PhysicalGraph; import org.polypheny.db.catalog.entity.physical.PhysicalTable; import org.polypheny.db.catalog.exceptions.UnknownColumnException; import org.polypheny.db.catalog.logistic.EntityType; @@ -182,21 
+184,21 @@ public AlgNode routeDml( LogicalRelModify modify, Statement statement ) { Map newParameterValues = new HashMap<>(); for ( CatalogColumnPlacement pkPlacement : pkPlacements ) { - CatalogReader catalogReader = statement.getTransaction().getSnapshot(); + Snapshot snapshot = statement.getTransaction().getSnapshot(); // Get placements on store List placementsOnAdapter = catalog.getColumnPlacementsOnAdapterPerTable( pkPlacement.adapterId, catalogTable.id ); // If this is an update, check whether we need to execute on this store at all List updateColumnList = modify.getUpdateColumnList(); - List sourceExpressionList = modify.getSourceExpressionList(); + List sourceExpressionList = modify.getSourceExpressionList(); if ( placementsOnAdapter.size() != catalogTable.fieldIds.size() ) { if ( modify.getOperation() == Modify.Operation.UPDATE ) { updateColumnList = new LinkedList<>( modify.getUpdateColumnList() ); sourceExpressionList = new LinkedList<>( modify.getSourceExpressionList() ); Iterator updateColumnListIterator = updateColumnList.iterator(); - Iterator sourceExpressionListIterator = sourceExpressionList.iterator(); + Iterator sourceExpressionListIterator = sourceExpressionList.iterator(); while ( updateColumnListIterator.hasNext() ) { String columnName = updateColumnListIterator.next(); sourceExpressionListIterator.next(); @@ -410,7 +412,7 @@ else if ( identifiedPartitionForSetValue != -1 ) { pkPlacement.physicalSchemaName ), catalogTable.name + "_" + currentPartitionId ); - PhysicalTable physical = catalogReader.getRootSchema().getTable( qualifiedTableName ).unwrap( PhysicalTable.class ); + PhysicalTable physical = snapshot.getPhysicalTable( currentPartitionId ); ModifiableEntity modifiableTable = physical.unwrap( ModifiableEntity.class ); // Build DML @@ -500,7 +502,7 @@ else if ( identifiedPartitionForSetValue != -1 ) { pkPlacement.physicalSchemaName ), catalogTable.name + "_" + entry.getKey() ); - PhysicalTable physical = catalogReader.getRootSchema().getTable( qualifiedTableName ).unwrap( PhysicalTable.class ); + PhysicalTable physical = snapshot.getPhysicalTable( entry.getKey() ); ModifiableEntity modifiableTable = physical.unwrap( ModifiableEntity.class ); // Build DML @@ -588,7 +590,7 @@ else if ( identifiedPartitionForSetValue != -1 ) { pkPlacement.physicalSchemaName ), catalogTable.name + "_" + partitionId ); - PhysicalTable physical = catalogReader.getRootSchema().getTable( qualifiedTableName ).unwrap( PhysicalTable.class ); + PhysicalTable physical = snapshot.getPhysicalTable( partitionId ); // Build DML Modify adjustedModify; @@ -712,7 +714,7 @@ public AlgNode handleBatchIterator( AlgNode alg, Statement statement, LogicalQue @Override public AlgNode routeDocumentDml( LogicalDocumentModify alg, Statement statement, LogicalQueryInformation queryInformation, Integer adapterId ) { - PolyphenyDbCatalogReader reader = statement.getTransaction().getSnapshot(); + Snapshot snapshot = statement.getTransaction().getSnapshot(); LogicalCollection collection = alg.entity.unwrap( LogicalCollection.class ); @@ -730,7 +732,7 @@ public AlgNode routeDocumentDml( LogicalDocumentModify alg, Statement statement, String collectionName = collection.name + "_" + placement.id; - PhysicalCollection document = reader.getRootSchema().getCollection( List.of( namespaceName, collectionName ) ).unwrap( PhysicalCollection.class ); + PhysicalCollection document = snapshot.getPhysicalCollection( placement.id ); if ( !adapter.getSupportedNamespaces().contains( NamespaceType.DOCUMENT ) ) { // move "slower" 
updates in front modifies.add( 0, attachRelationalModify( alg, statement, placementId, queryInformation ) ); @@ -765,7 +767,7 @@ public AlgNode routeGraphDml( LogicalLpgModify alg, Statement statement ) { @Override public AlgNode routeGraphDml( LogicalLpgModify alg, Statement statement, LogicalGraph catalogGraph, List placements ) { - PolyphenyDbCatalogReader reader = statement.getTransaction().getSnapshot(); + Snapshot snapshot = statement.getTransaction().getSnapshot(); List modifies = new ArrayList<>(); boolean usedSubstitution = false; @@ -775,7 +777,7 @@ public AlgNode routeGraphDml( LogicalLpgModify alg, Statement statement, Logical CatalogGraphPlacement graphPlacement = Catalog.getInstance().getGraphPlacement( catalogGraph.id, adapterId ); String name = PolySchemaBuilder.buildAdapterSchemaName( adapter.uniqueName, catalogGraph.name, graphPlacement.physicalName ); - LogicalGraph graph = reader.getGraph( name ); + PhysicalGraph graph = snapshot.getPhysicalGraph( catalogGraph.id, adapterId ); if ( graph == null ) { // move "slower" updates in front modifies.add( 0, attachRelationalModify( alg, adapterId, statement ) ); @@ -787,11 +789,10 @@ public AlgNode routeGraphDml( LogicalLpgModify alg, Statement statement, Logical throw new RuntimeException( "Graph is not modifiable." ); } - modifies.add( ((ModifiableGraph) graph).toModificationAlg( + modifies.add( ((ModifiableEntity) graph).toModificationAlg( alg.getCluster(), alg.getTraitSet(), graph, - statement.getTransaction().getSnapshot(), buildGraphDml( alg.getInput(), statement, adapterId ), alg.operation, alg.ids, diff --git a/dbms/src/main/java/org/polypheny/db/view/MaterializedViewManagerImpl.java b/dbms/src/main/java/org/polypheny/db/view/MaterializedViewManagerImpl.java index 5de4ea676f..b6ef4847a3 100644 --- a/dbms/src/main/java/org/polypheny/db/view/MaterializedViewManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/view/MaterializedViewManagerImpl.java @@ -184,7 +184,7 @@ public void addTables( Transaction transaction, List tableNames ) { updateCandidates.put( transaction.getXid(), id ); } } catch ( UnknownTableException e ) { - throw new RuntimeException( "Not possible to getTable to update which Tables were changed.", e ); + throw new RuntimeException( "Unable to get the table to update which tables were changed.", e ); } } } diff --git a/dbms/src/test/java/org/polypheny/db/misc/AlgBuilderTest.java b/dbms/src/test/java/org/polypheny/db/misc/AlgBuilderTest.java index 656bd00917..c3202885d2 100644 --- a/dbms/src/test/java/org/polypheny/db/misc/AlgBuilderTest.java +++ b/dbms/src/test/java/org/polypheny/db/misc/AlgBuilderTest.java @@ -67,6 +67,7 @@ import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; import org.polypheny.db.algebra.type.AlgDataTypeField; +import org.polypheny.db.catalog.Snapshot; import org.polypheny.db.config.RuntimeConfig; import org.polypheny.db.languages.OperatorRegistry; import org.polypheny.db.languages.Parser; @@ -149,14 +150,14 @@ private static void dropTestSchema() throws SQLException { private AlgBuilder createAlgBuilder() { - final PolyphenyDbSchema rootSchema = transaction.getSnapshot(); + final Snapshot snapshot = transaction.getSnapshot(); FrameworkConfig config = Frameworks.newConfigBuilder() .parserConfig( Parser.ParserConfig.DEFAULT ) - .defaultSchema( rootSchema ) - .traitDefs( (List) null ) + .defaultSchema( snapshot ) + .traitDefs( (List>) null ) .programs( Programs.heuristicJoinOrder( Programs.RULE_SET, true, 2 ) )
.prepareContext( new ContextImpl( - rootSchema, + snapshot, new SlimDataContext() { @Override public JavaTypeFactory getTypeFactory() { diff --git a/dbms/src/test/java/org/polypheny/db/mql/DdlTest.java b/dbms/src/test/java/org/polypheny/db/mql/DdlTest.java index 3b366d8417..5bc31ca636 100644 --- a/dbms/src/test/java/org/polypheny/db/mql/DdlTest.java +++ b/dbms/src/test/java/org/polypheny/db/mql/DdlTest.java @@ -33,7 +33,7 @@ import org.polypheny.db.TestHelper.MongoConnection; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.logistic.Pattern; -import org.polypheny.db.catalog.entity.LogicalCollection; +import org.polypheny.db.catalog.entity.logical.LogicalCollection; import org.polypheny.db.catalog.entity.CatalogSchema; import org.polypheny.db.catalog.exceptions.UnknownSchemaException; import org.polypheny.db.excluded.CassandraExcluded; diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticsManagerImpl.java b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticsManagerImpl.java index d74e1f4974..07fa2dd472 100644 --- a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticsManagerImpl.java +++ b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticsManagerImpl.java @@ -53,6 +53,7 @@ import org.polypheny.db.algebra.operators.OperatorName; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.Snapshot; import org.polypheny.db.catalog.entity.CatalogColumn; import org.polypheny.db.catalog.entity.CatalogSchema; import org.polypheny.db.catalog.entity.logical.LogicalTable; @@ -524,13 +525,13 @@ private StatisticQueryResult prepareNode( QueryResult queryResult, NodeType node @Nullable private AlgNode getQueryNode( QueryResult queryResult, NodeType nodeType ) { - PolyphenyDbCatalogReader reader = statement.getTransaction().getSnapshot(); + Snapshot snapshot = statement.getTransaction().getSnapshot(); AlgBuilder relBuilder = AlgBuilder.create( statement ); final RexBuilder rexBuilder = relBuilder.getRexBuilder(); final AlgOptCluster cluster = AlgOptCluster.create( statement.getQueryProcessor().getPlanner(), rexBuilder, null, statement.getDataContext().getSnapshot() ); AlgNode queryNode; - LogicalRelScan tableScan = getLogicalScan( queryResult.getEntity().id, reader, cluster ); + LogicalRelScan tableScan = getLogicalScan( queryResult.getEntity().id, snapshot, cluster ); switch ( nodeType ) { case MIN: case MAX: @@ -555,8 +556,8 @@ private AlgNode getQueryNode( QueryResult queryResult, NodeType nodeType ) { /** * Gets a tableScan for a given table. 
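* With the snapshot-based catalog the scan is created directly from the logical table, i.e. LogicalRelScan.create( cluster, snapshot.getLogicalTable( tableId ) ).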
*/ - private LogicalRelScan getLogicalScan( long tableId, CatalogReader reader, AlgOptCluster cluster ) { - return LogicalRelScan.create( cluster, reader.getRootSchema().getTable( tableId ) ); + private LogicalRelScan getLogicalScan( long tableId, Snapshot snapshot, AlgOptCluster cluster ) { + return LogicalRelScan.create( cluster, snapshot.getLogicalTable( tableId ) ); } diff --git a/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/CottontailEntity.java b/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/CottontailEntity.java index 08cc766591..03407f28dc 100644 --- a/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/CottontailEntity.java +++ b/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/CottontailEntity.java @@ -32,6 +32,7 @@ import org.polypheny.db.algebra.core.common.Modify.Operation; import org.polypheny.db.algebra.logical.relational.LogicalRelModify; import org.polypheny.db.algebra.type.AlgProtoDataType; +import org.polypheny.db.catalog.Snapshot; import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.catalog.entity.allocation.AllocationTable; import org.polypheny.db.catalog.entity.logical.LogicalTable; @@ -44,7 +45,6 @@ import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.plan.Convention; import org.polypheny.db.rex.RexNode; -import org.polypheny.db.schema.PolyphenyDbSchema; import org.polypheny.db.schema.impl.AbstractTableQueryable; import org.vitrivr.cottontail.grpc.CottontailGrpc.EntityName; import org.vitrivr.cottontail.grpc.CottontailGrpc.From; @@ -117,7 +117,7 @@ public Modify toModificationAlg( AlgNode input, Operation operation, List updateColumnList, - List sourceExpressionList + List sourceExpressionList ) { this.cottontailSchema.getConvention().register( cluster.getPlanner() ); return new LogicalRelModify( @@ -131,8 +131,8 @@ public Modify toModificationAlg( @Override - public Queryable asQueryable( DataContext dataContext, PolyphenyDbSchema schema, long entityId ) { - return new CottontailTableQueryable( dataContext, schema, this ); + public Queryable asQueryable( DataContext dataContext, Snapshot snapshot, long entityId ) { + return new CottontailTableQueryable( dataContext, snapshot, this ); } @@ -153,10 +153,10 @@ public Type getElementType() { } - private class CottontailTableQueryable extends AbstractTableQueryable { + private static class CottontailTableQueryable extends AbstractTableQueryable { - public CottontailTableQueryable( DataContext dataContext, PolyphenyDbSchema schema, PhysicalTable physicalTable ) { - super( dataContext, schema, null, physicalTable.name ); + public CottontailTableQueryable( DataContext dataContext, Snapshot snapshot, CottontailEntity physicalTable ) { + super( dataContext, snapshot, physicalTable ); } diff --git a/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/CottontailPlugin.java b/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/CottontailPlugin.java index 9fccdacf80..81b4ca0de7 100644 --- a/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/CottontailPlugin.java +++ b/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/CottontailPlugin.java @@ -28,6 +28,7 @@ import java.util.LinkedList; import java.util.List; import java.util.Map; +import java.util.stream.Collectors; import lombok.Getter; import lombok.extern.slf4j.Slf4j; import org.pf4j.Plugin; @@ -42,6 +43,7 @@ import 
org.polypheny.db.adapter.cottontail.util.CottontailTypeUtil; import org.polypheny.db.catalog.Adapter; import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.Snapshot; import org.polypheny.db.catalog.entity.CatalogColumn; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; import org.polypheny.db.catalog.entity.CatalogIndex; @@ -51,7 +53,6 @@ import org.polypheny.db.catalog.entity.physical.PhysicalTable; import org.polypheny.db.prepare.Context; import org.polypheny.db.schema.Namespace; -import org.polypheny.db.schema.SchemaPlus; import org.polypheny.db.transaction.PolyXid; import org.polypheny.db.type.PolyType; import org.polypheny.db.util.PolyphenyHomeDirManager; @@ -203,13 +204,13 @@ public CottontailStore( int storeId, String uniqueName, Map sett @Override - public void createNewSchema( SchemaPlus rootSchema, String name, Long id ) { - this.currentSchema = CottontailSchema.create( id, rootSchema, name, this.wrapper, this ); + public void createNewSchema( Snapshot snapshot, String name, long id ) { + this.currentSchema = CottontailSchema.create( id, snapshot, name, this.wrapper, this ); } @Override - public PhysicalTable createTableSchema( LogicalTable logical, AllocationTable allocationTable ) { + public PhysicalTable createAdapterTable( LogicalTable logical, AllocationTable allocationTable, PhysicalTable physicalTable ) { return new CottontailEntity( this.currentSchema, this.dbName, @@ -226,7 +227,7 @@ public Namespace getCurrentSchema() { @Override - public void createTable( Context context, LogicalTable combinedTable, List partitionIds ) { + public PhysicalTable createPhysicalTable( Context context, LogicalTable combinedTable, AllocationTable allocationTable ) { /* Begin or continue Cottontail DB transaction. */ final long txId = this.wrapper.beginOrContinue( context.getStatement().getTransaction() ); @@ -234,40 +235,31 @@ public void createTable( Context context, LogicalTable combinedTable, List /* Prepare CREATE TABLE message. */ final List columns = this.buildColumnDefinitions( this.catalog.getColumnPlacementsOnAdapterPerTable( this.getAdapterId(), combinedTable.id ) ); - for ( long partitionId : partitionIds ) { - final String physicalTableName = CottontailNameUtil.createPhysicalTableName( combinedTable.id, partitionId ); - catalog.updatePartitionPlacementPhysicalNames( - getAdapterId(), - partitionId, - this.dbName, - physicalTableName ); - - final EntityName tableEntity = EntityName.newBuilder() - .setSchema( this.currentSchema.getCottontailSchema() ) - .setName( physicalTableName ) - .build(); - final EntityDefinition definition = EntityDefinition.newBuilder() - .setEntity( tableEntity ) - .addAllColumns( columns ) - .build(); + final String physicalTableName = CottontailNameUtil.createPhysicalTableName( combinedTable.id, allocationTable.id ); + catalog.updatePartitionPlacementPhysicalNames( + getAdapterId(), + allocationTable.id, + this.dbName, + physicalTableName ); - CreateEntityMessage createEntityMessage = CreateEntityMessage.newBuilder() - .setMetadata( Metadata.newBuilder().setTransactionId( txId ).build() ) - .setDefinition( definition ).build(); - boolean success = this.wrapper.createEntityBlocking( createEntityMessage ); - if ( !success ) { - throw new RuntimeException( "Unable to create table." 
); - } + final EntityName tableEntity = EntityName.newBuilder() + .setSchema( this.currentSchema.getCottontailSchema() ) + .setName( physicalTableName ) + .build(); + final EntityDefinition definition = EntityDefinition.newBuilder() + .setEntity( tableEntity ) + .addAllColumns( columns ) + .build(); + CreateEntityMessage createEntityMessage = CreateEntityMessage.newBuilder() + .setMetadata( Metadata.newBuilder().setTransactionId( txId ).build() ) + .setDefinition( definition ).build(); + boolean success = this.wrapper.createEntityBlocking( createEntityMessage ); + if ( !success ) { + throw new RuntimeException( "Unable to create table." ); } - for ( CatalogColumnPlacement placement : this.catalog.getColumnPlacementsOnAdapterPerTable( this.getAdapterId(), combinedTable.id ) ) { - this.catalog.updateColumnPlacementPhysicalNames( - this.getAdapterId(), - placement.columnId, - this.dbName, - CottontailNameUtil.createPhysicalColumnName( placement.columnId ), - true ); - } + + return new PhysicalTable( allocationTable, physicalTableName, this.dbName, allocationTable.getColumnNames().keySet().stream().map( CottontailNameUtil::createPhysicalColumnName ).collect( Collectors.toList() ) ); } diff --git a/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/CottontailSchema.java b/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/CottontailSchema.java index 5e748733ab..ce3fe8a00b 100644 --- a/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/CottontailSchema.java +++ b/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/CottontailSchema.java @@ -25,11 +25,10 @@ import org.apache.calcite.linq4j.tree.Expression; import org.polypheny.db.adapter.DataContext; import org.polypheny.db.adapter.cottontail.CottontailPlugin.CottontailStore; +import org.polypheny.db.catalog.Snapshot; import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.schema.Namespace; import org.polypheny.db.schema.Namespace.Schema; -import org.polypheny.db.schema.PolyphenyDbSchema; -import org.polypheny.db.schema.SchemaPlus; import org.polypheny.db.schema.SchemaVersion; import org.polypheny.db.schema.Schemas; import org.polypheny.db.schema.impl.AbstractNamespace; @@ -93,12 +92,12 @@ public CottontailSchema( public static CottontailSchema create( Long id, - SchemaPlus parentSchema, + Snapshot snapshot, String name, CottontailWrapper wrapper, CottontailStore cottontailStore ) { - final Expression expression = Schemas.subSchemaExpression( parentSchema.polyphenyDbSchema(), name, CottontailSchema.class ); + final Expression expression = Schemas.subSchemaExpression( snapshot, name, CottontailSchema.class ); final CottontailConvention convention = CottontailConvention.of( name, expression ); return new CottontailSchema( id, wrapper, convention, cottontailStore, name ); } @@ -129,13 +128,13 @@ public Namespace snapshot( SchemaVersion version ) { @Override - public Expression getExpression( PolyphenyDbSchema parentSchema, String name ) { - return Schemas.subSchemaExpression( parentSchema, name, CottontailSchema.class ); + public Expression getExpression( Snapshot snapshot, String name ) { + return Schemas.subSchemaExpression( snapshot, name, CottontailSchema.class ); } @Override - protected Map getTableMap() { + protected Map getTables() { return ImmutableMap.copyOf( this.tableMap ); } diff --git a/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/algebra/CottontailProject.java 
b/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/algebra/CottontailProject.java index 792e1e5e8f..29e58cbf48 100644 --- a/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/algebra/CottontailProject.java +++ b/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/algebra/CottontailProject.java @@ -90,7 +90,7 @@ public void implement( CottontailImplementContext context ) { context.visitChild( 0, getInput() ); } - final List fieldList = context.cottontailTable.getRowType( getCluster().getTypeFactory() ).getFieldList(); + final List fieldList = context.cottontailTable.getRowType().getFieldList(); final List physicalColumnNames = new ArrayList<>( fieldList.size() ); final List columnTypes = new ArrayList<>( fieldList.size() ); diff --git a/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/algebra/CottontailScan.java b/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/algebra/CottontailScan.java index dc58cc9e66..26a27ae59f 100644 --- a/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/algebra/CottontailScan.java +++ b/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/algebra/CottontailScan.java @@ -56,7 +56,7 @@ public AlgOptCost computeSelfCost( AlgOptPlanner planner, AlgMetadataQuery mq ) @Override public void implement( CottontailImplementContext context ) { -// context.from = From.newBuilder().setEntity( this.cottontailTable.getTable() ).build(); +// context.from = From.newBuilder().setEntity( this.cottontailTable.getLogicalTable() ).build(); if ( context.queryType == null ) { context.cottontailTable = this.cottontailTable; context.schemaName = this.cottontailTable.getPhysicalSchemaName(); diff --git a/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/algebra/CottontailTableModify.java b/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/algebra/CottontailTableModify.java index b872b0850b..1447fba137 100644 --- a/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/algebra/CottontailTableModify.java +++ b/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/algebra/CottontailTableModify.java @@ -74,7 +74,7 @@ public CottontailTableModify( AlgNode input, Operation operation, List updateColumnList, - List sourceExpressionList, + List sourceExpressionList, boolean flattened ) { super( input.getCluster(), traitSet, table, input, operation, updateColumnList, sourceExpressionList, flattened ); this.cottontailTable = table.unwrap( CottontailEntity.class ); diff --git a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvFilterableTable.java b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvFilterableTable.java index 722db6fe2a..fbc08a3fa6 100644 --- a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvFilterableTable.java +++ b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvFilterableTable.java @@ -40,12 +40,12 @@ import org.apache.calcite.linq4j.Enumerator; import org.polypheny.db.adapter.DataContext; import org.polypheny.db.algebra.constant.Kind; -import org.polypheny.db.algebra.type.AlgProtoDataType; +import org.polypheny.db.catalog.entity.allocation.AllocationTable; +import org.polypheny.db.catalog.refactor.FilterableEntity; import org.polypheny.db.rex.RexCall; import org.polypheny.db.rex.RexInputRef; import org.polypheny.db.rex.RexLiteral; 
import org.polypheny.db.rex.RexNode; -import org.polypheny.db.schema.FilterableEntity; import org.polypheny.db.util.Source; @@ -59,8 +59,8 @@ public class CsvFilterableTable extends CsvTable implements FilterableEntity { /** * Creates a CsvFilterableTable. */ - public CsvFilterableTable( Source source, AlgProtoDataType protoRowType, List fieldTypes, int[] fields, CsvSource csvSource, Long tableId ) { - super( source, protoRowType, fieldTypes, fields, csvSource, tableId ); + public CsvFilterableTable( Source source, AllocationTable table, List fieldTypes, int[] fields, CsvSource csvSource ) { + super( source, table, fieldTypes, fields, csvSource ); } @@ -75,7 +75,7 @@ public Enumerable scan( DataContext dataContext, List filters final String[] filterValues = new String[fieldTypes.size()]; filters.removeIf( filter -> addFilter( filter, filterValues ) ); final AtomicBoolean cancelFlag = DataContext.Variable.CANCEL_FLAG.get( dataContext ); - return new AbstractEnumerable() { + return new AbstractEnumerable<>() { @Override public Enumerator enumerator() { return new CsvEnumerator<>( source, cancelFlag, false, filterValues, new CsvEnumerator.ArrayRowConverter( fieldTypes, fields ) ); diff --git a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvProjectScanRule.java b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvProjectScanRule.java index 5a12ed1da1..6bfae38732 100644 --- a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvProjectScanRule.java +++ b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvProjectScanRule.java @@ -71,12 +71,13 @@ public void onMatch( AlgOptRuleCall call ) { final LogicalProject project = call.alg( 0 ); final CsvScan scan = call.alg( 1 ); int[] fields = getProjectFields( project.getProjects() ); - if ( fields == null ) { + if ( fields == null || scan.getEntity().unwrap( CsvTable.class ) == null ) { // Project contains expressions more complex than just field references. 
return; } + call.transformTo( - new CsvScan( scan.getCluster(), scan.getEntity(), scan.csvTable, fields ) ); + new CsvScan( scan.getCluster(), scan.getEntity().unwrap( CsvTable.class ), scan.csvTable, fields ) ); } diff --git a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvScan.java b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvScan.java index ce16f01cc1..712af41ace 100644 --- a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvScan.java +++ b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvScan.java @@ -70,7 +70,7 @@ public class CsvScan extends RelScan implements EnumerableAlg { final int[] fields; - protected CsvScan( AlgOptCluster cluster, AlgOptEntity table, CsvTranslatableTable csvTable, int[] fields ) { + protected CsvScan( AlgOptCluster cluster, CsvTable table, CsvTranslatableTable csvTable, int[] fields ) { super( cluster, cluster.traitSetOf( EnumerableConvention.INSTANCE ), table ); this.csvTable = csvTable; this.fields = fields; @@ -82,7 +82,7 @@ protected CsvScan( AlgOptCluster cluster, AlgOptEntity table, CsvTranslatableTab @Override public AlgNode copy( AlgTraitSet traitSet, List inputs ) { assert inputs.isEmpty(); - return new CsvScan( getCluster(), table, csvTable, fields ); + return new CsvScan( getCluster(), entity, csvTable, fields ); } @@ -94,7 +94,7 @@ public AlgWriter explainTerms( AlgWriter pw ) { @Override public AlgDataType deriveRowType() { - final List fieldList = table.getRowType().getFieldList(); + final List fieldList = entity.getRowType().getFieldList(); final AlgDataTypeFactory.Builder builder = getCluster().getTypeFactory().builder(); for ( int field : fields ) { builder.add( fieldList.get( field ) ); @@ -116,7 +116,7 @@ public AlgOptCost computeSelfCost( AlgOptPlanner planner, AlgMetadataQuery mq ) // The "+ 2D" on top and bottom keeps the function fairly smooth. 
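// It also keeps the multiplier strictly positive when no fields are projected: (0 + 2) / (n + 2) > 0, so a projected scan never appears free to the planner.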
// // For example, if table has 3 fields, project has 1 field, then factor = (1 + 2) / (3 + 2) = 0.6 - return super.computeSelfCost( planner, mq ).multiplyBy( ((double) fields.length + 2D) / ((double) table.getRowType().getFieldCount() + 2D) ); + return super.computeSelfCost( planner, mq ).multiplyBy( ((double) fields.length + 2D) / ((double) entity.getRowType().getFieldCount() + 2D) ); } @@ -127,7 +127,7 @@ public Result implement( EnumerableAlgImplementor implementor, Prefer pref ) { /*if ( table instanceof JsonTable ) { return implementor.result( physType, Blocks.toBlock( Expressions.call( table.getExpression( JsonTable.class ), "enumerable" ) ) ); }*/ - return implementor.result( physType, Blocks.toBlock( Expressions.call( table.getExpression( CsvTranslatableTable.class ), "project", implementor.getRootExpression(), Expressions.constant( fields ) ) ) ); + return implementor.result( physType, Blocks.toBlock( Expressions.call( entity.asExpression( CsvTranslatableTable.class ), "project", implementor.getRootExpression(), Expressions.constant( fields ) ) ) ); } } diff --git a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvScannableTable.java b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvScannableTable.java index 7f512de60b..2cc31e1fdb 100644 --- a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvScannableTable.java +++ b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvScannableTable.java @@ -40,7 +40,8 @@ import org.apache.calcite.linq4j.Enumerator; import org.polypheny.db.adapter.DataContext; import org.polypheny.db.algebra.type.AlgProtoDataType; -import org.polypheny.db.schema.ScannableEntity; +import org.polypheny.db.catalog.entity.allocation.AllocationTable; +import org.polypheny.db.catalog.refactor.ScannableEntity; import org.polypheny.db.util.Source; @@ -55,8 +56,8 @@ public class CsvScannableTable extends CsvTable implements ScannableEntity { /** * Creates a CsvScannableTable. 
*/ - protected CsvScannableTable( Source source, AlgProtoDataType protoRowType, List fieldTypes, int[] fields, CsvSource csvSource, Long tableId ) { - super( source, protoRowType, fieldTypes, fields, csvSource, tableId ); + protected CsvScannableTable( Source source, AllocationTable table, List fieldTypes, int[] fields, CsvSource csvSource ) { + super( source, table, fieldTypes, fields, csvSource ); } diff --git a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSchema.java b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSchema.java index de0311f1ca..131d020b33 100644 --- a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSchema.java +++ b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSchema.java @@ -40,16 +40,16 @@ import java.util.LinkedList; import java.util.List; import java.util.Map; +import lombok.Getter; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; -import org.polypheny.db.algebra.type.AlgDataTypeImpl; import org.polypheny.db.algebra.type.AlgDataTypeSystem; -import org.polypheny.db.algebra.type.AlgProtoDataType; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogColumn; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; -import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; +import org.polypheny.db.catalog.entity.allocation.AllocationTable; import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.catalog.entity.physical.PhysicalTable; import org.polypheny.db.schema.Entity; import org.polypheny.db.schema.Namespace.Schema; import org.polypheny.db.schema.impl.AbstractNamespace; @@ -67,6 +67,7 @@ public class CsvSchema extends AbstractNamespace implements Schema { private final URL directoryUrl; private final CsvTable.Flavor flavor; + @Getter private final Map tableMap = new HashMap<>(); @@ -83,12 +84,12 @@ public CsvSchema( long id, URL directoryUrl, CsvTable.Flavor flavor ) { } - public Entity createCsvTable( LogicalTable catalogTable, List columnPlacementsOnStore, CsvSource csvSource, CatalogPartitionPlacement partitionPlacement ) { + public PhysicalTable createCsvTable( LogicalTable catalogTable, AllocationTable allocationTable, CsvSource csvSource ) { final AlgDataTypeFactory typeFactory = new PolyTypeFactoryImpl( AlgDataTypeSystem.DEFAULT ); final AlgDataTypeFactory.Builder fieldInfo = typeFactory.builder(); List fieldTypes = new LinkedList<>(); - List fieldIds = new ArrayList<>( columnPlacementsOnStore.size() ); - for ( CatalogColumnPlacement placement : columnPlacementsOnStore ) { + List fieldIds = new ArrayList<>( allocationTable.placements.size() ); + for ( CatalogColumnPlacement placement : allocationTable.placements ) { CatalogColumn catalogColumn = Catalog.getInstance().getColumn( placement.columnId ); AlgDataType sqlType = sqlType( typeFactory, catalogColumn.type, catalogColumn.length, catalogColumn.scale, null ); fieldInfo.add( catalogColumn.name, placement.physicalColumnName, sqlType ).nullable( catalogColumn.nullable ); @@ -107,29 +108,23 @@ public Entity createCsvTable( LogicalTable catalogTable, List i ).toArray(); - CsvTable table = createTable( source, AlgDataTypeImpl.proto( fieldInfo.build() ), fieldTypes, fields, csvSource, catalogTable.id ); - tableMap.put( catalogTable.name + "_" + partitionPlacement.partitionId, table ); + CsvTable table = createTable( source, allocationTable, fieldTypes, fields, csvSource ); + tableMap.put( 
catalogTable.name + "_" + allocationTable.id, table ); return table; } - @Override - public Map getTableMap() { - return new HashMap<>( tableMap ); - } - - /** * Creates different subtype of table based on the "flavor" attribute. */ - private CsvTable createTable( Source source, AlgProtoDataType protoRowType, List fieldTypes, int[] fields, CsvSource csvSource, Long tableId ) { + private CsvTable createTable( Source source, AllocationTable table, List fieldTypes, int[] fields, CsvSource csvSource ) { switch ( flavor ) { case TRANSLATABLE: - return new CsvTranslatableTable( source, protoRowType, fieldTypes, fields, csvSource, tableId ); + return new CsvTranslatableTable( source, table, fieldTypes, fields, csvSource ); case SCANNABLE: - return new CsvScannableTable( source, protoRowType, fieldTypes, fields, csvSource, tableId ); + return new CsvScannableTable( source, table, fieldTypes, fields, csvSource ); case FILTERABLE: - return new CsvFilterableTable( source, protoRowType, fieldTypes, fields, csvSource, tableId ); + return new CsvFilterableTable( source, table, fieldTypes, fields, csvSource ); default: throw new AssertionError( "Unknown flavor " + this.flavor ); } @@ -218,7 +213,7 @@ private AlgDataType parseTypeString( AlgDataTypeFactory typeFactory, String type // // // @Override -// public Map getTableMap() { +// public Map getTables() { // if ( tableMap == null ) { // tableMap = createTableMap(); // } @@ -250,7 +245,7 @@ private AlgDataType parseTypeString( AlgDataTypeFactory typeFactory, String type // } // final Source sourceSansCsv = sourceSansGz.trim( ".csv" ); // -// final Table table = createTable( source ); +// final Table table = createPhysicalTable( source ); // builder.put( sourceSansCsv.relative( baseSource ).path(), table ); // } // return builder.build(); diff --git a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java index fabaed9465..fa3b984b1f 100644 --- a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java +++ b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java @@ -41,6 +41,7 @@ import org.polypheny.db.adapter.DeployMode; import org.polypheny.db.adapter.csv.CsvTable.Flavor; import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.Snapshot; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; import org.polypheny.db.catalog.entity.allocation.AllocationTable; import org.polypheny.db.catalog.entity.logical.LogicalTable; @@ -49,7 +50,6 @@ import org.polypheny.db.information.InformationTable; import org.polypheny.db.prepare.Context; import org.polypheny.db.schema.Namespace; -import org.polypheny.db.schema.SchemaPlus; import org.polypheny.db.transaction.PolyXid; import org.polypheny.db.type.PolyType; import org.polypheny.db.util.Source; @@ -127,14 +127,14 @@ public T parseSetting( String key, Class clazz ) { @Override - public void createNewSchema( SchemaPlus rootSchema, String name, Long id ) { + public void createNewSchema( Snapshot snapshot, String name, long id ) { currentSchema = new CsvSchema( id, csvDir, Flavor.SCANNABLE ); } @Override - public PhysicalTable createTableSchema( LogicalTable logical, AllocationTable allocationTable ) { - return currentSchema.createCsvTable( catalogTable, columnPlacementsOnStore, this, partitionPlacement ); + public PhysicalTable createAdapterTable( LogicalTable logical, AllocationTable allocationTable, PhysicalTable physicalTable ) { + return 
currentSchema.createCsvTable( logical, allocationTable, this ); } diff --git a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvTable.java b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvTable.java index 009481afd2..f4c2441948 100644 --- a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvTable.java +++ b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvTable.java @@ -39,6 +39,8 @@ import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; import org.polypheny.db.algebra.type.AlgProtoDataType; +import org.polypheny.db.catalog.entity.allocation.AllocationTable; +import org.polypheny.db.catalog.entity.physical.PhysicalTable; import org.polypheny.db.schema.Entity.Table; import org.polypheny.db.schema.impl.AbstractEntity; import org.polypheny.db.util.Source; @@ -47,10 +49,9 @@ /** * Base class for table that reads CSV files. */ -public abstract class CsvTable extends AbstractEntity implements Table { +public abstract class CsvTable extends PhysicalTable implements Table { protected final Source source; - protected final AlgProtoDataType protoRowType; protected List fieldTypes; protected final int[] fields; protected final CsvSource csvSource; @@ -59,17 +60,16 @@ public abstract class CsvTable extends AbstractEntity implements Table { /** * Creates a CsvTable. */ - CsvTable( Source source, AlgProtoDataType protoRowType, List fieldTypes, int[] fields, CsvSource csvSource, Long tableId ) { - super( tableId, null, null ); + CsvTable( Source source, AllocationTable table, List fieldTypes, int[] fields, CsvSource csvSource ) { + super( table ); this.source = source; - this.protoRowType = protoRowType; this.fieldTypes = fieldTypes; this.fields = fields; this.csvSource = csvSource; } - @Override + /*@Override public AlgDataType getRowType( AlgDataTypeFactory typeFactory ) { if ( protoRowType != null ) { return protoRowType.apply( typeFactory ); @@ -80,7 +80,7 @@ public AlgDataType getRowType( AlgDataTypeFactory typeFactory ) { } else { return CsvEnumerator.deduceRowType( (JavaTypeFactory) typeFactory, source, null ); } - } + }*/ /** diff --git a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvTranslatableTable.java b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvTranslatableTable.java index 05a6c40bde..7813d0c10b 100644 --- a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvTranslatableTable.java +++ b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvTranslatableTable.java @@ -44,26 +44,29 @@ import org.polypheny.db.adapter.DataContext; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.type.AlgProtoDataType; +import org.polypheny.db.catalog.Snapshot; +import org.polypheny.db.catalog.entity.CatalogEntity; +import org.polypheny.db.catalog.entity.allocation.AllocationTable; +import org.polypheny.db.catalog.refactor.QueryableEntity; +import org.polypheny.db.catalog.refactor.TranslatableEntity; import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptEntity.ToAlgContext; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.schema.PolyphenyDbSchema; -import org.polypheny.db.schema.QueryableEntity; import org.polypheny.db.schema.Schemas; -import org.polypheny.db.schema.TranslatableEntity; import org.polypheny.db.util.Source; /** * Table based on a CSV file. 
*/ -public class CsvTranslatableTable extends CsvTable implements QueryableEntity, TranslatableEntity { +public class CsvTranslatableTable extends CsvTable implements TranslatableEntity { /** * Creates a CsvTable. */ - CsvTranslatableTable( Source source, AlgProtoDataType protoRowType, List fieldTypes, int[] fields, CsvSource csvSource, Long tableId ) { - super( source, protoRowType, fieldTypes, fields, csvSource, tableId ); + CsvTranslatableTable( Source source, AllocationTable table, List fieldTypes, int[] fields, CsvSource csvSource ) { + super( source, table, fieldTypes, fields, csvSource ); } @@ -80,7 +83,7 @@ public String toString() { public Enumerable project( final DataContext dataContext, final int[] fields ) { dataContext.getStatement().getTransaction().registerInvolvedAdapter( csvSource ); final AtomicBoolean cancelFlag = DataContext.Variable.CANCEL_FLAG.get( dataContext ); - return new AbstractEnumerable() { + return new AbstractEnumerable<>() { @Override public Enumerator enumerator() { return new CsvEnumerator<>( source, cancelFlag, fieldTypes, fields ); @@ -90,27 +93,9 @@ public Enumerator enumerator() { @Override - public Expression getExpression( PolyphenyDbSchema schema, String tableName, Class clazz ) { - return Schemas.tableExpression( schema, getElementType(), tableName, clazz ); - } - - - @Override - public Type getElementType() { - return Object[].class; - } - - - @Override - public Queryable asQueryable( DataContext dataContext, PolyphenyDbSchema schema, String tableName ) { - throw new UnsupportedOperationException(); - } - - - @Override - public AlgNode toAlg( ToAlgContext context, AlgOptEntity algOptEntity, AlgTraitSet traitSet ) { + public AlgNode toAlg( ToAlgContext context, AlgTraitSet traitSet ) { // Request all fields. 
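A hedged aside on the comment above: "all fields" means the identity projection over the table's row type. A minimal sketch of building such a mapping, with no claim that this is how the member fields is actually populated in this adapter:

class IdentityProjectionSketch {
    // Builds [0, 1, ..., n-1], i.e. a projection that keeps every column in declared order.
    static int[] allFields( int fieldCount ) {
        int[] fields = new int[fieldCount];
        for ( int i = 0; i < fieldCount; i++ ) {
            fields[i] = i;
        }
        return fields;
    }
}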
- return new CsvScan( context.getCluster(), algOptEntity, this, fields ); + return new CsvScan( context.getCluster(), this, this, fields ); } } diff --git a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/cypher2alg/CypherToAlgConverter.java b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/cypher2alg/CypherToAlgConverter.java index c54c8caf19..d696f28d6d 100644 --- a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/cypher2alg/CypherToAlgConverter.java +++ b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/cypher2alg/CypherToAlgConverter.java @@ -49,6 +49,7 @@ import org.polypheny.db.algebra.type.AlgDataTypeFieldImpl; import org.polypheny.db.algebra.type.AlgRecordType; import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.Snapshot; import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.catalog.exceptions.UnknownSchemaException; import org.polypheny.db.cypher.CypherNode; @@ -85,7 +86,7 @@ @Slf4j public class CypherToAlgConverter { - private final PolyphenyDbCatalogReader catalogReader; + private final Snapshot snapshot; private final AlgBuilder algBuilder; private final Statement statement; private final RexBuilder rexBuilder; @@ -93,7 +94,7 @@ public class CypherToAlgConverter { public CypherToAlgConverter( Statement statement, AlgBuilder builder, RexBuilder rexBuilder, AlgOptCluster cluster ) { - this.catalogReader = statement.getTransaction().getSnapshot(); + this.snapshot = statement.getTransaction().getSnapshot(); this.statement = statement; this.algBuilder = builder; this.rexBuilder = rexBuilder; @@ -120,7 +121,7 @@ public AlgRoot convert( CypherNode query, ExtendedQueryParameters parameters, Al throw new RuntimeException( "Used an unsupported query."
); } - CypherContext context = new CypherContext( query, graph, cluster, algBuilder, rexBuilder, catalogReader ); + CypherContext context = new CypherContext( query, graph, cluster, algBuilder, rexBuilder, snapshot ); convertQuery( query, context ); @@ -413,7 +414,7 @@ public static class CypherContext { public final AlgDataType edgeType; public final AlgDataType pathType; public final AlgDataType numberType; - public final PolyphenyDbCatalogReader catalogReader; + public final Snapshot snapshot; public final AlgDataTypeFactory typeFactory; public CypherNode active; public Kind kind; @@ -427,7 +428,7 @@ private CypherContext( AlgOptCluster cluster, AlgBuilder algBuilder, RexBuilder rexBuilder, - PolyphenyDbCatalogReader catalogReader ) { + Snapshot snapshot ) { this.original = original; this.graph = graph; this.cluster = cluster; @@ -440,7 +441,7 @@ private CypherContext( this.pathType = cluster.getTypeFactory().createPolyType( PolyType.PATH ); this.numberType = cluster.getTypeFactory().createPolyType( PolyType.INTEGER ); this.typeFactory = cluster.getTypeFactory(); - this.catalogReader = catalogReader; + this.snapshot = snapshot; } diff --git a/plugins/druid-adapter/src/main/java/org/polypheny/db/adapter/druid/DruidEntity.java b/plugins/druid-adapter/src/main/java/org/polypheny/db/adapter/druid/DruidEntity.java index bb13a7bc13..701b9af8db 100644 --- a/plugins/druid-adapter/src/main/java/org/polypheny/db/adapter/druid/DruidEntity.java +++ b/plugins/druid-adapter/src/main/java/org/polypheny/db/adapter/druid/DruidEntity.java @@ -54,6 +54,7 @@ import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; import org.polypheny.db.algebra.type.AlgProtoDataType; +import org.polypheny.db.catalog.refactor.TranslatableEntity; import org.polypheny.db.config.RuntimeConfig; import org.polypheny.db.interpreter.BindableConvention; import org.polypheny.db.languages.OperatorRegistry; @@ -65,7 +66,6 @@ import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.schema.Entity; import org.polypheny.db.schema.ModelTraitDef; -import org.polypheny.db.schema.TranslatableEntity; import org.polypheny.db.schema.impl.AbstractEntity; import org.polypheny.db.sql.language.SqlCall; import org.polypheny.db.sql.language.SqlNode; @@ -242,11 +242,12 @@ public AlgDataType getRowType( AlgDataTypeFactory typeFactory ) { @Override - public AlgNode toAlg( ToAlgContext context, AlgOptEntity algOptEntity, AlgTraitSet traitSet ) { + public AlgNode toAlg( ToAlgContext context, AlgTraitSet traitSet ) { final AlgOptCluster cluster = context.getCluster(); // ViewScan needed for Views - final LogicalRelScan scan = LogicalRelScan.create( cluster, algOptEntity ); - return DruidQuery.create( cluster, cluster.traitSetOf( BindableConvention.INSTANCE ).replace( traitSet.getTrait( ModelTraitDef.INSTANCE ) ), algOptEntity, this, ImmutableList.of( scan ) ); + //final LogicalRelScan scan = LogicalRelScan.create( cluster, dr ); + //return DruidQuery.create( cluster, cluster.traitSetOf( BindableConvention.INSTANCE ).replace( traitSet.getTrait( ModelTraitDef.INSTANCE ) ), algOptEntity, this, ImmutableList.of( scan ) ); + return null; } diff --git a/plugins/druid-adapter/src/main/java/org/polypheny/db/adapter/druid/DruidQuery.java b/plugins/druid-adapter/src/main/java/org/polypheny/db/adapter/druid/DruidQuery.java index b475575099..e0d06e0ba5 100644 --- a/plugins/druid-adapter/src/main/java/org/polypheny/db/adapter/druid/DruidQuery.java +++ 
b/plugins/druid-adapter/src/main/java/org/polypheny/db/adapter/druid/DruidQuery.java @@ -167,7 +167,7 @@ public class DruidQuery extends AbstractAlgNode implements BindableAlg { .build(); protected QuerySpec querySpec; - final AlgOptEntity table; + final CatalogEntity table; final DruidEntity druidTable; final ImmutableList intervals; final ImmutableList algs; @@ -194,7 +194,7 @@ public class DruidQuery extends AbstractAlgNode implements BindableAlg { * @param algs Internal relational expressions * @param converterOperatorMap mapping of Polypheny-DB Sql Operator to Druid Expression API. */ - protected DruidQuery( AlgOptCluster cluster, AlgTraitSet traitSet, AlgOptEntity table, DruidEntity druidTable, List intervals, List algs, Map converterOperatorMap ) { + protected DruidQuery( AlgOptCluster cluster, AlgTraitSet traitSet, CatalogEntity table, DruidEntity druidTable, List intervals, List algs, Map converterOperatorMap ) { super( cluster, traitSet ); this.table = table; this.druidTable = druidTable; @@ -216,7 +216,7 @@ static boolean isValidSignature( String signature ) { /** * Creates a DruidQuery. */ - public static DruidQuery create( AlgOptCluster cluster, AlgTraitSet traitSet, AlgOptEntity table, DruidEntity druidTable, List algs ) { + public static DruidQuery create( AlgOptCluster cluster, AlgTraitSet traitSet, CatalogEntity table, DruidEntity druidTable, List algs ) { final ImmutableMap.Builder mapBuilder = ImmutableMap.builder(); for ( DruidSqlOperatorConverter converter : DEFAULT_OPERATORS_LIST ) { mapBuilder.put( converter.polyphenyDbOperator(), converter ); @@ -228,7 +228,7 @@ public static DruidQuery create( AlgOptCluster cluster, AlgTraitSet traitSet, Al /** * Creates a DruidQuery. */ - public static DruidQuery create( AlgOptCluster cluster, AlgTraitSet traitSet, AlgOptEntity table, DruidEntity druidTable, List algs, Map converterOperatorMap ) { + public static DruidQuery create( AlgOptCluster cluster, AlgTraitSet traitSet, CatalogEntity table, DruidEntity druidTable, List algs, Map converterOperatorMap ) { return create( cluster, traitSet, table, druidTable, druidTable.intervals, algs, converterOperatorMap ); } @@ -236,7 +236,7 @@ public static DruidQuery create( AlgOptCluster cluster, AlgTraitSet traitSet, Al /** * Creates a DruidQuery. */ - private static DruidQuery create( AlgOptCluster cluster, AlgTraitSet traitSet, AlgOptEntity table, DruidEntity druidTable, List intervals, List algs, Map converterOperatorMap ) { + private static DruidQuery create( AlgOptCluster cluster, AlgTraitSet traitSet, CatalogEntity table, DruidEntity druidTable, List intervals, List algs, Map converterOperatorMap ) { return new DruidQuery( cluster, traitSet, table, druidTable, intervals, algs, converterOperatorMap ); } @@ -541,7 +541,7 @@ public AlgWriter explainTerms( AlgWriter pw ) { for ( AlgNode alg : algs ) { if ( alg instanceof RelScan ) { RelScan scan = (RelScan) alg; - pw.item( "table", scan.getEntity().getCatalogEntity().id ); + pw.item( "table", scan.getEntity().id ); pw.item( "intervals", intervals ); } else if ( alg instanceof Filter ) { pw.item( "filter", ((Filter) alg).getCondition() ); @@ -626,7 +626,7 @@ public void register( AlgOptPlanner planner ) { @Override public String algCompareString() { return this.getClass().getSimpleName() + "$" + - "." + table.getCatalogEntity().id + "$" + + "." + table.id + "$" + (algs != null ? 
algs.stream().map( AlgNode::algCompareString ).collect( Collectors.joining( "$" ) ) : "") + "&"; } diff --git a/plugins/druid-adapter/src/main/java/org/polypheny/db/adapter/druid/DruidSchema.java b/plugins/druid-adapter/src/main/java/org/polypheny/db/adapter/druid/DruidSchema.java index 47c646ffb2..e9163dca96 100644 --- a/plugins/druid-adapter/src/main/java/org/polypheny/db/adapter/druid/DruidSchema.java +++ b/plugins/druid-adapter/src/main/java/org/polypheny/db/adapter/druid/DruidSchema.java @@ -46,6 +46,7 @@ import java.util.Map; import java.util.Objects; import java.util.Set; +import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.schema.Entity; import org.polypheny.db.schema.Namespace.Schema; import org.polypheny.db.schema.impl.AbstractNamespace; @@ -60,7 +61,7 @@ public class DruidSchema extends AbstractNamespace implements Schema { final String url; final String coordinatorUrl; private final boolean discoverTables; - private Map tableMap = null; + private Map tableMap = null; /** @@ -79,7 +80,7 @@ public DruidSchema( long id, String url, String coordinatorUrl, boolean discover @Override - protected Map getTableMap() { + protected Map getTables() { if ( !discoverTables ) { return ImmutableMap.of(); } @@ -95,14 +96,15 @@ protected Map getTableMap() { } - private Entity table( String tableName, DruidConnectionImpl connection ) { + private CatalogEntity table( String tableName, DruidConnectionImpl connection ) { final Map fieldMap = new LinkedHashMap<>(); final Set metricNameSet = new LinkedHashSet<>(); final Map> complexMetrics = new HashMap<>(); connection.metadata( tableName, DruidEntity.DEFAULT_TIMESTAMP_COLUMN, null, fieldMap, metricNameSet, complexMetrics ); - return DruidEntity.create( DruidSchema.this, tableName, null, fieldMap, metricNameSet, DruidEntity.DEFAULT_TIMESTAMP_COLUMN, complexMetrics ); + //return DruidEntity.create( DruidSchema.this, tableName, null, fieldMap, metricNameSet, DruidEntity.DEFAULT_TIMESTAMP_COLUMN, complexMetrics ); + return null; } } diff --git a/plugins/druid-adapter/src/test/java/org/polypheny/db/test/DruidAdapterIT.java b/plugins/druid-adapter/src/test/java/org/polypheny/db/test/DruidAdapterIT.java index b87c9b1e1f..43731d6073 100644 --- a/plugins/druid-adapter/src/test/java/org/polypheny/db/test/DruidAdapterIT.java +++ b/plugins/druid-adapter/src/test/java/org/polypheny/db/test/DruidAdapterIT.java @@ -3117,7 +3117,7 @@ public class DruidAdapterIT { // @Test // public void testTableMapReused() { // AbstractSchema schema = new DruidSchema( "http://localhost:8082", "http://localhost:8081", true ); -// assertSame( schema.getTable( "wikiticker" ), schema.getTable( "wikiticker" ) ); +// assertSame( schema.getLogicalTable( "wikiticker" ), schema.getLogicalTable( "wikiticker" ) ); // } // // diff --git a/plugins/druid-adapter/src/test/java/org/polypheny/db/test/DruidAdapterIT2.java b/plugins/druid-adapter/src/test/java/org/polypheny/db/test/DruidAdapterIT2.java index 8ccfec8940..6d359e887c 100644 --- a/plugins/druid-adapter/src/test/java/org/polypheny/db/test/DruidAdapterIT2.java +++ b/plugins/druid-adapter/src/test/java/org/polypheny/db/test/DruidAdapterIT2.java @@ -2717,7 +2717,7 @@ public class DruidAdapterIT2 { // @Test // public void testTableMapReused() { // AbstractSchema schema = new DruidSchema( "http://localhost:8082", "http://localhost:8081", true ); -// assertSame( schema.getTable( "wikiticker" ), schema.getTable( "wikiticker" ) ); +// assertSame( schema.getLogicalTable( "wikiticker" ), schema.getLogicalTable( 
"wikiticker" ) ); // } // // diff --git a/plugins/elasticsearch-adapter/src/main/java/org/polypheny/db/adapter/elasticsearch/ElasticsearchEntity.java b/plugins/elasticsearch-adapter/src/main/java/org/polypheny/db/adapter/elasticsearch/ElasticsearchEntity.java index 6bc6cd75f2..7b9cb89dee 100644 --- a/plugins/elasticsearch-adapter/src/main/java/org/polypheny/db/adapter/elasticsearch/ElasticsearchEntity.java +++ b/plugins/elasticsearch-adapter/src/main/java/org/polypheny/db/adapter/elasticsearch/ElasticsearchEntity.java @@ -41,9 +41,11 @@ import com.google.common.collect.ImmutableMap; import java.io.IOException; import java.io.UncheckedIOException; +import java.lang.reflect.Type; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; +import java.util.Iterator; import java.util.LinkedHashMap; import java.util.LinkedHashSet; import java.util.List; @@ -53,33 +55,33 @@ import java.util.function.Consumer; import java.util.function.Predicate; import java.util.stream.Collectors; +import lombok.Getter; +import lombok.extern.slf4j.Slf4j; +import org.apache.calcite.linq4j.AbstractQueryable; import org.apache.calcite.linq4j.Enumerable; import org.apache.calcite.linq4j.Enumerator; import org.apache.calcite.linq4j.Linq4j; +import org.apache.calcite.linq4j.QueryProvider; import org.apache.calcite.linq4j.Queryable; import org.apache.calcite.linq4j.function.Function1; +import org.apache.calcite.linq4j.tree.Expression; import org.polypheny.db.adapter.DataContext; -import org.polypheny.db.adapter.java.AbstractQueryableEntity; import org.polypheny.db.algebra.AlgFieldCollation; import org.polypheny.db.algebra.AlgNode; -import org.polypheny.db.algebra.type.AlgDataType; -import org.polypheny.db.algebra.type.AlgDataTypeFactory; +import org.polypheny.db.catalog.Snapshot; +import org.polypheny.db.catalog.entity.physical.PhysicalTable; +import org.polypheny.db.catalog.refactor.QueryableEntity; +import org.polypheny.db.catalog.refactor.TranslatableEntity; import org.polypheny.db.plan.AlgOptCluster; -import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptEntity.ToAlgContext; import org.polypheny.db.plan.AlgTraitSet; -import org.polypheny.db.schema.ModelTraitDef; -import org.polypheny.db.schema.PolyphenyDbSchema; -import org.polypheny.db.schema.SchemaPlus; -import org.polypheny.db.schema.TranslatableEntity; -import org.polypheny.db.schema.impl.AbstractTableQueryable; -import org.polypheny.db.type.PolyType; /** * Table based on an Elasticsearch type. */ -public class ElasticsearchEntity extends AbstractQueryableEntity implements TranslatableEntity { +@Slf4j +public class ElasticsearchEntity extends PhysicalTable implements TranslatableEntity, QueryableEntity { /** * Used for constructing (possibly nested) Elastic aggregation nodes. @@ -97,7 +99,7 @@ public class ElasticsearchEntity extends AbstractQueryableEntity implements Tran * Creates an ElasticsearchTable. 
*/ ElasticsearchEntity( ElasticsearchTransport transport, Long id, Long partitionId, Long adapterId ) { - super( Object[].class, id, partitionId, adapterId ); + super( null ); this.transport = Objects.requireNonNull( transport, "transport" ); this.version = transport.version; this.indexName = transport.indexName; @@ -313,14 +315,14 @@ public void accept( JsonNode node ) { } - @Override + /*@Override public AlgDataType getRowType( AlgDataTypeFactory algDataTypeFactory ) { final AlgDataType mapType = algDataTypeFactory.createMapType( algDataTypeFactory.createPolyType( PolyType.VARCHAR ), algDataTypeFactory.createTypeWithNullability( algDataTypeFactory.createPolyType( PolyType.ANY ), true ) ); // TODO (PCP) return algDataTypeFactory.builder().add( "_MAP", null, mapType ).build(); - } + }*/ @Override @@ -330,15 +332,22 @@ public String toString() { @Override - public Queryable asQueryable( DataContext dataContext, PolyphenyDbSchema schema, String tableName ) { - return new ElasticsearchQueryable<>( dataContext, schema, this, tableName ); + public AlgNode toAlg( ToAlgContext context, AlgTraitSet traitSet ) { + final AlgOptCluster cluster = context.getCluster(); + // return new ElasticsearchScan( cluster, cluster.traitSetOf( ElasticsearchRel.CONVENTION ).replace( traitSet.getTrait( ModelTraitDef.INSTANCE ) ), algOptEntity, this, null ); + return null; } @Override - public AlgNode toAlg( ToAlgContext context, AlgOptEntity algOptEntity, AlgTraitSet traitSet ) { - final AlgOptCluster cluster = context.getCluster(); - return new ElasticsearchScan( cluster, cluster.traitSetOf( ElasticsearchRel.CONVENTION ).replace( traitSet.getTrait( ModelTraitDef.INSTANCE ) ), algOptEntity, this, null ); + public Queryable asQueryable( DataContext dataContext, Snapshot snapshot, long id ) { + return new ElasticsearchQueryable<>( dataContext, snapshot, this, id ); + } + + + @Override + public Type getElementType() { + return Object[].class; } @@ -347,10 +356,15 @@ public AlgNode toAlg( ToAlgContext context, AlgOptEntity algOptEntity, AlgTraitS * * @param element type */ - public static class ElasticsearchQueryable extends AbstractTableQueryable { + public static class ElasticsearchQueryable extends AbstractQueryable { + + @Getter + private final ElasticsearchEntity table; - ElasticsearchQueryable( DataContext dataContext, SchemaPlus schema, ElasticsearchEntity table, String tableName ) { - super( dataContext, schema, table, tableName ); + + ElasticsearchQueryable( DataContext dataContext, Snapshot snapshot, ElasticsearchEntity elasticsearchEntity, long id ) { + this.table = elasticsearchEntity; + //super( dataContext, snapshot, table, tableName ); } @@ -360,11 +374,6 @@ public Enumerator enumerator() { } - private ElasticsearchEntity getTable() { - return (ElasticsearchEntity) table; - } - - /** * Called via code-generation. 
* @@ -390,6 +399,30 @@ public Enumerable find( } } + + @Override + public Type getElementType() { + return null; + } + + + @Override + public Expression getExpression() { + return null; + } + + + @Override + public QueryProvider getProvider() { + return null; + } + + + @Override + public Iterator iterator() { + return null; + } + } } diff --git a/plugins/elasticsearch-adapter/src/main/java/org/polypheny/db/adapter/elasticsearch/ElasticsearchRel.java b/plugins/elasticsearch-adapter/src/main/java/org/polypheny/db/adapter/elasticsearch/ElasticsearchRel.java index ed44e80bd0..bb94e6cf11 100644 --- a/plugins/elasticsearch-adapter/src/main/java/org/polypheny/db/adapter/elasticsearch/ElasticsearchRel.java +++ b/plugins/elasticsearch-adapter/src/main/java/org/polypheny/db/adapter/elasticsearch/ElasticsearchRel.java @@ -41,6 +41,7 @@ import java.util.Objects; import org.polypheny.db.algebra.AlgFieldCollation; import org.polypheny.db.algebra.AlgNode; +import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.Convention; import org.polypheny.db.util.Pair; @@ -107,7 +108,7 @@ class Implementor { */ Long fetch; - AlgOptEntity table; + CatalogEntity table; ElasticsearchEntity elasticsearchTable; diff --git a/plugins/elasticsearch-adapter/src/main/java/org/polypheny/db/adapter/elasticsearch/ElasticsearchScan.java b/plugins/elasticsearch-adapter/src/main/java/org/polypheny/db/adapter/elasticsearch/ElasticsearchScan.java index 6b1f71b410..95cf655bd8 100644 --- a/plugins/elasticsearch-adapter/src/main/java/org/polypheny/db/adapter/elasticsearch/ElasticsearchScan.java +++ b/plugins/elasticsearch-adapter/src/main/java/org/polypheny/db/adapter/elasticsearch/ElasticsearchScan.java @@ -41,6 +41,7 @@ import org.polypheny.db.algebra.metadata.AlgMetadataQuery; import org.polypheny.db.algebra.rules.AggregateExpandDistinctAggregatesRule; import org.polypheny.db.algebra.type.AlgDataType; +import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgOptCost; import org.polypheny.db.plan.AlgOptEntity; @@ -70,7 +71,7 @@ public class ElasticsearchScan extends RelScan implements ElasticsearchRel { * @param elasticsearchTable Elasticsearch table * @param projectRowType Fields and types to project; null to project raw row */ - ElasticsearchScan( AlgOptCluster cluster, AlgTraitSet traitSet, AlgOptEntity table, ElasticsearchEntity elasticsearchTable, AlgDataType projectRowType ) { + ElasticsearchScan( AlgOptCluster cluster, AlgTraitSet traitSet, CatalogEntity table, ElasticsearchEntity elasticsearchTable, AlgDataType projectRowType ) { super( cluster, traitSet, table ); this.elasticsearchTable = Objects.requireNonNull( elasticsearchTable, "elasticsearchTable" ); this.projectRowType = projectRowType; @@ -114,7 +115,7 @@ public void register( AlgOptPlanner planner ) { @Override public void implement( Implementor implementor ) { implementor.elasticsearchTable = elasticsearchTable; - implementor.table = table; + implementor.table = entity; } } diff --git a/plugins/elasticsearch-adapter/src/main/java/org/polypheny/db/adapter/elasticsearch/ElasticsearchSchema.java b/plugins/elasticsearch-adapter/src/main/java/org/polypheny/db/adapter/elasticsearch/ElasticsearchSchema.java index e1a1b51574..f1136f8917 100644 --- a/plugins/elasticsearch-adapter/src/main/java/org/polypheny/db/adapter/elasticsearch/ElasticsearchSchema.java +++ 
b/plugins/elasticsearch-adapter/src/main/java/org/polypheny/db/adapter/elasticsearch/ElasticsearchSchema.java @@ -50,6 +50,7 @@ import java.util.Set; import org.elasticsearch.client.Response; import org.elasticsearch.client.RestClient; +import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.schema.Entity; import org.polypheny.db.schema.Namespace.Schema; import org.polypheny.db.schema.impl.AbstractNamespace; @@ -68,7 +69,7 @@ public class ElasticsearchSchema extends AbstractNamespace implements Schema { private final ObjectMapper mapper; - private final Map tableMap; + private final Map tableMap; /** * Default batch size to be used during scrolling. @@ -115,16 +116,16 @@ public ElasticsearchSchema( long id, RestClient client, ObjectMapper mapper, Str @Override - protected Map getTableMap() { + protected Map getTables() { return tableMap; } - private Map createTables( Iterable types, Long id, Long partitionId, Long adapterId ) { - final ImmutableMap.Builder builder = ImmutableMap.builder(); + private Map createTables( Iterable types, Long id, Long partitionId, Long adapterId ) { + final ImmutableMap.Builder builder = ImmutableMap.builder(); for ( String type : types ) { final ElasticsearchTransport transport = new ElasticsearchTransport( client, mapper, index, type, fetchSize ); - builder.put( type, new ElasticsearchEntity( transport, id, partitionId, adapterId ) ); + //builder.put( type, new ElasticsearchEntity( transport, id, partitionId, adapterId ) ); } return builder.build(); } diff --git a/plugins/elasticsearch-adapter/src/main/java/org/polypheny/db/adapter/elasticsearch/ElasticsearchToEnumerableConverter.java b/plugins/elasticsearch-adapter/src/main/java/org/polypheny/db/adapter/elasticsearch/ElasticsearchToEnumerableConverter.java index b4d597d602..7c6e1034af 100644 --- a/plugins/elasticsearch-adapter/src/main/java/org/polypheny/db/adapter/elasticsearch/ElasticsearchToEnumerableConverter.java +++ b/plugins/elasticsearch-adapter/src/main/java/org/polypheny/db/adapter/elasticsearch/ElasticsearchToEnumerableConverter.java @@ -107,7 +107,7 @@ public int size() { } } ), Pair.class ) ); - final Expression table = block.append( "table", implementor.table.getExpression( ElasticsearchEntity.ElasticsearchQueryable.class ) ); + final Expression table = block.append( "table", implementor.table.asExpression( ElasticsearchEntity.ElasticsearchQueryable.class ) ); final Expression ops = block.append( "ops", Expressions.constant( implementor.list ) ); final Expression sort = block.append( "sort", constantArrayList( implementor.sort, Pair.class ) ); final Expression groupBy = block.append( "groupBy", Expressions.constant( implementor.groupBy ) ); diff --git a/plugins/elasticsearch-adapter/src/test/java/org/polypheny/db/adapter/elasticsearch/ScrollingTest.java b/plugins/elasticsearch-adapter/src/test/java/org/polypheny/db/adapter/elasticsearch/ScrollingTest.java index 20d1951e8f..14b8eda6dc 100644 --- a/plugins/elasticsearch-adapter/src/test/java/org/polypheny/db/adapter/elasticsearch/ScrollingTest.java +++ b/plugins/elasticsearch-adapter/src/test/java/org/polypheny/db/adapter/elasticsearch/ScrollingTest.java @@ -100,7 +100,7 @@ public class ScrollingTest { // // get node stats // final Response response = NODE.restClient().performRequest( "GET", "/_nodes/stats/indices/search" ); // -// try ( InputStream is = response.getTable().getContent() ) { +// try ( InputStream is = response.getLogicalTable().getContent() ) { // final ObjectNode node = NODE.mapper().readValue( is, 
ObjectNode.class ); // final String path = "/indices/search/scroll_current"; // final JsonNode scrollCurrent = node.with( "nodes" ).elements().next().at( path ); diff --git a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/FileAlg.java b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/FileAlg.java index a928ca560e..66fbd88fb6 100644 --- a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/FileAlg.java +++ b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/FileAlg.java @@ -43,6 +43,7 @@ public enum Operation { @Getter + @Setter private transient FileTranslatableEntity fileTable; @Getter private final List columnNames = new ArrayList<>(); @@ -68,20 +69,11 @@ public FileImplementor() { //intentionally empty } - - public void setFileTable( final FileTranslatableEntity fileTable ) { - this.fileTable = fileTable; - this.columnNames.clear(); - this.columnNames.addAll( fileTable.getColumnNames() ); - } - - public void setColumnNames( final List columnNames ) { this.columnNames.clear(); this.columnNames.addAll( columnNames ); } - /** * A FileProject can directly provide the projectionMapping, a FileModify will provide the columnNames only */ @@ -105,7 +97,7 @@ else if ( operation == Operation.UPDATE ) { int i = 0; List mapping = new ArrayList<>(); for ( Value update : updates ) { - int index = getFileTable().getColumnNames().indexOf( columnNames.get( i ) ); + int index = new ArrayList<>( getFileTable().getColumnNames().values() ).indexOf( columnNames.get( i ) ); update.setColumnReference( index ); mapping.add( index ); i++; @@ -146,7 +138,7 @@ public Integer[] getProjectionMapping() { if ( ithProject.contains( "." ) ) { ithProject = ithProject.substring( ithProject.lastIndexOf( "." ) + 1 ); } - int indexOf = columnNames.indexOf( ithProject ); + int indexOf = new ArrayList<>( fileTable.getColumnNames().values() ).indexOf( ithProject ); if ( indexOf == -1 ) { throw new RuntimeException( "Could not perform the projection." 
); } diff --git a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/FilePlugin.java b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/FilePlugin.java index cdb8dfc024..c18f55bad6 100644 --- a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/FilePlugin.java +++ b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/FilePlugin.java @@ -48,6 +48,7 @@ import org.polypheny.db.adapter.DeployMode; import org.polypheny.db.catalog.Adapter; import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.Snapshot; import org.polypheny.db.catalog.entity.CatalogColumn; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; import org.polypheny.db.catalog.entity.CatalogIndex; @@ -63,7 +64,6 @@ import org.polypheny.db.information.InformationManager; import org.polypheny.db.prepare.Context; import org.polypheny.db.schema.Namespace; -import org.polypheny.db.schema.SchemaPlus; import org.polypheny.db.transaction.PolyXid; import org.polypheny.db.type.PolyType; import org.polypheny.db.util.PolyphenyHomeDirManager; @@ -175,17 +175,17 @@ private void setInformationPage() { @Override - public void createNewSchema( SchemaPlus rootSchema, String name, Long id ) { + public void createNewSchema( Snapshot snapshot, String name, long id ) { // it might be worth it to check why createNewSchema is called multiple times with different names if ( currentSchema == null ) { - currentSchema = new FileStoreSchema( id, rootSchema, name, this ); + currentSchema = new FileStoreSchema( id, snapshot, name, this ); } } @Override - public PhysicalTable createTableSchema( LogicalTable logical, AllocationTable allocationTable ) { - return currentSchema.createFileTable( catalogTable, columnPlacementsOnStore, partitionPlacement ); + public PhysicalTable createAdapterTable( LogicalTable logical, AllocationTable allocationTable, PhysicalTable physicalTable ) { + return currentSchema.createFileTable( logical, allocationTable ); } @@ -196,7 +196,7 @@ public Namespace getCurrentSchema() { @Override - public void createTable( Context context, LogicalTable catalogTable, List partitionIds ) { + public PhysicalTable createPhysicalTable( Context context, LogicalTable catalogTable, AllocationTable allocationTable ) { context.getStatement().getTransaction().registerInvolvedAdapter( this ); for ( long partitionId : partitionIds ) { @@ -457,8 +457,8 @@ public void truncate( Context context, LogicalTable table ) { for ( CatalogPartitionPlacement partitionPlacement : catalog.getPartitionPlacementsByTableOnAdapter( getAdapterId(), table.id ) ) { FileTranslatableEntity fileTable = (FileTranslatableEntity) currentSchema.getEntity( table.name + "_" + partitionPlacement.partitionId ); try { - for ( String colName : fileTable.getColumnNames() ) { - File columnFolder = getColumnFolder( fileTable.getColumnIdMap().get( colName ), fileTable.getPartitionId() ); + for ( long id : fileTable.columnIds ) { + File columnFolder = getColumnFolder( id, fileTable.allocation.id ); FileUtils.cleanDirectory( columnFolder ); } } catch ( IOException e ) { diff --git a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/FileStoreSchema.java b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/FileStoreSchema.java index e89e300667..a931bab367 100644 --- a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/FileStoreSchema.java +++ b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/FileStoreSchema.java @@ -37,11 +37,15 @@ import 
org.polypheny.db.algebra.type.AlgDataTypeSystem; import org.polypheny.db.algebra.type.AlgProtoDataType; import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.Snapshot; import org.polypheny.db.catalog.entity.CatalogColumn; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; +import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; import org.polypheny.db.catalog.entity.CatalogPrimaryKey; +import org.polypheny.db.catalog.entity.allocation.AllocationTable; import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.catalog.entity.physical.PhysicalTable; import org.polypheny.db.schema.Entity; import org.polypheny.db.schema.Namespace.Schema; import org.polypheny.db.schema.SchemaPlus; @@ -55,18 +59,19 @@ public class FileStoreSchema extends AbstractNamespace implements FileSchema, Sc @Getter private final String schemaName; - private final Map tableMap = new HashMap<>(); + @Getter + private final Map tables = new HashMap<>(); @Getter private final FileStore store; @Getter private final FileConvention convention; - public FileStoreSchema( long id, SchemaPlus parentSchema, String schemaName, FileStore store ) { + public FileStoreSchema( long id, Snapshot snapshot, String schemaName, FileStore store ) { super( id ); this.schemaName = schemaName; this.store = store; - final Expression expression = Schemas.subSchemaExpression( parentSchema, schemaName, FileStoreSchema.class ); + final Expression expression = Schemas.subSchemaExpression( snapshot, schemaName, FileStoreSchema.class ); this.convention = new FileConvention( schemaName, expression, this ); } @@ -83,44 +88,9 @@ public int getAdapterId() { } - @Override - protected Map getTableMap() { - return new HashMap<>( tableMap ); - } - - - public Entity createFileTable( + public PhysicalTable createFileTable( LogicalTable catalogTable, - List columnPlacementsOnStore, - CatalogPartitionPlacement partitionPlacement ) { - final AlgDataTypeFactory typeFactory = new PolyTypeFactoryImpl( AlgDataTypeSystem.DEFAULT ); - final AlgDataTypeFactory.Builder fieldInfo = typeFactory.builder(); - ArrayList columnIds = new ArrayList<>(); - ArrayList columnTypes = new ArrayList<>(); - ArrayList columnNames = new ArrayList<>(); - columnPlacementsOnStore.sort( Comparator.comparingLong( p -> p.columnId ) ); - for ( CatalogColumnPlacement p : columnPlacementsOnStore ) { - CatalogColumn catalogColumn; - catalogColumn = Catalog.getInstance().getColumn( p.columnId ); - if ( p.adapterId == store.getAdapterId() ) { - columnIds.add( p.columnId ); - if ( catalogColumn.collectionsType != null ) { - columnTypes.add( PolyType.ARRAY ); - } else { - columnTypes.add( catalogColumn.type ); - } - columnNames.add( catalogColumn.name ); - - if ( catalogColumn.type.allowsScale() && catalogColumn.length != null && catalogColumn.scale != null ) { - fieldInfo.add( catalogColumn.name, p.physicalColumnName, catalogColumn.type, catalogColumn.length, catalogColumn.scale ).nullable( catalogColumn.nullable ); - } else if ( catalogColumn.type.allowsPrec() && catalogColumn.length != null ) { - fieldInfo.add( catalogColumn.name, p.physicalColumnName, catalogColumn.type, catalogColumn.length ).nullable( catalogColumn.nullable ); - } else { - fieldInfo.add( catalogColumn.name, p.physicalColumnName, catalogColumn.type ).nullable( catalogColumn.nullable ); - } - } - } - AlgProtoDataType protoRowType = AlgDataTypeImpl.proto( fieldInfo.build() ); + AllocationTable allocationTable ) { List 
pkIds; if ( catalogTable.primaryKey != null ) { CatalogPrimaryKey primaryKey = Catalog.getInstance().getPrimaryKey( catalogTable.primaryKey ); @@ -128,19 +98,11 @@ public Entity createFileTable( } else { pkIds = new ArrayList<>(); } - // FileTable table = new FileTable( store.getRootDir(), schemaName, catalogEntity.id, columnIds, columnTypes, columnNames, store, this ); FileTranslatableEntity table = new FileTranslatableEntity( this, - catalogTable.name + "_" + partitionPlacement.partitionId, - catalogTable.id, - partitionPlacement.partitionId, - getAdapterId(), - columnIds, - columnTypes, - columnNames, - pkIds, - protoRowType ); - tableMap.put( catalogTable.name + "_" + partitionPlacement.partitionId, table ); + allocationTable, + pkIds ); + tables.put( catalogTable.name + "_" + allocationTable.id, table ); return table; } diff --git a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/FileTranslatableEntity.java b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/FileTranslatableEntity.java index e9bc3e4043..69590ba0a0 100644 --- a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/FileTranslatableEntity.java +++ b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/FileTranslatableEntity.java @@ -22,145 +22,91 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Map.Entry; +import java.util.stream.Collectors; import lombok.Getter; -import org.apache.calcite.linq4j.Enumerator; -import org.apache.calcite.linq4j.Queryable; -import org.polypheny.db.adapter.DataContext; import org.polypheny.db.adapter.file.algebra.FileScan; -import org.polypheny.db.adapter.java.AbstractQueryableEntity; import org.polypheny.db.algebra.AlgNode; -import org.polypheny.db.algebra.core.relational.RelModify; +import org.polypheny.db.algebra.core.common.Modify; import org.polypheny.db.algebra.core.common.Modify.Operation; import org.polypheny.db.algebra.logical.relational.LogicalRelModify; -import org.polypheny.db.algebra.type.AlgDataType; -import org.polypheny.db.algebra.type.AlgDataTypeFactory; -import org.polypheny.db.algebra.type.AlgProtoDataType; +import org.polypheny.db.catalog.entity.CatalogEntity; +import org.polypheny.db.catalog.entity.allocation.AllocationTable; +import org.polypheny.db.catalog.entity.physical.PhysicalTable; +import org.polypheny.db.catalog.refactor.ModifiableEntity; +import org.polypheny.db.catalog.refactor.TranslatableEntity; import org.polypheny.db.plan.AlgOptCluster; -import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptEntity.ToAlgContext; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.plan.Convention; -import org.polypheny.db.prepare.Prepare.CatalogReader; import org.polypheny.db.rex.RexNode; -import org.polypheny.db.schema.ModifiableEntity; -import org.polypheny.db.schema.PolyphenyDbSchema; -import org.polypheny.db.schema.SchemaPlus; -import org.polypheny.db.schema.TranslatableEntity; -import org.polypheny.db.schema.impl.AbstractTableQueryable; import org.polypheny.db.type.PolyType; -public class FileTranslatableEntity extends AbstractQueryableEntity implements TranslatableEntity, ModifiableEntity { +public class FileTranslatableEntity extends PhysicalTable implements TranslatableEntity, ModifiableEntity { private final File rootDir; @Getter - private final String tableName; - @Getter - private final Long partitionId; - @Getter - private final List columnNames; + private final Map columnNames; + @Getter - private final Map columnIdMap; + private 
final Map columnNamesIds; @Getter - private final Map columnTypeMap; + private final Map columnTypeMap; @Getter private final List pkIds; // Ids of the columns that are part of the primary key @Getter private final Long adapterId; @Getter private final FileSchema fileSchema; - private final AlgProtoDataType protoRowType; + public final AllocationTable allocation; public FileTranslatableEntity( final FileSchema fileSchema, - final String tableName, - final Long tableId, - final long partitionId, - long adapterId, final List columnIds, - final ArrayList columnTypes, - final List columnNames, - final List pkIds, - final AlgProtoDataType protoRowType ) { - super( Object[].class, tableId, partitionId, adapterId ); + final AllocationTable allocationTable, + final List pkIds ) { + super( allocationTable ); this.fileSchema = fileSchema; this.rootDir = fileSchema.getRootDir(); - this.tableName = tableName; - this.partitionId = partitionId; this.adapterId = (long) fileSchema.getAdapterId(); this.pkIds = pkIds; - this.protoRowType = protoRowType; - - this.columnNames = columnNames; - this.columnIdMap = new HashMap<>(); - this.columnTypeMap = new HashMap<>(); - int i = 0; - for ( String columnName : columnNames ) { - this.columnIdMap.put( columnName, columnIds.get( i ) ); - this.columnTypeMap.put( columnName, columnTypes.get( i ) ); - i++; - } - } - + this.allocation = allocationTable; - @Override - public AlgNode toAlg( ToAlgContext context, AlgOptEntity algOptEntity, AlgTraitSet traitSet ) { - fileSchema.getConvention().register( context.getCluster().getPlanner() ); - return new FileScan( context.getCluster(), algOptEntity, this ); + this.columnNames = allocationTable.getColumnNames(); + this.columnNamesIds = allocationTable.getColumnNamesIds(); + this.columnTypeMap = allocationTable.getColumns().entrySet().stream().collect( Collectors.toMap( Entry::getKey, a -> a.getValue().type ) ); } @Override - public AlgDataType getRowType( AlgDataTypeFactory typeFactory ) { - return protoRowType.apply( typeFactory ); + public AlgNode toAlg( ToAlgContext context, AlgTraitSet traitSet ) { + fileSchema.getConvention().register( context.getCluster().getPlanner() ); + return new FileScan( context.getCluster(), allocation, this ); } @Override - public RelModify toModificationAlg( + public Modify toModificationAlg( AlgOptCluster cluster, - AlgOptEntity table, - CatalogReader catalogReader, + AlgTraitSet traits, + CatalogEntity entity, AlgNode child, Operation operation, List updateColumnList, - List sourceExpressionList, - boolean flattened ) { + List sourceExpressionList + ) { fileSchema.getConvention().register( cluster.getPlanner() ); return new LogicalRelModify( cluster, cluster.traitSetOf( Convention.NONE ), - table, - catalogReader, + entity, child, operation, updateColumnList, sourceExpressionList, - flattened ); - } - - - @Override - public Queryable asQueryable( DataContext dataContext, PolyphenyDbSchema schema, String tableName ) { - throw new UnsupportedOperationException(); - //System.out.println("as Queryable"); - //fileSchema.getConvention().register( dataContext.getStatement().getQueryProcessor().getPlanner() ); - //return new FileQueryable<>( dataContext, schema, this, tableName ); + true ); } - public class FileQueryable extends AbstractTableQueryable { - - public FileQueryable( DataContext dataContext, SchemaPlus schema, FileTranslatableEntity table, String tableName ) { - super( dataContext, schema, FileTranslatableEntity.this, tableName ); - } - - - @Override - public Enumerator enumerator() { - 
throw new RuntimeException( "FileQueryable enumerator not yet implemented" ); - } - - } - } diff --git a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/Value.java b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/Value.java index 58b500341b..2b0338031f 100644 --- a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/Value.java +++ b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/Value.java @@ -114,14 +114,14 @@ public static List getUpdates( final List exps, FileImplementor List valueList = new ArrayList<>(); int offset; boolean noCheck; - if ( exps.size() == implementor.getColumnNames().size() ) { + if ( exps.size() == implementor.getFileTable().columnIds.size() ) { noCheck = true; offset = 0; } else { noCheck = false; - offset = implementor.getColumnNames().size(); + offset = implementor.getFileTable().columnIds.size(); } - for ( int i = offset; i < implementor.getColumnNames().size() + offset; i++ ) { + for ( int i = offset; i < implementor.getFileTable().columnIds.size() + offset; i++ ) { if ( noCheck || exps.size() > i ) { RexNode lit = exps.get( i ); if ( lit instanceof RexLiteral ) { diff --git a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/algebra/FileRules.java b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/algebra/FileRules.java index 0a48d8f515..602daa974e 100644 --- a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/algebra/FileRules.java +++ b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/algebra/FileRules.java @@ -32,10 +32,10 @@ import org.polypheny.db.algebra.convert.ConverterRule; import org.polypheny.db.algebra.core.AlgFactories; import org.polypheny.db.algebra.core.Filter; -import org.polypheny.db.algebra.core.relational.RelModify; import org.polypheny.db.algebra.core.Project; import org.polypheny.db.algebra.core.Union; import org.polypheny.db.algebra.core.Values; +import org.polypheny.db.algebra.core.relational.RelModify; import org.polypheny.db.algebra.logical.relational.LogicalProject; import org.polypheny.db.nodes.Function; import org.polypheny.db.nodes.Operator; @@ -107,19 +107,23 @@ public boolean matches( AlgOptRuleCall call ) { @Override public AlgNode convert( AlgNode alg ) { - final RelModify modify = (RelModify) alg; + final RelModify modify = (RelModify) alg; final ModifiableEntity modifiableTable = modify.getEntity().unwrap( ModifiableEntity.class ); if ( modifiableTable == null ) { log.warn( "Returning null during conversion" ); return null; } + if ( modify.getEntity().unwrap( FileTranslatableEntity.class ) == null ) { + log.warn( "Returning null during conversion" ); + return null; + } + final AlgTraitSet traitSet = modify.getTraitSet().replace( convention ); return new FileTableModify( modify.getCluster(), traitSet, - modify.getEntity(), - modify.getCatalogReader(), + modify.getEntity().unwrap( FileTranslatableEntity.class ), AlgOptRule.convert( modify.getInput(), traitSet ), modify.getOperation(), modify.getUpdateColumnList(), diff --git a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/algebra/FileScan.java b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/algebra/FileScan.java index dc13f25d78..07d4d5d220 100644 --- a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/algebra/FileScan.java +++ b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/algebra/FileScan.java @@ -25,9 +25,9 @@ import org.polypheny.db.algebra.core.relational.RelScan; import 
org.polypheny.db.algebra.metadata.AlgMetadataQuery; import org.polypheny.db.algebra.type.AlgDataType; +import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgOptCost; -import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptPlanner; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.schema.ModelTrait; @@ -38,7 +38,7 @@ public class FileScan extends RelScan implements FileAlg { private final FileTranslatableEntity fileTable; - public FileScan( AlgOptCluster cluster, AlgOptEntity table, FileTranslatableEntity fileTable ) { + public FileScan( AlgOptCluster cluster, CatalogEntity table, FileTranslatableEntity fileTable ) { //convention was: EnumerableConvention.INSTANCE super( cluster, cluster.traitSetOf( fileTable.getFileSchema().getConvention() ).replace( ModelTrait.RELATIONAL ), table ); this.fileTable = fileTable; @@ -47,13 +47,13 @@ public FileScan( AlgOptCluster cluster, AlgOptEntity table, FileTranslatableEnti @Override public AlgNode copy( AlgTraitSet traitSet, List inputs ) { - return new FileScan( getCluster(), table, fileTable ); + return new FileScan( getCluster(), entity, fileTable ); } @Override public AlgDataType deriveRowType() { - return fileTable.getRowType( getCluster().getTypeFactory() ); + return fileTable.getRowType(); } diff --git a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/algebra/FileTableModify.java b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/algebra/FileTableModify.java index d422474928..8ca438490f 100644 --- a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/algebra/FileTableModify.java +++ b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/algebra/FileTableModify.java @@ -26,6 +26,7 @@ import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.core.relational.RelModify; import org.polypheny.db.algebra.metadata.AlgMetadataQuery; +import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgOptCost; import org.polypheny.db.plan.AlgOptEntity; @@ -39,10 +40,10 @@ import org.polypheny.db.type.PolyType; -public class FileTableModify extends RelModify implements FileAlg { +public class FileTableModify extends RelModify implements FileAlg { - public FileTableModify( AlgOptCluster cluster, AlgTraitSet traits, AlgOptEntity table, CatalogReader catalogReader, AlgNode child, Operation operation, List updateColumnList, List sourceExpressionList, boolean flattened ) { - super( cluster, traits, table, catalogReader, child, operation, updateColumnList, sourceExpressionList, flattened ); + public FileTableModify( AlgOptCluster cluster, AlgTraitSet traits, FileTranslatableEntity table, AlgNode child, Operation operation, List updateColumnList, List sourceExpressionList, boolean flattened ) { + super( cluster, traits, table, child, operation, updateColumnList, sourceExpressionList, flattened ); } @@ -57,8 +58,7 @@ public AlgNode copy( AlgTraitSet traitSet, List inputs ) { return new FileTableModify( getCluster(), traitSet, - getEntity(), - getCatalogReader(), + entity, AbstractAlgNode.sole( inputs ), getOperation(), getUpdateColumnList(), @@ -77,8 +77,8 @@ public void register( AlgOptPlanner planner ) { public void implement( final FileImplementor implementor ) { setOperation( implementor );//do it first, so children know that we have an insert/update/delete implementor.visitChild( 0, getInput() ); - 
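The FileTableModifyRule.convert() hunk shown earlier bails out with null whenever the target entity cannot be unwrapped to a FileTranslatableEntity, which signals to the planner that the rule does not apply. A generic sketch of that unwrap-guard pattern, with unwrap() mocked as a plain class check (the real method lives on the catalog entity):

import java.util.Optional;

public class UnwrapGuardSketch {

    // Mocked unwrap: succeeds only when the entity is of the requested type.
    static <T> Optional<T> unwrap( Object entity, Class<T> clazz ) {
        return clazz.isInstance( entity ) ? Optional.of( clazz.cast( entity ) ) : Optional.empty();
    }

    static Object convert( Object entity ) {
        if ( unwrap( entity, String.class ).isEmpty() ) {
            return null; // rule does not match; the planner tries other rules
        }
        return entity; // a real rule would build the converted node here
    }
}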
FileTranslatableEntity fileTable = (FileTranslatableEntity) getEntity().getEntity(); - implementor.setFileTable( fileTable ); + + implementor.setFileTable( entity ); if ( getOperation() == Operation.UPDATE ) { if ( getSourceExpressionList() != null ) { if ( implementor.getUpdates() == null ) { @@ -89,9 +89,9 @@ public void implement( final FileImplementor implementor ) { int i = 0; for ( RexNode src : getSourceExpressionList() ) { if ( src instanceof RexLiteral ) { - values.add( new Value( implementor.getFileTable().getColumnIdMap().get( getUpdateColumnList().get( i ) ).intValue(), ((RexLiteral) src).getValueForFileCondition(), false ) ); + values.add( new Value( implementor.getFileTable().getColumnNamesIds().get( getUpdateColumnList().get( i ) ).intValue(), ((RexLiteral) src).getValueForFileCondition(), false ) ); } else if ( src instanceof RexDynamicParam ) { - values.add( new Value( implementor.getFileTable().getColumnIdMap().get( getUpdateColumnList().get( i ) ).intValue(), ((RexDynamicParam) src).getIndex(), true ) ); + values.add( new Value( implementor.getFileTable().getColumnNamesIds().get( getUpdateColumnList().get( i ) ).intValue(), ((RexDynamicParam) src).getIndex(), true ) ); } else if ( src instanceof RexCall && src.getType().getPolyType() == PolyType.ARRAY ) { values.add( Value.fromArrayRexCall( (RexCall) src ) ); } else { diff --git a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/algebra/FileToEnumerableConverter.java b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/algebra/FileToEnumerableConverter.java index 2ddf88f59a..8ae33d9f7f 100644 --- a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/algebra/FileToEnumerableConverter.java +++ b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/algebra/FileToEnumerableConverter.java @@ -83,9 +83,9 @@ public Result implement( EnumerableAlgImplementor implementor, Prefer pref ) { ArrayList columnIds = new ArrayList<>(); ArrayList columnTypes = new ArrayList<>(); - for ( String colName : fileImplementor.getColumnNames() ) { - columnIds.add( Expressions.constant( fileImplementor.getFileTable().getColumnIdMap().get( colName ), Long.class ) ); - columnTypes.add( Expressions.constant( fileImplementor.getFileTable().getColumnTypeMap().get( colName ), PolyType.class ) ); + for ( long id : fileImplementor.getFileTable().columnIds ) { + columnIds.add( Expressions.constant( id, Long.class ) ); + columnTypes.add( Expressions.constant( fileImplementor.getFileTable().getColumnTypeMap().get( id ), PolyType.class ) ); } Expression _insertValues = Expressions.constant( null ); @@ -120,7 +120,7 @@ public Result implement( EnumerableAlgImplementor implementor, Prefer pref ) { enumeratorMethod, Expressions.constant( fileImplementor.getOperation() ), Expressions.constant( fileImplementor.getFileTable().getAdapterId() ), - Expressions.constant( fileImplementor.getFileTable().getPartitionId() ), + Expressions.constant( fileImplementor.getFileTable().allocation.id ), DataContext.ROOT, Expressions.constant( fileSchema.getRootDir().getAbsolutePath() ), Expressions.newArrayInit( Long.class, columnIds.toArray( new Expression[0] ) ), @@ -137,7 +137,7 @@ public Result implement( EnumerableAlgImplementor implementor, Prefer pref ) { FileMethod.EXECUTE_MODIFY.method, Expressions.constant( fileImplementor.getOperation() ), Expressions.constant( fileImplementor.getFileTable().getAdapterId() ), - Expressions.constant( fileImplementor.getFileTable().getPartitionId() ), + Expressions.constant( 
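In the implement() hunk above, update targets are resolved through the table's name-to-id map (getColumnNamesIds(), formerly getColumnIdMap()), and each source expression becomes either a fixed value or a dynamic-parameter placeholder. A simplified sketch, assuming one source expression per update column, with a Long standing in for RexDynamicParam and everything else treated as a literal:

import java.util.ArrayList;
import java.util.List;
import java.util.Map;

public class UpdateMappingSketch {

    // Simplified stand-in for the file adapter's Value class.
    record Value( int columnId, Object payload, boolean isDynamic ) {}

    static List<Value> mapUpdates( List<String> updateColumns, List<Object> sources, Map<String, Long> columnNamesIds ) {
        List<Value> values = new ArrayList<>();
        int i = 0;
        for ( Object src : sources ) {
            // Look up the target column id by its update-column name.
            int columnId = columnNamesIds.get( updateColumns.get( i ) ).intValue();
            if ( src instanceof Long dynamicIndex ) { // stands in for RexDynamicParam
                values.add( new Value( columnId, dynamicIndex, true ) );
            } else {                                  // stands in for RexLiteral
                values.add( new Value( columnId, src, false ) );
            }
            i++;
        }
        return values;
    }
}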
fileImplementor.getFileTable().allocation.id ), DataContext.ROOT, Expressions.constant( fileSchema.getRootDir().getAbsolutePath() ), Expressions.newArrayInit( Long.class, columnIds.toArray( new Expression[0] ) ), diff --git a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/source/Qfs.java b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/source/Qfs.java index ebea9ee43d..ba9daf10e1 100644 --- a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/source/Qfs.java +++ b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/source/Qfs.java @@ -36,6 +36,7 @@ import org.polypheny.db.adapter.Adapter.AdapterSettingString; import org.polypheny.db.adapter.DataSource; import org.polypheny.db.adapter.DeployMode; +import org.polypheny.db.catalog.Snapshot; import org.polypheny.db.catalog.entity.allocation.AllocationTable; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.entity.physical.PhysicalTable; @@ -45,7 +46,6 @@ import org.polypheny.db.information.InformationText; import org.polypheny.db.prepare.Context; import org.polypheny.db.schema.Namespace; -import org.polypheny.db.schema.SchemaPlus; import org.polypheny.db.transaction.PolyXid; import org.polypheny.db.type.PolyType; import org.polypheny.db.util.PolyphenyHomeDirManager; @@ -83,14 +83,14 @@ private void init( final Map settings ) { @Override - public void createNewSchema( SchemaPlus rootSchema, String name, Long id ) { - currentSchema = new QfsSchema( id, rootSchema, name, this ); + public void createNewSchema( Snapshot snapshot, String name, long id ) { + currentSchema = new QfsSchema( id, snapshot, name, this ); } @Override - public PhysicalTable createTableSchema( LogicalTable logical, AllocationTable allocationTable ) { - return currentSchema.createFileTable( combinedTable, columnPlacementsOnStore, partitionPlacement ); + public PhysicalTable createAdapterTable( LogicalTable logical, AllocationTable allocation, PhysicalTable physicalTable ) { + return currentSchema.createFileTable( logical, allocation ); } diff --git a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/source/QfsSchema.java b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/source/QfsSchema.java index 374af574d2..25ceca171d 100644 --- a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/source/QfsSchema.java +++ b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/source/QfsSchema.java @@ -41,10 +41,12 @@ import org.polypheny.db.algebra.type.AlgDataTypeSystem; import org.polypheny.db.algebra.type.AlgProtoDataType; import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.Snapshot; import org.polypheny.db.catalog.entity.CatalogColumn; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; import org.polypheny.db.catalog.entity.CatalogPrimaryKey; +import org.polypheny.db.catalog.entity.allocation.AllocationTable; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.schema.Entity; import org.polypheny.db.schema.Namespace.Schema; @@ -66,11 +68,11 @@ public class QfsSchema extends AbstractNamespace implements FileSchema, Schema { private final FileConvention convention; - public QfsSchema( long id, SchemaPlus parentSchema, String schemaName, Qfs source ) { + public QfsSchema( long id, Snapshot snapshot, String schemaName, Qfs source ) { super( id ); this.schemaName = schemaName; this.source = source; - 
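The converter above folds the table's column ids into linq4j expression trees so that the generated code receives them as a Long[]. Expressions.constant and Expressions.newArrayInit are the real Calcite linq4j calls; the surrounding class is a stand-alone sketch:

import java.util.ArrayList;
import java.util.List;
import org.apache.calcite.linq4j.tree.Expression;
import org.apache.calcite.linq4j.tree.Expressions;

public class ExpressionArraySketch {

    static Expression columnIdArray( List<Long> columnIds ) {
        List<Expression> constants = new ArrayList<>();
        for ( long id : columnIds ) {
            // One constant expression per column id, typed as Long.
            constants.add( Expressions.constant( id, Long.class ) );
        }
        // Equivalent generated code fragment: new Long[]{ 1L, 2L, ... }
        return Expressions.newArrayInit( Long.class, constants.toArray( new Expression[0] ) );
    }
}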
final Expression expression = Schemas.subSchemaExpression( parentSchema, schemaName, QfsSchema.class ); + final Expression expression = Schemas.subSchemaExpression( snapshot, schemaName, QfsSchema.class ); this.convention = new QfsConvention( schemaName, expression, this ); } @@ -87,63 +89,20 @@ public int getAdapterId() { } - @Override - protected Map getTableMap() { - return new HashMap<>( tableMap ); - } - + public FileTranslatableEntity createFileTable( LogicalTable logicalTable, AllocationTable allocationTable ) { - public Entity createFileTable( LogicalTable catalogTable, List columnPlacementsOnStore, CatalogPartitionPlacement partitionPlacement ) { - final AlgDataTypeFactory typeFactory = new PolyTypeFactoryImpl( AlgDataTypeSystem.DEFAULT ); - final AlgDataTypeFactory.Builder fieldInfo = typeFactory.builder(); - ArrayList columnIds = new ArrayList<>(); - ArrayList columnTypes = new ArrayList<>(); - ArrayList columnNames = new ArrayList<>(); - columnPlacementsOnStore.sort( Comparator.comparingLong( p -> p.columnId ) ); - for ( CatalogColumnPlacement p : columnPlacementsOnStore ) { - CatalogColumn catalogColumn; - catalogColumn = Catalog.getInstance().getColumn( p.columnId ); - if ( p.adapterId == source.getAdapterId() ) { - columnIds.add( p.columnId ); - if ( catalogColumn.collectionsType != null ) { - columnTypes.add( PolyType.ARRAY ); - } else { - columnTypes.add( catalogColumn.type ); - } - columnNames.add( catalogColumn.name ); - - if ( catalogColumn.type.allowsScale() && catalogColumn.length != null && catalogColumn.scale != null ) { - fieldInfo.add( catalogColumn.name, p.physicalColumnName, catalogColumn.type, catalogColumn.length, catalogColumn.scale ) - .nullable( catalogColumn.nullable ); - } else if ( catalogColumn.type.allowsPrec() && catalogColumn.length != null ) { - fieldInfo.add( catalogColumn.name, p.physicalColumnName, catalogColumn.type, catalogColumn.length ) - .nullable( catalogColumn.nullable ); - } else { - fieldInfo.add( catalogColumn.name, p.physicalColumnName, catalogColumn.type ) - .nullable( catalogColumn.nullable ); - } - } - } - AlgProtoDataType protoRowType = AlgDataTypeImpl.proto( fieldInfo.build() ); List pkIds; - if ( catalogTable.primaryKey != null ) { - CatalogPrimaryKey primaryKey = Catalog.getInstance().getPrimaryKey( catalogTable.primaryKey ); + if ( logicalTable.primaryKey != null ) { + CatalogPrimaryKey primaryKey = Catalog.getInstance().getPrimaryKey( logicalTable.primaryKey ); pkIds = primaryKey.columnIds; } else { pkIds = new ArrayList<>(); } FileTranslatableEntity table = new FileTranslatableEntity( this, - catalogTable.name + "_" + partitionPlacement.partitionId, - catalogTable.id, - partitionPlacement.partitionId, - getAdapterId(), - columnIds, - columnTypes, - columnNames, - pkIds, - protoRowType ); - tableMap.put( catalogTable.name + "_" + partitionPlacement.partitionId, table ); + allocationTable, + pkIds ); + tableMap.put( logicalTable.name + "_" + allocationTable.name, table ); return table; } @@ -151,6 +110,7 @@ public Entity createFileTable( LogicalTable catalogTable, List execute( final Operation operation, final Integer adapterId, @@ -164,7 +124,7 @@ public static Enumerable execute( final Condition condition, final Value[] updates ) { dataContext.getStatement().getTransaction().registerInvolvedAdapter( AdapterManager.getInstance().getAdapter( adapterId ) ); - return new AbstractEnumerable() { + return new AbstractEnumerable<>() { @Override public Enumerator enumerator() { return new QfsEnumerator<>( dataContext, path, columnIds, 
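createFileTable() below has shrunk to resolving the primary-key column ids (empty when the logical table defines none) and registering the entity under a combined logical/allocation name. A control-flow sketch with placeholder types; only the null-check fallback and the naming scheme mirror the patch:

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Function;

public class CreateFileTableSketch {

    final Map<String, Object> tableMap = new HashMap<>();

    Object createFileTable( String logicalName, Long primaryKeyId, String allocationName,
            Function<Long, List<Long>> pkLookup ) {
        // Fall back to an empty id list when no primary key is defined.
        List<Long> pkIds = primaryKeyId != null ? pkLookup.apply( primaryKeyId ) : new ArrayList<>();
        Object table = pkIds; // stands in for new FileTranslatableEntity( this, allocationTable, pkIds )
        tableMap.put( logicalName + "_" + allocationName, table );
        return table;
    }
}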
projectionMapping, condition ); diff --git a/plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/algebra/GeodeAlg.java b/plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/algebra/GeodeAlg.java index d8b7613466..7f8d539a26 100644 --- a/plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/algebra/GeodeAlg.java +++ b/plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/algebra/GeodeAlg.java @@ -39,6 +39,7 @@ import java.util.List; import java.util.Map; import org.polypheny.db.algebra.AlgNode; +import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.Convention; @@ -74,7 +75,7 @@ class GeodeImplementContext { final Map oqlAggregateFunctions = new LinkedHashMap<>(); Long limitValue; - AlgOptEntity table; + CatalogEntity table; GeodeEntity geodeTable; diff --git a/plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/algebra/GeodeEntity.java b/plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/algebra/GeodeEntity.java index acbeacb56b..4f1abc4311 100644 --- a/plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/algebra/GeodeEntity.java +++ b/plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/algebra/GeodeEntity.java @@ -52,20 +52,20 @@ import org.polypheny.db.adapter.DataContext; import org.polypheny.db.adapter.geode.util.GeodeUtils; import org.polypheny.db.adapter.geode.util.JavaTypeFactoryExtImpl; -import org.polypheny.db.adapter.java.AbstractQueryableEntity; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; import org.polypheny.db.algebra.type.AlgDataTypeImpl; import org.polypheny.db.algebra.type.AlgProtoDataType; +import org.polypheny.db.catalog.Snapshot; +import org.polypheny.db.catalog.entity.allocation.AllocationTable; +import org.polypheny.db.catalog.entity.physical.PhysicalTable; +import org.polypheny.db.catalog.refactor.QueryableEntity; +import org.polypheny.db.catalog.refactor.TranslatableEntity; import org.polypheny.db.plan.AlgOptCluster; -import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptEntity.ToAlgContext; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.runtime.Hook; -import org.polypheny.db.schema.PolyphenyDbSchema; -import org.polypheny.db.schema.SchemaPlus; -import org.polypheny.db.schema.TranslatableEntity; import org.polypheny.db.schema.impl.AbstractTableQueryable; import org.polypheny.db.type.PolyType; import org.polypheny.db.util.Util; @@ -75,14 +75,14 @@ * Table based on a Geode Region */ @Slf4j -public class GeodeEntity extends AbstractQueryableEntity implements TranslatableEntity { +public class GeodeEntity extends PhysicalTable implements TranslatableEntity, QueryableEntity { private final String regionName; private final AlgDataType rowType; - GeodeEntity( Region region, Long id, Long partitionId, Long adapterId ) { - super( Object[].class, id, partitionId, adapterId ); + GeodeEntity( Region region, AllocationTable allocation ) { + super( allocation ); this.regionName = region.getName(); this.rowType = GeodeUtils.autodetectRelTypeFromRegion( region ); } @@ -216,21 +216,15 @@ public Enumerator enumerator() { @Override - public Queryable asQueryable( DataContext dataContext, PolyphenyDbSchema schema, String tableName ) { - return new GeodeQueryable<>( dataContext, schema, this, tableName ); + public Queryable asQueryable( DataContext 
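Qfs.execute() above returns an AbstractEnumerable whose diamond operator now infers the element type. A runnable sketch of the same pattern, delegating to a plain list enumerator instead of the adapter's QfsEnumerator:

import java.util.List;
import org.apache.calcite.linq4j.AbstractEnumerable;
import org.apache.calcite.linq4j.Enumerable;
import org.apache.calcite.linq4j.Enumerator;
import org.apache.calcite.linq4j.Linq4j;

public class EnumerableSketch {

    static Enumerable<Object[]> rows( List<Object[]> rows ) {
        // Diamond on the anonymous class (Java 9+); each consumer gets a fresh cursor.
        return new AbstractEnumerable<>() {
            @Override
            public Enumerator<Object[]> enumerator() {
                return Linq4j.asEnumerable( rows ).enumerator();
            }
        };
    }
}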
dataContext, Snapshot snapshot, long id ) { + return new GeodeQueryable<>( dataContext, snapshot, this ); } @Override - public AlgNode toAlg( ToAlgContext context, AlgOptEntity algOptEntity, AlgTraitSet traitSet ) { + public AlgNode toAlg( ToAlgContext context, AlgTraitSet traitSet ) { final AlgOptCluster cluster = context.getCluster(); - return new GeodeScan( cluster, cluster.traitSetOf( GeodeAlg.CONVENTION ), algOptEntity, this, null ); - } - - - @Override - public AlgDataType getRowType( AlgDataTypeFactory typeFactory ) { - return rowType; + return new GeodeScan( cluster, cluster.traitSetOf( GeodeAlg.CONVENTION ), this, this, null ); } @@ -239,10 +233,10 @@ public AlgDataType getRowType( AlgDataTypeFactory typeFactory ) { * * @param type */ - public static class GeodeQueryable extends AbstractTableQueryable { + public static class GeodeQueryable extends AbstractTableQueryable { - public GeodeQueryable( DataContext dataContext, SchemaPlus schema, GeodeEntity table, String tableName ) { - super( dataContext, schema, table, tableName ); + public GeodeQueryable( DataContext dataContext, Snapshot snapshot, GeodeEntity table ) { + super( dataContext, snapshot, table ); } @@ -254,12 +248,13 @@ public Enumerator enumerator() { private GeodeEntity getTable() { - return (GeodeEntity) table; + return table; } private GemFireCache getClientCache() { - return schema.unwrap( GeodeSchema.class ).cache; + return null; + //return schema.unwrap( GeodeSchema.class ).cache; } diff --git a/plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/algebra/GeodeScan.java b/plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/algebra/GeodeScan.java index 8da1f05317..2c455d97d5 100644 --- a/plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/algebra/GeodeScan.java +++ b/plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/algebra/GeodeScan.java @@ -38,6 +38,7 @@ import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.core.relational.RelScan; import org.polypheny.db.algebra.type.AlgDataType; +import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptPlanner; @@ -63,7 +64,7 @@ public class GeodeScan extends RelScan implements GeodeAlg { * @param geodeTable Geode table * @param projectRowType Fields and types to project; null to project raw row */ - GeodeScan( AlgOptCluster cluster, AlgTraitSet traitSet, AlgOptEntity table, GeodeEntity geodeTable, AlgDataType projectRowType ) { + GeodeScan( AlgOptCluster cluster, AlgTraitSet traitSet, CatalogEntity table, GeodeEntity geodeTable, AlgDataType projectRowType ) { super( cluster, traitSet, table ); this.geodeTable = geodeTable; this.projectRowType = projectRowType; @@ -99,7 +100,7 @@ public void register( AlgOptPlanner planner ) { public void implement( GeodeImplementContext geodeImplementContext ) { // Note: Scan is the leaf and we do NOT visit its inputs geodeImplementContext.geodeTable = geodeTable; - geodeImplementContext.table = table; + geodeImplementContext.table = entity; } } diff --git a/plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/algebra/GeodeSchema.java b/plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/algebra/GeodeSchema.java index ed9a78b588..a3e24f05d2 100644 --- a/plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/algebra/GeodeSchema.java +++ 
b/plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/algebra/GeodeSchema.java @@ -42,6 +42,7 @@ import org.apache.geode.cache.GemFireCache; import org.apache.geode.cache.Region; import org.polypheny.db.adapter.geode.util.GeodeUtils; +import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.schema.Entity; import org.polypheny.db.schema.Namespace.Schema; import org.polypheny.db.schema.impl.AbstractNamespace; @@ -54,7 +55,7 @@ public class GeodeSchema extends AbstractNamespace implements Schema { final GemFireCache cache; private final List regionNames; - private ImmutableMap tableMap; + private ImmutableMap tableMap; GeodeSchema( long id, String locatorHost, int locatorPort, Iterable regionNames, String pdxAutoSerializerPackageExp ) { @@ -70,12 +71,12 @@ public class GeodeSchema extends AbstractNamespace implements Schema { @Override - protected Map getTableMap() { + protected Map getTables() { if ( tableMap == null ) { - final ImmutableMap.Builder builder = ImmutableMap.builder(); + final ImmutableMap.Builder builder = ImmutableMap.builder(); for ( String regionName : regionNames ) { Region region = GeodeUtils.createRegion( cache, regionName ); - Entity entity = new GeodeEntity( region, null, null, null ); + CatalogEntity entity = new GeodeEntity( region, null ); builder.put( regionName, entity ); } tableMap = builder.build(); diff --git a/plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/algebra/GeodeToEnumerableConverter.java b/plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/algebra/GeodeToEnumerableConverter.java index af015b9381..c564d5aef8 100644 --- a/plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/algebra/GeodeToEnumerableConverter.java +++ b/plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/algebra/GeodeToEnumerableConverter.java @@ -126,7 +126,7 @@ public int size() { // Expression meta-program for calling the GeodeTable.GeodeQueryable#query method form the generated code final BlockBuilder blockBuilder = new BlockBuilder().append( Expressions.call( - geodeImplementContext.table.getExpression( GeodeEntity.GeodeQueryable.class ), + geodeImplementContext.table.asExpression( GeodeEntity.GeodeQueryable.class ), GEODE_QUERY_METHOD, // fields constantArrayList( Pair.zip( geodeFieldNames( rowType ), physFieldClasses ), Pair.class ), diff --git a/plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/simple/GeodeSimpleScannableEntity.java b/plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/simple/GeodeSimpleScannableEntity.java index d2650db1f0..f21cc2e1b0 100644 --- a/plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/simple/GeodeSimpleScannableEntity.java +++ b/plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/simple/GeodeSimpleScannableEntity.java @@ -43,22 +43,23 @@ import org.polypheny.db.adapter.DataContext; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; -import org.polypheny.db.schema.ScannableEntity; -import org.polypheny.db.schema.impl.AbstractEntity; +import org.polypheny.db.catalog.entity.allocation.AllocationTable; +import org.polypheny.db.catalog.entity.physical.PhysicalTable; +import org.polypheny.db.catalog.refactor.ScannableEntity; /** * Geode Simple Scannable Table Abstraction */ -public class GeodeSimpleScannableEntity extends AbstractEntity implements ScannableEntity { +public class GeodeSimpleScannableEntity extends PhysicalTable implements 
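GeodeSchema.getTables() (renamed from getTableMap()) builds its entity map lazily and caches it. A sketch of that lazy ImmutableMap construction, with Object standing in for the Geode entities:

import com.google.common.collect.ImmutableMap;
import java.util.List;
import java.util.Map;

public class LazyTableMapSketch {

    private final List<String> regionNames;
    private Map<String, Object> tableMap;

    LazyTableMapSketch( List<String> regionNames ) {
        this.regionNames = regionNames;
    }

    Map<String, Object> getTables() {
        if ( tableMap == null ) { // build once, reuse afterwards
            ImmutableMap.Builder<String, Object> builder = ImmutableMap.builder();
            for ( String regionName : regionNames ) {
                builder.put( regionName, new Object() ); // stands in for new GeodeEntity( region, ... )
            }
            tableMap = builder.build();
        }
        return tableMap;
    }
}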
ScannableEntity { private final AlgDataType algDataType; private String regionName; private ClientCache clientCache; - public GeodeSimpleScannableEntity( String regionName, AlgDataType algDataType, ClientCache clientCache ) { - super( null, null, null ); + public GeodeSimpleScannableEntity( String regionName, AlgDataType algDataType, ClientCache clientCache, AllocationTable allocationTable ) { + super( allocationTable ); this.regionName = regionName; this.clientCache = clientCache; this.algDataType = algDataType; @@ -71,18 +72,13 @@ public String toString() { } - @Override - public AlgDataType getRowType( AlgDataTypeFactory typeFactory ) { - return algDataType; - } - @Override public Enumerable scan( DataContext root ) { - return new AbstractEnumerable() { + return new AbstractEnumerable<>() { @Override public Enumerator enumerator() { - return new GeodeSimpleEnumerator( clientCache, regionName ) { + return new GeodeSimpleEnumerator<>( clientCache, regionName ) { @Override public Object[] convert( Object obj ) { Object values = convertToRowValues( algDataType.getFieldList(), obj ); diff --git a/plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/simple/GeodeSimpleSchema.java b/plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/simple/GeodeSimpleSchema.java index 0d038bbd61..447c787584 100644 --- a/plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/simple/GeodeSimpleSchema.java +++ b/plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/simple/GeodeSimpleSchema.java @@ -39,6 +39,7 @@ import org.apache.geode.cache.Region; import org.apache.geode.cache.client.ClientCache; import org.polypheny.db.adapter.geode.util.GeodeUtils; +import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.schema.Entity; import org.polypheny.db.schema.Namespace.Schema; import org.polypheny.db.schema.impl.AbstractNamespace; @@ -54,7 +55,7 @@ public class GeodeSimpleSchema extends AbstractNamespace implements Schema { private String[] regionNames; private String pdxAutoSerializerPackageExp; private ClientCache clientCache; - private ImmutableMap tableMap; + private ImmutableMap tableMap; public GeodeSimpleSchema( long id, String locatorHost, int locatorPort, String[] regionNames, String pdxAutoSerializerPackageExp ) { @@ -69,12 +70,12 @@ public GeodeSimpleSchema( long id, String locatorHost, int locatorPort, String[] @Override - protected Map getTableMap() { + protected Map getTables() { if ( tableMap == null ) { - final ImmutableMap.Builder builder = ImmutableMap.builder(); + final ImmutableMap.Builder builder = ImmutableMap.builder(); for ( String regionName : regionNames ) { Region region = GeodeUtils.createRegion( clientCache, regionName ); - Entity entity = new GeodeSimpleScannableEntity( regionName, GeodeUtils.autodetectRelTypeFromRegion( region ), clientCache ); + CatalogEntity entity = new GeodeSimpleScannableEntity( regionName, GeodeUtils.autodetectRelTypeFromRegion( region ), clientCache, null ); builder.put( regionName, entity ); } tableMap = builder.build(); diff --git a/plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/util/GeodeUtils.java b/plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/util/GeodeUtils.java index 504f37856c..62a8b1d2c6 100644 --- a/plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/util/GeodeUtils.java +++ b/plugins/geode-adapter/src/main/java/org/polypheny/db/adapter/geode/util/GeodeUtils.java @@ -70,7 +70,7 @@ public class GeodeUtils { /** * Cache for 
the client proxy regions created in the current ClientCache. */ - private static final Map REGION_MAP = new ConcurrentHashMap<>(); + private static final Map> REGION_MAP = new ConcurrentHashMap<>(); private static String currentLocatorHost = ""; private static int currentLocatorPort = -1; @@ -133,10 +133,10 @@ public static synchronized void closeClientCache() { * @param regionName Name of the region to create proxy for. * @return Returns a Region proxy to a remote (on the Server) regions. */ - public static synchronized Region createRegion( GemFireCache cache, String regionName ) { + public static synchronized Region createRegion( GemFireCache cache, String regionName ) { Objects.requireNonNull( cache, "cache" ); Objects.requireNonNull( regionName, "regionName" ); - Region region = REGION_MAP.get( regionName ); + Region region = REGION_MAP.get( regionName ); if ( region == null ) { try { region = ((ClientCache) cache).createClientRegionFactory( ClientRegionShortcut.PROXY ).create( regionName ); diff --git a/plugins/hsqldb-adapter/src/main/java/org/polypheny/db/hsqldb/stores/HsqldbStore.java b/plugins/hsqldb-adapter/src/main/java/org/polypheny/db/hsqldb/stores/HsqldbStore.java index aaff9c9cf7..dc76026e88 100644 --- a/plugins/hsqldb-adapter/src/main/java/org/polypheny/db/hsqldb/stores/HsqldbStore.java +++ b/plugins/hsqldb-adapter/src/main/java/org/polypheny/db/hsqldb/stores/HsqldbStore.java @@ -30,6 +30,7 @@ import org.polypheny.db.adapter.Adapter.AdapterSettingInteger; import org.polypheny.db.adapter.Adapter.AdapterSettingList; import org.polypheny.db.adapter.DeployMode; +import org.polypheny.db.adapter.jdbc.JdbcEntity; import org.polypheny.db.adapter.jdbc.connection.ConnectionFactory; import org.polypheny.db.adapter.jdbc.connection.ConnectionHandlerException; import org.polypheny.db.adapter.jdbc.connection.TransactionalConnectionFactory; @@ -106,8 +107,8 @@ protected ConnectionFactory deployEmbedded() { @Override - public PhysicalTable createTableSchema( LogicalTable logical, AllocationTable allocationTable ) { - return currentJdbcSchema.createJdbcTable( catalogTable, columnPlacementsOnStore, partitionPlacement ); + public JdbcEntity createAdapterTable( LogicalTable logicalTable, AllocationTable allocationTable, PhysicalTable physicalTable ) { + return currentJdbcSchema.createJdbcTable( logicalTable, allocationTable, physicalTable ); } diff --git a/plugins/html-adapter/src/main/java/org/polypheny/db/adapter/html/HtmlSchema.java b/plugins/html-adapter/src/main/java/org/polypheny/db/adapter/html/HtmlSchema.java index 666cb8e1fd..bdeda19086 100644 --- a/plugins/html-adapter/src/main/java/org/polypheny/db/adapter/html/HtmlSchema.java +++ b/plugins/html-adapter/src/main/java/org/polypheny/db/adapter/html/HtmlSchema.java @@ -93,7 +93,7 @@ private static String trimOrNull( String s, String suffix ) { @Override - protected Map getTableMap() { + protected Map getTables() { final ImmutableMap.Builder builder = ImmutableMap.builder(); for ( Map tableDef : this.tables ) { diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcEntity.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcEntity.java index 2dabf4a89b..ef497cbbd5 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcEntity.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcEntity.java @@ -38,23 +38,29 @@ import java.util.Arrays; import java.util.Collections; import java.util.List; -import 
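The REGION_MAP hunk above only adds the missing generics (Map<String, Region<?, ?>>); the cache keeps its create-once-per-name semantics. A compilable sketch with Region mocked as a local interface, using computeIfAbsent in place of the original get-then-put pair:

import java.util.Map;
import java.util.Objects;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Function;

public class RegionCacheSketch {

    // Mocked so the sketch compiles without the Geode client on the classpath.
    interface Region<K, V> { String getName(); }

    private static final Map<String, Region<?, ?>> REGION_MAP = new ConcurrentHashMap<>();

    static synchronized Region<?, ?> createRegion( String regionName, Function<String, Region<?, ?>> factory ) {
        Objects.requireNonNull( regionName, "regionName" );
        // computeIfAbsent preserves the create-once semantics of the original lookup.
        return REGION_MAP.computeIfAbsent( regionName, factory );
    }
}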
java.util.Objects; +import java.util.stream.Collectors; +import lombok.NonNull; import org.apache.calcite.avatica.ColumnMetaData; +import org.apache.calcite.avatica.ColumnMetaData.Rep; import org.apache.calcite.linq4j.Enumerable; import org.apache.calcite.linq4j.Enumerator; import org.apache.calcite.linq4j.Queryable; import org.apache.calcite.linq4j.tree.Expressions; import org.polypheny.db.adapter.DataContext; -import org.polypheny.db.adapter.java.AbstractQueryableEntity; import org.polypheny.db.adapter.java.JavaTypeFactory; import org.polypheny.db.algebra.AlgNode; -import org.polypheny.db.algebra.core.relational.RelModify; import org.polypheny.db.algebra.core.common.Modify.Operation; +import org.polypheny.db.algebra.core.relational.RelModify; import org.polypheny.db.algebra.logical.relational.LogicalRelModify; import org.polypheny.db.algebra.operators.OperatorName; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; -import org.polypheny.db.algebra.type.AlgProtoDataType; +import org.polypheny.db.catalog.entity.allocation.AllocationTable; +import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.catalog.entity.physical.PhysicalTable; +import org.polypheny.db.catalog.refactor.ModifiableEntity; +import org.polypheny.db.catalog.refactor.ScannableEntity; +import org.polypheny.db.catalog.refactor.TranslatableEntity; import org.polypheny.db.languages.OperatorRegistry; import org.polypheny.db.languages.ParserPos; import org.polypheny.db.plan.AlgOptCluster; @@ -64,12 +70,9 @@ import org.polypheny.db.plan.Convention; import org.polypheny.db.prepare.Prepare.CatalogReader; import org.polypheny.db.rex.RexNode; -import org.polypheny.db.schema.ModifiableEntity; import org.polypheny.db.schema.PolyphenyDbSchema; -import org.polypheny.db.schema.ScannableEntity; import org.polypheny.db.schema.SchemaPlus; import org.polypheny.db.schema.TableType; -import org.polypheny.db.schema.TranslatableEntity; import org.polypheny.db.schema.impl.AbstractTableQueryable; import org.polypheny.db.sql.language.SqlBasicCall; import org.polypheny.db.sql.language.SqlIdentifier; @@ -90,80 +93,56 @@ * applying Queryable operators such as {@link org.apache.calcite.linq4j.Queryable#where(org.apache.calcite.linq4j.function.Predicate2)}. * The resulting queryable can then be converted to a SQL query, which can be executed efficiently on the JDBC server. 
*/ -public class JdbcEntity extends AbstractQueryableEntity implements TranslatableEntity, ScannableEntity, ModifiableEntity { +public class JdbcEntity extends PhysicalTable implements TranslatableEntity, ScannableEntity, ModifiableEntity { - private final AlgProtoDataType protoRowType; + private final AllocationTable allocation; + private final LogicalTable logical; + private final PhysicalTable physical; private JdbcSchema jdbcSchema; - private final String physicalSchemaName; - private final String physicalTableName; - private final List physicalColumnNames; - - private final List logicalColumnNames; - private final TableType jdbcTableType; public JdbcEntity( JdbcSchema jdbcSchema, - String logicalSchemaName, - String logicalTableName, - List logicalColumnNames, - TableType jdbcTableType, - AlgProtoDataType protoRowType, - String physicalSchemaName, - String physicalTableName, - List physicalColumnNames, - long tableId, - long partitionId, - long adapterId ) { - super( Object[].class, tableId, partitionId, adapterId ); + LogicalTable logicalTable, + AllocationTable allocationTable, + PhysicalTable physicalTable, + @NonNull TableType jdbcTableType ) { + super( physicalTable ); + this.logical = logicalTable; + this.allocation = allocationTable; + this.physical = physicalTable; this.jdbcSchema = jdbcSchema; - this.logicalColumnNames = logicalColumnNames; - this.physicalSchemaName = physicalSchemaName; - this.physicalTableName = physicalTableName; - this.physicalColumnNames = physicalColumnNames; - this.jdbcTableType = Objects.requireNonNull( jdbcTableType ); - this.protoRowType = protoRowType; + this.jdbcTableType = jdbcTableType; } public String toString() { - return "JdbcTable {" + physicalSchemaName + "." + physicalTableName + "}"; - } - - - @Override - public TableType getJdbcTableType() { - return jdbcTableType; + return "JdbcTable {" + physical.namespaceName + "." 
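The class javadoc above describes assembling queries by chaining Queryable operators such as where(...). A minimal linq4j illustration of that style on an in-memory Enumerable (a real JdbcEntity translates the same operator chain into SQL rather than evaluating it locally):

import java.util.Arrays;
import org.apache.calcite.linq4j.Enumerable;
import org.apache.calcite.linq4j.Linq4j;

public class Linq4jWhereSketch {

    public static void main( String[] args ) {
        Enumerable<String> names = Linq4j.asEnumerable( Arrays.asList( "emps", "depts", "emps_archive" ) );
        // Filter with a Predicate1 lambda, the same operator shape the javadoc refers to.
        Enumerable<String> filtered = names.where( n -> n.startsWith( "emps" ) );
        System.out.println( filtered.toList() ); // [emps, emps_archive]
    }
}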
+ physical.name + "}"; } - @Override - public AlgDataType getRowType( AlgDataTypeFactory typeFactory ) { - return protoRowType.apply( typeFactory ); - } - private List> fieldClasses( final JavaTypeFactory typeFactory ) { - final AlgDataType rowType = protoRowType.apply( typeFactory ); - return Lists.transform( rowType.getFieldList(), f -> { + final AlgDataType rowType = getRowType(); + return rowType.getFieldList().stream().map( f -> { final AlgDataType type = f.getType(); final Class clazz = (Class) typeFactory.getJavaClass( type ); - final ColumnMetaData.Rep rep = Util.first( ColumnMetaData.Rep.of( clazz ), ColumnMetaData.Rep.OBJECT ); + final Rep rep = Util.first( Rep.of( clazz ), Rep.OBJECT ); return Pair.of( rep, type.getPolyType().getJdbcOrdinal() ); - } ); + } ).collect( Collectors.toList() ); } SqlString generateSql() { List pcnl = Expressions.list(); - for ( String str : physicalColumnNames ) { - pcnl.add( new SqlIdentifier( Arrays.asList( physicalTableName, str ), ParserPos.ZERO ) ); + for ( String str : physical.columnNames ) { + pcnl.add( new SqlIdentifier( Arrays.asList( physical.name, str ), ParserPos.ZERO ) ); } final SqlNodeList selectList = new SqlNodeList( pcnl, ParserPos.ZERO ); - SqlIdentifier physicalTableName = new SqlIdentifier( Arrays.asList( physicalSchemaName, this.physicalTableName ), ParserPos.ZERO ); + SqlIdentifier physicalTableName = new SqlIdentifier( Arrays.asList( physical.namespaceName, physical.name ), ParserPos.ZERO ); SqlSelect node = new SqlSelect( ParserPos.ZERO, SqlNodeList.EMPTY, @@ -183,7 +162,7 @@ SqlString generateSql() { public SqlIdentifier physicalTableName() { - return new SqlIdentifier( Arrays.asList( physicalSchemaName, physicalTableName ), ParserPos.ZERO ); + return new SqlIdentifier( Arrays.asList( physical.namespaceName, physical.name ), ParserPos.ZERO ); } @@ -275,7 +254,7 @@ public void setSchema( JdbcSchema jdbcSchema ) { * * @param element type */ - private class JdbcTableQueryable extends AbstractTableQueryable { + private class JdbcTableQueryable extends AbstractTableQueryable { JdbcTableQueryable( DataContext dataContext, SchemaPlus schema, String tableName ) { super( dataContext, schema, JdbcEntity.this, tableName ); diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcSchema.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcSchema.java index be72c59cc9..bde8761931 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcSchema.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcSchema.java @@ -55,19 +55,19 @@ import org.polypheny.db.adapter.jdbc.connection.ConnectionHandlerException; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; -import org.polypheny.db.algebra.type.AlgDataTypeImpl; import org.polypheny.db.algebra.type.AlgDataTypeSystem; import org.polypheny.db.algebra.type.AlgProtoDataType; -import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.Snapshot; import org.polypheny.db.catalog.entity.CatalogColumn; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; -import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; +import org.polypheny.db.catalog.entity.CatalogEntity; +import org.polypheny.db.catalog.entity.allocation.AllocationTable; import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.catalog.entity.physical.PhysicalTable; import 
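For orientation, generateSql() above emits a select over the physical schema, table, and column names. This sketch renders the equivalent SQL as plain text; the real method builds SqlIdentifier/SqlSelect nodes instead:

import java.util.List;
import java.util.stream.Collectors;

public class GenerateSqlSketch {

    static String generateSql( String namespaceName, String tableName, List<String> columnNames ) {
        // Table-qualified physical column names, comma separated.
        String selectList = columnNames.stream()
                .map( c -> tableName + "." + c )
                .collect( Collectors.joining( ", " ) );
        return "SELECT " + selectList + " FROM " + namespaceName + "." + tableName;
    }

    public static void main( String[] args ) {
        // Prints: SELECT tab1.col1, tab1.col2 FROM public.tab1
        System.out.println( generateSql( "public", "tab1", List.of( "col1", "col2" ) ) );
    }
}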
org.polypheny.db.schema.Entity; import org.polypheny.db.schema.Function; import org.polypheny.db.schema.Namespace; import org.polypheny.db.schema.Namespace.Schema; -import org.polypheny.db.schema.PolyphenyDbSchema; import org.polypheny.db.schema.SchemaPlus; import org.polypheny.db.schema.SchemaVersion; import org.polypheny.db.schema.Schemas; @@ -144,57 +144,26 @@ public JdbcSchema( public JdbcEntity createJdbcTable( - LogicalTable catalogTable, - List columnPlacementsOnStore, - CatalogPartitionPlacement partitionPlacement ) { - // Temporary type factory, just for the duration of this method. Allowable because we're creating a proto-type, - // not a type; before being used, the proto-type will be copied into a real type factory. - final AlgDataTypeFactory typeFactory = new PolyTypeFactoryImpl( AlgDataTypeSystem.DEFAULT ); - final AlgDataTypeFactory.Builder fieldInfo = typeFactory.builder(); - List logicalColumnNames = new LinkedList<>(); - List physicalColumnNames = new LinkedList<>(); - String physicalSchemaName = null; - - for ( CatalogColumnPlacement placement : columnPlacementsOnStore ) { - CatalogColumn catalogColumn = Catalog.getInstance().getColumn( placement.columnId ); - if ( physicalSchemaName == null ) { - physicalSchemaName = placement.physicalSchemaName; - } - - AlgDataType sqlType = catalogColumn.getAlgDataType( typeFactory ); - fieldInfo.add( catalogColumn.name, placement.physicalColumnName, sqlType ).nullable( catalogColumn.nullable ); - logicalColumnNames.add( catalogColumn.name ); - physicalColumnNames.add( placement.physicalColumnName ); - } - - JdbcEntity table = new JdbcEntity( + LogicalTable logicalTable, + AllocationTable allocationTable, + PhysicalTable physicalTable ) { + return new JdbcEntity( this, - catalogTable.getNamespaceName(), - catalogTable.name, - logicalColumnNames, - TableType.TABLE, - AlgDataTypeImpl.proto( fieldInfo.build() ), - physicalSchemaName, - partitionPlacement.physicalTableName, - physicalColumnNames, - catalogTable.id, - partitionPlacement.partitionId, - adapter.getAdapterId() - ); - tableMap.put( catalogTable.name + "_" + partitionPlacement.partitionId, table ); - physicalToLogicalTableNameMap.put( partitionPlacement.physicalTableName, catalogTable.name ); - return table; + logicalTable, + allocationTable, + physicalTable, + TableType.TABLE ); } public static JdbcSchema create( Long id, - SchemaPlus parentSchema, + Snapshot snapshot, String name, ConnectionFactory connectionFactory, SqlDialect dialect, Adapter adapter ) { - final Expression expression = Schemas.subSchemaExpression( parentSchema, name, JdbcSchema.class ); + final Expression expression = Schemas.subSchemaExpression( snapshot, name, JdbcSchema.class ); final JdbcConvention convention = JdbcConvention.of( dialect, expression, name ); return new JdbcSchema( id, connectionFactory, dialect, convention, adapter ); } @@ -239,8 +208,8 @@ public ConnectionHandler getConnectionHandler( DataContext dataContext ) { @Override - public Expression getExpression( PolyphenyDbSchema parentSchema, String name ) { - return Schemas.subSchemaExpression( parentSchema, name, JdbcSchema.class ); + public Expression getExpression( Snapshot snapshot, String name ) { + return Schemas.subSchemaExpression( snapshot, name, JdbcSchema.class ); } @@ -263,7 +232,7 @@ public final Set getFunctionNames() { @Override - public Entity getEntity( String name ) { + public CatalogEntity getEntity( String name ) { return getTableMap().get( name ); } diff --git 
a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java index 54b3217daa..8e5c6c743e 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java @@ -37,10 +37,10 @@ import org.polypheny.db.adapter.jdbc.connection.ConnectionHandlerException; import org.polypheny.db.adapter.jdbc.connection.TransactionalConnectionFactory; import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.Snapshot; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.plugins.PolyPluginManager; import org.polypheny.db.prepare.Context; -import org.polypheny.db.schema.SchemaPlus; import org.polypheny.db.sql.language.SqlDialect; import org.polypheny.db.transaction.PUID; import org.polypheny.db.transaction.PolyXid; @@ -113,8 +113,8 @@ protected ConnectionFactory createConnectionFactory( final Map s @Override - public void createNewSchema( SchemaPlus rootSchema, String name, Long id ) { - currentJdbcSchema = JdbcSchema.create( id, rootSchema, name, connectionFactory, dialect, this ); + public void createNewSchema( Snapshot snapshot, String name, long id ) { + currentJdbcSchema = JdbcSchema.create( id, snapshot, name, connectionFactory, dialect, this ); } diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/stores/AbstractJdbcStore.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/stores/AbstractJdbcStore.java index 7d08eeb869..7dedbec305 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/stores/AbstractJdbcStore.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/stores/AbstractJdbcStore.java @@ -19,9 +19,9 @@ import java.sql.SQLException; import java.util.ArrayList; -import java.util.LinkedList; import java.util.List; import java.util.Map; +import java.util.stream.Collectors; import lombok.SneakyThrows; import lombok.extern.slf4j.Slf4j; import org.pf4j.ExtensionPoint; @@ -31,16 +31,18 @@ import org.polypheny.db.adapter.jdbc.JdbcUtils; import org.polypheny.db.adapter.jdbc.connection.ConnectionFactory; import org.polypheny.db.adapter.jdbc.connection.ConnectionHandlerException; +import org.polypheny.db.catalog.Snapshot; import org.polypheny.db.catalog.entity.CatalogColumn; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; +import org.polypheny.db.catalog.entity.allocation.AllocationTable; import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.catalog.entity.physical.PhysicalTable; import org.polypheny.db.config.RuntimeConfig; import org.polypheny.db.docker.DockerInstance; import org.polypheny.db.languages.ParserPos; import org.polypheny.db.prepare.Context; import org.polypheny.db.runtime.PolyphenyDbException; -import org.polypheny.db.schema.SchemaPlus; import org.polypheny.db.sql.language.SqlDialect; import org.polypheny.db.sql.language.SqlLiteral; import org.polypheny.db.transaction.PolyXid; @@ -109,8 +111,8 @@ protected void registerJdbcInformation() { @Override - public void createNewSchema( SchemaPlus rootSchema, String name, Long id ) { - currentJdbcSchema = JdbcSchema.create( id, rootSchema, name, 
connectionFactory, dialect, this ); + public void createNewSchema( Snapshot snapshot, String name, long id ) { + currentJdbcSchema = JdbcSchema.create( id, snapshot, name, connectionFactory, dialect, this ); } @@ -123,45 +125,23 @@ public void createUdfs() { @Override - public void createTable( Context context, LogicalTable catalogTable, List partitionIds ) { - List qualifiedNames = new LinkedList<>(); - qualifiedNames.add( catalogTable.getNamespaceName() ); - qualifiedNames.add( catalogTable.name ); + public PhysicalTable createPhysicalTable( Context context, LogicalTable logicalTable, AllocationTable allocationTable ) { + String physicalTableName = getPhysicalTableName( logicalTable.id, allocationTable.id ); - List existingPlacements = catalog.getColumnPlacementsOnAdapterPerTable( getAdapterId(), catalogTable.id ); - - // Remove the unpartitioned table name again, otherwise it would cause, table already exist due to create statement - for ( long partitionId : partitionIds ) { - String physicalTableName = getPhysicalTableName( catalogTable.id, partitionId ); - - if ( log.isDebugEnabled() ) { - log.debug( "[{}] createTable: Qualified names: {}, physicalTableName: {}", getUniqueName(), qualifiedNames, physicalTableName ); - } - StringBuilder query = buildCreateTableQuery( getDefaultPhysicalSchemaName(), physicalTableName, catalogTable ); - if ( RuntimeConfig.DEBUG.getBoolean() ) { - log.info( "{} on store {}", query.toString(), this.getUniqueName() ); - } - executeUpdate( query, context ); - - catalog.updatePartitionPlacementPhysicalNames( - getAdapterId(), - partitionId, - getDefaultPhysicalSchemaName(), - physicalTableName ); - - for ( CatalogColumnPlacement placement : existingPlacements ) { - catalog.updateColumnPlacementPhysicalNames( - getAdapterId(), - placement.columnId, - getDefaultPhysicalSchemaName(), - getPhysicalColumnName( placement.columnId ), - true ); - } + if ( log.isDebugEnabled() ) { + log.debug( "[{}] createPhysicalTable: Qualified names: {}, physicalTableName: {}", getUniqueName(), getDefaultPhysicalSchemaName(), physicalTableName ); } + StringBuilder query = buildCreateTableQuery( getDefaultPhysicalSchemaName(), physicalTableName, allocationTable ); + if ( RuntimeConfig.DEBUG.getBoolean() ) { + log.info( "{} on store {}", query.toString(), this.getUniqueName() ); + } + executeUpdate( query, context ); + + return new PhysicalTable( allocationTable, getDefaultPhysicalSchemaName(), physicalTableName, allocationTable.getColumns().values().stream().map( c -> getPhysicalColumnName( c.id ) ).collect( Collectors.toList() ) ); } - protected StringBuilder buildCreateTableQuery( String schemaName, String physicalTableName, LogicalTable catalogTable ) { + protected StringBuilder buildCreateTableQuery( String schemaName, String physicalTableName, AllocationTable allocationTable ) { StringBuilder builder = new StringBuilder(); builder.append( "CREATE TABLE " ) .append( dialect.quoteIdentifier( schemaName ) ) @@ -169,8 +149,8 @@ protected StringBuilder buildCreateTableQuery( String schemaName, String physica .append( dialect.quoteIdentifier( physicalTableName ) ) .append( " ( " ); boolean first = true; - for ( CatalogColumnPlacement placement : catalog.getColumnPlacementsOnAdapterPerTable( getAdapterId(), catalogTable.id ) ) { - CatalogColumn catalogColumn = catalog.getColumn( placement.columnId ); + for ( CatalogColumnPlacement placement : allocationTable.placements ) { + CatalogColumn catalogColumn = allocationTable.getColumns().get( placement.columnId ); if ( !first ) { 
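createPhysicalTable() below derives one physical name per allocation and assembles the CREATE TABLE statement column by column. A self-contained sketch of that assembly, with quoteIdentifier() as a trivial stand-in for SqlDialect's quoting and the type strings supplied directly:

import java.util.List;

public class CreateTableSketch {

    static String quoteIdentifier( String id ) {
        return "\"" + id + "\""; // real quoting comes from the store's SqlDialect
    }

    static String buildCreateTable( String schema, String table, List<String> columns, List<String> types ) {
        StringBuilder builder = new StringBuilder( "CREATE TABLE " )
                .append( quoteIdentifier( schema ) ).append( "." )
                .append( quoteIdentifier( table ) ).append( " ( " );
        for ( int i = 0; i < columns.size(); i++ ) {
            if ( i > 0 ) {
                builder.append( ", " );
            }
            builder.append( quoteIdentifier( columns.get( i ) ) ).append( " " ).append( types.get( i ) );
        }
        return builder.append( " )" ).toString();
    }

    public static void main( String[] args ) {
        System.out.println( buildCreateTable( "public", "tab10_4",
                List.of( "col1", "col2" ), List.of( "BIGINT", "VARCHAR(255)" ) ) );
    }
}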
builder.append( ", " ); } diff --git a/plugins/jdbc-adapter-framework/src/test/java/org/polypheny/db/adapter/jdbc/rel2sql/AlgToSqlConverterTest.java b/plugins/jdbc-adapter-framework/src/test/java/org/polypheny/db/adapter/jdbc/rel2sql/AlgToSqlConverterTest.java index 4377ba165f..50557a589c 100644 --- a/plugins/jdbc-adapter-framework/src/test/java/org/polypheny/db/adapter/jdbc/rel2sql/AlgToSqlConverterTest.java +++ b/plugins/jdbc-adapter-framework/src/test/java/org/polypheny/db/adapter/jdbc/rel2sql/AlgToSqlConverterTest.java @@ -121,14 +121,14 @@ public class AlgToSqlConverterTest extends SqlLanguageDependent { */ private Sql sql( String sql ) { final SchemaPlus schema = Frameworks - .createRootSchema( true ) + .createSnapshot( true ) .add( "foodmart", new ReflectiveSchema( new FoodmartSchema(), -1 ), NamespaceType.RELATIONAL ); return new Sql( schema, sql, PolyphenyDbSqlDialect.DEFAULT, DEFAULT_REL_CONFIG, ImmutableList.of() ); } private static Planner getPlanner( List traitDefs, ParserConfig parserConfig, SchemaPlus schema, Config sqlToRelConf, Program... programs ) { - final SchemaPlus rootSchema = Frameworks.createRootSchema( false ); + final SchemaPlus rootSchema = Frameworks.createSnapshot( false ); final FrameworkConfig config = Frameworks.newConfigBuilder() .parserConfig( parserConfig ) .defaultSchema( schema ) @@ -178,7 +178,7 @@ private static MysqlSqlDialect mySqlDialect( NullCollation nullCollation ) { */ private static AlgBuilder algBuilder() { // Creates a config based on the "scott" schema. - final SchemaPlus schema = Frameworks.createRootSchema( true ).add( "scott", new ReflectiveSchema( new ScottSchema(), -1 ), NamespaceType.RELATIONAL ); + final SchemaPlus schema = Frameworks.createSnapshot( true ).add( "scott", new ReflectiveSchema( new ScottSchema(), -1 ), NamespaceType.RELATIONAL ); Frameworks.ConfigBuilder configBuilder = Frameworks.newConfigBuilder() .parserConfig( Parser.ParserConfig.DEFAULT ) .defaultSchema( schema ) diff --git a/plugins/jdbc-adapter-framework/src/test/java/org/polypheny/db/adapter/jdbc/rel2sql/PlannerTest.java b/plugins/jdbc-adapter-framework/src/test/java/org/polypheny/db/adapter/jdbc/rel2sql/PlannerTest.java index 44769fd038..2a45b136c2 100644 --- a/plugins/jdbc-adapter-framework/src/test/java/org/polypheny/db/adapter/jdbc/rel2sql/PlannerTest.java +++ b/plugins/jdbc-adapter-framework/src/test/java/org/polypheny/db/adapter/jdbc/rel2sql/PlannerTest.java @@ -108,7 +108,7 @@ private Planner getPlanner( List traitDefs, Program... programs ) { private Planner getPlanner( List traitDefs, ParserConfig parserConfig, Program... 
programs ) { final SchemaPlus schema = Frameworks - .createRootSchema( true ) + .createSnapshot( true ) .add( "hr", new ReflectiveSchema( new HrSchema(), -1 ), NamespaceType.RELATIONAL ); final FrameworkConfig config = Frameworks.newConfigBuilder() diff --git a/plugins/jdbc-adapter-framework/src/test/java/org/polypheny/db/adapter/jdbc/rel2sql/RelToSqlConverterStructsTest.java b/plugins/jdbc-adapter-framework/src/test/java/org/polypheny/db/adapter/jdbc/rel2sql/RelToSqlConverterStructsTest.java index 981e996dba..c8fdb04724 100644 --- a/plugins/jdbc-adapter-framework/src/test/java/org/polypheny/db/adapter/jdbc/rel2sql/RelToSqlConverterStructsTest.java +++ b/plugins/jdbc-adapter-framework/src/test/java/org/polypheny/db/adapter/jdbc/rel2sql/RelToSqlConverterStructsTest.java @@ -47,6 +47,7 @@ import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; import org.polypheny.db.algebra.type.AlgProtoDataType; +import org.polypheny.db.catalog.Snapshot; import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.nodes.Call; import org.polypheny.db.nodes.Node; @@ -54,7 +55,6 @@ import org.polypheny.db.schema.Entity; import org.polypheny.db.schema.Function; import org.polypheny.db.schema.Namespace; -import org.polypheny.db.schema.PolyphenyDbSchema; import org.polypheny.db.schema.SchemaPlus; import org.polypheny.db.schema.SchemaVersion; import org.polypheny.db.schema.Statistic; @@ -119,7 +119,7 @@ public Set getSubNamespaceNames() { @Override - public Expression getExpression( PolyphenyDbSchema parentSchema, String name ) { + public Expression getExpression( Snapshot snapshot, String name ) { return null; } @@ -217,7 +217,7 @@ public AlgDistribution getDistribution() { } }; - private static final SchemaPlus ROOT_SCHEMA = AbstractPolyphenyDbSchema.createRootSchema( "" ).add( "myDb", NAMESPACE, NamespaceType.RELATIONAL ).plus(); + private static final SchemaPlus ROOT_SCHEMA = AbstractPolyphenyDbSchema.createSnapshot( "" ).add( "myDb", NAMESPACE, NamespaceType.RELATIONAL ).plus(); private AlgToSqlConverterTest.Sql sql( String sql ) { diff --git a/plugins/mapdb-catalog/src/main/java/org/polypheny/db/catalog/CatalogImpl.java b/plugins/mapdb-catalog/src/main/java/org/polypheny/db/catalog/CatalogImpl.java index f58f33cf44..4a1d45cefd 100644 --- a/plugins/mapdb-catalog/src/main/java/org/polypheny/db/catalog/CatalogImpl.java +++ b/plugins/mapdb-catalog/src/main/java/org/polypheny/db/catalog/CatalogImpl.java @@ -62,7 +62,7 @@ import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.catalog.entity.CatalogAdapter; import org.polypheny.db.catalog.entity.CatalogAdapter.AdapterType; -import org.polypheny.db.catalog.entity.LogicalCollection; +import org.polypheny.db.catalog.entity.logical.LogicalCollection; import org.polypheny.db.catalog.entity.CatalogCollectionMapping; import org.polypheny.db.catalog.entity.CatalogCollectionPlacement; import org.polypheny.db.catalog.entity.CatalogColumn; @@ -429,11 +429,11 @@ public void restoreColumnPlacements( Transaction transaction ) { // TODO only full placements atm here if ( !restoredTables.containsKey( store.getAdapterId() ) ) { - store.createTable( transaction.createStatement().getPrepareContext(), catalogTable, catalogTable.partitionProperty.partitionIds ); + store.createPhysicalTable( transaction.createStatement().getPrepareContext(), catalogTable, catalogTable.partitionProperty.partitionIds ); restoredTables.put( store.getAdapterId(), Collections.singletonList( catalogTable.id ) ); } else 
if ( !(restoredTables.containsKey( store.getAdapterId() ) && restoredTables.get( store.getAdapterId() ).contains( catalogTable.id )) ) { - store.createTable( transaction.createStatement().getPrepareContext(), catalogTable, catalogTable.partitionProperty.partitionIds ); + store.createPhysicalTable( transaction.createStatement().getPrepareContext(), catalogTable, catalogTable.partitionProperty.partitionIds ); List ids = new ArrayList<>( restoredTables.get( store.getAdapterId() ) ); ids.add( catalogTable.id ); restoredTables.put( store.getAdapterId(), ids ); @@ -449,13 +449,13 @@ public void restoreColumnPlacements( Transaction transaction ) { DataStore store = manager.getStore( p.adapterId ); if ( !restoredTables.containsKey( store.getAdapterId() ) ) { - store.createTable( transaction.createStatement().getPrepareContext(), table, table.partitionProperty.partitionIds ); + store.createPhysicalTable( transaction.createStatement().getPrepareContext(), table, table.partitionProperty.partitionIds ); List ids = new ArrayList<>(); ids.add( table.id ); restoredTables.put( store.getAdapterId(), ids ); } else if ( !(restoredTables.containsKey( store.getAdapterId() ) && restoredTables.get( store.getAdapterId() ).contains( table.id )) ) { - store.createTable( transaction.createStatement().getPrepareContext(), table, table.partitionProperty.partitionIds ); + store.createPhysicalTable( transaction.createStatement().getPrepareContext(), table, table.partitionProperty.partitionIds ); List ids = new ArrayList<>( restoredTables.get( store.getAdapterId() ) ); ids.add( table.id ); restoredTables.put( store.getAdapterId(), ids ); diff --git a/plugins/mapdb-catalog/src/test/java/org/polypheny/db/test/CatalogTest.java b/plugins/mapdb-catalog/src/test/java/org/polypheny/db/test/CatalogTest.java index ccbed9b37e..bdaa153687 100644 --- a/plugins/mapdb-catalog/src/test/java/org/polypheny/db/test/CatalogTest.java +++ b/plugins/mapdb-catalog/src/test/java/org/polypheny/db/test/CatalogTest.java @@ -359,7 +359,7 @@ public void testKey() throws GenericCatalogException { assertTrue( catalog.getPrimaryKey( catalog.getTable( tableId ).primaryKey ).columnIds.contains( columnId1 ) ); //catalog.deletePrimaryKey( tableId ); - //assertNull( catalog.getTable( tableId ).primaryKey ); + //assertNull( catalog.getLogicalTable( tableId ).primaryKey ); catalog.addPrimaryKey( tableId, Arrays.asList( columnId1, columnId2 ) ); diff --git a/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/sources/MonetdbSource.java b/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/sources/MonetdbSource.java index 8fb2ae0e4d..ea46c23335 100644 --- a/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/sources/MonetdbSource.java +++ b/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/sources/MonetdbSource.java @@ -93,7 +93,7 @@ protected ConnectionFactory createConnectionFactory( final Map s @Override - public PhysicalTable createTableSchema( LogicalTable logical, AllocationTable allocationTable ) { - return currentJdbcSchema.createJdbcTable( catalogTable, columnPlacementsOnStore, partitionPlacement ); + public PhysicalTable createAdapterTable( LogicalTable logical, AllocationTable allocationTable, PhysicalTable physicalTable ) { + return currentJdbcSchema.createJdbcTable( logical, allocationTable, physicalTable ); } diff --git a/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/stores/MonetdbStore.java b/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/stores/MonetdbStore.java index 42f8e813b6..255ef45d91 100644 --- 
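The restore logic above keeps a per-adapter list of already re-created tables so that each physical table is created at most once per adapter. A compact sketch of that bookkeeping; the three containsKey/contains branches of the original collapse into one guarded add here:

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class RestoreBookkeepingSketch {

    private final Map<Integer, List<Long>> restoredTables = new HashMap<>();

    boolean needsCreate( int adapterId, long tableId ) {
        List<Long> ids = restoredTables.computeIfAbsent( adapterId, k -> new ArrayList<>() );
        if ( ids.contains( tableId ) ) {
            return false; // already restored on this adapter
        }
        ids.add( tableId ); // record before (or after) invoking createPhysicalTable(...)
        return true;
    }
}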
a/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/stores/MonetdbStore.java +++ b/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/stores/MonetdbStore.java @@ -230,7 +230,7 @@ public void updateColumnType( Context context, CatalogColumnPlacement columnPlac @Override - public PhysicalTable createTableSchema( LogicalTable logical, AllocationTable allocationTable ) { + public PhysicalTable createAdapterTable( LogicalTable logical, AllocationTable allocationTable, PhysicalTable physicalTable ) { return currentJdbcSchema.createJdbcTable( catalogTable, columnPlacementsOnStore, partitionPlacement ); } diff --git a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoAlg.java b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoAlg.java index c58e1417c6..bd3f8b23b4 100644 --- a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoAlg.java +++ b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoAlg.java @@ -54,6 +54,7 @@ import org.polypheny.db.algebra.core.relational.RelScan; import org.polypheny.db.algebra.logical.relational.LogicalProject; import org.polypheny.db.algebra.type.AlgRecordType; +import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.Convention; @@ -92,7 +93,7 @@ class Implementor extends AlgShuttleImpl implements Serializable { public boolean onlyOne = false; public boolean isDocumentUpdate = false; - AlgOptEntity table; + CatalogEntity table; @Setter @Getter public boolean hasProject = false; @@ -144,9 +145,9 @@ public void setStaticRowType( AlgRecordType staticRowType ) { public String getPhysicalName( String name ) { - int index = mongoEntity.getCatalogEntity().unwrap( LogicalTable.class ).getColumnNames().indexOf( name ); + int index = mongoEntity.logical.getColumnNames().indexOf( name ); if ( index != -1 ) { - return MongoStore.getPhysicalColumnName( name, mongoEntity.getCatalogEntity().unwrap( LogicalTable.class ).fieldIds.get( index ) ); + return MongoStore.getPhysicalColumnName( name, mongoEntity.logical.fieldIds.get( index ) ); } throw new RuntimeException( "This column is not part of the table." 
); } diff --git a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoEntity.java b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoEntity.java index 61e920f591..46e5ce4965 100644 --- a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoEntity.java +++ b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoEntity.java @@ -68,34 +68,27 @@ import org.bson.json.JsonWriterSettings; import org.polypheny.db.adapter.AdapterManager; import org.polypheny.db.adapter.DataContext; -import org.polypheny.db.adapter.java.AbstractQueryableEntity; import org.polypheny.db.adapter.mongodb.MongoPlugin.MongoStore; import org.polypheny.db.adapter.mongodb.util.MongoDynamic; import org.polypheny.db.algebra.AlgNode; -import org.polypheny.db.algebra.core.relational.RelModify; +import org.polypheny.db.algebra.core.common.Modify; import org.polypheny.db.algebra.core.common.Modify.Operation; -import org.polypheny.db.algebra.core.document.DocumentModify; -import org.polypheny.db.algebra.logical.document.LogicalDocumentModify; import org.polypheny.db.algebra.logical.relational.LogicalRelModify; -import org.polypheny.db.algebra.type.AlgDataType; -import org.polypheny.db.algebra.type.AlgDataTypeFactory; -import org.polypheny.db.algebra.type.AlgProtoDataType; -import org.polypheny.db.catalog.entity.LogicalCollection; -import org.polypheny.db.catalog.entity.CatalogCollectionPlacement; -import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; +import org.polypheny.db.catalog.Snapshot; +import org.polypheny.db.catalog.entity.CatalogEntity; +import org.polypheny.db.catalog.entity.allocation.AllocationTable; +import org.polypheny.db.catalog.entity.logical.LogicalCollection; import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.catalog.entity.physical.PhysicalTable; +import org.polypheny.db.catalog.refactor.ModifiableEntity; +import org.polypheny.db.catalog.refactor.QueryableEntity; +import org.polypheny.db.catalog.refactor.TranslatableEntity; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptEntity.ToAlgContext; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.plan.Convention; -import org.polypheny.db.prepare.Prepare.CatalogReader; import org.polypheny.db.rex.RexNode; -import org.polypheny.db.schema.ModifiableCollection; -import org.polypheny.db.schema.ModifiableEntity; -import org.polypheny.db.schema.PolyphenyDbSchema; -import org.polypheny.db.schema.SchemaPlus; -import org.polypheny.db.schema.TranslatableEntity; import org.polypheny.db.schema.impl.AbstractTableQueryable; import org.polypheny.db.transaction.PolyXid; import org.polypheny.db.util.BsonUtil; @@ -106,18 +99,14 @@ * Table based on a MongoDB collection. 
*/ @Slf4j -public class MongoEntity extends AbstractQueryableEntity implements TranslatableEntity, ModifiableEntity, ModifiableCollection { +public class MongoEntity extends PhysicalTable implements TranslatableEntity, ModifiableEntity, QueryableEntity { @Getter private final String collectionName; @Getter - private final AlgProtoDataType protoRowType; - @Getter private final MongoSchema mongoSchema; @Getter private final MongoCollection collection; - @Getter - private final LogicalTable catalogTable; @Getter private final LogicalCollection catalogCollection; @@ -126,34 +115,39 @@ public class MongoEntity extends AbstractQueryableEntity implements Translatable @Getter private final long storeId; + public final LogicalTable logical; + public final AllocationTable allocation; + public final PhysicalTable physical; + /** * Creates a MongoTable. */ - MongoEntity( LogicalTable catalogTable, MongoSchema schema, AlgProtoDataType proto, TransactionProvider transactionProvider, long storeId, CatalogPartitionPlacement partitionPlacement ) { - super( Object[].class, catalogTable.id, partitionPlacement.partitionId, storeId ); - this.collectionName = MongoStore.getPhysicalTableName( catalogTable.id, partitionPlacement.partitionId ); + MongoEntity( LogicalTable logicalTable, AllocationTable allocationTable, PhysicalTable physicalTable, MongoSchema schema, TransactionProvider transactionProvider ) { + super( physicalTable ); + this.collectionName = physicalTable.name; this.transactionProvider = transactionProvider; - this.catalogTable = catalogTable; + this.logical = logicalTable; + this.allocation = allocationTable; + this.physical = physicalTable; this.catalogCollection = null; - this.protoRowType = proto; this.mongoSchema = schema; this.collection = schema.database.getCollection( collectionName ); - this.storeId = storeId; + this.storeId = allocation.adapterId; } - public MongoEntity( LogicalCollection catalogEntity, MongoSchema schema, AlgProtoDataType proto, TransactionProvider transactionProvider, long adapter, CatalogCollectionPlacement partitionPlacement ) { + /*public MongoEntity( LogicalCollection catalogEntity, MongoSchema schema, AlgProtoDataType proto, TransactionProvider transactionProvider, long adapter, CatalogCollectionPlacement partitionPlacement ) { super( Object[].class, catalogEntity.id, partitionPlacement.id, adapter ); this.collectionName = MongoStore.getPhysicalTableName( catalogEntity.id, partitionPlacement.id ); this.transactionProvider = transactionProvider; - this.catalogTable = null; + this.logical = null; this.catalogCollection = catalogEntity; this.protoRowType = proto; this.mongoSchema = schema; this.collection = schema.database.getCollection( collectionName ); this.storeId = adapter; - } + }*/ public String toString() { @@ -162,21 +156,15 @@ public String toString() { @Override - public AlgDataType getRowType( AlgDataTypeFactory typeFactory ) { - return protoRowType.apply( typeFactory ); + public Queryable asQueryable( DataContext dataContext, Snapshot snapshot, long entityId ) { + return new MongoQueryable<>( dataContext, snapshot, this ); } @Override - public Queryable asQueryable( DataContext dataContext, PolyphenyDbSchema schema, String tableName ) { - return new MongoQueryable<>( dataContext, schema, this, tableName ); - } - - - @Override - public AlgNode toAlg( ToAlgContext context, AlgOptEntity algOptEntity, AlgTraitSet traitSet ) { + public AlgNode toAlg( ToAlgContext context, AlgTraitSet traitSet ) { final AlgOptCluster cluster = context.getCluster(); - return 
new MongoScan( cluster, traitSet.replace( MongoAlg.CONVENTION ), algOptEntity, this, null ); + return new MongoScan( cluster, traitSet.replace( MongoAlg.CONVENTION ), this, null ); } @@ -256,8 +244,8 @@ private Enumerable aggregate( } } - if ( logicalCols.size() != 0 && catalogTable != null ) { - list.add( 0, getPhysicalProjections( logicalCols, catalogTable.getColumnNames(), catalogTable.fieldIds ) ); + if ( logicalCols.size() != 0 && logical != null ) { + list.add( 0, getPhysicalProjections( logicalCols, logical.getColumnNames(), logical.fieldIds ) ); } final Function1 getter = MongoEnumerator.getter( fields, arrayFields ); @@ -316,41 +304,39 @@ private static Integer parseIntString( String valueString ) { @Override - public RelModify toModificationAlg( + public Modify toModificationAlg( AlgOptCluster cluster, - AlgOptEntity table, - CatalogReader catalogReader, + AlgTraitSet traitSet, + CatalogEntity table, AlgNode child, Operation operation, List updateColumnList, - List sourceExpressionList, - boolean flattened ) { + List sourceExpressionList + ) { mongoSchema.getConvention().register( cluster.getPlanner() ); return new LogicalRelModify( - cluster, cluster.traitSetOf( Convention.NONE ), table, - catalogReader, child, operation, updateColumnList, - sourceExpressionList, - flattened ); + sourceExpressionList + ); } - @Override - public DocumentModify toModificationAlg( + /*@Override + public Modify toModificationAlg( AlgOptCluster cluster, - AlgOptEntity table, - CatalogReader catalogReader, + AlgTraitSet traitSet, + CatalogEntity entity, AlgNode child, Operation operation, List keys, List updates ) { mongoSchema.getConvention().register( cluster.getPlanner() ); - return new LogicalDocumentModify( child.getTraitSet(), table, catalogReader, child, operation, keys, updates ); - } + return new LogicalDocumentModify( child.getTraitSet(), entity, catalogReader, child, operation, keys, updates ); + }*/ /** @@ -358,10 +344,10 @@ public DocumentModify toModificationAlg( * * @param element type */ - public static class MongoQueryable extends AbstractTableQueryable { + public static class MongoQueryable extends AbstractTableQueryable { - MongoQueryable( DataContext dataContext, SchemaPlus schema, MongoEntity table, String tableName ) { - super( dataContext, schema, table, tableName ); + MongoQueryable( DataContext dataContext, Snapshot snapshot, MongoEntity table ) { + super( dataContext, snapshot, table ); } @@ -374,12 +360,12 @@ public Enumerator enumerator() { private MongoDatabase getMongoDb() { - return schema.unwrap( MongoSchema.class ).database; + return table.mongoSchema.database; } private MongoEntity getTable() { - return (MongoEntity) table; + return table; } diff --git a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoPlugin.java b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoPlugin.java index b475c560f2..f9c1c79db2 100644 --- a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoPlugin.java +++ b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoPlugin.java @@ -59,6 +59,7 @@ import org.polypheny.db.adapter.DeployMode.DeploySetting; import org.polypheny.db.catalog.Adapter; import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.Snapshot; import org.polypheny.db.catalog.entity.CatalogAdapter.AdapterType; import org.polypheny.db.catalog.entity.CatalogCollectionMapping; import org.polypheny.db.catalog.entity.CatalogCollectionPlacement; @@ -67,7 +68,7 @@ import 
org.polypheny.db.catalog.entity.CatalogDefaultValue; import org.polypheny.db.catalog.entity.CatalogIndex; import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; -import org.polypheny.db.catalog.entity.LogicalCollection; +import org.polypheny.db.catalog.entity.logical.LogicalCollection; import org.polypheny.db.catalog.entity.allocation.AllocationTable; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.entity.physical.PhysicalTable; @@ -81,7 +82,6 @@ import org.polypheny.db.prepare.Context; import org.polypheny.db.schema.Entity; import org.polypheny.db.schema.Namespace; -import org.polypheny.db.schema.SchemaPlus; import org.polypheny.db.transaction.PolyXid; import org.polypheny.db.type.PolyType; import org.polypheny.db.type.PolyTypeFamily; @@ -229,7 +229,7 @@ public void resetDockerConnection( ConfigDocker c ) { @Override - public void createNewSchema( SchemaPlus rootSchema, String name, Long id ) { + public void createNewSchema( Snapshot snapshot, String name, long id ) { String[] splits = name.split( "_" ); String database = name; if ( splits.length >= 2 ) { @@ -240,8 +240,8 @@ public void createNewSchema( SchemaPlus rootSchema, String name, Long id ) { @Override - public PhysicalTable createTableSchema( LogicalTable logical, AllocationTable allocationTable ) { - return currentSchema.createTable( combinedTable, columnPlacementsOnStore, getAdapterId(), partitionPlacement ); + public PhysicalTable createAdapterTable( LogicalTable logicalTable, AllocationTable allocationTable, PhysicalTable physicalTable ) { + return currentSchema.createTable( logicalTable, allocationTable, physicalTable ); } @@ -264,7 +264,7 @@ public void truncate( Context context, LogicalTable table ) { @Override public Entity createDocumentSchema( LogicalCollection catalogEntity, CatalogCollectionPlacement partitionPlacement ) { - return this.currentSchema.createCollection( catalogEntity, partitionPlacement ); + return null;//this.currentSchema.createCollection( catalogEntity, partitionPlacement ); } @@ -306,33 +306,17 @@ protected void reloadSettings( List updatedSettings ) { @Override - public void createTable( Context context, LogicalTable catalogTable, List partitionIds ) { - Catalog catalog = Catalog.getInstance(); + public PhysicalTable createPhysicalTable( Context context, LogicalTable catalogTable, AllocationTable allocationTable ) { commitAll(); if ( this.currentSchema == null ) { createNewSchema( null, catalogTable.getNamespaceName(), catalogTable.namespaceId ); } - for ( long partitionId : partitionIds ) { - String physicalTableName = getPhysicalTableName( catalogTable.id, partitionId ); - this.currentSchema.database.createCollection( physicalTableName ); + String physicalTableName = getPhysicalTableName( catalogTable.id, allocationTable.id ); + this.currentSchema.database.createCollection( physicalTableName ); - catalog.updatePartitionPlacementPhysicalNames( - getAdapterId(), - partitionId, - catalogTable.getNamespaceName(), - physicalTableName ); - - for ( CatalogColumnPlacement placement : catalog.getColumnPlacementsOnAdapterPerTable( getAdapterId(), catalogTable.id ) ) { - catalog.updateColumnPlacementPhysicalNames( - getAdapterId(), - placement.columnId, - catalogTable.getNamespaceName(), - physicalTableName, - true ); - } - } + return new PhysicalTable( allocationTable, catalogTable.getNamespaceName(), physicalTableName, allocationTable.getColumns().values().stream().map( this::getPhysicalColumnName ).collect( Collectors.toList() ) ); } diff --git 
a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoRules.java b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoRules.java index df78183694..d91b12d44e 100644 --- a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoRules.java +++ b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoRules.java @@ -83,7 +83,7 @@ import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.algebra.type.AlgRecordType; -import org.polypheny.db.catalog.entity.LogicalCollection; +import org.polypheny.db.catalog.entity.logical.LogicalCollection; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.languages.OperatorRegistry; import org.polypheny.db.nodes.Operator; diff --git a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoScan.java b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoScan.java index 3ed7c6b758..dfff8a2be8 100644 --- a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoScan.java +++ b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoScan.java @@ -53,9 +53,8 @@ * * Additional operations might be applied, using the "find" or "aggregate" methods.

      */ -public class MongoScan extends RelScan implements MongoAlg { +public class MongoScan extends RelScan implements MongoAlg { - final MongoEntity mongoEntity; final AlgDataType projectRowType; @@ -65,15 +64,12 @@ public class MongoScan extends RelScan implements MongoAlg { * @param cluster Cluster * @param traitSet Traits * @param table Table - * @param mongoEntity MongoDB table * @param projectRowType Fields and types to project; null to project raw row */ - protected MongoScan( AlgOptCluster cluster, AlgTraitSet traitSet, AlgOptEntity table, MongoEntity mongoEntity, AlgDataType projectRowType ) { + protected MongoScan( AlgOptCluster cluster, AlgTraitSet traitSet, MongoEntity table, AlgDataType projectRowType ) { super( cluster, traitSet, table ); - this.mongoEntity = mongoEntity; this.projectRowType = projectRowType; - assert mongoEntity != null; assert getConvention() == CONVENTION; } @@ -109,8 +105,8 @@ public void register( AlgOptPlanner planner ) { @Override public void implement( Implementor implementor ) { - implementor.mongoEntity = mongoEntity; - implementor.table = table; + implementor.mongoEntity = entity; + implementor.table = entity; implementor.setStaticRowType( (AlgRecordType) rowType ); implementor.physicalMapper.addAll( rowType.getFieldNames() ); } diff --git a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoSchema.java b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoSchema.java index fd957e00e8..d12efb877e 100644 --- a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoSchema.java +++ b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoSchema.java @@ -48,12 +48,14 @@ import org.polypheny.db.algebra.type.AlgDataTypeImpl; import org.polypheny.db.algebra.type.AlgDataTypeSystem; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.entity.LogicalCollection; +import org.polypheny.db.catalog.entity.allocation.AllocationTable; +import org.polypheny.db.catalog.entity.logical.LogicalCollection; import org.polypheny.db.catalog.entity.CatalogCollectionPlacement; import org.polypheny.db.catalog.entity.CatalogColumn; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.catalog.entity.physical.PhysicalTable; import org.polypheny.db.plan.Convention; import org.polypheny.db.schema.Entity; import org.polypheny.db.schema.Namespace.Schema; @@ -103,28 +105,12 @@ public MongoSchema( long id, String database, MongoClient connection, Transactio } - private String buildDatabaseName( CatalogColumn column ) { - return column.getDatabaseName() + "_" + column.getSchemaName() + "_" + column.name; + public MongoEntity createTable( LogicalTable logicalTable, AllocationTable allocationTable, PhysicalTable physicalTable ) { + return new MongoEntity( logicalTable, allocationTable, physicalTable, this, transactionProvider ); } - public MongoEntity createTable( LogicalTable catalogTable, List columnPlacementsOnStore, int storeId, CatalogPartitionPlacement partitionPlacement ) { - final AlgDataTypeFactory typeFactory = new PolyTypeFactoryImpl( AlgDataTypeSystem.DEFAULT ); - final AlgDataTypeFactory.Builder fieldInfo = typeFactory.builder(); - - for ( CatalogColumnPlacement placement : columnPlacementsOnStore ) { - CatalogColumn catalogColumn = Catalog.getInstance().getColumn( placement.columnId ); - AlgDataType 
sqlType = catalogColumn.getAlgDataType( typeFactory ); - fieldInfo.add( catalogColumn.name, MongoStore.getPhysicalColumnName( catalogColumn.name, catalogColumn.id ), sqlType ).nullable( catalogColumn.nullable ); - } - MongoEntity table = new MongoEntity( catalogTable, this, AlgDataTypeImpl.proto( fieldInfo.build() ), transactionProvider, storeId, partitionPlacement ); - - tableMap.put( catalogTable.name + "_" + partitionPlacement.partitionId, table ); - return table; - } - - - public Entity createCollection( LogicalCollection catalogEntity, CatalogCollectionPlacement partitionPlacement ) { + /*public Entity createCollection( LogicalCollection catalogEntity, CatalogCollectionPlacement partitionPlacement ) { final AlgDataTypeFactory typeFactory = new PolyTypeFactoryImpl( AlgDataTypeSystem.DEFAULT ); final AlgDataTypeFactory.Builder fieldInfo = typeFactory.builder(); @@ -135,7 +121,7 @@ public Entity createCollection( LogicalCollection catalogEntity, CatalogCollecti tableMap.put( catalogEntity.name + "_" + partitionPlacement.id, table ); return table; - } + }*/ } diff --git a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlAddPlacement.java b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlAddPlacement.java index 6cfc7b2392..c9998296b2 100644 --- a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlAddPlacement.java +++ b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlAddPlacement.java @@ -23,7 +23,7 @@ import org.polypheny.db.adapter.DataStore; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.logistic.Pattern; -import org.polypheny.db.catalog.entity.LogicalCollection; +import org.polypheny.db.catalog.entity.logical.LogicalCollection; import org.polypheny.db.catalog.exceptions.UnknownSchemaException; import org.polypheny.db.ddl.DdlManager; import org.polypheny.db.languages.ParserPos; diff --git a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlDeletePlacement.java b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlDeletePlacement.java index a8dd0b52fc..fcd547930e 100644 --- a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlDeletePlacement.java +++ b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlDeletePlacement.java @@ -23,7 +23,7 @@ import org.polypheny.db.adapter.DataStore; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.logistic.Pattern; -import org.polypheny.db.catalog.entity.LogicalCollection; +import org.polypheny.db.catalog.entity.logical.LogicalCollection; import org.polypheny.db.catalog.exceptions.UnknownSchemaException; import org.polypheny.db.ddl.DdlManager; import org.polypheny.db.languages.ParserPos; diff --git a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlDrop.java b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlDrop.java index 31080b35b9..5345bb60f2 100644 --- a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlDrop.java +++ b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlDrop.java @@ -19,7 +19,7 @@ import java.util.List; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.logistic.Pattern; -import org.polypheny.db.catalog.entity.LogicalCollection; +import org.polypheny.db.catalog.entity.logical.LogicalCollection; import org.polypheny.db.catalog.entity.CatalogSchema; import org.polypheny.db.ddl.DdlManager; import org.polypheny.db.languages.ParserPos; diff 
--git a/plugins/mysql-adapter/src/main/java/org/polypheny/db/adapter/jdbc/MysqlSourcePlugin.java b/plugins/mysql-adapter/src/main/java/org/polypheny/db/adapter/jdbc/MysqlSourcePlugin.java index 48fcd0084b..267964dd26 100644 --- a/plugins/mysql-adapter/src/main/java/org/polypheny/db/adapter/jdbc/MysqlSourcePlugin.java +++ b/plugins/mysql-adapter/src/main/java/org/polypheny/db/adapter/jdbc/MysqlSourcePlugin.java @@ -95,7 +95,7 @@ public MysqlSource( int storeId, String uniqueName, final Map se @Override - public PhysicalTable createTableSchema( LogicalTable logical, AllocationTable allocationTable ) { + public PhysicalTable createAdapterTable( LogicalTable logical, AllocationTable allocationTable, PhysicalTable physicalTable ) { return currentJdbcSchema.createJdbcTable( catalogTable, columnPlacementsOnStore, partitionPlacement ); } diff --git a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/Neo4jPlugin.java b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/Neo4jPlugin.java index 663a2f5e7a..68a3c10f4c 100644 --- a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/Neo4jPlugin.java +++ b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/Neo4jPlugin.java @@ -53,6 +53,7 @@ import org.polypheny.db.adapter.DeployMode; import org.polypheny.db.catalog.Adapter; import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.Snapshot; import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.catalog.Catalog.Pattern; import org.polypheny.db.catalog.entity.CatalogColumn; @@ -72,7 +73,6 @@ import org.polypheny.db.docker.DockerManager.Container; import org.polypheny.db.docker.DockerManager.ContainerBuilder; import org.polypheny.db.prepare.Context; -import org.polypheny.db.schema.SchemaPlus; import org.polypheny.db.schema.Schemas; import org.polypheny.db.transaction.PolyXid; import org.polypheny.db.type.PolyType; @@ -239,7 +239,7 @@ public List getSupportedSchemaType() { @Override - public void createTable( Context context, LogicalTable combinedTable, List partitionIds ) { + public PhysicalTable createPhysicalTable( Context context, LogicalTable combinedTable, AllocationTable allocationTable ) { Catalog catalog = Catalog.getInstance(); if ( this.currentSchema == null ) { @@ -497,7 +497,7 @@ public void createNewSchema( SchemaPlus rootSchema, String name ) { @Override - public PhysicalTable createTableSchema( LogicalTable logical, AllocationTable allocationTable ) { + public PhysicalTable createAdapterTable( LogicalTable logical, AllocationTable allocationTable, PhysicalTable physicalTable ) { return this.currentSchema.createTable( allocationTable ); } diff --git a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/NeoEntity.java b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/NeoEntity.java index 792f82ad5c..bdc9d9c7cf 100644 --- a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/NeoEntity.java +++ b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/NeoEntity.java @@ -90,7 +90,7 @@ public Modify toModificationAlg( AlgNode child, Operation operation, List targets, - List sources ) { + List sources ) { NeoConvention.INSTANCE.register( cluster.getPlanner() ); return new LogicalRelModify( traits.replace( Convention.NONE ), diff --git a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/NeoGraph.java b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/NeoGraph.java index 8808a079a5..cf034c8626 100644 --- 
a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/NeoGraph.java +++ b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/NeoGraph.java @@ -101,7 +101,7 @@ public LpgModify toModificationAlg( AlgNode child, Operation operation, List targets, - List sources ) { + List sources ) { NeoConvention.INSTANCE.register( cluster.getPlanner() ); return new LogicalLpgModify( cluster, diff --git a/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigSchema.java b/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigSchema.java index 953178c0fe..3197fafea6 100644 --- a/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigSchema.java +++ b/plugins/pig-adapter/src/main/java/org/polypheny/db/adapter/pig/PigSchema.java @@ -55,7 +55,7 @@ public PigSchema( long id ) { @Override - protected Map<String, Entity> getTableMap() { + protected Map<String, Entity> getTables() { return tableMap; } diff --git a/plugins/pig-adapter/src/test/java/org/polypheny/db/test/PigAlgBuilderStyleTest.java b/plugins/pig-adapter/src/test/java/org/polypheny/db/test/PigAlgBuilderStyleTest.java index 52c2b003d7..482abdf2b2 100644 --- a/plugins/pig-adapter/src/test/java/org/polypheny/db/test/PigAlgBuilderStyleTest.java +++ b/plugins/pig-adapter/src/test/java/org/polypheny/db/test/PigAlgBuilderStyleTest.java @@ -230,7 +230,7 @@ public void testImplWithJoinAndGroupBy() throws Exception { private SchemaPlus createTestSchema() { - SchemaPlus result = Frameworks.createRootSchema( false ); + SchemaPlus result = Frameworks.createSnapshot( false ); result.add( "t", new PigEntity( "build/test-classes/data.txt", new String[]{ "tc0", "tc1" } ) ); result.add( "s", new PigEntity( "build/test-classes/data2.txt", new String[]{ "sc0", "sc1" } ) ); return result; diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java index 31499caeb6..d04ad2430e 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java @@ -30,6 +30,7 @@ import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.catalog.entities.CatalogUser; import org.polypheny.db.catalog.entity.logical.LogicalGraph; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.logical.document.DocumentCatalog; import org.polypheny.db.catalog.logical.graph.GraphCatalog; import org.polypheny.db.catalog.logical.relational.RelationalCatalog; @@ -39,8 +40,6 @@ import org.polypheny.db.nodes.Operator; import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.prepare.Prepare.CatalogReader; -import org.polypheny.db.prepare.Prepare.PreparingEntity; -import org.polypheny.db.schema.PolyphenyDbSchema; import org.polypheny.db.util.Moniker; @@ -175,19 +174,19 @@ public AlgDataType createTypeFromProjection( AlgDataType type, List colu @Override - public PolyphenyDbSchema getRootSchema() { + public Snapshot getSnapshot() { return null; } @Override - public PreparingEntity getTableForMember( List<String> names ) { + public LogicalTable getTableForMember( List<String> names ) { return null; } @Override - public PreparingEntity getTable( List<String> names ) { + public LogicalTable getTable( List<String> names ) { return null; } diff --git a/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java
b/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java index 9ee87fdae3..1fdb126fdc 100644 --- a/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java +++ b/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java @@ -84,7 +84,7 @@ public static void register() { @Override - public PhysicalTable createTableSchema( LogicalTable logical, AllocationTable allocationTable ) { + public PhysicalTable createAdapterTable( LogicalTable logical, AllocationTable allocationTable, PhysicalTable physicalTable ) { return currentJdbcSchema.createJdbcTable( catalogTable, columnPlacementsOnStore, partitionPlacement ); } diff --git a/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/store/PostgresqlStore.java b/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/store/PostgresqlStore.java index 13dbf5b19f..a62301140f 100644 --- a/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/store/PostgresqlStore.java +++ b/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/store/PostgresqlStore.java @@ -196,7 +196,7 @@ public void updateColumnType( Context context, CatalogColumnPlacement columnPlac @Override - public PhysicalTable createTableSchema( LogicalTable logical, AllocationTable allocationTable ) { + public PhysicalTable createAdapterTable( LogicalTable logical, AllocationTable allocationTable, PhysicalTable physicalTable ) { return currentJdbcSchema.createJdbcTable( catalogTable, columnPlacementsOnStore, partitionPlacement ); } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/SqlLanguagePlugin.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/SqlLanguagePlugin.java index 81bb7527f1..9c1c1bf658 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/SqlLanguagePlugin.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/SqlLanguagePlugin.java @@ -35,6 +35,7 @@ import org.polypheny.db.algebra.operators.ChainedOperatorTable; import org.polypheny.db.algebra.operators.OperatorName; import org.polypheny.db.algebra.operators.OperatorTable; +import org.polypheny.db.catalog.Snapshot; import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.config.PolyphenyDbConnectionProperty; import org.polypheny.db.languages.LanguageManager; @@ -209,13 +210,13 @@ public static void startup() { } - public static PolyphenyDbSqlValidator getSqlValidator( org.polypheny.db.prepare.Context context, PolyphenyDbCatalogReader catalogReader ) { + public static PolyphenyDbSqlValidator getSqlValidator( org.polypheny.db.prepare.Context context, Snapshot snapshot ) { final OperatorTable opTab0 = fun( OperatorTable.class, SqlStdOperatorTable.instance() ); - final OperatorTable opTab = ChainedOperatorTable.of( opTab0, catalogReader ); + final OperatorTable opTab = ChainedOperatorTable.of( opTab0, snapshot ); final JavaTypeFactory typeFactory = context.getTypeFactory(); final Conformance conformance = context.config().conformance(); - return new PolyphenyDbSqlValidator( opTab, catalogReader, typeFactory, conformance ); + return new PolyphenyDbSqlValidator( opTab, snapshot, typeFactory, conformance ); } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/SqlProcessorImpl.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/SqlProcessorImpl.java index aece9af301..61b0c7b090 100644 --- 
a/plugins/sql-language/src/main/java/org/polypheny/db/sql/SqlProcessorImpl.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/SqlProcessorImpl.java @@ -37,6 +37,7 @@ import org.polypheny.db.algebra.constant.Kind; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.Snapshot; import org.polypheny.db.catalog.entity.CatalogColumn; import org.polypheny.db.catalog.entity.CatalogDefaultValue; import org.polypheny.db.catalog.entity.logical.LogicalTable; @@ -154,8 +155,8 @@ public Pair validate( Transaction transaction, Node parsed, b } final Conformance conformance = parserConfig.conformance(); - final PolyphenyDbCatalogReader catalogReader = transaction.getSnapshot(); - validator = new PolyphenyDbSqlValidator( SqlStdOperatorTable.instance(), catalogReader, transaction.getTypeFactory(), conformance ); + final Snapshot snapshot = transaction.getSnapshot(); + validator = new PolyphenyDbSqlValidator( SqlStdOperatorTable.instance(), snapshot, transaction.getTypeFactory(), conformance ); validator.setIdentifierExpansion( true ); Node validated; diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlUtil.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlUtil.java index aaf3efa030..83d29c8770 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlUtil.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlUtil.java @@ -35,11 +35,15 @@ import org.apache.calcite.linq4j.function.Functions; import org.polypheny.db.algebra.constant.FunctionCategory; import org.polypheny.db.algebra.constant.Kind; +import org.polypheny.db.algebra.constant.Modality; import org.polypheny.db.algebra.constant.Syntax; import org.polypheny.db.algebra.operators.OperatorName; import org.polypheny.db.algebra.operators.OperatorTable; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypePrecedenceList; +import org.polypheny.db.catalog.Snapshot; +import org.polypheny.db.catalog.entity.CatalogEntity; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.languages.OperatorRegistry; import org.polypheny.db.languages.ParserPos; import org.polypheny.db.nodes.BasicNodeVisitor; @@ -52,6 +56,7 @@ import org.polypheny.db.nodes.Node; import org.polypheny.db.nodes.NodeList; import org.polypheny.db.nodes.Operator; +import org.polypheny.db.schema.StreamableEntity; import org.polypheny.db.type.PolyTypeUtil; import org.polypheny.db.util.BarfingInvocationHandler; import org.polypheny.db.util.CoreUtil; @@ -600,6 +605,26 @@ public static SqlLiteral symbol( Enum o, ParserPos parserPos ) { } + public static AlgDataType getNamedType( Identifier node, Snapshot snapshot ) { + LogicalTable table = snapshot.getLogicalTable( node.getNames() ); + if ( table != null ) { + return table.getRowType(); + } else { + return null; + } + } + + + public static boolean supportsModality( Modality modality, CatalogEntity entity ) { + + if ( Objects.requireNonNull( modality ) == Modality.STREAM ) { + return entity instanceof StreamableEntity; + } + return !(entity instanceof StreamableEntity); + + } + + /** * Handles particular {@link DatabaseMetaData} methods; invocations of other methods will fall through to the base class, * {@link org.polypheny.db.util.BarfingInvocationHandler}, which will throw an error. 
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/advise/SqlAdvisorValidator.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/advise/SqlAdvisorValidator.java index 902624add6..c78d5c42c4 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/advise/SqlAdvisorValidator.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/advise/SqlAdvisorValidator.java @@ -23,6 +23,7 @@ import org.polypheny.db.algebra.operators.OperatorTable; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; +import org.polypheny.db.catalog.Snapshot; import org.polypheny.db.languages.ParserPos; import org.polypheny.db.nodes.Node; import org.polypheny.db.nodes.validate.ValidatorCatalogReader; @@ -56,12 +57,11 @@ public class SqlAdvisorValidator extends SqlValidatorImpl { * Creates a SqlAdvisor validator. * * @param opTab Operator table - * @param catalogReader Catalog reader * @param typeFactory Type factory * @param conformance Compatibility mode */ - public SqlAdvisorValidator( OperatorTable opTab, ValidatorCatalogReader catalogReader, AlgDataTypeFactory typeFactory, Conformance conformance ) { - super( opTab, catalogReader, typeFactory, conformance ); + public SqlAdvisorValidator( OperatorTable opTab, Snapshot snapshot, AlgDataTypeFactory typeFactory, Conformance conformance ) { + super( opTab, snapshot, typeFactory, conformance ); } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateTable.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateTable.java index dd1466c6ed..6a98571eff 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateTable.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateTable.java @@ -209,7 +209,7 @@ public void execute( Context context, Statement statement, QueryParameters param long schemaId; try { - // Cannot use getTable() here since table does not yet exist + // Cannot use getLogicalTable() here since table does not yet exist if ( name.names.size() == 3 ) { // DatabaseName.SchemaName.TableName schemaId = catalog.getSchema( name.names.get( 0 ), name.names.get( 1 ) ).id; tableName = name.names.get( 2 ); diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/util/SqlTypeUtil.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/util/SqlTypeUtil.java index c64a199159..b6eca3e722 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/util/SqlTypeUtil.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/util/SqlTypeUtil.java @@ -31,6 +31,7 @@ import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; import org.polypheny.db.algebra.type.AlgDataTypeFactoryImpl; +import org.polypheny.db.catalog.Snapshot; import org.polypheny.db.languages.NodeToAlgConverter; import org.polypheny.db.languages.NodeToAlgConverter.Config; import org.polypheny.db.languages.Parser; @@ -231,12 +232,12 @@ public static boolean inCharFamily( AlgDataType type ) { public NodeToAlgConverter createToRelConverter( QueryLanguage language, Validator validator, - CatalogReader catalogReader, + Snapshot snapshot, AlgOptCluster cluster, RexConvertletTable convertletTable, Config config ) { if ( language == QueryLanguage.from( "sql" ) ) { - return getSqlToRelConverter( (SqlValidator) validator, catalogReader, 
cluster, (SqlRexConvertletTable) convertletTable, config ); + return getSqlToRelConverter( (SqlValidator) validator, snapshot, cluster, (SqlRexConvertletTable) convertletTable, config ); } throw new UnsupportedLanguageOperation( language ); @@ -245,22 +246,22 @@ public NodeToAlgConverter createToRelConverter( private SqlToAlgConverter getSqlToRelConverter( SqlValidator validator, - CatalogReader catalogReader, + Snapshot snapshot, AlgOptCluster cluster, SqlRexConvertletTable convertletTable, Config config ) { - return new SqlToAlgConverter( validator, catalogReader, cluster, convertletTable, config ); + return new SqlToAlgConverter( validator, snapshot, cluster, convertletTable, config ); } public Validator createPolyphenyValidator( QueryLanguage language, OperatorTable operatorTable, - PolyphenyDbCatalogReader catalogReader, + Snapshot snapshot, JavaTypeFactory typeFactory, Conformance conformance ) { if ( language == QueryLanguage.from( "sql" ) ) { - return new PolyphenyDbSqlValidator( operatorTable, catalogReader, typeFactory, conformance ); + return new PolyphenyDbSqlValidator( operatorTable, snapshot, typeFactory, conformance ); } throw new UnsupportedLanguageOperation( language ); diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/AbstractNamespace.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/AbstractNamespace.java index cedabb514d..f5ffe05189 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/AbstractNamespace.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/AbstractNamespace.java @@ -24,7 +24,7 @@ import org.polypheny.db.algebra.constant.Monotonicity; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; -import org.polypheny.db.nodes.validate.ValidatorTable; +import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.sql.language.SqlNode; import org.polypheny.db.util.Pair; import org.polypheny.db.util.Util; @@ -147,7 +147,7 @@ public SqlNode getEnclosingNode() { @Override - public ValidatorTable getTable() { + public CatalogEntity getTable() { return null; } @@ -161,7 +161,7 @@ public SqlValidatorNamespace lookupChild( String name ) { @Override public boolean fieldExists( String name ) { final AlgDataType rowType = getRowType(); - return validator.catalogReader.nameMatcher.field( rowType, name ) != null; + return validator.snapshot.nameMatcher.field( rowType, name ) != null; } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/DelegatingNamespace.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/DelegatingNamespace.java index 59c2dabd50..b7e1784e74 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/DelegatingNamespace.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/DelegatingNamespace.java @@ -20,7 +20,7 @@ import java.util.List; import org.polypheny.db.algebra.constant.Monotonicity; import org.polypheny.db.algebra.type.AlgDataType; -import org.polypheny.db.nodes.validate.ValidatorTable; +import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.sql.language.SqlNode; import org.polypheny.db.util.Pair; @@ -50,7 +50,7 @@ public SqlValidator getValidator() { @Override - public ValidatorTable getTable() { + public CatalogEntity getTable() { return namespace.getTable(); } diff --git 
a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/DelegatingScope.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/DelegatingScope.java index 4fa6f73e07..95430dc8fd 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/DelegatingScope.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/DelegatingScope.java @@ -26,6 +26,7 @@ import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.algebra.type.DynamicRecordType; import org.polypheny.db.algebra.type.StructKind; +import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.nodes.validate.ValidatorTable; import org.polypheny.db.prepare.Prepare.PreparingEntity; import org.polypheny.db.schema.CustomColumnResolvingEntity; @@ -92,7 +93,7 @@ void resolveInNamespace( SqlValidatorNamespace ns, boolean nullable, List names ) { + public LogicalTable getTable( List names ) { return catalogReader.getTable( names ); } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/DelegatingSqlValidatorTable.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/DelegatingSqlValidatorTable.java index 53904009e3..b9ec4841c5 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/DelegatingSqlValidatorTable.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/DelegatingSqlValidatorTable.java @@ -21,7 +21,6 @@ import org.polypheny.db.algebra.constant.Monotonicity; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.nodes.validate.ValidatorTable; -import org.polypheny.db.util.AccessType; /** @@ -60,10 +59,5 @@ public Monotonicity getMonotonicity( String columnName ) { } - @Override - public AccessType getAllowedAccess() { - return table.getAllowedAccess(); - } - } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/EmptyScope.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/EmptyScope.java index 0780b9784a..4a796deede 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/EmptyScope.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/EmptyScope.java @@ -24,8 +24,8 @@ import java.util.Map; import org.polypheny.db.algebra.constant.Monotonicity; import org.polypheny.db.algebra.type.AlgDataType; +import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.catalog.entity.logical.LogicalTable; -import org.polypheny.db.nodes.validate.ValidatorTable; import org.polypheny.db.schema.PolyphenyDbSchema; import org.polypheny.db.sql.language.SqlCall; import org.polypheny.db.sql.language.SqlDataTypeSpec; @@ -81,9 +81,9 @@ public void resolve( List names, NameMatcher nameMatcher, boolean deep, @Override public SqlValidatorNamespace getTableNamespace( List names ) { - ValidatorTable table = validator.catalogReader.getTable( names ); + CatalogEntity table = validator.snapshot.getLogicalTable( names ); return table != null - ? new TableNamespace( validator, table ) + ? new EntityNamespace( validator, table ) : null; } @@ -93,9 +93,9 @@ public void resolveTable( List names, NameMatcher nameMatcher, Path path final List resolves = ((ResolvedImpl) resolved).resolves; // Look in the default schema, then default catalog, then root schema. 
- LogicalTable table = validator.catalogReader.getRootSchema().getTable( names ); + LogicalTable table = validator.snapshot.getLogicalTable( names ); if ( table != null ) { - resolves.add( new Resolve( validator.catalogReader.getRootSchema().getTable( names ) ) ); + resolves.add( new Resolve( validator.snapshot.getLogicalTable( names ) ) ); } } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/TableNamespace.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/EntityNamespace.java similarity index 85% rename from plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/TableNamespace.java rename to plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/EntityNamespace.java index 6d13d1f552..35a0157ab4 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/TableNamespace.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/EntityNamespace.java @@ -20,12 +20,13 @@ import com.google.common.collect.ImmutableList; import java.util.List; import java.util.Map; -import java.util.Objects; +import lombok.Getter; +import lombok.NonNull; import org.polypheny.db.algebra.constant.Monotonicity; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory.Builder; import org.polypheny.db.algebra.type.AlgDataTypeField; -import org.polypheny.db.nodes.validate.ValidatorTable; +import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.schema.Entity; import org.polypheny.db.schema.ExtensibleEntity; @@ -40,23 +41,24 @@ /** * Namespace based on a table from the catalog. */ -class TableNamespace extends AbstractNamespace { +class EntityNamespace extends AbstractNamespace { - private final ValidatorTable table; + @Getter + private final CatalogEntity table; public final ImmutableList<AlgDataTypeField> extendedFields; /** * Creates an EntityNamespace. */ - private TableNamespace( SqlValidatorImpl validator, ValidatorTable table, List<AlgDataTypeField> fields ) { + private EntityNamespace( SqlValidatorImpl validator, @NonNull CatalogEntity entity, List<AlgDataTypeField> fields ) { super( validator, null ); - this.table = Objects.requireNonNull( table ); + this.table = entity; this.extendedFields = ImmutableList.copyOf( fields ); } - TableNamespace( SqlValidatorImpl validator, ValidatorTable table ) { + EntityNamespace( SqlValidatorImpl validator, CatalogEntity table ) { this( validator, table, ImmutableList.of() ); } @@ -81,15 +83,15 @@ public SqlNode getNode() { @Override - public ValidatorTable getTable() { + public CatalogEntity getTable() { return table; } @Override public Monotonicity getMonotonicity( String columnName ) { - final ValidatorTable table = getTable(); - return table.getMonotonicity( columnName ); + final CatalogEntity table = getTable(); + return Util.getMonotonicity( table, columnName ); } @@ -98,7 +100,7 @@ public Monotonicity getMonotonicity( String columnName ) { * * Extended fields are "hidden" or undeclared fields that may nevertheless be present if you ask for them.
*/ - public TableNamespace extend( SqlNodeList extendList ) { + public EntityNamespace extend( SqlNodeList extendList ) { final List identifierList = Util.quotientList( extendList.getSqlList(), 2, 0 ); SqlValidatorUtil.checkIdentifierListForDuplicates( identifierList, validator.getValidationErrorFunction() ); final ImmutableList.Builder builder = ImmutableList.builder(); @@ -108,11 +110,11 @@ public TableNamespace extend( SqlNodeList extendList ) { final Entity schemaEntity = table.unwrap( Entity.class ); if ( schemaEntity != null && table instanceof AlgOptEntity && schemaEntity instanceof ExtensibleEntity ) { checkExtendedColumnTypes( extendList ); - final AlgOptEntity algOptEntity = ((AlgOptEntity) table).extend( extendedFields ); - final ValidatorTable validatorTable = algOptEntity.unwrap( ValidatorTable.class ); - return new TableNamespace( validator, validatorTable, ImmutableList.of() ); + //final AlgOptEntity algOptEntity = ((AlgOptEntity) table).extend( extendedFields ); + //final CatalogEntity validatorTable = algOptEntity.unwrap( ValidatorTable.class ); + return new EntityNamespace( validator, table, ImmutableList.of() ); } - return new TableNamespace( validator, table, extendedFields ); + return new EntityNamespace( validator, table, extendedFields ); } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/IdentifierNamespace.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/IdentifierNamespace.java index 5bfd29eede..c034f1d809 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/IdentifierNamespace.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/IdentifierNamespace.java @@ -27,17 +27,19 @@ import org.polypheny.db.algebra.constant.Monotonicity; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeField; +import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.languages.ParserPos; -import org.polypheny.db.nodes.validate.ValidatorTable; import org.polypheny.db.sql.language.SqlCall; import org.polypheny.db.sql.language.SqlIdentifier; import org.polypheny.db.sql.language.SqlNode; import org.polypheny.db.sql.language.SqlNodeList; +import org.polypheny.db.sql.language.SqlUtil; import org.polypheny.db.util.CyclicDefinitionException; import org.polypheny.db.util.NameMatcher; import org.polypheny.db.util.NameMatchers; import org.polypheny.db.util.Pair; import org.polypheny.db.util.Static; +import org.polypheny.db.util.Util; /** @@ -50,7 +52,7 @@ public class IdentifierNamespace extends AbstractNamespace { public final SqlNodeList extendList; /** - * The underlying namespace. Often a {@link TableNamespace}. Set on validate. + * The underlying namespace. Often a {@link EntityNamespace}. Set on validate. 
*/ private SqlValidatorNamespace resolvedNamespace; @@ -94,7 +96,7 @@ protected static Pair split( SqlNode node ) { private SqlValidatorNamespace resolveImpl( SqlIdentifier id ) { - final NameMatcher nameMatcher = validator.catalogReader.nameMatcher; + final NameMatcher nameMatcher = validator.snapshot.nameMatcher; final SqlValidatorScope.ResolvedImpl resolved = new SqlValidatorScope.ResolvedImpl(); final List<String> names = SqlIdentifier.toStar( id.names ); try { @@ -153,11 +155,11 @@ private SqlValidatorNamespace resolveImpl( SqlIdentifier id ) { @Override public AlgDataType validateImpl( AlgDataType targetRowType ) { resolvedNamespace = Objects.requireNonNull( resolveImpl( id ) ); - if ( resolvedNamespace instanceof TableNamespace ) { - ValidatorTable table = resolvedNamespace.getTable(); + if ( resolvedNamespace instanceof EntityNamespace ) { + CatalogEntity table = resolvedNamespace.getTable(); if ( validator.shouldExpandIdentifiers() ) { // TODO: expand qualifiers for column references also - List<String> qualifiedNames = table.getQualifiedName(); + List<String> qualifiedNames = List.of( table.name ); if ( qualifiedNames != null ) { // Assign positions to the components of the fully-qualified identifier, as best we can. We assume that qualification adds names to the front, e.g. FOO.BAR becomes BAZ.FOO.BAR. List<ParserPos> poses = new ArrayList<>( Collections.nCopies( qualifiedNames.size(), id.getPos() ) ); @@ -177,10 +179,10 @@ public AlgDataType validateImpl( AlgDataType targetRowType ) { AlgDataType rowType = resolvedNamespace.getRowType(); if ( extendList != null ) { - if ( !(resolvedNamespace instanceof TableNamespace) ) { + if ( !(resolvedNamespace instanceof EntityNamespace) ) { throw new RuntimeException( "cannot convert" ); } - resolvedNamespace = ((TableNamespace) resolvedNamespace).extend( extendList ); + resolvedNamespace = ((EntityNamespace) resolvedNamespace).extend( extendList ); rowType = resolvedNamespace.getRowType(); } @@ -220,7 +222,7 @@ public SqlValidatorNamespace resolve() { @Override - public ValidatorTable getTable() { + public CatalogEntity getTable() { return resolvedNamespace == null ? null : resolve().getTable(); } @@ -233,18 +235,19 @@ public List<Pair<SqlNode, Monotonicity>> getMonotonicExprs() { @Override public Monotonicity getMonotonicity( String columnName ) { - final ValidatorTable table = getTable(); - return table.getMonotonicity( columnName ); + final CatalogEntity table = getTable(); + return Util.getMonotonicity( table, columnName ); } @Override public boolean supportsModality( Modality modality ) { - final ValidatorTable table = getTable(); + final CatalogEntity table = getTable(); if ( table == null ) { return modality == Modality.RELATION; } - return table.supportsModality( modality ); + + return SqlUtil.supportsModality( modality, table ); } } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/ListScope.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/ListScope.java index 0981ac1b86..bfd4f48c82 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/ListScope.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/ListScope.java @@ -95,14 +95,14 @@ private ScopeChild findChild( List names, NameMatcher nameMatcher ) { } // Look up the 2 tables independently, in case one is qualified with catalog & schema and the other is not.
- /*final ValidatorTable table = child.namespace.getTable(); + /*final ValidatorTable table = child.namespace.getLogicalTable(); if ( table != null ) { final ResolvedImpl resolved = new ResolvedImpl(); resolveTable( names, nameMatcher, Path.EMPTY, resolved ); if ( resolved.count() == 1 && resolved.only().remainingNames.isEmpty() && resolved.only().namespace instanceof TableNamespace - && resolved.only().namespace.getTable().getQualifiedName().equals( table.getQualifiedName() ) ) { + && resolved.only().namespace.getLogicalTable().getQualifiedName().equals( table.getQualifiedName() ) ) { return child; } }*/ @@ -132,7 +132,7 @@ public void findAliases( Collection result ) { @Override public Pair findQualifyingTableName( final String columnName, SqlNode ctx ) { - final NameMatcher nameMatcher = validator.catalogReader.nameMatcher; + final NameMatcher nameMatcher = validator.snapshot.nameMatcher; final Map map = findQualifyingTableNames( columnName, ctx, nameMatcher ); switch ( map.size() ) { case 0: @@ -214,7 +214,7 @@ public void resolve( List names, NameMatcher nameMatcher, boolean deep, @Override public AlgDataType resolveColumn( String columnName, SqlNode ctx ) { - final NameMatcher nameMatcher = validator.catalogReader.nameMatcher; + final NameMatcher nameMatcher = validator.snapshot.nameMatcher; int found = 0; AlgDataType type = null; for ( ScopeChild child : children ) { diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/MatchRecognizeScope.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/MatchRecognizeScope.java index a1c2e1a237..b8f45b8c85 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/MatchRecognizeScope.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/MatchRecognizeScope.java @@ -47,7 +47,7 @@ public class MatchRecognizeScope extends ListScope { public MatchRecognizeScope( SqlValidatorScope parent, SqlMatchRecognize matchRecognize ) { super( parent ); this.matchRecognize = matchRecognize; - patternVars = validator.getCatalogReader().nameMatcher.createSet(); + patternVars = validator.getSnapshot().nameMatcher.createSet(); patternVars.add( STAR ); } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/OrderByScope.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/OrderByScope.java index 27191f69d6..70dcba068e 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/OrderByScope.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/OrderByScope.java @@ -77,7 +77,7 @@ public SqlQualified fullyQualify( SqlIdentifier identifier ) { final SqlValidatorNamespace selectNs = validator.getSqlNamespace( select ); final AlgDataType rowType = selectNs.getRowType(); - final NameMatcher nameMatcher = validator.catalogReader.nameMatcher; + final NameMatcher nameMatcher = validator.snapshot.nameMatcher; final AlgDataTypeField field = nameMatcher.field( rowType, name ); final int aliasCount = aliasCount( nameMatcher, name ); if ( aliasCount > 1 ) { @@ -112,7 +112,7 @@ private int aliasCount( NameMatcher nameMatcher, String name ) { public AlgDataType resolveColumn( String name, SqlNode ctx ) { final SqlValidatorNamespace selectNs = validator.getSqlNamespace( select ); final AlgDataType rowType = selectNs.getRowType(); - final NameMatcher nameMatcher = validator.catalogReader.nameMatcher; + final NameMatcher nameMatcher = 
validator.snapshot.nameMatcher; final AlgDataTypeField field = nameMatcher.field( rowType, name ); if ( field != null ) { return field.getType(); diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/PolyphenyDbSqlValidator.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/PolyphenyDbSqlValidator.java index 0c18ed2a2e..895e7deb1a 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/PolyphenyDbSqlValidator.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/PolyphenyDbSqlValidator.java @@ -20,6 +20,7 @@ import org.polypheny.db.adapter.java.JavaTypeFactory; import org.polypheny.db.algebra.operators.OperatorTable; import org.polypheny.db.algebra.type.AlgDataType; +import org.polypheny.db.catalog.Snapshot; import org.polypheny.db.prepare.PolyphenyDbCatalogReader; import org.polypheny.db.sql.language.SqlInsert; import org.polypheny.db.util.Conformance; @@ -32,10 +33,10 @@ public class PolyphenyDbSqlValidator extends SqlValidatorImpl { public PolyphenyDbSqlValidator( OperatorTable opTab, - PolyphenyDbCatalogReader catalogReader, + Snapshot snapshot, JavaTypeFactory typeFactory, Conformance conformance ) { - super( opTab, catalogReader, typeFactory, conformance ); + super( opTab, snapshot, typeFactory, conformance ); } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SchemaNamespace.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SchemaNamespace.java index 5eadb88a97..2d00e905e9 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SchemaNamespace.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SchemaNamespace.java @@ -22,9 +22,8 @@ import java.util.Objects; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; -import org.polypheny.db.nodes.validate.ValidatorTable; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.sql.language.SqlNode; -import org.polypheny.db.util.Moniker; import org.polypheny.db.util.Util; @@ -53,12 +52,11 @@ class SchemaNamespace extends AbstractNamespace { @Override protected AlgDataType validateImpl( AlgDataType targetRowType ) { final AlgDataTypeFactory.Builder builder = validator.getTypeFactory().builder(); - for ( Moniker moniker : validator.catalogReader.getAllSchemaObjectNames( names ) ) { - final List names1 = moniker.getFullyQualifiedNames(); - final ValidatorTable table = validator.catalogReader.getTable( names1 ); + /*for ( LogicalTable table : validator.snapshot.getLogicalTables( List.of( names.get( 0 ), names.get( 1 ) ) ) ) { + //final List names1 = moniker.getFullyQualifiedNames(); builder.add( Util.last( names1 ), null, table.getRowType() ); - } - return builder.build(); + }*/// todo dl + return targetRowType; //builder.build(); } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidator.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidator.java index 713b49a0b2..5f48b7f87e 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidator.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidator.java @@ -24,6 +24,7 @@ import org.polypheny.db.algebra.constant.NullCollation; import org.polypheny.db.algebra.operators.OperatorTable; import 
org.polypheny.db.algebra.type.AlgDataType; +import org.polypheny.db.catalog.Snapshot; import org.polypheny.db.nodes.Node; import org.polypheny.db.nodes.validate.Validator; import org.polypheny.db.nodes.validate.ValidatorCatalogReader; @@ -104,7 +105,7 @@ public interface SqlValidator extends Validator { * * @return catalog reader */ - ValidatorCatalogReader getCatalogReader(); + Snapshot getSnapshot(); /** * Returns the operator table used by this validator. diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorImpl.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorImpl.java index 18f9612b85..1529c8835a 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorImpl.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorImpl.java @@ -69,8 +69,11 @@ import org.polypheny.db.algebra.type.AlgDataTypeSystem; import org.polypheny.db.algebra.type.AlgRecordType; import org.polypheny.db.algebra.type.DynamicRecordType; +import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.Snapshot; import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.catalog.logistic.NamespaceType; +import org.polypheny.db.config.RuntimeConfig; import org.polypheny.db.languages.OperatorRegistry; import org.polypheny.db.languages.ParserPos; import org.polypheny.db.languages.QueryLanguage; @@ -85,12 +88,12 @@ import org.polypheny.db.nodes.NodeList; import org.polypheny.db.nodes.NodeVisitor; import org.polypheny.db.nodes.Operator; -import org.polypheny.db.nodes.validate.ValidatorCatalogReader; import org.polypheny.db.nodes.validate.ValidatorException; import org.polypheny.db.nodes.validate.ValidatorNamespace; import org.polypheny.db.nodes.validate.ValidatorScope; import org.polypheny.db.nodes.validate.ValidatorTable; import org.polypheny.db.plan.AlgOptEntity; +import org.polypheny.db.prepare.AlgOptEntityImpl; import org.polypheny.db.prepare.Prepare; import org.polypheny.db.rex.RexBuilder; import org.polypheny.db.rex.RexNode; @@ -103,7 +106,6 @@ import org.polypheny.db.runtime.Resources.ExInst; import org.polypheny.db.schema.ColumnStrategy; import org.polypheny.db.schema.Entity; -import org.polypheny.db.schema.PolyphenyDbSchema; import org.polypheny.db.schema.document.DocumentUtil; import org.polypheny.db.sql.language.SqlAggFunction; import org.polypheny.db.sql.language.SqlBasicCall; @@ -159,6 +161,7 @@ import org.polypheny.db.util.Moniker; import org.polypheny.db.util.MonikerImpl; import org.polypheny.db.util.NameMatcher; +import org.polypheny.db.util.NameMatchers; import org.polypheny.db.util.Pair; import org.polypheny.db.util.Static; import org.polypheny.db.util.Util; @@ -172,6 +175,8 @@ */ public class SqlValidatorImpl implements SqlValidatorWithHints { + NameMatcher nameMatcher = NameMatchers.withCaseSensitive( RuntimeConfig.RELATIONAL_CASE_SENSITIVE.getBoolean() ); + public static final Logger TRACER = PolyphenyDbTrace.PARSER_LOGGER; /** @@ -191,7 +196,9 @@ public class SqlValidatorImpl implements SqlValidatorWithHints { private final OperatorTable opTab; - final ValidatorCatalogReader catalogReader; + + @Getter + final Snapshot snapshot; /** * Maps ParsePosition strings to the {@link SqlIdentifier} identifier objects at these positions @@ -294,13 +301,12 @@ public class SqlValidatorImpl implements SqlValidatorWithHints { * Creates a validator. 
* * @param opTab Operator table - * @param catalogReader Catalog reader * @param typeFactory Type factory * @param conformance Compatibility mode */ - public SqlValidatorImpl( OperatorTable opTab, ValidatorCatalogReader catalogReader, AlgDataTypeFactory typeFactory, Conformance conformance ) { + public SqlValidatorImpl( OperatorTable opTab, Snapshot snapshot, AlgDataTypeFactory typeFactory, Conformance conformance ) { this.opTab = Objects.requireNonNull( opTab ); - this.catalogReader = Objects.requireNonNull( catalogReader ); + this.snapshot = Objects.requireNonNull( snapshot ); this.typeFactory = Objects.requireNonNull( typeFactory ); this.conformance = Objects.requireNonNull( conformance ); @@ -323,12 +329,6 @@ public Conformance getConformance() { } - @Override - public ValidatorCatalogReader getCatalogReader() { - return catalogReader; - } - - @Override public OperatorTable getOperatorTable() { return opTab; @@ -369,7 +369,7 @@ public SqlNodeList expandStar( SqlNodeList selectList, SqlSelect select, boolean select, Util.first( originalType, unknownType ), list, - catalogReader.nameMatcher.createSet(), + nameMatcher.createSet(), types, includeSystemVars ); } @@ -546,7 +546,7 @@ private boolean expandStar( List selectItems, Set aliases, List default: final SqlIdentifier prefixId = identifier.skipLast( 1 ); final SqlValidatorScope.ResolvedImpl resolved = new SqlValidatorScope.ResolvedImpl(); - final NameMatcher nameMatcher = scope.validator.catalogReader.nameMatcher; + final NameMatcher nameMatcher = scope.validator.nameMatcher; scope.resolve( prefixId.names, nameMatcher, true, resolved ); if ( resolved.count() == 0 ) { // e.g. "select s.t.* from e" or "select r.* from e" @@ -707,7 +707,7 @@ private void lookupFromHints( SqlNode node, SqlValidatorScope scope, ParserPos p if ( pos.toString().equals( id.getComponent( i ).getPos().toString() ) ) { final List objNames = new ArrayList<>(); SqlValidatorUtil.getSchemaObjectMonikers( - getCatalogReader(), + this.getSnapshot(), id.names.subList( 0, i + 1 ), objNames ); for ( Moniker objName : objNames ) { @@ -775,7 +775,7 @@ public final void lookupNameCompletionHints( SqlValidatorScope scope, List names, SqlValidator validator, Collection result ) { final List objNames = new ArrayList<>(); - SqlValidatorUtil.getSchemaObjectMonikers( validator.getCatalogReader(), names, objNames ); + SqlValidatorUtil.getSchemaObjectMonikers( validator.getSnapshot(), names, objNames ); for ( Moniker objName : objNames ) { if ( objName.getType() == MonikerType.FUNCTION ) { result.add( objName ); @@ -1023,7 +1023,7 @@ private SqlValidatorNamespace getNamespace( SqlNode node, SqlValidatorScope scop private SqlValidatorNamespace getNamespace( SqlIdentifier id, DelegatingScope scope ) { if ( id.isSimple() ) { - final NameMatcher nameMatcher = catalogReader.nameMatcher; + final NameMatcher nameMatcher = snapshot.nameMatcher; final SqlValidatorScope.ResolvedImpl resolved = new SqlValidatorScope.ResolvedImpl(); scope.resolve( id.names, nameMatcher, false, resolved ); if ( resolved.count() == 1 ) { @@ -1569,7 +1569,7 @@ public AlgDataType getValidatedNodeTypeIfKnown( SqlNode node ) { return getValidatedNodeType( original ); } if ( node instanceof SqlIdentifier ) { - return getCatalogReader().getNamedType( (SqlIdentifier) node ); + return SqlUtil.getNamedType( (SqlIdentifier) node, snapshot ); } return null; } @@ -1633,7 +1633,7 @@ AlgDataType deriveTypeImpl( SqlValidatorScope scope, SqlNode operand ) { public AlgDataType deriveConstructorType( SqlValidatorScope scope, 
SqlCall call, SqlFunction unresolvedConstructor, SqlFunction resolvedConstructor, List argTypes ) { SqlIdentifier sqlIdentifier = unresolvedConstructor.getSqlIdentifier(); assert sqlIdentifier != null; - AlgDataType type = catalogReader.getNamedType( sqlIdentifier ); + AlgDataType type = SqlUtil.getNamedType( sqlIdentifier, snapshot ); if ( type == null ) { // TODO: Proper type name formatting throw newValidationError( sqlIdentifier, RESOURCE.unknownDatatypeName( sqlIdentifier.toString() ) ); @@ -3003,9 +3003,8 @@ private void checkRollUpInUsing( SqlIdentifier identifier, SqlNode leftOrRight ) if ( leftOrRight instanceof SqlIdentifier ) { SqlIdentifier from = (SqlIdentifier) leftOrRight; CatalogEntity entity = findTable( - catalogReader.getRootSchema(), Util.last( from.names ), - catalogReader.nameMatcher.isCaseSensitive() ); + snapshot.nameMatcher.isCaseSensitive() ); String name = Util.last( identifier.names ); if ( entity != null && entity.isRolledUp( name ) ) { @@ -3073,7 +3072,7 @@ protected void validateJoin( SqlJoin join, SqlValidatorScope scope ) { // Join on fields that occur exactly once on each side. Ignore fields that occur more than once on either side. final AlgDataType leftRowType = getSqlNamespace( left ).getRowType(); final AlgDataType rightRowType = getSqlNamespace( right ).getRowType(); - final NameMatcher nameMatcher = catalogReader.nameMatcher; + final NameMatcher nameMatcher = snapshot.nameMatcher; List naturalColumnNames = SqlValidatorUtil.deriveNaturalJoinColumnList( nameMatcher, leftRowType, rightRowType ); // Check compatibility of the chosen columns. @@ -3144,7 +3143,7 @@ private AlgDataType validateUsingCol( SqlIdentifier id, SqlNode leftOrRight ) { String name = id.names.get( 0 ); final SqlValidatorNamespace namespace = getSqlNamespace( leftOrRight ); final AlgDataType rowType = namespace.getRowType(); - final NameMatcher nameMatcher = catalogReader.nameMatcher; + final NameMatcher nameMatcher = snapshot.nameMatcher; final AlgDataTypeField field = nameMatcher.field( rowType, name ); if ( field != null ) { if ( nameMatcher.frequency( rowType.getFieldNames(), name ) > 1 ) { @@ -3193,7 +3192,7 @@ protected void validateSelect( SqlSelect select, AlgDataType targetRowType ) { // Make sure that items in FROM clause have distinct aliases. 
final SelectScope fromScope = (SelectScope) getFromScope( select ); List names = fromScope.getChildNames(); - if ( !catalogReader.nameMatcher.isCaseSensitive() ) { + if ( !snapshot.nameMatcher.isCaseSensitive() ) { names = names.stream().map( s -> s.toUpperCase( Locale.ROOT ) ).collect( Collectors.toList() ); } final int duplicateAliasOrdinal = Util.firstDuplicate( names ); @@ -3395,8 +3394,8 @@ private boolean isRolledUpColumn( SqlIdentifier identifier, SqlValidatorScope sc } - private @Nullable CatalogEntity findTable( PolyphenyDbSchema schema, String tableName, boolean caseSensitive ) { - return schema.getTable( List.of( tableName ) ); + private @Nullable CatalogEntity findTable( String tableName, boolean caseSensitive ) { + return snapshot.getLogicalTable( List.of( tableName ) ); } @@ -3404,7 +3403,7 @@ private boolean isRolledUpColumn( SqlIdentifier identifier, SqlValidatorScope sc * Given a table alias, find the corresponding {@link Entity} associated with it */ private CatalogEntity findTable( String alias ) { - return findTable( catalogReader.getRootSchema(), alias, catalogReader.nameMatcher.isCaseSensitive() ); + return findTable( alias, snapshot.nameMatcher.isCaseSensitive() ); } @@ -3654,14 +3653,14 @@ public void validateWithItem( SqlWithItem withItem ) { public void validateSequenceValue( SqlValidatorScope scope, SqlIdentifier id ) { // Resolve identifier as a table. final SqlValidatorScope.ResolvedImpl resolved = new SqlValidatorScope.ResolvedImpl(); - scope.resolveTable( id.names, catalogReader.nameMatcher, SqlValidatorScope.Path.EMPTY, resolved ); + scope.resolveTable( id.names, snapshot.nameMatcher, SqlValidatorScope.Path.EMPTY, resolved ); if ( resolved.count() != 1 ) { throw newValidationError( id, RESOURCE.tableNameNotFound( id.toString() ) ); } // We've found a table. But is it a sequence? 
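The rolled-up-column guard and the duplicate-alias check reworked above condense into a short sketch; the table, column, and alias names are hypothetical, and findTable stands for the private helper defined above (illustration only, not part of the patch):

    // Rolled-up guard, cf. checkRollUpInUsing: resolve the last name component,
    // then ask the entity whether the referenced column is rolled up.
    CatalogEntity entity = findTable( "emps", snapshot.nameMatcher.isCaseSensitive() );
    if ( entity != null && entity.isRolledUp( "salary" ) ) {
        // the validator rejects such a reference
    }

    // Duplicate-alias check, cf. validateSelect: fold case before comparing
    // whenever the matcher is case-insensitive.
    List<String> names = Arrays.asList( "emp", "dept", "EMP" );
    if ( !snapshot.nameMatcher.isCaseSensitive() ) {
        names = names.stream().map( s -> s.toUpperCase( Locale.ROOT ) ).collect( Collectors.toList() );
    }
    int duplicate = Util.firstDuplicate( names ); // 2 after folding; -1 when all aliases are distinct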
//final SqlValidatorNamespace ns = resolved.only().namespace; /*if ( ns instanceof TableNamespace ) { - final Entity entity = ns.getTable().unwrap( Entity.class ); + final Entity entity = ns.getLogicalTable().unwrap( Entity.class ); switch ( entity.getJdbcTableType() ) { case SEQUENCE: case TEMPORARY_SEQUENCE: @@ -4000,7 +3999,7 @@ private void handleScalarSubQuery( SqlSelect parentSelect, SqlSelect selectItem, } - protected AlgDataType createTargetRowType( ValidatorTable table, SqlNodeList targetColumnList, boolean append ) { + protected AlgDataType createTargetRowType( CatalogEntity table, SqlNodeList targetColumnList, boolean append ) { return createTargetRowType( table, targetColumnList, append, false ); } @@ -4013,7 +4012,7 @@ protected AlgDataType createTargetRowType( ValidatorTable table, SqlNodeList tar * @param append Whether to append fields to those in baseRowType * @return Rowtype */ - protected AlgDataType createTargetRowType( ValidatorTable table, SqlNodeList targetColumnList, boolean append, boolean allowDynamic ) { + protected AlgDataType createTargetRowType( CatalogEntity table, SqlNodeList targetColumnList, boolean append, boolean allowDynamic ) { AlgDataType baseRowType = table.getRowType(); if ( targetColumnList == null ) { return baseRowType; @@ -4034,7 +4033,7 @@ protected AlgDataType createTargetRowType( ValidatorTable table, SqlNodeList tar baseRowType, typeFactory, id, - catalogReader, + snapshot, algOptEntity, allowDynamic ); @@ -4057,16 +4056,12 @@ protected AlgDataType createTargetRowType( ValidatorTable table, SqlNodeList tar public void validateInsert( SqlInsert insert ) { final SqlValidatorNamespace targetNamespace = getSqlNamespace( insert ); validateNamespace( targetNamespace, unknownType ); - final CatalogEntity algOptEntity = - SqlValidatorUtil.getAlgOptTable( + final CatalogEntity table = + SqlValidatorUtil.getLogicalEntity( targetNamespace, - catalogReader.unwrap( Prepare.CatalogReader.class ), + snapshot, null, null ); - final ValidatorTable table = - algOptEntity == null - ? 
targetNamespace.getTable()
-                        : algOptEntity.unwrap( ValidatorTable.class );

         boolean allowDynamic = false;
         if ( insert.getSchemaType() == NamespaceType.DOCUMENT ) {
@@ -4111,7 +4106,7 @@ public void validateInsert( SqlInsert insert ) {
     }

-    private void checkFieldCount( SqlNode node, ValidatorTable table, SqlNode source, AlgDataType logicalSourceRowType, AlgDataType logicalTargetRowType ) {
+    private void checkFieldCount( SqlNode node, CatalogEntity table, SqlNode source, AlgDataType logicalSourceRowType, AlgDataType logicalTargetRowType ) {
         final int sourceFieldCount = logicalSourceRowType.getFieldCount();
         final int targetFieldCount = logicalTargetRowType.getFieldCount();
         if ( sourceFieldCount != targetFieldCount ) {
@@ -4331,7 +4326,7 @@ public void validateDelete( SqlDelete call ) {
         final SqlValidatorNamespace targetNamespace = getSqlNamespace( call );
         validateNamespace( targetNamespace, unknownType );

-        final ValidatorTable table = targetNamespace.getTable();
+        final CatalogEntity table = targetNamespace.getTable();
         validateAccess( call.getTargetTable(), table, AccessEnum.DELETE );
     }

@@ -4341,16 +4336,12 @@ public void validateUpdate( SqlUpdate call ) {
         final SqlValidatorNamespace targetNamespace = getSqlNamespace( call );
         validateNamespace( targetNamespace, unknownType );

-        final CatalogEntity algOptEntity =
-                SqlValidatorUtil.getAlgOptTable(
+        final CatalogEntity table =
+                SqlValidatorUtil.getLogicalEntity(
                         targetNamespace,
-                        catalogReader.unwrap( Prepare.CatalogReader.class ),
+                        snapshot,
                         null,
                         null );
-        final ValidatorTable table =
-                algOptEntity == null
-                        ? targetNamespace.getTable()
-                        : algOptEntity.unwrap( ValidatorTable.class );

         final AlgDataType targetRowType =
                 createTargetRowType(
@@ -4380,7 +4371,7 @@ public void validateMerge( SqlMerge call ) {
         IdentifierNamespace targetNamespace = (IdentifierNamespace) getSqlNamespace( call.getTargetTable() );
         validateNamespace( targetNamespace, unknownType );

-        ValidatorTable table = targetNamespace.getTable();
+        CatalogEntity table = targetNamespace.getTable();
         validateAccess( call.getTargetTable(), table, AccessEnum.UPDATE );

         AlgDataType targetRowType = unknownType;
@@ -4417,11 +4408,11 @@
      * @param table Table
      * @param requiredAccess Access requested on table
      */
-    private void validateAccess( SqlNode node, ValidatorTable table, AccessEnum requiredAccess ) {
+    private void validateAccess( SqlNode node, CatalogEntity table, AccessEnum requiredAccess ) {
         if ( table != null ) {
-            AccessType access = table.getAllowedAccess();
+            AccessType access = AccessType.ALL;
             if ( !access.allowsAccess( requiredAccess ) ) {
-                throw newValidationError( node, RESOURCE.accessNotAllowed( requiredAccess.name(), table.getQualifiedName().toString() ) );
+                throw newValidationError( node, RESOURCE.accessNotAllowed( requiredAccess.name(), table.name ) );
             }
         }
     }

@@ -4625,7 +4617,7 @@ public void setOriginal( SqlNode expr, SqlNode original ) {

     SqlValidatorNamespace lookupFieldNamespace( AlgDataType rowType, String name ) {
-        final NameMatcher nameMatcher = catalogReader.nameMatcher;
+        final NameMatcher nameMatcher = snapshot.nameMatcher;
         final AlgDataTypeField field = nameMatcher.field( rowType, name );
         if ( field == null ) {
             return null;
         }
@@ -4859,7 +4851,7 @@ private SqlNode navigationInMeasure( SqlNode node, boolean allRows ) {

     private void validateDefinitions( SqlMatchRecognize mr, MatchRecognizeScope scope ) {
-        final Set<String> aliases = 
final Set aliases = snapshot.nameMatcher.createSet(); for ( SqlNode item : mr.getPatternDefList().getSqlList() ) { final String alias = alias( item ); if ( !aliases.add( alias ) ) { @@ -5078,11 +5070,11 @@ private List getFieldOrigin( SqlNode sqlQuery, int i ) { if ( selectItem instanceof SqlIdentifier ) { final SqlQualified qualified = scope.fullyQualify( (SqlIdentifier) selectItem ); SqlValidatorNamespace namespace = qualified.namespace; - final ValidatorTable table = namespace.getTable(); + final CatalogEntity table = namespace.getTable(); if ( table == null ) { return null; } - final List origin = new ArrayList<>( table.getQualifiedName() ); + final List origin = List.of( table.name ); for ( String name : qualified.suffix() ) { namespace = namespace.lookupChild( name ); if ( namespace == null ) { @@ -5396,7 +5388,7 @@ public AlgDataType visit( Identifier id ) { // REVIEW jvs: The name resolution rules used here are supposed to match SQL:2003 Part 2 Section 6.6 (identifier chain), but we don't currently have enough // information to get everything right. In particular, routine parameters are currently looked up via resolve; we could do a better job if they were looked up via resolveColumn. - final NameMatcher nameMatcher = catalogReader.nameMatcher; + final NameMatcher nameMatcher = snapshot.nameMatcher; final SqlValidatorScope.ResolvedImpl resolved = new SqlValidatorScope.ResolvedImpl(); scope.resolve( id.getNames().subList( 0, i ), nameMatcher, false, resolved ); if ( resolved.count() == 1 ) { @@ -5435,7 +5427,7 @@ public AlgDataType visit( Identifier id ) { name = "*"; field = null; } else { - final NameMatcher nameMatcher = catalogReader.nameMatcher; + final NameMatcher nameMatcher = snapshot.nameMatcher; field = nameMatcher.field( type, name ); } if ( field == null ) { @@ -5614,7 +5606,7 @@ public SqlNode visit( Identifier id ) { String alias = id.getSimple(); final SqlValidatorNamespace selectNs = getSqlNamespace( select ); final AlgDataType rowType = selectNs.getRowTypeSansSystemColumns(); - final NameMatcher nameMatcher = catalogReader.nameMatcher; + final NameMatcher nameMatcher = snapshot.nameMatcher; AlgDataTypeField field = nameMatcher.field( rowType, alias ); if ( field != null ) { return nthSelectItem( field.getIndex(), id.getPos() ); @@ -5663,7 +5655,7 @@ public SqlNode visit( Identifier id ) { : validator.getConformance().isGroupByAlias()) ) { String name = id.getSimple(); SqlNode expr = null; - final NameMatcher nameMatcher = validator.catalogReader.nameMatcher; + final NameMatcher nameMatcher = validator.snapshot.nameMatcher; int n = 0; for ( Node s : select.getSqlSelectList() ) { final String alias = SqlValidatorUtil.getAlias( (SqlNode) s, -1 ); @@ -6140,7 +6132,7 @@ private class Permute { private AlgDataTypeField field( String name ) { - return catalogReader.nameMatcher.field( rowType, name ); + return snapshot.nameMatcher.field( rowType, name ); } @@ -6151,7 +6143,7 @@ private List usingNames( SqlJoin join ) { switch ( join.getConditionType() ) { case USING: final ImmutableList.Builder list = ImmutableList.builder(); - final Set names = catalogReader.nameMatcher.createSet(); + final Set names = snapshot.nameMatcher.createSet(); for ( Node node : (SqlNodeList) join.getCondition() ) { final String name = ((SqlIdentifier) node).getSimple(); if ( names.add( name ) ) { @@ -6163,7 +6155,7 @@ private List usingNames( SqlJoin join ) { if ( join.isNatural() ) { final AlgDataType t0 = getValidatedNodeType( join.getLeft() ); final AlgDataType t1 = getValidatedNodeType( 
join.getRight() );
-                return SqlValidatorUtil.deriveNaturalJoinColumnList( catalogReader.nameMatcher, t0, t1 );
+                return SqlValidatorUtil.deriveNaturalJoinColumnList( snapshot.nameMatcher, t0, t1 );
             }
         }
         return null;
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorNamespace.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorNamespace.java
index df6379fb96..20446b69c4 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorNamespace.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorNamespace.java
@@ -21,6 +21,7 @@
 import org.polypheny.db.algebra.constant.Modality;
 import org.polypheny.db.algebra.constant.Monotonicity;
 import org.polypheny.db.algebra.type.AlgDataType;
+import org.polypheny.db.catalog.entity.CatalogEntity;
 import org.polypheny.db.nodes.validate.ValidatorNamespace;
 import org.polypheny.db.nodes.validate.ValidatorTable;
 import org.polypheny.db.sql.language.SqlIdentifier;
@@ -54,7 +55,7 @@ public interface SqlValidatorNamespace extends ValidatorNamespace {
     /**
      * Returns the underlying table, or null if there is none.
      */
-    ValidatorTable getTable();
+    CatalogEntity getTable();

     /**
      * Returns the type of this namespace.
@@ -93,7 +94,7 @@ public interface SqlValidatorNamespace extends ValidatorNamespace {
     /**
      * Returns the parse tree node at the root of this namespace.
      *
-     * @return parse tree node; null for {@link TableNamespace}
+     * @return parse tree node; null for {@link EntityNamespace}
      */
     SqlNode getNode();

@@ -150,7 +151,7 @@ public interface SqlValidatorNamespace extends ValidatorNamespace {
     /**
      * If this namespace resolves to another namespace, returns that namespace, following links to the end of the chain.
      *
-     * A {@code WITH}) clause defines table names that resolve to queries (the body of the with-item). An {@link IdentifierNamespace} typically resolves to a {@link TableNamespace}.
+     * A {@code WITH} clause defines table names that resolve to queries (the body of the with-item). An {@link IdentifierNamespace} typically resolves to an {@link EntityNamespace}.
      *
      * You must not call this method before {@link #validate(AlgDataType)} has completed.
      */
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorScope.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorScope.java
index aee7b7c4c2..fe61255f3e 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorScope.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorScope.java
@@ -166,7 +166,7 @@ public interface SqlValidatorScope extends ValidatorScope {
     SqlValidatorNamespace getTableNamespace( List<String> names );

     /**
-     * Looks up a table in this scope from its name. If found, calls {@link Resolved#resolve(List, NameMatcher, boolean, Resolved)}. {@link TableNamespace} that wraps it. If the "table" is defined in a {@code WITH} clause it may be a query, not a table after all.
+     * Looks up a table in this scope from its name. If found, calls {@link Resolved#resolve(List, NameMatcher, boolean, Resolved)} on the {@link EntityNamespace} that wraps it. If the "table" is defined in a {@code WITH} clause it may be a query, not a table after all.
      *
      * The name matcher is not null, and one typically uses {@link ValidatorCatalogReader#nameMatcher}.
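With getTable() now returning a CatalogEntity, callers narrow the result via unwrap and must tolerate null, as the surrounding hunks do. A rough sketch of that pattern (assumed semantics: unwrap returns null when the entity cannot be viewed as the requested class; not code from the patch):

    CatalogEntity entity = namespace.getTable();             // null for non-table namespaces
    if ( entity != null ) {
        AlgDataType rowType = entity.getRowType();           // row type access, as in createTargetRowType
        Entity schemaEntity = entity.unwrap( Entity.class ); // null when the view does not apply
        if ( schemaEntity != null ) {
            // schema-level handling, e.g. the extensible-entity path in EntityNamespace.extend
        }
    }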
* diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorUtil.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorUtil.java index 50c855e4a3..1dd550a143 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorUtil.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorUtil.java @@ -38,17 +38,16 @@ import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.algebra.type.AlgDataTypeFieldImpl; import org.polypheny.db.algebra.type.AlgDataTypeSystem; +import org.polypheny.db.catalog.Snapshot; import org.polypheny.db.catalog.entity.CatalogEntity; -import org.polypheny.db.catalog.entity.LogicalCollection; +import org.polypheny.db.catalog.entity.logical.LogicalCollection; import org.polypheny.db.catalog.entity.logical.LogicalGraph; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.languages.OperatorRegistry; import org.polypheny.db.languages.ParserPos; import org.polypheny.db.nodes.Node; -import org.polypheny.db.nodes.validate.ValidatorCatalogReader; import org.polypheny.db.nodes.validate.ValidatorTable; import org.polypheny.db.plan.AlgOptEntity; -import org.polypheny.db.plan.AlgOptSchemaWithSampling; -import org.polypheny.db.prepare.Prepare; import org.polypheny.db.schema.CustomColumnResolvingEntity; import org.polypheny.db.schema.Entity; import org.polypheny.db.schema.ExtensibleEntity; @@ -85,55 +84,55 @@ private SqlValidatorUtil() { * Otherwise, returns null. * * @param namespace Namespace - * @param catalogReader Schema * @param datasetName Name of sample dataset to substitute, or null to use the regular table * @param usedDataset Output parameter which is set to true if a sample dataset is found; may be null */ - public static CatalogEntity getAlgOptTable( SqlValidatorNamespace namespace, Prepare.CatalogReader catalogReader, String datasetName, boolean[] usedDataset ) { - final TableNamespace tableNamespace = namespace.unwrap( TableNamespace.class ); - return catalogReader.getRootSchema().getTable( tableNamespace.getTable().getQualifiedName() ); + public static CatalogEntity getLogicalEntity( SqlValidatorNamespace namespace, Snapshot snapshot, String datasetName, boolean[] usedDataset ) { + final EntityNamespace entityNamespace = namespace.unwrap( EntityNamespace.class ); + return entityNamespace.getTable(); /*if ( namespace.isWrapperFor( TableNamespace.class ) ) { final TableNamespace tableNamespace = namespace.unwrap( TableNamespace.class ); - return getAlgOptTable( tableNamespace, catalogReader, datasetName, usedDataset, tableNamespace.extendedFields ); + return getLogicalEntity( tableNamespace, catalogReader, datasetName, usedDataset, tableNamespace.extendedFields ); } else if ( namespace.isWrapperFor( SqlValidatorImpl.DmlNamespace.class ) ) { final SqlValidatorImpl.DmlNamespace dmlNamespace = namespace.unwrap( SqlValidatorImpl.DmlNamespace.class ); final SqlValidatorNamespace resolvedNamespace = dmlNamespace.resolve(); if ( resolvedNamespace.isWrapperFor( TableNamespace.class ) ) { final TableNamespace tableNamespace = resolvedNamespace.unwrap( TableNamespace.class ); - final ValidatorTable validatorTable = tableNamespace.getTable(); + final ValidatorTable validatorTable = tableNamespace.getLogicalTable(); final AlgDataTypeFactory typeFactory = AlgDataTypeFactory.DEFAULT; final List extendedFields = dmlNamespace.extendList == null ? 
ImmutableList.of() : getExtendedColumns( typeFactory, validatorTable, dmlNamespace.extendList ); - return getAlgOptTable( tableNamespace, catalogReader, datasetName, usedDataset, extendedFields ); + return getLogicalEntity( tableNamespace, catalogReader, datasetName, usedDataset, extendedFields ); } } return null;*/ } - private static AlgOptEntity getAlgOptTable( TableNamespace tableNamespace, Prepare.CatalogReader catalogReader, String datasetName, boolean[] usedDataset, List extendedFields ) { - final List names = tableNamespace.getTable().getQualifiedName(); - AlgOptEntity table; + private static LogicalTable getLogicalEntity( EntityNamespace entityNamespace, Snapshot snapshot, String datasetName, boolean[] usedDataset, List extendedFields ) { + // final List names = entityNamespace.getTable().getQualifiedName(); + /*AlgOptEntity table; if ( datasetName != null && catalogReader instanceof AlgOptSchemaWithSampling ) { final AlgOptSchemaWithSampling reader = (AlgOptSchemaWithSampling) catalogReader; - table = reader.getTableForMember( names, datasetName, usedDataset ); + table = snapshot.getTableForMember( names, datasetName, usedDataset ); } else { // Schema does not support substitution. Ignore the data set, if any. table = catalogReader.getTableForMember( names ); - } - if ( !extendedFields.isEmpty() ) { + }*/ + + /*if ( !extendedFields.isEmpty() ) { // todo dl table = table.extend( extendedFields ); - } - return table; + }*/ + return entityNamespace.getTable().unwrap( LogicalTable.class ); } /** * Gets a list of extended columns with field indices to the underlying table. */ - public static List getExtendedColumns( AlgDataTypeFactory typeFactory, ValidatorTable table, SqlNodeList extendedColumns ) { + public static List getExtendedColumns( AlgDataTypeFactory typeFactory, CatalogEntity table, SqlNodeList extendedColumns ) { final ImmutableList.Builder extendedFields = ImmutableList.builder(); final ExtensibleEntity extTable = table.unwrap( ExtensibleEntity.class ); int extendedFieldOffset = @@ -264,13 +263,13 @@ public static String getAlias( SqlNode node, int ordinal ) { /** * Factory method for {@link SqlValidator}. 
*/ - public static SqlValidatorWithHints newValidator( OperatorTable opTab, ValidatorCatalogReader catalogReader, AlgDataTypeFactory typeFactory, Conformance conformance ) { + public static SqlValidatorWithHints newValidator( OperatorTable opTab, Snapshot catalogReader, AlgDataTypeFactory typeFactory, Conformance conformance ) { return new SqlValidatorImpl( opTab, catalogReader, typeFactory, conformance ); } - public static AlgDataTypeField getTargetField( AlgDataType rowType, AlgDataTypeFactory typeFactory, SqlIdentifier id, ValidatorCatalogReader catalogReader, CatalogEntity table ) { - return getTargetField( rowType, typeFactory, id, catalogReader, table, false ); + public static AlgDataTypeField getTargetField( AlgDataType rowType, AlgDataTypeFactory typeFactory, SqlIdentifier id, Snapshot snapshot, CatalogEntity table ) { + return getTargetField( rowType, typeFactory, id, snapshot, table, false ); } @@ -282,11 +281,11 @@ public static AlgDataTypeField getTargetField( AlgDataType rowType, AlgDataTypeF * @param table the target table or null if it is not a RelOptTable instance * @return the target field or null if the name cannot be resolved */ - public static AlgDataTypeField getTargetField( AlgDataType rowType, AlgDataTypeFactory typeFactory, SqlIdentifier id, ValidatorCatalogReader catalogReader, CatalogEntity table, boolean isDocument ) { + public static AlgDataTypeField getTargetField( AlgDataType rowType, AlgDataTypeFactory typeFactory, SqlIdentifier id, Snapshot snapshot, CatalogEntity table, boolean isDocument ) { final Entity t = table == null ? null : table.unwrap( Entity.class ); if ( !(t instanceof CustomColumnResolvingEntity) ) { - final NameMatcher nameMatcher = catalogReader.nameMatcher; + final NameMatcher nameMatcher = snapshot.nameMatcher; AlgDataTypeField typeField = nameMatcher.field( rowType, id.getSimple() ); if ( typeField == null && isDocument ) { @@ -309,7 +308,7 @@ public static AlgDataTypeField getTargetField( AlgDataType rowType, AlgDataTypeF } - public static void getSchemaObjectMonikers( ValidatorCatalogReader catalogReader, List names, List hints ) { + public static void getSchemaObjectMonikers( Snapshot snapshot, List names, List hints ) { // Assume that the last name is 'dummy' or similar. 
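The newValidator factory above forwards its arguments unchanged; only the Snapshot parameter is new. A minimal sketch of a call site under that assumption (illustration only):

    SqlValidatorWithHints createValidator( OperatorTable opTab, Snapshot snapshot, AlgDataTypeFactory typeFactory, Conformance conformance ) {
        // identical to the pre-refactoring call except that a Snapshot replaces the catalog reader
        return SqlValidatorUtil.newValidator( opTab, snapshot, typeFactory, conformance );
    }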
List subNames = Util.skipLast( names ); @@ -489,7 +488,7 @@ private static ImmutableBitSet analyzeGroupExpr( SqlValidatorScope scope, GroupA String originalRelName = expr.names.get( 0 ); String originalFieldName = expr.names.get( 1 ); - final NameMatcher nameMatcher = scope.getValidator().getCatalogReader().nameMatcher; + final NameMatcher nameMatcher = scope.getValidator().getSnapshot().nameMatcher; final SqlValidatorScope.ResolvedImpl resolved = new SqlValidatorScope.ResolvedImpl(); scope.resolve( ImmutableList.of( originalRelName ), nameMatcher, false, resolved ); @@ -622,11 +621,11 @@ public static boolean isTableRelational( SqlValidatorImpl validator ) { return false; } SqlIdentifier id = ((SqlIdentifier) validator.getTableScope().getNode()); - LogicalGraph graph = validator.getCatalogReader().getRootSchema().getGraph( id.names ); + LogicalGraph graph = validator.getSnapshot().getLogicalGraph( id.names ); if ( graph != null ) { return false; } - LogicalCollection collection = validator.getCatalogReader().getRootSchema().getCollection( id.names ); + LogicalCollection collection = validator.getSnapshot().getLogicalCollection( id.names ); if ( collection != null ) { return false; } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/UnnestNamespace.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/UnnestNamespace.java index 504a83c2c2..6b87854c12 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/UnnestNamespace.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/UnnestNamespace.java @@ -18,7 +18,7 @@ import org.polypheny.db.algebra.type.AlgDataType; -import org.polypheny.db.nodes.validate.ValidatorTable; +import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.sql.language.SqlCall; import org.polypheny.db.sql.language.SqlIdentifier; import org.polypheny.db.sql.language.SqlNode; @@ -45,7 +45,7 @@ class UnnestNamespace extends AbstractNamespace { @Override - public ValidatorTable getTable() { + public CatalogEntity getTable() { final SqlNode toUnnest = unnest.operand( 0 ); if ( toUnnest instanceof SqlIdentifier ) { // When operand of SqlIdentifier type does not have struct, fake a table for UnnestNamespace diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/WithScope.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/WithScope.java index 5c4aec00ae..8d9127a2eb 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/WithScope.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/WithScope.java @@ -69,7 +69,7 @@ public void resolveTable( List names, NameMatcher nameMatcher, Path path if ( names.size() == 1 && names.equals( withItem.name.names ) ) { //final SqlValidatorNamespace ns = validator.getSqlNamespace( withItem ); //final Step path2 = path.plus( ns.getRowType(), 0, names.get( 0 ), StructKind.FULLY_QUALIFIED ); - CatalogEntity entity = validator.catalogReader.getRootSchema().getTable( names ); + CatalogEntity entity = validator.snapshot.getLogicalTable( names ); resolved.found( entity ); return; } @@ -82,7 +82,7 @@ public void resolve( List names, NameMatcher nameMatcher, boolean deep, if ( names.size() == 1 && names.equals( withItem.name.names ) ) { final SqlValidatorNamespace ns = validator.getSqlNamespace( withItem ); final Step path = Path.EMPTY.plus( ns.getRowType(), 0, names.get( 0 ), 
StructKind.FULLY_QUALIFIED ); - CatalogEntity entity = validator.catalogReader.getRootSchema().getTable( names ); + CatalogEntity entity = validator.snapshot.getLogicalTable( names ); resolved.found( entity ); return; } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/sql2alg/SqlToAlgConverter.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/sql2alg/SqlToAlgConverter.java index f45f28f82c..5dfb6e5ae1 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/sql2alg/SqlToAlgConverter.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/sql2alg/SqlToAlgConverter.java @@ -121,6 +121,7 @@ import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; import org.polypheny.db.algebra.type.AlgDataTypeField; +import org.polypheny.db.catalog.Snapshot; import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.logistic.EntityType; @@ -149,7 +150,6 @@ import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.plan.Convention; import org.polypheny.db.prepare.AlgOptEntityImpl; -import org.polypheny.db.prepare.Prepare.CatalogReader; import org.polypheny.db.rex.RexBuilder; import org.polypheny.db.rex.RexCall; import org.polypheny.db.rex.RexCallBinding; @@ -255,7 +255,7 @@ public class SqlToAlgConverter implements NodeToAlgConverter { protected final SqlValidator validator; protected final RexBuilder rexBuilder; - protected final CatalogReader catalogReader; + protected final Snapshot snapshot; protected final AlgOptCluster cluster; private SubQueryConverter subQueryConverter; protected final List leaves = new ArrayList<>(); @@ -285,13 +285,13 @@ public class SqlToAlgConverter implements NodeToAlgConverter { /* Creates a converter. */ - public SqlToAlgConverter( SqlValidator validator, CatalogReader catalogReader, AlgOptCluster cluster, SqlRexConvertletTable convertletTable, Config config ) { + public SqlToAlgConverter( SqlValidator validator, Snapshot snapshot, AlgOptCluster cluster, SqlRexConvertletTable convertletTable, Config config ) { this.opTab = (validator == null) ? 
SqlStdOperatorTable.instance() : validator.getOperatorTable(); this.validator = validator; - this.catalogReader = catalogReader; + this.snapshot = snapshot; this.subQueryConverter = new NoOpSubQueryConverter(); this.rexBuilder = cluster.getRexBuilder(); this.typeFactory = rexBuilder.getTypeFactory(); @@ -412,7 +412,7 @@ private void checkConvertedType( Node query, AlgNode result ) { Pair.right( validatedFields ), ValidatorUtil.uniquify( Pair.left( validatedFields ), - catalogReader.nameMatcher.isCaseSensitive() ) ); + false ) ); /*int diff = validatedFields.size() - result.getRowType().getFieldList().size(); if ( diff > 0 ) { for ( int i = 0; i < diff; i++ ) { @@ -1868,7 +1868,7 @@ protected void convertFrom( Blackboard bb, SqlNode from ) { if ( isNatural ) { final AlgDataType leftRowType = leftNamespace.getRowType(); final AlgDataType rightRowType = rightNamespace.getRowType(); - final List columnList = SqlValidatorUtil.deriveNaturalJoinColumnList( catalogReader.nameMatcher, leftRowType, rightRowType ); + final List columnList = SqlValidatorUtil.deriveNaturalJoinColumnList( snapshot.nameMatcher, leftRowType, rightRowType ); conditionExp = convertUsing( leftNamespace, rightNamespace, columnList ); } else { conditionExp = @@ -2119,11 +2119,11 @@ private void convertIdentifier( Blackboard bb, SqlIdentifier id, SqlNodeList ext } final String datasetName = datasetStack.isEmpty() ? null : datasetStack.peek(); final boolean[] usedDataset = { false }; - CatalogEntity table = SqlValidatorUtil.getAlgOptTable( fromNamespace, catalogReader, datasetName, usedDataset ); + CatalogEntity table = SqlValidatorUtil.getLogicalEntity( fromNamespace, snapshot, datasetName, usedDataset ); if ( extendedColumns != null && extendedColumns.size() > 0 ) { assert table != null; final ValidatorTable validatorTable = table.unwrap( ValidatorTable.class ); - final List extendedFields = SqlValidatorUtil.getExtendedColumns( validator.getTypeFactory(), validatorTable, extendedColumns ); + final List extendedFields = SqlValidatorUtil.getExtendedColumns( validator.getTypeFactory(), table, extendedColumns ); table = table; // table.extend( extendedFields ); todo dl } final AlgNode tableRel; @@ -2160,8 +2160,8 @@ protected void convertCollectionTable( Blackboard bb, SqlCall call ) { final SqlCallBinding callBinding = new SqlCallBinding( bb.scope.getValidator(), bb.scope, call ); if ( operator instanceof SqlUserDefinedTableMacro ) { final SqlUserDefinedTableMacro udf = (SqlUserDefinedTableMacro) operator; - //final TranslatableEntity table = udf.getTable( typeFactory, callBinding.sqlOperands() ); - //final LogicalTable catalogTable = Catalog.getInstance().getTable( table.getId() ); + //final TranslatableEntity table = udf.getLogicalTable( typeFactory, callBinding.sqlOperands() ); + //final LogicalTable catalogTable = Catalog.getInstance().getLogicalTable( table.getId() ); //final AlgDataType rowType = table.getRowType( typeFactory ); //AlgOptEntity algOptEntity = AlgOptEntityImpl.create( null, rowType, table, catalogTable, null ); //AlgNode converted = toAlg( algOptEntity ); @@ -2414,7 +2414,7 @@ private RexNode convertJoinCondition( Blackboard bb, SqlValidatorNamespace leftN */ private @Nonnull RexNode convertUsing( SqlValidatorNamespace leftNamespace, SqlValidatorNamespace rightNamespace, List nameList ) { - final NameMatcher nameMatcher = catalogReader.nameMatcher; + final NameMatcher nameMatcher = snapshot.nameMatcher; final List list = new ArrayList<>(); for ( String name : nameList ) { List operands = new ArrayList<>(); @@ 
-2945,10 +2945,10 @@ protected CatalogEntity getTargetTable( SqlNode call ) { final SqlValidatorNamespace targetNs = validator.getSqlNamespace( call ); if ( targetNs.isWrapperFor( SqlValidatorImpl.DmlNamespace.class ) ) { final SqlValidatorImpl.DmlNamespace dmlNamespace = targetNs.unwrap( SqlValidatorImpl.DmlNamespace.class ); - return SqlValidatorUtil.getAlgOptTable( dmlNamespace, catalogReader, null, null ); + return SqlValidatorUtil.getLogicalEntity( dmlNamespace, snapshot, null, null ); } final SqlValidatorNamespace resolvedNamespace = targetNs.resolve(); - return SqlValidatorUtil.getAlgOptTable( resolvedNamespace, catalogReader, null, null ); + return SqlValidatorUtil.getLogicalEntity( resolvedNamespace, snapshot, null, null ); } @@ -2989,7 +2989,7 @@ protected AlgNode convertColumnList( final SqlInsert call, AlgNode source ) { // Walk the name list and place the associated value in the expression list according to the ordinal value returned from the table construct, leaving nulls in the list for columns // that are not referenced. - final NameMatcher nameMatcher = catalogReader.nameMatcher; + final NameMatcher nameMatcher = snapshot.nameMatcher; for ( Pair p : Pair.zip( targetColumnNames, columnExprs ) ) { AlgDataTypeField field = nameMatcher.field( targetRowType, p.left ); @@ -3044,7 +3044,7 @@ private Blackboard createInsertBlackboard( CatalogEntity targetTable, RexNode so } - private InitializerExpressionFactory getInitializerFactory( ValidatorTable validatorTable ) { + private InitializerExpressionFactory getInitializerFactory( CatalogEntity validatorTable ) { // We might unwrap a null instead of a InitializerExpressionFactory. final Entity entity = unwrap( validatorTable, Entity.class ); if ( entity != null ) { @@ -3109,7 +3109,7 @@ protected void collectInsertTargets( SqlInsert call, final RexNode sourceRef, fi tableRowType, typeFactory, id, - catalogReader, + snapshot, targetTable, allowDynamic ); assert field != null : "column " + id.toString() + " not found"; @@ -3184,7 +3184,7 @@ private AlgNode convertUpdate( SqlUpdate call ) { final AlgDataType targetRowType = targetTable.getRowType(); for ( SqlNode node : call.getTargetColumnList().getSqlList() ) { SqlIdentifier id = (SqlIdentifier) node; - AlgDataTypeField field = SqlValidatorUtil.getTargetField( targetRowType, typeFactory, id, catalogReader, targetTable ); + AlgDataTypeField field = SqlValidatorUtil.getTargetField( targetRowType, typeFactory, id, snapshot, targetTable ); assert field != null : "column " + id.toString() + " not found"; targetColumnNameList.add( field.getName() ); } @@ -3216,7 +3216,7 @@ private AlgNode convertMerge( SqlMerge call ) { targetRowType, typeFactory, id, - catalogReader, + snapshot, targetTable ); assert field != null : "column " + id.toString() + " not found"; targetColumnNameList.add( field.getName() ); @@ -3504,7 +3504,7 @@ private void convertSelectList( Blackboard bb, SqlSelect select, List o fieldNames.add( deriveAlias( expr, aliases, i ) ); } - fieldNames = ValidatorUtil.uniquify( fieldNames, catalogReader.nameMatcher.isCaseSensitive() ); + fieldNames = ValidatorUtil.uniquify( fieldNames, snapshot.nameMatcher.isCaseSensitive() ); algBuilder.push( bb.root ).projectNamed( exprs, fieldNames, true ); bb.setRoot( algBuilder.build(), false ); @@ -3854,7 +3854,7 @@ Pair> lookupExp( SqlQualified qualified ) { } return Pair.of( node, null ); } - final NameMatcher nameMatcher = scope.getValidator().getCatalogReader().nameMatcher; + final NameMatcher nameMatcher = 
scope.getValidator().getSnapshot().nameMatcher; final SqlValidatorScope.ResolvedImpl resolved = new SqlValidatorScope.ResolvedImpl(); scope.resolve( qualified.prefix(), nameMatcher, false, resolved ); if ( !(resolved.count() == 1) ) { diff --git a/plugins/sql-language/src/test/java/org/polypheny/db/sql/FrameworksTest.java b/plugins/sql-language/src/test/java/org/polypheny/db/sql/FrameworksTest.java index 12a6813203..ddf30628e3 100644 --- a/plugins/sql-language/src/test/java/org/polypheny/db/sql/FrameworksTest.java +++ b/plugins/sql-language/src/test/java/org/polypheny/db/sql/FrameworksTest.java @@ -51,6 +51,7 @@ import org.polypheny.db.algebra.type.AlgDataTypeFactory; import org.polypheny.db.algebra.type.AlgDataTypeSystem; import org.polypheny.db.algebra.type.AlgDataTypeSystemImpl; +import org.polypheny.db.catalog.Snapshot; import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.config.RuntimeConfig; @@ -164,7 +165,7 @@ public CatalogEntity getCatalogEntity() { */ @Test public void testCreateRootSchemaWithNoMetadataSchema() { - SchemaPlus rootSchema = Frameworks.createRootSchema( false ); + SchemaPlus rootSchema = Frameworks.createSnapshot( false ); assertThat( rootSchema.getSubNamespaceNames().size(), equalTo( 0 ) ); } @@ -180,7 +181,7 @@ public void testCreateRootSchemaWithNoMetadataSchema() { public void testTypeSystem() { checkTypeSystem( 19, Frameworks.newConfigBuilder() .prepareContext( new ContextImpl( - PolyphenyDbSchema.from( Frameworks.createRootSchema( false ) ), + PolyphenyDbSchema.from( Frameworks.createSnapshot( false ) ), new SlimDataContext() { @Override public JavaTypeFactory getTypeFactory() { @@ -194,7 +195,7 @@ public JavaTypeFactory getTypeFactory() { .build() ); checkTypeSystem( 25, Frameworks.newConfigBuilder().typeSystem( HiveLikeTypeSystem.INSTANCE ) .prepareContext( new ContextImpl( - PolyphenyDbSchema.from( Frameworks.createRootSchema( false ) ), + PolyphenyDbSchema.from( Frameworks.createSnapshot( false ) ), new SlimDataContext() { @Override public JavaTypeFactory getTypeFactory() { @@ -208,7 +209,7 @@ public JavaTypeFactory getTypeFactory() { .build() ); checkTypeSystem( 31, Frameworks.newConfigBuilder().typeSystem( new HiveLikeTypeSystem2() ) .prepareContext( new ContextImpl( - PolyphenyDbSchema.from( Frameworks.createRootSchema( false ) ), + PolyphenyDbSchema.from( Frameworks.createSnapshot( false ) ), new SlimDataContext() { @Override public JavaTypeFactory getTypeFactory() { @@ -246,7 +247,7 @@ public Void apply( AlgOptCluster cluster, AlgOptSchema algOptSchema, SchemaPlus @Test public void testFrameworksValidatorWithIdentifierExpansion() throws Exception { final SchemaPlus schema = Frameworks - .createRootSchema( true ) + .createSnapshot( true ) .add( "hr", new ReflectiveSchema( new HrSchema(), -1 ), NamespaceType.RELATIONAL ); final FrameworkConfig config = Frameworks.newConfigBuilder() @@ -281,13 +282,13 @@ public JavaTypeFactory getTypeFactory() { @Test public void testSchemaPath() { final SchemaPlus schema = Frameworks - .createRootSchema( true ) + .createSnapshot( true ) .add( "hr", new ReflectiveSchema( new HrSchema(), -1 ), NamespaceType.RELATIONAL ); final FrameworkConfig config = Frameworks.newConfigBuilder() .defaultSchema( schema ) .build(); - final Path path = Schemas.path( config.getDefaultSchema() ); + final Path path = Schemas.path( config.getSnapshot() ); assertThat( path.size(), is( 2 ) ); assertThat( path.get( 0 ).left, is( "" ) ); assertThat( path.get( 1 
).left, is( "hr" ) ); @@ -318,7 +319,7 @@ public void testSchemaPath() { @Ignore // test is no longer needed? as the streamer prevents this error and uses different end implementation public void testUpdate() throws Exception { Entity entity = new EntityImpl(); - final SchemaPlus rootSchema = Frameworks.createRootSchema( true ); + final SchemaPlus rootSchema = Frameworks.createSnapshot( true ); SchemaPlus schema = rootSchema.add( "x", new AbstractNamespace( -1 ), NamespaceType.RELATIONAL ); schema.add( "MYTABLE", entity ); List traitDefs = new ArrayList<>(); @@ -429,7 +430,7 @@ public Type getElementType() { @Override - public Expression getExpression( PolyphenyDbSchema schema, String tableName, Class clazz ) { + public Expression getExpression( Snapshot snapshot, String tableName, Class clazz ) { throw new UnsupportedOperationException(); } diff --git a/plugins/sql-language/src/test/java/org/polypheny/db/sql/InterpreterTest.java b/plugins/sql-language/src/test/java/org/polypheny/db/sql/InterpreterTest.java index 29d978bfcc..7fa389a341 100644 --- a/plugins/sql-language/src/test/java/org/polypheny/db/sql/InterpreterTest.java +++ b/plugins/sql-language/src/test/java/org/polypheny/db/sql/InterpreterTest.java @@ -154,7 +154,7 @@ public void setParameterTypes( Map types ) { @Before public void setUp() { - rootSchema = Frameworks.createRootSchema( true ).add( "hr", new ReflectiveSchema( new HrSchema(), -1 ), NamespaceType.RELATIONAL ); + rootSchema = Frameworks.createSnapshot( true ).add( "hr", new ReflectiveSchema( new HrSchema(), -1 ), NamespaceType.RELATIONAL ); final FrameworkConfig config = Frameworks.newConfigBuilder() .parserConfig( ParserConfig.DEFAULT ) diff --git a/plugins/sql-language/src/test/java/org/polypheny/db/sql/PlannerTest.java b/plugins/sql-language/src/test/java/org/polypheny/db/sql/PlannerTest.java index 984385f272..6bd7a764ce 100644 --- a/plugins/sql-language/src/test/java/org/polypheny/db/sql/PlannerTest.java +++ b/plugins/sql-language/src/test/java/org/polypheny/db/sql/PlannerTest.java @@ -233,7 +233,7 @@ public void testValidateFails() throws NodeParseException { @Test public void testValidateUserDefinedAggregate() throws Exception { final SchemaPlus schema = Frameworks - .createRootSchema( true ) + .createSnapshot( true ) .add( "hr", new ReflectiveSchema( new HrSchema(), -1 ), NamespaceType.RELATIONAL ); final SqlStdOperatorTable stdOpTab = SqlStdOperatorTable.instance(); @@ -288,7 +288,7 @@ private Planner getPlanner( List traitDefs, Program... programs ) { private Planner getPlanner( List traitDefs, ParserConfig parserConfig, Program... programs ) { final SchemaPlus schema = Frameworks - .createRootSchema( true ) + .createSnapshot( true ) .add( "hr", new ReflectiveSchema( new HrSchema(), -1 ), NamespaceType.RELATIONAL ); final FrameworkConfig config = Frameworks.newConfigBuilder() @@ -1002,7 +1002,7 @@ public void testBushyCrossJoin2() throws Exception { * Checks that a query returns a particular plan, using a planner with MultiJoinOptimizeBushyRule enabled. 
*/ private void checkBushy( String sql, String expected ) throws Exception { - final SchemaPlus schema = Frameworks.createRootSchema( false ).add( "foodmart", new ReflectiveSchema( new FoodmartSchema(), -1 ), NamespaceType.RELATIONAL ); + final SchemaPlus schema = Frameworks.createSnapshot( false ).add( "foodmart", new ReflectiveSchema( new FoodmartSchema(), -1 ), NamespaceType.RELATIONAL ); final FrameworkConfig config = Frameworks.newConfigBuilder() .parserConfig( Parser.ParserConfig.DEFAULT ) @@ -1058,7 +1058,7 @@ public void testOldJoinStyleDeCorrelation() throws Exception { public String checkTpchQuery( String tpchTestQuery ) throws Exception { - final SchemaPlus schema = Frameworks.createRootSchema( false ).add( "tpch", new ReflectiveSchema( new TpchSchema(), -1 ), NamespaceType.RELATIONAL ); + final SchemaPlus schema = Frameworks.createSnapshot( false ).add( "tpch", new ReflectiveSchema( new TpchSchema(), -1 ), NamespaceType.RELATIONAL ); final FrameworkConfig config = Frameworks.newConfigBuilder() .parserConfig( Parser.configBuilder().setLex( Lex.MYSQL ).build() ) @@ -1117,7 +1117,7 @@ public AlgDataType deriveType( Validator validator, ValidatorScope scope, Call c @Test public void testOrderByNonSelectColumn() throws Exception { final SchemaPlus schema = Frameworks - .createRootSchema( true ) + .createSnapshot( true ) .add( "tpch", new ReflectiveSchema( new TpchSchema(), -1 ), NamespaceType.RELATIONAL ); String query = "select t.psPartkey from \n" @@ -1200,7 +1200,7 @@ public void testViewOnView() throws Exception { private void checkView( String sql, Matcher matcher ) throws NodeParseException, ValidationException, AlgConversionException { final SchemaPlus schema = Frameworks - .createRootSchema( true ) + .createSnapshot( true ) .add( "hr", new ReflectiveSchema( -1L, new HrSchema() ), NamespaceType.RELATIONAL ); final FrameworkConfig config = Frameworks.newConfigBuilder() diff --git a/plugins/sql-language/src/test/java/org/polypheny/db/sql/RexExecutorTest.java b/plugins/sql-language/src/test/java/org/polypheny/db/sql/RexExecutorTest.java index 974cf1381c..061d3d7ce2 100644 --- a/plugins/sql-language/src/test/java/org/polypheny/db/sql/RexExecutorTest.java +++ b/plugins/sql-language/src/test/java/org/polypheny/db/sql/RexExecutorTest.java @@ -84,7 +84,7 @@ public RexExecutorTest() { protected void check( final Action action ) throws Exception { - PolyphenyDbSchema rootSchema = AbstractPolyphenyDbSchema.createRootSchema( "" ); + PolyphenyDbSchema rootSchema = AbstractPolyphenyDbSchema.createSnapshot( "" ); FrameworkConfig config = Frameworks.newConfigBuilder() .defaultSchema( rootSchema.plus() ) .prepareContext( new ContextImpl( diff --git a/plugins/sql-language/src/test/java/org/polypheny/db/sql/SortRemoveRuleTest.java b/plugins/sql-language/src/test/java/org/polypheny/db/sql/SortRemoveRuleTest.java index 985bbc36e3..e1e1f29d6e 100644 --- a/plugins/sql-language/src/test/java/org/polypheny/db/sql/SortRemoveRuleTest.java +++ b/plugins/sql-language/src/test/java/org/polypheny/db/sql/SortRemoveRuleTest.java @@ -68,7 +68,7 @@ public final class SortRemoveRuleTest extends SqlLanguageDependent { * The default schema that is used in these tests provides tables sorted on the primary key. Due to this scan operators always come with a {@link AlgCollation} trait. 
*/ private AlgNode transform( String sql, RuleSet prepareRules ) throws Exception { - final SchemaPlus rootSchema = Frameworks.createRootSchema( true ); + final SchemaPlus rootSchema = Frameworks.createSnapshot( true ); final SchemaPlus defSchema = rootSchema.add( "hr", new HrClusteredSchema( rootSchema.getId() ), NamespaceType.RELATIONAL ); final FrameworkConfig config = Frameworks.newConfigBuilder() .parserConfig( ParserConfig.DEFAULT ) diff --git a/plugins/sql-language/src/test/java/org/polypheny/db/sql/TestFixture.java b/plugins/sql-language/src/test/java/org/polypheny/db/sql/TestFixture.java index 5b10217118..77aa43980b 100644 --- a/plugins/sql-language/src/test/java/org/polypheny/db/sql/TestFixture.java +++ b/plugins/sql-language/src/test/java/org/polypheny/db/sql/TestFixture.java @@ -144,7 +144,7 @@ public TestFixture() { final Holder holder = Holder.of( null ); - PolyphenyDbSchema rootSchema = AbstractPolyphenyDbSchema.createRootSchema( "" ); + PolyphenyDbSchema rootSchema = AbstractPolyphenyDbSchema.createSnapshot( "" ); FrameworkConfig config = Frameworks.newConfigBuilder() .defaultSchema( rootSchema.plus() ) .prepareContext( new ContextImpl( diff --git a/plugins/sql-language/src/test/java/org/polypheny/db/sql/language/SqlToAlgTestBase.java b/plugins/sql-language/src/test/java/org/polypheny/db/sql/language/SqlToAlgTestBase.java index e602662c3d..4c62b963e0 100644 --- a/plugins/sql-language/src/test/java/org/polypheny/db/sql/language/SqlToAlgTestBase.java +++ b/plugins/sql-language/src/test/java/org/polypheny/db/sql/language/SqlToAlgTestBase.java @@ -49,6 +49,7 @@ import org.polypheny.db.catalog.MockCatalogReaderDynamic; import org.polypheny.db.catalog.MockCatalogReaderSimple; import org.polypheny.db.catalog.entity.CatalogEntity; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.config.PolyphenyDbConnectionConfig; import org.polypheny.db.languages.NodeToAlgConverter; import org.polypheny.db.languages.NodeToAlgConverter.Config; @@ -279,7 +280,7 @@ public MockRelOptSchema( ValidatorCatalogReader catalogReader, AlgDataTypeFactor @Override - public AlgOptEntity getTableForMember( List names ) { + public LogicalTable getTableForMember( List names ) { final ValidatorTable table = catalogReader.getTable( names ); final AlgDataType rowType = table.getRowType(); final List collationList = deduceMonotonicity( table ); diff --git a/plugins/sql-language/src/test/java/org/polypheny/db/sql/language/validate/LexCaseSensitiveTest.java b/plugins/sql-language/src/test/java/org/polypheny/db/sql/language/validate/LexCaseSensitiveTest.java index add01a33c7..f682a914ea 100644 --- a/plugins/sql-language/src/test/java/org/polypheny/db/sql/language/validate/LexCaseSensitiveTest.java +++ b/plugins/sql-language/src/test/java/org/polypheny/db/sql/language/validate/LexCaseSensitiveTest.java @@ -60,7 +60,7 @@ public class LexCaseSensitiveTest { private static Planner getPlanner( List traitDefs, ParserConfig parserConfig, Program... 
programs ) { - final SchemaPlus schema = Frameworks.createRootSchema( true ).add( "hr", new ReflectiveSchema( new HrSchema(), -1 ), NamespaceType.RELATIONAL ); + final SchemaPlus schema = Frameworks.createSnapshot( true ).add( "hr", new ReflectiveSchema( new HrSchema(), -1 ), NamespaceType.RELATIONAL ); final FrameworkConfig config = Frameworks.newConfigBuilder() .parserConfig( parserConfig ) .defaultSchema( schema ) diff --git a/plugins/sql-language/src/test/java/org/polypheny/db/sql/util/PlannerImplMock.java b/plugins/sql-language/src/test/java/org/polypheny/db/sql/util/PlannerImplMock.java index b26991a78f..0954dea7b4 100644 --- a/plugins/sql-language/src/test/java/org/polypheny/db/sql/util/PlannerImplMock.java +++ b/plugins/sql-language/src/test/java/org/polypheny/db/sql/util/PlannerImplMock.java @@ -113,7 +113,7 @@ public class PlannerImplMock implements Planner { */ public PlannerImplMock( FrameworkConfig config ) { this.config = config; - this.defaultSchema = config.getDefaultSchema(); + this.defaultSchema = config.getSnapshot(); this.programs = config.getPrograms(); this.parserConfig = config.getParserConfig(); this.sqlToRelConverterConfig = config.getSqlToRelConverterConfig(); diff --git a/webui/src/main/java/org/polypheny/db/webui/HttpServer.java b/webui/src/main/java/org/polypheny/db/webui/HttpServer.java index 8a3d7926fe..91a7c5337c 100644 --- a/webui/src/main/java/org/polypheny/db/webui/HttpServer.java +++ b/webui/src/main/java/org/polypheny/db/webui/HttpServer.java @@ -261,7 +261,7 @@ private void crudRoutes( Javalin webuiServer, Crud crud ) { webuiServer.post( "/dropTruncateTable", crud::dropTruncateTable ); - webuiServer.post( "/createTable", crud::createTable ); + webuiServer.post( "/createPhysicalTable", crud::createTable ); webuiServer.post( "/createCollection", crud.languageCrud::createCollection ); diff --git a/webui/src/main/java/org/polypheny/db/webui/crud/LanguageCrud.java b/webui/src/main/java/org/polypheny/db/webui/crud/LanguageCrud.java index 161330d48d..e978b5a391 100644 --- a/webui/src/main/java/org/polypheny/db/webui/crud/LanguageCrud.java +++ b/webui/src/main/java/org/polypheny/db/webui/crud/LanguageCrud.java @@ -37,7 +37,7 @@ import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.catalog.logistic.Pattern; -import org.polypheny.db.catalog.entity.LogicalCollection; +import org.polypheny.db.catalog.entity.logical.LogicalCollection; import org.polypheny.db.catalog.entity.CatalogCollectionPlacement; import org.polypheny.db.catalog.entity.CatalogColumn; import org.polypheny.db.catalog.entity.logical.LogicalGraph; From 08abbf34675d05c6c07a66ff097371d0616c55a1 Mon Sep 17 00:00:00 2001 From: datomo Date: Wed, 1 Mar 2023 00:40:07 +0100 Subject: [PATCH 028/436] switched to only one create entity method --- .../org/polypheny/db/adapter/Adapter.java | 3 +- .../org/polypheny/db/ddl/DdlManagerImpl.java | 40 ++++---- .../adapter/cottontail/CottontailPlugin.java | 2 +- .../polypheny/db/adapter/csv/CsvSource.java | 2 +- .../polypheny/db/adapter/file/FilePlugin.java | 29 ++---- .../db/adapter/file/FileStoreSchema.java | 22 +---- .../adapter/file/FileTranslatableEntity.java | 7 +- .../polypheny/db/adapter/file/source/Qfs.java | 2 +- .../db/adapter/file/source/QfsSchema.java | 13 +-- .../db/hsqldb/stores/HsqldbStore.java | 5 +- .../polypheny/db/adapter/jdbc/JdbcEntity.java | 91 +++++++++++-------- .../polypheny/db/adapter/jdbc/JdbcScan.java | 10 +- .../polypheny/db/adapter/jdbc/JdbcSchema.java | 15 +-- 
.../jdbc/stores/AbstractJdbcStore.java | 10 +- .../monetdb/sources/MonetdbSource.java | 2 +- .../adapter/monetdb/stores/MonetdbStore.java | 2 +- .../db/adapter/mongodb/MongoEntity.java | 14 +-- .../db/adapter/mongodb/MongoPlugin.java | 14 +-- .../db/adapter/mongodb/MongoSchema.java | 18 +--- .../db/adapter/jdbc/MysqlSourcePlugin.java | 4 +- .../db/adapter/neo4j/Neo4jPlugin.java | 2 +- .../postgres/source/PostgresqlSource.java | 2 +- .../postgres/store/PostgresqlStore.java | 2 +- 23 files changed, 130 insertions(+), 181 deletions(-) diff --git a/core/src/main/java/org/polypheny/db/adapter/Adapter.java b/core/src/main/java/org/polypheny/db/adapter/Adapter.java index 7508094237..e2a54f1cd0 100644 --- a/core/src/main/java/org/polypheny/db/adapter/Adapter.java +++ b/core/src/main/java/org/polypheny/db/adapter/Adapter.java @@ -324,7 +324,8 @@ public Adapter( int adapterId, String uniqueName, Map settings ) public abstract void createNewSchema( Snapshot snapshot, String name, long id ); - public abstract PhysicalTable createAdapterTable( LogicalTable logical, AllocationTable allocationTable, PhysicalTable physicalTable ); + @Deprecated + public abstract PhysicalTable createAdapterTable( LogicalTable logical, AllocationTable allocationTable ); public abstract Namespace getCurrentSchema(); diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java index f633f01c55..793ba02e28 100644 --- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java @@ -50,19 +50,8 @@ import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.logistic.Collation; -import org.polypheny.db.catalog.logistic.ConstraintType; -import org.polypheny.db.catalog.logistic.DataPlacementRole; -import org.polypheny.db.catalog.logistic.EntityType; -import org.polypheny.db.catalog.logistic.ForeignKeyOption; -import org.polypheny.db.catalog.logistic.IndexType; -import org.polypheny.db.catalog.logistic.NamespaceType; -import org.polypheny.db.catalog.logistic.PartitionType; -import org.polypheny.db.catalog.logistic.PlacementType; -import org.polypheny.db.catalog.logistic.NameGenerator; import org.polypheny.db.catalog.entity.CatalogAdapter; import org.polypheny.db.catalog.entity.CatalogAdapter.AdapterType; -import org.polypheny.db.catalog.entity.logical.LogicalCollection; import org.polypheny.db.catalog.entity.CatalogCollectionMapping; import org.polypheny.db.catalog.entity.CatalogCollectionPlacement; import org.polypheny.db.catalog.entity.CatalogColumn; @@ -70,7 +59,6 @@ import org.polypheny.db.catalog.entity.CatalogConstraint; import org.polypheny.db.catalog.entity.CatalogDataPlacement; import org.polypheny.db.catalog.entity.CatalogForeignKey; -import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.catalog.entity.CatalogGraphMapping; import org.polypheny.db.catalog.entity.CatalogGraphPlacement; import org.polypheny.db.catalog.entity.CatalogIndex; @@ -79,11 +67,13 @@ import org.polypheny.db.catalog.entity.CatalogPartitionGroup; import org.polypheny.db.catalog.entity.CatalogPrimaryKey; import org.polypheny.db.catalog.entity.CatalogSchema; -import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.entity.CatalogUser; import org.polypheny.db.catalog.entity.CatalogView; import 
org.polypheny.db.catalog.entity.MaterializedCriteria; import org.polypheny.db.catalog.entity.MaterializedCriteria.CriteriaType; +import org.polypheny.db.catalog.entity.logical.LogicalCollection; +import org.polypheny.db.catalog.entity.logical.LogicalGraph; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.exceptions.ColumnAlreadyExistsException; import org.polypheny.db.catalog.exceptions.EntityAlreadyExistsException; import org.polypheny.db.catalog.exceptions.GenericCatalogException; @@ -101,6 +91,16 @@ import org.polypheny.db.catalog.exceptions.UnknownSchemaException; import org.polypheny.db.catalog.exceptions.UnknownTableException; import org.polypheny.db.catalog.exceptions.UnknownUserException; +import org.polypheny.db.catalog.logistic.Collation; +import org.polypheny.db.catalog.logistic.ConstraintType; +import org.polypheny.db.catalog.logistic.DataPlacementRole; +import org.polypheny.db.catalog.logistic.EntityType; +import org.polypheny.db.catalog.logistic.ForeignKeyOption; +import org.polypheny.db.catalog.logistic.IndexType; +import org.polypheny.db.catalog.logistic.NameGenerator; +import org.polypheny.db.catalog.logistic.NamespaceType; +import org.polypheny.db.catalog.logistic.PartitionType; +import org.polypheny.db.catalog.logistic.PlacementType; import org.polypheny.db.config.RuntimeConfig; import org.polypheny.db.ddl.exception.AlterSourceException; import org.polypheny.db.ddl.exception.ColumnNotExistsException; @@ -910,7 +910,7 @@ else if ( partitionGroupIds.isEmpty() && partitionGroupNames.isEmpty() ) { PolySchemaBuilder.getInstance().getCurrent(); // Create table on store - dataStore.createPhysicalTable( statement.getPrepareContext(), catalogTable, , catalogTable.partitionProperty.partitionIds ); + dataStore.createPhysicalTable( statement.getPrepareContext(), catalogTable, null ); // Copy data to the newly added placements DataMigrator dataMigrator = statement.getTransaction().getDataMigrator(); dataMigrator.copyData( statement.getTransaction(), catalog.getAdapter( dataStore.getAdapterId() ), addedColumns, partitionIds ); @@ -1468,7 +1468,7 @@ else if ( !partitionGroupNames.isEmpty() && partitionGroupIds.isEmpty() ) { null, DataPlacementRole.UPTODATE ) ); - storeInstance.createPhysicalTable( statement.getPrepareContext(), catalogTable, , newPartitionIdsOnDataPlacement ); + storeInstance.createPhysicalTable( statement.getPrepareContext(), catalogTable, null ); } // Copy the data to the newly added column placements @@ -1523,7 +1523,7 @@ public void modifyPartitionPlacement( LogicalTable catalogTable, List part DataPlacementRole.UPTODATE ); } - storeInstance.createPhysicalTable( statement.getPrepareContext(), catalogTable, , newPartitions ); + storeInstance.createPhysicalTable( statement.getPrepareContext(), catalogTable, null ); // Get only columns that are actually on that store List necessaryColumns = new LinkedList<>(); @@ -1868,7 +1868,7 @@ public void createMaterializedView( String viewName, long schemaId, AlgRoot algR null, DataPlacementRole.UPTODATE ); - store.createPhysicalTable( statement.getPrepareContext(), catalogMaterializedView, , catalogMaterializedView.partitionProperty.partitionIds ); + store.createPhysicalTable( statement.getPrepareContext(), catalogMaterializedView, null ); } // Selected data from tables is added into the newly crated materialized view @@ -2240,7 +2240,7 @@ public void createTable( long schemaId, String name, List fiel null, DataPlacementRole.UPTODATE ); - store.createPhysicalTable( 
statement.getPrepareContext(), catalogTable, , catalogTable.partitionProperty.partitionIds ); + store.createPhysicalTable( statement.getPrepareContext(), catalogTable, null ); } } catch ( GenericCatalogException | UnknownColumnException | UnknownCollationException e ) { @@ -2682,7 +2682,7 @@ public void addPartitioning( PartitionInformation partitionInfo, List } // First create new tables - store.createPhysicalTable( statement.getPrepareContext(), partitionedTable, , partitionedTable.partitionProperty.partitionIds ); + store.createPhysicalTable( statement.getPrepareContext(), partitionedTable, null ); // Copy data from unpartitioned to partitioned // Get only columns that are actually on that store @@ -2792,7 +2792,7 @@ public void removePartitioning( LogicalTable partitionedTable, Statement stateme DataPlacementRole.UPTODATE ); // First create new tables - store.createPhysicalTable( statement.getPrepareContext(), mergedTable, , mergedTable.partitionProperty.partitionIds ); + store.createPhysicalTable( statement.getPrepareContext(), mergedTable, null ); // Get only columns that are actually on that store List necessaryColumns = new LinkedList<>(); diff --git a/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/CottontailPlugin.java b/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/CottontailPlugin.java index 81b4ca0de7..fb499df506 100644 --- a/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/CottontailPlugin.java +++ b/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/CottontailPlugin.java @@ -210,7 +210,7 @@ public void createNewSchema( Snapshot snapshot, String name, long id ) { @Override - public PhysicalTable createAdapterTable( LogicalTable logical, AllocationTable allocationTable, PhysicalTable physicalTable ) { + public PhysicalTable createAdapterTable( LogicalTable logical, AllocationTable allocationTable ) { return new CottontailEntity( this.currentSchema, this.dbName, diff --git a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java index fa3b984b1f..5b4637e030 100644 --- a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java +++ b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java @@ -133,7 +133,7 @@ public void createNewSchema( Snapshot snapshot, String name, long id ) { @Override - public PhysicalTable createAdapterTable( LogicalTable logical, AllocationTable allocationTable, PhysicalTable physicalTable ) { + public PhysicalTable createAdapterTable( LogicalTable logical, AllocationTable allocationTable ) { return currentSchema.createCsvTable( logical, allocationTable, this ); } diff --git a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/FilePlugin.java b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/FilePlugin.java index c18f55bad6..1bb18f5f44 100644 --- a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/FilePlugin.java +++ b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/FilePlugin.java @@ -184,7 +184,7 @@ public void createNewSchema( Snapshot snapshot, String name, long id ) { @Override - public PhysicalTable createAdapterTable( LogicalTable logical, AllocationTable allocationTable, PhysicalTable physicalTable ) { + public PhysicalTable createAdapterTable( LogicalTable logical, AllocationTable allocationTable ) { return 
currentSchema.createFileTable( logical, allocationTable ); } @@ -196,32 +196,17 @@ public Namespace getCurrentSchema() { @Override - public PhysicalTable createPhysicalTable( Context context, LogicalTable catalogTable, AllocationTable allocationTable ) { + public PhysicalTable createPhysicalTable( Context context, LogicalTable logicalTable, AllocationTable allocationTable ) { context.getStatement().getTransaction().registerInvolvedAdapter( this ); - for ( long partitionId : partitionIds ) { - catalog.updatePartitionPlacementPhysicalNames( - getAdapterId(), - partitionId, - "unused", - "unused" ); - - for ( Long colId : catalogTable.fieldIds ) { - File newColumnFolder = getColumnFolder( colId, partitionId ); - if ( !newColumnFolder.mkdir() ) { - throw new RuntimeException( "Could not create column folder " + newColumnFolder.getAbsolutePath() ); - } + for ( Long colId : logicalTable.fieldIds ) { + File newColumnFolder = getColumnFolder( colId, allocationTable.id ); + if ( !newColumnFolder.mkdir() ) { + throw new RuntimeException( "Could not create column folder " + newColumnFolder.getAbsolutePath() ); } } - for ( CatalogColumnPlacement placement : catalog.getColumnPlacementsOnAdapterPerTable( getAdapterId(), catalogTable.id ) ) { - catalog.updateColumnPlacementPhysicalNames( - getAdapterId(), - placement.columnId, - "unused", - "unused", - true ); - } + return this.currentSchema.createFileTable( logicalTable, allocationTable ); } diff --git a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/FileStoreSchema.java b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/FileStoreSchema.java index a931bab367..fc3310fa0e 100644 --- a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/FileStoreSchema.java +++ b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/FileStoreSchema.java @@ -19,7 +19,6 @@ import java.io.File; import java.util.ArrayList; -import java.util.Comparator; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -32,27 +31,17 @@ import org.polypheny.db.adapter.DataContext; import org.polypheny.db.adapter.file.FileAlg.FileImplementor.Operation; import org.polypheny.db.adapter.file.FilePlugin.FileStore; -import org.polypheny.db.algebra.type.AlgDataTypeFactory; -import org.polypheny.db.algebra.type.AlgDataTypeImpl; -import org.polypheny.db.algebra.type.AlgDataTypeSystem; -import org.polypheny.db.algebra.type.AlgProtoDataType; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.Snapshot; -import org.polypheny.db.catalog.entity.CatalogColumn; -import org.polypheny.db.catalog.entity.CatalogColumnPlacement; import org.polypheny.db.catalog.entity.CatalogEntity; -import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; import org.polypheny.db.catalog.entity.CatalogPrimaryKey; import org.polypheny.db.catalog.entity.allocation.AllocationTable; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.entity.physical.PhysicalTable; -import org.polypheny.db.schema.Entity; import org.polypheny.db.schema.Namespace.Schema; -import org.polypheny.db.schema.SchemaPlus; import org.polypheny.db.schema.Schemas; import org.polypheny.db.schema.impl.AbstractNamespace; import org.polypheny.db.type.PolyType; -import org.polypheny.db.type.PolyTypeFactoryImpl; public class FileStoreSchema extends AbstractNamespace implements FileSchema, Schema { @@ -89,21 +78,20 @@ public int getAdapterId() { public PhysicalTable createFileTable( - LogicalTable catalogTable, + 
LogicalTable logicalTable, AllocationTable allocationTable ) { List pkIds; - if ( catalogTable.primaryKey != null ) { - CatalogPrimaryKey primaryKey = Catalog.getInstance().getPrimaryKey( catalogTable.primaryKey ); + if ( logicalTable.primaryKey != null ) { + CatalogPrimaryKey primaryKey = Catalog.getInstance().getPrimaryKey( logicalTable.primaryKey ); pkIds = primaryKey.columnIds; } else { pkIds = new ArrayList<>(); } - FileTranslatableEntity table = new FileTranslatableEntity( + return new FileTranslatableEntity( this, + logicalTable, allocationTable, pkIds ); - tables.put( catalogTable.name + "_" + allocationTable.id, table ); - return table; } diff --git a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/FileTranslatableEntity.java b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/FileTranslatableEntity.java index 69590ba0a0..886819b985 100644 --- a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/FileTranslatableEntity.java +++ b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/FileTranslatableEntity.java @@ -18,8 +18,6 @@ import java.io.File; -import java.util.ArrayList; -import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; @@ -32,6 +30,7 @@ import org.polypheny.db.algebra.logical.relational.LogicalRelModify; import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.catalog.entity.allocation.AllocationTable; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.entity.physical.PhysicalTable; import org.polypheny.db.catalog.refactor.ModifiableEntity; import org.polypheny.db.catalog.refactor.TranslatableEntity; @@ -64,9 +63,9 @@ public class FileTranslatableEntity extends PhysicalTable implements Translatabl public FileTranslatableEntity( final FileSchema fileSchema, - final AllocationTable allocationTable, + LogicalTable logicalTable, final AllocationTable allocationTable, final List pkIds ) { - super( allocationTable ); + super( allocationTable, logicalTable.name, logicalTable.getNamespaceName(), logicalTable.getColumnNames() ); this.fileSchema = fileSchema; this.rootDir = fileSchema.getRootDir(); this.adapterId = (long) fileSchema.getAdapterId(); diff --git a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/source/Qfs.java b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/source/Qfs.java index ba9daf10e1..41e75d5ee6 100644 --- a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/source/Qfs.java +++ b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/source/Qfs.java @@ -89,7 +89,7 @@ public void createNewSchema( Snapshot snapshot, String name, long id ) { @Override - public PhysicalTable createAdapterTable( LogicalTable logical, AllocationTable allocation, PhysicalTable physicalTable ) { + public PhysicalTable createAdapterTable( LogicalTable logical, AllocationTable allocation ) { return currentSchema.createFileTable( logical, allocation ); } diff --git a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/source/QfsSchema.java b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/source/QfsSchema.java index 25ceca171d..e1fecf97cb 100644 --- a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/source/QfsSchema.java +++ b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/source/QfsSchema.java @@ -19,7 +19,6 @@ import java.io.File; import java.util.ArrayList; -import java.util.Comparator; import 
java.util.HashMap; import java.util.List; import java.util.Map; @@ -36,25 +35,15 @@ import org.polypheny.db.adapter.file.FileSchema; import org.polypheny.db.adapter.file.FileTranslatableEntity; import org.polypheny.db.adapter.file.Value; -import org.polypheny.db.algebra.type.AlgDataTypeFactory; -import org.polypheny.db.algebra.type.AlgDataTypeImpl; -import org.polypheny.db.algebra.type.AlgDataTypeSystem; -import org.polypheny.db.algebra.type.AlgProtoDataType; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.Snapshot; -import org.polypheny.db.catalog.entity.CatalogColumn; -import org.polypheny.db.catalog.entity.CatalogColumnPlacement; -import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; import org.polypheny.db.catalog.entity.CatalogPrimaryKey; import org.polypheny.db.catalog.entity.allocation.AllocationTable; import org.polypheny.db.catalog.entity.logical.LogicalTable; -import org.polypheny.db.schema.Entity; import org.polypheny.db.schema.Namespace.Schema; -import org.polypheny.db.schema.SchemaPlus; import org.polypheny.db.schema.Schemas; import org.polypheny.db.schema.impl.AbstractNamespace; import org.polypheny.db.type.PolyType; -import org.polypheny.db.type.PolyTypeFactoryImpl; public class QfsSchema extends AbstractNamespace implements FileSchema, Schema { @@ -100,7 +89,7 @@ public FileTranslatableEntity createFileTable( LogicalTable logicalTable, Alloca } FileTranslatableEntity table = new FileTranslatableEntity( this, - allocationTable, + logicalTable, allocationTable, pkIds ); tableMap.put( logicalTable.name + "_" + allocationTable.name, table ); return table; diff --git a/plugins/hsqldb-adapter/src/main/java/org/polypheny/db/hsqldb/stores/HsqldbStore.java b/plugins/hsqldb-adapter/src/main/java/org/polypheny/db/hsqldb/stores/HsqldbStore.java index dc76026e88..3f97c057ec 100644 --- a/plugins/hsqldb-adapter/src/main/java/org/polypheny/db/hsqldb/stores/HsqldbStore.java +++ b/plugins/hsqldb-adapter/src/main/java/org/polypheny/db/hsqldb/stores/HsqldbStore.java @@ -41,7 +41,6 @@ import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; import org.polypheny.db.catalog.entity.allocation.AllocationTable; import org.polypheny.db.catalog.entity.logical.LogicalTable; -import org.polypheny.db.catalog.entity.physical.PhysicalTable; import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.config.RuntimeConfig; import org.polypheny.db.plugins.PolyPluginManager; @@ -107,8 +106,8 @@ protected ConnectionFactory deployEmbedded() { @Override - public JdbcEntity createAdapterTable( LogicalTable logicalTable, AllocationTable allocationTable, PhysicalTable physicalTable ) { - return currentJdbcSchema.createJdbcTable( logicalTable, allocationTable, physicalTable ); + public JdbcEntity createAdapterTable( LogicalTable logicalTable, AllocationTable allocationTable ) { + return currentJdbcSchema.createJdbcTable( logicalTable, allocationTable ); } diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcEntity.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcEntity.java index ef497cbbd5..31690a4667 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcEntity.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcEntity.java @@ -34,7 +34,6 @@ package org.polypheny.db.adapter.jdbc; -import com.google.common.collect.Lists; import java.util.Arrays; import java.util.Collections; import java.util.List; @@ 
-46,32 +45,32 @@ import org.apache.calcite.linq4j.Enumerator; import org.apache.calcite.linq4j.Queryable; import org.apache.calcite.linq4j.tree.Expressions; +import org.polypheny.db.adapter.Adapter; import org.polypheny.db.adapter.DataContext; import org.polypheny.db.adapter.java.JavaTypeFactory; +import org.polypheny.db.adapter.jdbc.stores.AbstractJdbcStore; import org.polypheny.db.algebra.AlgNode; +import org.polypheny.db.algebra.core.common.Modify; import org.polypheny.db.algebra.core.common.Modify.Operation; -import org.polypheny.db.algebra.core.relational.RelModify; import org.polypheny.db.algebra.logical.relational.LogicalRelModify; import org.polypheny.db.algebra.operators.OperatorName; import org.polypheny.db.algebra.type.AlgDataType; -import org.polypheny.db.algebra.type.AlgDataTypeFactory; +import org.polypheny.db.catalog.Snapshot; +import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.catalog.entity.allocation.AllocationTable; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.entity.physical.PhysicalTable; import org.polypheny.db.catalog.refactor.ModifiableEntity; +import org.polypheny.db.catalog.refactor.QueryableEntity; import org.polypheny.db.catalog.refactor.ScannableEntity; import org.polypheny.db.catalog.refactor.TranslatableEntity; import org.polypheny.db.languages.OperatorRegistry; import org.polypheny.db.languages.ParserPos; import org.polypheny.db.plan.AlgOptCluster; -import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptEntity.ToAlgContext; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.plan.Convention; -import org.polypheny.db.prepare.Prepare.CatalogReader; import org.polypheny.db.rex.RexNode; -import org.polypheny.db.schema.PolyphenyDbSchema; -import org.polypheny.db.schema.SchemaPlus; import org.polypheny.db.schema.TableType; import org.polypheny.db.schema.impl.AbstractTableQueryable; import org.polypheny.db.sql.language.SqlBasicCall; @@ -93,11 +92,10 @@ * applying Queryable operators such as {@link org.apache.calcite.linq4j.Queryable#where(org.apache.calcite.linq4j.function.Predicate2)}. * The resulting queryable can then be converted to a SQL query, which can be executed efficiently on the JDBC server. */ -public class JdbcEntity extends PhysicalTable implements TranslatableEntity, ScannableEntity, ModifiableEntity { +public class JdbcEntity extends PhysicalTable implements TranslatableEntity, ScannableEntity, ModifiableEntity, QueryableEntity { private final AllocationTable allocation; private final LogicalTable logical; - private final PhysicalTable physical; private JdbcSchema jdbcSchema; private final TableType jdbcTableType; @@ -107,22 +105,41 @@ public JdbcEntity( JdbcSchema jdbcSchema, LogicalTable logicalTable, AllocationTable allocationTable, - PhysicalTable physicalTable, @NonNull TableType jdbcTableType ) { - super( physicalTable ); + super( + allocationTable, + getPhysicalTableName( jdbcSchema.adapter, logicalTable, allocationTable ), + getPhysicalSchemaName( jdbcSchema.adapter ), + getPhysicalColumnNames( jdbcSchema.adapter, allocationTable ) ); this.logical = logicalTable; this.allocation = allocationTable; - this.physical = physicalTable; this.jdbcSchema = jdbcSchema; this.jdbcTableType = jdbcTableType; } - public String toString() { - return "JdbcTable {" + physical.namespaceName + "." 
+ physical.name + "}"; + private static List getPhysicalColumnNames( Adapter adapter, AllocationTable allocationTable ) { + AbstractJdbcStore store = (AbstractJdbcStore) adapter; + return allocationTable.getColumns().values().stream().map( c -> store.getPhysicalColumnName( c.id ) ).collect( Collectors.toList() ); + } + + + private static String getPhysicalSchemaName( Adapter adapter ) { + AbstractJdbcStore store = (AbstractJdbcStore) adapter; + return store.getDefaultPhysicalSchemaName(); + } + + + private static String getPhysicalTableName( Adapter adapter, LogicalTable logicalTable, AllocationTable allocationTable ) { + AbstractJdbcStore store = (AbstractJdbcStore) adapter; + return store.getPhysicalTableName( logicalTable.id, allocationTable.id ); } + public String toString() { + return "JdbcTable {" + namespaceName + "." + name + "}"; + } + private List> fieldClasses( final JavaTypeFactory typeFactory ) { final AlgDataType rowType = getRowType(); @@ -137,12 +154,12 @@ private List> fieldClasses( final JavaTypeFact SqlString generateSql() { List pcnl = Expressions.list(); - for ( String str : physical.columnNames ) { - pcnl.add( new SqlIdentifier( Arrays.asList( physical.name, str ), ParserPos.ZERO ) ); + for ( String str : columnNames ) { + pcnl.add( new SqlIdentifier( Arrays.asList( name, str ), ParserPos.ZERO ) ); } final SqlNodeList selectList = new SqlNodeList( pcnl, ParserPos.ZERO ); - SqlIdentifier physicalTableName = new SqlIdentifier( Arrays.asList( physical.namespaceName, physical.name ), ParserPos.ZERO ); + SqlIdentifier physicalTableName = new SqlIdentifier( Arrays.asList( namespaceName, name ), ParserPos.ZERO ); SqlSelect node = new SqlSelect( ParserPos.ZERO, SqlNodeList.EMPTY, @@ -162,28 +179,28 @@ SqlString generateSql() { public SqlIdentifier physicalTableName() { - return new SqlIdentifier( Arrays.asList( physical.namespaceName, physical.name ), ParserPos.ZERO ); + return new SqlIdentifier( Arrays.asList( namespaceName, name ), ParserPos.ZERO ); } public SqlIdentifier physicalColumnName( String logicalColumnName ) { - String physicalName = physicalColumnNames.get( logicalColumnNames.indexOf( logicalColumnName ) ); + String physicalName = columnNames.get( List.copyOf( allocation.getColumnNames().values() ).indexOf( logicalColumnName ) ); return new SqlIdentifier( Collections.singletonList( physicalName ), ParserPos.ZERO ); } public boolean hasPhysicalColumnName( String logicalColumnName ) { - return logicalColumnNames.contains( logicalColumnName ); + return List.copyOf( allocation.getColumnNames().values() ).contains( logicalColumnName ); } public SqlNodeList getNodeList() { List pcnl = Expressions.list(); int i = 0; - for ( String str : physicalColumnNames ) { + for ( String str : columnNames ) { SqlNode[] operands = new SqlNode[]{ - new SqlIdentifier( Arrays.asList( physicalSchemaName, physicalTableName, str ), ParserPos.ZERO ), - new SqlIdentifier( Arrays.asList( logicalColumnNames.get( i++ ) ), ParserPos.ZERO ) + new SqlIdentifier( Arrays.asList( namespaceName, name, str ), ParserPos.ZERO ), + new SqlIdentifier( Collections.singletonList( List.copyOf( allocation.getColumnNames().values() ).get( i++ ) ), ParserPos.ZERO ) }; pcnl.add( new SqlBasicCall( (SqlOperator) OperatorRegistry.get( OperatorName.AS ), operands, ParserPos.ZERO ) ); } @@ -192,14 +209,14 @@ public SqlNodeList getNodeList() { @Override - public AlgNode toAlg( ToAlgContext context, AlgOptEntity algOptEntity, AlgTraitSet traitSet ) { - return new JdbcScan( context.getCluster(), algOptEntity, this, 
jdbcSchema.getConvention() ); + public AlgNode toAlg( ToAlgContext context, AlgTraitSet traitSet ) { + return new JdbcScan( context.getCluster(), this, jdbcSchema.getConvention() ); } @Override - public Queryable asQueryable( DataContext dataContext, PolyphenyDbSchema schema, String tableName ) { - return new JdbcTableQueryable<>( dataContext, schema, tableName ); + public Queryable asQueryable( DataContext dataContext, Snapshot snapshot, long entityId ) { + return new JdbcTableQueryable<>( dataContext, snapshot, this ); } @@ -215,26 +232,22 @@ public Enumerable scan( DataContext root ) { @Override - public RelModify toModificationAlg( + public Modify toModificationAlg( AlgOptCluster cluster, - AlgOptEntity table, - CatalogReader catalogReader, + AlgTraitSet algTraits, + CatalogEntity table, AlgNode input, Operation operation, List updateColumnList, - List sourceExpressionList, - boolean flattened ) { + List sourceExpressionList ) { jdbcSchema.getConvention().register( cluster.getPlanner() ); return new LogicalRelModify( - cluster, cluster.traitSetOf( Convention.NONE ), table, - catalogReader, input, operation, updateColumnList, - sourceExpressionList, - flattened ); + sourceExpressionList ); } @@ -256,14 +269,14 @@ public void setSchema( JdbcSchema jdbcSchema ) { */ private class JdbcTableQueryable extends AbstractTableQueryable { - JdbcTableQueryable( DataContext dataContext, SchemaPlus schema, String tableName ) { - super( dataContext, schema, JdbcEntity.this, tableName ); + JdbcTableQueryable( DataContext dataContext, Snapshot snapshot, JdbcEntity entity ) { + super( dataContext, snapshot, entity ); } @Override public String toString() { - return "JdbcTableQueryable {table: " + physicalSchemaName + "." + tableName + "}"; + return "JdbcTableQueryable {table: " + table.namespaceName + "." + table.name + "}"; } diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcScan.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcScan.java index 4a0b0f78fd..5e7daf3405 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcScan.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcScan.java @@ -41,7 +41,6 @@ import org.polypheny.db.algebra.metadata.AlgMetadataQuery; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgOptCost; -import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptPlanner; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.schema.ModelTrait; @@ -50,7 +49,7 @@ /** * Relational expression representing a scan of a table in a JDBC data source. 
*/ -public class JdbcScan extends RelScan implements JdbcAlg { +public class JdbcScan extends RelScan implements JdbcAlg { protected final JdbcEntity jdbcTable; @@ -61,17 +60,16 @@ public AlgOptCost computeSelfCost( AlgOptPlanner planner, AlgMetadataQuery mq ) } - public JdbcScan( AlgOptCluster cluster, AlgOptEntity table, JdbcEntity jdbcTable, JdbcConvention jdbcConvention ) { - super( cluster, cluster.traitSetOf( jdbcConvention ).replace( ModelTrait.RELATIONAL ), table ); + public JdbcScan( AlgOptCluster cluster, JdbcEntity jdbcTable, JdbcConvention jdbcConvention ) { + super( cluster, cluster.traitSetOf( jdbcConvention ).replace( ModelTrait.RELATIONAL ), jdbcTable ); this.jdbcTable = jdbcTable; - assert jdbcTable != null; } @Override public AlgNode copy( AlgTraitSet traitSet, List inputs ) { assert inputs.isEmpty(); - return new JdbcScan( getCluster(), table, jdbcTable, (JdbcConvention) getConvention() ); + return new JdbcScan( getCluster(), entity, (JdbcConvention) getConvention() ); } diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcSchema.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcSchema.java index bde8761931..99b963c1f9 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcSchema.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcSchema.java @@ -39,8 +39,6 @@ import com.google.common.collect.Multimap; import java.util.Collection; import java.util.HashMap; -import java.util.LinkedList; -import java.util.List; import java.util.Map; import java.util.Set; import javax.sql.DataSource; @@ -55,27 +53,20 @@ import org.polypheny.db.adapter.jdbc.connection.ConnectionHandlerException; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; -import org.polypheny.db.algebra.type.AlgDataTypeSystem; import org.polypheny.db.algebra.type.AlgProtoDataType; import org.polypheny.db.catalog.Snapshot; -import org.polypheny.db.catalog.entity.CatalogColumn; -import org.polypheny.db.catalog.entity.CatalogColumnPlacement; import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.catalog.entity.allocation.AllocationTable; import org.polypheny.db.catalog.entity.logical.LogicalTable; -import org.polypheny.db.catalog.entity.physical.PhysicalTable; -import org.polypheny.db.schema.Entity; import org.polypheny.db.schema.Function; import org.polypheny.db.schema.Namespace; import org.polypheny.db.schema.Namespace.Schema; -import org.polypheny.db.schema.SchemaPlus; import org.polypheny.db.schema.SchemaVersion; import org.polypheny.db.schema.Schemas; import org.polypheny.db.schema.TableType; import org.polypheny.db.sql.language.SqlDialect; import org.polypheny.db.sql.language.SqlDialectFactory; import org.polypheny.db.type.PolyType; -import org.polypheny.db.type.PolyTypeFactoryImpl; /** @@ -96,7 +87,7 @@ public class JdbcSchema implements Namespace, Schema { private final Map tableMap; private final Map physicalToLogicalTableNameMap; - private final Adapter adapter; + public final Adapter adapter; @Getter private final long id; @@ -145,13 +136,11 @@ public JdbcSchema( public JdbcEntity createJdbcTable( LogicalTable logicalTable, - AllocationTable allocationTable, - PhysicalTable physicalTable ) { + AllocationTable allocationTable ) { return new JdbcEntity( this, logicalTable, allocationTable, - physicalTable, TableType.TABLE ); } diff --git 
a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/stores/AbstractJdbcStore.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/stores/AbstractJdbcStore.java index 7dedbec305..ad070b798b 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/stores/AbstractJdbcStore.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/stores/AbstractJdbcStore.java @@ -21,7 +21,6 @@ import java.util.ArrayList; import java.util.List; import java.util.Map; -import java.util.stream.Collectors; import lombok.SneakyThrows; import lombok.extern.slf4j.Slf4j; import org.pf4j.ExtensionPoint; @@ -137,7 +136,8 @@ public PhysicalTable createPhysicalTable( Context context, LogicalTable logicalT } executeUpdate( query, context ); - return new PhysicalTable( allocationTable, getDefaultPhysicalSchemaName(), physicalTableName, allocationTable.getColumns().values().stream().map( c -> getPhysicalColumnName( c.id ) ).collect( Collectors.toList() ) ); + return this.currentJdbcSchema.createJdbcTable( logicalTable, allocationTable ); + //return new PhysicalTable( allocationTable, getDefaultPhysicalSchemaName(), physicalTableName, allocationTable.getColumns().values().stream().map( c -> getPhysicalColumnName( c.id ) ).collect( Collectors.toList() ) ); } @@ -406,7 +406,7 @@ public void shutdown() { } - protected String getPhysicalTableName( long tableId, long partitionId ) { + public String getPhysicalTableName( long tableId, long partitionId ) { String physicalTableName = "tab" + tableId; if ( partitionId >= 0 ) { physicalTableName += "_part" + partitionId; @@ -415,7 +415,7 @@ protected String getPhysicalTableName( long tableId, long partitionId ) { } - protected String getPhysicalColumnName( long columnId ) { + public String getPhysicalColumnName( long columnId ) { return "col" + columnId; } @@ -425,6 +425,6 @@ protected String getPhysicalIndexName( long tableId, long indexId ) { } - protected abstract String getDefaultPhysicalSchemaName(); + public abstract String getDefaultPhysicalSchemaName(); } diff --git a/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/sources/MonetdbSource.java b/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/sources/MonetdbSource.java index ea46c23335..c60631d613 100644 --- a/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/sources/MonetdbSource.java +++ b/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/sources/MonetdbSource.java @@ -93,7 +93,7 @@ protected ConnectionFactory createConnectionFactory( final Map s @Override - public PhysicalTable createAdapterTable( LogicalTable logical, AllocationTable allocationTable, PhysicalTable physicalTable ) { + public PhysicalTable createAdapterTable( LogicalTable logical, AllocationTable allocationTable ) { return currentJdbcSchema.createJdbcTable( catalogTable, columnPlacementsOnStore, partitionPlacement ); } diff --git a/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/stores/MonetdbStore.java b/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/stores/MonetdbStore.java index 255ef45d91..d0c3449ef5 100644 --- a/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/stores/MonetdbStore.java +++ b/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/stores/MonetdbStore.java @@ -230,7 +230,7 @@ public void updateColumnType( Context context, CatalogColumnPlacement columnPlac 
@Override - public PhysicalTable createAdapterTable( LogicalTable logical, AllocationTable allocationTable, PhysicalTable physicalTable ) { + public PhysicalTable createAdapterTable( LogicalTable logical, AllocationTable allocationTable ) { return currentJdbcSchema.createJdbcTable( catalogTable, columnPlacementsOnStore, partitionPlacement ); } diff --git a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoEntity.java b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoEntity.java index 46e5ce4965..7303b6aebe 100644 --- a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoEntity.java +++ b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoEntity.java @@ -84,7 +84,6 @@ import org.polypheny.db.catalog.refactor.QueryableEntity; import org.polypheny.db.catalog.refactor.TranslatableEntity; import org.polypheny.db.plan.AlgOptCluster; -import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptEntity.ToAlgContext; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.plan.Convention; @@ -117,19 +116,17 @@ public class MongoEntity extends PhysicalTable implements TranslatableEntity, Mo public final LogicalTable logical; public final AllocationTable allocation; - public final PhysicalTable physical; /** * Creates a MongoTable. */ - MongoEntity( LogicalTable logicalTable, AllocationTable allocationTable, PhysicalTable physicalTable, MongoSchema schema, TransactionProvider transactionProvider ) { - super( physicalTable ); - this.collectionName = physicalTable.name; + MongoEntity( LogicalTable logicalTable, AllocationTable allocationTable, MongoSchema schema, TransactionProvider transactionProvider ) { + super( allocationTable, MongoStore.getPhysicalTableName( logicalTable.id, allocationTable.id ), logicalTable.name, getColumnNames( allocationTable ) ); + this.collectionName = name; this.transactionProvider = transactionProvider; this.logical = logicalTable; this.allocation = allocationTable; - this.physical = physicalTable; this.catalogCollection = null; this.mongoSchema = schema; this.collection = schema.database.getCollection( collectionName ); @@ -137,6 +134,11 @@ public class MongoEntity extends PhysicalTable implements TranslatableEntity, Mo } + private static List getColumnNames( AllocationTable allocationTable ) { + return allocationTable.getColumns().values().stream().map( c -> MongoStore.getPhysicalColumnName( c.name, c.id ) ).collect( Collectors.toList() ); + } + + /*public MongoEntity( LogicalCollection catalogEntity, MongoSchema schema, AlgProtoDataType proto, TransactionProvider transactionProvider, long adapter, CatalogCollectionPlacement partitionPlacement ) { super( Object[].class, catalogEntity.id, partitionPlacement.id, adapter ); this.collectionName = MongoStore.getPhysicalTableName( catalogEntity.id, partitionPlacement.id ); diff --git a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoPlugin.java b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoPlugin.java index f9c1c79db2..18992624f9 100644 --- a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoPlugin.java +++ b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoPlugin.java @@ -68,8 +68,8 @@ import org.polypheny.db.catalog.entity.CatalogDefaultValue; import org.polypheny.db.catalog.entity.CatalogIndex; import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; -import 
org.polypheny.db.catalog.entity.logical.LogicalCollection; import org.polypheny.db.catalog.entity.allocation.AllocationTable; +import org.polypheny.db.catalog.entity.logical.LogicalCollection; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.entity.physical.PhysicalTable; import org.polypheny.db.catalog.logistic.NamespaceType; @@ -240,8 +240,8 @@ public void createNewSchema( Snapshot snapshot, String name, long id ) { @Override - public PhysicalTable createAdapterTable( LogicalTable logicalTable, AllocationTable allocationTable, PhysicalTable physicalTable ) { - return currentSchema.createTable( logicalTable, allocationTable, physicalTable ); + public PhysicalTable createAdapterTable( LogicalTable logicalTable, AllocationTable allocationTable ) { + return currentSchema.createTable( logicalTable, allocationTable ); } @@ -306,17 +306,17 @@ protected void reloadSettings( List updatedSettings ) { @Override - public PhysicalTable createPhysicalTable( Context context, LogicalTable catalogTable, AllocationTable allocationTable ) { + public PhysicalTable createPhysicalTable( Context context, LogicalTable logicalTable, AllocationTable allocationTable ) { commitAll(); if ( this.currentSchema == null ) { - createNewSchema( null, catalogTable.getNamespaceName(), catalogTable.namespaceId ); + createNewSchema( null, logicalTable.getNamespaceName(), logicalTable.namespaceId ); } - String physicalTableName = getPhysicalTableName( catalogTable.id, allocationTable.id ); + String physicalTableName = getPhysicalTableName( logicalTable.id, allocationTable.id ); this.currentSchema.database.createCollection( physicalTableName ); - return new PhysicalTable( allocationTable, catalogTable.getNamespaceName(), physicalTableName, allocationTable.getColumns().values().stream().map( this::getPhysicalColumnName ).collect( Collectors.toList() ) ); + return this.currentSchema.createTable( logicalTable, allocationTable ); } diff --git a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoSchema.java b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoSchema.java index d12efb877e..4a11d6e444 100644 --- a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoSchema.java +++ b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoSchema.java @@ -39,29 +39,15 @@ import com.mongodb.client.gridfs.GridFSBucket; import com.mongodb.client.gridfs.GridFSBuckets; import java.util.HashMap; -import java.util.List; import java.util.Map; import lombok.Getter; import org.polypheny.db.adapter.mongodb.MongoPlugin.MongoStore; -import org.polypheny.db.algebra.type.AlgDataType; -import org.polypheny.db.algebra.type.AlgDataTypeFactory; -import org.polypheny.db.algebra.type.AlgDataTypeImpl; -import org.polypheny.db.algebra.type.AlgDataTypeSystem; -import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.allocation.AllocationTable; -import org.polypheny.db.catalog.entity.logical.LogicalCollection; -import org.polypheny.db.catalog.entity.CatalogCollectionPlacement; -import org.polypheny.db.catalog.entity.CatalogColumn; -import org.polypheny.db.catalog.entity.CatalogColumnPlacement; -import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; import org.polypheny.db.catalog.entity.logical.LogicalTable; -import org.polypheny.db.catalog.entity.physical.PhysicalTable; import org.polypheny.db.plan.Convention; import org.polypheny.db.schema.Entity; import 
org.polypheny.db.schema.Namespace.Schema; import org.polypheny.db.schema.impl.AbstractNamespace; -import org.polypheny.db.type.PolyType; -import org.polypheny.db.type.PolyTypeFactoryImpl; /** @@ -105,8 +91,8 @@ public MongoSchema( long id, String database, MongoClient connection, Transactio } - public MongoEntity createTable( LogicalTable logicalTable, AllocationTable allocationTable, PhysicalTable physicalTable ) { - return new MongoEntity( logicalTable, allocationTable, physicalTable, this, transactionProvider ); + public MongoEntity createTable( LogicalTable logicalTable, AllocationTable allocationTable ) { + return new MongoEntity( logicalTable, allocationTable, this, transactionProvider ); } diff --git a/plugins/mysql-adapter/src/main/java/org/polypheny/db/adapter/jdbc/MysqlSourcePlugin.java b/plugins/mysql-adapter/src/main/java/org/polypheny/db/adapter/jdbc/MysqlSourcePlugin.java index 267964dd26..e841fd4eb7 100644 --- a/plugins/mysql-adapter/src/main/java/org/polypheny/db/adapter/jdbc/MysqlSourcePlugin.java +++ b/plugins/mysql-adapter/src/main/java/org/polypheny/db/adapter/jdbc/MysqlSourcePlugin.java @@ -95,8 +95,8 @@ public MysqlSource( int storeId, String uniqueName, final Map se @Override - public PhysicalTable createAdapterTable( LogicalTable logical, AllocationTable allocationTable, PhysicalTable physicalTable ) { - return currentJdbcSchema.createJdbcTable( catalogTable, columnPlacementsOnStore, partitionPlacement ); + public PhysicalTable createAdapterTable( LogicalTable logicalTable, AllocationTable allocationTable ) { + return currentJdbcSchema.createJdbcTable( logicalTable, allocationTable ); } diff --git a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/Neo4jPlugin.java b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/Neo4jPlugin.java index 68a3c10f4c..e3e6b3fdda 100644 --- a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/Neo4jPlugin.java +++ b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/Neo4jPlugin.java @@ -497,7 +497,7 @@ public void createNewSchema( SchemaPlus rootSchema, String name ) { @Override - public PhysicalTable createAdapterTable( LogicalTable logical, AllocationTable allocationTable, PhysicalTable physicalTable ) { + public PhysicalTable createAdapterTable( LogicalTable logical, AllocationTable allocationTable ) { return this.currentSchema.createTable( allocationTable ); } diff --git a/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java b/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java index 1fdb126fdc..1e14f06e89 100644 --- a/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java +++ b/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java @@ -84,7 +84,7 @@ public static void register() { @Override - public PhysicalTable createAdapterTable( LogicalTable logical, AllocationTable allocationTable, PhysicalTable physicalTable ) { + public PhysicalTable createAdapterTable( LogicalTable logical, AllocationTable allocationTable ) { return currentJdbcSchema.createJdbcTable( catalogTable, columnPlacementsOnStore, partitionPlacement ); } diff --git a/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/store/PostgresqlStore.java b/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/store/PostgresqlStore.java index a62301140f..ca20e254b7 100644 --- 
a/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/store/PostgresqlStore.java +++ b/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/store/PostgresqlStore.java @@ -196,7 +196,7 @@ public void updateColumnType( Context context, CatalogColumnPlacement columnPlac @Override - public PhysicalTable createAdapterTable( LogicalTable logical, AllocationTable allocationTable, PhysicalTable physicalTable ) { + public PhysicalTable createAdapterTable( LogicalTable logical, AllocationTable allocationTable ) { return currentJdbcSchema.createJdbcTable( catalogTable, columnPlacementsOnStore, partitionPlacement ); } From 5b5472d978aae63ff1b6af9b44ddca2cfcfe5a0c Mon Sep 17 00:00:00 2001 From: datomo Date: Wed, 1 Mar 2023 11:44:12 +0100 Subject: [PATCH 029/436] preparation for adjustment of catalog --- core/build.gradle | 2 + .../org/polypheny/db/StatisticsManager.java | 4 +- .../org/polypheny/db/adapter/DataStore.java | 6 +- .../algebra/core/document/DocumentModify.java | 9 +- .../document/LogicalDocumentModify.java | 11 +- .../org/polypheny/db/catalog/Catalog.java | 136 +- .../org/polypheny/db/catalog/Snapshot.java | 3 + .../catalog/entity/CatalogDataPlacement.java | 32 +- .../db/catalog/entity/CatalogEntity.java | 13 +- .../db/catalog/entity/CatalogForeignKey.java | 14 +- .../db/catalog/entity/CatalogKey.java | 11 +- .../entity/CatalogMaterializedView.java | 107 +- .../db/catalog/entity/CatalogPartition.java | 3 - .../catalog/entity/CatalogPartitionGroup.java | 8 - .../db/catalog/entity/CatalogSchema.java | 22 +- .../db/catalog/entity/CatalogView.java | 106 +- .../allocation/AllocationCollection.java | 8 +- .../entity/allocation/AllocationGraph.java | 10 +- .../entity/allocation/AllocationTable.java | 25 +- .../entity/logical/LogicalCollection.java | 32 +- .../LogicalColumn.java} | 153 +- .../catalog/entity/logical/LogicalGraph.java | 18 +- .../catalog/entity/logical/LogicalTable.java | 183 +- .../entity/physical/PhysicalCollection.java | 8 +- .../entity/physical/PhysicalGraph.java | 8 +- .../entity/physical/PhysicalTable.java | 29 +- .../exceptions/UnknownColumnException.java | 4 +- .../exceptions/UnknownDatabaseException.java | 33 - .../exceptions/UnknownSchemaException.java | 9 +- .../exceptions/UnknownTableException.java | 10 +- .../db/partition/PartitionManager.java | 4 +- .../polypheny/db/processing/DataMigrator.java | 10 +- .../db/schema/PolySchemaBuilder.java | 38 +- .../db/schema/PolyphenyDbSchema.java | 12 +- .../db/view/MaterializedViewManager.java | 6 +- .../org/polypheny/db/catalog/MockCatalog.java | 70 +- .../org/polypheny/db/ddl/DdlManagerImpl.java | 200 +- .../partition/AbstractPartitionManager.java | 4 +- .../db/partition/FrequencyMapImpl.java | 24 +- .../db/partition/HashPartitionManager.java | 4 +- .../db/partition/ListPartitionManager.java | 4 +- .../db/partition/RangePartitionManager.java | 4 +- .../TemperatureAwarePartitionManager.java | 4 +- .../processing/ConstraintEnforceAttacher.java | 8 +- .../db/processing/DataMigratorImpl.java | 94 +- .../shuttles/QueryParameterizer.java | 1 - .../db/routing/UiRoutingPageUtil.java | 6 +- .../db/routing/routers/BaseRouter.java | 16 +- .../db/routing/routers/DmlRouterImpl.java | 14 +- .../CreateAllPlacementStrategy.java | 4 +- .../strategies/CreatePlacementStrategy.java | 4 +- .../CreateSinglePlacementStrategy.java | 4 +- .../db/view/MaterializedViewManagerImpl.java | 16 +- .../java/org/polypheny/db/cypher/DdlTest.java | 22 +- .../db/misc/HorizontalPartitioningTest.java | 14 +-
.../db/misc/VerticalPartitioningTest.java | 6 +- .../db/statistics/StatisticsTest.java | 10 +- .../statistics/DashboardInformation.java | 4 +- .../db/monitoring/statistics/QueryResult.java | 8 +- .../statistics/StatisticQueryProcessor.java | 18 +- .../statistics/StatisticsManagerImpl.java | 16 +- .../org/polypheny/db/avatica/DbmsMeta.java | 27 +- .../org/polypheny/db/cql/ColumnIndex.java | 10 +- .../polypheny/db/cql/Cql2RelConverter.java | 6 +- .../org/polypheny/db/cql/Projections.java | 2 +- .../java/org/polypheny/db/cql/TableIndex.java | 2 +- .../cql/utils/helper/AlgBuildTestHelper.java | 4 +- .../polypheny/db/adapter/csv/CsvSchema.java | 11 +- .../polypheny/db/adapter/csv/CsvTable.java | 12 +- .../admin/CypherAlterDatabaseAlias.java | 4 +- .../admin/CypherCreateDatabaseAlias.java | 4 +- .../db/cypher/admin/CypherDropAlias.java | 4 +- .../db/cypher/admin/CypherDropDatabase.java | 4 +- .../db/cypher/ddl/CypherAddPlacement.java | 4 +- .../db/cypher/ddl/CypherDropPlacement.java | 4 +- .../db/hsqldb/stores/HsqldbStore.java | 2 +- .../db/adapter/jdbc/JdbcImplementor.java | 15 +- .../polypheny/db/adapter/jdbc/JdbcRules.java | 23 +- .../jdbc/rel2sql/AlgToSqlConverter.java | 26 +- .../adapter/jdbc/rel2sql/SqlImplementor.java | 7 +- .../jdbc/stores/AbstractJdbcStore.java | 72 +- .../org/polypheny/db/catalog/CatalogImpl.java | 635 +- .../db/catalog/CatalogImplBackup.java | 5218 +++++++++++++++++ .../polypheny/db/catalog/CatalogInfoPage.java | 10 +- .../org/polypheny/db/test/CatalogTest.java | 42 +- .../db/languages/MqlProcessorImpl.java | 4 +- .../languages/mql2alg/MqlToAlgConverter.java | 72 +- .../polypheny/db/mql/mql2alg/Mql2AlgTest.java | 8 +- .../org/polypheny/db/tools/PigAlgBuilder.java | 8 +- .../org/polypheny/db/catalog/IdBuilder.java | 24 +- .../org/polypheny/db/catalog/PolyCatalog.java | 76 +- .../logical/relational/CatalogColumn.java | 45 - .../logical/relational/CatalogTable.java | 78 - .../logical/relational/RelationalCatalog.java | 25 +- .../db/catalog/snapshot/FullSnapshot.java | 194 +- .../polypheny/db/restapi/RequestColumn.java | 10 +- .../polypheny/db/restapi/RequestParser.java | 32 +- .../exception/IllegalColumnException.java | 10 +- .../db/restapi/RequestParserTest.java | 4 +- .../polypheny/db/sql/SqlProcessorImpl.java | 12 +- .../org/polypheny/db/sql/language/SqlDdl.java | 15 +- .../ddl/SqlCreateMaterializedView.java | 7 +- .../db/sql/language/ddl/SqlCreateTable.java | 6 +- .../db/sql/language/ddl/SqlCreateView.java | 7 +- .../altertable/SqlAlterTableAddPlacement.java | 8 +- .../db/sql/web/SchemaToJsonMapper.java | 20 +- .../db/sql/map/NamespaceToJsonMapperTest.java | 14 +- settings.gradle | 24 +- .../java/org/polypheny/db/webui/Crud.java | 174 +- .../polypheny/db/webui/crud/LanguageCrud.java | 22 +- .../models/requests/BatchUpdateRequest.java | 12 +- 111 files changed, 6606 insertions(+), 2119 deletions(-) rename core/src/main/java/org/polypheny/db/catalog/entity/{CatalogColumn.java => logical/LogicalColumn.java} (66%) delete mode 100644 core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownDatabaseException.java create mode 100644 plugins/mapdb-catalog/src/main/java/org/polypheny/db/catalog/CatalogImplBackup.java delete mode 100644 plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/relational/CatalogColumn.java delete mode 100644 plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/relational/CatalogTable.java diff --git a/core/build.gradle b/core/build.gradle index a897c32f20..6bcd367851 100644 --- a/core/build.gradle +++ 
b/core/build.gradle @@ -68,6 +68,8 @@ dependencies { exclude group: "com.github.spotbugs" } + api group: 'io.activej', name: 'activej-serializer', version: '5.5-rc3' // Apache 2.0 + // --- Test Compile --- testImplementation group: "junit", name: "junit", version: junit_version testImplementation group: "org.hamcrest", name: "hamcrest-core", version: hamcrest_core_version // BSD 3-clause diff --git a/core/src/main/java/org/polypheny/db/StatisticsManager.java b/core/src/main/java/org/polypheny/db/StatisticsManager.java index 62243e753f..c25940e90c 100644 --- a/core/src/main/java/org/polypheny/db/StatisticsManager.java +++ b/core/src/main/java/org/polypheny/db/StatisticsManager.java @@ -19,8 +19,8 @@ import java.beans.PropertyChangeListener; import java.util.List; import java.util.Map; -import org.polypheny.db.catalog.entity.CatalogColumn; import org.polypheny.db.catalog.entity.CatalogSchema; +import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalTable; @@ -84,7 +84,7 @@ public static StatisticsManager getInstance() { public abstract void initializeStatisticSettings(); - public abstract void updateColumnName( CatalogColumn catalogColumn, String newName ); + public abstract void updateColumnName( LogicalColumn logicalColumn, String newName ); public abstract void updateTableName( LogicalTable catalogTable, String newName ); diff --git a/core/src/main/java/org/polypheny/db/adapter/DataStore.java b/core/src/main/java/org/polypheny/db/adapter/DataStore.java index 458505aae1..f5ceb95b66 100644 --- a/core/src/main/java/org/polypheny/db/adapter/DataStore.java +++ b/core/src/main/java/org/polypheny/db/adapter/DataStore.java @@ -30,13 +30,13 @@ import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogAdapter.AdapterType; import org.polypheny.db.catalog.entity.CatalogCollectionMapping; -import org.polypheny.db.catalog.entity.CatalogColumn; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; import org.polypheny.db.catalog.entity.CatalogGraphMapping; import org.polypheny.db.catalog.entity.CatalogGraphPlacement; import org.polypheny.db.catalog.entity.CatalogIndex; import org.polypheny.db.catalog.entity.allocation.AllocationTable; import org.polypheny.db.catalog.entity.logical.LogicalCollection; +import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.entity.physical.PhysicalTable; @@ -71,7 +71,7 @@ public List getSupportedSchemaType() { public abstract void dropTable( Context context, LogicalTable combinedTable, List partitionIds ); - public abstract void addColumn( Context context, LogicalTable catalogTable, CatalogColumn catalogColumn ); + public abstract void addColumn( Context context, LogicalTable catalogTable, LogicalColumn logicalColumn ); public abstract void dropColumn( Context context, CatalogColumnPlacement columnPlacement ); @@ -79,7 +79,7 @@ public List getSupportedSchemaType() { public abstract void dropIndex( Context context, CatalogIndex catalogIndex, List partitionIds ); - public abstract void updateColumnType( Context context, CatalogColumnPlacement columnPlacement, CatalogColumn catalogColumn, PolyType oldType ); + public abstract void updateColumnType( Context context, CatalogColumnPlacement columnPlacement, LogicalColumn logicalColumn, PolyType oldType ); public abstract List getAvailableIndexMethods(); diff --git 
a/core/src/main/java/org/polypheny/db/algebra/core/document/DocumentModify.java b/core/src/main/java/org/polypheny/db/algebra/core/document/DocumentModify.java index e9480b362f..b0eded6743 100644 --- a/core/src/main/java/org/polypheny/db/algebra/core/document/DocumentModify.java +++ b/core/src/main/java/org/polypheny/db/algebra/core/document/DocumentModify.java @@ -19,16 +19,12 @@ import java.util.List; import lombok.Getter; import org.polypheny.db.algebra.AlgNode; -import org.polypheny.db.algebra.SingleAlg; import org.polypheny.db.algebra.constant.Kind; -import org.polypheny.db.algebra.core.common.Modify.Operation; import org.polypheny.db.algebra.core.common.Modify; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.catalog.entity.CatalogEntity; -import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptUtil; import org.polypheny.db.plan.AlgTraitSet; -import org.polypheny.db.prepare.Prepare.CatalogReader; import org.polypheny.db.rex.RexNode; @@ -39,20 +35,17 @@ public abstract class DocumentModify extends Modify private final List keys; @Getter private final List updates; - @Getter - private final CatalogReader catalogReader; /** * Creates a {@link DocumentModify}. * {@link org.polypheny.db.schema.ModelTrait#DOCUMENT} node, which modifies a collection. */ - protected DocumentModify( AlgTraitSet traits, E collection, CatalogReader catalogReader, AlgNode input, Operation operation, List keys, List updates ) { + protected DocumentModify( AlgTraitSet traits, E collection, AlgNode input, Operation operation, List keys, List updates ) { super( input.getCluster(), input.getTraitSet(), collection, input ); this.operation = operation; this.keys = keys; this.updates = updates; - this.catalogReader = catalogReader; this.traitSet = traits; } diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/document/LogicalDocumentModify.java b/core/src/main/java/org/polypheny/db/algebra/logical/document/LogicalDocumentModify.java index 02a5d21ab3..a938159c61 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/document/LogicalDocumentModify.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/document/LogicalDocumentModify.java @@ -24,7 +24,6 @@ import org.polypheny.db.catalog.Snapshot; import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.plan.AlgTraitSet; -import org.polypheny.db.prepare.Prepare.CatalogReader; import org.polypheny.db.rex.RexNode; @@ -33,19 +32,19 @@ public class LogicalDocumentModify extends DocumentModify impleme /** * Subclass of {@link DocumentModify} not targeted at any particular engine or calling convention. 
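 *
 * <p>A minimal usage sketch of the refactored factory (hypothetical {@code collection}, {@code input}, {@code keys} and {@code updates}; note that the former {@code CatalogReader} parameter is gone, since the target collection is now carried by the entity itself):
 * <pre>
 * LogicalDocumentModify modify = LogicalDocumentModify.create( collection, input, Operation.UPDATE, keys, updates );
 * </pre>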
*/ - public LogicalDocumentModify( AlgTraitSet traits, CatalogEntity entity, CatalogReader catalogReader, AlgNode input, Operation operation, List keys, List updates ) { - super( traits, entity, catalogReader, input, operation, keys, updates ); + public LogicalDocumentModify( AlgTraitSet traits, CatalogEntity entity, AlgNode input, Operation operation, List keys, List updates ) { + super( traits, entity, input, operation, keys, updates ); } - public static LogicalDocumentModify create( CatalogEntity entity, AlgNode input, CatalogReader catalogReader, Operation operation, List keys, List updates ) { - return new LogicalDocumentModify( input.getTraitSet(), entity, catalogReader, input, operation, keys, updates ); + public static LogicalDocumentModify create( CatalogEntity entity, AlgNode input, Operation operation, List keys, List updates ) { + return new LogicalDocumentModify( input.getTraitSet(), entity, input, operation, keys, updates ); } @Override public AlgNode copy( AlgTraitSet traitSet, List inputs ) { - return new LogicalDocumentModify( traitSet, entity, getCatalogReader(), inputs.get( 0 ), operation, getKeys(), getUpdates() ); + return new LogicalDocumentModify( traitSet, entity, inputs.get( 0 ), operation, getKeys(), getUpdates() ); } diff --git a/core/src/main/java/org/polypheny/db/catalog/Catalog.java b/core/src/main/java/org/polypheny/db/catalog/Catalog.java index 75e5321aaf..1c40b5299d 100644 --- a/core/src/main/java/org/polypheny/db/catalog/Catalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/Catalog.java @@ -34,11 +34,9 @@ import org.polypheny.db.catalog.entity.CatalogAdapter.AdapterType; import org.polypheny.db.catalog.entity.CatalogCollectionMapping; import org.polypheny.db.catalog.entity.CatalogCollectionPlacement; -import org.polypheny.db.catalog.entity.CatalogColumn; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; import org.polypheny.db.catalog.entity.CatalogConstraint; import org.polypheny.db.catalog.entity.CatalogDataPlacement; -import org.polypheny.db.catalog.entity.CatalogDatabase; import org.polypheny.db.catalog.entity.CatalogForeignKey; import org.polypheny.db.catalog.entity.CatalogGraphMapping; import org.polypheny.db.catalog.entity.CatalogGraphPlacement; @@ -52,8 +50,9 @@ import org.polypheny.db.catalog.entity.CatalogSchema; import org.polypheny.db.catalog.entity.CatalogUser; import org.polypheny.db.catalog.entity.CatalogView; -import org.polypheny.db.catalog.entity.logical.LogicalCollection; import org.polypheny.db.catalog.entity.MaterializedCriteria; +import org.polypheny.db.catalog.entity.logical.LogicalCollection; +import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.exceptions.GenericCatalogException; @@ -61,7 +60,6 @@ import org.polypheny.db.catalog.exceptions.UnknownAdapterException; import org.polypheny.db.catalog.exceptions.UnknownColumnException; import org.polypheny.db.catalog.exceptions.UnknownConstraintException; -import org.polypheny.db.catalog.exceptions.UnknownDatabaseException; import org.polypheny.db.catalog.exceptions.UnknownForeignKeyException; import org.polypheny.db.catalog.exceptions.UnknownIndexException; import org.polypheny.db.catalog.exceptions.UnknownQueryInterfaceException; @@ -91,7 +89,7 @@ public abstract class Catalog implements ExtensionPoint { public static boolean resetDocker; protected final PropertyChangeSupport listeners = new 
PropertyChangeSupport( this ); public boolean isPersistent = false; - public static Catalog INSTANCE = null; + private static Catalog INSTANCE = null; public static boolean resetCatalog; public static boolean memoryCatalog; public static boolean testMode; @@ -120,7 +118,6 @@ public static Catalog getInstance() { public abstract void rollback(); - public abstract Map getAlgTypeInfo(); public abstract Map getNodeInfo(); @@ -184,69 +181,14 @@ protected final boolean isValidIdentifier( final String str ) { public abstract int addUser( String name, String password ); - /** - * Adds a database - * - * @param name The name of the database - * @param ownerId The owner of this database - * @param ownerName The name of the owner - * @param defaultSchemaId The id of the default schema of this database - * @param defaultSchemaName The name of the default schema of this database - * @return the id of the newly inserted database - */ - public abstract long addDatabase( String name, int ownerId, String ownerName, long defaultSchemaId, String defaultSchemaName ); - - /** - * Delete a database from the catalog - * - * @param databaseId The id of the database to delete - */ - public abstract void deleteDatabase( long databaseId ); - - /** - * Get all databases - * - * @param pattern A pattern for the database name - * @return List of databases - */ - public abstract List getDatabases( Pattern pattern ); - - /** - * Returns the database with the given name. - * - * @param databaseName The name of the database - * @return The database - * @throws UnknownDatabaseException If there is no database with this name. - */ - public abstract CatalogDatabase getDatabase( String databaseName ) throws UnknownDatabaseException; - - /** - * Returns the database with the given name. - * - * @param databaseId The id of the database - * @return The database - */ - public abstract CatalogDatabase getDatabase( long databaseId ); - /** * Get all schemas which fit to the specified filter pattern. * getSchemas(xid, null, null) returns all schemas of all databases. * - * @param databaseNamePattern Pattern for the database name. null returns all. * @param schemaNamePattern Pattern for the schema name. null returns all. * @return List of schemas which fit to the specified filter. If there is no schema which meets the criteria, an empty list is returned. */ - public abstract List getSchemas( Pattern databaseNamePattern, Pattern schemaNamePattern ); - - /** - * Get all schemas of the specified database which fit to the specified filter pattern. - * getSchemas(xid, databaseName, null) returns all schemas of the database. - * - * @param databaseId The id of the database - * @param schemaNamePattern Pattern for the schema name. null returns all - * @return List of schemas which fit to the specified filter. If there is no schema which meets the criteria, an empty list is returned. - */ - public abstract List getSchemas( long databaseId, Pattern schemaNamePattern ); + public abstract List getSchemas( Pattern schemaNamePattern ); /** * Returns the schema with the specified id. @@ -259,42 +201,29 @@ protected final boolean isValidIdentifier( final String str ) { /** * Returns the schema with the given name in the specified database. * - * @param databaseName The name of the database * @param schemaName The name of the schema * @return The schema * @throws UnknownSchemaException If there is no schema with this name in the specified database. 
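 *
 * <p>Example of the narrowed lookup (a sketch; {@code "public"} is an assumed namespace name, not taken from this patch):
 * <pre>
 * CatalogSchema schema = Catalog.getInstance().getSchema( "public" ); // throws UnknownSchemaException if absent
 * </pre>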
*/ - public abstract CatalogSchema getSchema( String databaseName, String schemaName ) throws UnknownSchemaException, UnknownDatabaseException; - - /** - * Returns the schema with the given name in the specified database. - * - * @param databaseId The id of the database - * @param schemaName The name of the schema - * @return The schema - * @throws UnknownSchemaException If there is no schema with this name in the specified database. - */ - public abstract CatalogSchema getSchema( long databaseId, String schemaName ) throws UnknownSchemaException; + public abstract CatalogSchema getSchema( String schemaName ) throws UnknownSchemaException; /** * Adds a schema in a specified database * * @param name The name of the schema - * @param databaseId The id of the associated database * @param ownerId The owner of this schema * @param namespaceType The type of this schema * @return The id of the inserted schema */ - public abstract long addNamespace( String name, long databaseId, int ownerId, NamespaceType namespaceType ); + public abstract long addNamespace( String name, int ownerId, NamespaceType namespaceType ); /** * Checks whether a schema with the specified name exists in a database. * - * @param databaseId The if of the database * @param schemaName The name of the schema to check * @return True if there is a schema with this name. False if not. */ - public abstract boolean checkIfExistsSchema( long databaseId, String schemaName ); + public abstract boolean checkIfExistsSchema( String schemaName ); /** * Renames a schema @@ -315,7 +244,6 @@ protected final boolean isValidIdentifier( final String str ) { /** * Adds a new graph to the catalog, on the same layer as schema in relational. * - * @param databaseId ID of the graph, which is also the id of the database * @param name The name of the graph * @param stores The datastores on which the graph is placed * @param modifiable If the graph is modifiable * @param ifNotExists If the graph should only be created when it not already exists * @param replace If the graph should replace an existing one * @return The id of the newly added graph */ - public abstract long addGraph( long databaseId, String name, List stores, boolean modifiable, boolean ifNotExists, boolean replace ); + public abstract long addGraph( String name, List stores, boolean modifiable, boolean ifNotExists, boolean replace ); /** * Additional operations for the creation of a graph entity. @@ -352,11 +280,10 @@ protected final boolean isValidIdentifier( final String str ) { /** * Get a collection of all graphs, which match the given conditions. * - * @param databaseId The id of the database to which the graph has to belong * @param graphName The pattern to which the name has to match, null if every name is matched * @return A collection of all graphs matching */ - public abstract List getGraphs( long databaseId, Pattern graphName ); + public abstract List getGraphs( Pattern graphName ); /** * Add a new alias for a given graph. @@ -405,33 +332,21 @@ protected final boolean isValidIdentifier( final String str ) { * Get all tables of the specified database which fit to the specified filters. * getTables(xid, databaseName, null, null, null) returns all tables of the database. * - * @param databaseId The id of the database * @param schemaNamePattern Pattern for the schema name. null returns all. * @param tableNamePattern Pattern for the table name. null returns all. * @return List of tables which fit to the specified filters. 
If there is no table which meets the criteria, an empty list is returned. */ - public abstract List getTables( long databaseId, Pattern schemaNamePattern, Pattern tableNamePattern ); + public abstract List getTables( Pattern schemaNamePattern, Pattern tableNamePattern ); /** * Returns the table with the given name in the specified database and schema. * - * @param databaseName The name of the database * @param schemaName The name of the schema * @param tableName The name of the table * @return The table */ - public abstract LogicalTable getTable( String databaseName, String schemaName, String tableName ) throws UnknownTableException, UnknownDatabaseException, UnknownSchemaException; + public abstract LogicalTable getTable( String schemaName, String tableName ) throws UnknownTableException, UnknownSchemaException; - /** - * Get all tables of the specified database which fit to the specified filters. - * getTables(xid, databaseName, null, null, null) returns all tables of the database. - * - * @param databaseNamePattern Pattern for the database name. null returns all. - * @param schemaNamePattern Pattern for the schema name. null returns all. - * @param tableNamePattern Pattern for the table name. null returns all. - * @return List of tables which fit to the specified filters. If there is no table which meets the criteria, an empty list is returned. - */ - public abstract List getTables( Pattern databaseNamePattern, Pattern schemaNamePattern, Pattern tableNamePattern ); /** * Returns the table with the given id @@ -451,17 +366,6 @@ protected final boolean isValidIdentifier( final String str ) { */ public abstract LogicalTable getTable( long schemaId, String tableName ) throws UnknownTableException; - /** - * Returns the table with the given name in the specified database and schema. - * - * @param databaseId The id of the database - * @param schemaName The name of the schema - * @param tableName The name of the table - * @return The table - * @throws UnknownTableException If there is no table with this name in the specified database and schema. - */ - public abstract LogicalTable getTable( long databaseId, String schemaName, String tableName ) throws UnknownTableException; - /** * Returns the table which is associated with a given partitionId * @@ -748,19 +652,18 @@ protected final boolean isValidIdentifier( final String str ) { * @param tableId The id of the table * @return List of columns which fit to the specified filters. If there is no column which meets the criteria, an empty list is returned. */ - public abstract List getColumns( long tableId ); + public abstract List getColumns( long tableId ); /** * Get all columns of the specified database which fit to the specified filter patterns. * getColumns(xid, databaseName, null, null, null) returns all columns of the database. * - * @param databaseNamePattern Pattern for the database name. null returns all. * @param schemaNamePattern Pattern for the schema name. null returns all. * @param tableNamePattern Pattern for the table name. null returns all. * @param columnNamePattern Pattern for the column name. null returns all. * @return List of columns which fit to the specified filters. If there is no column which meets the criteria, an empty list is returned. 
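 *
 * <p>Sketch of a pattern-based lookup after the database level was dropped ({@code LogicalColumn} replaces {@code CatalogColumn} as the element type; the pattern value is an assumption for illustration):
 * <pre>
 * List&lt;LogicalColumn&gt; columns = Catalog.getInstance().getColumns( null, new Pattern( "emps" ), null ); // null patterns match all
 * </pre>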
*/ - public abstract List getColumns( Pattern databaseNamePattern, Pattern schemaNamePattern, Pattern tableNamePattern, Pattern columnNamePattern ); + public abstract List getColumns( Pattern schemaNamePattern, Pattern tableNamePattern, Pattern columnNamePattern ); /** * Returns the column with the specified id. @@ -768,7 +671,7 @@ protected final boolean isValidIdentifier( final String str ) { * @param columnId The id of the column * @return A CatalogColumn */ - public abstract CatalogColumn getColumn( long columnId ); + public abstract LogicalColumn getColumn( long columnId ); /** * Returns the column with the specified name in the specified table of the specified database and schema. @@ -778,18 +681,17 @@ protected final boolean isValidIdentifier( final String str ) { * @return A CatalogColumn * @throws UnknownColumnException If there is no column with this name in the specified table of the database and schema. */ - public abstract CatalogColumn getColumn( long tableId, String columnName ) throws UnknownColumnException; + public abstract LogicalColumn getColumn( long tableId, String columnName ) throws UnknownColumnException; /** * Returns the column with the specified name in the specified table of the specified database and schema. * - * @param databaseName The name of the database * @param schemaName The name of the schema * @param tableName The name of the table * @param columnName The name of the column * @return A CatalogColumn */ - public abstract CatalogColumn getColumn( String databaseName, String schemaName, String tableName, String columnName ) throws UnknownColumnException, UnknownSchemaException, UnknownDatabaseException, UnknownTableException; + public abstract LogicalColumn getColumn( String schemaName, String tableName, String columnName ) throws UnknownColumnException, UnknownSchemaException, UnknownTableException; /** * Adds a column. @@ -1322,12 +1224,11 @@ protected final boolean isValidIdentifier( final String str ) { * Get all partitions of the specified database which fit to the specified filter patterns. * getColumns(xid, databaseName, null, null, null) returns all partitions of the database. * - * @param databaseNamePattern Pattern for the database name. null returns all. * @param schemaNamePattern Pattern for the schema name. null returns all. * @param tableNamePattern Pattern for the table name. null returns all. * @return List of columns which fit to the specified filters. If there is no column which meets the criteria, an empty list is returned. */ - public abstract List getPartitionGroups( Pattern databaseNamePattern, Pattern schemaNamePattern, Pattern tableNamePattern ); + public abstract List getPartitionGroups( Pattern schemaNamePattern, Pattern tableNamePattern ); /** * Updates the specified partition group with the attached partitionIds @@ -1373,12 +1274,11 @@ protected final boolean isValidIdentifier( final String str ) { * Get all partitions of the specified database which fit to the specified filter patterns. * getColumns(xid, databaseName, null, null, null) returns all partitions of the database. * - * @param databaseNamePattern Pattern for the database name. null returns all. * @param schemaNamePattern Pattern for the schema name. null returns all. * @param tableNamePattern Pattern for the table name. null returns all. * @return List of columns which fit to the specified filters. 
If there is no column which meets the criteria, an empty list is returned. */ - public abstract List getPartitions( Pattern databaseNamePattern, Pattern schemaNamePattern, Pattern tableNamePattern ); + public abstract List getPartitions( Pattern schemaNamePattern, Pattern tableNamePattern ); /** * Get a list of all partition names belonging to a specific table diff --git a/core/src/main/java/org/polypheny/db/catalog/Snapshot.java b/core/src/main/java/org/polypheny/db/catalog/Snapshot.java index 6cfc3a9fdc..ac29ea85b5 100644 --- a/core/src/main/java/org/polypheny/db/catalog/Snapshot.java +++ b/core/src/main/java/org/polypheny/db/catalog/Snapshot.java @@ -67,6 +67,9 @@ default Expression getSnapshotExpression( long id ) { CatalogEntity getEntity( long namespaceId, Pattern name ); + @Deprecated + CatalogEntity getEntity( List names ); + //// LOGICAL ENTITIES @Deprecated LogicalTable getLogicalTable( List names ); diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogDataPlacement.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogDataPlacement.java index 08bea250f4..de4abb657d 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogDataPlacement.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogDataPlacement.java @@ -26,8 +26,10 @@ import java.util.Map; import java.util.stream.Collectors; import lombok.NonNull; -import lombok.Setter; import lombok.SneakyThrows; +import lombok.Value; +import lombok.With; +import lombok.experimental.NonFinal; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.logistic.DataPlacementRole; import org.polypheny.db.catalog.logistic.PlacementType; @@ -36,25 +38,27 @@ /** * Serves as a container, which holds all information related to a table entity placed on physical store. */ +@With +@Value public class CatalogDataPlacement implements CatalogObject { private static final long serialVersionUID = 5192378654968316873L; - public final long tableId; - public final int adapterId; + public long tableId; + public int adapterId; - public final PlacementType placementType; + public PlacementType placementType; // Is present at the DataPlacement && the PartitionPlacement // Although, partitionPlacements are those that get effectively updated // A DataPlacement can directly forbid that any Placements within this DataPlacement container can get outdated. // Therefore, the role at the DataPlacement specifies if underlying placements can even be outdated. - public final DataPlacementRole dataPlacementRole; + public DataPlacementRole dataPlacementRole; - public final ImmutableList columnPlacementsOnAdapter; + public ImmutableList columnPlacementsOnAdapter; // Serves as a pre-aggregation to apply filters more easily. In that case reads are more important // and frequent than writes - public final ImmutableMap> partitionPlacementsOnAdapterByRole; + public ImmutableMap> partitionPlacementsOnAdapterByRole; // The newest commit timestamp when any partitions inside this placement has been updated or refreshed @@ -62,7 +66,7 @@ public class CatalogDataPlacement implements CatalogObject { // Technically other linked attachments could still have older update timestamps. // This should help to quickly identify placements that can fulfil certain conditions. 
// Without having to traverse all partition placements one-by-one - @Setter + @NonFinal public Timestamp updateTimestamp; @@ -71,14 +75,14 @@ public CatalogDataPlacement( int adapterId, PlacementType placementType, DataPlacementRole dataPlacementRole, - @NonNull final ImmutableList columnPlacementsOnAdapter, - @NonNull final ImmutableList partitionPlacementsOnAdapter ) { + @NonNull final List columnPlacementsOnAdapter, + @NonNull final List partitionPlacementsOnAdapter ) { this.tableId = tableId; this.adapterId = adapterId; this.placementType = placementType; this.dataPlacementRole = dataPlacementRole; - this.columnPlacementsOnAdapter = columnPlacementsOnAdapter; - this.partitionPlacementsOnAdapterByRole = structurizeDataPlacements( partitionPlacementsOnAdapter ); + this.columnPlacementsOnAdapter = ImmutableList.copyOf( columnPlacementsOnAdapter ); + this.partitionPlacementsOnAdapterByRole = ImmutableMap.copyOf( structurizeDataPlacements( partitionPlacementsOnAdapter ) ); } @@ -124,7 +128,7 @@ public Serializable[] getParameterArray() { } - private ImmutableMap> structurizeDataPlacements( @NonNull final ImmutableList unsortedPartitionIds ) { + private Map> structurizeDataPlacements( @NonNull final List unsortedPartitionIds ) { // Since this shall only be called after initialization of dataPlacement object, // we need to clear the contents of partitionPlacementsOnAdapterByRole Map> partitionsPerRole = new HashMap<>(); @@ -156,7 +160,7 @@ private ImmutableMap> structurizeDataPlac } // Finally, overwrite entire partitionPlacementsOnAdapterByRole at Once - return ImmutableMap.copyOf( partitionsPerRole ); + return partitionsPerRole; } } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogEntity.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogEntity.java index a46da60dc8..67c08571db 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogEntity.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogEntity.java @@ -18,6 +18,8 @@ import java.io.Serializable; import java.util.List; +import lombok.Value; +import lombok.experimental.NonFinal; import lombok.experimental.SuperBuilder; import org.polypheny.db.StatisticsManager; import org.polypheny.db.algebra.AlgDistribution; @@ -30,17 +32,18 @@ import org.polypheny.db.catalog.refactor.Expressible; import org.polypheny.db.plan.AlgMultipleTrait; import org.polypheny.db.schema.Statistic; -import org.polypheny.db.schema.Statistics; import org.polypheny.db.schema.Wrapper; import org.polypheny.db.util.ImmutableBitSet; @SuperBuilder(toBuilder = true) +@Value +@NonFinal public abstract class CatalogEntity implements CatalogObject, Wrapper, Serializable, CatalogType, Expressible { - public final long id; - public final EntityType entityType; - public final NamespaceType namespaceType; - public final String name; + public long id; + public EntityType entityType; + public NamespaceType namespaceType; + public String name; protected CatalogEntity( long id, String name, EntityType type, NamespaceType namespaceType ) { diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogForeignKey.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogForeignKey.java index 037a5e743c..fc0c35b7eb 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogForeignKey.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogForeignKey.java @@ -34,7 +34,6 @@ public final class CatalogForeignKey extends CatalogKey { public final String name; public final long 
referencedKeyId; - public final long referencedKeyDatabaseId; public final long referencedKeySchemaId; public final long referencedKeyTableId; public final ForeignKeyOption updateRule; @@ -47,33 +46,24 @@ public CatalogForeignKey( @NonNull final String name, final long tableId, final long schemaId, - final long databaseId, final long referencedKeyId, final long referencedKeyTableId, final long referencedKeySchemaId, - final long referencedKeyDatabaseId, final List columnIds, final List referencedKeyColumnIds, final ForeignKeyOption updateRule, final ForeignKeyOption deleteRule ) { - super( id, tableId, schemaId, databaseId, columnIds, EnforcementTime.ON_COMMIT ); + super( id, tableId, schemaId, columnIds, EnforcementTime.ON_COMMIT ); this.name = name; this.referencedKeyId = referencedKeyId; this.referencedKeyTableId = referencedKeyTableId; this.referencedKeySchemaId = referencedKeySchemaId; - this.referencedKeyDatabaseId = referencedKeyDatabaseId; this.referencedKeyColumnIds = ImmutableList.copyOf( referencedKeyColumnIds ); this.updateRule = updateRule; this.deleteRule = deleteRule; } - @SneakyThrows - public String getReferencedKeyDatabaseName() { - return Catalog.getInstance().getDatabase( referencedKeyDatabaseId ).name; - } - - @SneakyThrows public String getReferencedKeySchemaName() { return Catalog.getInstance().getSchema( referencedKeySchemaId ).name; @@ -112,11 +102,9 @@ public List getCatalogForeignKeyColumns() { public Serializable[] getParameterArray( String referencedKeyColumnName, String foreignKeyColumnName, int keySeq ) { return new Serializable[]{ - getReferencedKeyDatabaseName(), getReferencedKeySchemaName(), getReferencedKeyTableName(), referencedKeyColumnName, - getDatabaseName(), getSchemaName(), getTableName(), foreignKeyColumnName, diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogKey.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogKey.java index 8cc24ed7d9..1db08da52e 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogKey.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogKey.java @@ -33,7 +33,6 @@ public class CatalogKey implements CatalogObject, Comparable { public final long id; public final long tableId; public final long schemaId; - public final long databaseId; public final ImmutableList columnIds; public final EnforcementTime enforcementTime; @@ -42,24 +41,16 @@ public CatalogKey( final long id, final long tableId, final long schemaId, - final long databaseId, final List columnIds, EnforcementTime enforcementTime ) { this.id = id; this.tableId = tableId; this.schemaId = schemaId; - this.databaseId = databaseId; this.columnIds = ImmutableList.copyOf( columnIds ); this.enforcementTime = enforcementTime; } - @SneakyThrows - public String getDatabaseName() { - return Catalog.getInstance().getDatabase( databaseId ).name; - } - - @SneakyThrows public String getSchemaName() { return Catalog.getInstance().getSchema( schemaId ).name; @@ -85,7 +76,7 @@ public List getColumnNames() { @Override public Serializable[] getParameterArray() { - return new Serializable[]{ id, tableId, getTableName(), schemaId, getSchemaName(), databaseId, getDatabaseName(), null, null }; + return new Serializable[]{ id, tableId, getTableName(), schemaId, getSchemaName(), null, null }; } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogMaterializedView.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogMaterializedView.java index dc262bd092..ea10a0da0f 100644 --- 
a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogMaterializedView.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogMaterializedView.java @@ -16,54 +16,52 @@ package org.polypheny.db.catalog.entity; -import com.google.common.collect.ImmutableList; -import com.google.common.collect.ImmutableMap; -import lombok.Getter; +import java.util.List; +import java.util.Map; +import lombok.EqualsAndHashCode; import lombok.NonNull; +import lombok.Value; +import lombok.With; import org.polypheny.db.algebra.AlgCollation; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.logistic.EntityType; -import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.languages.QueryLanguage; import org.polypheny.db.partition.properties.PartitionProperty; - +@EqualsAndHashCode(callSuper = true) +@With +@Value public class CatalogMaterializedView extends CatalogView { private static final long serialVersionUID = 4728996184367206274L; - private final String language; + public String language; - @Getter - private final AlgCollation algCollation; + public AlgCollation algCollation; - @Getter - private final String query; + public String query; - @Getter - private final MaterializedCriteria materializedCriteria; + public MaterializedCriteria materializedCriteria; - @Getter - private final boolean ordered; + public boolean ordered; public CatalogMaterializedView( long id, String name, - ImmutableList columnIds, + List columnIds, long schemaId, - long databaseId, int ownerId, EntityType entityType, String query, Long primaryKey, - @NonNull ImmutableList dataPlacements, + @NonNull List dataPlacements, boolean modifiable, PartitionProperty partitionProperty, AlgCollation algCollation, - ImmutableList connectedViews, - ImmutableMap> underlyingTables, + List connectedViews, + Map> underlyingTables, String language, MaterializedCriteria materializedCriteria, boolean ordered @@ -73,7 +71,6 @@ public CatalogMaterializedView( name, columnIds, schemaId, - databaseId, ownerId, entityType, query, @@ -93,76 +90,6 @@ public CatalogMaterializedView( } - @Override - public LogicalTable getTableWithColumns( ImmutableList newColumnIds ) { - return new CatalogMaterializedView( - id, - name, - newColumnIds, - namespaceId, - databaseId, - ownerId, - entityType, - query, - primaryKey, - dataPlacements, - modifiable, - partitionProperty, - algCollation, - connectedViews, - underlyingTables, - language, - materializedCriteria, - ordered ); - } - - - @Override - public LogicalTable getConnectedViews( ImmutableList newConnectedViews ) { - return new CatalogMaterializedView( - id, - name, - fieldIds, - namespaceId, - databaseId, - ownerId, - entityType, - query, - primaryKey, - dataPlacements, - modifiable, - partitionProperty, - algCollation, - newConnectedViews, - underlyingTables, - language, - materializedCriteria, - ordered ); - } - - - @Override - public LogicalTable getRenamed( String newName ) { - return new CatalogMaterializedView( - id, - newName, - fieldIds, - namespaceId, - databaseId, - ownerId, - entityType, - query, - primaryKey, - dataPlacements, - modifiable, - partitionProperty, - algCollation, - connectedViews, - underlyingTables, - language, - materializedCriteria, - ordered ); - } @Override diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogPartition.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogPartition.java index f26ece7223..e4848f3ccd 100644 --- 
a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogPartition.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogPartition.java @@ -38,7 +38,6 @@ public class CatalogPartition implements CatalogObject { public final long partitionGroupId; public final long tableId; public final long schemaId; - public final long databaseId; public final boolean isUnbound; @@ -46,14 +45,12 @@ public CatalogPartition( final long id, final long tableId, final long schemaId, - final long databaseId, final List partitionQualifiers, final boolean isUnbound, final long partitionGroupId ) { this.id = id; this.tableId = tableId; this.schemaId = schemaId; - this.databaseId = databaseId; this.partitionQualifiers = ImmutableList.copyOf( partitionQualifiers ); this.isUnbound = isUnbound; this.partitionGroupId = partitionGroupId; diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogPartitionGroup.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogPartitionGroup.java index 9ea7e838b5..dbe504f3ba 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogPartitionGroup.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogPartitionGroup.java @@ -33,7 +33,6 @@ public final class CatalogPartitionGroup implements CatalogObject { public final String partitionGroupName; public final long tableId; public final long schemaId; - public final long databaseId; public final ImmutableList partitionQualifiers; public final ImmutableList partitionIds; public final boolean isUnbound; @@ -46,7 +45,6 @@ public CatalogPartitionGroup( final String partitionGroupName, final long tableId, final long schemaId, - final long databaseId, final long partitionKey, final List partitionQualifiers, final List partitionIds, @@ -55,7 +53,6 @@ public CatalogPartitionGroup( this.partitionGroupName = partitionGroupName; this.tableId = tableId; this.schemaId = schemaId; - this.databaseId = databaseId; this.partitionKey = partitionKey; // TODO @HENNLO Although the qualifiers are now part of CatalogPartitions, it might be a good improvement to // accumulate all qualifiers of all internal partitions here to speed up query time. 
@@ -75,11 +72,6 @@ public String getTableName() { } - @SneakyThrows - public String getDatabaseName() { - return Catalog.getInstance().getDatabase( databaseId ).name; - } - @SneakyThrows public String getSchemaName() { diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogSchema.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogSchema.java index db979c6d4d..a351893169 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogSchema.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogSchema.java @@ -22,8 +22,6 @@ import lombok.Getter; import lombok.NonNull; import lombok.RequiredArgsConstructor; -import lombok.SneakyThrows; -import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.logistic.NamespaceType; @@ -35,7 +33,6 @@ public final class CatalogSchema extends CatalogNamespace implements CatalogObje public final long id; @Getter public final String name; - public final long databaseId; public final int ownerId; public final String ownerName; @Getter @@ -48,7 +45,6 @@ public final class CatalogSchema extends CatalogNamespace implements CatalogObje public CatalogSchema( final long id, @NonNull final String name, - final long databaseId, final int ownerId, @NonNull final String ownerName, @NonNull final NamespaceType namespaceType, @@ -56,7 +52,6 @@ public CatalogSchema( super( id, name, namespaceType ); this.id = id; this.name = name; - this.databaseId = databaseId; this.ownerId = ownerId; this.ownerName = ownerName; this.namespaceType = namespaceType; @@ -64,30 +59,19 @@ public CatalogSchema( } - @SneakyThrows - public String getDatabaseName() { - return Catalog.getInstance().getDatabase( databaseId ).name; - } - - // Used for creating ResultSets @Override public Serializable[] getParameterArray() { - return new Serializable[]{ name, getDatabaseName(), ownerName, CatalogObject.getEnumNameOrNull( namespaceType ) }; + return new Serializable[]{ name, ownerName, CatalogObject.getEnumNameOrNull( namespaceType ) }; } @Override public int compareTo( CatalogSchema o ) { if ( o != null ) { - int comp = (int) (this.databaseId - o.databaseId); - if ( comp == 0 ) { - return (int) (this.id - o.id); - } else { - return comp; - } - + return (int) (this.id - o.id); } + return -1; } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogView.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogView.java index 32a5fe5fe4..b129c184d8 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogView.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogView.java @@ -19,7 +19,14 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; -import lombok.Getter; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.stream.Collectors; +import lombok.EqualsAndHashCode; +import lombok.Value; +import lombok.With; +import lombok.experimental.NonFinal; import org.polypheny.db.algebra.AbstractAlgNode; import org.polypheny.db.algebra.AlgCollation; import org.polypheny.db.algebra.AlgNode; @@ -27,120 +34,53 @@ import org.polypheny.db.algebra.SingleAlg; import org.polypheny.db.algebra.logical.relational.LogicalRelViewScan; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.languages.QueryLanguage; import 
org.polypheny.db.partition.properties.PartitionProperty; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.view.ViewManager.ViewVisitor; - +@EqualsAndHashCode(callSuper = true) +@With +@Value +@NonFinal public class CatalogView extends LogicalTable { private static final long serialVersionUID = -4771308114962700515L; - @Getter - protected final ImmutableMap> underlyingTables; - private final String language; - @Getter - private final AlgCollation algCollation; - @Getter - private final String query; + public ImmutableMap> underlyingTables; + public String language; + public AlgCollation algCollation; + public String query; public CatalogView( long id, String name, - ImmutableList columnIds, + List columnIds, long schemaId, - long databaseId, int ownerId, EntityType entityType, String query, Long primaryKey, - ImmutableList dataPlacements, + List dataPlacements, boolean modifiable, PartitionProperty partitionProperty, AlgCollation algCollation, - ImmutableList connectedViews, - ImmutableMap> underlyingTables, + List connectedViews, + Map> underlyingTables, String language ) { - super( id, name, columnIds, schemaId, databaseId, ownerId, entityType, primaryKey, dataPlacements, + super( id, name, columnIds, schemaId, ownerId, entityType, primaryKey, dataPlacements, modifiable, partitionProperty, connectedViews ); this.query = query; this.algCollation = algCollation; - this.underlyingTables = underlyingTables; + this.underlyingTables = ImmutableMap.copyOf( underlyingTables.entrySet().stream().collect( Collectors.toMap( Entry::getKey, t -> ImmutableList.copyOf( t.getValue() ) ) ) ); // mapdb cannot handle the class QueryLanguage, therefore we use the String here this.language = language; } - @Override - public LogicalTable getConnectedViews( ImmutableList newConnectedViews ) { - return new CatalogView( - id, - name, - fieldIds, - namespaceId, - databaseId, - ownerId, - entityType, - query, - primaryKey, - dataPlacements, - modifiable, - partitionProperty, - algCollation, - newConnectedViews, - underlyingTables, - language ); - } - - - @Override - public LogicalTable getRenamed( String newName ) { - return new CatalogView( - id, - newName, - fieldIds, - namespaceId, - databaseId, - ownerId, - entityType, - query, - primaryKey, - dataPlacements, - modifiable, - partitionProperty, - algCollation, - connectedViews, - underlyingTables, - language ); - } - - - @Override - public LogicalTable getTableWithColumns( ImmutableList newColumnIds ) { - return new CatalogView( - id, - name, - newColumnIds, - namespaceId, - databaseId, - ownerId, - entityType, - query, - primaryKey, - dataPlacements, - modifiable, - partitionProperty, - algCollation, - connectedViews, - underlyingTables, - language ); - } - - public QueryLanguage getLanguage() { return QueryLanguage.from( language ); } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationCollection.java b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationCollection.java index db2a5b26ed..ab4ae9184b 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationCollection.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationCollection.java @@ -17,6 +17,9 @@ package org.polypheny.db.catalog.entity.allocation; import java.io.Serializable; +import lombok.EqualsAndHashCode; +import lombok.Value; +import lombok.experimental.NonFinal; import org.apache.calcite.linq4j.tree.Expression; import org.apache.calcite.linq4j.tree.Expressions; import 
org.polypheny.db.catalog.Catalog; @@ -24,9 +27,12 @@ import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.logistic.NamespaceType; +@EqualsAndHashCode(callSuper = true) +@Value +@NonFinal public class AllocationCollection extends CatalogEntity implements Allocation { - protected AllocationCollection( long id, String name, EntityType type, NamespaceType namespaceType ) { + public AllocationCollection( long id, String name, EntityType type, NamespaceType namespaceType ) { super( id, name, type, namespaceType ); } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationGraph.java b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationGraph.java index 0611d31f0a..09ea32356f 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationGraph.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationGraph.java @@ -17,17 +17,23 @@ package org.polypheny.db.catalog.entity.allocation; import java.io.Serializable; +import lombok.EqualsAndHashCode; +import lombok.Value; +import lombok.experimental.NonFinal; import org.apache.calcite.linq4j.tree.Expression; import org.apache.calcite.linq4j.tree.Expressions; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.catalog.entity.logical.LogicalGraph; +@EqualsAndHashCode(callSuper = true) +@Value +@NonFinal public class AllocationGraph extends CatalogEntity implements Allocation { - public final LogicalGraph logical; - public final long id; + public LogicalGraph logical; + public long id; public AllocationGraph( long id, LogicalGraph graph ) { diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationTable.java b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationTable.java index 7fdeeec877..fcc096bdac 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationTable.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationTable.java @@ -21,27 +21,29 @@ import java.util.Map; import java.util.Map.Entry; import java.util.stream.Collectors; +import lombok.EqualsAndHashCode; +import lombok.Value; import org.apache.calcite.linq4j.tree.Expression; import org.apache.calcite.linq4j.tree.Expressions; -import org.polypheny.db.PolyImplementation; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.entity.CatalogColumn; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; import org.polypheny.db.catalog.entity.CatalogEntity; -import org.polypheny.db.catalog.entity.logical.LogicalGraph; -import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.logistic.NamespaceType; -import org.polypheny.db.type.PolyType; +@EqualsAndHashCode(callSuper = true) +@Value public class AllocationTable extends CatalogEntity implements Allocation { - public final List placements; - public final long adapterId; + public List placements; + public long adapterId; + public long logicalId; - protected AllocationTable( long id, String name, long adapterId, List placements ) { + public AllocationTable( long id, long logicalId, String name, long adapterId, List placements ) { super( id, name, EntityType.ENTITY, NamespaceType.RELATIONAL ); + this.logicalId = logicalId; this.adapterId = adapterId; this.placements = placements; } @@ -64,7 
+66,7 @@ public Map getColumnNames() { } - public Map getColumns() { + public Map getColumns() { return null; } @@ -73,4 +75,9 @@ public Map getColumnNamesIds() { return getColumnNames().entrySet().stream().collect( Collectors.toMap( Entry::getValue, Entry::getKey ) ); } + + public String getNamespaceName() { + return null; + } + } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalCollection.java b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalCollection.java index fd9b6bb6d9..bb66fa6b48 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalCollection.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalCollection.java @@ -22,9 +22,11 @@ import java.util.Collection; import java.util.List; import java.util.stream.Collectors; +import lombok.EqualsAndHashCode; import lombok.Getter; import lombok.NonNull; import lombok.SneakyThrows; +import lombok.Value; import org.apache.calcite.linq4j.tree.Expression; import org.apache.calcite.linq4j.tree.Expressions; import org.polypheny.db.catalog.Catalog; @@ -33,22 +35,30 @@ import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.logistic.NamespaceType; - +@EqualsAndHashCode(callSuper = true) +@Value public class LogicalCollection extends CatalogEntity implements CatalogObject, Logical { private static final long serialVersionUID = -6490762948368178584L; @Getter - public final long id; - public final ImmutableList placements; - public final String name; - public final long databaseId; - public final long namespaceId; - public final EntityType entityType; - public final String physicalName; - - - public LogicalCollection( long databaseId, long namespaceId, long id, String name, @NonNull Collection placements, EntityType type, String physicalName ) { + public long id; + public ImmutableList placements; + public String name; + public long databaseId; + public long namespaceId; + public EntityType entityType; + public String physicalName; + + + public LogicalCollection( + long databaseId, + long namespaceId, + long id, + String name, + @NonNull Collection placements, + EntityType type, + String physicalName ) { super( id, name, EntityType.ENTITY, NamespaceType.DOCUMENT ); this.id = id; this.databaseId = databaseId; diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogColumn.java b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalColumn.java similarity index 66% rename from core/src/main/java/org/polypheny/db/catalog/entity/CatalogColumn.java rename to core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalColumn.java index a34d9666be..f34dc13c89 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogColumn.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalColumn.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2022 The Polypheny Project + * Copyright 2019-2023 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -14,68 +14,102 @@ * limitations under the License. 
*/ -package org.polypheny.db.catalog.entity; +package org.polypheny.db.catalog.entity.logical; +import io.activej.serializer.annotations.Deserialize; +import io.activej.serializer.annotations.Serialize; import java.io.Serializable; import lombok.EqualsAndHashCode; import lombok.NonNull; import lombok.RequiredArgsConstructor; import lombok.SneakyThrows; +import lombok.Value; +import lombok.experimental.NonFinal; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.entity.CatalogDefaultValue; +import org.polypheny.db.catalog.entity.CatalogObject; import org.polypheny.db.catalog.logistic.Collation; import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.type.PolyType; -@EqualsAndHashCode -public final class CatalogColumn implements CatalogObject, Comparable { +@EqualsAndHashCode() +@Value +@NonFinal +public class LogicalColumn implements CatalogObject, Comparable { private static final long serialVersionUID = -4792846455300897399L; - public final long id; - public final String name; - public final long tableId; - public final long schemaId; - public final long databaseId; - public final int position; - public final PolyType type; - public final PolyType collectionsType; - public final Integer length; // JDBC length or precision depending on type - public final Integer scale; // decimal digits - public final Integer dimension; - public final Integer cardinality; - public final boolean nullable; - public final Collation collation; - public final CatalogDefaultValue defaultValue; + @Serialize + public long id; + + @Serialize + public String name; + + @Serialize + public long tableId; + + @Serialize + public long schemaId; + + @Serialize + public int position; + + @Serialize + public PolyType type; + + @Serialize + public PolyType collectionsType; + + @Serialize + public Integer length; // JDBC length or precision depending on type + + @Serialize + public Integer scale; // decimal digits + + @Serialize + public Integer dimension; + + @Serialize + public Integer cardinality; + + @Serialize + public boolean nullable; + + @Serialize + public Collation collation; + + @Serialize + public CatalogDefaultValue defaultValue; + + @Serialize @EqualsAndHashCode.Exclude // lombok uses getter methods to compare objects // and this method depends on the catalog, which can lead to nullpointers -> doNotUseGetters - public NamespaceType namespaceType; - - - public CatalogColumn( - final long id, - @NonNull final String name, - final long tableId, - final long schemaId, - final long databaseId, - final int position, - @NonNull final PolyType type, - final PolyType collectionsType, - final Integer length, - final Integer scale, - final Integer dimension, - final Integer cardinality, - final boolean nullable, - final Collation collation, - CatalogDefaultValue defaultValue ) { + public NamespaceType namespaceType = NamespaceType.RELATIONAL; + + + public LogicalColumn( + @Deserialize("id") final long id, + @Deserialize("name") @NonNull final String name, + @Deserialize("tableId") final long tableId, + @Deserialize("schemaId") final long schemaId, + @Deserialize("position") final int position, + @Deserialize("type") @NonNull final PolyType type, + @Deserialize("collectionsType") final PolyType collectionsType, + @Deserialize("length") final Integer length, + @Deserialize("scale") final Integer scale, + @Deserialize("dimension") final Integer dimension, + 
@Deserialize("cardinality") final Integer cardinality, + @Deserialize("nullable") final boolean nullable, + @Deserialize("collation") final Collation collation, + @Deserialize("defaultValue") CatalogDefaultValue defaultValue ) { this.id = id; this.name = name; this.tableId = tableId; this.schemaId = schemaId; - this.databaseId = databaseId; this.position = position; this.type = type; this.collectionsType = collectionsType; @@ -109,20 +143,6 @@ public AlgDataType getAlgDataType( final AlgDataTypeFactory typeFactory ) { } - public NamespaceType getNamespaceType() { - if ( namespaceType == null ) { - namespaceType = Catalog.getInstance().getSchema( schemaId ).namespaceType; - } - return namespaceType; - } - - - @SneakyThrows - public String getDatabaseName() { - return Catalog.getInstance().getDatabase( databaseId ).name; - } - - @SneakyThrows public String getSchemaName() { return Catalog.getInstance().getSchema( schemaId ).name; @@ -138,7 +158,6 @@ public String getTableName() { @Override public Serializable[] getParameterArray() { return new Serializable[]{ - getDatabaseName(), getSchemaName(), getTableName(), name, @@ -161,28 +180,20 @@ public Serializable[] getParameterArray() { @Override - public int compareTo( CatalogColumn o ) { - if ( o != null ) { - int comp = (int) (this.databaseId - o.databaseId); + public int compareTo( LogicalColumn o ) { + int comp = (int) (this.schemaId - o.schemaId); + if ( comp == 0 ) { + comp = (int) (this.tableId - o.tableId); if ( comp == 0 ) { - comp = (int) (this.schemaId - o.schemaId); - if ( comp == 0 ) { - comp = (int) (this.tableId - o.tableId); - if ( comp == 0 ) { - return (int) (this.id - o.id); - } else { - return comp; - } - - } else { - return comp; - } - + return (int) (this.id - o.id); } else { return comp; } + + } else { + return comp; } - return -1; + } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalGraph.java b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalGraph.java index 42e8358767..8acfb8e1fb 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalGraph.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalGraph.java @@ -25,6 +25,7 @@ import java.util.stream.Collectors; import lombok.EqualsAndHashCode; import lombok.NonNull; +import lombok.Value; import lombok.experimental.SuperBuilder; import org.apache.calcite.linq4j.tree.Expression; import org.apache.calcite.linq4j.tree.Expressions; @@ -34,23 +35,22 @@ import org.polypheny.db.catalog.logistic.NamespaceType; @SuperBuilder(toBuilder = true) -@EqualsAndHashCode(callSuper = false) +@EqualsAndHashCode(callSuper = true) +@Value public class LogicalGraph extends CatalogEntity implements Comparable, Logical { private static final long serialVersionUID = 7343856827901459672L; - public final long databaseId; - public final ImmutableList placements; - public final int ownerId; - public final boolean modifiable; + public ImmutableList placements; + public int ownerId; + public boolean modifiable; - public final boolean caseSensitive; + public boolean caseSensitive; - public LogicalGraph( long databaseId, long id, String name, int ownerId, boolean modifiable, @NonNull Collection placements, boolean caseSensitive ) { + public LogicalGraph( long id, String name, int ownerId, boolean modifiable, @NonNull Collection placements, boolean caseSensitive ) { super( id, name, EntityType.ENTITY, NamespaceType.GRAPH ); this.ownerId = ownerId; - this.databaseId = databaseId; this.modifiable = modifiable; 
this.placements = ImmutableList.copyOf( placements ); this.caseSensitive = caseSensitive; @@ -58,7 +58,7 @@ public LogicalGraph( long databaseId, long id, String name, int ownerId, boolean public LogicalGraph( LogicalGraph graph ) { - this( graph.databaseId, graph.id, graph.name, graph.ownerId, graph.modifiable, graph.placements, graph.caseSensitive ); + this( graph.id, graph.name, graph.ownerId, graph.modifiable, graph.placements, graph.caseSensitive ); } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalTable.java b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalTable.java index 2d364fd1ee..6c7d0c9c50 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalTable.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalTable.java @@ -18,50 +18,60 @@ import com.google.common.collect.ImmutableList; +import io.activej.serializer.annotations.Deserialize; +import io.activej.serializer.annotations.Serialize; import java.io.Serializable; import java.util.LinkedList; import java.util.List; import lombok.EqualsAndHashCode; -import lombok.Getter; import lombok.NonNull; import lombok.RequiredArgsConstructor; import lombok.SneakyThrows; +import lombok.Value; +import lombok.With; +import lombok.experimental.NonFinal; import org.apache.calcite.linq4j.tree.Expression; import org.apache.calcite.linq4j.tree.Expressions; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; import org.polypheny.db.algebra.type.AlgDataTypeImpl; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.entity.CatalogColumn; import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.partition.properties.PartitionProperty; import org.polypheny.db.schema.ColumnStrategy; - +@Value +@With @EqualsAndHashCode(callSuper = false) +@NonFinal public class LogicalTable extends CatalogEntity implements Comparable, Logical { private static final long serialVersionUID = 4653390333258552102L; - @Getter - public final long id; - public final String name; - public final ImmutableList fieldIds; - public final long namespaceId; - public final long databaseId; - public final int ownerId; - public final EntityType entityType; - public final Long primaryKey; - public final boolean modifiable; - - public final PartitionProperty partitionProperty; - - public final ImmutableList dataPlacements; - - @Getter - public final ImmutableList connectedViews; + @Serialize + public long id; + @Serialize + public String name; + @Serialize + public ImmutableList fieldIds; + @Serialize + public long namespaceId; + @Serialize + public int ownerId; + @Serialize + public EntityType entityType; + @Serialize + public Long primaryKey; + @Serialize + public boolean modifiable; + @Serialize + public PartitionProperty partitionProperty; + @Serialize + public ImmutableList dataPlacements; + @Serialize + public ImmutableList connectedViews; public LogicalTable( @@ -69,54 +79,33 @@ public LogicalTable( @NonNull final String name, final ImmutableList fieldIds, final long namespaceId, - final long databaseId, final int ownerId, @NonNull final EntityType type, final Long primaryKey, - @NonNull final ImmutableList dataPlacements, + @NonNull final List dataPlacements, boolean modifiable, PartitionProperty partitionProperty ) { - super( id, name, type, NamespaceType.RELATIONAL ); - this.id = id; - 
this.name = name; - this.fieldIds = fieldIds; - this.namespaceId = namespaceId; - this.databaseId = databaseId; - this.ownerId = ownerId; - this.entityType = type; - this.primaryKey = primaryKey; - this.modifiable = modifiable; - - this.partitionProperty = partitionProperty; - this.connectedViews = ImmutableList.of(); - - this.dataPlacements = ImmutableList.copyOf( dataPlacements ); - - if ( type == EntityType.ENTITY && !modifiable ) { - throw new RuntimeException( "Tables of table type TABLE must be modifiable!" ); - } + this( id, name, fieldIds, namespaceId, ownerId, type, primaryKey, dataPlacements, modifiable, partitionProperty, ImmutableList.of() ); } public LogicalTable( - final long id, - @NonNull final String name, - final ImmutableList fieldIds, - final long namespaceId, - final long databaseId, - final int ownerId, - @NonNull final EntityType type, - final Long primaryKey, - @NonNull final ImmutableList dataPlacements, - boolean modifiable, - PartitionProperty partitionProperty, - ImmutableList connectedViews ) { + @Deserialize("id") final long id, + @Deserialize("name") @NonNull final String name, + @Deserialize("fieldIds") final List fieldIds, + @Deserialize("namespaceId") final long namespaceId, + @Deserialize("ownerId") final int ownerId, + @Deserialize("type") @NonNull final EntityType type, + @Deserialize("primaryKey") final Long primaryKey, + @Deserialize("dataPlacements") @NonNull final List dataPlacements, + @Deserialize("modifiable") boolean modifiable, + @Deserialize("partitionProperty") PartitionProperty partitionProperty, + @Deserialize("connectedViews") List connectedViews ) { super( id, name, type, NamespaceType.RELATIONAL ); this.id = id; this.name = name; - this.fieldIds = fieldIds; + this.fieldIds = ImmutableList.copyOf( fieldIds ); this.namespaceId = namespaceId; - this.databaseId = databaseId; this.ownerId = ownerId; this.entityType = type; this.primaryKey = primaryKey; @@ -124,7 +113,7 @@ public LogicalTable( this.partitionProperty = partitionProperty; - this.connectedViews = connectedViews; + this.connectedViews = ImmutableList.copyOf( connectedViews ); this.dataPlacements = ImmutableList.copyOf( dataPlacements ); @@ -134,11 +123,6 @@ public LogicalTable( } - @SneakyThrows - public String getDatabaseName() { - return Catalog.getInstance().getDatabase( databaseId ).name; - } - @SneakyThrows public String getNamespaceName() { @@ -173,7 +157,6 @@ public List getColumnNames() { @Override public Serializable[] getParameterArray() { return new Serializable[]{ - getDatabaseName(), getNamespaceName(), name, entityType.name(), @@ -192,15 +175,9 @@ public Serializable[] getParameterArray() { @Override public int compareTo( LogicalTable o ) { if ( o != null ) { - int comp = (int) (this.databaseId - o.databaseId); + int comp = (int) (this.namespaceId - o.namespaceId); if ( comp == 0 ) { - comp = (int) (this.namespaceId - o.namespaceId); - if ( comp == 0 ) { - return (int) (this.id - o.id); - } else { - return comp; - } - + return (int) (this.id - o.id); } else { return comp; } @@ -209,80 +186,20 @@ public int compareTo( LogicalTable o ) { } - static String getEnumNameOrNull( Enum theEnum ) { - if ( theEnum == null ) { - return null; - } else { - return theEnum.name(); - } - } - - - public LogicalTable getRenamed( String newName ) { - return new LogicalTable( - id, - newName, - fieldIds, - namespaceId, - databaseId, - ownerId, - entityType, - primaryKey, - dataPlacements, - modifiable, - partitionProperty, - connectedViews ); - } - - @Override public AlgDataType 
getRowType() { final AlgDataTypeFactory.Builder fieldInfo = AlgDataTypeFactory.DEFAULT.builder(); for ( Long id : fieldIds ) { - CatalogColumn catalogColumn = Catalog.getInstance().getColumn( id ); - AlgDataType sqlType = catalogColumn.getAlgDataType( AlgDataTypeFactory.DEFAULT ); - fieldInfo.add( catalogColumn.name, null, sqlType ).nullable( catalogColumn.nullable ); + LogicalColumn logicalColumn = Catalog.getInstance().getColumn( id ); + AlgDataType sqlType = logicalColumn.getAlgDataType( AlgDataTypeFactory.DEFAULT ); + fieldInfo.add( logicalColumn.name, null, sqlType ).nullable( logicalColumn.nullable ); } return AlgDataTypeImpl.proto( fieldInfo.build() ).apply( AlgDataTypeFactory.DEFAULT ); } - public LogicalTable getConnectedViews( ImmutableList newConnectedViews ) { - return new LogicalTable( - id, - name, - fieldIds, - namespaceId, - databaseId, - ownerId, - entityType, - primaryKey, - dataPlacements, - modifiable, - partitionProperty, - newConnectedViews ); - } - - - public LogicalTable getTableWithColumns( ImmutableList newColumnIds ) { - return new LogicalTable( - id, - name, - newColumnIds, - namespaceId, - databaseId, - ownerId, - entityType, - primaryKey, - dataPlacements, - modifiable, - partitionProperty, - connectedViews ); - } - - @Override public Expression asExpression() { return Expressions.call( Expressions.call( Catalog.class, "getInstance" ), "getLogicalTable", Expressions.constant( id ) ); diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalCollection.java b/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalCollection.java index 6d93d00a7a..9d02f263f7 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalCollection.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalCollection.java @@ -17,6 +17,9 @@ package org.polypheny.db.catalog.entity.physical; import java.io.Serializable; +import lombok.EqualsAndHashCode; +import lombok.Value; +import lombok.experimental.NonFinal; import org.apache.calcite.linq4j.tree.Expression; import org.apache.calcite.linq4j.tree.Expressions; import org.polypheny.db.catalog.Catalog; @@ -24,9 +27,12 @@ import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.logistic.NamespaceType; +@EqualsAndHashCode(callSuper = true) +@Value +@NonFinal public class PhysicalCollection extends CatalogEntity implements Physical { - protected PhysicalCollection( long id, String name, EntityType type, NamespaceType namespaceType ) { + public PhysicalCollection( long id, String name, EntityType type, NamespaceType namespaceType ) { super( id, name, type, namespaceType ); } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalGraph.java b/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalGraph.java index e69de629b7..8ebd3f7215 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalGraph.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalGraph.java @@ -17,6 +17,9 @@ package org.polypheny.db.catalog.entity.physical; import java.io.Serializable; +import lombok.EqualsAndHashCode; +import lombok.Value; +import lombok.experimental.NonFinal; import org.apache.calcite.linq4j.tree.Expression; import org.apache.calcite.linq4j.tree.Expressions; import org.polypheny.db.catalog.Catalog; @@ -24,9 +27,12 @@ import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.logistic.NamespaceType; +@EqualsAndHashCode(callSuper = true) 
+@Value +@NonFinal public class PhysicalGraph extends CatalogEntity implements Physical { - protected PhysicalGraph( long id, String name, EntityType type, NamespaceType namespaceType ) { + public PhysicalGraph( long id, String name, EntityType type, NamespaceType namespaceType ) { super( id, name, type, namespaceType ); } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalTable.java b/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalTable.java index bffc845c88..397982fb8f 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalTable.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalTable.java @@ -20,6 +20,9 @@ import java.io.Serializable; import java.util.List; import java.util.stream.Collectors; +import lombok.EqualsAndHashCode; +import lombok.Value; +import lombok.experimental.NonFinal; import org.apache.calcite.linq4j.tree.Expression; import org.apache.calcite.linq4j.tree.Expressions; import org.polypheny.db.algebra.type.AlgDataType; @@ -28,23 +31,26 @@ import org.polypheny.db.algebra.type.AlgDataTypeSystem; import org.polypheny.db.algebra.type.AlgProtoDataType; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.entity.CatalogColumn; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.catalog.entity.allocation.AllocationTable; +import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.type.PolyTypeFactoryImpl; +@EqualsAndHashCode(callSuper = true) +@Value +@NonFinal public class PhysicalTable extends CatalogEntity implements Physical { - public final ImmutableList placements; - public final ImmutableList columnIds; - public final ImmutableList columnNames; - public final String namespaceName; + public ImmutableList placements; + public ImmutableList columnIds; + public ImmutableList columnNames; + public String namespaceName; - protected PhysicalTable( long id, String name, String namespaceName, EntityType type, NamespaceType namespaceType, List placements, List columnNames ) { + public PhysicalTable( long id, String name, String namespaceName, EntityType type, NamespaceType namespaceType, List placements, List columnNames ) { super( id, name, type, namespaceType ); this.namespaceName = namespaceName; this.placements = ImmutableList.copyOf( placements ); @@ -58,11 +64,6 @@ public PhysicalTable( AllocationTable table, String name, String namespaceName, } - public PhysicalTable( PhysicalTable physicalTable ) { - this( physicalTable.id, physicalTable.name, physicalTable.namespaceName, physicalTable.entityType, physicalTable.namespaceType, physicalTable.placements, physicalTable.columnNames ); - } - - @Override public AlgDataType getRowType() { return buildProto().apply( AlgDataTypeFactory.DEFAULT ); @@ -74,9 +75,9 @@ public AlgProtoDataType buildProto() { final AlgDataTypeFactory.Builder fieldInfo = typeFactory.builder(); for ( CatalogColumnPlacement placement : placements ) { - CatalogColumn catalogColumn = Catalog.getInstance().getColumn( placement.columnId ); - AlgDataType sqlType = catalogColumn.getAlgDataType( typeFactory ); - fieldInfo.add( catalogColumn.name, placement.physicalColumnName, sqlType ).nullable( catalogColumn.nullable ); + LogicalColumn logicalColumn = Catalog.getInstance().getColumn( placement.columnId ); + AlgDataType 
sqlType = logicalColumn.getAlgDataType( typeFactory ); + fieldInfo.add( logicalColumn.name, placement.physicalColumnName, sqlType ).nullable( logicalColumn.nullable ); } return AlgDataTypeImpl.proto( fieldInfo.build() ); diff --git a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownColumnException.java b/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownColumnException.java index 10f93ea218..84dd7dc904 100644 --- a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownColumnException.java +++ b/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownColumnException.java @@ -25,8 +25,8 @@ public class UnknownColumnException extends CatalogException { private final String columnName; - public UnknownColumnException( String databaseName, String schemaName, String tableName, String columnName ) { - super( "There is no column with name '" + columnName + "' in table '" + tableName + "' of schema '" + schemaName + "' in database '" + databaseName + "'" ); + public UnknownColumnException( String schemaName, String tableName, String columnName ) { + super( "There is no column with name '" + columnName + "' in table '" + tableName + "' of schema '" + schemaName + "'" ); this.columnName = columnName; } diff --git a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownDatabaseException.java b/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownDatabaseException.java deleted file mode 100644 index e57bcaf2d5..0000000000 --- a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownDatabaseException.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Copyright 2019-2021 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.polypheny.db.catalog.exceptions; - - -import lombok.Getter; - -public class UnknownDatabaseException extends CatalogException { - - @Getter - private final String databaseName; - - - public UnknownDatabaseException( String databaseName ) { - super( "There is no database with name " + databaseName ); - this.databaseName = databaseName; - } - -} diff --git a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownSchemaException.java b/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownSchemaException.java index 4e8e045c17..77469c7cd9 100644 --- a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownSchemaException.java +++ b/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownSchemaException.java @@ -25,15 +25,10 @@ public class UnknownSchemaException extends CatalogException { private final String schemaName; - public UnknownSchemaException( String databaseName, String schemaName ) { - super( "There is no schema with name '" + schemaName + "' in the database '" + databaseName + "'" ); + public UnknownSchemaException( String schemaName ) { + super( "There is no schema with name '" + schemaName + "'" ); this.schemaName = schemaName; } - public UnknownSchemaException( long databaseId, String schemaName ) { - super( "There is no schema with name '" + schemaName + "' in the database with the id '" + databaseId + "'" ); - this.schemaName = schemaName; - } - } diff --git a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownTableException.java b/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownTableException.java index dc85c9b288..146ec5fef0 100644 --- a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownTableException.java +++ b/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownTableException.java @@ -25,14 +25,8 @@ public class UnknownTableException extends CatalogException { private final String tableName; - public UnknownTableException( String databaseName, String schemaName, String tableName ) { - super( "There is no table with name '" + tableName + "' in schema '" + schemaName + "' of database '" + databaseName + "'." ); - this.tableName = tableName; - } - - - public UnknownTableException( long databaseId, String schemaName, String tableName ) { - super( "There is no table with name '" + tableName + "' in schema '" + schemaName + "' of database with the id '" + databaseId + "'." ); + public UnknownTableException( String schemaName, String tableName ) { + super( "There is no table with name '" + tableName + "' in schema '" + schemaName + "'." 
); this.tableName = tableName; } diff --git a/core/src/main/java/org/polypheny/db/partition/PartitionManager.java b/core/src/main/java/org/polypheny/db/partition/PartitionManager.java index ad5ecdb5f4..7766d99ef9 100644 --- a/core/src/main/java/org/polypheny/db/partition/PartitionManager.java +++ b/core/src/main/java/org/polypheny/db/partition/PartitionManager.java @@ -18,8 +18,8 @@ import java.util.List; import java.util.Map; -import org.polypheny.db.catalog.entity.CatalogColumn; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; +import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.type.PolyType; @@ -35,7 +35,7 @@ public interface PartitionManager { Map> getRelevantPlacements( LogicalTable catalogTable, List partitionIds, List excludedAdapters ); - boolean validatePartitionGroupSetup( List> partitionGroupQualifiers, long numPartitionGroups, List partitionGroupNames, CatalogColumn partitionColumn ); + boolean validatePartitionGroupSetup( List> partitionGroupQualifiers, long numPartitionGroups, List partitionGroupNames, LogicalColumn partitionColumn ); /** * Returns all placements of catalogEntity and partitionIds diff --git a/core/src/main/java/org/polypheny/db/processing/DataMigrator.java b/core/src/main/java/org/polypheny/db/processing/DataMigrator.java index 51ad4cb73e..af7d2d9172 100644 --- a/core/src/main/java/org/polypheny/db/processing/DataMigrator.java +++ b/core/src/main/java/org/polypheny/db/processing/DataMigrator.java @@ -20,8 +20,8 @@ import java.util.Map; import org.polypheny.db.algebra.AlgRoot; import org.polypheny.db.catalog.entity.CatalogAdapter; -import org.polypheny.db.catalog.entity.CatalogColumn; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; +import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.transaction.Statement; @@ -33,7 +33,7 @@ public interface DataMigrator { void copyData( Transaction transaction, CatalogAdapter store, - List columns, + List columns, List partitionIds ); /** @@ -51,7 +51,7 @@ void copyData( void copySelectiveData( Transaction transaction, CatalogAdapter store, - LogicalTable sourceTable, LogicalTable targetTable, List columns, + LogicalTable sourceTable, LogicalTable targetTable, List columns, Map> placementDistribution, List targetPartitionIds ); @@ -72,14 +72,14 @@ void copyPartitionData( CatalogAdapter store, LogicalTable sourceTable, LogicalTable targetTable, - List columns, + List columns, List sourcePartitionIds, List targetPartitionIds ); AlgRoot buildInsertStatement( Statement statement, List to, long partitionId ); //is used within copyData - void executeQuery( List columns, AlgRoot sourceRel, Statement sourceStatement, Statement targetStatement, AlgRoot targetRel, boolean isMaterializedView, boolean doesSubstituteOrderBy ); + void executeQuery( List columns, AlgRoot sourceRel, Statement sourceStatement, Statement targetStatement, AlgRoot targetRel, boolean isMaterializedView, boolean doesSubstituteOrderBy ); AlgRoot buildDeleteStatement( Statement statement, List to, long partitionId ); diff --git a/core/src/main/java/org/polypheny/db/schema/PolySchemaBuilder.java b/core/src/main/java/org/polypheny/db/schema/PolySchemaBuilder.java index 2bc4e1b7fa..5616b13537 100644 --- a/core/src/main/java/org/polypheny/db/schema/PolySchemaBuilder.java +++ 
b/core/src/main/java/org/polypheny/db/schema/PolySchemaBuilder.java @@ -36,7 +36,6 @@ import org.polypheny.db.catalog.entity.CatalogAdapter; import org.polypheny.db.catalog.entity.CatalogCollectionPlacement; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; -import org.polypheny.db.catalog.entity.CatalogDatabase; import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.catalog.entity.CatalogEntityPlacement; import org.polypheny.db.catalog.entity.CatalogGraphPlacement; @@ -84,25 +83,24 @@ public Snapshot getCurrent() { private synchronized Snapshot buildSchema() { Catalog catalog = Catalog.getInstance(); - CatalogDatabase catalogDatabase = catalog.getDatabase( Catalog.defaultDatabaseId ); // Build logical namespaces - Map, CatalogEntity> logicalRelational = buildRelationalLogical( catalog, catalogDatabase ); + Map, CatalogEntity> logicalRelational = buildRelationalLogical( catalog ); - Map, CatalogEntity> logicalDocument = buildDocumentLogical( catalog, catalogDatabase ); + Map, CatalogEntity> logicalDocument = buildDocumentLogical( catalog ); - Map, CatalogEntity> logicalGraph = buildGraphLogical( catalog, catalogDatabase ); + Map, CatalogEntity> logicalGraph = buildGraphLogical( catalog ); // Build mapping structures // Build physical namespaces List adapters = Catalog.getInstance().getAdapters(); - Map, CatalogEntityPlacement> physicalRelational = buildPhysicalTables( catalog, catalogDatabase, adapters ); + Map, CatalogEntityPlacement> physicalRelational = buildPhysicalTables( catalog, adapters ); - Map, CatalogEntityPlacement> physicalDocument = buildPhysicalDocuments( catalog, catalogDatabase, adapters ); + Map, CatalogEntityPlacement> physicalDocument = buildPhysicalDocuments( catalog, adapters ); - Map, CatalogEntityPlacement> physicalGraph = buildPhysicalGraphs( catalog, catalogDatabase ); + Map, CatalogEntityPlacement> physicalGraph = buildPhysicalGraphs( catalog ); isOutdated = false; return null; @@ -110,14 +108,14 @@ private synchronized Snapshot buildSchema() { } - private Map, CatalogEntity> buildGraphLogical( Catalog catalog, CatalogDatabase catalogDatabase ) { - return catalog.getGraphs( catalogDatabase.id, null ).stream().collect( Collectors.toMap( e -> Pair.of( e.id, e.id ), e -> e ) ); + private Map, CatalogEntity> buildGraphLogical( Catalog catalog ) { + return catalog.getGraphs( null ).stream().collect( Collectors.toMap( e -> Pair.of( e.id, e.id ), e -> e ) ); } - private Map, CatalogEntity> buildRelationalLogical( Catalog catalog, CatalogDatabase catalogDatabase ) { + private Map, CatalogEntity> buildRelationalLogical( Catalog catalog ) { Map, CatalogEntity> entities = new HashMap<>(); - for ( CatalogSchema catalogSchema : catalog.getSchemas( catalogDatabase.id, null ) ) { + for ( CatalogSchema catalogSchema : catalog.getSchemas( null ) ) { if ( catalogSchema.namespaceType != NamespaceType.RELATIONAL ) { continue; } @@ -130,9 +128,9 @@ private Map, CatalogEntity> buildRelationalLogical( Catalog cat } - private Map, CatalogEntity> buildDocumentLogical( Catalog catalog, CatalogDatabase catalogDatabase ) { + private Map, CatalogEntity> buildDocumentLogical( Catalog catalog ) { Map, CatalogEntity> entities = new HashMap<>(); - for ( CatalogSchema catalogSchema : catalog.getSchemas( catalogDatabase.id, null ) ) { + for ( CatalogSchema catalogSchema : catalog.getSchemas( null ) ) { if ( catalogSchema.namespaceType != NamespaceType.DOCUMENT ) { continue; } @@ -146,10 +144,10 @@ private Map, CatalogEntity> buildDocumentLogical( Catalog catal } 
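The same narrowing runs through every build* helper below: with the database level gone, each lookup is keyed by namespace alone. A minimal sketch of the resulting call shape, assuming the single-pattern Catalog accessors used in the hunks above (the helper name, loop body, and import locations are illustrative assumptions, not part of the diff):

// Sketch only: enumerate relational namespaces without a CatalogDatabase handle.
static List<LogicalTable> collectRelationalTables() {
    Catalog catalog = Catalog.getInstance();
    List<LogicalTable> tables = new ArrayList<>();
    for ( CatalogSchema schema : catalog.getSchemas( null ) ) { // null pattern matches all namespaces
        if ( schema.namespaceType != NamespaceType.RELATIONAL ) {
            continue; // buildRelationalLogical skips non-relational namespaces the same way
        }
        // table lookups are now scoped by a namespace pattern only; the database pattern is gone
        tables.addAll( catalog.getTables( Pattern.of( schema.name ), null ) );
    }
    return tables;
}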
- private Map, CatalogEntityPlacement> buildPhysicalGraphs( Catalog catalog, CatalogDatabase catalogDatabase ) { + private Map, CatalogEntityPlacement> buildPhysicalGraphs( Catalog catalog ) { Map, CatalogEntityPlacement> placements = new HashMap<>(); // Build adapter schema (physical schema) GRAPH - for ( LogicalGraph graph : catalog.getGraphs( catalogDatabase.id, null ) ) { + for ( LogicalGraph graph : catalog.getGraphs( null ) ) { for ( int adapterId : graph.placements ) { CatalogGraphPlacement placement = catalog.getGraphPlacement( graph.id, adapterId ); @@ -168,10 +166,10 @@ private Map, CatalogEntityPlacement> buildPhysicalGraph } - private Map, CatalogEntityPlacement> buildPhysicalDocuments( Catalog catalog, CatalogDatabase catalogDatabase, List adapters ) { + private Map, CatalogEntityPlacement> buildPhysicalDocuments( Catalog catalog, List adapters ) { Map, CatalogEntityPlacement> placements = new HashMap<>(); // Build adapter schema (physical schema) DOCUMENT - for ( CatalogSchema catalogSchema : catalog.getSchemas( catalogDatabase.id, null ).stream().filter( s -> s.namespaceType == NamespaceType.DOCUMENT ).collect( Collectors.toList() ) ) { + for ( CatalogSchema catalogSchema : catalog.getSchemas( null ).stream().filter( s -> s.namespaceType == NamespaceType.DOCUMENT ).collect( Collectors.toList() ) ) { for ( CatalogAdapter catalogAdapter : adapters ) { Adapter adapter = AdapterManager.getInstance().getAdapter( catalogAdapter.id ); @@ -211,10 +209,10 @@ private Map, CatalogEntityPlacement> buildPhysicalDocum } - private Map, CatalogEntityPlacement> buildPhysicalTables( Catalog catalog, CatalogDatabase catalogDatabase, List adapters ) { + private Map, CatalogEntityPlacement> buildPhysicalTables( Catalog catalog, List adapters ) { Map, CatalogEntityPlacement> placements = new HashMap<>(); // Build adapter schema (physical schema) RELATIONAL - for ( CatalogSchema catalogSchema : new ArrayList<>( catalog.getSchemas( catalogDatabase.id, null ) ) ) { + for ( CatalogSchema catalogSchema : new ArrayList<>( catalog.getSchemas( null ) ) ) { for ( CatalogAdapter catalogAdapter : adapters ) { // Get list of tables on this adapter Map> tableIdsPerSchema = new HashMap<>(); diff --git a/core/src/main/java/org/polypheny/db/schema/PolyphenyDbSchema.java b/core/src/main/java/org/polypheny/db/schema/PolyphenyDbSchema.java index 235bb3ec89..07cfd580ca 100644 --- a/core/src/main/java/org/polypheny/db/schema/PolyphenyDbSchema.java +++ b/core/src/main/java/org/polypheny/db/schema/PolyphenyDbSchema.java @@ -20,10 +20,10 @@ import java.util.stream.Collectors; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogNamespace; -import org.polypheny.db.catalog.entity.logical.LogicalCollection; import org.polypheny.db.catalog.entity.allocation.AllocationCollection; import org.polypheny.db.catalog.entity.allocation.AllocationGraph; import org.polypheny.db.catalog.entity.allocation.AllocationTable; +import org.polypheny.db.catalog.entity.logical.LogicalCollection; import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.entity.physical.PhysicalCollection; @@ -38,11 +38,11 @@ public interface PolyphenyDbSchema { default LogicalTable getTable( List names ) { switch ( names.size() ) { case 3: - return Catalog.getInstance().getTables( Pattern.of( names.get( 0 ) ), Pattern.of( names.get( 1 ) ), Pattern.of( names.get( 2 ) ) ).get( 0 ); + return Catalog.getInstance().getTables( Pattern.of( 
names.get( 1 ) ), Pattern.of( names.get( 2 ) ) ).get( 0 ); case 2: - return Catalog.getInstance().getTables( Catalog.defaultDatabaseId, Pattern.of( names.get( 0 ) ), Pattern.of( names.get( 1 ) ) ).get( 0 ); + return Catalog.getInstance().getTables( Pattern.of( names.get( 0 ) ), Pattern.of( names.get( 1 ) ) ).get( 0 ); case 1: - return Catalog.getInstance().getTables( Catalog.defaultDatabaseId, null, Pattern.of( names.get( 0 ) ) ).get( 0 ); + return Catalog.getInstance().getTables( null, Pattern.of( names.get( 0 ) ) ).get( 0 ); default: return null; } @@ -65,7 +65,7 @@ default LogicalCollection getCollection( List names ) { CatalogNamespace namespace; switch ( names.size() ) { case 3: - namespace = Catalog.getInstance().getSchemas( Pattern.of( names.get( 0 ) ), Pattern.of( names.get( 1 ) ) ).get( 0 ); + namespace = Catalog.getInstance().getSchemas( Pattern.of( names.get( 1 ) ) ).get( 0 ); return Catalog.getInstance().getCollections( namespace.id, Pattern.of( names.get( 2 ) ) ).get( 0 ); case 2: namespace = Catalog.getInstance().getSchemas( Catalog.defaultDatabaseId, Pattern.of( names.get( 0 ) ) ).get( 0 ); @@ -93,7 +93,7 @@ default PhysicalCollection getPhysicalCollection( long id ){ default LogicalGraph getGraph( List names ) { if ( names.size() == 1 ) {// TODO add methods - return Catalog.getInstance().getGraphs( Catalog.defaultDatabaseId, Pattern.of( names.get( 0 ) ) ).get( 0 ); + return Catalog.getInstance().getGraphs( Pattern.of( names.get( 0 ) ) ).get( 0 ); } return null; } diff --git a/core/src/main/java/org/polypheny/db/view/MaterializedViewManager.java b/core/src/main/java/org/polypheny/db/view/MaterializedViewManager.java index 2c849eefe5..fb0f572952 100644 --- a/core/src/main/java/org/polypheny/db/view/MaterializedViewManager.java +++ b/core/src/main/java/org/polypheny/db/view/MaterializedViewManager.java @@ -26,10 +26,10 @@ import org.polypheny.db.algebra.AlgShuttleImpl; import org.polypheny.db.algebra.core.common.Modify; import org.polypheny.db.algebra.logical.relational.LogicalRelModify; -import org.polypheny.db.catalog.entity.CatalogColumn; import org.polypheny.db.catalog.entity.CatalogMaterializedView; -import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.entity.MaterializedCriteria; +import org.polypheny.db.catalog.entity.logical.LogicalColumn; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.transaction.PolyXid; import org.polypheny.db.transaction.Transaction; @@ -64,7 +64,7 @@ public static MaterializedViewManager getInstance() { public abstract void addData( Transaction transaction, List stores, - Map> addedColumns, + Map> addedColumns, AlgRoot algRoot, CatalogMaterializedView materializedView ); diff --git a/core/src/test/java/org/polypheny/db/catalog/MockCatalog.java b/core/src/test/java/org/polypheny/db/catalog/MockCatalog.java index 4bec842376..db9726b028 100644 --- a/core/src/test/java/org/polypheny/db/catalog/MockCatalog.java +++ b/core/src/test/java/org/polypheny/db/catalog/MockCatalog.java @@ -27,16 +27,13 @@ import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.catalog.entity.CatalogAdapter; import org.polypheny.db.catalog.entity.CatalogAdapter.AdapterType; -import org.polypheny.db.catalog.entity.logical.LogicalCollection; import org.polypheny.db.catalog.entity.CatalogCollectionMapping; import org.polypheny.db.catalog.entity.CatalogCollectionPlacement; -import org.polypheny.db.catalog.entity.CatalogColumn; import 
org.polypheny.db.catalog.entity.CatalogColumnPlacement; import org.polypheny.db.catalog.entity.CatalogConstraint; import org.polypheny.db.catalog.entity.CatalogDataPlacement; import org.polypheny.db.catalog.entity.CatalogDatabase; import org.polypheny.db.catalog.entity.CatalogForeignKey; -import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.catalog.entity.CatalogGraphMapping; import org.polypheny.db.catalog.entity.CatalogGraphPlacement; import org.polypheny.db.catalog.entity.CatalogIndex; @@ -47,10 +44,13 @@ import org.polypheny.db.catalog.entity.CatalogPrimaryKey; import org.polypheny.db.catalog.entity.CatalogQueryInterface; import org.polypheny.db.catalog.entity.CatalogSchema; -import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.entity.CatalogUser; import org.polypheny.db.catalog.entity.CatalogView; import org.polypheny.db.catalog.entity.MaterializedCriteria; +import org.polypheny.db.catalog.entity.logical.LogicalCollection; +import org.polypheny.db.catalog.entity.logical.LogicalColumn; +import org.polypheny.db.catalog.entity.logical.LogicalGraph; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.exceptions.GenericCatalogException; import org.polypheny.db.catalog.exceptions.NoTablePrimaryKeyException; import org.polypheny.db.catalog.exceptions.UnknownAdapterException; @@ -134,12 +134,6 @@ public void restoreColumnPlacements( Transaction transaction ) { } - @Override - public long addDatabase( String name, int ownerId, String ownerName, long defaultSchemaId, String defaultSchemaName ) { - throw new NotImplementedException(); - } - - @Override public Map getNodeInfo() { throw new NotImplementedException(); @@ -158,26 +152,18 @@ public void restoreViews( Transaction transaction ) { } - @Override - public void deleteDatabase( long databaseId ) { - throw new NotImplementedException(); - } - - - @Override - public List getDatabases( Pattern pattern ) { + private List getDatabases( Pattern pattern ) { throw new NotImplementedException(); } - @Override - public CatalogDatabase getDatabase( String databaseName ) throws UnknownDatabaseException { + private CatalogDatabase getDatabase( String databaseName ) throws UnknownDatabaseException { throw new NotImplementedException(); } @Override - public long addGraph( long databaseId, String name, List stores, boolean modifiable, boolean ifNotExists, boolean replace ) { + public long addGraph( String name, List stores, boolean modifiable, boolean ifNotExists, boolean replace ) { throw new NotImplementedException(); } @@ -237,25 +223,23 @@ public LogicalGraph getGraph( long id ) { @Override - public List getGraphs( long databaseId, Pattern graphName ) { + public List getGraphs( Pattern graphName ) { throw new NotImplementedException(); } - @Override - public CatalogDatabase getDatabase( long databaseId ) { + private CatalogDatabase getDatabase( long databaseId ) { throw new NotImplementedException(); } @Override - public List getSchemas( Pattern databaseNamePattern, Pattern schemaNamePattern ) { + public List getSchemas( Pattern schemaNamePattern ) { throw new NotImplementedException(); } - @Override - public List getSchemas( long databaseId, Pattern schemaNamePattern ) { + private List getSchemas( long databaseId, Pattern schemaNamePattern ) { throw new NotImplementedException(); } @@ -267,25 +251,24 @@ public CatalogSchema getSchema( long schemaId ) { @Override - public CatalogSchema getSchema( String databaseName, String schemaName ) 
throws UnknownSchemaException, UnknownDatabaseException { + public CatalogSchema getSchema( String schemaName ) throws UnknownSchemaException { throw new NotImplementedException(); } - @Override - public CatalogSchema getSchema( long databaseId, String schemaName ) throws UnknownSchemaException { + private CatalogSchema getSchema( long databaseId, String schemaName ) throws UnknownSchemaException { throw new NotImplementedException(); } @Override - public long addNamespace( String name, long databaseId, int ownerId, NamespaceType namespaceType ) { + public long addNamespace( String name, int ownerId, NamespaceType namespaceType ) { throw new NotImplementedException(); } @Override - public boolean checkIfExistsSchema( long databaseId, String schemaName ) { + public boolean checkIfExistsSchema( String schemaName ) { throw new NotImplementedException(); } @@ -315,13 +298,13 @@ public List getTables( long schemaId, Pattern tableNamePattern ) { @Override - public List getTables( long databaseId, Pattern schemaNamePattern, Pattern tableNamePattern ) { + public List getTables( Pattern schemaNamePattern, Pattern tableNamePattern ) { throw new NotImplementedException(); } @Override - public LogicalTable getTable( String databaseName, String schemaName, String tableName ) throws UnknownTableException, UnknownDatabaseException, UnknownSchemaException { + public LogicalTable getTable( String schemaName, String tableName ) throws UnknownTableException, UnknownSchemaException { throw new NotImplementedException(); } @@ -333,7 +316,7 @@ public LogicalTable getTableFromPartition( long partitionId ) { @Override - public List getTables( Pattern databaseNamePattern, Pattern schemaNamePattern, Pattern tableNamePattern ) { + public List getTables( Pattern schemaNamePattern, Pattern tableNamePattern ) { throw new NotImplementedException(); } @@ -351,7 +334,7 @@ public LogicalTable getTable( long schemaId, String tableName ) throws UnknownTa @Override - public LogicalTable getTable( long databaseId, String schemaName, String tableName ) throws UnknownTableException { + public LogicalTable getTable( String schemaName, String tableName ) throws UnknownTableException { throw new NotImplementedException(); } @@ -567,31 +550,31 @@ public CatalogCollectionPlacement getCollectionPlacement( long collectionId, int @Override - public List getColumns( long tableId ) { + public List getColumns( long tableId ) { throw new NotImplementedException(); } @Override - public List getColumns( Pattern databaseNamePattern, Pattern schemaNamePattern, Pattern tableNamePattern, Pattern columnNamePattern ) { + public List getColumns( Pattern schemaNamePattern, Pattern tableNamePattern, Pattern columnNamePattern ) { throw new NotImplementedException(); } @Override - public CatalogColumn getColumn( long columnId ) { + public LogicalColumn getColumn( long columnId ) { throw new NotImplementedException(); } @Override - public CatalogColumn getColumn( long tableId, String columnName ) throws UnknownColumnException { + public LogicalColumn getColumn( long tableId, String columnName ) throws UnknownColumnException { throw new NotImplementedException(); } @Override - public CatalogColumn getColumn( String databaseName, String schemaName, String tableName, String columnName ) throws UnknownColumnException, UnknownSchemaException, UnknownDatabaseException, UnknownTableException { + public LogicalColumn getColumn( String schemaName, String tableName, String columnName ) throws UnknownColumnException, UnknownSchemaException, UnknownTableException { 
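        // With the database level removed, a column is addressed by the
        // ( schemaName, tableName, columnName ) triple alone; the mock keeps
        // signalling an unbacked lookup, like every other method in this class.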
throw new NotImplementedException(); } @@ -939,7 +922,7 @@ public List getPartitionGroups( long tableId ) { @Override - public List getPartitionGroups( Pattern databaseNamePattern, Pattern schemaNamePattern, Pattern tableNamePattern ) { + public List getPartitionGroups( Pattern schemaNamePattern, Pattern tableNamePattern ) { throw new NotImplementedException(); } @@ -1065,13 +1048,12 @@ public List getPartitions( long partitionGroupId ) { * Get all partitions of the specified database which fit to the specified filter patterns. * getColumns(xid, databaseName, null, null, null) returns all partitions of the database. * - * @param databaseNamePattern Pattern for the database name. null returns all. * @param schemaNamePattern Pattern for the schema name. null returns all. * @param tableNamePattern Pattern for the table name. null returns all. * @return List of columns which fit to the specified filters. If there is no column which meets the criteria, an empty list is returned. */ @Override - public List getPartitions( Pattern databaseNamePattern, Pattern schemaNamePattern, Pattern tableNamePattern ) { + public List getPartitions( Pattern schemaNamePattern, Pattern tableNamePattern ) { throw new NotImplementedException(); } diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java index 793ba02e28..8ac31f5556 100644 --- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java @@ -54,7 +54,6 @@ import org.polypheny.db.catalog.entity.CatalogAdapter.AdapterType; import org.polypheny.db.catalog.entity.CatalogCollectionMapping; import org.polypheny.db.catalog.entity.CatalogCollectionPlacement; -import org.polypheny.db.catalog.entity.CatalogColumn; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; import org.polypheny.db.catalog.entity.CatalogConstraint; import org.polypheny.db.catalog.entity.CatalogDataPlacement; @@ -72,6 +71,7 @@ import org.polypheny.db.catalog.entity.MaterializedCriteria; import org.polypheny.db.catalog.entity.MaterializedCriteria.CriteriaType; import org.polypheny.db.catalog.entity.logical.LogicalCollection; +import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.exceptions.ColumnAlreadyExistsException; @@ -196,7 +196,7 @@ protected DataStore getDataStoreInstance( int storeId ) throws DdlOnSourceExcept } - private CatalogColumn getCatalogColumn( long tableId, String columnName ) throws ColumnNotExistsException { + private LogicalColumn getCatalogColumn( long tableId, String columnName ) throws ColumnNotExistsException { try { return catalog.getColumn( tableId, columnName ); } catch ( UnknownColumnException e ) { @@ -209,7 +209,7 @@ public long createNamespace( String name, long databaseId, NamespaceType type, i name = name.toLowerCase(); // Check if there is already a schema with this name - if ( catalog.checkIfExistsSchema( databaseId, name ) ) { + if ( catalog.checkIfExistsSchema( name ) ) { if ( ifNotExists ) { // It is ok that there is already a schema with this name because "IF NOT EXISTS" was specified try { @@ -223,7 +223,7 @@
public long createNamespace( String name, long databaseId, NamespaceType type, i throw new NamespaceAlreadyExistsException(); } } else { - return catalog.addNamespace( name, databaseId, userId, type ); + return catalog.addNamespace( name, userId, type ); } } @@ -415,7 +415,7 @@ public void alterSchemaOwner( String schemaName, String ownerName, long database @Override public void renameSchema( String newName, String oldName, long databaseId ) throws NamespaceAlreadyExistsException, UnknownSchemaException { newName = newName.toLowerCase(); - if ( catalog.checkIfExistsSchema( databaseId, newName ) ) { + if ( catalog.checkIfExistsSchema( newName ) ) { throw new NamespaceAlreadyExistsException(); } CatalogSchema catalogSchema = catalog.getSchema( databaseId, oldName ); @@ -433,8 +433,8 @@ public void addColumnToSourceTable( LogicalTable catalogTable, String columnPhys throw new ColumnAlreadyExistsException( columnLogicalName, catalogTable.name ); } - CatalogColumn beforeColumn = beforeColumnName == null ? null : getCatalogColumn( catalogTable.id, beforeColumnName ); - CatalogColumn afterColumn = afterColumnName == null ? null : getCatalogColumn( catalogTable.id, afterColumnName ); + LogicalColumn beforeColumn = beforeColumnName == null ? null : getCatalogColumn( catalogTable.id, beforeColumnName ); + LogicalColumn afterColumn = afterColumnName == null ? null : getCatalogColumn( catalogTable.id, afterColumnName ); // Make sure that the table is of table type SOURCE if ( catalogTable.entityType != EntityType.SOURCE ) { @@ -488,7 +488,7 @@ public void addColumnToSourceTable( LogicalTable catalogTable, String columnPhys // Add default value addDefaultValue( defaultValue, columnId ); - CatalogColumn addedColumn = catalog.getColumn( columnId ); + LogicalColumn addedColumn = catalog.getColumn( columnId ); // Add column placement catalog.addColumnPlacement( @@ -508,8 +508,8 @@ public void addColumnToSourceTable( LogicalTable catalogTable, String columnPhys } - private int updateAdjacentPositions( LogicalTable catalogTable, CatalogColumn beforeColumn, CatalogColumn afterColumn ) { - List columns = catalog.getColumns( catalogTable.id ); + private int updateAdjacentPositions( LogicalTable catalogTable, LogicalColumn beforeColumn, LogicalColumn afterColumn ) { + List columns = catalog.getColumns( catalogTable.id ); int position = columns.size() + 1; if ( beforeColumn != null || afterColumn != null ) { if ( beforeColumn != null ) { @@ -538,8 +538,8 @@ public void addColumn( String columnName, LogicalTable catalogTable, String befo throw new ColumnAlreadyExistsException( columnName, catalogTable.name ); } // - CatalogColumn beforeColumn = beforeColumnName == null ? null : getCatalogColumn( catalogTable.id, beforeColumnName ); - CatalogColumn afterColumn = afterColumnName == null ? null : getCatalogColumn( catalogTable.id, afterColumnName ); + LogicalColumn beforeColumn = beforeColumnName == null ? null : getCatalogColumn( catalogTable.id, beforeColumnName ); + LogicalColumn afterColumn = afterColumnName == null ? 
null : getCatalogColumn( catalogTable.id, afterColumnName ); int position = updateAdjacentPositions( catalogTable, beforeColumn, afterColumn ); @@ -559,7 +559,7 @@ public void addColumn( String columnName, LogicalTable catalogTable, String befo // Add default value addDefaultValue( defaultValue, columnId ); - CatalogColumn addedColumn = catalog.getColumn( columnId ); + LogicalColumn addedColumn = catalog.getColumn( columnId ); // Ask router on which stores this column shall be placed List stores = RoutingManager.getInstance().getCreatePlacementStrategy().getDataStoresForNewColumn( addedColumn ); @@ -586,13 +586,13 @@ public void addColumn( String columnName, LogicalTable catalogTable, String befo public void addForeignKey( LogicalTable catalogTable, LogicalTable refTable, List columnNames, List refColumnNames, String constraintName, ForeignKeyOption onUpdate, ForeignKeyOption onDelete ) throws UnknownColumnException, GenericCatalogException { List columnIds = new LinkedList<>(); for ( String columnName : columnNames ) { - CatalogColumn catalogColumn = catalog.getColumn( catalogTable.id, columnName ); - columnIds.add( catalogColumn.id ); + LogicalColumn logicalColumn = catalog.getColumn( catalogTable.id, columnName ); + columnIds.add( logicalColumn.id ); } List referencesIds = new LinkedList<>(); for ( String columnName : refColumnNames ) { - CatalogColumn catalogColumn = catalog.getColumn( refTable.id, columnName ); - referencesIds.add( catalogColumn.id ); + LogicalColumn logicalColumn = catalog.getColumn( refTable.id, columnName ); + referencesIds.add( logicalColumn.id ); } catalog.addForeignKey( catalogTable.id, columnIds, refTable.id, referencesIds, constraintName, onUpdate, onDelete ); } @@ -602,8 +602,8 @@ public void addForeignKey( LogicalTable catalogTable, LogicalTable refTable, Lis public void addIndex( LogicalTable catalogTable, String indexMethodName, List columnNames, String indexName, boolean isUnique, DataStore location, Statement statement ) throws UnknownColumnException, UnknownIndexMethodException, GenericCatalogException, UnknownTableException, UnknownUserException, UnknownSchemaException, UnknownKeyException, UnknownDatabaseException, TransactionException, AlterSourceException, IndexExistsException, MissingColumnPlacementException { List columnIds = new LinkedList<>(); for ( String columnName : columnNames ) { - CatalogColumn catalogColumn = catalog.getColumn( catalogTable.id, columnName ); - columnIds.add( catalogColumn.id ); + LogicalColumn logicalColumn = catalog.getColumn( catalogTable.id, columnName ); + columnIds.add( logicalColumn.id ); } IndexType type = IndexType.MANUAL; @@ -728,8 +728,8 @@ public void addPolyphenyIndex( LogicalTable catalogTable, String indexMethodName indexName = indexName.toLowerCase(); List columnIds = new LinkedList<>(); for ( String columnName : columnNames ) { - CatalogColumn catalogColumn = catalog.getColumn( catalogTable.id, columnName ); - columnIds.add( catalogColumn.id ); + LogicalColumn logicalColumn = catalog.getColumn( catalogTable.id, columnName ); + columnIds.add( logicalColumn.id ); } IndexType type = IndexType.MANUAL; @@ -779,7 +779,7 @@ public void addPolyphenyIndex( LogicalTable catalogTable, String indexMethodName @Override public void addDataPlacement( LogicalTable catalogTable, List columnIds, List partitionGroupIds, List partitionGroupNames, DataStore dataStore, Statement statement ) throws PlacementAlreadyExistsException { - List addedColumns = new LinkedList<>(); + List addedColumns = new LinkedList<>(); List 
tempPartitionGroupList = new ArrayList<>(); @@ -932,8 +932,8 @@ public void addPrimaryKey( LogicalTable catalogTable, List columnNames, List columnIds = new LinkedList<>(); for ( String columnName : columnNames ) { - CatalogColumn catalogColumn = catalog.getColumn( catalogTable.id, columnName ); - columnIds.add( catalogColumn.id ); + LogicalColumn logicalColumn = catalog.getColumn( catalogTable.id, columnName ); + columnIds.add( logicalColumn.id ); } catalog.addPrimaryKey( catalogTable.id, columnIds ); @@ -974,8 +974,8 @@ public void addUniqueConstraint( LogicalTable catalogTable, List columnN try { List columnIds = new LinkedList<>(); for ( String columnName : columnNames ) { - CatalogColumn catalogColumn = catalog.getColumn( catalogTable.id, columnName ); - columnIds.add( catalogColumn.id ); + LogicalColumn logicalColumn = catalog.getColumn( catalogTable.id, columnName ); + columnIds.add( logicalColumn.id ); } catalog.addUniqueConstraint( catalogTable.id, constraintName, columnIds ); } catch ( GenericCatalogException | UnknownColumnException e ) { @@ -996,7 +996,7 @@ public void dropColumn( LogicalTable catalogTable, String columnName, Statement // Check if views are dependent on this view checkViewDependencies( catalogTable ); - CatalogColumn column = getCatalogColumn( catalogTable.id, columnName ); + LogicalColumn column = getCatalogColumn( catalogTable.id, columnName ); // Check if column is part of a key for ( CatalogKey key : catalog.getTableKeys( catalogTable.id ) ) { @@ -1023,7 +1023,7 @@ public void dropColumn( LogicalTable catalogTable, String columnName, Statement } // Delete from catalog - List columns = catalog.getColumns( catalogTable.id ); + List columns = catalog.getColumns( catalogTable.id ); catalog.deleteColumn( column.id ); if ( column.position != columns.size() ) { // Update position of the other columns @@ -1167,22 +1167,22 @@ public void setColumnType( LogicalTable catalogTable, String columnName, ColumnT // check if model permits operation checkModelLogic( catalogTable, columnName ); - CatalogColumn catalogColumn = getCatalogColumn( catalogTable.id, columnName ); + LogicalColumn logicalColumn = getCatalogColumn( catalogTable.id, columnName ); catalog.setColumnType( - catalogColumn.id, + logicalColumn.id, type.type, type.collectionType, type.precision, type.scale, type.dimension, type.cardinality ); - for ( CatalogColumnPlacement placement : catalog.getColumnPlacement( catalogColumn.id ) ) { + for ( CatalogColumnPlacement placement : catalog.getColumnPlacement( logicalColumn.id ) ) { AdapterManager.getInstance().getStore( placement.adapterId ).updateColumnType( statement.getPrepareContext(), placement, - catalog.getColumn( catalogColumn.id ), - catalogColumn.type ); + catalog.getColumn( logicalColumn.id ), + logicalColumn.type ); } // Reset plan cache implementation cache & routing cache @@ -1192,7 +1192,7 @@ @Override public void setColumnNullable( LogicalTable catalogTable, String columnName, boolean nullable, Statement statement ) throws ColumnNotExistsException, DdlOnSourceException, GenericCatalogException { - CatalogColumn catalogColumn = getCatalogColumn( catalogTable.id, columnName ); + LogicalColumn logicalColumn = getCatalogColumn( catalogTable.id, columnName ); // Make sure that this is a table of type TABLE (and not SOURCE) checkIfDdlPossible( catalogTable.entityType ); @@ -1200,7 +1200,7 @@ public void setColumnNullable( LogicalTable catalogTable, String columnName, boo //
Check if model permits operation checkModelLogic( catalogTable, columnName ); - catalog.setNullable( catalogColumn.id, nullable ); + catalog.setNullable( logicalColumn.id, nullable ); // Reset plan cache implementation cache & routing cache statement.getQueryProcessor().resetCaches(); @@ -1212,10 +1212,10 @@ public void setColumnPosition( LogicalTable catalogTable, String columnName, Str // Check if model permits operation checkModelLogic( catalogTable, columnName ); - CatalogColumn catalogColumn = getCatalogColumn( catalogTable.id, columnName ); + LogicalColumn logicalColumn = getCatalogColumn( catalogTable.id, columnName ); int targetPosition; - CatalogColumn refColumn; + LogicalColumn refColumn; if ( beforeColumnName != null ) { refColumn = getCatalogColumn( catalogTable.id, beforeColumnName ); targetPosition = refColumn.position; @@ -1223,31 +1223,31 @@ public void setColumnPosition( LogicalTable catalogTable, String columnName, Str refColumn = getCatalogColumn( catalogTable.id, afterColumnName ); targetPosition = refColumn.position + 1; } - if ( catalogColumn.id == refColumn.id ) { + if ( logicalColumn.id == refColumn.id ) { throw new RuntimeException( "Same column!" ); } - List columns = catalog.getColumns( catalogTable.id ); - if ( targetPosition < catalogColumn.position ) { // Walk from last column to first column + List columns = catalog.getColumns( catalogTable.id ); + if ( targetPosition < logicalColumn.position ) { // Walk from last column to first column for ( int i = columns.size(); i >= 1; i-- ) { - if ( i < catalogColumn.position && i >= targetPosition ) { + if ( i < logicalColumn.position && i >= targetPosition ) { catalog.setColumnPosition( columns.get( i - 1 ).id, i + 1 ); - } else if ( i == catalogColumn.position ) { - catalog.setColumnPosition( catalogColumn.id, columns.size() + 1 ); + } else if ( i == logicalColumn.position ) { + catalog.setColumnPosition( logicalColumn.id, columns.size() + 1 ); } if ( i == targetPosition ) { - catalog.setColumnPosition( catalogColumn.id, targetPosition ); + catalog.setColumnPosition( logicalColumn.id, targetPosition ); } } - } else if ( targetPosition > catalogColumn.position ) { // Walk from first column to last column + } else if ( targetPosition > logicalColumn.position ) { // Walk from first column to last column targetPosition--; for ( int i = 1; i <= columns.size(); i++ ) { - if ( i > catalogColumn.position && i <= targetPosition ) { + if ( i > logicalColumn.position && i <= targetPosition ) { catalog.setColumnPosition( columns.get( i - 1 ).id, i - 1 ); - } else if ( i == catalogColumn.position ) { - catalog.setColumnPosition( catalogColumn.id, columns.size() + 1 ); + } else if ( i == logicalColumn.position ) { + catalog.setColumnPosition( logicalColumn.id, columns.size() + 1 ); } if ( i == targetPosition ) { - catalog.setColumnPosition( catalogColumn.id, targetPosition ); + catalog.setColumnPosition( logicalColumn.id, targetPosition ); } } } @@ -1260,7 +1260,7 @@ public void setColumnPosition( LogicalTable catalogTable, String columnName, Str @Override public void setColumnCollation( LogicalTable catalogTable, String columnName, Collation collation, Statement statement ) throws ColumnNotExistsException, DdlOnSourceException { - CatalogColumn catalogColumn = getCatalogColumn( catalogTable.id, columnName ); + LogicalColumn logicalColumn = getCatalogColumn( catalogTable.id, columnName ); // Check if model permits operation checkModelLogic( catalogTable, columnName ); @@ -1268,7 +1268,7 @@ public void setColumnCollation( 
LogicalTable catalogTable, String columnName, Co // Make sure that this is a table of type TABLE (and not SOURCE) checkIfDdlPossible( catalogTable.entityType ); - catalog.setCollation( catalogColumn.id, collation ); + catalog.setCollation( logicalColumn.id, collation ); // Reset plan cache implementation cache & routing cache statement.getQueryProcessor().resetCaches(); @@ -1277,12 +1277,12 @@ public void setColumnCollation( LogicalTable catalogTable, String columnName, Co @Override public void setDefaultValue( LogicalTable catalogTable, String columnName, String defaultValue, Statement statement ) throws ColumnNotExistsException { - CatalogColumn catalogColumn = getCatalogColumn( catalogTable.id, columnName ); + LogicalColumn logicalColumn = getCatalogColumn( catalogTable.id, columnName ); // Check if model permits operation checkModelLogic( catalogTable, columnName ); - addDefaultValue( defaultValue, catalogColumn.id ); + addDefaultValue( defaultValue, logicalColumn.id ); // Reset plan cache implementation cache & routing cache statement.getQueryProcessor().resetCaches(); @@ -1291,12 +1291,12 @@ public void setDefaultValue( LogicalTable catalogTable, String columnName, Strin @Override public void dropDefaultValue( LogicalTable catalogTable, String columnName, Statement statement ) throws ColumnNotExistsException { - CatalogColumn catalogColumn = getCatalogColumn( catalogTable.id, columnName ); + LogicalColumn logicalColumn = getCatalogColumn( catalogTable.id, columnName ); // check if model permits operation checkModelLogic( catalogTable, columnName ); - catalog.deleteDefaultValue( catalogColumn.id ); + catalog.deleteDefaultValue( logicalColumn.id ); // Reset plan cache implementation cache & routing cache statement.getQueryProcessor().resetCaches(); @@ -1411,7 +1411,7 @@ else if ( !partitionGroupNames.isEmpty() && partitionGroupIds.isEmpty() ) { tempPartitionGroupList.forEach( pg -> catalog.getPartitions( pg ).forEach( p -> intendedPartitionIds.add( p.id ) ) ); // Which columns to add - List addedColumns = new LinkedList<>(); + List addedColumns = new LinkedList<>(); for ( long cid : columnIds ) { if ( catalog.checkIfExistsColumnPlacement( storeInstance.getAdapterId(), cid ) ) { @@ -1526,7 +1526,7 @@ public void modifyPartitionPlacement( LogicalTable catalogTable, List part storeInstance.createPhysicalTable( statement.getPrepareContext(), catalogTable, null ); // Get only columns that are actually on that store - List necessaryColumns = new LinkedList<>(); + List necessaryColumns = new LinkedList<>(); catalog.getColumnPlacementsOnAdapterPerTable( storeInstance.getAdapterId(), catalogTable.id ).forEach( cp -> necessaryColumns.add( catalog.getColumn( cp.columnId ) ) ); dataMigrator.copyData( statement.getTransaction(), catalog.getAdapter( storeId ), necessaryColumns, newPartitions ); @@ -1565,16 +1565,16 @@ public void addColumnPlacement( LogicalTable catalogTable, String columnName, Da throw new PlacementNotExistsException(); } - CatalogColumn catalogColumn = getCatalogColumn( catalogTable.id, columnName ); + LogicalColumn logicalColumn = getCatalogColumn( catalogTable.id, columnName ); // Make sure that this store does not contain a placement of this column - if ( catalog.checkIfExistsColumnPlacement( storeInstance.getAdapterId(), catalogColumn.id ) ) { - CatalogColumnPlacement placement = catalog.getColumnPlacement( storeInstance.getAdapterId(), catalogColumn.id ); + if ( catalog.checkIfExistsColumnPlacement( storeInstance.getAdapterId(), logicalColumn.id ) ) { + CatalogColumnPlacement 
placement = catalog.getColumnPlacement( storeInstance.getAdapterId(), logicalColumn.id ); if ( placement.placementType == PlacementType.AUTOMATIC ) { // Make placement manual catalog.updateColumnPlacementType( storeInstance.getAdapterId(), - catalogColumn.id, + logicalColumn.id, PlacementType.MANUAL ); } else { throw new PlacementAlreadyExistsException(); @@ -1583,18 +1583,18 @@ public void addColumnPlacement( LogicalTable catalogTable, String columnName, Da // Create column placement catalog.addColumnPlacement( storeInstance.getAdapterId(), - catalogColumn.id, + logicalColumn.id, PlacementType.MANUAL, null, null, null ); // Add column on store - storeInstance.addColumn( statement.getPrepareContext(), catalogTable, catalogColumn ); + storeInstance.addColumn( statement.getPrepareContext(), catalogTable, logicalColumn ); // Copy the data to the newly added column placements DataMigrator dataMigrator = statement.getTransaction().getDataMigrator(); dataMigrator.copyData( statement.getTransaction(), catalog.getAdapter( storeInstance.getAdapterId() ), - ImmutableList.of( catalogColumn ), catalog.getPartitionsOnDataPlacement( storeInstance.getAdapterId(), catalogTable.id ) ); + ImmutableList.of( logicalColumn ), catalog.getPartitionsOnDataPlacement( storeInstance.getAdapterId(), catalogTable.id ) ); } // Reset query plan cache, implementation cache & routing cache @@ -1612,32 +1612,32 @@ public void dropColumnPlacement( LogicalTable catalogTable, String columnName, D throw new PlacementNotExistsException(); } - CatalogColumn catalogColumn = getCatalogColumn( catalogTable.id, columnName ); + LogicalColumn logicalColumn = getCatalogColumn( catalogTable.id, columnName ); // Check whether this store actually contains a placement of this column - if ( !catalog.checkIfExistsColumnPlacement( storeInstance.getAdapterId(), catalogColumn.id ) ) { + if ( !catalog.checkIfExistsColumnPlacement( storeInstance.getAdapterId(), logicalColumn.id ) ) { throw new PlacementNotExistsException(); } // Check whether there are any indexes located on the store requiring this column for ( CatalogIndex index : catalog.getIndexes( catalogTable.id, false ) ) { - if ( index.location == storeInstance.getAdapterId() && index.key.columnIds.contains( catalogColumn.id ) ) { + if ( index.location == storeInstance.getAdapterId() && index.key.columnIds.contains( logicalColumn.id ) ) { throw new IndexPreventsRemovalException( index.name, columnName ); } } - if ( !catalog.validateDataPlacementsConstraints( catalogColumn.tableId, storeInstance.getAdapterId(), Arrays.asList( catalogColumn.id ), new ArrayList<>() ) ) { + if ( !catalog.validateDataPlacementsConstraints( logicalColumn.tableId, storeInstance.getAdapterId(), Arrays.asList( logicalColumn.id ), new ArrayList<>() ) ) { throw new LastPlacementException(); } // Check whether the column to drop is a primary key CatalogPrimaryKey primaryKey = catalog.getPrimaryKey( catalogTable.primaryKey ); - if ( primaryKey.columnIds.contains( catalogColumn.id ) ) { + if ( primaryKey.columnIds.contains( logicalColumn.id ) ) { throw new PlacementIsPrimaryException(); } // Drop Column on store - storeInstance.dropColumn( statement.getPrepareContext(), catalog.getColumnPlacement( storeInstance.getAdapterId(), catalogColumn.id ) ); + storeInstance.dropColumn( statement.getPrepareContext(), catalog.getColumnPlacement( storeInstance.getAdapterId(), logicalColumn.id ) ); // Drop column placement - catalog.deleteColumnPlacement( storeInstance.getAdapterId(), catalogColumn.id, false ); + 
catalog.deleteColumnPlacement( storeInstance.getAdapterId(), logicalColumn.id, false ); // Reset query plan cache, implementation cache & routing cache statement.getQueryProcessor().resetCaches(); @@ -1675,18 +1675,18 @@ public void renameTable( LogicalTable catalogTable, String newTableName, Stateme @Override public void renameColumn( LogicalTable catalogTable, String columnName, String newColumnName, Statement statement ) throws ColumnAlreadyExistsException, ColumnNotExistsException { - CatalogColumn catalogColumn = getCatalogColumn( catalogTable.id, columnName ); + LogicalColumn logicalColumn = getCatalogColumn( catalogTable.id, columnName ); - if ( catalog.checkIfExistsColumn( catalogColumn.tableId, newColumnName ) ) { - throw new ColumnAlreadyExistsException( newColumnName, catalogColumn.getTableName() ); + if ( catalog.checkIfExistsColumn( logicalColumn.tableId, newColumnName ) ) { + throw new ColumnAlreadyExistsException( newColumnName, logicalColumn.getTableName() ); } // Check if views are dependent on this view checkViewDependencies( catalogTable ); - catalog.renameColumn( catalogColumn.id, newColumnName ); + catalog.renameColumn( logicalColumn.id, newColumnName ); // Update Name in statistics - StatisticsManager.getInstance().updateColumnName( catalogColumn, newColumnName ); + StatisticsManager.getInstance().updateColumnName( logicalColumn, newColumnName ); // Reset plan cache implementation cache & routing cache statement.getQueryProcessor().resetCaches(); @@ -1806,7 +1806,7 @@ public void createMaterializedView( String viewName, long schemaId, AlgRoot algR // Creates a list with all columns, tableId is needed to create the primary key List columns = getColumnInformation( projectedColumns, fieldList, true, tableId ); - Map> addedColumns = new HashMap<>(); + Map> addedColumns = new HashMap<>(); List columnIds = new ArrayList<>(); @@ -1840,14 +1840,14 @@ public void createMaterializedView( String viewName, long schemaId, AlgRoot algR null ); - List catalogColumns; + List logicalColumns; if ( addedColumns.containsKey( adapterId ) ) { - catalogColumns = addedColumns.get( adapterId ); + logicalColumns = addedColumns.get( adapterId ); } else { - catalogColumns = new ArrayList<>(); + logicalColumns = new ArrayList<>(); } - catalogColumns.add( catalog.getColumn( columnId ) ); - addedColumns.put( adapterId, catalogColumns ); + logicalColumns.add( catalog.getColumn( columnId ) ); + addedColumns.put( adapterId, logicalColumns ); } } @@ -1908,7 +1908,7 @@ public long createGraph( long databaseId, String graphName, boolean modifiable, } // add general graph - long graphId = catalog.addGraph( databaseId, graphName, stores, modifiable, ifNotExists, replace ); + long graphId = catalog.addGraph( graphName, stores, modifiable, ifNotExists, replace ); addGraphPlacement( graphId, stores, false, statement ); @@ -2454,7 +2454,7 @@ private void checkDocumentModel( long schemaId, List columns, @Override public void addPartitioning( PartitionInformation partitionInfo, List stores, Statement statement ) throws GenericCatalogException, UnknownPartitionTypeException, UnknownColumnException, PartitionGroupNamesNotUniqueException, UnknownDatabaseException, UnknownTableException, TransactionException, UnknownSchemaException, UnknownUserException, UnknownKeyException { - CatalogColumn catalogColumn = catalog.getColumn( partitionInfo.table.id, partitionInfo.columnName ); + LogicalColumn logicalColumn = catalog.getColumn( partitionInfo.table.id, partitionInfo.columnName ); PartitionType actualPartitionType =
PartitionType.getByName( partitionInfo.typeName ); @@ -2469,7 +2469,7 @@ public void addPartitioning( PartitionInformation partitionInfo, List // Check if specified partitionColumn is even part of the table if ( log.isDebugEnabled() ) { - log.debug( "Creating partition group for table: {} with id {} on schema: {} on column: {}", partitionInfo.table.name, partitionInfo.table.id, partitionInfo.table.getNamespaceName(), catalogColumn.id ); + log.debug( "Creating partition group for table: {} with id {} on schema: {} on column: {}", partitionInfo.table.name, partitionInfo.table.id, partitionInfo.table.getNamespaceName(), logicalColumn.id ); } LogicalTable unPartitionedTable = catalog.getTable( partitionInfo.table.id ); @@ -2479,8 +2479,8 @@ public void addPartitioning( PartitionInformation partitionInfo, List PartitionManager partitionManager = partitionManagerFactory.getPartitionManager( actualPartitionType ); // Check whether partition function supports type of partition column - if ( !partitionManager.supportsColumnOfType( catalogColumn.type ) ) { - throw new RuntimeException( "The partition function " + actualPartitionType + " does not support columns of type " + catalogColumn.type ); + if ( !partitionManager.supportsColumnOfType( logicalColumn.type ) ) { + throw new RuntimeException( "The partition function " + actualPartitionType + " does not support columns of type " + logicalColumn.type ); } int numberOfPartitionGroups = partitionInfo.numberOfPartitionGroups; @@ -2500,7 +2500,7 @@ public void addPartitioning( PartitionInformation partitionInfo, List } // Validate partition setup - if ( !partitionManager.validatePartitionGroupSetup( partitionInfo.qualifiers, numberOfPartitionGroups, partitionInfo.partitionGroupNames, catalogColumn ) ) { + if ( !partitionManager.validatePartitionGroupSetup( partitionInfo.qualifiers, numberOfPartitionGroups, partitionInfo.partitionGroupNames, logicalColumn ) ) { throw new RuntimeException( "Partitioning failed for table: " + partitionInfo.table.name ); } @@ -2615,7 +2615,7 @@ public void addPartitioning( PartitionInformation partitionInfo, List .partitionType( actualPartitionType ) .isPartitioned( true ) .internalPartitionFunction( PartitionType.valueOf( ((RawTemperaturePartitionInformation) partitionInfo.rawPartitionInformation).getInternalPartitionFunction().toString().toUpperCase() ) ) - .partitionColumnId( catalogColumn.id ) + .partitionColumnId( logicalColumn.id ) .partitionGroupIds( ImmutableList.copyOf( partitionGroupIds ) ) .partitionIds( ImmutableList.copyOf( partitionIds ) ) .partitionCostIndication( PartitionCostIndication.valueOf( ((RawTemperaturePartitionInformation) partitionInfo.rawPartitionInformation).getAccessPattern().toString().toUpperCase() ) ) @@ -2632,7 +2632,7 @@ public void addPartitioning( PartitionInformation partitionInfo, List partitionProperty = PartitionProperty.builder() .partitionType( actualPartitionType ) .isPartitioned( true ) - .partitionColumnId( catalogColumn.id ) + .partitionColumnId( logicalColumn.id ) .partitionGroupIds( ImmutableList.copyOf( partitionGroupIds ) ) .partitionIds( ImmutableList.copyOf( partitionIds ) ) .reliesOnPeriodicChecks( false ) @@ -2640,13 +2640,13 @@ public void addPartitioning( PartitionInformation partitionInfo, List } // Update catalog table - catalog.partitionTable( partitionInfo.table.id, actualPartitionType, catalogColumn.id, numberOfPartitionGroups, partitionGroupIds, partitionProperty ); + catalog.partitionTable( partitionInfo.table.id, actualPartitionType, logicalColumn.id, 
numberOfPartitionGroups, partitionGroupIds, partitionProperty ); // Get primary key of table and use PK to find all DataPlacements of table long pkid = partitionInfo.table.primaryKey; List pkColumnIds = catalog.getPrimaryKey( pkid ).columnIds; // Basically get the first part of the PK; even if the PK is compound, the first part is sufficient - CatalogColumn pkColumn = catalog.getColumn( pkColumnIds.get( 0 ) ); + LogicalColumn pkColumn = catalog.getColumn( pkColumnIds.get( 0 ) ); // This gets us only one ccp per store (first part of PK) boolean fillStores = false; @@ -2687,7 +2687,7 @@ public void addPartitioning( PartitionInformation partitionInfo, List // Copy data from unpartitioned to partitioned // Get only columns that are actually on that store // Every store of a newly partitioned table will initially hold all partitions - List necessaryColumns = new LinkedList<>(); + List necessaryColumns = new LinkedList<>(); catalog.getColumnPlacementsOnAdapterPerTable( store.getAdapterId(), partitionedTable.id ).forEach( cp -> necessaryColumns.add( catalog.getColumn( cp.columnId ) ) ); // Copy data from the old partition to new partitions @@ -2764,7 +2764,7 @@ public void removePartitioning( LogicalTable partitionedTable, Statement stateme long pkid = partitionedTable.primaryKey; List pkColumnIds = catalog.getPrimaryKey( pkid ).columnIds; // Basically get the first part of the PK; even if the PK is compound, the first part is sufficient - CatalogColumn pkColumn = catalog.getColumn( pkColumnIds.get( 0 ) ); + LogicalColumn pkColumn = catalog.getColumn( pkColumnIds.get( 0 ) ); // This gets us only one ccp per store (first part of PK) List catalogColumnPlacements = catalog.getColumnPlacement( pkColumn.id ); @@ -2795,7 +2795,7 @@ public void removePartitioning( LogicalTable partitionedTable, Statement stateme store.createPhysicalTable( statement.getPrepareContext(), mergedTable, null ); // Get only columns that are actually on that store - List necessaryColumns = new LinkedList<>(); + List necessaryColumns = new LinkedList<>(); catalog.getColumnPlacementsOnAdapterPerTable( store.getAdapterId(), mergedTable.id ).forEach( cp -> necessaryColumns.add( catalog.getColumn( cp.columnId ) ) ); // TODO @HENNLO Check if this can be omitted @@ -2900,8 +2900,8 @@ private void addColumn( String columnName, ColumnTypeInformation typeInformation public void addConstraint( String constraintName, ConstraintType constraintType, List columnNames, long tableId ) throws UnknownColumnException, GenericCatalogException { List columnIds = new LinkedList<>(); for ( String columnName : columnNames ) { - CatalogColumn catalogColumn = catalog.getColumn( tableId, columnName ); - columnIds.add( catalogColumn.id ); + LogicalColumn logicalColumn = catalog.getColumn( tableId, columnName ); + columnIds.add( logicalColumn.id ); } if ( constraintType == ConstraintType.PRIMARY ) { catalog.addPrimaryKey( tableId, columnIds ); @@ -2919,7 +2919,7 @@ public void dropSchema( long databaseId, String schemaName, boolean ifExists, St try { schemaName = schemaName.toLowerCase(); // Check if there is a schema with this name - if ( catalog.checkIfExistsSchema( databaseId, schemaName ) ) { + if ( catalog.checkIfExistsSchema( schemaName ) ) { CatalogSchema catalogSchema = catalog.getSchema( databaseId, schemaName ); // Drop all collections in this namespace @@ -3135,7 +3135,7 @@ private void prepareMonitoring( Statement statement, Kind kind, LogicalTable cat } - private void prepareMonitoring( Statement statement, Kind kind, LogicalTable catalogTable, CatalogColumn catalogColumn ) { + private
void prepareMonitoring( Statement statement, Kind kind, LogicalTable catalogTable, LogicalColumn logicalColumn ) { // Initialize Monitoring if ( statement.getMonitoringEvent() == null ) { StatementEvent event = new DdlEvent(); @@ -3143,7 +3143,7 @@ private void prepareMonitoring( Statement statement, Kind kind, LogicalTable cat event.setTableId( catalogTable.id ); event.setSchemaId( catalogTable.namespaceId ); if ( kind == Kind.DROP_COLUMN ) { - event.setColumnId( catalogColumn.id ); + event.setColumnId( logicalColumn.id ); } statement.setMonitoringEvent( event ); } diff --git a/dbms/src/main/java/org/polypheny/db/partition/AbstractPartitionManager.java b/dbms/src/main/java/org/polypheny/db/partition/AbstractPartitionManager.java index 8db18411bf..457de00f4c 100644 --- a/dbms/src/main/java/org/polypheny/db/partition/AbstractPartitionManager.java +++ b/dbms/src/main/java/org/polypheny/db/partition/AbstractPartitionManager.java @@ -23,9 +23,9 @@ import lombok.extern.slf4j.Slf4j; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogAdapter; -import org.polypheny.db.catalog.entity.CatalogColumn; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; import org.polypheny.db.catalog.entity.CatalogPartition; +import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalTable; @@ -93,7 +93,7 @@ public boolean validatePartitionGroupSetup( List> partitionGroupQualifiers, long numPartitionGroups, List partitionGroupNames, - CatalogColumn partitionColumn ) { + LogicalColumn partitionColumn ) { if ( numPartitionGroups == 0 && partitionGroupNames.size() < 2 ) { throw new RuntimeException( "Partitioning of table failed! Can't partition table with less than 2 partitions/names" ); diff --git a/dbms/src/main/java/org/polypheny/db/partition/FrequencyMapImpl.java b/dbms/src/main/java/org/polypheny/db/partition/FrequencyMapImpl.java index 14901c9f2c..66af0b72fd 100644 --- a/dbms/src/main/java/org/polypheny/db/partition/FrequencyMapImpl.java +++ b/dbms/src/main/java/org/polypheny/db/partition/FrequencyMapImpl.java @@ -31,17 +31,17 @@ import org.polypheny.db.adapter.AdapterManager; import org.polypheny.db.adapter.DataStore; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.logistic.DataPlacementRole; -import org.polypheny.db.catalog.logistic.PartitionType; -import org.polypheny.db.catalog.logistic.PlacementType; import org.polypheny.db.catalog.entity.CatalogAdapter; -import org.polypheny.db.catalog.entity.CatalogColumn; import org.polypheny.db.catalog.entity.CatalogPartition; +import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.exceptions.GenericCatalogException; import org.polypheny.db.catalog.exceptions.UnknownDatabaseException; import org.polypheny.db.catalog.exceptions.UnknownSchemaException; import org.polypheny.db.catalog.exceptions.UnknownUserException; +import org.polypheny.db.catalog.logistic.DataPlacementRole; +import org.polypheny.db.catalog.logistic.PartitionType; +import org.polypheny.db.catalog.logistic.PlacementType; import org.polypheny.db.config.RuntimeConfig; import org.polypheny.db.monitoring.core.MonitoringServiceProvider; import org.polypheny.db.monitoring.events.metrics.DmlDataPoint; @@ -304,15 +304,15 @@ private void redistributePartitions( LogicalTable table, List partitionsFr DataPlacementRole.UPTODATE ); } - store.createPhysicalTable( statement.getPrepareContext(), 
table, , hotPartitionsToCreate ); + store.createPhysicalTable( statement.getPrepareContext(), table, null ); - List catalogColumns = new ArrayList<>(); - catalog.getColumnPlacementsOnAdapterPerTable( store.getAdapterId(), table.id ).forEach( cp -> catalogColumns.add( catalog.getColumn( cp.columnId ) ) ); + List logicalColumns = new ArrayList<>(); + catalog.getColumnPlacementsOnAdapterPerTable( store.getAdapterId(), table.id ).forEach( cp -> logicalColumns.add( catalog.getColumn( cp.columnId ) ) ); dataMigrator.copyData( statement.getTransaction(), catalog.getAdapter( store.getAdapterId() ), - catalogColumns, + logicalColumns, hotPartitionsToCreate ); if ( !partitionsToRemoveFromStore.containsKey( store ) ) { @@ -352,12 +352,12 @@ private void redistributePartitions( LogicalTable table, List partitionsFr null, null, DataPlacementRole.UPTODATE ); } - store.createPhysicalTable( statement.getPrepareContext(), table, , coldPartitionsToCreate ); + store.createPhysicalTable( statement.getPrepareContext(), table, null ); - List catalogColumns = new ArrayList<>(); - catalog.getColumnPlacementsOnAdapterPerTable( store.getAdapterId(), table.id ).forEach( cp -> catalogColumns.add( catalog.getColumn( cp.columnId ) ) ); + List logicalColumns = new ArrayList<>(); + catalog.getColumnPlacementsOnAdapterPerTable( store.getAdapterId(), table.id ).forEach( cp -> logicalColumns.add( catalog.getColumn( cp.columnId ) ) ); - dataMigrator.copyData( statement.getTransaction(), catalog.getAdapter( store.getAdapterId() ), catalogColumns, coldPartitionsToCreate ); + dataMigrator.copyData( statement.getTransaction(), catalog.getAdapter( store.getAdapterId() ), logicalColumns, coldPartitionsToCreate ); if ( !partitionsToRemoveFromStore.containsKey( store ) ) { partitionsToRemoveFromStore.put( store, partitionsFromColdToHot ); diff --git a/dbms/src/main/java/org/polypheny/db/partition/HashPartitionManager.java b/dbms/src/main/java/org/polypheny/db/partition/HashPartitionManager.java index b5c600b8e4..d52eb99379 100644 --- a/dbms/src/main/java/org/polypheny/db/partition/HashPartitionManager.java +++ b/dbms/src/main/java/org/polypheny/db/partition/HashPartitionManager.java @@ -20,7 +20,7 @@ import java.util.Arrays; import java.util.List; import lombok.extern.slf4j.Slf4j; -import org.polypheny.db.catalog.entity.CatalogColumn; +import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.partition.PartitionFunctionInfo.PartitionFunctionInfoColumn; import org.polypheny.db.partition.PartitionFunctionInfo.PartitionFunctionInfoColumnType; @@ -52,7 +52,7 @@ public long getTargetPartitionId( LogicalTable catalogTable, String columnValue @Override - public boolean validatePartitionGroupSetup( List> partitionGroupQualifiers, long numPartitionGroups, List partitionGroupNames, CatalogColumn partitionColumn ) { + public boolean validatePartitionGroupSetup( List> partitionGroupQualifiers, long numPartitionGroups, List partitionGroupNames, LogicalColumn partitionColumn ) { super.validatePartitionGroupSetup( partitionGroupQualifiers, numPartitionGroups, partitionGroupNames, partitionColumn ); if ( !partitionGroupQualifiers.isEmpty() ) { diff --git a/dbms/src/main/java/org/polypheny/db/partition/ListPartitionManager.java b/dbms/src/main/java/org/polypheny/db/partition/ListPartitionManager.java index 222e05a3f6..67630ac829 100644 --- a/dbms/src/main/java/org/polypheny/db/partition/ListPartitionManager.java +++ 
b/dbms/src/main/java/org/polypheny/db/partition/ListPartitionManager.java @@ -22,8 +22,8 @@ import java.util.List; import lombok.extern.slf4j.Slf4j; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.entity.CatalogColumn; import org.polypheny.db.catalog.entity.CatalogPartition; +import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.partition.PartitionFunctionInfo.PartitionFunctionInfoColumn; import org.polypheny.db.partition.PartitionFunctionInfo.PartitionFunctionInfoColumnType; @@ -76,7 +76,7 @@ public long getTargetPartitionId( LogicalTable catalogTable, String columnValue @Override - public boolean validatePartitionGroupSetup( List> partitionGroupQualifiers, long numPartitionGroups, List partitionGroupNames, CatalogColumn partitionColumn ) { + public boolean validatePartitionGroupSetup( List> partitionGroupQualifiers, long numPartitionGroups, List partitionGroupNames, LogicalColumn partitionColumn ) { super.validatePartitionGroupSetup( partitionGroupQualifiers, numPartitionGroups, partitionGroupNames, partitionColumn ); if ( partitionColumn.type.getFamily() == PolyTypeFamily.NUMERIC ) { diff --git a/dbms/src/main/java/org/polypheny/db/partition/RangePartitionManager.java b/dbms/src/main/java/org/polypheny/db/partition/RangePartitionManager.java index b40ad64a31..89b5e96231 100644 --- a/dbms/src/main/java/org/polypheny/db/partition/RangePartitionManager.java +++ b/dbms/src/main/java/org/polypheny/db/partition/RangePartitionManager.java @@ -24,8 +24,8 @@ import java.util.stream.Stream; import lombok.extern.slf4j.Slf4j; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.entity.CatalogColumn; import org.polypheny.db.catalog.entity.CatalogPartition; +import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.partition.PartitionFunctionInfo.PartitionFunctionInfoColumn; import org.polypheny.db.partition.PartitionFunctionInfo.PartitionFunctionInfoColumnType; @@ -76,7 +76,7 @@ public long getTargetPartitionId( LogicalTable catalogTable, String columnValue @Override - public boolean validatePartitionGroupSetup( List> partitionGroupQualifiers, long numPartitionGroups, List partitionGroupNames, CatalogColumn partitionColumn ) { + public boolean validatePartitionGroupSetup( List> partitionGroupQualifiers, long numPartitionGroups, List partitionGroupNames, LogicalColumn partitionColumn ) { super.validatePartitionGroupSetup( partitionGroupQualifiers, numPartitionGroups, partitionGroupNames, partitionColumn ); if ( partitionColumn.type.getFamily() != PolyTypeFamily.NUMERIC ) { diff --git a/dbms/src/main/java/org/polypheny/db/partition/TemperatureAwarePartitionManager.java b/dbms/src/main/java/org/polypheny/db/partition/TemperatureAwarePartitionManager.java index e10556f704..ee2c47aa87 100644 --- a/dbms/src/main/java/org/polypheny/db/partition/TemperatureAwarePartitionManager.java +++ b/dbms/src/main/java/org/polypheny/db/partition/TemperatureAwarePartitionManager.java @@ -21,8 +21,8 @@ import java.util.Arrays; import java.util.List; import java.util.Map; -import org.polypheny.db.catalog.entity.CatalogColumn; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; +import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.partition.PartitionFunctionInfo.PartitionFunctionInfoColumn; 
import org.polypheny.db.partition.PartitionFunctionInfo.PartitionFunctionInfoColumnType; @@ -92,7 +92,7 @@ public int getNumberOfPartitionsPerGroup( int numberOfPartitions ) { @Override - public boolean validatePartitionGroupSetup( List> partitionGroupQualifiers, long numPartitionGroups, List partitionGroupNames, CatalogColumn partitionColumn ) { + public boolean validatePartitionGroupSetup( List> partitionGroupQualifiers, long numPartitionGroups, List partitionGroupNames, LogicalColumn partitionColumn ) { super.validatePartitionGroupSetup( partitionGroupQualifiers, numPartitionGroups, partitionGroupNames, partitionColumn ); return true; diff --git a/dbms/src/main/java/org/polypheny/db/processing/ConstraintEnforceAttacher.java b/dbms/src/main/java/org/polypheny/db/processing/ConstraintEnforceAttacher.java index a3409ef187..83ea071f9b 100644 --- a/dbms/src/main/java/org/polypheny/db/processing/ConstraintEnforceAttacher.java +++ b/dbms/src/main/java/org/polypheny/db/processing/ConstraintEnforceAttacher.java @@ -51,11 +51,11 @@ import org.polypheny.db.algebra.logical.relational.LogicalValues; import org.polypheny.db.algebra.operators.OperatorName; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.entity.CatalogColumn; import org.polypheny.db.catalog.entity.CatalogConstraint; import org.polypheny.db.catalog.entity.CatalogForeignKey; import org.polypheny.db.catalog.entity.CatalogKey.EnforcementTime; import org.polypheny.db.catalog.entity.CatalogPrimaryKey; +import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.exceptions.GenericCatalogException; import org.polypheny.db.catalog.exceptions.UnknownColumnException; @@ -486,7 +486,7 @@ public static AlgRoot enforceConstraintBeforeQuery( AlgRoot logicalRoot, Stateme for ( int i = 0; i < foreignKey.columnIds.size(); ++i ) { final String columnName = foreignKey.getColumnNames().get( i ); final String foreignColumnName = foreignKey.getReferencedKeyColumnNames().get( i ); - final CatalogColumn foreignColumn; + final LogicalColumn foreignColumn; try { foreignColumn = Catalog.getInstance().getColumn( foreignTable.id, foreignColumnName ); } catch ( UnknownColumnException e ) { @@ -561,7 +561,7 @@ public RexNode visitFieldAccess( RexFieldAccess fieldAccess ) { for ( int i = 0; i < foreignKey.columnIds.size(); ++i ) { final String columnName = foreignKey.getReferencedKeyColumnNames().get( i ); final String foreignColumnName = foreignKey.getColumnNames().get( i ); - final CatalogColumn column, foreignColumn; + final LogicalColumn column, foreignColumn; try { column = Catalog.getInstance().getColumn( table.id, columnName ); foreignColumn = Catalog.getInstance().getColumn( foreignTable.id, foreignColumnName ); @@ -654,7 +654,7 @@ private boolean testConstraintsValid() { try { List tables = Catalog .getInstance() - .getTables( null, null, null ) + .getTables( null, null ) .stream() .filter( t -> t.entityType == EntityType.ENTITY && t.getNamespaceType() == NamespaceType.RELATIONAL ) .collect( Collectors.toList() ); diff --git a/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java b/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java index 0951eb635e..f7103e818f 100644 --- a/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java +++ b/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java @@ -49,9 +49,9 @@ import org.polypheny.db.algebra.type.AlgRecordType; import 
org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogAdapter; -import org.polypheny.db.catalog.entity.CatalogColumn; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; import org.polypheny.db.catalog.entity.CatalogPrimaryKey; +import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.entity.physical.PhysicalTable; @@ -163,23 +163,23 @@ private static LogicalLpgValues getLogicalLpgValues( AlgBuilder builder, PolyGra @Override - public void copyData( Transaction transaction, CatalogAdapter store, List columns, List partitionIds ) { + public void copyData( Transaction transaction, CatalogAdapter store, List columns, List partitionIds ) { LogicalTable table = Catalog.getInstance().getTable( columns.get( 0 ).tableId ); CatalogPrimaryKey primaryKey = Catalog.getInstance().getPrimaryKey( table.primaryKey ); // Check Lists List targetColumnPlacements = new LinkedList<>(); - for ( CatalogColumn catalogColumn : columns ) { - targetColumnPlacements.add( Catalog.getInstance().getColumnPlacement( store.id, catalogColumn.id ) ); + for ( LogicalColumn logicalColumn : columns ) { + targetColumnPlacements.add( Catalog.getInstance().getColumnPlacement( store.id, logicalColumn.id ) ); } - List selectColumnList = new LinkedList<>( columns ); + List selectColumnList = new LinkedList<>( columns ); // Add primary keys to select column list for ( long cid : primaryKey.columnIds ) { - CatalogColumn catalogColumn = Catalog.getInstance().getColumn( cid ); - if ( !selectColumnList.contains( catalogColumn ) ) { - selectColumnList.add( catalogColumn ); + LogicalColumn logicalColumn = Catalog.getInstance().getColumn( cid ); + if ( !selectColumnList.contains( logicalColumn ) ) { + selectColumnList.add( logicalColumn ); } } @@ -218,7 +218,7 @@ public void copyData( Transaction transaction, CatalogAdapter store, List selectColumnList, AlgRoot sourceAlg, Statement sourceStatement, Statement targetStatement, AlgRoot targetAlg, boolean isMaterializedView, boolean doesSubstituteOrderBy ) { + public void executeQuery( List selectColumnList, AlgRoot sourceAlg, Statement sourceStatement, Statement targetStatement, AlgRoot targetAlg, boolean isMaterializedView, boolean doesSubstituteOrderBy ) { try { PolyImplementation result; if ( isMaterializedView ) { @@ -241,20 +241,20 @@ public void executeQuery( List selectColumnList, AlgRoot sourceAl Iterator sourceIterator = enumerable.iterator(); Map resultColMapping = new HashMap<>(); - for ( CatalogColumn catalogColumn : selectColumnList ) { + for ( LogicalColumn logicalColumn : selectColumnList ) { int i = 0; for ( AlgDataTypeField metaData : result.getRowType().getFieldList() ) { - if ( metaData.getName().equalsIgnoreCase( catalogColumn.name ) ) { - resultColMapping.put( catalogColumn.id, i ); + if ( metaData.getName().equalsIgnoreCase( logicalColumn.name ) ) { + resultColMapping.put( logicalColumn.id, i ); } i++; } } if ( isMaterializedView ) { - for ( CatalogColumn catalogColumn : selectColumnList ) { - if ( !resultColMapping.containsKey( catalogColumn.id ) ) { + for ( LogicalColumn logicalColumn : selectColumnList ) { + if ( !resultColMapping.containsKey( logicalColumn.id ) ) { int i = resultColMapping.values().stream().mapToInt( v -> v ).max().orElseThrow( NoSuchElementException::new ); - resultColMapping.put( catalogColumn.id, i + 1 ); + resultColMapping.put( logicalColumn.id, i + 1 ); 
} } } @@ -332,9 +332,9 @@ public AlgRoot buildDeleteStatement( Statement statement, List columnNames = new LinkedList<>(); List values = new LinkedList<>(); for ( CatalogColumnPlacement ccp : to ) { - CatalogColumn catalogColumn = Catalog.getInstance().getColumn( ccp.columnId ); + LogicalColumn logicalColumn = Catalog.getInstance().getColumn( ccp.columnId ); columnNames.add( ccp.getLogicalColumnName() ); - values.add( new RexDynamicParam( catalogColumn.getAlgDataType( typeFactory ), (int) catalogColumn.id ) ); + values.add( new RexDynamicParam( logicalColumn.getAlgDataType( typeFactory ), (int) logicalColumn.id ) ); } AlgBuilder builder = AlgBuilder.create( statement, cluster ); builder.push( LogicalValues.createOneRow( cluster ) ); @@ -377,9 +377,9 @@ public AlgRoot buildInsertStatement( Statement statement, List columnNames = new LinkedList<>(); List values = new LinkedList<>(); for ( CatalogColumnPlacement ccp : placements ) { - CatalogColumn catalogColumn = Catalog.getInstance().getColumn( ccp.columnId ); + LogicalColumn logicalColumn = Catalog.getInstance().getColumn( ccp.columnId ); columnNames.add( ccp.getLogicalColumnName() ); - values.add( new RexDynamicParam( catalogColumn.getAlgDataType( typeFactory ), (int) catalogColumn.id ) ); + values.add( new RexDynamicParam( logicalColumn.getAlgDataType( typeFactory ), (int) logicalColumn.id ) ); } AlgBuilder builder = AlgBuilder.create( statement, cluster ); builder.push( LogicalValues.createOneRow( cluster ) ); @@ -422,10 +422,10 @@ private AlgRoot buildUpdateStatement( Statement statement, List columnNames = new LinkedList<>(); List values = new LinkedList<>(); for ( CatalogColumnPlacement ccp : to ) { - CatalogColumn catalogColumn = Catalog.getInstance().getColumn( ccp.columnId ); + LogicalColumn logicalColumn = Catalog.getInstance().getColumn( ccp.columnId ); columnNames.add( ccp.getLogicalColumnName() ); - values.add( new RexDynamicParam( catalogColumn.getAlgDataType( typeFactory ), (int) catalogColumn.id ) ); + values.add( new RexDynamicParam( logicalColumn.getAlgDataType( typeFactory ), (int) logicalColumn.id ) ); } builder.projectPlus( values ); @@ -477,7 +477,7 @@ public AlgRoot getSourceIterator( Statement statement, Map selectSourcePlacements( LogicalTable table, List columns, int excludingAdapterId ) { + public static List selectSourcePlacements( LogicalTable table, List columns, int excludingAdapterId ) { // Find the adapter with the most column placements Catalog catalog = Catalog.getInstance(); int adapterIdWithMostPlacements = -1; @@ -490,8 +490,8 @@ public static List selectSourcePlacements( LogicalTable } List columnIds = new LinkedList<>(); - for ( CatalogColumn catalogColumn : columns ) { - columnIds.add( catalogColumn.id ); + for ( LogicalColumn logicalColumn : columns ) { + columnIds.add( logicalColumn.id ); } // Take the adapter with most placements as base and add missing column placements @@ -528,22 +528,22 @@ public static List selectSourcePlacements( LogicalTable * @param targetPartitionIds Target Partitions where data should be inserted */ @Override - public void copySelectiveData( Transaction transaction, CatalogAdapter store, LogicalTable sourceTable, LogicalTable targetTable, List columns, Map> placementDistribution, List targetPartitionIds ) { + public void copySelectiveData( Transaction transaction, CatalogAdapter store, LogicalTable sourceTable, LogicalTable targetTable, List columns, Map> placementDistribution, List targetPartitionIds ) { CatalogPrimaryKey sourcePrimaryKey = 
Catalog.getInstance().getPrimaryKey( sourceTable.primaryKey ); // Check Lists List targetColumnPlacements = new LinkedList<>(); - for ( CatalogColumn catalogColumn : columns ) { - targetColumnPlacements.add( Catalog.getInstance().getColumnPlacement( store.id, catalogColumn.id ) ); + for ( LogicalColumn logicalColumn : columns ) { + targetColumnPlacements.add( Catalog.getInstance().getColumnPlacement( store.id, logicalColumn.id ) ); } - List selectColumnList = new LinkedList<>( columns ); + List selectColumnList = new LinkedList<>( columns ); // Add primary keys to select column list for ( long cid : sourcePrimaryKey.columnIds ) { - CatalogColumn catalogColumn = Catalog.getInstance().getColumn( cid ); - if ( !selectColumnList.contains( catalogColumn ) ) { - selectColumnList.add( catalogColumn ); + LogicalColumn logicalColumn = Catalog.getInstance().getColumn( cid ); + if ( !selectColumnList.contains( logicalColumn ) ) { + selectColumnList.add( logicalColumn ); } } @@ -568,11 +568,11 @@ public void copySelectiveData( Transaction transaction, CatalogAdapter store, Lo Iterator sourceIterator = enumerable.iterator(); Map resultColMapping = new HashMap<>(); - for ( CatalogColumn catalogColumn : selectColumnList ) { + for ( LogicalColumn logicalColumn : selectColumnList ) { int i = 0; for ( AlgDataTypeField metaData : result.getRowType().getFieldList() ) { - if ( metaData.getName().equalsIgnoreCase( catalogColumn.name ) ) { - resultColMapping.put( catalogColumn.id, i ); + if ( metaData.getName().equalsIgnoreCase( logicalColumn.name ) ) { + resultColMapping.put( logicalColumn.id, i ); } i++; } @@ -626,7 +626,7 @@ public void copySelectiveData( Transaction transaction, CatalogAdapter store, Lo * @param targetPartitionIds Target Partitions where data should be inserted */ @Override - public void copyPartitionData( Transaction transaction, CatalogAdapter store, LogicalTable sourceTable, LogicalTable targetTable, List columns, List sourcePartitionIds, List targetPartitionIds ) { + public void copyPartitionData( Transaction transaction, CatalogAdapter store, LogicalTable sourceTable, LogicalTable targetTable, List columns, List sourcePartitionIds, List targetPartitionIds ) { if ( sourceTable.id != targetTable.id ) { throw new RuntimeException( "Unsupported migration scenario. 
Table ID mismatch" ); } @@ -635,23 +635,23 @@ public void copyPartitionData( Transaction transaction, CatalogAdapter store, Lo // Check Lists List targetColumnPlacements = new LinkedList<>(); - for ( CatalogColumn catalogColumn : columns ) { - targetColumnPlacements.add( Catalog.getInstance().getColumnPlacement( store.id, catalogColumn.id ) ); + for ( LogicalColumn logicalColumn : columns ) { + targetColumnPlacements.add( Catalog.getInstance().getColumnPlacement( store.id, logicalColumn.id ) ); } - List selectColumnList = new LinkedList<>( columns ); + List selectColumnList = new LinkedList<>( columns ); // Add primary keys to select column list for ( long cid : primaryKey.columnIds ) { - CatalogColumn catalogColumn = Catalog.getInstance().getColumn( cid ); - if ( !selectColumnList.contains( catalogColumn ) ) { - selectColumnList.add( catalogColumn ); + LogicalColumn logicalColumn = Catalog.getInstance().getColumn( cid ); + if ( !selectColumnList.contains( logicalColumn ) ) { + selectColumnList.add( logicalColumn ); } } // Add partition columns to select column list long partitionColumnId = targetTable.partitionProperty.partitionColumnId; - CatalogColumn partitionColumn = Catalog.getInstance().getColumn( partitionColumnId ); + LogicalColumn partitionColumn = Catalog.getInstance().getColumn( partitionColumnId ); if ( !selectColumnList.contains( partitionColumn ) ) { selectColumnList.add( partitionColumn ); } @@ -691,11 +691,11 @@ public void copyPartitionData( Transaction transaction, CatalogAdapter store, Lo Iterator sourceIterator = (Iterator) enumerable.iterator(); Map resultColMapping = new HashMap<>(); - for ( CatalogColumn catalogColumn : selectColumnList ) { + for ( LogicalColumn logicalColumn : selectColumnList ) { int i = 0; for ( AlgDataTypeField metaData : result.getRowType().getFieldList() ) { - if ( metaData.getName().equalsIgnoreCase( catalogColumn.name ) ) { - resultColMapping.put( catalogColumn.id, i ); + if ( metaData.getName().equalsIgnoreCase( logicalColumn.name ) ) { + resultColMapping.put( logicalColumn.id, i ); } i++; } diff --git a/dbms/src/main/java/org/polypheny/db/processing/shuttles/QueryParameterizer.java b/dbms/src/main/java/org/polypheny/db/processing/shuttles/QueryParameterizer.java index 13b14f51c3..061e13e759 100644 --- a/dbms/src/main/java/org/polypheny/db/processing/shuttles/QueryParameterizer.java +++ b/dbms/src/main/java/org/polypheny/db/processing/shuttles/QueryParameterizer.java @@ -420,7 +420,6 @@ public AlgNode visit( LogicalDocumentModify initial ) { return new LogicalDocumentModify( modify.getTraitSet(), modify.getEntity(), - modify.getCatalogReader(), input, modify.operation, modify.getKeys(), diff --git a/dbms/src/main/java/org/polypheny/db/routing/UiRoutingPageUtil.java b/dbms/src/main/java/org/polypheny/db/routing/UiRoutingPageUtil.java index 5fb864d657..60229a88a7 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/UiRoutingPageUtil.java +++ b/dbms/src/main/java/org/polypheny/db/routing/UiRoutingPageUtil.java @@ -23,11 +23,11 @@ import org.polypheny.db.algebra.constant.ExplainFormat; import org.polypheny.db.algebra.constant.ExplainLevel; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.entity.CatalogColumn; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; import org.polypheny.db.catalog.entity.CatalogPartition; import org.polypheny.db.catalog.entity.CatalogPartitionGroup; import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; +import org.polypheny.db.catalog.entity.logical.LogicalColumn; 
import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.information.InformationGroup; import org.polypheny.db.information.InformationManager; @@ -97,10 +97,10 @@ private static void addSelectedAdapterTable( InformationManager queryAnalyzer, P v.forEach( p -> { CatalogColumnPlacement catalogColumnPlacement = Catalog.getInstance().getColumnPlacement( p.left, p.right ); CatalogPartitionPlacement catalogPartitionPlacement = Catalog.getInstance().getPartitionPlacement( p.left, k ); - CatalogColumn catalogColumn = Catalog.getInstance().getColumn( catalogColumnPlacement.columnId ); + LogicalColumn logicalColumn = Catalog.getInstance().getColumn( catalogColumnPlacement.columnId ); table.addRow( catalogTable.getNamespaceName() + "." + catalogTable.name, - catalogColumn.name, + logicalColumn.name, catalogPartitionGroup.partitionGroupName + " --> " + catalogPartition.id, catalogPartitionPlacement.adapterUniqueName, catalogColumnPlacement.physicalSchemaName + "." + catalogPartitionPlacement.physicalTableName + "." + catalogColumnPlacement.physicalColumnName ); diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java b/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java index 7ac61b488d..35ddd42164 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java @@ -57,7 +57,6 @@ import org.polypheny.db.catalog.Snapshot; import org.polypheny.db.catalog.entity.CatalogAdapter; import org.polypheny.db.catalog.entity.CatalogCollectionPlacement; -import org.polypheny.db.catalog.entity.CatalogColumn; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.catalog.entity.CatalogGraphMapping; @@ -65,6 +64,7 @@ import org.polypheny.db.catalog.entity.CatalogNamespace; import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; import org.polypheny.db.catalog.entity.logical.LogicalCollection; +import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.entity.physical.PhysicalGraph; @@ -307,14 +307,14 @@ public AlgNode buildJoinedScan( Statement statement, AlgOptCluster cluster, Map< // Get primary key long pkid = catalog.getTable( currentPlacements.get( 0 ).tableId ).primaryKey; List pkColumnIds = catalog.getPrimaryKey( pkid ).columnIds; - List pkColumns = new LinkedList<>(); + List pkColumns = new LinkedList<>(); for ( long pkColumnId : pkColumnIds ) { pkColumns.add( catalog.getColumn( pkColumnId ) ); } // Add primary key for ( Entry> entry : placementsByAdapter.entrySet() ) { - for ( CatalogColumn pkColumn : pkColumns ) { + for ( LogicalColumn pkColumn : pkColumns ) { CatalogColumnPlacement pkPlacement = catalog.getColumnPlacement( entry.getKey(), pkColumn.id ); if ( !entry.getValue().contains( pkPlacement ) ) { entry.getValue().add( pkPlacement ); @@ -400,12 +400,12 @@ public AlgNode buildJoinedScan( Statement statement, AlgOptCluster cluster, Map< private void buildFinalProject( RoutedAlgBuilder builder, List currentPlacements ) { List rexNodes = new ArrayList<>(); - List placementList = currentPlacements.stream() + List placementList = currentPlacements.stream() .map( col -> catalog.getColumn( col.columnId ) ) .sorted( Comparator.comparingInt( col -> col.position ) ) .collect( Collectors.toList() ); - for ( 
CatalogColumn catalogColumn : placementList ) { - rexNodes.add( builder.field( catalogColumn.name ) ); + for ( LogicalColumn logicalColumn : placementList ) { + rexNodes.add( builder.field( logicalColumn.name ) ); } builder.project( rexNodes ); } @@ -459,7 +459,7 @@ public AlgNode handleGraphScan( LogicalLpgScan alg, Statement statement, @Nullab private AlgNode handleGraphOnRelational( LogicalLpgScan alg, CatalogNamespace namespace, Statement statement, Integer placementId ) { AlgOptCluster cluster = alg.getCluster(); - List tables = catalog.getTables( Catalog.defaultDatabaseId, new Pattern( namespace.name ), null ); + List tables = catalog.getTables( new Pattern( namespace.name ), null ); List> scans = tables.stream() .map( t -> Pair.of( t.name, buildJoinedScan( statement, cluster, selectPlacement( t ) ) ) ) .collect( Collectors.toList() ); @@ -597,7 +597,7 @@ private RoutedAlgBuilder handleTransformerDocScan( DocumentScan alg, Statemen @NotNull private RoutedAlgBuilder handleDocumentOnRelational( DocumentScan node, Integer adapterId, Statement statement, RoutedAlgBuilder builder ) { - List columns = catalog.getColumns( node.entity.id ); + List columns = catalog.getColumns( node.entity.id ); AlgTraitSet out = node.getTraitSet().replace( ModelTrait.RELATIONAL ); CatalogEntity subTable = getSubstitutionTable( statement, node.entity.id, columns.get( 0 ).id, adapterId ); builder.scan( subTable ); diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java b/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java index fafa3d8794..7b92fe7c4c 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java @@ -82,13 +82,13 @@ import org.polypheny.db.catalog.entity.CatalogAdapter; import org.polypheny.db.catalog.entity.CatalogCollectionMapping; import org.polypheny.db.catalog.entity.CatalogCollectionPlacement; -import org.polypheny.db.catalog.entity.CatalogColumn; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.catalog.entity.CatalogGraphMapping; import org.polypheny.db.catalog.entity.CatalogGraphPlacement; import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; import org.polypheny.db.catalog.entity.logical.LogicalCollection; +import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.entity.physical.PhysicalCollection; @@ -101,8 +101,6 @@ import org.polypheny.db.partition.PartitionManager; import org.polypheny.db.partition.PartitionManagerFactory; import org.polypheny.db.plan.AlgOptCluster; -import org.polypheny.db.prepare.PolyphenyDbCatalogReader; -import org.polypheny.db.prepare.Prepare.CatalogReader; import org.polypheny.db.processing.WhereClauseVisitor; import org.polypheny.db.rex.RexBuilder; import org.polypheny.db.rex.RexCall; @@ -154,7 +152,7 @@ public AlgNode routeDml( LogicalRelModify modify, Statement statement ) { long pkid = catalogTable.primaryKey; List pkColumnIds = catalog.getPrimaryKey( pkid ).columnIds; - CatalogColumn pkColumn = catalog.getColumn( pkColumnIds.get( 0 ) ); + LogicalColumn pkColumn = catalog.getColumn( pkColumnIds.get( 0 ) ); // Essentially gets a list of all stores where this table resides List pkPlacements = catalog.getColumnPlacement( pkColumn.id ); @@ -203,8 +201,8 @@ 
public AlgNode routeDml( LogicalRelModify modify, Statement statement ) { String columnName = updateColumnListIterator.next(); sourceExpressionListIterator.next(); try { - CatalogColumn catalogColumn = catalog.getColumn( catalogTable.id, columnName ); - if ( !catalog.checkIfExistsColumnPlacement( pkPlacement.adapterId, catalogColumn.id ) ) { + LogicalColumn logicalColumn = catalog.getColumn( catalogTable.id, columnName ); + if ( !catalog.checkIfExistsColumnPlacement( pkPlacement.adapterId, logicalColumn.id ) ) { updateColumnListIterator.remove(); sourceExpressionListIterator.remove(); } @@ -1341,7 +1339,7 @@ private AlgBuilder handleSelectFromOtherTable( RoutedAlgBuilder builder, Logical long pkid = fromTable.primaryKey; List pkColumnIds = catalog.getPrimaryKey( pkid ).columnIds; - CatalogColumn pkColumn = catalog.getColumn( pkColumnIds.get( 0 ) ); + LogicalColumn pkColumn = catalog.getColumn( pkColumnIds.get( 0 ) ); List pkPlacements = catalog.getColumnPlacement( pkColumn.id ); List nodes = new ArrayList<>(); @@ -1377,7 +1375,7 @@ private void dmlConditionCheck( LogicalFilter node, LogicalTable catalogTable, L if ( operand instanceof RexInputRef ) { int index = ((RexInputRef) operand).getIndex(); AlgDataTypeField field = node.getInput().getRowType().getFieldList().get( index ); - CatalogColumn column; + LogicalColumn column; try { String columnName; String[] columnNames = field.getName().split( "\\." ); diff --git a/dbms/src/main/java/org/polypheny/db/routing/strategies/CreateAllPlacementStrategy.java b/dbms/src/main/java/org/polypheny/db/routing/strategies/CreateAllPlacementStrategy.java index 4f04b7089a..e5e9911d74 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/strategies/CreateAllPlacementStrategy.java +++ b/dbms/src/main/java/org/polypheny/db/routing/strategies/CreateAllPlacementStrategy.java @@ -21,7 +21,7 @@ import org.polypheny.db.adapter.AdapterManager; import org.polypheny.db.adapter.DataStore; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.entity.CatalogColumn; +import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalTable; @@ -31,7 +31,7 @@ public class CreateAllPlacementStrategy implements CreatePlacementStrategy { @Override - public List getDataStoresForNewColumn( CatalogColumn addedColumn ) { + public List getDataStoresForNewColumn( LogicalColumn addedColumn ) { LogicalTable catalogTable = Catalog.getInstance().getTable( addedColumn.tableId ); return catalogTable.dataPlacements.stream() .map( elem -> AdapterManager.getInstance().getStore( elem ) ) diff --git a/dbms/src/main/java/org/polypheny/db/routing/strategies/CreatePlacementStrategy.java b/dbms/src/main/java/org/polypheny/db/routing/strategies/CreatePlacementStrategy.java index 819f575a80..1969633648 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/strategies/CreatePlacementStrategy.java +++ b/dbms/src/main/java/org/polypheny/db/routing/strategies/CreatePlacementStrategy.java @@ -18,7 +18,7 @@ import java.util.List; import org.polypheny.db.adapter.DataStore; -import org.polypheny.db.catalog.entity.CatalogColumn; +import org.polypheny.db.catalog.entity.logical.LogicalColumn; /** @@ -26,7 +26,7 @@ */ public interface CreatePlacementStrategy { - List getDataStoresForNewColumn( CatalogColumn addedColumn ); + List getDataStoresForNewColumn( LogicalColumn addedColumn ); List getDataStoresForNewTable(); diff --git a/dbms/src/main/java/org/polypheny/db/routing/strategies/CreateSinglePlacementStrategy.java 
b/dbms/src/main/java/org/polypheny/db/routing/strategies/CreateSinglePlacementStrategy.java
index a7c5ff21d7..efc70f25c4 100644
--- a/dbms/src/main/java/org/polypheny/db/routing/strategies/CreateSinglePlacementStrategy.java
+++ b/dbms/src/main/java/org/polypheny/db/routing/strategies/CreateSinglePlacementStrategy.java
@@ -22,14 +22,14 @@ import org.polypheny.db.adapter.AdapterManager;
 import org.polypheny.db.adapter.DataStore;
 import org.polypheny.db.catalog.Catalog;
-import org.polypheny.db.catalog.entity.CatalogColumn;
+import org.polypheny.db.catalog.entity.logical.LogicalColumn;
 import org.polypheny.db.catalog.entity.logical.LogicalTable;
 
 
 public class CreateSinglePlacementStrategy implements CreatePlacementStrategy {
 
     @Override
-    public List<DataStore> getDataStoresForNewColumn( CatalogColumn addedColumn ) {
+    public List<DataStore> getDataStoresForNewColumn( LogicalColumn addedColumn ) {
         LogicalTable catalogTable = Catalog.getInstance().getTable( addedColumn.tableId );
         return ImmutableList.of( AdapterManager.getInstance().getStore( catalogTable.dataPlacements.get( 0 ) ) );
     }
diff --git a/dbms/src/main/java/org/polypheny/db/view/MaterializedViewManagerImpl.java b/dbms/src/main/java/org/polypheny/db/view/MaterializedViewManagerImpl.java
index b6ef4847a3..12af517d90 100644
--- a/dbms/src/main/java/org/polypheny/db/view/MaterializedViewManagerImpl.java
+++ b/dbms/src/main/java/org/polypheny/db/view/MaterializedViewManagerImpl.java
@@ -41,11 +41,11 @@ import org.polypheny.db.algebra.constant.Kind;
 import org.polypheny.db.algebra.logical.relational.LogicalRelViewScan;
 import org.polypheny.db.catalog.Catalog;
-import org.polypheny.db.catalog.entity.CatalogColumn;
 import org.polypheny.db.catalog.entity.CatalogColumnPlacement;
 import org.polypheny.db.catalog.entity.CatalogMaterializedView;
 import org.polypheny.db.catalog.entity.MaterializedCriteria;
 import org.polypheny.db.catalog.entity.MaterializedCriteria.CriteriaType;
+import org.polypheny.db.catalog.entity.logical.LogicalColumn;
 import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.catalog.exceptions.GenericCatalogException;
 import org.polypheny.db.catalog.exceptions.UnknownDatabaseException;
@@ -178,13 +178,15 @@ public synchronized void addMaterializedInfo( Long materializedId, MaterializedC
     public void addTables( Transaction transaction, List<String> tableNames ) {
         if ( tableNames.size() > 1 ) {
             try {
-                LogicalTable catalogTable = Catalog.getInstance().getTable( 1, tableNames.get( 0 ), tableNames.get( 1 ) );
+                LogicalTable catalogTable = Catalog.getInstance().getTable( tableNames.get( 0 ), tableNames.get( 1 ) );
                 long id = catalogTable.id;
                 if ( !catalogTable.getConnectedViews().isEmpty() ) {
                     updateCandidates.put( transaction.getXid(), id );
                 }
             } catch ( UnknownTableException e ) {
                 throw new RuntimeException( "Not possible to getLogicalTable to update which Tables were changed.", e );
+            } catch ( UnknownSchemaException e ) {
+                throw new RuntimeException( e );
             }
         }
     }
@@ -306,7 +308,7 @@ public void prepareToUpdate( Long materializedId ) {
      * Is used if a materialized view is created in order to add the data from the underlying tables to the materialized view
      */
     @Override
-    public void addData( Transaction transaction, List<DataStore> stores, Map<Integer, List<CatalogColumn>> columns, AlgRoot algRoot, CatalogMaterializedView materializedView ) {
+    public void addData( Transaction transaction, List<DataStore> stores, Map<Integer, List<LogicalColumn>> columns, AlgRoot algRoot, CatalogMaterializedView materializedView ) {
         addMaterializedInfo( materializedView.id, materializedView.getMaterializedCriteria() );
         List<CatalogColumnPlacement> columnPlacements = new LinkedList<>();
@@ -340,21 +342,21 @@ public void updateData( Transaction transaction, Long materializedId ) {
         DataMigrator dataMigrator = transaction.getDataMigrator();
         List<CatalogColumnPlacement> columnPlacements = new LinkedList<>();
-        Map<Integer, List<CatalogColumn>> columns = new HashMap<>();
+        Map<Integer, List<LogicalColumn>> columns = new HashMap<>();
         List<Integer> ids = new ArrayList<>();
         if ( catalog.checkIfExistsEntity( materializedId ) && materializedInfo.containsKey( materializedId ) ) {
             CatalogMaterializedView catalogMaterializedView = (CatalogMaterializedView) catalog.getTable( materializedId );
             for ( int id : catalogMaterializedView.dataPlacements ) {
                 ids.add( id );
-                List<CatalogColumn> catalogColumns = new ArrayList<>();
+                List<LogicalColumn> logicalColumns = new ArrayList<>();
                 int localAdapterIndex = catalogMaterializedView.dataPlacements.indexOf( id );
                 catalog.getDataPlacement( catalogMaterializedView.dataPlacements.get( localAdapterIndex ), catalogMaterializedView.id )
                         .columnPlacementsOnAdapter.forEach( col ->
-                        catalogColumns.add( catalog.getColumn( col ) )
+                        logicalColumns.add( catalog.getColumn( col ) )
                 );
-                columns.put( id, catalogColumns );
+                columns.put( id, logicalColumns );
             }
             AlgRoot targetRel;
diff --git a/dbms/src/test/java/org/polypheny/db/cypher/DdlTest.java b/dbms/src/test/java/org/polypheny/db/cypher/DdlTest.java
index 38841f10d9..2a020d15a0 100644
--- a/dbms/src/test/java/org/polypheny/db/cypher/DdlTest.java
+++ b/dbms/src/test/java/org/polypheny/db/cypher/DdlTest.java
@@ -26,8 +26,8 @@ import org.polypheny.db.AdapterTestSuite;
 import org.polypheny.db.TestHelper.JdbcConnection;
 import org.polypheny.db.catalog.Catalog;
-import org.polypheny.db.catalog.logistic.Pattern;
 import org.polypheny.db.catalog.entity.logical.LogicalGraph;
+import org.polypheny.db.catalog.logistic.Pattern;
 import org.polypheny.db.excluded.CassandraExcluded;
 import org.polypheny.db.webui.models.Result;
 
@@ -43,17 +43,17 @@ public void addCollectionTest() {
 
         execute( "CREATE DATABASE " + graphName );
 
-        LogicalGraph graph = catalog.getGraphs( Catalog.defaultDatabaseId, new Pattern( graphName ) ).get( 0 );
+        LogicalGraph graph = catalog.getGraphs( new Pattern( graphName ) ).get( 0 );
 
-        assertEquals( 1, catalog.getGraphs( graph.databaseId, new Pattern( graphName ) ).size() );
+        assertEquals( 1, catalog.getGraphs( new Pattern( graphName ) ).size() );
 
         execute( "DROP DATABASE " + graphName );
 
-        assertEquals( 0, catalog.getGraphs( graph.databaseId, new Pattern( graphName ) ).size() );
+        assertEquals( 0, catalog.getGraphs( new Pattern( graphName ) ).size() );
 
         execute( "CREATE DATABASE " + graphName );
 
-        assertEquals( 1, catalog.getGraphs( graph.databaseId, new Pattern( graphName ) ).size() );
+        assertEquals( 1, catalog.getGraphs( new Pattern( graphName ) ).size() );
 
         execute( "DROP DATABASE " + graphName );
     }
@@ -65,7 +65,7 @@ public void addPlacementTest() throws SQLException {
         try {
             execute( "CREATE DATABASE " + graphName );
 
-            LogicalGraph graph = catalog.getGraphs( Catalog.defaultDatabaseId, new Pattern( graphName ) ).get( 0 );
+            LogicalGraph graph = catalog.getGraphs( new Pattern( graphName ) ).get( 0 );
 
             assertEquals( 1, graph.placements.size() );
 
@@ -73,7 +73,7 @@ public void addPlacementTest() throws SQLException {
 
             execute( String.format( "CREATE PLACEMENT OF %s ON STORE %s", graphName, "store1" ), graphName );
 
-            graph = catalog.getGraphs( Catalog.defaultDatabaseId, new Pattern( graphName ) ).get( 0 );
+            graph = catalog.getGraphs( new Pattern( graphName ) ).get( 0 );
 
             assertEquals( 2, graph.placements.size() );
 
@@ -95,13 +95,13 @@ public void initialPlacementTest() throws
SQLException { execute( String.format( "CREATE DATABASE %s ON STORE %s", graphName, "store1" ) ); - LogicalGraph graph = catalog.getGraphs( Catalog.defaultDatabaseId, new Pattern( graphName ) ).get( 0 ); + LogicalGraph graph = catalog.getGraphs( new Pattern( graphName ) ).get( 0 ); assertEquals( 1, graph.placements.size() ); execute( String.format( "CREATE PLACEMENT OF %s ON STORE %s", graphName, "hsqldb" ), graphName ); - graph = catalog.getGraphs( Catalog.defaultDatabaseId, new Pattern( graphName ) ).get( 0 ); + graph = catalog.getGraphs( new Pattern( graphName ) ).get( 0 ); assertEquals( 2, graph.placements.size() ); @@ -122,7 +122,7 @@ public void deletePlacementTest() throws SQLException { execute( "CREATE DATABASE " + graphName ); - LogicalGraph graph = catalog.getGraphs( Catalog.defaultDatabaseId, new Pattern( graphName ) ).get( 0 ); + LogicalGraph graph = catalog.getGraphs( new Pattern( graphName ) ).get( 0 ); assertEquals( 1, graph.placements.size() ); @@ -130,7 +130,7 @@ public void deletePlacementTest() throws SQLException { execute( String.format( "CREATE PLACEMENT OF %s ON STORE %s", graphName, "store1" ), graphName ); - graph = catalog.getGraphs( Catalog.defaultDatabaseId, new Pattern( graphName ) ).get( 0 ); + graph = catalog.getGraphs( new Pattern( graphName ) ).get( 0 ); assertEquals( 2, graph.placements.size() ); diff --git a/dbms/src/test/java/org/polypheny/db/misc/HorizontalPartitioningTest.java b/dbms/src/test/java/org/polypheny/db/misc/HorizontalPartitioningTest.java index 0e7f5a9d70..d8fc88ca51 100644 --- a/dbms/src/test/java/org/polypheny/db/misc/HorizontalPartitioningTest.java +++ b/dbms/src/test/java/org/polypheny/db/misc/HorizontalPartitioningTest.java @@ -34,12 +34,12 @@ import org.polypheny.db.TestHelper; import org.polypheny.db.TestHelper.JdbcConnection; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.logistic.PartitionType; -import org.polypheny.db.catalog.logistic.Pattern; import org.polypheny.db.catalog.entity.CatalogDataPlacement; import org.polypheny.db.catalog.entity.CatalogPartition; import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.catalog.logistic.PartitionType; +import org.polypheny.db.catalog.logistic.Pattern; import org.polypheny.db.config.Config; import org.polypheny.db.config.ConfigManager; import org.polypheny.db.excluded.CassandraExcluded; @@ -532,7 +532,7 @@ public void rangePartitioningTest() throws SQLException { + "( PARTITION parta VALUES(5,4), " + "PARTITION partb VALUES(10,6))" ); - LogicalTable table = Catalog.getInstance().getTables( null, null, new Pattern( "rangepartitioning3" ) ).get( 0 ); + LogicalTable table = Catalog.getInstance().getTables( null, new Pattern( "rangepartitioning3" ) ).get( 0 ); List catalogPartitions = Catalog.getInstance().getPartitionsByTable( table.id ); @@ -642,7 +642,7 @@ public void partitionPlacementTest() throws SQLException { + "WITH (foo, bar, foobar, barfoo) " ); try { - LogicalTable table = Catalog.getInstance().getTables( null, null, new Pattern( "physicalpartitiontest" ) ).get( 0 ); + LogicalTable table = Catalog.getInstance().getTables( null, new Pattern( "physicalpartitiontest" ) ).get( 0 ); // Check if sufficient PartitionPlacements have been created // Check if initially as many partitionPlacements are created as requested @@ -703,7 +703,7 @@ public void temperaturePartitionTest() throws SQLException { + " USING FREQUENCY write INTERVAL 10 minutes WITH 20 HASH 
PARTITIONS" ); try { - LogicalTable table = Catalog.getInstance().getTables( null, null, new Pattern( "temperaturetest" ) ).get( 0 ); + LogicalTable table = Catalog.getInstance().getTables( null, new Pattern( "temperaturetest" ) ).get( 0 ); // Check if partition properties are correctly set and parsed Assert.assertEquals( 600, ((TemperaturePartitionProperty) table.partitionProperty).getFrequencyInterval() ); @@ -761,7 +761,7 @@ public void temperaturePartitionTest() throws SQLException { // This should execute two DML INSERTS on the target PartitionId and therefore redistribute the data // Verify that the partition is now in HOT and was not before - LogicalTable updatedTable = Catalog.getInstance().getTables( null, null, new Pattern( "temperaturetest" ) ).get( 0 ); + LogicalTable updatedTable = Catalog.getInstance().getTables( null, new Pattern( "temperaturetest" ) ).get( 0 ); // Manually get the target partitionID of query PartitionManagerFactory partitionManagerFactory = PartitionManagerFactory.getInstance(); @@ -1176,7 +1176,7 @@ public void dataPlacementTest() throws SQLException { + "WITH (foo, bar, foobar, barfoo) " ); try { - LogicalTable table = Catalog.getInstance().getTables( null, null, new Pattern( "horizontaldataplacementtest" ) ).get( 0 ); + LogicalTable table = Catalog.getInstance().getTables( null, new Pattern( "horizontaldataplacementtest" ) ).get( 0 ); // Check if sufficient PartitionPlacements have been created // Check if initially as many DataPlacements are created as requested diff --git a/dbms/src/test/java/org/polypheny/db/misc/VerticalPartitioningTest.java b/dbms/src/test/java/org/polypheny/db/misc/VerticalPartitioningTest.java index d63185b2ae..d02a1527a2 100644 --- a/dbms/src/test/java/org/polypheny/db/misc/VerticalPartitioningTest.java +++ b/dbms/src/test/java/org/polypheny/db/misc/VerticalPartitioningTest.java @@ -31,9 +31,9 @@ import org.polypheny.db.TestHelper; import org.polypheny.db.TestHelper.JdbcConnection; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.logistic.Pattern; import org.polypheny.db.catalog.entity.CatalogDataPlacement; import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.catalog.logistic.Pattern; import org.polypheny.db.excluded.CassandraExcluded; @@ -167,7 +167,7 @@ public void dataPlacementTest() throws SQLException { + "PRIMARY KEY (tprimary) )" ); try { - LogicalTable table = Catalog.getInstance().getTables( null, null, new Pattern( "verticaldataplacementtest" ) ).get( 0 ); + LogicalTable table = Catalog.getInstance().getTables( null, new Pattern( "verticaldataplacementtest" ) ).get( 0 ); // Check if initially as many DataPlacements are created as requested (one for each store) Assert.assertEquals( 1, table.dataPlacements.size() ); @@ -310,7 +310,7 @@ public void dataDistributionTest() throws SQLException { + "PRIMARY KEY (tprimary) )" ); try { - LogicalTable table = Catalog.getInstance().getTables( null, null, new Pattern( "verticaldataplacementtest" ) ).get( 0 ); + LogicalTable table = Catalog.getInstance().getTables( null, new Pattern( "verticaldataplacementtest" ) ).get( 0 ); CatalogDataPlacement dataPlacement = Catalog.getInstance().getDataPlacement( table.dataPlacements.get( 0 ), table.id ); diff --git a/dbms/src/test/java/org/polypheny/db/statistics/StatisticsTest.java b/dbms/src/test/java/org/polypheny/db/statistics/StatisticsTest.java index 0c3b4a9e59..ca4b188e48 100644 --- a/dbms/src/test/java/org/polypheny/db/statistics/StatisticsTest.java +++ 
b/dbms/src/test/java/org/polypheny/db/statistics/StatisticsTest.java @@ -31,11 +31,11 @@ import org.polypheny.db.TestHelper; import org.polypheny.db.TestHelper.JdbcConnection; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.logistic.Pattern; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.exceptions.UnknownDatabaseException; import org.polypheny.db.catalog.exceptions.UnknownSchemaException; import org.polypheny.db.catalog.exceptions.UnknownTableException; +import org.polypheny.db.catalog.logistic.Pattern; @SuppressWarnings({ "SqlDialectInspection", "SqlNoDataSourceInspection" }) @@ -254,8 +254,8 @@ public void testSimpleRowCount() throws SQLException { ); waiter.await( 20, TimeUnit.SECONDS ); try { - LogicalTable catalogTableNation = Catalog.getInstance().getTable( "APP", "statisticschema", "nation" ); - LogicalTable catalogTableRegion = Catalog.getInstance().getTable( "APP", "statisticschema", "region" ); + LogicalTable catalogTableNation = Catalog.getInstance().getTable( "statisticschema", "nation" ); + LogicalTable catalogTableRegion = Catalog.getInstance().getTable( "statisticschema", "region" ); Integer rowCountNation = StatisticsManager.getInstance().rowCountPerTable( catalogTableNation.id ); Integer rowCountRegion = StatisticsManager.getInstance().rowCountPerTable( catalogTableRegion.id ); @@ -308,13 +308,13 @@ private void assertStatisticsConvertTo( int maxSeconds, int target ) { boolean inCatalog = true; while ( !successfull && count < maxSeconds ) { waiter.await( 1, TimeUnit.SECONDS ); - if ( Catalog.getInstance().getTables( new Pattern( "APP" ), new Pattern( "statisticschema" ), new Pattern( "nationdelete" ) ).size() != 1 ) { + if ( Catalog.getInstance().getTables( new Pattern( "statisticschema" ), new Pattern( "nationdelete" ) ).size() != 1 ) { count++; inCatalog = false; continue; } inCatalog = true; - LogicalTable catalogTableNation = Catalog.getInstance().getTable( "APP", "statisticschema", "nationdelete" ); + LogicalTable catalogTableNation = Catalog.getInstance().getTable( "statisticschema", "nationdelete" ); Integer rowCount = StatisticsManager.getInstance().rowCountPerTable( catalogTableNation.id ); // potentially table exists not yet in statistics but in catalog if ( rowCount != null && rowCount == target ) { diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/DashboardInformation.java b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/DashboardInformation.java index ae56932a4a..93ef793076 100644 --- a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/DashboardInformation.java +++ b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/DashboardInformation.java @@ -21,8 +21,8 @@ import lombok.Getter; import lombok.Setter; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.catalog.entity.CatalogAdapter.AdapterType; +import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.monitoring.core.MonitoringServiceProvider; import org.polypheny.db.monitoring.events.metrics.DmlDataPoint; import org.polypheny.db.monitoring.events.metrics.QueryDataPointImpl; @@ -83,7 +83,7 @@ public void updatePolyphenyStatistic() { catalog.getAdapters().forEach( v -> { this.availableAdapter.put( v.uniqueName, Pair.of( v.getAdapterTypeName(), v.type ) ); } ); - catalog.getSchemas( null, null ).forEach( v -> { + catalog.getSchemas( null ).forEach( v -> { availableSchemas.put( 
v.id, Pair.of( v.name, v.namespaceType ) ); } ); } diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/QueryResult.java b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/QueryResult.java index acf3d71997..c08d602c0b 100644 --- a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/QueryResult.java +++ b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/QueryResult.java @@ -19,8 +19,8 @@ import lombok.Data; import lombok.Getter; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.entity.CatalogColumn; import org.polypheny.db.catalog.entity.CatalogEntity; +import org.polypheny.db.catalog.entity.logical.LogicalColumn; /** @@ -32,16 +32,16 @@ class QueryResult { @Getter private final CatalogEntity entity; @Getter - private final CatalogColumn column; + private final LogicalColumn column; - QueryResult( CatalogEntity entity, CatalogColumn column ) { + QueryResult( CatalogEntity entity, LogicalColumn column ) { this.entity = entity; this.column = column; } - public static QueryResult fromCatalogColumn( CatalogColumn column ) { + public static QueryResult fromCatalogColumn( LogicalColumn column ) { return new QueryResult( Catalog.getInstance().getTable( column.tableId ), column ); } diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticQueryProcessor.java b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticQueryProcessor.java index 8bfe878967..2721c540a3 100644 --- a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticQueryProcessor.java +++ b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticQueryProcessor.java @@ -27,8 +27,8 @@ import org.polypheny.db.algebra.AlgRoot; import org.polypheny.db.algebra.constant.Kind; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.entity.CatalogColumn; import org.polypheny.db.catalog.entity.CatalogSchema; +import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.exceptions.UnknownDatabaseException; import org.polypheny.db.catalog.exceptions.UnknownSchemaException; @@ -96,9 +96,9 @@ public List> getSchemaTree() { List childTables = catalog.getTables( schema.id, null ); for ( LogicalTable childTable : childTables ) { List table = new ArrayList<>(); - List childColumns = catalog.getColumns( childTable.id ); - for ( CatalogColumn catalogColumn : childColumns ) { - table.add( schema.name + "." + childTable.name + "." + catalogColumn.name ); + List childColumns = catalog.getColumns( childTable.id ); + for ( LogicalColumn logicalColumn : childColumns ) { + table.add( schema.name + "." + childTable.name + "." 
                            + logicalColumn.name );
                 }
                 if ( childTable.entityType == EntityType.ENTITY ) {
                     tables.addAll( table );
@@ -118,8 +118,7 @@ public List<List<String>> getSchemaTree() {
      */
     public List<QueryResult> getAllColumns() {
         Catalog catalog = Catalog.getInstance();
-        List<CatalogColumn> catalogColumns = catalog.getColumns(
-                null,
+        List<LogicalColumn> logicalColumns = catalog.getColumns( null,
                 null,
                 null )
@@ -128,9 +127,9 @@ public List<QueryResult> getAllColumns() {
                 .collect( Collectors.toList() );
         List<QueryResult> allColumns = new ArrayList<>();
-        for ( CatalogColumn catalogColumn : catalogColumns ) {
-            if ( catalog.getTable( catalogColumn.tableId ).entityType != EntityType.VIEW ) {
-                allColumns.add( QueryResult.fromCatalogColumn( catalogColumn ) );
+        for ( LogicalColumn logicalColumn : logicalColumns ) {
+            if ( catalog.getTable( logicalColumn.tableId ).entityType != EntityType.VIEW ) {
+                allColumns.add( QueryResult.fromCatalogColumn( logicalColumn ) );
             }
         }
         return allColumns;
@@ -145,7 +144,6 @@ public List<QueryResult> getAllColumns() {
     public List<LogicalTable> getAllTable() {
         Catalog catalog = Catalog.getInstance();
         List<LogicalTable> catalogEntities = catalog.getTables(
-                null,
                 null,
                 null );
         List<LogicalTable> allTables = new ArrayList<>();
diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticsManagerImpl.java b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticsManagerImpl.java
index 07fa2dd472..2cacdf36f5 100644
--- a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticsManagerImpl.java
+++ b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticsManagerImpl.java
@@ -54,8 +54,8 @@ import org.polypheny.db.algebra.type.AlgDataType;
 import org.polypheny.db.catalog.Catalog;
 import org.polypheny.db.catalog.Snapshot;
-import org.polypheny.db.catalog.entity.CatalogColumn;
 import org.polypheny.db.catalog.entity.CatalogSchema;
+import org.polypheny.db.catalog.entity.logical.LogicalColumn;
 import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.catalog.exceptions.GenericCatalogException;
 import org.polypheny.db.catalog.exceptions.UnknownDatabaseException;
@@ -73,8 +73,6 @@ import org.polypheny.db.information.InformationTable;
 import org.polypheny.db.languages.OperatorRegistry;
 import org.polypheny.db.plan.AlgOptCluster;
-import org.polypheny.db.prepare.PolyphenyDbCatalogReader;
-import org.polypheny.db.prepare.Prepare.CatalogReader;
 import org.polypheny.db.rex.RexBuilder;
 import org.polypheny.db.rex.RexLiteral;
 import org.polypheny.db.schema.impl.AbstractEntity;
@@ -147,13 +145,13 @@ public void initializeStatisticSettings() {
 
     @Override
-    public void updateColumnName( CatalogColumn catalogColumn, String newName ) {
-        if ( statisticSchemaMap.containsKey( catalogColumn.schemaId )
-                && statisticSchemaMap.get( catalogColumn.schemaId ).containsKey( catalogColumn.tableId )
-                && statisticSchemaMap.get( catalogColumn.schemaId ).get( catalogColumn.tableId ).containsKey( catalogColumn.id ) ) {
-            StatisticColumn statisticColumn = statisticSchemaMap.get( catalogColumn.schemaId ).get( catalogColumn.tableId ).get( catalogColumn.id );
+    public void updateColumnName( LogicalColumn logicalColumn, String newName ) {
+        if ( statisticSchemaMap.containsKey( logicalColumn.schemaId )
+                && statisticSchemaMap.get( logicalColumn.schemaId ).containsKey( logicalColumn.tableId )
+                && statisticSchemaMap.get( logicalColumn.schemaId ).get( logicalColumn.tableId ).containsKey( logicalColumn.id ) ) {
+            StatisticColumn statisticColumn = statisticSchemaMap.get( logicalColumn.schemaId ).get( logicalColumn.tableId ).get( logicalColumn.id );
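Aside, illustration only and not part of the diff: the statistics and metadata hunks in this stretch all track the same API change, namely that catalog lookups lose their leading database argument, leaving a (schema, table) pattern pair with null acting as a wildcard. A toy sketch of such a two-level pattern filter, independent of Polypheny's actual Catalog and Pattern implementations; the Table record and case-insensitive matching are assumptions for the example:

import java.util.List;
import java.util.stream.Collectors;

public class PatternLookupSketch {

    record Table( String schema, String name ) { }

    // null acts as a wildcard, loosely mirroring how the patch passes null patterns.
    static boolean matches( String pattern, String value ) {
        return pattern == null || value.equalsIgnoreCase( pattern );
    }

    static List<Table> getTables( List<Table> all, String schemaPattern, String tablePattern ) {
        return all.stream()
                .filter( t -> matches( schemaPattern, t.schema() ) && matches( tablePattern, t.name() ) )
                .collect( Collectors.toList() );
    }

    public static void main( String[] args ) {
        List<Table> all = List.of( new Table( "statisticschema", "nation" ), new Table( "public", "region" ) );
        System.out.println( getTables( all, "statisticschema", null ) ); // only "nation" remains
    }
}

Dropping the database level is what makes calls like getTable( "statisticschema", "nation" ) in the tests above well defined without an "APP" prefix.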
statisticColumn.updateColumnName( newName ); - statisticSchemaMap.get( catalogColumn.schemaId ).get( catalogColumn.tableId ).put( catalogColumn.id, statisticColumn ); + statisticSchemaMap.get( logicalColumn.schemaId ).get( logicalColumn.tableId ).put( logicalColumn.id, statisticColumn ); } } diff --git a/plugins/avatica-interface/src/main/java/org/polypheny/db/avatica/DbmsMeta.java b/plugins/avatica-interface/src/main/java/org/polypheny/db/avatica/DbmsMeta.java index 84921db7cd..b0345de932 100644 --- a/plugins/avatica-interface/src/main/java/org/polypheny/db/avatica/DbmsMeta.java +++ b/plugins/avatica-interface/src/main/java/org/polypheny/db/avatica/DbmsMeta.java @@ -67,12 +67,6 @@ import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.algebra.type.AlgDataTypeSystem; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.logistic.EntityType; -import org.polypheny.db.catalog.logistic.EntityType.PrimitiveTableType; -import org.polypheny.db.catalog.logistic.NamespaceType; -import org.polypheny.db.catalog.logistic.Pattern; -import org.polypheny.db.catalog.entity.CatalogColumn; -import org.polypheny.db.catalog.entity.CatalogColumn.PrimitiveCatalogColumn; import org.polypheny.db.catalog.entity.CatalogDatabase; import org.polypheny.db.catalog.entity.CatalogDatabase.PrimitiveCatalogDatabase; import org.polypheny.db.catalog.entity.CatalogForeignKey; @@ -87,11 +81,17 @@ import org.polypheny.db.catalog.entity.CatalogPrimaryKey.CatalogPrimaryKeyColumn.PrimitiveCatalogPrimaryKeyColumn; import org.polypheny.db.catalog.entity.CatalogSchema; import org.polypheny.db.catalog.entity.CatalogSchema.PrimitiveCatalogSchema; +import org.polypheny.db.catalog.entity.CatalogUser; +import org.polypheny.db.catalog.entity.logical.LogicalColumn; +import org.polypheny.db.catalog.entity.logical.LogicalColumn.PrimitiveCatalogColumn; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.entity.logical.LogicalTable.PrimitiveCatalogTable; -import org.polypheny.db.catalog.entity.CatalogUser; import org.polypheny.db.catalog.exceptions.UnknownDatabaseException; import org.polypheny.db.catalog.exceptions.UnknownSchemaException; +import org.polypheny.db.catalog.logistic.EntityType; +import org.polypheny.db.catalog.logistic.EntityType.PrimitiveTableType; +import org.polypheny.db.catalog.logistic.NamespaceType; +import org.polypheny.db.catalog.logistic.Pattern; import org.polypheny.db.config.RuntimeConfig; import org.polypheny.db.iface.AuthenticationException; import org.polypheny.db.iface.Authenticator; @@ -270,7 +270,6 @@ public MetaResultSet getTables( final ConnectionHandle ch, final String database } final List tables = catalog.getTables( - database == null ? null : new Pattern( database ), (schemaPattern == null || schemaPattern.s == null) ? null : new Pattern( schemaPattern.s ), (tablePattern == null || tablePattern.s == null) ? null : new Pattern( tablePattern.s ) ); @@ -305,8 +304,7 @@ public MetaResultSet getColumns( final ConnectionHandle ch, final String databas if ( log.isTraceEnabled() ) { log.trace( "getColumns( ConnectionHandle {}, String {}, Pat {}, Pat {}, Pat {} )", ch, database, schemaPattern, tablePattern, columnPattern ); } - final List columns = catalog.getColumns( - database == null ? null : new Pattern( database ), + final List columns = catalog.getColumns( (schemaPattern == null || schemaPattern.s == null) ? null : new Pattern( schemaPattern.s ), (tablePattern == null || tablePattern.s == null) ? 
null : new Pattern( tablePattern.s ), (columnPattern == null || columnPattern.s == null) ? null : new Pattern( columnPattern.s ) @@ -351,7 +349,6 @@ public MetaResultSet getSchemas( final ConnectionHandle ch, final String databas log.trace( "getSchemas( ConnectionHandle {}, String {}, Pat {} )", ch, database, schemaPattern ); } final List schemas = catalog.getSchemas( - database == null ? null : new Pattern( database ), (schemaPattern == null || schemaPattern.s == null) ? null : new Pattern( schemaPattern.s ) ); StatementHandle statementHandle = createStatement( ch ); @@ -517,7 +514,7 @@ public MetaResultSet getPrimaryKeys( final ConnectionHandle ch, final String dat final Pattern tablePattern = table == null ? null : new Pattern( table ); final Pattern schemaPattern = schema == null ? null : new Pattern( schema ); final Pattern databasePattern = database == null ? null : new Pattern( database ); - final List catalogEntities = catalog.getTables( databasePattern, schemaPattern, tablePattern ); + final List catalogEntities = catalog.getTables( schemaPattern, tablePattern ); List primaryKeyColumns = new LinkedList<>(); for ( LogicalTable catalogTable : catalogEntities ) { if ( catalogTable.primaryKey != null ) { @@ -554,7 +551,7 @@ public MetaResultSet getImportedKeys( final ConnectionHandle ch, final String da final Pattern tablePattern = table == null ? null : new Pattern( table ); final Pattern schemaPattern = schema == null ? null : new Pattern( schema ); final Pattern databasePattern = database == null ? null : new Pattern( database ); - final List catalogEntities = catalog.getTables( databasePattern, schemaPattern, tablePattern ); + final List catalogEntities = catalog.getTables( schemaPattern, tablePattern ); List foreignKeyColumns = new LinkedList<>(); for ( LogicalTable catalogTable : catalogEntities ) { List importedKeys = catalog.getForeignKeys( catalogTable.id ); @@ -597,7 +594,7 @@ public MetaResultSet getExportedKeys( final ConnectionHandle ch, final String da final Pattern tablePattern = table == null ? null : new Pattern( table ); final Pattern schemaPattern = schema == null ? null : new Pattern( schema ); final Pattern databasePattern = database == null ? null : new Pattern( database ); - final List catalogEntities = catalog.getTables( databasePattern, schemaPattern, tablePattern ); + final List catalogEntities = catalog.getTables( schemaPattern, tablePattern ); List foreignKeyColumns = new LinkedList<>(); for ( LogicalTable catalogTable : catalogEntities ) { List exportedKeys = catalog.getExportedKeys( catalogTable.id ); @@ -714,7 +711,7 @@ public MetaResultSet getIndexInfo( final ConnectionHandle ch, final String datab final Pattern tablePattern = table == null ? null : new Pattern( table ); final Pattern schemaPattern = schema == null ? null : new Pattern( schema ); final Pattern databasePattern = database == null ? 
null : new Pattern( database ); - final List catalogEntities = catalog.getTables( databasePattern, schemaPattern, tablePattern ); + final List catalogEntities = catalog.getTables( schemaPattern, tablePattern ); List catalogIndexColumns = new LinkedList<>(); for ( LogicalTable catalogTable : catalogEntities ) { List catalogIndexInfos = catalog.getIndexes( catalogTable.id, unique ); diff --git a/plugins/cql-language/src/main/java/org/polypheny/db/cql/ColumnIndex.java b/plugins/cql-language/src/main/java/org/polypheny/db/cql/ColumnIndex.java index 246d192da4..968361b88f 100644 --- a/plugins/cql-language/src/main/java/org/polypheny/db/cql/ColumnIndex.java +++ b/plugins/cql-language/src/main/java/org/polypheny/db/cql/ColumnIndex.java @@ -18,7 +18,7 @@ import lombok.extern.slf4j.Slf4j; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.entity.CatalogColumn; +import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.exceptions.UnknownColumnException; import org.polypheny.db.catalog.exceptions.UnknownDatabaseException; import org.polypheny.db.catalog.exceptions.UnknownSchemaException; @@ -32,7 +32,7 @@ @Slf4j public class ColumnIndex { - public final CatalogColumn catalogColumn; + public final LogicalColumn logicalColumn; public final String fullyQualifiedName; public final String schemaName; public final String tableName; @@ -40,11 +40,11 @@ public class ColumnIndex { public ColumnIndex( - final CatalogColumn catalogColumn, + final LogicalColumn logicalColumn, final String schemaName, final String tableName, final String columnName ) { - this.catalogColumn = catalogColumn; + this.logicalColumn = logicalColumn; this.fullyQualifiedName = schemaName + "." + tableName + "." + columnName; this.schemaName = schemaName; this.tableName = tableName; @@ -56,7 +56,7 @@ public static ColumnIndex createIndex( String inDatabase, String schemaName, Str try { log.debug( "Creating ColumnIndex." 
); Catalog catalog = Catalog.getInstance(); - CatalogColumn column = catalog.getColumn( inDatabase, schemaName, tableName, columnName ); + LogicalColumn column = catalog.getColumn( schemaName, tableName, columnName ); return new ColumnIndex( column, schemaName, tableName, columnName ); } catch ( UnknownTableException | UnknownDatabaseException | UnknownSchemaException | UnknownColumnException e ) { log.error( "Cannot find a underlying column for the specified column name: {}.{}.{}.", schemaName, tableName, columnName, e ); diff --git a/plugins/cql-language/src/main/java/org/polypheny/db/cql/Cql2RelConverter.java b/plugins/cql-language/src/main/java/org/polypheny/db/cql/Cql2RelConverter.java index 7c0a0646df..347d351faf 100644 --- a/plugins/cql-language/src/main/java/org/polypheny/db/cql/Cql2RelConverter.java +++ b/plugins/cql-language/src/main/java/org/polypheny/db/cql/Cql2RelConverter.java @@ -32,7 +32,7 @@ import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.entity.CatalogColumn; +import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.cql.BooleanGroup.ColumnOpsBooleanOperator; import org.polypheny.db.cql.exception.UnexpectedTypeException; @@ -199,7 +199,7 @@ private AlgBuilder generateProjections( AlgBuilder algBuilder, RexBuilder rexBui int ordinal = tableScanColumnOrdinalities.size(); RexNode inputRef = rexBuilder.makeInputRef( baseNode, ordinal ); inputRefs.add( inputRef ); - CatalogColumn column = catalog.getColumn( columnId ); + LogicalColumn column = catalog.getColumn( columnId ); columnNames.add( columnNamePrefix + column.name ); tableScanColumnOrdinalities.put( columnId, ordinal ); } @@ -310,7 +310,7 @@ private AlgBuilder generateSort( AlgBuilder algBuilder, RexBuilder rexBuilder ) AlgNode baseNode = algBuilder.peek(); for ( Pair> sortSpecification : sortSpecifications ) { ColumnIndex columnIndex = sortSpecification.left; - int ordinality = projectionColumnOrdinalities.get( columnIndex.catalogColumn.id ); + int ordinality = projectionColumnOrdinalities.get( columnIndex.logicalColumn.id ); RexNode sortingNode = rexBuilder.makeInputRef( baseNode, ordinality ); // TODO: Handle Modifiers diff --git a/plugins/cql-language/src/main/java/org/polypheny/db/cql/Projections.java b/plugins/cql-language/src/main/java/org/polypheny/db/cql/Projections.java index 08b88a849c..b0e0571576 100644 --- a/plugins/cql-language/src/main/java/org/polypheny/db/cql/Projections.java +++ b/plugins/cql-language/src/main/java/org/polypheny/db/cql/Projections.java @@ -298,7 +298,7 @@ public Projection( ColumnIndex columnIndex, Map modifiers ) { public long getColumnId() { - return columnIndex.catalogColumn.id; + return columnIndex.logicalColumn.id; } diff --git a/plugins/cql-language/src/main/java/org/polypheny/db/cql/TableIndex.java b/plugins/cql-language/src/main/java/org/polypheny/db/cql/TableIndex.java index 5adddc69e4..09096f59d0 100644 --- a/plugins/cql-language/src/main/java/org/polypheny/db/cql/TableIndex.java +++ b/plugins/cql-language/src/main/java/org/polypheny/db/cql/TableIndex.java @@ -49,7 +49,7 @@ public static TableIndex createIndex( String inDatabase, String schemaName, Stri try { log.debug( "Creating TableIndex." 
); Catalog catalog = Catalog.getInstance(); - LogicalTable table = catalog.getTable( inDatabase, schemaName, tableName ); + LogicalTable table = catalog.getTable( schemaName, tableName ); return new TableIndex( table, schemaName, tableName ); } catch ( UnknownTableException | UnknownDatabaseException | UnknownSchemaException e ) { throw new UnknownIndexException( "Cannot find a underlying table for the specified table name: " + schemaName + "." + tableName + "." ); diff --git a/plugins/cql-language/src/test/java/org/polypheny/db/cql/utils/helper/AlgBuildTestHelper.java b/plugins/cql-language/src/test/java/org/polypheny/db/cql/utils/helper/AlgBuildTestHelper.java index 5a9a7f95ae..62b6412aed 100644 --- a/plugins/cql-language/src/test/java/org/polypheny/db/cql/utils/helper/AlgBuildTestHelper.java +++ b/plugins/cql-language/src/test/java/org/polypheny/db/cql/utils/helper/AlgBuildTestHelper.java @@ -24,7 +24,7 @@ import org.polypheny.db.adapter.java.JavaTypeFactory; import org.polypheny.db.algebra.core.JoinAlgType; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.entity.CatalogColumn; +import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.cql.TableIndex; import org.polypheny.db.cql.exception.UnknownIndexException; import org.polypheny.db.rex.RexBuilder; @@ -79,7 +79,7 @@ public AlgBuildTestHelper( AlgBuildLevel algBuildLevel ) throws UnknownIndexExce for ( TableIndex tableIndex : tableIndices ) { for ( Long columnId : tableIndex.catalogTable.fieldIds ) { - CatalogColumn column = catalog.getColumn( columnId ); + LogicalColumn column = catalog.getColumn( columnId ); columnNames.add( tableIndex.fullyQualifiedName + "." + column.name ); RexInputRef inputRef = rexBuilder.makeInputRef( algBuilder.peek(), inputRefs.size() ); tableScanOrdinalities.put( columnId, inputRefs.size() ); diff --git a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSchema.java b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSchema.java index 131d020b33..34bf1588aa 100644 --- a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSchema.java +++ b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSchema.java @@ -45,12 +45,11 @@ import org.polypheny.db.algebra.type.AlgDataTypeFactory; import org.polypheny.db.algebra.type.AlgDataTypeSystem; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.entity.CatalogColumn; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; import org.polypheny.db.catalog.entity.allocation.AllocationTable; +import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.entity.physical.PhysicalTable; -import org.polypheny.db.schema.Entity; import org.polypheny.db.schema.Namespace.Schema; import org.polypheny.db.schema.impl.AbstractNamespace; import org.polypheny.db.type.PolyType; @@ -90,10 +89,10 @@ public PhysicalTable createCsvTable( LogicalTable catalogTable, AllocationTable List fieldTypes = new LinkedList<>(); List fieldIds = new ArrayList<>( allocationTable.placements.size() ); for ( CatalogColumnPlacement placement : allocationTable.placements ) { - CatalogColumn catalogColumn = Catalog.getInstance().getColumn( placement.columnId ); - AlgDataType sqlType = sqlType( typeFactory, catalogColumn.type, catalogColumn.length, catalogColumn.scale, null ); - fieldInfo.add( catalogColumn.name, placement.physicalColumnName, sqlType ).nullable( 
catalogColumn.nullable ); - fieldTypes.add( CsvFieldType.getCsvFieldType( catalogColumn.type ) ); + LogicalColumn logicalColumn = Catalog.getInstance().getColumn( placement.columnId ); + AlgDataType sqlType = sqlType( typeFactory, logicalColumn.type, logicalColumn.length, logicalColumn.scale, null ); + fieldInfo.add( logicalColumn.name, placement.physicalColumnName, sqlType ).nullable( logicalColumn.nullable ); + fieldTypes.add( CsvFieldType.getCsvFieldType( logicalColumn.type ) ); fieldIds.add( (int) placement.physicalPosition ); } diff --git a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvTable.java b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvTable.java index f4c2441948..e5c57bed6c 100644 --- a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvTable.java +++ b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvTable.java @@ -35,21 +35,15 @@ import java.util.ArrayList; import java.util.List; -import org.polypheny.db.adapter.java.JavaTypeFactory; -import org.polypheny.db.algebra.type.AlgDataType; -import org.polypheny.db.algebra.type.AlgDataTypeFactory; -import org.polypheny.db.algebra.type.AlgProtoDataType; import org.polypheny.db.catalog.entity.allocation.AllocationTable; import org.polypheny.db.catalog.entity.physical.PhysicalTable; -import org.polypheny.db.schema.Entity.Table; -import org.polypheny.db.schema.impl.AbstractEntity; import org.polypheny.db.util.Source; /** * Base class for table that reads CSV files. */ -public abstract class CsvTable extends PhysicalTable implements Table { +public abstract class CsvTable extends PhysicalTable { protected final Source source; protected List fieldTypes; @@ -60,8 +54,8 @@ public abstract class CsvTable extends PhysicalTable implements Table { /** * Creates a CsvTable. 
*/ - CsvTable( Source source, AllocationTable table, List fieldTypes, int[] fields, CsvSource csvSource ) { - super( table ); + CsvTable( Source source, AllocationTable allocationTable, List fieldTypes, int[] fields, CsvSource csvSource ) { + super( allocationTable, allocationTable.name, allocationTable.getNamespaceName(), new ArrayList<>( allocationTable.getColumnNames().values() ) ); this.source = source; this.fieldTypes = fieldTypes; this.fields = fields; diff --git a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherAlterDatabaseAlias.java b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherAlterDatabaseAlias.java index e68e161711..2f23e907a1 100644 --- a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherAlterDatabaseAlias.java +++ b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherAlterDatabaseAlias.java @@ -19,8 +19,8 @@ import java.util.List; import lombok.Getter; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.logistic.Pattern; import org.polypheny.db.catalog.entity.logical.LogicalGraph; +import org.polypheny.db.catalog.logistic.Pattern; import org.polypheny.db.cypher.CypherParameter; import org.polypheny.db.cypher.CypherSimpleEither; import org.polypheny.db.ddl.DdlManager; @@ -53,7 +53,7 @@ public CypherAlterDatabaseAlias( @Override public void execute( Context context, Statement statement, QueryParameters parameters ) { - List graphs = Catalog.getInstance().getGraphs( Catalog.defaultDatabaseId, new Pattern( targetName ) ); + List graphs = Catalog.getInstance().getGraphs( new Pattern( targetName ) ); if ( graphs.size() != 1 ) { if ( !ifExists ) { diff --git a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherCreateDatabaseAlias.java b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherCreateDatabaseAlias.java index d6d2330032..0f5bcb9ef9 100644 --- a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherCreateDatabaseAlias.java +++ b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherCreateDatabaseAlias.java @@ -19,8 +19,8 @@ import java.util.List; import lombok.Getter; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.logistic.Pattern; import org.polypheny.db.catalog.entity.logical.LogicalGraph; +import org.polypheny.db.catalog.logistic.Pattern; import org.polypheny.db.cypher.CypherParameter; import org.polypheny.db.cypher.CypherSimpleEither; import org.polypheny.db.ddl.DdlManager; @@ -56,7 +56,7 @@ public CypherCreateDatabaseAlias( @Override public void execute( Context context, Statement statement, QueryParameters parameters ) { - List graphs = Catalog.getInstance().getGraphs( Catalog.defaultDatabaseId, new Pattern( targetName ) ); + List graphs = Catalog.getInstance().getGraphs( new Pattern( targetName ) ); if ( graphs.size() != 1 ) { throw new RuntimeException( "Error while creating a new graph database alias." 
); } diff --git a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherDropAlias.java b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherDropAlias.java index 524417e137..62ccf2f2f2 100644 --- a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherDropAlias.java +++ b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherDropAlias.java @@ -19,8 +19,8 @@ import java.util.List; import lombok.Getter; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.logistic.Pattern; import org.polypheny.db.catalog.entity.logical.LogicalGraph; +import org.polypheny.db.catalog.logistic.Pattern; import org.polypheny.db.cypher.CypherParameter; import org.polypheny.db.cypher.CypherSimpleEither; import org.polypheny.db.ddl.DdlManager; @@ -47,7 +47,7 @@ public CypherDropAlias( ParserPos pos, CypherSimpleEither graphs = Catalog.getInstance().getGraphs( Catalog.defaultDatabaseId, new Pattern( aliasName ) ); + List graphs = Catalog.getInstance().getGraphs( new Pattern( aliasName ) ); if ( graphs.size() != 1 ) { throw new RuntimeException( "Error while dropping a graph database alias." ); } diff --git a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherDropDatabase.java b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherDropDatabase.java index 23161a3a7c..51a04e264c 100644 --- a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherDropDatabase.java +++ b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherDropDatabase.java @@ -19,8 +19,8 @@ import java.util.List; import java.util.concurrent.TimeUnit; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.logistic.Pattern; import org.polypheny.db.catalog.entity.logical.LogicalGraph; +import org.polypheny.db.catalog.logistic.Pattern; import org.polypheny.db.cypher.CypherParameter; import org.polypheny.db.cypher.CypherSimpleEither; import org.polypheny.db.cypher.clause.CypherWaitClause; @@ -64,7 +64,7 @@ public void execute( Context context, Statement statement, QueryParameters param } } - List databases = Catalog.getInstance().getGraphs( Catalog.defaultDatabaseId, new Pattern( databaseName ) ); + List databases = Catalog.getInstance().getGraphs( new Pattern( databaseName ) ); if ( databases.size() != 1 ) { if ( !ifExists ) { diff --git a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/ddl/CypherAddPlacement.java b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/ddl/CypherAddPlacement.java index 071d42b1b5..9faa6ed7a6 100644 --- a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/ddl/CypherAddPlacement.java +++ b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/ddl/CypherAddPlacement.java @@ -23,8 +23,8 @@ import org.polypheny.db.adapter.AdapterManager; import org.polypheny.db.adapter.DataStore; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.logistic.Pattern; import org.polypheny.db.catalog.entity.logical.LogicalGraph; +import org.polypheny.db.catalog.logistic.Pattern; import org.polypheny.db.cypher.CypherParameter; import org.polypheny.db.cypher.CypherSimpleEither; import org.polypheny.db.cypher.admin.CypherAdminCommand; @@ -64,7 +64,7 @@ public void execute( Context context, Statement statement, QueryParameters param Catalog catalog = Catalog.getInstance(); AdapterManager adapterManager = AdapterManager.getInstance(); - List graphs = catalog.getGraphs( 
Catalog.defaultDatabaseId, new Pattern( this.database ) ); + List graphs = catalog.getGraphs( new Pattern( this.database ) ); List dataStores = Stream.of( store ) .map( store -> (DataStore) adapterManager.getAdapter( store ) ) diff --git a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/ddl/CypherDropPlacement.java b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/ddl/CypherDropPlacement.java index e7d6a21d24..d44d84bc74 100644 --- a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/ddl/CypherDropPlacement.java +++ b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/ddl/CypherDropPlacement.java @@ -22,8 +22,8 @@ import org.polypheny.db.adapter.AdapterManager; import org.polypheny.db.adapter.DataStore; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.logistic.Pattern; import org.polypheny.db.catalog.entity.logical.LogicalGraph; +import org.polypheny.db.catalog.logistic.Pattern; import org.polypheny.db.cypher.CypherParameter; import org.polypheny.db.cypher.CypherSimpleEither; import org.polypheny.db.cypher.admin.CypherAdminCommand; @@ -55,7 +55,7 @@ public void execute( Context context, Statement statement, QueryParameters param Catalog catalog = Catalog.getInstance(); AdapterManager adapterManager = AdapterManager.getInstance(); - List graphs = catalog.getGraphs( Catalog.defaultDatabaseId, new Pattern( this.databaseName ) ); + List graphs = catalog.getGraphs( new Pattern( this.databaseName ) ); DataStore dataStore = Stream.of( storeName ) .map( store -> (DataStore) adapterManager.getAdapter( storeName ) ) diff --git a/plugins/hsqldb-adapter/src/main/java/org/polypheny/db/hsqldb/stores/HsqldbStore.java b/plugins/hsqldb-adapter/src/main/java/org/polypheny/db/hsqldb/stores/HsqldbStore.java index 3f97c057ec..d85af4a71a 100644 --- a/plugins/hsqldb-adapter/src/main/java/org/polypheny/db/hsqldb/stores/HsqldbStore.java +++ b/plugins/hsqldb-adapter/src/main/java/org/polypheny/db/hsqldb/stores/HsqldbStore.java @@ -253,7 +253,7 @@ protected String getTypeString( PolyType type ) { @Override - protected String getDefaultPhysicalSchemaName() { + public String getDefaultPhysicalSchemaName() { return "PUBLIC"; } diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcImplementor.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcImplementor.java index 0910629d95..50fee163fd 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcImplementor.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcImplementor.java @@ -38,9 +38,6 @@ import org.polypheny.db.adapter.java.JavaTypeFactory; import org.polypheny.db.adapter.jdbc.rel2sql.AlgToSqlConverter; import org.polypheny.db.algebra.AlgNode; -import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; -import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.languages.ParserPos; import org.polypheny.db.sql.language.SqlDialect; import org.polypheny.db.sql.language.SqlIdentifier; @@ -76,17 +73,15 @@ public Result implement( AlgNode node ) { @Override - public SqlIdentifier getPhysicalTableName( CatalogPartitionPlacement placement ) { - return new SqlIdentifier( Arrays.asList( placement.physicalSchemaName, placement.physicalTableName ), ParserPos.ZERO ); + public SqlIdentifier getPhysicalTableName( JdbcEntity physical ) { + return new SqlIdentifier( Arrays.asList( 
diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcImplementor.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcImplementor.java
index 0910629d95..50fee163fd 100644
--- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcImplementor.java
+++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcImplementor.java
@@ -38,9 +38,6 @@
 import org.polypheny.db.adapter.java.JavaTypeFactory;
 import org.polypheny.db.adapter.jdbc.rel2sql.AlgToSqlConverter;
 import org.polypheny.db.algebra.AlgNode;
-import org.polypheny.db.catalog.Catalog;
-import org.polypheny.db.catalog.entity.CatalogPartitionPlacement;
-import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.languages.ParserPos;
 import org.polypheny.db.sql.language.SqlDialect;
 import org.polypheny.db.sql.language.SqlIdentifier;
@@ -76,17 +73,15 @@ public Result implement( AlgNode node ) {

     @Override
-    public SqlIdentifier getPhysicalTableName( CatalogPartitionPlacement placement ) {
-        return new SqlIdentifier( Arrays.asList( placement.physicalSchemaName, placement.physicalTableName ), ParserPos.ZERO );
+    public SqlIdentifier getPhysicalTableName( JdbcEntity physical ) {
+        return new SqlIdentifier( Arrays.asList( physical.namespaceName, physical.name ), ParserPos.ZERO );
     }

     @Override
-    public SqlIdentifier getPhysicalColumnName( CatalogPartitionPlacement placement, String columnName ) {
-        LogicalTable catalogTable = Catalog.getInstance().getTable( placement.tableId );
-        JdbcEntity table = schema.getTableMap().get( catalogTable.name + "_" + placement.partitionId );
-        if ( table.hasPhysicalColumnName( columnName ) ) {
-            return table.physicalColumnName( columnName );
+    public SqlIdentifier getPhysicalColumnName( JdbcEntity physical, String columnName ) {
+        if ( physical.hasPhysicalColumnName( columnName ) ) {
+            return physical.physicalColumnName( columnName );
         } else {
             return new SqlIdentifier( "_" + columnName, ParserPos.ZERO );
         }
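JdbcImplementor now resolves physical names directly from the JdbcEntity instead of going through the catalog and a CatalogPartitionPlacement. A sketch of the two resolution paths, assuming a JdbcEntity named physical as in the hunk ("age" is an illustrative column name; fields and methods are those shown above):

    // Table: namespaceName/name replace physicalSchemaName/physicalTableName.
    SqlIdentifier table = new SqlIdentifier( Arrays.asList( physical.namespaceName, physical.name ), ParserPos.ZERO );

    // Column: fall back to a "_"-prefixed identifier when no physical name is registered.
    SqlIdentifier column = physical.hasPhysicalColumnName( "age" )
            ? physical.physicalColumnName( "age" )
            : new SqlIdentifier( "_age", ParserPos.ZERO );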
diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcRules.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcRules.java
index 4082fb6a71..7ad7427674 100644
--- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcRules.java
+++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcRules.java
@@ -59,12 +59,12 @@
 import org.polypheny.db.algebra.core.Join;
 import org.polypheny.db.algebra.core.JoinAlgType;
 import org.polypheny.db.algebra.core.Minus;
-import org.polypheny.db.algebra.core.relational.RelModify;
 import org.polypheny.db.algebra.core.Project;
 import org.polypheny.db.algebra.core.SemiJoin;
 import org.polypheny.db.algebra.core.Sort;
 import org.polypheny.db.algebra.core.Union;
 import org.polypheny.db.algebra.core.Values;
+import org.polypheny.db.algebra.core.relational.RelModify;
 import org.polypheny.db.algebra.metadata.AlgMdUtil;
 import org.polypheny.db.algebra.metadata.AlgMetadataQuery;
 import org.polypheny.db.algebra.type.AlgDataType;
@@ -73,14 +73,12 @@
 import org.polypheny.db.nodes.Operator;
 import org.polypheny.db.plan.AlgOptCluster;
 import org.polypheny.db.plan.AlgOptCost;
-import org.polypheny.db.plan.AlgOptEntity;
 import org.polypheny.db.plan.AlgOptPlanner;
 import org.polypheny.db.plan.AlgOptRule;
 import org.polypheny.db.plan.AlgOptRuleCall;
 import org.polypheny.db.plan.AlgTrait;
 import org.polypheny.db.plan.AlgTraitSet;
 import org.polypheny.db.plan.Convention;
-import org.polypheny.db.prepare.Prepare;
 import org.polypheny.db.rex.RexCall;
 import org.polypheny.db.rex.RexInputRef;
 import org.polypheny.db.rex.RexLiteral;
@@ -1019,7 +1017,7 @@ public boolean matches( AlgOptRuleCall call ) {

         @Override
         public AlgNode convert( AlgNode alg ) {
-            final RelModify modify = (RelModify) alg;
+            final RelModify modify = (RelModify) alg;
             final ModifiableEntity modifiableTable = modify.getEntity().unwrap( ModifiableEntity.class );
             if ( modifiableTable == null ) {
                 return null;
@@ -1028,8 +1026,7 @@ public AlgNode convert( AlgNode alg ) {
             return new JdbcTableModify(
                     modify.getCluster(),
                     traitSet,
-                    modify.getEntity(),
-                    modify.getCatalogReader(),
+                    modify.getEntity().unwrap( JdbcEntity.class ),
                     AlgOptRule.convert( modify.getInput(), traitSet ),
                     modify.getOperation(),
                     modify.getUpdateColumnList(),
@@ -1043,7 +1040,7 @@ public AlgNode convert( AlgNode alg ) {
     /**
      * Table-modification operator implemented in JDBC convention.
      */
-    public static class JdbcTableModify extends RelModify implements JdbcAlg {
+    public static class JdbcTableModify extends RelModify implements JdbcAlg {

         private final Expression expression;

@@ -1051,21 +1048,20 @@ public static class JdbcTableModify extends RelModify implements JdbcAlg {
         public JdbcTableModify(
                 AlgOptCluster cluster,
                 AlgTraitSet traitSet,
-                AlgOptEntity table,
-                Prepare.CatalogReader catalogReader,
+                JdbcEntity table,
                 AlgNode input,
                 Operation operation,
                 List updateColumnList,
-                List sourceExpressionList,
+                List sourceExpressionList,
                 boolean flattened ) {
-            super( cluster, traitSet, table, catalogReader, input, operation, updateColumnList, sourceExpressionList, flattened );
+            super( cluster, traitSet, table, input, operation, updateColumnList, sourceExpressionList, flattened );
             assert input.getConvention() instanceof JdbcConvention;
             assert getConvention() instanceof JdbcConvention;
             final ModifiableEntity modifiableTable = table.unwrap( ModifiableEntity.class );
             if ( modifiableTable == null ) {
                 throw new AssertionError(); // TODO: user error in validator
             }
-            this.expression = table.getExpression( Queryable.class );
+            this.expression = table.asExpression( Queryable.class );
             if ( expression == null ) {
                 throw new AssertionError(); // TODO: user error in validator
             }
@@ -1084,8 +1080,7 @@ public AlgNode copy( AlgTraitSet traitSet, List inputs ) {
             return new JdbcTableModify(
                     getCluster(),
                     traitSet,
-                    getEntity(),
-                    getCatalogReader(),
+                    entity,
                     AbstractAlgNode.sole( inputs ),
                     getOperation(),
                     getUpdateColumnList(),
diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/rel2sql/AlgToSqlConverter.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/rel2sql/AlgToSqlConverter.java
index c09740c65e..69f1232340 100644
--- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/rel2sql/AlgToSqlConverter.java
+++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/rel2sql/AlgToSqlConverter.java
@@ -46,6 +46,7 @@
 import java.util.SortedSet;
 import java.util.stream.Collectors;
 import org.apache.calcite.linq4j.tree.Expressions;
+import org.polypheny.db.adapter.jdbc.JdbcEntity;
 import org.polypheny.db.algebra.AlgFieldCollation;
 import org.polypheny.db.algebra.AlgNode;
 import org.polypheny.db.algebra.constant.JoinConditionType;
@@ -69,7 +70,6 @@
 import org.polypheny.db.algebra.operators.OperatorName;
 import org.polypheny.db.algebra.type.AlgDataType;
 import org.polypheny.db.algebra.type.AlgDataTypeField;
-import org.polypheny.db.catalog.entity.CatalogPartitionPlacement;
 import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.languages.OperatorRegistry;
 import org.polypheny.db.languages.ParserPos;
@@ -273,9 +273,9 @@ public Result visit( Aggregate e ) {
     /**
      * @see #dispatch
      */
-    public Result visit( RelScan e ) {
+    public Result visit( RelScan e ) {
         return result(
-                new SqlIdentifier( List.of( e.getEntity().unwrap( LogicalTable.class ).getNamespaceName(), e.getEntity().getCatalogEntity().name ), ParserPos.ZERO ),
+                new SqlIdentifier( List.of( e.getEntity().unwrap( LogicalTable.class ).getNamespaceName(), e.getEntity().name ), ParserPos.ZERO ),
                 ImmutableList.of( Clause.FROM ),
                 e,
                 null );
@@ -448,12 +448,12 @@ public Result visit( Sort e ) {
     /**
      * @see #dispatch
      */
-    public Result visit( RelModify modify ) {
+    public Result visit( RelModify modify ) {
         final Map pairs = ImmutableMap.of();
         final Context context = aliasContext( pairs, false );

         // Target Table Name
-        final SqlIdentifier sqlTargetTable = getPhysicalTableName( modify.getEntity().getPartitionPlacement().unwrap( CatalogPartitionPlacement.class ) );
+        final SqlIdentifier sqlTargetTable = getPhysicalTableName( modify.getEntity().unwrap( JdbcEntity.class ) );

         switch ( modify.getOperation() ) {
             case INSERT: {
@@ -466,7 +466,7 @@ public Result visit( RelModify modify ) {
                         sqlTargetTable,
                         sqlSource,
                         physicalIdentifierList(
-                                modify.getEntity().getPartitionPlacement().unwrap( CatalogPartitionPlacement.class ),
+                                modify.getEntity().unwrap( JdbcEntity.class ),
                                 modify.getInput().getRowType().getFieldNames() ) );
                 return result( sqlInsert, ImmutableList.of(), modify, null );
             }
@@ -475,7 +475,7 @@ public Result visit( RelModify modify ) {
                 final SqlUpdate sqlUpdate = new SqlUpdate(
                         POS,
                         sqlTargetTable,
-                        physicalIdentifierList( modify.getEntity().getPartitionPlacement().unwrap( CatalogPartitionPlacement.class ), modify.getUpdateColumnList() ),
+                        physicalIdentifierList( modify.getEntity().unwrap( JdbcEntity.class ), modify.getUpdateColumnList() ),
                         exprList( context, modify.getSourceExpressionList() ),
                         ((SqlSelect) input.node).getWhere(),
                         input.asSelect(),
@@ -518,7 +518,7 @@ private SqlNodeList identifierList( List names ) {
     /**
      * Converts a list of names expressions to a list of single-part {@link SqlIdentifier}s.
      */
-    private SqlNodeList physicalIdentifierList( CatalogPartitionPlacement partitionPlacement, List columnNames ) {
+    private SqlNodeList physicalIdentifierList( JdbcEntity partitionPlacement, List columnNames ) {
         return new SqlNodeList( columnNames.stream().map( columnName -> getPhysicalColumnName( partitionPlacement, columnName ) ).collect( Collectors.toList() ), POS );
     }

@@ -660,10 +660,10 @@ private void parseCorrelTable( AlgNode algNode, Result x ) {
     }

-    public abstract SqlIdentifier getPhysicalTableName( CatalogPartitionPlacement tableName );
+    public abstract SqlIdentifier getPhysicalTableName( JdbcEntity tableName );

-    public abstract SqlIdentifier getPhysicalColumnName( CatalogPartitionPlacement tableName, String columnName );
+    public abstract SqlIdentifier getPhysicalColumnName( JdbcEntity tableName, String columnName );

     /**
@@ -694,13 +694,13 @@ public PlainAlgToSqlConverter( SqlDialect dialect ) {

     @Override
-    public SqlIdentifier getPhysicalTableName( CatalogPartitionPlacement placement ) {
-        return new SqlIdentifier( placement.physicalTableName, POS );
+    public SqlIdentifier getPhysicalTableName( JdbcEntity placement ) {
+        return new SqlIdentifier( placement.name, POS );
     }

     @Override
-    public SqlIdentifier getPhysicalColumnName( CatalogPartitionPlacement placement, String columnName ) {
+    public SqlIdentifier getPhysicalColumnName( JdbcEntity placement, String columnName ) {
         return new SqlIdentifier( columnName, POS );
     }
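The converter hunks above repeat one pattern: the entity attached to the algebra node is unwrapped to the adapter-specific JdbcEntity at the point of use. A sketch of the DML path, assuming modify is the RelModify from the visit method and names are those shown in the hunks:

    // Unwrap once, then derive target table and physical column identifiers from it.
    JdbcEntity jdbc = modify.getEntity().unwrap( JdbcEntity.class );
    SqlIdentifier target = getPhysicalTableName( jdbc );
    // Physical column identifiers for an INSERT, derived from the input row type.
    SqlNodeList columns = physicalIdentifierList( jdbc, modify.getInput().getRowType().getFieldNames() );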
diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/rel2sql/SqlImplementor.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/rel2sql/SqlImplementor.java
index edde48c4ea..da07215955 100644
--- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/rel2sql/SqlImplementor.java
+++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/rel2sql/SqlImplementor.java
@@ -77,7 +77,6 @@
 import org.polypheny.db.languages.ParserPos;
 import org.polypheny.db.nodes.Node;
 import org.polypheny.db.nodes.Operator;
-import org.polypheny.db.prepare.AlgOptEntityImpl;
 import org.polypheny.db.rex.RexCall;
 import org.polypheny.db.rex.RexCorrelVariable;
 import org.polypheny.db.rex.RexDynamicParam;
@@ -194,13 +193,13 @@ public Result setOpToSql( SqlSetOperator operator, AlgNode alg ) {
             final Result result = visitChild( input.i, input.e );
             if ( node == null ) {
                 if ( input.getValue() instanceof JdbcScan ) {
-                    node = result.asSelect( ((JdbcEntity) ((AlgOptEntityImpl) input.getValue().getEntity()).getEntity()).getNodeList() );
+                    node = result.asSelect( input.getValue().getEntity().unwrap( JdbcEntity.class ).getNodeList() );
                 } else {
                     node = result.asSelect();
                 }
             } else {
                 if ( input.getValue() instanceof JdbcScan ) {
-                    node = (SqlNode) operator.createCall( POS, node, result.asSelect( ((JdbcEntity) ((AlgOptEntityImpl) input.getValue().getEntity()).getEntity()).getNodeList() ) );
+                    node = (SqlNode) operator.createCall( POS, node, result.asSelect( input.getValue().getEntity().unwrap( JdbcEntity.class ).getNodeList() ) );
                 } else {
                     node = (SqlNode) operator.createCall( POS, node, result.asSelect() );
                 }
@@ -1176,7 +1175,7 @@ && hasNestedAggregations( (LogicalAggregate) alg ) ) {
                 select = subSelect();
             } else {
                 if ( explicitColumnNames && alg.getInputs().size() == 1 && alg.getInput( 0 ) instanceof JdbcScan ) {
-                    select = asSelect( ((JdbcEntity) ((AlgOptEntityImpl) alg.getInput( 0 ).getEntity()).getEntity()).getNodeList() );
+                    select = asSelect( alg.getInput( 0 ).getEntity().unwrap( JdbcEntity.class ).getNodeList() );
                 } else {
                     select = asSelect();
                 }
diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/stores/AbstractJdbcStore.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/stores/AbstractJdbcStore.java
index ad070b798b..0d9c4aa67b 100644
--- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/stores/AbstractJdbcStore.java
+++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/stores/AbstractJdbcStore.java
@@ -31,10 +31,10 @@
 import org.polypheny.db.adapter.jdbc.connection.ConnectionFactory;
 import org.polypheny.db.adapter.jdbc.connection.ConnectionHandlerException;
 import org.polypheny.db.catalog.Snapshot;
-import org.polypheny.db.catalog.entity.CatalogColumn;
 import org.polypheny.db.catalog.entity.CatalogColumnPlacement;
 import org.polypheny.db.catalog.entity.CatalogPartitionPlacement;
 import org.polypheny.db.catalog.entity.allocation.AllocationTable;
+import org.polypheny.db.catalog.entity.logical.LogicalColumn;
 import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.catalog.entity.physical.PhysicalTable;
 import org.polypheny.db.config.RuntimeConfig;
@@ -150,13 +150,13 @@ protected StringBuilder buildCreateTableQuery( String schemaName, String physica
                 .append( " ( " );
         boolean first = true;
         for ( CatalogColumnPlacement placement : allocationTable.placements ) {
-            CatalogColumn catalogColumn = allocationTable.getColumns().get( placement.columnId );
+            LogicalColumn logicalColumn = allocationTable.getColumns().get( placement.columnId );
             if ( !first ) {
                 builder.append( ", " );
             }
             first = false;
             builder.append( dialect.quoteIdentifier( getPhysicalColumnName( placement.columnId ) ) ).append( " " );
-            createColumnDefinition( catalogColumn, builder );
+            createColumnDefinition( logicalColumn, builder );
             builder.append( " NULL" );
         }
         builder.append( " )" );
@@ -165,22 +165,22 @@

     @Override
-    public void addColumn( Context context, LogicalTable catalogTable, CatalogColumn catalogColumn ) {
-        String physicalColumnName = getPhysicalColumnName( catalogColumn.id );
+    public void addColumn( Context context, LogicalTable catalogTable, LogicalColumn logicalColumn ) {
+        String physicalColumnName = getPhysicalColumnName( logicalColumn.id );
         for ( CatalogPartitionPlacement partitionPlacement : catalog.getPartitionPlacementsByTableOnAdapter( this.getAdapterId(), catalogTable.id ) ) {
             String physicalTableName = partitionPlacement.physicalTableName;
             String physicalSchemaName = partitionPlacement.physicalSchemaName;
-            StringBuilder query = buildAddColumnQuery( physicalSchemaName, physicalTableName, physicalColumnName, catalogTable, catalogColumn );
+            StringBuilder query = buildAddColumnQuery( physicalSchemaName, physicalTableName, physicalColumnName, catalogTable, logicalColumn );
             executeUpdate( query, context );
             // Insert default value
-            if ( catalogColumn.defaultValue != null ) {
-                query = buildInsertDefaultValueQuery( physicalSchemaName, physicalTableName, physicalColumnName, catalogColumn );
+            if ( logicalColumn.defaultValue != null ) {
+                query = buildInsertDefaultValueQuery( physicalSchemaName, physicalTableName, physicalColumnName, logicalColumn );
                 executeUpdate( query, context );
             }
             // Add physical name to placement
             catalog.updateColumnPlacementPhysicalNames(
                     getAdapterId(),
-                    catalogColumn.id,
+                    logicalColumn.id,
                     physicalSchemaName,
                     physicalColumnName,
                     false );
@@ -188,42 +188,42 @@
     }

-    protected StringBuilder buildAddColumnQuery( String physicalSchemaName, String physicalTableName, String physicalColumnName, LogicalTable catalogTable, CatalogColumn catalogColumn ) {
+    protected StringBuilder buildAddColumnQuery( String physicalSchemaName, String physicalTableName, String physicalColumnName, LogicalTable catalogTable, LogicalColumn logicalColumn ) {
         StringBuilder builder = new StringBuilder();
         builder.append( "ALTER TABLE " )
                 .append( dialect.quoteIdentifier( physicalSchemaName ) )
                 .append( "." )
                 .append( dialect.quoteIdentifier( physicalTableName ) );
         builder.append( " ADD " ).append( dialect.quoteIdentifier( physicalColumnName ) ).append( " " );
-        createColumnDefinition( catalogColumn, builder );
+        createColumnDefinition( logicalColumn, builder );
         builder.append( " NULL" );
         return builder;
     }

-    protected void createColumnDefinition( CatalogColumn catalogColumn, StringBuilder builder ) {
-        if ( !this.dialect.supportsNestedArrays() && catalogColumn.collectionsType == PolyType.ARRAY ) {
+    protected void createColumnDefinition( LogicalColumn logicalColumn, StringBuilder builder ) {
+        if ( !this.dialect.supportsNestedArrays() && logicalColumn.collectionsType == PolyType.ARRAY ) {
             // Returns e.g. TEXT if arrays are not supported
             builder.append( getTypeString( PolyType.ARRAY ) );
-        } else if ( catalogColumn.collectionsType == PolyType.MAP ) {
+        } else if ( logicalColumn.collectionsType == PolyType.MAP ) {
             builder.append( getTypeString( PolyType.ARRAY ) );
         } else {
-            builder.append( " " ).append( getTypeString( catalogColumn.type ) );
-            if ( catalogColumn.length != null ) {
-                builder.append( "(" ).append( catalogColumn.length );
-                if ( catalogColumn.scale != null ) {
-                    builder.append( "," ).append( catalogColumn.scale );
+            builder.append( " " ).append( getTypeString( logicalColumn.type ) );
+            if ( logicalColumn.length != null ) {
+                builder.append( "(" ).append( logicalColumn.length );
+                if ( logicalColumn.scale != null ) {
+                    builder.append( "," ).append( logicalColumn.scale );
                 }
                 builder.append( ")" );
             }
-            if ( catalogColumn.collectionsType != null ) {
-                builder.append( " " ).append( getTypeString( catalogColumn.collectionsType ) );
+            if ( logicalColumn.collectionsType != null ) {
+                builder.append( " " ).append( getTypeString( logicalColumn.collectionsType ) );
             }
         }
     }

-    protected StringBuilder buildInsertDefaultValueQuery( String physicalSchemaName, String physicalTableName, String physicalColumnName, CatalogColumn catalogColumn ) {
+    protected StringBuilder buildInsertDefaultValueQuery( String physicalSchemaName, String physicalTableName, String physicalColumnName, LogicalColumn logicalColumn ) {
         StringBuilder builder = new StringBuilder();
         builder.append( "UPDATE " )
                 .append( dialect.quoteIdentifier( physicalSchemaName ) )
@@ -231,29 +231,29 @@ protected StringBuilder buildInsertDefaultValueQuery( String physicalSchemaName,
                 .append( dialect.quoteIdentifier( physicalTableName ) );
         builder.append( " SET " ).append( dialect.quoteIdentifier( physicalColumnName ) ).append( " = " );

-        if ( catalogColumn.collectionsType == PolyType.ARRAY ) {
+        if ( logicalColumn.collectionsType == PolyType.ARRAY ) {
             throw new RuntimeException( "Default values are not supported for array types" );
         }

         SqlLiteral literal;
-        switch ( catalogColumn.defaultValue.type ) {
+        switch ( logicalColumn.defaultValue.type ) {
             case BOOLEAN:
-                literal = SqlLiteral.createBoolean( Boolean.parseBoolean( catalogColumn.defaultValue.value ), ParserPos.ZERO );
+                literal = SqlLiteral.createBoolean( Boolean.parseBoolean( logicalColumn.defaultValue.value ), ParserPos.ZERO );
                 break;
             case INTEGER:
             case DECIMAL:
             case BIGINT:
-                literal = SqlLiteral.createExactNumeric( catalogColumn.defaultValue.value, ParserPos.ZERO );
+                literal = SqlLiteral.createExactNumeric( logicalColumn.defaultValue.value, ParserPos.ZERO );
                 break;
             case REAL:
             case DOUBLE:
-                literal = SqlLiteral.createApproxNumeric( catalogColumn.defaultValue.value, ParserPos.ZERO );
+                literal = SqlLiteral.createApproxNumeric( logicalColumn.defaultValue.value, ParserPos.ZERO );
                 break;
             case VARCHAR:
-                literal = SqlLiteral.createCharString( catalogColumn.defaultValue.value, ParserPos.ZERO );
+                literal = SqlLiteral.createCharString( logicalColumn.defaultValue.value, ParserPos.ZERO );
                 break;
             default:
-                throw new PolyphenyDbException( "Not yet supported default value type: " + catalogColumn.defaultValue.type );
+                throw new PolyphenyDbException( "Not yet supported default value type: " + logicalColumn.defaultValue.type );
         }
         builder.append( literal.toSqlString( dialect ) );
         return builder;
@@ -262,8 +262,8 @@

     // Make sure to update overridden methods as well
     @Override
-    public void updateColumnType( Context context, CatalogColumnPlacement columnPlacement, CatalogColumn catalogColumn, PolyType oldType ) {
-        if ( !this.dialect.supportsNestedArrays() && catalogColumn.collectionsType != null ) {
+    public void updateColumnType( Context context, CatalogColumnPlacement columnPlacement, LogicalColumn logicalColumn, PolyType oldType ) {
+        if ( !this.dialect.supportsNestedArrays() && logicalColumn.collectionsType != null ) {
             return;
         }
         for ( CatalogPartitionPlacement partitionPlacement : catalog.getPartitionPlacementsByTableOnAdapter( columnPlacement.adapterId, columnPlacement.tableId ) ) {
@@ -273,12 +273,12 @@ public void updateColumnType( Context context, CatalogColumnPlacement columnPlac
                     .append( "." )
                     .append( dialect.quoteIdentifier( partitionPlacement.physicalTableName ) );
             builder.append( " ALTER COLUMN " ).append( dialect.quoteIdentifier( columnPlacement.physicalColumnName ) );
-            builder.append( " " ).append( getTypeString( catalogColumn.type ) );
-            if ( catalogColumn.length != null ) {
+            builder.append( " " ).append( getTypeString( logicalColumn.type ) );
+            if ( logicalColumn.length != null ) {
                 builder.append( "(" );
-                builder.append( catalogColumn.length );
-                if ( catalogColumn.scale != null ) {
-                    builder.append( "," ).append( catalogColumn.scale );
+                builder.append( logicalColumn.length );
+                if ( logicalColumn.scale != null ) {
+                    builder.append( "," ).append( logicalColumn.scale );
                 }
                 builder.append( ")" );
             }
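The store hunks above are a mechanical rename, CatalogColumn to LogicalColumn; the DDL-building logic itself is unchanged. A sketch of how the definition builder is driven, assuming a LogicalColumn of VARCHAR type with length 255 and no collections type (values are illustrative, methods are those in the hunks):

    // Builds e.g. `"name" VARCHAR(255) NULL` for an ALTER TABLE ... ADD fragment.
    StringBuilder builder = new StringBuilder();
    builder.append( dialect.quoteIdentifier( physicalColumnName ) );
    createColumnDefinition( logicalColumn, builder ); // appends " VARCHAR(255)"
    builder.append( " NULL" );                        // columns are created nullable first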
diff --git a/plugins/mapdb-catalog/src/main/java/org/polypheny/db/catalog/CatalogImpl.java b/plugins/mapdb-catalog/src/main/java/org/polypheny/db/catalog/CatalogImpl.java
index 4a1d45cefd..d4de8b2467 100644
--- a/plugins/mapdb-catalog/src/main/java/org/polypheny/db/catalog/CatalogImpl.java
+++ b/plugins/mapdb-catalog/src/main/java/org/polypheny/db/catalog/CatalogImpl.java
@@ -62,10 +62,8 @@
 import org.polypheny.db.algebra.type.AlgDataType;
 import org.polypheny.db.catalog.entity.CatalogAdapter;
 import org.polypheny.db.catalog.entity.CatalogAdapter.AdapterType;
-import org.polypheny.db.catalog.entity.logical.LogicalCollection;
 import org.polypheny.db.catalog.entity.CatalogCollectionMapping;
 import org.polypheny.db.catalog.entity.CatalogCollectionPlacement;
-import org.polypheny.db.catalog.entity.CatalogColumn;
 import org.polypheny.db.catalog.entity.CatalogColumnPlacement;
 import org.polypheny.db.catalog.entity.CatalogConstraint;
 import org.polypheny.db.catalog.entity.CatalogDataPlacement;
@@ -87,6 +85,8 @@
 import org.polypheny.db.catalog.entity.CatalogUser;
 import org.polypheny.db.catalog.entity.CatalogView;
 import org.polypheny.db.catalog.entity.MaterializedCriteria;
+import org.polypheny.db.catalog.entity.logical.LogicalCollection;
+import org.polypheny.db.catalog.entity.logical.LogicalColumn;
 import org.polypheny.db.catalog.entity.logical.LogicalGraph;
 import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.catalog.exceptions.GenericCatalogException;
@@ -100,7 +100,6 @@
 import org.polypheny.db.catalog.exceptions.UnknownColumnIdRuntimeException;
 import org.polypheny.db.catalog.exceptions.UnknownColumnPlacementRuntimeException;
 import org.polypheny.db.catalog.exceptions.UnknownConstraintException;
-import org.polypheny.db.catalog.exceptions.UnknownDatabaseException;
 import org.polypheny.db.catalog.exceptions.UnknownDatabaseIdRuntimeException;
 import org.polypheny.db.catalog.exceptions.UnknownForeignKeyException;
 import org.polypheny.db.catalog.exceptions.UnknownGraphException;
@@ -175,8 +174,8 @@ public class CatalogImpl extends Catalog {

     private static BTreeMap documentMappings;

-    private static BTreeMap columns;
-    private static BTreeMap columnNames;
+    private static BTreeMap columns;
+    private static BTreeMap columnNames;
     private static BTreeMap columnPlacements;

     private static HTreeMap adapters;
@@ -235,14 +234,11 @@ public class CatalogImpl extends Catalog {
     // would throw an error.
     private static final List tablesFlaggedForDeletion = new ArrayList<>();

-    Comparator columnComparator = Comparator.comparingInt( o -> o.position );
+    Comparator columnComparator = Comparator.comparingInt( o -> o.position );

     // {@link AlgNode} used to create view and materialized view
     @Getter
     private final Map nodeInfo = new HashMap<>();
-    // AlgDataTypes used to create view and materialized view
-    @Getter
-    private final Map algTypeInfo = new HashMap<>();


     public CatalogImpl() {
@@ -323,7 +319,7 @@ public CatalogImpl( String fileName, boolean doInitSchema, boolean doInitInforma
                 insertDefaultData();
             }

-        } catch ( GenericCatalogException | UnknownUserException | UnknownDatabaseException | UnknownTableException |
+        } catch ( GenericCatalogException | UnknownUserException | UnknownTableException |
                 UnknownSchemaException | UnknownAdapterException | UnknownColumnException e ) {
             throw new RuntimeException( e );
         }
@@ -411,7 +407,7 @@ public void restoreColumnPlacements( Transaction transaction ) {

         Map> restoredTables = new HashMap<>();

-        for ( CatalogColumn c : columns.values() ) {
+        for ( LogicalColumn c : columns.values() ) {
             List placements = getColumnPlacement( c.id );
             LogicalTable catalogTable = getTable( c.tableId );

@@ -429,11 +425,11 @@ public void restoreColumnPlacements( Transaction transaction ) {
                     // TODO only full placements atm here

                     if ( !restoredTables.containsKey( store.getAdapterId() ) ) {
-                        store.createPhysicalTable( transaction.createStatement().getPrepareContext(), catalogTable, , catalogTable.partitionProperty.partitionIds );
+                        store.createPhysicalTable( transaction.createStatement().getPrepareContext(), catalogTable, null );
                         restoredTables.put( store.getAdapterId(), Collections.singletonList( catalogTable.id ) );

                     } else if ( !(restoredTables.containsKey( store.getAdapterId() ) && restoredTables.get( store.getAdapterId() ).contains( catalogTable.id )) ) {
-                        store.createPhysicalTable( transaction.createStatement().getPrepareContext(), catalogTable, , catalogTable.partitionProperty.partitionIds );
+                        store.createPhysicalTable( transaction.createStatement().getPrepareContext(), catalogTable, null );
                         List ids = new ArrayList<>( restoredTables.get( store.getAdapterId() ) );
                         ids.add( catalogTable.id );
                         restoredTables.put( store.getAdapterId(), ids );
@@ -449,13 +445,13 @@ public void restoreColumnPlacements( Transaction transaction ) {
                 DataStore store = manager.getStore( p.adapterId );

                 if ( !restoredTables.containsKey( store.getAdapterId() ) ) {
-                    store.createPhysicalTable( transaction.createStatement().getPrepareContext(), table, , table.partitionProperty.partitionIds );
+                    store.createPhysicalTable( transaction.createStatement().getPrepareContext(), table, null );
                    List ids = new ArrayList<>();
                     ids.add( table.id );
                     restoredTables.put( store.getAdapterId(), ids );

                 } else if ( !(restoredTables.containsKey( store.getAdapterId() ) && restoredTables.get( store.getAdapterId() ).contains( table.id )) ) {
-                    store.createPhysicalTable( transaction.createStatement().getPrepareContext(), table, , table.partitionProperty.partitionIds );
+                    store.createPhysicalTable( transaction.createStatement().getPrepareContext(), table, null );
                     List ids = new ArrayList<>( restoredTables.get( store.getAdapterId() ) );
                     ids.add( table.id );
                     restoredTables.put( store.getAdapterId(), ids );
@@ -503,7 +499,6 @@ public void restoreViews( Transaction transaction ) {
                             sqlProcessor.validate( statement.getTransaction(), sqlNode, RuntimeConfig.ADD_DEFAULT_VALUES_IN_INSERTS.getBoolean() ).left,
                             new QueryParameters( query, c.getNamespaceType() ) );
                     nodeInfo.put( c.id, algRoot.alg );
-                    algTypeInfo.put( c.id, algRoot.validatedRowType );
                     break;

                 case "rel":
@@ -519,7 +514,6 @@
                     AlgRoot root = new AlgRoot( result, result.getRowType(), Kind.SELECT, fields, collation );
                     nodeInfo.put( c.id, root.alg );
-                    algTypeInfo.put( c.id, root.validatedRowType );
                     break;

                 case "mongo":
@@ -531,7 +525,6 @@
                             mqlNode,
                             new ExtendedQueryParameters( query, NamespaceType.DOCUMENT, getSchema( defaultDatabaseId ).name ) );
                     nodeInfo.put( c.id, mqlRel.alg );
-                    algTypeInfo.put( c.id, mqlRel.validatedRowType );
                     break;
             }
             if ( c.entityType == EntityType.MATERIALIZED_VIEW ) {
@@ -751,7 +744,7 @@ private void initDatabaseInfo( DB db ) {
     /**
     * Fills the catalog database with default data, skips if data is already inserted
     */
-    private void insertDefaultData() throws GenericCatalogException, UnknownUserException, UnknownDatabaseException, UnknownTableException, UnknownSchemaException, UnknownAdapterException, UnknownColumnException {
+    private void insertDefaultData() throws GenericCatalogException, UnknownUserException, UnknownTableException, UnknownSchemaException, UnknownAdapterException, UnknownColumnException {

         //////////////
         // init users
@@ -767,24 +760,14 @@ private void insertDefaultData() throws GenericCatalogException, UnknownUserExce
         }
         Catalog.defaultUserId = systemId;

-        //////////////
-        // init database
-        long databaseId;
-        if ( !databaseNames.containsKey( "APP" ) ) {
-            databaseId = addDatabase( "APP", systemId, "system", 1L, "public" );
-        } else {
-            databaseId = getDatabase( "APP" ).id;
-        }
-        Catalog.defaultDatabaseId = databaseId;

         //////////////
         // init schema
         long schemaId;
-        if ( !schemaNames.containsKey( new Object[]{ databaseId, "public" } ) ) {
-            schemaId = addNamespace( "public", databaseId, 1, NamespaceType.getDefault() );
+        if ( !schemaNames.containsKey( new Object[]{ "public" } ) ) {
+            schemaId = addNamespace( "public", 1, NamespaceType.getDefault() );
         } else {
-            schemaId = getSchema( "APP", "public" ).id;
+            schemaId = getSchema( "public" ).id;
         }

         //////////////
@@ -799,16 +782,16 @@ private void insertDefaultData() throws GenericCatalogException, UnknownUserExce
         // init schema
         CatalogAdapter csv = getAdapter( "hr" );
         if ( !testMode ) {
-            if ( !tableNames.containsKey( new Object[]{ databaseId, schemaId, "depts" } ) ) {
+            if ( !tableNames.containsKey( new Object[]{ schemaId, "depts" } ) ) {
                 addTable( "depts", schemaId, systemId, EntityType.SOURCE, false );
             }
-            if ( !tableNames.containsKey( new Object[]{ databaseId, schemaId, "emps" } ) ) {
+            if ( !tableNames.containsKey( new Object[]{ schemaId, "emps" } ) ) {
                 addTable( "emps", schemaId, systemId, EntityType.SOURCE, false );
             }
-            if ( !tableNames.containsKey( new Object[]{ databaseId, schemaId, "emp" } ) ) {
+            if ( !tableNames.containsKey( new Object[]{ schemaId, "emp" } ) ) {
                 addTable( "emp", schemaId, systemId, EntityType.SOURCE, false );
             }
-            if ( !tableNames.containsKey( new Object[]{ databaseId, schemaId, "work" } ) ) {
+            if ( !tableNames.containsKey( new Object[]{ schemaId, "work" } ) ) {
                 addTable( "work", schemaId, systemId, EntityType.SOURCE, false );
                 addDefaultCsvColumns( csv );
             }
@@ -843,8 +826,8 @@ public void restoreInterfacesIfNecessary() {
     /**
     * Initiates default columns for csv files
     */
-    private void addDefaultCsvColumns( CatalogAdapter csv ) throws UnknownSchemaException, UnknownTableException, GenericCatalogException, UnknownColumnException, UnknownDatabaseException {
-        CatalogSchema schema = getSchema( "APP", "public" );
+    private void addDefaultCsvColumns( CatalogAdapter csv ) throws UnknownSchemaException, UnknownTableException, GenericCatalogException, UnknownColumnException {
+        CatalogSchema schema = getSchema( "public" );
         LogicalTable depts = getTable( schema.id, "depts" );

         addDefaultCsvColumn( csv, depts, "deptno", PolyType.INTEGER, null, 1, null );
@@ -967,10 +950,15 @@ public void clear() {
     }

+    @Override
+    public Snapshot getSnapshot( long id ) {
+        return null;
+    }
+
+
     /**
     * {@inheritDoc}
     */
-    @Override
     public long addDatabase( String name, int ownerId, String ownerName, long defaultSchemaId, String defaultSchemaName ) {
         long id = databaseIdBuilder.getAndIncrement();
         CatalogDatabase database = new CatalogDatabase( id, name, ownerId, ownerName, defaultSchemaId, defaultSchemaName );
@@ -987,7 +975,6 @@ public long addDatabase( String name, int ownerId, String ownerName, long defaul
     /**
     * {@inheritDoc}
     */
-    @Override
     public void deleteDatabase( long databaseId ) {
         CatalogDatabase database = getDatabase( databaseId );
         if ( database != null ) {
@@ -1018,7 +1005,6 @@ public int addUser( String name, String password ) {
     /**
     * {@inheritDoc}
     */
-    @Override
     public List getDatabases( Pattern pattern ) {
         if ( pattern != null ) {
             if ( pattern.containsWildcards ) {
@@ -1039,21 +1025,7 @@ public List getDatabases( Pattern pattern ) {
     /**
     * {@inheritDoc}
     */
-    @Override
-    public CatalogDatabase getDatabase( String databaseName ) throws UnknownDatabaseException {
-        try {
-            return Objects.requireNonNull( databaseNames.get( databaseName ) );
-        } catch ( NullPointerException e ) {
-            throw new UnknownDatabaseException( databaseName );
-        }
-    }
-
-
-    /**
-     * {@inheritDoc}
-     */
-    @Override
-    public CatalogDatabase getDatabase( long databaseId ) {
+    private CatalogDatabase getDatabase( long databaseId ) {
         try {
             return Objects.requireNonNull( databases.get( databaseId ) );
         } catch ( NullPointerException e ) {
@@ -1066,37 +1038,11 @@
     * {@inheritDoc}
     */
     @Override
-    public List getSchemas( Pattern databaseNamePattern, Pattern schemaNamePattern ) {
-        List catalogDatabases = getDatabases( databaseNamePattern );
-        if ( catalogDatabases.size() > 0 ) {
-            Stream catalogSchemas = catalogDatabases.stream().filter( d -> databaseChildren.containsKey( d.id ) ).flatMap( d -> Objects.requireNonNull( databaseChildren.get( d.id ) ).stream() ).map( schemas::get );
-
-            if ( schemaNamePattern != null ) {
-                catalogSchemas = catalogSchemas.filter( s -> s.name.matches( schemaNamePattern.toLowerCase().toRegex() ) );
-            }
-            return catalogSchemas.collect( Collectors.toList() );
-        }
-        return new ArrayList<>();
-    }
-
-
-    /**
-     * {@inheritDoc}
-     */
-    @Override
-    public List getSchemas( long databaseId, Pattern schemaNamePattern ) {
+    public List getSchemas( Pattern schemaNamePattern ) {
         if ( schemaNamePattern != null ) {
-            schemaNamePattern = schemaNamePattern.toLowerCase();
-            List list = new ArrayList<>();
-            for ( CatalogSchema schema : schemaNames.prefixSubMap( new Object[]{ databaseId } ).values() ) {
-                if ( schema.name.matches( schemaNamePattern.pattern ) ) {
-                    list.add( schema );
-                }
-            }
-            return ImmutableList.copyOf( list );
-        } else {
-            return new ArrayList<>( schemaNames.prefixSubMap( new Object[]{ databaseId } ).values() );
+            return schemaNames.values().stream().filter( s -> s.name.matches( schemaNamePattern.toRegex() ) ).collect( Collectors.toList() );
         }
+        return new ArrayList<>();
     }

@@ -1117,27 +1063,12 @@ public CatalogSchema getSchema( long schemaId ) {
     * {@inheritDoc}
     */
     @Override
-    public CatalogSchema getSchema( String databaseName, String schemaName ) throws UnknownSchemaException, UnknownDatabaseException {
-        schemaName = schemaName.toLowerCase();
-        try {
-            long databaseId = getDatabase( databaseName ).id;
-            return Objects.requireNonNull( schemaNames.get( new Object[]{ databaseId, schemaName } ) );
-        } catch ( NullPointerException e ) {
-            throw new UnknownSchemaException( databaseName, schemaName );
-        }
-    }
-
-
-    /**
-     * {@inheritDoc}
-     */
-    @Override
-    public CatalogSchema getSchema( long databaseId, String schemaName ) throws UnknownSchemaException {
+    public CatalogSchema getSchema( final String schemaName ) throws UnknownSchemaException {
+        String name = schemaName.toLowerCase();
         try {
-            schemaName = schemaName.toLowerCase();
-            return Objects.requireNonNull( schemaNames.get( new Object[]{ databaseId, schemaName } ) );
+            return Objects.requireNonNull( schemaNames.get( new Object[]{ name } ) );
         } catch ( NullPointerException e ) {
-            throw new UnknownSchemaException( databaseId, schemaName );
+            throw new UnknownSchemaException( schemaName );
        }
     }

@@ -1146,18 +1077,15 @@ public CatalogSchema getSchema( long databaseId, String schemaName ) throws Unkn
     * {@inheritDoc}
     */
     @Override
-    public long addNamespace( String name, long databaseId, int ownerId, NamespaceType namespaceType ) {
+    public long addNamespace( String name, int ownerId, NamespaceType namespaceType ) {
         name = name.toLowerCase();
         CatalogUser owner = getUser( ownerId );
         long id = namespaceIdBuilder.getAndIncrement();
-        CatalogSchema schema = new CatalogSchema( id, name, databaseId, ownerId, owner.name, namespaceType, namespaceType == NamespaceType.DOCUMENT || namespaceType == NamespaceType.GRAPH );
+        CatalogSchema schema = new CatalogSchema( id, name, ownerId, owner.name, namespaceType, namespaceType == NamespaceType.DOCUMENT || namespaceType == NamespaceType.GRAPH );
         synchronized ( this ) {
             schemas.put( id, schema );
-            schemaNames.put( new Object[]{ databaseId, name }, schema );
+            schemaNames.put( new Object[]{ name }, schema );
             schemaChildren.put( id, ImmutableList.builder().build() );
-            List children = new ArrayList<>( Objects.requireNonNull( databaseChildren.get( databaseId ) ) );
-            children.add( id );
-            databaseChildren.replace( databaseId, ImmutableList.copyOf( children ) );
         }
         listeners.firePropertyChange( "namespace", null, schema );
         return id;
@@ -1168,9 +1096,9 @@
     * {@inheritDoc}
     */
     @Override
-    public boolean checkIfExistsSchema( long databaseId, String schemaName ) {
+    public boolean checkIfExistsSchema( String schemaName ) {
         schemaName = schemaName.toLowerCase();
-        return schemaNames.containsKey( new Object[]{ databaseId, schemaName } );
+        return schemaNames.containsKey( new Object[]{ schemaName } );
     }
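With the database level gone, the composite keys of schemaNames shrink from { databaseId, name } to { name }, and addNamespace, getSchema and checkIfExistsSchema each lose a parameter. A minimal sketch of the narrowed API under those signatures (resolvePublicNamespace is a hypothetical helper; the checked UnknownSchemaException is declared rather than handled):

    // Resolve or create the "public" namespace using only its name as the key.
    long resolvePublicNamespace( Catalog catalog ) throws UnknownSchemaException {
        if ( catalog.checkIfExistsSchema( "public" ) ) {
            return catalog.getSchema( "public" ).id;
        }
        return catalog.addNamespace( "public", Catalog.defaultUserId, NamespaceType.getDefault() );
    }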
@@ -1182,12 +1110,12 @@ public void renameSchema( long schemaId, String name ) {
         name = name.toLowerCase();
         try {
             CatalogSchema old = Objects.requireNonNull( schemas.get( schemaId ) );
-            CatalogSchema schema = new CatalogSchema( old.id, name, old.databaseId, old.ownerId, old.ownerName, old.namespaceType, false );
+            CatalogSchema schema = new CatalogSchema( old.id, name, old.ownerId, old.ownerName, old.namespaceType, false );

             synchronized ( this ) {
                 schemas.replace( schemaId, schema );
-                schemaNames.remove( new Object[]{ old.databaseId, old.name } );
-                schemaNames.put( new Object[]{ old.databaseId, name }, schema );
+                schemaNames.remove( new Object[]{ old.name } );
+                schemaNames.put( new Object[]{ name }, schema );
             }
             listeners.firePropertyChange( "schema", old, schema );
         } catch ( NullPointerException e ) {
@@ -1203,10 +1131,10 @@
     public void setSchemaOwner( long schemaId, long ownerId ) {
         try {
             CatalogSchema old = Objects.requireNonNull( schemas.get( schemaId ) );
-            CatalogSchema schema = new CatalogSchema( old.id, old.name, old.databaseId, (int) ownerId, old.ownerName, old.namespaceType, false );
+            CatalogSchema schema = new CatalogSchema( old.id, old.name, (int) ownerId, old.ownerName, old.namespaceType, false );
             synchronized ( this ) {
                 schemas.replace( schemaId, schema );
-                schemaNames.replace( new Object[]{ schema.databaseId, schema.name }, schema );
+                schemaNames.replace( new Object[]{ schema.name }, schema );
             }
             listeners.firePropertyChange( "schema", old, schema );
         } catch ( NullPointerException e ) {
@@ -1219,18 +1147,18 @@
     * {@inheritDoc}
     */
     @Override
-    public long addGraph( long databaseId, String name, List stores, boolean modifiable, boolean ifNotExists, boolean replace ) {
-        if ( getGraphs( databaseId, new Pattern( name ) ).size() != 0 && !ifNotExists ) {
+    public long addGraph( String name, List stores, boolean modifiable, boolean ifNotExists, boolean replace ) {
+        if ( getGraphs( new Pattern( name ) ).size() != 0 && !ifNotExists ) {
             throw new GraphAlreadyExistsException( name );
         }

-        long id = addNamespace( name, databaseId, Catalog.defaultUserId, NamespaceType.GRAPH );
+        long id = addNamespace( name, Catalog.defaultUserId, NamespaceType.GRAPH );

-        LogicalGraph graph = new LogicalGraph( databaseId, id, name, Catalog.defaultUserId, modifiable, ImmutableList.of(), true );
+        LogicalGraph graph = new LogicalGraph( id, name, Catalog.defaultUserId, modifiable, ImmutableList.of(), true );

         synchronized ( this ) {
             graphs.put( id, graph );
-            graphNames.put( new Object[]{ databaseId, name }, graph );
+            graphNames.put( new Object[]{ name }, graph );
         }

         listeners.firePropertyChange( "graph", null, graph );
@@ -1651,7 +1579,7 @@ public void deleteGraph( long id ) {
         synchronized ( this ) {
             old.placements.forEach( a -> graphPlacements.remove( new Object[]{ old.id, a } ) );
             graphs.remove( id );
-            graphNames.remove( new Object[]{ old.databaseId, old.name } );
+            graphNames.remove( new Object[]{ old.name } );
             graphMappings.remove( id );
         }
         listeners.firePropertyChange( "graph", old, null );
@@ -1674,7 +1602,7 @@ public LogicalGraph getGraph( long id ) {
     * {@inheritDoc}
     */
     @Override
-    public List getGraphs( long databaseId, Pattern graphName ) {
+    public List getGraphs( Pattern graphName ) {
         if ( graphName != null ) {
             return ImmutableList.copyOf(
                     Stream.concat(
@@ -1694,10 +1622,7 @@ public List getGraphs( long databaseId, Pattern graphName ) {
     public void deleteSchema( long schemaId ) {
         CatalogSchema schema = getSchema( schemaId );
         synchronized ( this ) {
-            schemaNames.remove( new Object[]{ schema.databaseId, schema.name } );
-            List oldChildren = new ArrayList<>( Objects.requireNonNull( databaseChildren.get( schema.databaseId ) ) );
-            oldChildren.remove( schemaId );
-            databaseChildren.replace( schema.databaseId, ImmutableList.copyOf( oldChildren ) );
+            schemaNames.remove( new Object[]{ schema.name } );

             for ( Long id : Objects.requireNonNull( schemaChildren.get( schemaId ) ) ) {
                 deleteTable( id );
@@ -1719,9 +1644,9 @@ public List getTables( long schemaId, Pattern tableNamePattern ) {
             CatalogSchema schema = Objects.requireNonNull( schemas.get( schemaId ) );
             if ( tableNamePattern != null ) {
-                return Collections.singletonList( tableNames.get( new Object[]{ schema.databaseId, schemaId, tableNamePattern.pattern } ) );
+                return Collections.singletonList( tableNames.get( new Object[]{ schemaId, tableNamePattern.pattern } ) );
             } else {
-                return new ArrayList<>( tableNames.prefixSubMap( new Object[]{ schema.databaseId, schemaId } ).values() );
+                return new ArrayList<>( tableNames.prefixSubMap( new Object[]{ schemaId } ).values() );
             }
         }
         return new ArrayList<>();
@@ -1732,48 +1657,19 @@
     * {@inheritDoc}
     */
     @Override
-    public List getTables( long databaseId, Pattern schemaNamePattern, Pattern tableNamePattern ) {
+    public List getTables( Pattern schemaNamePattern, Pattern tableNamePattern ) {
         if ( schemaNamePattern != null && tableNamePattern != null ) {
-            CatalogSchema schema = schemaNames.get( new Object[]{ databaseId, schemaNamePattern.pattern } );
+            CatalogSchema schema = schemaNames.get( new Object[]{ schemaNamePattern.pattern } );
             if ( schema != null ) {
-                return Collections.singletonList( Objects.requireNonNull( tableNames.get( new Object[]{ databaseId, schema.id, tableNamePattern.pattern } ) ) );
+                return Collections.singletonList( Objects.requireNonNull( tableNames.get( new Object[]{ schema.id, tableNamePattern.pattern } ) ) );
             }
         } else if ( schemaNamePattern != null ) {
-            CatalogSchema schema = schemaNames.get( new Object[]{ databaseId, schemaNamePattern.pattern } );
+            CatalogSchema schema = schemaNames.get( new Object[]{ schemaNamePattern.pattern } );
             if ( schema != null ) {
-                return new ArrayList<>( tableNames.prefixSubMap( new Object[]{ databaseId, schema.id } ).values() );
+                return new ArrayList<>( tableNames.prefixSubMap( new Object[]{ schema.id } ).values() );
             }
         } else {
-            return new ArrayList<>( tableNames.prefixSubMap( new Object[]{ databaseId } ).values() );
-        }
-
-        return new ArrayList<>();
-    }
-
-
-    /**
-     * {@inheritDoc}
-     */
-    @Override
-    public List getTables( Pattern databaseNamePattern, Pattern schemaNamePattern, Pattern tableNamePattern ) {
-        List catalogSchemas = getSchemas( databaseNamePattern, schemaNamePattern );
-
-        if ( catalogSchemas.size() > 0 ) {
-            Stream catalogTables = catalogSchemas.stream()
-                    .filter( t -> schemaChildren.containsKey( t.id ) )
-                    .flatMap( t -> Objects.requireNonNull( schemaChildren.get( t.id ) ).stream() )
-                    .map( tables::get );
-
-            if ( tableNamePattern != null ) {
-                catalogTables = catalogTables.filter( t -> {
-                    Pattern pattern = tableNamePattern;
-                    if ( !getSchema( t.namespaceId ).caseSensitive ) {
-                        pattern = tableNamePattern.toLowerCase();
-                    }
-                    return t.name.matches( pattern.toRegex() );
-                } );
-            }
-            return catalogTables.collect( Collectors.toList() );
+            return new ArrayList<>( tableNames.values() );
         }

         return new ArrayList<>();
@@ -1803,31 +1699,13 @@ public LogicalTable getTable( long schemaId, String tableName ) throws UnknownTa
             if ( !schema.caseSensitive ) {
                 tableName = tableName.toLowerCase();
             }
-            return Objects.requireNonNull( tableNames.get( new Object[]{ schema.databaseId, schemaId, tableName } ) );
+            return Objects.requireNonNull( tableNames.get( new Object[]{ schemaId, tableName } ) );
         } catch ( NullPointerException e ) {
             throw new UnknownTableException( schemaId, tableName );
         }
     }

-    /**
-     * {@inheritDoc}
-     */
-    @Override
-    public LogicalTable getTable( long databaseId, String schemaName, String tableName ) throws UnknownTableException {
-        try {
-            CatalogSchema schema = Objects.requireNonNull( schemaNames.get( new Object[]{ databaseId, schemaName } ) );
-            if ( !schema.caseSensitive ) {
-                tableName = tableName.toLowerCase();
-            }
-
-            return Objects.requireNonNull( tableNames.get( new Object[]{ databaseId, schema.id, tableName } ) );
-        } catch ( NullPointerException e ) {
-            throw new UnknownTableException( databaseId, schemaName, tableName );
-        }
-    }
-

     /**
     * {@inheritDoc}
     */
@@ -1841,17 +1719,16 @@ public LogicalTable getTableFromPartition( long partitionId ) {
     * {@inheritDoc}
     */
     @Override
-    public LogicalTable getTable( String databaseName, String schemaName, String tableName ) throws UnknownTableException, UnknownDatabaseException, UnknownSchemaException {
+    public LogicalTable getTable( String schemaName, String tableName ) throws UnknownTableException, UnknownSchemaException {
         try {
-            long databaseId = getDatabase( databaseName ).id;
-            CatalogSchema schema = getSchema( databaseId, schemaName );
+            CatalogSchema schema = getSchema( schemaName );
             if ( !schema.caseSensitive ) {
                 tableName = tableName.toLowerCase();
             }

-            return Objects.requireNonNull( tableNames.get( new Object[]{ databaseId, schema.id, tableName } ) );
+            return Objects.requireNonNull( tableNames.get( new Object[]{ schema.id, tableName } ) );
         } catch ( NullPointerException e ) {
-            throw new UnknownTableException( databaseName, schemaName, tableName );
+            throw new UnknownTableException( schemaName, tableName );
         }
     }

@@ -1887,7 +1764,6 @@ public long addTable( String name, long namespaceId, int ownerId, EntityType ent
                 name,
                 ImmutableList.of(),
                 namespaceId,
-                schema.databaseId,
                 ownerId,
                 entityType,
                 null,
@@ -1936,7 +1812,6 @@ public long addView( String name, long namespaceId, int ownerId, EntityType enti
                 name,
                 ImmutableList.of(),
                 namespaceId,
-                schema.databaseId,
                 ownerId,
                 entityType,
                 query,//definition,
@@ -1946,15 +1821,11 @@
                 partitionProperty,
                 algCollation,
                 ImmutableList.of(),
-                ImmutableMap.copyOf( underlyingTables.entrySet().stream().collect( Collectors.toMap(
-                        Entry::getKey,
-                        e -> ImmutableList.copyOf( e.getValue() )
-                ) ) ),
+                underlyingTables,
                 language.getSerializedName() //fieldList
         );
         addConnectedViews( underlyingTables, viewTable.id );
         updateEntityLogistics( name, namespaceId, id, schema, viewTable );
-        algTypeInfo.put( id, fieldList );
         nodeInfo.put( id, definition );

         return id;
@@ -1998,19 +1869,18 @@ public long addMaterializedView( String name, long namespaceId, int ownerId, Ent
             CatalogMaterializedView materializedViewTable = new CatalogMaterializedView(
                     id,
                     name,
-                    ImmutableList.of(),
+                    List.of(),
                     namespaceId,
-                    schema.databaseId,
                     ownerId,
                     entityType,
                     query,
                     null,
-                    ImmutableList.of(),
+                    List.of(),
                     modifiable,
                     partitionProperty,
                     algCollation,
-                    ImmutableList.of(),
-                    ImmutableMap.copyOf( map ),
+                    List.of(),
+                    Map.copyOf( map ),
                     language.getSerializedName(),
                     materializedCriteria,
                     ordered
             );
             addConnectedViews( underlyingTables, materializedViewTable.id );
             updateEntityLogistics( name, namespaceId, id, schema, materializedViewTable );
-            algTypeInfo.put( id, fieldList );
             nodeInfo.put( id, definition );
         } else {
             // Should not happen, addViewTable is only called with EntityType.View
@@ -2035,7 +1904,7 @@ private void updateEntityLogistics( String name, long namespaceId, long id, Cata
         synchronized ( this ) {
             tables.put( id, entity );
             tableChildren.put( id, ImmutableList.builder().build() );
-            tableNames.put( new Object[]{ schema.databaseId, namespaceId, name }, entity );
+            tableNames.put( new Object[]{ namespaceId, name }, entity );
             List children = new ArrayList<>( Objects.requireNonNull( schemaChildren.get( namespaceId ) ) );
             children.add( id );
             schemaChildren.replace( namespaceId, ImmutableList.copyOf( children ) );
@@ -2054,11 +1923,11 @@ public void addConnectedViews( Map> underlyingTables, long view
             List connectedViews;
             connectedViews = new ArrayList<>( old.connectedViews );
             connectedViews.add( viewId );
-            LogicalTable table = old.getConnectedViews( ImmutableList.copyOf( connectedViews ) );
+            LogicalTable table = old.withConnectedViews( ImmutableList.copyOf( connectedViews ) );
             synchronized ( this ) {
                 tables.replace( id, table );
                 assert table != null;
-                tableNames.replace( new Object[]{ table.databaseId, table.namespaceId, old.name }, table );
+                tableNames.replace( new Object[]{ table.namespaceId, old.name }, table );
             }
             listeners.firePropertyChange( "table", old, table );
         }
@@ -2074,12 +1943,12 @@ public void deleteViewDependencies( CatalogView catalogView ) {
             LogicalTable old = getTable( id );
             List connectedViews = old.connectedViews.stream().filter( e -> e != catalogView.id ).collect( Collectors.toList() );

-            LogicalTable table = old.getConnectedViews( ImmutableList.copyOf( connectedViews ) );
+            LogicalTable table = old.withConnectedViews( ImmutableList.copyOf( connectedViews ) );

             synchronized ( this ) {
                 tables.replace( id, table );
                 assert table != null;
-                tableNames.replace( new Object[]{ table.databaseId, table.namespaceId, old.name }, table );
+                tableNames.replace( new Object[]{ table.namespaceId, old.name }, table );
             }
             listeners.firePropertyChange( "table", old, table );
         }
@@ -2095,7 +1964,7 @@ public boolean checkIfExistsEntity( long namespaceId, String entityName ) {
         if ( !schema.caseSensitive ) {
             entityName = entityName.toLowerCase();
         }
-        return tableNames.containsKey( new Object[]{ schema.databaseId, namespaceId, entityName } );
+        return tableNames.containsKey( new Object[]{ namespaceId, entityName } );
     }

@@ -2118,11 +1987,11 @@ public void renameTable( long tableId, String name ) {
             name = name.toLowerCase();
         }

-        LogicalTable table = old.getRenamed( name );
+        LogicalTable table = old.withName( name );
         synchronized ( this ) {
             tables.replace( tableId, table );
-            tableNames.remove( new Object[]{ table.databaseId, table.namespaceId, old.name } );
-            tableNames.put( new Object[]{ table.databaseId, table.namespaceId, name }, table );
+            tableNames.remove( new Object[]{ table.namespaceId, old.name } );
+            tableNames.put( new Object[]{ table.namespaceId, name }, table );
         }
         listeners.firePropertyChange( "table", old, table );
     }
@@ -2158,7 +2027,7 @@ public void deleteTable( long tableId ) {
             tableChildren.remove( tableId );
             tables.remove( tableId );
-            tableNames.remove( new Object[]{ table.databaseId, table.namespaceId, table.name } );
+            tableNames.remove( new Object[]{ table.namespaceId, table.name } );
             flagTableForDeletion( table.id, false );
             // primary key was deleted and open table has to be closed
             if ( openTable != null && openTable == tableId ) {
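From here on the patch replaces hand-copied constructor calls with copy-on-write "with-ers" on the immutable catalog entities (withName, withOwnerId, withPrimaryKey, withConnectedViews, withMaterializedCriteria). A sketch of the resulting update idiom, assuming a LogicalTable named old as in the hunks:

    // One with-er per changed field replaces re-invoking the full constructor,
    // so adding a field to LogicalTable no longer touches every mutator.
    LogicalTable renamed = old.withName( newName );
    LogicalTable reowned = renamed.withOwnerId( ownerId );
    LogicalTable rekeyed = reowned.withPrimaryKey( keyId );

Each with-er returns a fresh copy, which is then swapped into the tables and tableNames maps as the following hunks show.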
long tableId, int ownerId ) { LogicalTable old = getTable( tableId ); - LogicalTable table; - - if ( old instanceof CatalogMaterializedView ) { - CatalogMaterializedView oldView = (CatalogMaterializedView) old; - table = new CatalogMaterializedView( - old.id, - old.name, - old.fieldIds, - old.namespaceId, - old.databaseId, - ownerId, - old.entityType, - oldView.getQuery(), - old.primaryKey, - old.dataPlacements, - old.modifiable, - old.partitionProperty, - oldView.getAlgCollation(), - old.connectedViews, - oldView.getUnderlyingTables(), - oldView.getLanguage().getSerializedName(), - oldView.getMaterializedCriteria(), - oldView.isOrdered() ); - } else { - table = new LogicalTable( - old.id, - old.name, - old.fieldIds, - old.namespaceId, - old.databaseId, - ownerId, - old.entityType, - old.primaryKey, - old.dataPlacements, - old.modifiable, - old.partitionProperty, - old.connectedViews ); - } + LogicalTable table = old.withOwnerId( ownerId ); synchronized ( this ) { tables.replace( tableId, table ); - tableNames.replace( new Object[]{ table.databaseId, table.namespaceId, table.name }, table ); + tableNames.replace( new Object[]{ table.namespaceId, table.name }, table ); } listeners.firePropertyChange( "table", old, table ); } @@ -2230,47 +2062,11 @@ public void setTableOwner( long tableId, int ownerId ) { public void setPrimaryKey( long tableId, Long keyId ) { LogicalTable old = getTable( tableId ); - LogicalTable table; - - if ( old instanceof CatalogMaterializedView ) { - CatalogMaterializedView oldView = (CatalogMaterializedView) old; - table = new CatalogMaterializedView( - old.id, - old.name, - old.fieldIds, - old.namespaceId, - old.databaseId, - old.ownerId, - old.entityType, - oldView.getQuery(), - keyId, - old.dataPlacements, - old.modifiable, - old.partitionProperty, - oldView.getAlgCollation(), - old.connectedViews, - oldView.getUnderlyingTables(), - oldView.getLanguage().getSerializedName(), - oldView.getMaterializedCriteria(), - oldView.isOrdered() ); - } else { - table = new LogicalTable( - old.id, - old.name, - old.fieldIds, - old.namespaceId, - old.databaseId, - old.ownerId, - old.entityType, - keyId, - old.dataPlacements, - old.modifiable, - old.partitionProperty, old.connectedViews ); - } + LogicalTable table = old.withPrimaryKey( keyId ); synchronized ( this ) { tables.replace( tableId, table ); - tableNames.replace( new Object[]{ table.databaseId, table.namespaceId, table.name }, table ); + tableNames.replace( new Object[]{ table.namespaceId, table.name }, table ); if ( keyId == null ) { openTable = tableId; @@ -2288,7 +2084,7 @@ public void setPrimaryKey( long tableId, Long keyId ) { */ @Override public void addColumnPlacement( int adapterId, long columnId, PlacementType placementType, String physicalSchemaName, String physicalTableName, String physicalColumnName ) { - CatalogColumn column = Objects.requireNonNull( columns.get( columnId ) ); + LogicalColumn column = Objects.requireNonNull( columns.get( columnId ) ); CatalogAdapter store = Objects.requireNonNull( adapters.get( adapterId ) ); CatalogColumnPlacement columnPlacement = new CatalogColumnPlacement( @@ -2352,33 +2148,15 @@ public void updateMaterializedViewRefreshTime( long materializedViewId ) { MaterializedCriteria materializedCriteria = old.getMaterializedCriteria(); materializedCriteria.setLastUpdate( new Timestamp( System.currentTimeMillis() ) ); - CatalogMaterializedView catalogMaterializedView = new CatalogMaterializedView( - old.id, - old.name, - old.fieldIds, - old.namespaceId, - old.databaseId, - 
old.ownerId, - old.entityType, - old.getQuery(), - old.primaryKey, - old.dataPlacements, - old.modifiable, - old.partitionProperty, - old.getAlgCollation(), - old.connectedViews, - old.getUnderlyingTables(), - old.getLanguage().getSerializedName(), - materializedCriteria, - old.isOrdered() ); + CatalogMaterializedView view = old.withMaterializedCriteria( materializedCriteria ); synchronized ( this ) { - tables.replace( materializedViewId, catalogMaterializedView ); + tables.replace( materializedViewId, view ); tableNames.replace( - new Object[]{ catalogMaterializedView.databaseId, catalogMaterializedView.namespaceId, catalogMaterializedView.name }, - catalogMaterializedView ); + new Object[]{ view.namespaceId, view.name }, + view ); } - listeners.firePropertyChange( "table", old, catalogMaterializedView ); + listeners.firePropertyChange( "table", old, view ); } @@ -2402,13 +2180,13 @@ public List getCollections( long namespaceId, Pattern namePat if ( schemas.containsKey( namespaceId ) ) { CatalogSchema schema = Objects.requireNonNull( schemas.get( namespaceId ) ); if ( namePattern != null ) { - LogicalCollection collection = collectionNames.get( new Object[]{ schema.databaseId, namespaceId, namePattern.pattern } ); + LogicalCollection collection = collectionNames.get( new Object[]{ namespaceId, namePattern.pattern } ); if ( collection == null ) { return new ArrayList<>(); } return Collections.singletonList( collection ); } else { - return new ArrayList<>( collectionNames.prefixSubMap( new Object[]{ schema.databaseId, namespaceId } ).values() ); + return new ArrayList<>( collectionNames.prefixSubMap( new Object[]{ namespaceId } ).values() ); } } return new ArrayList<>(); @@ -2437,7 +2215,7 @@ public long addCollection( Long id, String name, long schemaId, int currentUserI synchronized ( this ) { collections.put( collectionId, collection ); - collectionNames.put( new Object[]{ namespace.databaseId, schemaId, name }, collection ); + collectionNames.put( new Object[]{ schemaId, name }, collection ); } listeners.firePropertyChange( "collection", null, entity ); @@ -2934,10 +2712,10 @@ public void updateColumnPlacementPhysicalNames( int adapterId, long columnId, St * {@inheritDoc} */ @Override - public List getColumns( long tableId ) { + public List getColumns( long tableId ) { try { LogicalTable table = Objects.requireNonNull( tables.get( tableId ) ); - return columnNames.prefixSubMap( new Object[]{ table.databaseId, table.namespaceId, table.id } ).values().stream().sorted( columnComparator ).collect( Collectors.toList() ); + return columnNames.prefixSubMap( new Object[]{ table.namespaceId, table.id } ).values().stream().sorted( columnComparator ).collect( Collectors.toList() ); } catch ( NullPointerException e ) { return new ArrayList<>(); } @@ -2948,11 +2726,11 @@ public List getColumns( long tableId ) { * {@inheritDoc} */ @Override - public List getColumns( Pattern databaseNamePattern, Pattern schemaNamePattern, Pattern tableNamePattern, Pattern columnNamePattern ) { - List catalogEntities = getTables( databaseNamePattern, schemaNamePattern, tableNamePattern ); + public List getColumns( Pattern schemaNamePattern, Pattern tableNamePattern, Pattern columnNamePattern ) { + List catalogEntities = getTables( schemaNamePattern, tableNamePattern ); if ( catalogEntities.size() > 0 ) { - Stream catalogColumns = catalogEntities.stream().filter( t -> tableChildren.containsKey( t.id ) ).flatMap( t -> Objects.requireNonNull( tableChildren.get( t.id ) ).stream() ).map( columns::get ); + Stream 
catalogColumns = catalogEntities.stream().filter( t -> tableChildren.containsKey( t.id ) ).flatMap( t -> Objects.requireNonNull( tableChildren.get( t.id ) ).stream() ).map( columns::get ); if ( columnNamePattern != null ) { catalogColumns = catalogColumns.filter( c -> c.name.matches( columnNamePattern.toRegex() ) ); @@ -2968,7 +2746,7 @@ public List getColumns( Pattern databaseNamePattern, Pattern sche * {@inheritDoc} */ @Override - public CatalogColumn getColumn( long columnId ) { + public LogicalColumn getColumn( long columnId ) { try { return Objects.requireNonNull( columns.get( columnId ) ); } catch ( NullPointerException e ) { @@ -2981,13 +2759,13 @@ public CatalogColumn getColumn( long columnId ) { * {@inheritDoc} */ @Override - public CatalogColumn getColumn( long tableId, String columnName ) throws UnknownColumnException { + public LogicalColumn getColumn( long tableId, String columnName ) throws UnknownColumnException { try { LogicalTable table = getTable( tableId ); if ( !getSchema( table.namespaceId ).caseSensitive ) { columnName = columnName.toLowerCase(); } - return Objects.requireNonNull( columnNames.get( new Object[]{ table.databaseId, table.namespaceId, table.id, columnName } ) ); + return Objects.requireNonNull( columnNames.get( new Object[]{ table.namespaceId, table.id, columnName } ) ); } catch ( NullPointerException e ) { throw new UnknownColumnException( tableId, columnName ); } @@ -2998,12 +2776,12 @@ public CatalogColumn getColumn( long tableId, String columnName ) throws Unknown * {@inheritDoc} */ @Override - public CatalogColumn getColumn( String databaseName, String schemaName, String tableName, String columnName ) throws UnknownColumnException, UnknownSchemaException, UnknownDatabaseException, UnknownTableException { + public LogicalColumn getColumn( String schemaName, String tableName, String columnName ) throws UnknownColumnException, UnknownSchemaException, UnknownTableException { try { - LogicalTable table = getTable( databaseName, schemaName, tableName ); - return Objects.requireNonNull( columnNames.get( new Object[]{ table.databaseId, table.namespaceId, table.id, columnName } ) ); + LogicalTable table = getTable( schemaName, tableName ); + return Objects.requireNonNull( columnNames.get( new Object[]{ table.namespaceId, table.id, columnName } ) ); } catch ( NullPointerException e ) { - throw new UnknownColumnException( databaseName, schemaName, tableName, columnName ); + throw new UnknownColumnException( schemaName, tableName, columnName ); } } @@ -3029,12 +2807,11 @@ public long addColumn( String name, long tableId, int position, PolyType type, P } long id = columnIdBuilder.getAndIncrement(); - CatalogColumn column = new CatalogColumn( + LogicalColumn column = new LogicalColumn( id, name, tableId, table.namespaceId, - table.databaseId, position, type, collectionsType, @@ -3048,7 +2825,7 @@ public long addColumn( String name, long tableId, int position, PolyType type, P synchronized ( this ) { columns.put( id, column ); - columnNames.put( new Object[]{ table.databaseId, table.namespaceId, table.id, name }, column ); + columnNames.put( new Object[]{ table.namespaceId, table.id, name }, column ); List children = new ArrayList<>( Objects.requireNonNull( tableChildren.get( tableId ) ) ); children.add( id ); tableChildren.replace( tableId, ImmutableList.copyOf( children ) ); @@ -3058,9 +2835,9 @@ public long addColumn( String name, long tableId, int position, PolyType type, P LogicalTable updatedTable; - updatedTable = table.getTableWithColumns( 
ImmutableList.copyOf( columnIds ) );
+            updatedTable = table.withFieldIds( ImmutableList.copyOf( columnIds ) );
             tables.replace( tableId, updatedTable );
-            tableNames.replace( new Object[]{ updatedTable.databaseId, updatedTable.namespaceId, updatedTable.name }, updatedTable );
+            tableNames.replace( new Object[]{ updatedTable.namespaceId, updatedTable.name }, updatedTable );
         }
         listeners.firePropertyChange( "column", null, column );
         return id;
     }
@@ -3072,17 +2849,17 @@ public long addColumn( String name, long tableId, int position, PolyType type, P
      */
     @Override
     public void renameColumn( long columnId, String name ) {
-        CatalogColumn old = getColumn( columnId );
+        LogicalColumn old = getColumn( columnId );
         if ( !getSchema( old.schemaId ).caseSensitive ) {
             name = name.toLowerCase();
         }
-        CatalogColumn column = new CatalogColumn( old.id, name, old.tableId, old.schemaId, old.databaseId, old.position, old.type, old.collectionsType, old.length, old.scale, old.dimension, old.cardinality, old.nullable, old.collation, old.defaultValue );
+        LogicalColumn column = new LogicalColumn( old.id, name, old.tableId, old.schemaId, old.position, old.type, old.collectionsType, old.length, old.scale, old.dimension, old.cardinality, old.nullable, old.collation, old.defaultValue );
         synchronized ( this ) {
             columns.replace( columnId, column );
-            columnNames.remove( new Object[]{ column.databaseId, column.schemaId, column.tableId, old.name } );
-            columnNames.put( new Object[]{ column.databaseId, column.schemaId, column.tableId, name }, column );
+            columnNames.remove( new Object[]{ column.schemaId, column.tableId, old.name } );
+            columnNames.put( new Object[]{ column.schemaId, column.tableId, name }, column );
         }
         listeners.firePropertyChange( "column", old, column );
     }
@@ -3093,11 +2870,11 @@ public void renameColumn( long columnId, String name ) {
      */
     @Override
     public void setColumnPosition( long columnId, int position ) {
-        CatalogColumn old = getColumn( columnId );
-        CatalogColumn column = new CatalogColumn( old.id, old.name, old.tableId, old.schemaId, old.databaseId, position, old.type, old.collectionsType, old.length, old.scale, old.dimension, old.cardinality, old.nullable, old.collation, old.defaultValue );
+        LogicalColumn old = getColumn( columnId );
+        LogicalColumn column = new LogicalColumn( old.id, old.name, old.tableId, old.schemaId, position, old.type, old.collectionsType, old.length, old.scale, old.dimension, old.cardinality, old.nullable, old.collation, old.defaultValue );
         synchronized ( this ) {
             columns.replace( columnId, column );
-            columnNames.replace( new Object[]{ column.databaseId, column.schemaId, column.tableId, column.name }, column );
+            columnNames.replace( new Object[]{ column.schemaId, column.tableId, column.name }, column );
         }
         listeners.firePropertyChange( "column", old, column );
     }
@@ -3109,7 +2886,7 @@ public void setColumnPosition( long columnId, int position ) {
     @Override
     public void setColumnType( long columnId, PolyType type, PolyType collectionsType, Integer length, Integer scale, Integer dimension, Integer cardinality ) throws GenericCatalogException {
         try {
-            CatalogColumn old = Objects.requireNonNull( columns.get( columnId ) );
+            LogicalColumn old = Objects.requireNonNull( columns.get( columnId ) );
             if ( scale != null && scale > length ) {
                 throw new RuntimeException( "Invalid scale! Scale can not be larger than length."
); @@ -3136,10 +2913,10 @@ public void setColumnType( long columnId, PolyType type, PolyType collectionsTyp Collation collation = type.getFamily() == PolyTypeFamily.CHARACTER ? Collation.getById( RuntimeConfig.DEFAULT_COLLATION.getInteger() ) : null; - CatalogColumn column = new CatalogColumn( old.id, old.name, old.tableId, old.schemaId, old.databaseId, old.position, type, collectionsType, length, scale, dimension, cardinality, old.nullable, collation, old.defaultValue ); + LogicalColumn column = new LogicalColumn( old.id, old.name, old.tableId, old.schemaId, old.position, type, collectionsType, length, scale, dimension, cardinality, old.nullable, collation, old.defaultValue ); synchronized ( this ) { columns.replace( columnId, column ); - columnNames.replace( new Object[]{ old.databaseId, old.schemaId, old.tableId, old.name }, column ); + columnNames.replace( new Object[]{ old.schemaId, old.tableId, old.name }, column ); } listeners.firePropertyChange( "column", old, column ); } catch ( NullPointerException e ) { @@ -3154,7 +2931,7 @@ public void setColumnType( long columnId, PolyType type, PolyType collectionsTyp @Override public void setNullable( long columnId, boolean nullable ) throws GenericCatalogException { try { - CatalogColumn old = Objects.requireNonNull( columns.get( columnId ) ); + LogicalColumn old = Objects.requireNonNull( columns.get( columnId ) ); if ( nullable ) { // Check if the column is part of a primary key (pk's are not allowed to contain null values) LogicalTable table = Objects.requireNonNull( tables.get( old.tableId ) ); @@ -3168,12 +2945,11 @@ public void setNullable( long columnId, boolean nullable ) throws GenericCatalog // TODO: Check that the column does not contain any null values getColumnPlacement( columnId ); } - CatalogColumn column = new CatalogColumn( + LogicalColumn column = new LogicalColumn( old.id, old.name, old.tableId, old.schemaId, - old.databaseId, old.position, old.type, old.collectionsType, @@ -3186,7 +2962,7 @@ public void setNullable( long columnId, boolean nullable ) throws GenericCatalog old.defaultValue ); synchronized ( this ) { columns.replace( columnId, column ); - columnNames.replace( new Object[]{ old.databaseId, old.schemaId, old.tableId, old.name }, column ); + columnNames.replace( new Object[]{ old.schemaId, old.tableId, old.name }, column ); } listeners.firePropertyChange( "column", old, column ); } catch ( NullPointerException e ) { @@ -3200,17 +2976,16 @@ public void setNullable( long columnId, boolean nullable ) throws GenericCatalog */ @Override public void setCollation( long columnId, Collation collation ) { - CatalogColumn old = getColumn( columnId ); + LogicalColumn old = getColumn( columnId ); if ( old.type.getFamily() != PolyTypeFamily.CHARACTER ) { throw new RuntimeException( "Illegal attempt to set collation for a non-char column!" 
); } - CatalogColumn column = new CatalogColumn( + LogicalColumn column = new LogicalColumn( old.id, old.name, old.tableId, old.schemaId, - old.databaseId, old.position, old.type, old.collectionsType, @@ -3223,7 +2998,7 @@ public void setCollation( long columnId, Collation collation ) { old.defaultValue ); synchronized ( this ) { columns.replace( columnId, column ); - columnNames.replace( new Object[]{ old.databaseId, old.schemaId, old.tableId, old.name }, column ); + columnNames.replace( new Object[]{ old.schemaId, old.tableId, old.name }, column ); } listeners.firePropertyChange( "column", old, column ); } @@ -3235,7 +3010,7 @@ public void setCollation( long columnId, Collation collation ) { @Override public boolean checkIfExistsColumn( long tableId, String columnName ) { LogicalTable table = getTable( tableId ); - return columnNames.containsKey( new Object[]{ table.databaseId, table.namespaceId, tableId, columnName } ); + return columnNames.containsKey( new Object[]{ table.namespaceId, tableId, columnName } ); } @@ -3245,7 +3020,7 @@ public boolean checkIfExistsColumn( long tableId, String columnName ) { @Override public void deleteColumn( long columnId ) { //TODO also delete keys with that column? - CatalogColumn column = getColumn( columnId ); + LogicalColumn column = getColumn( columnId ); List children = new ArrayList<>( Objects.requireNonNull( tableChildren.get( column.tableId ) ) ); children.remove( columnId ); @@ -3254,46 +3029,10 @@ public void deleteColumn( long columnId ) { List columnIds = new ArrayList<>( old.fieldIds ); columnIds.remove( columnId ); - LogicalTable table; - if ( old.entityType == EntityType.MATERIALIZED_VIEW ) { - CatalogMaterializedView oldView = (CatalogMaterializedView) old; - table = new CatalogMaterializedView( - old.id, - old.name, - ImmutableList.copyOf( columnIds ), - old.namespaceId, - old.databaseId, - old.ownerId, - old.entityType, - oldView.getQuery(), - old.primaryKey, - old.dataPlacements, - old.modifiable, - old.partitionProperty, - oldView.getAlgCollation(), - old.connectedViews, - oldView.getUnderlyingTables(), - oldView.getLanguage().getSerializedName(), - oldView.getMaterializedCriteria(), - oldView.isOrdered() - ); - } else { - table = new LogicalTable( - old.id, - old.name, - ImmutableList.copyOf( columnIds ), - old.namespaceId, - old.databaseId, - old.ownerId, - old.entityType, - old.primaryKey, - old.dataPlacements, - old.modifiable, - old.partitionProperty, - old.connectedViews ); - } + LogicalTable table = old.withFieldIds( ImmutableList.copyOf( columnIds ) ); + synchronized ( this ) { - columnNames.remove( new Object[]{ column.databaseId, column.schemaId, column.tableId, column.name } ); + columnNames.remove( new Object[]{ column.schemaId, column.tableId, column.name } ); tableChildren.replace( column.tableId, ImmutableList.copyOf( children ) ); deleteDefaultValue( columnId ); @@ -3301,7 +3040,7 @@ public void deleteColumn( long columnId ) { deleteColumnPlacement( p.adapterId, p.columnId, false ); } tables.replace( column.tableId, table ); - tableNames.replace( new Object[]{ table.databaseId, table.namespaceId, table.name }, table ); + tableNames.replace( new Object[]{ table.namespaceId, table.name }, table ); columns.remove( columnId ); } @@ -3316,13 +3055,12 @@ public void deleteColumn( long columnId ) { */ @Override public void setDefaultValue( long columnId, PolyType type, String defaultValue ) { - CatalogColumn old = getColumn( columnId ); - CatalogColumn column = new CatalogColumn( + LogicalColumn old = getColumn( columnId ); 
+ LogicalColumn column = new LogicalColumn( old.id, old.name, old.tableId, old.schemaId, - old.databaseId, old.position, old.type, old.collectionsType, @@ -3335,7 +3073,7 @@ public void setDefaultValue( long columnId, PolyType type, String defaultValue ) new CatalogDefaultValue( columnId, type, defaultValue, "defaultValue" ) ); synchronized ( this ) { columns.replace( columnId, column ); - columnNames.replace( new Object[]{ column.databaseId, column.schemaId, column.tableId, column.name }, column ); + columnNames.replace( new Object[]{ column.schemaId, column.tableId, column.name }, column ); } listeners.firePropertyChange( "column", old, column ); } @@ -3346,13 +3084,12 @@ public void setDefaultValue( long columnId, PolyType type, String defaultValue ) */ @Override public void deleteDefaultValue( long columnId ) { - CatalogColumn old = getColumn( columnId ); - CatalogColumn column = new CatalogColumn( + LogicalColumn old = getColumn( columnId ); + LogicalColumn column = new LogicalColumn( old.id, old.name, old.tableId, old.schemaId, - old.databaseId, old.position, old.type, old.collectionsType, @@ -3366,7 +3103,7 @@ public void deleteDefaultValue( long columnId ) { if ( old.defaultValue != null ) { synchronized ( this ) { columns.replace( columnId, column ); - columnNames.replace( new Object[]{ old.databaseId, old.schemaId, old.tableId, old.name }, column ); + columnNames.replace( new Object[]{ old.schemaId, old.tableId, old.name }, column ); } listeners.firePropertyChange( "column", old, column ); } @@ -3407,8 +3144,8 @@ public boolean isPrimaryKey( long key ) { public void addPrimaryKey( long tableId, List columnIds ) throws GenericCatalogException { try { // Check if the columns are set 'not null' - List nullableColumns = columnIds.stream().map( columns::get ).filter( Objects::nonNull ).filter( c -> c.nullable ).collect( Collectors.toList() ); - for ( CatalogColumn col : nullableColumns ) { + List nullableColumns = columnIds.stream().map( columns::get ).filter( Objects::nonNull ).filter( c -> c.nullable ).collect( Collectors.toList() ); + for ( LogicalColumn col : nullableColumns ) { throw new GenericCatalogException( "Primary key is not allowed to contain null values but the column '" + col.name + "' is declared nullable." 
); } @@ -3535,8 +3272,8 @@ public void addForeignKey( long tableId, List columnIds, long referencesTa int i = 0; for ( long referencedColumnId : refKey.columnIds ) { - CatalogColumn referencingColumn = getColumn( columnIds.get( i++ ) ); - CatalogColumn referencedColumn = getColumn( referencedColumnId ); + LogicalColumn referencingColumn = getColumn( columnIds.get( i++ ) ); + LogicalColumn referencedColumn = getColumn( referencedColumnId ); if ( referencedColumn.type != referencingColumn.type ) { throw new GenericCatalogException( "The data type of the referenced columns does not match the data type of the referencing column: " + referencingColumn.type.name() + " != " + referencedColumn.type ); } @@ -3544,18 +3281,14 @@ public void addForeignKey( long tableId, List columnIds, long referencesTa // TODO same keys for key and foreign key if ( getKeyUniqueCount( refKey.id ) > 0 ) { long keyId = getOrAddKey( tableId, columnIds, EnforcementTime.ON_COMMIT ); - //List keyColumnNames = columnIds.stream().map( id -> Objects.requireNonNull( columns.get( id ) ).name ).collect( Collectors.toList() ); - //List referencesNames = referencesIds.stream().map( id -> Objects.requireNonNull( columns.get( id ) ).name ).collect( Collectors.toList() ); CatalogForeignKey key = new CatalogForeignKey( keyId, constraintName, tableId, table.namespaceId, - table.databaseId, refKey.id, refKey.tableId, refKey.schemaId, - refKey.databaseId, columnIds, referencesIds, onUpdate, @@ -4054,7 +3787,6 @@ public long addPartitionGroup( long tableId, String partitionGroupName, long sch partitionGroupName, tableId, schemaId, - schema.databaseId, 0, null, ImmutableList.copyOf( partitionIds ), @@ -4104,7 +3836,6 @@ public void updatePartitionGroup( long partitionGroupId, List partitionIds partitionGroup.partitionGroupName, partitionGroup.tableId, partitionGroup.schemaId, - partitionGroup.databaseId, partitionGroup.partitionKey, partitionGroup.partitionQualifiers, ImmutableList.copyOf( partitionIds ), @@ -4173,7 +3904,6 @@ public void updatePartition( long partitionId, Long partitionGroupId ) { oldPartition.id, oldPartition.tableId, oldPartition.schemaId, - oldPartition.databaseId, oldPartition.partitionQualifiers, oldPartition.isUnbound, partitionGroupId @@ -4216,7 +3946,6 @@ public long addPartition( long tableId, long schemaId, long partitionGroupId, Li id, tableId, schemaId, - schema.databaseId, effectivePartitionQualifier, isUnbound, partitionGroupId ); @@ -4288,7 +4017,6 @@ public void partitionTable( long tableId, PartitionType partitionType, long part old.name, old.fieldIds, old.namespaceId, - old.databaseId, old.ownerId, old.entityType, old.primaryKey, @@ -4299,7 +4027,7 @@ public void partitionTable( long tableId, PartitionType partitionType, long part synchronized ( this ) { tables.replace( tableId, table ); - tableNames.replace( new Object[]{ table.databaseId, table.namespaceId, old.name }, table ); + tableNames.replace( new Object[]{ table.namespaceId, old.name }, table ); if ( table.partitionProperty.reliesOnPeriodicChecks ) { addTableToPeriodicProcessing( tableId ); @@ -4344,7 +4072,6 @@ public void mergeTable( long tableId ) { old.name, old.fieldIds, old.namespaceId, - old.databaseId, old.ownerId, old.entityType, old.primaryKey, @@ -4355,7 +4082,7 @@ public void mergeTable( long tableId ) { synchronized ( this ) { tables.replace( tableId, table ); - tableNames.replace( new Object[]{ table.databaseId, table.namespaceId, old.name }, table ); + tableNames.replace( new Object[]{ table.namespaceId, old.name }, table ); } 
listeners.firePropertyChange( "table", old, table ); } @@ -4373,7 +4100,6 @@ public void updateTablePartitionProperties( long tableId, PartitionProperty part old.name, old.fieldIds, old.namespaceId, - old.databaseId, old.ownerId, old.entityType, old.primaryKey, @@ -4384,7 +4110,7 @@ public void updateTablePartitionProperties( long tableId, PartitionProperty part synchronized ( this ) { tables.replace( tableId, table ); - tableNames.replace( new Object[]{ table.databaseId, table.namespaceId, old.name }, table ); + tableNames.replace( new Object[]{ table.namespaceId, old.name }, table ); } listeners.firePropertyChange( "table", old, table ); @@ -4416,8 +4142,8 @@ public List getPartitionGroups( long tableId ) { * {@inheritDoc} */ @Override - public List getPartitionGroups( Pattern databaseNamePattern, Pattern schemaNamePattern, Pattern tableNamePattern ) { - List catalogEntities = getTables( databaseNamePattern, schemaNamePattern, tableNamePattern ); + public List getPartitionGroups( Pattern schemaNamePattern, Pattern tableNamePattern ) { + List catalogEntities = getTables( schemaNamePattern, tableNamePattern ); Stream partitionGroupStream = Stream.of(); for ( LogicalTable catalogTable : catalogEntities ) { partitionGroupStream = Stream.concat( partitionGroupStream, getPartitionGroups( catalogTable.id ).stream() ); @@ -4451,8 +4177,8 @@ public List getPartitions( long partitionGroupId ) { * {@inheritDoc} */ @Override - public List getPartitions( Pattern databaseNamePattern, Pattern schemaNamePattern, Pattern tableNamePattern ) { - List catalogPartitionGroups = getPartitionGroups( databaseNamePattern, schemaNamePattern, tableNamePattern ); + public List getPartitions( Pattern schemaNamePattern, Pattern tableNamePattern ) { + List catalogPartitionGroups = getPartitionGroups( schemaNamePattern, tableNamePattern ); Stream partitionStream = Stream.of(); for ( CatalogPartitionGroup catalogPartitionGroup : catalogPartitionGroups ) { partitionStream = Stream.concat( partitionStream, getPartitions( catalogPartitionGroup.id ).stream() ); @@ -4818,49 +4544,11 @@ public CatalogDataPlacement addDataPlacementIfNotExists( int adapterId, long tab public void updateDataPlacementsOnTable( long tableId, List newDataPlacements ) { LogicalTable old = Objects.requireNonNull( tables.get( tableId ) ); - LogicalTable newTable; - - if ( old.entityType == EntityType.MATERIALIZED_VIEW ) { - CatalogMaterializedView oldView = (CatalogMaterializedView) old; - newTable = new CatalogMaterializedView( - old.id, - old.name, - old.fieldIds, - old.namespaceId, - old.databaseId, - old.ownerId, - old.entityType, - oldView.getQuery(), - old.primaryKey, - ImmutableList.copyOf( newDataPlacements ), - old.modifiable, - old.partitionProperty, - oldView.getAlgCollation(), - old.connectedViews, - oldView.getUnderlyingTables(), - oldView.getLanguage().getSerializedName(), - oldView.getMaterializedCriteria(), - oldView.isOrdered() - ); - } else { - newTable = new LogicalTable( - old.id, - old.name, - old.fieldIds, - old.namespaceId, - old.databaseId, - old.ownerId, - old.entityType, - old.primaryKey, - ImmutableList.copyOf( newDataPlacements ), - old.modifiable, - old.partitionProperty, - old.connectedViews ); - } + LogicalTable newTable = old.withDataPlacements( ImmutableList.copyOf( newDataPlacements ) ); synchronized ( this ) { tables.replace( tableId, newTable ); - tableNames.replace( new Object[]{ newTable.databaseId, newTable.namespaceId, newTable.name }, newTable ); + tableNames.replace( new Object[]{ newTable.namespaceId, 
newTable.name }, newTable ); } } @@ -4927,7 +4615,7 @@ public long addGraphPlacement( int adapterId, long graphId ) { synchronized ( this ) { graphPlacements.put( new Object[]{ graph.id, adapterId }, placement ); graphs.replace( graph.id, graph ); - graphNames.replace( new Object[]{ old.databaseId, graph.name }, graph ); + graphNames.replace( new Object[]{ graph.name }, graph ); } listeners.firePropertyChange( "graphPlacement", null, placement ); return id; @@ -5071,7 +4759,7 @@ protected void addSingleDataPlacementToTable( Integer adapterId, long tableId ) @Override protected void removeSingleDataPlacementFromTable( Integer adapterId, long tableId ) { LogicalTable old = getTable( tableId ); - List updatedPlacements = old.dataPlacements.stream().collect( Collectors.toList() ); + List updatedPlacements = new ArrayList<>( old.dataPlacements ); if ( updatedPlacements.contains( adapterId ) ) { updatedPlacements.remove( adapterId ); @@ -5116,7 +4804,7 @@ protected void addColumnsToDataPlacement( int adapterId, long tableId, List columnIds ) { CatalogDataPlacement oldDataPlacement = getDataPlacement( adapterId, tableId ); - Set columnPlacementsOnAdapter = oldDataPlacement.columnPlacementsOnAdapter.stream().collect( Collectors.toSet() ); + Set columnPlacementsOnAdapter = new HashSet<>( oldDataPlacement.columnPlacementsOnAdapter ); columnPlacementsOnAdapter.removeAll( columnIds ); CatalogDataPlacement newDataPlacement = new CatalogDataPlacement( @@ -5124,7 +4812,7 @@ protected void removeColumnsFromDataPlacement( int adapterId, long tableId, List oldDataPlacement.adapterId, oldDataPlacement.placementType, oldDataPlacement.dataPlacementRole, - ImmutableList.copyOf( columnPlacementsOnAdapter.stream().collect( Collectors.toList() ) ), + ImmutableList.copyOf( new ArrayList<>( columnPlacementsOnAdapter ) ), ImmutableList.copyOf( oldDataPlacement.getAllPartitionIds() ) ); @@ -5169,8 +4857,8 @@ protected void addPartitionsToDataPlacement( int adapterId, long tableId, List partitionIds ) { CatalogDataPlacement oldDataPlacement = getDataPlacement( adapterId, tableId ); - Set partitionPlacementsOnAdapter = oldDataPlacement.getAllPartitionIds().stream().collect( Collectors.toSet() ); - partitionPlacementsOnAdapter.removeAll( partitionIds ); + Set partitionPlacementsOnAdapter = new HashSet<>( oldDataPlacement.getAllPartitionIds() ); + partitionIds.forEach( partitionPlacementsOnAdapter::remove ); CatalogDataPlacement newDataPlacement = new CatalogDataPlacement( oldDataPlacement.tableId, @@ -5178,7 +4866,7 @@ protected void removePartitionsFromDataPlacement( int adapterId, long tableId, L oldDataPlacement.placementType, oldDataPlacement.dataPlacementRole, oldDataPlacement.columnPlacementsOnAdapter, - ImmutableList.copyOf( partitionPlacementsOnAdapter.stream().collect( Collectors.toList() ) ) ); + ImmutableList.copyOf( new ArrayList<>( partitionPlacementsOnAdapter ) ) ); modifyDataPlacement( adapterId, tableId, newDataPlacement ); @@ -5465,7 +5153,7 @@ private long addKey( long tableId, List columnIds, EnforcementTime enforce try { LogicalTable table = Objects.requireNonNull( tables.get( tableId ) ); long id = keyIdBuilder.getAndIncrement(); - CatalogKey key = new CatalogKey( id, table.id, table.namespaceId, table.databaseId, columnIds, enforcementTime ); + CatalogKey key = new CatalogKey( id, table.id, table.namespaceId, columnIds, enforcementTime ); synchronized ( this ) { keys.put( id, key ); keyColumns.put( columnIds.stream().mapToLong( Long::longValue ).toArray(), id ); @@ -5510,16 +5198,13 @@ public void 
validate() throws GenericCatalogException { public void startCheck() { columns.forEach( ( key, column ) -> { - assert (databases.containsKey( column.databaseId )); - assert (Objects.requireNonNull( databaseChildren.get( column.databaseId ) ).contains( column.schemaId )); - assert (schemas.containsKey( column.schemaId )); assert (Objects.requireNonNull( schemaChildren.get( column.schemaId ) ).contains( column.tableId )); assert (tables.containsKey( column.tableId )); assert (Objects.requireNonNull( tableChildren.get( column.tableId ) ).contains( column.id )); - assert (columnNames.containsKey( new Object[]{ column.databaseId, column.schemaId, column.tableId, column.name } )); + assert (columnNames.containsKey( new Object[]{ column.schemaId, column.tableId, column.name } )); } ); columnPlacements.forEach( ( key, placement ) -> { diff --git a/plugins/mapdb-catalog/src/main/java/org/polypheny/db/catalog/CatalogImplBackup.java b/plugins/mapdb-catalog/src/main/java/org/polypheny/db/catalog/CatalogImplBackup.java new file mode 100644 index 0000000000..682a113e06 --- /dev/null +++ b/plugins/mapdb-catalog/src/main/java/org/polypheny/db/catalog/CatalogImplBackup.java @@ -0,0 +1,5218 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.db.catalog; + + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import java.io.File; +import java.io.IOException; +import java.sql.Timestamp; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.Comparator; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Objects; +import java.util.Set; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicLong; +import java.util.stream.Collectors; +import java.util.stream.Stream; +import lombok.Getter; +import lombok.extern.slf4j.Slf4j; +import org.mapdb.BTreeMap; +import org.mapdb.DB; +import org.mapdb.DBException.SerializationError; +import org.mapdb.DBMaker; +import org.mapdb.HTreeMap; +import org.mapdb.Serializer; +import org.mapdb.serializer.SerializerArrayTuple; +import org.pf4j.Extension; +import org.polypheny.db.StatusService; +import org.polypheny.db.StatusService.ErrorConfig; +import org.polypheny.db.adapter.Adapter; +import org.polypheny.db.adapter.AdapterManager; +import org.polypheny.db.adapter.DataStore; +import org.polypheny.db.algebra.AlgCollation; +import org.polypheny.db.algebra.AlgCollations; +import org.polypheny.db.algebra.AlgNode; +import org.polypheny.db.algebra.AlgRoot; +import org.polypheny.db.algebra.constant.Kind; +import org.polypheny.db.algebra.core.Sort; +import org.polypheny.db.algebra.type.AlgDataType; +import org.polypheny.db.catalog.entity.CatalogAdapter; +import org.polypheny.db.catalog.entity.CatalogAdapter.AdapterType; +import org.polypheny.db.catalog.entity.CatalogCollectionMapping; +import org.polypheny.db.catalog.entity.CatalogCollectionPlacement; +import org.polypheny.db.catalog.entity.CatalogColumnPlacement; +import org.polypheny.db.catalog.entity.CatalogConstraint; +import org.polypheny.db.catalog.entity.CatalogDataPlacement; +import org.polypheny.db.catalog.entity.CatalogDatabase; +import org.polypheny.db.catalog.entity.CatalogDefaultValue; +import org.polypheny.db.catalog.entity.CatalogForeignKey; +import org.polypheny.db.catalog.entity.CatalogGraphMapping; +import org.polypheny.db.catalog.entity.CatalogGraphPlacement; +import org.polypheny.db.catalog.entity.CatalogIndex; +import org.polypheny.db.catalog.entity.CatalogKey; +import org.polypheny.db.catalog.entity.CatalogKey.EnforcementTime; +import org.polypheny.db.catalog.entity.CatalogMaterializedView; +import org.polypheny.db.catalog.entity.CatalogPartition; +import org.polypheny.db.catalog.entity.CatalogPartitionGroup; +import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; +import org.polypheny.db.catalog.entity.CatalogPrimaryKey; +import org.polypheny.db.catalog.entity.CatalogQueryInterface; +import org.polypheny.db.catalog.entity.CatalogSchema; +import org.polypheny.db.catalog.entity.CatalogUser; +import org.polypheny.db.catalog.entity.CatalogView; +import org.polypheny.db.catalog.entity.MaterializedCriteria; +import org.polypheny.db.catalog.entity.logical.LogicalCollection; +import org.polypheny.db.catalog.entity.logical.LogicalColumn; +import org.polypheny.db.catalog.entity.logical.LogicalGraph; +import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.catalog.exceptions.GenericCatalogException; +import org.polypheny.db.catalog.exceptions.GraphAlreadyExistsException; +import 
org.polypheny.db.catalog.exceptions.NoTablePrimaryKeyException; +import org.polypheny.db.catalog.exceptions.UnknownAdapterException; +import org.polypheny.db.catalog.exceptions.UnknownAdapterIdRuntimeException; +import org.polypheny.db.catalog.exceptions.UnknownCollectionException; +import org.polypheny.db.catalog.exceptions.UnknownCollectionPlacementException; +import org.polypheny.db.catalog.exceptions.UnknownColumnException; +import org.polypheny.db.catalog.exceptions.UnknownColumnIdRuntimeException; +import org.polypheny.db.catalog.exceptions.UnknownColumnPlacementRuntimeException; +import org.polypheny.db.catalog.exceptions.UnknownConstraintException; +import org.polypheny.db.catalog.exceptions.UnknownDatabaseIdRuntimeException; +import org.polypheny.db.catalog.exceptions.UnknownForeignKeyException; +import org.polypheny.db.catalog.exceptions.UnknownGraphException; +import org.polypheny.db.catalog.exceptions.UnknownGraphPlacementsException; +import org.polypheny.db.catalog.exceptions.UnknownIndexException; +import org.polypheny.db.catalog.exceptions.UnknownIndexIdRuntimeException; +import org.polypheny.db.catalog.exceptions.UnknownKeyIdRuntimeException; +import org.polypheny.db.catalog.exceptions.UnknownPartitionGroupIdRuntimeException; +import org.polypheny.db.catalog.exceptions.UnknownPartitionPlacementException; +import org.polypheny.db.catalog.exceptions.UnknownQueryInterfaceException; +import org.polypheny.db.catalog.exceptions.UnknownQueryInterfaceRuntimeException; +import org.polypheny.db.catalog.exceptions.UnknownSchemaException; +import org.polypheny.db.catalog.exceptions.UnknownSchemaIdRuntimeException; +import org.polypheny.db.catalog.exceptions.UnknownTableException; +import org.polypheny.db.catalog.exceptions.UnknownTableIdRuntimeException; +import org.polypheny.db.catalog.exceptions.UnknownUserException; +import org.polypheny.db.catalog.exceptions.UnknownUserIdRuntimeException; +import org.polypheny.db.catalog.logistic.Collation; +import org.polypheny.db.catalog.logistic.ConstraintType; +import org.polypheny.db.catalog.logistic.DataPlacementRole; +import org.polypheny.db.catalog.logistic.EntityType; +import org.polypheny.db.catalog.logistic.ForeignKeyOption; +import org.polypheny.db.catalog.logistic.IndexType; +import org.polypheny.db.catalog.logistic.NamespaceType; +import org.polypheny.db.catalog.logistic.PartitionType; +import org.polypheny.db.catalog.logistic.Pattern; +import org.polypheny.db.catalog.logistic.PlacementType; +import org.polypheny.db.config.RuntimeConfig; +import org.polypheny.db.iface.QueryInterfaceManager; +import org.polypheny.db.languages.QueryLanguage; +import org.polypheny.db.languages.QueryParameters; +import org.polypheny.db.nodes.Node; +import org.polypheny.db.partition.FrequencyMap; +import org.polypheny.db.partition.properties.PartitionProperty; +import org.polypheny.db.processing.ExtendedQueryParameters; +import org.polypheny.db.processing.Processor; +import org.polypheny.db.transaction.Statement; +import org.polypheny.db.transaction.Transaction; +import org.polypheny.db.type.PolyType; +import org.polypheny.db.type.PolyTypeFamily; +import org.polypheny.db.util.ImmutableIntList; +import org.polypheny.db.util.Pair; +import org.polypheny.db.util.PolyphenyHomeDirManager; +import org.polypheny.db.view.MaterializedViewManager; + +@Extension +@Slf4j +public class CatalogImplBackup extends Catalog { + + private static final String FILE_PATH = "mapDB"; + private static DB db; + + private static HTreeMap users; + private static HTreeMap userNames; + 
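+    // A note on the Object[]-keyed maps in this class: their keys are serialized with MapDB's
+    // SerializerArrayTuple, which orders composite keys component-wise and therefore supports
+    // prefix scans. A minimal usage sketch (hypothetical ids; the actual maps are created in the
+    // init*Info methods below):
+    //
+    //     BTreeMap<Object[], LogicalTable> names = db.treeMap( "tableNames" )
+    //             .keySerializer( new SerializerArrayTuple( Serializer.LONG, Serializer.LONG, Serializer.STRING ) )
+    //             .valueSerializer( Serializer.JAVA )
+    //             .createOrOpen();
+    //     names.put( new Object[]{ databaseId, schemaId, "emps" }, table );              // exact composite key
+    //     LogicalTable emps = names.get( new Object[]{ databaseId, schemaId, "emps" } ); // point lookup
+    //     Collection<LogicalTable> inSchema = names.prefixSubMap( new Object[]{ databaseId, schemaId } ).values(); // all tables of one schema
+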
+    private static BTreeMap<Long, CatalogDatabase> databases;
+    private static BTreeMap<String, CatalogDatabase> databaseNames;
+    private static HTreeMap<Long, ImmutableList<Long>> databaseChildren;
+
+    private static BTreeMap<Long, CatalogSchema> schemas;
+    private static BTreeMap<Object[], CatalogSchema> schemaNames;
+    private static HTreeMap<Long, ImmutableList<Long>> schemaChildren;
+
+    private static BTreeMap<Long, LogicalTable> tables;
+    private static BTreeMap<Object[], LogicalTable> tableNames;
+    private static HTreeMap<Long, ImmutableList<Long>> tableChildren;
+
+    private static BTreeMap<Long, LogicalCollection> collections;
+    private static BTreeMap<Object[], LogicalCollection> collectionNames;
+
+    private static BTreeMap<Object[], CatalogCollectionPlacement> collectionPlacements;
+
+    private static BTreeMap<Long, CatalogCollectionMapping> documentMappings;
+
+    private static BTreeMap<Long, LogicalColumn> columns;
+    private static BTreeMap<Object[], LogicalColumn> columnNames;
+    private static BTreeMap<Object[], CatalogColumnPlacement> columnPlacements;
+
+    private static HTreeMap<Integer, CatalogAdapter> adapters;
+    private static HTreeMap<String, CatalogAdapter> adapterNames;
+
+    private static HTreeMap<Integer, CatalogQueryInterface> queryInterfaces;
+    private static HTreeMap<String, CatalogQueryInterface> queryInterfaceNames;
+
+    private static HTreeMap<Long, CatalogKey> keys;
+    private static HTreeMap<long[], Long> keyColumns;
+
+    private static HTreeMap<Long, CatalogPrimaryKey> primaryKeys;
+    private static HTreeMap<Long, CatalogForeignKey> foreignKeys;
+    private static HTreeMap<Long, CatalogConstraint> constraints;
+    private static HTreeMap<Long, CatalogIndex> indexes;
+
+    private static BTreeMap<Long, CatalogPartitionGroup> partitionGroups;
+    private static BTreeMap<Long, CatalogPartition> partitions;
+    private static BTreeMap<Object[], CatalogPartitionPlacement> partitionPlacements; // (AdapterId, Partition)
+
+    // Container object that contains all other placements
+    private static BTreeMap<Object[], CatalogDataPlacement> dataPlacements; // (AdapterId, TableId) -> CatalogDataPlacement
+
+    private static BTreeMap<Long, LogicalGraph> graphs;
+    private static BTreeMap<String, LogicalGraph> graphAliases;
+    private static BTreeMap<Object[], LogicalGraph> graphNames;
+    private static BTreeMap<Object[], CatalogGraphPlacement> graphPlacements;
+
+    private static BTreeMap<Long, CatalogGraphMapping> graphMappings;
+
+    private static Long openTable;
+
+    private static final AtomicInteger adapterIdBuilder = new AtomicInteger( 1 );
+    private static final AtomicInteger queryInterfaceIdBuilder = new AtomicInteger( 1 );
+    private static final AtomicInteger userIdBuilder = new AtomicInteger( 1 );
+
+    private static final AtomicLong databaseIdBuilder = new AtomicLong( 1 );
+    private static final AtomicLong namespaceIdBuilder = new AtomicLong( 1 );
+    private static final AtomicLong entityIdBuilder = new AtomicLong( 1 );
+    private static final AtomicLong columnIdBuilder = new AtomicLong( 1 );
+
+    private static final AtomicLong partitionGroupIdBuilder = new AtomicLong( 1 );
+    private static final AtomicLong partitionIdBuilder = new AtomicLong( 1000 );
+
+    private static final AtomicLong keyIdBuilder = new AtomicLong( 1 );
+    private static final AtomicLong constraintIdBuilder = new AtomicLong( 1 );
+    private static final AtomicLong indexIdBuilder = new AtomicLong( 1 );
+    private static final AtomicLong foreignKeyIdBuilder = new AtomicLong( 1 );
+
+    private static final AtomicLong physicalPositionBuilder = new AtomicLong();
+
+    private static Set<Long> frequencyDependentTables = new HashSet<>(); // All tables to consider for periodic processing
+
+    // Keeps a list of all tableIDs which are going to be deleted. This is required to avoid constraints when recursively
+    // removing a table and all placements and partitions. Otherwise **validatePartitionDistribution()** inside the Catalog
+    // would throw an error.
+    private static final List<Long> tablesFlaggedForDeletion = new ArrayList<>();
+
+    Comparator<LogicalColumn> columnComparator = Comparator.comparingInt( o -> o.position );
+
+    // {@link AlgNode} used to create view and materialized view
+    @Getter
+    private final Map<Long, AlgNode> nodeInfo = new HashMap<>();
+
+
+    public CatalogImplBackup() {
+        this( FILE_PATH, true, true, false );
+    }
+
+
+    /**
+     * Creates a new catalog according to the given parameters
+     *
+     * @param fileName name of the persistent catalog file
+     * @param doInitSchema whether the default schema is initialized
+     * @param doInitInformationPage whether a new information page should be created
+     * @param deleteAfter whether the file is deleted when the catalog is closed
+     */
+    public CatalogImplBackup( String fileName, boolean doInitSchema, boolean doInitInformationPage, boolean deleteAfter ) {
+        super();
+
+        if ( db != null ) {
+            db.close();
+        }
+        synchronized ( this ) {
+
+            if ( Catalog.memoryCatalog || Catalog.testMode ) {
+                isPersistent = false;
+            } else {
+                isPersistent = isPersistent();
+            }
+
+            if ( isPersistent ) {
+                StatusService.printInfo( "Making the catalog persistent." );
+                File folder = PolyphenyHomeDirManager.getInstance().registerNewFolder( "catalog" );
+
+                if ( Catalog.resetCatalog ) {
+                    StatusService.printInfo( "Resetting catalog on startup." );
+                    if ( new File( folder, fileName ).exists() ) {
+                        //noinspection ResultOfMethodCallIgnored
+                        new File( folder, fileName ).delete();
+                    }
+                }
+
+                if ( !deleteAfter ) {
+                    db = DBMaker
+                            .fileDB( new File( folder, fileName ) )
+                            .closeOnJvmShutdown()
+                            .transactionEnable()
+                            .fileMmapEnableIfSupported()
+                            .fileMmapPreclearDisable()
+                            .make();
+                } else {
+                    db = DBMaker
+                            .fileDB( new File( folder, fileName ) )
+                            .closeOnJvmShutdown()
+                            .fileDeleteAfterClose()
+                            .transactionEnable()
+                            .fileMmapEnableIfSupported()
+                            .fileMmapPreclearDisable()
+                            .make();
+                }
+                db.getStore().fileLoad();
+
+            } else {
+                StatusService.printInfo( "Making the catalog in-memory." );
+                db = DBMaker
+                        .memoryDB()
+                        .transactionEnable()
+                        .closeOnJvmShutdown()
+                        .make();
+            }
+
+            initDBLayout( db );
+
+            // Mirrors the default data from the old SQL file
+            restoreAllIdBuilders();
+            try {
+
+                if ( doInitSchema ) {
+                    insertDefaultData();
+                }
+
+            } catch ( GenericCatalogException | UnknownUserException | UnknownTableException |
+                    UnknownSchemaException | UnknownAdapterException | UnknownColumnException e ) {
+                throw new RuntimeException( e );
+            }
+            if ( doInitInformationPage ) {
+                new CatalogInfoPage( this );
+            }
+
+            new CatalogValidator().startCheck();
+        }
+    }
+
+
+    @Override
+    public void commit() throws NoTablePrimaryKeyException {
+        if ( openTable != null ) {
+            throw new NoTablePrimaryKeyException();
+        }
+        db.commit();
+    }
+
+
+    @Override
+    public void rollback() {
+        db.rollback();
+    }
+
+
+    /**
+     * Checks whether a file can be created, accessed, and changed on the system
+     *
+     * @return whether it was possible
+     */
+    private boolean isPersistent() {
+        File file = PolyphenyHomeDirManager.getInstance().registerNewFile( "testfile" );
+        try {
+            if ( !file.exists() ) {
+                boolean res = file.createNewFile();
+                if ( !res ) {
+                    return false;
+                }
+            }
+        } catch ( IOException e ) {
+            return false;
+        }
+        if ( !file.canRead() || !file.canWrite() ) {
+            return false;
+        }
+        file.delete();
+
+        return true;
+    }
+
+
+    /**
+     * Initializes the default catalog layout
+     *
+     * @param db the database object on which the layout is created
+     */
+    private void initDBLayout( DB db ) {
+        try {
+            initUserInfo( db );
+            initDatabaseInfo( db );
+            initSchemaInfo( db );
+            initTableInfo( db );
+            initGraphInfo( db );
+            initDocumentInfo( db );
+            initColumnInfo( db );
+            initKeysAndConstraintsInfo( db );
+            initAdapterInfo( db );
+            initQueryInterfaceInfo( db );
+        } catch ( SerializationError e ) {
+            log.error( "!!!!!!!!!!! Error while restoring the catalog !!!!!!!!!!!" );
+            log.error( "This usually means that there have been changes to the internal structure of the catalog with the last update of Polypheny-DB." );
+            log.error( "To fix this, you must reset the catalog. To do this, please start Polypheny-DB once with the argument \"-resetCatalog\"." );
+            StatusService.printError(
+                    "Unsupported version of catalog!
Unable to restore the schema.", + ErrorConfig.builder().func( ErrorConfig.DO_NOTHING ).doExit( true ).showButton( true ).buttonMessage( "Exit" ).build() ); + } + } + + + @Override + public void restoreColumnPlacements( Transaction transaction ) { + AdapterManager manager = AdapterManager.getInstance(); + + Map> restoredTables = new HashMap<>(); + + for ( LogicalColumn c : columns.values() ) { + List placements = getColumnPlacement( c.id ); + LogicalTable catalogTable = getTable( c.tableId ); + + // No column placements need to be restored if it is a view + if ( catalogTable.entityType != EntityType.VIEW ) { + if ( placements.size() == 0 ) { + // No placements shouldn't happen + throw new RuntimeException( "There seems to be no placement for the column with the id " + c.id ); + } else if ( placements.size() == 1 ) { + Adapter adapter = manager.getAdapter( placements.get( 0 ).adapterId ); + if ( adapter instanceof DataStore ) { + DataStore store = (DataStore) adapter; + if ( !store.isPersistent() ) { + + // TODO only full placements atm here + + if ( !restoredTables.containsKey( store.getAdapterId() ) ) { + store.createPhysicalTable( transaction.createStatement().getPrepareContext(), catalogTable, null ); + restoredTables.put( store.getAdapterId(), Collections.singletonList( catalogTable.id ) ); + + } else if ( !(restoredTables.containsKey( store.getAdapterId() ) && restoredTables.get( store.getAdapterId() ).contains( catalogTable.id )) ) { + store.createPhysicalTable( transaction.createStatement().getPrepareContext(), catalogTable, null ); + List ids = new ArrayList<>( restoredTables.get( store.getAdapterId() ) ); + ids.add( catalogTable.id ); + restoredTables.put( store.getAdapterId(), ids ); + } + } + } + } else { + Map persistent = placements.stream().collect( Collectors.toMap( p -> p.adapterId, p -> manager.getStore( p.adapterId ).isPersistent() ) ); + + if ( !persistent.containsValue( true ) ) { // no persistent placement for this column + LogicalTable table = getTable( c.tableId ); + for ( CatalogColumnPlacement p : placements ) { + DataStore store = manager.getStore( p.adapterId ); + + if ( !restoredTables.containsKey( store.getAdapterId() ) ) { + store.createPhysicalTable( transaction.createStatement().getPrepareContext(), table, null ); + List ids = new ArrayList<>(); + ids.add( table.id ); + restoredTables.put( store.getAdapterId(), ids ); + + } else if ( !(restoredTables.containsKey( store.getAdapterId() ) && restoredTables.get( store.getAdapterId() ).contains( table.id )) ) { + store.createPhysicalTable( transaction.createStatement().getPrepareContext(), table, null ); + List ids = new ArrayList<>( restoredTables.get( store.getAdapterId() ) ); + ids.add( table.id ); + restoredTables.put( store.getAdapterId(), ids ); + } + } + } else if ( persistent.containsValue( true ) && persistent.containsValue( false ) ) { + // TODO DL change so column gets copied + for ( Entry p : persistent.entrySet() ) { + if ( !p.getValue() ) { + deleteColumnPlacement( p.getKey(), c.id, false ); + } + } + } + } + } + } + } + + + /** + * {@inheritDoc} + */ + @Override + public void restoreViews( Transaction transaction ) { + Statement statement = transaction.createStatement(); + + for ( LogicalTable c : tables.values() ) { + if ( c.entityType == EntityType.VIEW || c.entityType == EntityType.MATERIALIZED_VIEW ) { + String query; + QueryLanguage language; + if ( c.entityType == EntityType.VIEW ) { + query = ((CatalogView) c).getQuery(); + language = ((CatalogView) c).getLanguage(); + } else { + query = 
((CatalogMaterializedView) c).getQuery(); + language = ((CatalogMaterializedView) c).getLanguage(); + } + + switch ( language.getSerializedName() ) { + case "sql": + Processor sqlProcessor = statement.getTransaction().getProcessor( QueryLanguage.from( "rel" ) ); + Node sqlNode = sqlProcessor.parse( query ).get( 0 ); + AlgRoot algRoot = sqlProcessor.translate( + statement, + sqlProcessor.validate( statement.getTransaction(), sqlNode, RuntimeConfig.ADD_DEFAULT_VALUES_IN_INSERTS.getBoolean() ).left, + new QueryParameters( query, c.getNamespaceType() ) ); + nodeInfo.put( c.id, algRoot.alg ); + break; + + case "rel": + Processor jsonRelProcessor = statement.getTransaction().getProcessor( QueryLanguage.from( "rel" ) ); + AlgNode result = jsonRelProcessor.translate( statement, null, new QueryParameters( query, c.getNamespaceType() ) ).alg; + + final AlgDataType rowType = result.getRowType(); + final List> fields = Pair.zip( ImmutableIntList.identity( rowType.getFieldCount() ), rowType.getFieldNames() ); + final AlgCollation collation = + result instanceof Sort + ? ((Sort) result).collation + : AlgCollations.EMPTY; + AlgRoot root = new AlgRoot( result, result.getRowType(), Kind.SELECT, fields, collation ); + + nodeInfo.put( c.id, root.alg ); + break; + + case "mongo": + Processor mqlProcessor = statement.getTransaction().getProcessor( QueryLanguage.from( "mongo" ) ); + Node mqlNode = mqlProcessor.parse( query ).get( 0 ); + + AlgRoot mqlRel = mqlProcessor.translate( + statement, + mqlNode, + new ExtendedQueryParameters( query, NamespaceType.DOCUMENT, getSchema( defaultDatabaseId ).name ) ); + nodeInfo.put( c.id, mqlRel.alg ); + break; + } + if ( c.entityType == EntityType.MATERIALIZED_VIEW ) { + log.info( "Updating materialized view: {}", c.getNamespaceName() + "." 
+ c.name );
+                    MaterializedViewManager materializedManager = MaterializedViewManager.getInstance();
+                    materializedManager.addMaterializedInfo( c.id, ((CatalogMaterializedView) c).getMaterializedCriteria() );
+                    materializedManager.updateData( statement.getTransaction(), c.id );
+                    materializedManager.updateMaterializedTime( c.id );
+                }
+            }
+        }
+
+
+    /**
+     * Sets the idBuilder for a given map to the new starting position
+     *
+     * @param map the map to which the idBuilder belongs
+     * @param idBuilder the builder that creates new unique ids
+     */
+    private void restoreIdBuilder( Map<Integer, ?> map, AtomicInteger idBuilder ) {
+        if ( !map.isEmpty() ) {
+            idBuilder.set( Collections.max( map.keySet() ) + 1 );
+        }
+    }
+
+
+    private void restoreIdBuilder( Map<Long, ?> map, AtomicLong idBuilder ) {
+        if ( !map.isEmpty() ) {
+            idBuilder.set( Collections.max( map.keySet() ) + 1 );
+        }
+    }
+
+
+    private void restoreAllIdBuilders() {
+        restoreIdBuilder( schemas, namespaceIdBuilder );
+        restoreIdBuilder( databases, databaseIdBuilder );
+        restoreIdBuilder( tables, entityIdBuilder );
+        restoreIdBuilder( columns, columnIdBuilder );
+        restoreIdBuilder( users, userIdBuilder );
+        restoreIdBuilder( keys, keyIdBuilder );
+        restoreIdBuilder( constraints, constraintIdBuilder );
+        restoreIdBuilder( indexes, indexIdBuilder );
+        restoreIdBuilder( adapters, adapterIdBuilder );
+        restoreIdBuilder( queryInterfaces, queryInterfaceIdBuilder );
+        restoreIdBuilder( foreignKeys, foreignKeyIdBuilder );
+        restoreIdBuilder( partitionGroups, partitionGroupIdBuilder );
+        restoreIdBuilder( partitions, partitionIdBuilder );
+
+        // Restore physical position builder
+        if ( columnPlacements.size() > 0 ) {
+            long highestPosition = 0;
+            for ( CatalogColumnPlacement placement : columnPlacements.values() ) {
+                if ( placement.physicalPosition > highestPosition ) {
+                    highestPosition = placement.physicalPosition;
+                }
+            }
+            physicalPositionBuilder.set( highestPosition + 1 );
+        }
+    }
+
+
+    /**
+     * Initializes all needed maps for adapters
+     *
+     * adapters: adapterId {@code ->} CatalogAdapter
+     * adapterNames: adapterName {@code ->} CatalogAdapter
+     */
+    private void initAdapterInfo( DB db ) {
+        adapters = db.hashMap( "adapters", Serializer.INTEGER, new GenericSerializer<CatalogAdapter>() ).createOrOpen();
+        adapterNames = db.hashMap( "adapterNames", Serializer.STRING, new GenericSerializer<CatalogAdapter>() ).createOrOpen();
+    }
+
+
+    /**
+     * Initializes all needed maps for query interfaces
+     *
+     * queryInterfaces: ifaceId {@code ->} CatalogQueryInterface
+     * queryInterfaceNames: ifaceName {@code ->} CatalogQueryInterface
+     */
+    private void initQueryInterfaceInfo( DB db ) {
+        queryInterfaces = db.hashMap( "queryInterfaces", Serializer.INTEGER, new GenericSerializer<CatalogQueryInterface>() ).createOrOpen();
+        queryInterfaceNames = db.hashMap( "queryInterfaceNames", Serializer.STRING, new GenericSerializer<CatalogQueryInterface>() ).createOrOpen();
+    }
+
+
+    /**
+     * Creates all needed maps for keys and constraints
+     *
+     * keyColumns: [columnId1, columnId2,...] {@code ->} keyId
+     * keys: keyId {@code ->} CatalogKey
+     * primaryKeys: keyId {@code ->} CatalogPrimaryKey
+     * foreignKeys: keyId {@code ->} CatalogForeignKey
+     * constraints: constraintId {@code ->} CatalogConstraint
+     * indexes: indexId {@code ->} CatalogIndex
+     */
+    private void initKeysAndConstraintsInfo( DB db ) {
+        keyColumns = db.hashMap( "keyColumns", Serializer.LONG_ARRAY, Serializer.LONG ).createOrOpen();
+        keys = db.hashMap( "keys", Serializer.LONG, new GenericSerializer<CatalogKey>() ).createOrOpen();
+        primaryKeys = db.hashMap( "primaryKeys", Serializer.LONG, new GenericSerializer<CatalogPrimaryKey>() ).createOrOpen();
+        foreignKeys = db.hashMap( "foreignKeys", Serializer.LONG, new GenericSerializer<CatalogForeignKey>() ).createOrOpen();
+        constraints = db.hashMap( "constraints", Serializer.LONG, new GenericSerializer<CatalogConstraint>() ).createOrOpen();
+        indexes = db.hashMap( "indexes", Serializer.LONG, new GenericSerializer<CatalogIndex>() ).createOrOpen();
+    }
+
+
+    /**
+     * Creates all needed maps for users
+     *
+     * users: userId {@code ->} CatalogUser
+     * userNames: name {@code ->} CatalogUser
+     */
+    private void initUserInfo( DB db ) {
+        users = db.hashMap( "users", Serializer.INTEGER, new GenericSerializer<CatalogUser>() ).createOrOpen();
+        userNames = db.hashMap( "usersNames", Serializer.STRING, new GenericSerializer<CatalogUser>() ).createOrOpen();
+    }
+
+
+    /**
+     * Initialize the column maps
+     *
+     * columns: columnId {@code ->} CatalogColumn
+     * columnNames: new Object[]{databaseId, schemaId, tableId, columnName} {@code ->} CatalogColumn
+     * columnPlacements: new Object[]{adapterId, columnId} {@code ->} CatalogPlacement
+     */
+    private void initColumnInfo( DB db ) {
+        //noinspection unchecked
+        columns = db.treeMap( "columns", Serializer.LONG, Serializer.JAVA ).createOrOpen();
+        //noinspection unchecked
+        columnNames = db.treeMap( "columnNames", new SerializerArrayTuple( Serializer.LONG, Serializer.LONG, Serializer.LONG, Serializer.STRING ), Serializer.JAVA ).createOrOpen();
+        //noinspection unchecked
+        columnPlacements = db.treeMap( "columnPlacement", new SerializerArrayTuple( Serializer.INTEGER, Serializer.LONG ), Serializer.JAVA ).createOrOpen();
+    }
+
+
+    /**
+     * Creates all maps needed for tables
+     *
+     * tables: tableId {@code ->} CatalogEntity
+     * tableChildren: tableId {@code ->} [columnId, columnId,..]
+ * tableNames: new Object[]{databaseId, schemaId, tableName} {@code ->} CatalogEntity + */ + private void initTableInfo( DB db ) { + //noinspection unchecked + tables = db.treeMap( "tables", Serializer.LONG, Serializer.JAVA ).createOrOpen(); + tableChildren = db.hashMap( "tableChildren", Serializer.LONG, new GenericSerializer>() ).createOrOpen(); + //noinspection unchecked + tableNames = db.treeMap( "tableNames" ) + .keySerializer( new SerializerArrayTuple( Serializer.LONG, Serializer.LONG, Serializer.STRING ) ) + .valueSerializer( Serializer.JAVA ) + .createOrOpen(); + dataPlacements = db.treeMap( "dataPlacement", new SerializerArrayTuple( Serializer.INTEGER, Serializer.LONG ), Serializer.JAVA ).createOrOpen(); + partitionGroups = db.treeMap( "partitionGroups", Serializer.LONG, Serializer.JAVA ).createOrOpen(); + partitions = db.treeMap( "partitions", Serializer.LONG, Serializer.JAVA ).createOrOpen(); + + partitionPlacements = db.treeMap( "partitionPlacements", new SerializerArrayTuple( Serializer.INTEGER, Serializer.LONG ), Serializer.JAVA ).createOrOpen(); + + // Restores all Tables dependent on periodic checks like TEMPERATURE Partitioning + frequencyDependentTables = tables.values().stream().filter( t -> t.partitionProperty.reliesOnPeriodicChecks ).map( t -> t.id ).collect( Collectors.toSet() ); + } + + + @SuppressWarnings("unchecked") + private void initGraphInfo( DB db ) { + graphs = db.treeMap( "graphs", Serializer.LONG, Serializer.JAVA ).createOrOpen(); + graphNames = db.treeMap( "graphNames", new SerializerArrayTuple( Serializer.LONG, Serializer.STRING ), Serializer.JAVA ).createOrOpen(); + graphPlacements = db.treeMap( "graphPlacements", new SerializerArrayTuple( Serializer.LONG, Serializer.INTEGER ), Serializer.JAVA ).createOrOpen(); + + graphMappings = db.treeMap( "graphMappings", Serializer.LONG, Serializer.JAVA ).createOrOpen(); + graphAliases = db.treeMap( "graphAliases", Serializer.STRING, Serializer.JAVA ).createOrOpen(); + } + + + @SuppressWarnings("unchecked") + private void initDocumentInfo( DB db ) { + collections = db.treeMap( "collections", Serializer.LONG, Serializer.JAVA ).createOrOpen(); + collectionNames = db.treeMap( "collectionNames", new SerializerArrayTuple( Serializer.LONG, Serializer.LONG, Serializer.STRING ), Serializer.JAVA ).createOrOpen(); + + documentMappings = db.treeMap( "documentMappings", Serializer.LONG, Serializer.JAVA ).createOrOpen(); + + collectionPlacements = db.treeMap( "collectionPlacements", new SerializerArrayTuple( Serializer.LONG, Serializer.INTEGER ), Serializer.JAVA ).createOrOpen(); + } + + + /** + * Creates all needed maps for schemas + * + * schemas: schemaId {@code ->} CatalogNamespace + * schemaChildren: schemaId {@code ->} [tableId, tableId, etc] + * schemaNames: new Object[]{databaseId, schemaName} {@code ->} CatalogNamespace + */ + private void initSchemaInfo( DB db ) { + //noinspection unchecked + schemas = db.treeMap( "schemas", Serializer.LONG, Serializer.JAVA ).createOrOpen(); + schemaChildren = db.hashMap( "schemaChildren", Serializer.LONG, new GenericSerializer>() ).createOrOpen(); + //noinspection unchecked + schemaNames = db.treeMap( "schemaNames", new SerializerArrayTuple( Serializer.LONG, Serializer.STRING ), Serializer.JAVA ).createOrOpen(); + } + + + /** + * Creates maps for databases + * + * databases: databaseId {@code ->} CatalogDatabase + * databaseNames: databaseName {@code ->} CatalogDatabase + * databaseChildren: databaseId {@code ->} [tableId, tableId,...] 
+ */ + private void initDatabaseInfo( DB db ) { + //noinspection unchecked + databases = db.treeMap( "databases", Serializer.LONG, Serializer.JAVA ).createOrOpen(); + //noinspection unchecked + databaseNames = db.treeMap( "databaseNames", Serializer.STRING, Serializer.JAVA ).createOrOpen(); + databaseChildren = db.hashMap( "databaseChildren", Serializer.LONG, new GenericSerializer>() ).createOrOpen(); + } + + + /** + * Fills the catalog database with default data, skips if data is already inserted + */ + private void insertDefaultData() throws GenericCatalogException, UnknownUserException, UnknownTableException, UnknownSchemaException, UnknownAdapterException, UnknownColumnException { + + ////////////// + // init users + int systemId; + if ( !userNames.containsKey( "system" ) ) { + systemId = addUser( "system", "" ); + } else { + systemId = getUser( "system" ).id; + } + + if ( !userNames.containsKey( "pa" ) ) { + addUser( "pa", "" ); + } + Catalog.defaultUserId = systemId; + + ////////////// + // init schema + + long schemaId; + if ( !schemaNames.containsKey( new Object[]{ "public" } ) ) { + schemaId = addNamespace( "public", 1, NamespaceType.getDefault() ); + } else { + schemaId = getSchema( "public" ).id; + } + + ////////////// + // init adapters + if ( adapterNames.size() == 0 ) { + // Deploy default store + addAdapter( "hsqldb", defaultStore.getAdapterName(), AdapterType.STORE, defaultStore.getDefaultSettings() ); + + // Deploy default CSV view + addAdapter( "hr", defaultSource.getAdapterName(), AdapterType.SOURCE, defaultSource.getDefaultSettings() ); + + // init schema + CatalogAdapter csv = getAdapter( "hr" ); + if ( !testMode ) { + if ( !tableNames.containsKey( new Object[]{ schemaId, "depts" } ) ) { + addTable( "depts", schemaId, systemId, EntityType.SOURCE, false ); + } + if ( !tableNames.containsKey( new Object[]{ schemaId, "emps" } ) ) { + addTable( "emps", schemaId, systemId, EntityType.SOURCE, false ); + } + if ( !tableNames.containsKey( new Object[]{ schemaId, "emp" } ) ) { + addTable( "emp", schemaId, systemId, EntityType.SOURCE, false ); + } + if ( !tableNames.containsKey( new Object[]{ schemaId, "work" } ) ) { + addTable( "work", schemaId, systemId, EntityType.SOURCE, false ); + addDefaultCsvColumns( csv ); + } + } + } + + try { + commit(); + } catch ( NoTablePrimaryKeyException e ) { + throw new RuntimeException( e ); + } + + } + + + @Override + public void restoreInterfacesIfNecessary() { + //////////////////////// + // init query interfaces + if ( queryInterfaceNames.size() == 0 ) { + QueryInterfaceManager.getREGISTER().values().forEach( i -> addQueryInterface( i.interfaceName, i.clazz.getName(), i.defaultSettings ) ); + + try { + commit(); + } catch ( NoTablePrimaryKeyException e ) { + throw new RuntimeException( e ); + } + } + } + + + /** + * Initiates default columns for csv files + */ + private void addDefaultCsvColumns( CatalogAdapter csv ) throws UnknownSchemaException, UnknownTableException, GenericCatalogException, UnknownColumnException { + CatalogSchema schema = getSchema( "public" ); + LogicalTable depts = getTable( schema.id, "depts" ); + + addDefaultCsvColumn( csv, depts, "deptno", PolyType.INTEGER, null, 1, null ); + addDefaultCsvColumn( csv, depts, "name", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 2, 20 ); + + LogicalTable emps = getTable( schema.id, "emps" ); + addDefaultCsvColumn( csv, emps, "empid", PolyType.INTEGER, null, 1, null ); + addDefaultCsvColumn( csv, emps, "deptno", PolyType.INTEGER, null, 2, null ); + addDefaultCsvColumn( csv, emps, 
"name", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 3, 20 ); + addDefaultCsvColumn( csv, emps, "salary", PolyType.INTEGER, null, 4, null ); + addDefaultCsvColumn( csv, emps, "commission", PolyType.INTEGER, null, 5, null ); + + LogicalTable emp = getTable( schema.id, "emp" ); + addDefaultCsvColumn( csv, emp, "employeeno", PolyType.INTEGER, null, 1, null ); + addDefaultCsvColumn( csv, emp, "age", PolyType.INTEGER, null, 2, null ); + addDefaultCsvColumn( csv, emp, "gender", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 3, 20 ); + addDefaultCsvColumn( csv, emp, "maritalstatus", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 4, 20 ); + addDefaultCsvColumn( csv, emp, "worklifebalance", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 5, 20 ); + addDefaultCsvColumn( csv, emp, "education", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 6, 20 ); + addDefaultCsvColumn( csv, emp, "monthlyincome", PolyType.INTEGER, null, 7, null ); + addDefaultCsvColumn( csv, emp, "relationshipjoy", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 8, 20 ); + addDefaultCsvColumn( csv, emp, "workingyears", PolyType.INTEGER, null, 9, null ); + addDefaultCsvColumn( csv, emp, "yearsatcompany", PolyType.INTEGER, null, 10, null ); + + LogicalTable work = getTable( schema.id, "work" ); + addDefaultCsvColumn( csv, work, "employeeno", PolyType.INTEGER, null, 1, null ); + addDefaultCsvColumn( csv, work, "educationfield", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 2, 20 ); + addDefaultCsvColumn( csv, work, "jobinvolvement", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 3, 20 ); + addDefaultCsvColumn( csv, work, "joblevel", PolyType.INTEGER, null, 4, null ); + addDefaultCsvColumn( csv, work, "jobrole", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 5, 30 ); + addDefaultCsvColumn( csv, work, "businesstravel", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 6, 20 ); + addDefaultCsvColumn( csv, work, "department", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 7, 25 ); + addDefaultCsvColumn( csv, work, "attrition", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 8, 20 ); + addDefaultCsvColumn( csv, work, "dailyrate", PolyType.INTEGER, null, 9, null ); + + // set all needed primary keys + addPrimaryKey( depts.id, Collections.singletonList( getColumn( depts.id, "deptno" ).id ) ); + addPrimaryKey( emps.id, Collections.singletonList( getColumn( emps.id, "empid" ).id ) ); + addPrimaryKey( emp.id, Collections.singletonList( getColumn( emp.id, "employeeno" ).id ) ); + addPrimaryKey( work.id, Collections.singletonList( getColumn( work.id, "employeeno" ).id ) ); + + // set foreign keys + addForeignKey( + emps.id, + ImmutableList.of( getColumn( emps.id, "deptno" ).id ), + depts.id, + ImmutableList.of( getColumn( depts.id, "deptno" ).id ), + "fk_emps_depts", + ForeignKeyOption.NONE, + ForeignKeyOption.NONE ); + addForeignKey( + work.id, + ImmutableList.of( getColumn( work.id, "employeeno" ).id ), + emp.id, + ImmutableList.of( getColumn( emp.id, "employeeno" ).id ), + "fk_work_emp", + ForeignKeyOption.NONE, + ForeignKeyOption.NONE ); + } + + + private void addDefaultCsvColumn( CatalogAdapter csv, LogicalTable table, String name, PolyType type, Collation collation, int position, Integer length ) { + if ( !checkIfExistsColumn( table.id, name ) ) { + long colId = addColumn( name, table.id, position, type, null, length, null, null, null, false, collation ); + String filename = table.name + ".csv"; + if ( table.name.equals( "emp" ) || table.name.equals( "work" ) ) { + filename += ".gz"; + } + + addColumnPlacement( csv.id, colId, PlacementType.AUTOMATIC, 
filename, table.name, name ); + updateColumnPlacementPhysicalPosition( csv.id, colId, position ); + + long partitionId = table.partitionProperty.partitionIds.get( 0 ); + addPartitionPlacement( table.namespaceId, csv.id, table.id, partitionId, PlacementType.AUTOMATIC, filename, table.name, DataPlacementRole.UPTODATE ); + } + } + + + private void addDefaultColumn( CatalogAdapter adapter, LogicalTable table, String name, PolyType type, Collation collation, int position, Integer length ) { + if ( !checkIfExistsColumn( table.id, name ) ) { + long colId = addColumn( name, table.id, position, type, null, length, null, null, null, false, collation ); + addColumnPlacement( adapter.id, colId, PlacementType.AUTOMATIC, "col" + colId, table.name, name ); + updateColumnPlacementPhysicalPosition( adapter.id, colId, position ); + } + } + + + /** + * {@inheritDoc} + */ + @Override + public void validateColumns() { + CatalogValidator validator = new CatalogValidator(); + db.rollback(); + try { + validator.validate(); + } catch ( GenericCatalogException e ) { + throw new RuntimeException( e ); + } + } + + + /** + * {@inheritDoc} + */ + @Override + public void close() { + db.close(); + } + + + /** + * {@inheritDoc} + */ + @Override + public void clear() { + db.getAll().clear(); + initDBLayout( db ); + restoreAllIdBuilders(); + } + + + @Override + public Snapshot getSnapshot( long id ) { + return null; + } + + + /** + * {@inheritDoc} + */ + public long addDatabase( String name, int ownerId, String ownerName, long defaultSchemaId, String defaultSchemaName ) { + long id = databaseIdBuilder.getAndIncrement(); + CatalogDatabase database = new CatalogDatabase( id, name, ownerId, ownerName, defaultSchemaId, defaultSchemaName ); + synchronized ( this ) { + databases.put( id, database ); + databaseNames.put( name, database ); + databaseChildren.put( id, ImmutableList.builder().build() ); + } + listeners.firePropertyChange( "database", null, database ); + return id; + } + + + /** + * {@inheritDoc} + */ + public void deleteDatabase( long databaseId ) { + CatalogDatabase database = getDatabase( databaseId ); + if ( database != null ) { + synchronized ( this ) { + databases.remove( databaseId ); + databaseNames.remove( database.name ); + databaseChildren.remove( databaseId ); + } + } + } + + + /** + * {@inheritDoc} + */ + @Override + public int addUser( String name, String password ) { + CatalogUser user = new CatalogUser( userIdBuilder.getAndIncrement(), name, password ); + synchronized ( this ) { + users.put( user.id, user ); + userNames.put( user.name, user ); + } + listeners.firePropertyChange( "user", null, user ); + return user.id; + } + + + /** + * {@inheritDoc} + */ + public List getDatabases( Pattern pattern ) { + if ( pattern != null ) { + if ( pattern.containsWildcards ) { + return databaseNames.entrySet().stream().filter( e -> e.getKey().matches( pattern.toRegex() ) ).map( Entry::getValue ).sorted().collect( Collectors.toList() ); + } else { + if ( databaseNames.containsKey( pattern.pattern ) ) { + return Collections.singletonList( databaseNames.get( pattern.pattern ) ); + } else { + return new ArrayList<>(); + } + } + } else { + return new ArrayList<>( databases.values() ); + } + } + + + /** + * {@inheritDoc} + */ + private CatalogDatabase getDatabase( long databaseId ) { + try { + return Objects.requireNonNull( databases.get( databaseId ) ); + } catch ( NullPointerException e ) { + throw new UnknownDatabaseIdRuntimeException( databaseId ); + } + } + + + /** + * {@inheritDoc} + */ + @Override + public List 
getSchemas( Pattern schemaNamePattern ) {
+        if ( schemaNamePattern != null ) {
+            return schemaNames.values().stream().filter( s -> s.name.matches( schemaNamePattern.toRegex() ) ).collect( Collectors.toList() );
+        }
+        // No pattern given means all schemas
+        return new ArrayList<>( schemas.values() );
+    }
+
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    public CatalogSchema getSchema( long schemaId ) {
+        try {
+            return Objects.requireNonNull( schemas.get( schemaId ) );
+        } catch ( NullPointerException e ) {
+            throw new UnknownSchemaIdRuntimeException( schemaId );
+        }
+    }
+
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    public CatalogSchema getSchema( final String schemaName ) throws UnknownSchemaException {
+        String name = schemaName.toLowerCase();
+        try {
+            return Objects.requireNonNull( schemaNames.get( new Object[]{ name } ) );
+        } catch ( NullPointerException e ) {
+            throw new UnknownSchemaException( schemaName );
+        }
+    }
+
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    public long addNamespace( String name, int ownerId, NamespaceType namespaceType ) {
+        name = name.toLowerCase();
+        CatalogUser owner = getUser( ownerId );
+        long id = namespaceIdBuilder.getAndIncrement();
+        CatalogSchema schema = new CatalogSchema( id, name, ownerId, owner.name, namespaceType, namespaceType == NamespaceType.DOCUMENT || namespaceType == NamespaceType.GRAPH );
+        synchronized ( this ) {
+            schemas.put( id, schema );
+            schemaNames.put( new Object[]{ name }, schema );
+            schemaChildren.put( id, ImmutableList.<Long>builder().build() );
+        }
+        listeners.firePropertyChange( "namespace", null, schema );
+        return id;
+    }
+
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    public boolean checkIfExistsSchema( String schemaName ) {
+        schemaName = schemaName.toLowerCase();
+        return schemaNames.containsKey( new Object[]{ schemaName } );
+    }
+
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    public void renameSchema( long schemaId, String name ) {
+        name = name.toLowerCase();
+        try {
+            CatalogSchema old = Objects.requireNonNull( schemas.get( schemaId ) );
+            // keep the case-sensitivity flag of the renamed namespace
+            CatalogSchema schema = new CatalogSchema( old.id, name, old.ownerId, old.ownerName, old.namespaceType, old.caseSensitive );
+
+            synchronized ( this ) {
+                schemas.replace( schemaId, schema );
+                schemaNames.remove( new Object[]{ old.name } );
+                schemaNames.put( new Object[]{ name }, schema );
+            }
+            listeners.firePropertyChange( "schema", old, schema );
+        } catch ( NullPointerException e ) {
+            throw new UnknownSchemaIdRuntimeException( schemaId );
+        }
+    }
+
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    public void setSchemaOwner( long schemaId, long ownerId ) {
+        try {
+            CatalogSchema old = Objects.requireNonNull( schemas.get( schemaId ) );
+            CatalogSchema schema = new CatalogSchema( old.id, old.name, (int) ownerId, old.ownerName, old.namespaceType, old.caseSensitive );
+            synchronized ( this ) {
+                schemas.replace( schemaId, schema );
+                schemaNames.replace( new Object[]{ schema.name }, schema );
+            }
+            listeners.firePropertyChange( "schema", old, schema );
+        } catch ( NullPointerException e ) {
+            throw new UnknownSchemaIdRuntimeException( schemaId );
+        }
+    }
+
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    public long addGraph( String name, List<DataStore> stores, boolean modifiable, boolean ifNotExists, boolean replace ) {
+        if ( !getGraphs( new Pattern( name ) ).isEmpty() && !ifNotExists ) {
+            throw new GraphAlreadyExistsException( name );
+        }
+
+        long id = addNamespace( name, Catalog.defaultUserId, NamespaceType.GRAPH );
+
+        LogicalGraph graph = new LogicalGraph( id, name, Catalog.defaultUserId, modifiable, ImmutableList.of(), true );
+
+        synchronized ( this ) {
+            graphs.put( id, graph
); + graphNames.put( new Object[]{ name }, graph ); + } + + listeners.firePropertyChange( "graph", null, graph ); + return id; + } + + + /** + * {@inheritDoc} + */ + @Override + public void addGraphAlias( long graphId, String alias, boolean ifNotExists ) { + LogicalGraph graph = Objects.requireNonNull( getGraph( graphId ) ); + + if ( graphAliases.containsKey( alias ) ) { + if ( !ifNotExists ) { + throw new RuntimeException( "Error while creating alias: " + alias ); + } + return; + } + + synchronized ( this ) { + graphAliases.put( alias, graph ); + } + listeners.firePropertyChange( "graphAlias", null, alias ); + } + + + /** + * {@inheritDoc} + */ + @Override + public void removeGraphAlias( long graphId, String alias, boolean ifExists ) { + if ( !graphAliases.containsKey( alias ) ) { + if ( !ifExists ) { + throw new RuntimeException( "Error while removing alias: " + alias ); + } + return; + } + synchronized ( this ) { + graphAliases.remove( alias ); + } + listeners.firePropertyChange( "graphAlias", alias, null ); + } + + + /** + * {@inheritDoc} + */ + @Override + public CatalogGraphMapping getGraphMapping( long graphId ) { + return Objects.requireNonNull( graphMappings.get( graphId ) ); + } + + + /** + * {@inheritDoc} + */ + @Override + public void addGraphLogistics( long id, List stores, boolean onlyPlacement ) throws GenericCatalogException, UnknownTableException, UnknownColumnException { + /// --- nodes + // table id nodes -> id, node, labels + long nodesId; + if ( !onlyPlacement ) { + nodesId = addTable( "_nodes_", id, Catalog.defaultUserId, EntityType.ENTITY, true ); + } else { + nodesId = getTable( id, "_nodes_" ).id; + } + + stores.forEach( store -> addDataPlacement( store.getAdapterId(), nodesId ) ); + + long idNodeId; + long labelNodeId; + if ( !onlyPlacement ) { + idNodeId = addColumn( "_id_", nodesId, 0, PolyType.VARCHAR, null, 255, null, null, null, false, Collation.getDefaultCollation() ); + labelNodeId = addColumn( "_label_", nodesId, 1, PolyType.VARCHAR, null, 255, null, null, null, false, Collation.getDefaultCollation() ); + } else { + idNodeId = getColumn( nodesId, "_id_" ).id; + labelNodeId = getColumn( nodesId, "_label_" ).id; + } + + for ( DataStore s : stores ) { + addColumnPlacement( + s.getAdapterId(), + idNodeId, + PlacementType.AUTOMATIC, + null, + null, + null + ); + + addColumnPlacement( + s.getAdapterId(), + labelNodeId, + PlacementType.AUTOMATIC, + null, + null, + null + ); + } + + if ( !onlyPlacement ) { + addPrimaryKey( nodesId, List.of( idNodeId, labelNodeId ) ); + } + + /// --- node properties + + // table id nodes -> id, node, labels + long nodesPropertyId; + if ( !onlyPlacement ) { + nodesPropertyId = addTable( "_n_properties_", id, Catalog.defaultUserId, EntityType.ENTITY, true ); + } else { + nodesPropertyId = getTable( id, "_n_properties_" ).id; + } + + stores.forEach( store -> addDataPlacement( store.getAdapterId(), nodesPropertyId ) ); + + long idNodesPropertyId; + long keyNodePropertyId; + long valueNodePropertyId; + + if ( !onlyPlacement ) { + idNodesPropertyId = addColumn( "_id_", nodesPropertyId, 0, PolyType.VARCHAR, null, 255, null, null, null, false, Collation.getDefaultCollation() ); + keyNodePropertyId = addColumn( "_key_", nodesPropertyId, 1, PolyType.VARCHAR, null, 255, null, null, null, false, Collation.getDefaultCollation() ); + valueNodePropertyId = addColumn( "_value_", nodesPropertyId, 2, PolyType.VARCHAR, null, 255, null, null, null, false, Collation.getDefaultCollation() ); + } else { + idNodesPropertyId = getColumn( nodesPropertyId, 
"_id_" ).id; + keyNodePropertyId = getColumn( nodesPropertyId, "_key_" ).id; + valueNodePropertyId = getColumn( nodesPropertyId, "_value_" ).id; + } + + for ( DataStore s : stores ) { + addColumnPlacement( + s.getAdapterId(), + idNodesPropertyId, + PlacementType.AUTOMATIC, + null, + null, + null + ); + + addColumnPlacement( + s.getAdapterId(), + keyNodePropertyId, + PlacementType.AUTOMATIC, + null, + null, + null + ); + + addColumnPlacement( + s.getAdapterId(), + valueNodePropertyId, + PlacementType.AUTOMATIC, + null, + null, + null + ); + } + + if ( !onlyPlacement ) { + addPrimaryKey( nodesPropertyId, List.of( idNodesPropertyId, keyNodePropertyId ) ); + } + + /// --- edges + + // table id relationships -> id, rel, labels + long edgesId; + if ( !onlyPlacement ) { + edgesId = addTable( "_edges_", id, Catalog.defaultUserId, EntityType.ENTITY, true ); + } else { + edgesId = getTable( id, "_edges_" ).id; + } + + stores.forEach( store -> addDataPlacement( store.getAdapterId(), edgesId ) ); + + long idEdgeId; + long labelEdgeId; + long sourceEdgeId; + long targetEdgeId; + + if ( !onlyPlacement ) { + idEdgeId = addColumn( + "_id_", + edgesId, + 0, + PolyType.VARCHAR, + null, + 36, + null, + null, + null, + false, + Collation.getDefaultCollation() ); + labelEdgeId = addColumn( + "_label_", + edgesId, + 1, + PolyType.VARCHAR, + null, + 255, + null, + null, + null, + false, + Collation.getDefaultCollation() ); + sourceEdgeId = addColumn( + "_l_id_", + edgesId, + 2, + PolyType.VARCHAR, + null, + 36, + null, + null, + null, + false, + Collation.getDefaultCollation() ); + targetEdgeId = addColumn( + "_r_id_", + edgesId, + 3, + PolyType.VARCHAR, + null, + 36, + null, + null, + null, + false, + Collation.getDefaultCollation() ); + } else { + idEdgeId = getColumn( edgesId, "_id_" ).id; + labelEdgeId = getColumn( edgesId, "_label_" ).id; + sourceEdgeId = getColumn( edgesId, "_l_id_" ).id; + targetEdgeId = getColumn( edgesId, "_r_id_" ).id; + } + + for ( DataStore store : stores ) { + addColumnPlacement( + store.getAdapterId(), + idEdgeId, + PlacementType.AUTOMATIC, + null, + null, + null + ); + + addColumnPlacement( + store.getAdapterId(), + labelEdgeId, + PlacementType.AUTOMATIC, + null, + null, + null + ); + + addColumnPlacement( + store.getAdapterId(), + sourceEdgeId, + PlacementType.AUTOMATIC, + null, + null, + null + ); + + addColumnPlacement( + store.getAdapterId(), + targetEdgeId, + PlacementType.AUTOMATIC, + null, + null, + null + ); + } + + if ( !onlyPlacement ) { + addPrimaryKey( edgesId, Collections.singletonList( idEdgeId ) ); + } + + /// --- edge properties + + // table id nodes -> id, node, labels + long edgesPropertyId; + if ( !onlyPlacement ) { + edgesPropertyId = addTable( "_properties_", id, Catalog.defaultUserId, EntityType.ENTITY, true ); + } else { + edgesPropertyId = getTable( id, "_properties_" ).id; + } + + stores.forEach( store -> addDataPlacement( store.getAdapterId(), edgesPropertyId ) ); + + long idEdgePropertyId; + long keyEdgePropertyId; + long valueEdgePropertyId; + + if ( !onlyPlacement ) { + idEdgePropertyId = addColumn( + "_id_", + edgesPropertyId, + 0, + PolyType.VARCHAR, + null, + 255, + null, + null, + null, + false, + Collation.getDefaultCollation() ); + keyEdgePropertyId = addColumn( + "_key_", + edgesPropertyId, + 1, + PolyType.VARCHAR, + null, + 255, + null, + null, + null, + false, + Collation.getDefaultCollation() ); + valueEdgePropertyId = addColumn( + "_value_", + edgesPropertyId, + 2, + PolyType.VARCHAR, + null, + 255, + null, + null, + null, + false, + 
Collation.getDefaultCollation() ); + } else { + idEdgePropertyId = getColumn( edgesPropertyId, "_id_" ).id; + keyEdgePropertyId = getColumn( edgesPropertyId, "_key_" ).id; + valueEdgePropertyId = getColumn( edgesPropertyId, "_value_" ).id; + } + + for ( DataStore s : stores ) { + addColumnPlacement( + s.getAdapterId(), + idEdgePropertyId, + PlacementType.AUTOMATIC, + null, + null, + null + ); + + addColumnPlacement( + s.getAdapterId(), + keyEdgePropertyId, + PlacementType.AUTOMATIC, + null, + null, + null + ); + + addColumnPlacement( + s.getAdapterId(), + valueEdgePropertyId, + PlacementType.AUTOMATIC, + null, + null, + null + ); + } + + if ( !onlyPlacement ) { + addPrimaryKey( edgesPropertyId, List.of( idEdgePropertyId, keyEdgePropertyId ) ); + + CatalogGraphMapping mapping = new CatalogGraphMapping( + id, + nodesId, + idNodeId, + labelNodeId, + nodesPropertyId, + idNodesPropertyId, + keyNodePropertyId, + valueNodePropertyId, + edgesId, + idEdgeId, + labelEdgeId, + sourceEdgeId, + targetEdgeId, + edgesPropertyId, + idEdgePropertyId, + keyEdgePropertyId, + valueEdgePropertyId ); + + graphMappings.put( id, mapping ); + } + + } + + + private void removeGraphLogistics( long graphId ) { + if ( !graphMappings.containsKey( graphId ) ) { + throw new UnknownGraphException( graphId ); + } + + deleteSchema( graphId ); + } + + + /** + * {@inheritDoc} + */ + @Override + public void deleteGraph( long id ) { + if ( !graphs.containsKey( id ) ) { + throw new UnknownGraphException( id ); + } + + LogicalGraph old = Objects.requireNonNull( graphs.get( id ) ); + + removeGraphLogistics( id ); + + synchronized ( this ) { + old.placements.forEach( a -> graphPlacements.remove( new Object[]{ old.id, a } ) ); + graphs.remove( id ); + graphNames.remove( new Object[]{ old.name } ); + graphMappings.remove( id ); + } + listeners.firePropertyChange( "graph", old, null ); + } + + + /** + * {@inheritDoc} + */ + @Override + public LogicalGraph getGraph( long id ) { + if ( !graphs.containsKey( id ) ) { + throw new UnknownGraphException( id ); + } + return graphs.get( id ); + } + + + /** + * {@inheritDoc} + */ + @Override + public List getGraphs( Pattern graphName ) { + if ( graphName != null ) { + return ImmutableList.copyOf( + Stream.concat( + graphAliases.values().stream(), + graphs.values().stream() ).filter( g -> g.name.matches( graphName.pattern.toLowerCase() ) ) + .collect( Collectors.toList() ) ); + } else { + return ImmutableList.copyOf( graphs.values() ); + } + } + + + /** + * {@inheritDoc} + */ + @Override + public void deleteSchema( long schemaId ) { + CatalogSchema schema = getSchema( schemaId ); + synchronized ( this ) { + schemaNames.remove( new Object[]{ schema.name } ); + + for ( Long id : Objects.requireNonNull( schemaChildren.get( schemaId ) ) ) { + deleteTable( id ); + } + schemaChildren.remove( schemaId ); + schemas.remove( schemaId ); + + } + listeners.firePropertyChange( "Schema", schema, null ); + } + + + /** + * {@inheritDoc} + */ + @Override + public List getTables( long schemaId, Pattern tableNamePattern ) { + if ( schemas.containsKey( schemaId ) ) { + + CatalogSchema schema = Objects.requireNonNull( schemas.get( schemaId ) ); + if ( tableNamePattern != null ) { + return Collections.singletonList( tableNames.get( new Object[]{ schemaId, tableNamePattern.pattern } ) ); + } else { + return new ArrayList<>( tableNames.prefixSubMap( new Object[]{ schemaId } ).values() ); + } + } + return new ArrayList<>(); + } + + + /** + * {@inheritDoc} + */ + @Override + public List getTables( Pattern 
schemaNamePattern, Pattern tableNamePattern ) { + if ( schemaNamePattern != null && tableNamePattern != null ) { + CatalogSchema schema = schemaNames.get( new Object[]{ schemaNamePattern.pattern } ); + if ( schema != null ) { + return Collections.singletonList( Objects.requireNonNull( tableNames.get( new Object[]{ schema.id, tableNamePattern.pattern } ) ) ); + } + } else if ( schemaNamePattern != null ) { + CatalogSchema schema = schemaNames.get( new Object[]{ schemaNamePattern.pattern } ); + if ( schema != null ) { + return new ArrayList<>( tableNames.prefixSubMap( new Object[]{ schema.id } ).values() ); + } + } else { + return new ArrayList<>( tableNames.values() ); + } + + return new ArrayList<>(); + } + + + /** + * {@inheritDoc} + */ + @Override + public LogicalTable getTable( long tableId ) { + try { + return Objects.requireNonNull( tables.get( tableId ) ); + } catch ( NullPointerException e ) { + throw new UnknownTableIdRuntimeException( tableId ); + } + } + + + /** + * {@inheritDoc} + */ + @Override + public LogicalTable getTable( long schemaId, String tableName ) throws UnknownTableException { + try { + CatalogSchema schema = getSchema( schemaId ); + if ( !schema.caseSensitive ) { + tableName = tableName.toLowerCase(); + } + return Objects.requireNonNull( tableNames.get( new Object[]{ schemaId, tableName } ) ); + } catch ( NullPointerException e ) { + throw new UnknownTableException( schemaId, tableName ); + } + } + + + /** + * {@inheritDoc} + */ + @Override + public LogicalTable getTableFromPartition( long partitionId ) { + return getTable( getPartition( partitionId ).tableId ); + } + + + /** + * {@inheritDoc} + */ + @Override + public LogicalTable getTable( String schemaName, String tableName ) throws UnknownTableException, UnknownSchemaException { + try { + CatalogSchema schema = getSchema( schemaName ); + if ( !schema.caseSensitive ) { + tableName = tableName.toLowerCase(); + } + + return Objects.requireNonNull( tableNames.get( new Object[]{ schema.id, tableName } ) ); + } catch ( NullPointerException e ) { + throw new UnknownTableException( schemaName, tableName ); + } + } + + + /** + * {@inheritDoc} + */ + @Override + public long addTable( String name, long namespaceId, int ownerId, EntityType entityType, boolean modifiable ) { + long id = entityIdBuilder.getAndIncrement(); + CatalogSchema schema = getSchema( namespaceId ); + if ( !schema.caseSensitive ) { + name = name.toLowerCase(); + } + + try { + //Technically every Table is partitioned. 
But an UNPARTITIONED table consists of exactly one partition group holding a single partition.
+            List<Long> partitionGroupIds = new ArrayList<>();
+            partitionGroupIds.add( addPartitionGroup( id, "full", namespaceId, PartitionType.NONE, 1, new ArrayList<>(), true ) );
+            // Get the (single) partition group and collect all of its partition ids for the partition property
+            CatalogPartitionGroup defaultUnpartitionedGroup = getPartitionGroup( partitionGroupIds.get( 0 ) );
+
+            PartitionProperty partitionProperty = PartitionProperty.builder()
+                    .partitionType( PartitionType.NONE )
+                    .isPartitioned( false )
+                    .partitionGroupIds( ImmutableList.copyOf( partitionGroupIds ) )
+                    .partitionIds( ImmutableList.copyOf( defaultUnpartitionedGroup.partitionIds ) )
+                    .reliesOnPeriodicChecks( false )
+                    .build();
+
+            LogicalTable table = new LogicalTable(
+                    id,
+                    name,
+                    ImmutableList.of(),
+                    namespaceId,
+                    ownerId,
+                    entityType,
+                    null,
+                    ImmutableList.of(),
+                    modifiable,
+                    partitionProperty,
+                    ImmutableList.of() );
+
+            updateEntityLogistics( name, namespaceId, id, schema, table );
+            if ( schema.namespaceType != NamespaceType.DOCUMENT ) {
+                openTable = id;
+            }
+
+        } catch ( GenericCatalogException e ) {
+            throw new RuntimeException( "Error when adding table " + name, e );
+        }
+        return id;
+    }
+
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    public long addView( String name, long namespaceId, int ownerId, EntityType entityType, boolean modifiable, AlgNode definition, AlgCollation algCollation, Map<Long, List<Long>> underlyingTables, AlgDataType fieldList, String query, QueryLanguage language ) {
+        long id = entityIdBuilder.getAndIncrement();
+        CatalogSchema schema = getSchema( namespaceId );
+
+        if ( !schema.caseSensitive ) {
+            name = name.toLowerCase();
+        }
+
+        PartitionProperty partitionProperty = PartitionProperty.builder()
+                .partitionType( PartitionType.NONE )
+                .reliesOnPeriodicChecks( false )
+                .partitionIds( ImmutableList.copyOf( new ArrayList<>() ) )
+                .partitionGroupIds( ImmutableList.copyOf( new ArrayList<>() ) )
+                .build();
+
+        if ( entityType != EntityType.VIEW ) {
+            // Should not happen; addView is only called with EntityType.VIEW
+            throw new RuntimeException( "addViewTable is only possible with EntityType = VIEW" );
+        }
+        CatalogView viewTable = new CatalogView(
+                id,
+                name,
+                ImmutableList.of(),
+                namespaceId,
+                ownerId,
+                entityType,
+                query, //definition
+                null,
+                ImmutableList.of(),
+                modifiable,
+                partitionProperty,
+                algCollation,
+                ImmutableList.of(),
+                underlyingTables,
+                language.getSerializedName() //fieldList
+        );
+        addConnectedViews( underlyingTables, viewTable.id );
+        updateEntityLogistics( name, namespaceId, id, schema, viewTable );
+        nodeInfo.put( id, definition );
+
+        return id;
+    }
+
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    public long addMaterializedView( String name, long namespaceId, int ownerId, EntityType entityType, boolean modifiable, AlgNode definition, AlgCollation algCollation, Map<Long, List<Long>> underlyingTables, AlgDataType fieldList, MaterializedCriteria materializedCriteria, String query, QueryLanguage language, boolean ordered ) throws GenericCatalogException {
+        long id = entityIdBuilder.getAndIncrement();
+        CatalogSchema schema = getSchema( namespaceId );
+
+        if ( !schema.caseSensitive ) {
+            name = name.toLowerCase();
+        }
+
+        // Technically every table is partitioned. But an UNPARTITIONED table consists of exactly one partition group holding a single partition.
+        List<Long> partitionGroupIds = new ArrayList<>();
+        partitionGroupIds.add( addPartitionGroup( id, "full", namespaceId, PartitionType.NONE, 1, new ArrayList<>(), true ) );
+
+        // Get the single partition group and retrieve all contained partitionIds for the partitionProperty
+        CatalogPartitionGroup defaultUnpartitionedGroup = getPartitionGroup( partitionGroupIds.get( 0 ) );
+
+        PartitionProperty partitionProperty = PartitionProperty.builder()
+                .partitionType( PartitionType.NONE )
+                .isPartitioned( false )
+                .partitionGroupIds( ImmutableList.copyOf( partitionGroupIds ) )
+                .partitionIds( ImmutableList.copyOf( defaultUnpartitionedGroup.partitionIds ) )
+                .reliesOnPeriodicChecks( false )
+                .build();
+
+        if ( entityType == EntityType.MATERIALIZED_VIEW ) {
+            Map<Long, ImmutableList<Long>> map = new HashMap<>();
+            for ( Entry<Long, List<Long>> e : underlyingTables.entrySet() ) {
+                if ( map.put( e.getKey(), ImmutableList.copyOf( e.getValue() ) ) != null ) {
+                    throw new IllegalStateException( "Duplicate key" );
+                }
+            }
+            CatalogMaterializedView materializedViewTable = new CatalogMaterializedView(
+                    id,
+                    name,
+                    List.of(),
+                    namespaceId,
+                    ownerId,
+                    entityType,
+                    query,
+                    null,
+                    List.of(),
+                    modifiable,
+                    partitionProperty,
+                    algCollation,
+                    List.of(),
+                    Map.copyOf( map ),
+                    language.getSerializedName(),
+                    materializedCriteria,
+                    ordered
+            );
+            addConnectedViews( underlyingTables, materializedViewTable.id );
+            updateEntityLogistics( name, namespaceId, id, schema, materializedViewTable );
+
+            nodeInfo.put( id, definition );
+        } else {
+            // Should not happen; addMaterializedView is only called with EntityType.MATERIALIZED_VIEW
+            throw new RuntimeException( "addMaterializedViewTable is only possible with EntityType = MATERIALIZED_VIEW" );
+        }
+        return id;
+    }
+
+
+    /**
+     * Updates all bookkeeping information after the addition of any kind of table
+     */
+    private void updateEntityLogistics( String name, long namespaceId, long id, CatalogSchema schema, LogicalTable entity ) {
+        synchronized ( this ) {
+            tables.put( id, entity );
+            tableChildren.put( id, ImmutableList.<Long>builder().build() );
+            tableNames.put( new Object[]{ namespaceId, name }, entity );
+            List<Long> children = new ArrayList<>( Objects.requireNonNull( schemaChildren.get( namespaceId ) ) );
+            children.add( id );
+            schemaChildren.replace( namespaceId, ImmutableList.copyOf( children ) );
+        }
+
+        listeners.firePropertyChange( "entity", null, entity );
+    }
+
+
+    /**
+     * Registers the given view on all underlying tables, so each table knows which views depend on it
+     */
+    public void addConnectedViews( Map<Long, List<Long>> underlyingTables, long viewId ) {
+        for ( long id : underlyingTables.keySet() ) {
+            LogicalTable old = getTable( id );
+            List<Long> connectedViews = new ArrayList<>( old.connectedViews );
+            connectedViews.add( viewId );
+            LogicalTable table = old.withConnectedViews( ImmutableList.copyOf( connectedViews ) );
+            synchronized ( this ) {
+                tables.replace( id, table );
+                assert table != null;
+                tableNames.replace( new Object[]{ table.namespaceId, old.name }, table );
+            }
+            listeners.firePropertyChange( "table", old, table );
+        }
+    }
+
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    public void deleteViewDependencies( CatalogView catalogView ) {
+        for ( long id : catalogView.getUnderlyingTables().keySet() ) {
+            LogicalTable old = getTable( id );
+            List<Long> connectedViews = old.connectedViews.stream().filter( e -> e != catalogView.id ).collect( Collectors.toList() );
+
+            LogicalTable table =
old.withConnectedViews( ImmutableList.copyOf( connectedViews ) ); + + synchronized ( this ) { + tables.replace( id, table ); + assert table != null; + tableNames.replace( new Object[]{ table.namespaceId, old.name }, table ); + } + listeners.firePropertyChange( "table", old, table ); + } + } + + + /** + * {@inheritDoc} + */ + @Override + public boolean checkIfExistsEntity( long namespaceId, String entityName ) { + CatalogSchema schema = getSchema( namespaceId ); + if ( !schema.caseSensitive ) { + entityName = entityName.toLowerCase(); + } + return tableNames.containsKey( new Object[]{ namespaceId, entityName } ); + } + + + /** + * {@inheritDoc} + */ + @Override + public boolean checkIfExistsEntity( long tableId ) { + return tables.containsKey( tableId ); + } + + + /** + * {@inheritDoc} + */ + @Override + public void renameTable( long tableId, String name ) { + LogicalTable old = getTable( tableId ); + if ( !getSchema( old.namespaceId ).caseSensitive ) { + name = name.toLowerCase(); + } + + LogicalTable table = old.withName( name ); + synchronized ( this ) { + tables.replace( tableId, table ); + tableNames.remove( new Object[]{ table.namespaceId, old.name } ); + tableNames.put( new Object[]{ table.namespaceId, name }, table ); + } + listeners.firePropertyChange( "table", old, table ); + } + + + /** + * {@inheritDoc} + */ + @Override + public void deleteTable( long tableId ) { + LogicalTable table = getTable( tableId ); + List children = new ArrayList<>( Objects.requireNonNull( schemaChildren.get( table.namespaceId ) ) ); + children.remove( tableId ); + synchronized ( this ) { + schemaChildren.replace( table.namespaceId, ImmutableList.copyOf( children ) ); + + if ( table.partitionProperty.reliesOnPeriodicChecks ) { + removeTableFromPeriodicProcessing( tableId ); + } + + if ( table.partitionProperty.isPartitioned ) { + for ( Long partitionGroupId : Objects.requireNonNull( table.partitionProperty.partitionGroupIds ) ) { + deletePartitionGroup( table.id, table.namespaceId, partitionGroupId ); + } + } + + for ( Long columnId : Objects.requireNonNull( tableChildren.get( tableId ) ) ) { + deleteColumn( columnId ); + } + + // Remove all placement containers along with all placements + table.dataPlacements.forEach( adapterId -> removeDataPlacement( adapterId, tableId ) ); + + tableChildren.remove( tableId ); + tables.remove( tableId ); + tableNames.remove( new Object[]{ table.namespaceId, table.name } ); + flagTableForDeletion( table.id, false ); + // primary key was deleted and open table has to be closed + if ( openTable != null && openTable == tableId ) { + openTable = null; + } + + } + listeners.firePropertyChange( "table", table, null ); + } + + + /** + * {@inheritDoc} + */ + @Override + public void setTableOwner( long tableId, int ownerId ) { + LogicalTable old = getTable( tableId ); + LogicalTable table = old.withOwnerId( ownerId ); + + synchronized ( this ) { + tables.replace( tableId, table ); + tableNames.replace( new Object[]{ table.namespaceId, table.name }, table ); + } + listeners.firePropertyChange( "table", old, table ); + } + + + /** + * {@inheritDoc} + */ + @Override + public void setPrimaryKey( long tableId, Long keyId ) { + LogicalTable old = getTable( tableId ); + + LogicalTable table = old.withPrimaryKey( keyId ); + + synchronized ( this ) { + tables.replace( tableId, table ); + tableNames.replace( new Object[]{ table.namespaceId, table.name }, table ); + + if ( keyId == null ) { + openTable = tableId; + } else { + primaryKeys.put( keyId, new CatalogPrimaryKey( 
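+                        // wraps the existing generic key entry as the table's primary key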
Objects.requireNonNull( keys.get( keyId ) ) ) ); + openTable = null; + } + } + listeners.firePropertyChange( "table", old, table ); + } + + + /** + * {@inheritDoc} + */ + @Override + public void addColumnPlacement( int adapterId, long columnId, PlacementType placementType, String physicalSchemaName, String physicalTableName, String physicalColumnName ) { + LogicalColumn column = Objects.requireNonNull( columns.get( columnId ) ); + CatalogAdapter store = Objects.requireNonNull( adapters.get( adapterId ) ); + + CatalogColumnPlacement columnPlacement = new CatalogColumnPlacement( + column.schemaId, + column.tableId, + columnId, + adapterId, + store.uniqueName, + placementType, + physicalSchemaName, + physicalColumnName, + physicalPositionBuilder.getAndIncrement() ); + + synchronized ( this ) { + columnPlacements.put( new Object[]{ adapterId, columnId }, columnPlacement ); + + // Adds this ColumnPlacement to existing DataPlacement container + addColumnsToDataPlacement( adapterId, column.tableId, List.of( columnId ) ); + } + listeners.firePropertyChange( "columnPlacement", null, columnPlacement ); + } + + + /** + * {@inheritDoc} + */ + @Override + public void updatePartitionPlacementPhysicalNames( int adapterId, long partitionId, String physicalSchemaName, String physicalTableName ) { + try { + CatalogPartitionPlacement old = Objects.requireNonNull( partitionPlacements.get( new Object[]{ adapterId, partitionId } ) ); + CatalogPartitionPlacement placement = new CatalogPartitionPlacement( + old.namespaceId, + old.tableId, + old.adapterId, + old.adapterUniqueName, + old.placementType, + physicalSchemaName, + physicalTableName, + old.partitionId, + old.role ); + + synchronized ( this ) { + partitionPlacements.replace( new Object[]{ adapterId, partitionId }, placement ); + listeners.firePropertyChange( "partitionPlacement", old, placement ); + } + } catch ( NullPointerException e ) { + getAdapter( adapterId ); + getPartition( partitionId ); + throw new UnknownPartitionPlacementException( adapterId, partitionId ); + } + } + + + /** + * {@inheritDoc} + */ + @Override + public void updateMaterializedViewRefreshTime( long materializedViewId ) { + CatalogMaterializedView old = (CatalogMaterializedView) getTable( materializedViewId ); + + MaterializedCriteria materializedCriteria = old.getMaterializedCriteria(); + materializedCriteria.setLastUpdate( new Timestamp( System.currentTimeMillis() ) ); + + CatalogMaterializedView view = old.withMaterializedCriteria( materializedCriteria ); + + synchronized ( this ) { + tables.replace( materializedViewId, view ); + tableNames.replace( + new Object[]{ view.namespaceId, view.name }, + view ); + } + listeners.firePropertyChange( "table", old, view ); + } + + + /** + * {@inheritDoc} + */ + @Override + public LogicalCollection getCollection( long id ) { + if ( !collections.containsKey( id ) ) { + throw new UnknownTableIdRuntimeException( id ); + } + return Objects.requireNonNull( collections.get( id ) ); + } + + + /** + * {@inheritDoc} + */ + @Override + public List getCollections( long namespaceId, Pattern namePattern ) { + if ( schemas.containsKey( namespaceId ) ) { + CatalogSchema schema = Objects.requireNonNull( schemas.get( namespaceId ) ); + if ( namePattern != null ) { + LogicalCollection collection = collectionNames.get( new Object[]{ namespaceId, namePattern.pattern } ); + if ( collection == null ) { + return new ArrayList<>(); + } + return Collections.singletonList( collection ); + } else { + return new ArrayList<>( collectionNames.prefixSubMap( new 
Object[]{ namespaceId } ).values() );
+            }
+        }
+        return new ArrayList<>();
+    }
+
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    public long addCollection( Long id, String name, long schemaId, int currentUserId, EntityType entity, boolean modifiable ) {
+        long collectionId = entityIdBuilder.getAndIncrement();
+        if ( id != null ) {
+            collectionId = id;
+        }
+
+        getSchema( schemaId ); // ensures that the target namespace exists
+        LogicalCollection collection = new LogicalCollection(
+                Catalog.defaultDatabaseId,
+                schemaId,
+                collectionId,
+                name,
+                List.of(),
+                EntityType.ENTITY,
+                null );
+
+        synchronized ( this ) {
+            collections.put( collectionId, collection );
+            collectionNames.put( new Object[]{ schemaId, name }, collection );
+        }
+        // notify listeners with the newly created collection (not with the entity type)
+        listeners.firePropertyChange( "collection", null, collection );
+
+        return collectionId;
+    }
+
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    public long addCollectionPlacement( long namespaceId, int adapterId, long collectionId, PlacementType placementType ) {
+        long id = partitionIdBuilder.getAndIncrement();
+        CatalogCollectionPlacement placement = new CatalogCollectionPlacement( namespaceId, adapterId, collectionId, null, null, id );
+        LogicalCollection old = collections.get( collectionId );
+        if ( old == null ) {
+            throw new UnknownCollectionException( collectionId );
+        }
+
+        LogicalCollection collection = old.addPlacement( adapterId );
+
+        synchronized ( this ) {
+            collectionPlacements.put( new Object[]{ collectionId, adapterId }, placement );
+            collections.replace( collectionId, collection );
+            collectionNames.replace( new Object[]{ collection.databaseId, collection.namespaceId, collection.name }, collection );
+        }
+        listeners.firePropertyChange( "collectionPlacement", null, placement );
+        return id;
+    }
+
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    public void updateCollectionPartitionPhysicalNames( long namespaceId, long collectionId, int adapterId, String physicalNamespaceName, String namespaceName, String physicalCollectionName ) {
+        LogicalCollection old = getCollection( collectionId );
+        if ( old == null ) {
+            throw new UnknownCollectionException( collectionId );
+        }
+
+        CatalogCollectionPlacement placement = new CatalogCollectionPlacement( namespaceId, adapterId, collectionId, physicalCollectionName, physicalNamespaceName, old.id );
+        LogicalCollection collection = old.setPhysicalName( physicalCollectionName );
+        synchronized ( this ) {
+            collections.replace( collectionId, collection );
+            collectionNames.replace( new Object[]{ collection.databaseId, collection.namespaceId, collection.name }, collection );
+            collectionPlacements.replace( new Object[]{ collectionId, adapterId }, placement );
+        }
+        listeners.firePropertyChange( "collectionPlacements", old, collection );
+    }
+
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    public CatalogCollectionMapping getCollectionMapping( long id ) {
+        if ( !documentMappings.containsKey( id ) ) {
+            throw new UnknownTableIdRuntimeException( id );
+        }
+        return Objects.requireNonNull( documentMappings.get( id ) );
+    }
+
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    public long addCollectionLogistics( long schemaId, String name, List<DataStore> stores, boolean onlyPlacement ) throws GenericCatalogException {
+        long tableId;
+        if ( onlyPlacement ) {
+            try {
+                tableId = getTable( schemaId, name ).id;
+            } catch ( UnknownTableException e ) {
+                throw new RuntimeException( e );
+            }
+        } else {
+            tableId = addTable( name, schemaId, Catalog.defaultUserId, EntityType.ENTITY, true );
+        }
+
+        stores.forEach( store -> addDataPlacement( store.getAdapterId(), tableId ) );
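+
+        // Relational backing of a collection: an "_id_" column for the document
+        // identifier and a "_data_" column holding the document itself as JSON.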
+        long idId;
+        long dataId;
+        if ( !onlyPlacement ) {
+            idId = addColumn( "_id_", tableId, 0, PolyType.VARCHAR, null, 255, null, null, null, false, Collation.getDefaultCollation() );
+            dataId = addColumn( "_data_", tableId, 1, PolyType.JSON, null, null, null, null, null, false, Collation.getDefaultCollation() );
+        } else {
+            try {
+                idId = getColumn( tableId, "_id_" ).id;
+                dataId = getColumn( tableId, "_data_" ).id;
+            } catch ( UnknownColumnException e ) {
+                throw new RuntimeException( "Error while adding a document placement.", e );
+            }
+        }
+
+        for ( DataStore s : stores ) {
+            addColumnPlacement(
+                    s.getAdapterId(),
+                    idId,
+                    PlacementType.AUTOMATIC,
+                    null,
+                    null,
+                    null
+            );
+
+            addColumnPlacement(
+                    s.getAdapterId(),
+                    dataId,
+                    PlacementType.AUTOMATIC,
+                    null,
+                    null,
+                    null
+            );
+        }
+
+        addPrimaryKey( tableId, List.of( idId, dataId ) );
+
+        if ( !onlyPlacement ) {
+            CatalogCollectionMapping mapping = new CatalogCollectionMapping( tableId, idId, dataId );
+            documentMappings.put( tableId, mapping );
+        }
+
+        return tableId;
+    }
+
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    public void deleteCollection( long id ) {
+        LogicalCollection collection = getCollection( id );
+
+        synchronized ( this ) {
+            // remove the collection by its own id, not by its namespace id
+            collections.remove( id );
+            collectionNames.remove( new Object[]{ collection.databaseId, collection.namespaceId, collection.name } );
+            collection.placements.forEach( p -> collectionPlacements.remove( new Object[]{ collection.id, p } ) );
+        }
+        listeners.firePropertyChange( "collection", null, null );
+    }
+
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    public void dropCollectionPlacement( long id, int adapterId ) {
+        LogicalCollection oldCollection = Objects.requireNonNull( collections.get( id ) );
+        LogicalCollection collection = oldCollection.removePlacement( adapterId );
+
+        synchronized ( this ) {
+            collectionPlacements.remove( new Object[]{ id, adapterId } );
+            collections.replace( id, collection );
+            collectionNames.replace( new Object[]{ collection.databaseId, collection.namespaceId, collection.name }, collection );
+        }
+        listeners.firePropertyChange( "collectionPlacement", null, null );
+    }
+
+
+    /**
+     * {@inheritDoc}
+     */
+    public List getGraphPlacements( int adapterId ) {
+        return graphPlacements.entrySet().stream().filter( e -> e.getKey()[1].equals( adapterId ) ).map( Entry::getValue ).collect( Collectors.toList() );
+    }
+
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    public List<CatalogCollectionPlacement> getCollectionPlacementsByAdapter( int adapterId ) {
+        return collectionPlacements.values().stream().filter( p -> p.adapter == adapterId ).collect( Collectors.toList() );
+    }
+
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    public CatalogCollectionPlacement getCollectionPlacement( long collectionId, int adapterId ) {
+        if ( !collectionPlacements.containsKey( new Object[]{ collectionId, adapterId } ) ) {
+            throw new UnknownCollectionPlacementException( collectionId, adapterId );
+        }
+
+        return collectionPlacements.get( new Object[]{ collectionId, adapterId } );
+    }
+
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    public void deleteColumnPlacement( int adapterId, long columnId, boolean columnOnly ) {
+        LogicalTable oldTable = getTable( getColumn( columnId ).tableId );
+
+        synchronized ( this ) {
+            if ( log.isDebugEnabled() ) {
+                log.debug( "Is flagged for deletion {}", isTableFlaggedForDeletion( oldTable.id ) );
+            }
+
+            if ( oldTable.partitionProperty.isPartitioned ) {
+                if ( !isTableFlaggedForDeletion( oldTable.id ) ) {
+                    if ( !columnOnly ) {
+                        if ( !validateDataPlacementsConstraints(
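+                            // presumably: verify that every partition keeps a complete placement somewhere after this removal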
oldTable.id, adapterId, Arrays.asList( columnId ), new ArrayList<>() ) ) { + throw new RuntimeException( "Partition Distribution failed" ); + } + } + } + } + + removeColumnsFromDataPlacement( adapterId, oldTable.id, Arrays.asList( columnId ) ); + columnPlacements.remove( new Object[]{ adapterId, columnId } ); + } + listeners.firePropertyChange( "columnPlacement", oldTable, null ); + } + + + /** + * {@inheritDoc} + */ + @Override + public CatalogColumnPlacement getColumnPlacement( int adapterId, long columnId ) { + try { + return Objects.requireNonNull( columnPlacements.get( new Object[]{ adapterId, columnId } ) ); + } catch ( NullPointerException e ) { + getAdapter( adapterId ); + getColumn( columnId ); + throw new UnknownColumnPlacementRuntimeException( adapterId, columnId ); + } + } + + + /** + * {@inheritDoc} + */ + @Override + public boolean checkIfExistsColumnPlacement( int adapterId, long columnId ) { + CatalogColumnPlacement placement = columnPlacements.get( new Object[]{ adapterId, columnId } ); + return placement != null; + } + + + /** + * {@inheritDoc} + */ + @Override + public List getColumnPlacementsOnAdapter( int adapterId ) { + return new ArrayList<>( columnPlacements.prefixSubMap( new Object[]{ adapterId } ).values() ); + } + + + /** + * {@inheritDoc} + */ + @Override + public List getColumnPlacementsOnAdapterPerTable( int adapterId, long tableId ) { + final Comparator columnPlacementComparator = Comparator.comparingInt( p -> getColumn( p.columnId ).position ); + return getColumnPlacementsOnAdapter( adapterId ) + .stream() + .filter( p -> p.tableId == tableId ) + .sorted( columnPlacementComparator ) + .collect( Collectors.toList() ); + } + + + /** + * {@inheritDoc} + */ + @Override + public List getColumnPlacementsOnAdapterSortedByPhysicalPosition( int adapterId, long tableId ) { + final Comparator columnPlacementComparator = Comparator.comparingLong( p -> p.physicalPosition ); + return getColumnPlacementsOnAdapter( adapterId ) + .stream() + .filter( p -> p.tableId == tableId ) + .sorted( columnPlacementComparator ) + .collect( Collectors.toList() ); + } + + + /** + * {@inheritDoc} + */ + @Override + public List getColumnPlacementsByColumn( long columnId ) { + return columnPlacements.values() + .stream() + .filter( p -> p.columnId == columnId ) + .collect( Collectors.toList() ); + } + + + /** + * {@inheritDoc} + */ + @Override + public ImmutableMap> getColumnPlacementsByAdapter( long tableId ) { + LogicalTable table = getTable( tableId ); + Map> columnPlacementsByAdapter = new HashMap<>(); + + table.dataPlacements.forEach( adapterId -> columnPlacementsByAdapter.put( + adapterId, + ImmutableList.copyOf( + getDataPlacement( adapterId, tableId ).columnPlacementsOnAdapter ) + ) + ); + + return ImmutableMap.copyOf( columnPlacementsByAdapter ); + } + + + /** + * {@inheritDoc} + */ + @Override + public ImmutableMap> getPartitionPlacementsByAdapter( long tableId ) { + LogicalTable table = getTable( tableId ); + Map> partitionPlacementsByAdapter = new HashMap<>(); + + table.dataPlacements.forEach( adapterId -> partitionPlacementsByAdapter.put( + adapterId, + ImmutableList.copyOf( + getDataPlacement( adapterId, tableId ).getAllPartitionIds() ) + ) + ); + + return ImmutableMap.copyOf( partitionPlacementsByAdapter ); + } + + + /** + * {@inheritDoc} + */ + @Override + public ImmutableMap> getPartitionGroupsByAdapter( long tableId ) { + return null; + } + + + /** + * {@inheritDoc} + */ + @Override + public long getPartitionGroupByPartition( long partitionId ) { + return getPartition( 
partitionId ).partitionGroupId; + } + + + /** + * {@inheritDoc} + */ + @Override + public List getColumnPlacement( long columnId ) { + return columnPlacements.values() + .stream() + .filter( p -> p.columnId == columnId ) + .collect( Collectors.toList() ); + } + + + /** + * {@inheritDoc} + */ + @Override + public List getColumnPlacementsOnAdapterAndSchema( int adapterId, long schemaId ) { + try { + return getColumnPlacementsOnAdapter( adapterId ).stream().filter( p -> Objects.requireNonNull( columns.get( p.columnId ) ).schemaId == schemaId ).collect( Collectors.toList() ); + } catch ( NullPointerException e ) { + getAdapter( adapterId ); + getSchema( schemaId ); + return new ArrayList<>(); + } + } + + + /** + * {@inheritDoc} + */ + @Override + public void updateColumnPlacementType( int adapterId, long columnId, PlacementType placementType ) { + try { + CatalogColumnPlacement old = Objects.requireNonNull( columnPlacements.get( new Object[]{ adapterId, columnId } ) ); + CatalogColumnPlacement placement = new CatalogColumnPlacement( + old.namespaceId, + old.tableId, + old.columnId, + old.adapterId, + old.adapterUniqueName, + placementType, + old.physicalSchemaName, + old.physicalColumnName, + old.physicalPosition ); + synchronized ( this ) { + columnPlacements.replace( new Object[]{ adapterId, columnId }, placement ); + } + listeners.firePropertyChange( "columnPlacement", old, placement ); + } catch ( NullPointerException e ) { + getAdapter( adapterId ); + getColumn( columnId ); + throw new UnknownColumnPlacementRuntimeException( adapterId, columnId ); + } + } + + + /** + * {@inheritDoc} + */ + @Override + public void updateColumnPlacementPhysicalPosition( int adapterId, long columnId, long position ) { + try { + CatalogColumnPlacement old = Objects.requireNonNull( columnPlacements.get( new Object[]{ adapterId, columnId } ) ); + CatalogColumnPlacement placement = new CatalogColumnPlacement( + old.namespaceId, + old.tableId, + old.columnId, + old.adapterId, + old.adapterUniqueName, + old.placementType, + old.physicalSchemaName, + old.physicalColumnName, + position ); + synchronized ( this ) { + columnPlacements.replace( new Object[]{ adapterId, columnId }, placement ); + } + listeners.firePropertyChange( "columnPlacement", old, placement ); + } catch ( NullPointerException e ) { + getAdapter( adapterId ); + getColumn( columnId ); + throw new UnknownColumnPlacementRuntimeException( adapterId, columnId ); + } + } + + + /** + * {@inheritDoc} + */ + @Override + public void updateColumnPlacementPhysicalPosition( int adapterId, long columnId ) { + try { + CatalogColumnPlacement old = Objects.requireNonNull( columnPlacements.get( new Object[]{ adapterId, columnId } ) ); + CatalogColumnPlacement placement = new CatalogColumnPlacement( + old.namespaceId, + old.tableId, + old.columnId, + old.adapterId, + old.adapterUniqueName, + old.placementType, + old.physicalSchemaName, + old.physicalColumnName, + physicalPositionBuilder.getAndIncrement() ); + synchronized ( this ) { + columnPlacements.replace( new Object[]{ adapterId, columnId }, placement ); + } + listeners.firePropertyChange( "columnPlacement", old, placement ); + } catch ( NullPointerException e ) { + getAdapter( adapterId ); + getColumn( columnId ); + throw new UnknownColumnPlacementRuntimeException( adapterId, columnId ); + } + } + + + /** + * {@inheritDoc} + */ + @Override + public void updateColumnPlacementPhysicalNames( int adapterId, long columnId, String physicalSchemaName, String physicalColumnName, boolean updatePhysicalColumnPosition ) { 
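+        // Rewrites the stored physical schema/column names; a fresh physical position is drawn from physicalPositionBuilder only when requested.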
+ try { + CatalogColumnPlacement old = Objects.requireNonNull( columnPlacements.get( new Object[]{ adapterId, columnId } ) ); + CatalogColumnPlacement placement = new CatalogColumnPlacement( + old.namespaceId, + old.tableId, + old.columnId, + old.adapterId, + old.adapterUniqueName, + old.placementType, + physicalSchemaName, + physicalColumnName, + updatePhysicalColumnPosition ? physicalPositionBuilder.getAndIncrement() : old.physicalPosition ); + synchronized ( this ) { + columnPlacements.replace( new Object[]{ adapterId, columnId }, placement ); + } + listeners.firePropertyChange( "columnPlacement", old, placement ); + } catch ( NullPointerException e ) { + getAdapter( adapterId ); + getColumn( columnId ); + throw new UnknownColumnPlacementRuntimeException( adapterId, columnId ); + } + } + + + /** + * {@inheritDoc} + */ + @Override + public List getColumns( long tableId ) { + try { + LogicalTable table = Objects.requireNonNull( tables.get( tableId ) ); + return columnNames.prefixSubMap( new Object[]{ table.namespaceId, table.id } ).values().stream().sorted( columnComparator ).collect( Collectors.toList() ); + } catch ( NullPointerException e ) { + return new ArrayList<>(); + } + } + + + /** + * {@inheritDoc} + */ + @Override + public List getColumns( Pattern schemaNamePattern, Pattern tableNamePattern, Pattern columnNamePattern ) { + List catalogEntities = getTables( schemaNamePattern, tableNamePattern ); + + if ( catalogEntities.size() > 0 ) { + Stream catalogColumns = catalogEntities.stream().filter( t -> tableChildren.containsKey( t.id ) ).flatMap( t -> Objects.requireNonNull( tableChildren.get( t.id ) ).stream() ).map( columns::get ); + + if ( columnNamePattern != null ) { + catalogColumns = catalogColumns.filter( c -> c.name.matches( columnNamePattern.toRegex() ) ); + } + return catalogColumns.collect( Collectors.toList() ); + } + + return new ArrayList<>(); + } + + + /** + * {@inheritDoc} + */ + @Override + public LogicalColumn getColumn( long columnId ) { + try { + return Objects.requireNonNull( columns.get( columnId ) ); + } catch ( NullPointerException e ) { + throw new UnknownColumnIdRuntimeException( columnId ); + } + } + + + /** + * {@inheritDoc} + */ + @Override + public LogicalColumn getColumn( long tableId, String columnName ) throws UnknownColumnException { + try { + LogicalTable table = getTable( tableId ); + if ( !getSchema( table.namespaceId ).caseSensitive ) { + columnName = columnName.toLowerCase(); + } + return Objects.requireNonNull( columnNames.get( new Object[]{ table.namespaceId, table.id, columnName } ) ); + } catch ( NullPointerException e ) { + throw new UnknownColumnException( tableId, columnName ); + } + } + + + /** + * {@inheritDoc} + */ + @Override + public LogicalColumn getColumn( String schemaName, String tableName, String columnName ) throws UnknownColumnException, UnknownSchemaException, UnknownTableException { + try { + LogicalTable table = getTable( schemaName, tableName ); + return Objects.requireNonNull( columnNames.get( new Object[]{ table.namespaceId, table.id, columnName } ) ); + } catch ( NullPointerException e ) { + throw new UnknownColumnException( schemaName, tableName, columnName ); + } + } + + + /** + * {@inheritDoc} + */ + @Override + public long addColumn( String name, long tableId, int position, PolyType type, PolyType collectionsType, Integer length, Integer scale, Integer dimension, Integer cardinality, boolean nullable, Collation collation ) { + LogicalTable table = getTable( tableId ); + + if ( !getSchema( table.namespaceId 
).caseSensitive ) {
+            name = name.toLowerCase();
+        }
+
+        if ( type.getFamily() == PolyTypeFamily.CHARACTER && collation == null ) {
+            throw new RuntimeException( "Collation is not allowed to be null for char types." );
+        }
+        if ( scale != null && length != null && scale > length ) {
+            throw new RuntimeException( "Invalid scale! Scale cannot be larger than length." );
+        }
+
+        long id = columnIdBuilder.getAndIncrement();
+        LogicalColumn column = new LogicalColumn(
+                id,
+                name,
+                tableId,
+                table.namespaceId,
+                position,
+                type,
+                collectionsType,
+                length,
+                scale,
+                dimension,
+                cardinality,
+                nullable,
+                collation,
+                null );
+
+        synchronized ( this ) {
+            columns.put( id, column );
+            columnNames.put( new Object[]{ table.namespaceId, table.id, name }, column );
+            List<Long> children = new ArrayList<>( Objects.requireNonNull( tableChildren.get( tableId ) ) );
+            children.add( id );
+            tableChildren.replace( tableId, ImmutableList.copyOf( children ) );
+
+            List<Long> columnIds = new ArrayList<>( table.fieldIds );
+            columnIds.add( id );
+
+            // register the new column in the table's field list (not in its connected views)
+            LogicalTable updatedTable = table.withFieldIds( ImmutableList.copyOf( columnIds ) );
+            tables.replace( tableId, updatedTable );
+            tableNames.replace( new Object[]{ updatedTable.namespaceId, updatedTable.name }, updatedTable );
+        }
+        listeners.firePropertyChange( "column", null, column );
+        return id;
+    }
+
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    public void renameColumn( long columnId, String name ) {
+        LogicalColumn old = getColumn( columnId );
+
+        if ( !getSchema( old.schemaId ).caseSensitive ) {
+            name = name.toLowerCase();
+        }
+
+        LogicalColumn column = new LogicalColumn( old.id, name, old.tableId, old.schemaId, old.position, old.type, old.collectionsType, old.length, old.scale, old.dimension, old.cardinality, old.nullable, old.collation, old.defaultValue );
+        synchronized ( this ) {
+            columns.replace( columnId, column );
+            columnNames.remove( new Object[]{ column.schemaId, column.tableId, old.name } );
+            columnNames.put( new Object[]{ column.schemaId, column.tableId, name }, column );
+        }
+        listeners.firePropertyChange( "column", old, column );
+    }
+
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    public void setColumnPosition( long columnId, int position ) {
+        LogicalColumn old = getColumn( columnId );
+        LogicalColumn column = new LogicalColumn( old.id, old.name, old.tableId, old.schemaId, position, old.type, old.collectionsType, old.length, old.scale, old.dimension, old.cardinality, old.nullable, old.collation, old.defaultValue );
+        synchronized ( this ) {
+            columns.replace( columnId, column );
+            columnNames.replace( new Object[]{ column.schemaId, column.tableId, column.name }, column );
+        }
+        listeners.firePropertyChange( "column", old, column );
+    }
+
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    public void setColumnType( long columnId, PolyType type, PolyType collectionsType, Integer length, Integer scale, Integer dimension, Integer cardinality ) throws GenericCatalogException {
+        try {
+            LogicalColumn old = Objects.requireNonNull( columns.get( columnId ) );
+
+            if ( scale != null && length != null && scale > length ) {
+                throw new RuntimeException( "Invalid scale! Scale cannot be larger than length."
+
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public void setColumnType( long columnId, PolyType type, PolyType collectionsType, Integer length, Integer scale, Integer dimension, Integer cardinality ) throws GenericCatalogException {
+ try {
+ LogicalColumn old = Objects.requireNonNull( columns.get( columnId ) );
+
+ if ( scale != null && length != null && scale > length ) {
+ throw new RuntimeException( "Invalid scale! Scale can not be larger than length." );
+ }
+
+ // Check that the column is not part of a key
+ for ( CatalogKey key : getKeys() ) {
+ if ( key.columnIds.contains( columnId ) ) {
+ String name = "UNKNOWN";
+ if ( key instanceof CatalogPrimaryKey ) {
+ name = "PRIMARY KEY";
+ } else if ( key instanceof CatalogForeignKey ) {
+ name = ((CatalogForeignKey) key).name;
+ } else {
+ List constraints = getConstraints( key );
+ if ( constraints.size() > 0 ) {
+ name = constraints.get( 0 ).name;
+ }
+ }
+ throw new GenericCatalogException( "The column \"" + old.name + "\" is part of the key \"" + name + "\". Unable to change the type of a column that is part of a key." );
+ }
+ }
+
+ Collation collation = type.getFamily() == PolyTypeFamily.CHARACTER
+ ? Collation.getById( RuntimeConfig.DEFAULT_COLLATION.getInteger() )
+ : null;
+ LogicalColumn column = new LogicalColumn( old.id, old.name, old.tableId, old.schemaId, old.position, type, collectionsType, length, scale, dimension, cardinality, old.nullable, collation, old.defaultValue );
+ synchronized ( this ) {
+ columns.replace( columnId, column );
+ columnNames.replace( new Object[]{ old.schemaId, old.tableId, old.name }, column );
+ }
+ listeners.firePropertyChange( "column", old, column );
+ } catch ( NullPointerException e ) {
+ throw new GenericCatalogException( e );
+ }
+ }
+
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public void setNullable( long columnId, boolean nullable ) throws GenericCatalogException {
+ try {
+ LogicalColumn old = Objects.requireNonNull( columns.get( columnId ) );
+ if ( nullable ) {
+ // Check if the column is part of a primary key (pk's are not allowed to contain null values)
+ LogicalTable table = Objects.requireNonNull( tables.get( old.tableId ) );
+ if ( table.primaryKey != null ) {
+ CatalogKey catalogKey = getPrimaryKey( table.primaryKey );
+ if ( catalogKey.columnIds.contains( columnId ) ) {
+ throw new GenericCatalogException( "Unable to allow null values in a column that is part of the primary key." );
+ }
+ }
+ } else {
+ // TODO: Check that the column does not contain any null values
+ getColumnPlacement( columnId );
+ }
+ LogicalColumn column = new LogicalColumn(
+ old.id,
+ old.name,
+ old.tableId,
+ old.schemaId,
+ old.position,
+ old.type,
+ old.collectionsType,
+ old.length,
+ old.scale,
+ old.dimension,
+ old.cardinality,
+ nullable,
+ old.collation,
+ old.defaultValue );
+ synchronized ( this ) {
+ columns.replace( columnId, column );
+ columnNames.replace( new Object[]{ old.schemaId, old.tableId, old.name }, column );
+ }
+ listeners.firePropertyChange( "column", old, column );
+ } catch ( NullPointerException e ) {
+ throw new GenericCatalogException( e );
+ }
+ }
+
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public void setCollation( long columnId, Collation collation ) {
+ LogicalColumn old = getColumn( columnId );
+
+ if ( old.type.getFamily() != PolyTypeFamily.CHARACTER ) {
+ throw new RuntimeException( "Illegal attempt to set collation for a non-char column!"
); + } + LogicalColumn column = new LogicalColumn( + old.id, + old.name, + old.tableId, + old.schemaId, + old.position, + old.type, + old.collectionsType, + old.length, + old.scale, + old.dimension, + old.cardinality, + old.nullable, + collation, + old.defaultValue ); + synchronized ( this ) { + columns.replace( columnId, column ); + columnNames.replace( new Object[]{ old.schemaId, old.tableId, old.name }, column ); + } + listeners.firePropertyChange( "column", old, column ); + } + + + /** + * {@inheritDoc} + */ + @Override + public boolean checkIfExistsColumn( long tableId, String columnName ) { + LogicalTable table = getTable( tableId ); + return columnNames.containsKey( new Object[]{ table.namespaceId, tableId, columnName } ); + } + + + /** + * {@inheritDoc} + */ + @Override + public void deleteColumn( long columnId ) { + //TODO also delete keys with that column? + LogicalColumn column = getColumn( columnId ); + + List children = new ArrayList<>( Objects.requireNonNull( tableChildren.get( column.tableId ) ) ); + children.remove( columnId ); + + LogicalTable old = getTable( column.tableId ); + List columnIds = new ArrayList<>( old.fieldIds ); + columnIds.remove( columnId ); + + LogicalTable table = old.withFieldIds( ImmutableList.copyOf( columnIds ) ); + + synchronized ( this ) { + columnNames.remove( new Object[]{ column.schemaId, column.tableId, column.name } ); + tableChildren.replace( column.tableId, ImmutableList.copyOf( children ) ); + + deleteDefaultValue( columnId ); + for ( CatalogColumnPlacement p : getColumnPlacement( columnId ) ) { + deleteColumnPlacement( p.adapterId, p.columnId, false ); + } + tables.replace( column.tableId, table ); + tableNames.replace( new Object[]{ table.namespaceId, table.name }, table ); + + columns.remove( columnId ); + } + listeners.firePropertyChange( "column", column, null ); + } + + + /** + * {@inheritDoc} + * + * TODO: String is only a temporary solution + */ + @Override + public void setDefaultValue( long columnId, PolyType type, String defaultValue ) { + LogicalColumn old = getColumn( columnId ); + LogicalColumn column = new LogicalColumn( + old.id, + old.name, + old.tableId, + old.schemaId, + old.position, + old.type, + old.collectionsType, + old.length, + old.scale, + old.dimension, + old.cardinality, + old.nullable, + old.collation, + new CatalogDefaultValue( columnId, type, defaultValue, "defaultValue" ) ); + synchronized ( this ) { + columns.replace( columnId, column ); + columnNames.replace( new Object[]{ column.schemaId, column.tableId, column.name }, column ); + } + listeners.firePropertyChange( "column", old, column ); + } + + + /** + * {@inheritDoc} + */ + @Override + public void deleteDefaultValue( long columnId ) { + LogicalColumn old = getColumn( columnId ); + LogicalColumn column = new LogicalColumn( + old.id, + old.name, + old.tableId, + old.schemaId, + old.position, + old.type, + old.collectionsType, + old.length, + old.scale, + old.dimension, + old.cardinality, + old.nullable, + old.collation, + null ); + if ( old.defaultValue != null ) { + synchronized ( this ) { + columns.replace( columnId, column ); + columnNames.replace( new Object[]{ old.schemaId, old.tableId, old.name }, column ); + } + listeners.firePropertyChange( "column", old, column ); + } + } + + + /** + * {@inheritDoc} + */ + @Override + public CatalogPrimaryKey getPrimaryKey( long key ) { + try { + return Objects.requireNonNull( primaryKeys.get( key ) ); + } catch ( NullPointerException e ) { + throw new UnknownKeyIdRuntimeException( key ); + } + } + + + /** + * 
{@inheritDoc}
+ */
+ @Override
+ public boolean isPrimaryKey( long key ) {
+ try {
+ Long primary = getTable( Objects.requireNonNull( keys.get( key ) ).tableId ).primaryKey;
+ return primary != null && primary == key;
+ } catch ( NullPointerException e ) {
+ throw new UnknownKeyIdRuntimeException( key );
+ }
+ }
+
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public void addPrimaryKey( long tableId, List columnIds ) throws GenericCatalogException {
+ try {
+ // Check if the columns are set 'not null'
+ List nullableColumns = columnIds.stream().map( columns::get ).filter( Objects::nonNull ).filter( c -> c.nullable ).collect( Collectors.toList() );
+ for ( LogicalColumn col : nullableColumns ) {
+ throw new GenericCatalogException( "Primary key is not allowed to contain null values but the column '" + col.name + "' is declared nullable." );
+ }
+
+ // TODO: Check if the current values are unique
+
+ // Check if there is already a primary key defined for this table and if so, delete it.
+ LogicalTable table = getTable( tableId );
+
+ if ( table.primaryKey != null ) {
+ // CatalogCombinedKey combinedKey = getCombinedKey( table.primaryKey );
+ if ( getKeyUniqueCount( table.primaryKey ) == 1 && isForeignKey( table.primaryKey ) ) {
+ // This primary key is the only constraint for the uniqueness of this key.
+ throw new GenericCatalogException( "This key is referenced by at least one foreign key which requires this key to be unique. To drop this primary key, first drop the foreign keys or create a unique constraint." );
+ }
+ synchronized ( this ) {
+ setPrimaryKey( tableId, null );
+ deleteKeyIfNoLongerUsed( table.primaryKey );
+ }
+ }
+ long keyId = getOrAddKey( tableId, columnIds, EnforcementTime.ON_QUERY );
+ setPrimaryKey( tableId, keyId );
+ } catch ( NullPointerException e ) {
+ throw new GenericCatalogException( e );
+ }
+ }
+
+
+ private int getKeyUniqueCount( long keyId ) {
+ CatalogKey key = keys.get( keyId );
+ int count = 0;
+ if ( isPrimaryKey( keyId ) ) {
+ count++;
+ }
+
+ for ( CatalogConstraint constraint : getConstraints( key ) ) {
+ if ( constraint.type == ConstraintType.UNIQUE ) {
+ count++;
+ }
+ }
+
+ for ( CatalogIndex index : getIndexes( key ) ) {
+ if ( index.unique ) {
+ count++;
+ }
+ }
+
+ return count;
+ }
+
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public List getForeignKeys( long tableId ) {
+ return foreignKeys.values().stream().filter( f -> f.tableId == tableId ).collect( Collectors.toList() );
+ }
+
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public List getExportedKeys( long tableId ) {
+ return foreignKeys.values().stream().filter( k -> k.referencedKeyTableId == tableId ).collect( Collectors.toList() );
+ }
+
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public List getConstraints( long tableId ) {
+ List keysOfTable = keys.values().stream().filter( k -> k.tableId == tableId ).map( k -> k.id ).collect( Collectors.toList() );
+ return constraints.values().stream().filter( c -> keysOfTable.contains( c.keyId ) ).collect( Collectors.toList() );
+ }
+
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public CatalogConstraint getConstraint( long tableId, String constraintName ) throws UnknownConstraintException {
+ try {
+ return constraints.values().stream()
+ .filter( c -> c.key.tableId == tableId && c.name.equals( constraintName ) )
+ .findFirst()
+ .orElseThrow( NullPointerException::new );
+ } catch ( NullPointerException e ) {
+ throw new UnknownConstraintException( tableId, constraintName );
+ }
+ }
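A worked reading of getKeyUniqueCount() above (illustrative, not part of this patch): the count is the number of independent uniqueness guarantees on a key, and addPrimaryKey() only refuses to replace an existing primary key when dropping it would leave a referenced key without any such guarantee:

    // Hypothetical scenario: key k is the primary key of its table and is also
    // covered by a UNIQUE constraint, so getKeyUniqueCount( k ) == 2. Even if a
    // foreign key references k, addPrimaryKey() may replace the primary key,
    // because the UNIQUE constraint keeps the count >= 1. With a count of 1 and
    // a referencing foreign key, the same call throws a GenericCatalogException.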
+
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public CatalogForeignKey getForeignKey( long tableId, String foreignKeyName ) throws UnknownForeignKeyException {
+ try {
+ return foreignKeys.values().stream()
+ .filter( f -> f.tableId == tableId && f.name.equals( foreignKeyName ) )
+ .findFirst()
+ .orElseThrow( NullPointerException::new );
+ } catch ( NullPointerException e ) {
+ throw new UnknownForeignKeyException( tableId, foreignKeyName );
+ }
+ }
+
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public void addForeignKey( long tableId, List columnIds, long referencesTableId, List referencesIds, String constraintName, ForeignKeyOption onUpdate, ForeignKeyOption onDelete ) throws GenericCatalogException {
+ try {
+ LogicalTable table = Objects.requireNonNull( tables.get( tableId ) );
+ List childKeys = keys.values().stream().filter( k -> k.tableId == referencesTableId ).collect( Collectors.toList() );
+
+ for ( CatalogKey refKey : childKeys ) {
+ if ( refKey.columnIds.size() == referencesIds.size() && refKey.columnIds.containsAll( referencesIds ) && referencesIds.containsAll( refKey.columnIds ) ) {
+
+ // CatalogKey combinedKey = getCombinedKey( refKey.id );
+
+ int i = 0;
+ for ( long referencedColumnId : refKey.columnIds ) {
+ LogicalColumn referencingColumn = getColumn( columnIds.get( i++ ) );
+ LogicalColumn referencedColumn = getColumn( referencedColumnId );
+ if ( referencedColumn.type != referencingColumn.type ) {
+ throw new GenericCatalogException( "The type of the referencing column does not match the type of the referenced column: " + referencingColumn.type.name() + " != " + referencedColumn.type.name() );
+ }
+ }
+ // TODO same keys for key and foreign key
+ if ( getKeyUniqueCount( refKey.id ) > 0 ) {
+ long keyId = getOrAddKey( tableId, columnIds, EnforcementTime.ON_COMMIT );
+ CatalogForeignKey key = new CatalogForeignKey(
+ keyId,
+ constraintName,
+ tableId,
+ table.namespaceId,
+ refKey.id,
+ refKey.tableId,
+ refKey.schemaId,
+ columnIds,
+ referencesIds,
+ onUpdate,
+ onDelete );
+ synchronized ( this ) {
+ foreignKeys.put( keyId, key );
+ }
+ listeners.firePropertyChange( "foreignKey", null, key );
+ return;
+ }
+ }
+ }
+ throw new GenericCatalogException( "There is no key over the referenced columns." );
+ } catch ( NullPointerException e ) {
+ throw new GenericCatalogException( e );
+ }
+ }
+
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public void addUniqueConstraint( long tableId, String constraintName, List columnIds ) throws GenericCatalogException {
+ try {
+ long keyId = getOrAddKey( tableId, columnIds, EnforcementTime.ON_QUERY );
+ // Check if there is already a unique constraint
+ List catalogConstraints = constraints.values().stream()
+ .filter( c -> c.keyId == keyId && c.type == ConstraintType.UNIQUE )
+ .collect( Collectors.toList() );
+ if ( catalogConstraints.size() > 0 ) {
+ throw new GenericCatalogException( "There is already a unique constraint!"
); + } + long id = constraintIdBuilder.getAndIncrement(); + synchronized ( this ) { + constraints.put( id, new CatalogConstraint( id, keyId, ConstraintType.UNIQUE, constraintName, Objects.requireNonNull( keys.get( keyId ) ) ) ); + } + listeners.firePropertyChange( "constraint", null, keyId ); + } catch ( NullPointerException e ) { + throw new GenericCatalogException( e ); + } + } + + + /** + * {@inheritDoc} + */ + @Override + public List getIndexes( long tableId, boolean onlyUnique ) { + if ( !onlyUnique ) { + return indexes.values().stream().filter( i -> i.key.tableId == tableId ).collect( Collectors.toList() ); + } else { + return indexes.values().stream().filter( i -> i.key.tableId == tableId && i.unique ).collect( Collectors.toList() ); + } + } + + + /** + * {@inheritDoc} + */ + @Override + public CatalogIndex getIndex( long tableId, String indexName ) throws UnknownIndexException { + try { + return indexes.values().stream() + .filter( i -> i.key.tableId == tableId && i.name.equals( indexName ) ) + .findFirst() + .orElseThrow( NullPointerException::new ); + } catch ( NullPointerException e ) { + throw new UnknownIndexException( tableId, indexName ); + } + } + + + /** + * {@inheritDoc} + */ + @Override + public boolean checkIfExistsIndex( long tableId, String indexName ) { + try { + LogicalTable table = getTable( tableId ); + getIndex( table.id, indexName ); + return true; + } catch ( UnknownIndexException e ) { + return false; + } + } + + + /** + * {@inheritDoc} + */ + @Override + public CatalogIndex getIndex( long indexId ) { + try { + return Objects.requireNonNull( indexes.get( indexId ) ); + } catch ( NullPointerException e ) { + throw new UnknownIndexIdRuntimeException( indexId ); + } + } + + + /** + * {@inheritDoc} + */ + @Override + public List getIndexes() { + return new ArrayList<>( indexes.values() ); + } + + + /** + * {@inheritDoc} + */ + @Override + public long addIndex( long tableId, List columnIds, boolean unique, String method, String methodDisplayName, int location, IndexType type, String indexName ) throws GenericCatalogException { + long keyId = getOrAddKey( tableId, columnIds, EnforcementTime.ON_QUERY ); + if ( unique ) { + // TODO: Check if the current values are unique + } + long id = indexIdBuilder.getAndIncrement(); + synchronized ( this ) { + indexes.put( id, new CatalogIndex( + id, + indexName, + unique, + method, + methodDisplayName, + type, + location, + keyId, + Objects.requireNonNull( keys.get( keyId ) ), + null ) ); + } + listeners.firePropertyChange( "index", null, keyId ); + return id; + } + + + /** + * {@inheritDoc} + */ + @Override + public void setIndexPhysicalName( long indexId, String physicalName ) { + try { + CatalogIndex oldEntry = Objects.requireNonNull( indexes.get( indexId ) ); + CatalogIndex newEntry = new CatalogIndex( + oldEntry.id, + oldEntry.name, + oldEntry.unique, + oldEntry.method, + oldEntry.methodDisplayName, + oldEntry.type, + oldEntry.location, + oldEntry.keyId, + oldEntry.key, + physicalName ); + synchronized ( this ) { + indexes.replace( indexId, newEntry ); + } + listeners.firePropertyChange( "index", oldEntry, newEntry ); + } catch ( NullPointerException e ) { + throw new UnknownIndexIdRuntimeException( indexId ); + } + } + + + /** + * {@inheritDoc} + */ + @Override + public void deleteIndex( long indexId ) { + CatalogIndex index = getIndex( indexId ); + if ( index.unique ) { + if ( getKeyUniqueCount( index.keyId ) == 1 && isForeignKey( index.keyId ) ) { + // This unique index is the only constraint for the uniqueness of this 
key.
+ //throw new GenericCatalogException( "This key is referenced by at least one foreign key which requires this key to be unique. To delete this index, first add a unique constraint." );
+ }
+ }
+ synchronized ( this ) {
+ indexes.remove( indexId );
+ }
+ listeners.firePropertyChange( "index", index.key, null );
+ deleteKeyIfNoLongerUsed( index.keyId );
+ }
+
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public void deletePrimaryKey( long tableId ) throws GenericCatalogException {
+ LogicalTable table = getTable( tableId );
+
+ // TODO: Check if the currently stored values are unique
+ if ( table.primaryKey != null ) {
+ // Check if this primary key is required to maintain uniqueness
+ // CatalogCombinedKey key = getCombinedKey( table.primaryKey );
+ if ( isForeignKey( table.primaryKey ) ) {
+ if ( getKeyUniqueCount( table.primaryKey ) < 2 ) {
+ throw new GenericCatalogException( "This key is referenced by at least one foreign key which requires this key to be unique. To drop this primary key either drop the foreign key or create a unique constraint." );
+ }
+ }
+
+ setPrimaryKey( tableId, null );
+ deleteKeyIfNoLongerUsed( table.primaryKey );
+ }
+ }
+
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public void deleteForeignKey( long foreignKeyId ) throws GenericCatalogException {
+ try {
+ CatalogForeignKey catalogForeignKey = Objects.requireNonNull( foreignKeys.get( foreignKeyId ) );
+ synchronized ( this ) {
+ foreignKeys.remove( catalogForeignKey.id );
+ deleteKeyIfNoLongerUsed( catalogForeignKey.id );
+ }
+ listeners.firePropertyChange( "foreignKey", foreignKeyId, null );
+ } catch ( NullPointerException e ) {
+ throw new GenericCatalogException( e );
+ }
+ }
+
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public void deleteConstraint( long constraintId ) throws GenericCatalogException {
+ try {
+ CatalogConstraint catalogConstraint = Objects.requireNonNull( constraints.get( constraintId ) );
+
+ //CatalogCombinedKey key = getCombinedKey( catalogConstraint.keyId );
+ if ( catalogConstraint.type == ConstraintType.UNIQUE && isForeignKey( catalogConstraint.keyId ) ) {
+ if ( getKeyUniqueCount( catalogConstraint.keyId ) < 2 ) {
+ throw new GenericCatalogException( "This key is referenced by at least one foreign key which requires this key to be unique. Unable to drop unique constraint."
); + } + } + synchronized ( this ) { + constraints.remove( catalogConstraint.id ); + } + listeners.firePropertyChange( "constraint", catalogConstraint, null ); + deleteKeyIfNoLongerUsed( catalogConstraint.keyId ); + } catch ( NullPointerException e ) { + throw new GenericCatalogException( e ); + } + } + + + /** + * {@inheritDoc} + */ + @Override + public CatalogUser getUser( String userName ) throws UnknownUserException { + try { + return Objects.requireNonNull( userNames.get( userName ) ); + } catch ( NullPointerException e ) { + throw new UnknownUserException( userName ); + } + } + + + /** + * {@inheritDoc} + */ + @Override + public CatalogUser getUser( int userId ) { + try { + return Objects.requireNonNull( users.get( userId ) ); + } catch ( NullPointerException e ) { + throw new UnknownUserIdRuntimeException( userId ); + } + } + + + /** + * {@inheritDoc} + */ + @Override + public List getAdapters() { + return new ArrayList<>( adapters.values() ); + } + + + /** + * {@inheritDoc} + */ + @Override + public CatalogAdapter getAdapter( String uniqueName ) throws UnknownAdapterException { + uniqueName = uniqueName.toLowerCase(); + try { + return Objects.requireNonNull( adapterNames.get( uniqueName ) ); + } catch ( NullPointerException e ) { + throw new UnknownAdapterException( uniqueName ); + } + } + + + /** + * {@inheritDoc} + */ + @Override + public CatalogAdapter getAdapter( int adapterId ) { + try { + return Objects.requireNonNull( adapters.get( adapterId ) ); + } catch ( NullPointerException e ) { + throw new UnknownAdapterIdRuntimeException( adapterId ); + } + } + + + /** + * {@inheritDoc} + */ + @Override + public boolean checkIfExistsAdapter( int adapterId ) { + return adapters.containsKey( adapterId ); + } + + + /** + * {@inheritDoc} + */ + @Override + public int addAdapter( String uniqueName, String adapterName, AdapterType type, Map settings ) { + uniqueName = uniqueName.toLowerCase(); + + int id = adapterIdBuilder.getAndIncrement(); + Map temp = new HashMap<>( settings ); + CatalogAdapter adapter = new CatalogAdapter( id, uniqueName, adapterName, type, temp ); + synchronized ( this ) { + adapters.put( id, adapter ); + adapterNames.put( uniqueName, adapter ); + } + try { + commit(); + } catch ( NoTablePrimaryKeyException e ) { + throw new RuntimeException( "An error occurred while creating the adapter." ); + } + listeners.firePropertyChange( "adapter", null, adapter ); + return id; + } + + + /** + * {@inheritDoc} + */ + @Override + public void updateAdapterSettings( int adapterId, Map newSettings ) { + CatalogAdapter old = getAdapter( adapterId ); + Map temp = new HashMap<>(); + newSettings.forEach( temp::put ); + CatalogAdapter adapter = new CatalogAdapter( old.id, old.uniqueName, old.adapterName, old.type, temp ); + synchronized ( this ) { + adapters.put( adapter.id, adapter ); + adapterNames.put( adapter.uniqueName, adapter ); + } + listeners.firePropertyChange( "adapter", old, adapter ); + } + + + /** + * {@inheritDoc} + */ + @Override + public void deleteAdapter( int adapterId ) { + try { + CatalogAdapter adapter = Objects.requireNonNull( adapters.get( adapterId ) ); + synchronized ( this ) { + adapters.remove( adapterId ); + adapterNames.remove( adapter.uniqueName ); + } + try { + commit(); + } catch ( NoTablePrimaryKeyException e ) { + throw new RuntimeException( "An error occurred while deleting the adapter." 
);
+ }
+ listeners.firePropertyChange( "adapter", adapter, null );
+ } catch ( NullPointerException e ) {
+ throw new UnknownAdapterIdRuntimeException( adapterId );
+ }
+ }
+
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public List getQueryInterfaces() {
+ return new ArrayList<>( queryInterfaces.values() );
+ }
+
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public CatalogQueryInterface getQueryInterface( String uniqueName ) throws UnknownQueryInterfaceException {
+ uniqueName = uniqueName.toLowerCase();
+ try {
+ return Objects.requireNonNull( queryInterfaceNames.get( uniqueName ) );
+ } catch ( NullPointerException e ) {
+ throw new UnknownQueryInterfaceException( uniqueName );
+ }
+ }
+
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public CatalogQueryInterface getQueryInterface( int ifaceId ) {
+ try {
+ return Objects.requireNonNull( queryInterfaces.get( ifaceId ) );
+ } catch ( NullPointerException e ) {
+ throw new UnknownQueryInterfaceRuntimeException( ifaceId );
+ }
+ }
+
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public int addQueryInterface( String uniqueName, String clazz, Map settings ) {
+ uniqueName = uniqueName.toLowerCase();
+
+ int id = queryInterfaceIdBuilder.getAndIncrement();
+ Map temp = new HashMap<>( settings );
+ CatalogQueryInterface queryInterface = new CatalogQueryInterface( id, uniqueName, clazz, temp );
+ synchronized ( this ) {
+ queryInterfaces.put( id, queryInterface );
+ queryInterfaceNames.put( uniqueName, queryInterface );
+ }
+ try {
+ commit();
+ } catch ( NoTablePrimaryKeyException e ) {
+ throw new RuntimeException( "An error occurred while creating the query interface." );
+ }
+ listeners.firePropertyChange( "queryInterface", null, queryInterface );
+ return id;
+ }
+
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public void deleteQueryInterface( int ifaceId ) {
+ try {
+ CatalogQueryInterface queryInterface = Objects.requireNonNull( queryInterfaces.get( ifaceId ) );
+ synchronized ( this ) {
+ queryInterfaces.remove( ifaceId );
+ queryInterfaceNames.remove( queryInterface.name );
+ }
+ try {
+ commit();
+ } catch ( NoTablePrimaryKeyException e ) {
+ throw new RuntimeException( "An error occurred while deleting the query interface." );
+ }
+ listeners.firePropertyChange( "queryInterface", queryInterface, null );
+ } catch ( NullPointerException e ) {
+ throw new UnknownQueryInterfaceRuntimeException( ifaceId );
+ }
+ }
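Note that, unlike the column and key mutators above, the adapter and query interface methods persist the catalog immediately via commit(). An illustrative sketch (hypothetical unique name and settings, not part of this patch):

    // Registering an adapter is durable before the method returns.
    int adapterId = catalog.addAdapter( "mongo1", "MongoDB", AdapterType.STORE,
            Map.of( "port", "27017" ) );  // the setting keys are hypothetical
    // Dropping it is likewise committed right away.
    catalog.deleteAdapter( adapterId );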
+
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public long addPartitionGroup( long tableId, String partitionGroupName, long schemaId, PartitionType partitionType, long numberOfInternalPartitions, List effectivePartitionGroupQualifier, boolean isUnbound ) throws GenericCatalogException {
+ try {
+ long id = partitionGroupIdBuilder.getAndIncrement();
+ if ( log.isDebugEnabled() ) {
+ log.debug( "Creating partitionGroup of type '{}' with id '{}'", partitionType, id );
+ }
+ CatalogSchema schema = Objects.requireNonNull( schemas.get( schemaId ) );
+
+ List partitionIds = new ArrayList<>();
+ for ( int i = 0; i < numberOfInternalPartitions; i++ ) {
+ long partId = addPartition( tableId, schemaId, id, effectivePartitionGroupQualifier, isUnbound );
+ partitionIds.add( partId );
+ }
+
+ CatalogPartitionGroup partitionGroup = new CatalogPartitionGroup(
+ id,
+ partitionGroupName,
+ tableId,
+ schemaId,
+ 0,
+ null,
+ ImmutableList.copyOf( partitionIds ),
+ isUnbound );
+
+ synchronized ( this ) {
+ partitionGroups.put( id, partitionGroup );
+ }
+ //listeners.firePropertyChange( "partitionGroups", null, partitionGroup );
+ return id;
+ } catch ( NullPointerException e ) {
+ throw new GenericCatalogException( e );
+ }
+ }
+
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public void deletePartitionGroup( long tableId, long schemaId, long partitionGroupId ) throws UnknownPartitionGroupIdRuntimeException {
+ if ( log.isDebugEnabled() ) {
+ log.debug( "Deleting partitionGroup with id '{}' on table with id '{}'", partitionGroupId, tableId );
+ }
+ // Check whether this partition group exists
+ CatalogPartitionGroup partitionGroup = getPartitionGroup( partitionGroupId );
+ synchronized ( this ) {
+ for ( long partitionId : partitionGroup.partitionIds ) {
+ deletePartition( tableId, schemaId, partitionId );
+ }
+ partitionGroups.remove( partitionGroupId );
+ }
+ }
+
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public void updatePartitionGroup( long partitionGroupId, List partitionIds ) throws UnknownPartitionGroupIdRuntimeException {
+
+ // Check whether this partition group exists
+ CatalogPartitionGroup partitionGroup = getPartitionGroup( partitionGroupId );
+
+ CatalogPartitionGroup updatedCatalogPartitionGroup = new CatalogPartitionGroup(
+ partitionGroup.id,
+ partitionGroup.partitionGroupName,
+ partitionGroup.tableId,
+ partitionGroup.schemaId,
+ partitionGroup.partitionKey,
+ partitionGroup.partitionQualifiers,
+ ImmutableList.copyOf( partitionIds ),
+ partitionGroup.isUnbound );
+
+ synchronized ( this ) {
+ partitionGroups.replace( partitionGroupId, updatedCatalogPartitionGroup );
+ listeners.firePropertyChange( "partitionGroup", partitionGroup, updatedCatalogPartitionGroup );
+ }
+
+ }
+
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public void addPartitionToGroup( long partitionGroupId, Long partitionId ) {
+ // Check whether this partition exists
+ getPartition( partitionId );
+
+ CatalogPartitionGroup partitionGroup = getPartitionGroup( partitionGroupId );
+ List newPartitionIds = new ArrayList<>( partitionGroup.partitionIds );
+
+ if ( !newPartitionIds.contains( partitionId ) ) {
+ newPartitionIds.add( partitionId );
+ updatePartitionGroup( partitionGroupId, newPartitionIds );
+ }
+ }
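An illustrative sketch of how addPartitionToGroup() above and removePartitionFromGroup() below cooperate (hypothetical ids, not part of this patch): updatePartition(), defined further below, moves a partition by adding it to the target group, removing it from its previous group, and rewriting the partition's own group reference:

    // Move partition 42 into partition group 7.
    catalog.updatePartition( 42L, 7L );
    // Afterwards, assuming both ids exist:
    //   getPartition( 42L ).partitionGroupId == 7L
    //   getPartitionGroup( 7L ).partitionIds contains 42L
    //   the previous group's partitionIds no longer contain 42L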
+
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public void removePartitionFromGroup( long partitionGroupId, Long partitionId ) {
+ // Check whether this partition group exists
+ CatalogPartitionGroup partitionGroup = getPartitionGroup( partitionGroupId );
+ List newPartitionIds = new ArrayList<>( partitionGroup.partitionIds );
+
+ if ( newPartitionIds.contains( partitionId ) ) {
+ newPartitionIds.remove( partitionId );
+ updatePartitionGroup( partitionGroupId, newPartitionIds );
+ }
+ }
+
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public void updatePartition( long partitionId, Long partitionGroupId ) {
+ // Check whether this partition group exists
+ CatalogPartitionGroup partitionGroup = getPartitionGroup( partitionGroupId );
+ List newPartitionIds = new ArrayList<>( partitionGroup.partitionIds );
+
+ CatalogPartition oldPartition = getPartition( partitionId );
+
+ if ( !newPartitionIds.contains( partitionId ) ) {
+ newPartitionIds.add( partitionId );
+
+ addPartitionToGroup( partitionGroupId, partitionId );
+ removePartitionFromGroup( oldPartition.partitionGroupId, partitionId );
+
+ CatalogPartition updatedPartition = new CatalogPartition(
+ oldPartition.id,
+ oldPartition.tableId,
+ oldPartition.schemaId,
+ oldPartition.partitionQualifiers,
+ oldPartition.isUnbound,
+ partitionGroupId
+ );
+
+ synchronized ( this ) {
+ partitions.put( updatedPartition.id, updatedPartition );
+ }
+ listeners.firePropertyChange( "partition", oldPartition, updatedPartition );
+ }
+ }
+
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public CatalogPartitionGroup getPartitionGroup( long partitionGroupId ) throws UnknownPartitionGroupIdRuntimeException {
+ try {
+ return Objects.requireNonNull( partitionGroups.get( partitionGroupId ) );
+ } catch ( NullPointerException e ) {
+ throw new UnknownPartitionGroupIdRuntimeException( partitionGroupId );
+ }
+ }
+
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public long addPartition( long tableId, long schemaId, long partitionGroupId, List effectivePartitionQualifier, boolean isUnbound ) throws GenericCatalogException {
+ try {
+ long id = partitionIdBuilder.getAndIncrement();
+ if ( log.isDebugEnabled() ) {
+ log.debug( "Creating partition with id '{}'", id );
+ }
+ CatalogSchema schema = Objects.requireNonNull( schemas.get( schemaId ) );
+
+ CatalogPartition partition = new CatalogPartition(
+ id,
+ tableId,
+ schemaId,
+ effectivePartitionQualifier,
+ isUnbound,
+ partitionGroupId );
+
+ synchronized ( this ) {
+ partitions.put( id, partition );
+ }
+ listeners.firePropertyChange( "partition", null, partition );
+ return id;
+ } catch ( NullPointerException e ) {
+ throw new GenericCatalogException( e );
+ }
+ }
+
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public void deletePartition( long tableId, long schemaId, long partitionId ) {
+ if ( log.isDebugEnabled() ) {
+ log.debug( "Deleting partition with id '{}' on table with id '{}'", partitionId, tableId );
+ }
+ // Check whether this partition exists
+ getPartition( partitionId );
+ synchronized ( this ) {
+ for ( CatalogPartitionPlacement partitionPlacement : getPartitionPlacements( partitionId ) ) {
+ deletePartitionPlacement( partitionPlacement.adapterId, partitionId );
+ }
+ partitions.remove( partitionId );
+ }
+ }
+
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public CatalogPartition getPartition( long partitionId ) {
+ try {
+ return Objects.requireNonNull( partitions.get( partitionId ) );
+ } catch ( NullPointerException e ) {
+ throw new UnknownPartitionGroupIdRuntimeException( partitionId );
+ }
+ }
+
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public List getPartitionsByTable( long tableId
) { + return partitions.values() + .stream() + .filter( p -> p.tableId == tableId ) + .collect( Collectors.toList() ); + } + + + /** + * {@inheritDoc} + */ + @Override + public void partitionTable( long tableId, PartitionType partitionType, long partitionColumnId, int numPartitionGroups, List partitionGroupIds, PartitionProperty partitionProperty ) { + LogicalTable old = Objects.requireNonNull( tables.get( tableId ) ); + + LogicalTable table = new LogicalTable( + old.id, + old.name, + old.fieldIds, + old.namespaceId, + old.ownerId, + old.entityType, + old.primaryKey, + old.dataPlacements, + old.modifiable, + partitionProperty, + old.connectedViews ); + + synchronized ( this ) { + tables.replace( tableId, table ); + tableNames.replace( new Object[]{ table.namespaceId, old.name }, table ); + + if ( table.partitionProperty.reliesOnPeriodicChecks ) { + addTableToPeriodicProcessing( tableId ); + } + } + + listeners.firePropertyChange( "table", old, table ); + } + + + /** + * {@inheritDoc} + */ + @Override + public void mergeTable( long tableId ) { + LogicalTable old = Objects.requireNonNull( tables.get( tableId ) ); + + if ( old.partitionProperty.reliesOnPeriodicChecks ) { + removeTableFromPeriodicProcessing( tableId ); + } + + //Technically every Table is partitioned. But tables classified as UNPARTITIONED only consist of one PartitionGroup and one large partition + List partitionGroupIds = new ArrayList<>(); + try { + partitionGroupIds.add( addPartitionGroup( tableId, "full", old.namespaceId, PartitionType.NONE, 1, new ArrayList<>(), true ) ); + } catch ( GenericCatalogException e ) { + throw new RuntimeException( e ); + } + + // Get All(only one) PartitionGroups and then get all partitionIds for each PG and add them to completeList of partitionIds + CatalogPartitionGroup defaultUnpartitionedGroup = getPartitionGroup( partitionGroupIds.get( 0 ) ); + PartitionProperty partitionProperty = PartitionProperty.builder() + .partitionType( PartitionType.NONE ) + .isPartitioned( false ) + .partitionGroupIds( ImmutableList.copyOf( partitionGroupIds ) ) + .partitionIds( ImmutableList.copyOf( defaultUnpartitionedGroup.partitionIds ) ) + .reliesOnPeriodicChecks( false ) + .build(); + + LogicalTable table = new LogicalTable( + old.id, + old.name, + old.fieldIds, + old.namespaceId, + old.ownerId, + old.entityType, + old.primaryKey, + old.dataPlacements, + old.modifiable, + partitionProperty, + old.connectedViews ); + + synchronized ( this ) { + tables.replace( tableId, table ); + tableNames.replace( new Object[]{ table.namespaceId, old.name }, table ); + } + listeners.firePropertyChange( "table", old, table ); + } + + + /** + * {@inheritDoc} + */ + @Override + public void updateTablePartitionProperties( long tableId, PartitionProperty partitionProperty ) { + LogicalTable old = Objects.requireNonNull( tables.get( tableId ) ); + + LogicalTable table = new LogicalTable( + old.id, + old.name, + old.fieldIds, + old.namespaceId, + old.ownerId, + old.entityType, + old.primaryKey, + old.dataPlacements, + old.modifiable, + partitionProperty, + old.connectedViews ); + + synchronized ( this ) { + tables.replace( tableId, table ); + tableNames.replace( new Object[]{ table.namespaceId, old.name }, table ); + } + + listeners.firePropertyChange( "table", old, table ); + } + + + /** + * {@inheritDoc} + */ + @Override + public List getPartitionGroups( long tableId ) { + try { + LogicalTable table = Objects.requireNonNull( tables.get( tableId ) ); + List partitionGroups = new ArrayList<>(); + if ( 
table.partitionProperty.partitionGroupIds == null ) { + return new ArrayList<>(); + } + for ( long partId : table.partitionProperty.partitionGroupIds ) { + partitionGroups.add( getPartitionGroup( partId ) ); + } + return partitionGroups; + } catch ( UnknownPartitionGroupIdRuntimeException e ) { + return new ArrayList<>(); + } + } + + + /** + * {@inheritDoc} + */ + @Override + public List getPartitionGroups( Pattern schemaNamePattern, Pattern tableNamePattern ) { + List catalogEntities = getTables( schemaNamePattern, tableNamePattern ); + Stream partitionGroupStream = Stream.of(); + for ( LogicalTable catalogTable : catalogEntities ) { + partitionGroupStream = Stream.concat( partitionGroupStream, getPartitionGroups( catalogTable.id ).stream() ); + } + return partitionGroupStream.collect( Collectors.toList() ); + } + + + /** + * {@inheritDoc} + */ + @Override + public List getPartitions( long partitionGroupId ) { + try { + CatalogPartitionGroup partitionGroup = Objects.requireNonNull( partitionGroups.get( partitionGroupId ) ); + List partitions = new ArrayList<>(); + if ( partitionGroup.partitionIds == null ) { + return new ArrayList<>(); + } + for ( long partId : partitionGroup.partitionIds ) { + partitions.add( getPartition( partId ) ); + } + return partitions; + } catch ( UnknownPartitionGroupIdRuntimeException e ) { + return new ArrayList<>(); + } + } + + + /** + * {@inheritDoc} + */ + @Override + public List getPartitions( Pattern schemaNamePattern, Pattern tableNamePattern ) { + List catalogPartitionGroups = getPartitionGroups( schemaNamePattern, tableNamePattern ); + Stream partitionStream = Stream.of(); + for ( CatalogPartitionGroup catalogPartitionGroup : catalogPartitionGroups ) { + partitionStream = Stream.concat( partitionStream, getPartitions( catalogPartitionGroup.id ).stream() ); + } + return partitionStream.collect( Collectors.toList() ); + } + + + /** + * {@inheritDoc} + */ + @Override + public List getPartitionGroupNames( long tableId ) { + List partitionGroupNames = new ArrayList<>(); + for ( CatalogPartitionGroup catalogPartitionGroup : getPartitionGroups( tableId ) ) { + partitionGroupNames.add( catalogPartitionGroup.partitionGroupName ); + } + return partitionGroupNames; + } + + + /** + * {@inheritDoc} + */ + @Override + public List getColumnPlacementsByPartitionGroup( long tableId, long partitionGroupId, long columnId ) { + List catalogColumnPlacements = new ArrayList<>(); + for ( CatalogColumnPlacement ccp : getColumnPlacement( columnId ) ) { + if ( getPartitionGroupsOnDataPlacement( ccp.adapterId, tableId ).contains( partitionGroupId ) ) { + catalogColumnPlacements.add( ccp ); + } + } + + return catalogColumnPlacements; + } + + + /** + * {@inheritDoc} + */ + @Override + public List getAdaptersByPartitionGroup( long tableId, long partitionGroupId ) { + Set catalogAdapters = new HashSet<>(); + + for ( CatalogDataPlacement dataPlacement : getDataPlacements( tableId ) ) { + for ( long partitionId : dataPlacement.getAllPartitionIds() ) { + long partitionGroup = getPartitionGroupByPartition( partitionId ); + if ( partitionGroup == partitionGroupId ) { + catalogAdapters.add( getAdapter( dataPlacement.adapterId ) ); + } + } + } + + return new ArrayList<>( catalogAdapters ); + } + + + /** + * {@inheritDoc} + */ + @Override + public List getPartitionGroupsOnDataPlacement( int adapterId, long tableId ) { + Set partitionGroups = new HashSet<>(); + CatalogDataPlacement dataPlacement = getDataPlacement( adapterId, tableId ); + + dataPlacement.getAllPartitionIds().forEach( + 
partitionId -> partitionGroups.add( getPartitionGroupByPartition( partitionId ) + ) + ); + + return new ArrayList<>( partitionGroups ); + } + + + /** + * {@inheritDoc} + */ + @Override + public List getPartitionsOnDataPlacement( int adapterId, long tableId ) { + return getDataPlacement( adapterId, tableId ).getAllPartitionIds(); + } + + + /** + * {@inheritDoc} + */ + @Override + public List getPartitionGroupsIndexOnDataPlacement( int adapterId, long tableId ) { + List partitionGroups = getPartitionGroupsOnDataPlacement( adapterId, tableId ); + if ( partitionGroups == null ) { + return new ArrayList<>(); + } + + List partitionGroupIndexList = new ArrayList<>(); + LogicalTable catalogTable = getTable( tableId ); + for ( int index = 0; index < catalogTable.partitionProperty.partitionGroupIds.size(); index++ ) { + if ( partitionGroups.contains( catalogTable.partitionProperty.partitionGroupIds.get( index ) ) ) { + partitionGroupIndexList.add( (long) index ); + } + } + return partitionGroupIndexList; + } + + + /** + * {@inheritDoc} + */ + @Override + public CatalogDataPlacement getDataPlacement( int adapterId, long tableId ) { + return dataPlacements.get( new Object[]{ adapterId, tableId } ); + } + + + /** + * {@inheritDoc} + */ + @Override + public List getDataPlacements( long tableId ) { + List catalogDataPlacements = new ArrayList<>(); + + getTable( tableId ).dataPlacements.forEach( adapterId -> catalogDataPlacements.add( getDataPlacement( adapterId, tableId ) ) ); + + return catalogDataPlacements; + } + + + /** + * {@inheritDoc} + */ + @Override + public List getAllFullDataPlacements( long tableId ) { + List dataPlacements = new ArrayList<>(); + List allDataPlacements = getDataPlacements( tableId ); + + for ( CatalogDataPlacement dataPlacement : allDataPlacements ) { + if ( dataPlacement.hasFullPlacement() ) { + dataPlacements.add( dataPlacement ); + } + } + return dataPlacements; + } + + + /** + * {@inheritDoc} + */ + @Override + public List getAllColumnFullDataPlacements( long tableId ) { + List dataPlacements = new ArrayList<>(); + List allDataPlacements = getDataPlacements( tableId ); + + for ( CatalogDataPlacement dataPlacement : allDataPlacements ) { + if ( dataPlacement.hasColumnFullPlacement() ) { + dataPlacements.add( dataPlacement ); + } + } + return dataPlacements; + } + + + /** + * {@inheritDoc} + */ + @Override + public List getAllPartitionFullDataPlacements( long tableId ) { + List dataPlacements = new ArrayList<>(); + List allDataPlacements = getDataPlacements( tableId ); + + for ( CatalogDataPlacement dataPlacement : allDataPlacements ) { + if ( dataPlacement.hasPartitionFullPlacement() ) { + dataPlacements.add( dataPlacement ); + } + } + return dataPlacements; + } + + + /** + * {@inheritDoc} + */ + @Override + public List getDataPlacementsByRole( long tableId, DataPlacementRole role ) { + List catalogDataPlacements = new ArrayList<>(); + for ( CatalogDataPlacement dataPlacement : getDataPlacements( tableId ) ) { + if ( dataPlacement.dataPlacementRole.equals( role ) ) { + catalogDataPlacements.add( dataPlacement ); + } + } + return catalogDataPlacements; + } + + + /** + * {@inheritDoc} + */ + @Override + public List getPartitionPlacementsByRole( long tableId, DataPlacementRole role ) { + List partitionPlacements = new ArrayList<>(); + for ( CatalogDataPlacement dataPlacement : getDataPlacementsByRole( tableId, role ) ) { + if ( dataPlacement.partitionPlacementsOnAdapterByRole.containsKey( role ) ) { + dataPlacement.partitionPlacementsOnAdapterByRole.get( role ) + .forEach( + 
partitionId -> partitionPlacements.add( getPartitionPlacement( dataPlacement.adapterId, partitionId ) )
+ );
+ }
+ }
+ return partitionPlacements;
+ }
+
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public List getPartitionPlacementsByIdAndRole( long tableId, long partitionId, DataPlacementRole role ) {
+ List partitionPlacements = new ArrayList<>();
+ for ( CatalogPartitionPlacement partitionPlacement : getPartitionPlacements( partitionId ) ) {
+ if ( partitionPlacement.role.equals( role ) ) {
+ partitionPlacements.add( partitionPlacement );
+ }
+ }
+ return partitionPlacements;
+ }
+
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public boolean validateDataPlacementsConstraints( long tableId, long adapterId, List columnIdsToBeRemoved, List partitionsIdsToBeRemoved ) {
+ if ( (columnIdsToBeRemoved.isEmpty() && partitionsIdsToBeRemoved.isEmpty()) || isTableFlaggedForDeletion( tableId ) ) {
+ log.warn( "Validation was invoked with empty lists of columns and partitions to be removed; it therefore trivially succeeds." );
+ return true;
+ }
+
+ // TODO @HENNLO Focus on PartitionPlacements that are labeled as UPTODATE nodes. The outdated nodes do not
+ // necessarily need placement constraints
+
+ LogicalTable table = getTable( tableId );
+ List dataPlacements = getDataPlacements( tableId );
+
+ // Checks for every column on every DataPlacement if each column is placed with all partitions
+ for ( long columnId : table.fieldIds ) {
+ List partitionsToBeCheckedForColumn = table.partitionProperty.partitionIds.stream().collect( Collectors.toList() );
+ // Check for every column if it has every partition
+ for ( CatalogDataPlacement dataPlacement : dataPlacements ) {
+ // Can instantly return because we still have a full placement somewhere
+ if ( dataPlacement.hasFullPlacement() && dataPlacement.adapterId != adapterId ) {
+ return true;
+ }
+
+ List effectiveColumnsOnStore = dataPlacement.columnPlacementsOnAdapter.stream().collect( Collectors.toList() );
+ List effectivePartitionsOnStore = dataPlacement.getAllPartitionIds();
+
+ // Remove columns and partitions from store to not evaluate them
+ if ( dataPlacement.adapterId == adapterId ) {
+
+ // Skips columns that shall be removed
+ if ( columnIdsToBeRemoved.contains( columnId ) ) {
+ continue;
+ }
+
+ // Only process those parts that shall be present after change
+ effectiveColumnsOnStore.removeAll( columnIdsToBeRemoved );
+ effectivePartitionsOnStore.removeAll( partitionsIdsToBeRemoved );
+ }
+
+ if ( effectiveColumnsOnStore.contains( columnId ) ) {
+ partitionsToBeCheckedForColumn.removeAll( effectivePartitionsOnStore );
+ } else {
+ continue;
+ }
+
+ // Found all partitions for column, continue with next column
+ if ( partitionsToBeCheckedForColumn.isEmpty() ) {
+ break;
+ }
+ }
+
+ if ( !partitionsToBeCheckedForColumn.isEmpty() ) {
+ return false;
+ }
+ }
+
+ return true;
+ }
+
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public void flagTableForDeletion( long tableId, boolean flag ) {
+ if ( flag && !tablesFlaggedForDeletion.contains( tableId ) ) {
+ tablesFlaggedForDeletion.add( tableId );
+ } else if ( !flag && tablesFlaggedForDeletion.contains( tableId ) ) {
+ tablesFlaggedForDeletion.remove( tableId );
+ }
+ }
+
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public boolean isTableFlaggedForDeletion( long tableId ) {
+ return tablesFlaggedForDeletion.contains( tableId );
+ }
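A worked scenario for validateDataPlacementsConstraints() above (illustrative, not part of this patch):

    // Table t has columns {a, b} and partitions {p1, p2}. Adapter 1 holds a full
    // placement (both columns, both partitions); adapter 2 holds only (a, p1).
    // - validateDataPlacementsConstraints( t, adapter2, [a], [p1] ) returns true:
    //   adapter 1 still holds a full placement, so the check succeeds immediately.
    // - validateDataPlacementsConstraints( t, adapter1, [b], [] ) returns false:
    //   after removing b from adapter 1, no placement of b would remain for p1
    //   or p2, so the caller must reject the removal.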
+
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public void addPartitionPlacement( long namespaceId, int adapterId, long tableId, long partitionId, PlacementType placementType, String physicalSchemaName, String physicalTableName, DataPlacementRole role ) {
+ if ( !checkIfExistsPartitionPlacement( adapterId, partitionId ) ) {
+ CatalogAdapter store = Objects.requireNonNull( adapters.get( adapterId ) );
+ CatalogPartitionPlacement partitionPlacement = new CatalogPartitionPlacement(
+ namespaceId,
+ tableId,
+ adapterId,
+ store.uniqueName,
+ placementType,
+ physicalSchemaName,
+ physicalTableName,
+ partitionId,
+ role );
+
+ synchronized ( this ) {
+ partitionPlacements.put( new Object[]{ adapterId, partitionId }, partitionPlacement );
+
+ // Adds this PartitionPlacement to existing DataPlacement container
+ addPartitionsToDataPlacement( adapterId, tableId, List.of( partitionId ) );
+
+ listeners.firePropertyChange( "partitionPlacement", null, partitionPlacement );
+ }
+ }
+ }
+
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public CatalogDataPlacement addDataPlacementIfNotExists( int adapterId, long tableId ) {
+ CatalogDataPlacement dataPlacement;
+ if ( (dataPlacement = getDataPlacement( adapterId, tableId )) == null ) {
+ if ( log.isDebugEnabled() ) {
+ log.debug( "No DataPlacement exists on adapter '{}' for entity '{}'. Creating a new one.", getAdapter( adapterId ), getTable( tableId ) );
+ }
+ addDataPlacement( adapterId, tableId );
+ dataPlacement = getDataPlacement( adapterId, tableId );
+ }
+
+ return dataPlacement;
+ }
+
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public void updateDataPlacementsOnTable( long tableId, List newDataPlacements ) {
+ LogicalTable old = Objects.requireNonNull( tables.get( tableId ) );
+
+ LogicalTable newTable = old.withDataPlacements( ImmutableList.copyOf( newDataPlacements ) );
+
+ synchronized ( this ) {
+ tables.replace( tableId, newTable );
+ tableNames.replace( new Object[]{ newTable.namespaceId, newTable.name }, newTable );
+ }
+ }
+
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public void addDataPlacement( int adapterId, long tableId ) {
+ if ( log.isDebugEnabled() ) {
+ log.debug( "Creating DataPlacement on adapter '{}' for entity '{}'", getAdapter( adapterId ), getTable( tableId ) );
+ }
+
+ if ( !dataPlacements.containsKey( new Object[]{ adapterId, tableId } ) ) {
+ CatalogDataPlacement dataPlacement = new CatalogDataPlacement(
+ tableId,
+ adapterId,
+ PlacementType.AUTOMATIC,
+ DataPlacementRole.UPTODATE,
+ ImmutableList.of(),
+ ImmutableList.of() );
+
+ synchronized ( this ) {
+ dataPlacements.put( new Object[]{ adapterId, tableId }, dataPlacement );
+ addSingleDataPlacementToTable( adapterId, tableId );
+ }
+ listeners.firePropertyChange( "dataPlacement", null, dataPlacement );
+ }
+ }
+
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ protected void modifyDataPlacement( int adapterId, long tableId, CatalogDataPlacement catalogDataPlacement ) {
+
+ try {
+ CatalogDataPlacement oldDataPlacement = getDataPlacement( adapterId, tableId );
+ synchronized ( this ) {
+ dataPlacements.replace( new Object[]{ adapterId, tableId }, catalogDataPlacement );
+ }
+ listeners.firePropertyChange( "dataPlacement", oldDataPlacement, catalogDataPlacement );
+ } catch ( NullPointerException e ) {
+ e.printStackTrace();
+ }
+ }
+
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public long addGraphPlacement( int adapterId, long graphId ) {
+ long id = partitionIdBuilder.getAndIncrement();
+ CatalogGraphPlacement placement = new CatalogGraphPlacement( adapterId, graphId, null, id );
+ LogicalGraph old = graphs.get( graphId );
+ if ( old == null ) {
+ throw new UnknownGraphException( graphId );
+ }
+
+ LogicalGraph graph = old.addPlacement(
adapterId ); + + synchronized ( this ) { + graphPlacements.put( new Object[]{ graph.id, adapterId }, placement ); + graphs.replace( graph.id, graph ); + graphNames.replace( new Object[]{ graph.name }, graph ); + } + listeners.firePropertyChange( "graphPlacement", null, placement ); + return id; + } + + + /** + * {@inheritDoc} + */ + @Override + public void updateGraphPlacementPhysicalNames( long graphId, int adapterId, String physicalGraphName ) { + if ( !graphPlacements.containsKey( new Object[]{ graphId, adapterId } ) ) { + throw new UnknownGraphPlacementsException( graphId, adapterId ); + } + + CatalogGraphPlacement old = Objects.requireNonNull( graphPlacements.get( new Object[]{ graphId, adapterId } ) ); + + CatalogGraphPlacement placement = old.replacePhysicalName( physicalGraphName ); + + synchronized ( this ) { + graphPlacements.replace( new Object[]{ graphId, adapterId }, placement ); + } + + listeners.firePropertyChange( "graphPlacement", old, placement ); + } + + + /** + * {@inheritDoc} + */ + @Override + public void deleteGraphPlacement( int adapterId, long graphId ) { + if ( !graphPlacements.containsKey( new Object[]{ graphId, adapterId } ) ) { + throw new UnknownGraphPlacementsException( graphId, adapterId ); + } + CatalogGraphPlacement placement = Objects.requireNonNull( graphPlacements.get( new Object[]{ graphId, adapterId } ) ); + + deleteGraphPlacementLogistics( placement.graphId, adapterId ); + + LogicalGraph old = Objects.requireNonNull( graphs.get( placement.graphId ) ); + + LogicalGraph graph = old.removePlacement( adapterId ); + + synchronized ( this ) { + graphPlacements.remove( new Object[]{ graphId, adapterId } ); + graphs.replace( graphId, graph ); + graphNames.replace( new Object[]{ Catalog.defaultDatabaseId, graph.name }, graph ); + } + listeners.firePropertyChange( "graphPlacements", null, null ); + } + + + private void deleteGraphPlacementLogistics( long graphId, int adapterId ) { + if ( !graphMappings.containsKey( graphId ) ) { + throw new UnknownGraphException( graphId ); + } + CatalogGraphMapping mapping = Objects.requireNonNull( graphMappings.get( graphId ) ); + if ( !graphPlacements.containsKey( new Object[]{ graphId, adapterId } ) ) { + throw new UnknownGraphPlacementsException( graphId, adapterId ); + } + CatalogGraphPlacement placement = Objects.requireNonNull( graphPlacements.get( new Object[]{ graphId, adapterId } ) ); + + removeSingleDataPlacementFromTable( placement.adapterId, mapping.nodesId ); + removeSingleDataPlacementFromTable( placement.adapterId, mapping.nodesPropertyId ); + removeSingleDataPlacementFromTable( placement.adapterId, mapping.edgesId ); + removeSingleDataPlacementFromTable( placement.adapterId, mapping.edgesPropertyId ); + } + + + /** + * {@inheritDoc} + */ + @Override + public CatalogGraphPlacement getGraphPlacement( long graphId, int adapterId ) { + if ( !graphPlacements.containsKey( new Object[]{ graphId, adapterId } ) ) { + throw new UnknownGraphPlacementsException( graphId, adapterId ); + } + + return graphPlacements.get( new Object[]{ graphId, adapterId } ); + } + + + /** + * {@inheritDoc} + */ + @Override + public void removeDataPlacement( int adapterId, long tableId ) { + CatalogDataPlacement dataPlacement = getDataPlacement( adapterId, tableId ); + + if ( log.isDebugEnabled() ) { + log.debug( "Removing DataPlacement on adapter '{}' for entity '{}'", getAdapter( adapterId ), getTable( tableId ) ); + } + + // Make sure that all columnPlacements and partitionPlacements are correctly dropped. 
+ // Although, they should've been dropped earlier. + + // Recursively removing columns that exist on this placement + for ( Long columnId : dataPlacement.columnPlacementsOnAdapter ) { + try { + deleteColumnPlacement( adapterId, columnId, false ); + } catch ( UnknownColumnIdRuntimeException e ) { + log.debug( "Column has been removed before the placement" ); + } + } + + // Recursively removing partitions that exist on this placement + for ( Long partitionId : dataPlacement.getAllPartitionIds() ) { + try { + deletePartitionPlacement( adapterId, partitionId ); + } catch ( UnknownColumnIdRuntimeException e ) { + log.debug( "Partition has been removed before the placement" ); + } + } + + synchronized ( this ) { + dataPlacements.remove( new Object[]{ adapterId, tableId } ); + removeSingleDataPlacementFromTable( adapterId, tableId ); + } + listeners.firePropertyChange( "dataPlacement", dataPlacement, null ); + } + + + /** + * {@inheritDoc} + */ + @Override + protected void addSingleDataPlacementToTable( Integer adapterId, long tableId ) { + LogicalTable old = getTable( tableId ); + List updatedPlacements = new ArrayList<>( old.dataPlacements ); + + if ( !updatedPlacements.contains( adapterId ) ) { + updatedPlacements.add( adapterId ); + updateDataPlacementsOnTable( tableId, updatedPlacements ); + } + } + + + /** + * {@inheritDoc} + */ + @Override + protected void removeSingleDataPlacementFromTable( Integer adapterId, long tableId ) { + LogicalTable old = getTable( tableId ); + List updatedPlacements = new ArrayList<>( old.dataPlacements ); + + if ( updatedPlacements.contains( adapterId ) ) { + updatedPlacements.remove( adapterId ); + updateDataPlacementsOnTable( tableId, updatedPlacements ); + } + } + + + /** + * {@inheritDoc} + */ + @Override + protected void addColumnsToDataPlacement( int adapterId, long tableId, List columnIds ) { + CatalogDataPlacement oldDataPlacement = addDataPlacementIfNotExists( adapterId, tableId ); + + Set columnPlacementsOnAdapter = new HashSet<>( oldDataPlacement.columnPlacementsOnAdapter ); + + // Merges new columnIds to list of already existing placements + columnPlacementsOnAdapter.addAll( columnIds ); + + CatalogDataPlacement newDataPlacement = new CatalogDataPlacement( + oldDataPlacement.tableId, + oldDataPlacement.adapterId, + oldDataPlacement.placementType, + oldDataPlacement.dataPlacementRole, + ImmutableList.copyOf( new ArrayList<>( columnPlacementsOnAdapter ) ), + ImmutableList.copyOf( oldDataPlacement.getAllPartitionIds() ) + ); + + modifyDataPlacement( adapterId, tableId, newDataPlacement ); + + if ( log.isDebugEnabled() ) { + log.debug( "Added columns: {} of table {}, to placement on adapter {}.", columnIds, tableId, adapterId ); + } + } + + + /** + * {@inheritDoc} + */ + @Override + protected void removeColumnsFromDataPlacement( int adapterId, long tableId, List columnIds ) { + CatalogDataPlacement oldDataPlacement = getDataPlacement( adapterId, tableId ); + + Set columnPlacementsOnAdapter = new HashSet<>( oldDataPlacement.columnPlacementsOnAdapter ); + columnPlacementsOnAdapter.removeAll( columnIds ); + + CatalogDataPlacement newDataPlacement = new CatalogDataPlacement( + oldDataPlacement.tableId, + oldDataPlacement.adapterId, + oldDataPlacement.placementType, + oldDataPlacement.dataPlacementRole, + ImmutableList.copyOf( new ArrayList<>( columnPlacementsOnAdapter ) ), + ImmutableList.copyOf( oldDataPlacement.getAllPartitionIds() ) + ); + + modifyDataPlacement( adapterId, tableId, newDataPlacement ); + + if ( log.isDebugEnabled() ) { + log.debug( 
"Removed columns: {} from table {}, to placement on adapter {}.", columnIds, tableId, adapterId ); + } + } + + + /** + * {@inheritDoc} + */ + @Override + protected void addPartitionsToDataPlacement( int adapterId, long tableId, List partitionIds ) { + CatalogDataPlacement oldDataPlacement = addDataPlacementIfNotExists( adapterId, tableId ); + + Set partitionPlacementsOnAdapter = new HashSet<>( oldDataPlacement.getAllPartitionIds() ); + partitionPlacementsOnAdapter.addAll( partitionIds ); + + CatalogDataPlacement newDataPlacement = new CatalogDataPlacement( + oldDataPlacement.tableId, + oldDataPlacement.adapterId, + oldDataPlacement.placementType, + oldDataPlacement.dataPlacementRole, + oldDataPlacement.columnPlacementsOnAdapter, + ImmutableList.copyOf( new ArrayList<>( partitionPlacementsOnAdapter ) ) ); + + modifyDataPlacement( adapterId, tableId, newDataPlacement ); + + if ( log.isDebugEnabled() ) { + log.debug( "Added partitions: {} of table {}, to placement on adapter {}.", partitionIds, tableId, adapterId ); + } + } + + + /** + * {@inheritDoc} + */ + @Override + protected void removePartitionsFromDataPlacement( int adapterId, long tableId, List partitionIds ) { + CatalogDataPlacement oldDataPlacement = getDataPlacement( adapterId, tableId ); + + Set partitionPlacementsOnAdapter = new HashSet<>( oldDataPlacement.getAllPartitionIds() ); + partitionIds.forEach( partitionPlacementsOnAdapter::remove ); + + CatalogDataPlacement newDataPlacement = new CatalogDataPlacement( + oldDataPlacement.tableId, + oldDataPlacement.adapterId, + oldDataPlacement.placementType, + oldDataPlacement.dataPlacementRole, + oldDataPlacement.columnPlacementsOnAdapter, + ImmutableList.copyOf( new ArrayList<>( partitionPlacementsOnAdapter ) ) ); + + modifyDataPlacement( adapterId, tableId, newDataPlacement ); + + if ( log.isDebugEnabled() ) { + log.debug( "Removed partitions: {} from table {}, to placement on adapter {}.", partitionIds, tableId, adapterId ); + } + } + + + /** + * {@inheritDoc} + */ + @Override + public void updateDataPlacement( int adapterId, long tableId, List columnIds, List partitionIds ) { + CatalogDataPlacement oldDataPlacement = getDataPlacement( adapterId, tableId ); + + CatalogDataPlacement newDataPlacement = new CatalogDataPlacement( + oldDataPlacement.tableId, + oldDataPlacement.adapterId, + oldDataPlacement.placementType, + oldDataPlacement.dataPlacementRole, + ImmutableList.copyOf( columnIds ), + ImmutableList.copyOf( partitionIds ) ); + + modifyDataPlacement( adapterId, tableId, newDataPlacement ); + + if ( log.isDebugEnabled() ) { + log.debug( "Added columns {} & partitions: {} of table {}, to placement on adapter {}.", columnIds, partitionIds, tableId, adapterId ); + } + } + + + /** + * {@inheritDoc} + */ + @Override + public void deletePartitionPlacement( int adapterId, long partitionId ) { + if ( checkIfExistsPartitionPlacement( adapterId, partitionId ) ) { + synchronized ( this ) { + partitionPlacements.remove( new Object[]{ adapterId, partitionId } ); + removePartitionsFromDataPlacement( adapterId, getTableFromPartition( partitionId ).id, Arrays.asList( partitionId ) ); + } + } + } + + + /** + * {@inheritDoc} + */ + @Override + public CatalogPartitionPlacement getPartitionPlacement( int adapterId, long partitionId ) { + try { + return Objects.requireNonNull( partitionPlacements.get( new Object[]{ adapterId, partitionId } ) ); + } catch ( NullPointerException e ) { + getAdapter( adapterId ); + getPartition( partitionId ); + throw new UnknownPartitionPlacementException( adapterId, 
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    public CatalogPartitionPlacement getPartitionPlacement( int adapterId, long partitionId ) {
+        try {
+            return Objects.requireNonNull( partitionPlacements.get( new Object[]{ adapterId, partitionId } ) );
+        } catch ( NullPointerException e ) {
+            // Probe adapter and partition first; if either is unknown, this throws a more specific exception
+            getAdapter( adapterId );
+            getPartition( partitionId );
+            throw new UnknownPartitionPlacementException( adapterId, partitionId );
+        }
+    }
+
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    public List<CatalogPartitionPlacement> getPartitionPlacementsByAdapter( int adapterId ) {
+        return new ArrayList<>( partitionPlacements.prefixSubMap( new Object[]{ adapterId } ).values() );
+    }
+
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    public List<CatalogPartitionPlacement> getPartitionPlacementsByTableOnAdapter( int adapterId, long tableId ) {
+        return getPartitionPlacementsByAdapter( adapterId )
+                .stream()
+                .filter( p -> p.tableId == tableId )
+                .collect( Collectors.toList() );
+    }
+
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    public List<CatalogPartitionPlacement> getAllPartitionPlacementsByTable( long tableId ) {
+        return partitionPlacements.values()
+                .stream()
+                .filter( p -> p.tableId == tableId )
+                .collect( Collectors.toList() );
+    }
+
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    public List<CatalogPartitionPlacement> getPartitionPlacements( long partitionId ) {
+        return partitionPlacements.values()
+                .stream()
+                .filter( p -> p.partitionId == partitionId )
+                .collect( Collectors.toList() );
+    }
+
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    public List<LogicalTable> getTablesForPeriodicProcessing() {
+        List<LogicalTable> procTables = new ArrayList<>();
+        for ( Iterator<Long> iterator = frequencyDependentTables.iterator(); iterator.hasNext(); ) {
+            long tableId = -1;
+            try {
+                tableId = iterator.next();
+                procTables.add( getTable( tableId ) );
+            } catch ( UnknownTableIdRuntimeException e ) {
+                // Table has been dropped in the meantime; stop tracking it
+                iterator.remove();
+            }
+        }
+
+        return procTables;
+    }
+
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    public void addTableToPeriodicProcessing( long tableId ) {
+        int beforeSize = frequencyDependentTables.size();
+        getTable( tableId );
+        if ( !frequencyDependentTables.contains( tableId ) ) {
+            frequencyDependentTables.add( tableId );
+        }
+        // Initially start the periodic job if this was the first table to enable periodic processing
+        if ( beforeSize == 0 && frequencyDependentTables.size() == 1 ) {
+            // Start job for periodic processing
+            FrequencyMap.INSTANCE.initialize();
+        }
+    }
+
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    public void removeTableFromPeriodicProcessing( long tableId ) {
+        getTable( tableId );
+        if ( frequencyDependentTables.contains( tableId ) ) {
+            frequencyDependentTables.remove( tableId );
+        }
+
+        // Terminate the periodic job if this was the last table with periodic processing
+        if ( frequencyDependentTables.size() == 0 ) {
+            // Terminate job for periodic processing
+            FrequencyMap.INSTANCE.terminate();
+        }
+    }
+
+
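+    // Lifecycle sketch for the periodic-processing hooks above, assuming a single
+    // Catalog instance 'catalog' and a hypothetical table id 'tableId':
+    //
+    //   catalog.addTableToPeriodicProcessing( tableId );      // first registration starts FrequencyMap.INSTANCE
+    //   catalog.getTablesForPeriodicProcessing();             // returns the currently tracked tables
+    //   catalog.removeTableFromPeriodicProcessing( tableId ); // removing the last table terminates the job
+
+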
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    public boolean checkIfExistsPartitionPlacement( int adapterId, long partitionId ) {
+        CatalogPartitionPlacement placement = partitionPlacements.get( new Object[]{ adapterId, partitionId } );
+        return placement != null;
+    }
+
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    public List<CatalogKey> getTableKeys( long tableId ) {
+        return keys.values().stream().filter( k -> k.tableId == tableId ).collect( Collectors.toList() );
+    }
+
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    public List<CatalogIndex> getIndexes( CatalogKey key ) {
+        return indexes.values().stream().filter( i -> i.keyId == key.id ).collect( Collectors.toList() );
+    }
+
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    public List<CatalogForeignKey> getForeignKeys( CatalogKey key ) {
+        // Look up in foreignKeys (not indexes); a foreign key's id is the id of its underlying key
+        return foreignKeys.values().stream().filter( f -> f.id == key.id ).collect( Collectors.toList() );
+    }
+
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    public List<CatalogConstraint> getConstraints( CatalogKey key ) {
+        return constraints.values().stream().filter( c -> c.keyId == key.id ).collect( Collectors.toList() );
+    }
+
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    public boolean isIndex( long keyId ) {
+        return indexes.values().stream().anyMatch( i -> i.keyId == keyId );
+    }
+
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    public boolean isConstraint( long keyId ) {
+        return constraints.values().stream().anyMatch( c -> c.keyId == keyId );
+    }
+
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    public boolean isForeignKey( long keyId ) {
+        return foreignKeys.values().stream().anyMatch( f -> f.referencedKeyId == keyId );
+    }
+
+
+    /**
+     * Checks if the specified key is used as a primary key, foreign key, index, or constraint. If so, this is a NoOp. If it is not used, the key is deleted.
+     */
+    private void deleteKeyIfNoLongerUsed( Long keyId ) {
+        if ( keyId == null ) {
+            return;
+        }
+        CatalogKey key = getKey( keyId );
+        LogicalTable table = getTable( key.tableId );
+        if ( table.primaryKey != null && table.primaryKey.equals( keyId ) ) {
+            return;
+        }
+        if ( constraints.values().stream().anyMatch( c -> c.keyId == keyId ) ) {
+            return;
+        }
+        if ( foreignKeys.values().stream().anyMatch( f -> f.id == keyId ) ) {
+            return;
+        }
+        if ( indexes.values().stream().anyMatch( i -> i.keyId == keyId ) ) {
+            return;
+        }
+        synchronized ( this ) {
+            keys.remove( keyId );
+            keyColumns.remove( key.columnIds.stream().mapToLong( Long::longValue ).toArray() );
+        }
+        listeners.firePropertyChange( "key", key, null );
+    }
+
+
+    /**
+     * Returns the id of the key defined by the specified column ids. If this key does not yet exist, it is created.
+     *
+     * @param tableId on which the key is defined
+     * @param columnIds all involved columns
+     * @param enforcementTime at which point during execution the key should be enforced
+     * @return the id of the key
+     * @throws GenericCatalogException if the key could not be created
+     */
+    private long getOrAddKey( long tableId, List<Long> columnIds, EnforcementTime enforcementTime ) throws GenericCatalogException {
+        Long keyId = keyColumns.get( columnIds.stream().mapToLong( Long::longValue ).toArray() );
+        if ( keyId != null ) {
+            return keyId;
+        }
+        return addKey( tableId, columnIds, enforcementTime );
+    }
+
+
+    private long addKey( long tableId, List<Long> columnIds, EnforcementTime enforcementTime ) throws GenericCatalogException {
+        try {
+            LogicalTable table = Objects.requireNonNull( tables.get( tableId ) );
+            long id = keyIdBuilder.getAndIncrement();
+            CatalogKey key = new CatalogKey( id, table.id, table.namespaceId, columnIds, enforcementTime );
+            synchronized ( this ) {
+                keys.put( id, key );
+                keyColumns.put( columnIds.stream().mapToLong( Long::longValue ).toArray(), id );
+            }
+            listeners.firePropertyChange( "key", null, key );
+            return id;
+        } catch ( NullPointerException e ) {
+            throw new GenericCatalogException( e );
+        }
+    }
+
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    public List<CatalogKey> getKeys() {
+        return new ArrayList<>( keys.values() );
+    }
+
+
+    /**
+     * Returns the key with the specified id.
+     *
+     * @return The key
+     */
+    private CatalogKey getKey( long keyId ) {
+        try {
+            return Objects.requireNonNull( keys.get( keyId ) );
+        } catch ( NullPointerException e ) {
+            throw new UnknownKeyIdRuntimeException( keyId );
+        }
+    }
+
+
+    static class CatalogValidator {
+
+        public void validate() throws GenericCatalogException {
+
+        }
+
+
+        public void startCheck() {
+            columns.forEach( ( key, column ) -> {
+                assert (schemas.containsKey( column.schemaId ));
+                assert (Objects.requireNonNull( schemaChildren.get( column.schemaId ) ).contains( column.tableId ));
+
+                assert (tables.containsKey( column.tableId ));
+                assert (Objects.requireNonNull( tableChildren.get( column.tableId ) ).contains( column.id ));
+
+                assert (columnNames.containsKey( new
Object[]{ column.schemaId, column.tableId, column.name } )); + } ); + + columnPlacements.forEach( ( key, placement ) -> { + assert (columns.containsKey( placement.columnId )); + assert (adapters.containsKey( placement.adapterId )); + } ); + } + + } + +} diff --git a/plugins/mapdb-catalog/src/main/java/org/polypheny/db/catalog/CatalogInfoPage.java b/plugins/mapdb-catalog/src/main/java/org/polypheny/db/catalog/CatalogInfoPage.java index e6fda76bf2..54d9ab3bfc 100644 --- a/plugins/mapdb-catalog/src/main/java/org/polypheny/db/catalog/CatalogInfoPage.java +++ b/plugins/mapdb-catalog/src/main/java/org/polypheny/db/catalog/CatalogInfoPage.java @@ -162,23 +162,23 @@ private void resetCatalogInformation() { catalog.getDatabases( null ).forEach( d -> { databaseInformation.addRow( d.id, d.name, d.defaultNamespaceId ); } ); - catalog.getSchemas( null, null ).forEach( s -> { + catalog.getSchemas( null ).forEach( s -> { namespaceInformation.addRow( s.id, s.name, s.databaseId, s.namespaceType, s.caseSensitive ); } ); - catalog.getTables( null, null, null ).forEach( t -> { + catalog.getTables( null, null ).forEach( t -> { tableInformation.addRow( t.id, t.name, t.databaseId, t.namespaceId, t.entityType, t.partitionProperty.partitionType.toString(), t.partitionProperty.partitionGroupIds.size() ); } ); - catalog.getColumns( null, null, null, null ).forEach( c -> { + catalog.getColumns( null, null, null ).forEach( c -> { String placements = catalog.getColumnPlacement( c.id ).stream().map( plac -> String.valueOf( plac.adapterId ) ).collect( Collectors.joining( "," ) ); columnInformation.addRow( c.id, c.name, c.databaseId, c.schemaId, c.tableId, placements ); } ); catalog.getIndexes().forEach( i -> { indexInformation.addRow( i.id, i.name, i.keyId, i.location, i.method, i.unique ); } ); - catalog.getPartitionGroups( null, null, null ).forEach( pg -> { + catalog.getPartitionGroups( null, null ).forEach( pg -> { partitionGroupInformation.addRow( pg.id, pg.partitionGroupName, pg.tableId, pg.partitionIds.size() ); } ); - catalog.getPartitions( null, null, null ).forEach( p -> { + catalog.getPartitions( null, null ).forEach( p -> { partitionInformation.addRow( p.id, p.partitionGroupId, p.tableId, p.partitionQualifiers ); } ); } catch ( Exception e ) { diff --git a/plugins/mapdb-catalog/src/test/java/org/polypheny/db/test/CatalogTest.java b/plugins/mapdb-catalog/src/test/java/org/polypheny/db/test/CatalogTest.java index bdaa153687..a7d11eb292 100644 --- a/plugins/mapdb-catalog/src/test/java/org/polypheny/db/test/CatalogTest.java +++ b/plugins/mapdb-catalog/src/test/java/org/polypheny/db/test/CatalogTest.java @@ -33,20 +33,14 @@ import org.junit.After; import org.junit.Before; import org.junit.Test; -import org.polypheny.db.catalog.logistic.Collation; -import org.polypheny.db.catalog.logistic.EntityType; -import org.polypheny.db.catalog.logistic.ForeignKeyOption; -import org.polypheny.db.catalog.logistic.IndexType; -import org.polypheny.db.catalog.logistic.NamespaceType; -import org.polypheny.db.catalog.logistic.PlacementType; import org.polypheny.db.catalog.CatalogImpl; import org.polypheny.db.catalog.entity.CatalogAdapter; import org.polypheny.db.catalog.entity.CatalogAdapter.AdapterType; -import org.polypheny.db.catalog.entity.CatalogColumn; import org.polypheny.db.catalog.entity.CatalogDatabase; import org.polypheny.db.catalog.entity.CatalogPrimaryKey; import org.polypheny.db.catalog.entity.CatalogSchema; import org.polypheny.db.catalog.entity.CatalogUser; +import 
org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.exceptions.GenericCatalogException; import org.polypheny.db.catalog.exceptions.UnknownAdapterException; @@ -54,6 +48,12 @@ import org.polypheny.db.catalog.exceptions.UnknownDatabaseException; import org.polypheny.db.catalog.exceptions.UnknownSchemaException; import org.polypheny.db.catalog.exceptions.UnknownTableException; +import org.polypheny.db.catalog.logistic.Collation; +import org.polypheny.db.catalog.logistic.EntityType; +import org.polypheny.db.catalog.logistic.ForeignKeyOption; +import org.polypheny.db.catalog.logistic.IndexType; +import org.polypheny.db.catalog.logistic.NamespaceType; +import org.polypheny.db.catalog.logistic.PlacementType; import org.polypheny.db.type.PolyType; @@ -93,7 +93,7 @@ public void testLayout() throws UnknownDatabaseException, UnknownSchemaException CatalogDatabase database = catalog.getDatabase( "test_db" ); assertEquals( databaseId, database.id ); - long schemaId = catalog.addNamespace( "test_schema", databaseId, userId, NamespaceType.RELATIONAL ); + long schemaId = catalog.addNamespace( "test_schema", userId, NamespaceType.RELATIONAL ); CatalogSchema schema = catalog.getSchema( databaseId, "test_schema" ); assertEquals( schemaId, schema.id ); @@ -102,7 +102,7 @@ public void testLayout() throws UnknownDatabaseException, UnknownSchemaException assertEquals( tableId, table.id ); long columnId = catalog.addColumn( "test_column", tableId, 0, PolyType.BIGINT, null, null, null, null, null, false, null ); - CatalogColumn column = catalog.getColumn( tableId, "test_column" ); + LogicalColumn column = catalog.getColumn( tableId, "test_column" ); assertEquals( columnId, column.id ); } @@ -142,7 +142,7 @@ public void testSchema() throws UnknownSchemaException { // test adding of schema for ( String name : names ) { - ids.add( catalog.addNamespace( name, databaseId, userId, NamespaceType.RELATIONAL ) ); + ids.add( catalog.addNamespace( name, userId, NamespaceType.RELATIONAL ) ); } assertEquals( catalog.getSchemas( databaseId, null ).stream().map( s -> s.name ).collect( Collectors.toList() ), names ); @@ -170,7 +170,7 @@ public void testTable() throws GenericCatalogException { long databaseId = catalog.addDatabase( "APP", userId, user.name, 0, "" ); - long schemaId = catalog.addNamespace( "schema1", databaseId, userId, NamespaceType.RELATIONAL ); + long schemaId = catalog.addNamespace( "schema1", userId, NamespaceType.RELATIONAL ); List names = new ArrayList<>( Arrays.asList( "table1", "table2", "table3", "table4", "table5" ) ); List ids = new ArrayList<>(); @@ -189,7 +189,7 @@ public void testTable() throws GenericCatalogException { catalog.renameTable( tableId, newTable ); assertEquals( names, - catalog.getTables( null, null, null ).stream().sorted().map( s -> s.name ).collect( Collectors.toList() ) ); + catalog.getTables( null, null ).stream().sorted().map( s -> s.name ).collect( Collectors.toList() ) ); // test change owner String newUserName = "newUser"; @@ -232,7 +232,7 @@ public void testColumn() throws GenericCatalogException { long databaseId = catalog.addDatabase( "APP", userId, user.name, 0, "" ); - long schemaId = catalog.addNamespace( "schema1", databaseId, userId, NamespaceType.RELATIONAL ); + long schemaId = catalog.addNamespace( "schema1", userId, NamespaceType.RELATIONAL ); long tableId = catalog.addTable( "table1", schemaId, userId, EntityType.ENTITY, true ); @@ -265,8 +265,8 @@ public void 
testColumn() throws GenericCatalogException { // test replacing position long otherColumnId = columnIds.get( 1 ); - CatalogColumn column = catalog.getColumn( columnId ); - CatalogColumn otherColumn = catalog.getColumn( otherColumnId ); + LogicalColumn column = catalog.getColumn( columnId ); + LogicalColumn otherColumn = catalog.getColumn( otherColumnId ); catalog.setColumnPosition( columnId, otherColumn.position ); catalog.setColumnPosition( otherColumnId, column.position ); @@ -313,11 +313,11 @@ public void testColumnPlacement() throws UnknownAdapterException { CatalogUser user = catalog.getUser( userId ); long databaseId = catalog.addDatabase( "APP", userId, user.name, 0, "" ); - long schemaId = catalog.addNamespace( "schema1", databaseId, userId, NamespaceType.RELATIONAL ); + long schemaId = catalog.addNamespace( "schema1", userId, NamespaceType.RELATIONAL ); long tableId = catalog.addTable( "table1", schemaId, userId, EntityType.ENTITY, true ); long columnId = catalog.addColumn( "column1", tableId, 0, PolyType.BIGINT, null, null, null, null, null, false, null ); - CatalogColumn column = catalog.getColumn( columnId ); + LogicalColumn column = catalog.getColumn( columnId ); CatalogAdapter store1 = catalog.getAdapter( "store1" ); CatalogAdapter store2 = catalog.getAdapter( "store2" ); @@ -344,14 +344,14 @@ public void testKey() throws GenericCatalogException { CatalogUser user = catalog.getUser( userId ); long databaseId = catalog.addDatabase( "APP", userId, user.name, 0, "" ); - long schemaId = catalog.addNamespace( "schema1", databaseId, userId, NamespaceType.RELATIONAL ); + long schemaId = catalog.addNamespace( "schema1", userId, NamespaceType.RELATIONAL ); long tableId = catalog.addTable( "table1", schemaId, userId, EntityType.ENTITY, true ); long columnId1 = catalog.addColumn( "column1", tableId, 0, PolyType.BIGINT, null, null, null, null, null, false, null ); - CatalogColumn column1 = catalog.getColumn( columnId1 ); + LogicalColumn column1 = catalog.getColumn( columnId1 ); long columnId2 = catalog.addColumn( "column2", tableId, 0, PolyType.BIGINT, null, null, null, null, null, false, null ); - CatalogColumn column2 = catalog.getColumn( columnId2 ); + LogicalColumn column2 = catalog.getColumn( columnId2 ); catalog.addPrimaryKey( tableId, Collections.singletonList( column1.id ) ); @@ -405,7 +405,7 @@ public void testKey() throws GenericCatalogException { // test foreign key long tableId2 = catalog.addTable( "table2", schemaId, userId, EntityType.ENTITY, true ); long columnId3 = catalog.addColumn( "column3", tableId2, 0, PolyType.BIGINT, null, null, null, null, null, false, null ); - CatalogColumn column3 = catalog.getColumn( columnId3 ); + LogicalColumn column3 = catalog.getColumn( columnId3 ); catalog.addPrimaryKey( tableId, Collections.singletonList( columnId1 ) ); catalog.addForeignKey( tableId2, Collections.singletonList( columnId3 ), tableId, Collections.singletonList( columnId1 ), "name", ForeignKeyOption.RESTRICT, ForeignKeyOption.RESTRICT ); diff --git a/plugins/mql-language/src/main/java/org/polypheny/db/languages/MqlProcessorImpl.java b/plugins/mql-language/src/main/java/org/polypheny/db/languages/MqlProcessorImpl.java index 2633263c52..95129a4f00 100644 --- a/plugins/mql-language/src/main/java/org/polypheny/db/languages/MqlProcessorImpl.java +++ b/plugins/mql-language/src/main/java/org/polypheny/db/languages/MqlProcessorImpl.java @@ -104,7 +104,7 @@ public Pair validate( Transaction transaction, Node parsed, b public boolean needsDdlGeneration( Node query, QueryParameters 
parameters ) { if ( query instanceof MqlCollectionStatement ) { return Catalog.getInstance() - .getTables( Catalog.defaultDatabaseId, new Pattern( ((MqlQueryParameters) parameters).getDatabase() ), null ) + .getTables( new Pattern( ((MqlQueryParameters) parameters).getDatabase() ), null ) .stream() .noneMatch( t -> t.name.equals( ((MqlCollectionStatement) query).getCollection() ) ); } @@ -145,7 +145,7 @@ public AlgRoot translate( Statement statement, Node mql, QueryParameters paramet stopWatch.start(); final RexBuilder rexBuilder = new RexBuilder( statement.getTransaction().getTypeFactory() ); - final AlgOptCluster cluster = AlgOptCluster.createDocument( statement.getQueryProcessor().getPlanner(), rexBuilder ); + final AlgOptCluster cluster = AlgOptCluster.createDocument( statement.getQueryProcessor().getPlanner(), rexBuilder, statement.getTransaction().getSnapshot() ); final MqlToAlgConverter mqlToAlgConverter = new MqlToAlgConverter( this, statement.getTransaction().getSnapshot(), cluster ); AlgRoot logicalRoot = mqlToAlgConverter.convert( mql, parameters ); diff --git a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql2alg/MqlToAlgConverter.java b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql2alg/MqlToAlgConverter.java index 082f7e166d..9f8e92c7ea 100644 --- a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql2alg/MqlToAlgConverter.java +++ b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql2alg/MqlToAlgConverter.java @@ -47,10 +47,10 @@ import org.polypheny.db.algebra.constant.Kind; import org.polypheny.db.algebra.core.AggregateCall; import org.polypheny.db.algebra.core.CorrelationId; -import org.polypheny.db.algebra.core.relational.RelScan; import org.polypheny.db.algebra.core.Values; import org.polypheny.db.algebra.core.common.Modify; import org.polypheny.db.algebra.core.document.DocumentProject; +import org.polypheny.db.algebra.core.relational.RelScan; import org.polypheny.db.algebra.fun.AggFunction; import org.polypheny.db.algebra.logical.document.LogicalDocumentAggregate; import org.polypheny.db.algebra.logical.document.LogicalDocumentFilter; @@ -61,13 +61,10 @@ import org.polypheny.db.algebra.logical.document.LogicalDocumentValues; import org.polypheny.db.algebra.operators.OperatorName; import org.polypheny.db.algebra.type.AlgDataType; -import org.polypheny.db.algebra.type.AlgDataTypeFactory; -import org.polypheny.db.algebra.type.AlgDataTypeFactory.Builder; import org.polypheny.db.algebra.type.AlgDataTypeField; -import org.polypheny.db.algebra.type.AlgDataTypeFieldImpl; -import org.polypheny.db.algebra.type.AlgDataTypeSystem; +import org.polypheny.db.catalog.Snapshot; +import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.catalog.logistic.NamespaceType; -import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; import org.polypheny.db.languages.OperatorRegistry; import org.polypheny.db.languages.QueryLanguage; import org.polypheny.db.languages.QueryParameters; @@ -84,10 +81,6 @@ import org.polypheny.db.nodes.Node; import org.polypheny.db.nodes.Operator; import org.polypheny.db.plan.AlgOptCluster; -import org.polypheny.db.plan.AlgOptEntity; -import org.polypheny.db.prepare.AlgOptEntityImpl; -import org.polypheny.db.prepare.PolyphenyDbCatalogReader; -import org.polypheny.db.prepare.Prepare.PreparingEntity; import org.polypheny.db.processing.Processor; import org.polypheny.db.rex.RexBuilder; import org.polypheny.db.rex.RexCall; @@ -96,7 +89,6 @@ import org.polypheny.db.rex.RexNode; 
import org.polypheny.db.schema.document.DocumentUtil; import org.polypheny.db.type.PolyType; -import org.polypheny.db.type.PolyTypeFactoryImpl; import org.polypheny.db.util.DateString; import org.polypheny.db.util.ImmutableBitSet; import org.polypheny.db.util.Pair; @@ -108,7 +100,7 @@ */ public class MqlToAlgConverter { - private final PolyphenyDbCatalogReader catalogReader; + private final Snapshot snapshot; private final AlgOptCluster cluster; private RexBuilder builder; private final static Map mappings; @@ -222,12 +214,12 @@ public class MqlToAlgConverter { private String defaultDatabase; private boolean notActive = false; private boolean usesDocumentModel; - private AlgOptEntity entity; + private CatalogEntity entity; private MqlQueryParameters parameters; - public MqlToAlgConverter( Processor mqlProcessor, PolyphenyDbCatalogReader catalogReader, AlgOptCluster cluster ) { - this.catalogReader = catalogReader; + public MqlToAlgConverter( Processor mqlProcessor, Snapshot snapshot, AlgOptCluster cluster ) { + this.snapshot = snapshot; this.cluster = Objects.requireNonNull( cluster ); this.any = this.cluster.getTypeFactory().createPolyType( PolyType.ANY ); this.nullableAny = this.cluster.getTypeFactory().createTypeWithNullability( any, true ); @@ -277,7 +269,7 @@ public AlgRoot convert( MqlCollectionStatement query ) { AlgNode node; - if ( entity.getCatalogEntity().namespaceType == NamespaceType.RELATIONAL ) { + if ( entity.namespaceType == NamespaceType.RELATIONAL ) { _dataExists = false; } @@ -324,14 +316,15 @@ public AlgRoot convert( MqlCollectionStatement query ) { } - private AlgOptEntity getEntity( MqlCollectionStatement query, String dbSchemaName ) { + private CatalogEntity getEntity( MqlCollectionStatement query, String dbSchemaName ) { List names = ImmutableList.of( dbSchemaName, query.getCollection() ); - PreparingEntity table = catalogReader.getTable( names ); + return snapshot.getEntity( names ); + /* if ( table == null || table.getEntity() == null ) { - return catalogReader.getCollection( names ); - } else if ( table.getCatalogEntity().namespaceType == NamespaceType.GRAPH ) { + return snapshot.getCollection( names ); + } else if ( table.namespaceType == NamespaceType.GRAPH ) { final AlgDataTypeFactory typeFactory = new PolyTypeFactoryImpl( AlgDataTypeSystem.DEFAULT ); @@ -347,24 +340,24 @@ private AlgOptEntity getEntity( MqlCollectionStatement query, String dbSchemaNam 1.0 ); } - return table; + return table;*/ } /** * Starts converting a db.collection.update(); */ - private AlgNode convertUpdate( MqlUpdate query, AlgOptEntity table, AlgNode node ) { + private AlgNode convertUpdate( MqlUpdate query, CatalogEntity entity, AlgNode node ) { if ( !query.getQuery().isEmpty() ) { - node = convertQuery( query, table.getRowType(), node ); + node = convertQuery( query, entity.getRowType(), node ); if ( query.isOnlyOne() ) { node = wrapLimit( node, 1 ); } } if ( query.isUsesPipeline() ) { - node = convertReducedPipeline( query, table.getRowType(), node, table ); + node = convertReducedPipeline( query, entity.getRowType(), node, entity ); } else { - node = translateUpdate( query, table.getRowType(), node, table ); + node = translateUpdate( query, entity.getRowType(), node, entity ); } return node; @@ -377,7 +370,7 @@ private AlgNode convertUpdate( MqlUpdate query, AlgOptEntity table, AlgNode node * this method is implemented like the reduced update pipeline, * but in fact could be combined and therefore optimized a lot more */ - private AlgNode translateUpdate( MqlUpdate query, 
AlgDataType rowType, AlgNode node, AlgOptEntity table ) { + private AlgNode translateUpdate( MqlUpdate query, AlgDataType rowType, AlgNode node, CatalogEntity entity ) { Map updates = new HashMap<>(); Map>> mergedUpdates = new HashMap<>(); mergedUpdates.put( UpdateOperation.REMOVE, new ArrayList<>() ); @@ -449,7 +442,7 @@ private AlgNode translateUpdate( MqlUpdate query, AlgDataType rowType, AlgNode n updates.clear(); } - return finalizeUpdates( "d", mergedUpdates, rowType, node, table ); + return finalizeUpdates( "d", mergedUpdates, rowType, node, entity ); } @@ -528,10 +521,10 @@ private void combineUpdate( Map>> me * @param mergedUpdates collection, which combines all performed update steps according to the operation * @param rowType the default rowtype at this point * @param node the transformed operation up to this step e.g. {@link RelScan} or {@link LogicalDocumentAggregate} - * @param table the active table + * @param entity the active entity * @return the unified UPDATE AlgNode */ - private AlgNode finalizeUpdates( String key, Map>> mergedUpdates, AlgDataType rowType, AlgNode node, AlgOptEntity table ) { + private AlgNode finalizeUpdates( String key, Map>> mergedUpdates, AlgDataType rowType, AlgNode node, CatalogEntity entity ) { RexNode updateChain = getIdentifier( key, rowType ); // replace List> replaceNodes = mergedUpdates.get( UpdateOperation.REPLACE ); @@ -583,9 +576,8 @@ private AlgNode finalizeUpdates( String key, Map translateCurrentDate( BsonDocument value, AlgDataTy /** * Starts translating an update pipeline */ - private AlgNode convertReducedPipeline( MqlUpdate query, AlgDataType rowType, AlgNode node, AlgOptEntity table ) { + private AlgNode convertReducedPipeline( MqlUpdate query, AlgDataType rowType, AlgNode node, CatalogEntity entity ) { Map updates = new HashMap<>(); Map>> mergedUpdates = new HashMap<>(); mergedUpdates.put( UpdateOperation.REMOVE, new ArrayList<>() ); @@ -748,7 +740,7 @@ private AlgNode convertReducedPipeline( MqlUpdate query, AlgDataType rowType, Al updates.clear(); } - return finalizeUpdates( "_data", mergedUpdates, rowType, node, table ); + return finalizeUpdates( "_data", mergedUpdates, rowType, node, entity ); } @@ -756,7 +748,7 @@ private AlgNode convertReducedPipeline( MqlUpdate query, AlgDataType rowType, Al /** * Translates a delete operation from its MqlNode format to the {@link AlgNode} form */ - private AlgNode convertDelete( MqlDelete query, AlgOptEntity table, AlgNode node ) { + private AlgNode convertDelete( MqlDelete query, CatalogEntity table, AlgNode node ) { if ( !query.getQuery().isEmpty() ) { node = convertQuery( query, table.getRowType(), node ); } @@ -767,7 +759,7 @@ private AlgNode convertDelete( MqlDelete query, AlgOptEntity table, AlgNode node return LogicalDocumentModify.create( table, node, - catalogReader, Modify.Operation.DELETE, + Modify.Operation.DELETE, null, null ); } @@ -777,14 +769,14 @@ private AlgNode convertDelete( MqlDelete query, AlgOptEntity table, AlgNode node * Method transforms an insert into the appropriate {@link LogicalDocumentValues} * * @param query the insert statement as Mql object - * @param table the table/collection into which the values are inserted + * @param entity the table/collection into which the values are inserted * @return the modified AlgNode */ - private AlgNode convertInsert( MqlInsert query, AlgOptEntity table ) { + private AlgNode convertInsert( MqlInsert query, CatalogEntity entity ) { return LogicalDocumentModify.create( - table, - convertMultipleValues( query.getValues(), 
table.getRowType() ), - catalogReader, Modify.Operation.INSERT, + entity, + convertMultipleValues( query.getValues(), entity.getRowType() ), + Modify.Operation.INSERT, null, null ); } diff --git a/plugins/mql-language/src/test/java/org/polypheny/db/mql/mql2alg/Mql2AlgTest.java b/plugins/mql-language/src/test/java/org/polypheny/db/mql/mql2alg/Mql2AlgTest.java index 426550d6b8..688b12b35c 100644 --- a/plugins/mql-language/src/test/java/org/polypheny/db/mql/mql2alg/Mql2AlgTest.java +++ b/plugins/mql-language/src/test/java/org/polypheny/db/mql/mql2alg/Mql2AlgTest.java @@ -18,10 +18,9 @@ import org.polypheny.db.algebra.AlgRoot; import org.polypheny.db.algebra.type.AlgDataTypeFactory; -import org.polypheny.db.algebra.type.AlgDataTypeSystem; -import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.catalog.MockCatalogReader; import org.polypheny.db.catalog.MockCatalogReaderDocument; +import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.languages.mql.MqlQueryParameters; import org.polypheny.db.languages.mql2alg.MqlToAlgConverter; import org.polypheny.db.mql.mql.MqlTest; @@ -29,7 +28,6 @@ import org.polypheny.db.plan.Contexts; import org.polypheny.db.rex.RexBuilder; import org.polypheny.db.test.MockRelOptPlanner; -import org.polypheny.db.type.PolyTypeFactoryImpl; public abstract class Mql2AlgTest extends MqlTest { @@ -42,8 +40,8 @@ public abstract class Mql2AlgTest extends MqlTest { static { - factory = new PolyTypeFactoryImpl( AlgDataTypeSystem.DEFAULT ); - cluster = AlgOptCluster.create( new MockRelOptPlanner( Contexts.empty() ), new RexBuilder( factory ), traitSet, rootSchema ); + factory = AlgDataTypeFactory.DEFAULT; + cluster = AlgOptCluster.create( new MockRelOptPlanner( Contexts.empty() ), new RexBuilder( factory ), null, null ); reader = new MockCatalogReaderDocument( factory, false ); reader.init(); MQL_TO_ALG_CONVERTER = new MqlToAlgConverter( null, reader, cluster ); diff --git a/plugins/pig-language/src/main/java/org/polypheny/db/tools/PigAlgBuilder.java b/plugins/pig-language/src/main/java/org/polypheny/db/tools/PigAlgBuilder.java index 788ad7bfd7..701b53967a 100644 --- a/plugins/pig-language/src/main/java/org/polypheny/db/tools/PigAlgBuilder.java +++ b/plugins/pig-language/src/main/java/org/polypheny/db/tools/PigAlgBuilder.java @@ -43,13 +43,13 @@ import org.polypheny.db.algebra.core.relational.RelScan; import org.polypheny.db.algebra.operators.OperatorName; import org.polypheny.db.algebra.type.AlgDataType; +import org.polypheny.db.catalog.Snapshot; import org.polypheny.db.languages.OperatorRegistry; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.Context; import org.polypheny.db.plan.Contexts; import org.polypheny.db.rex.RexBuilder; import org.polypheny.db.rex.RexNode; -import org.polypheny.db.schema.PolyphenyDbSchema; import org.polypheny.db.transaction.Statement; import org.polypheny.db.util.Util; @@ -62,8 +62,8 @@ public class PigAlgBuilder extends AlgBuilder { private String lastAlias; - private PigAlgBuilder( Context context, AlgOptCluster cluster, PolyphenyDbSchema schema ) { - super( context, cluster, schema ); + private PigAlgBuilder( Context context, AlgOptCluster cluster, Snapshot snapshot ) { + super( context, cluster, snapshot ); } @@ -72,7 +72,7 @@ private PigAlgBuilder( Context context, AlgOptCluster cluster, PolyphenyDbSchema */ public static PigAlgBuilder create( FrameworkConfig config ) { final AlgBuilder algBuilder = AlgBuilder.create( config ); - return new PigAlgBuilder( 
config.getContext(), algBuilder.cluster, algBuilder.schema ); + return new PigAlgBuilder( config.getContext(), algBuilder.cluster, algBuilder.snapshot ); } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/IdBuilder.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/IdBuilder.java index 5e4f764d7b..8cc417b51d 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/IdBuilder.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/IdBuilder.java @@ -20,24 +20,27 @@ public class IdBuilder { + private final AtomicLong snapshotId; private final AtomicLong databaseId; - public final AtomicLong namespaceId; - public final AtomicLong entityId; - public final AtomicLong fieldId; + private final AtomicLong namespaceId; + private final AtomicLong entityId; + private final AtomicLong fieldId; - public final AtomicLong userId; + private final AtomicLong userId; - public final AtomicLong verticalId; + private final AtomicLong verticalId; - public final AtomicLong horizontalId; + private final AtomicLong horizontalId; public IdBuilder() { - this( new AtomicLong( 0 ), new AtomicLong( 0 ), new AtomicLong( 0 ), new AtomicLong( 0 ), new AtomicLong( 0 ), new AtomicLong( 0 ), new AtomicLong( 0 ) ); + this( new AtomicLong( 0 ), new AtomicLong( 0 ), new AtomicLong( 0 ), new AtomicLong( 0 ), new AtomicLong( 0 ), new AtomicLong( 0 ), new AtomicLong( 0 ), new AtomicLong( 0 ) ); } - public IdBuilder( AtomicLong databaseId, AtomicLong namespaceId, AtomicLong entityId, AtomicLong fieldId, AtomicLong userId, AtomicLong verticalId, AtomicLong horizontalId ) { + public IdBuilder( AtomicLong snapshotId, AtomicLong databaseId, AtomicLong namespaceId, AtomicLong entityId, AtomicLong fieldId, AtomicLong userId, AtomicLong verticalId, AtomicLong horizontalId ) { + this.snapshotId = snapshotId; + this.databaseId = databaseId; this.namespaceId = namespaceId; this.entityId = entityId; @@ -49,6 +52,11 @@ public IdBuilder( AtomicLong databaseId, AtomicLong namespaceId, AtomicLong enti } + public long getNewSnapshotId() { + return snapshotId.getAndIncrement(); + } + + public long getNewEntityId() { return entityId.getAndIncrement(); } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java index d04ad2430e..df93627dd4 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java @@ -19,28 +19,18 @@ import io.activej.serializer.BinarySerializer; import io.activej.serializer.annotations.Deserialize; import io.activej.serializer.annotations.Serialize; -import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import lombok.Getter; import lombok.NonNull; import lombok.extern.slf4j.Slf4j; -import org.polypheny.db.algebra.constant.FunctionCategory; -import org.polypheny.db.algebra.constant.Syntax; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.catalog.entities.CatalogUser; -import org.polypheny.db.catalog.entity.logical.LogicalGraph; -import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.logical.document.DocumentCatalog; import org.polypheny.db.catalog.logical.graph.GraphCatalog; import org.polypheny.db.catalog.logical.relational.RelationalCatalog; import org.polypheny.db.catalog.logistic.NamespaceType; import 
org.polypheny.db.catalog.snapshot.FullSnapshot; -import org.polypheny.db.nodes.Identifier; -import org.polypheny.db.nodes.Operator; -import org.polypheny.db.plan.AlgOptEntity; -import org.polypheny.db.prepare.Prepare.CatalogReader; -import org.polypheny.db.util.Moniker; /** @@ -51,7 +41,7 @@ * Field -> Column (Relational), does not exist (Graph), Field (Document) */ @Slf4j -public class PolyCatalog implements Serializable, CatalogReader { +public class PolyCatalog implements Serializable { @Getter public final BinarySerializer serializer = Serializable.builder.get().build( PolyCatalog.class ); @@ -82,7 +72,7 @@ public PolyCatalog( private void updateSnapshot() { - this.fullSnapshot = new FullSnapshot( catalogs ); + this.fullSnapshot = new FullSnapshot( idBuilder.getNewSnapshotId(), catalogs ); } @@ -137,72 +127,12 @@ public long addTable( String name, long namespaceId ) { public long addColumn( String name, long namespaceId, long entityId, AlgDataType type ) { long id = idBuilder.getNewFieldId(); - catalogs.get( namespaceId ).asRelational().addColumn( id, name, entityId ); + catalogs.get( namespaceId ).asRelational().addColumn( id, name, entityId, type ); return id; } - @Override - public void lookupOperatorOverloads( Identifier opName, FunctionCategory category, Syntax syntax, List operatorList ) { - - } - - - @Override - public List getOperatorList() { - return null; - } - - - @Override - public AlgDataType getNamedType( Identifier typeName ) { - return null; - } - - - @Override - public List getAllSchemaObjectNames( List names ) { - return null; - } - - - @Override - public AlgDataType createTypeFromProjection( AlgDataType type, List columnNameList ) { - return null; - } - - - @Override - public Snapshot getSnapshot() { - return null; - } - - - @Override - public LogicalTable getTableForMember( List names ) { - return null; - } - - - @Override - public LogicalTable getTable( List names ) { - return null; - } - - - @Override - public AlgOptEntity getCollection( List names ) { - return null; - } - - - @Override - public LogicalGraph getGraph( String name ) { - return null; - } - - @Override public PolyCatalog copy() { return deserialize( serialize(), PolyCatalog.class ); diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/relational/CatalogColumn.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/relational/CatalogColumn.java deleted file mode 100644 index 2f418049b5..0000000000 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/relational/CatalogColumn.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Copyright 2019-2023 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.polypheny.db.catalog.logical.relational; - -import io.activej.serializer.annotations.Deserialize; -import io.activej.serializer.annotations.Serialize; -import lombok.Value; - -@Value -public class CatalogColumn { - - @Serialize - public long id; - - @Serialize - public String name; - @Serialize - public long tableId; - - - public CatalogColumn( - @Deserialize("id") long id, - @Deserialize("name") String name, - @Deserialize("tableId") long tableId ) { - this.id = id; - this.name = name; - this.tableId = tableId; - - } - -} diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/relational/CatalogTable.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/relational/CatalogTable.java deleted file mode 100644 index 3d69e3a8b0..0000000000 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/relational/CatalogTable.java +++ /dev/null @@ -1,78 +0,0 @@ -/* - * Copyright 2019-2023 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.catalog.logical.relational; - -import com.google.common.collect.ImmutableMap; -import io.activej.serializer.annotations.Deserialize; -import io.activej.serializer.annotations.Serialize; -import java.util.HashMap; -import java.util.Map; -import lombok.Value; -import lombok.With; - -@Value -public class CatalogTable { - - @Serialize - @With - public long id; - - @Serialize - @With - public String name; - - @Serialize - @With - public long namespaceId; - - - @Serialize - @With - public ImmutableMap columns; - - - public CatalogTable( long id, String name, long namespaceId ) { - this( id, name, namespaceId, new HashMap<>() ); - } - - - public CatalogTable( - @Deserialize("id") long id, - @Deserialize("name") String name, - @Deserialize("namespaceId") long namespaceId, - @Deserialize("columns") Map columns ) { - this.id = id; - this.name = name; - this.namespaceId = namespaceId; - this.columns = ImmutableMap.copyOf( columns ); - } - - - public CatalogTable withAddedColumn( long id, String name ) { - Map columns = new HashMap<>( this.columns ); - columns.put( id, new CatalogColumn( id, name, this.id ) ); - return withColumns( ImmutableMap.copyOf( columns ) ); - } - - - public CatalogTable withDeletedColumn( long id ) { - Map columns = new HashMap<>( this.columns ); - columns.remove( id ); - return withColumns( ImmutableMap.copyOf( columns ) ); - } - -} diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/relational/RelationalCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/relational/RelationalCatalog.java index 5e51221d61..b629b3fe45 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/relational/RelationalCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/relational/RelationalCatalog.java @@ -24,9 +24,10 @@ import lombok.Getter; import lombok.Value; import lombok.experimental.NonFinal; -import 
org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.catalog.NCatalog; import org.polypheny.db.catalog.Serializable; +import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.catalog.logistic.NamespaceType; @Value public class RelationalCatalog implements NCatalog, Serializable { @@ -35,7 +36,7 @@ public class RelationalCatalog implements NCatalog, Serializable { public BinarySerializer serializer = Serializable.builder.get().build( RelationalCatalog.class ); @Serialize - public Map tables; + public Map tables; @Serialize public long id; @@ -50,7 +51,7 @@ public class RelationalCatalog implements NCatalog, Serializable { public RelationalCatalog( @Deserialize("id") long id, @Deserialize("name") String name, - @Deserialize("tables") Map tables ) { + @Deserialize("tables") Map tables ) { this.id = id; this.name = name; @@ -94,24 +95,6 @@ public NamespaceType getType() { } - public void addTable( long id, String name ) { - tables.put( id, new CatalogTable( id, name, this.id ) ); - change(); - } - - - public void addColumn( long id, String name, long entityId ) { - tables.put( entityId, tables.get( entityId ).withAddedColumn( id, name ) ); - change(); - } - - - public void deleteColumn( long id, long entityId ) { - tables.put( entityId, tables.get( id ).withDeletedColumn( id ) ); - change(); - } - - @Override public RelationalCatalog copy() { return deserialize( serialize(), RelationalCatalog.class ); diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/FullSnapshot.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/FullSnapshot.java index ac6c2149bb..f3c01dbd19 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/FullSnapshot.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/FullSnapshot.java @@ -16,16 +16,208 @@ package org.polypheny.db.catalog.snapshot; +import java.util.List; import java.util.Map; +import lombok.Getter; import org.polypheny.db.catalog.NCatalog; import org.polypheny.db.catalog.Snapshot; +import org.polypheny.db.catalog.entity.CatalogEntity; +import org.polypheny.db.catalog.entity.CatalogNamespace; +import org.polypheny.db.catalog.entity.allocation.AllocationCollection; +import org.polypheny.db.catalog.entity.allocation.AllocationGraph; +import org.polypheny.db.catalog.entity.allocation.AllocationTable; +import org.polypheny.db.catalog.entity.logical.LogicalCollection; +import org.polypheny.db.catalog.entity.logical.LogicalGraph; +import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.catalog.entity.physical.PhysicalCollection; +import org.polypheny.db.catalog.entity.physical.PhysicalGraph; +import org.polypheny.db.catalog.entity.physical.PhysicalTable; +import org.polypheny.db.catalog.logistic.Pattern; public class FullSnapshot implements Snapshot { + @Getter + private final long id; - public FullSnapshot( Map catalogs ) { + public FullSnapshot( long id, Map catalogs ) { + this.id = id; + + + } + + + @Override + public CatalogNamespace getNamespace( long id ) { + return null; + } + + + @Override + public CatalogNamespace getNamespace( String name ) { + return null; + } + + + @Override + public List getNamespaces( Pattern name ) { + return null; + } + + + @Override + public CatalogEntity getEntity( long id ) { + return null; + } + + + @Override + public CatalogEntity getEntity( long namespaceId, String name ) { + return null; + } + + + @Override + public CatalogEntity getEntity( long 
namespaceId, Pattern name ) { + return null; + } + + + @Override + public CatalogEntity getEntity( List names ) { + return null; + } + + + @Override + public LogicalTable getLogicalTable( List names ) { + return null; + } + + + @Override + public LogicalCollection getLogicalCollection( List names ) { + return null; + } + + + @Override + public LogicalGraph getLogicalGraph( List names ) { + return null; + } + + + @Override + public LogicalTable getLogicalTable( long id ) { + return null; + } + + + @Override + public LogicalTable getLogicalTable( long namespaceId, String name ) { + return null; + } + + + @Override + public List getLogicalTables( long namespaceId, Pattern name ) { + return null; + } + + + @Override + public LogicalCollection getLogicalCollection( long id ) { + return null; } + @Override + public LogicalCollection getLogicalCollection( long namespaceId, String name ) { + return null; + } + + + @Override + public List getLogicalCollections( long namespaceId, Pattern name ) { + return null; + } + + + @Override + public LogicalGraph getLogicalGraph( long id ) { + return null; + } + + + @Override + public LogicalGraph getLogicalGraph( long namespaceId, String name ) { + return null; + } + + + @Override + public List getLogicalGraphs( long namespaceId, Pattern name ) { + return null; + } + + + @Override + public AllocationTable getAllocTable( long id ) { + return null; + } + + + @Override + public AllocationCollection getAllocCollection( long id ) { + return null; + } + + + @Override + public AllocationGraph getAllocGraph( long id ) { + return null; + } + + + @Override + public PhysicalTable getPhysicalTable( long id ) { + return null; + } + + + @Override + public PhysicalTable getPhysicalTable( long logicalId, long adapterId ) { + return null; + } + + + @Override + public PhysicalCollection getPhysicalCollection( long id ) { + return null; + } + + + @Override + public PhysicalCollection getPhysicalCollection( long logicalId, long adapterId ) { + return null; + } + + + @Override + public PhysicalGraph getPhysicalGraph( long id ) { + return null; + } + + + @Override + public PhysicalGraph getPhysicalGraph( long logicalId, long adapterId ) { + return null; + } + + + @Override + public boolean isPartitioned( long id ) { + return false; + } + } diff --git a/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/RequestColumn.java b/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/RequestColumn.java index 844584e156..1da84e6c19 100644 --- a/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/RequestColumn.java +++ b/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/RequestColumn.java @@ -19,12 +19,12 @@ import java.util.Objects; import org.polypheny.db.algebra.fun.AggFunction; -import org.polypheny.db.catalog.entity.CatalogColumn; +import org.polypheny.db.catalog.entity.logical.LogicalColumn; public class RequestColumn { - private final CatalogColumn column; + private final LogicalColumn column; private final int tableScanIndex; private int logicalIndex; private final String fullyQualifiedName; @@ -33,7 +33,7 @@ public class RequestColumn { private final boolean explicit; - RequestColumn( CatalogColumn column, int tableScanIndex, int logicalIndex, String alias, AggFunction aggregate, boolean explicit ) { + RequestColumn( LogicalColumn column, int tableScanIndex, int logicalIndex, String alias, AggFunction aggregate, boolean explicit ) { this.column = Objects.requireNonNull( column ); this.tableScanIndex = tableScanIndex; this.logicalIndex = 
logicalIndex; @@ -48,7 +48,7 @@ public class RequestColumn { } - RequestColumn( CatalogColumn column, int tableScanIndex, int logicalIndex, String alias, AggFunction aggregate ) { + RequestColumn( LogicalColumn column, int tableScanIndex, int logicalIndex, String alias, AggFunction aggregate ) { this( column, tableScanIndex, logicalIndex, alias, aggregate, true ); } @@ -83,7 +83,7 @@ public String getAlias() { } - public CatalogColumn getColumn() { + public LogicalColumn getColumn() { return column; } diff --git a/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/RequestParser.java b/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/RequestParser.java index 4c061934e0..da231b1002 100644 --- a/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/RequestParser.java +++ b/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/RequestParser.java @@ -42,9 +42,9 @@ import org.polypheny.db.algebra.fun.AggFunction; import org.polypheny.db.algebra.operators.OperatorName; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.entity.CatalogColumn; -import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.entity.CatalogUser; +import org.polypheny.db.catalog.entity.logical.LogicalColumn; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.exceptions.UnknownColumnException; import org.polypheny.db.catalog.exceptions.UnknownDatabaseException; import org.polypheny.db.catalog.exceptions.UnknownSchemaException; @@ -259,7 +259,7 @@ LogicalTable parseCatalogTableName( String tableName ) throws ParserException { } try { - LogicalTable table = this.catalog.getTable( this.databaseName, tableElements[0], tableElements[1] ); + LogicalTable table = this.catalog.getTable( tableElements[0], tableElements[1] ); if ( log.isDebugEnabled() ) { log.debug( "Finished parsing table \"{}\".", tableName ); } @@ -309,7 +309,7 @@ List generateRequestColumnsWithoutProject( List tab long internalPosition = 0L; for ( LogicalTable table : tables ) { for ( long columnId : table.fieldIds ) { - CatalogColumn column = this.catalog.getColumn( columnId ); + LogicalColumn column = this.catalog.getColumn( columnId ); int calculatedPosition = tableOffsets.get( table.id ) + column.position - 1; RequestColumn requestColumn = new RequestColumn( column, calculatedPosition, calculatedPosition, null, null, true ); columns.add( requestColumn ); @@ -332,24 +332,24 @@ List generateRequestColumnsWithProject( String projectionString, Matcher matcher = PROJECTION_ENTRY_PATTERN.matcher( projectionToParse ); if ( matcher.find() ) { String columnName = matcher.group( "column" ); - CatalogColumn catalogColumn; + LogicalColumn logicalColumn; try { - catalogColumn = this.getCatalogColumnFromString( columnName ); + logicalColumn = this.getCatalogColumnFromString( columnName ); log.debug( "Fetched catalog column for projection key: {}.", columnName ); } catch ( UnknownColumnException | UnknownDatabaseException | UnknownSchemaException | UnknownTableException e ) { log.warn( "Unable to fetch column: {}.", columnName, e ); throw new ParserException( ParserErrorCode.PROJECTION_MALFORMED, columnName ); } - if ( !validColumns.contains( catalogColumn.id ) ) { + if ( !validColumns.contains( logicalColumn.id ) ) { log.warn( "Column isn't valid. 
Column: {}.", columnName ); throw new ParserException( ParserErrorCode.PROJECTION_INVALID_COLUMN, columnName ); } - projectedColumns.add( catalogColumn.id ); - int calculatedPosition = tableOffsets.get( catalogColumn.tableId ) + catalogColumn.position - 1; - RequestColumn requestColumn = new RequestColumn( catalogColumn, calculatedPosition, internalPosition, matcher.group( "alias" ), this.decodeAggregateFunction( matcher.group( "agg" ) ) ); + projectedColumns.add( logicalColumn.id ); + int calculatedPosition = tableOffsets.get( logicalColumn.tableId ) + logicalColumn.position - 1; + RequestColumn requestColumn = new RequestColumn( logicalColumn, calculatedPosition, internalPosition, matcher.group( "alias" ), this.decodeAggregateFunction( matcher.group( "agg" ) ) ); internalPosition++; columns.add( requestColumn ); @@ -362,7 +362,7 @@ List generateRequestColumnsWithProject( String projectionString, Set notYetAdded = new HashSet<>( validColumns ); notYetAdded.removeAll( projectedColumns ); for ( long columnId : notYetAdded ) { - CatalogColumn column = this.catalog.getColumn( columnId ); + LogicalColumn column = this.catalog.getColumn( columnId ); int calculatedPosition = tableOffsets.get( column.tableId ) + column.position - 1; RequestColumn requestColumn = new RequestColumn( column, calculatedPosition, calculatedPosition, null, null, false ); columns.add( requestColumn ); @@ -412,14 +412,14 @@ AggFunction decodeAggregateFunction( String function ) { } - private CatalogColumn getCatalogColumnFromString( String name ) throws ParserException, UnknownColumnException, UnknownDatabaseException, UnknownSchemaException, UnknownTableException { + private LogicalColumn getCatalogColumnFromString( String name ) throws ParserException, UnknownColumnException, UnknownDatabaseException, UnknownSchemaException, UnknownTableException { String[] splitString = name.split( "\\." ); if ( splitString.length != 3 ) { log.warn( "Column name is not 3 fields long. Got: {}", name ); throw new ParserException( ParserErrorCode.PROJECTION_MALFORMED, name ); } - return this.catalog.getColumn( this.databaseName, splitString[0], splitString[1], splitString[2] ); + return this.catalog.getColumn( splitString[0], splitString[1], splitString[2] ); } @@ -744,10 +744,10 @@ private List> parseInsertStatementValues( Map rowVal } - public Map generateNameMapping( List tables ) { - Map nameMapping = new HashMap<>(); + public Map generateNameMapping( List tables ) { + Map nameMapping = new HashMap<>(); for ( LogicalTable table : tables ) { - for ( CatalogColumn column : this.catalog.getColumns( table.id ) ) { + for ( LogicalColumn column : this.catalog.getColumns( table.id ) ) { nameMapping.put( column.getSchemaName() + "." + column.getTableName() + "." 
+ column.name, column ); } } diff --git a/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/exception/IllegalColumnException.java b/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/exception/IllegalColumnException.java index 694611e823..4298218264 100644 --- a/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/exception/IllegalColumnException.java +++ b/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/exception/IllegalColumnException.java @@ -18,18 +18,18 @@ import lombok.Getter; -import org.polypheny.db.catalog.entity.CatalogColumn; +import org.polypheny.db.catalog.entity.logical.LogicalColumn; public class IllegalColumnException extends RuntimeException { @Getter - private final CatalogColumn catalogColumn; + private final LogicalColumn logicalColumn; - public IllegalColumnException( CatalogColumn catalogColumn ) { - super( "Column ID '" + catalogColumn.id + "' cannot be used as it is not part of any of the queried tables." ); - this.catalogColumn = catalogColumn; + public IllegalColumnException( LogicalColumn logicalColumn ) { + super( "Column ID '" + logicalColumn.id + "' cannot be used as it is not part of any of the queried tables." ); + this.logicalColumn = logicalColumn; } } diff --git a/plugins/rest-interface/src/test/java/org/polypheny/db/restapi/RequestParserTest.java b/plugins/rest-interface/src/test/java/org/polypheny/db/restapi/RequestParserTest.java index eecc063b52..42571f5c89 100644 --- a/plugins/rest-interface/src/test/java/org/polypheny/db/restapi/RequestParserTest.java +++ b/plugins/rest-interface/src/test/java/org/polypheny/db/restapi/RequestParserTest.java @@ -65,7 +65,7 @@ public void testBasicAuthorizationDecodingGarbageHeader() { @Test public void testParseCatalogTableName() throws UnknownTableException, UnknownSchemaException, UnknownDatabaseException { Catalog mockedCatalog = mock( Catalog.class ); - when( mockedCatalog.getTable( "testdb", "schema1", "table1" ) ).thenReturn( null ); + when( mockedCatalog.getTable( "schema1", "table1" ) ).thenReturn( null ); RequestParser requestParser = new RequestParser( mockedCatalog, null, @@ -73,7 +73,7 @@ public void testParseCatalogTableName() throws UnknownTableException, UnknownSch "username", "testdb" ); LogicalTable table = requestParser.parseCatalogTableName( "schema1.table1." 
); - verify( mockedCatalog ).getTable( "testdb", "schema1", "table1" ); + verify( mockedCatalog ).getTable( "schema1", "table1" ); } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/SqlProcessorImpl.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/SqlProcessorImpl.java index 61b0c7b090..7c7e642cab 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/SqlProcessorImpl.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/SqlProcessorImpl.java @@ -38,10 +38,9 @@ import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.Snapshot; -import org.polypheny.db.catalog.entity.CatalogColumn; import org.polypheny.db.catalog.entity.CatalogDefaultValue; +import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalTable; -import org.polypheny.db.catalog.exceptions.UnknownDatabaseException; import org.polypheny.db.catalog.exceptions.UnknownSchemaException; import org.polypheny.db.catalog.exceptions.UnknownTableException; import org.polypheny.db.catalog.logistic.NamespaceType; @@ -56,7 +55,6 @@ import org.polypheny.db.nodes.Node; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgOptUtil; -import org.polypheny.db.prepare.PolyphenyDbCatalogReader; import org.polypheny.db.processing.Processor; import org.polypheny.db.rex.RexBuilder; import org.polypheny.db.runtime.PolyphenyDbException; @@ -266,8 +264,8 @@ private void addDefaultValues( Transaction transaction, SqlInsert insert ) { SqlNode[][] newValues = new SqlNode[((SqlBasicCall) insert.getSource()).getOperands().length][size]; int pos = 0; - List<CatalogColumn> columns = Catalog.getInstance().getColumns( catalogTable.id ); - for ( CatalogColumn column : columns ) { + List<LogicalColumn> columns = Catalog.getInstance().getColumns( catalogTable.id ); + for ( LogicalColumn column : columns ) { // Add column newColumnList.add( new SqlIdentifier( column.name, ParserPos.ZERO ) ); @@ -368,7 +366,7 @@ private LogicalTable getCatalogTable( Transaction transaction, SqlIdentifier tab long schemaId; String tableOldName; if ( tableName.names.size() == 3 ) { // DatabaseName.SchemaName.TableName - schemaId = Catalog.getInstance().getSchema( tableName.names.get( 0 ), tableName.names.get( 1 ) ).id; + schemaId = Catalog.getInstance().getSchema( tableName.names.get( 1 ) ).id; tableOldName = tableName.names.get( 2 ); } else if ( tableName.names.size() == 2 ) { // SchemaName.TableName schemaId = Catalog.getInstance().getSchema( transaction.getDefaultSchema().databaseId, tableName.names.get( 0 ) ).id; @@ -378,8 +376,6 @@ private LogicalTable getCatalogTable( Transaction transaction, SqlIdentifier tab tableOldName = tableName.names.get( 0 ); } catalogTable = Catalog.getInstance().getTable( schemaId, tableOldName ); - } catch ( UnknownDatabaseException e ) { - throw CoreUtil.newContextException( tableName.getPos(), RESOURCE.databaseNotFound( tableName.toString() ) ); } catch ( UnknownSchemaException e ) { throw CoreUtil.newContextException( tableName.getPos(), RESOURCE.schemaNotFound( tableName.toString() ) ); } catch ( UnknownTableException e ) { diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlDdl.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlDdl.java index 6852e8040f..1fd64cfdba 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlDdl.java +++
b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlDdl.java @@ -26,10 +26,9 @@ import org.polypheny.db.adapter.DataStore; import org.polypheny.db.algebra.constant.Kind; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.entity.CatalogColumn; +import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.exceptions.UnknownColumnException; -import org.polypheny.db.catalog.exceptions.UnknownDatabaseException; import org.polypheny.db.catalog.exceptions.UnknownSchemaException; import org.polypheny.db.catalog.exceptions.UnknownTableException; import org.polypheny.db.languages.ParserPos; @@ -73,7 +72,7 @@ protected LogicalTable getCatalogTable( Context context, SqlIdentifier tableName String tableOldName; Catalog catalog = Catalog.getInstance(); if ( tableName.names.size() == 3 ) { // DatabaseName.SchemaName.TableName - schemaId = catalog.getSchema( tableName.names.get( 0 ), tableName.names.get( 1 ) ).id; + schemaId = catalog.getSchema( tableName.names.get( 1 ) ).id; tableOldName = tableName.names.get( 2 ); } else if ( tableName.names.size() == 2 ) { // SchemaName.TableName schemaId = catalog.getSchema( context.getDatabaseId(), tableName.names.get( 0 ) ).id; @@ -83,8 +82,6 @@ protected LogicalTable getCatalogTable( Context context, SqlIdentifier tableName tableOldName = tableName.names.get( 0 ); } catalogTable = catalog.getTable( schemaId, tableOldName ); - } catch ( UnknownDatabaseException e ) { - throw CoreUtil.newContextException( tableName.getPos(), RESOURCE.databaseNotFound( tableName.toString() ) ); } catch ( UnknownSchemaException e ) { throw CoreUtil.newContextException( tableName.getPos(), RESOURCE.schemaNotFound( tableName.toString() ) ); } catch ( UnknownTableException e ) { @@ -94,14 +91,14 @@ protected LogicalTable getCatalogTable( Context context, SqlIdentifier tableName } - protected CatalogColumn getCatalogColumn( long tableId, SqlIdentifier columnName ) { - CatalogColumn catalogColumn; + protected LogicalColumn getCatalogColumn( long tableId, SqlIdentifier columnName ) { + LogicalColumn logicalColumn; try { - catalogColumn = Catalog.getInstance().getColumn( tableId, columnName.getSimple() ); + logicalColumn = Catalog.getInstance().getColumn( tableId, columnName.getSimple() ); } catch ( UnknownColumnException e ) { throw CoreUtil.newContextException( columnName.getPos(), RESOURCE.columnNotFoundInTable( columnName.getSimple(), tableId + "" ) ); } - return catalogColumn; + return logicalColumn; } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateMaterializedView.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateMaterializedView.java index 91edaba84c..30f759eb45 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateMaterializedView.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateMaterializedView.java @@ -29,15 +29,14 @@ import org.polypheny.db.algebra.AlgRoot; import org.polypheny.db.algebra.constant.Kind; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.logistic.PlacementType; import org.polypheny.db.catalog.entity.MaterializedCriteria; import org.polypheny.db.catalog.entity.MaterializedCriteria.CriteriaType; import org.polypheny.db.catalog.exceptions.ColumnAlreadyExistsException; import org.polypheny.db.catalog.exceptions.EntityAlreadyExistsException; import 
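
SqlProcessorImpl.getCatalogTable and SqlDdl.getCatalogTable share the same resolution ladder for compound identifiers; after this change a three-part name simply ignores its first (database) segment. Condensed from the hunks above, with the elided branches filled in the obvious way (treat this as a sketch, not verbatim code):

    long schemaId;
    String tableOldName;
    if ( tableName.names.size() == 3 ) {        // DatabaseName.SchemaName.TableName
        schemaId = catalog.getSchema( tableName.names.get( 1 ) ).id; // segment 0 is ignored now
        tableOldName = tableName.names.get( 2 );
    } else if ( tableName.names.size() == 2 ) { // SchemaName.TableName
        schemaId = catalog.getSchema( context.getDatabaseId(), tableName.names.get( 0 ) ).id;
        tableOldName = tableName.names.get( 1 );
    } else {                                    // TableName only -> default schema
        schemaId = catalog.getSchema( context.getDatabaseId(), context.getDefaultSchemaName() ).id;
        tableOldName = tableName.names.get( 0 );
    }
    LogicalTable catalogTable = catalog.getTable( schemaId, tableOldName );
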
org.polypheny.db.catalog.exceptions.GenericCatalogException; import org.polypheny.db.catalog.exceptions.UnknownColumnException; -import org.polypheny.db.catalog.exceptions.UnknownDatabaseException; import org.polypheny.db.catalog.exceptions.UnknownSchemaException; +import org.polypheny.db.catalog.logistic.PlacementType; import org.polypheny.db.config.RuntimeConfig; import org.polypheny.db.ddl.DdlManager; import org.polypheny.db.ddl.exception.ColumnNotExistsException; @@ -123,7 +122,7 @@ public void execute( Context context, Statement statement, QueryParameters param try { if ( name.names.size() == 3 ) { // DatabaseName.SchemaName.TableName - schemaId = catalog.getSchema( name.names.get( 0 ), name.names.get( 1 ) ).id; + schemaId = catalog.getSchema( name.names.get( 1 ) ).id; viewName = name.names.get( 2 ); } else if ( name.names.size() == 2 ) { // SchemaName.TableName schemaId = catalog.getSchema( context.getDatabaseId(), name.names.get( 0 ) ).id; @@ -132,8 +131,6 @@ public void execute( Context context, Statement statement, QueryParameters param schemaId = catalog.getSchema( context.getDatabaseId(), context.getDefaultSchemaName() ).id; viewName = name.names.get( 0 ); } - } catch ( UnknownDatabaseException e ) { - throw CoreUtil.newContextException( name.getPos(), RESOURCE.databaseNotFound( name.toString() ) ); } catch ( UnknownSchemaException e ) { throw CoreUtil.newContextException( name.getPos(), RESOURCE.schemaNotFound( name.toString() ) ); } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateTable.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateTable.java index 6a98571eff..b72ba0fc64 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateTable.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateTable.java @@ -29,7 +29,6 @@ import org.polypheny.db.adapter.DataStore; import org.polypheny.db.algebra.constant.Kind; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.logistic.PlacementType; import org.polypheny.db.catalog.exceptions.EntityAlreadyExistsException; import org.polypheny.db.catalog.exceptions.GenericCatalogException; import org.polypheny.db.catalog.exceptions.UnknownColumnException; @@ -39,6 +38,7 @@ import org.polypheny.db.catalog.exceptions.UnknownSchemaException; import org.polypheny.db.catalog.exceptions.UnknownTableException; import org.polypheny.db.catalog.exceptions.UnknownUserException; +import org.polypheny.db.catalog.logistic.PlacementType; import org.polypheny.db.ddl.DdlManager; import org.polypheny.db.ddl.DdlManager.ColumnTypeInformation; import org.polypheny.db.ddl.DdlManager.ConstraintInformation; @@ -211,7 +211,7 @@ public void execute( Context context, Statement statement, QueryParameters param try { // Cannot use getLogicalTable() here since table does not yet exist if ( name.names.size() == 3 ) { // DatabaseName.SchemaName.TableName - schemaId = catalog.getSchema( name.names.get( 0 ), name.names.get( 1 ) ).id; + schemaId = catalog.getSchema( name.names.get( 1 ) ).id; tableName = name.names.get( 2 ); } else if ( name.names.size() == 2 ) { // SchemaName.TableName schemaId = catalog.getSchema( context.getDatabaseId(), name.names.get( 0 ) ).id; @@ -220,8 +220,6 @@ public void execute( Context context, Statement statement, QueryParameters param schemaId = catalog.getSchema( context.getDatabaseId(), context.getDefaultSchemaName() ).id; tableName = name.names.get( 0 ); } - } catch ( 
UnknownDatabaseException e ) { - throw CoreUtil.newContextException( name.getPos(), RESOURCE.databaseNotFound( name.toString() ) ); } catch ( UnknownSchemaException e ) { throw CoreUtil.newContextException( name.getPos(), RESOURCE.schemaNotFound( name.toString() ) ); } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateView.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateView.java index 4b4410746d..b666b2ed78 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateView.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateView.java @@ -29,12 +29,11 @@ import org.polypheny.db.algebra.AlgRoot; import org.polypheny.db.algebra.constant.Kind; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.logistic.PlacementType; import org.polypheny.db.catalog.exceptions.EntityAlreadyExistsException; import org.polypheny.db.catalog.exceptions.GenericCatalogException; import org.polypheny.db.catalog.exceptions.UnknownColumnException; -import org.polypheny.db.catalog.exceptions.UnknownDatabaseException; import org.polypheny.db.catalog.exceptions.UnknownSchemaException; +import org.polypheny.db.catalog.logistic.PlacementType; import org.polypheny.db.config.RuntimeConfig; import org.polypheny.db.ddl.DdlManager; import org.polypheny.db.languages.ParserPos; @@ -106,7 +105,7 @@ public void execute( Context context, Statement statement, QueryParameters param try { if ( name.names.size() == 3 ) { // DatabaseName.SchemaName.TableName - schemaId = catalog.getSchema( name.names.get( 0 ), name.names.get( 1 ) ).id; + schemaId = catalog.getSchema( name.names.get( 1 ) ).id; viewName = name.names.get( 2 ); } else if ( name.names.size() == 2 ) { // SchemaName.TableName schemaId = catalog.getSchema( context.getDatabaseId(), name.names.get( 0 ) ).id; @@ -115,8 +114,6 @@ public void execute( Context context, Statement statement, QueryParameters param schemaId = catalog.getSchema( context.getDatabaseId(), context.getDefaultSchemaName() ).id; viewName = name.names.get( 0 ); } - } catch ( UnknownDatabaseException e ) { - throw CoreUtil.newContextException( name.getPos(), RESOURCE.databaseNotFound( name.toString() ) ); } catch ( UnknownSchemaException e ) { throw CoreUtil.newContextException( name.getPos(), RESOURCE.schemaNotFound( name.toString() ) ); } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddPlacement.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddPlacement.java index 8619003e09..0df524c664 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddPlacement.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddPlacement.java @@ -25,9 +25,9 @@ import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; import org.polypheny.db.adapter.DataStore; -import org.polypheny.db.catalog.logistic.EntityType; -import org.polypheny.db.catalog.entity.CatalogColumn; +import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.ddl.DdlManager; import org.polypheny.db.ddl.exception.PlacementAlreadyExistsException; import org.polypheny.db.languages.ParserPos; @@ -131,8 +131,8 @@ public void execute( Context 
context, Statement statement, QueryParameters param List<Long> columnIds = new LinkedList<>(); for ( SqlNode node : columnList.getSqlList() ) { - CatalogColumn catalogColumn = getCatalogColumn( catalogTable.id, (SqlIdentifier) node ); - columnIds.add( catalogColumn.id ); + LogicalColumn logicalColumn = getCatalogColumn( catalogTable.id, (SqlIdentifier) node ); + columnIds.add( logicalColumn.id ); } try { diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/web/SchemaToJsonMapper.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/web/SchemaToJsonMapper.java index 3f76942710..8d3c6a7ba5 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/web/SchemaToJsonMapper.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/web/SchemaToJsonMapper.java @@ -25,8 +25,8 @@ import lombok.Getter; import lombok.NonNull; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.entity.CatalogColumn; import org.polypheny.db.catalog.entity.CatalogKey; +import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.sql.language.SqlWriter; import org.polypheny.db.sql.language.dialect.PolyphenyDbSqlDialect; @@ -41,21 +41,21 @@ public class SchemaToJsonMapper { public static String exportTableDefinitionAsJson( @NonNull LogicalTable catalogTable, boolean exportPrimaryKey, boolean exportDefaultValues ) { List<JsonColumn> columns = new LinkedList<>(); - for ( CatalogColumn catalogColumn : Catalog.getInstance().getColumns( catalogTable.id ) ) { + for ( LogicalColumn logicalColumn : Catalog.getInstance().getColumns( catalogTable.id ) ) { String defaultValue = null; String defaultFunctionName = null; if ( exportDefaultValues ) { - if ( catalogColumn.defaultValue != null ) { - defaultValue = catalogColumn.defaultValue.value; - defaultFunctionName = catalogColumn.defaultValue.functionName; + if ( logicalColumn.defaultValue != null ) { + defaultValue = logicalColumn.defaultValue.value; + defaultFunctionName = logicalColumn.defaultValue.functionName; } } columns.add( new JsonColumn( - catalogColumn.name, - catalogColumn.type.name(), - catalogColumn.length, - catalogColumn.scale, - catalogColumn.nullable, + logicalColumn.name, + logicalColumn.type.name(), + logicalColumn.length, + logicalColumn.scale, + logicalColumn.nullable, defaultValue, defaultFunctionName ) ); } diff --git a/plugins/sql-language/src/test/java/org/polypheny/db/sql/map/NamespaceToJsonMapperTest.java b/plugins/sql-language/src/test/java/org/polypheny/db/sql/map/NamespaceToJsonMapperTest.java index 19d9089bac..1179e22426 100644 --- a/plugins/sql-language/src/test/java/org/polypheny/db/sql/map/NamespaceToJsonMapperTest.java +++ b/plugins/sql-language/src/test/java/org/polypheny/db/sql/map/NamespaceToJsonMapperTest.java @@ -25,16 +25,16 @@ import org.junit.Ignore; import org.junit.Test; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.logistic.EntityType; -import org.polypheny.db.catalog.logistic.NamespaceType; -import org.polypheny.db.catalog.entity.CatalogColumn; import org.polypheny.db.catalog.entity.CatalogDatabase; import org.polypheny.db.catalog.entity.CatalogDefaultValue; import org.polypheny.db.catalog.entity.CatalogKey; import org.polypheny.db.catalog.entity.CatalogKey.EnforcementTime; import org.polypheny.db.catalog.entity.CatalogSchema; -import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.entity.CatalogUser; +import
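
exportTableDefinitionAsJson above emits one JsonColumn per catalog column, now read from LogicalColumn. A hypothetical call and the rough shape of its output, using the location column from the test fixture below (the JSON field names are inferred from the constructor order, so treat them as approximate):

    // Sketch only: the table, the flags and the exact JSON layout are assumptions.
    String json = SchemaToJsonMapper.exportTableDefinitionAsJson( catalogTable, true, true );
    // one entry per column, roughly:
    // { "name": "location", "type": "VARCHAR", "length": 30, "scale": null,
    //   "nullable": true, "defaultValue": "Basel", "defaultFunctionName": null }
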
org.polypheny.db.catalog.entity.logical.LogicalColumn; +import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.catalog.logistic.EntityType; +import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.partition.properties.PartitionProperty; import org.polypheny.db.sql.SqlLanguageDependent; import org.polypheny.db.sql.web.SchemaToJsonMapper; @@ -66,9 +66,9 @@ public void exportTest() { ImmutableList.of() ); Catalog catalog = Catalog.getInstance(); Arrays.asList( - new CatalogColumn( 5, "sid", 4, 1, 1, 1, PolyType.INTEGER, null, null, null, null, null, false, null, null ), - new CatalogColumn( 6, "name", 4, 1, 1, 2, PolyType.VARCHAR, null, 50, null, null, null, false, null, null ), - new CatalogColumn( 7, "location", 4, 1, 1, 3, PolyType.VARCHAR, null, 30, null, null, null, true, null, new CatalogDefaultValue( 7, PolyType.VARCHAR, "Basel", null ) ) + new LogicalColumn( 5, "sid", 4, 1, 1, 1, PolyType.INTEGER, null, null, null, null, null, false, null, null ), + new LogicalColumn( 6, "name", 4, 1, 1, 2, PolyType.VARCHAR, null, 50, null, null, null, false, null, null ), + new LogicalColumn( 7, "location", 4, 1, 1, 3, PolyType.VARCHAR, null, 30, null, null, null, true, null, new CatalogDefaultValue( 7, PolyType.VARCHAR, "Basel", null ) ) ); diff --git a/settings.gradle b/settings.gradle index bbbb3aa437..c80a44a38e 100644 --- a/settings.gradle +++ b/settings.gradle @@ -34,16 +34,16 @@ include 'plugins:http-interface' // adapters plugins include 'plugins:hsqldb-adapter' -include 'plugins:neo4j-adapter' -include 'plugins:cottontail-adapter' +//include 'plugins:neo4j-adapter' +//include 'plugins:cottontail-adapter' //include 'plugins:ethereum-adapter' //include 'plugins:cassandra-adapter' include 'plugins:csv-adapter' -include 'plugins:mysql-adapter' -include 'plugins:postgres-adapter' -include 'plugins:monetdb-adapter' -include 'plugins:mongodb-adapter' -include 'plugins:file-adapter' +//include 'plugins:mysql-adapter' +//include 'plugins:postgres-adapter' +//include 'plugins:monetdb-adapter' +//include 'plugins:mongodb-adapter' +//include 'plugins:file-adapter' include 'plugins:google-sheet-adapter' include 'plugins:excel-adapter' @@ -51,8 +51,8 @@ include 'plugins:excel-adapter' include 'plugins:explore-by-example' // disabled adapter plugins -include 'plugins:pig-adapter' -include 'plugins:html-adapter' -include 'plugins:druid-adapter' -include 'plugins:elasticsearch-adapter' -include 'plugins:geode-adapter' +//include 'plugins:pig-adapter' +//include 'plugins:html-adapter' +//include 'plugins:druid-adapter' +//include 'plugins:elasticsearch-adapter' +//include 'plugins:geode-adapter' diff --git a/webui/src/main/java/org/polypheny/db/webui/Crud.java b/webui/src/main/java/org/polypheny/db/webui/Crud.java index 8943911508..fe7dc48e49 100644 --- a/webui/src/main/java/org/polypheny/db/webui/Crud.java +++ b/webui/src/main/java/org/polypheny/db/webui/Crud.java @@ -100,15 +100,7 @@ import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.logistic.ConstraintType; -import org.polypheny.db.catalog.logistic.EntityType; -import org.polypheny.db.catalog.logistic.ForeignKeyOption; -import org.polypheny.db.catalog.logistic.NamespaceType; -import org.polypheny.db.catalog.logistic.PartitionType; -import org.polypheny.db.catalog.logistic.PlacementType; -import org.polypheny.db.catalog.logistic.NameGenerator; import 
org.polypheny.db.catalog.entity.CatalogAdapter.AdapterType; -import org.polypheny.db.catalog.entity.CatalogColumn; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; import org.polypheny.db.catalog.entity.CatalogConstraint; import org.polypheny.db.catalog.entity.CatalogForeignKey; @@ -116,10 +108,11 @@ import org.polypheny.db.catalog.entity.CatalogMaterializedView; import org.polypheny.db.catalog.entity.CatalogPrimaryKey; import org.polypheny.db.catalog.entity.CatalogSchema; -import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.entity.CatalogView; import org.polypheny.db.catalog.entity.MaterializedCriteria; import org.polypheny.db.catalog.entity.MaterializedCriteria.CriteriaType; +import org.polypheny.db.catalog.entity.logical.LogicalColumn; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.exceptions.ColumnAlreadyExistsException; import org.polypheny.db.catalog.exceptions.EntityAlreadyExistsException; import org.polypheny.db.catalog.exceptions.GenericCatalogException; @@ -130,6 +123,13 @@ import org.polypheny.db.catalog.exceptions.UnknownSchemaException; import org.polypheny.db.catalog.exceptions.UnknownTableException; import org.polypheny.db.catalog.exceptions.UnknownUserException; +import org.polypheny.db.catalog.logistic.ConstraintType; +import org.polypheny.db.catalog.logistic.EntityType; +import org.polypheny.db.catalog.logistic.ForeignKeyOption; +import org.polypheny.db.catalog.logistic.NameGenerator; +import org.polypheny.db.catalog.logistic.NamespaceType; +import org.polypheny.db.catalog.logistic.PartitionType; +import org.polypheny.db.catalog.logistic.PlacementType; import org.polypheny.db.config.RuntimeConfig; import org.polypheny.db.ddl.DdlManager; import org.polypheny.db.ddl.exception.ColumnNotExistsException; @@ -302,7 +302,7 @@ Result getTable( final UIRequest request ) { // determine if it is a view or a table LogicalTable catalogTable; try { - catalogTable = catalog.getTable( this.databaseId, t[0], t[1] ); + catalogTable = catalog.getTable( t[0], t[1] ); result.setNamespaceType( catalogTable.getNamespaceType() ); if ( catalogTable.modifiable ) { result.setType( ResultType.TABLE ); @@ -312,6 +312,8 @@ Result getTable( final UIRequest request ) { } catch ( UnknownTableException e ) { log.error( "Caught exception", e ); return result.setError( "Could not retrieve type of Result (table/view)." ); + } catch ( UnknownSchemaException e ) { + throw new RuntimeException( e ); } //get headers with default values @@ -323,23 +325,23 @@ Result getTable( final UIRequest request ) { } else { primaryColumns = new ArrayList<>(); } - for ( CatalogColumn catalogColumn : catalog.getColumns( catalogTable.id ) ) { - String defaultValue = catalogColumn.defaultValue == null ? null : catalogColumn.defaultValue.value; - String collectionsType = catalogColumn.collectionsType == null ? "" : catalogColumn.collectionsType.getName(); + for ( LogicalColumn logicalColumn : catalog.getColumns( catalogTable.id ) ) { + String defaultValue = logicalColumn.defaultValue == null ? null : logicalColumn.defaultValue.value; + String collectionsType = logicalColumn.collectionsType == null ? 
"" : logicalColumn.collectionsType.getName(); cols.add( new DbColumn( - catalogColumn.name, - catalogColumn.type.getName(), + logicalColumn.name, + logicalColumn.type.getName(), collectionsType, - catalogColumn.nullable, - catalogColumn.length, - catalogColumn.scale, - catalogColumn.dimension, - catalogColumn.cardinality, - primaryColumns.contains( catalogColumn.name ), + logicalColumn.nullable, + logicalColumn.length, + logicalColumn.scale, + logicalColumn.dimension, + logicalColumn.cardinality, + primaryColumns.contains( logicalColumn.name ), defaultValue, - request.sortState == null ? new SortState() : request.sortState.get( catalogColumn.name ), - request.filter == null || request.filter.get( catalogColumn.name ) == null ? "" : request.filter.get( catalogColumn.name ) ) ); + request.sortState == null ? new SortState() : request.sortState.get( logicalColumn.name ), + request.filter == null || request.filter.get( logicalColumn.name ) == null ? "" : request.filter.get( logicalColumn.name ) ) ); } result.setHeader( cols.toArray( new DbColumn[0] ) ); @@ -398,8 +400,8 @@ void getSchemaTree( final Context ctx ) { SidebarElement tableElement = new SidebarElement( schema.name + "." + table.name, table.name, schema.namespaceType, request.routerLinkRoot, icon ); if ( request.depth > 2 ) { - List columns = catalog.getColumns( table.id ); - for ( CatalogColumn column : columns ) { + List columns = catalog.getColumns( table.id ); + for ( LogicalColumn column : columns ) { tableElement.addChild( new SidebarElement( schema.name + "." + table.name + "." + column.name, column.name, schema.namespaceType, request.routerLinkRoot, icon ).setCssClass( "sidebarColumn" ) ); } } @@ -471,7 +473,7 @@ void getTables( final Context ctx ) { } } - List tables = catalog.getTables( databaseId, new org.polypheny.db.catalog.logistic.Pattern( requestedSchema ), null ); + List tables = catalog.getTables( new org.polypheny.db.catalog.logistic.Pattern( requestedSchema ), null ); ArrayList result = new ArrayList<>(); for ( LogicalTable t : tables ) { result.add( new DbTable( t.name, t.getNamespaceName(), t.modifiable, t.entityType ) ); @@ -657,32 +659,32 @@ void insertRow( final Context ctx ) { StringJoiner columns = new StringJoiner( ",", "(", ")" ); StringJoiner values = new StringJoiner( ",", "(", ")" ); - List catalogColumns = catalog.getColumns( new org.polypheny.db.catalog.logistic.Pattern( "APP" ), new org.polypheny.db.catalog.logistic.Pattern( split[0] ), new org.polypheny.db.catalog.logistic.Pattern( split[1] ), null ); + List logicalColumns = catalog.getColumns( new org.polypheny.db.catalog.logistic.Pattern( split[0] ), new org.polypheny.db.catalog.logistic.Pattern( split[1] ), null ); try { int i = 0; - for ( CatalogColumn catalogColumn : catalogColumns ) { + for ( LogicalColumn logicalColumn : logicalColumns ) { //part is null if it does not exist - Part part = ctx.req.getPart( catalogColumn.name ); + Part part = ctx.req.getPart( logicalColumn.name ); if ( part == null ) { //don't add if default value is set - if ( catalogColumn.defaultValue == null ) { + if ( logicalColumn.defaultValue == null ) { values.add( "NULL" ); - columns.add( "\"" + catalogColumn.name + "\"" ); + columns.add( "\"" + logicalColumn.name + "\"" ); } } else { - columns.add( "\"" + catalogColumn.name + "\"" ); + columns.add( "\"" + logicalColumn.name + "\"" ); if ( part.getSubmittedFileName() == null ) { String value = new BufferedReader( new InputStreamReader( part.getInputStream(), StandardCharsets.UTF_8 ) ).lines().collect( 
Collectors.joining( System.lineSeparator() ) ); - if ( catalogColumn.name.equals( "_id" ) ) { + if ( logicalColumn.name.equals( "_id" ) ) { if ( value.length() == 0 ) { value = BsonUtil.getObjectId(); } } - values.add( uiValueToSql( value, catalogColumn.type, catalogColumn.collectionsType ) ); + values.add( uiValueToSql( value, logicalColumn.type, logicalColumn.collectionsType ) ); } else { values.add( "?" ); FileInputHandle fih = new FileInputHandle( statement, part.getInputStream() ); - statement.getDataContext().addParameterValues( i++, catalogColumn.getAlgDataType( transaction.getTypeFactory() ), ImmutableList.of( fih ) ); + statement.getDataContext().addParameterValues( i++, logicalColumn.getAlgDataType( transaction.getTypeFactory() ), ImmutableList.of( fih ) ); } } } @@ -946,17 +948,17 @@ public static String uiValueToSql( final String value, final PolyType type, fina */ private String computeWherePK( final String tableName, final String columnName, final Map filter ) { StringJoiner joiner = new StringJoiner( " AND ", "", "" ); - Map catalogColumns = getCatalogColumns( tableName, columnName ); + Map catalogColumns = getCatalogColumns( tableName, columnName ); LogicalTable catalogTable; try { - catalogTable = catalog.getTable( databaseId, tableName, columnName ); + catalogTable = catalog.getTable( tableName, columnName ); CatalogPrimaryKey pk = catalog.getPrimaryKey( catalogTable.primaryKey ); for ( long colId : pk.columnIds ) { String colName = catalog.getColumn( colId ).name; String condition; if ( filter.containsKey( colName ) ) { String val = filter.get( colName ); - CatalogColumn col = catalogColumns.get( colName ); + LogicalColumn col = catalogColumns.get( colName ); condition = uiValueToSql( val, col.type, col.collectionsType ); condition = String.format( "\"%s\" = %s", colName, condition ); joiner.add( condition ); @@ -964,6 +966,8 @@ private String computeWherePK( final String tableName, final String columnName, } } catch ( UnknownTableException e ) { throw new RuntimeException( "Error while deriving PK WHERE condition", e ); + } catch ( UnknownSchemaException e ) { + throw new RuntimeException( e ); } return " WHERE " + joiner.toString(); } @@ -1025,11 +1029,11 @@ void updateRow( final Context ctx ) throws ServletException, IOException { Statement statement = transaction.createStatement(); StringJoiner setStatements = new StringJoiner( ",", "", "" ); - List catalogColumns = catalog.getColumns( new org.polypheny.db.catalog.logistic.Pattern( "APP" ), new org.polypheny.db.catalog.logistic.Pattern( split[0] ), new org.polypheny.db.catalog.logistic.Pattern( split[1] ), null ); + List logicalColumns = catalog.getColumns( new org.polypheny.db.catalog.logistic.Pattern( split[0] ), new org.polypheny.db.catalog.logistic.Pattern( split[1] ), null ); int i = 0; - for ( CatalogColumn catalogColumn : catalogColumns ) { - Part part = ctx.req.getPart( catalogColumn.name ); + for ( LogicalColumn logicalColumn : logicalColumns ) { + Part part = ctx.req.getPart( logicalColumn.name ); if ( part == null ) { continue; } @@ -1037,12 +1041,12 @@ void updateRow( final Context ctx ) throws ServletException, IOException { String value = new BufferedReader( new InputStreamReader( part.getInputStream(), StandardCharsets.UTF_8 ) ).lines().collect( Collectors.joining( System.lineSeparator() ) ); String parsed = gson.fromJson( value, String.class ); if ( parsed == null ) { - setStatements.add( String.format( "\"%s\" = NULL", catalogColumn.name ) ); + setStatements.add( String.format( "\"%s\" = NULL", 
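
computeWherePK above derives the row-identifying predicate for UI updates: it walks the table's primary-key columns, looks up each value in the request filter, and delegates quoting to uiValueToSql. Reduced to its core (catalog plumbing and error handling omitted; the example values in the comment are invented):

    // Core of the WHERE-by-primary-key construction; simplified from the hunk above.
    StringJoiner joiner = new StringJoiner( " AND " );
    for ( long colId : pk.columnIds ) {
        String colName = catalog.getColumn( colId ).name;
        if ( filter.containsKey( colName ) ) {
            LogicalColumn col = catalogColumns.get( colName );
            String condition = uiValueToSql( filter.get( colName ), col.type, col.collectionsType );
            joiner.add( String.format( "\"%s\" = %s", colName, condition ) );
        }
    }
    String where = " WHERE " + joiner; // e.g. WHERE "id" = 42 AND "name" = 'x'
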
logicalColumn.name ) ); } else { - setStatements.add( String.format( "\"%s\" = %s", catalogColumn.name, uiValueToSql( parsed, catalogColumn.type, catalogColumn.collectionsType ) ) ); + setStatements.add( String.format( "\"%s\" = %s", logicalColumn.name, uiValueToSql( parsed, logicalColumn.type, logicalColumn.collectionsType ) ) ); } } else { - setStatements.add( String.format( "\"%s\" = ?", catalogColumn.name ) ); + setStatements.add( String.format( "\"%s\" = ?", logicalColumn.name ) ); FileInputHandle fih = new FileInputHandle( statement, part.getInputStream() ); statement.getDataContext().addParameterValues( i++, null, ImmutableList.of( fih ) ); } @@ -1122,7 +1126,7 @@ void getColumns( final Context ctx ) { ArrayList cols = new ArrayList<>(); try { - LogicalTable catalogTable = catalog.getTable( databaseId, t[0], t[1] ); + LogicalTable catalogTable = catalog.getTable( t[0], t[1] ); ArrayList primaryColumns; if ( catalogTable.primaryKey != null ) { CatalogPrimaryKey primaryKey = catalog.getPrimaryKey( catalogTable.primaryKey ); @@ -1130,20 +1134,20 @@ void getColumns( final Context ctx ) { } else { primaryColumns = new ArrayList<>(); } - for ( CatalogColumn catalogColumn : catalog.getColumns( catalogTable.id ) ) { - String defaultValue = catalogColumn.defaultValue == null ? null : catalogColumn.defaultValue.value; - String collectionsType = catalogColumn.collectionsType == null ? "" : catalogColumn.collectionsType.getName(); + for ( LogicalColumn logicalColumn : catalog.getColumns( catalogTable.id ) ) { + String defaultValue = logicalColumn.defaultValue == null ? null : logicalColumn.defaultValue.value; + String collectionsType = logicalColumn.collectionsType == null ? "" : logicalColumn.collectionsType.getName(); cols.add( new DbColumn( - catalogColumn.name, - catalogColumn.type.getName(), + logicalColumn.name, + logicalColumn.type.getName(), collectionsType, - catalogColumn.nullable, - catalogColumn.length, - catalogColumn.scale, - catalogColumn.dimension, - catalogColumn.cardinality, - primaryColumns.contains( catalogColumn.name ), + logicalColumn.nullable, + logicalColumn.length, + logicalColumn.scale, + logicalColumn.dimension, + logicalColumn.cardinality, + primaryColumns.contains( logicalColumn.name ), defaultValue ) ); } result = new Result( cols.toArray( new DbColumn[0] ), null ); @@ -1158,6 +1162,8 @@ void getColumns( final Context ctx ) { log.error( "Caught exception while getting a column", e ); ctx.status( 400 ).json( new Result( e ) ); return; + } catch ( UnknownSchemaException e ) { + throw new RuntimeException( e ); } ctx.json( result ); @@ -1167,14 +1173,14 @@ void getColumns( final Context ctx ) { void getDataSourceColumns( final Context ctx ) throws UnknownDatabaseException, UnknownTableException, UnknownSchemaException { UIRequest request = ctx.bodyAsClass( UIRequest.class ); - LogicalTable catalogTable = catalog.getTable( "APP", request.getSchemaName(), request.getTableName() ); + LogicalTable catalogTable = catalog.getTable( request.getSchemaName(), request.getTableName() ); if ( catalogTable.entityType == EntityType.VIEW ) { ImmutableMap> underlyingTable = ((CatalogView) catalogTable).getUnderlyingTables(); List columns = new ArrayList<>(); for ( Long columnIds : catalogTable.fieldIds ) { - CatalogColumn col = catalog.getColumn( columnIds ); + LogicalColumn col = catalog.getColumn( columnIds ); columns.add( new DbColumn( col.name, col.type.getName(), @@ -1200,7 +1206,7 @@ void getDataSourceColumns( final Context ctx ) throws UnknownDatabaseException, List 
pkColumnNames = primaryKey.getColumnNames(); List columns = new ArrayList<>(); for ( CatalogColumnPlacement ccp : catalog.getColumnPlacementsOnAdapterPerTable( adapterId, catalogTable.id ) ) { - CatalogColumn col = catalog.getColumn( ccp.columnId ); + LogicalColumn col = catalog.getColumn( ccp.columnId ); columns.add( new DbColumn( col.name, col.type.getName(), @@ -1225,7 +1231,7 @@ void getDataSourceColumns( final Context ctx ) throws UnknownDatabaseException, void getAvailableSourceColumns( final Context ctx ) throws UnknownDatabaseException, UnknownTableException, UnknownSchemaException { UIRequest request = ctx.bodyAsClass( UIRequest.class ); - LogicalTable table = catalog.getTable( "APP", request.getSchemaName(), request.getTableName() ); + LogicalTable table = catalog.getTable( request.getSchemaName(), request.getTableName() ); ImmutableMap> placements = catalog.getColumnPlacementsByAdapter( table.id ); Set adapterIds = placements.keySet(); if ( adapterIds.size() > 1 ) { @@ -1268,7 +1274,7 @@ void getAvailableSourceColumns( final Context ctx ) throws UnknownDatabaseExcept void getMaterializedInfo( final Context ctx ) throws UnknownDatabaseException, UnknownTableException, UnknownSchemaException { EditTableRequest request = ctx.bodyAsClass( EditTableRequest.class ); - LogicalTable catalogTable = catalog.getTable( databaseId, request.schema, request.table ); + LogicalTable catalogTable = catalog.getTable( request.schema, request.table ); if ( catalogTable.entityType == EntityType.MATERIALIZED_VIEW ) { CatalogMaterializedView catalogMaterializedView = (CatalogMaterializedView) catalogTable; @@ -1592,7 +1598,7 @@ void getConstraints( final Context ctx ) { Map> temp = new HashMap<>(); try { - LogicalTable catalogTable = catalog.getTable( databaseId, t[0], t[1] ); + LogicalTable catalogTable = catalog.getTable( t[0], t[1] ); // get primary key if ( catalogTable.primaryKey != null ) { @@ -1630,6 +1636,8 @@ void getConstraints( final Context ctx ) { } catch ( UnknownTableException e ) { log.error( "Caught exception while fetching constraints", e ); result = new Result( e ); + } catch ( UnknownSchemaException e ) { + throw new RuntimeException( e ); } ctx.json( result ); @@ -1750,7 +1758,7 @@ void getIndexes( final Context ctx ) { EditTableRequest request = ctx.bodyAsClass( EditTableRequest.class ); Result result; try { - LogicalTable catalogTable = catalog.getTable( databaseId, request.schema, request.table ); + LogicalTable catalogTable = catalog.getTable( request.schema, request.table ); List catalogIndexes = catalog.getIndexes( catalogTable.id, false ); DbColumn[] header = { @@ -1805,6 +1813,8 @@ void getIndexes( final Context ctx ) { } catch ( UnknownTableException e ) { log.error( "Caught exception while fetching indexes", e ); result = new Result( e ); + } catch ( UnknownSchemaException e ) { + throw new RuntimeException( e ); } ctx.json( result ); } @@ -1878,7 +1888,7 @@ void getUnderlyingTable( final Context ctx ) throws UnknownDatabaseException, Un UIRequest request = ctx.bodyAsClass( UIRequest.class ); - LogicalTable catalogTable = catalog.getTable( "APP", request.getSchemaName(), request.getTableName() ); + LogicalTable catalogTable = catalog.getTable( request.getSchemaName(), request.getTableName() ); if ( catalogTable.entityType == EntityType.VIEW ) { ImmutableMap> underlyingTableOriginal = ((CatalogView) catalogTable).getUnderlyingTables(); @@ -1910,7 +1920,7 @@ private Placement getPlacements( final Index index ) { String schemaName = index.getSchema(); String tableName = 
index.getTable(); try { - LogicalTable table = catalog.getTable( databaseId, schemaName, tableName ); + LogicalTable table = catalog.getTable( schemaName, tableName ); Placement p = new Placement( table.partitionProperty.isPartitioned, catalog.getPartitionGroupNames( table.id ), table.entityType ); if ( table.entityType == EntityType.VIEW ) { @@ -1918,7 +1928,7 @@ private Placement getPlacements( final Index index ) { } else { long pkid = table.primaryKey; List pkColumnIds = Catalog.getInstance().getPrimaryKey( pkid ).columnIds; - CatalogColumn pkColumn = Catalog.getInstance().getColumn( pkColumnIds.get( 0 ) ); + LogicalColumn pkColumn = Catalog.getInstance().getColumn( pkColumnIds.get( 0 ) ); List pkPlacements = catalog.getColumnPlacement( pkColumn.id ); for ( CatalogColumnPlacement placement : pkPlacements ) { Adapter adapter = AdapterManager.getInstance().getAdapter( placement.adapterId ); @@ -1935,6 +1945,8 @@ private Placement getPlacements( final Index index ) { } catch ( UnknownTableException e ) { log.error( "Caught exception while getting placements", e ); return new Placement( e ); + } catch ( UnknownSchemaException e ) { + throw new RuntimeException( e ); } } @@ -2049,9 +2061,9 @@ void getPartitionFunctionModel( final Context ctx ) throws UnknownDatabaseExcept PartitionManager partitionManager = partitionManagerFactory.getPartitionManager( request.method ); // Check whether the selected partition function supports the selected partition column - CatalogColumn partitionColumn; + LogicalColumn partitionColumn; - partitionColumn = Catalog.getInstance().getColumn( "APP", request.schemaName, request.tableName, request.column ); + partitionColumn = Catalog.getInstance().getColumn( request.schemaName, request.tableName, request.column ); if ( !partitionManager.supportsColumnOfType( partitionColumn.type ) ) { ctx.json( new PartitionFunctionModel( "The partition function " + request.method + " does not support columns of type " + partitionColumn.type ) ); @@ -2502,7 +2514,7 @@ void getUml( final Context ctx ) { ArrayList fKeys = new ArrayList<>(); ArrayList tables = new ArrayList<>(); - List catalogEntities = catalog.getTables( databaseId, new org.polypheny.db.catalog.logistic.Pattern( request.schema ), null ); + List catalogEntities = catalog.getTables( new org.polypheny.db.catalog.logistic.Pattern( request.schema ), null ); for ( LogicalTable catalogTable : catalogEntities ) { if ( catalogTable.entityType == EntityType.ENTITY || catalogTable.entityType == EntityType.SOURCE ) { // get foreign keys @@ -3182,10 +3194,12 @@ public static Result executeSqlSelect( final Statement statement, final UIReques if ( request.tableId != null ) { String[] t = request.tableId.split( "\\." 
); try { - catalogTable = crud.catalog.getTable( crud.databaseId, t[0], t[1] ); + catalogTable = crud.catalog.getTable( t[0], t[1] ); entityType = catalogTable.entityType; } catch ( UnknownTableException e ) { log.error( "Caught exception", e ); + } catch ( UnknownSchemaException e ) { + throw new RuntimeException( e ); } } @@ -3217,9 +3231,9 @@ public static Result executeSqlSelect( final Statement statement, final UIReques if ( catalogTable != null ) { try { if ( crud.catalog.checkIfExistsColumn( catalogTable.id, columnName ) ) { - CatalogColumn catalogColumn = crud.catalog.getColumn( catalogTable.id, columnName ); - if ( catalogColumn.defaultValue != null ) { - dbCol.defaultValue = catalogColumn.defaultValue.value; + LogicalColumn logicalColumn = crud.catalog.getColumn( catalogTable.id, columnName ); + if ( logicalColumn.defaultValue != null ) { + dbCol.defaultValue = logicalColumn.defaultValue.value; } } } catch ( UnknownColumnException e ) { @@ -3586,16 +3600,18 @@ public static Transaction getTransaction( boolean analyze, boolean useCache, Cru * @param tableName name of the table * @return HashMap containing the type of each column. The key is the name of the column and the value is the Sql ExpressionType (java.sql.Types). */ - private Map getCatalogColumns( String schemaName, String tableName ) { - Map dataTypes = new HashMap<>(); + private Map getCatalogColumns( String schemaName, String tableName ) { + Map dataTypes = new HashMap<>(); try { - LogicalTable table = catalog.getTable( this.databaseId, schemaName, tableName ); - List catalogColumns = catalog.getColumns( table.id ); - for ( CatalogColumn catalogColumn : catalogColumns ) { - dataTypes.put( catalogColumn.name, catalogColumn ); + LogicalTable table = catalog.getTable( schemaName, tableName ); + List logicalColumns = catalog.getColumns( table.id ); + for ( LogicalColumn logicalColumn : logicalColumns ) { + dataTypes.put( logicalColumn.name, logicalColumn ); } } catch ( UnknownTableException e ) { log.error( "Caught exception", e ); + } catch ( UnknownSchemaException e ) { + throw new RuntimeException( e ); } return dataTypes; } diff --git a/webui/src/main/java/org/polypheny/db/webui/crud/LanguageCrud.java b/webui/src/main/java/org/polypheny/db/webui/crud/LanguageCrud.java index e978b5a391..b7b7287cf1 100644 --- a/webui/src/main/java/org/polypheny/db/webui/crud/LanguageCrud.java +++ b/webui/src/main/java/org/polypheny/db/webui/crud/LanguageCrud.java @@ -34,21 +34,21 @@ import org.polypheny.db.algebra.AlgRoot; import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.logistic.EntityType; -import org.polypheny.db.catalog.logistic.NamespaceType; -import org.polypheny.db.catalog.logistic.Pattern; -import org.polypheny.db.catalog.entity.logical.LogicalCollection; import org.polypheny.db.catalog.entity.CatalogCollectionPlacement; -import org.polypheny.db.catalog.entity.CatalogColumn; -import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.catalog.entity.CatalogGraphPlacement; import org.polypheny.db.catalog.entity.CatalogSchema; +import org.polypheny.db.catalog.entity.logical.LogicalCollection; +import org.polypheny.db.catalog.entity.logical.LogicalColumn; +import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.exceptions.UnknownCollectionException; import org.polypheny.db.catalog.exceptions.UnknownColumnException; import 
org.polypheny.db.catalog.exceptions.UnknownDatabaseException; import org.polypheny.db.catalog.exceptions.UnknownSchemaException; import org.polypheny.db.catalog.exceptions.UnknownTableException; +import org.polypheny.db.catalog.logistic.EntityType; +import org.polypheny.db.catalog.logistic.NamespaceType; +import org.polypheny.db.catalog.logistic.Pattern; import org.polypheny.db.config.RuntimeConfig; import org.polypheny.db.information.InformationManager; import org.polypheny.db.information.InformationObserver; @@ -194,7 +194,7 @@ public static Result getResult( QueryLanguage language, Statement statement, Que if ( request.tableId != null ) { String[] t = request.tableId.split( "\\." ); try { - catalogTable = catalog.getTable( statement.getPrepareContext().getDefaultSchemaName(), t[0], t[1] ); + catalogTable = catalog.getTable( t[0], t[1] ); } catch ( UnknownTableException | UnknownDatabaseException | UnknownSchemaException e ) { log.error( "Caught exception", e ); } @@ -228,9 +228,9 @@ public static Result getResult( QueryLanguage language, Statement statement, Que if ( catalogTable != null ) { try { if ( catalog.checkIfExistsColumn( catalogTable.id, columnName ) ) { - CatalogColumn catalogColumn = catalog.getColumn( catalogTable.id, columnName ); - if ( catalogColumn.defaultValue != null ) { - dbCol.defaultValue = catalogColumn.defaultValue.value; + LogicalColumn logicalColumn = catalog.getColumn( catalogTable.id, columnName ); + if ( logicalColumn.defaultValue != null ) { + dbCol.defaultValue = logicalColumn.defaultValue.value; } } } catch ( UnknownColumnException e ) { @@ -324,7 +324,7 @@ public void getGraphPlacements( final Context ctx ) { private Placement getPlacements( final Index index ) { Catalog catalog = Catalog.getInstance(); String graphName = index.getSchema(); - List graphs = catalog.getGraphs( Catalog.defaultDatabaseId, new Pattern( graphName ) ); + List graphs = catalog.getGraphs( new Pattern( graphName ) ); if ( graphs.size() != 1 ) { log.error( "The requested graph does not exist." ); return new Placement( new RuntimeException( "The requested graph does not exist." ) ); diff --git a/webui/src/main/java/org/polypheny/db/webui/models/requests/BatchUpdateRequest.java b/webui/src/main/java/org/polypheny/db/webui/models/requests/BatchUpdateRequest.java index 75e9aaa775..4033b31f1e 100644 --- a/webui/src/main/java/org/polypheny/db/webui/models/requests/BatchUpdateRequest.java +++ b/webui/src/main/java/org/polypheny/db/webui/models/requests/BatchUpdateRequest.java @@ -30,7 +30,7 @@ import lombok.extern.slf4j.Slf4j; import org.apache.commons.io.IOUtils; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.entity.CatalogColumn; +import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.exceptions.UnknownColumnException; import org.polypheny.db.catalog.exceptions.UnknownDatabaseException; import org.polypheny.db.catalog.exceptions.UnknownSchemaException; @@ -63,9 +63,9 @@ public String getQuery( String tableId, Statement statement, HttpServletRequest String value = entry.getValue().value; Catalog catalog = Catalog.getInstance(); String[] split = tableId.split( "\\." 
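
Crud.executeSqlSelect and LanguageCrud.getResult perform the same post-processing step: each result header is enriched with the column's catalog default value when the column still exists in the catalog. The shared pattern, reduced to its core (dbCol is the UI header object built a few lines earlier in each method):

    if ( catalog.checkIfExistsColumn( catalogTable.id, columnName ) ) {
        LogicalColumn logicalColumn = catalog.getColumn( catalogTable.id, columnName );
        if ( logicalColumn.defaultValue != null ) {
            dbCol.defaultValue = logicalColumn.defaultValue.value;
        }
    }
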
); - CatalogColumn catalogColumn; + LogicalColumn logicalColumn; try { - catalogColumn = catalog.getColumn( catalog.getTable( "APP", split[0], split[1] ).id, entry.getKey() ); + logicalColumn = catalog.getColumn( catalog.getTable( split[0], split[1] ).id, entry.getKey() ); } catch ( UnknownColumnException | UnknownTableException | UnknownDatabaseException | UnknownSchemaException e ) { log.error( "Could not determine column type", e ); return null; @@ -73,14 +73,14 @@ public String getQuery( String tableId, Statement statement, HttpServletRequest if ( fileName == null && value == null ) { setClauses.add( String.format( "\"%s\"=NULL", entry.getKey() ) ); } else if ( value != null && fileName == null ) { - setClauses.add( String.format( "\"%s\"=%s", entry.getKey(), Crud.uiValueToSql( value, catalogColumn.type, catalogColumn.collectionsType ) ) ); + setClauses.add( String.format( "\"%s\"=%s", entry.getKey(), Crud.uiValueToSql( value, logicalColumn.type, logicalColumn.collectionsType ) ) ); } else if ( value == null ) {// && fileName != null - if ( catalogColumn.type.getFamily() == PolyTypeFamily.MULTIMEDIA ) { + if ( logicalColumn.type.getFamily() == PolyTypeFamily.MULTIMEDIA ) { setClauses.add( String.format( "\"%s\"=?", entry.getKey() ) ); statement.getDataContext().addParameterValues( counter++, null, ImmutableList.of( httpRequest.getPart( fileName ).getInputStream() ) ); } else { String data = IOUtils.toString( httpRequest.getPart( fileName ).getInputStream(), StandardCharsets.UTF_8 ); - setClauses.add( String.format( "\"%s\"=%s", entry.getKey(), Crud.uiValueToSql( data, catalogColumn.type, catalogColumn.collectionsType ) ) ); + setClauses.add( String.format( "\"%s\"=%s", entry.getKey(), Crud.uiValueToSql( data, logicalColumn.type, logicalColumn.collectionsType ) ) ); } } else { log.warn( "This should not happen" ); From 1cf04e42c660a27f6141e8e218347794419de79b Mon Sep 17 00:00:00 2001 From: datomo Date: Wed, 1 Mar 2023 16:02:21 +0100 Subject: [PATCH 030/436] added logistic PolyCatalog functions --- .../org/polypheny/db/StatisticsManager.java | 4 +- .../org/polypheny/db/adapter/Adapter.java | 2 +- .../org/polypheny/db/adapter/DataContext.java | 2 +- .../EnumerableConditionalExecute.java | 2 +- .../EnumerableConditionalExecuteRule.java | 2 +- .../db/adapter/index/CoWHashIndex.java | 8 +- .../db/adapter/index/CowMultiHashIndex.java | 8 +- .../org/polypheny/db/adapter/index/Index.java | 6 +- .../db/adapter/index/IndexManager.java | 10 +- .../adapter/java/AbstractQueryableEntity.java | 2 +- .../db/adapter/java/ReflectiveSchema.java | 9 +- .../core/common/ConditionalExecute.java | 4 +- .../algebra/core/document/DocumentValues.java | 2 +- .../relational/RelationalTransformable.java | 2 +- .../common/LogicalConditionalExecute.java | 4 +- .../document/LogicalDocumentModify.java | 2 +- .../logical/document/LogicalDocumentScan.java | 4 +- .../db/algebra/logical/lpg/LogicalGraph.java | 2 +- .../algebra/logical/lpg/LogicalLpgModify.java | 2 +- .../algebra/logical/lpg/LogicalLpgScan.java | 4 +- .../algebra/logical/lpg/LogicalLpgValues.java | 2 +- .../org/polypheny/db/catalog/Catalog.java | 1635 +---------------- .../catalog/catalogs/AllocationCatalog.java | 12 +- .../catalogs/AllocationDocumentCatalog.java | 36 + .../catalogs/AllocationGraphCatalog.java | 59 + .../catalogs/AllocationRelationalCatalog.java | 673 +++++++ .../db/catalog/catalogs/LogicalCatalog.java | 45 + .../catalogs/LogicalDocumentCatalog.java | 64 + .../catalog/catalogs/LogicalGraphCatalog.java | 93 + 
.../catalogs/LogicalRelationalCatalog.java | 613 ++++++ .../db/catalog/catalogs/PhysicalCatalog.java | 21 +- .../db/catalog/entity/CatalogAdapter.java | 53 +- .../db/catalog/entity/CatalogForeignKey.java | 2 +- .../db/catalog/entity/CatalogKey.java | 2 +- .../catalog/entity/CatalogPartitionGroup.java | 2 +- .../catalog/entity/CatalogQueryInterface.java | 14 +- ...talogSchema.java => LogicalNamespace.java} | 28 +- .../entity/allocation/AllocationTable.java | 6 +- .../entity/logical/LogicalCollection.java | 2 +- .../catalog/entity/logical/LogicalColumn.java | 4 +- .../catalog/entity/logical/LogicalTable.java | 4 +- .../entity/physical/PhysicalTable.java | 9 +- .../db/catalog/refactor/QueryableEntity.java | 2 +- .../db/catalog/{ => snapshot}/Snapshot.java | 6 +- .../db/languages/LanguageManager.java | 3 +- .../polypheny/db/languages/QueryLanguage.java | 3 +- .../validate/ValidatorCatalogReader.java | 3 +- .../org/polypheny/db/plan/AlgOptCluster.java | 4 +- .../polypheny/db/plan/VisitorDataContext.java | 2 +- .../org/polypheny/db/prepare/Context.java | 3 +- .../org/polypheny/db/prepare/ContextImpl.java | 3 +- .../org/polypheny/db/prepare/PlannerImpl.java | 3 +- .../db/prepare/PolyphenyDbCatalogReader.java | 2 +- .../db/prepare/PolyphenyDbPrepareImpl.java | 2 +- .../org/polypheny/db/prepare/Prepare.java | 2 +- .../db/schema/AbstractPolyphenyDbSchema.java | 5 +- .../polypheny/db/schema/LogicalSchema.java | 2 +- .../org/polypheny/db/schema/Namespace.java | 2 +- .../db/schema/PolySchemaBuilder.java | 318 ---- .../db/schema/PolyphenyDbSchema.java | 8 +- .../polypheny/db/schema/QueryableEntity.java | 2 +- .../java/org/polypheny/db/schema/Schemas.java | 2 +- .../db/schema/impl/AbstractNamespace.java | 2 +- .../schema/impl/AbstractTableQueryable.java | 4 +- .../db/schema/impl/DelegatingNamespace.java | 2 +- .../org/polypheny/db/tools/AlgBuilder.java | 2 +- .../polypheny/db/tools/AlgBuilderFactory.java | 3 +- .../polypheny/db/tools/FrameworkConfig.java | 3 +- .../org/polypheny/db/tools/Frameworks.java | 2 +- .../polypheny/db/tools/RoutedAlgBuilder.java | 2 +- .../polypheny/db/transaction/Transaction.java | 6 +- .../db/transaction/TransactionManager.java | 6 +- .../org/polypheny/db/util/ValidatorUtil.java | 2 +- .../org/polypheny/db/catalog/MockCatalog.java | 85 +- .../db/catalog/MockCatalogReader.java | 9 +- .../db/docker/MockCatalogDocker.java | 12 +- .../org/polypheny/db/plan/RelOptUtilTest.java | 2 +- .../db/test/RexProgramBuilderBase.java | 2 +- .../org/polypheny/db/ddl/DdlManagerImpl.java | 49 +- .../db/processing/AbstractQueryProcessor.java | 8 +- .../db/processing/DataContextImpl.java | 2 +- .../db/routing/routers/AbstractDqlRouter.java | 2 +- .../db/routing/routers/BaseRouter.java | 4 +- .../db/routing/routers/DmlRouterImpl.java | 4 +- .../db/transaction/TransactionImpl.java | 8 +- .../transaction/TransactionManagerImpl.java | 10 +- .../org/polypheny/db/catalog/CatalogTest.java | 2 +- .../org/polypheny/db/misc/AlgBuilderTest.java | 3 +- .../java/org/polypheny/db/mql/DdlTest.java | 10 +- .../statistics/DashboardInformation.java | 4 +- .../statistics/StatisticColumn.java | 2 +- .../statistics/StatisticQueryProcessor.java | 6 +- .../statistics/StatisticsManagerImpl.java | 14 +- .../org/polypheny/db/avatica/DbmsMeta.java | 10 +- .../avatica/PolyphenyDbConnectionHandle.java | 8 +- .../polypheny/db/adapter/csv/CsvSource.java | 2 +- .../db/adapter/csv/CsvTranslatableTable.java | 10 - .../cypher2alg/CypherToAlgConverter.java | 3 +- .../polypheny/db/adapter/jdbc/JdbcEntity.java | 2 +- 
.../polypheny/db/adapter/jdbc/JdbcSchema.java | 2 +- .../jdbc/sources/AbstractJdbcSource.java | 2 +- .../jdbc/stores/AbstractJdbcStore.java | 2 +- .../rel2sql/RelToSqlConverterStructsTest.java | 2 +- .../org/polypheny/db/catalog/CatalogImpl.java | 356 +--- .../db/catalog/CatalogImplBackup.java | 256 +-- .../polypheny/db/catalog/CatalogInfoPage.java | 2 +- .../org/polypheny/db/test/CatalogTest.java | 24 +- .../polypheny/db/languages/mql/MqlDrop.java | 8 +- .../db/languages/mql/MqlRenameCollection.java | 4 +- .../languages/mql2alg/MqlToAlgConverter.java | 2 +- .../db/mql/mql2alg/MqlMockCatalog.java | 10 +- .../org/polypheny/db/tools/PigAlgBuilder.java | 2 +- .../polypheny/db/catalog/CatalogPlugin.java | 4 +- .../org/polypheny/db/catalog/IdBuilder.java | 60 +- .../org/polypheny/db/catalog/NCatalog.java | 1 - .../org/polypheny/db/catalog/PolyCatalog.java | 256 ++- .../db/catalog/entities/CatalogDatabase.java | 38 - .../logical/document/CatalogCollection.java | 40 - .../logical/document/CatalogDatabase.java | 37 - .../logical/document/DocumentCatalog.java | 73 +- .../catalog/logical/graph/CatalogGraph.java | 37 - .../catalog/logical/graph/GraphCatalog.java | 88 +- .../logical/relational/RelationalCatalog.java | 527 +++++- .../db/catalog/snapshot/FullSnapshot.java | 5 +- .../polypheny/db/sql/SqlLanguagePlugin.java | 3 +- .../polypheny/db/sql/SqlProcessorImpl.java | 6 +- .../org/polypheny/db/sql/language/SqlDdl.java | 2 +- .../polypheny/db/sql/language/SqlUtil.java | 2 +- .../language/advise/SqlAdvisorValidator.java | 3 +- .../ddl/SqlCreateMaterializedView.java | 2 +- .../db/sql/language/ddl/SqlCreateTable.java | 2 +- .../db/sql/language/ddl/SqlCreateView.java | 2 +- .../db/sql/language/util/SqlTypeUtil.java | 4 +- .../validate/PolyphenyDbSqlValidator.java | 3 +- .../sql/language/validate/SqlValidator.java | 3 +- .../language/validate/SqlValidatorImpl.java | 6 +- .../language/validate/SqlValidatorUtil.java | 3 +- .../db/sql/sql2alg/SqlToAlgConverter.java | 2 +- .../org/polypheny/db/sql/FrameworksTest.java | 4 +- .../org/polypheny/db/sql/InterpreterTest.java | 2 +- .../org/polypheny/db/sql/RexExecutorTest.java | 2 +- .../db/sql/map/NamespaceToJsonMapperTest.java | 4 +- .../java/org/polypheny/db/webui/Crud.java | 14 +- .../org/polypheny/db/webui/WebSocket.java | 4 +- .../polypheny/db/webui/crud/LanguageCrud.java | 6 +- 145 files changed, 3070 insertions(+), 2991 deletions(-) rename plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/relational/CatalogSchema.java => core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationCatalog.java (74%) create mode 100644 core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationDocumentCatalog.java create mode 100644 core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationGraphCatalog.java create mode 100644 core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationRelationalCatalog.java create mode 100644 core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalCatalog.java create mode 100644 core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalDocumentCatalog.java create mode 100644 core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalGraphCatalog.java create mode 100644 core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalRelationalCatalog.java rename plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/entities/CatalogUser.java => core/src/main/java/org/polypheny/db/catalog/catalogs/PhysicalCatalog.java (58%) rename core/src/main/java/org/polypheny/db/catalog/entity/{CatalogSchema.java => 
LogicalNamespace.java} (72%) rename core/src/main/java/org/polypheny/db/catalog/{ => snapshot}/Snapshot.java (95%) delete mode 100644 core/src/main/java/org/polypheny/db/schema/PolySchemaBuilder.java delete mode 100644 plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/entities/CatalogDatabase.java delete mode 100644 plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/document/CatalogCollection.java delete mode 100644 plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/document/CatalogDatabase.java delete mode 100644 plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/graph/CatalogGraph.java diff --git a/core/src/main/java/org/polypheny/db/StatisticsManager.java b/core/src/main/java/org/polypheny/db/StatisticsManager.java index c25940e90c..546e374127 100644 --- a/core/src/main/java/org/polypheny/db/StatisticsManager.java +++ b/core/src/main/java/org/polypheny/db/StatisticsManager.java @@ -19,7 +19,7 @@ import java.beans.PropertyChangeListener; import java.util.List; import java.util.Map; -import org.polypheny.db.catalog.entity.CatalogSchema; +import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalTable; @@ -88,6 +88,6 @@ public static StatisticsManager getInstance() { public abstract void updateTableName( LogicalTable catalogTable, String newName ); - public abstract void updateSchemaName( CatalogSchema catalogSchema, String newName ); + public abstract void updateSchemaName( LogicalNamespace logicalNamespace, String newName ); } diff --git a/core/src/main/java/org/polypheny/db/adapter/Adapter.java b/core/src/main/java/org/polypheny/db/adapter/Adapter.java index e2a54f1cd0..cd6170d3d5 100644 --- a/core/src/main/java/org/polypheny/db/adapter/Adapter.java +++ b/core/src/main/java/org/polypheny/db/adapter/Adapter.java @@ -47,7 +47,6 @@ import lombok.experimental.Accessors; import org.polypheny.db.adapter.DeployMode.DeploySetting; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.Snapshot; import org.polypheny.db.catalog.entity.CatalogCollectionPlacement; import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; import org.polypheny.db.catalog.entity.allocation.AllocationTable; @@ -56,6 +55,7 @@ import org.polypheny.db.catalog.entity.physical.PhysicalGraph; import org.polypheny.db.catalog.entity.physical.PhysicalTable; import org.polypheny.db.catalog.logistic.NamespaceType; +import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.config.Config; import org.polypheny.db.config.Config.ConfigListener; import org.polypheny.db.config.ConfigDocker; diff --git a/core/src/main/java/org/polypheny/db/adapter/DataContext.java b/core/src/main/java/org/polypheny/db/adapter/DataContext.java index 9a0139dc96..89840cc19b 100644 --- a/core/src/main/java/org/polypheny/db/adapter/DataContext.java +++ b/core/src/main/java/org/polypheny/db/adapter/DataContext.java @@ -32,7 +32,7 @@ import org.apache.calcite.linq4j.tree.ParameterExpression; import org.polypheny.db.adapter.java.JavaTypeFactory; import org.polypheny.db.algebra.type.AlgDataType; -import org.polypheny.db.catalog.Snapshot; +import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.transaction.Statement; import org.polypheny.db.util.Advisor; diff --git a/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableConditionalExecute.java 
b/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableConditionalExecute.java index 69d6671e9d..1b9d66b61d 100644 --- a/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableConditionalExecute.java +++ b/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableConditionalExecute.java @@ -97,7 +97,7 @@ public EnumerableConditionalExecute copy( AlgTraitSet traitSet, List in exceptionClass, exceptionMessage ); ece.setCheckDescription( checkDescription ); - ece.setCatalogSchema( catalogSchema ); + ece.setLogicalNamespace( logicalNamespace ); ece.setCatalogTable( catalogTable ); ece.setCatalogColumns( catalogColumns ); ece.setValues( values ); diff --git a/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableConditionalExecuteRule.java b/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableConditionalExecuteRule.java index 5da2e451ec..ee4219fd8d 100644 --- a/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableConditionalExecuteRule.java +++ b/core/src/main/java/org/polypheny/db/adapter/enumerable/EnumerableConditionalExecuteRule.java @@ -42,7 +42,7 @@ public AlgNode convert( AlgNode alg ) { final AlgNode action = AlgOptRule.convert( lce.getRight(), lce.getRight().getTraitSet().replace( EnumerableConvention.INSTANCE ) ); final EnumerableConditionalExecute ece = EnumerableConditionalExecute.create( input, action, lce.getCondition(), lce.getExceptionClass(), lce.getExceptionMessage() ); ece.setCheckDescription( lce.getCheckDescription() ); - ece.setCatalogSchema( lce.getCatalogSchema() ); + ece.setLogicalNamespace( lce.getLogicalNamespace() ); ece.setCatalogTable( lce.getCatalogTable() ); ece.setCatalogColumns( lce.getCatalogColumns() ); ece.setValues( lce.getValues() ); diff --git a/core/src/main/java/org/polypheny/db/adapter/index/CoWHashIndex.java b/core/src/main/java/org/polypheny/db/adapter/index/CoWHashIndex.java index 4e956fa073..2f56b07690 100644 --- a/core/src/main/java/org/polypheny/db/adapter/index/CoWHashIndex.java +++ b/core/src/main/java/org/polypheny/db/adapter/index/CoWHashIndex.java @@ -29,7 +29,7 @@ import org.polypheny.db.algebra.core.Values; import org.polypheny.db.algebra.exceptions.ConstraintViolationException; import org.polypheny.db.algebra.type.AlgDataType; -import org.polypheny.db.catalog.entity.CatalogSchema; +import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.rex.RexBuilder; import org.polypheny.db.rex.RexLiteral; @@ -52,7 +52,7 @@ class CoWHashIndex extends Index { public CoWHashIndex( final long id, final String name, - final CatalogSchema schema, + final LogicalNamespace schema, final LogicalTable table, final List columns, final List targetColumns ) { @@ -68,7 +68,7 @@ public CoWHashIndex( public CoWHashIndex( final long id, final String name, - final CatalogSchema schema, + final LogicalNamespace schema, final LogicalTable table, final String[] columns, final String[] targetColumns ) { @@ -375,7 +375,7 @@ public Index create( String method, Boolean unique, Boolean persistent, - CatalogSchema schema, + LogicalNamespace schema, LogicalTable table, List columns, List targetColumns ) { diff --git a/core/src/main/java/org/polypheny/db/adapter/index/CowMultiHashIndex.java b/core/src/main/java/org/polypheny/db/adapter/index/CowMultiHashIndex.java index b0da915bfe..969dee07d1 100644 --- a/core/src/main/java/org/polypheny/db/adapter/index/CowMultiHashIndex.java +++ 
b/core/src/main/java/org/polypheny/db/adapter/index/CowMultiHashIndex.java @@ -30,7 +30,7 @@ import org.apache.commons.lang3.tuple.Triple; import org.polypheny.db.algebra.core.Values; import org.polypheny.db.algebra.type.AlgDataType; -import org.polypheny.db.catalog.entity.CatalogSchema; +import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.rex.RexBuilder; import org.polypheny.db.rex.RexLiteral; @@ -49,7 +49,7 @@ public class CowMultiHashIndex extends Index { private Map, List, Boolean>>> barrierIndex = new HashMap<>(); - public CowMultiHashIndex( long id, String name, CatalogSchema schema, LogicalTable table, List columns, List targetColumns ) { + public CowMultiHashIndex( long id, String name, LogicalNamespace schema, LogicalTable table, List columns, List targetColumns ) { this.id = id; this.name = name; this.schema = schema; @@ -59,7 +59,7 @@ public CowMultiHashIndex( long id, String name, CatalogSchema schema, LogicalTab } - public CowMultiHashIndex( long id, String name, CatalogSchema schema, LogicalTable table, String[] columns, String[] targetColumns ) { + public CowMultiHashIndex( long id, String name, LogicalNamespace schema, LogicalTable table, String[] columns, String[] targetColumns ) { this( id, name, schema, table, Arrays.asList( columns ), Arrays.asList( targetColumns ) ); } @@ -369,7 +369,7 @@ public Index create( String method, Boolean unique, Boolean persistent, - CatalogSchema schema, + LogicalNamespace schema, LogicalTable table, List columns, List targetColumns ) { diff --git a/core/src/main/java/org/polypheny/db/adapter/index/Index.java b/core/src/main/java/org/polypheny/db/adapter/index/Index.java index 859f446f02..281eb60b29 100644 --- a/core/src/main/java/org/polypheny/db/adapter/index/Index.java +++ b/core/src/main/java/org/polypheny/db/adapter/index/Index.java @@ -28,7 +28,7 @@ import org.polypheny.db.algebra.constant.Kind; import org.polypheny.db.algebra.core.Values; import org.polypheny.db.algebra.type.AlgDataType; -import org.polypheny.db.catalog.entity.CatalogSchema; +import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.processing.QueryProcessor; import org.polypheny.db.rex.RexBuilder; @@ -51,7 +51,7 @@ public abstract class Index { // The logical schema of the table this index is for @Getter - protected CatalogSchema schema; + protected LogicalNamespace schema; // The logical table this index is for @Getter @@ -239,7 +239,7 @@ Index create( final String method, final Boolean unique, final Boolean persitent, - final CatalogSchema schema, + final LogicalNamespace schema, final LogicalTable table, final List columns, final List targetColumns ); diff --git a/core/src/main/java/org/polypheny/db/adapter/index/IndexManager.java b/core/src/main/java/org/polypheny/db/adapter/index/IndexManager.java index 74b58a94e0..5a2342c37c 100644 --- a/core/src/main/java/org/polypheny/db/adapter/index/IndexManager.java +++ b/core/src/main/java/org/polypheny/db/adapter/index/IndexManager.java @@ -33,7 +33,7 @@ import org.polypheny.db.catalog.entity.CatalogIndex; import org.polypheny.db.catalog.entity.CatalogKey; import org.polypheny.db.catalog.entity.CatalogPrimaryKey; -import org.polypheny.db.catalog.entity.CatalogSchema; +import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalTable; import 
org.polypheny.db.catalog.exceptions.GenericCatalogException; import org.polypheny.db.catalog.exceptions.UnknownDatabaseException; @@ -182,7 +182,7 @@ protected void addIndex( final long id, final String name, final CatalogKey key, method, unique, persistent, - Catalog.getInstance().getSchema( key.schemaId ), + Catalog.getInstance().getNamespace( key.schemaId ), table, key.getColumnNames(), pk.getColumnNames() ); @@ -214,7 +214,7 @@ public void deleteIndex( final long indexId ) { } - public Index getIndex( CatalogSchema schema, LogicalTable table, List columns ) { + public Index getIndex( LogicalNamespace schema, LogicalTable table, List columns ) { return this.indexById.values().stream().filter( index -> index.schema.equals( schema ) && index.table.equals( table ) @@ -224,7 +224,7 @@ public Index getIndex( CatalogSchema schema, LogicalTable table, List co } - public Index getIndex( CatalogSchema schema, LogicalTable table, List columns, String method, Boolean unique, Boolean persistent ) { + public Index getIndex( LogicalNamespace schema, LogicalTable table, List columns, String method, Boolean unique, Boolean persistent ) { return this.indexById.values().stream().filter( index -> index.schema.equals( schema ) && index.table.equals( table ) @@ -236,7 +236,7 @@ public Index getIndex( CatalogSchema schema, LogicalTable table, List co } - public List getIndices( CatalogSchema schema, LogicalTable table ) { + public List getIndices( LogicalNamespace schema, LogicalTable table ) { return this.indexById.values().stream() .filter( index -> index.schema.equals( schema ) && index.table.equals( table ) ) .collect( Collectors.toList() ); diff --git a/core/src/main/java/org/polypheny/db/adapter/java/AbstractQueryableEntity.java b/core/src/main/java/org/polypheny/db/adapter/java/AbstractQueryableEntity.java index dc13bca9c7..2ced3cf402 100644 --- a/core/src/main/java/org/polypheny/db/adapter/java/AbstractQueryableEntity.java +++ b/core/src/main/java/org/polypheny/db/adapter/java/AbstractQueryableEntity.java @@ -36,7 +36,7 @@ import java.lang.reflect.Type; import org.apache.calcite.linq4j.tree.Expression; -import org.polypheny.db.catalog.Snapshot; +import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.schema.Entity; import org.polypheny.db.schema.QueryableEntity; import org.polypheny.db.schema.Schemas; diff --git a/core/src/main/java/org/polypheny/db/adapter/java/ReflectiveSchema.java b/core/src/main/java/org/polypheny/db/adapter/java/ReflectiveSchema.java index 31c8533276..65135baaf8 100644 --- a/core/src/main/java/org/polypheny/db/adapter/java/ReflectiveSchema.java +++ b/core/src/main/java/org/polypheny/db/adapter/java/ReflectiveSchema.java @@ -45,9 +45,7 @@ import java.util.List; import java.util.Map; import org.apache.calcite.linq4j.Enumerable; -import org.apache.calcite.linq4j.Enumerator; import org.apache.calcite.linq4j.Linq4j; -import org.apache.calcite.linq4j.Queryable; import org.apache.calcite.linq4j.function.Function1; import org.apache.calcite.linq4j.tree.Expression; import org.apache.calcite.linq4j.tree.Expressions; @@ -55,25 +53,20 @@ import org.apache.calcite.linq4j.tree.Types; import org.polypheny.db.adapter.DataContext; import org.polypheny.db.algebra.AlgReferentialConstraint; -import org.polypheny.db.algebra.type.AlgDataType; -import org.polypheny.db.algebra.type.AlgDataTypeFactory; -import org.polypheny.db.catalog.Snapshot; import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.catalog.entity.logical.LogicalTable; import 
org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.refactor.ScannableEntity; -import org.polypheny.db.schema.Entity; +import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.schema.Function; import org.polypheny.db.schema.Namespace; import org.polypheny.db.schema.Namespace.Schema; -import org.polypheny.db.schema.PolyphenyDbSchema; import org.polypheny.db.schema.Schemas; import org.polypheny.db.schema.Statistic; import org.polypheny.db.schema.Statistics; import org.polypheny.db.schema.TableMacro; import org.polypheny.db.schema.TranslatableEntity; import org.polypheny.db.schema.impl.AbstractNamespace; -import org.polypheny.db.schema.impl.AbstractTableQueryable; import org.polypheny.db.schema.impl.ReflectiveFunctionBase; import org.polypheny.db.util.BuiltInMethod; diff --git a/core/src/main/java/org/polypheny/db/algebra/core/common/ConditionalExecute.java b/core/src/main/java/org/polypheny/db/algebra/core/common/ConditionalExecute.java index 36d16cfee6..9f4a7d9e09 100644 --- a/core/src/main/java/org/polypheny/db/algebra/core/common/ConditionalExecute.java +++ b/core/src/main/java/org/polypheny/db/algebra/core/common/ConditionalExecute.java @@ -25,7 +25,7 @@ import org.polypheny.db.algebra.AlgWriter; import org.polypheny.db.algebra.BiAlg; import org.polypheny.db.algebra.type.AlgDataType; -import org.polypheny.db.catalog.entity.CatalogSchema; +import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgTraitSet; @@ -45,7 +45,7 @@ public abstract class ConditionalExecute extends BiAlg { @Getter @Setter - protected CatalogSchema catalogSchema = null; + protected LogicalNamespace logicalNamespace = null; @Getter @Setter protected LogicalTable catalogTable = null; diff --git a/core/src/main/java/org/polypheny/db/algebra/core/document/DocumentValues.java b/core/src/main/java/org/polypheny/db/algebra/core/document/DocumentValues.java index 7861ed4bce..a635246569 100644 --- a/core/src/main/java/org/polypheny/db/algebra/core/document/DocumentValues.java +++ b/core/src/main/java/org/polypheny/db/algebra/core/document/DocumentValues.java @@ -30,8 +30,8 @@ import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.logical.relational.LogicalValues; import org.polypheny.db.algebra.type.DocumentType; -import org.polypheny.db.catalog.Snapshot; import org.polypheny.db.catalog.entity.CatalogEntity; +import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.rex.RexBuilder; diff --git a/core/src/main/java/org/polypheny/db/algebra/core/relational/RelationalTransformable.java b/core/src/main/java/org/polypheny/db/algebra/core/relational/RelationalTransformable.java index ac60f1511e..662822621c 100644 --- a/core/src/main/java/org/polypheny/db/algebra/core/relational/RelationalTransformable.java +++ b/core/src/main/java/org/polypheny/db/algebra/core/relational/RelationalTransformable.java @@ -20,9 +20,9 @@ import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.core.common.Modify; import org.polypheny.db.algebra.core.common.Modify.Operation; -import org.polypheny.db.catalog.Snapshot; import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.catalog.refactor.ModifiableEntity; +import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.prepare.Prepare.CatalogReader; diff 
--git a/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalConditionalExecute.java b/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalConditionalExecute.java index 09df150aee..d2e83ca29f 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalConditionalExecute.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalConditionalExecute.java @@ -58,7 +58,7 @@ public static LogicalConditionalExecute create( AlgNode left, AlgNode right, Log copy.exceptionClass, copy.exceptionMessage ); lce.checkDescription = description; - lce.catalogSchema = copy.catalogSchema; + lce.logicalNamespace = copy.logicalNamespace; lce.catalogTable = copy.catalogTable; lce.catalogColumns = copy.catalogColumns; lce.values = copy.values; @@ -77,7 +77,7 @@ public AlgNode copy( AlgTraitSet traitSet, List inputs ) { exceptionClass, exceptionMessage ); lce.setCheckDescription( checkDescription ); - lce.setCatalogSchema( catalogSchema ); + lce.setLogicalNamespace( logicalNamespace ); lce.setCatalogTable( catalogTable ); lce.setCatalogColumns( catalogColumns ); lce.setValues( values ); diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/document/LogicalDocumentModify.java b/core/src/main/java/org/polypheny/db/algebra/logical/document/LogicalDocumentModify.java index a938159c61..222e19aa22 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/document/LogicalDocumentModify.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/document/LogicalDocumentModify.java @@ -21,8 +21,8 @@ import org.polypheny.db.algebra.AlgShuttle; import org.polypheny.db.algebra.core.document.DocumentModify; import org.polypheny.db.algebra.core.relational.RelationalTransformable; -import org.polypheny.db.catalog.Snapshot; import org.polypheny.db.catalog.entity.CatalogEntity; +import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.rex.RexNode; diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/document/LogicalDocumentScan.java b/core/src/main/java/org/polypheny/db/algebra/logical/document/LogicalDocumentScan.java index fbda351c46..537640e47b 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/document/LogicalDocumentScan.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/document/LogicalDocumentScan.java @@ -22,13 +22,11 @@ import org.polypheny.db.algebra.core.document.DocumentScan; import org.polypheny.db.algebra.core.relational.RelationalTransformable; import org.polypheny.db.algebra.logical.relational.LogicalRelScan; -import org.polypheny.db.catalog.Snapshot; import org.polypheny.db.catalog.entity.CatalogEntity; -import org.polypheny.db.catalog.entity.logical.LogicalCollection; +import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgOptRule; import org.polypheny.db.plan.AlgTraitSet; -import org.polypheny.db.schema.LogicalEntity; import org.polypheny.db.schema.ModelTrait; diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalGraph.java b/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalGraph.java index e15734ab50..2a8d16cb31 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalGraph.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalGraph.java @@ -26,9 +26,9 @@ import org.polypheny.db.algebra.core.relational.RelationalTransformable; import org.polypheny.db.algebra.type.AlgDataTypeImpl; import 
org.polypheny.db.algebra.type.AlgProtoDataType; -import org.polypheny.db.catalog.Snapshot; import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.catalog.logistic.NamespaceType; +import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.plan.AlgOptEntity.ToAlgContext; import org.polypheny.db.schema.Function; import org.polypheny.db.schema.Namespace; diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgModify.java b/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgModify.java index 2a104af77c..0c690ee195 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgModify.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgModify.java @@ -24,8 +24,8 @@ import org.polypheny.db.algebra.core.common.Modify; import org.polypheny.db.algebra.core.lpg.LpgModify; import org.polypheny.db.algebra.core.relational.RelationalTransformable; -import org.polypheny.db.catalog.Snapshot; import org.polypheny.db.catalog.entity.CatalogEntity; +import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgOptUtil; import org.polypheny.db.plan.AlgTraitSet; diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgScan.java b/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgScan.java index 2713dcb8a8..dcebcaf8b2 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgScan.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgScan.java @@ -28,15 +28,13 @@ import org.polypheny.db.algebra.logical.relational.LogicalRelScan; import org.polypheny.db.algebra.operators.OperatorName; import org.polypheny.db.algebra.type.AlgDataType; -import org.polypheny.db.catalog.Snapshot; import org.polypheny.db.catalog.entity.CatalogEntity; -import org.polypheny.db.catalog.entity.logical.LogicalGraph; +import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.languages.OperatorRegistry; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.rex.RexBuilder; import org.polypheny.db.rex.RexNode; -import org.polypheny.db.schema.LogicalEntity; import org.polypheny.db.schema.ModelTrait; diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgValues.java b/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgValues.java index 846bf34140..f7709042f7 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgValues.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgValues.java @@ -36,8 +36,8 @@ import org.polypheny.db.algebra.type.AlgDataTypeFieldImpl; import org.polypheny.db.algebra.type.AlgDataTypeSystem; import org.polypheny.db.algebra.type.AlgRecordType; -import org.polypheny.db.catalog.Snapshot; import org.polypheny.db.catalog.entity.CatalogEntity; +import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.rex.RexLiteral; diff --git a/core/src/main/java/org/polypheny/db/catalog/Catalog.java b/core/src/main/java/org/polypheny/db/catalog/Catalog.java index 1c40b5299d..3d46af20e0 100644 --- a/core/src/main/java/org/polypheny/db/catalog/Catalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/Catalog.java @@ -17,68 +17,29 @@ package org.polypheny.db.catalog; -import com.google.common.collect.ImmutableList; -import 
com.google.common.collect.ImmutableMap; import java.beans.PropertyChangeListener; import java.beans.PropertyChangeSupport; import java.util.List; import java.util.Map; +import lombok.NonNull; import org.apache.calcite.linq4j.tree.Expression; import org.apache.calcite.linq4j.tree.Expressions; import org.pf4j.ExtensionPoint; -import org.polypheny.db.adapter.DataStore; -import org.polypheny.db.algebra.AlgCollation; import org.polypheny.db.algebra.AlgNode; -import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.catalog.entity.CatalogAdapter; import org.polypheny.db.catalog.entity.CatalogAdapter.AdapterType; -import org.polypheny.db.catalog.entity.CatalogCollectionMapping; -import org.polypheny.db.catalog.entity.CatalogCollectionPlacement; -import org.polypheny.db.catalog.entity.CatalogColumnPlacement; -import org.polypheny.db.catalog.entity.CatalogConstraint; -import org.polypheny.db.catalog.entity.CatalogDataPlacement; -import org.polypheny.db.catalog.entity.CatalogForeignKey; -import org.polypheny.db.catalog.entity.CatalogGraphMapping; -import org.polypheny.db.catalog.entity.CatalogGraphPlacement; -import org.polypheny.db.catalog.entity.CatalogIndex; -import org.polypheny.db.catalog.entity.CatalogKey; -import org.polypheny.db.catalog.entity.CatalogPartition; -import org.polypheny.db.catalog.entity.CatalogPartitionGroup; -import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; -import org.polypheny.db.catalog.entity.CatalogPrimaryKey; import org.polypheny.db.catalog.entity.CatalogQueryInterface; -import org.polypheny.db.catalog.entity.CatalogSchema; import org.polypheny.db.catalog.entity.CatalogUser; -import org.polypheny.db.catalog.entity.CatalogView; -import org.polypheny.db.catalog.entity.MaterializedCriteria; -import org.polypheny.db.catalog.entity.logical.LogicalCollection; -import org.polypheny.db.catalog.entity.logical.LogicalColumn; -import org.polypheny.db.catalog.entity.logical.LogicalGraph; -import org.polypheny.db.catalog.entity.logical.LogicalTable; -import org.polypheny.db.catalog.exceptions.GenericCatalogException; +import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.exceptions.NoTablePrimaryKeyException; import org.polypheny.db.catalog.exceptions.UnknownAdapterException; -import org.polypheny.db.catalog.exceptions.UnknownColumnException; -import org.polypheny.db.catalog.exceptions.UnknownConstraintException; -import org.polypheny.db.catalog.exceptions.UnknownForeignKeyException; -import org.polypheny.db.catalog.exceptions.UnknownIndexException; import org.polypheny.db.catalog.exceptions.UnknownQueryInterfaceException; import org.polypheny.db.catalog.exceptions.UnknownSchemaException; -import org.polypheny.db.catalog.exceptions.UnknownTableException; import org.polypheny.db.catalog.exceptions.UnknownUserException; -import org.polypheny.db.catalog.logistic.Collation; -import org.polypheny.db.catalog.logistic.DataPlacementRole; -import org.polypheny.db.catalog.logistic.EntityType; -import org.polypheny.db.catalog.logistic.ForeignKeyOption; -import org.polypheny.db.catalog.logistic.IndexType; import org.polypheny.db.catalog.logistic.NamespaceType; -import org.polypheny.db.catalog.logistic.PartitionType; import org.polypheny.db.catalog.logistic.Pattern; -import org.polypheny.db.catalog.logistic.PlacementType; -import org.polypheny.db.languages.QueryLanguage; -import org.polypheny.db.partition.properties.PartitionProperty; +import org.polypheny.db.catalog.snapshot.Snapshot; import 
org.polypheny.db.transaction.Transaction; -import org.polypheny.db.type.PolyType; public abstract class Catalog implements ExtensionPoint { @@ -183,47 +144,47 @@ protected final boolean isValidIdentifier( final String str ) { /** * Get all schemas which fit to the specified filter pattern. - * getSchemas(xid, null, null) returns all schemas of all databases. + * getNamespaces(xid, null, null) returns all schemas of all databases. * - * @param schemaNamePattern Pattern for the schema name. null returns all. + * @param name Pattern for the schema name. null returns all. * @return List of schemas which fit to the specified filter. If there is no schema which meets the criteria, an empty list is returned. */ - public abstract List<CatalogSchema> getSchemas( Pattern schemaNamePattern ); + public abstract @NonNull List<LogicalNamespace> getNamespaces( Pattern name ); /** * Returns the schema with the specified id. * - * @param schemaId The id of the schema + * @param id The id of the schema * @return The schema */ - public abstract CatalogSchema getSchema( long schemaId ); + public abstract LogicalNamespace getNamespace( long id ); /** * Returns the schema with the given name in the specified database. * - * @param schemaName The name of the schema + * @param name The name of the schema * @return The schema * @throws UnknownSchemaException If there is no schema with this name in the specified database. */ - public abstract CatalogSchema getSchema( String schemaName ) throws UnknownSchemaException; + public abstract LogicalNamespace getNamespace( String name ) throws UnknownSchemaException; /** * Adds a schema in a specified database * * @param name The name of the schema - * @param ownerId The owner of this schema * @param namespaceType The type of this schema + * @param caseSensitive Whether the namespace is case sensitive * @return The id of the inserted schema */ - public abstract long addNamespace( String name, int ownerId, NamespaceType namespaceType ); + public abstract long addNamespace( String name, NamespaceType namespaceType, boolean caseSensitive ); /** * Checks whether a schema with the specified name exists in a database. * - * @param schemaName The name of the schema to check + * @param name The name of the schema to check * @return True if there is a schema with this name. False if not. */ - public abstract boolean checkIfExistsSchema( String schemaName ); + public abstract boolean checkIfExistsNamespace( String name ); /** * Renames a schema * @@ -231,1580 +192,128 @@ protected final boolean isValidIdentifier( final String str ) { * @param schemaId The id of the schema to rename * @param name New name of the schema */
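For orientation, a minimal caller-side sketch of the renamed namespace API above; illustrative only, not part of the patch. Catalog.getInstance() and the public id field of LogicalNamespace are taken from the surrounding diff, while the namespace name and the NamespaceType.RELATIONAL constant are assumptions:

import org.polypheny.db.catalog.Catalog;
import org.polypheny.db.catalog.exceptions.UnknownSchemaException;
import org.polypheny.db.catalog.logistic.NamespaceType;

public class NamespaceApiSketch {

    // Resolves a namespace id, creating the namespace first if it does not exist yet.
    static long ensureNamespace( String name ) throws UnknownSchemaException {
        Catalog catalog = Catalog.getInstance();
        // checkIfExistsNamespace() replaces checkIfExistsSchema()
        if ( !catalog.checkIfExistsNamespace( name ) ) {
            // addNamespace() no longer takes an ownerId; it now takes a case-sensitivity flag instead
            return catalog.addNamespace( name, NamespaceType.RELATIONAL, false );
        }
        // getNamespace() replaces getSchema() and returns a LogicalNamespace
        return catalog.getNamespace( name ).id;
    }

}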
- * - * @param id The predefined id of the already added graph - * @param stores The stores on which the graph was placed - * @param onlyPlacement If the substitution only creates the placements and not the entites - */ - public abstract void addGraphLogistics( long id, List stores, boolean onlyPlacement ) throws GenericCatalogException, UnknownTableException, UnknownColumnException; - - /** - * Deletes an existing graph. - * - * @param id The id of the graph to delete - */ - public abstract void deleteGraph( long id ); - - /** - * Returns an existing graph. - * - * @param id The id of the graph to return - * @return The graph entity with the provided id - */ - public abstract LogicalGraph getGraph( long id ); - - /** - * Get a collection of all graphs, which match the given conditions. - * - * @param graphName The pattern to which the name has to match, null if every name is matched - * @return A collection of all graphs matching - */ - public abstract List getGraphs( Pattern graphName ); - - /** - * Add a new alias for a given graph. - * - * @param graphId The id of the graph to which the alias is added - * @param alias The alias to add - * @param ifNotExists If the alias should only be added if it not already exists - */ - public abstract void addGraphAlias( long graphId, String alias, boolean ifNotExists ); - - /** - * Removes a given alias for a specific graph. - * - * @param graphId The id of the graph for which the alias is removed - * @param alias The alias to remove - * @param ifExists If the alias should only be removed if it exists - */ - public abstract void removeGraphAlias( long graphId, String alias, boolean ifExists ); + public abstract void renameNamespace( long schemaId, String name ); - /** - * Returns the mapping of the graph used for substitution in other data models. - * - * @param graphId The id of the graph for which the mapping is requested - * @return The mapping for the graph - */ - public abstract CatalogGraphMapping getGraphMapping( long graphId ); /** * Delete a schema from the catalog * - * @param schemaId The id of the schema to delete - */ - public abstract void deleteSchema( long schemaId ); - - /** - * Get all tables of the specified schema which fit to the specified filters. - * getTables(xid, databaseName, null, null, null) returns all tables of the database. - * - * @param schemaId The id of the schema - * @param tableNamePattern Pattern for the table name. null returns all. - * @return List of tables which fit to the specified filters. If there is no table which meets the criteria, an empty list is returned. - */ - public abstract List getTables( long schemaId, Pattern tableNamePattern ); - - /** - * Get all tables of the specified database which fit to the specified filters. - * getTables(xid, databaseName, null, null, null) returns all tables of the database. - * - * @param schemaNamePattern Pattern for the schema name. null returns all. - * @param tableNamePattern Pattern for the table name. null returns all. - * @return List of tables which fit to the specified filters. If there is no table which meets the criteria, an empty list is returned. - */ - public abstract List getTables( Pattern schemaNamePattern, Pattern tableNamePattern ); - - /** - * Returns the table with the given name in the specified database and schema. 
- * - * @param schemaName The name of the schema - * @param tableName The name of the table - * @return The table - */ - public abstract LogicalTable getTable( String schemaName, String tableName ) throws UnknownTableException, UnknownSchemaException; - - - /** - * Returns the table with the given id - * - * @param tableId The id of the table - * @return The table - */ - public abstract LogicalTable getTable( long tableId ); - - /** - * Returns the table with the given name in the specified schema. - * - * @param schemaId The id of the schema - * @param tableName The name of the table - * @return The table - * @throws UnknownTableException If there is no table with this name in the specified database and schema. - */ - public abstract LogicalTable getTable( long schemaId, String tableName ) throws UnknownTableException; - - /** - * Returns the table which is associated with a given partitionId - * - * @param partitionId to use for lookup - * @return CatalogEntity that contains partitionId - */ - public abstract LogicalTable getTableFromPartition( long partitionId ); - - /** - * Adds a table to a specified schema. - * - * @param name The name of the table to add - * @param namespaceId The id of the schema - * @param ownerId The if of the owner - * @param entityType The table type - * @param modifiable Whether the content of the table can be modified - * @return The id of the inserted table - */ - public abstract long addTable( String name, long namespaceId, int ownerId, EntityType entityType, boolean modifiable ); - - - /** - * Adds a view to a specified schema. - * - * @param name The name of the view to add - * @param namespaceId The id of the schema - * @param ownerId The if of the owner - * @param entityType The table type - * @param modifiable Whether the content of the table can be modified - * @param definition {@link AlgNode} used to create Views - * @param underlyingTables all tables and columns used within the view - * @param fieldList all columns used within the View - * @return The id of the inserted table - */ - public abstract long addView( String name, long namespaceId, int ownerId, EntityType entityType, boolean modifiable, AlgNode definition, AlgCollation algCollation, Map> underlyingTables, AlgDataType fieldList, String query, QueryLanguage language ); - - /** - * Adds a materialized view to a specified schema. 
- * - * @param name of the view to add - * @param namespaceId id of the schema - * @param ownerId id of the owner - * @param entityType type of table - * @param modifiable Whether the content of the table can be modified - * @param definition {@link AlgNode} used to create Views - * @param algCollation relCollation used for materialized view - * @param underlyingTables all tables and columns used within the view - * @param fieldList all columns used within the View - * @param materializedCriteria Information like freshness and last updated - * @param query used to define materialized view - * @param language query language used to define materialized view - * @param ordered if materialized view is ordered or not - * @return id of the inserted materialized view - */ - public abstract long addMaterializedView( String name, long namespaceId, int ownerId, EntityType entityType, boolean modifiable, AlgNode definition, AlgCollation algCollation, Map> underlyingTables, AlgDataType fieldList, MaterializedCriteria materializedCriteria, String query, QueryLanguage language, boolean ordered ) throws GenericCatalogException; - - /** - * Checks if there is a table with the specified name in the specified schema. - * - * @param namespaceId The id of the schema - * @param entityName The name to check for - * @return true if there is a table with this name, false if not. - */ - public abstract boolean checkIfExistsEntity( long namespaceId, String entityName ); - - /** - * Checks if there is a table with the specified id. - * - * @param tableId id of the table - * @return true if there is a table with this id, false if not. - */ - public abstract boolean checkIfExistsEntity( long tableId ); - - /** - * Renames a table - * - * @param tableId The if of the table to rename - * @param name New name of the table - */ - public abstract void renameTable( long tableId, String name ); - - /** - * Delete the specified table. Columns need to be deleted before. - * - * @param tableId The id of the table to delete - */ - public abstract void deleteTable( long tableId ); - - /** - * Change owner of a table - * - * @param tableId The if of the table - * @param ownerId ID of the new owner - */ - public abstract void setTableOwner( long tableId, int ownerId ); - - /** - * Set the primary key of a table - * - * @param tableId The id of the table - * @param keyId The id of the key to set as primary key. Set null to set no primary key. - */ - public abstract void setPrimaryKey( long tableId, Long keyId ); - - /** - * Adds a placement for a column. - * - * @param adapterId The adapter on which the table should be placed on - * @param columnId The id of the column to be placed - * @param placementType The type of placement - * @param physicalSchemaName The schema name on the adapter - * @param physicalTableName The table name on the adapter - * @param physicalColumnName The column name on the adapter - */ - public abstract void addColumnPlacement( int adapterId, long columnId, PlacementType placementType, String physicalSchemaName, String physicalTableName, String physicalColumnName ); - - /** - * Deletes all dependent column placements - * - * @param adapterId The id of the adapter - * @param columnId The id of the column - * @param columnOnly columnOnly If delete originates from a dropColumn - */ - public abstract void deleteColumnPlacement( int adapterId, long columnId, boolean columnOnly ); - - /** - * Gets a collective list of column placements per column on an adapter. 
- * Effectively used to retrieve all relevant placements including partitions. - * - * @param adapterId The id of the adapter - * @param columnId The id of the column - * @return The specific column placement - */ - public abstract CatalogColumnPlacement getColumnPlacement( int adapterId, long columnId ); - - /** - * Checks if there is a column with the specified name in the specified table. - * - * @param adapterId The id of the adapter - * @param columnId The id of the column - * @return true if there is a column placement, false if not. - */ - public abstract boolean checkIfExistsColumnPlacement( int adapterId, long columnId ); - - /** - * Get all column placements of a column - * - * @param columnId The id of the specific column - * @return List of column placements of specific column - */ - public abstract List getColumnPlacement( long columnId ); - - /** - * Get column placements of a specific table on a specific adapter on column detail level. - * Only returns one ColumnPlacement per column on adapter. Ignores multiplicity due to different partitionsIds - * - * @param adapterId The id of the adapter - * @return List of column placements of the table on the specified adapter - */ - public abstract List getColumnPlacementsOnAdapterPerTable( int adapterId, long tableId ); - - /** - * Gets a collection of placements on a specific adapter sorted by their position. - * - * @param adapterId The adapter on which the placements are placed - * @param tableId The id of the table of the placements - * @return The collection of column placements sorted - */ - public abstract List getColumnPlacementsOnAdapterSortedByPhysicalPosition( int adapterId, long tableId ); - - /** - * Get column placements on a adapter. On column detail level - * Only returns one ColumnPlacement per column on adapter. Ignores multiplicity due to different partitionsIds - * - * @param adapterId The id of the adapter - * @return List of column placements on the specified adapter - */ - public abstract List getColumnPlacementsOnAdapter( int adapterId ); - - /** - * Gets a collection of column placements for a given column. - * - * @param columnId The id of the column of requested column placements - * @return The collection of placements sorted - */ - public abstract List getColumnPlacementsByColumn( long columnId ); - - /** - * Gets all column placements of a table structured by the id of the adapters. - * - * @param tableId The id of the table for the requested column placements - * @return The requested collection - */ - public abstract ImmutableMap> getColumnPlacementsByAdapter( long tableId ); - - /** - * Gets a map partition placements sorted by adapter. - * - * @param tableId The id of the table for which the partitions are returned - * @return The sorted partitions placements + * @param id The id of the schema to delete */ - public abstract ImmutableMap> getPartitionPlacementsByAdapter( long tableId ); + public abstract void deleteNamespace( long id ); - /** - * Gets the partitions groups for a given table grouped by adapters. - * - * @param tableId The table on which the partitions groups are placed - * @return The map sorting the partitions groups by adapter - */ - public abstract ImmutableMap> getPartitionGroupsByAdapter( long tableId ); /** - * Gets the partition group sorted by partition. 
+ * Get the user with the specified name * - * @param partitionId The id of the partitions group + * @param name The name of the user + * @return The user + * @throws UnknownUserException If there is no user with the specified name */ - public abstract long getPartitionGroupByPartition( long partitionId ); + public abstract CatalogUser getUser( String name ) throws UnknownUserException; /** - * Gets a collection of all keys. + * Get the user with the specified id. * - * @return The keys + * @param id The id of the user + * @return The user */ - public abstract List<CatalogKey> getKeys(); - + public abstract CatalogUser getUser( long id ); /** - * Get all keys for a given table. + * Get list of all adapters * - * @param tableId The id of the table for which the keys are returned - * @return The collection of keys + * @return List of adapters */ - public abstract List<CatalogKey> getTableKeys( long tableId ); + public abstract List<CatalogAdapter> getAdapters(); /** - * Get column placements in a specific schema on a specific adapter + * Get an adapter by its unique name * - * @param adapterId The id of the adapter - * @param schemaId The id of the schema - * @return List of column placements on this adapter and schema + * @return The adapter */ - public abstract List<CatalogColumnPlacement> getColumnPlacementsOnAdapterAndSchema( int adapterId, long schemaId ); + public abstract CatalogAdapter getAdapter( String uniqueName ) throws UnknownAdapterException; /** - * Update the type of a placement. + * Get an adapter by its id * - * @param adapterId The id of the adapter - * @param columnId The id of the column - * @param placementType The new type of placement + * @return The adapter */ - public abstract void updateColumnPlacementType( int adapterId, long columnId, PlacementType placementType ); + public abstract CatalogAdapter getAdapter( long id ); /** - * Update physical position of a column placement on a specified adapter. + * Check if an adapter with the given id exists * - * @param adapterId The id of the adapter - * @param columnId The id of the column - * @param position The physical position to set + * @param id The id of the adapter + * @return Whether the adapter exists */ - public abstract void updateColumnPlacementPhysicalPosition( int adapterId, long columnId, long position ); + public abstract boolean checkIfExistsAdapter( long id ); /** - * Update physical position of a column placement on a specified adapter. Uses auto-increment to get the globally increasing number. + * Add an adapter * - * @param adapterId The id of the adapter - * @param columnId The id of the column + * @param uniqueName The unique name of the adapter + * @param clazz The class name of the adapter + * @param type The type of adapter + * @param settings The configuration of the adapter + * @return The id of the newly added adapter */ - public abstract void updateColumnPlacementPhysicalPosition( int adapterId, long columnId ); + public abstract long addAdapter( String uniqueName, String clazz, AdapterType type, Map<String, String> settings ); /** - * Change physical names of all column placements.
+ * Update settings of an adapter * * @param adapterId The id of the adapter - * @param columnId The id of the column - * @param physicalSchemaName The physical schema name - * @param physicalColumnName The physical column name - * @param updatePhysicalColumnPosition Whether to reset the column position (the highest number in the table; represents that the column is now at the last position) - */ - public abstract void updateColumnPlacementPhysicalNames( int adapterId, long columnId, String physicalSchemaName, String physicalColumnName, boolean updatePhysicalColumnPosition ); - - /** - * Get all columns of the specified table. - * - * @param tableId The id of the table - * @return List of columns which fit to the specified filters. If there is no column which meets the criteria, an empty list is returned. - */ - public abstract List getColumns( long tableId ); - - /** - * Get all columns of the specified database which fit to the specified filter patterns. - * getColumns(xid, databaseName, null, null, null) returns all columns of the database. - * - * @param schemaNamePattern Pattern for the schema name. null returns all. - * @param tableNamePattern Pattern for the table name. null returns all. - * @param columnNamePattern Pattern for the column name. null returns all. - * @return List of columns which fit to the specified filters. If there is no column which meets the criteria, an empty list is returned. - */ - public abstract List getColumns( Pattern schemaNamePattern, Pattern tableNamePattern, Pattern columnNamePattern ); - - /** - * Returns the column with the specified id. - * - * @param columnId The id of the column - * @return A CatalogColumn - */ - public abstract LogicalColumn getColumn( long columnId ); - - /** - * Returns the column with the specified name in the specified table of the specified database and schema. - * - * @param tableId The id of the table - * @param columnName The name of the column - * @return A CatalogColumn - * @throws UnknownColumnException If there is no column with this name in the specified table of the database and schema. - */ - public abstract LogicalColumn getColumn( long tableId, String columnName ) throws UnknownColumnException; - - /** - * Returns the column with the specified name in the specified table of the specified database and schema. - * - * @param schemaName The name of the schema - * @param tableName The name of the table - * @param columnName The name of the column - * @return A CatalogColumn - */ - public abstract LogicalColumn getColumn( String schemaName, String tableName, String columnName ) throws UnknownColumnException, UnknownSchemaException, UnknownTableException; - - /** - * Adds a column. 
- * - * @param name The name of the column - * @param tableId The id of the corresponding table - * @param position The ordinal position of the column (starting with 1) - * @param type The type of the column - * @param length The length of the field (if applicable, else null) - * @param scale The number of digits after the decimal point (if applicable, else null) - * @param nullable Weather the column can contain null values - * @param collation The collation of the field (if applicable, else null) - * @return The id of the inserted column - */ - public abstract long addColumn( String name, long tableId, int position, PolyType type, PolyType collectionsType, Integer length, Integer scale, Integer dimension, Integer cardinality, boolean nullable, Collation collation ); - - /** - * Renames a column - * - * @param columnId The if of the column to rename - * @param name New name of the column - */ - public abstract void renameColumn( long columnId, String name ); - - /** - * Change the position of the column. - * - * @param columnId The id of the column for which to change the position - * @param position The new position of the column - */ - public abstract void setColumnPosition( long columnId, int position ); - - /** - * Change the data type of a column. - * - * @param columnId The id of the column - * @param type The new type of the column - */ - public abstract void setColumnType( long columnId, PolyType type, PolyType collectionsType, Integer length, Integer precision, Integer dimension, Integer cardinality ) throws GenericCatalogException; - - /** - * Change nullability of the column (weather the column allows null values). - * - * @param columnId The id of the column - * @param nullable True if the column should allow null values, false if not. - */ - public abstract void setNullable( long columnId, boolean nullable ) throws GenericCatalogException; - - /** - * Set the collation of a column. - * If the column already has the specified collation set, this method is a NoOp. - * - * @param columnId The id of the column - * @param collation The collation to set - */ - public abstract void setCollation( long columnId, Collation collation ); - - /** - * Checks if there is a column with the specified name in the specified table. - * - * @param tableId The id of the table - * @param columnName The name to check for - * @return true if there is a column with this name, false if not. - */ - public abstract boolean checkIfExistsColumn( long tableId, String columnName ); - - /** - * Delete the specified column. This also deletes a default value in case there is one defined for this column. - * - * @param columnId The id of the column to delete - */ - public abstract void deleteColumn( long columnId ); - - /** - * Adds a default value for a column. If there already is a default values, it being replaced. - * - * @param columnId The id of the column - * @param type The type of the default value - * @param defaultValue True if the column should allow null values, false if not. - */ - public abstract void setDefaultValue( long columnId, PolyType type, String defaultValue ); - - /** - * Deletes an existing default value of a column. NoOp if there is no default value defined. 
- * - * @param columnId The id of the column + * @param newSettings The new settings for the adapter */ - public abstract void deleteDefaultValue( long columnId ); + public abstract void updateAdapterSettings( long adapterId, Map<String, String> newSettings ); /** - * Returns a specified primary key + * Delete an adapter * - * @param key The id of the primary key - * @return The primary key + * @param id The id of the adapter to delete */ - public abstract CatalogPrimaryKey getPrimaryKey( long key ); + public abstract void deleteAdapter( long id ); - /** - * Check whether a key is a primary key + /** + * Get list of all query interfaces * - * @param keyId The id of the key - * @return Whether the key is a primary key + * @return List of query interfaces */ - public abstract boolean isPrimaryKey( long keyId ); + public abstract List<CatalogQueryInterface> getQueryInterfaces(); /** - * Check whether a key is a foreign key + * Get a query interface by its unique name * - * @param keyId The id of the key - * @return Whether the key is a foreign key + * @param uniqueName The unique name of the query interface + * @return The CatalogQueryInterface */ - public abstract boolean isForeignKey( long keyId ); + public abstract CatalogQueryInterface getQueryInterface( String uniqueName ) throws UnknownQueryInterfaceException; /** - * Check whether a key is an index + * Get a query interface by its id * - * @param keyId The id of the key - * @return Whether the key is an index + * @param id The id of the query interface + * @return The CatalogQueryInterface */ - public abstract boolean isIndex( long keyId ); + public abstract CatalogQueryInterface getQueryInterface( long id ); /** - * Check whether a key is a constraint + * Add a query interface * - * @param keyId The id of the key - * @return Whether the key is a constraint + * @param uniqueName The unique name of the query interface + * @param clazz The class name of the query interface + * @param settings The configuration of the query interface + * @return The id of the newly added query interface */ - public abstract boolean isConstraint( long keyId ); - - /** - * Adds a primary key - * - * @param tableId The id of the table - * @param columnIds The ids of the columns which will be part of the primary key - */ - public abstract void addPrimaryKey( long tableId, List<Long> columnIds ) throws GenericCatalogException; - - /** - * Returns all (imported) foreign keys of a specified table - * - * @param tableId The id of the table - * @return List of foreign keys - */ - public abstract List<CatalogForeignKey> getForeignKeys( long tableId ); - - /** - * Returns all foreign keys that reference the specified table (exported keys). - * - * @param tableId The id of the table - * @return List of foreign keys - */ - public abstract List<CatalogForeignKey> getExportedKeys( long tableId ); - - /** - * Get all constraints of the specified table - * - * @param tableId The id of the table - * @return List of constraints - */ - public abstract List<CatalogConstraint> getConstraints( long tableId ); - - /** - * Gets a collection of indexes for the given key. - * - * @param key The key for which the collection is returned - * @return The collection of indexes - */ - public abstract List<CatalogIndex> getIndexes( CatalogKey key ); - - /** - * Gets a collection of foreign keys for a given {@link CatalogKey}. - * - * @param key The key for which the collection is returned - * @return The collection of foreign keys - */ - public abstract List<CatalogForeignKey> getForeignKeys( CatalogKey key ); - - /** - * Gets a collection of constraints for a given key.
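For orientation, a sketch of the reworked adapter lifecycle above, whose ids are widened from int to long; illustrative only, not part of the patch. The unique name, adapter class, and settings values are invented, and AdapterType.STORE is assumed to be one of the enum's constants:

import java.util.Map;
import org.polypheny.db.catalog.Catalog;
import org.polypheny.db.catalog.entity.CatalogAdapter;
import org.polypheny.db.catalog.entity.CatalogAdapter.AdapterType;
import org.polypheny.db.catalog.exceptions.UnknownAdapterException;

public class AdapterApiSketch {

    static void adapterLifecycle() throws UnknownAdapterException {
        Catalog catalog = Catalog.getInstance();
        // addAdapter() now hands back a long id instead of an int
        long adapterId = catalog.addAdapter( "hsqldb_1", "org.polypheny.db.adapter.jdbc.stores.HsqldbStore", AdapterType.STORE, Map.of( "maxConnections", "25" ) );
        // settings updates and lookups address the adapter by that id or by its unique name
        catalog.updateAdapterSettings( adapterId, Map.of( "maxConnections", "50" ) );
        CatalogAdapter adapter = catalog.getAdapter( "hsqldb_1" );
        catalog.deleteAdapter( adapterId );
    }

}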
- * - * @param key The key for which the collection is returned - * @return The collection of constraints - */ - public abstract List getConstraints( CatalogKey key ); - - /** - * Returns the constraint with the specified name in the specified table. - * - * @param tableId The id of the table - * @param constraintName The name of the constraint - * @return The constraint - */ - public abstract CatalogConstraint getConstraint( long tableId, String constraintName ) throws UnknownConstraintException; - - /** - * Return the foreign key with the specified name from the specified table - * - * @param tableId The id of the table - * @param foreignKeyName The name of the foreign key - * @return The foreign key - */ - public abstract CatalogForeignKey getForeignKey( long tableId, String foreignKeyName ) throws UnknownForeignKeyException; - - /** - * Adds a unique foreign key constraint. - * - * @param tableId The id of the table - * @param columnIds The id of the columns which are part of the foreign key - * @param referencesTableId The if of the referenced table - * @param referencesIds The id of columns forming the key referenced by this key - * @param constraintName The name of the constraint - * @param onUpdate The option for updates - * @param onDelete The option for deletes - */ - public abstract void addForeignKey( long tableId, List columnIds, long referencesTableId, List referencesIds, String constraintName, ForeignKeyOption onUpdate, ForeignKeyOption onDelete ) throws GenericCatalogException; - - /** - * Adds a unique constraint. - * - * @param tableId The id of the table - * @param constraintName The name of the constraint - * @param columnIds A list of column ids - */ - public abstract void addUniqueConstraint( long tableId, String constraintName, List columnIds ) throws GenericCatalogException; - - /** - * Returns all indexes of a table - * - * @param tableId The id of the table - * @param onlyUnique true if only indexes for unique values are returned. false if all indexes are returned. - * @return List of indexes - */ - public abstract List getIndexes( long tableId, boolean onlyUnique ); - - /** - * Returns the index with the specified name in the specified table - * - * @param tableId The id of the table - * @param indexName The name of the index - * @return The Index - */ - public abstract CatalogIndex getIndex( long tableId, String indexName ) throws UnknownIndexException; - - /** - * Checks if there is an index with the specified name in the specified table. - * - * @param tableId The id of the table - * @param indexName The name to check for - * @return true if there is an index with this name, false if not. - */ - public abstract boolean checkIfExistsIndex( long tableId, String indexName ); - - /** - * Returns the index with the specified id - * - * @param indexId The id of the index - * @return The Index - */ - public abstract CatalogIndex getIndex( long indexId ); - - /** - * Returns list of all indexes - * - * @return List of indexes - */ - public abstract List getIndexes(); - - /** - * Adds an index over the specified columns - * - * @param tableId The id of the table - * @param columnIds A list of column ids - * @param unique Weather the index is unique - * @param method Name of the index method (e.g. btree_unique) - * @param methodDisplayName Display name of the index method (e.g. 
BTREE) - * @param location ID of the data store where the index is located (0 for Polypheny-DB itself) - * @param type The type of index (manual, automatic) - * @param indexName The name of the index - * @return The id of the created index - */ - public abstract long addIndex( long tableId, List columnIds, boolean unique, String method, String methodDisplayName, int location, IndexType type, String indexName ) throws GenericCatalogException; - - /** - * Set physical index name. - * - * @param indexId The id of the index - * @param physicalName The physical name to be set - */ - public abstract void setIndexPhysicalName( long indexId, String physicalName ); - - /** - * Delete the specified index - * - * @param indexId The id of the index to drop - */ - public abstract void deleteIndex( long indexId ); - - /** - * Deletes the specified primary key (including the entry in the key table). If there is an index on this key, make sure to delete it first. - * - * @param tableId The id of the key to drop - */ - public abstract void deletePrimaryKey( long tableId ) throws GenericCatalogException; - - /** - * Delete the specified foreign key (does not delete the referenced key). - * - * @param foreignKeyId The id of the foreign key to delete - */ - public abstract void deleteForeignKey( long foreignKeyId ) throws GenericCatalogException; - - /** - * Delete the specified constraint. - * For deleting foreign keys, use {@link #deleteForeignKey(long)}. - * - * @param constraintId The id of the constraint to delete - */ - public abstract void deleteConstraint( long constraintId ) throws GenericCatalogException; - - /** - * Get the user with the specified name - * - * @param userName The name of the user - * @return The user - * @throws UnknownUserException If there is no user with the specified name - */ - public abstract CatalogUser getUser( String userName ) throws UnknownUserException; - - /** - * Get the user with the specified id. 
- * - * @param userId The id of the user - * @return The user - */ - public abstract CatalogUser getUser( int userId ); - - /** - * Get list of all adapters - * - * @return List of adapters - */ - public abstract List getAdapters(); - - /** - * Get an adapter by its unique name - * - * @return The adapter - */ - public abstract CatalogAdapter getAdapter( String uniqueName ) throws UnknownAdapterException; - - /** - * Get an adapter by its id - * - * @return The adapter - */ - public abstract CatalogAdapter getAdapter( int adapterId ); - - /** - * Check if an adapter with the given id exists - * - * @param adapterId the id of the adapter - * @return if the adapter exists - */ - public abstract boolean checkIfExistsAdapter( int adapterId ); - - /** - * Add an adapter - * - * @param uniqueName The unique name of the adapter - * @param clazz The class name of the adapter - * @param type The type of adapter - * @param settings The configuration of the adapter - * @return The id of the newly added adapter - */ - public abstract int addAdapter( String uniqueName, String clazz, AdapterType type, Map settings ); - - /** - * Update settings of an adapter - * - * @param adapterId The id of the adapter - * @param newSettings The new settings for the adapter - */ - public abstract void updateAdapterSettings( int adapterId, Map newSettings ); - - /** - * Delete an adapter - * - * @param adapterId The id of the adapter to delete - */ - public abstract void deleteAdapter( int adapterId ); - - /* - * Get list of all query interfaces - * - * @return List of query interfaces - */ - public abstract List getQueryInterfaces(); - - /** - * Get a query interface by its unique name - * - * @param uniqueName The unique name of the query interface - * @return The CatalogQueryInterface - */ - public abstract CatalogQueryInterface getQueryInterface( String uniqueName ) throws UnknownQueryInterfaceException; - - /** - * Get a query interface by its id - * - * @param ifaceId The id of the query interface - * @return The CatalogQueryInterface - */ - public abstract CatalogQueryInterface getQueryInterface( int ifaceId ); - - /** - * Add a query interface - * - * @param uniqueName The unique name of the query interface - * @param clazz The class name of the query interface - * @param settings The configuration of the query interface - * @return The id of the newly added query interface - */ - public abstract int addQueryInterface( String uniqueName, String clazz, Map settings ); + public abstract long addQueryInterface( String uniqueName, String clazz, Map settings ); /** * Delete a query interface * - * @param ifaceId The id of the query interface to delete - */ - public abstract void deleteQueryInterface( int ifaceId ); - - /** - * Adds a partition to the catalog - * - * @param tableId The unique id of the table - * @param schemaId The unique id of the table - * @param partitionType partition Type of the added partition - * @return The id of the created partitionGroup - */ - public abstract long addPartitionGroup( long tableId, String partitionGroupName, long schemaId, PartitionType partitionType, long numberOfInternalPartitions, List effectivePartitionGroupQualifier, boolean isUnbound ) throws GenericCatalogException; - - /** - * Should only be called from mergePartitions(). Deletes a single partition and all references. 
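The adapter accessors removed here still take int ids; their replacements earlier in this patch take long. A sketch of the resulting lifecycle, assuming CatalogAdapter exposes its id as a public field like the other catalog entities, with a placeholder unique name and settings:

    import java.util.Map;
    import org.polypheny.db.catalog.Catalog;
    import org.polypheny.db.catalog.entity.CatalogAdapter;
    import org.polypheny.db.catalog.exceptions.UnknownAdapterException;

    static void reconfigureAdapter( Catalog catalog ) throws UnknownAdapterException {
        CatalogAdapter adapter = catalog.getAdapter( "hsqldb" );  // lookup by unique name
        // Both mutators now accept the adapter id as long.
        catalog.updateAdapterSettings( adapter.id, Map.of( "maxConnections", "25" ) );
        catalog.deleteAdapter( adapter.id );
    }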
- * - * @param tableId The unique id of the table - * @param schemaId The unique id of the table - * @param partitionGroupId The partitionId to be deleted - */ - public abstract void deletePartitionGroup( long tableId, long schemaId, long partitionGroupId ); - - /** - * Get a partition object by its unique id - * - * @param partitionGroupId The unique id of the partition - * @return A catalog partitionGroup - */ - public abstract CatalogPartitionGroup getPartitionGroup( long partitionGroupId ); - - /** - * Adds a partition to the catalog - * - * @param tableId The unique id of the table - * @param schemaId The unique id of the table - * @param partitionGroupId partitionGroupId where the partition should be initially added to - * @return The id of the created partition - */ - public abstract long addPartition( long tableId, long schemaId, long partitionGroupId, List effectivePartitionGroupQualifier, boolean isUnbound ) throws GenericCatalogException; - - /** - * Deletes a single partition and all references. - * - * @param tableId The unique id of the table - * @param schemaId The unique id of the table - * @param partitionId The partitionId to be deleted - */ - public abstract void deletePartition( long tableId, long schemaId, long partitionId ); - - /** - * Get a partition object by its unique id - * - * @param partitionId The unique id of the partition - * @return A catalog partition - */ - public abstract CatalogPartition getPartition( long partitionId ); - - /** - * Retrieves a list of partitions which are associated with a specific table - * - * @param tableId Table for which partitions shall be gathered - * @return List of all partitions associated with that table - */ - public abstract List getPartitionsByTable( long tableId ); - - /** - * Effectively partitions a table with the specified partitionType - * - * @param tableId Table to be partitioned - * @param partitionType Partition function to apply on the table - * @param partitionColumnId Column used to apply the partition function on - * @param numPartitionGroups Explicit number of partitions - * @param partitionGroupIds List of ids of the catalog partitions - */ - public abstract void partitionTable( long tableId, PartitionType partitionType, long partitionColumnId, int numPartitionGroups, List partitionGroupIds, PartitionProperty partitionProperty ); - - /** - * Merges a partitioned table. - * Resets all objects and structures which were introduced by partitionTable. - * - * @param tableId Table to be merged - */ - public abstract void mergeTable( long tableId ); - - /** - * Updates partitionProperties on table - * - * @param tableId Table to be partitioned - * @param partitionProperty Partition properties - */ - public abstract void updateTablePartitionProperties( long tableId, PartitionProperty partitionProperty ); - - /** - * Get a List of all partitions belonging to a specific table - * - * @param tableId Table to be queried - * @return list of all partitions on this table - */ - public abstract List getPartitionGroups( long tableId ); - - /** - * Get all partitions of the specified database which fit to the specified filter patterns. - * getColumns(xid, databaseName, null, null, null) returns all partitions of the database. - * - * @param schemaNamePattern Pattern for the schema name. null returns all. - * @param tableNamePattern Pattern for the table name. null returns catalog/src/test/java/org/polypheny/db/test/CatalogTest.javaall. - * @return List of columns which fit to the specified filters. 
If there is no column which meets the criteria, an empty list is returned. - */ - public abstract List getPartitionGroups( Pattern schemaNamePattern, Pattern tableNamePattern ); - - /** - * Updates the specified partition group with the attached partitionIds - * - * @param partitionGroupId Partition Group to be updated - * @param partitionIds List of new partitionIds - */ - public abstract void updatePartitionGroup( long partitionGroupId, List partitionIds ); - - /** - * Adds a partition to an already existing partition Group - * - * @param partitionGroupId Group to add to - * @param partitionId Partition to add - */ - public abstract void addPartitionToGroup( long partitionGroupId, Long partitionId ); - - /** - * Removes a partition from an already existing partition Group - * - * @param partitionGroupId Group to remove the partition from - * @param partitionId Partition to remove - */ - public abstract void removePartitionFromGroup( long partitionGroupId, Long partitionId ); - - /** - * Assign the partition to a new partitionGroup - * - * @param partitionId Partition to move - * @param partitionGroupId New target group to move the partition to - */ - public abstract void updatePartition( long partitionId, Long partitionGroupId ); - - /** - * Get a List of all partitions belonging to a specific table - * - * @param partitionGroupId Table to be queried - * @return list of all partitions on this table - */ - public abstract List getPartitions( long partitionGroupId ); - - /** - * Get all partitions of the specified database which fit to the specified filter patterns. - * getColumns(xid, databaseName, null, null, null) returns all partitions of the database. - * - * @param schemaNamePattern Pattern for the schema name. null returns all. - * @param tableNamePattern Pattern for the table name. null returns catalog/src/test/java/org/polypheny/db/test/CatalogTest.javaall. - * @return List of columns which fit to the specified filters. If there is no column which meets the criteria, an empty list is returned. - */ - public abstract List getPartitions( Pattern schemaNamePattern, Pattern tableNamePattern ); - - /** - * Get a list of all partition name belonging to a specific table - * - * @param tableId Table to be queried - * @return list of all partition names on this table - */ - public abstract List getPartitionGroupNames( long tableId ); - - /** - * Get placements by partition. Identify the location of partitions. - * Essentially returns all ColumnPlacements which hold the specified partitionID. - * - * @param tableId The id of the table - * @param partitionGroupId The id of the partition - * @param columnId The id of tje column - * @return List of CatalogColumnPlacements - */ - public abstract List getColumnPlacementsByPartitionGroup( long tableId, long partitionGroupId, long columnId ); - - /** - * Get adapters by partition. 
Identify the location of partitions/replicas - * Essentially returns all adapters which hold the specified partitionID - * - * @param tableId The unique id of the table - * @param partitionGroupId The unique id of the partition - * @return List of CatalogAdapters - */ - public abstract List getAdaptersByPartitionGroup( long tableId, long partitionGroupId ); - - /** - * Get all partitions of a DataPlacement (identified by adapterId and tableId) - * - * @param adapterId The unique id of the adapter - * @param tableId The unique id of the table - * @return List of partitionIds - */ - public abstract List getPartitionGroupsOnDataPlacement( int adapterId, long tableId ); - - /** - * Get all partitions of a DataPlacement (identified by adapterId and tableId) - * - * @param adapterId The unique id of the adapter - * @param tableId The unique id of the table - * @return List of partitionIds - */ - public abstract List getPartitionsOnDataPlacement( int adapterId, long tableId ); - - /** - * Returns list with the index of the partitions on this store from 0..numPartitions - * - * @param adapterId The unique id of the adapter - * @param tableId The unique id of the table - * @return List of partitionId Indices - */ - public abstract List getPartitionGroupsIndexOnDataPlacement( int adapterId, long tableId ); - - /** - * Returns a specific DataPlacement of a given table. - * - * @param adapterId adapter where placement is located - * @param tableId table to retrieve the placement from - * @return DataPlacement of a table placed on a specific store - */ - public abstract CatalogDataPlacement getDataPlacement( int adapterId, long tableId ); - - /** - * Returns all DataPlacements of a given table. - * - * @param tableId table to retrieve the placements from - * @return List of all DataPlacements for the table - */ - public abstract List getDataPlacements( long tableId ); - - /** - * Returns a list of all DataPlacements that contain all columns as well as all partitions - * - * @param tableId table to retrieve the list from - * @return list of all full DataPlacements - */ - public abstract List getAllFullDataPlacements( long tableId ); - - /** - * Returns a list of all DataPlacements that contain all columns - * - * @param tableId table to retrieve the list from - * @return list of all full DataPlacements - */ - public abstract List getAllColumnFullDataPlacements( long tableId ); - - /** - * Returns a list of all DataPlacements that contain all partitions - * - * @param tableId table to retrieve the list from - * @return list of all full DataPlacements - */ - public abstract List getAllPartitionFullDataPlacements( long tableId ); - - /** - * Returns all DataPlacements of a given table that are associated with a given role. - * - * @param tableId table to retrieve the placements from - * @param role role to specifically filter - * @return List of all DataPlacements for the table that are associated with a specific role - */ - public abstract List getDataPlacementsByRole( long tableId, DataPlacementRole role ); - - /** - * Returns all PartitionPlacements of a given table that are associated with a given role. 
- * - * @param tableId table to retrieve the placements from - * @param role role to specifically filter - * @return List of all PartitionPlacements for the table that are associated with a specific role - */ - public abstract List getPartitionPlacementsByRole( long tableId, DataPlacementRole role ); - - /** - * Returns all PartitionPlacements of a given table with a given ID that are associated with a given role. - * - * @param tableId table to retrieve the placements from - * @param role role to specifically filter - * @param partitionId filter by ID - * @return List of all PartitionPlacements for the table that are associated with a specific role for a specific partitionId - */ - public abstract List getPartitionPlacementsByIdAndRole( long tableId, long partitionId, DataPlacementRole role ); - - /** - * Checks if the planned changes are allowed in terms of placements that need to be present. - * Each column must be present for all partitions somewhere. - * - * @param tableId Table to be checked - * @param adapterId Adapter where Ids will be removed from - * @param columnIdsToBeRemoved columns that shall be removed - * @param partitionsIdsToBeRemoved partitions that shall be removed - * @return true if these changes can be made to the data placement, false if not - */ - public abstract boolean validateDataPlacementsConstraints( long tableId, long adapterId, List columnIdsToBeRemoved, List partitionsIdsToBeRemoved ); - - /** - * Flags the table for deletion. - * This method should be executed on a partitioned table before we run a DROP TABLE statement. - * - * @param tableId table to be flagged for deletion - * @param flag true if it should be flagged, false if flag should be removed - */ - public abstract void flagTableForDeletion( long tableId, boolean flag ); - - /** - * Is used to detect if a table is flagged for deletion. - * Effectively checks if a drop of this table is currently in progress. - * This is needed to ensure that there aren't any constraints when recursively removing a table and all placements and partitions. - * - * @param tableId table to be checked - * @return If table is flagged for deletion or not - */ - public abstract boolean isTableFlaggedForDeletion( long tableId ); - - /** - * Adds a placement for a partition. - * - * @param namespaceId - * @param adapterId The adapter on which the table should be placed on - * @param tableId The table for which a partition placement shall be created - * @param partitionId The id of a specific partition that shall create a new placement - * @param placementType The type of placement - * @param physicalSchemaName The schema name on the adapter - * @param physicalTableName The table name on the adapter - */ - public abstract void addPartitionPlacement( long namespaceId, int adapterId, long tableId, long partitionId, PlacementType placementType, String physicalSchemaName, String physicalTableName, DataPlacementRole role ); - - /** - * Adds a new DataPlacement for a given table on a specific store - * - * @param adapterId adapter where placement should be located - * @param tableId table to retrieve the placement from - */ - public abstract void addDataPlacement( int adapterId, long tableId ); - - /** - * Adds a new DataPlacement for a given table on a specific store. - * If it already exists it simply returns the existing placement. 
- * - * @param adapterId adapter where placement is located - * @param tableId table to retrieve the placement from - * @return DataPlacement of a table placed on a specific store - */ - public abstract CatalogDataPlacement addDataPlacementIfNotExists( int adapterId, long tableId ); - - /** - * Modifies a specific DataPlacement of a given table. - * - * @param adapterId adapter where placement is located - * @param tableId table to retrieve the placement from - * @param catalogDataPlacement new dataPlacement to be written - */ - protected abstract void modifyDataPlacement( int adapterId, long tableId, CatalogDataPlacement catalogDataPlacement ); - - /** - * Adds a new placement on a given adapter for an existing graph. - * - * @param adapterId The id of the adapter on which the graph is added - * @param graphId The id of the graph for which a new placement is added - * @return The id of the new placement - */ - public abstract long addGraphPlacement( int adapterId, long graphId ); - - /** - * Gets a collection of graph placements for a given adapter. - * - * @param adapterId The id of the adapter on which the placements are placed - * @return The collection of graph placements - */ - public abstract List getGraphPlacements( int adapterId ); - - /** - * Deletes a specific graph placement for a given graph and adapter. - * - * @param adapterId The id of the adapter on which the placement is removed - * @param graphId The id of the graph for which the placement is removed - */ - public abstract void deleteGraphPlacement( int adapterId, long graphId ); - - /** - * Updates the physical names for a given graph. - * - * @param graphId The id of the graph to update - * @param adapterId The id of the adapter on which the graph to update is placed - * @param physicalGraphName The new physical name of the graph - */ - public abstract void updateGraphPlacementPhysicalNames( long graphId, int adapterId, String physicalGraphName ); - - /** - * Gets a specific placement for a graph on a given adapter. 
- * - * @param graphId The id of the graph - * @param adapterId The id of the adapter on which the placement is placed - * @return The placement matching the conditions - */ - public abstract CatalogGraphPlacement getGraphPlacement( long graphId, int adapterId ); - - /** - * Removes a DataPlacement for a given table on a specific store - * - * @param adapterId adapter where placement should be removed from - * @param tableId table to retrieve the placement from - */ - public abstract void removeDataPlacement( int adapterId, long tableId ); - - /** - * Adds a single dataPlacement on a store for a specific table - * - * @param adapterId adapter id corresponding to a new DataPlacements - * @param tableId table to be updated - */ - protected abstract void addSingleDataPlacementToTable( Integer adapterId, long tableId ); - - /** - * Removes a single dataPlacement from a store for a specific table - * - * @param adapterId adapter id corresponding to a new DataPlacements - * @param tableId table to be updated - */ - protected abstract void removeSingleDataPlacementFromTable( Integer adapterId, long tableId ); - - /** - * Updates the list of data placements on a table - * - * @param tableId table to be updated - * @param newDataPlacements list of new DataPlacements that shall replace the old ones - */ - public abstract void updateDataPlacementsOnTable( long tableId, List newDataPlacements ); - - /** - * Adds columns to dataPlacement on a store for a specific table - * - * @param adapterId adapter id corresponding to a new DataPlacements - * @param tableId table to be updated - * @param columnIds List of columnIds to add to a specific store for the table - */ - protected abstract void addColumnsToDataPlacement( int adapterId, long tableId, List columnIds ); - - /** - * Remove columns to dataPlacement on a store for a specific table - * - * @param adapterId adapter id corresponding to a new DataPlacements - * @param tableId table to be updated - * @param columnIds List of columnIds to remove from a specific store for the table - */ - protected abstract void removeColumnsFromDataPlacement( int adapterId, long tableId, List columnIds ); - - /** - * Adds partitions to dataPlacement on a store for a specific table - * - * @param adapterId adapter id corresponding to a new DataPlacements - * @param tableId table to be updated - * @param partitionIds List of partitionIds to add to a specific store for the table - */ - protected abstract void addPartitionsToDataPlacement( int adapterId, long tableId, List partitionIds ); - - /** - * Remove partitions to dataPlacement on a store for a specific table - * - * @param adapterId adapter id corresponding to a new DataPlacements - * @param tableId table to be updated - * @param partitionIds List of partitionIds to remove from a specific store for the table - */ - protected abstract void removePartitionsFromDataPlacement( int adapterId, long tableId, List partitionIds ); - - /** - * Updates and overrides list of associated columnPlacements {@code &} partitionPlacements for a given data placement - * - * @param adapterId adapter where placement is located - * @param tableId table to retrieve the placement from - * @param columnIds List of columnIds to be located on a specific store for the table - * @param partitionIds List of partitionIds to be located on a specific store for the table - */ - public abstract void updateDataPlacement( int adapterId, long tableId, List columnIds, List partitionIds ); - - /** - * Change physical names of a partition placement. 
- * - * @param adapterId The id of the adapter - * @param partitionId The id of the partition - * @param physicalSchemaName The physical schema name - * @param physicalTableName The physical table name - */ - public abstract void updatePartitionPlacementPhysicalNames( int adapterId, long partitionId, String physicalSchemaName, String physicalTableName ); - - /** - * Deletes a placement for a partition. - * - * @param adapterId The adapter on which the table should be placed on - * @param partitionId The id of a partition which shall be removed from that store. - */ - public abstract void deletePartitionPlacement( int adapterId, long partitionId ); - - /** - * Returns a specific partition entity which is placed on a store. - * - * @param adapterId The adapter on which the requested partition placements reside - * @param partitionId The id of the requested partition - * @return The requested PartitionPlacement on that store for a given is - */ - public abstract CatalogPartitionPlacement getPartitionPlacement( int adapterId, long partitionId ); - - /** - * Returns a list of all Partition Placements which currently reside on an adapter, disregarded of the table. - * - * @param adapterId The adapter on which the requested partition placements reside - * @return A list of all Partition Placements, that are currently located on that specific store - */ - public abstract List getPartitionPlacementsByAdapter( int adapterId ); - - /** - * Returns a list of all Partition Placements which currently reside on an adapter, for a specific table. - * - * @param adapterId The adapter on which the requested partition placements reside - * @param tableId The table for which all partition placements on an adapter should be considered - * @return A list of all Partition Placements, that are currently located on that specific store for an individual table - */ - public abstract List getPartitionPlacementsByTableOnAdapter( int adapterId, long tableId ); - - /** - * Returns a list of all Partition Placements which are currently associated with a table. - * - * @param tableId The table on which the requested partition placements are currently associated with. - * @return A list of all Partition Placements, that belong to the desired table - */ - public abstract List getAllPartitionPlacementsByTable( long tableId ); - - /** - * Get all Partition Placements which are associated with an individual partition ID. - * Identifies on which locations and how often the individual partition is placed. - * - * @param partitionId The requested partition ID - * @return A list of Partition Placements which are physically responsible for that partition - */ - public abstract List getPartitionPlacements( long partitionId ); - - /** - * Returns all tables which are in need of special periodic treatment. - * - * @return List of tables which need to be periodically processed - */ - public abstract List getTablesForPeriodicProcessing(); - - /** - * Registers a table to be considered for periodic processing - * - * @param tableId ID of table to be considered for periodic processing - */ - public abstract void addTableToPeriodicProcessing( long tableId ); - - /** - * Remove a table from periodic background processing - * - * @param tableId ID of table to be removed for periodic processing - */ - public abstract void removeTableFromPeriodicProcessing( long tableId ); - - /** - * Probes if a Partition Placement on an adapter for a specific partition already exists. 
- * - * @param adapterId Adapter on which to check - * @param partitionId Partition which to check - * @return teh response of the probe - */ - public abstract boolean checkIfExistsPartitionPlacement( int adapterId, long partitionId ); - - /** - * Deletes all the dependencies of a view. This is used when deleting a view. - * - * @param catalogView view for which to delete its dependencies - */ - public abstract void deleteViewDependencies( CatalogView catalogView ); - - /** - * Updates the last time a materialized view has been refreshed. - * - * @param materializedViewId id of the materialized view - */ - public abstract void updateMaterializedViewRefreshTime( long materializedViewId ); - - /** - * Get the graph with the given id. - * - * @param collectionId The id of the graph - * @return The requested collection - */ - public abstract LogicalCollection getCollection( long collectionId ); - - /** - * Get a collection of collections which match the given naming pattern. - * - * @param namespaceId The id of the namespace to which the collection belongs - * @param namePattern The naming pattern of the collection itself, null if all are matched - * @return collection of collections matching conditions - */ - public abstract List getCollections( long namespaceId, Pattern namePattern ); - - /** - * Add a new collection with the given parameters. - * - * @param id ID of the collection to add, null if a new one needs to be generated - * @param name The name of the collection - * @param schemaId The id of the namespace to which the collection is added - * @param currentUserId The user, which adds the collection - * @param entity The type of entity of the collection - * @param modifiable If the collection is modifiable - * @return The id of the added collection - */ - public abstract long addCollection( Long id, String name, long schemaId, int currentUserId, EntityType entity, boolean modifiable ); - - /** - * Adds a new placement for a given collection. - * - * @param namespaceId - * @param adapterId The id of the adapter on which the placement is added - * @param collectionId The id of the collection for which the placement is added - * @param placementType The type of placement - * @return The id of the newly added placement - */ - public abstract long addCollectionPlacement( long namespaceId, int adapterId, long collectionId, PlacementType placementType ); - - /** - * Get the mapping for the collection, which points to the substitution entities in other data models. - * - * @param id The id of the collection - * @return The mapping for the specific collection - */ - public abstract CatalogCollectionMapping getCollectionMapping( long id ); - - /** - * Added the required additional entities for the substitutions entities on different data models. - * - * @param schemaId The id of the namespace to which the collection belongs - * @param name The name of the collection - * @param stores The stores on which the collection was added - * @param onlyPlacement If the substitution entities should be created fully or only the placements - * @return The id of the mapping - */ - public abstract long addCollectionLogistics( long schemaId, String name, List stores, boolean onlyPlacement ) throws GenericCatalogException; - - /** - * Gets a collection containing all placements for a given adapter. 
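The collection methods deleted here are re-homed in the new AllocationDocumentCatalog further down. A sketch of the call sequence they support, with placeholder names and ids, and assuming EntityType lives in the logistic package used by the new interfaces:

    import org.polypheny.db.catalog.Catalog;
    import org.polypheny.db.catalog.logistic.EntityType;
    import org.polypheny.db.catalog.logistic.PlacementType;

    static long createCollectionWithPlacement( Catalog catalog, long namespaceId, int userId, int adapterId ) {
        // Passing null as id lets the catalog generate a fresh one.
        long collectionId = catalog.addCollection( null, "customers", namespaceId, userId, EntityType.ENTITY, true );
        catalog.addCollectionPlacement( namespaceId, adapterId, collectionId, PlacementType.AUTOMATIC );
        return collectionId;
    }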
- * - * @param adapterId The id of the adapter for which the collection is provided - * @return The collection of placements - */ - public abstract List getCollectionPlacementsByAdapter( int adapterId ); - - /** - * Gets a specific placement for a given collection and adapter. - * - * @param collectionId The id of the collection for the placement - * @param adapterId The adapter on which the collection is placed - * @return The placement of the collection on the specified adapter - */ - public abstract CatalogCollectionPlacement getCollectionPlacement( long collectionId, int adapterId ); - - /** - * Updates the physical name of the given collection. - * - * @param namespaceId - * @param collectionId The id of the collection to change - * @param adapterId The id of the adapter on which the physical names of the collection are updated - * @param physicalNamespaceName The new namespace name - * @param namespaceName The namespace name - * @param physicalCollectionName The new physical collection name - */ - public abstract void updateCollectionPartitionPhysicalNames( long namespaceId, long collectionId, int adapterId, String physicalNamespaceName, String namespaceName, String physicalCollectionName ); - - /** - * Delete a specific collection. - * - * @param id The id of the collection to delete - */ - public abstract void deleteCollection( long id ); - - /** - * Drop a placement of a specific collection. - * - * @param id The id of the collection to drop - * @param adapterId The id of the adapter on which the collection is placed + * @param id The id of the query interface to delete */ - public abstract void dropCollectionPlacement( long id, int adapterId ); + public abstract void deleteQueryInterface( long id ); public abstract void close(); diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/relational/CatalogSchema.java b/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationCatalog.java similarity index 74% rename from plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/relational/CatalogSchema.java rename to core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationCatalog.java index 16d51b9043..b6f8274e17 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/relational/CatalogSchema.java +++ b/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationCatalog.java @@ -14,16 +14,8 @@ * limitations under the License. */ -package org.polypheny.db.catalog.logical.relational; +package org.polypheny.db.catalog.catalogs; -import lombok.AllArgsConstructor; -import lombok.Value; - -@AllArgsConstructor -@Value -public class CatalogSchema { - - public long id; - public String name; +public interface AllocationCatalog { } diff --git a/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationDocumentCatalog.java b/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationDocumentCatalog.java new file mode 100644 index 0000000000..7874198a8a --- /dev/null +++ b/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationDocumentCatalog.java @@ -0,0 +1,36 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.catalog.catalogs; + +import java.util.List; +import org.polypheny.db.adapter.DataStore; +import org.polypheny.db.catalog.exceptions.GenericCatalogException; + +public interface AllocationDocumentCatalog extends AllocationCatalog { + + /** + * Adds the required additional entities for the substitution entities on the different data models. + * + * @param schemaId The id of the namespace to which the collection belongs + * @param name The name of the collection + * @param stores The stores on which the collection was added + * @param onlyPlacement If the substitution entities should be created fully or only the placements + * @return The id of the mapping + */ + public abstract long addCollectionLogistics( long schemaId, String name, List stores, boolean onlyPlacement ) throws GenericCatalogException; + +} diff --git a/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationGraphCatalog.java b/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationGraphCatalog.java new file mode 100644 index 0000000000..6a0843505b --- /dev/null +++ b/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationGraphCatalog.java @@ -0,0 +1,59 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.catalog.catalogs; + +import java.util.List; +import org.polypheny.db.catalog.entity.CatalogGraphPlacement; + +public interface AllocationGraphCatalog extends AllocationCatalog { + + + /** + * Adds a new placement on a given adapter for an existing graph. + * + * @param adapterId The id of the adapter on which the graph is added + * @param graphId The id of the graph for which a new placement is added + * @return The id of the new placement + */ + public abstract long addGraphPlacement( int adapterId, long graphId ); + + /** + * Gets a collection of graph placements for a given adapter. + * + * @param adapterId The id of the adapter on which the placements are placed + * @return The collection of graph placements + */ + public abstract List getGraphPlacements( int adapterId ); + + /** + * Deletes a specific graph placement for a given graph and adapter. + * + * @param adapterId The id of the adapter on which the placement is removed + * @param graphId The id of the graph for which the placement is removed + */ + public abstract void deleteGraphPlacement( int adapterId, long graphId ); + + /** + * Gets a specific placement for a graph on a given adapter.
+ * + * @param graphId The id of the graph + * @param adapterId The id of the adapter on which the placement is placed + * @return The placement matching the conditions + */ + public abstract CatalogGraphPlacement getGraphPlacement( long graphId, int adapterId ); + +} diff --git a/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationRelationalCatalog.java b/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationRelationalCatalog.java new file mode 100644 index 0000000000..8d6141a0dd --- /dev/null +++ b/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationRelationalCatalog.java @@ -0,0 +1,673 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.catalog.catalogs; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import java.util.List; +import org.polypheny.db.catalog.entity.CatalogAdapter; +import org.polypheny.db.catalog.entity.CatalogColumnPlacement; +import org.polypheny.db.catalog.entity.CatalogDataPlacement; +import org.polypheny.db.catalog.entity.CatalogPartition; +import org.polypheny.db.catalog.entity.CatalogPartitionGroup; +import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; +import org.polypheny.db.catalog.exceptions.GenericCatalogException; +import org.polypheny.db.catalog.logistic.DataPlacementRole; +import org.polypheny.db.catalog.logistic.PartitionType; +import org.polypheny.db.catalog.logistic.Pattern; +import org.polypheny.db.catalog.logistic.PlacementType; +import org.polypheny.db.partition.properties.PartitionProperty; + +public interface AllocationRelationalCatalog extends AllocationCatalog { + + + /** + * Adds a placement for a column. + * + * @param adapterId The adapter on which the column should be placed + * @param columnId The id of the column to be placed + * @param placementType The type of placement + * @param physicalSchemaName The schema name on the adapter + * @param physicalTableName The table name on the adapter + * @param physicalColumnName The column name on the adapter + */ + public abstract void addColumnPlacement( int adapterId, long columnId, PlacementType placementType, String physicalSchemaName, String physicalTableName, String physicalColumnName ); + + /** + * Deletes all dependent column placements + * + * @param adapterId The id of the adapter + * @param columnId The id of the column + * @param columnOnly If the delete originates from a dropColumn + */ + public abstract void deleteColumnPlacement( int adapterId, long columnId, boolean columnOnly ); + + /** + * Gets the column placement of a specific column on a specific adapter. + * Only one ColumnPlacement exists per column and adapter; multiplicity due to different partitionIds is ignored.
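A usage sketch for the AllocationGraphCatalog interface introduced above; the adapter and graph ids are assumed to come from the logical catalog, and nothing here is prescribed by the patch itself:

    import org.polypheny.db.catalog.catalogs.AllocationGraphCatalog;
    import org.polypheny.db.catalog.entity.CatalogGraphPlacement;

    static void placeAndDropGraph( AllocationGraphCatalog graphs, int adapterId, long graphId ) {
        long placementId = graphs.addGraphPlacement( adapterId, graphId );
        // The single-placement getter and the per-adapter listing both see the new placement.
        CatalogGraphPlacement placement = graphs.getGraphPlacement( graphId, adapterId );
        assert graphs.getGraphPlacements( adapterId ).contains( placement );
        graphs.deleteGraphPlacement( adapterId, graphId );
    }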
+ * + * @param adapterId The id of the adapter + * @param columnId The id of the column + * @return The specific column placement + */ + public abstract CatalogColumnPlacement getColumnPlacement( int adapterId, long columnId ); + + /** + * Checks if there is a placement of the specified column on the specified adapter. + * + * @param adapterId The id of the adapter + * @param columnId The id of the column + * @return true if there is a column placement, false if not. + */ + public abstract boolean checkIfExistsColumnPlacement( int adapterId, long columnId ); + + /** + * Get all column placements of a column + * + * @param columnId The id of the specific column + * @return List of column placements of specific column + */ + public abstract List getColumnPlacement( long columnId ); + + /** + * Get column placements of a specific table on a specific adapter on column detail level. + * Only returns one ColumnPlacement per column on adapter. Ignores multiplicity due to different partitionIds + * + * @param adapterId The id of the adapter + * @return List of column placements of the table on the specified adapter + */ + public abstract List getColumnPlacementsOnAdapterPerTable( int adapterId, long tableId ); + + /** + * Get column placements on an adapter, on column detail level. + * Only returns one ColumnPlacement per column on adapter. Ignores multiplicity due to different partitionIds + * + * @param adapterId The id of the adapter + * @return List of column placements on the specified adapter + */ + public abstract List getColumnPlacementsOnAdapter( int adapterId ); + + /** + * Gets a collection of column placements for a given column. + * + * @param columnId The id of the column of requested column placements + * @return The sorted collection of placements + */ + public abstract List getColumnPlacementsByColumn( long columnId ); + + /** + * Gets all column placements of a table structured by the id of the adapters. + * + * @param tableId The id of the table for the requested column placements + * @return The requested collection + */ + public abstract ImmutableMap> getColumnPlacementsByAdapter( long tableId ); + + /** + * Gets a map of partition placements sorted by adapter. + * + * @param tableId The id of the table for which the partitions are returned + * @return The sorted partition placements + */ + public abstract ImmutableMap> getPartitionPlacementsByAdapter( long tableId ); + + /** + * Gets the id of the partition group a partition belongs to. + * + * @param partitionId The id of the partition + */ + public abstract long getPartitionGroupByPartition( long partitionId ); + + + /** + * Get column placements in a specific schema on a specific adapter + * + * @param adapterId The id of the adapter + * @param schemaId The id of the schema + * @return List of column placements on this adapter and schema + */ + public abstract List getColumnPlacementsOnAdapterAndSchema( int adapterId, long schemaId ); + + /** + * Update the type of a placement. + * + * @param adapterId The id of the adapter + * @param columnId The id of the column + * @param placementType The new type of placement + */ + public abstract void updateColumnPlacementType( int adapterId, long columnId, PlacementType placementType ); + + /** + * Update physical position of a column placement on a specified adapter.
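A sketch of the column placement round trip these methods describe; the physical names are placeholders that a real adapter would report back, and the method is illustrative rather than taken from the patch:

    import org.polypheny.db.catalog.catalogs.AllocationRelationalCatalog;
    import org.polypheny.db.catalog.entity.CatalogColumnPlacement;
    import org.polypheny.db.catalog.logistic.PlacementType;

    static void mirrorColumn( AllocationRelationalCatalog alloc, int adapterId, long columnId ) {
        alloc.addColumnPlacement( adapterId, columnId, PlacementType.MANUAL, "public0", "tab_12", "col_3" );
        if ( alloc.checkIfExistsColumnPlacement( adapterId, columnId ) ) {
            CatalogColumnPlacement placement = alloc.getColumnPlacement( adapterId, columnId );
        }
        alloc.deleteColumnPlacement( adapterId, columnId, false );  // false: not part of a dropColumn
    }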
+ * + * @param adapterId The id of the adapter + * @param columnId The id of the column + * @param position The physical position to set + */ + public abstract void updateColumnPlacementPhysicalPosition( int adapterId, long columnId, long position ); + + /** + * Update physical position of a column placement on a specified adapter. Uses auto-increment to get the globally increasing number. + * + * @param adapterId The id of the adapter + * @param columnId The id of the column + */ + public abstract void updateColumnPlacementPhysicalPosition( int adapterId, long columnId ); + + /** + * Change physical names of all column placements. + * + * @param adapterId The id of the adapter + * @param columnId The id of the column + * @param physicalSchemaName The physical schema name + * @param physicalColumnName The physical column name + * @param updatePhysicalColumnPosition Whether to reset the column position (the highest number in the table; represents that the column is now at the last position) + */ + public abstract void updateColumnPlacementPhysicalNames( int adapterId, long columnId, String physicalSchemaName, String physicalColumnName, boolean updatePhysicalColumnPosition ); + + + /** + * Adds a partition group to the catalog + * + * @param tableId The unique id of the table + * @param schemaId The unique id of the schema + * @param partitionType Partition type of the added partition group + * @return The id of the created partitionGroup + */ + public abstract long addPartitionGroup( long tableId, String partitionGroupName, long schemaId, PartitionType partitionType, long numberOfInternalPartitions, List effectivePartitionGroupQualifier, boolean isUnbound ) throws GenericCatalogException; + + /** + * Should only be called from mergePartitions(). Deletes a single partition and all references. + * + * @param tableId The unique id of the table + * @param schemaId The unique id of the schema + * @param partitionGroupId The partitionGroupId to be deleted + */ + public abstract void deletePartitionGroup( long tableId, long schemaId, long partitionGroupId ); + + /** + * Get a partition group object by its unique id + * + * @param partitionGroupId The unique id of the partition group + * @return A catalog partitionGroup + */ + public abstract CatalogPartitionGroup getPartitionGroup( long partitionGroupId ); + + /** + * Adds a partition to the catalog + * + * @param tableId The unique id of the table + * @param schemaId The unique id of the schema + * @param partitionGroupId partitionGroupId where the partition should be initially added to + * @return The id of the created partition + */ + public abstract long addPartition( long tableId, long schemaId, long partitionGroupId, List effectivePartitionGroupQualifier, boolean isUnbound ) throws GenericCatalogException; + + /** + * Deletes a single partition and all references.
+ * + * @param tableId The unique id of the table + * @param schemaId The unique id of the schema + * @param partitionId The partitionId to be deleted + */ + public abstract void deletePartition( long tableId, long schemaId, long partitionId ); + + /** + * Get a partition object by its unique id + * + * @param partitionId The unique id of the partition + * @return A catalog partition + */ + public abstract CatalogPartition getPartition( long partitionId ); + + /** + * Retrieves a list of partitions which are associated with a specific table + * + * @param tableId Table for which partitions shall be gathered + * @return List of all partitions associated with that table + */ + public abstract List getPartitionsByTable( long tableId ); + + /** + * Effectively partitions a table with the specified partitionType + * + * @param tableId Table to be partitioned + * @param partitionType Partition function to apply on the table + * @param partitionColumnId Column used to apply the partition function on + * @param numPartitionGroups Explicit number of partitions + * @param partitionGroupIds List of ids of the catalog partitions + */ + public abstract void partitionTable( long tableId, PartitionType partitionType, long partitionColumnId, int numPartitionGroups, List partitionGroupIds, PartitionProperty partitionProperty ); + + /** + * Merges a partitioned table. + * Resets all objects and structures which were introduced by partitionTable. + * + * @param tableId Table to be merged + */ + public abstract void mergeTable( long tableId ); + + /** + * Updates partitionProperties on table + * + * @param tableId Table to be partitioned + * @param partitionProperty Partition properties + */ + public abstract void updateTablePartitionProperties( long tableId, PartitionProperty partitionProperty ); + + /** + * Get a List of all partition groups belonging to a specific table + * + * @param tableId Table to be queried + * @return list of all partition groups on this table + */ + public abstract List getPartitionGroups( long tableId ); + + /** + * Get all partition groups of the specified database which match the specified filter patterns. + * Passing null for both patterns returns all partition groups of the database. + * + * @param schemaNamePattern Pattern for the schema name. null returns all. + * @param tableNamePattern Pattern for the table name. null returns all. + * @return List of partition groups which match the specified filters. If there is no partition group which meets the criteria, an empty list is returned.
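A sketch of the partitioning lifecycle formed by addPartitionGroup, addPartition, partitionTable, and mergeTable; the group name, the empty qualifier lists, and the PartitionProperty argument are placeholders under assumed element types:

    import java.util.List;
    import org.polypheny.db.catalog.catalogs.AllocationRelationalCatalog;
    import org.polypheny.db.catalog.exceptions.GenericCatalogException;
    import org.polypheny.db.catalog.logistic.PartitionType;
    import org.polypheny.db.partition.properties.PartitionProperty;

    static void partitionThenMerge( AllocationRelationalCatalog alloc, long tableId, long schemaId, long partitionColumnId, PartitionProperty property ) throws GenericCatalogException {
        long groupId = alloc.addPartitionGroup( tableId, "hot", schemaId, PartitionType.LIST, 1, List.of(), false );
        long partitionId = alloc.addPartition( tableId, schemaId, groupId, List.of(), false );
        alloc.partitionTable( tableId, PartitionType.LIST, partitionColumnId, 1, List.of( groupId ), property );
        // mergeTable resets everything partitionTable introduced.
        alloc.mergeTable( tableId );
    }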
+ */ + public abstract List getPartitionGroups( Pattern schemaNamePattern, Pattern tableNamePattern ); + + /** + * Updates the specified partition group with the attached partitionIds + * + * @param partitionGroupId Partition Group to be updated + * @param partitionIds List of new partitionIds + */ + public abstract void updatePartitionGroup( long partitionGroupId, List partitionIds ); + + /** + * Adds a partition to an already existing partition Group + * + * @param partitionGroupId Group to add to + * @param partitionId Partition to add + */ + public abstract void addPartitionToGroup( long partitionGroupId, Long partitionId ); + + /** + * Removes a partition from an already existing partition Group + * + * @param partitionGroupId Group to remove the partition from + * @param partitionId Partition to remove + */ + public abstract void removePartitionFromGroup( long partitionGroupId, Long partitionId ); + + /** + * Assign the partition to a new partitionGroup + * + * @param partitionId Partition to move + * @param partitionGroupId New target group to move the partition to + */ + public abstract void updatePartition( long partitionId, Long partitionGroupId ); + + /** + * Get a List of all partitions belonging to a specific partition group + * + * @param partitionGroupId Partition group to be queried + * @return list of all partitions in this partition group + */ + public abstract List getPartitions( long partitionGroupId ); + + /** + * Get all partitions of the specified database which match the specified filter patterns. + * Passing null for both patterns returns all partitions of the database. + * + * @param schemaNamePattern Pattern for the schema name. null returns all. + * @param tableNamePattern Pattern for the table name. null returns all. + * @return List of partitions which match the specified filters. If there is no partition which meets the criteria, an empty list is returned. + */ + public abstract List getPartitions( Pattern schemaNamePattern, Pattern tableNamePattern ); + + /** + * Get a list of all partition group names belonging to a specific table + * + * @param tableId Table to be queried + * @return list of all partition group names on this table + */ + public abstract List getPartitionGroupNames( long tableId ); + + /** + * Get placements by partition. Identify the location of partitions. + * Essentially returns all ColumnPlacements which hold the specified partitionID. + * + * @param tableId The id of the table + * @param partitionGroupId The id of the partition group + * @param columnId The id of the column + * @return List of CatalogColumnPlacements + */ + public abstract List getColumnPlacementsByPartitionGroup( long tableId, long partitionGroupId, long columnId ); + + /** + * Get adapters by partition.
Identify the location of partitions/replicas. + * Essentially returns all adapters which hold the specified partitionID + * + * @param tableId The unique id of the table + * @param partitionGroupId The unique id of the partition group + * @return List of CatalogAdapters + */ + public abstract List getAdaptersByPartitionGroup( long tableId, long partitionGroupId ); + + /** + * Get all partition groups of a DataPlacement (identified by adapterId and tableId) + * + * @param adapterId The unique id of the adapter + * @param tableId The unique id of the table + * @return List of partitionGroupIds + */ + public abstract List getPartitionGroupsOnDataPlacement( int adapterId, long tableId ); + + /** + * Get all partitions of a DataPlacement (identified by adapterId and tableId) + * + * @param adapterId The unique id of the adapter + * @param tableId The unique id of the table + * @return List of partitionIds + */ + public abstract List getPartitionsOnDataPlacement( int adapterId, long tableId ); + + /** + * Returns a list with the indices of the partition groups on this store, from 0..numPartitions + * + * @param adapterId The unique id of the adapter + * @param tableId The unique id of the table + * @return List of partitionGroupId indices + */ + public abstract List getPartitionGroupsIndexOnDataPlacement( int adapterId, long tableId ); + + /** + * Returns a specific DataPlacement of a given table. + * + * @param adapterId adapter where placement is located + * @param tableId table to retrieve the placement from + * @return DataPlacement of a table placed on a specific store + */ + public abstract CatalogDataPlacement getDataPlacement( int adapterId, long tableId ); + + /** + * Returns all DataPlacements of a given table. + * + * @param tableId table to retrieve the placements from + * @return List of all DataPlacements for the table + */ + public abstract List getDataPlacements( long tableId ); + + /** + * Returns a list of all DataPlacements that contain all columns as well as all partitions + * + * @param tableId table to retrieve the list from + * @return list of all full DataPlacements + */ + public abstract List getAllFullDataPlacements( long tableId ); + + /** + * Returns a list of all DataPlacements that contain all columns + * + * @param tableId table to retrieve the list from + * @return list of all full DataPlacements + */ + public abstract List getAllColumnFullDataPlacements( long tableId ); + + /** + * Returns a list of all DataPlacements that contain all partitions + * + * @param tableId table to retrieve the list from + * @return list of all full DataPlacements + */ + public abstract List getAllPartitionFullDataPlacements( long tableId ); + + /** + * Returns all DataPlacements of a given table that are associated with a given role. + * + * @param tableId table to retrieve the placements from + * @param role role to specifically filter + * @return List of all DataPlacements for the table that are associated with a specific role + */ + public abstract List getDataPlacementsByRole( long tableId, DataPlacementRole role ); + + /** + * Returns all PartitionPlacements of a given table that are associated with a given role.
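A sketch of how a caller might combine these placement queries; "full" here means the placement carries every column and every partition of the table, and the generics are restored by assumption since the rendered signatures elide them:

    import java.util.List;
    import org.polypheny.db.catalog.catalogs.AllocationRelationalCatalog;
    import org.polypheny.db.catalog.entity.CatalogDataPlacement;

    static boolean hasFullCopy( AllocationRelationalCatalog alloc, int adapterId, long tableId ) {
        CatalogDataPlacement onStore = alloc.getDataPlacement( adapterId, tableId );
        // Full placements hold all columns and all partitions of the table.
        List<CatalogDataPlacement> full = alloc.getAllFullDataPlacements( tableId );
        return full.contains( onStore );
    }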
+ * + * @param tableId table to retrieve the placements from + * @param role role to specifically filter + * @return List of all PartitionPlacements for the table that are associated with a specific role + */ + public abstract List getPartitionPlacementsByRole( long tableId, DataPlacementRole role ); + + /** + * Returns all PartitionPlacements of a given table with a given ID that are associated with a given role. + * + * @param tableId table to retrieve the placements from + * @param role role to specifically filter + * @param partitionId filter by ID + * @return List of all PartitionPlacements for the table that are associated with a specific role for a specific partitionId + */ + public abstract List getPartitionPlacementsByIdAndRole( long tableId, long partitionId, DataPlacementRole role ); + + /** + * Checks if the planned changes are allowed in terms of placements that need to be present. + * Each column must be present for all partitions somewhere. + * + * @param tableId Table to be checked + * @param adapterId Adapter where Ids will be removed from + * @param columnIdsToBeRemoved columns that shall be removed + * @param partitionsIdsToBeRemoved partitions that shall be removed + * @return true if these changes can be made to the data placement, false if not + */ + public abstract boolean validateDataPlacementsConstraints( long tableId, long adapterId, List columnIdsToBeRemoved, List partitionsIdsToBeRemoved ); + + + /** + * Adds a placement for a partition. + * + * @param namespaceId The id of the namespace + * @param adapterId The adapter on which the partition should be placed + * @param tableId The table for which a partition placement shall be created + * @param partitionId The id of a specific partition that shall create a new placement + * @param placementType The type of placement + * @param physicalSchemaName The schema name on the adapter + * @param physicalTableName The table name on the adapter + * @param role The role of the new placement + */ + public abstract void addPartitionPlacement( long namespaceId, int adapterId, long tableId, long partitionId, PlacementType placementType, String physicalSchemaName, String physicalTableName, DataPlacementRole role ); + + /** + * Adds a new DataPlacement for a given table on a specific store + * + * @param adapterId adapter where placement should be located + * @param tableId table to retrieve the placement from + */ + public abstract void addDataPlacement( int adapterId, long tableId ); + + /** + * Adds a new DataPlacement for a given table on a specific store. + * If it already exists it simply returns the existing placement. + * + * @param adapterId adapter where placement is located + * @param tableId table to retrieve the placement from + * @return DataPlacement of a table placed on a specific store + */ + public abstract CatalogDataPlacement addDataPlacementIfNotExists( int adapterId, long tableId ); + + /** + * Modifies a specific DataPlacement of a given table.
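A sketch of the guard that validateDataPlacementsConstraints enables before shrinking a placement with updateDataPlacement (declared further below); the id lists are placeholders supplied by the caller:

    import java.util.List;
    import org.polypheny.db.catalog.catalogs.AllocationRelationalCatalog;

    static void shrinkPlacement( AllocationRelationalCatalog alloc, int adapterId, long tableId, List<Long> keepColumns, List<Long> keepPartitions, List<Long> dropColumns, List<Long> dropPartitions ) {
        // Only shrink if every column stays present for every partition somewhere.
        if ( alloc.validateDataPlacementsConstraints( tableId, adapterId, dropColumns, dropPartitions ) ) {
            alloc.updateDataPlacement( adapterId, tableId, keepColumns, keepPartitions );
        }
    }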
+ * + * @param adapterId adapter where placement is located + * @param tableId table to retrieve the placement from + * @param catalogDataPlacement new dataPlacement to be written + */ + abstract void modifyDataPlacement( int adapterId, long tableId, CatalogDataPlacement catalogDataPlacement ); + + + /** + * Removes a DataPlacement for a given table on a specific store + * + * @param adapterId adapter where placement should be removed from + * @param tableId table to retrieve the placement from + */ + public abstract void removeDataPlacement( int adapterId, long tableId ); + + /** + * Adds a single dataPlacement to a store for a specific table + * + * @param adapterId adapter id of the DataPlacement to be added + * @param tableId table to be updated + */ + abstract void addSingleDataPlacementToTable( Integer adapterId, long tableId ); + + /** + * Removes a single dataPlacement from a store for a specific table + * + * @param adapterId adapter id of the DataPlacement to be removed + * @param tableId table to be updated + */ + abstract void removeSingleDataPlacementFromTable( Integer adapterId, long tableId ); + + /** + * Updates the list of data placements on a table + * + * @param tableId table to be updated + * @param newDataPlacements list of new DataPlacements that shall replace the old ones + */ + public abstract void updateDataPlacementsOnTable( long tableId, List newDataPlacements ); + + /** + * Adds columns to a dataPlacement on a store for a specific table + * + * @param adapterId adapter id of the DataPlacement to be updated + * @param tableId table to be updated + * @param columnIds List of columnIds to add to a specific store for the table + */ + abstract void addColumnsToDataPlacement( int adapterId, long tableId, List columnIds ); + + /** + * Removes columns from a dataPlacement on a store for a specific table + * + * @param adapterId adapter id of the DataPlacement to be updated + * @param tableId table to be updated + * @param columnIds List of columnIds to remove from a specific store for the table + */ + abstract void removeColumnsFromDataPlacement( int adapterId, long tableId, List columnIds ); + + /** + * Adds partitions to a dataPlacement on a store for a specific table + * + * @param adapterId adapter id of the DataPlacement to be updated + * @param tableId table to be updated + * @param partitionIds List of partitionIds to add to a specific store for the table + */ + abstract void addPartitionsToDataPlacement( int adapterId, long tableId, List partitionIds ); + + /** + * Removes partitions from a dataPlacement on a store for a specific table + * + * @param adapterId adapter id of the DataPlacement to be updated + * @param tableId table to be updated + * @param partitionIds List of partitionIds to remove from a specific store for the table + */ + abstract void removePartitionsFromDataPlacement( int adapterId, long tableId, List partitionIds ); + + /** + * Updates and overrides list of associated columnPlacements {@code &} partitionPlacements for a given data placement + * + * @param adapterId adapter where placement is located + * @param tableId table to retrieve the placement from + * @param columnIds List of columnIds to be located on a specific store for the table + * @param partitionIds List of partitionIds to be located on a specific store for the table + */ + public abstract void updateDataPlacement( int adapterId, long tableId, List columnIds, List partitionIds ); + + + /** + * Deletes a placement for a partition.
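+ * <p> + * A minimal sketch (hypothetical ids; the existence probe documented further below guards the delete): + * <pre>{@code + * if ( catalog.checkIfExistsPartitionPlacement( adapterId, partitionId ) ) { + *     catalog.deletePartitionPlacement( adapterId, partitionId ); + * } + * }</pre>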
+ * + * @param adapterId The adapter from which the partition placement should be removed + * @param partitionId The id of a partition which shall be removed from that store. + */ + public abstract void deletePartitionPlacement( int adapterId, long partitionId ); + + /** + * Returns a specific partition placement which resides on a store. + * + * @param adapterId The adapter on which the requested partition placements reside + * @param partitionId The id of the requested partition + * @return The requested PartitionPlacement on that store for the given id + */ + public abstract CatalogPartitionPlacement getPartitionPlacement( int adapterId, long partitionId ); + + /** + * Returns a list of all Partition Placements which currently reside on an adapter, regardless of the table. + * + * @param adapterId The adapter on which the requested partition placements reside + * @return A list of all Partition Placements that are currently located on that specific store + */ + public abstract List getPartitionPlacementsByAdapter( int adapterId ); + + /** + * Returns a list of all Partition Placements which currently reside on an adapter, for a specific table. + * + * @param adapterId The adapter on which the requested partition placements reside + * @param tableId The table for which all partition placements on an adapter should be considered + * @return A list of all Partition Placements that are currently located on that specific store for an individual table + */ + public abstract List getPartitionPlacementsByTableOnAdapter( int adapterId, long tableId ); + + /** + * Returns a list of all Partition Placements which are currently associated with a table. + * + * @param tableId The table with which the requested partition placements are associated. + * @return A list of all Partition Placements that belong to the desired table + */ + public abstract List getAllPartitionPlacementsByTable( long tableId ); + + /** + * Get all Partition Placements which are associated with an individual partition ID. + * Identifies at which locations and how often the individual partition is placed. + * + * @param partitionId The requested partition ID + * @return A list of Partition Placements which are physically responsible for that partition + */ + public abstract List getPartitionPlacements( long partitionId ); + + /** + * Registers a table to be considered for periodic processing + * + * @param tableId ID of the table to be considered for periodic processing + */ + public abstract void addTableToPeriodicProcessing( long tableId ); + + /** + * Removes a table from periodic background processing + * + * @param tableId ID of the table to be removed from periodic processing + */ + public abstract void removeTableFromPeriodicProcessing( long tableId ); + + /** + * Probes if a Partition Placement on an adapter for a specific partition already exists.
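+ * <p> + * A minimal probe-before-create sketch (hypothetical ids; the physical names and the {@code PlacementType.MANUAL} constant are placeholders): + * <pre>{@code + * if ( !catalog.checkIfExistsPartitionPlacement( adapterId, partitionId ) ) { + *     catalog.addPartitionPlacement( namespaceId, adapterId, tableId, partitionId, + *             PlacementType.MANUAL, "physicalSchema", "physicalTable", DataPlacementRole.UPTODATE ); + * } + * }</pre>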
+ * + * @param adapterId Adapter on which to check + * @param partitionId Partition to check for + * @return true if the partition placement exists, false if not + */ + public abstract boolean checkIfExistsPartitionPlacement( int adapterId, long partitionId ); + + +} diff --git a/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalCatalog.java b/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalCatalog.java new file mode 100644 index 0000000000..3754eca65a --- /dev/null +++ b/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalCatalog.java @@ -0,0 +1,45 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.catalog.catalogs; + +import org.polypheny.db.catalog.entity.LogicalNamespace; + +public interface LogicalCatalog { + + /** + * Checks if there is an entity with the specified name in the specified namespace. + * + * @param namespaceId The id of the namespace + * @param entityName The name to check for + * @return true if there is an entity with this name, false if not. + */ + public abstract boolean checkIfExistsEntity( long namespaceId, String entityName ); + + /** + * Checks if there is an entity with the specified id. + * + * @param tableId The id of the entity + * @return true if there is an entity with this id, false if not. + */ + public abstract boolean checkIfExistsEntity( long tableId ); + + LogicalNamespace getLogicalNamespace(); + + + LogicalCatalog withLogicalNamespace( LogicalNamespace logicalNamespace ); + +} diff --git a/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalDocumentCatalog.java b/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalDocumentCatalog.java new file mode 100644 index 0000000000..c7d1f9d2e8 --- /dev/null +++ b/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalDocumentCatalog.java @@ -0,0 +1,64 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.catalog.catalogs; + +import java.util.List; +import org.polypheny.db.catalog.entity.logical.LogicalCollection; +import org.polypheny.db.catalog.logistic.EntityType; +import org.polypheny.db.catalog.logistic.Pattern; + +public interface LogicalDocumentCatalog extends LogicalCatalog { + + /** + * Get the collection with the given id.
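+ * <p> + * A minimal sketch (hypothetical names; passing {@code null} as id lets the catalog generate one, as documented on {@code addCollection} below): + * <pre>{@code + * long collectionId = catalog.addCollection( null, "customers", namespaceId, userId, EntityType.ENTITY, true ); + * LogicalCollection collection = catalog.getCollection( collectionId ); + * }</pre>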
+ * + * @param collectionId The id of the collection + * @return The requested collection + */ + public abstract LogicalCollection getCollection( long collectionId ); + + /** + * Get a collection of collections which match the given naming pattern. + * + * @param namespaceId The id of the namespace to which the collection belongs + * @param namePattern The naming pattern of the collection itself, null if all are matched + * @return Collection of collections matching the conditions + */ + public abstract List getCollections( long namespaceId, Pattern namePattern ); + + /** + * Add a new collection with the given parameters. + * + * @param id ID of the collection to add, null if a new one needs to be generated + * @param name The name of the collection + * @param schemaId The id of the namespace to which the collection is added + * @param currentUserId The user who adds the collection + * @param entity The type of entity of the collection + * @param modifiable If the collection is modifiable + * @return The id of the added collection + */ + public abstract long addCollection( Long id, String name, long schemaId, int currentUserId, EntityType entity, boolean modifiable ); + + + /** + * Delete a specific collection. + * + * @param id The id of the collection to delete + */ + public abstract void deleteCollection( long id ); + +} diff --git a/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalGraphCatalog.java b/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalGraphCatalog.java new file mode 100644 index 0000000000..4ff400a941 --- /dev/null +++ b/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalGraphCatalog.java @@ -0,0 +1,93 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.catalog.catalogs; + +import java.util.List; +import org.polypheny.db.adapter.DataStore; +import org.polypheny.db.catalog.entity.logical.LogicalGraph; +import org.polypheny.db.catalog.exceptions.GenericCatalogException; +import org.polypheny.db.catalog.exceptions.UnknownColumnException; +import org.polypheny.db.catalog.exceptions.UnknownTableException; +import org.polypheny.db.catalog.logistic.Pattern; + +public interface LogicalGraphCatalog extends LogicalCatalog { + + /** + * Add a new alias for a given graph. + * + * @param graphId The id of the graph to which the alias is added + * @param alias The alias to add + * @param ifNotExists If the alias should only be added if it does not already exist + */ + public abstract void addGraphAlias( long graphId, String alias, boolean ifNotExists ); + + /** + * Removes a given alias for a specific graph. + * + * @param graphId The id of the graph for which the alias is removed + * @param alias The alias to remove + * @param ifExists If the alias should only be removed if it exists + */ + public abstract void removeGraphAlias( long graphId, String alias, boolean ifExists ); + + /** + * Adds a new graph to the catalog, on the same layer as schema in relational.
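+ * <p> + * A minimal sketch (hypothetical names; {@code stores} is a previously resolved list of data stores): + * <pre>{@code + * long graphId = catalog.addGraph( "social", stores, true, false, false ); + * catalog.addGraphAlias( graphId, "sn", true ); + * }</pre>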
+ * + * @param name The name of the graph + * @param stores The datastores on which the graph is placed + * @param modifiable If the graph is modifiable + * @param ifNotExists If true, no error is thrown when a graph with this name already exists + * @param replace If the graph should replace an existing one + * @return The id of the newly added graph + */ + public abstract long addGraph( String name, List stores, boolean modifiable, boolean ifNotExists, boolean replace ); + + /** + * Deletes an existing graph. + * + * @param id The id of the graph to delete + */ + public abstract void deleteGraph( long id ); + + /** + * Returns an existing graph. + * + * @param id The id of the graph to return + * @return The graph entity with the provided id + */ + public abstract LogicalGraph getGraph( long id ); + + /** + * Get a collection of all graphs which match the given conditions. + * + * @param graphName The pattern which the name has to match, null if every name is matched + * @return A collection of all graphs matching the given conditions + */ + public abstract List getGraphs( Pattern graphName ); + + + /** + * Additional operations for the creation of a graph entity. + * + * @param id The predefined id of the already added graph + * @param stores The stores on which the graph was placed + * @param onlyPlacement If the substitution only creates the placements and not the entities + */ + public abstract void addGraphLogistics( long id, List stores, boolean onlyPlacement ) throws GenericCatalogException, UnknownTableException, UnknownColumnException; + + +} diff --git a/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalRelationalCatalog.java b/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalRelationalCatalog.java new file mode 100644 index 0000000000..e4a35aa550 --- /dev/null +++ b/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalRelationalCatalog.java @@ -0,0 +1,613 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ + +package org.polypheny.db.catalog.catalogs; + +import java.util.List; +import java.util.Map; +import org.polypheny.db.algebra.AlgCollation; +import org.polypheny.db.algebra.AlgNode; +import org.polypheny.db.algebra.type.AlgDataType; +import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.entity.CatalogConstraint; +import org.polypheny.db.catalog.entity.CatalogForeignKey; +import org.polypheny.db.catalog.entity.CatalogIndex; +import org.polypheny.db.catalog.entity.CatalogKey; +import org.polypheny.db.catalog.entity.CatalogPrimaryKey; +import org.polypheny.db.catalog.entity.CatalogView; +import org.polypheny.db.catalog.entity.MaterializedCriteria; +import org.polypheny.db.catalog.entity.logical.LogicalColumn; +import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.catalog.exceptions.GenericCatalogException; +import org.polypheny.db.catalog.exceptions.UnknownColumnException; +import org.polypheny.db.catalog.exceptions.UnknownConstraintException; +import org.polypheny.db.catalog.exceptions.UnknownForeignKeyException; +import org.polypheny.db.catalog.exceptions.UnknownIndexException; +import org.polypheny.db.catalog.exceptions.UnknownSchemaException; +import org.polypheny.db.catalog.exceptions.UnknownTableException; +import org.polypheny.db.catalog.logistic.Collation; +import org.polypheny.db.catalog.logistic.EntityType; +import org.polypheny.db.catalog.logistic.ForeignKeyOption; +import org.polypheny.db.catalog.logistic.IndexType; +import org.polypheny.db.catalog.logistic.Pattern; +import org.polypheny.db.languages.QueryLanguage; +import org.polypheny.db.type.PolyType; + +public interface LogicalRelationalCatalog extends LogicalCatalog { + + /** + * Get all tables of the specified schema which match the specified filters. + * getTables( schemaId, null ) returns all tables of the schema. + * + * @param schemaId The id of the schema + * @param tableNamePattern Pattern for the table name. null returns all. + * @return List of tables which match the specified filters. If there is no table which meets the criteria, an empty list is returned. + */ + public abstract List getTables( long schemaId, Pattern tableNamePattern ); + + /** + * Get all tables which match the specified filters. + * getTables( null, null ) returns all tables. + * + * @param schemaNamePattern Pattern for the schema name. null returns all. + * @param tableNamePattern Pattern for the table name. null returns all. + * @return List of tables which match the specified filters. If there is no table which meets the criteria, an empty list is returned. + */ + public abstract List getTables( Pattern schemaNamePattern, Pattern tableNamePattern ); + + /** + * Returns the table with the given name in the specified schema. + * + * @param schemaName The name of the schema + * @param tableName The name of the table + * @return The table + */ + public abstract LogicalTable getTable( String schemaName, String tableName ) throws UnknownTableException, UnknownSchemaException; + + + /** + * Returns the table with the given id. + * + * @param tableId The id of the table + * @return The table + */ + public abstract LogicalTable getTable( long tableId ); + + /** + * Returns the table with the given name in the specified schema.
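+ * <p> + * A minimal lookup sketch (hypothetical names): + * <pre>{@code + * try { + *     LogicalTable table = catalog.getTable( namespaceId, "emps" ); + * } catch ( UnknownTableException e ) { + *     // the table does not exist in this namespace + * } + * }</pre>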
+ * + * @param schemaId The id of the schema + * @param tableName The name of the table + * @return The table + * @throws UnknownTableException If there is no table with this name in the specified schema. + */ + public abstract LogicalTable getTable( long schemaId, String tableName ) throws UnknownTableException; + + /** + * Returns the table which is associated with a given partitionId. + * + * @param partitionId The id of the partition to use for the lookup + * @return The table that contains the partition + */ + public abstract LogicalTable getTableFromPartition( long partitionId ); + + /** + * Adds a table to a specified schema. + * + * @param name The name of the table to add + * @param namespaceId The id of the schema + * @param ownerId The id of the owner + * @param entityType The table type + * @param modifiable Whether the content of the table can be modified + * @return The id of the inserted table + */ + public abstract long addTable( String name, long namespaceId, int ownerId, EntityType entityType, boolean modifiable ); + + + /** + * Adds a view to a specified schema. + * + * @param name The name of the view to add + * @param namespaceId The id of the schema + * @param ownerId The id of the owner + * @param entityType The table type + * @param modifiable Whether the content of the table can be modified + * @param definition {@link AlgNode} used to create Views + * @param algCollation collation used for the view + * @param underlyingTables all tables and columns used within the view + * @param fieldList all columns used within the View + * @param query used to define the view + * @param language query language used to define the view + * @return The id of the inserted table + */ + public abstract long addView( String name, long namespaceId, int ownerId, EntityType entityType, boolean modifiable, AlgNode definition, AlgCollation algCollation, Map> underlyingTables, AlgDataType fieldList, String query, QueryLanguage language ); + + /** + * Adds a materialized view to a specified schema. + * + * @param name of the view to add + * @param namespaceId id of the schema + * @param ownerId id of the owner + * @param entityType type of table + * @param modifiable Whether the content of the table can be modified + * @param definition {@link AlgNode} used to create Views + * @param algCollation relCollation used for materialized view + * @param underlyingTables all tables and columns used within the view + * @param fieldList all columns used within the View + * @param materializedCriteria Information like freshness and last updated + * @param query used to define materialized view + * @param language query language used to define materialized view + * @param ordered if materialized view is ordered or not + * @return id of the inserted materialized view + */ + public abstract long addMaterializedView( String name, long namespaceId, int ownerId, EntityType entityType, boolean modifiable, AlgNode definition, AlgCollation algCollation, Map> underlyingTables, AlgDataType fieldList, MaterializedCriteria materializedCriteria, String query, QueryLanguage language, boolean ordered ) throws GenericCatalogException; + + /** + * Renames a table + * + * @param tableId The id of the table to rename + * @param name New name of the table + */ + public abstract void renameTable( long tableId, String name ); + + /** + * Delete the specified table. Columns need to be deleted before.
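+ * <p> + * A minimal sketch of the required ordering (hypothetical caller code; columns are removed first, then the table itself): + * <pre>{@code + * for ( LogicalColumn column : catalog.getColumns( tableId ) ) { + *     catalog.deleteColumn( column.id ); + * } + * catalog.deleteTable( tableId ); + * }</pre>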
+ * + * @param tableId The id of the table to delete + */ + public abstract void deleteTable( long tableId ); + + /** + * Change owner of a table + * + * @param tableId The id of the table + * @param ownerId ID of the new owner + */ + public abstract void setTableOwner( long tableId, int ownerId ); + + /** + * Set the primary key of a table + * + * @param tableId The id of the table + * @param keyId The id of the key to set as primary key. Set null to set no primary key. + */ + public abstract void setPrimaryKey( long tableId, Long keyId ); + + + /** + * Gets a collection of all keys. + * + * @return The keys + */ + public abstract List getKeys(); + + + /** + * Get all keys for a given table. + * + * @param tableId The id of the table for which the keys are returned + * @return The collection of keys + */ + public abstract List getTableKeys( long tableId ); + + + /** + * Get all columns of the specified table. + * + * @param tableId The id of the table + * @return List of columns of the table. If the table has no columns, an empty list is returned. + */ + public abstract List getColumns( long tableId ); + + /** + * Get all columns which match the specified filter patterns. + * getColumns( null, null, null ) returns all columns. + * + * @param schemaNamePattern Pattern for the schema name. null returns all. + * @param tableNamePattern Pattern for the table name. null returns all. + * @param columnNamePattern Pattern for the column name. null returns all. + * @return List of columns which match the specified filters. If there is no column which meets the criteria, an empty list is returned. + */ + public abstract List getColumns( Pattern schemaNamePattern, Pattern tableNamePattern, Pattern columnNamePattern ); + + /** + * Returns the column with the specified id. + * + * @param columnId The id of the column + * @return A LogicalColumn + */ + public abstract LogicalColumn getColumn( long columnId ); + + /** + * Returns the column with the specified name in the specified table. + * + * @param tableId The id of the table + * @param columnName The name of the column + * @return A LogicalColumn + * @throws UnknownColumnException If there is no column with this name in the specified table. + */ + public abstract LogicalColumn getColumn( long tableId, String columnName ) throws UnknownColumnException; + + /** + * Returns the column with the specified name in the specified table of the specified schema. + * + * @param schemaName The name of the schema + * @param tableName The name of the table + * @param columnName The name of the column + * @return A LogicalColumn + */ + public abstract LogicalColumn getColumn( String schemaName, String tableName, String columnName ) throws UnknownColumnException, UnknownSchemaException, UnknownTableException; + + /** + * Adds a column.
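+ * <p> + * A minimal sketch (hypothetical values; {@code null} is passed where a parameter is not applicable, and the {@code Collation} constant is a placeholder): + * <pre>{@code + * long columnId = catalog.addColumn( "name", tableId, 1, PolyType.VARCHAR, null, + *         255, null, null, null, false, Collation.CASE_INSENSITIVE ); + * }</pre>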
+ * + * @param name The name of the column + * @param tableId The id of the corresponding table + * @param position The ordinal position of the column (starting with 1) + * @param type The type of the column + * @param length The length of the field (if applicable, else null) + * @param scale The number of digits after the decimal point (if applicable, else null) + * @param nullable Whether the column can contain null values + * @param collation The collation of the field (if applicable, else null) + * @return The id of the inserted column + */ + public abstract long addColumn( String name, long tableId, int position, PolyType type, PolyType collectionsType, Integer length, Integer scale, Integer dimension, Integer cardinality, boolean nullable, Collation collation ); + + /** + * Renames a column + * + * @param columnId The id of the column to rename + * @param name New name of the column + */ + public abstract void renameColumn( long columnId, String name ); + + /** + * Change the position of the column. + * + * @param columnId The id of the column for which to change the position + * @param position The new position of the column + */ + public abstract void setColumnPosition( long columnId, int position ); + + /** + * Change the data type of a column. + * + * @param columnId The id of the column + * @param type The new type of the column + */ + public abstract void setColumnType( long columnId, PolyType type, PolyType collectionsType, Integer length, Integer precision, Integer dimension, Integer cardinality ) throws GenericCatalogException; + + /** + * Change nullability of the column (whether the column allows null values). + * + * @param columnId The id of the column + * @param nullable True if the column should allow null values, false if not. + */ + public abstract void setNullable( long columnId, boolean nullable ) throws GenericCatalogException; + + /** + * Set the collation of a column. + * If the column already has the specified collation set, this method is a NoOp. + * + * @param columnId The id of the column + * @param collation The collation to set + */ + public abstract void setCollation( long columnId, Collation collation ); + + /** + * Checks if there is a column with the specified name in the specified table. + * + * @param tableId The id of the table + * @param columnName The name to check for + * @return true if there is a column with this name, false if not. + */ + public abstract boolean checkIfExistsColumn( long tableId, String columnName ); + + /** + * Delete the specified column. This also deletes a default value in case there is one defined for this column. + * + * @param columnId The id of the column to delete + */ + public abstract void deleteColumn( long columnId ); + + /** + * Adds a default value for a column. If there already is a default value, it is replaced. + * + * @param columnId The id of the column + * @param type The type of the default value + * @param defaultValue The default value to set + */ + public abstract void setDefaultValue( long columnId, PolyType type, String defaultValue ); + + /** + * Deletes an existing default value of a column. NoOp if there is no default value defined.
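+ * <p> + * A minimal sketch pairing the two default-value operations (hypothetical values): + * <pre>{@code + * catalog.setDefaultValue( columnId, PolyType.VARCHAR, "unknown" ); + * catalog.deleteDefaultValue( columnId ); // NoOp if no default value is set + * }</pre>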
+ * + * @param columnId The id of the column + */ + public abstract void deleteDefaultValue( long columnId ); + + /** + * Returns a specified primary key + * + * @param key The id of the primary key + * @return The primary key + */ + public abstract CatalogPrimaryKey getPrimaryKey( long key ); + + /** + * Check whether a key is a primary key + * + * @param keyId The id of the key + * @return Whether the key is a primary key + */ + public abstract boolean isPrimaryKey( long keyId ); + + /** + * Check whether a key is a foreign key + * + * @param keyId The id of the key + * @return Whether the key is a foreign key + */ + public abstract boolean isForeignKey( long keyId ); + + /** + * Check whether a key is an index + * + * @param keyId The id of the key + * @return Whether the key is an index + */ + public abstract boolean isIndex( long keyId ); + + /** + * Check whether a key is a constraint + * + * @param keyId The id of the key + * @return Whether the key is a constraint + */ + public abstract boolean isConstraint( long keyId ); + + /** + * Adds a primary key + * + * @param tableId The id of the table + * @param columnIds The ids of the columns which will be part of the primary key + */ + public abstract void addPrimaryKey( long tableId, List columnIds ) throws GenericCatalogException; + + /** + * Returns all (imported) foreign keys of a specified table + * + * @param tableId The id of the table + * @return List of foreign keys + */ + public abstract List getForeignKeys( long tableId ); + + /** + * Returns all foreign keys that reference the specified table (exported keys). + * + * @param tableId The id of the table + * @return List of foreign keys + */ + public abstract List getExportedKeys( long tableId ); + + /** + * Get all constraints of the specified table + * + * @param tableId The id of the table + * @return List of constraints + */ + public abstract List getConstraints( long tableId ); + + + /** + * Gets a collection of constraints for a given key. + * + * @param key The key for which the collection is returned + * @return The collection of constraints + */ + public abstract List getConstraints( CatalogKey key ); + + /** + * Returns the constraint with the specified name in the specified table. + * + * @param tableId The id of the table + * @param constraintName The name of the constraint + * @return The constraint + */ + public abstract CatalogConstraint getConstraint( long tableId, String constraintName ) throws UnknownConstraintException; + + /** + * Returns the foreign key with the specified name from the specified table + * + * @param tableId The id of the table + * @param foreignKeyName The name of the foreign key + * @return The foreign key + */ + public abstract CatalogForeignKey getForeignKey( long tableId, String foreignKeyName ) throws UnknownForeignKeyException; + + /** + * Adds a unique foreign key constraint. + * + * @param tableId The id of the table + * @param columnIds The ids of the columns which are part of the foreign key + * @param referencesTableId The id of the referenced table + * @param referencesIds The ids of the columns forming the key referenced by this key + * @param constraintName The name of the constraint + * @param onUpdate The option for updates + * @param onDelete The option for deletes + */ + public abstract void addForeignKey( long tableId, List columnIds, long referencesTableId, List referencesIds, String constraintName, ForeignKeyOption onUpdate, ForeignKeyOption onDelete ) throws GenericCatalogException; + + /** + * Adds a unique constraint.
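+ * <p> + * A minimal sketch (hypothetical names; may throw {@code GenericCatalogException}): + * <pre>{@code + * catalog.addUniqueConstraint( tableId, "u_emps_name", List.of( nameColumnId ) ); + * }</pre>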
+ * + * @param tableId The id of the table + * @param constraintName The name of the constraint + * @param columnIds A list of column ids + */ + public abstract void addUniqueConstraint( long tableId, String constraintName, List columnIds ) throws GenericCatalogException; + + /** + * Deletes the specified primary key (including the entry in the key table). If there is an index on this key, make sure to delete it first. + * + * @param tableId The id of the table for which the primary key should be deleted + */ + public abstract void deletePrimaryKey( long tableId ) throws GenericCatalogException; + + /** + * Delete the specified foreign key (does not delete the referenced key). + * + * @param foreignKeyId The id of the foreign key to delete + */ + public abstract void deleteForeignKey( long foreignKeyId ) throws GenericCatalogException; + + /** + * Delete the specified constraint. + * For deleting foreign keys, use {@link #deleteForeignKey(long)}. + * + * @param constraintId The id of the constraint to delete + */ + public abstract void deleteConstraint( long constraintId ) throws GenericCatalogException; + + + /** + * Deletes all the dependencies of a view. This is used when deleting a view. + * + * @param catalogView view for which to delete its dependencies + */ + public abstract void deleteViewDependencies( CatalogView catalogView ); + + /** + * Updates the last time a materialized view has been refreshed. + * + * @param materializedViewId id of the materialized view + */ + public abstract void updateMaterializedViewRefreshTime( long materializedViewId ); + + + /** + * Returns all tables which are in need of special periodic treatment. + * + * @return List of tables which need to be periodically processed + */ + public abstract List getTablesForPeriodicProcessing(); + + + /** + * Flags the table for deletion. + * This method should be executed on a partitioned table before we run a DROP TABLE statement. + * + * @param tableId table to be flagged for deletion + * @param flag true if it should be flagged, false if flag should be removed + */ + public abstract void flagTableForDeletion( long tableId, boolean flag ); + + /** + * Is used to detect if a table is flagged for deletion. + * Effectively checks if a drop of this table is currently in progress. + * This is needed to ensure that there aren't any constraints when recursively removing a table and all placements and partitions. + * + * @param tableId table to be checked + * @return Whether the table is flagged for deletion + */ + public abstract boolean isTableFlaggedForDeletion( long tableId ); + + /** + * Gets a collection of indexes for the given key. + * + * @param key The key for which the collection is returned + * @return The collection of indexes + */ + public abstract List getIndexes( CatalogKey key ); + + /** + * Gets a collection of foreign keys for a given {@link CatalogKey}. + * + * @param key The key for which the collection is returned + * @return The collection of foreign keys + */ + public abstract List getForeignKeys( CatalogKey key ); + + /** + * Returns all indexes of a table + * + * @param tableId The id of the table + * @param onlyUnique true if only indexes for unique values are returned. false if all indexes are returned.
+ * @return List of indexes + */ + public abstract List getIndexes( long tableId, boolean onlyUnique ); + + /** + * Returns the index with the specified name in the specified table + * + * @param tableId The id of the table + * @param indexName The name of the index + * @return The Index + */ + public abstract CatalogIndex getIndex( long tableId, String indexName ) throws UnknownIndexException; + + /** + * Checks if there is an index with the specified name in the specified table. + * + * @param tableId The id of the table + * @param indexName The name to check for + * @return true if there is an index with this name, false if not. + */ + public abstract boolean checkIfExistsIndex( long tableId, String indexName ); + + /** + * Returns the index with the specified id + * + * @param indexId The id of the index + * @return The Index + */ + public abstract CatalogIndex getIndex( long indexId ); + + /** + * Returns a list of all indexes + * + * @return List of indexes + */ + public abstract List getIndexes(); + + /** + * Adds an index over the specified columns + * + * @param tableId The id of the table + * @param columnIds A list of column ids + * @param unique Whether the index is unique + * @param method Name of the index method (e.g. btree_unique) + * @param methodDisplayName Display name of the index method (e.g. BTREE) + * @param location ID of the data store where the index is located (0 for Polypheny-DB itself) + * @param type The type of index (manual, automatic) + * @param indexName The name of the index + * @return The id of the created index + */ + public abstract long addIndex( long tableId, List columnIds, boolean unique, String method, String methodDisplayName, int location, IndexType type, String indexName ) throws GenericCatalogException; + + /** + * Set physical index name. + * + * @param indexId The id of the index + * @param physicalName The physical name to be set + */ + public abstract void setIndexPhysicalName( long indexId, String physicalName ); + + /** + * Delete the specified index + * + * @param indexId The id of the index to drop + */ + public abstract void deleteIndex( long indexId ); + +} diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/entities/CatalogUser.java b/core/src/main/java/org/polypheny/db/catalog/catalogs/PhysicalCatalog.java similarity index 58% rename from plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/entities/CatalogUser.java rename to core/src/main/java/org/polypheny/db/catalog/catalogs/PhysicalCatalog.java index 30902a6024..bc5323a194 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/entities/CatalogUser.java +++ b/core/src/main/java/org/polypheny/db/catalog/catalogs/PhysicalCatalog.java @@ -14,25 +14,8 @@ * limitations under the License.
*/ -package org.polypheny.db.catalog.entities; +package org.polypheny.db.catalog.catalogs; -import io.activej.serializer.annotations.Deserialize; -import io.activej.serializer.annotations.Serialize; - -public class CatalogUser { - - @Serialize - public final String name; - - @Serialize - public final long id; - - - public CatalogUser( - @Deserialize("id") long id, - @Deserialize("name") String name ) { - this.id = id; - this.name = name; - } +public interface PhysicalCatalog { } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogAdapter.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogAdapter.java index ff4ac29667..588dbbacb7 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogAdapter.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogAdapter.java @@ -17,48 +17,41 @@ package org.polypheny.db.catalog.entity; +import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import java.io.Serializable; import java.util.List; import java.util.Map; import lombok.EqualsAndHashCode; import lombok.NonNull; +import lombok.Value; +import lombok.With; import org.polypheny.db.adapter.Adapter.AdapterProperties; import org.polypheny.db.catalog.Adapter; import org.polypheny.db.catalog.logistic.NamespaceType; @EqualsAndHashCode +@Value +@With public class CatalogAdapter implements CatalogObject { private static final long serialVersionUID = -6140489767408917639L; - public final int id; - public final String uniqueName; - public final String adapterName; - public final AdapterType type; - public final ImmutableMap settings; - private List supportedNamespaces; + public long id; + public String uniqueName; + public String adapterName; + public AdapterType type; + public ImmutableMap settings; + public ImmutableList supportedNamespaces; - private String adapterTypeName; - - - public String getAdapterTypeName() { - if ( adapterTypeName == null ) { - // General settings are provided by the annotations of the adapter class - AdapterProperties annotations = Adapter.fromString( adapterName, type ).getClazz().getAnnotation( AdapterProperties.class ); - this.adapterTypeName = annotations.name(); - } - return adapterTypeName; - - - } + public String adapterTypeName; public enum AdapterType {STORE, SOURCE} public CatalogAdapter( - final int id, + final long id, @NonNull final String uniqueName, @NonNull final String adapterName, @NonNull final AdapterType adapterType, @@ -68,16 +61,22 @@ public CatalogAdapter( this.adapterName = adapterName; this.type = adapterType; this.settings = ImmutableMap.copyOf( settings ); + this.supportedNamespaces = ImmutableList.copyOf( createSupportedNamespaces() ); + this.adapterTypeName = getAdapterTypeName(); + } + + + private String getAdapterTypeName() { + // General settings are provided by the annotations of the adapter class + AdapterProperties annotations = Adapter.fromString( adapterName, type ).getClazz().getAnnotation( AdapterProperties.class ); + return annotations.name(); } - public List getSupportedNamespaces() { - if ( supportedNamespaces == null ) { - // General settings are provided by the annotations of the adapter class - AdapterProperties annotations = Adapter.fromString( adapterName, type ).getClazz().getAnnotation( AdapterProperties.class ); - this.supportedNamespaces = List.of( annotations.supportedNamespaceTypes() ); - } - return supportedNamespaces; + private List createSupportedNamespaces() { + // General settings are provided by the annotations of the adapter
class + AdapterProperties annotations = Adapter.fromString( adapterName, type ).getClazz().getAnnotation( AdapterProperties.class ); + return List.of( annotations.supportedNamespaceTypes() ); } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogForeignKey.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogForeignKey.java index fc0c35b7eb..2e13345823 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogForeignKey.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogForeignKey.java @@ -66,7 +66,7 @@ public CatalogForeignKey( @SneakyThrows public String getReferencedKeySchemaName() { - return Catalog.getInstance().getSchema( referencedKeySchemaId ).name; + return Catalog.getInstance().getNamespace( referencedKeySchemaId ).name; } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogKey.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogKey.java index 1db08da52e..e421a72ed1 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogKey.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogKey.java @@ -53,7 +53,7 @@ public CatalogKey( @SneakyThrows public String getSchemaName() { - return Catalog.getInstance().getSchema( schemaId ).name; + return Catalog.getInstance().getNamespace( schemaId ).name; } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogPartitionGroup.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogPartitionGroup.java index dbe504f3ba..fc82a2a760 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogPartitionGroup.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogPartitionGroup.java @@ -75,7 +75,7 @@ public String getTableName() { @SneakyThrows public String getSchemaName() { - return Catalog.getInstance().getSchema( schemaId ).name; + return Catalog.getInstance().getNamespace( schemaId ).name; } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogQueryInterface.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogQueryInterface.java index 7a4bfc62e9..5227502381 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogQueryInterface.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogQueryInterface.java @@ -22,20 +22,24 @@ import java.util.Map; import lombok.EqualsAndHashCode; import lombok.NonNull; +import lombok.Value; +import lombok.With; @EqualsAndHashCode +@Value +@With public class CatalogQueryInterface implements CatalogObject { private static final long serialVersionUID = 7212289724539530050L; - public final int id; - public final String name; - public final String clazz; - public final ImmutableMap settings; + public long id; + public String name; + public String clazz; + public ImmutableMap settings; - public CatalogQueryInterface( final int id, @NonNull final String uniqueName, @NonNull final String clazz, @NonNull final Map settings ) { + public CatalogQueryInterface( final long id, @NonNull final String uniqueName, @NonNull final String clazz, @NonNull final Map settings ) { this.id = id; this.name = uniqueName; this.clazz = clazz; diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogSchema.java b/core/src/main/java/org/polypheny/db/catalog/entity/LogicalNamespace.java similarity index 72% rename from core/src/main/java/org/polypheny/db/catalog/entity/CatalogSchema.java rename to core/src/main/java/org/polypheny/db/catalog/entity/LogicalNamespace.java index a351893169..724a2f0eb2 100644 --- 
a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogSchema.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/LogicalNamespace.java @@ -22,38 +22,36 @@ import lombok.Getter; import lombok.NonNull; import lombok.RequiredArgsConstructor; +import lombok.Value; +import lombok.With; import org.polypheny.db.catalog.logistic.NamespaceType; @EqualsAndHashCode(callSuper = false) -public final class CatalogSchema extends CatalogNamespace implements CatalogObject, Comparable { +@With +@Value +public class LogicalNamespace extends CatalogNamespace implements CatalogObject, Comparable { private static final long serialVersionUID = 3090632164988970558L; - public final long id; + public long id; @Getter - public final String name; - public final int ownerId; - public final String ownerName; + public String name; @Getter @EqualsAndHashCode.Exclude - public final NamespaceType namespaceType; + public NamespaceType namespaceType; - public final boolean caseSensitive; + public boolean caseSensitive; - public CatalogSchema( + public LogicalNamespace( final long id, @NonNull final String name, - final int ownerId, - @NonNull final String ownerName, @NonNull final NamespaceType namespaceType, boolean caseSensitive ) { super( id, name, namespaceType ); this.id = id; this.name = name; - this.ownerId = ownerId; - this.ownerName = ownerName; this.namespaceType = namespaceType; this.caseSensitive = caseSensitive; } @@ -62,12 +60,12 @@ public CatalogSchema( // Used for creating ResultSets @Override public Serializable[] getParameterArray() { - return new Serializable[]{ name, ownerName, CatalogObject.getEnumNameOrNull( namespaceType ) }; + return new Serializable[]{ name, CatalogObject.getEnumNameOrNull( namespaceType ) }; } @Override - public int compareTo( CatalogSchema o ) { + public int compareTo( LogicalNamespace o ) { if ( o != null ) { return (int) (this.id - o.id); } @@ -75,13 +73,11 @@ public int compareTo( CatalogSchema o ) { return -1; } - @RequiredArgsConstructor public static class PrimitiveCatalogSchema { public final String tableSchem; public final String tableCatalog; - public final String owner; public final String schemaType; } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationTable.java b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationTable.java index fcc096bdac..e63c67efb9 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationTable.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationTable.java @@ -29,6 +29,7 @@ import org.polypheny.db.catalog.entity.CatalogColumnPlacement; import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.catalog.entity.logical.LogicalColumn; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.logistic.NamespaceType; @@ -39,10 +40,12 @@ public class AllocationTable extends CatalogEntity implements Allocation { public List placements; public long adapterId; public long logicalId; + public LogicalTable logicalTable; - public AllocationTable( long id, long logicalId, String name, long adapterId, List placements ) { + public AllocationTable( LogicalTable logicalTable, long id, long logicalId, String name, long adapterId, List placements ) { super( id, name, EntityType.ENTITY, NamespaceType.RELATIONAL ); + this.logicalTable = logicalTable; this.logicalId = logicalId; this.adapterId = adapterId; this.placements = placements; @@ 
-80,4 +83,5 @@ public String getNamespaceName() { return null; } + } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalCollection.java b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalCollection.java index bb66fa6b48..e05501244a 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalCollection.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalCollection.java @@ -91,7 +91,7 @@ public LogicalCollection removePlacement( int adapterId ) { @SneakyThrows public String getNamespaceName() { - return Catalog.getInstance().getSchema( namespaceId ).name; + return Catalog.getInstance().getNamespace( namespaceId ).name; } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalColumn.java b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalColumn.java index f34dc13c89..c8154b832e 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalColumn.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalColumn.java @@ -145,13 +145,13 @@ public AlgDataType getAlgDataType( final AlgDataTypeFactory typeFactory ) { @SneakyThrows public String getSchemaName() { - return Catalog.getInstance().getSchema( schemaId ).name; + return Catalog.getInstance().getNamespace( schemaId ).name; } @SneakyThrows public String getTableName() { - return Catalog.getInstance().getTable( tableId ).name; + return Catalog.getInstance().getSnapshot( 0 ).getLogicalTable( tableId ).name; } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalTable.java b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalTable.java index 6c7d0c9c50..7b8a6d2971 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalTable.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalTable.java @@ -126,13 +126,13 @@ public LogicalTable( @SneakyThrows public String getNamespaceName() { - return Catalog.getInstance().getSchema( namespaceId ).name; + return Catalog.getInstance().getNamespace( namespaceId ).name; } @SneakyThrows public NamespaceType getNamespaceType() { - return Catalog.getInstance().getSchema( namespaceId ).namespaceType; + return Catalog.getInstance().getNamespace( namespaceId ).namespaceType; } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalTable.java b/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalTable.java index 397982fb8f..cc3dc1f0ed 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalTable.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalTable.java @@ -49,9 +49,12 @@ public class PhysicalTable extends CatalogEntity implements Physical { public ImmutableList columnNames; public String namespaceName; + public AllocationTable allocation; - public PhysicalTable( long id, String name, String namespaceName, EntityType type, NamespaceType namespaceType, List placements, List columnNames ) { + + public PhysicalTable( AllocationTable allocation, long id, String name, String namespaceName, EntityType type, NamespaceType namespaceType, List placements, List columnNames ) { super( id, name, type, namespaceType ); + this.allocation = allocation; this.namespaceName = namespaceName; this.placements = ImmutableList.copyOf( placements ); this.columnIds = ImmutableList.copyOf( placements.stream().map( p -> p.columnId ).collect( Collectors.toList() ) ); @@ -60,7 +63,7 @@ public 
PhysicalTable( long id, String name, String namespaceName, EntityType typ public PhysicalTable( AllocationTable table, String name, String namespaceName, List columnNames ) { - this( table.id, name, namespaceName, table.entityType, table.namespaceType, table.placements, columnNames ); + this( table, table.id, name, namespaceName, table.entityType, table.namespaceType, table.placements, columnNames ); } @@ -75,7 +78,7 @@ public AlgProtoDataType buildProto() { final AlgDataTypeFactory.Builder fieldInfo = typeFactory.builder(); for ( CatalogColumnPlacement placement : placements ) { - LogicalColumn logicalColumn = Catalog.getInstance().getColumn( placement.columnId ); + LogicalColumn logicalColumn = Catalog.getInstance().getSnapshot( 0 ).getLogicalColumn( placement.columnId ); AlgDataType sqlType = logicalColumn.getAlgDataType( typeFactory ); fieldInfo.add( logicalColumn.name, placement.physicalColumnName, sqlType ).nullable( logicalColumn.nullable ); } diff --git a/core/src/main/java/org/polypheny/db/catalog/refactor/QueryableEntity.java b/core/src/main/java/org/polypheny/db/catalog/refactor/QueryableEntity.java index 8ec1db2f1b..3fad0ab6c7 100644 --- a/core/src/main/java/org/polypheny/db/catalog/refactor/QueryableEntity.java +++ b/core/src/main/java/org/polypheny/db/catalog/refactor/QueryableEntity.java @@ -19,7 +19,7 @@ import java.lang.reflect.Type; import org.apache.calcite.linq4j.Queryable; import org.polypheny.db.adapter.DataContext; -import org.polypheny.db.catalog.Snapshot; +import org.polypheny.db.catalog.snapshot.Snapshot; public interface QueryableEntity { diff --git a/core/src/main/java/org/polypheny/db/catalog/Snapshot.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/Snapshot.java similarity index 95% rename from core/src/main/java/org/polypheny/db/catalog/Snapshot.java rename to core/src/main/java/org/polypheny/db/catalog/snapshot/Snapshot.java index ac29ea85b5..38b64cf48f 100644 --- a/core/src/main/java/org/polypheny/db/catalog/Snapshot.java +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/Snapshot.java @@ -14,7 +14,7 @@ * limitations under the License. 
diff --git a/core/src/main/java/org/polypheny/db/catalog/Snapshot.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/Snapshot.java
similarity index 95%
rename from core/src/main/java/org/polypheny/db/catalog/Snapshot.java
rename to core/src/main/java/org/polypheny/db/catalog/snapshot/Snapshot.java
index ac29ea85b5..38b64cf48f 100644
--- a/core/src/main/java/org/polypheny/db/catalog/Snapshot.java
+++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/Snapshot.java
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
-package org.polypheny.db.catalog;
+package org.polypheny.db.catalog.snapshot;
 
 import java.util.List;
 import org.apache.calcite.linq4j.tree.Expression;
@@ -22,12 +22,14 @@ import org.polypheny.db.algebra.constant.FunctionCategory;
 import org.polypheny.db.algebra.constant.Syntax;
 import org.polypheny.db.algebra.operators.OperatorTable;
+import org.polypheny.db.catalog.Catalog;
 import org.polypheny.db.catalog.entity.CatalogEntity;
 import org.polypheny.db.catalog.entity.CatalogNamespace;
 import org.polypheny.db.catalog.entity.allocation.AllocationCollection;
 import org.polypheny.db.catalog.entity.allocation.AllocationGraph;
 import org.polypheny.db.catalog.entity.allocation.AllocationTable;
 import org.polypheny.db.catalog.entity.logical.LogicalCollection;
+import org.polypheny.db.catalog.entity.logical.LogicalColumn;
 import org.polypheny.db.catalog.entity.logical.LogicalGraph;
 import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.catalog.entity.physical.PhysicalCollection;
@@ -86,6 +88,8 @@ default Expression getSnapshotExpression( long id ) {
 
     List<LogicalTable> getLogicalTables( long namespaceId, Pattern name );
 
+    LogicalColumn getLogicalColumn( long id );
+
     LogicalCollection getLogicalCollection( long id );
 
     LogicalCollection getLogicalCollection( long namespaceId, String name );
 
diff --git a/core/src/main/java/org/polypheny/db/languages/LanguageManager.java b/core/src/main/java/org/polypheny/db/languages/LanguageManager.java
index e2902f3472..bb5daf3af0 100644
--- a/core/src/main/java/org/polypheny/db/languages/LanguageManager.java
+++ b/core/src/main/java/org/polypheny/db/languages/LanguageManager.java
@@ -23,11 +23,10 @@ import java.util.function.BiFunction;
 import java.util.function.Supplier;
 import lombok.Getter;
-import org.polypheny.db.catalog.Snapshot;
 import org.polypheny.db.catalog.logistic.NamespaceType;
+import org.polypheny.db.catalog.snapshot.Snapshot;
 import org.polypheny.db.nodes.validate.Validator;
 import org.polypheny.db.prepare.Context;
-import org.polypheny.db.prepare.PolyphenyDbCatalogReader;
 import org.polypheny.db.processing.Processor;
 
 public class LanguageManager {
diff --git a/core/src/main/java/org/polypheny/db/languages/QueryLanguage.java b/core/src/main/java/org/polypheny/db/languages/QueryLanguage.java
index 774ef04f88..5d1611d936 100644
--- a/core/src/main/java/org/polypheny/db/languages/QueryLanguage.java
+++ b/core/src/main/java/org/polypheny/db/languages/QueryLanguage.java
@@ -26,11 +26,10 @@ import java.util.function.BiFunction;
 import java.util.function.Supplier;
 import lombok.Getter;
-import org.polypheny.db.catalog.Snapshot;
 import org.polypheny.db.catalog.logistic.NamespaceType;
+import org.polypheny.db.catalog.snapshot.Snapshot;
 import org.polypheny.db.nodes.validate.Validator;
 import org.polypheny.db.prepare.Context;
-import org.polypheny.db.prepare.PolyphenyDbCatalogReader;
 import org.polypheny.db.processing.Processor;
 
 public class QueryLanguage {
diff --git a/core/src/main/java/org/polypheny/db/nodes/validate/ValidatorCatalogReader.java b/core/src/main/java/org/polypheny/db/nodes/validate/ValidatorCatalogReader.java
index 4938c1515e..082b498a3e 100644
--- a/core/src/main/java/org/polypheny/db/nodes/validate/ValidatorCatalogReader.java
+++ b/core/src/main/java/org/polypheny/db/nodes/validate/ValidatorCatalogReader.java
@@ -19,10 +19,9 @@
 
 import java.util.List;
 import org.polypheny.db.algebra.type.AlgDataType;
-import org.polypheny.db.catalog.Snapshot;
 import org.polypheny.db.catalog.entity.logical.LogicalTable;
+import org.polypheny.db.catalog.snapshot.Snapshot;
 import org.polypheny.db.nodes.Identifier;
-import org.polypheny.db.schema.PolyphenyDbSchema;
 import org.polypheny.db.schema.Wrapper;
 import org.polypheny.db.util.Moniker;
 
diff --git a/core/src/main/java/org/polypheny/db/plan/AlgOptCluster.java b/core/src/main/java/org/polypheny/db/plan/AlgOptCluster.java
index 0f38e8d662..1266bebaf7 100644
--- a/core/src/main/java/org/polypheny/db/plan/AlgOptCluster.java
+++ b/core/src/main/java/org/polypheny/db/plan/AlgOptCluster.java
@@ -34,7 +34,6 @@
 
 package org.polypheny.db.plan;
 
-import com.fasterxml.jackson.databind.type.TypeFactory;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.Objects;
@@ -48,11 +47,10 @@ import org.polypheny.db.algebra.metadata.MetadataFactory;
 import org.polypheny.db.algebra.metadata.MetadataFactoryImpl;
 import org.polypheny.db.algebra.type.AlgDataTypeFactory;
-import org.polypheny.db.catalog.Snapshot;
+import org.polypheny.db.catalog.snapshot.Snapshot;
 import org.polypheny.db.rex.RexBuilder;
 import org.polypheny.db.rex.RexNode;
 import org.polypheny.db.schema.ModelTrait;
-import org.polypheny.db.schema.PolyphenyDbSchema;
 
 
 /**
diff --git a/core/src/main/java/org/polypheny/db/plan/VisitorDataContext.java b/core/src/main/java/org/polypheny/db/plan/VisitorDataContext.java
index 4733aa8471..00d7fb3dc3 100644
--- a/core/src/main/java/org/polypheny/db/plan/VisitorDataContext.java
+++ b/core/src/main/java/org/polypheny/db/plan/VisitorDataContext.java
@@ -44,7 +44,7 @@ import org.polypheny.db.algebra.AlgNode;
 import org.polypheny.db.algebra.logical.relational.LogicalFilter;
 import org.polypheny.db.algebra.type.AlgDataType;
-import org.polypheny.db.catalog.Snapshot;
+import org.polypheny.db.catalog.snapshot.Snapshot;
 import org.polypheny.db.nodes.Function;
 import org.polypheny.db.nodes.Function.FunctionType;
 import org.polypheny.db.nodes.Operator;
diff --git a/core/src/main/java/org/polypheny/db/prepare/Context.java b/core/src/main/java/org/polypheny/db/prepare/Context.java
index 25929df707..5fbb51a40d 100644
--- a/core/src/main/java/org/polypheny/db/prepare/Context.java
+++ b/core/src/main/java/org/polypheny/db/prepare/Context.java
@@ -20,9 +20,8 @@
 
 import java.util.List;
 import org.polypheny.db.adapter.DataContext;
 import org.polypheny.db.adapter.java.JavaTypeFactory;
-import org.polypheny.db.catalog.Snapshot;
+import org.polypheny.db.catalog.snapshot.Snapshot;
 import org.polypheny.db.config.PolyphenyDbConnectionConfig;
-import org.polypheny.db.schema.PolyphenyDbSchema;
 import org.polypheny.db.transaction.Statement;
 
 
diff --git a/core/src/main/java/org/polypheny/db/prepare/ContextImpl.java b/core/src/main/java/org/polypheny/db/prepare/ContextImpl.java
index a76d45b745..e09bfcf0d9 100644
--- a/core/src/main/java/org/polypheny/db/prepare/ContextImpl.java
+++ b/core/src/main/java/org/polypheny/db/prepare/ContextImpl.java
@@ -23,10 +23,9 @@ import lombok.Getter;
 import org.polypheny.db.adapter.DataContext;
 import org.polypheny.db.adapter.java.JavaTypeFactory;
-import org.polypheny.db.catalog.Snapshot;
+import org.polypheny.db.catalog.snapshot.Snapshot;
 import org.polypheny.db.config.PolyphenyDbConnectionConfig;
 import org.polypheny.db.config.PolyphenyDbConnectionConfigImpl;
-import org.polypheny.db.schema.PolyphenyDbSchema;
 import org.polypheny.db.transaction.Statement;
 
 
diff --git a/core/src/main/java/org/polypheny/db/prepare/PlannerImpl.java b/core/src/main/java/org/polypheny/db/prepare/PlannerImpl.java
index ec4b42896d..4bf50747d8 100644
--- a/core/src/main/java/org/polypheny/db/prepare/PlannerImpl.java
+++ b/core/src/main/java/org/polypheny/db/prepare/PlannerImpl.java
@@ -40,14 +40,13 @@ import org.polypheny.db.algebra.AlgNode;
 import org.polypheny.db.algebra.AlgRoot;
 import org.polypheny.db.algebra.metadata.CachingAlgMetadataProvider;
-import org.polypheny.db.catalog.Snapshot;
+import org.polypheny.db.catalog.snapshot.Snapshot;
 import org.polypheny.db.languages.NodeParseException;
 import org.polypheny.db.nodes.Node;
 import org.polypheny.db.plan.AlgOptPlanner;
 import org.polypheny.db.plan.AlgTraitDef;
 import org.polypheny.db.plan.AlgTraitSet;
 import org.polypheny.db.rex.RexExecutor;
-import org.polypheny.db.schema.PolyphenyDbSchema;
 import org.polypheny.db.tools.AlgConversionException;
 import org.polypheny.db.tools.FrameworkConfig;
 import org.polypheny.db.tools.Frameworks;
diff --git a/core/src/main/java/org/polypheny/db/prepare/PolyphenyDbCatalogReader.java b/core/src/main/java/org/polypheny/db/prepare/PolyphenyDbCatalogReader.java
index 8a17c738b5..982cdbe9db 100644
--- a/core/src/main/java/org/polypheny/db/prepare/PolyphenyDbCatalogReader.java
+++ b/core/src/main/java/org/polypheny/db/prepare/PolyphenyDbCatalogReader.java
@@ -43,10 +43,10 @@ import org.polypheny.db.algebra.operators.OperatorTable;
 import org.polypheny.db.algebra.type.AlgDataType;
 import org.polypheny.db.algebra.type.AlgDataTypeFactory;
-import org.polypheny.db.catalog.Snapshot;
 import org.polypheny.db.catalog.entity.logical.LogicalCollection;
 import org.polypheny.db.catalog.entity.logical.LogicalGraph;
 import org.polypheny.db.catalog.entity.logical.LogicalTable;
+import org.polypheny.db.catalog.snapshot.Snapshot;
 import org.polypheny.db.nodes.Identifier;
 import org.polypheny.db.nodes.Operator;
 import org.polypheny.db.schema.Wrapper;
diff --git a/core/src/main/java/org/polypheny/db/prepare/PolyphenyDbPrepareImpl.java b/core/src/main/java/org/polypheny/db/prepare/PolyphenyDbPrepareImpl.java
index 34ce4e5ca6..5716a9e5eb 100644
--- a/core/src/main/java/org/polypheny/db/prepare/PolyphenyDbPrepareImpl.java
+++ b/core/src/main/java/org/polypheny/db/prepare/PolyphenyDbPrepareImpl.java
@@ -102,7 +102,7 @@ import org.polypheny.db.algebra.type.AlgDataType;
 import org.polypheny.db.algebra.type.AlgDataTypeFactory;
 import org.polypheny.db.algebra.type.AlgDataTypeField;
-import org.polypheny.db.catalog.Snapshot;
+import org.polypheny.db.catalog.snapshot.Snapshot;
 import org.polypheny.db.config.RuntimeConfig;
 import org.polypheny.db.interpreter.BindableConvention;
 import org.polypheny.db.interpreter.Bindables;
diff --git a/core/src/main/java/org/polypheny/db/prepare/Prepare.java b/core/src/main/java/org/polypheny/db/prepare/Prepare.java
index 71e3d7af4e..298739f217 100644
--- a/core/src/main/java/org/polypheny/db/prepare/Prepare.java
+++ b/core/src/main/java/org/polypheny/db/prepare/Prepare.java
@@ -54,11 +54,11 @@ import org.polypheny.db.algebra.operators.OperatorTable;
 import org.polypheny.db.algebra.type.AlgDataType;
 import org.polypheny.db.algebra.type.AlgDataTypeField;
-import org.polypheny.db.catalog.Snapshot;
 import org.polypheny.db.catalog.entity.logical.LogicalCollection;
 import org.polypheny.db.catalog.entity.logical.LogicalGraph;
 import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.catalog.refactor.TranslatableEntity;
+import org.polypheny.db.catalog.snapshot.Snapshot;
 import org.polypheny.db.languages.NodeToAlgConverter;
 import org.polypheny.db.nodes.Node;
 import org.polypheny.db.nodes.validate.Validator;
diff --git a/core/src/main/java/org/polypheny/db/schema/AbstractPolyphenyDbSchema.java b/core/src/main/java/org/polypheny/db/schema/AbstractPolyphenyDbSchema.java
index b075112221..ca0ed3da09 100644
--- a/core/src/main/java/org/polypheny/db/schema/AbstractPolyphenyDbSchema.java
+++ b/core/src/main/java/org/polypheny/db/schema/AbstractPolyphenyDbSchema.java
@@ -37,9 +37,10 @@ import java.util.Map;
 import java.util.concurrent.ConcurrentHashMap;
 import lombok.Getter;
-import org.polypheny.db.catalog.Snapshot;
+import org.polypheny.db.catalog.Catalog;
 import org.polypheny.db.catalog.entity.CatalogEntity;
 import org.polypheny.db.catalog.entity.CatalogEntityPlacement;
+import org.polypheny.db.catalog.snapshot.Snapshot;
 import org.polypheny.db.util.Pair;
 import org.polypheny.db.util.Triple;
 
@@ -82,7 +83,7 @@ public AbstractPolyphenyDbSchema(
      * Creates a root schema.
      */
     public static Snapshot createSnapshot() {
-        return PolySchemaBuilder.getInstance().getCurrent();
+        return Catalog.getInstance().getSnapshot( 0 );
     }
 
diff --git a/core/src/main/java/org/polypheny/db/schema/LogicalSchema.java b/core/src/main/java/org/polypheny/db/schema/LogicalSchema.java
index 1217a954db..e1bc6f0026 100644
--- a/core/src/main/java/org/polypheny/db/schema/LogicalSchema.java
+++ b/core/src/main/java/org/polypheny/db/schema/LogicalSchema.java
@@ -23,8 +23,8 @@ import lombok.Getter;
 import org.apache.calcite.linq4j.tree.Expression;
 import org.polypheny.db.algebra.type.AlgProtoDataType;
-import org.polypheny.db.catalog.Snapshot;
 import org.polypheny.db.catalog.entity.CatalogEntity;
+import org.polypheny.db.catalog.snapshot.Snapshot;
 import org.polypheny.db.schema.Namespace.Schema;
 
 
diff --git a/core/src/main/java/org/polypheny/db/schema/Namespace.java b/core/src/main/java/org/polypheny/db/schema/Namespace.java
index a540c2d6ba..9bad3993f8 100644
--- a/core/src/main/java/org/polypheny/db/schema/Namespace.java
+++ b/core/src/main/java/org/polypheny/db/schema/Namespace.java
@@ -38,8 +38,8 @@ import java.util.Set;
 import org.apache.calcite.linq4j.tree.Expression;
 import org.polypheny.db.algebra.type.AlgProtoDataType;
-import org.polypheny.db.catalog.Snapshot;
 import org.polypheny.db.catalog.entity.CatalogEntity;
+import org.polypheny.db.catalog.snapshot.Snapshot;
 
 
 /**
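The next diff deletes PolySchemaBuilder entirely; its job of rebuilding and caching a Snapshot whenever the catalog changed is superseded by Catalog.getSnapshot, as in createSnapshot above. A hedged before/after sketch of the call-site change (the method body is illustrative, not taken verbatim from the patch):

    import org.polypheny.db.catalog.Catalog;
    import org.polypheny.db.catalog.snapshot.Snapshot;

    class SchemaAccessSketch {

        // Before: a singleton observed catalog changes and invalidated a cached schema:
        //   Snapshot current = PolySchemaBuilder.getInstance().getCurrent();
        // After: the catalog hands out a snapshot on demand, so the call site
        // needs no PropertyChangeListener bookkeeping or cache-staleness flag.
        Snapshot current() {
            return Catalog.getInstance().getSnapshot( 0 );
        }

    }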
diff --git a/core/src/main/java/org/polypheny/db/schema/PolySchemaBuilder.java b/core/src/main/java/org/polypheny/db/schema/PolySchemaBuilder.java
deleted file mode 100644
index 5616b13537..0000000000
--- a/core/src/main/java/org/polypheny/db/schema/PolySchemaBuilder.java
+++ /dev/null
@@ -1,318 +0,0 @@
-/*
- * Copyright 2019-2023 The Polypheny Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.polypheny.db.schema;
-
-
-import java.beans.PropertyChangeEvent;
-import java.beans.PropertyChangeListener;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.stream.Collectors;
-import org.polypheny.db.adapter.Adapter;
-import org.polypheny.db.adapter.AdapterManager;
-import org.polypheny.db.algebra.type.AlgDataType;
-import org.polypheny.db.algebra.type.AlgDataTypeFactory.Builder;
-import org.polypheny.db.algebra.type.AlgDataTypeImpl;
-import org.polypheny.db.catalog.Catalog;
-import org.polypheny.db.catalog.Snapshot;
-import org.polypheny.db.catalog.entity.CatalogAdapter;
-import org.polypheny.db.catalog.entity.CatalogCollectionPlacement;
-import org.polypheny.db.catalog.entity.CatalogColumnPlacement;
-import org.polypheny.db.catalog.entity.CatalogEntity;
-import org.polypheny.db.catalog.entity.CatalogEntityPlacement;
-import org.polypheny.db.catalog.entity.CatalogGraphPlacement;
-import org.polypheny.db.catalog.entity.CatalogKey.EnforcementTime;
-import org.polypheny.db.catalog.entity.CatalogPartitionPlacement;
-import org.polypheny.db.catalog.entity.CatalogSchema;
-import org.polypheny.db.catalog.entity.logical.LogicalCollection;
-import org.polypheny.db.catalog.entity.logical.LogicalGraph;
-import org.polypheny.db.catalog.entity.logical.LogicalTable;
-import org.polypheny.db.catalog.logistic.NamespaceType;
-import org.polypheny.db.config.RuntimeConfig;
-import org.polypheny.db.util.Pair;
-import org.polypheny.db.util.Triple;
-
-
-public class PolySchemaBuilder implements PropertyChangeListener {
-
-    private final static PolySchemaBuilder INSTANCE = new PolySchemaBuilder();
-
-    private Snapshot current;
-    private boolean isOutdated = true;
-
-
-    private PolySchemaBuilder() {
-        Catalog.getInstance().addObserver( this );
-    }
-
-
-    public static PolySchemaBuilder getInstance() {
-        return INSTANCE;
-    }
-
-
-    public Snapshot getCurrent() {
-        if ( !RuntimeConfig.SCHEMA_CACHING.getBoolean() ) {
-            return buildSchema();
-        }
-        if ( current == null || isOutdated ) {
-            current = buildSchema();
-        }
-        return current;
-    }
-
-
-    private synchronized Snapshot buildSchema() {
-
-        Catalog catalog = Catalog.getInstance();
-
-        // Build logical namespaces
-        Map<Pair<Long, Long>, CatalogEntity> logicalRelational = buildRelationalLogical( catalog );
-
-        Map<Pair<Long, Long>, CatalogEntity> logicalDocument = buildDocumentLogical( catalog );
-
-        Map<Pair<Long, Long>, CatalogEntity> logicalGraph = buildGraphLogical( catalog );
-
-        // Build mapping structures
-
-        // Build physical namespaces
-        List<CatalogAdapter> adapters = Catalog.getInstance().getAdapters();
-
-        Map<Triple<Long, Long, Long>, CatalogEntityPlacement> physicalRelational = buildPhysicalTables( catalog, adapters );
-
-        Map<Triple<Long, Long, Long>, CatalogEntityPlacement> physicalDocument = buildPhysicalDocuments( catalog, adapters );
-
-        Map<Triple<Long, Long, Long>, CatalogEntityPlacement> physicalGraph = buildPhysicalGraphs( catalog );
-
-        isOutdated = false;
-        return null;
-        //return new SimplePolyphenyDbSchema( logicalRelational, logicalDocument, logicalGraph, physicalRelational, physicalDocument, physicalGraph );
-    }
-
-
-    private Map<Pair<Long, Long>, CatalogEntity> buildGraphLogical( Catalog catalog ) {
-        return catalog.getGraphs( null ).stream().collect( Collectors.toMap( e -> Pair.of( e.id, e.id ), e -> e ) );
-    }
-
-
-    private Map<Pair<Long, Long>, CatalogEntity> buildRelationalLogical( Catalog catalog ) {
-        Map<Pair<Long, Long>, CatalogEntity> entities = new HashMap<>();
-        for ( CatalogSchema catalogSchema : catalog.getSchemas( null ) ) {
-            if ( catalogSchema.namespaceType != NamespaceType.RELATIONAL ) {
-                continue;
-            }
-
-            for ( LogicalTable catalogTable : catalog.getTables( catalogSchema.id, null ) ) {
-                entities.put( Pair.of( catalogSchema.id, catalogTable.id ), catalogTable );
-            }
-        }
-        return entities;
-    }
-
-
-    private Map<Pair<Long, Long>, CatalogEntity> buildDocumentLogical( Catalog catalog ) {
-        Map<Pair<Long, Long>, CatalogEntity> entities = new HashMap<>();
-        for ( CatalogSchema catalogSchema : catalog.getSchemas( null ) ) {
-            if ( catalogSchema.namespaceType != NamespaceType.DOCUMENT ) {
-                continue;
-            }
-
-            for ( LogicalCollection catalogEntity : catalog.getCollections( catalogSchema.id, null ) ) {
-                entities.put( Pair.of( catalogSchema.id, catalogEntity.id ), catalogEntity );
-            }
-        }
-
-        return entities;
-    }
-
-
-    private Map<Triple<Long, Long, Long>, CatalogEntityPlacement> buildPhysicalGraphs( Catalog catalog ) {
-        Map<Triple<Long, Long, Long>, CatalogEntityPlacement> placements = new HashMap<>();
-        // Build adapter schema (physical schema) GRAPH
-        for ( LogicalGraph graph : catalog.getGraphs( null ) ) {
-            for ( int adapterId : graph.placements ) {
-
-                CatalogGraphPlacement placement = catalog.getGraphPlacement( graph.id, adapterId );
-                Adapter adapter = AdapterManager.getInstance().getAdapter( adapterId );
-
-                if ( !adapter.getSupportedNamespaceTypes().contains( NamespaceType.GRAPH ) ) {
-                    continue;
-                }
-
-                //adapter.createGraphNamespace( rootSchema, schemaName, graph.id );
-
-                placements.put( new Triple<>( graph.id, (long) adapter.getAdapterId(), graph.id ), placement );
-            }
-        }
-        return placements;
-    }
-
-
-    private Map<Triple<Long, Long, Long>, CatalogEntityPlacement> buildPhysicalDocuments( Catalog catalog, List<CatalogAdapter> adapters ) {
-        Map<Triple<Long, Long, Long>, CatalogEntityPlacement> placements = new HashMap<>();
-        // Build adapter schema (physical schema) DOCUMENT
-        for ( CatalogSchema catalogSchema : catalog.getSchemas( null ).stream().filter( s -> s.namespaceType == NamespaceType.DOCUMENT ).collect( Collectors.toList() ) ) {
-            for ( CatalogAdapter catalogAdapter : adapters ) {
-
-                Adapter adapter = AdapterManager.getInstance().getAdapter( catalogAdapter.id );
-
-                if ( !adapter.getSupportedNamespaceTypes().contains( NamespaceType.DOCUMENT ) ) {
-                    continue;
-                }
-
-                // Get list of documents on this adapter
-                Map<String, Set<Long>> documentIdsPerSchema = new HashMap<>();
-                for ( CatalogCollectionPlacement placement : Catalog.getInstance().getCollectionPlacementsByAdapter( catalogAdapter.id ) ) {
-                    documentIdsPerSchema.putIfAbsent( placement.physicalNamespaceName, new HashSet<>() );
-                    documentIdsPerSchema.get( placement.physicalNamespaceName ).add( placement.collectionId );
-                }
-
-                for ( String physicalSchemaName : documentIdsPerSchema.keySet() ) {
-                    Set<Long> collectionIds = documentIdsPerSchema.get( physicalSchemaName );
-
-                    //adapter.createNewSchema( rootSchema, schemaName, catalogSchema.id );
-
-                    for ( long collectionId : collectionIds ) {
-                        LogicalCollection catalogCollection = catalog.getCollection( collectionId );
-
-                        for ( CatalogCollectionPlacement partitionPlacement : catalogCollection.placements.stream().map( p -> catalog.getCollectionPlacement( collectionId, adapter.getAdapterId() ) ).collect( Collectors.toList() ) ) {
-                            if ( catalogSchema.namespaceType != NamespaceType.DOCUMENT && catalogAdapter.getSupportedNamespaces().contains( catalogSchema.namespaceType ) ) {
-                                continue;
-                            }
-
-                            //Entity entity = adapter.createDocumentSchema( catalogCollection, partitionPlacement );
-                            placements.put( new Triple<>( catalogSchema.id, (long) catalogAdapter.id, catalogCollection.id ), partitionPlacement );
-                        }
-                    }
-                }
-            }
-        }
-        return placements;
-    }
-
-
-    private Map<Triple<Long, Long, Long>, CatalogEntityPlacement> buildPhysicalTables( Catalog catalog, List<CatalogAdapter> adapters ) {
-        Map<Triple<Long, Long, Long>, CatalogEntityPlacement> placements = new HashMap<>();
-        // Build adapter schema (physical schema) RELATIONAL
-        for ( CatalogSchema catalogSchema : new ArrayList<>( catalog.getSchemas( null ) ) ) {
-            for ( CatalogAdapter catalogAdapter : adapters ) {
-                // Get list of tables on this adapter
-                Map<Long, Set<Long>> tableIdsPerSchema = new HashMap<>();
-                for ( CatalogColumnPlacement placement : Catalog.getInstance().getColumnPlacementsOnAdapterAndSchema( catalogAdapter.id, catalogSchema.id ) ) {
-                    tableIdsPerSchema.putIfAbsent( placement.namespaceId, new HashSet<>() );
-                    tableIdsPerSchema.get( placement.namespaceId ).add( placement.tableId );
-                }
-
-                for ( Long namespaceId : tableIdsPerSchema.keySet() ) {
-                    Set<Long> tableIds = tableIdsPerSchema.get( namespaceId );
-                    //adapter.createNewSchema( rootSchema, schemaName, catalogSchema.id );
-                    for ( long tableId : tableIds ) {
-                        List<CatalogPartitionPlacement> partitionPlacements = catalog.getPartitionPlacementsByTableOnAdapter( catalogAdapter.id, tableId );
-
-                        for ( CatalogPartitionPlacement partitionPlacement : partitionPlacements ) {
-                            if ( catalogSchema.namespaceType != NamespaceType.RELATIONAL && catalogAdapter.getSupportedNamespaces().contains( catalogSchema.namespaceType ) ) {
-                                continue;
-                            }
-
-                            /*
-                            Entity entity = adapter.createAdapterTable(
-                                    catalogTable,
-                                    Catalog.getInstance().getColumnPlacementsOnAdapterSortedByPhysicalPosition( adapter.getAdapterId(), catalogTable.id ),
-                                    partitionPlacement );
-
-                            */
-                            placements.put( new Triple<>( catalogSchema.id, (long) catalogAdapter.id, partitionPlacement.tableId ), partitionPlacement );
-                        }
-                    }
-                }
-            }
-        }
-
-        return placements;
-    }
-
-
-    private void buildView( Map tableMap, PolyphenyDbSchema s, LogicalTable catalogTable, List<String> columnNames, Builder fieldInfo, List<Long> columnIds ) {
-        LogicalRelView view = new LogicalRelView(
-                catalogTable.id,
-                catalogTable.getNamespaceName(),
-                catalogTable.name,
-                columnIds,
-                columnNames,
-                AlgDataTypeImpl.proto( fieldInfo.build() ) );
-        //s.add( catalogTable.name, view );
-        tableMap.put( catalogTable.name, view );
-    }
-
-
-    private void buildEntity( Catalog catalog, CatalogSchema catalogSchema, Map tableMap, SchemaPlus s, LogicalTable catalogTable, List<String> columnNames, AlgDataType rowType, List<Long> columnIds ) {
-        LogicalEntity table;
-        if ( catalogSchema.namespaceType == NamespaceType.RELATIONAL ) {
-            table = new LogicalEntity(
-                    catalogTable.id,
-                    catalogTable.getNamespaceName(),
-                    catalogTable.name,
-                    columnIds,
-                    columnNames,
-                    AlgDataTypeImpl.proto( rowType ),
-                    catalogSchema.namespaceType );
-            if ( RuntimeConfig.FOREIGN_KEY_ENFORCEMENT.getBoolean() ) {
-                table.getConstraintIds()
-                        .addAll( catalog.getForeignKeys( catalogTable.id ).stream()
-                                .filter( f -> f.enforcementTime == EnforcementTime.ON_COMMIT )
-                                .map( f -> f.referencedKeyTableId )
-                                .collect( Collectors.toList() ) );
-                table.getConstraintIds()
-                        .addAll( catalog.getExportedKeys( catalogTable.id ).stream()
-                                .filter( f -> f.enforcementTime == EnforcementTime.ON_COMMIT )
-                                .map( f -> f.referencedKeyTableId )
-                                .collect( Collectors.toList() ) );
-            }
-        } else if ( catalogSchema.namespaceType == NamespaceType.DOCUMENT ) {
-            table = new org.polypheny.db.schema.LogicalCollection(
-                    catalogTable.id,
-                    catalogTable.getNamespaceName(),
-                    catalogTable.name,
-                    AlgDataTypeImpl.proto( rowType )
-            );
-        } else {
-            throw new RuntimeException( "Model is not supported" );
-        }
-
-        //s.add( catalogTable.name, table );
-        tableMap.put( catalogTable.name, table );
-    }
-
-
-    public static String buildAdapterSchemaName( String storeName, String logicalSchema, String physicalSchema ) {
-        return storeName + "_" + logicalSchema + "_" + physicalSchema;
-    }
-
-
-    // Listens on changes to the catalog
-    @Override
-    public void propertyChange( PropertyChangeEvent evt ) {
-        // Catalog changed, flag as outdated
-        isOutdated = true;
-    }
-
-
-}
diff --git a/core/src/main/java/org/polypheny/db/schema/PolyphenyDbSchema.java b/core/src/main/java/org/polypheny/db/schema/PolyphenyDbSchema.java
index 07cfd580ca..0dc79999d9 100644
--- a/core/src/main/java/org/polypheny/db/schema/PolyphenyDbSchema.java
+++ b/core/src/main/java/org/polypheny/db/schema/PolyphenyDbSchema.java
@@ -65,14 +65,14 @@ default LogicalCollection getCollection( List<String> names ) {
         CatalogNamespace namespace;
         switch ( names.size() ) {
             case 3:
-                namespace = Catalog.getInstance().getSchemas( Pattern.of( names.get( 1 ) ) ).get( 0 );
+                namespace = Catalog.getInstance().getNamespaces( Pattern.of( names.get( 1 ) ) ).get( 0 );
                 return Catalog.getInstance().getCollections( namespace.id, Pattern.of( names.get( 2 ) ) ).get( 0 );
             case 2:
-                namespace = Catalog.getInstance().getSchemas( Catalog.defaultDatabaseId, Pattern.of( names.get( 0 ) ) ).get( 0 );
+                namespace = Catalog.getInstance().getNamespaces( Catalog.defaultDatabaseId, Pattern.of( names.get( 0 ) ) ).get( 0 );
                 return Catalog.getInstance().getCollections( namespace.id, Pattern.of( names.get( 1 ) ) ).get( 0 );
             case 1:
                 // TODO add methods
-                namespace = Catalog.getInstance().getSchemas( Catalog.defaultDatabaseId, null ).get( 0 );
+                namespace = Catalog.getInstance().getNamespaces( Catalog.defaultDatabaseId, null ).get( 0 );
                 return Catalog.getInstance().getCollections( namespace.id, Pattern.of( names.get( 0 ) ) ).get( 0 );
             default:
                 return null;
@@ -111,7 +111,7 @@ default PhysicalGraph getPhysicalGraph( long id ){
     }
 
     default List<String> getNamespaceNames() {
-        return Catalog.getInstance().getSchemas( Catalog.defaultDatabaseId, null ).stream().map( t -> t.name ).collect( Collectors.toList() );
+        return Catalog.getInstance().getNamespaces( Catalog.defaultDatabaseId, null ).stream().map( t -> t.name ).collect( Collectors.toList() );
     }
 
     default boolean isPartitioned( long id ){
diff --git a/core/src/main/java/org/polypheny/db/schema/QueryableEntity.java b/core/src/main/java/org/polypheny/db/schema/QueryableEntity.java
index c07577a0d2..6732cf2b01 100644
--- a/core/src/main/java/org/polypheny/db/schema/QueryableEntity.java
+++ b/core/src/main/java/org/polypheny/db/schema/QueryableEntity.java
@@ -38,7 +38,7 @@
 import org.apache.calcite.linq4j.Queryable;
 import org.apache.calcite.linq4j.tree.Expression;
 import org.polypheny.db.adapter.DataContext;
-import org.polypheny.db.catalog.Snapshot;
+import org.polypheny.db.catalog.snapshot.Snapshot;
 
 
 /**
diff --git a/core/src/main/java/org/polypheny/db/schema/Schemas.java b/core/src/main/java/org/polypheny/db/schema/Schemas.java
index cd9019a45b..643a1a421f 100644
--- a/core/src/main/java/org/polypheny/db/schema/Schemas.java
+++ b/core/src/main/java/org/polypheny/db/schema/Schemas.java
@@ -55,9 +55,9 @@ import org.polypheny.db.algebra.type.AlgDataType;
 import org.polypheny.db.algebra.type.AlgDataTypeFactory;
 import org.polypheny.db.algebra.type.AlgProtoDataType;
-import org.polypheny.db.catalog.Snapshot;
 import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.catalog.refactor.QueryableEntity;
+import org.polypheny.db.catalog.snapshot.Snapshot;
 import org.polypheny.db.config.PolyphenyDbConnectionConfig;
 import org.polypheny.db.config.PolyphenyDbConnectionConfigImpl;
 import org.polypheny.db.config.PolyphenyDbConnectionProperty;
diff --git a/core/src/main/java/org/polypheny/db/schema/impl/AbstractNamespace.java b/core/src/main/java/org/polypheny/db/schema/impl/AbstractNamespace.java
index 7093eb682c..9ee24edfa3 100644
--- a/core/src/main/java/org/polypheny/db/schema/impl/AbstractNamespace.java
+++ b/core/src/main/java/org/polypheny/db/schema/impl/AbstractNamespace.java
@@ -43,8 +43,8 @@ import lombok.Getter;
 import org.apache.calcite.linq4j.tree.Expression;
 import org.polypheny.db.algebra.type.AlgProtoDataType;
-import org.polypheny.db.catalog.Snapshot;
 import org.polypheny.db.catalog.entity.CatalogEntity;
+import org.polypheny.db.catalog.snapshot.Snapshot;
 import org.polypheny.db.schema.Function;
 import org.polypheny.db.schema.Namespace;
 import org.polypheny.db.schema.SchemaVersion;
diff --git a/core/src/main/java/org/polypheny/db/schema/impl/AbstractTableQueryable.java b/core/src/main/java/org/polypheny/db/schema/impl/AbstractTableQueryable.java
index ca85b5f11d..263950923c 100644
--- a/core/src/main/java/org/polypheny/db/schema/impl/AbstractTableQueryable.java
+++ b/core/src/main/java/org/polypheny/db/schema/impl/AbstractTableQueryable.java
@@ -39,14 +39,12 @@
 import org.apache.calcite.linq4j.AbstractQueryable;
 import org.apache.calcite.linq4j.Linq4j;
 import org.apache.calcite.linq4j.QueryProvider;
-import org.apache.calcite.linq4j.Queryable;
 import org.apache.calcite.linq4j.tree.Expression;
 import org.polypheny.db.adapter.DataContext;
 import org.polypheny.db.adapter.java.AbstractQueryableEntity;
-import org.polypheny.db.catalog.Snapshot;
 import org.polypheny.db.catalog.entity.CatalogEntity;
-import org.polypheny.db.catalog.entity.physical.PhysicalTable;
 import org.polypheny.db.catalog.refactor.QueryableEntity;
+import org.polypheny.db.catalog.snapshot.Snapshot;
 
 
 /**
diff --git a/core/src/main/java/org/polypheny/db/schema/impl/DelegatingNamespace.java b/core/src/main/java/org/polypheny/db/schema/impl/DelegatingNamespace.java
index 47a8fe2969..59e72295c4 100644
--- a/core/src/main/java/org/polypheny/db/schema/impl/DelegatingNamespace.java
+++ b/core/src/main/java/org/polypheny/db/schema/impl/DelegatingNamespace.java
@@ -38,8 +38,8 @@ import java.util.Set;
 import org.apache.calcite.linq4j.tree.Expression;
 import org.polypheny.db.algebra.type.AlgProtoDataType;
-import org.polypheny.db.catalog.Snapshot;
 import org.polypheny.db.catalog.entity.CatalogEntity;
+import org.polypheny.db.catalog.snapshot.Snapshot;
 import org.polypheny.db.schema.Function;
 import org.polypheny.db.schema.Namespace;
 import org.polypheny.db.schema.SchemaVersion;
diff --git a/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java b/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java
index d49457b777..d6b523371e 100644
--- a/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java
+++ b/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java
@@ -103,11 +103,11 @@ import org.polypheny.db.algebra.type.AlgDataTypeFieldImpl;
 import org.polypheny.db.algebra.type.StructKind;
 import org.polypheny.db.catalog.Catalog;
-import org.polypheny.db.catalog.Snapshot;
 import org.polypheny.db.catalog.entity.CatalogEntity;
 import org.polypheny.db.catalog.entity.logical.LogicalCollection;
 import org.polypheny.db.catalog.entity.logical.LogicalGraph;
 import org.polypheny.db.catalog.entity.logical.LogicalTable;
+import org.polypheny.db.catalog.snapshot.Snapshot;
 import org.polypheny.db.languages.OperatorRegistry;
 import org.polypheny.db.languages.QueryLanguage;
 import org.polypheny.db.nodes.Operator;
diff --git a/core/src/main/java/org/polypheny/db/tools/AlgBuilderFactory.java b/core/src/main/java/org/polypheny/db/tools/AlgBuilderFactory.java
index 44a51c9ca5..5985db1fe0 100644
--- a/core/src/main/java/org/polypheny/db/tools/AlgBuilderFactory.java
+++ b/core/src/main/java/org/polypheny/db/tools/AlgBuilderFactory.java
@@ -35,10 +35,9 @@
 
 
 import org.polypheny.db.algebra.core.AlgFactories;
-import org.polypheny.db.catalog.Snapshot;
+import org.polypheny.db.catalog.snapshot.Snapshot;
 import org.polypheny.db.plan.AlgOptCluster;
 import org.polypheny.db.plan.AlgOptRule;
-import org.polypheny.db.schema.PolyphenyDbSchema;
 
 
 /**
diff --git a/core/src/main/java/org/polypheny/db/tools/FrameworkConfig.java b/core/src/main/java/org/polypheny/db/tools/FrameworkConfig.java
index 2b3077ee1a..4371112bf3 100644
--- a/core/src/main/java/org/polypheny/db/tools/FrameworkConfig.java
+++ b/core/src/main/java/org/polypheny/db/tools/FrameworkConfig.java
@@ -37,7 +37,7 @@
 import com.google.common.collect.ImmutableList;
 import org.polypheny.db.algebra.operators.OperatorTable;
 import org.polypheny.db.algebra.type.AlgDataTypeSystem;
-import org.polypheny.db.catalog.Snapshot;
+import org.polypheny.db.catalog.snapshot.Snapshot;
 import org.polypheny.db.languages.NodeToAlgConverter;
 import org.polypheny.db.languages.Parser.ParserConfig;
 import org.polypheny.db.plan.AlgOptCostFactory;
@@ -45,7 +45,6 @@
 import org.polypheny.db.plan.AlgTraitDef;
 import org.polypheny.db.plan.Context;
 import org.polypheny.db.rex.RexExecutor;
-import org.polypheny.db.schema.PolyphenyDbSchema;
 
 
 /**
diff --git a/core/src/main/java/org/polypheny/db/tools/Frameworks.java b/core/src/main/java/org/polypheny/db/tools/Frameworks.java
index d9599dfb66..e20445ca03 100644
--- a/core/src/main/java/org/polypheny/db/tools/Frameworks.java
+++ b/core/src/main/java/org/polypheny/db/tools/Frameworks.java
@@ -45,7 +45,7 @@ import org.polypheny.db.adapter.java.JavaTypeFactory;
 import org.polypheny.db.algebra.operators.OperatorTable;
 import org.polypheny.db.algebra.type.AlgDataTypeSystem;
-import org.polypheny.db.catalog.Snapshot;
+import org.polypheny.db.catalog.snapshot.Snapshot;
 import org.polypheny.db.config.PolyphenyDbConnectionProperty;
 import org.polypheny.db.languages.NodeToAlgConverter;
 import org.polypheny.db.languages.Parser.ParserConfig;
diff --git a/core/src/main/java/org/polypheny/db/tools/RoutedAlgBuilder.java b/core/src/main/java/org/polypheny/db/tools/RoutedAlgBuilder.java
index 5fb666dca2..58a6b5594f 100644
--- a/core/src/main/java/org/polypheny/db/tools/RoutedAlgBuilder.java
+++ b/core/src/main/java/org/polypheny/db/tools/RoutedAlgBuilder.java
@@ -26,8 +26,8 @@ import org.bson.BsonValue;
 import org.polypheny.db.algebra.AlgNode;
 import org.polypheny.db.algebra.type.AlgDataType;
-import org.polypheny.db.catalog.Snapshot;
 import org.polypheny.db.catalog.entity.CatalogColumnPlacement;
+import org.polypheny.db.catalog.snapshot.Snapshot;
 import org.polypheny.db.plan.AlgOptCluster;
 import org.polypheny.db.plan.Context;
 import org.polypheny.db.plan.Contexts;
diff --git a/core/src/main/java/org/polypheny/db/transaction/Transaction.java b/core/src/main/java/org/polypheny/db/transaction/Transaction.java
index ab811f7d0d..65b51cb8a7 100644
--- a/core/src/main/java/org/polypheny/db/transaction/Transaction.java
+++ b/core/src/main/java/org/polypheny/db/transaction/Transaction.java
@@ -22,9 +22,9 @@ import java.util.concurrent.atomic.AtomicBoolean;
 import org.polypheny.db.adapter.Adapter;
 import org.polypheny.db.adapter.java.JavaTypeFactory;
-import org.polypheny.db.catalog.Snapshot;
-import org.polypheny.db.catalog.entity.CatalogSchema;
+import org.polypheny.db.catalog.entity.LogicalNamespace;
 import org.polypheny.db.catalog.entity.logical.LogicalTable;
+import org.polypheny.db.catalog.snapshot.Snapshot;
 import org.polypheny.db.information.InformationManager;
 import org.polypheny.db.languages.QueryLanguage;
 import org.polypheny.db.processing.DataMigrator;
@@ -63,7 +63,7 @@ public interface Transaction {
 
     AtomicBoolean getCancelFlag();
 
-    CatalogSchema getDefaultSchema();
+    LogicalNamespace getDefaultSchema();
 
     void addChangedTable( String qualifiedTableName );
 
diff --git a/core/src/main/java/org/polypheny/db/transaction/TransactionManager.java b/core/src/main/java/org/polypheny/db/transaction/TransactionManager.java
index 74fc7e8345..48a5601e1a 100644
--- a/core/src/main/java/org/polypheny/db/transaction/TransactionManager.java
+++ b/core/src/main/java/org/polypheny/db/transaction/TransactionManager.java
@@ -18,8 +18,8 @@
 
 import org.polypheny.db.catalog.entity.CatalogDatabase;
-import org.polypheny.db.catalog.entity.CatalogSchema;
 import org.polypheny.db.catalog.entity.CatalogUser;
+import org.polypheny.db.catalog.entity.LogicalNamespace;
 import org.polypheny.db.catalog.exceptions.GenericCatalogException;
 import org.polypheny.db.catalog.exceptions.UnknownDatabaseException;
 import org.polypheny.db.catalog.exceptions.UnknownSchemaException;
@@ -29,9 +29,9 @@
 
 public interface TransactionManager {
 
-    Transaction startTransaction( CatalogUser user, CatalogSchema defaultSchema, CatalogDatabase database, boolean analyze, String origin );
+    Transaction startTransaction( CatalogUser user, LogicalNamespace defaultSchema, CatalogDatabase database, boolean analyze, String origin );
 
-    Transaction startTransaction( CatalogUser user, CatalogSchema defaultSchema, CatalogDatabase database, boolean analyze, String origin, MultimediaFlavor flavor );
+    Transaction startTransaction( CatalogUser user, LogicalNamespace defaultSchema, CatalogDatabase database, boolean analyze, String origin, MultimediaFlavor flavor );
 
     Transaction startTransaction( long userId, long databaseId, boolean analyze, String origin ) throws GenericCatalogException, UnknownUserException, UnknownDatabaseException, UnknownSchemaException;
 
diff --git a/core/src/main/java/org/polypheny/db/util/ValidatorUtil.java b/core/src/main/java/org/polypheny/db/util/ValidatorUtil.java
index 5ff7c7d9ae..c57d0b9892 100644
--- a/core/src/main/java/org/polypheny/db/util/ValidatorUtil.java
+++ b/core/src/main/java/org/polypheny/db/util/ValidatorUtil.java
@@ -34,7 +34,7 @@ import org.polypheny.db.algebra.type.AlgDataType;
 import org.polypheny.db.algebra.type.AlgDataTypeFactory;
 import org.polypheny.db.algebra.type.AlgDataTypeField;
-import org.polypheny.db.catalog.Snapshot;
+import org.polypheny.db.catalog.snapshot.Snapshot;
 import org.polypheny.db.schema.PolyphenyDbSchema;
 import org.polypheny.db.type.PolyTypeUtil;
 
diff --git a/core/src/test/java/org/polypheny/db/catalog/MockCatalog.java b/core/src/test/java/org/polypheny/db/catalog/MockCatalog.java
index db9726b028..1de0b08d96 100644
--- a/core/src/test/java/org/polypheny/db/catalog/MockCatalog.java
+++ b/core/src/test/java/org/polypheny/db/catalog/MockCatalog.java
@@ -20,6 +20,7 @@
 import com.google.common.collect.ImmutableMap;
 import java.util.List;
 import java.util.Map;
+import lombok.NonNull;
 import org.apache.commons.lang3.NotImplementedException;
 import org.polypheny.db.adapter.DataStore;
 import org.polypheny.db.algebra.AlgCollation;
@@ -43,9 +44,9 @@
 import org.polypheny.db.catalog.entity.CatalogPartitionPlacement;
 import org.polypheny.db.catalog.entity.CatalogPrimaryKey;
 import org.polypheny.db.catalog.entity.CatalogQueryInterface;
-import org.polypheny.db.catalog.entity.CatalogSchema;
 import org.polypheny.db.catalog.entity.CatalogUser;
 import org.polypheny.db.catalog.entity.CatalogView;
+import org.polypheny.db.catalog.entity.LogicalNamespace;
 import org.polypheny.db.catalog.entity.MaterializedCriteria;
 import org.polypheny.db.catalog.entity.logical.LogicalCollection;
 import org.polypheny.db.catalog.entity.logical.LogicalColumn;
@@ -234,59 +235,53 @@ private CatalogDatabase getDatabase( long databaseId ) {
 
 
     @Override
-    public List<CatalogSchema> getSchemas( Pattern schemaNamePattern ) {
+    public @NonNull List<LogicalNamespace> getNamespaces( Pattern name ) {
         throw new NotImplementedException();
     }
 
 
-    private List<CatalogSchema> getSchemas( long databaseId, Pattern schemaNamePattern ) {
+    private List<LogicalNamespace> getSchemas( long databaseId, Pattern schemaNamePattern ) {
         throw new NotImplementedException();
     }
 
 
     @Override
-    public CatalogSchema getSchema( long schemaId ) {
+    public LogicalNamespace getNamespace( long id ) {
         throw new NotImplementedException();
     }
 
 
     @Override
-    public CatalogSchema getSchema( String schemaName ) throws UnknownSchemaException {
+    public LogicalNamespace getNamespace( String name ) throws UnknownSchemaException {
        throw new NotImplementedException();
    }
 
 
-    private CatalogSchema getSchema( long databaseId, String schemaName ) throws UnknownSchemaException {
+    private LogicalNamespace getNamespace( long databaseId, String schemaName ) throws UnknownSchemaException {
         throw new NotImplementedException();
     }
 
 
     @Override
-    public long addNamespace( String name, int ownerId, NamespaceType namespaceType ) {
+    public long addNamespace( String name, NamespaceType namespaceType, boolean caseSensitive ) {
         throw new NotImplementedException();
     }
 
 
     @Override
-    public boolean checkIfExistsSchema( String schemaName ) {
+    public boolean checkIfExistsNamespace( String name ) {
         throw new NotImplementedException();
     }
 
 
     @Override
-    public void renameSchema( long schemaId, String name ) {
+    public void renameNamespace( long schemaId, String name ) {
         throw new NotImplementedException();
     }
 
 
     @Override
-    public void setSchemaOwner( long schemaId, long ownerId ) {
-        throw new NotImplementedException();
-    }
-
-
-    @Override
-    public void deleteSchema( long schemaId ) {
+    public void deleteNamespace( long id ) {
         throw new NotImplementedException();
     }
 
@@ -435,12 +430,6 @@ public List<CatalogColumnPlacement> getColumnPlacementsOnAdapterPerTable( int ad
     }
 
 
-    @Override
-    public List<CatalogColumnPlacement> getColumnPlacementsOnAdapterSortedByPhysicalPosition( int adapterId, long tableId ) {
-        throw new NotImplementedException();
-    }
-
-
     @Override
     public List<CatalogColumnPlacement> getColumnPlacementsOnAdapter( int adapterId ) {
         throw new NotImplementedException();
@@ -802,13 +791,13 @@ public void deleteConstraint( long constraintId ) throws GenericCatalogException
 
 
     @Override
-    public CatalogUser getUser( String userName ) throws UnknownUserException {
+    public CatalogUser getUser( String name ) throws UnknownUserException {
         throw new NotImplementedException();
     }
 
 
     @Override
-    public CatalogUser getUser( int userId ) {
+    public CatalogUser getUser( long id ) {
         throw new NotImplementedException();
     }
 
@@ -826,31 +815,31 @@ public CatalogAdapter getAdapter( String uniqueName ) throws UnknownAdapterExcep
 
 
     @Override
-    public CatalogAdapter getAdapter( int adapterId ) {
+    public CatalogAdapter getAdapter( long id ) {
        throw new NotImplementedException();
    }
 
 
    @Override
-    public boolean checkIfExistsAdapter( int adapterId ) {
+    public boolean checkIfExistsAdapter( long id ) {
        throw new NotImplementedException();
    }
 
 
    @Override
-    public int addAdapter( String uniqueName, String clazz, AdapterType type, Map<String, String> settings ) {
+    public long addAdapter( String uniqueName, String clazz, AdapterType type, Map<String, String> settings ) {
        throw new NotImplementedException();
    }
 
 
    @Override
-    public void updateAdapterSettings( int adapterId, Map<String, String> newSettings ) {
+    public void updateAdapterSettings( long adapterId, Map<String, String> newSettings ) {
        throw new NotImplementedException();
    }
 
 
    @Override
-    public void deleteAdapter( int adapterId ) {
+    public void deleteAdapter( long id ) {
        throw new NotImplementedException();
    }
 
@@ -868,19 +857,19 @@ public CatalogQueryInterface getQueryInterface( String uniqueName ) throws Unkno
 
 
     @Override
-    public CatalogQueryInterface getQueryInterface( int ifaceId ) {
+    public CatalogQueryInterface getQueryInterface( long id ) {
         throw new NotImplementedException();
     }
 
 
     @Override
-    public int addQueryInterface( String uniqueName, String clazz, Map<String, String> settings ) {
+    public long addQueryInterface( String uniqueName, String clazz, Map<String, String> settings ) {
         throw new NotImplementedException();
     }
 
 
     @Override
-    public void deleteQueryInterface( int ifaceId ) {
+    public void deleteQueryInterface( long id ) {
         throw new NotImplementedException();
     }
 
@@ -1086,20 +1075,6 @@ public void addPartitionPlacement( long namespaceId, int adapterId, long tableId
     }
 
 
-    /**
-     * Change physical names of a partition placement.
-     *
-     * @param adapterId The id of the adapter
-     * @param partitionId The id of the partition
-     * @param physicalSchemaName The physical schema name
-     * @param physicalTableName The physical table name
-     */
-    @Override
-    public void updatePartitionPlacementPhysicalNames( int adapterId, long partitionId, String physicalSchemaName, String physicalTableName ) {
-        throw new NotImplementedException();
-    }
-
-
     /**
     * Deletes a placement for a partition.
     *
@@ -1311,12 +1286,6 @@ public ImmutableMap<Long, List<Long>> getPartitionPlacementsByAdapte
     }
 
 
-    @Override
-    public ImmutableMap<Long, List<Long>> getPartitionGroupsByAdapter( long tableId ) {
-        return null;
-    }
-
-
     @Override
     public boolean validateDataPlacementsConstraints( long tableId, long adapterId, List<Long> columnIdsToBeRemoved, List<Long> partitionsIdsToBeRemoved ) {
         throw new NotImplementedException();
@@ -1347,24 +1316,12 @@ public long getPartitionGroupByPartition( long partitionId ) {
     }
 
 
-    @Override
-    public void updateGraphPlacementPhysicalNames( long id, int adapterId, String physicalGraphName ) {
-        throw new NotImplementedException();
-    }
-
-
     @Override
     public List<CatalogPartitionPlacement> getPartitionPlacementsByIdAndRole( long tableId, long partitionId, DataPlacementRole role ) {
         throw new NotImplementedException();
     }
 
 
-    @Override
-    public void updateCollectionPartitionPhysicalNames( long namespaceId, long collectionId, int adapterId, String physicalNamespaceName, String namespaceName, String physicalCollectionName ) {
-        throw new NotImplementedException();
-    }
-
-
     @Override
     public void restoreInterfacesIfNecessary() {
         throw new NotImplementedException();
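The MockCatalog hunks above track two API shifts that run through the rest of this patch: schema-flavored accessors are renamed to namespace ones (CatalogSchema becomes LogicalNamespace) and adapter/user/query-interface ids widen from int to long. A hedged sketch of the renamed surface; the signatures are copied from the hunks, but the interface name bundling them is hypothetical:

    import java.util.List;
    import java.util.Map;
    import lombok.NonNull;
    import org.polypheny.db.catalog.entity.CatalogAdapter;
    import org.polypheny.db.catalog.entity.CatalogAdapter.AdapterType;
    import org.polypheny.db.catalog.entity.LogicalNamespace;
    import org.polypheny.db.catalog.logistic.NamespaceType;
    import org.polypheny.db.catalog.logistic.Pattern;

    interface NamespaceCatalogSketch {

        // Formerly: List<CatalogSchema> getSchemas( Pattern schemaNamePattern );
        @NonNull List<LogicalNamespace> getNamespaces( Pattern name );

        // Formerly: long addNamespace( String name, int ownerId, NamespaceType namespaceType );
        // ownership drops out of the signature; case sensitivity comes in.
        long addNamespace( String name, NamespaceType namespaceType, boolean caseSensitive );

        // Formerly: CatalogAdapter getAdapter( int adapterId ); ids widen to long.
        CatalogAdapter getAdapter( long id );

        // Formerly returned int; the generated adapter id widens as well.
        long addAdapter( String uniqueName, String clazz, AdapterType type, Map<String, String> settings );

    }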
diff --git a/core/src/test/java/org/polypheny/db/catalog/MockCatalogReader.java b/core/src/test/java/org/polypheny/db/catalog/MockCatalogReader.java
index 37d5b86422..2e1a9a7ac8 100644
--- a/core/src/test/java/org/polypheny/db/catalog/MockCatalogReader.java
+++ b/core/src/test/java/org/polypheny/db/catalog/MockCatalogReader.java
@@ -40,13 +40,11 @@
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
-import org.apache.calcite.linq4j.tree.Expression;
 import org.polypheny.db.algebra.AlgCollation;
 import org.polypheny.db.algebra.AlgCollations;
 import org.polypheny.db.algebra.AlgDistribution;
 import org.polypheny.db.algebra.AlgDistributions;
 import org.polypheny.db.algebra.AlgFieldCollation;
-import org.polypheny.db.algebra.AlgNode;
 import org.polypheny.db.algebra.AlgReferentialConstraint;
 import org.polypheny.db.algebra.constant.Kind;
 import org.polypheny.db.algebra.constant.Monotonicity;
@@ -54,16 +52,13 @@
 import org.polypheny.db.algebra.type.AlgDataTypeFactory;
 import org.polypheny.db.algebra.type.AlgDataTypeField;
 import org.polypheny.db.algebra.type.AlgProtoDataType;
-import org.polypheny.db.algebra.type.AlgRecordType;
 import org.polypheny.db.algebra.type.DynamicRecordTypeImpl;
 import org.polypheny.db.algebra.type.StructKind;
 import org.polypheny.db.catalog.entity.CatalogEntity;
-import org.polypheny.db.catalog.entity.CatalogPartitionPlacement;
 import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.catalog.logistic.EntityType;
 import org.polypheny.db.nodes.Call;
 import org.polypheny.db.nodes.Node;
-import org.polypheny.db.plan.AlgTraitSet;
 import org.polypheny.db.prepare.PolyphenyDbCatalogReader;
 import org.polypheny.db.prepare.Prepare.PreparingEntity;
 import org.polypheny.db.schema.AbstractPolyphenyDbSchema;
@@ -147,7 +142,7 @@ protected void registerTablesWithRollUp( MockSchema schema, Fixture f ) {
 
     protected void registerType( final List<String> names, final AlgProtoDataType algProtoDataType ) {
         assert names.get( 0 ).equals( DEFAULT_CATALOG );
         final List<String> schemaPath = Util.skipLast( names );
-        //final PolyphenyDbSchema schema = ValidatorUtil.getSchema( snapshot, schemaPath, NameMatchers.withCaseSensitive( true ) );
+        //final PolyphenyDbSchema schema = ValidatorUtil.getNamespace( snapshot, schemaPath, NameMatchers.withCaseSensitive( true ) );
         //schema.add( Util.last( names ), algProtoDataType );
     }
 
@@ -174,7 +169,7 @@ private void registerTable( final List<String> names, final Entity entity ) {
         assert names.get( 0 ).equals( DEFAULT_CATALOG );
         final List<String> schemaPath = Util.skipLast( names );
         final String tableName = Util.last( names );
-        //final PolyphenyDbSchema schema = ValidatorUtil.getSchema( snapshot, schemaPath, NameMatchers.withCaseSensitive( true ) );
+        //final PolyphenyDbSchema schema = ValidatorUtil.getNamespace( snapshot, schemaPath, NameMatchers.withCaseSensitive( true ) );
         //schema.add( tableName, entity );
     }
 
diff --git a/core/src/test/java/org/polypheny/db/docker/MockCatalogDocker.java b/core/src/test/java/org/polypheny/db/docker/MockCatalogDocker.java
index d842de50e6..a1b70175b8 100644
--- a/core/src/test/java/org/polypheny/db/docker/MockCatalogDocker.java
+++ b/core/src/test/java/org/polypheny/db/docker/MockCatalogDocker.java
@@ -20,9 +20,9 @@
 import java.util.HashMap;
 import java.util.Map;
 import org.polypheny.db.catalog.MockCatalog;
-import org.polypheny.db.catalog.Snapshot;
 import org.polypheny.db.catalog.entity.CatalogAdapter;
 import org.polypheny.db.catalog.entity.CatalogAdapter.AdapterType;
+import org.polypheny.db.catalog.snapshot.Snapshot;
 
 /**
  * This is a bare-bone catalog which allows to mock register adapters
@@ -35,7 +35,7 @@ public class MockCatalogDocker extends MockCatalog {
 
     @Override
-    public int addAdapter( String uniqueName, String clazz, AdapterType type, Map<String, String> settings ) {
+    public long addAdapter( String uniqueName, String clazz, AdapterType type, Map<String, String> settings ) {
         i++;
         adapters.put( i, new CatalogAdapter( i, uniqueName, clazz, type, settings ) );
         return i;
@@ -49,14 +49,14 @@ public Snapshot getSnapshot( long id ) {
 
 
     @Override
-    public boolean checkIfExistsAdapter( int adapterId ) {
-        return adapters.containsKey( adapterId );
+    public boolean checkIfExistsAdapter( long id ) {
+        return adapters.containsKey( id );
     }
 
 
     @Override
-    public CatalogAdapter getAdapter( int adapterId ) {
-        return adapters.get( adapterId );
+    public CatalogAdapter getAdapter( long id ) {
+        return adapters.get( id );
     }
 
 }
diff --git a/core/src/test/java/org/polypheny/db/plan/RelOptUtilTest.java b/core/src/test/java/org/polypheny/db/plan/RelOptUtilTest.java
index 747f8a88c1..e4ff273ce7 100644
--- a/core/src/test/java/org/polypheny/db/plan/RelOptUtilTest.java
+++ b/core/src/test/java/org/polypheny/db/plan/RelOptUtilTest.java
@@ -35,7 +35,7 @@ import org.polypheny.db.algebra.type.AlgDataTypeFactory;
 import org.polypheny.db.algebra.type.AlgDataTypeField;
 import org.polypheny.db.algebra.type.AlgDataTypeSystem;
-import org.polypheny.db.catalog.Snapshot;
+import org.polypheny.db.catalog.snapshot.Snapshot;
 import org.polypheny.db.languages.OperatorRegistry;
 import org.polypheny.db.languages.Parser;
 import org.polypheny.db.prepare.ContextImpl;
diff --git a/core/src/test/java/org/polypheny/db/test/RexProgramBuilderBase.java b/core/src/test/java/org/polypheny/db/test/RexProgramBuilderBase.java
index 90c1e637ba..bc90e96810 100644
--- a/core/src/test/java/org/polypheny/db/test/RexProgramBuilderBase.java
+++ b/core/src/test/java/org/polypheny/db/test/RexProgramBuilderBase.java
@@ -48,7 +48,7 @@ import org.polypheny.db.algebra.type.AlgDataType;
 import org.polypheny.db.algebra.type.AlgDataTypeFactory.Builder;
 import org.polypheny.db.algebra.type.AlgDataTypeSystem;
-import org.polypheny.db.catalog.Snapshot;
+import org.polypheny.db.catalog.snapshot.Snapshot;
 import org.polypheny.db.languages.OperatorRegistry;
 import org.polypheny.db.plan.AlgOptPredicateList;
 import org.polypheny.db.prepare.JavaTypeFactoryImpl;
diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java
index 8ac31f5556..0c0a0803f6 100644
--- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java
+++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java
@@ -65,9 +65,9 @@ import org.polypheny.db.catalog.entity.CatalogMaterializedView;
 import org.polypheny.db.catalog.entity.CatalogPartitionGroup;
 import org.polypheny.db.catalog.entity.CatalogPrimaryKey;
-import org.polypheny.db.catalog.entity.CatalogSchema;
 import org.polypheny.db.catalog.entity.CatalogUser;
 import org.polypheny.db.catalog.entity.CatalogView;
+import org.polypheny.db.catalog.entity.LogicalNamespace;
 import org.polypheny.db.catalog.entity.MaterializedCriteria;
 import org.polypheny.db.catalog.entity.MaterializedCriteria.CriteriaType;
 import org.polypheny.db.catalog.entity.logical.LogicalCollection;
@@ -131,7 +130,6 @@ import org.polypheny.db.routing.RoutingManager;
 import org.polypheny.db.runtime.PolyphenyDbContextException;
 import org.polypheny.db.runtime.PolyphenyDbException;
-import org.polypheny.db.schema.PolySchemaBuilder;
 import org.polypheny.db.transaction.Statement;
 import org.polypheny.db.transaction.TransactionException;
 import org.polypheny.db.type.ArrayType;
@@ -209,7 +208,7 @@ private LogicalColumn getCatalogColumn( long tableId, String columnName ) throws
     public long createNamespace( String name, long databaseId, NamespaceType type, int userId, boolean ifNotExists, boolean replace ) throws NamespaceAlreadyExistsException {
         name = name.toLowerCase();
         // Check if there is already a schema with this name
-        if ( catalog.checkIfExistsSchema( name ) ) {
+        if ( catalog.checkIfExistsNamespace( name ) ) {
             if ( ifNotExists ) {
                 // It is ok that there is already a schema with this name because "IF NOT EXISTS" was specified
                 try {
@@ -223,7 +222,7 @@
                 throw new NamespaceAlreadyExistsException();
             }
         } else {
-            return catalog.addNamespace( name, userId, type );
+            return catalog.addNamespace( name, type, false );
         }
     }
 
@@ -406,23 +405,23 @@ public void dropAdapter( String name, Statement statement ) throws UnknownAdapte
 
     @Override
     public void alterSchemaOwner( String schemaName, String ownerName, long databaseId ) throws UnknownUserException, UnknownSchemaException {
-        CatalogSchema catalogSchema = catalog.getSchema( databaseId, schemaName );
+        LogicalNamespace logicalNamespace = catalog.getSchema( databaseId, schemaName );
         CatalogUser catalogUser = catalog.getUser( ownerName );
-        catalog.setSchemaOwner( catalogSchema.id, catalogUser.id );
+        // catalog.setNamespaceOwner( logicalNamespace.id, catalogUser.id );
     }
 
 
     @Override
     public void renameSchema( String newName, String oldName, long databaseId ) throws NamespaceAlreadyExistsException, UnknownSchemaException {
         newName = newName.toLowerCase();
-        if ( catalog.checkIfExistsSchema( newName ) ) {
+        if ( catalog.checkIfExistsNamespace( newName ) ) {
             throw new NamespaceAlreadyExistsException();
         }
-        CatalogSchema catalogSchema = catalog.getSchema( databaseId, oldName );
-        catalog.renameSchema( catalogSchema.id, newName );
+        LogicalNamespace logicalNamespace = catalog.getSchema( databaseId, oldName );
+        catalog.renameNamespace( logicalNamespace.id, newName );
 
         // Update Name in statistics
-        StatisticsManager.getInstance().updateSchemaName( catalogSchema, newName );
+        StatisticsManager.getInstance().updateSchemaName( logicalNamespace, newName );
     }
 
@@ -907,7 +906,7 @@ else if ( partitionGroupIds.isEmpty() && partitionGroupNames.isEmpty() ) {
         }
 
         // Make sure that the stores have created the schema
-        PolySchemaBuilder.getInstance().getCurrent();
+        Catalog.getInstance().getSnapshot( 0 );
 
         // Create table on store
         dataStore.createPhysicalTable( statement.getPrepareContext(), catalogTable, null );
@@ -1659,7 +1658,7 @@ public void renameTable( LogicalTable catalogTable, String newTableName, Stateme
         // Check if views are dependent from this view
         checkViewDependencies( catalogTable );
 
-        if ( catalog.getSchema( catalogTable.namespaceId ).caseSensitive ) {
+        if ( catalog.getNamespace( catalogTable.namespaceId ).caseSensitive ) {
             newTableName = newTableName.toLowerCase();
         }
 
@@ -1752,7 +1751,7 @@ public void createView( String viewName, long schemaId, AlgNode algNode, AlgColl
 
 
     private String adjustNameIfNeeded( String name, long namespaceId ) {
-        if ( !catalog.getSchema( namespaceId ).caseSensitive ) {
+        if ( !catalog.getNamespace( namespaceId ).caseSensitive ) {
             return name.toLowerCase();
         }
         return name;
@@ -1855,7 +1854,7 @@ public void createMaterializedView( String viewName, long schemaId, AlgRoot algR
         catalog.addPrimaryKey( tableId, columnIds );
 
         CatalogMaterializedView catalogMaterializedView = (CatalogMaterializedView) catalog.getTable( tableId );
-        PolySchemaBuilder.getInstance().getCurrent();
+        Catalog.getInstance().getSnapshot( 0 );
 
         for ( DataStore store : stores ) {
             catalog.addPartitionPlacement(
@@ -1925,7 +1924,7 @@ public long addGraphPlacement( long graphId, List<DataStore> stores, boolean onl
         }
 
         LogicalGraph graph = catalog.getGraph( graphId );
-        PolySchemaBuilder.getInstance().getCurrent();
+        Catalog.getInstance().getSnapshot( 0 );
 
         List<Integer> preExistingPlacements = graph.placements
                 .stream()
@@ -1963,7 +1962,7 @@ public void removeGraphDatabasePlacement( long graphId, DataStore store, Stateme
 
         catalog.deleteGraphPlacement( store.getAdapterId(), graphId );
 
-        PolySchemaBuilder.getInstance().getCurrent();
+        Catalog.getInstance().getSnapshot( 0 );
     }
 
@@ -2228,7 +2227,7 @@ public void createTable( long schemaId, String name, List fiel
         LogicalTable catalogTable = catalog.getTable( tableId );
 
         // Trigger rebuild of schema; triggers schema creation on adapters
-        PolySchemaBuilder.getInstance().getCurrent();
+        Catalog.getInstance().getSnapshot( 0 );
 
         for ( DataStore store : stores ) {
             catalog.addPartitionPlacement(
@@ -2283,7 +2282,7 @@ public void createCollection( long schemaId, String name, boolean ifNotExists, L
         LogicalCollection catalogCollection = catalog.getCollection( collectionId );
 
         // Trigger rebuild of schema; triggers schema creation on adapters
-        PolySchemaBuilder.getInstance().getCurrent();
+        Catalog.getInstance().getSnapshot( 0 );
 
         for ( DataStore store : stores ) {
             catalog.addCollectionPlacement(
@@ -2348,7 +2347,7 @@ public void addCollectionPlacement( long namespaceId, String name, List columns, List constraints ) {
-        if ( catalog.getSchema( schemaId ).namespaceType == NamespaceType.DOCUMENT ) {
+        if ( catalog.getNamespace( schemaId ).namespaceType == NamespaceType.DOCUMENT ) {
             List<String> names = columns.stream().map( c -> c.name ).collect( Collectors.toList() );
 
             if ( names.contains( "_id" ) ) {
@@ -2919,23 +2918,23 @@ public void dropSchema( long databaseId, String schemaName, boolean ifExists, St
         try {
             schemaName = schemaName.toLowerCase();
             // Check if there is a schema with this name
-            if ( catalog.checkIfExistsSchema( schemaName ) ) {
-                CatalogSchema catalogSchema = catalog.getSchema( databaseId, schemaName );
+            if ( catalog.checkIfExistsNamespace( schemaName ) ) {
+                LogicalNamespace logicalNamespace = catalog.getSchema( databaseId, schemaName );
 
                 // Drop all collections in this namespace
-                List<LogicalCollection> collections = catalog.getCollections( catalogSchema.id, null );
+                List<LogicalCollection> collections = catalog.getCollections( logicalNamespace.id, null );
                 for ( LogicalCollection collection : collections ) {
                     dropCollection( collection, statement );
                 }
 
                 // Drop all tables in this schema
-                List<LogicalTable> catalogEntities = catalog.getTables( catalogSchema.id, null );
+                List<LogicalTable> catalogEntities = catalog.getTables( logicalNamespace.id, null );
                 for ( LogicalTable catalogTable : catalogEntities ) {
                     dropTable( catalogTable, statement );
                 }
 
                 // Drop schema
-                catalog.deleteSchema( catalogSchema.id );
+                catalog.deleteNamespace( logicalNamespace.id );
             } else {
                 if ( ifExists ) {
                     // This is ok because "IF EXISTS" was specified
diff --git a/dbms/src/main/java/org/polypheny/db/processing/AbstractQueryProcessor.java b/dbms/src/main/java/org/polypheny/db/processing/AbstractQueryProcessor.java
index 58f128f8a0..9029c67274 100644
--- a/dbms/src/main/java/org/polypheny/db/processing/AbstractQueryProcessor.java
+++ b/dbms/src/main/java/org/polypheny/db/processing/AbstractQueryProcessor.java
@@ -79,7 +79,7 @@ import org.polypheny.db.algebra.type.AlgDataTypeField;
 import org.polypheny.db.catalog.Catalog;
 import org.polypheny.db.catalog.entity.CatalogEntity;
-import org.polypheny.db.catalog.entity.CatalogSchema;
+import org.polypheny.db.catalog.entity.LogicalNamespace;
 import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.catalog.logistic.NamespaceType;
 import org.polypheny.db.config.RuntimeConfig;
@@ -617,7 +617,7 @@ public AlgNode visit( AlgNode node ) {
                     final Catalog catalog = Catalog.getInstance();
                     final LogicalRelModify ltm = (LogicalRelModify) node;
                     final LogicalTable table = ltm.getEntity().unwrap( LogicalTable.class );
-                    final CatalogSchema schema = catalog.getSchema( table.namespaceId );
+                    final LogicalNamespace schema = catalog.getNamespace( table.namespaceId );
                    final List<Index> indices = IndexManager.getInstance().getIndices( schema, table );
 
                     // Check if there are any indexes effected by this table modify
@@ -859,7 +859,7 @@ public AlgNode visit( AlgNode node ) {
                 if ( node instanceof LogicalConditionalExecute ) {
                     final LogicalConditionalExecute lce = (LogicalConditionalExecute) node;
                     final Index index = IndexManager.getInstance().getIndex(
-                            lce.getCatalogSchema(),
+                            lce.getLogicalNamespace(),
                             lce.getCatalogTable(),
                             lce.getCatalogColumns() );
@@ -910,7 +910,7 @@ public AlgNode visit( LogicalProject project ) {
                     ctypes.add( field.getType() );
                 }
                 // Retrieve the catalog schema and database representations required for index lookup
-                final CatalogSchema schema = statement.getTransaction().getDefaultSchema();
+                final LogicalNamespace schema = statement.getTransaction().getDefaultSchema();
                 final LogicalTable ctable = scan.getEntity().unwrap( LogicalTable.class );
                 // Retrieve any index and use for simplification
                 final Index idx = IndexManager.getInstance().getIndex( schema, ctable, columns );
diff --git a/dbms/src/main/java/org/polypheny/db/processing/DataContextImpl.java b/dbms/src/main/java/org/polypheny/db/processing/DataContextImpl.java
index 1a28646ba0..80862f8442 100644
--- a/dbms/src/main/java/org/polypheny/db/processing/DataContextImpl.java
+++ b/dbms/src/main/java/org/polypheny/db/processing/DataContextImpl.java
@@ -30,7 +30,7 @@ import org.polypheny.db.adapter.DataContext;
 import org.polypheny.db.adapter.java.JavaTypeFactory;
 import org.polypheny.db.algebra.type.AlgDataType;
-import org.polypheny.db.catalog.Snapshot;
+import org.polypheny.db.catalog.snapshot.Snapshot;
 import org.polypheny.db.runtime.Hook;
 import org.polypheny.db.transaction.Statement;
 import org.polypheny.db.util.Holder;
diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/AbstractDqlRouter.java b/dbms/src/main/java/org/polypheny/db/routing/routers/AbstractDqlRouter.java
index 27afc61391..1cbaed275f 100644
--- a/dbms/src/main/java/org/polypheny/db/routing/routers/AbstractDqlRouter.java
+++ b/dbms/src/main/java/org/polypheny/db/routing/routers/AbstractDqlRouter.java
@@ -249,7 +249,7 @@ private List<RoutedAlgBuilder> handleRelationalOnGraphScan( AlgNode node, Statem
         AlgBuilder algBuilder = AlgBuilder.create( statement );
         RexBuilder rexBuilder = algBuilder.getRexBuilder();
 
-        algBuilder.lpgScan( catalog.getSchemas( Catalog.defaultDatabaseId, new Pattern( logicalTable.getLogicalSchemaName() ) ).get( 0 ).id );
+        algBuilder.lpgScan( catalog.getNamespaces( Catalog.defaultDatabaseId, new Pattern( logicalTable.getLogicalSchemaName() ) ).get( 0 ).id );
         algBuilder.lpgMatch( List.of( algBuilder.lpgNodeMatch( List.of( logicalTable.getLogicalTableName() ) ) ), List.of( "n" ) );
         algBuilder.lpgProject(
                 List.of( rexBuilder.makeLpgGetId(), rexBuilder.makeLpgPropertiesExtract(), rexBuilder.makeLpgLabels() ),
diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java b/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java
index 35ddd42164..d66f651926 100644
--- a/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java
+++ b/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java
@@ -54,7 +54,6 @@ import org.polypheny.db.algebra.type.AlgDataTypeSystem;
 import org.polypheny.db.algebra.type.AlgRecordType;
 import org.polypheny.db.catalog.Catalog;
-import org.polypheny.db.catalog.Snapshot;
 import org.polypheny.db.catalog.entity.CatalogAdapter;
 import org.polypheny.db.catalog.entity.CatalogCollectionPlacement;
 import org.polypheny.db.catalog.entity.CatalogColumnPlacement;
@@ -72,6 +71,7 @@ import org.polypheny.db.catalog.logistic.NamespaceType;
 import org.polypheny.db.catalog.logistic.Pattern;
 import org.polypheny.db.catalog.refactor.TranslatableEntity;
+import org.polypheny.db.catalog.snapshot.Snapshot;
 import org.polypheny.db.config.RuntimeConfig;
 import org.polypheny.db.languages.OperatorRegistry;
 import org.polypheny.db.languages.QueryLanguage;
@@ -562,7 +562,7 @@ protected RoutedAlgBuilder handleDocumentScan( DocumentScan alg, Statement st
             CatalogAdapter adapter = catalog.getAdapter( placementId );
             NamespaceType sourceModel = collection.namespaceType;
 
-            if ( !adapter.getSupportedNamespaces().contains( sourceModel ) ) {
+            if ( !adapter.supportedNamespaces.contains( sourceModel ) ) {
                 // document on relational
                 scans.add( handleDocumentOnRelational( alg, placementId, statement, builder ) );
                 continue;
diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java b/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java
index 7b92fe7c4c..8efac6664c 100644
--- a/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java
+++ b/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java
@@ -78,7 +78,6 @@ import org.polypheny.db.algebra.type.AlgDataTypeFieldImpl;
 import org.polypheny.db.algebra.type.AlgRecordType;
 import org.polypheny.db.catalog.Catalog;
-import org.polypheny.db.catalog.Snapshot;
 import org.polypheny.db.catalog.entity.CatalogAdapter;
 import org.polypheny.db.catalog.entity.CatalogCollectionMapping;
 import
org.polypheny.db.catalog.entity.CatalogCollectionPlacement; @@ -98,6 +97,7 @@ import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.catalog.refactor.ModifiableEntity; +import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.partition.PartitionManager; import org.polypheny.db.partition.PartitionManagerFactory; import org.polypheny.db.plan.AlgOptCluster; @@ -731,7 +731,7 @@ public AlgNode routeDocumentDml( LogicalDocumentModify alg, Statement statement, String collectionName = collection.name + "_" + placement.id; PhysicalCollection document = snapshot.getPhysicalCollection( placement.id ); - if ( !adapter.getSupportedNamespaces().contains( NamespaceType.DOCUMENT ) ) { + if ( !adapter.supportedNamespaces.contains( NamespaceType.DOCUMENT ) ) { // move "slower" updates in front modifies.add( 0, attachRelationalModify( alg, statement, placementId, queryInformation ) ); continue; diff --git a/dbms/src/main/java/org/polypheny/db/transaction/TransactionImpl.java b/dbms/src/main/java/org/polypheny/db/transaction/TransactionImpl.java index 722bdefb17..015e87e039 100644 --- a/dbms/src/main/java/org/polypheny/db/transaction/TransactionImpl.java +++ b/dbms/src/main/java/org/polypheny/db/transaction/TransactionImpl.java @@ -39,12 +39,12 @@ import org.polypheny.db.algebra.constant.Kind; import org.polypheny.db.algebra.logical.common.LogicalConstraintEnforcer.EnforcementInformation; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.Snapshot; import org.polypheny.db.catalog.entity.CatalogDatabase; import org.polypheny.db.catalog.entity.CatalogKey.EnforcementTime; -import org.polypheny.db.catalog.entity.CatalogSchema; import org.polypheny.db.catalog.entity.CatalogUser; +import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.config.RuntimeConfig; import org.polypheny.db.information.InformationManager; import org.polypheny.db.languages.QueryLanguage; @@ -76,7 +76,7 @@ public class TransactionImpl implements Transaction, Comparable { @Getter private final CatalogUser user; @Getter - private final CatalogSchema defaultSchema; + private final LogicalNamespace defaultSchema; @Getter private final CatalogDatabase database; @@ -117,7 +117,7 @@ public class TransactionImpl implements Transaction, Comparable { PolyXid xid, TransactionManagerImpl transactionManager, CatalogUser user, - CatalogSchema defaultSchema, + LogicalNamespace defaultSchema, CatalogDatabase database, boolean analyze, String origin, diff --git a/dbms/src/main/java/org/polypheny/db/transaction/TransactionManagerImpl.java b/dbms/src/main/java/org/polypheny/db/transaction/TransactionManagerImpl.java index ee997b7e79..dbed8b0700 100644 --- a/dbms/src/main/java/org/polypheny/db/transaction/TransactionManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/transaction/TransactionManagerImpl.java @@ -24,8 +24,8 @@ import org.polypheny.db.adapter.Adapter; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogDatabase; -import org.polypheny.db.catalog.entity.CatalogSchema; import org.polypheny.db.catalog.entity.CatalogUser; +import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.exceptions.GenericCatalogException; import org.polypheny.db.catalog.exceptions.UnknownDatabaseException; import 
org.polypheny.db.catalog.exceptions.UnknownSchemaException; @@ -84,7 +84,7 @@ public static TransactionManager getInstance() { @Override - public Transaction startTransaction( CatalogUser user, CatalogSchema defaultSchema, CatalogDatabase database, boolean analyze, String origin, MultimediaFlavor flavor ) { + public Transaction startTransaction( CatalogUser user, LogicalNamespace defaultSchema, CatalogDatabase database, boolean analyze, String origin, MultimediaFlavor flavor ) { final NodeId nodeId = (NodeId) PUID.randomPUID( Type.NODE ); // TODO: get real node id -- configuration.get("nodeid") final UserId userId = (UserId) PUID.randomPUID( Type.USER ); // TODO: use real user id final ConnectionId connectionId = (ConnectionId) PUID.randomPUID( Type.CONNECTION ); // TODO @@ -95,7 +95,7 @@ public Transaction startTransaction( CatalogUser user, CatalogSchema defaultSche @Override - public Transaction startTransaction( CatalogUser user, CatalogSchema defaultSchema, CatalogDatabase database, boolean analyze, String origin ) { + public Transaction startTransaction( CatalogUser user, LogicalNamespace defaultSchema, CatalogDatabase database, boolean analyze, String origin ) { return startTransaction( user, defaultSchema, database, analyze, origin, MultimediaFlavor.DEFAULT ); } @@ -105,8 +105,8 @@ public Transaction startTransaction( long userId, long databaseId, boolean analy Catalog catalog = Catalog.getInstance(); CatalogUser catalogUser = catalog.getUser( (int) userId ); CatalogDatabase catalogDatabase = catalog.getDatabase( databaseId ); - CatalogSchema catalogSchema = catalog.getSchema( catalogDatabase.id, catalogDatabase.defaultNamespaceName ); - return startTransaction( catalogUser, catalogSchema, catalogDatabase, analyze, origin, flavor ); + LogicalNamespace logicalNamespace = catalog.getSchema( catalogDatabase.id, catalogDatabase.defaultNamespaceName ); + return startTransaction( catalogUser, logicalNamespace, catalogDatabase, analyze, origin, flavor ); } diff --git a/dbms/src/test/java/org/polypheny/db/catalog/CatalogTest.java b/dbms/src/test/java/org/polypheny/db/catalog/CatalogTest.java index 9d5aa51edf..3e0705d9e0 100644 --- a/dbms/src/test/java/org/polypheny/db/catalog/CatalogTest.java +++ b/dbms/src/test/java/org/polypheny/db/catalog/CatalogTest.java @@ -148,7 +148,7 @@ public void testGetSchema() { ImmutableList.of( schemaTest ) ); } catch ( SQLException e ) { - log.error( "Exception while testing getSchemas()", e ); + log.error( "Exception while testing getNamespaces()", e ); } } diff --git a/dbms/src/test/java/org/polypheny/db/misc/AlgBuilderTest.java b/dbms/src/test/java/org/polypheny/db/misc/AlgBuilderTest.java index c3202885d2..a5a759f982 100644 --- a/dbms/src/test/java/org/polypheny/db/misc/AlgBuilderTest.java +++ b/dbms/src/test/java/org/polypheny/db/misc/AlgBuilderTest.java @@ -67,7 +67,7 @@ import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; import org.polypheny.db.algebra.type.AlgDataTypeField; -import org.polypheny.db.catalog.Snapshot; +import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.config.RuntimeConfig; import org.polypheny.db.languages.OperatorRegistry; import org.polypheny.db.languages.Parser; @@ -79,7 +79,6 @@ import org.polypheny.db.rex.RexInputRef; import org.polypheny.db.rex.RexNode; import org.polypheny.db.runtime.PolyphenyDbException; -import org.polypheny.db.schema.PolyphenyDbSchema; import org.polypheny.db.test.Matchers; import org.polypheny.db.tools.AlgBuilder; import 
org.polypheny.db.tools.FrameworkConfig; diff --git a/dbms/src/test/java/org/polypheny/db/mql/DdlTest.java b/dbms/src/test/java/org/polypheny/db/mql/DdlTest.java index 5bc31ca636..bba1ebd76a 100644 --- a/dbms/src/test/java/org/polypheny/db/mql/DdlTest.java +++ b/dbms/src/test/java/org/polypheny/db/mql/DdlTest.java @@ -32,10 +32,10 @@ import org.polypheny.db.TestHelper.JdbcConnection; import org.polypheny.db.TestHelper.MongoConnection; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.logistic.Pattern; +import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalCollection; -import org.polypheny.db.catalog.entity.CatalogSchema; import org.polypheny.db.catalog.exceptions.UnknownSchemaException; +import org.polypheny.db.catalog.logistic.Pattern; import org.polypheny.db.excluded.CassandraExcluded; import org.polypheny.db.webui.models.Result; @@ -57,7 +57,7 @@ public void addCollectionTest() throws UnknownSchemaException { Catalog catalog = Catalog.getInstance(); String name = "testCollection"; - CatalogSchema namespace = catalog.getSchema( Catalog.defaultDatabaseId, database ); + LogicalNamespace namespace = catalog.getSchema( Catalog.defaultDatabaseId, database ); int size = catalog.getCollections( namespace.id, null ).size(); @@ -83,7 +83,7 @@ public void addPlacementTest() throws UnknownSchemaException, SQLException { String placement = "store1"; try { - CatalogSchema namespace = catalog.getSchema( Catalog.defaultDatabaseId, database ); + LogicalNamespace namespace = catalog.getSchema( Catalog.defaultDatabaseId, database ); List collectionNames = catalog.getCollections( namespace.id, null ).stream().map( c -> c.name ).collect( Collectors.toList() ); collectionNames.forEach( n -> execute( String.format( "db.%s.drop()", n ) ) ); @@ -119,7 +119,7 @@ public void deletePlacementTest() throws UnknownSchemaException, SQLException { execute( "db.createCollection(\"" + collectionName + "\")" ); - CatalogSchema namespace = catalog.getSchema( Catalog.defaultDatabaseId, database ); + LogicalNamespace namespace = catalog.getSchema( Catalog.defaultDatabaseId, database ); LogicalCollection collection = catalog.getCollections( namespace.id, new Pattern( collectionName ) ).get( 0 ); diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/DashboardInformation.java b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/DashboardInformation.java index 93ef793076..d48adc2a89 100644 --- a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/DashboardInformation.java +++ b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/DashboardInformation.java @@ -81,9 +81,9 @@ public void updatePolyphenyStatistic() { this.numberOfPendingEvents = MonitoringServiceProvider.getInstance().getNumberOfElementsInQueue(); catalog.getAdapters().forEach( v -> { - this.availableAdapter.put( v.uniqueName, Pair.of( v.getAdapterTypeName(), v.type ) ); + this.availableAdapter.put( v.uniqueName, Pair.of( v.adapterTypeName, v.type ) ); } ); - catalog.getSchemas( null ).forEach( v -> { + catalog.getNamespaces( null ).forEach( v -> { availableSchemas.put( v.id, Pair.of( v.name, v.namespaceType ) ); } ); } diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticColumn.java b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticColumn.java index 9a8e643afc..690b8af738 100644 --- a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticColumn.java +++ 
@@ -87,7 +87,7 @@ public StatisticColumn( long schemaId, long tableId, long columnId, PolyType typ
         Catalog catalog = Catalog.getInstance();
         if ( catalog.checkIfExistsEntity( tableId ) ) {
-            this.schema = catalog.getSchema( schemaId ).name;
+            this.schema = catalog.getNamespace( schemaId ).name;
             this.table = catalog.getTable( tableId ).name;
             this.column = catalog.getColumn( columnId ).name;
         }
diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticQueryProcessor.java b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticQueryProcessor.java
index 2721c540a3..722ec38890 100644
--- a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticQueryProcessor.java
+++ b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticQueryProcessor.java
@@ -27,7 +27,7 @@
 import org.polypheny.db.algebra.AlgRoot;
 import org.polypheny.db.algebra.constant.Kind;
 import org.polypheny.db.catalog.Catalog;
-import org.polypheny.db.catalog.entity.CatalogSchema;
+import org.polypheny.db.catalog.entity.LogicalNamespace;
 import org.polypheny.db.catalog.entity.logical.LogicalColumn;
 import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.catalog.exceptions.UnknownDatabaseException;
@@ -90,8 +90,8 @@ public List<List<String>> getSchemaTree() {
         Catalog catalog = Catalog.getInstance();
         List<List<String>> result = new ArrayList<>();
         List<String> schemaTree = new ArrayList<>();
-        List<CatalogSchema> schemas = catalog.getSchemas( databaseId, null );
-        for ( CatalogSchema schema : schemas ) {
+        List<LogicalNamespace> schemas = catalog.getNamespaces( databaseId, null );
+        for ( LogicalNamespace schema : schemas ) {
             List<String> tables = new ArrayList<>();
             List<LogicalTable> childTables = catalog.getTables( schema.id, null );
             for ( LogicalTable childTable : childTables ) {
diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticsManagerImpl.java b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticsManagerImpl.java
index 2cacdf36f5..adb5544761 100644
--- a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticsManagerImpl.java
+++ b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticsManagerImpl.java
@@ -53,8 +53,7 @@
 import org.polypheny.db.algebra.operators.OperatorName;
 import org.polypheny.db.algebra.type.AlgDataType;
 import org.polypheny.db.catalog.Catalog;
-import org.polypheny.db.catalog.Snapshot;
-import org.polypheny.db.catalog.entity.CatalogSchema;
+import org.polypheny.db.catalog.entity.LogicalNamespace;
 import org.polypheny.db.catalog.entity.logical.LogicalColumn;
 import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.catalog.exceptions.GenericCatalogException;
@@ -62,6 +61,7 @@
 import org.polypheny.db.catalog.exceptions.UnknownSchemaException;
 import org.polypheny.db.catalog.exceptions.UnknownUserException;
 import org.polypheny.db.catalog.logistic.EntityType;
+import org.polypheny.db.catalog.snapshot.Snapshot;
 import org.polypheny.db.config.Config;
 import org.polypheny.db.config.Config.ConfigListener;
 import org.polypheny.db.config.RuntimeConfig;
@@ -175,15 +175,15 @@ public void updateTableName( LogicalTable catalogTable, String newName ) {

     @Override
-    public void updateSchemaName( CatalogSchema catalogSchema, String newName ) {
-        if ( statisticSchemaMap.containsKey( catalogSchema.id ) ) {
-            Map<Long, Map<Long, StatisticColumn<?>>> tableInformation = statisticSchemaMap.get( catalogSchema.id );
+    public void updateSchemaName( LogicalNamespace logicalNamespace, String newName ) {
+        if ( statisticSchemaMap.containsKey( logicalNamespace.id ) ) {
+            Map<Long, Map<Long, StatisticColumn<?>>> tableInformation = statisticSchemaMap.get( logicalNamespace.id );
             for ( long tableId : tableInformation.keySet() ) {
-                Map<Long, StatisticColumn<?>> columnsInformation = statisticSchemaMap.get( catalogSchema.id ).remove( tableId );
+                Map<Long, StatisticColumn<?>> columnsInformation = statisticSchemaMap.get( logicalNamespace.id ).remove( tableId );
                 for ( Entry<Long, StatisticColumn<?>> columnInfo : columnsInformation.entrySet() ) {
                     StatisticColumn<?> statisticColumn = columnInfo.getValue();
                     statisticColumn.updateSchemaName( newName );
-                    statisticSchemaMap.get( catalogSchema.id ).get( tableId ).put( columnInfo.getKey(), statisticColumn );
+                    statisticSchemaMap.get( logicalNamespace.id ).get( tableId ).put( columnInfo.getKey(), statisticColumn );
                 }
             }
         }
diff --git a/plugins/avatica-interface/src/main/java/org/polypheny/db/avatica/DbmsMeta.java b/plugins/avatica-interface/src/main/java/org/polypheny/db/avatica/DbmsMeta.java
index b0345de932..cc16bd7c05 100644
--- a/plugins/avatica-interface/src/main/java/org/polypheny/db/avatica/DbmsMeta.java
+++ b/plugins/avatica-interface/src/main/java/org/polypheny/db/avatica/DbmsMeta.java
@@ -79,9 +79,9 @@
 import org.polypheny.db.catalog.entity.CatalogPrimaryKey;
 import org.polypheny.db.catalog.entity.CatalogPrimaryKey.CatalogPrimaryKeyColumn;
 import org.polypheny.db.catalog.entity.CatalogPrimaryKey.CatalogPrimaryKeyColumn.PrimitiveCatalogPrimaryKeyColumn;
-import org.polypheny.db.catalog.entity.CatalogSchema;
-import org.polypheny.db.catalog.entity.CatalogSchema.PrimitiveCatalogSchema;
 import org.polypheny.db.catalog.entity.CatalogUser;
+import org.polypheny.db.catalog.entity.LogicalNamespace;
+import org.polypheny.db.catalog.entity.LogicalNamespace.PrimitiveCatalogSchema;
 import org.polypheny.db.catalog.entity.logical.LogicalColumn;
 import org.polypheny.db.catalog.entity.logical.LogicalColumn.PrimitiveCatalogColumn;
 import org.polypheny.db.catalog.entity.logical.LogicalTable;
@@ -346,9 +346,9 @@ public MetaResultSet getSchemas( final ConnectionHandle ch, final String databas
         final PolyphenyDbConnectionHandle connection = getPolyphenyDbConnectionHandle( ch.id );
         synchronized ( connection ) {
             if ( log.isTraceEnabled() ) {
-                log.trace( "getSchemas( ConnectionHandle {}, String {}, Pat {} )", ch, database, schemaPattern );
+                log.trace( "getNamespaces( ConnectionHandle {}, String {}, Pat {} )", ch, database, schemaPattern );
             }
-            final List<CatalogSchema> schemas = catalog.getSchemas(
+            final List<LogicalNamespace> schemas = catalog.getNamespaces(
                     (schemaPattern == null || schemaPattern.s == null) ? null : new Pattern( schemaPattern.s ) );
             StatementHandle statementHandle = createStatement( ch );
@@ -1415,7 +1415,7 @@ public void openConnection( final ConnectionHandle ch, final Map
             // Authorizer.hasAccess( user, database );

             // Check schema access
-            final CatalogSchema schema;
+            final LogicalNamespace schema;
             try {
                 schema = catalog.getSchema( database.id, defaultSchemaName );
             } catch ( UnknownSchemaException e ) {
diff --git a/plugins/avatica-interface/src/main/java/org/polypheny/db/avatica/PolyphenyDbConnectionHandle.java b/plugins/avatica-interface/src/main/java/org/polypheny/db/avatica/PolyphenyDbConnectionHandle.java
index 970bc66995..56f28158b5 100644
--- a/plugins/avatica-interface/src/main/java/org/polypheny/db/avatica/PolyphenyDbConnectionHandle.java
+++ b/plugins/avatica-interface/src/main/java/org/polypheny/db/avatica/PolyphenyDbConnectionHandle.java
@@ -24,8 +24,8 @@
 import org.apache.calcite.avatica.Meta.ConnectionHandle;
 import org.apache.calcite.avatica.Meta.ConnectionProperties;
 import org.polypheny.db.catalog.entity.CatalogDatabase;
-import org.polypheny.db.catalog.entity.CatalogSchema;
 import org.polypheny.db.catalog.entity.CatalogUser;
+import org.polypheny.db.catalog.entity.LogicalNamespace;
 import org.polypheny.db.transaction.PUID.ConnectionId;
 import org.polypheny.db.transaction.PUID.UserId;
 import org.polypheny.db.transaction.Transaction;
@@ -44,7 +44,7 @@ public class PolyphenyDbConnectionHandle {
     @Getter
     private final CatalogUser user;
     private final CatalogDatabase database;
-    private final CatalogSchema schema;
+    private final LogicalNamespace schema;
     private final ConnectionId connectionId;
     private Transaction currentTransaction;
@@ -55,7 +55,7 @@ public class PolyphenyDbConnectionHandle {
     private final ConnectionProperties connectionProperties = new ConnectionPropertiesImpl( true, false, java.sql.Connection.TRANSACTION_SERIALIZABLE, "APP", "public" );

-    public PolyphenyDbConnectionHandle( final Meta.ConnectionHandle handle, final CatalogUser catalogUser, final ConnectionId connectionId, final CatalogDatabase database, final CatalogSchema schema, final TransactionManager transactionManager ) {
+    public PolyphenyDbConnectionHandle( final Meta.ConnectionHandle handle, final CatalogUser catalogUser, final ConnectionId connectionId, final CatalogDatabase database, final LogicalNamespace schema, final TransactionManager transactionManager ) {
         this.handle = handle;
         this.userId = UserId.fromString( catalogUser.name ); // TODO: refactor CatalogUser
@@ -67,7 +67,7 @@ public PolyphenyDbConnectionHandle( final Meta.ConnectionHandle handle, final Ca
     }

-    public PolyphenyDbConnectionHandle( final ConnectionHandle handle, final CatalogUser catalogUser, final String connectionId, final CatalogDatabase database, final CatalogSchema schema, final TransactionManager transactionManager ) {
+    public PolyphenyDbConnectionHandle( final ConnectionHandle handle, final CatalogUser catalogUser, final String connectionId, final CatalogDatabase database, final LogicalNamespace schema, final TransactionManager transactionManager ) {
         this.handle = handle;
         this.userId = UserId.fromString( catalogUser.name ); // TODO: refactor CatalogUser
diff --git a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java
index 5b4637e030..111585c588 100644
--- a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java
+++ b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java
@@ -41,11 +41,11 @@
 import org.polypheny.db.adapter.DeployMode;
 import org.polypheny.db.adapter.csv.CsvTable.Flavor;
 import org.polypheny.db.catalog.Catalog;
-import org.polypheny.db.catalog.Snapshot;
 import org.polypheny.db.catalog.entity.CatalogColumnPlacement;
 import org.polypheny.db.catalog.entity.allocation.AllocationTable;
 import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.catalog.entity.physical.PhysicalTable;
+import org.polypheny.db.catalog.snapshot.Snapshot;
 import org.polypheny.db.information.InformationGroup;
 import org.polypheny.db.information.InformationTable;
 import org.polypheny.db.prepare.Context;
diff --git a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvTranslatableTable.java b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvTranslatableTable.java
index 7813d0c10b..dab7c48178 100644
--- a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvTranslatableTable.java
+++ b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvTranslatableTable.java
@@ -33,27 +33,17 @@

 package org.polypheny.db.adapter.csv;

-import java.lang.reflect.Type;
 import java.util.List;
 import java.util.concurrent.atomic.AtomicBoolean;
 import org.apache.calcite.linq4j.AbstractEnumerable;
 import org.apache.calcite.linq4j.Enumerable;
 import org.apache.calcite.linq4j.Enumerator;
-import org.apache.calcite.linq4j.Queryable;
-import org.apache.calcite.linq4j.tree.Expression;
 import org.polypheny.db.adapter.DataContext;
 import org.polypheny.db.algebra.AlgNode;
-import org.polypheny.db.algebra.type.AlgProtoDataType;
-import org.polypheny.db.catalog.Snapshot;
-import org.polypheny.db.catalog.entity.CatalogEntity;
 import org.polypheny.db.catalog.entity.allocation.AllocationTable;
-import org.polypheny.db.catalog.refactor.QueryableEntity;
 import org.polypheny.db.catalog.refactor.TranslatableEntity;
-import org.polypheny.db.plan.AlgOptEntity;
 import org.polypheny.db.plan.AlgOptEntity.ToAlgContext;
 import org.polypheny.db.plan.AlgTraitSet;
-import org.polypheny.db.schema.PolyphenyDbSchema;
-import org.polypheny.db.schema.Schemas;
 import org.polypheny.db.util.Source;
diff --git a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/cypher2alg/CypherToAlgConverter.java b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/cypher2alg/CypherToAlgConverter.java
index d696f28d6d..cc7857913a 100644
--- a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/cypher2alg/CypherToAlgConverter.java
+++ b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/cypher2alg/CypherToAlgConverter.java
@@ -49,9 +49,9 @@
 import org.polypheny.db.algebra.type.AlgDataTypeFieldImpl;
 import org.polypheny.db.algebra.type.AlgRecordType;
 import org.polypheny.db.catalog.Catalog;
-import org.polypheny.db.catalog.Snapshot;
 import org.polypheny.db.catalog.entity.logical.LogicalGraph;
 import org.polypheny.db.catalog.exceptions.UnknownSchemaException;
+import org.polypheny.db.catalog.snapshot.Snapshot;
 import org.polypheny.db.cypher.CypherNode;
 import org.polypheny.db.cypher.CypherNode.CypherFamily;
 import org.polypheny.db.cypher.clause.CypherClause;
@@ -69,7 +69,6 @@
 import org.polypheny.db.languages.OperatorRegistry;
 import org.polypheny.db.languages.QueryLanguage;
 import org.polypheny.db.plan.AlgOptCluster;
-import org.polypheny.db.prepare.PolyphenyDbCatalogReader;
 import org.polypheny.db.processing.ExtendedQueryParameters;
 import org.polypheny.db.rex.RexBuilder;
 import org.polypheny.db.rex.RexCall;
diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcEntity.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcEntity.java
index 31690a4667..f446cbffc4 100644
--- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcEntity.java
+++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcEntity.java
@@ -55,7 +55,6 @@
 import org.polypheny.db.algebra.logical.relational.LogicalRelModify;
 import org.polypheny.db.algebra.operators.OperatorName;
 import org.polypheny.db.algebra.type.AlgDataType;
-import org.polypheny.db.catalog.Snapshot;
 import org.polypheny.db.catalog.entity.CatalogEntity;
 import org.polypheny.db.catalog.entity.allocation.AllocationTable;
 import org.polypheny.db.catalog.entity.logical.LogicalTable;
@@ -64,6 +63,7 @@
 import org.polypheny.db.catalog.refactor.QueryableEntity;
 import org.polypheny.db.catalog.refactor.ScannableEntity;
 import org.polypheny.db.catalog.refactor.TranslatableEntity;
+import org.polypheny.db.catalog.snapshot.Snapshot;
 import org.polypheny.db.languages.OperatorRegistry;
 import org.polypheny.db.languages.ParserPos;
 import org.polypheny.db.plan.AlgOptCluster;
diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcSchema.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcSchema.java
index 99b963c1f9..ca48112c8c 100644
--- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcSchema.java
+++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcSchema.java
@@ -54,10 +54,10 @@
 import org.polypheny.db.algebra.type.AlgDataType;
 import org.polypheny.db.algebra.type.AlgDataTypeFactory;
 import org.polypheny.db.algebra.type.AlgProtoDataType;
-import org.polypheny.db.catalog.Snapshot;
 import org.polypheny.db.catalog.entity.CatalogEntity;
 import org.polypheny.db.catalog.entity.allocation.AllocationTable;
 import org.polypheny.db.catalog.entity.logical.LogicalTable;
+import org.polypheny.db.catalog.snapshot.Snapshot;
 import org.polypheny.db.schema.Function;
 import org.polypheny.db.schema.Namespace;
 import org.polypheny.db.schema.Namespace.Schema;
diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java
index 8e5c6c743e..be0acf6839 100644
--- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java
+++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java
@@ -37,8 +37,8 @@
 import org.polypheny.db.adapter.jdbc.connection.ConnectionHandlerException;
 import org.polypheny.db.adapter.jdbc.connection.TransactionalConnectionFactory;
 import org.polypheny.db.catalog.Catalog;
-import org.polypheny.db.catalog.Snapshot;
 import org.polypheny.db.catalog.entity.logical.LogicalTable;
+import org.polypheny.db.catalog.snapshot.Snapshot;
 import org.polypheny.db.plugins.PolyPluginManager;
 import org.polypheny.db.prepare.Context;
 import org.polypheny.db.sql.language.SqlDialect;
diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/stores/AbstractJdbcStore.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/stores/AbstractJdbcStore.java
index 0d9c4aa67b..4dd054bdec 100644
--- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/stores/AbstractJdbcStore.java
+++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/stores/AbstractJdbcStore.java
@@ -30,13 +30,13 @@
 import org.polypheny.db.adapter.jdbc.JdbcUtils;
 import org.polypheny.db.adapter.jdbc.connection.ConnectionFactory;
 import org.polypheny.db.adapter.jdbc.connection.ConnectionHandlerException;
-import org.polypheny.db.catalog.Snapshot;
 import org.polypheny.db.catalog.entity.CatalogColumnPlacement;
 import org.polypheny.db.catalog.entity.CatalogPartitionPlacement;
 import org.polypheny.db.catalog.entity.allocation.AllocationTable;
 import org.polypheny.db.catalog.entity.logical.LogicalColumn;
 import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.catalog.entity.physical.PhysicalTable;
+import org.polypheny.db.catalog.snapshot.Snapshot;
 import org.polypheny.db.config.RuntimeConfig;
 import org.polypheny.db.docker.DockerInstance;
 import org.polypheny.db.languages.ParserPos;
diff --git a/plugins/jdbc-adapter-framework/src/test/java/org/polypheny/db/adapter/jdbc/rel2sql/RelToSqlConverterStructsTest.java b/plugins/jdbc-adapter-framework/src/test/java/org/polypheny/db/adapter/jdbc/rel2sql/RelToSqlConverterStructsTest.java
index c8fdb04724..5004cfcf96 100644
--- a/plugins/jdbc-adapter-framework/src/test/java/org/polypheny/db/adapter/jdbc/rel2sql/RelToSqlConverterStructsTest.java
+++ b/plugins/jdbc-adapter-framework/src/test/java/org/polypheny/db/adapter/jdbc/rel2sql/RelToSqlConverterStructsTest.java
@@ -47,8 +47,8 @@
 import org.polypheny.db.algebra.type.AlgDataType;
 import org.polypheny.db.algebra.type.AlgDataTypeFactory;
 import org.polypheny.db.algebra.type.AlgProtoDataType;
-import org.polypheny.db.catalog.Snapshot;
 import org.polypheny.db.catalog.logistic.NamespaceType;
+import org.polypheny.db.catalog.snapshot.Snapshot;
 import org.polypheny.db.nodes.Call;
 import org.polypheny.db.nodes.Node;
 import org.polypheny.db.schema.AbstractPolyphenyDbSchema;
diff --git a/plugins/mapdb-catalog/src/main/java/org/polypheny/db/catalog/CatalogImpl.java b/plugins/mapdb-catalog/src/main/java/org/polypheny/db/catalog/CatalogImpl.java
index d4de8b2467..46c597e3f4 100644
--- a/plugins/mapdb-catalog/src/main/java/org/polypheny/db/catalog/CatalogImpl.java
+++ b/plugins/mapdb-catalog/src/main/java/org/polypheny/db/catalog/CatalogImpl.java
@@ -39,6 +39,7 @@
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
 import lombok.Getter;
+import lombok.NonNull;
 import lombok.extern.slf4j.Slf4j;
 import org.mapdb.BTreeMap;
 import org.mapdb.DB;
@@ -62,8 +63,6 @@
 import org.polypheny.db.algebra.type.AlgDataType;
 import org.polypheny.db.catalog.entity.CatalogAdapter;
 import org.polypheny.db.catalog.entity.CatalogAdapter.AdapterType;
-import org.polypheny.db.catalog.entity.CatalogCollectionMapping;
-import org.polypheny.db.catalog.entity.CatalogCollectionPlacement;
 import org.polypheny.db.catalog.entity.CatalogColumnPlacement;
 import org.polypheny.db.catalog.entity.CatalogConstraint;
 import org.polypheny.db.catalog.entity.CatalogDataPlacement;
@@ -81,9 +80,9 @@
 import org.polypheny.db.catalog.entity.CatalogPartitionPlacement;
 import org.polypheny.db.catalog.entity.CatalogPrimaryKey;
 import org.polypheny.db.catalog.entity.CatalogQueryInterface;
-import org.polypheny.db.catalog.entity.CatalogSchema;
 import org.polypheny.db.catalog.entity.CatalogUser;
 import org.polypheny.db.catalog.entity.CatalogView;
+import org.polypheny.db.catalog.entity.LogicalNamespace;
 import org.polypheny.db.catalog.entity.MaterializedCriteria;
 import org.polypheny.db.catalog.entity.logical.LogicalCollection;
 import org.polypheny.db.catalog.entity.logical.LogicalColumn;
@@ -94,8 +93,6 @@
 import org.polypheny.db.catalog.exceptions.NoTablePrimaryKeyException;
 import org.polypheny.db.catalog.exceptions.UnknownAdapterException;
 import org.polypheny.db.catalog.exceptions.UnknownAdapterIdRuntimeException;
-import org.polypheny.db.catalog.exceptions.UnknownCollectionException;
-import org.polypheny.db.catalog.exceptions.UnknownCollectionPlacementException;
 import org.polypheny.db.catalog.exceptions.UnknownColumnException;
 import org.polypheny.db.catalog.exceptions.UnknownColumnIdRuntimeException;
 import org.polypheny.db.catalog.exceptions.UnknownColumnPlacementRuntimeException;
@@ -127,6 +124,7 @@
 import org.polypheny.db.catalog.logistic.PartitionType;
 import org.polypheny.db.catalog.logistic.Pattern;
 import org.polypheny.db.catalog.logistic.PlacementType;
+import org.polypheny.db.catalog.snapshot.Snapshot;
 import org.polypheny.db.config.RuntimeConfig;
 import org.polypheny.db.iface.QueryInterfaceManager;
 import org.polypheny.db.languages.QueryLanguage;
@@ -159,8 +157,8 @@ public class CatalogImpl extends Catalog {
     private static BTreeMap databaseNames;
     private static HTreeMap databaseChildren;

-    private static BTreeMap<Long, CatalogSchema> schemas;
-    private static BTreeMap<Object[], CatalogSchema> schemaNames;
+    private static BTreeMap<Long, LogicalNamespace> schemas;
+    private static BTreeMap<Object[], LogicalNamespace> schemaNames;
     private static HTreeMap schemaChildren;

     private static BTreeMap tables;
@@ -170,9 +168,6 @@ public class CatalogImpl extends Catalog {
     private static BTreeMap collections;
     private static BTreeMap collectionNames;

-    private static BTreeMap collectionPlacements;
-
-    private static BTreeMap documentMappings;

     private static BTreeMap columns;
     private static BTreeMap columnNames;
@@ -204,7 +199,6 @@ public class CatalogImpl extends Catalog {
     private static BTreeMap graphNames;
     private static BTreeMap graphPlacements;

-    private static BTreeMap graphMappings;

     private static Long openTable;
@@ -523,7 +517,7 @@ public void restoreViews( Transaction transaction ) {
                     AlgRoot mqlRel = mqlProcessor.translate(
                             statement,
                             mqlNode,
-                            new ExtendedQueryParameters( query, NamespaceType.DOCUMENT, getSchema( defaultDatabaseId ).name ) );
+                            new ExtendedQueryParameters( query, NamespaceType.DOCUMENT, getNamespace( defaultDatabaseId ).name ) );
                     nodeInfo.put( c.id, mqlRel.alg );
                     break;
                 }
@@ -693,7 +687,6 @@ private void initGraphInfo( DB db ) {
         graphNames = db.treeMap( "graphNames", new SerializerArrayTuple( Serializer.LONG, Serializer.STRING ), Serializer.JAVA ).createOrOpen();
         graphPlacements = db.treeMap( "graphPlacements", new SerializerArrayTuple( Serializer.LONG, Serializer.INTEGER ), Serializer.JAVA ).createOrOpen();
-        graphMappings = db.treeMap( "graphMappings", Serializer.LONG, Serializer.JAVA ).createOrOpen();
         graphAliases = db.treeMap( "graphAliases", Serializer.STRING, Serializer.JAVA ).createOrOpen();
     }
@@ -703,9 +696,6 @@ private void initDocumentInfo( DB db ) {
         collections = db.treeMap( "collections", Serializer.LONG, Serializer.JAVA ).createOrOpen();
         collectionNames = db.treeMap( "collectionNames", new SerializerArrayTuple( Serializer.LONG, Serializer.LONG, Serializer.STRING ), Serializer.JAVA ).createOrOpen();
-
-        documentMappings = db.treeMap( "documentMappings", Serializer.LONG, Serializer.JAVA ).createOrOpen();
-
-        collectionPlacements = db.treeMap( "collectionPlacements", new SerializerArrayTuple( Serializer.LONG, Serializer.INTEGER ), Serializer.JAVA ).createOrOpen();
     }
@@ -765,9 +755,9 @@ private void insertDefaultData() throws GenericCatalogException, UnknownUserExce
         long schemaId;
         if ( !schemaNames.containsKey( new Object[]{ "public" } ) ) {
-            schemaId = addNamespace( "public", 1, NamespaceType.getDefault() );
+            schemaId = addNamespace( "public", NamespaceType.getDefault(), false );
         } else {
-            schemaId = getSchema( "public" ).id;
+            schemaId = getNamespace( "public" ).id;
         }

         //////////////
@@ -827,7 +817,7 @@ public void restoreInterfacesIfNecessary() {
      * Initiates default columns for csv files
      */
     private void addDefaultCsvColumns( CatalogAdapter csv ) throws UnknownSchemaException, UnknownTableException, GenericCatalogException, UnknownColumnException {
-        CatalogSchema schema = getSchema( "public" );
+        LogicalNamespace schema = getNamespace( "public" );
         LogicalTable depts = getTable( schema.id, "depts" );

         addDefaultCsvColumn( csv, depts, "deptno", PolyType.INTEGER, null, 1, null );
@@ -1038,9 +1028,9 @@ private CatalogDatabase getDatabase( long databaseId ) {
      * {@inheritDoc}
      */
     @Override
-    public List<CatalogSchema> getSchemas( Pattern schemaNamePattern ) {
-        if ( schemaNamePattern != null ) {
-            return schemaNames.values().stream().filter( s -> s.name.matches( schemaNamePattern.toRegex() ) ).collect( Collectors.toList() );
+    public @NonNull List<LogicalNamespace> getNamespaces( Pattern name ) {
+        if ( name != null ) {
+            return schemaNames.values().stream().filter( s -> s.name.matches( name.toRegex() ) ).collect( Collectors.toList() );
         }
         return new ArrayList<>();
     }
@@ -1050,11 +1040,11 @@ public List getSchemas( Pattern schemaNamePattern ) {
      * {@inheritDoc}
      */
     @Override
-    public CatalogSchema getSchema( long schemaId ) {
+    public LogicalNamespace getNamespace( long id ) {
         try {
-            return Objects.requireNonNull( schemas.get( schemaId ) );
+            return Objects.requireNonNull( schemas.get( id ) );
         } catch ( NullPointerException e ) {
-            throw new UnknownSchemaIdRuntimeException( schemaId );
+            throw new UnknownSchemaIdRuntimeException( id );
         }
     }
@@ -1063,7 +1053,7 @@ public CatalogSchema getSchema( long schemaId ) {
      * {@inheritDoc}
      */
     @Override
-    public CatalogSchema getSchema( final String schemaName ) throws UnknownSchemaException {
+    public LogicalNamespace getNamespace( final String schemaName ) throws UnknownSchemaException {
         String name = schemaName.toLowerCase();
         try {
             return Objects.requireNonNull( schemaNames.get( new Object[]{ name } ) );
@@ -1077,11 +1067,11 @@ public CatalogSchema getSchema( final String schemaName ) throws UnknownSchemaEx
      * {@inheritDoc}
      */
     @Override
-    public long addNamespace( String name, int ownerId, NamespaceType namespaceType ) {
+    public long addNamespace( String name, NamespaceType namespaceType, boolean caseSensitive ) {
         name = name.toLowerCase();
         CatalogUser owner = getUser( ownerId );
         long id = namespaceIdBuilder.getAndIncrement();
-        CatalogSchema schema = new CatalogSchema( id, name, ownerId, owner.name, namespaceType, namespaceType == NamespaceType.DOCUMENT || namespaceType == NamespaceType.GRAPH );
+        LogicalNamespace schema = new LogicalNamespace( id, name, ownerId, owner.name, namespaceType, namespaceType == NamespaceType.DOCUMENT || namespaceType == NamespaceType.GRAPH );
         synchronized ( this ) {
             schemas.put( id, schema );
             schemaNames.put( new Object[]{ name }, schema );
@@ -1096,9 +1086,9 @@ public long addNamespace( String name, int ownerId, NamespaceType namespaceType
      * {@inheritDoc}
      */
     @Override
-    public boolean checkIfExistsSchema( String schemaName ) {
-        schemaName = schemaName.toLowerCase();
-        return schemaNames.containsKey( new Object[]{ schemaName } );
+    public boolean checkIfExistsNamespace( String name ) {
+        name = name.toLowerCase();
+        return schemaNames.containsKey( new Object[]{ name } );
     }
@@ -1106,11 +1096,11 @@ public boolean checkIfExistsSchema( String schemaName ) {
      * {@inheritDoc}
      */
     @Override
-    public void renameSchema( long schemaId, String name ) {
+    public void renameNamespace( long schemaId, String name ) {
         name = name.toLowerCase();
         try {
-            CatalogSchema old = Objects.requireNonNull( schemas.get( schemaId ) );
-            CatalogSchema schema = new CatalogSchema( old.id, name, old.ownerId, old.ownerName, old.namespaceType, false );
+            LogicalNamespace old = Objects.requireNonNull( schemas.get( schemaId ) );
+            LogicalNamespace schema = new LogicalNamespace( old.id, name, old.ownerId, old.ownerName, old.namespaceType, false );

             synchronized ( this ) {
                 schemas.replace( schemaId, schema );
@@ -1127,11 +1117,10 @@ public void renameSchema( long schemaId, String name ) {
     /**
      * {@inheritDoc}
     */
-    @Override
-    public void setSchemaOwner( long schemaId, long ownerId ) {
+    public void setNamespaceOwner( long schemaId, long ownerId ) {
         try {
-            CatalogSchema old = Objects.requireNonNull( schemas.get( schemaId ) );
-            CatalogSchema schema = new CatalogSchema( old.id, old.name, (int) ownerId, old.ownerName, old.namespaceType, false );
+            LogicalNamespace old = Objects.requireNonNull( schemas.get( schemaId ) );
+            LogicalNamespace schema = new LogicalNamespace( old.id, old.name, (int) ownerId, old.ownerName, old.namespaceType, false );
             synchronized ( this ) {
                 schemas.replace( schemaId, schema );
                 schemaNames.replace( new Object[]{ schema.name }, schema );
@@ -1152,7 +1141,7 @@ public long addGraph( String name, List stores, boolean modifiable, b
             throw new GraphAlreadyExistsException( name );
         }

-        long id = addNamespace( name, Catalog.defaultUserId, NamespaceType.GRAPH );
+        long id = addNamespace( name, NamespaceType.GRAPH, false );

         LogicalGraph graph = new LogicalGraph( id, name, Catalog.defaultUserId, modifiable, ImmutableList.of(), true );
@@ -1205,14 +1194,6 @@ public void removeGraphAlias( long graphId, String alias, boolean ifExists ) {
     }


-    /**
-     * {@inheritDoc}
-     */
-    @Override
-    public CatalogGraphMapping getGraphMapping( long graphId ) {
-        return Objects.requireNonNull( graphMappings.get( graphId ) );
-    }
-

     /**
      * {@inheritDoc}
@@ -1548,18 +1529,14 @@ public void addGraphLogistics( long id, List stores, boolean onlyPlac
                 keyEdgePropertyId,
                 valueEdgePropertyId );

-        graphMappings.put( id, mapping );
     }
 }


     private void removeGraphLogistics( long graphId ) {
-        if ( !graphMappings.containsKey( graphId ) ) {
-            throw new UnknownGraphException( graphId );
-        }

-        deleteSchema( graphId );
+        deleteNamespace( graphId );
     }
@@ -1580,7 +1557,6 @@ public void deleteGraph( long id ) {
             old.placements.forEach( a -> graphPlacements.remove( new Object[]{ old.id, a } ) );
             graphs.remove( id );
             graphNames.remove( new Object[]{ old.name } );
-            graphMappings.remove( id );
         }
         listeners.firePropertyChange( "graph", old, null );
     }
@@ -1619,8 +1595,8 @@ public List getGraphs( Pattern graphName ) {
      * {@inheritDoc}
      */
     @Override
-    public void deleteSchema( long schemaId ) {
-        CatalogSchema schema = getSchema( schemaId );
+    public void deleteNamespace( long schemaId ) {
+        LogicalNamespace schema = getNamespace( schemaId );
         synchronized ( this ) {
             schemaNames.remove( new Object[]{ schema.name } );
@@ -1642,7 +1618,7 @@ public List getTables( long schemaId, Pattern tableNamePattern ) {
         if ( schemas.containsKey( schemaId ) ) {

-            CatalogSchema schema = Objects.requireNonNull( schemas.get( schemaId ) );
+            LogicalNamespace schema = Objects.requireNonNull( schemas.get( schemaId ) );
             if ( tableNamePattern != null ) {
                 return Collections.singletonList( tableNames.get( new Object[]{ schemaId, tableNamePattern.pattern } ) );
             } else {
@@ -1659,12 +1635,12 @@ public List getTables( long schemaId, Pattern tableNamePattern ) {
     @Override
     public List<LogicalTable> getTables( Pattern schemaNamePattern, Pattern tableNamePattern ) {
         if ( schemaNamePattern != null && tableNamePattern != null ) {
-            CatalogSchema schema = schemaNames.get( new Object[]{ schemaNamePattern.pattern } );
+            LogicalNamespace schema = schemaNames.get( new Object[]{ schemaNamePattern.pattern } );
             if ( schema != null ) {
                 return Collections.singletonList( Objects.requireNonNull( tableNames.get( new Object[]{ schema.id, tableNamePattern.pattern } ) ) );
             }
         } else if ( schemaNamePattern != null ) {
-            CatalogSchema schema = schemaNames.get( new Object[]{ schemaNamePattern.pattern } );
+            LogicalNamespace schema = schemaNames.get( new Object[]{ schemaNamePattern.pattern } );
             if ( schema != null ) {
                 return new ArrayList<>( tableNames.prefixSubMap( new Object[]{ schema.id } ).values() );
             }
@@ -1695,7 +1671,7 @@ public LogicalTable getTable( long tableId ) {
     @Override
     public LogicalTable getTable( long schemaId, String tableName ) throws UnknownTableException {
         try {
-            CatalogSchema schema = getSchema( schemaId );
+            LogicalNamespace schema = getNamespace( schemaId );
             if ( !schema.caseSensitive ) {
                 tableName = tableName.toLowerCase();
             }
@@ -1721,7 +1697,7 @@ public LogicalTable getTableFromPartition( long partitionId ) {
     @Override
     public LogicalTable getTable( String schemaName, String tableName ) throws UnknownTableException, UnknownSchemaException {
         try {
-            CatalogSchema schema = getSchema( schemaName );
+            LogicalNamespace schema = getNamespace( schemaName );
             if ( !schema.caseSensitive ) {
                 tableName = tableName.toLowerCase();
             }
@@ -1739,7 +1715,7 @@ public LogicalTable getTable( String schemaName, String tableName ) throws Unkno
     @Override
     public long addTable( String name, long namespaceId, int ownerId, EntityType entityType, boolean modifiable ) {
         long id = entityIdBuilder.getAndIncrement();
-        CatalogSchema schema = getSchema( namespaceId );
+        LogicalNamespace schema = getNamespace( namespaceId );
         if ( !schema.caseSensitive ) {
             name = name.toLowerCase();
         }
@@ -1790,7 +1766,7 @@ public long addTable( String name, long namespaceId, int ownerId, EntityType ent
     @Override
     public long addView( String name, long namespaceId, int ownerId, EntityType entityType, boolean modifiable, AlgNode definition, AlgCollation algCollation, Map<Long, List<Long>> underlyingTables, AlgDataType fieldList, String query, QueryLanguage language ) {
         long id = entityIdBuilder.getAndIncrement();
-        CatalogSchema schema = getSchema( namespaceId );
+        LogicalNamespace schema = getNamespace( namespaceId );

         if ( !schema.caseSensitive ) {
             name = name.toLowerCase();
@@ -1838,7 +1814,7 @@ public long addView( String name, long namespaceId, int ownerId, EntityType enti
     @Override
     public long addMaterializedView( String name, long namespaceId, int ownerId, EntityType entityType, boolean modifiable, AlgNode definition, AlgCollation algCollation, Map<Long, List<Long>> underlyingTables, AlgDataType fieldList, MaterializedCriteria materializedCriteria, String query, QueryLanguage language, boolean ordered ) throws GenericCatalogException {
         long id = entityIdBuilder.getAndIncrement();
-        CatalogSchema schema = getSchema( namespaceId );
+        LogicalNamespace schema = getNamespace( namespaceId );

         if ( !schema.caseSensitive ) {
             name = name.toLowerCase();
@@ -1900,7 +1876,7 @@ public long addMaterializedView( String name, long namespaceId, int ownerId, Ent
     /**
      * Update all information after the addition of all kind of tables
      */
-    private void updateEntityLogistics( String name, long namespaceId, long id, CatalogSchema schema, LogicalTable entity ) {
+    private void updateEntityLogistics( String name, long namespaceId, long id, LogicalNamespace schema, LogicalTable entity ) {
         synchronized ( this ) {
             tables.put( id, entity );
             tableChildren.put( id, ImmutableList.builder().build() );
@@ -1960,7 +1936,7 @@ public void deleteViewDependencies( CatalogView catalogView ) {
      */
     @Override
     public boolean checkIfExistsEntity( long namespaceId, String entityName ) {
-        CatalogSchema schema = getSchema( namespaceId );
+        LogicalNamespace schema = getNamespace( namespaceId );
         if ( !schema.caseSensitive ) {
             entityName = entityName.toLowerCase();
         }
@@ -1983,7 +1959,7 @@ public boolean checkIfExistsEntity( long tableId ) {
     @Override
     public void renameTable( long tableId, String name ) {
         LogicalTable old = getTable( tableId );
-        if ( !getSchema( old.namespaceId ).caseSensitive ) {
+        if ( !getNamespace( old.namespaceId ).caseSensitive ) {
             name = name.toLowerCase();
         }
@@ -2108,36 +2084,6 @@ public void addColumnPlacement( int adapterId, long columnId, PlacementType plac
     }


-    /**
-     * {@inheritDoc}
-     */
-    @Override
-    public void updatePartitionPlacementPhysicalNames( int adapterId, long partitionId, String physicalSchemaName, String physicalTableName ) {
-        try {
-            CatalogPartitionPlacement old = Objects.requireNonNull( partitionPlacements.get( new Object[]{ adapterId, partitionId } ) );
-            CatalogPartitionPlacement placement = new CatalogPartitionPlacement(
-                    old.namespaceId,
-                    old.tableId,
-                    old.adapterId,
-                    old.adapterUniqueName,
-                    old.placementType,
-                    physicalSchemaName,
-                    physicalTableName,
-                    old.partitionId,
-                    old.role );
-
-            synchronized ( this ) {
-                partitionPlacements.replace( new Object[]{ adapterId, partitionId }, placement );
-                listeners.firePropertyChange( "partitionPlacement", old, placement );
-            }
-        } catch ( NullPointerException e ) {
-            getAdapter( adapterId );
-            getPartition( partitionId );
-            throw new UnknownPartitionPlacementException( adapterId, partitionId );
-        }
-    }
-
-
     /**
      * {@inheritDoc}
      */
@@ -2178,7 +2124,7 @@ public LogicalCollection getCollection( long id ) {
     @Override
     public List<LogicalCollection> getCollections( long namespaceId, Pattern namePattern ) {
         if ( schemas.containsKey( namespaceId ) ) {
-            CatalogSchema schema = Objects.requireNonNull( schemas.get( namespaceId ) );
+            LogicalNamespace schema = Objects.requireNonNull( schemas.get( namespaceId ) );
             if ( namePattern != null ) {
                 LogicalCollection collection = collectionNames.get( new Object[]{ namespaceId, namePattern.pattern } );
                 if ( collection == null ) {
@@ -2203,7 +2149,7 @@ public long addCollection( Long id, String name, long schemaId, int currentUserI
             collectionId = id;
         }

-        CatalogSchema namespace = getSchema( schemaId );
+        LogicalNamespace namespace = getNamespace( schemaId );
         LogicalCollection collection = new LogicalCollection(
                 Catalog.defaultDatabaseId,
                 schemaId,
@@ -2223,63 +2169,6 @@ public long addCollection( Long id, String name, long schemaId, int currentUserI
     }


-    /**
-     * {@inheritDoc}
-     */
-    @Override
-    public long addCollectionPlacement( long namespaceId, int adapterId, long collectionId, PlacementType placementType ) {
-        long id = partitionIdBuilder.getAndIncrement();
-        CatalogCollectionPlacement placement = new CatalogCollectionPlacement( namespaceId, adapterId, collectionId, null, null, id );
-        LogicalCollection old = collections.get( collectionId );
-        if ( old == null ) {
-            throw new UnknownCollectionException( collectionId );
-        }
-
-        LogicalCollection collection = old.addPlacement( adapterId );
-
-        synchronized ( this ) {
-            collectionPlacements.put( new Object[]{ collectionId, adapterId }, placement );
-            collections.replace( collectionId, collection );
-            collectionNames.replace( new Object[]{ collection.databaseId, collection.namespaceId, collection.name }, collection );
-        }
-        listeners.firePropertyChange( "collectionPlacement", null, placement );
-        return id;
-    }
-
-
-    /**
-     * {@inheritDoc}
-     */
-    @Override
-    public void updateCollectionPartitionPhysicalNames( long namespaceId, long collectionId, int adapterId, String physicalNamespaceName, String namespaceName, String physicalCollectionName ) {
-        LogicalCollection old = getCollection( collectionId );
-        if ( old == null ) {
-            throw new UnknownCollectionException( collectionId );
-        }
-
-        CatalogCollectionPlacement placement = new CatalogCollectionPlacement( namespaceId, adapterId, collectionId, physicalCollectionName, physicalNamespaceName, old.id );
-        LogicalCollection collection = old.setPhysicalName( physicalCollectionName );
-        synchronized ( this ) {
-            collections.replace( collectionId, collection );
-            collectionNames.replace( new Object[]{ collection.databaseId, collection.namespaceId, collection.name }, collection );
-            collectionPlacements.replace( new Object[]{ collectionId, adapterId }, placement );
-        }
-        listeners.firePropertyChange( "collectionPlacements", old, collection );
-    }
-
-
-    /**
-     * {@inheritDoc}
-     */
-    @Override
-    public CatalogCollectionMapping getCollectionMapping( long id ) {
-        if ( !documentMappings.containsKey( id ) ) {
-            throw new UnknownTableIdRuntimeException( id );
-        }
-        return Objects.requireNonNull( documentMappings.get( id ) );
-    }
-
-
     /**
      * {@inheritDoc}
      */
@@ -2334,11 +2223,6 @@ public long addCollectionLogistics( long schemaId, String name, List
         addPrimaryKey( tableId, List.of( idId, dataId ) );

-        if ( !onlyPlacement ) {
-            CatalogCollectionMapping mapping = new CatalogCollectionMapping( tableId, idId, dataId );
-            documentMappings.put( tableId, mapping );
-        }
-
         return tableId;
     }
@@ -2353,29 +2237,11 @@ public void deleteCollection( long id ) {
         synchronized ( this ) {
             collections.remove( collection.namespaceId );
             collectionNames.remove( new Object[]{ collection.databaseId, collection.namespaceId, collection.name } );
-            collection.placements.forEach( p -> collectionPlacements.remove( new Object[]{ collection.id, p } ) );
         }
         listeners.firePropertyChange( "collection", null, null );
     }


-    /**
-     * {@inheritDoc}
-     */
-    @Override
-    public void dropCollectionPlacement( long id, int adapterId ) {
-        LogicalCollection oldCollection = Objects.requireNonNull( collections.get( id ) );
-        LogicalCollection collection = oldCollection.removePlacement( adapterId );
-
-        synchronized ( this ) {
-            collectionPlacements.remove( new Object[]{ id, adapterId } );
-            collections.replace( id, collection );
-            collectionNames.replace( new Object[]{ collection.databaseId, collection.namespaceId, collection.name }, collection );
-        }
-        listeners.firePropertyChange( "collectionPlacement", null, null );
-    }
-
-
     /**
      * {@inheritDoc}
      */
@@ -2384,28 +2250,6 @@ public List getGraphPlacements( int adapterId ) {
     }


-    /**
-     * {@inheritDoc}
-     */
-    @Override
-    public List<CatalogCollectionPlacement> getCollectionPlacementsByAdapter( int adapterId ) {
-        return collectionPlacements.values().stream().filter( p -> p.adapter == adapterId ).collect( Collectors.toList() );
-    }
-
-
-    /**
-     * {@inheritDoc}
-     */
-    @Override
-    public CatalogCollectionPlacement getCollectionPlacement( long collectionId, int adapterId ) {
-        if ( !collectionPlacements.containsKey( new Object[]{ collectionId, adapterId } ) ) {
-            throw new UnknownCollectionPlacementException( collectionId, adapterId );
-        }
-
-        return collectionPlacements.get( new Object[]{ collectionId, adapterId } );
-    }
-
-
     /**
      * {@inheritDoc}
      */
@@ -2483,20 +2327,6 @@ public List getColumnPlacementsOnAdapterPerTable( int ad
     }


-    /**
-     * {@inheritDoc}
-     */
-    @Override
-    public List<CatalogColumnPlacement> getColumnPlacementsOnAdapterSortedByPhysicalPosition( int adapterId, long tableId ) {
-        final Comparator<CatalogColumnPlacement> columnPlacementComparator = Comparator.comparingLong( p -> p.physicalPosition );
-        return getColumnPlacementsOnAdapter( adapterId )
-                .stream()
-                .filter( p -> p.tableId == tableId )
-                .sorted( columnPlacementComparator )
-                .collect( Collectors.toList() );
-    }
-
-
     /**
      * {@inheritDoc}
      */
@@ -2547,15 +2377,6 @@ public ImmutableMap getPartitionPlacementsByAdapte
     }


-    /**
-     * {@inheritDoc}
-     */
-    @Override
-    public ImmutableMap getPartitionGroupsByAdapter( long tableId ) {
-        return null;
-    }
-
-
     /**
      * {@inheritDoc}
      */
@@ -2586,7 +2407,7 @@ public List getColumnPlacementsOnAdapterAndSchema( int a
             return getColumnPlacementsOnAdapter( adapterId ).stream().filter( p -> Objects.requireNonNull( columns.get( p.columnId ) ).schemaId == schemaId ).collect( Collectors.toList() );
         } catch ( NullPointerException e ) {
             getAdapter( adapterId );
-            getSchema( schemaId );
+            getNamespace( schemaId );
             return new ArrayList<>();
         }
     }
@@ -2762,7 +2583,7 @@ public LogicalColumn getColumn( long columnId ) {
     public LogicalColumn getColumn( long tableId, String columnName ) throws UnknownColumnException {
         try {
             LogicalTable table = getTable( tableId );
-            if ( !getSchema( table.namespaceId ).caseSensitive ) {
+            if ( !getNamespace( table.namespaceId ).caseSensitive ) {
                 columnName = columnName.toLowerCase();
             }
             return Objects.requireNonNull( columnNames.get( new Object[]{ table.namespaceId, table.id, columnName } ) );
@@ -2793,7 +2614,7 @@ public LogicalColumn getColumn( String schemaName, String tableName, String colu
     public long addColumn( String name, long tableId, int position, PolyType type, PolyType collectionsType, Integer length, Integer scale, Integer dimension, Integer cardinality, boolean nullable, Collation collation ) {
         LogicalTable table = getTable( tableId );

-        if ( !getSchema( table.namespaceId ).caseSensitive ) {
+        if ( !getNamespace( table.namespaceId ).caseSensitive ) {
             name = name.toLowerCase();
         }
@@ -2851,7 +2672,7 @@ public long addColumn( String name, long tableId, int position, PolyType type, P
     public void renameColumn( long columnId, String name ) {
         LogicalColumn old = getColumn( columnId );

-        if ( !getSchema( old.schemaId ).caseSensitive ) {
+        if ( !getNamespace( old.schemaId ).caseSensitive ) {
             name = name.toLowerCase();
         }
@@ -3545,11 +3366,11 @@ public void deleteConstraint( long constraintId ) throws GenericCatalogException
      * {@inheritDoc}
      */
     @Override
-    public CatalogUser getUser( String userName ) throws UnknownUserException {
+    public CatalogUser getUser( String name ) throws UnknownUserException {
         try {
-            return Objects.requireNonNull( userNames.get( userName ) );
+            return Objects.requireNonNull( userNames.get( name ) );
         } catch ( NullPointerException e ) {
-            throw new UnknownUserException( userName );
+            throw new UnknownUserException( name );
        }
     }
@@ -3558,11 +3379,11 @@ public CatalogUser getUser( String userName ) throws UnknownUserException {
      * {@inheritDoc}
      */
     @Override
-    public CatalogUser getUser( int userId ) {
+    public CatalogUser getUser( long id ) {
         try {
-            return Objects.requireNonNull( users.get( userId ) );
+            return Objects.requireNonNull( users.get( id ) );
         } catch ( NullPointerException e ) {
-            throw new UnknownUserIdRuntimeException( userId );
+            throw new UnknownUserIdRuntimeException( id );
         }
     }
@@ -3594,11 +3415,11 @@ public CatalogAdapter getAdapter( String uniqueName ) throws UnknownAdapterExcep
      * {@inheritDoc}
      */
     @Override
-    public CatalogAdapter getAdapter( int adapterId ) {
+    public CatalogAdapter getAdapter( long id ) {
         try {
-            return Objects.requireNonNull( adapters.get( adapterId ) );
+            return Objects.requireNonNull( adapters.get( id ) );
         } catch ( NullPointerException e ) {
-            throw new UnknownAdapterIdRuntimeException( adapterId );
+            throw new UnknownAdapterIdRuntimeException( id );
         }
     }
@@ -3607,8 +3428,8 @@ public CatalogAdapter getAdapter( int adapterId ) {
      * {@inheritDoc}
      */
     @Override
-    public boolean checkIfExistsAdapter( int adapterId ) {
-        return adapters.containsKey( adapterId );
+    public boolean checkIfExistsAdapter( long id ) {
+        return adapters.containsKey( id );
     }
@@ -3616,7 +3437,7 @@ public boolean checkIfExistsAdapter( int adapterId ) {
      * {@inheritDoc}
      */
     @Override
-    public int addAdapter( String uniqueName, String adapterName, AdapterType type, Map<String, String> settings ) {
+    public long addAdapter( String uniqueName, String adapterName, AdapterType type, Map<String, String> settings ) {
         uniqueName = uniqueName.toLowerCase();

         int id = adapterIdBuilder.getAndIncrement();
@@ -3640,7 +3461,7 @@ public int addAdapter( String uniqueName, String adapterName, AdapterType type,
      * {@inheritDoc}
      */
     @Override
-    public void updateAdapterSettings( int adapterId, Map<String, String> newSettings ) {
+    public void updateAdapterSettings( long adapterId, Map<String, String> newSettings ) {
         CatalogAdapter old = getAdapter( adapterId );
         Map<String, String> temp = new HashMap<>();
         newSettings.forEach( temp::put );
@@ -3657,11 +3478,11 @@ public void updateAdapterSettings( int adapterId, Map newSetting
      * {@inheritDoc}
      */
     @Override
-    public void deleteAdapter( int adapterId ) {
+    public void deleteAdapter( long id ) {
         try {
-            CatalogAdapter adapter = Objects.requireNonNull( adapters.get( adapterId ) );
+            CatalogAdapter adapter = Objects.requireNonNull( adapters.get( id ) );
             synchronized ( this ) {
-                adapters.remove( adapterId );
+                adapters.remove( id );
                 adapterNames.remove( adapter.uniqueName );
             }
             try {
@@ -3676,7 +3497,7 @@ public void deleteAdapter( int adapterId ) {
             }
             listeners.firePropertyChange( "adapter", adapter, null );
         } catch ( NullPointerException e ) {
-            throw new UnknownAdapterIdRuntimeException( adapterId );
+            throw new UnknownAdapterIdRuntimeException( id );
         }
     }
@@ -3708,11 +3529,11 @@ public CatalogQueryInterface getQueryInterface( String uniqueName ) throws Unkno
      * {@inheritDoc}
      */
     @Override
-    public CatalogQueryInterface getQueryInterface( int ifaceId ) {
+    public CatalogQueryInterface getQueryInterface( long id ) {
         try {
-            return Objects.requireNonNull( queryInterfaces.get( ifaceId ) );
+            return Objects.requireNonNull( queryInterfaces.get( id ) );
         } catch ( NullPointerException e ) {
-            throw new UnknownQueryInterfaceRuntimeException( ifaceId );
+            throw new UnknownQueryInterfaceRuntimeException( id );
         }
     }
@@ -3721,7 +3542,7 @@ public CatalogQueryInterface getQueryInterface( int ifaceId ) {
     /**
      * {@inheritDoc}
      */
     @Override
-    public int addQueryInterface( String uniqueName, String clazz, Map<String, String> settings ) {
+    public long addQueryInterface( String uniqueName, String clazz, Map<String, String> settings ) {
         uniqueName = uniqueName.toLowerCase();

         int id = queryInterfaceIdBuilder.getAndIncrement();
@@ -3745,11 +3566,11 @@ public long addQueryInterface( String uniqueName, String clazz, Map
         List partitionIds = new ArrayList<>();
         for ( int i = 0; i < numberOfInternalPartitions; i++ ) {
@@ -3940,7 +3761,7 @@ public long addPartition( long tableId, long schemaId, long partitionGroupId, Li
         if ( log.isDebugEnabled() ) {
             log.debug( "Creating partition with id '{}'", id );
         }
-        CatalogSchema schema = Objects.requireNonNull( schemas.get( schemaId ) );
+        LogicalNamespace schema = Objects.requireNonNull( schemas.get( schemaId ) );

         CatalogPartition partition = new CatalogPartition(
                 id,
@@ -4622,27 +4443,6 @@ public long addGraphPlacement( int adapterId, long graphId ) {
     }


-    /**
-     * {@inheritDoc}
-     */
-    @Override
-    public void updateGraphPlacementPhysicalNames( long graphId, int adapterId, String physicalGraphName ) {
-        if ( !graphPlacements.containsKey( new Object[]{ graphId, adapterId } ) ) {
-            throw new UnknownGraphPlacementsException( graphId, adapterId );
-        }
-
-        CatalogGraphPlacement old = Objects.requireNonNull( graphPlacements.get( new Object[]{ graphId, adapterId } ) );
-
-        CatalogGraphPlacement placement = old.replacePhysicalName( physicalGraphName );
-
-        synchronized ( this ) {
-            graphPlacements.replace( new Object[]{ graphId, adapterId }, placement );
-        }
-
-        listeners.firePropertyChange( "graphPlacement", old, placement );
-    }
-
-
     /**
      * {@inheritDoc}
      */
@@ -4669,9 +4469,7 @@ public void deleteGraphPlacement( int adapterId, long graphId ) {


     private void deleteGraphPlacementLogistics( long graphId, int adapterId ) {
-        if ( !graphMappings.containsKey( graphId ) ) {
-            throw new UnknownGraphException( graphId );
-        }
+        /*
         CatalogGraphMapping mapping = Objects.requireNonNull( graphMappings.get( graphId ) );
         if ( !graphPlacements.containsKey( new Object[]{ graphId, adapterId } ) ) {
             throw new UnknownGraphPlacementsException( graphId, adapterId );
@@ -4681,7 +4479,7 @@ private void deleteGraphPlacementLogistics( long graphId, int adapterId ) {
         removeSingleDataPlacementFromTable( placement.adapterId, mapping.nodesId );
         removeSingleDataPlacementFromTable( placement.adapterId, mapping.nodesPropertyId );
         removeSingleDataPlacementFromTable( placement.adapterId, mapping.edgesId );
-        removeSingleDataPlacementFromTable( placement.adapterId, mapping.edgesPropertyId );
+        */
     }
diff --git a/plugins/mapdb-catalog/src/main/java/org/polypheny/db/catalog/CatalogImplBackup.java b/plugins/mapdb-catalog/src/main/java/org/polypheny/db/catalog/CatalogImplBackup.java
index 682a113e06..dcbcc81add 100644
--- a/plugins/mapdb-catalog/src/main/java/org/polypheny/db/catalog/CatalogImplBackup.java
+++ b/plugins/mapdb-catalog/src/main/java/org/polypheny/db/catalog/CatalogImplBackup.java
@@ -39,6 +39,7 @@
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
 import lombok.Getter;
+import lombok.NonNull;
 import lombok.extern.slf4j.Slf4j;
 import org.mapdb.BTreeMap;
 import org.mapdb.DB;
@@ -81,9 +82,9 @@
 import org.polypheny.db.catalog.entity.CatalogPartitionPlacement;
 import org.polypheny.db.catalog.entity.CatalogPrimaryKey;
 import org.polypheny.db.catalog.entity.CatalogQueryInterface;
-import org.polypheny.db.catalog.entity.CatalogSchema;
 import
org.polypheny.db.catalog.entity.CatalogUser; import org.polypheny.db.catalog.entity.CatalogView; +import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.MaterializedCriteria; import org.polypheny.db.catalog.entity.logical.LogicalCollection; import org.polypheny.db.catalog.entity.logical.LogicalColumn; @@ -127,6 +128,7 @@ import org.polypheny.db.catalog.logistic.PartitionType; import org.polypheny.db.catalog.logistic.Pattern; import org.polypheny.db.catalog.logistic.PlacementType; +import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.config.RuntimeConfig; import org.polypheny.db.iface.QueryInterfaceManager; import org.polypheny.db.languages.QueryLanguage; @@ -159,8 +161,8 @@ public class CatalogImplBackup extends Catalog { private static BTreeMap databaseNames; private static HTreeMap> databaseChildren; - private static BTreeMap schemas; - private static BTreeMap schemaNames; + private static BTreeMap schemas; + private static BTreeMap schemaNames; private static HTreeMap> schemaChildren; private static BTreeMap tables; @@ -523,7 +525,7 @@ public void restoreViews( Transaction transaction ) { AlgRoot mqlRel = mqlProcessor.translate( statement, mqlNode, - new ExtendedQueryParameters( query, NamespaceType.DOCUMENT, getSchema( defaultDatabaseId ).name ) ); + new ExtendedQueryParameters( query, NamespaceType.DOCUMENT, getNamespace( defaultDatabaseId ).name ) ); nodeInfo.put( c.id, mqlRel.alg ); break; } @@ -765,9 +767,9 @@ private void insertDefaultData() throws GenericCatalogException, UnknownUserExce long schemaId; if ( !schemaNames.containsKey( new Object[]{ "public" } ) ) { - schemaId = addNamespace( "public", 1, NamespaceType.getDefault() ); + schemaId = addNamespace( "public", NamespaceType.getDefault(), false ); } else { - schemaId = getSchema( "public" ).id; + schemaId = getNamespace( "public" ).id; } ////////////// @@ -827,7 +829,7 @@ public void restoreInterfacesIfNecessary() { * Initiates default columns for csv files */ private void addDefaultCsvColumns( CatalogAdapter csv ) throws UnknownSchemaException, UnknownTableException, GenericCatalogException, UnknownColumnException { - CatalogSchema schema = getSchema( "public" ); + LogicalNamespace schema = getNamespace( "public" ); LogicalTable depts = getTable( schema.id, "depts" ); addDefaultCsvColumn( csv, depts, "deptno", PolyType.INTEGER, null, 1, null ); @@ -1038,9 +1040,9 @@ private CatalogDatabase getDatabase( long databaseId ) { * {@inheritDoc} */ @Override - public List getSchemas( Pattern schemaNamePattern ) { - if ( schemaNamePattern != null ) { - return schemaNames.values().stream().filter( s -> s.name.matches( schemaNamePattern.toRegex() ) ).collect( Collectors.toList() ); + public @NonNull List getNamespaces( Pattern name ) { + if ( name != null ) { + return schemaNames.values().stream().filter( s -> s.name.matches( name.toRegex() ) ).collect( Collectors.toList() ); } return new ArrayList<>(); } @@ -1050,11 +1052,11 @@ public List getSchemas( Pattern schemaNamePattern ) { * {@inheritDoc} */ @Override - public CatalogSchema getSchema( long schemaId ) { + public LogicalNamespace getNamespace( long id ) { try { - return Objects.requireNonNull( schemas.get( schemaId ) ); + return Objects.requireNonNull( schemas.get( id ) ); } catch ( NullPointerException e ) { - throw new UnknownSchemaIdRuntimeException( schemaId ); + throw new UnknownSchemaIdRuntimeException( id ); } } @@ -1063,7 +1065,7 @@ public CatalogSchema getSchema( long schemaId ) { * {@inheritDoc} */ 
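/*
 * getSchema(...) is renamed to getNamespace(...) throughout this patch and returns the
 * new LogicalNamespace entity instead of CatalogSchema. A caller-side sketch of the
 * renamed API, assuming a Catalog instance `catalog` (the variable is illustrative):
 *
 *   LogicalNamespace ns = catalog.getNamespace( namespaceId );
 *   String key = ns.caseSensitive ? tableName : tableName.toLowerCase();
 *   // names in case-insensitive namespaces are stored lower-cased, as in getTable(...)
 */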
@Override - public CatalogSchema getSchema( final String schemaName ) throws UnknownSchemaException { + public LogicalNamespace getNamespace( final String schemaName ) throws UnknownSchemaException { String name = schemaName.toLowerCase(); try { return Objects.requireNonNull( schemaNames.get( new Object[]{ name } ) ); @@ -1077,11 +1079,11 @@ public CatalogSchema getSchema( final String schemaName ) throws UnknownSchemaEx * {@inheritDoc} */ @Override - public long addNamespace( String name, int ownerId, NamespaceType namespaceType ) { + public long addNamespace( String name, NamespaceType namespaceType, boolean caseSensitive ) { name = name.toLowerCase(); CatalogUser owner = getUser( ownerId ); long id = namespaceIdBuilder.getAndIncrement(); - CatalogSchema schema = new CatalogSchema( id, name, ownerId, owner.name, namespaceType, namespaceType == NamespaceType.DOCUMENT || namespaceType == NamespaceType.GRAPH ); + LogicalNamespace schema = new LogicalNamespace( id, name, ownerId, owner.name, namespaceType, namespaceType == NamespaceType.DOCUMENT || namespaceType == NamespaceType.GRAPH ); synchronized ( this ) { schemas.put( id, schema ); schemaNames.put( new Object[]{ name }, schema ); @@ -1096,9 +1098,9 @@ public long addNamespace( String name, int ownerId, NamespaceType namespaceType * {@inheritDoc} */ @Override - public boolean checkIfExistsSchema( String schemaName ) { - schemaName = schemaName.toLowerCase(); - return schemaNames.containsKey( new Object[]{ schemaName } ); + public boolean checkIfExistsNamespace( String name ) { + name = name.toLowerCase(); + return schemaNames.containsKey( new Object[]{ name } ); } @@ -1106,11 +1108,11 @@ public boolean checkIfExistsSchema( String schemaName ) { * {@inheritDoc} */ @Override - public void renameSchema( long schemaId, String name ) { + public void renameNamespace( long schemaId, String name ) { name = name.toLowerCase(); try { - CatalogSchema old = Objects.requireNonNull( schemas.get( schemaId ) ); - CatalogSchema schema = new CatalogSchema( old.id, name, old.ownerId, old.ownerName, old.namespaceType, false ); + LogicalNamespace old = Objects.requireNonNull( schemas.get( schemaId ) ); + LogicalNamespace schema = new LogicalNamespace( old.id, name, old.ownerId, old.ownerName, old.namespaceType, false ); synchronized ( this ) { schemas.replace( schemaId, schema ); @@ -1124,25 +1126,6 @@ public void renameSchema( long schemaId, String name ) { } - /** - * {@inheritDoc} - */ - @Override - public void setSchemaOwner( long schemaId, long ownerId ) { - try { - CatalogSchema old = Objects.requireNonNull( schemas.get( schemaId ) ); - CatalogSchema schema = new CatalogSchema( old.id, old.name, (int) ownerId, old.ownerName, old.namespaceType, false ); - synchronized ( this ) { - schemas.replace( schemaId, schema ); - schemaNames.replace( new Object[]{ schema.name }, schema ); - } - listeners.firePropertyChange( "schema", old, schema ); - } catch ( NullPointerException e ) { - throw new UnknownSchemaIdRuntimeException( schemaId ); - } - } - - /** * {@inheritDoc} */ @@ -1152,7 +1135,7 @@ public long addGraph( String name, List stores, boolean modifiable, b throw new GraphAlreadyExistsException( name ); } - long id = addNamespace( name, Catalog.defaultUserId, NamespaceType.GRAPH ); + long id = addNamespace( name, NamespaceType.GRAPH, false ); LogicalGraph graph = new LogicalGraph( id, name, Catalog.defaultUserId, modifiable, ImmutableList.of(), true ); @@ -1559,7 +1542,7 @@ private void removeGraphLogistics( long graphId ) { throw new 
UnknownGraphException( graphId ); } - deleteSchema( graphId ); + deleteNamespace( graphId ); } @@ -1619,8 +1602,8 @@ public List getGraphs( Pattern graphName ) { * {@inheritDoc} */ @Override - public void deleteSchema( long schemaId ) { - CatalogSchema schema = getSchema( schemaId ); + public void deleteNamespace( long schemaId ) { + LogicalNamespace schema = getNamespace( schemaId ); synchronized ( this ) { schemaNames.remove( new Object[]{ schema.name } ); @@ -1642,7 +1625,7 @@ public void deleteSchema( long schemaId ) { public List getTables( long schemaId, Pattern tableNamePattern ) { if ( schemas.containsKey( schemaId ) ) { - CatalogSchema schema = Objects.requireNonNull( schemas.get( schemaId ) ); + LogicalNamespace schema = Objects.requireNonNull( schemas.get( schemaId ) ); if ( tableNamePattern != null ) { return Collections.singletonList( tableNames.get( new Object[]{ schemaId, tableNamePattern.pattern } ) ); } else { @@ -1659,12 +1642,12 @@ public List getTables( long schemaId, Pattern tableNamePattern ) { @Override public List getTables( Pattern schemaNamePattern, Pattern tableNamePattern ) { if ( schemaNamePattern != null && tableNamePattern != null ) { - CatalogSchema schema = schemaNames.get( new Object[]{ schemaNamePattern.pattern } ); + LogicalNamespace schema = schemaNames.get( new Object[]{ schemaNamePattern.pattern } ); if ( schema != null ) { return Collections.singletonList( Objects.requireNonNull( tableNames.get( new Object[]{ schema.id, tableNamePattern.pattern } ) ) ); } } else if ( schemaNamePattern != null ) { - CatalogSchema schema = schemaNames.get( new Object[]{ schemaNamePattern.pattern } ); + LogicalNamespace schema = schemaNames.get( new Object[]{ schemaNamePattern.pattern } ); if ( schema != null ) { return new ArrayList<>( tableNames.prefixSubMap( new Object[]{ schema.id } ).values() ); } @@ -1695,7 +1678,7 @@ public LogicalTable getTable( long tableId ) { @Override public LogicalTable getTable( long schemaId, String tableName ) throws UnknownTableException { try { - CatalogSchema schema = getSchema( schemaId ); + LogicalNamespace schema = getNamespace( schemaId ); if ( !schema.caseSensitive ) { tableName = tableName.toLowerCase(); } @@ -1721,7 +1704,7 @@ public LogicalTable getTableFromPartition( long partitionId ) { @Override public LogicalTable getTable( String schemaName, String tableName ) throws UnknownTableException, UnknownSchemaException { try { - CatalogSchema schema = getSchema( schemaName ); + LogicalNamespace schema = getNamespace( schemaName ); if ( !schema.caseSensitive ) { tableName = tableName.toLowerCase(); } @@ -1739,7 +1722,7 @@ public LogicalTable getTable( String schemaName, String tableName ) throws Unkno @Override public long addTable( String name, long namespaceId, int ownerId, EntityType entityType, boolean modifiable ) { long id = entityIdBuilder.getAndIncrement(); - CatalogSchema schema = getSchema( namespaceId ); + LogicalNamespace schema = getNamespace( namespaceId ); if ( !schema.caseSensitive ) { name = name.toLowerCase(); } @@ -1790,7 +1773,7 @@ public long addTable( String name, long namespaceId, int ownerId, EntityType ent @Override public long addView( String name, long namespaceId, int ownerId, EntityType entityType, boolean modifiable, AlgNode definition, AlgCollation algCollation, Map> underlyingTables, AlgDataType fieldList, String query, QueryLanguage language ) { long id = entityIdBuilder.getAndIncrement(); - CatalogSchema schema = getSchema( namespaceId ); + LogicalNamespace schema = getNamespace( namespaceId ); 
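/*
 * Entity names are keyed by Object[] composite keys in the MapDB-backed maps
 * (e.g. { namespaceId, tableName }), so case normalization has to happen before
 * the lookup. A sketch of the access pattern used by getTable(...) above,
 * assuming the `tableNames` map declared earlier in this class:
 *
 *   String key = schema.caseSensitive ? name : name.toLowerCase();
 *   LogicalTable table = tableNames.get( new Object[]{ schema.id, key } );
 */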
if ( !schema.caseSensitive ) { name = name.toLowerCase(); @@ -1838,7 +1821,7 @@ public long addView( String name, long namespaceId, int ownerId, EntityType enti @Override public long addMaterializedView( String name, long namespaceId, int ownerId, EntityType entityType, boolean modifiable, AlgNode definition, AlgCollation algCollation, Map> underlyingTables, AlgDataType fieldList, MaterializedCriteria materializedCriteria, String query, QueryLanguage language, boolean ordered ) throws GenericCatalogException { long id = entityIdBuilder.getAndIncrement(); - CatalogSchema schema = getSchema( namespaceId ); + LogicalNamespace schema = getNamespace( namespaceId ); if ( !schema.caseSensitive ) { name = name.toLowerCase(); @@ -1900,7 +1883,7 @@ public long addMaterializedView( String name, long namespaceId, int ownerId, Ent /** * Update all information after the addition of all kind of tables */ - private void updateEntityLogistics( String name, long namespaceId, long id, CatalogSchema schema, LogicalTable entity ) { + private void updateEntityLogistics( String name, long namespaceId, long id, LogicalNamespace schema, LogicalTable entity ) { synchronized ( this ) { tables.put( id, entity ); tableChildren.put( id, ImmutableList.builder().build() ); @@ -1960,7 +1943,7 @@ public void deleteViewDependencies( CatalogView catalogView ) { */ @Override public boolean checkIfExistsEntity( long namespaceId, String entityName ) { - CatalogSchema schema = getSchema( namespaceId ); + LogicalNamespace schema = getNamespace( namespaceId ); if ( !schema.caseSensitive ) { entityName = entityName.toLowerCase(); } @@ -1983,7 +1966,7 @@ public boolean checkIfExistsEntity( long tableId ) { @Override public void renameTable( long tableId, String name ) { LogicalTable old = getTable( tableId ); - if ( !getSchema( old.namespaceId ).caseSensitive ) { + if ( !getNamespace( old.namespaceId ).caseSensitive ) { name = name.toLowerCase(); } @@ -2108,36 +2091,6 @@ public void addColumnPlacement( int adapterId, long columnId, PlacementType plac } - /** - * {@inheritDoc} - */ - @Override - public void updatePartitionPlacementPhysicalNames( int adapterId, long partitionId, String physicalSchemaName, String physicalTableName ) { - try { - CatalogPartitionPlacement old = Objects.requireNonNull( partitionPlacements.get( new Object[]{ adapterId, partitionId } ) ); - CatalogPartitionPlacement placement = new CatalogPartitionPlacement( - old.namespaceId, - old.tableId, - old.adapterId, - old.adapterUniqueName, - old.placementType, - physicalSchemaName, - physicalTableName, - old.partitionId, - old.role ); - - synchronized ( this ) { - partitionPlacements.replace( new Object[]{ adapterId, partitionId }, placement ); - listeners.firePropertyChange( "partitionPlacement", old, placement ); - } - } catch ( NullPointerException e ) { - getAdapter( adapterId ); - getPartition( partitionId ); - throw new UnknownPartitionPlacementException( adapterId, partitionId ); - } - } - - /** * {@inheritDoc} */ @@ -2178,7 +2131,7 @@ public LogicalCollection getCollection( long id ) { @Override public List getCollections( long namespaceId, Pattern namePattern ) { if ( schemas.containsKey( namespaceId ) ) { - CatalogSchema schema = Objects.requireNonNull( schemas.get( namespaceId ) ); + LogicalNamespace schema = Objects.requireNonNull( schemas.get( namespaceId ) ); if ( namePattern != null ) { LogicalCollection collection = collectionNames.get( new Object[]{ namespaceId, namePattern.pattern } ); if ( collection == null ) { @@ -2203,7 +2156,7 @@ public long 
addCollection( Long id, String name, long schemaId, int currentUserI collectionId = id; } - CatalogSchema namespace = getSchema( schemaId ); + LogicalNamespace namespace = getNamespace( schemaId ); LogicalCollection collection = new LogicalCollection( Catalog.defaultDatabaseId, schemaId, @@ -2247,27 +2200,6 @@ public long addCollectionPlacement( long namespaceId, int adapterId, long collec } - /** - * {@inheritDoc} - */ - @Override - public void updateCollectionPartitionPhysicalNames( long namespaceId, long collectionId, int adapterId, String physicalNamespaceName, String namespaceName, String physicalCollectionName ) { - LogicalCollection old = getCollection( collectionId ); - if ( old == null ) { - throw new UnknownCollectionException( collectionId ); - } - - CatalogCollectionPlacement placement = new CatalogCollectionPlacement( namespaceId, adapterId, collectionId, physicalCollectionName, physicalNamespaceName, old.id ); - LogicalCollection collection = old.setPhysicalName( physicalCollectionName ); - synchronized ( this ) { - collections.replace( collectionId, collection ); - collectionNames.replace( new Object[]{ collection.databaseId, collection.namespaceId, collection.name }, collection ); - collectionPlacements.replace( new Object[]{ collectionId, adapterId }, placement ); - } - listeners.firePropertyChange( "collectionPlacements", old, collection ); - } - - /** * {@inheritDoc} */ @@ -2483,20 +2415,6 @@ public List getColumnPlacementsOnAdapterPerTable( int ad } - /** - * {@inheritDoc} - */ - @Override - public List getColumnPlacementsOnAdapterSortedByPhysicalPosition( int adapterId, long tableId ) { - final Comparator columnPlacementComparator = Comparator.comparingLong( p -> p.physicalPosition ); - return getColumnPlacementsOnAdapter( adapterId ) - .stream() - .filter( p -> p.tableId == tableId ) - .sorted( columnPlacementComparator ) - .collect( Collectors.toList() ); - } - - /** * {@inheritDoc} */ @@ -2547,15 +2465,6 @@ public ImmutableMap> getPartitionPlacementsByAdapte } - /** - * {@inheritDoc} - */ - @Override - public ImmutableMap> getPartitionGroupsByAdapter( long tableId ) { - return null; - } - - /** * {@inheritDoc} */ @@ -2586,7 +2495,7 @@ public List getColumnPlacementsOnAdapterAndSchema( int a return getColumnPlacementsOnAdapter( adapterId ).stream().filter( p -> Objects.requireNonNull( columns.get( p.columnId ) ).schemaId == schemaId ).collect( Collectors.toList() ); } catch ( NullPointerException e ) { getAdapter( adapterId ); - getSchema( schemaId ); + getNamespace( schemaId ); return new ArrayList<>(); } } @@ -2762,7 +2671,7 @@ public LogicalColumn getColumn( long columnId ) { public LogicalColumn getColumn( long tableId, String columnName ) throws UnknownColumnException { try { LogicalTable table = getTable( tableId ); - if ( !getSchema( table.namespaceId ).caseSensitive ) { + if ( !getNamespace( table.namespaceId ).caseSensitive ) { columnName = columnName.toLowerCase(); } return Objects.requireNonNull( columnNames.get( new Object[]{ table.namespaceId, table.id, columnName } ) ); @@ -2793,7 +2702,7 @@ public LogicalColumn getColumn( String schemaName, String tableName, String colu public long addColumn( String name, long tableId, int position, PolyType type, PolyType collectionsType, Integer length, Integer scale, Integer dimension, Integer cardinality, boolean nullable, Collation collation ) { LogicalTable table = getTable( tableId ); - if ( !getSchema( table.namespaceId ).caseSensitive ) { + if ( !getNamespace( table.namespaceId ).caseSensitive ) { name = 
name.toLowerCase(); } @@ -2851,7 +2760,7 @@ public long addColumn( String name, long tableId, int position, PolyType type, P public void renameColumn( long columnId, String name ) { LogicalColumn old = getColumn( columnId ); - if ( !getSchema( old.schemaId ).caseSensitive ) { + if ( !getNamespace( old.schemaId ).caseSensitive ) { name = name.toLowerCase(); } @@ -3545,11 +3454,11 @@ public void deleteConstraint( long constraintId ) throws GenericCatalogException * {@inheritDoc} */ @Override - public CatalogUser getUser( String userName ) throws UnknownUserException { + public CatalogUser getUser( String name ) throws UnknownUserException { try { - return Objects.requireNonNull( userNames.get( userName ) ); + return Objects.requireNonNull( userNames.get( name ) ); } catch ( NullPointerException e ) { - throw new UnknownUserException( userName ); + throw new UnknownUserException( name ); } } @@ -3558,11 +3467,11 @@ public CatalogUser getUser( String userName ) throws UnknownUserException { * {@inheritDoc} */ @Override - public CatalogUser getUser( int userId ) { + public CatalogUser getUser( long id ) { try { - return Objects.requireNonNull( users.get( userId ) ); + return Objects.requireNonNull( users.get( id ) ); } catch ( NullPointerException e ) { - throw new UnknownUserIdRuntimeException( userId ); + throw new UnknownUserIdRuntimeException( id ); } } @@ -3594,11 +3503,11 @@ public CatalogAdapter getAdapter( String uniqueName ) throws UnknownAdapterExcep * {@inheritDoc} */ @Override - public CatalogAdapter getAdapter( int adapterId ) { + public CatalogAdapter getAdapter( long id ) { try { - return Objects.requireNonNull( adapters.get( adapterId ) ); + return Objects.requireNonNull( adapters.get( id ) ); } catch ( NullPointerException e ) { - throw new UnknownAdapterIdRuntimeException( adapterId ); + throw new UnknownAdapterIdRuntimeException( id ); } } @@ -3607,8 +3516,8 @@ public CatalogAdapter getAdapter( int adapterId ) { * {@inheritDoc} */ @Override - public boolean checkIfExistsAdapter( int adapterId ) { - return adapters.containsKey( adapterId ); + public boolean checkIfExistsAdapter( long id ) { + return adapters.containsKey( id ); } @@ -3616,7 +3525,7 @@ public boolean checkIfExistsAdapter( int adapterId ) { * {@inheritDoc} */ @Override - public int addAdapter( String uniqueName, String adapterName, AdapterType type, Map settings ) { + public long addAdapter( String uniqueName, String adapterName, AdapterType type, Map settings ) { uniqueName = uniqueName.toLowerCase(); int id = adapterIdBuilder.getAndIncrement(); @@ -3640,7 +3549,7 @@ public int addAdapter( String uniqueName, String adapterName, AdapterType type, * {@inheritDoc} */ @Override - public void updateAdapterSettings( int adapterId, Map newSettings ) { + public void updateAdapterSettings( long adapterId, Map newSettings ) { CatalogAdapter old = getAdapter( adapterId ); Map temp = new HashMap<>(); newSettings.forEach( temp::put ); @@ -3657,11 +3566,11 @@ public void updateAdapterSettings( int adapterId, Map newSetting * {@inheritDoc} */ @Override - public void deleteAdapter( int adapterId ) { + public void deleteAdapter( long id ) { try { - CatalogAdapter adapter = Objects.requireNonNull( adapters.get( adapterId ) ); + CatalogAdapter adapter = Objects.requireNonNull( adapters.get( id ) ); synchronized ( this ) { - adapters.remove( adapterId ); + adapters.remove( id ); adapterNames.remove( adapter.uniqueName ); } try { @@ -3676,7 +3585,7 @@ public void deleteAdapter( int adapterId ) { } listeners.firePropertyChange( 
"adapter", adapter, null ); } catch ( NullPointerException e ) { - throw new UnknownAdapterIdRuntimeException( adapterId ); + throw new UnknownAdapterIdRuntimeException( id ); } } @@ -3708,11 +3617,11 @@ public CatalogQueryInterface getQueryInterface( String uniqueName ) throws Unkno * {@inheritDoc} */ @Override - public CatalogQueryInterface getQueryInterface( int ifaceId ) { + public CatalogQueryInterface getQueryInterface( long id ) { try { - return Objects.requireNonNull( queryInterfaces.get( ifaceId ) ); + return Objects.requireNonNull( queryInterfaces.get( id ) ); } catch ( NullPointerException e ) { - throw new UnknownQueryInterfaceRuntimeException( ifaceId ); + throw new UnknownQueryInterfaceRuntimeException( id ); } } @@ -3721,7 +3630,7 @@ public CatalogQueryInterface getQueryInterface( int ifaceId ) { * {@inheritDoc} */ @Override - public int addQueryInterface( String uniqueName, String clazz, Map settings ) { + public long addQueryInterface( String uniqueName, String clazz, Map settings ) { uniqueName = uniqueName.toLowerCase(); int id = queryInterfaceIdBuilder.getAndIncrement(); @@ -3745,11 +3654,11 @@ public int addQueryInterface( String uniqueName, String clazz, Map partitionIds = new ArrayList<>(); for ( int i = 0; i < numberOfInternalPartitions; i++ ) { @@ -3940,7 +3849,7 @@ public long addPartition( long tableId, long schemaId, long partitionGroupId, Li if ( log.isDebugEnabled() ) { log.debug( "Creating partition with id '{}'", id ); } - CatalogSchema schema = Objects.requireNonNull( schemas.get( schemaId ) ); + LogicalNamespace schema = Objects.requireNonNull( schemas.get( schemaId ) ); CatalogPartition partition = new CatalogPartition( id, @@ -4622,27 +4531,6 @@ public long addGraphPlacement( int adapterId, long graphId ) { } - /** - * {@inheritDoc} - */ - @Override - public void updateGraphPlacementPhysicalNames( long graphId, int adapterId, String physicalGraphName ) { - if ( !graphPlacements.containsKey( new Object[]{ graphId, adapterId } ) ) { - throw new UnknownGraphPlacementsException( graphId, adapterId ); - } - - CatalogGraphPlacement old = Objects.requireNonNull( graphPlacements.get( new Object[]{ graphId, adapterId } ) ); - - CatalogGraphPlacement placement = old.replacePhysicalName( physicalGraphName ); - - synchronized ( this ) { - graphPlacements.replace( new Object[]{ graphId, adapterId }, placement ); - } - - listeners.firePropertyChange( "graphPlacement", old, placement ); - } - - /** * {@inheritDoc} */ diff --git a/plugins/mapdb-catalog/src/main/java/org/polypheny/db/catalog/CatalogInfoPage.java b/plugins/mapdb-catalog/src/main/java/org/polypheny/db/catalog/CatalogInfoPage.java index 54d9ab3bfc..b12151079b 100644 --- a/plugins/mapdb-catalog/src/main/java/org/polypheny/db/catalog/CatalogInfoPage.java +++ b/plugins/mapdb-catalog/src/main/java/org/polypheny/db/catalog/CatalogInfoPage.java @@ -162,7 +162,7 @@ private void resetCatalogInformation() { catalog.getDatabases( null ).forEach( d -> { databaseInformation.addRow( d.id, d.name, d.defaultNamespaceId ); } ); - catalog.getSchemas( null ).forEach( s -> { + catalog.getNamespaces( null ).forEach( s -> { namespaceInformation.addRow( s.id, s.name, s.databaseId, s.namespaceType, s.caseSensitive ); } ); catalog.getTables( null, null ).forEach( t -> { diff --git a/plugins/mapdb-catalog/src/test/java/org/polypheny/db/test/CatalogTest.java b/plugins/mapdb-catalog/src/test/java/org/polypheny/db/test/CatalogTest.java index a7d11eb292..ca748f959d 100644 --- 
a/plugins/mapdb-catalog/src/test/java/org/polypheny/db/test/CatalogTest.java +++ b/plugins/mapdb-catalog/src/test/java/org/polypheny/db/test/CatalogTest.java @@ -38,8 +38,8 @@ import org.polypheny.db.catalog.entity.CatalogAdapter.AdapterType; import org.polypheny.db.catalog.entity.CatalogDatabase; import org.polypheny.db.catalog.entity.CatalogPrimaryKey; -import org.polypheny.db.catalog.entity.CatalogSchema; import org.polypheny.db.catalog.entity.CatalogUser; +import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.exceptions.GenericCatalogException; @@ -93,8 +93,8 @@ public void testLayout() throws UnknownDatabaseException, UnknownSchemaException CatalogDatabase database = catalog.getDatabase( "test_db" ); assertEquals( databaseId, database.id ); - long schemaId = catalog.addNamespace( "test_schema", userId, NamespaceType.RELATIONAL ); - CatalogSchema schema = catalog.getSchema( databaseId, "test_schema" ); + long schemaId = catalog.addNamespace( "test_schema", NamespaceType.RELATIONAL, false ); + LogicalNamespace schema = catalog.getSchema( databaseId, "test_schema" ); assertEquals( schemaId, schema.id ); long tableId = catalog.addTable( "test_table", schemaId, userId, EntityType.ENTITY, true ); @@ -142,22 +142,22 @@ public void testSchema() throws UnknownSchemaException { // test adding of schema for ( String name : names ) { - ids.add( catalog.addNamespace( name, userId, NamespaceType.RELATIONAL ) ); + ids.add( catalog.addNamespace( name, NamespaceType.RELATIONAL, false ) ); } - assertEquals( catalog.getSchemas( databaseId, null ).stream().map( s -> s.name ).collect( Collectors.toList() ), names ); + assertEquals( catalog.getNamespaces( databaseId, null ).stream().map( s -> s.name ).collect( Collectors.toList() ), names ); // test renaming of schema String replacedName = "newName"; Long id = ids.get( 0 ); - catalog.renameSchema( id, replacedName ); + catalog.renameNamespace( id, replacedName ); names.remove( 0 ); names.add( 0, replacedName.toLowerCase() ); - assertEquals( catalog.getSchemas( databaseId, null ).stream().map( s -> s.name ).collect( Collectors.toList() ), names ); + assertEquals( catalog.getNamespaces( databaseId, null ).stream().map( s -> s.name ).collect( Collectors.toList() ), names ); // test changing owner of schema int newUserId = catalog.addUser( "newUser", "" ); - catalog.setSchemaOwner( id, newUserId ); + catalog.setNamespaceOwner( id, newUserId ); assertEquals( catalog.getSchema( databaseId, replacedName ).ownerId, newUserId ); } @@ -170,7 +170,7 @@ public void testTable() throws GenericCatalogException { long databaseId = catalog.addDatabase( "APP", userId, user.name, 0, "" ); - long schemaId = catalog.addNamespace( "schema1", userId, NamespaceType.RELATIONAL ); + long schemaId = catalog.addNamespace( "schema1", NamespaceType.RELATIONAL, false ); List names = new ArrayList<>( Arrays.asList( "table1", "table2", "table3", "table4", "table5" ) ); List ids = new ArrayList<>(); @@ -232,7 +232,7 @@ public void testColumn() throws GenericCatalogException { long databaseId = catalog.addDatabase( "APP", userId, user.name, 0, "" ); - long schemaId = catalog.addNamespace( "schema1", userId, NamespaceType.RELATIONAL ); + long schemaId = catalog.addNamespace( "schema1", NamespaceType.RELATIONAL, false ); long tableId = catalog.addTable( "table1", schemaId, userId, EntityType.ENTITY, true ); @@ -313,7 +313,7 @@ public 
void testColumnPlacement() throws UnknownAdapterException { CatalogUser user = catalog.getUser( userId ); long databaseId = catalog.addDatabase( "APP", userId, user.name, 0, "" ); - long schemaId = catalog.addNamespace( "schema1", userId, NamespaceType.RELATIONAL ); + long schemaId = catalog.addNamespace( "schema1", NamespaceType.RELATIONAL, false ); long tableId = catalog.addTable( "table1", schemaId, userId, EntityType.ENTITY, true ); long columnId = catalog.addColumn( "column1", tableId, 0, PolyType.BIGINT, null, null, null, null, null, false, null ); @@ -344,7 +344,7 @@ public void testKey() throws GenericCatalogException { CatalogUser user = catalog.getUser( userId ); long databaseId = catalog.addDatabase( "APP", userId, user.name, 0, "" ); - long schemaId = catalog.addNamespace( "schema1", userId, NamespaceType.RELATIONAL ); + long schemaId = catalog.addNamespace( "schema1", NamespaceType.RELATIONAL, false ); long tableId = catalog.addTable( "table1", schemaId, userId, EntityType.ENTITY, true ); long columnId1 = catalog.addColumn( "column1", tableId, 0, PolyType.BIGINT, null, null, null, null, null, false, null ); diff --git a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlDrop.java b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlDrop.java index 5345bb60f2..f18bf46d62 100644 --- a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlDrop.java +++ b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlDrop.java @@ -18,9 +18,9 @@ import java.util.List; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.logistic.Pattern; +import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalCollection; -import org.polypheny.db.catalog.entity.CatalogSchema; +import org.polypheny.db.catalog.logistic.Pattern; import org.polypheny.db.ddl.DdlManager; import org.polypheny.db.languages.ParserPos; import org.polypheny.db.languages.QueryParameters; @@ -49,12 +49,12 @@ public void execute( Context context, Statement statement, QueryParameters param Catalog catalog = Catalog.getInstance(); String database = ((MqlQueryParameters) parameters).getDatabase(); - if ( catalog.getSchemas( Catalog.defaultDatabaseId, new Pattern( database ) ).size() != 1 ) { + if ( catalog.getNamespaces( Catalog.defaultDatabaseId, new Pattern( database ) ).size() != 1 ) { // dropping a document database( Polyschema ), which does not exist, which is a no-op return; } - CatalogSchema namespace = catalog.getSchemas( Catalog.defaultDatabaseId, new Pattern( database ) ).get( 0 ); + LogicalNamespace namespace = catalog.getNamespaces( Catalog.defaultDatabaseId, new Pattern( database ) ).get( 0 ); List collections = catalog.getCollections( namespace.id, new Pattern( getCollection() ) ); if ( collections.size() != 1 ) { // dropping a collection, which does not exist, which is a no-op diff --git a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlRenameCollection.java b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlRenameCollection.java index e112167965..20d8ed9096 100644 --- a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlRenameCollection.java +++ b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlRenameCollection.java @@ -19,7 +19,7 @@ import java.util.List; import java.util.Optional; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.entity.CatalogSchema; +import 
org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.exceptions.EntityAlreadyExistsException; import org.polypheny.db.catalog.exceptions.UnknownSchemaException; @@ -58,7 +58,7 @@ public void execute( Context context, Statement statement, QueryParameters param String database = ((MqlQueryParameters) parameters).getDatabase(); try { - CatalogSchema schema = catalog.getSchema( Catalog.defaultDatabaseId, database ); + LogicalNamespace schema = catalog.getSchema( Catalog.defaultDatabaseId, database ); List tables = catalog.getTables( schema.id, null ); if ( dropTarget ) { diff --git a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql2alg/MqlToAlgConverter.java b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql2alg/MqlToAlgConverter.java index 9f8e92c7ea..14cba71ae1 100644 --- a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql2alg/MqlToAlgConverter.java +++ b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql2alg/MqlToAlgConverter.java @@ -62,9 +62,9 @@ import org.polypheny.db.algebra.operators.OperatorName; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeField; -import org.polypheny.db.catalog.Snapshot; import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.catalog.logistic.NamespaceType; +import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.languages.OperatorRegistry; import org.polypheny.db.languages.QueryLanguage; import org.polypheny.db.languages.QueryParameters; diff --git a/plugins/mql-language/src/test/java/org/polypheny/db/mql/mql2alg/MqlMockCatalog.java b/plugins/mql-language/src/test/java/org/polypheny/db/mql/mql2alg/MqlMockCatalog.java index ad8fa91405..b6dbc6027d 100644 --- a/plugins/mql-language/src/test/java/org/polypheny/db/mql/mql2alg/MqlMockCatalog.java +++ b/plugins/mql-language/src/test/java/org/polypheny/db/mql/mql2alg/MqlMockCatalog.java @@ -17,21 +17,21 @@ package org.polypheny.db.mql.mql2alg; import org.polypheny.db.catalog.MockCatalog; -import org.polypheny.db.catalog.logistic.NamespaceType; -import org.polypheny.db.catalog.entity.CatalogSchema; import org.polypheny.db.catalog.entity.CatalogUser; +import org.polypheny.db.catalog.entity.LogicalNamespace; +import org.polypheny.db.catalog.logistic.NamespaceType; public class MqlMockCatalog extends MockCatalog { @Override - public CatalogSchema getSchema( long schemaId ) { - return new CatalogSchema( 1, "private", 0, 0, "tester", NamespaceType.DOCUMENT, true ); + public LogicalNamespace getNamespace( long id ) { + return new LogicalNamespace( 1, "private", 0, 0, "tester", NamespaceType.DOCUMENT, true ); } @Override - public CatalogUser getUser( int userId ) { + public CatalogUser getUser( long id ) { return new CatalogUser( 0, "name", "name" ); } diff --git a/plugins/pig-language/src/main/java/org/polypheny/db/tools/PigAlgBuilder.java b/plugins/pig-language/src/main/java/org/polypheny/db/tools/PigAlgBuilder.java index 701b53967a..8ba49669e1 100644 --- a/plugins/pig-language/src/main/java/org/polypheny/db/tools/PigAlgBuilder.java +++ b/plugins/pig-language/src/main/java/org/polypheny/db/tools/PigAlgBuilder.java @@ -43,7 +43,7 @@ import org.polypheny.db.algebra.core.relational.RelScan; import org.polypheny.db.algebra.operators.OperatorName; import org.polypheny.db.algebra.type.AlgDataType; -import org.polypheny.db.catalog.Snapshot; +import 
org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.languages.OperatorRegistry; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.Context; diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/CatalogPlugin.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/CatalogPlugin.java index 3e9d6452bd..caa8ba4fd2 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/CatalogPlugin.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/CatalogPlugin.java @@ -38,8 +38,8 @@ public CatalogPlugin( PluginWrapper wrapper ) { public void start() { PolyCatalog catalog = new PolyCatalog(); - catalog.addNamespace( "test", NamespaceType.RELATIONAL ); - long namespaceId = catalog.addNamespace( "test2", NamespaceType.RELATIONAL ); + catalog.addNamespace( "test", NamespaceType.RELATIONAL, false ); + long namespaceId = catalog.addNamespace( "test2", NamespaceType.RELATIONAL, false ); long tableId = catalog.addTable( "testTable", namespaceId ); catalog.addColumn( "testColumn", namespaceId, tableId, null ); diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/IdBuilder.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/IdBuilder.java index 8cc417b51d..c589d42435 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/IdBuilder.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/IdBuilder.java @@ -31,14 +31,45 @@ public class IdBuilder { private final AtomicLong verticalId; private final AtomicLong horizontalId; + private final AtomicLong indexId; + + private final AtomicLong keyId; + + private final AtomicLong adapterId; + + private final AtomicLong interfaceId; public IdBuilder() { - this( new AtomicLong( 0 ), new AtomicLong( 0 ), new AtomicLong( 0 ), new AtomicLong( 0 ), new AtomicLong( 0 ), new AtomicLong( 0 ), new AtomicLong( 0 ), new AtomicLong( 0 ) ); + this( + new AtomicLong( 0 ), + new AtomicLong( 0 ), + new AtomicLong( 0 ), + new AtomicLong( 0 ), + new AtomicLong( 0 ), + new AtomicLong( 0 ), + new AtomicLong( 0 ), + new AtomicLong( 0 ), + new AtomicLong( 0 ), + new AtomicLong( 0 ), + new AtomicLong( 0 ), + new AtomicLong( 0 ) ); } - public IdBuilder( AtomicLong snapshotId, AtomicLong databaseId, AtomicLong namespaceId, AtomicLong entityId, AtomicLong fieldId, AtomicLong userId, AtomicLong verticalId, AtomicLong horizontalId ) { + public IdBuilder( + AtomicLong snapshotId, + AtomicLong databaseId, + AtomicLong namespaceId, + AtomicLong entityId, + AtomicLong fieldId, + AtomicLong userId, + AtomicLong verticalId, + AtomicLong horizontalId, + AtomicLong indexId, + AtomicLong keyId, + AtomicLong adapterId, + AtomicLong interfaceId ) { this.snapshotId = snapshotId; this.databaseId = databaseId; @@ -46,9 +77,14 @@ public IdBuilder( AtomicLong snapshotId, AtomicLong databaseId, AtomicLong names this.entityId = entityId; this.fieldId = fieldId; + this.indexId = indexId; + this.keyId = keyId; this.userId = userId; this.verticalId = verticalId; this.horizontalId = horizontalId; + + this.adapterId = adapterId; + this.interfaceId = interfaceId; } @@ -91,4 +127,24 @@ public long getNewHorizontalId() { return horizontalId.getAndIncrement(); } + + public long getNewIndexId() { + return indexId.getAndIncrement(); + } + + + public long getNewKeyId() { + return keyId.getAndIncrement(); + } + + + public long getNewAdapterId() { + return adapterId.getAndIncrement(); + } + + + public long getNewInterfaceId() { + return interfaceId.getAndIncrement(); + 
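/*
 * Each id family in IdBuilder gets its own AtomicLong, so the counters advance
 * independently and are process-local (nothing here persists them). A usage sketch:
 *
 *   IdBuilder ids = new IdBuilder();
 *   long keyId = ids.getNewKeyId();          // 0
 *   long adapterId = ids.getNewAdapterId();  // 0 — independent of keyId
 *   long nextKeyId = ids.getNewKeyId();      // 1
 */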
} + } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/NCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/NCatalog.java index 756a40ad66..7092e8037c 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/NCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/NCatalog.java @@ -52,5 +52,4 @@ default T unwrap( Class clazz ) { return clazz.cast( this ); } - } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java index df93627dd4..52b8556fc4 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java @@ -16,21 +16,38 @@ package org.polypheny.db.catalog; +import com.google.common.collect.ImmutableMap; import io.activej.serializer.BinarySerializer; import io.activej.serializer.annotations.Deserialize; import io.activej.serializer.annotations.Serialize; +import java.beans.PropertyChangeSupport; +import java.util.List; +import java.util.Locale; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; +import java.util.stream.Collectors; import lombok.Getter; import lombok.NonNull; import lombok.extern.slf4j.Slf4j; -import org.polypheny.db.algebra.type.AlgDataType; -import org.polypheny.db.catalog.entities.CatalogUser; +import org.polypheny.db.algebra.AlgNode; +import org.polypheny.db.catalog.catalogs.LogicalCatalog; +import org.polypheny.db.catalog.entity.CatalogAdapter; +import org.polypheny.db.catalog.entity.CatalogAdapter.AdapterType; +import org.polypheny.db.catalog.entity.CatalogQueryInterface; +import org.polypheny.db.catalog.entity.CatalogUser; +import org.polypheny.db.catalog.entity.LogicalNamespace; +import org.polypheny.db.catalog.exceptions.UnknownAdapterException; +import org.polypheny.db.catalog.exceptions.UnknownQueryInterfaceException; +import org.polypheny.db.catalog.exceptions.UnknownSchemaException; +import org.polypheny.db.catalog.exceptions.UnknownUserException; import org.polypheny.db.catalog.logical.document.DocumentCatalog; import org.polypheny.db.catalog.logical.graph.GraphCatalog; import org.polypheny.db.catalog.logical.relational.RelationalCatalog; import org.polypheny.db.catalog.logistic.NamespaceType; +import org.polypheny.db.catalog.logistic.Pattern; import org.polypheny.db.catalog.snapshot.FullSnapshot; +import org.polypheny.db.catalog.snapshot.Snapshot; +import org.polypheny.db.transaction.Transaction; /** @@ -41,32 +58,44 @@ * Field -> Column (Relational), does not exist (Graph), Field (Document) */ @Slf4j -public class PolyCatalog implements Serializable { +public class PolyCatalog extends Catalog implements Serializable { @Getter public final BinarySerializer serializer = Serializable.builder.get().build( PolyCatalog.class ); @Serialize - public final Map catalogs; + public final Map catalogs; @Serialize public final Map users; + @Serialize + public final Map adapters; + + @Serialize + public final Map interfaces; + private final IdBuilder idBuilder = new IdBuilder(); private FullSnapshot fullSnapshot; + protected final PropertyChangeSupport listeners = new PropertyChangeSupport( this ); + public PolyCatalog() { - this( new ConcurrentHashMap<>(), new ConcurrentHashMap<>() ); + this( new ConcurrentHashMap<>(), new ConcurrentHashMap<>(), new ConcurrentHashMap<>(), new ConcurrentHashMap<>() ); } public PolyCatalog( @Deserialize("users") Map 
users, - @Deserialize("catalogs") Map catalogs ) { + @Deserialize("catalogs") Map catalogs, + @Deserialize("adapters") Map adapters, + @Deserialize("interfaces") Map interfaces ) { this.users = users; this.catalogs = catalogs; + this.adapters = adapters; + this.interfaces = interfaces; updateSnapshot(); } @@ -76,6 +105,11 @@ private void updateSnapshot() { } + private void change() { + // empty for now + } + + public void commit() { log.debug( "commit" ); updateSnapshot(); @@ -87,52 +121,232 @@ public void rollback() { } - public long addUser( @NonNull String name ) { - long id = idBuilder.getNewUserId(); + @Override + @Deprecated + public Map getNodeInfo() { + return null; + } - users.put( id, new CatalogUser( id, name ) ); - return id; + @Override + @Deprecated + public void restoreInterfacesIfNecessary() { + + } + + + @Override + @Deprecated + public void validateColumns() { + + } + + + @Override + @Deprecated + public void restoreColumnPlacements( Transaction transaction ) { + + } + + + @Override + @Deprecated + public void restoreViews( Transaction transaction ) { + + } + + + @Override + public int addUser( String name, String password ) { + return 0; } - public long addNamespace( String name, NamespaceType namespaceType ) { + public long addNamespace( String name, NamespaceType namespaceType, boolean caseSensitive ) { long id = idBuilder.getNewNamespaceId(); + LogicalNamespace namespace = new LogicalNamespace( id, name, namespaceType, caseSensitive ); switch ( namespaceType ) { case RELATIONAL: - catalogs.put( id, new RelationalCatalog( id, name ) ); + catalogs.put( id, new RelationalCatalog( namespace, idBuilder ) ); break; case DOCUMENT: - catalogs.put( id, new DocumentCatalog( id, name ) ); + catalogs.put( id, new DocumentCatalog( namespace, idBuilder ) ); break; case GRAPH: - catalogs.put( id, new GraphCatalog( id, name ) ); + catalogs.put( id, new GraphCatalog( namespace, idBuilder ) ); break; } - + change(); return id; } - public long addTable( String name, long namespaceId ) { - long id = idBuilder.getNewEntityId(); + @Override + public @NonNull List getNamespaces( Pattern name ) { + return catalogs.values().stream().filter( c -> + c.getLogicalNamespace().caseSensitive + ? 
c.getLogicalNamespace().name.toLowerCase( Locale.ROOT ).matches( name.pattern ) + : c.getLogicalNamespace().name.matches( name.pattern ) ) + .map( LogicalCatalog::getLogicalNamespace ).collect( Collectors.toList() ); + } + + + @Override + public LogicalNamespace getNamespace( long id ) { + return catalogs.get( id ).getLogicalNamespace(); + } + + + @Override + public LogicalNamespace getNamespace( String name ) throws UnknownSchemaException { + List namespaces = getNamespaces( Pattern.of( name ) ); + if ( namespaces.isEmpty() ) { + return null; + } else if ( namespaces.size() > 1 ) { + throw new RuntimeException( "multiple namespaces retrieved" ); + } + return namespaces.get( 0 ); + + } - catalogs.get( namespaceId ).asRelational().addTable( id, name ); + @Override + public boolean checkIfExistsNamespace( String name ) { + return !getNamespaces( Pattern.of( name ) ).isEmpty(); + } + + + @Override + public void renameNamespace( long id, String name ) { + if ( catalogs.get( id ) == null ) { + return; + } + catalogs.get( id ).withLogicalNamespace( catalogs.get( id ).getLogicalNamespace().withName( name ) ); + + change(); + } + + + @Override + public void deleteNamespace( long id ) { + catalogs.remove( id ); + + change(); + } + + + @Override + public CatalogUser getUser( String name ) throws UnknownUserException { + return users.values().stream().filter( u -> u.name.equals( name ) ).findFirst().orElse( null ); + } + + + @Override + public CatalogUser getUser( long id ) { + return users.get( id ); + } + + + @Override + public List getAdapters() { + return List.copyOf( adapters.values() ); + } + + + @Override + public CatalogAdapter getAdapter( String uniqueName ) throws UnknownAdapterException { + return adapters.values().stream().filter( a -> a.uniqueName.equals( uniqueName ) ).findFirst().orElse( null ); + } + + + @Override + public CatalogAdapter getAdapter( long id ) { + return adapters.get( id ); + } + + + @Override + public boolean checkIfExistsAdapter( long id ) { + return adapters.containsKey( id ); + } + + + @Override + public long addAdapter( String uniqueName, String clazz, AdapterType type, Map settings ) { + long id = idBuilder.getNewAdapterId(); + adapters.put( id, new CatalogAdapter( id, uniqueName, clazz, type, settings ) ); return id; } - public long addColumn( String name, long namespaceId, long entityId, AlgDataType type ) { - long id = idBuilder.getNewFieldId(); + @Override + public void updateAdapterSettings( long adapterId, Map newSettings ) { + if ( !adapters.containsKey( adapterId ) ) { + return; + } + adapters.put( adapterId, adapters.get( adapterId ).withSettings( ImmutableMap.copyOf( newSettings ) ) ); + } - catalogs.get( namespaceId ).asRelational().addColumn( id, name, entityId, type ); + + @Override + public void deleteAdapter( long id ) { + adapters.remove( id ); + } + + + @Override + public List getQueryInterfaces() { + return List.copyOf( interfaces.values() ); + } + + + @Override + public CatalogQueryInterface getQueryInterface( String uniqueName ) throws UnknownQueryInterfaceException { + return interfaces.values().stream().filter( i -> i.name.equals( uniqueName ) ).findFirst().orElse( null ); + } + + + @Override + public CatalogQueryInterface getQueryInterface( long id ) { + return interfaces.get( id ); + } + + + @Override + public long addQueryInterface( String uniqueName, String clazz, Map settings ) { + long id = idBuilder.getNewInterfaceId(); + + interfaces.put( id, new CatalogQueryInterface( id, uniqueName, clazz, settings ) ); return id; } + @Override + 
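/*
 * PolyCatalog now extends Catalog directly and keeps adapters and query interfaces
 * in its own serialized maps. A minimal usage sketch mirroring CatalogPlugin above;
 * the adapter class name and settings are illustrative:
 *
 *   PolyCatalog catalog = new PolyCatalog();
 *   long namespaceId = catalog.addNamespace( "test2", NamespaceType.RELATIONAL, false );
 *   long adapterId = catalog.addAdapter( "hsqldb", "org.example.SomeStore", AdapterType.STORE, Map.of() );
 *   catalog.deleteAdapter( adapterId );
 */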
public void deleteQueryInterface( long id ) { + interfaces.remove( id ); + } + + + @Override + public void close() { + log.error( "closing" ); + } + + + @Override + public void clear() { + log.error( "clearing" ); + } + + + @Override + public Snapshot getSnapshot( long id ) { + return null; + } + + @Override public PolyCatalog copy() { return deserialize( serialize(), PolyCatalog.class ); diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/entities/CatalogDatabase.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/entities/CatalogDatabase.java deleted file mode 100644 index e07c656b44..0000000000 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/entities/CatalogDatabase.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Copyright 2019-2023 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.catalog.entities; - -import io.activej.serializer.annotations.Deserialize; -import io.activej.serializer.annotations.Serialize; - -public class CatalogDatabase { - - @Serialize - public final long id; - - @Serialize - public final String name; - - - public CatalogDatabase( - @Deserialize("id") long id, - @Deserialize("name") String name ) { - this.id = id; - this.name = name; - } - -} diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/document/CatalogCollection.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/document/CatalogCollection.java deleted file mode 100644 index 0b057f53de..0000000000 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/document/CatalogCollection.java +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Copyright 2019-2023 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.polypheny.db.catalog.logical.document; - -import io.activej.serializer.annotations.Deserialize; -import io.activej.serializer.annotations.Serialize; -import lombok.Value; - -@Value -public class CatalogCollection { - - @Serialize - public long id; - - @Serialize - public String name; - - - public CatalogCollection( - @Deserialize("id") long id, - @Deserialize("name") String name ) { - this.id = id; - this.name = name; - } - -} diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/document/CatalogDatabase.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/document/CatalogDatabase.java deleted file mode 100644 index 3402e11703..0000000000 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/document/CatalogDatabase.java +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Copyright 2019-2023 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.catalog.logical.document; - -import com.google.common.collect.ImmutableMap; -import java.util.Map; -import lombok.Value; - -@Value -public class CatalogDatabase { - - public String name; - public long id; - public ImmutableMap collections; - - - public CatalogDatabase( long id, String name, Map collections ) { - this.id = id; - this.name = name; - this.collections = ImmutableMap.copyOf( collections ); - } - -} diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/document/DocumentCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/document/DocumentCatalog.java index e1433751f5..dfaad81503 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/document/DocumentCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/document/DocumentCatalog.java @@ -19,43 +19,52 @@ import io.activej.serializer.BinarySerializer; import io.activej.serializer.annotations.Deserialize; import io.activej.serializer.annotations.Serialize; +import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import lombok.Getter; import lombok.Value; +import lombok.With; import lombok.experimental.NonFinal; -import org.polypheny.db.catalog.logistic.NamespaceType; +import org.polypheny.db.catalog.IdBuilder; import org.polypheny.db.catalog.NCatalog; import org.polypheny.db.catalog.Serializable; +import org.polypheny.db.catalog.catalogs.LogicalDocumentCatalog; +import org.polypheny.db.catalog.entity.LogicalNamespace; +import org.polypheny.db.catalog.entity.logical.LogicalCollection; +import org.polypheny.db.catalog.logistic.EntityType; +import org.polypheny.db.catalog.logistic.NamespaceType; +import org.polypheny.db.catalog.logistic.Pattern; @Value -public class DocumentCatalog implements NCatalog, Serializable { +@With +public class DocumentCatalog implements NCatalog, Serializable, LogicalDocumentCatalog { @Getter public BinarySerializer serializer = Serializable.builder.get().build( DocumentCatalog.class ); 
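    // ActiveJ builds this binary (de)serializer from the fields annotated with
    // @Serialize below; the constructor restores them through the matching
    // @Deserialize("...") parameters.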
@Serialize - public Map collections; - + public IdBuilder idBuilder; @Serialize - public String name; - + public Map collections; + @Getter @Serialize - public long id; + public LogicalNamespace logicalNamespace; - public DocumentCatalog( long id, String name ) { - this( id, name, new ConcurrentHashMap<>() ); + public DocumentCatalog( LogicalNamespace logicalNamespace, IdBuilder idBuilder ) { + this( logicalNamespace, idBuilder, new ConcurrentHashMap<>() ); } public DocumentCatalog( - @Deserialize("id") long id, - @Deserialize("name") String name, - @Deserialize("collections") Map collections ) { + @Deserialize("logicalNamespace") LogicalNamespace logicalNamespace, + @Deserialize("idBuilder") IdBuilder idBuilder, + @Deserialize("collections") Map collections ) { + this.logicalNamespace = logicalNamespace; this.collections = collections; - this.id = id; - this.name = name; + + this.idBuilder = idBuilder; } @@ -98,4 +107,40 @@ public DocumentCatalog copy() { return deserialize( serialize(), DocumentCatalog.class ); } + + @Override + public boolean checkIfExistsEntity( long namespaceId, String entityName ) { + return false; + } + + + @Override + public boolean checkIfExistsEntity( long tableId ) { + return false; + } + + + @Override + public LogicalCollection getCollection( long collectionId ) { + return null; + } + + + @Override + public List getCollections( long namespaceId, Pattern namePattern ) { + return null; + } + + + @Override + public long addCollection( Long id, String name, long schemaId, int currentUserId, EntityType entity, boolean modifiable ) { + return 0; + } + + + @Override + public void deleteCollection( long id ) { + + } + } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/graph/CatalogGraph.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/graph/CatalogGraph.java deleted file mode 100644 index 516dec90e1..0000000000 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/graph/CatalogGraph.java +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Copyright 2019-2023 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.polypheny.db.catalog.logical.graph; - -import io.activej.serializer.annotations.Deserialize; -import io.activej.serializer.annotations.Serialize; - -public class CatalogGraph { - - @Serialize - public final long id; - - @Serialize - public final String name; - - public CatalogGraph( - @Deserialize("id") long id, - @Deserialize("name") String name ) { - this.id = id; - this.name = name; - } - -} diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/graph/GraphCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/graph/GraphCatalog.java index db4ffaf91e..f06fe4e3a8 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/graph/GraphCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/graph/GraphCatalog.java @@ -17,37 +17,43 @@ package org.polypheny.db.catalog.logical.graph; import io.activej.serializer.BinarySerializer; -import io.activej.serializer.annotations.Deserialize; -import io.activej.serializer.annotations.Serialize; +import java.util.List; import lombok.Getter; import lombok.Value; +import lombok.With; import lombok.experimental.NonFinal; -import org.polypheny.db.catalog.logistic.NamespaceType; +import org.polypheny.db.adapter.DataStore; +import org.polypheny.db.catalog.IdBuilder; import org.polypheny.db.catalog.NCatalog; import org.polypheny.db.catalog.Serializable; +import org.polypheny.db.catalog.catalogs.LogicalGraphCatalog; +import org.polypheny.db.catalog.entity.LogicalNamespace; +import org.polypheny.db.catalog.entity.logical.LogicalGraph; +import org.polypheny.db.catalog.exceptions.GenericCatalogException; +import org.polypheny.db.catalog.exceptions.UnknownColumnException; +import org.polypheny.db.catalog.exceptions.UnknownTableException; +import org.polypheny.db.catalog.logistic.NamespaceType; +import org.polypheny.db.catalog.logistic.Pattern; @Value -public class GraphCatalog implements NCatalog, Serializable { +@With +public class GraphCatalog implements NCatalog, Serializable, LogicalGraphCatalog { @Getter public BinarySerializer serializer = Serializable.builder.get().build( GraphCatalog.class ); + @Getter + public LogicalNamespace logicalNamespace; + public IdBuilder idBuilder; - @Serialize - public long id; - - @Serialize - public String name; @NonFinal boolean openChanges = false; - public GraphCatalog( - @Deserialize("id") long id, - @Deserialize("name") String name ) { + public GraphCatalog( LogicalNamespace logicalNamespace, IdBuilder idBuilder ) { - this.id = id; - this.name = name; + this.logicalNamespace = logicalNamespace; + this.idBuilder = idBuilder; } @@ -81,4 +87,58 @@ public GraphCatalog copy() { return deserialize( serialize(), GraphCatalog.class ); } + + @Override + public boolean checkIfExistsEntity( long namespaceId, String entityName ) { + return false; + } + + + @Override + public boolean checkIfExistsEntity( long tableId ) { + return false; + } + + + @Override + public void addGraphAlias( long graphId, String alias, boolean ifNotExists ) { + + } + + + @Override + public void removeGraphAlias( long graphId, String alias, boolean ifExists ) { + + } + + + @Override + public long addGraph( String name, List stores, boolean modifiable, boolean ifNotExists, boolean replace ) { + return 0; + } + + + @Override + public void deleteGraph( long id ) { + + } + + + @Override + public LogicalGraph getGraph( long id ) { + return null; + } + + + @Override + public List getGraphs( Pattern graphName ) { + return null; + } + + + 
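All of these namespace catalogs are declared with Lombok's @Value and @With, so an instance is never mutated in place: each with-method returns a modified copy that the caller has to store back. A minimal sketch of a rename under this pattern (illustrative only; WithPatternSketch is a hypothetical holder class, the map stands in for the catalogs map kept by PolyCatalog, and the Lombok-generated withName and withLogicalNamespace accessors shown above are assumed):

    import java.util.Map;
    import org.polypheny.db.catalog.entity.LogicalNamespace;

    final class WithPatternSketch {

        // Renames a namespace by building a copy and writing it back into the map;
        // the original GraphCatalog instance is left untouched.
        static void rename( Map<Long, GraphCatalog> catalogs, long id, String newName ) {
            GraphCatalog catalog = catalogs.get( id );
            LogicalNamespace renamed = catalog.getLogicalNamespace().withName( newName );
            catalogs.put( id, catalog.withLogicalNamespace( renamed ) );
        }
    }

Since the with-methods never mutate, a call whose return value is discarded has no effect; the copy must be reassigned or put back for the change to become visible.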
@Override + public void addGraphLogistics( long id, List stores, boolean onlyPlacement ) throws GenericCatalogException, UnknownTableException, UnknownColumnException { + + } + } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/relational/RelationalCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/relational/RelationalCatalog.java index b629b3fe45..750a14475a 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/relational/RelationalCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/relational/RelationalCatalog.java @@ -19,18 +19,55 @@ import io.activej.serializer.BinarySerializer; import io.activej.serializer.annotations.Deserialize; import io.activej.serializer.annotations.Serialize; +import java.beans.PropertyChangeSupport; +import java.util.ArrayList; import java.util.HashMap; +import java.util.List; import java.util.Map; +import java.util.Objects; +import java.util.stream.Collectors; import lombok.Getter; import lombok.Value; +import lombok.With; import lombok.experimental.NonFinal; +import org.polypheny.db.algebra.AlgCollation; +import org.polypheny.db.algebra.AlgNode; +import org.polypheny.db.algebra.type.AlgDataType; +import org.polypheny.db.catalog.IdBuilder; import org.polypheny.db.catalog.NCatalog; import org.polypheny.db.catalog.Serializable; +import org.polypheny.db.catalog.catalogs.LogicalRelationalCatalog; +import org.polypheny.db.catalog.entity.CatalogConstraint; +import org.polypheny.db.catalog.entity.CatalogForeignKey; +import org.polypheny.db.catalog.entity.CatalogIndex; +import org.polypheny.db.catalog.entity.CatalogKey; +import org.polypheny.db.catalog.entity.CatalogKey.EnforcementTime; +import org.polypheny.db.catalog.entity.CatalogPrimaryKey; +import org.polypheny.db.catalog.entity.CatalogView; +import org.polypheny.db.catalog.entity.LogicalNamespace; +import org.polypheny.db.catalog.entity.MaterializedCriteria; +import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.catalog.exceptions.GenericCatalogException; +import org.polypheny.db.catalog.exceptions.UnknownColumnException; +import org.polypheny.db.catalog.exceptions.UnknownConstraintException; +import org.polypheny.db.catalog.exceptions.UnknownForeignKeyException; +import org.polypheny.db.catalog.exceptions.UnknownIndexException; +import org.polypheny.db.catalog.exceptions.UnknownIndexIdRuntimeException; +import org.polypheny.db.catalog.exceptions.UnknownSchemaException; +import org.polypheny.db.catalog.exceptions.UnknownTableException; +import org.polypheny.db.catalog.logistic.Collation; +import org.polypheny.db.catalog.logistic.EntityType; +import org.polypheny.db.catalog.logistic.ForeignKeyOption; +import org.polypheny.db.catalog.logistic.IndexType; import org.polypheny.db.catalog.logistic.NamespaceType; +import org.polypheny.db.catalog.logistic.Pattern; +import org.polypheny.db.languages.QueryLanguage; +import org.polypheny.db.type.PolyType; @Value -public class RelationalCatalog implements NCatalog, Serializable { +@With +public class RelationalCatalog implements NCatalog, Serializable, LogicalRelationalCatalog { @Getter public BinarySerializer serializer = Serializable.builder.get().build( RelationalCatalog.class ); @@ -38,29 +75,46 @@ public class RelationalCatalog implements NCatalog, Serializable { @Serialize public Map tables; + @Getter + public LogicalNamespace logicalNamespace; + 
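+    // The flat id/name pair of the previous version is replaced by the full
+    // LogicalNamespace handle; fresh ids for keys and indexes come from the
+    // shared IdBuilder.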
@Serialize - public long id; + public Map indexes; + + @Serialize + public Map keys; + + public Map keyColumns; @Serialize - public String name; + public IdBuilder idBuilder; @NonFinal boolean openChanges = false; + PropertyChangeSupport listeners = new PropertyChangeSupport( this ); + public RelationalCatalog( - @Deserialize("id") long id, - @Deserialize("name") String name, - @Deserialize("tables") Map tables ) { - this.id = id; - this.name = name; + @Deserialize("logicalNamespace") LogicalNamespace logicalNamespace, + @Deserialize("idBuilder") IdBuilder idBuilder, + @Deserialize("tables") Map tables, + @Deserialize("indexes") Map indexes, + @Deserialize("keys") Map keys, + @Deserialize("keyColumns") Map keyColumns ) { + this.logicalNamespace = logicalNamespace; this.tables = new HashMap<>( tables ); + this.indexes = indexes; + this.keys = keys; + this.keyColumns = keyColumns; + + this.idBuilder = idBuilder; } - public RelationalCatalog( long id, String name ) { - this( id, name, new HashMap<>() ); + public RelationalCatalog( LogicalNamespace namespace, IdBuilder idBuilder ) { + this( namespace, idBuilder, new HashMap<>(), new HashMap<>(), new HashMap<>(), new HashMap<>() ); } @@ -100,4 +154,457 @@ public RelationalCatalog copy() { return deserialize( serialize(), RelationalCatalog.class ); } + + @Override + public boolean checkIfExistsEntity( long namespaceId, String entityName ) { + return false; + } + + + @Override + public boolean checkIfExistsEntity( long tableId ) { + return false; + } + + + @Override + public List getTables( long schemaId, Pattern tableNamePattern ) { + return null; + } + + + @Override + public List getTables( Pattern schemaNamePattern, Pattern tableNamePattern ) { + return null; + } + + + @Override + public LogicalTable getTable( String schemaName, String tableName ) throws UnknownTableException, UnknownSchemaException { + return null; + } + + + @Override + public LogicalTable getTable( long tableId ) { + return null; + } + + + @Override + public LogicalTable getTable( long schemaId, String tableName ) throws UnknownTableException { + return null; + } + + + @Override + public LogicalTable getTableFromPartition( long partitionId ) { + return null; + } + + + @Override + public long addTable( String name, long namespaceId, int ownerId, EntityType entityType, boolean modifiable ) { + return 0; + } + + + @Override + public long addView( String name, long namespaceId, int ownerId, EntityType entityType, boolean modifiable, AlgNode definition, AlgCollation algCollation, Map> underlyingTables, AlgDataType fieldList, String query, QueryLanguage language ) { + return 0; + } + + + @Override + public long addMaterializedView( String name, long namespaceId, int ownerId, EntityType entityType, boolean modifiable, AlgNode definition, AlgCollation algCollation, Map> underlyingTables, AlgDataType fieldList, MaterializedCriteria materializedCriteria, String query, QueryLanguage language, boolean ordered ) throws GenericCatalogException { + return 0; + } + + + @Override + public void renameTable( long tableId, String name ) { + + } + + + @Override + public void deleteTable( long tableId ) { + + } + + + @Override + public void setTableOwner( long tableId, int ownerId ) { + + } + + + @Override + public void setPrimaryKey( long tableId, Long keyId ) { + + } + + + @Override + public List getIndexes( CatalogKey key ) { + return indexes.values().stream().filter( i -> i.keyId == key.id ).collect( Collectors.toList() ); + } + + + @Override + public List getForeignKeys( CatalogKey key ) { + 
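+        // Boilerplate lookup: this still filters the index map by key id; dedicated
+        // foreign-key bookkeeping is not wired up yet in this rewrite.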
return indexes.values().stream().filter( i -> i.keyId == key.id ).collect( Collectors.toList() ); + } + + + @Override + public List getIndexes( long tableId, boolean onlyUnique ) { + if ( !onlyUnique ) { + return indexes.values().stream().filter( i -> i.key.tableId == tableId ).collect( Collectors.toList() ); + } else { + return indexes.values().stream().filter( i -> i.key.tableId == tableId && i.unique ).collect( Collectors.toList() ); + } + } + + + @Override + public CatalogIndex getIndex( long tableId, String indexName ) throws UnknownIndexException { + try { + return indexes.values().stream() + .filter( i -> i.key.tableId == tableId && i.name.equals( indexName ) ) + .findFirst() + .orElseThrow( NullPointerException::new ); + } catch ( NullPointerException e ) { + throw new UnknownIndexException( tableId, indexName ); + } + } + + + @Override + public boolean checkIfExistsIndex( long tableId, String indexName ) { + try { + getIndex( tableId, indexName ); + return true; + } catch ( UnknownIndexException e ) { + return false; + } + } + + + @Override + public CatalogIndex getIndex( long indexId ) { + try { + return Objects.requireNonNull( indexes.get( indexId ) ); + } catch ( NullPointerException e ) { + throw new UnknownIndexIdRuntimeException( indexId ); + } + } + + + @Override + public List getIndexes() { + return new ArrayList<>( indexes.values() ); + } + + + @Override + public long addIndex( long tableId, List columnIds, boolean unique, String method, String methodDisplayName, int location, IndexType type, String indexName ) throws GenericCatalogException { + long keyId = getOrAddKey( tableId, columnIds, EnforcementTime.ON_QUERY ); + if ( unique ) { + // TODO: Check if the current values are unique + } + long id = idBuilder.getNewIndexId(); + synchronized ( this ) { + indexes.put( id, new CatalogIndex( + id, + indexName, + unique, + method, + methodDisplayName, + type, + location, + keyId, + Objects.requireNonNull( keys.get( keyId ) ), + null ) ); + } + listeners.firePropertyChange( "index", null, keyId ); + return id; + } + + + private long getOrAddKey( long tableId, List columnIds, EnforcementTime enforcementTime ) { + Long keyId = keyColumns.get( columnIds.stream().mapToLong( Long::longValue ).toArray() ); + if ( keyId != null ) { + return keyId; + } + try { + return addKey( tableId, columnIds, enforcementTime ); + } catch ( GenericCatalogException e ) { + throw new RuntimeException( e ); + } + } + + + private long addKey( long tableId, List columnIds, EnforcementTime enforcementTime ) throws GenericCatalogException { + try { + LogicalTable table = Objects.requireNonNull( tables.get( tableId ) ); + long id = idBuilder.getNewKeyId(); + CatalogKey key = new CatalogKey( id, table.id, table.namespaceId, columnIds, enforcementTime ); + synchronized ( this ) { + keys.put( id, key ); + keyColumns.put( columnIds.stream().mapToLong( Long::longValue ).toArray(), id ); + } + listeners.firePropertyChange( "key", null, key ); + return id; + } catch ( NullPointerException e ) { + throw new GenericCatalogException( e ); + } + } + + + @Override + public void setIndexPhysicalName( long indexId, String physicalName ) { + + } + + + @Override + public void deleteIndex( long indexId ) { + + } + + + @Override + public List getKeys() { + return null; + } + + + @Override + public List getTableKeys( long tableId ) { + return null; + } + + + @Override + public List getColumns( long tableId ) { + return null; + } + + + @Override + public List getColumns( Pattern schemaNamePattern, Pattern tableNamePattern, 
Pattern columnNamePattern ) { + return null; + } + + + @Override + public LogicalColumn getColumn( long columnId ) { + return null; + } + + + @Override + public LogicalColumn getColumn( long tableId, String columnName ) throws UnknownColumnException { + return null; + } + + + @Override + public LogicalColumn getColumn( String schemaName, String tableName, String columnName ) throws UnknownColumnException, UnknownSchemaException, UnknownTableException { + return null; + } + + + @Override + public long addColumn( String name, long tableId, int position, PolyType type, PolyType collectionsType, Integer length, Integer scale, Integer dimension, Integer cardinality, boolean nullable, Collation collation ) { + return 0; + } + + + @Override + public void renameColumn( long columnId, String name ) { + + } + + + @Override + public void setColumnPosition( long columnId, int position ) { + + } + + + @Override + public void setColumnType( long columnId, PolyType type, PolyType collectionsType, Integer length, Integer precision, Integer dimension, Integer cardinality ) throws GenericCatalogException { + + } + + + @Override + public void setNullable( long columnId, boolean nullable ) throws GenericCatalogException { + + } + + + @Override + public void setCollation( long columnId, Collation collation ) { + + } + + + @Override + public boolean checkIfExistsColumn( long tableId, String columnName ) { + return false; + } + + + @Override + public void deleteColumn( long columnId ) { + + } + + + @Override + public void setDefaultValue( long columnId, PolyType type, String defaultValue ) { + + } + + + @Override + public void deleteDefaultValue( long columnId ) { + + } + + + @Override + public CatalogPrimaryKey getPrimaryKey( long key ) { + return null; + } + + + @Override + public boolean isPrimaryKey( long keyId ) { + return false; + } + + + @Override + public boolean isForeignKey( long keyId ) { + return false; + } + + + @Override + public boolean isIndex( long keyId ) { + return false; + } + + + @Override + public boolean isConstraint( long keyId ) { + return false; + } + + + @Override + public void addPrimaryKey( long tableId, List columnIds ) throws GenericCatalogException { + + } + + + @Override + public List getForeignKeys( long tableId ) { + return null; + } + + + @Override + public List getExportedKeys( long tableId ) { + return null; + } + + + @Override + public List getConstraints( long tableId ) { + return null; + } + + + @Override + public List getConstraints( CatalogKey key ) { + return null; + } + + + @Override + public CatalogConstraint getConstraint( long tableId, String constraintName ) throws UnknownConstraintException { + return null; + } + + + @Override + public CatalogForeignKey getForeignKey( long tableId, String foreignKeyName ) throws UnknownForeignKeyException { + return null; + } + + + @Override + public void addForeignKey( long tableId, List columnIds, long referencesTableId, List referencesIds, String constraintName, ForeignKeyOption onUpdate, ForeignKeyOption onDelete ) throws GenericCatalogException { + + } + + + @Override + public void addUniqueConstraint( long tableId, String constraintName, List columnIds ) throws GenericCatalogException { + + } + + + @Override + public void deletePrimaryKey( long tableId ) throws GenericCatalogException { + + } + + + @Override + public void deleteForeignKey( long foreignKeyId ) throws GenericCatalogException { + + } + + + @Override + public void deleteConstraint( long constraintId ) throws GenericCatalogException { + + } + + + @Override + 
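+    // Stub: view dependency cleanup is not implemented yet in this rewrite.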
public void deleteViewDependencies( CatalogView catalogView ) { + + } + + + @Override + public void updateMaterializedViewRefreshTime( long materializedViewId ) { + + } + + + @Override + public List getTablesForPeriodicProcessing() { + return null; + } + + + @Override + public void flagTableForDeletion( long tableId, boolean flag ) { + + } + + + @Override + public boolean isTableFlaggedForDeletion( long tableId ) { + return false; + } + } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/FullSnapshot.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/FullSnapshot.java index f3c01dbd19..1a61afa23b 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/FullSnapshot.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/FullSnapshot.java @@ -19,8 +19,7 @@ import java.util.List; import java.util.Map; import lombok.Getter; -import org.polypheny.db.catalog.NCatalog; -import org.polypheny.db.catalog.Snapshot; +import org.polypheny.db.catalog.catalogs.LogicalCatalog; import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.catalog.entity.CatalogNamespace; import org.polypheny.db.catalog.entity.allocation.AllocationCollection; @@ -40,7 +39,7 @@ public class FullSnapshot implements Snapshot { private final long id; - public FullSnapshot( long id, Map catalogs ) { + public FullSnapshot( long id, Map catalogs ) { this.id = id; diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/SqlLanguagePlugin.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/SqlLanguagePlugin.java index 9c1c1bf658..0d7fc415e4 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/SqlLanguagePlugin.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/SqlLanguagePlugin.java @@ -35,8 +35,8 @@ import org.polypheny.db.algebra.operators.ChainedOperatorTable; import org.polypheny.db.algebra.operators.OperatorName; import org.polypheny.db.algebra.operators.OperatorTable; -import org.polypheny.db.catalog.Snapshot; import org.polypheny.db.catalog.logistic.NamespaceType; +import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.config.PolyphenyDbConnectionProperty; import org.polypheny.db.languages.LanguageManager; import org.polypheny.db.languages.OperatorRegistry; @@ -44,7 +44,6 @@ import org.polypheny.db.nodes.LangFunctionOperator; import org.polypheny.db.nodes.Operator; import org.polypheny.db.plugins.PolyPluginManager; -import org.polypheny.db.prepare.PolyphenyDbCatalogReader; import org.polypheny.db.sql.language.SqlAggFunction; import org.polypheny.db.sql.language.SqlAsOperator; import org.polypheny.db.sql.language.SqlBinaryOperator; diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/SqlProcessorImpl.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/SqlProcessorImpl.java index 7c7e642cab..7ac29a4cf1 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/SqlProcessorImpl.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/SqlProcessorImpl.java @@ -37,13 +37,13 @@ import org.polypheny.db.algebra.constant.Kind; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.Snapshot; import org.polypheny.db.catalog.entity.CatalogDefaultValue; import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalTable; import 
org.polypheny.db.catalog.exceptions.UnknownSchemaException; import org.polypheny.db.catalog.exceptions.UnknownTableException; import org.polypheny.db.catalog.logistic.NamespaceType; +import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.config.RuntimeConfig; import org.polypheny.db.languages.NodeParseException; import org.polypheny.db.languages.NodeToAlgConverter; @@ -251,7 +251,7 @@ private void addDefaultValues( Transaction transaction, SqlInsert insert ) { if ( oldColumnList != null ) { LogicalTable catalogTable = getCatalogTable( transaction, (SqlIdentifier) insert.getTargetTable() ); - NamespaceType namespaceType = Catalog.getInstance().getSchema( catalogTable.namespaceId ).namespaceType; + NamespaceType namespaceType = Catalog.getInstance().getNamespace( catalogTable.namespaceId ).namespaceType; catalogTable = getCatalogTable( transaction, (SqlIdentifier) insert.getTargetTable() ); @@ -366,7 +366,7 @@ private LogicalTable getCatalogTable( Transaction transaction, SqlIdentifier tab long schemaId; String tableOldName; if ( tableName.names.size() == 3 ) { // DatabaseName.SchemaName.TableName - schemaId = Catalog.getInstance().getSchema( tableName.names.get( 1 ) ).id; + schemaId = Catalog.getInstance().getNamespace( tableName.names.get( 1 ) ).id; tableOldName = tableName.names.get( 2 ); } else if ( tableName.names.size() == 2 ) { // SchemaName.TableName schemaId = Catalog.getInstance().getSchema( transaction.getDefaultSchema().databaseId, tableName.names.get( 0 ) ).id; diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlDdl.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlDdl.java index 1fd64cfdba..854e6de21f 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlDdl.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlDdl.java @@ -72,7 +72,7 @@ protected LogicalTable getCatalogTable( Context context, SqlIdentifier tableName String tableOldName; Catalog catalog = Catalog.getInstance(); if ( tableName.names.size() == 3 ) { // DatabaseName.SchemaName.TableName - schemaId = catalog.getSchema( tableName.names.get( 1 ) ).id; + schemaId = catalog.getNamespace( tableName.names.get( 1 ) ).id; tableOldName = tableName.names.get( 2 ); } else if ( tableName.names.size() == 2 ) { // SchemaName.TableName schemaId = catalog.getSchema( context.getDatabaseId(), tableName.names.get( 0 ) ).id; diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlUtil.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlUtil.java index 83d29c8770..bcb5ecae9b 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlUtil.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlUtil.java @@ -41,9 +41,9 @@ import org.polypheny.db.algebra.operators.OperatorTable; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypePrecedenceList; -import org.polypheny.db.catalog.Snapshot; import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.languages.OperatorRegistry; import org.polypheny.db.languages.ParserPos; import org.polypheny.db.nodes.BasicNodeVisitor; diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/advise/SqlAdvisorValidator.java 
b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/advise/SqlAdvisorValidator.java index c78d5c42c4..32280f697f 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/advise/SqlAdvisorValidator.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/advise/SqlAdvisorValidator.java @@ -23,10 +23,9 @@ import org.polypheny.db.algebra.operators.OperatorTable; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; -import org.polypheny.db.catalog.Snapshot; +import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.languages.ParserPos; import org.polypheny.db.nodes.Node; -import org.polypheny.db.nodes.validate.ValidatorCatalogReader; import org.polypheny.db.nodes.validate.ValidatorScope; import org.polypheny.db.runtime.PolyphenyDbException; import org.polypheny.db.sql.language.SqlCall; diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateMaterializedView.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateMaterializedView.java index 30f759eb45..8e3961b27a 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateMaterializedView.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateMaterializedView.java @@ -122,7 +122,7 @@ public void execute( Context context, Statement statement, QueryParameters param try { if ( name.names.size() == 3 ) { // DatabaseName.SchemaName.TableName - schemaId = catalog.getSchema( name.names.get( 1 ) ).id; + schemaId = catalog.getNamespace( name.names.get( 1 ) ).id; viewName = name.names.get( 2 ); } else if ( name.names.size() == 2 ) { // SchemaName.TableName schemaId = catalog.getSchema( context.getDatabaseId(), name.names.get( 0 ) ).id; diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateTable.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateTable.java index b72ba0fc64..a999b0727f 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateTable.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateTable.java @@ -211,7 +211,7 @@ public void execute( Context context, Statement statement, QueryParameters param try { // Cannot use getLogicalTable() here since table does not yet exist if ( name.names.size() == 3 ) { // DatabaseName.SchemaName.TableName - schemaId = catalog.getSchema( name.names.get( 1 ) ).id; + schemaId = catalog.getNamespace( name.names.get( 1 ) ).id; tableName = name.names.get( 2 ); } else if ( name.names.size() == 2 ) { // SchemaName.TableName schemaId = catalog.getSchema( context.getDatabaseId(), name.names.get( 0 ) ).id; diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateView.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateView.java index b666b2ed78..9116684ef4 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateView.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateView.java @@ -105,7 +105,7 @@ public void execute( Context context, Statement statement, QueryParameters param try { if ( name.names.size() == 3 ) { // DatabaseName.SchemaName.TableName - schemaId = catalog.getSchema( name.names.get( 1 ) ).id; + schemaId = catalog.getNamespace( name.names.get( 1 ) ).id; viewName = name.names.get( 2 ); 
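            // The middle element of a three-part name identifies the namespace, now
            // resolved through getNamespace( String ); the last element is the view name.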
} else if ( name.names.size() == 2 ) { // SchemaName.TableName schemaId = catalog.getSchema( context.getDatabaseId(), name.names.get( 0 ) ).id; diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/util/SqlTypeUtil.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/util/SqlTypeUtil.java index b6eca3e722..f7335d02ff 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/util/SqlTypeUtil.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/util/SqlTypeUtil.java @@ -31,7 +31,7 @@ import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; import org.polypheny.db.algebra.type.AlgDataTypeFactoryImpl; -import org.polypheny.db.catalog.Snapshot; +import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.languages.NodeToAlgConverter; import org.polypheny.db.languages.NodeToAlgConverter.Config; import org.polypheny.db.languages.Parser; @@ -48,8 +48,6 @@ import org.polypheny.db.nodes.validate.Validator; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.prepare.JavaTypeFactoryImpl; -import org.polypheny.db.prepare.PolyphenyDbCatalogReader; -import org.polypheny.db.prepare.Prepare.CatalogReader; import org.polypheny.db.schema.AggregateFunction; import org.polypheny.db.schema.Function; import org.polypheny.db.schema.FunctionParameter; diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/PolyphenyDbSqlValidator.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/PolyphenyDbSqlValidator.java index 895e7deb1a..416e8297f6 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/PolyphenyDbSqlValidator.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/PolyphenyDbSqlValidator.java @@ -20,8 +20,7 @@ import org.polypheny.db.adapter.java.JavaTypeFactory; import org.polypheny.db.algebra.operators.OperatorTable; import org.polypheny.db.algebra.type.AlgDataType; -import org.polypheny.db.catalog.Snapshot; -import org.polypheny.db.prepare.PolyphenyDbCatalogReader; +import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.sql.language.SqlInsert; import org.polypheny.db.util.Conformance; diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidator.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidator.java index 5f48b7f87e..a2d9df6f98 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidator.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidator.java @@ -24,10 +24,9 @@ import org.polypheny.db.algebra.constant.NullCollation; import org.polypheny.db.algebra.operators.OperatorTable; import org.polypheny.db.algebra.type.AlgDataType; -import org.polypheny.db.catalog.Snapshot; +import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.nodes.Node; import org.polypheny.db.nodes.validate.Validator; -import org.polypheny.db.nodes.validate.ValidatorCatalogReader; import org.polypheny.db.runtime.PolyphenyDbException; import org.polypheny.db.sql.language.SqlCall; import org.polypheny.db.sql.language.SqlDataTypeSpec; diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorImpl.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorImpl.java index 
1529c8835a..f21f4e8985 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorImpl.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorImpl.java @@ -69,10 +69,9 @@ import org.polypheny.db.algebra.type.AlgDataTypeSystem; import org.polypheny.db.algebra.type.AlgRecordType; import org.polypheny.db.algebra.type.DynamicRecordType; -import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.Snapshot; import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.catalog.logistic.NamespaceType; +import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.config.RuntimeConfig; import org.polypheny.db.languages.OperatorRegistry; import org.polypheny.db.languages.ParserPos; @@ -91,10 +90,7 @@ import org.polypheny.db.nodes.validate.ValidatorException; import org.polypheny.db.nodes.validate.ValidatorNamespace; import org.polypheny.db.nodes.validate.ValidatorScope; -import org.polypheny.db.nodes.validate.ValidatorTable; import org.polypheny.db.plan.AlgOptEntity; -import org.polypheny.db.prepare.AlgOptEntityImpl; -import org.polypheny.db.prepare.Prepare; import org.polypheny.db.rex.RexBuilder; import org.polypheny.db.rex.RexNode; import org.polypheny.db.rex.RexPatternFieldRef; diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorUtil.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorUtil.java index 1dd550a143..3089b135bd 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorUtil.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorUtil.java @@ -38,15 +38,14 @@ import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.algebra.type.AlgDataTypeFieldImpl; import org.polypheny.db.algebra.type.AlgDataTypeSystem; -import org.polypheny.db.catalog.Snapshot; import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.catalog.entity.logical.LogicalCollection; import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.languages.OperatorRegistry; import org.polypheny.db.languages.ParserPos; import org.polypheny.db.nodes.Node; -import org.polypheny.db.nodes.validate.ValidatorTable; import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.schema.CustomColumnResolvingEntity; import org.polypheny.db.schema.Entity; diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/sql2alg/SqlToAlgConverter.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/sql2alg/SqlToAlgConverter.java index 5dfb6e5ae1..575f0ea611 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/sql2alg/SqlToAlgConverter.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/sql2alg/SqlToAlgConverter.java @@ -121,13 +121,13 @@ import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; import org.polypheny.db.algebra.type.AlgDataTypeField; -import org.polypheny.db.catalog.Snapshot; import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.catalog.refactor.ModifiableEntity; import 
org.polypheny.db.catalog.refactor.TranslatableEntity; +import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.languages.NodeToAlgConverter; import org.polypheny.db.languages.OperatorRegistry; import org.polypheny.db.languages.ParserPos; diff --git a/plugins/sql-language/src/test/java/org/polypheny/db/sql/FrameworksTest.java b/plugins/sql-language/src/test/java/org/polypheny/db/sql/FrameworksTest.java index ddf30628e3..5d07458561 100644 --- a/plugins/sql-language/src/test/java/org/polypheny/db/sql/FrameworksTest.java +++ b/plugins/sql-language/src/test/java/org/polypheny/db/sql/FrameworksTest.java @@ -51,9 +51,9 @@ import org.polypheny.db.algebra.type.AlgDataTypeFactory; import org.polypheny.db.algebra.type.AlgDataTypeSystem; import org.polypheny.db.algebra.type.AlgDataTypeSystemImpl; -import org.polypheny.db.catalog.Snapshot; -import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.catalog.entity.CatalogEntity; +import org.polypheny.db.catalog.logistic.NamespaceType; +import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.config.RuntimeConfig; import org.polypheny.db.languages.NodeParseException; import org.polypheny.db.languages.OperatorRegistry; diff --git a/plugins/sql-language/src/test/java/org/polypheny/db/sql/InterpreterTest.java b/plugins/sql-language/src/test/java/org/polypheny/db/sql/InterpreterTest.java index 7fa389a341..5f76828791 100644 --- a/plugins/sql-language/src/test/java/org/polypheny/db/sql/InterpreterTest.java +++ b/plugins/sql-language/src/test/java/org/polypheny/db/sql/InterpreterTest.java @@ -35,8 +35,8 @@ import org.polypheny.db.adapter.java.ReflectiveSchema; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.type.AlgDataType; -import org.polypheny.db.catalog.Snapshot; import org.polypheny.db.catalog.logistic.NamespaceType; +import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.interpreter.Interpreter; import org.polypheny.db.languages.Parser.ParserConfig; import org.polypheny.db.nodes.Node; diff --git a/plugins/sql-language/src/test/java/org/polypheny/db/sql/RexExecutorTest.java b/plugins/sql-language/src/test/java/org/polypheny/db/sql/RexExecutorTest.java index 061d3d7ce2..5e333581fc 100644 --- a/plugins/sql-language/src/test/java/org/polypheny/db/sql/RexExecutorTest.java +++ b/plugins/sql-language/src/test/java/org/polypheny/db/sql/RexExecutorTest.java @@ -42,7 +42,7 @@ import org.polypheny.db.algebra.operators.OperatorName; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; -import org.polypheny.db.catalog.Snapshot; +import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.languages.OperatorRegistry; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgOptSchema; diff --git a/plugins/sql-language/src/test/java/org/polypheny/db/sql/map/NamespaceToJsonMapperTest.java b/plugins/sql-language/src/test/java/org/polypheny/db/sql/map/NamespaceToJsonMapperTest.java index 1179e22426..ded8df2bb7 100644 --- a/plugins/sql-language/src/test/java/org/polypheny/db/sql/map/NamespaceToJsonMapperTest.java +++ b/plugins/sql-language/src/test/java/org/polypheny/db/sql/map/NamespaceToJsonMapperTest.java @@ -29,8 +29,8 @@ import org.polypheny.db.catalog.entity.CatalogDefaultValue; import org.polypheny.db.catalog.entity.CatalogKey; import org.polypheny.db.catalog.entity.CatalogKey.EnforcementTime; -import org.polypheny.db.catalog.entity.CatalogSchema; import 
org.polypheny.db.catalog.entity.CatalogUser; +import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.logistic.EntityType; @@ -72,7 +72,7 @@ public void exportTest() { ); - new CatalogSchema( 1, "public", 1, 1, "hans", NamespaceType.RELATIONAL, false ); + new LogicalNamespace( 1, "public", 1, 1, "hans", NamespaceType.RELATIONAL, false ); new CatalogDatabase( 1, "APP", 1, "hans", 1L, "public" ); new CatalogUser( 1, "hans", "secrete" ); new HashMap<>(); diff --git a/webui/src/main/java/org/polypheny/db/webui/Crud.java b/webui/src/main/java/org/polypheny/db/webui/Crud.java index fe7dc48e49..7daff61a3f 100644 --- a/webui/src/main/java/org/polypheny/db/webui/Crud.java +++ b/webui/src/main/java/org/polypheny/db/webui/Crud.java @@ -107,8 +107,8 @@ import org.polypheny.db.catalog.entity.CatalogIndex; import org.polypheny.db.catalog.entity.CatalogMaterializedView; import org.polypheny.db.catalog.entity.CatalogPrimaryKey; -import org.polypheny.db.catalog.entity.CatalogSchema; import org.polypheny.db.catalog.entity.CatalogView; +import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.MaterializedCriteria; import org.polypheny.db.catalog.entity.MaterializedCriteria.CriteriaType; import org.polypheny.db.catalog.entity.logical.LogicalColumn; @@ -376,10 +376,10 @@ void getSchemaTree( final Context ctx ) { ctx.json( new ArrayList<>() ); } - List schemas = catalog.getSchemas( databaseId, null ); + List schemas = catalog.getNamespaces( databaseId, null ); // remove unwanted namespaces schemas = schemas.stream().filter( s -> request.dataModels.contains( s.namespaceType ) ).collect( Collectors.toList() ); - for ( CatalogSchema schema : schemas ) { + for ( LogicalNamespace schema : schemas ) { SidebarElement schemaTree = new SidebarElement( schema.name, schema.name, schema.namespaceType, "", getIconName( schema.namespaceType ) ); if ( request.depth > 1 && schema.namespaceType != NamespaceType.GRAPH ) { @@ -460,7 +460,7 @@ void getTables( final Context ctx ) { if ( request.schema != null ) { requestedSchema = request.schema; } else { - requestedSchema = catalog.getSchema( schemaId ).name; + requestedSchema = catalog.getNamespace( schemaId ).name; } try { @@ -2896,7 +2896,7 @@ void schemaRequest( final Context ctx ) { // drop schema else if ( !schema.isCreate() && schema.isDrop() ) { if ( type == null ) { - List namespaces = catalog.getSchemas( Catalog.defaultDatabaseId, new org.polypheny.db.catalog.logistic.Pattern( schema.getName() ) ); + List namespaces = catalog.getNamespaces( Catalog.defaultDatabaseId, new org.polypheny.db.catalog.logistic.Pattern( schema.getName() ) ); assert namespaces.size() == 1; type = namespaces.get( 0 ).namespaceType; @@ -3619,9 +3619,9 @@ private Map getCatalogColumns( String schemaName, String void getTypeSchemas( final Context ctx ) { ctx.json( catalog - .getSchemas( 1, null ) + .getNamespaces( 1, null ) .stream() - .collect( Collectors.toMap( CatalogSchema::getName, CatalogSchema::getNamespaceType ) ) ); + .collect( Collectors.toMap( LogicalNamespace::getName, LogicalNamespace::getNamespaceType ) ) ); } diff --git a/webui/src/main/java/org/polypheny/db/webui/WebSocket.java b/webui/src/main/java/org/polypheny/db/webui/WebSocket.java index 3b9e71b8af..607a903802 100644 --- a/webui/src/main/java/org/polypheny/db/webui/WebSocket.java +++ 
b/webui/src/main/java/org/polypheny/db/webui/WebSocket.java @@ -34,7 +34,7 @@ import org.eclipse.jetty.websocket.api.Session; import org.eclipse.jetty.websocket.api.annotations.OnWebSocketMessage; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.entity.CatalogSchema; +import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.languages.QueryLanguage; import org.polypheny.db.schema.graph.PolyGraph; import org.polypheny.db.webui.crud.LanguageCrud; @@ -146,7 +146,7 @@ public void onMessage( final WsMessageContext ctx ) { } else {//TableRequest, is equal to UIRequest UIRequest uiRequest = ctx.messageAsClass( UIRequest.class ); try { - CatalogSchema namespace = Catalog.getInstance().getSchema( Catalog.defaultDatabaseId, uiRequest.getSchemaName() ); + LogicalNamespace namespace = Catalog.getInstance().getSchema( Catalog.defaultDatabaseId, uiRequest.getSchemaName() ); switch ( namespace.namespaceType ) { case RELATIONAL: result = crud.getTable( uiRequest ); diff --git a/webui/src/main/java/org/polypheny/db/webui/crud/LanguageCrud.java b/webui/src/main/java/org/polypheny/db/webui/crud/LanguageCrud.java index b7b7287cf1..3c10285a7d 100644 --- a/webui/src/main/java/org/polypheny/db/webui/crud/LanguageCrud.java +++ b/webui/src/main/java/org/polypheny/db/webui/crud/LanguageCrud.java @@ -36,7 +36,7 @@ import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogCollectionPlacement; import org.polypheny.db.catalog.entity.CatalogGraphPlacement; -import org.polypheny.db.catalog.entity.CatalogSchema; +import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalCollection; import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalGraph; @@ -306,9 +306,9 @@ public void createCollection( final Context ctx ) { */ public void getDocumentDatabases( final Context ctx ) { Map names = Catalog.getInstance() - .getSchemas( Catalog.defaultDatabaseId, null ) + .getNamespaces( Catalog.defaultDatabaseId, null ) .stream() - .collect( Collectors.toMap( CatalogSchema::getName, s -> s.namespaceType.name() ) ); + .collect( Collectors.toMap( LogicalNamespace::getName, s -> s.namespaceType.name() ) ); String[][] data = names.entrySet().stream().map( n -> new String[]{ n.getKey(), n.getValue() } ).toArray( String[][]::new ); ctx.json( new Result( new DbColumn[]{ new DbColumn( "Database/Schema" ), new DbColumn( "Type" ) }, data ) ); From 8944d17fecca872a5c829a9fc20b860895671b64 Mon Sep 17 00:00:00 2001 From: datomo Date: Wed, 1 Mar 2023 17:57:20 +0100 Subject: [PATCH 031/436] started transforming DdlManagerImpl to new catalog structure --- .../org/polypheny/db/adapter/Adapter.java | 29 +- .../polypheny/db/adapter/AdapterManager.java | 12 +- .../db/adapter/index/IndexManager.java | 13 +- .../org/polypheny/db/catalog/Catalog.java | 32 + .../catalog/catalogs/AllocationCatalog.java | 5 + .../catalogs/AllocationDocumentCatalog.java | 3 + .../catalogs/AllocationRelationalCatalog.java | 68 +- .../db/catalog/catalogs/LogicalCatalog.java | 3 +- .../catalogs/LogicalRelationalCatalog.java | 7 +- .../db/catalog/catalogs/PhysicalCatalog.java | 5 + .../db/catalog/entity/CatalogIndex.java | 19 +- .../catalog/entity/allocation/Allocation.java | 27 - .../allocation/AllocationCollection.java | 7 +- .../entity/allocation/AllocationEntity.java | 47 + .../entity/allocation/AllocationGraph.java | 7 +- .../entity/allocation/AllocationTable.java | 7 +- 
.../entity/logical/LogicalCollection.java | 2 +- .../{Logical.java => LogicalEntity.java} | 23 +- .../catalog/entity/logical/LogicalGraph.java | 2 +- .../catalog/entity/logical/LogicalTable.java | 69 +- .../entity/physical/PhysicalCollection.java | 8 +- .../{Physical.java => PhysicalEntity.java} | 16 +- .../entity/physical/PhysicalGraph.java | 8 +- .../entity/physical/PhysicalTable.java | 6 +- .../java/org/polypheny/db/ddl/DdlManager.java | 33 +- .../polypheny/db/docker/DockerInstance.java | 2 +- .../polypheny/db/docker/DockerManager.java | 2 +- .../db/docker/DockerManagerImpl.java | 2 +- .../org/polypheny/db/catalog/MockCatalog.java | 1100 +---------------- .../org/polypheny/db/ddl/DdlManagerImpl.java | 286 ++--- .../db/misc/VerticalPartitioningTest.java | 4 +- .../polypheny/db/adapter/csv/CsvSource.java | 10 +- .../org/polypheny/db/catalog/CatalogImpl.java | 3 +- .../db/catalog/CatalogImplBackup.java | 3 +- .../org/polypheny/db/catalog/PolyCatalog.java | 120 +- .../logical/document/DocumentCatalog.java | 2 +- .../catalog/logical/graph/GraphCatalog.java | 2 +- .../logical/relational/RelationalCatalog.java | 8 +- .../ddl/alterschema/SqlAlterSchemaOwner.java | 15 +- .../ddl/alterschema/SqlAlterSchemaRename.java | 2 +- 40 files changed, 544 insertions(+), 1475 deletions(-) delete mode 100644 core/src/main/java/org/polypheny/db/catalog/entity/allocation/Allocation.java create mode 100644 core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationEntity.java rename core/src/main/java/org/polypheny/db/catalog/entity/logical/{Logical.java => LogicalEntity.java} (51%) rename core/src/main/java/org/polypheny/db/catalog/entity/physical/{Physical.java => PhysicalEntity.java} (52%) diff --git a/core/src/main/java/org/polypheny/db/adapter/Adapter.java b/core/src/main/java/org/polypheny/db/adapter/Adapter.java index cd6170d3d5..83d3e55ffa 100644 --- a/core/src/main/java/org/polypheny/db/adapter/Adapter.java +++ b/core/src/main/java/org/polypheny/db/adapter/Adapter.java @@ -48,10 +48,10 @@ import org.polypheny.db.adapter.DeployMode.DeploySetting; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogCollectionPlacement; -import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; import org.polypheny.db.catalog.entity.allocation.AllocationTable; import org.polypheny.db.catalog.entity.logical.LogicalCollection; import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.catalog.entity.physical.PhysicalEntity; import org.polypheny.db.catalog.entity.physical.PhysicalGraph; import org.polypheny.db.catalog.entity.physical.PhysicalTable; import org.polypheny.db.catalog.logistic.NamespaceType; @@ -279,7 +279,7 @@ public abstract class Adapter { @Getter - private final int adapterId; + private final long adapterId; @Getter private final String uniqueName; @@ -486,15 +486,22 @@ public void addInformationPhysicalNames() { Catalog catalog = Catalog.getInstance(); group.setRefreshFunction( () -> { physicalColumnNames.reset(); - List cpps = catalog.getPartitionPlacementsByAdapter( adapterId ); - cpps.forEach( cpp -> - catalog.getColumnPlacementsOnAdapterPerTable( adapterId, cpp.tableId ).forEach( placement -> { - physicalColumnNames.addRow( - placement.columnId, - catalog.getColumn( placement.columnId ).name, - cpp.physicalSchemaName + "." + cpp.physicalTableName + "." 
+ placement.physicalColumnName ); - } ) - ); + List> physicalsOnAdapter = catalog.getPhysicalsOnAdapter( adapterId ); + + for ( PhysicalEntity entity : physicalsOnAdapter ) { + if ( entity.namespaceType != NamespaceType.RELATIONAL ) { + continue; + } + PhysicalTable physicalTable = (PhysicalTable) entity; + int i = 0; + for ( long columnId : physicalTable.columnIds ) { + physicalColumnNames.addRow( + columnId, + physicalTable.logical.getColumnNames().get( i ), + physicalTable.namespaceName + "." + physicalTable.name + "." + physicalTable.getColumnNames().get( i ) ); + i++; + } + } } ); informationGroups.add( group ); diff --git a/core/src/main/java/org/polypheny/db/adapter/AdapterManager.java b/core/src/main/java/org/polypheny/db/adapter/AdapterManager.java index 36ff639e56..ef1181780c 100644 --- a/core/src/main/java/org/polypheny/db/adapter/AdapterManager.java +++ b/core/src/main/java/org/polypheny/db/adapter/AdapterManager.java @@ -40,13 +40,13 @@ import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogAdapter; import org.polypheny.db.catalog.entity.CatalogAdapter.AdapterType; -import org.polypheny.db.catalog.entity.CatalogColumnPlacement; +import org.polypheny.db.catalog.entity.allocation.AllocationEntity; import org.polypheny.db.config.ConfigDocker; import org.polypheny.db.config.RuntimeConfig; public class AdapterManager { - private final Map adapterById = new HashMap<>(); + private final Map adapterById = new HashMap<>(); private final Map adapterByName = new HashMap<>(); @@ -69,7 +69,7 @@ public Adapter getAdapter( String uniqueName ) { } - public Adapter getAdapter( int id ) { + public Adapter getAdapter( long id ) { return adapterById.get( id ); } @@ -88,7 +88,7 @@ public DataStore getStore( String uniqueName ) { } - public DataStore getStore( int id ) { + public DataStore getStore( long id ) { Adapter adapter = getAdapter( id ); if ( adapter instanceof DataStore ) { return (DataStore) adapter; @@ -236,7 +236,7 @@ public Adapter addAdapter( String adapterName, String uniqueName, AdapterType ad } - public void removeAdapter( int adapterId ) { + public void removeAdapter( long adapterId ) { Adapter adapterInstance = getAdapter( adapterId ); if ( adapterInstance == null ) { throw new RuntimeException( "Unknown adapter instance with id: " + adapterId ); @@ -244,7 +244,7 @@ public void removeAdapter( int adapterId ) { CatalogAdapter catalogAdapter = Catalog.getInstance().getAdapter( adapterId ); // Check if the store has any placements - List placements = Catalog.getInstance().getColumnPlacementsOnAdapter( catalogAdapter.id ); + List> placements = Catalog.getInstance().getAllocationsOnAdapter( catalogAdapter.id ); if ( placements.size() != 0 ) { throw new RuntimeException( "There is still data placed on this data store" ); } diff --git a/core/src/main/java/org/polypheny/db/adapter/index/IndexManager.java b/core/src/main/java/org/polypheny/db/adapter/index/IndexManager.java index 5a2342c37c..4835821fde 100644 --- a/core/src/main/java/org/polypheny/db/adapter/index/IndexManager.java +++ b/core/src/main/java/org/polypheny/db/adapter/index/IndexManager.java @@ -36,7 +36,6 @@ import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.exceptions.GenericCatalogException; -import org.polypheny.db.catalog.exceptions.UnknownDatabaseException; import org.polypheny.db.catalog.exceptions.UnknownKeyException; import org.polypheny.db.catalog.exceptions.UnknownSchemaException; 
import org.polypheny.db.catalog.exceptions.UnknownTableException; @@ -149,7 +148,7 @@ public void initialize( final TransactionManager transactionManager ) { } - public void restoreIndexes() throws UnknownSchemaException, GenericCatalogException, UnknownTableException, UnknownKeyException, UnknownDatabaseException, UnknownUserException, TransactionException { + public void restoreIndexes() throws UnknownSchemaException, GenericCatalogException, UnknownTableException, UnknownKeyException, UnknownUserException, TransactionException { for ( final CatalogIndex index : Catalog.getInstance().getIndexes() ) { if ( index.location == 0 ) { addIndex( index ); @@ -158,24 +157,24 @@ public void restoreIndexes() throws UnknownSchemaException, GenericCatalogExcept } - public void addIndex( final CatalogIndex index ) throws UnknownSchemaException, GenericCatalogException, UnknownTableException, UnknownKeyException, UnknownUserException, UnknownDatabaseException, TransactionException { + public void addIndex( final CatalogIndex index ) throws UnknownSchemaException, GenericCatalogException, UnknownTableException, UnknownKeyException, UnknownUserException, TransactionException { addIndex( index, null ); } - public void addIndex( final CatalogIndex index, final Statement statement ) throws UnknownSchemaException, GenericCatalogException, UnknownTableException, UnknownKeyException, UnknownUserException, UnknownDatabaseException, TransactionException { + public void addIndex( final CatalogIndex index, final Statement statement ) throws UnknownSchemaException, GenericCatalogException, UnknownTableException, UnknownKeyException, UnknownUserException, TransactionException { // TODO(s3lph): persistent addIndex( index.id, index.name, index.key, index.method, index.unique, null, statement ); } - protected void addIndex( final long id, final String name, final CatalogKey key, final String method, final Boolean unique, final Boolean persistent, final Statement statement ) throws UnknownSchemaException, GenericCatalogException, UnknownDatabaseException, UnknownUserException, TransactionException { + protected void addIndex( final long id, final String name, final CatalogKey key, final String method, final Boolean unique, final Boolean persistent, final Statement statement ) throws UnknownSchemaException, GenericCatalogException, UnknownUserException, TransactionException { final IndexFactory factory = INDEX_FACTORIES.stream() .filter( it -> it.canProvide( method, unique, persistent ) ) .findFirst() .orElseThrow( IllegalArgumentException::new ); - final LogicalTable table = Catalog.getInstance().getTable( key.tableId ); - final CatalogPrimaryKey pk = Catalog.getInstance().getPrimaryKey( table.primaryKey ); + final LogicalTable table = Catalog.getInstance().getLogicalRel( key.schemaId ).getTable( key.tableId ); + final CatalogPrimaryKey pk = Catalog.getInstance().getLogicalRel( key.schemaId ).getPrimaryKey( table.primaryKey ); final Index index = factory.create( id, name, diff --git a/core/src/main/java/org/polypheny/db/catalog/Catalog.java b/core/src/main/java/org/polypheny/db/catalog/Catalog.java index 3d46af20e0..020877d289 100644 --- a/core/src/main/java/org/polypheny/db/catalog/Catalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/Catalog.java @@ -26,11 +26,20 @@ import org.apache.calcite.linq4j.tree.Expressions; import org.pf4j.ExtensionPoint; import org.polypheny.db.algebra.AlgNode; +import org.polypheny.db.catalog.catalogs.AllocationDocumentCatalog; +import 
org.polypheny.db.catalog.catalogs.AllocationGraphCatalog; +import org.polypheny.db.catalog.catalogs.AllocationRelationalCatalog; +import org.polypheny.db.catalog.catalogs.LogicalDocumentCatalog; +import org.polypheny.db.catalog.catalogs.LogicalGraphCatalog; +import org.polypheny.db.catalog.catalogs.LogicalRelationalCatalog; import org.polypheny.db.catalog.entity.CatalogAdapter; import org.polypheny.db.catalog.entity.CatalogAdapter.AdapterType; +import org.polypheny.db.catalog.entity.CatalogIndex; import org.polypheny.db.catalog.entity.CatalogQueryInterface; import org.polypheny.db.catalog.entity.CatalogUser; import org.polypheny.db.catalog.entity.LogicalNamespace; +import org.polypheny.db.catalog.entity.allocation.AllocationEntity; +import org.polypheny.db.catalog.entity.physical.PhysicalEntity; import org.polypheny.db.catalog.exceptions.NoTablePrimaryKeyException; import org.polypheny.db.catalog.exceptions.UnknownAdapterException; import org.polypheny.db.catalog.exceptions.UnknownQueryInterfaceException; @@ -79,6 +88,18 @@ public static Catalog getInstance() { public abstract void rollback(); + public abstract LogicalRelationalCatalog getLogicalRel( long id ); + + public abstract LogicalDocumentCatalog getLogicalDoc( long id ); + + public abstract LogicalGraphCatalog getLogicalGraph( long id ); + + + public abstract AllocationRelationalCatalog getAllocRel( long id ); + + public abstract AllocationDocumentCatalog getAllocDoc( long id ); + + public abstract AllocationGraphCatalog getAllocGraph( long id ); public abstract Map getNodeInfo(); @@ -323,4 +344,15 @@ protected final boolean isValidIdentifier( final String str ) { public abstract Snapshot getSnapshot( long id ); + //// todo move into snapshot + + + public abstract List> getAllocationsOnAdapter( long id ); + + + public abstract List> getPhysicalsOnAdapter( long adapterId ); + + + public abstract List getIndexes(); + } diff --git a/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationCatalog.java b/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationCatalog.java index b6f8274e17..3163b8731a 100644 --- a/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationCatalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationCatalog.java @@ -16,6 +16,11 @@ package org.polypheny.db.catalog.catalogs; +import java.util.List; +import org.polypheny.db.catalog.entity.allocation.AllocationEntity; + public interface AllocationCatalog { + List> getAllocationsOnAdapter( long id ); + } diff --git a/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationDocumentCatalog.java b/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationDocumentCatalog.java index 7874198a8a..1f6005d28b 100644 --- a/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationDocumentCatalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationDocumentCatalog.java @@ -18,6 +18,7 @@ import java.util.List; import org.polypheny.db.adapter.DataStore; +import org.polypheny.db.catalog.entity.CatalogCollectionPlacement; import org.polypheny.db.catalog.exceptions.GenericCatalogException; public interface AllocationDocumentCatalog extends AllocationCatalog { @@ -33,4 +34,6 @@ public interface AllocationDocumentCatalog extends AllocationCatalog { */ public abstract long addCollectionLogistics( long schemaId, String name, List stores, boolean onlyPlacement ) throws GenericCatalogException; + List getCollectionPlacementsByAdapter( long id ); + } diff --git 
a/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationRelationalCatalog.java b/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationRelationalCatalog.java index 8d6141a0dd..fe4dd42af3 100644 --- a/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationRelationalCatalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationRelationalCatalog.java @@ -45,7 +45,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param physicalTableName The table name on the adapter * @param physicalColumnName The column name on the adapter */ - public abstract void addColumnPlacement( int adapterId, long columnId, PlacementType placementType, String physicalSchemaName, String physicalTableName, String physicalColumnName ); + public abstract void addColumnPlacement( long adapterId, long columnId, PlacementType placementType, String physicalSchemaName, String physicalTableName, String physicalColumnName ); /** * Deletes all dependent column placements * * @param adapterId The id of the adapter * @param columnId The id of the column * @param columnOnly If the delete originates from a dropColumn */ - public abstract void deleteColumnPlacement( int adapterId, long columnId, boolean columnOnly ); + public abstract void deleteColumnPlacement( long adapterId, long columnId, boolean columnOnly ); /** * Gets a collective list of column placements per column on an adapter. @@ -64,7 +64,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param columnId The id of the column * @return The specific column placement */ - public abstract CatalogColumnPlacement getColumnPlacement( int adapterId, long columnId ); + public abstract CatalogColumnPlacement getColumnPlacement( long adapterId, long columnId ); /** * Checks if there is a placement of the specified column on the specified adapter. * * @param adapterId The id of the adapter * @param columnId The id of the column * @return true if there is a column placement, false if not. */ - public abstract boolean checkIfExistsColumnPlacement( int adapterId, long columnId ); + public abstract boolean checkIfExistsColumnPlacement( long adapterId, long columnId ); /** * Get all column placements of a column @@ -99,7 +99,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param adapterId The id of the adapter * @return List of column placements on the specified adapter */ - public abstract List<CatalogColumnPlacement> getColumnPlacementsOnAdapter( int adapterId ); + public abstract List<CatalogColumnPlacement> getColumnPlacementsOnAdapter( long adapterId ); /** * Gets a collection of column placements for a given column. @@ -117,14 +117,6 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { */ public abstract ImmutableMap> getColumnPlacementsByAdapter( long tableId ); - /** - * Gets a map of partition placements sorted by adapter. - * - * @param tableId The id of the table for which the partitions are returned - * @return The sorted partitions placements - */ - public abstract ImmutableMap> getPartitionPlacementsByAdapter( long tableId ); - /** * Gets the partition group sorted by partition. 
* @@ -140,7 +132,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param schemaId The id of the schema * @return List of column placements on this adapter and schema */ - public abstract List getColumnPlacementsOnAdapterAndSchema( int adapterId, long schemaId ); + public abstract List getColumnPlacementsOnAdapterAndSchema( long adapterId, long schemaId ); /** * Update the type of a placement. @@ -149,7 +141,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param columnId The id of the column * @param placementType The new type of placement */ - public abstract void updateColumnPlacementType( int adapterId, long columnId, PlacementType placementType ); + public abstract void updateColumnPlacementType( long adapterId, long columnId, PlacementType placementType ); /** * Update physical position of a column placement on a specified adapter. @@ -158,7 +150,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param columnId The id of the column * @param position The physical position to set */ - public abstract void updateColumnPlacementPhysicalPosition( int adapterId, long columnId, long position ); + public abstract void updateColumnPlacementPhysicalPosition( long adapterId, long columnId, long position ); /** * Update physical position of a column placement on a specified adapter. Uses auto-increment to get the globally increasing number. @@ -166,7 +158,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param adapterId The id of the adapter * @param columnId The id of the column */ - public abstract void updateColumnPlacementPhysicalPosition( int adapterId, long columnId ); + public abstract void updateColumnPlacementPhysicalPosition( long adapterId, long columnId ); /** * Change physical names of all column placements. 
@@ -177,7 +169,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param physicalColumnName The physical column name * @param updatePhysicalColumnPosition Whether to reset the column position (the highest number in the table; represents that the column is now at the last position) */ - public abstract void updateColumnPlacementPhysicalNames( int adapterId, long columnId, String physicalSchemaName, String physicalColumnName, boolean updatePhysicalColumnPosition ); + public abstract void updateColumnPlacementPhysicalNames( long adapterId, long columnId, String physicalSchemaName, String physicalColumnName, boolean updatePhysicalColumnPosition ); /** @@ -373,7 +365,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param tableId The unique id of the table * @return List of partitionIds */ - public abstract List getPartitionGroupsOnDataPlacement( int adapterId, long tableId ); + public abstract List getPartitionGroupsOnDataPlacement( long adapterId, long tableId ); /** * Get all partitions of a DataPlacement (identified by adapterId and tableId) @@ -382,7 +374,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param tableId The unique id of the table * @return List of partitionIds */ - public abstract List getPartitionsOnDataPlacement( int adapterId, long tableId ); + public abstract List getPartitionsOnDataPlacement( long adapterId, long tableId ); /** * Returns list with the index of the partitions on this store from 0..numPartitions @@ -400,7 +392,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param tableId table to retrieve the placement from * @return DataPlacement of a table placed on a specific store */ - public abstract CatalogDataPlacement getDataPlacement( int adapterId, long tableId ); + public abstract CatalogDataPlacement getDataPlacement( long adapterId, long tableId ); /** * Returns all DataPlacements of a given table. @@ -486,7 +478,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param physicalSchemaName The schema name on the adapter * @param physicalTableName The table name on the adapter */ - public abstract void addPartitionPlacement( long namespaceId, int adapterId, long tableId, long partitionId, PlacementType placementType, String physicalSchemaName, String physicalTableName, DataPlacementRole role ); + public abstract void addPartitionPlacement( long namespaceId, long adapterId, long tableId, long partitionId, PlacementType placementType, String physicalSchemaName, String physicalTableName, DataPlacementRole role ); /** * Adds a new DataPlacement for a given table on a specific store @@ -494,7 +486,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param adapterId adapter where placement should be located * @param tableId table to retrieve the placement from */ - public abstract void addDataPlacement( int adapterId, long tableId ); + public abstract void addDataPlacement( long adapterId, long tableId ); /** * Adds a new DataPlacement for a given table on a specific store. 
@@ -504,7 +496,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param tableId table to retrieve the placement from * @return DataPlacement of a table placed on a specific store */ - public abstract CatalogDataPlacement addDataPlacementIfNotExists( int adapterId, long tableId ); + public abstract CatalogDataPlacement addDataPlacementIfNotExists( long adapterId, long tableId ); /** * Modifies a specific DataPlacement of a given table. @@ -513,7 +505,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param tableId table to retrieve the placement from * @param catalogDataPlacement new dataPlacement to be written */ - abstract void modifyDataPlacement( int adapterId, long tableId, CatalogDataPlacement catalogDataPlacement ); + abstract void modifyDataPlacement( long adapterId, long tableId, CatalogDataPlacement catalogDataPlacement ); /** @@ -522,7 +514,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param adapterId adapter where placement should be removed from * @param tableId table to retrieve the placement from */ - public abstract void removeDataPlacement( int adapterId, long tableId ); + public abstract void removeDataPlacement( long adapterId, long tableId ); /** * Adds a single dataPlacement on a store for a specific table * * @param adapterId adapter id corresponding to a new DataPlacements * @param tableId table to be updated */ - abstract void addSingleDataPlacementToTable( Integer adapterId, long tableId ); + abstract void addSingleDataPlacementToTable( long adapterId, long tableId ); /** * Removes a single dataPlacement from a store for a specific table * * @param adapterId adapter id corresponding to a new DataPlacements * @param tableId table to be updated */ - abstract void removeSingleDataPlacementFromTable( Integer adapterId, long tableId ); + abstract void removeSingleDataPlacementFromTable( long adapterId, long tableId ); /** * Updates the list of data placements on a table @@ -555,7 +547,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param tableId table to be updated * @param columnIds List of columnIds to add to a specific store for the table */ - abstract void addColumnsToDataPlacement( int adapterId, long tableId, List<Long> columnIds ); + abstract void addColumnsToDataPlacement( long adapterId, long tableId, List<Long> columnIds ); /** * Remove columns from dataPlacement on a store for a specific table * * @param adapterId adapter to be updated * @param tableId table to be updated * @param columnIds List of columnIds to remove from a specific store for the table */ - abstract void removeColumnsFromDataPlacement( int adapterId, long tableId, List<Long> columnIds ); + abstract void removeColumnsFromDataPlacement( long adapterId, long tableId, List<Long> columnIds ); /** * Adds partitions to dataPlacement on a store for a specific table * * @param adapterId adapter to be updated * @param tableId table to be updated * @param partitionIds List of partitionIds to add to a specific store for the table */ - abstract void addPartitionsToDataPlacement( int adapterId, long tableId, List<Long> partitionIds ); + abstract void addPartitionsToDataPlacement( long adapterId, long tableId, List<Long> partitionIds ); /** * 
Remove partitions from dataPlacement on a store for a specific table @@ -582,7 +574,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param tableId table to be updated * @param partitionIds List of partitionIds to remove from a specific store for the table */ - abstract void removePartitionsFromDataPlacement( int adapterId, long tableId, List<Long> partitionIds ); + abstract void removePartitionsFromDataPlacement( long adapterId, long tableId, List<Long> partitionIds ); /** * Updates and overrides list of associated columnPlacements {@code &} partitionPlacements for a given data placement @@ -592,7 +584,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param columnIds List of columnIds to be located on a specific store for the table * @param partitionIds List of partitionIds to be located on a specific store for the table */ - public abstract void updateDataPlacement( int adapterId, long tableId, List<Long> columnIds, List<Long> partitionIds ); + public abstract void updateDataPlacement( long adapterId, long tableId, List<Long> columnIds, List<Long> partitionIds ); /** @@ -601,7 +593,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param adapterId The adapter on which the table should be placed * @param partitionId The id of a partition which shall be removed from that store. */ - public abstract void deletePartitionPlacement( int adapterId, long partitionId ); + public abstract void deletePartitionPlacement( long adapterId, long partitionId ); /** * Returns a specific partition entity which is placed on a store. @@ -610,7 +602,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param partitionId The id of the requested partition * @return The requested PartitionPlacement on that store for a given id */ - public abstract CatalogPartitionPlacement getPartitionPlacement( int adapterId, long partitionId ); + public abstract CatalogPartitionPlacement getPartitionPlacement( long adapterId, long partitionId ); /** * Returns a list of all Partition Placements which currently reside on an adapter, regardless of the table. * * @param adapterId The adapter on which the requested partition placements reside * @return A list of all Partition Placements that are currently located on that specific store */ - public abstract List<CatalogPartitionPlacement> getPartitionPlacementsByAdapter( int adapterId ); + public abstract List<CatalogPartitionPlacement> getPartitionPlacementsByAdapter( long adapterId ); /** * Returns a list of all Partition Placements which currently reside on an adapter, for a specific table. * * @param adapterId The adapter on which the requested partition placements reside * @param tableId The table for which all partition placements on an adapter should be considered * @return A list of all Partition Placements that are currently located on that specific store for an individual table */ - public abstract List<CatalogPartitionPlacement> getPartitionPlacementsByTableOnAdapter( int adapterId, long tableId ); + public abstract List<CatalogPartitionPlacement> getPartitionPlacementsByTableOnAdapter( long adapterId, long tableId ); /** * Returns a list of all Partition Placements which are currently associated with a table. 
@@ -667,7 +659,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param partitionId The partition to check * @return the response of the probe */ - public abstract boolean checkIfExistsPartitionPlacement( int adapterId, long partitionId ); + public abstract boolean checkIfExistsPartitionPlacement( long adapterId, long partitionId ); } diff --git a/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalCatalog.java b/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalCatalog.java index 3754eca65a..e160ecde2e 100644 --- a/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalCatalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalCatalog.java @@ -23,11 +23,10 @@ public interface LogicalCatalog { /** * Checks if there is a table with the specified name in the specified schema. * - * @param namespaceId The id of the schema * @param entityName The name to check for * @return true if there is a table with this name, false if not. */ - public abstract boolean checkIfExistsEntity( long namespaceId, String entityName ); + public abstract boolean checkIfExistsEntity( String entityName ); /** * Checks if there is a table with the specified id. diff --git a/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalRelationalCatalog.java b/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalRelationalCatalog.java index e4a35aa550..c171db0581 100644 --- a/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalRelationalCatalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalRelationalCatalog.java @@ -108,13 +108,12 @@ public interface LogicalRelationalCatalog extends LogicalCatalog { * Adds a table to a specified schema. * * @param name The name of the table to add - * @param namespaceId The id of the schema * @param ownerId The id of the owner * @param entityType The table type * @param modifiable Whether the content of the table can be modified * @return The id of the inserted table */ - public abstract long addTable( String name, long namespaceId, int ownerId, EntityType entityType, boolean modifiable ); + public abstract long addTable( String name, int ownerId, EntityType entityType, boolean modifiable ); /** @@ -588,12 +587,12 @@ public interface LogicalRelationalCatalog extends LogicalCatalog { * @param unique Whether the index is unique * @param method Name of the index method (e.g. btree_unique) * @param methodDisplayName Display name of the index method (e.g. BTREE) - * @param location ID of the data store where the index is located (0 for Polypheny-DB itself) + * @param adapterId ID of the data store where the index is located (0 for Polypheny-DB itself) * @param type The type of index (manual, automatic) * @param indexName The name of the index * @return The id of the created index */ - public abstract long addIndex( long tableId, List<Long> columnIds, boolean unique, String method, String methodDisplayName, int location, IndexType type, String indexName ) throws GenericCatalogException; + public abstract long addIndex( long tableId, List<Long> columnIds, boolean unique, String method, String methodDisplayName, long adapterId, IndexType type, String indexName ) throws GenericCatalogException; /** * Set physical index name. 
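The two catalog diffs above capture the core of this refactoring: entity lookups move off the flat Catalog API onto namespace-scoped sub-catalogs, and adapter ids widen from int to long. As a rough illustration (not part of the patch itself), a caller migrates along the lines of the IndexManager hunk earlier in this series; here key is a CatalogKey, and all names follow the diffs above:

    Catalog catalog = Catalog.getInstance();
    // Old flat lookup, removed by this refactoring:
    // LogicalTable table = catalog.getTable( key.tableId );
    // New lookup: first resolve the relational logical catalog of the namespace,
    // then query the entity through it. Adapter ids are handled as long throughout.
    LogicalTable table = catalog.getLogicalRel( key.schemaId ).getTable( key.tableId );
    CatalogPrimaryKey pk = catalog.getLogicalRel( key.schemaId ).getPrimaryKey( table.primaryKey );
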
diff --git a/core/src/main/java/org/polypheny/db/catalog/catalogs/PhysicalCatalog.java b/core/src/main/java/org/polypheny/db/catalog/catalogs/PhysicalCatalog.java index bc5323a194..a155335c8e 100644 --- a/core/src/main/java/org/polypheny/db/catalog/catalogs/PhysicalCatalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/catalogs/PhysicalCatalog.java @@ -16,6 +16,11 @@ package org.polypheny.db.catalog.catalogs; +import java.util.List; +import org.polypheny.db.catalog.entity.physical.PhysicalEntity; + public interface PhysicalCatalog { + List> getPhysicalsOnAdapter( long id ); + } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogIndex.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogIndex.java index 081d51c381..22af8c0e7b 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogIndex.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogIndex.java @@ -21,10 +21,9 @@ import java.util.LinkedList; import java.util.List; import lombok.EqualsAndHashCode; -import lombok.Getter; import lombok.NonNull; import lombok.RequiredArgsConstructor; -import org.polypheny.db.catalog.Catalog; +import lombok.Value; import org.polypheny.db.catalog.logistic.IndexType; @@ -75,7 +74,7 @@ public List getCatalogIndexColumns() { int i = 1; List list = new LinkedList<>(); for ( String columnName : key.getColumnNames() ) { - list.add( new CatalogIndexColumn( id, i++, columnName ) ); + list.add( new CatalogIndexColumn( id, i++, columnName, this ) ); } return list; } @@ -83,7 +82,6 @@ public List getCatalogIndexColumns() { public Serializable[] getParameterArray( int ordinalPosition, String columnName ) { return new Serializable[]{ - key.getDatabaseName(), key.getSchemaName(), key.getTableName(), !unique, @@ -103,19 +101,22 @@ public Serializable[] getParameterArray( int ordinalPosition, String columnName // Used for creating ResultSets @RequiredArgsConstructor + @Value public static class CatalogIndexColumn implements CatalogObject { private static final long serialVersionUID = -5596459769680478780L; - private final long indexId; - private final int ordinalPosition; - @Getter - private final String columnName; + public long indexId; + public int ordinalPosition; + + public String columnName; + + public CatalogIndex index; @Override public Serializable[] getParameterArray() { - return Catalog.getInstance().getIndex( indexId ).getParameterArray( ordinalPosition, columnName ); + return index.getParameterArray( ordinalPosition, columnName ); } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/allocation/Allocation.java b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/Allocation.java deleted file mode 100644 index 77b62b8ebb..0000000000 --- a/core/src/main/java/org/polypheny/db/catalog/entity/allocation/Allocation.java +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Copyright 2019-2023 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.polypheny.db.catalog.entity.allocation; - -import org.polypheny.db.catalog.refactor.CatalogType; - -public interface Allocation extends CatalogType { - - default State getCatalogType() { - return State.ALLOCATION; - } - -} diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationCollection.java b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationCollection.java index ab4ae9184b..9951bdd5a3 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationCollection.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationCollection.java @@ -23,17 +23,16 @@ import org.apache.calcite.linq4j.tree.Expression; import org.apache.calcite.linq4j.tree.Expressions; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.logistic.NamespaceType; @EqualsAndHashCode(callSuper = true) @Value @NonFinal -public class AllocationCollection extends CatalogEntity implements Allocation { +public class AllocationCollection extends AllocationEntity { - public AllocationCollection( long id, String name, EntityType type, NamespaceType namespaceType ) { - super( id, name, type, namespaceType ); + public AllocationCollection( long id, String name, EntityType type, NamespaceType namespaceType, long adapterId ) { + super( id, name, type, namespaceType, adapterId ); } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationEntity.java b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationEntity.java new file mode 100644 index 0000000000..f41ccf8e6f --- /dev/null +++ b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationEntity.java @@ -0,0 +1,47 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.db.catalog.entity.allocation; + +import lombok.EqualsAndHashCode; +import lombok.Value; +import lombok.experimental.NonFinal; +import org.polypheny.db.catalog.entity.logical.LogicalEntity; +import org.polypheny.db.catalog.logistic.EntityType; +import org.polypheny.db.catalog.logistic.NamespaceType; + +@EqualsAndHashCode(callSuper = true) +@Value +@NonFinal +public abstract class AllocationEntity extends LogicalEntity { + + public long adapterId; + public L logical; + + + protected AllocationEntity( L logical, long id, String name, String namespaceName, EntityType type, NamespaceType namespaceType, long adapterId ) { + super( id, name, namespaceName, type, namespaceType ); + this.adapterId = adapterId; + this.logical = logical; + } + + + public State getCatalogType() { + return State.ALLOCATION; + } + + +} diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationGraph.java b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationGraph.java index 09ea32356f..24c7dbc480 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationGraph.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationGraph.java @@ -23,21 +23,20 @@ import org.apache.calcite.linq4j.tree.Expression; import org.apache.calcite.linq4j.tree.Expressions; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.catalog.entity.logical.LogicalGraph; @EqualsAndHashCode(callSuper = true) @Value @NonFinal -public class AllocationGraph extends CatalogEntity implements Allocation { +public class AllocationGraph extends AllocationEntity { public LogicalGraph logical; public long id; - public AllocationGraph( long id, LogicalGraph graph ) { - super( id, graph.name, graph.entityType, graph.namespaceType ); + public AllocationGraph( long id, LogicalGraph graph, long adapterId ) { + super( id, graph.name, graph.entityType, graph.namespaceType, adapterId ); this.id = id; this.logical = graph; } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationTable.java b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationTable.java index e63c67efb9..59109053b0 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationTable.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationTable.java @@ -27,7 +27,6 @@ import org.apache.calcite.linq4j.tree.Expressions; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; -import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.logistic.EntityType; @@ -35,7 +34,7 @@ @EqualsAndHashCode(callSuper = true) @Value -public class AllocationTable extends CatalogEntity implements Allocation { +public class AllocationTable extends AllocationEntity { public List placements; public long adapterId; @@ -43,8 +42,8 @@ public class AllocationTable extends CatalogEntity implements Allocation { public LogicalTable logicalTable; - public AllocationTable( LogicalTable logicalTable, long id, long logicalId, String name, long adapterId, List placements ) { - super( id, name, EntityType.ENTITY, NamespaceType.RELATIONAL ); + public AllocationTable( LogicalTable logicalTable, long id, long logicalId, String name, String namespaceName, long adapterId, 
List placements ) { + super( logicalTable, id, name, namespaceName, EntityType.ENTITY, NamespaceType.RELATIONAL, adapterId ); this.logicalTable = logicalTable; this.logicalId = logicalId; this.adapterId = adapterId; diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalCollection.java b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalCollection.java index e05501244a..d8e4ce0254 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalCollection.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalCollection.java @@ -37,7 +37,7 @@ @EqualsAndHashCode(callSuper = true) @Value -public class LogicalCollection extends CatalogEntity implements CatalogObject, Logical { +public class LogicalCollection extends CatalogEntity implements CatalogObject, LogicalEntity { private static final long serialVersionUID = -6490762948368178584L; diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/logical/Logical.java b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalEntity.java similarity index 51% rename from core/src/main/java/org/polypheny/db/catalog/entity/logical/Logical.java rename to core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalEntity.java index d74ad6ba4f..db727f095a 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/logical/Logical.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalEntity.java @@ -16,11 +16,28 @@ package org.polypheny.db.catalog.entity.logical; -import org.polypheny.db.catalog.refactor.CatalogType; +import lombok.EqualsAndHashCode; +import lombok.Value; +import lombok.experimental.NonFinal; +import org.polypheny.db.catalog.entity.CatalogEntity; +import org.polypheny.db.catalog.logistic.EntityType; +import org.polypheny.db.catalog.logistic.NamespaceType; -public interface Logical extends CatalogType { +@EqualsAndHashCode(callSuper = true) +@Value +@NonFinal +public abstract class LogicalEntity extends CatalogEntity { - default State getCatalogType() { + public String namespaceName; + + + protected LogicalEntity( long id, String name, String namespaceName, EntityType type, NamespaceType namespaceType ) { + super( id, name, type, namespaceType ); + this.namespaceName = namespaceName; + } + + + public State getCatalogType() { return State.LOGICAL; } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalGraph.java b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalGraph.java index 8acfb8e1fb..bf6e4b8158 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalGraph.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalGraph.java @@ -37,7 +37,7 @@ @SuperBuilder(toBuilder = true) @EqualsAndHashCode(callSuper = true) @Value -public class LogicalGraph extends CatalogEntity implements Comparable, Logical { +public class LogicalGraph extends CatalogEntity implements Comparable, LogicalEntity { private static final long serialVersionUID = 7343856827901459672L; diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalTable.java b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalTable.java index 7b8a6d2971..6e7d5795cd 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalTable.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalTable.java @@ -21,12 +21,11 @@ import io.activej.serializer.annotations.Deserialize; import 
io.activej.serializer.annotations.Serialize; import java.io.Serializable; -import java.util.LinkedList; import java.util.List; +import java.util.stream.Collectors; import lombok.EqualsAndHashCode; import lombok.NonNull; import lombok.RequiredArgsConstructor; -import lombok.SneakyThrows; import lombok.Value; import lombok.With; import lombok.experimental.NonFinal; @@ -36,7 +35,6 @@ import org.polypheny.db.algebra.type.AlgDataTypeFactory; import org.polypheny.db.algebra.type.AlgDataTypeImpl; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.partition.properties.PartitionProperty; @@ -46,7 +44,7 @@ @With @EqualsAndHashCode(callSuper = false) @NonFinal -public class LogicalTable extends CatalogEntity implements Comparable, Logical { +public class LogicalTable extends LogicalEntity implements Comparable { private static final long serialVersionUID = 4653390333258552102L; @@ -55,12 +53,10 @@ public class LogicalTable extends CatalogEntity implements Comparable fieldIds; + public ImmutableList columns; @Serialize public long namespaceId; @Serialize - public int ownerId; - @Serialize public EntityType entityType; @Serialize public Long primaryKey; @@ -69,7 +65,7 @@ public class LogicalTable extends CatalogEntity implements Comparable dataPlacements; + public ImmutableList dataPlacements; @Serialize public ImmutableList connectedViews; @@ -77,36 +73,35 @@ public class LogicalTable extends CatalogEntity implements Comparable fieldIds, + final ImmutableList fieldIds, final long namespaceId, - final int ownerId, + final String namespaceName, @NonNull final EntityType type, final Long primaryKey, - @NonNull final List dataPlacements, + @NonNull final List dataPlacements, boolean modifiable, PartitionProperty partitionProperty ) { - this( id, name, fieldIds, namespaceId, ownerId, type, primaryKey, dataPlacements, modifiable, partitionProperty, ImmutableList.of() ); + this( id, name, fieldIds, namespaceId, namespaceName, type, primaryKey, dataPlacements, modifiable, partitionProperty, ImmutableList.of() ); } public LogicalTable( @Deserialize("id") final long id, @Deserialize("name") @NonNull final String name, - @Deserialize("fieldIds") final List fieldIds, + @Deserialize("columns") final List columns, @Deserialize("namespaceId") final long namespaceId, - @Deserialize("ownerId") final int ownerId, + @Deserialize("namespaceName") final String namespaceName, @Deserialize("type") @NonNull final EntityType type, @Deserialize("primaryKey") final Long primaryKey, - @Deserialize("dataPlacements") @NonNull final List dataPlacements, + @Deserialize("dataPlacements") @NonNull final List dataPlacements, @Deserialize("modifiable") boolean modifiable, @Deserialize("partitionProperty") PartitionProperty partitionProperty, @Deserialize("connectedViews") List connectedViews ) { - super( id, name, type, NamespaceType.RELATIONAL ); + super( id, name, namespaceName, type, NamespaceType.RELATIONAL ); this.id = id; this.name = name; - this.fieldIds = ImmutableList.copyOf( fieldIds ); + this.columns = ImmutableList.copyOf( columns ); this.namespaceId = namespaceId; - this.ownerId = ownerId; this.entityType = type; this.primaryKey = primaryKey; this.modifiable = modifiable; @@ -123,33 +118,13 @@ public LogicalTable( } - - @SneakyThrows - public String getNamespaceName() { - return Catalog.getInstance().getNamespace( namespaceId ).name; - } - - - 
@SneakyThrows - public NamespaceType getNamespaceType() { - return Catalog.getInstance().getNamespace( namespaceId ).namespaceType; - } - - - @SneakyThrows - public String getOwnerName() { - return Catalog.getInstance().getUser( ownerId ).name; + public List getColumnNames() { + return columns.stream().map( c -> c.name ).collect( Collectors.toList() ); } - @SneakyThrows - public List getColumnNames() { - Catalog catalog = Catalog.getInstance(); - List fieldNames = new LinkedList<>(); - for ( long fieldId : fieldIds ) { - fieldNames.add( catalog.getColumn( fieldId ).name ); - } - return fieldNames; + public List getColumnIds() { + return columns.stream().map( c -> c.id ).collect( Collectors.toList() ); } @@ -165,8 +140,7 @@ public Serializable[] getParameterArray() { null, null, null, - null, - getOwnerName() + null }; } @@ -190,10 +164,9 @@ public int compareTo( LogicalTable o ) { public AlgDataType getRowType() { final AlgDataTypeFactory.Builder fieldInfo = AlgDataTypeFactory.DEFAULT.builder(); - for ( Long id : fieldIds ) { - LogicalColumn logicalColumn = Catalog.getInstance().getColumn( id ); - AlgDataType sqlType = logicalColumn.getAlgDataType( AlgDataTypeFactory.DEFAULT ); - fieldInfo.add( logicalColumn.name, null, sqlType ).nullable( logicalColumn.nullable ); + for ( LogicalColumn column : columns ) { + AlgDataType sqlType = column.getAlgDataType( AlgDataTypeFactory.DEFAULT ); + fieldInfo.add( column.name, null, sqlType ).nullable( column.nullable ); } return AlgDataTypeImpl.proto( fieldInfo.build() ).apply( AlgDataTypeFactory.DEFAULT ); diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalCollection.java b/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalCollection.java index 9d02f263f7..0f94b43465 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalCollection.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalCollection.java @@ -23,17 +23,17 @@ import org.apache.calcite.linq4j.tree.Expression; import org.apache.calcite.linq4j.tree.Expressions; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.entity.CatalogEntity; +import org.polypheny.db.catalog.entity.logical.LogicalCollection; import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.logistic.NamespaceType; @EqualsAndHashCode(callSuper = true) @Value @NonFinal -public class PhysicalCollection extends CatalogEntity implements Physical { +public class PhysicalCollection extends PhysicalEntity { - public PhysicalCollection( long id, String name, EntityType type, NamespaceType namespaceType ) { - super( id, name, type, namespaceType ); + public PhysicalCollection( LogicalCollection logical, long id, String name, String namespaceName, EntityType type, NamespaceType namespaceType, long adapterId ) { + super( logical, id, name, namespaceName, type, namespaceType, adapterId ); } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/physical/Physical.java b/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalEntity.java similarity index 52% rename from core/src/main/java/org/polypheny/db/catalog/entity/physical/Physical.java rename to core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalEntity.java index 64b8348c1a..18257f0c7f 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/physical/Physical.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalEntity.java @@ -16,10 +16,20 @@ package 
org.polypheny.db.catalog.entity.physical; -import org.polypheny.db.catalog.refactor.CatalogType; +import org.polypheny.db.catalog.entity.allocation.AllocationEntity; +import org.polypheny.db.catalog.entity.logical.LogicalEntity; +import org.polypheny.db.catalog.logistic.EntityType; +import org.polypheny.db.catalog.logistic.NamespaceType; -public interface Physical extends CatalogType { - default State getCatalogType() { +public abstract class PhysicalEntity extends AllocationEntity { + + protected PhysicalEntity( L logical, long id, String name, String namespaceName, EntityType type, NamespaceType namespaceType, long adapterId ) { + super( logical, id, name, namespaceName, type, namespaceType, adapterId ); + } + + + @Override + public State getCatalogType() { return State.PHYSICAL; } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalGraph.java b/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalGraph.java index 8ebd3f7215..454e7aeddf 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalGraph.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalGraph.java @@ -23,17 +23,17 @@ import org.apache.calcite.linq4j.tree.Expression; import org.apache.calcite.linq4j.tree.Expressions; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.entity.CatalogEntity; +import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.logistic.NamespaceType; @EqualsAndHashCode(callSuper = true) @Value @NonFinal -public class PhysicalGraph extends CatalogEntity implements Physical { +public class PhysicalGraph extends PhysicalEntity { - public PhysicalGraph( long id, String name, EntityType type, NamespaceType namespaceType ) { - super( id, name, type, namespaceType ); + public PhysicalGraph( LogicalGraph logical, long id, String name, EntityType type, NamespaceType namespaceType, long adapterId ) { + super( logical, id, name, name, type, namespaceType, adapterId ); // for graph both name and namespaceName are the same } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalTable.java b/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalTable.java index cc3dc1f0ed..8f15bf26b2 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalTable.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalTable.java @@ -32,9 +32,9 @@ import org.polypheny.db.algebra.type.AlgProtoDataType; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; -import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.catalog.entity.allocation.AllocationTable; import org.polypheny.db.catalog.entity.logical.LogicalColumn; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.type.PolyTypeFactoryImpl; @@ -42,7 +42,7 @@ @EqualsAndHashCode(callSuper = true) @Value @NonFinal -public class PhysicalTable extends CatalogEntity implements Physical { +public class PhysicalTable extends PhysicalEntity { public ImmutableList placements; public ImmutableList columnIds; @@ -53,7 +53,7 @@ public class PhysicalTable extends CatalogEntity implements Physical { public PhysicalTable( AllocationTable allocation, long id, String name, String namespaceName, EntityType 
type, NamespaceType namespaceType, List<CatalogColumnPlacement> placements, List<String> columnNames ) { - super( id, name, type, namespaceType ); + super( allocation.logical, id, name, namespaceName, type, namespaceType, allocation.adapterId ); this.allocation = allocation; this.namespaceName = namespaceName; this.placements = ImmutableList.copyOf( placements ); diff --git a/core/src/main/java/org/polypheny/db/ddl/DdlManager.java b/core/src/main/java/org/polypheny/db/ddl/DdlManager.java index 80a2136a25..9048f8ff26 100644 --- a/core/src/main/java/org/polypheny/db/ddl/DdlManager.java +++ b/core/src/main/java/org/polypheny/db/ddl/DdlManager.java @@ -25,27 +25,26 @@ import org.polypheny.db.algebra.AlgCollation; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.AlgRoot; -import org.polypheny.db.catalog.logistic.Collation; -import org.polypheny.db.catalog.logistic.ConstraintType; -import org.polypheny.db.catalog.logistic.ForeignKeyOption; -import org.polypheny.db.catalog.logistic.NamespaceType; -import org.polypheny.db.catalog.logistic.PlacementType; import org.polypheny.db.catalog.entity.CatalogAdapter.AdapterType; +import org.polypheny.db.catalog.entity.MaterializedCriteria; import org.polypheny.db.catalog.entity.logical.LogicalCollection; import org.polypheny.db.catalog.entity.logical.LogicalTable; -import org.polypheny.db.catalog.entity.MaterializedCriteria; import org.polypheny.db.catalog.exceptions.ColumnAlreadyExistsException; import org.polypheny.db.catalog.exceptions.EntityAlreadyExistsException; import org.polypheny.db.catalog.exceptions.GenericCatalogException; import org.polypheny.db.catalog.exceptions.NamespaceAlreadyExistsException; import org.polypheny.db.catalog.exceptions.UnknownAdapterException; import org.polypheny.db.catalog.exceptions.UnknownColumnException; -import org.polypheny.db.catalog.exceptions.UnknownDatabaseException; import org.polypheny.db.catalog.exceptions.UnknownKeyException; import org.polypheny.db.catalog.exceptions.UnknownPartitionTypeException; import org.polypheny.db.catalog.exceptions.UnknownSchemaException; import org.polypheny.db.catalog.exceptions.UnknownTableException; import org.polypheny.db.catalog.exceptions.UnknownUserException; +import org.polypheny.db.catalog.logistic.Collation; +import org.polypheny.db.catalog.logistic.ConstraintType; +import org.polypheny.db.catalog.logistic.ForeignKeyOption; +import org.polypheny.db.catalog.logistic.NamespaceType; +import org.polypheny.db.catalog.logistic.PlacementType; import org.polypheny.db.ddl.exception.AlterSourceException; import org.polypheny.db.ddl.exception.ColumnNotExistsException; import org.polypheny.db.ddl.exception.DdlOnSourceException; @@ -138,23 +137,13 @@ public static DdlManager getInstance() { */ public abstract void dropAdapter( String name, Statement statement ) throws UnknownAdapterException; - /** - * Change the owner of a schema - * - * @param schemaName the name of the schema for which to change the owner - * @param ownerName the name of the new owner - * @param databaseId the id of the database - */ - public abstract void alterSchemaOwner( String schemaName, String ownerName, long databaseId ) throws UnknownUserException, UnknownSchemaException; - /** * Change the name of a schema * * @param newName the new name for the schema * @param oldName the current name of the schema - * @param databaseId the id of the database the schema belongs to */ - public abstract void renameSchema( String newName, String oldName, long databaseId ) throws NamespaceAlreadyExistsException, 
UnknownSchemaException; + public abstract void renameSchema( String newName, String oldName ) throws NamespaceAlreadyExistsException, UnknownSchemaException; /** * Adds a column to an existing source table @@ -206,7 +195,7 @@ public static DdlManager getInstance() { * @param location instance of the data store on which to create the index; if null, default strategy is being used * @param statement the initial query statement */ - public abstract void addIndex( LogicalTable catalogTable, String indexMethodName, List<String> columnNames, String indexName, boolean isUnique, DataStore location, Statement statement ) throws UnknownColumnException, UnknownIndexMethodException, GenericCatalogException, UnknownTableException, UnknownUserException, UnknownSchemaException, UnknownKeyException, UnknownDatabaseException, TransactionException, AlterSourceException, IndexExistsException, MissingColumnPlacementException; + public abstract void addIndex( LogicalTable catalogTable, String indexMethodName, List<String> columnNames, String indexName, boolean isUnique, DataStore location, Statement statement ) throws UnknownColumnException, UnknownIndexMethodException, GenericCatalogException, UnknownTableException, UnknownUserException, UnknownSchemaException, UnknownKeyException, TransactionException, AlterSourceException, IndexExistsException, MissingColumnPlacementException; /** * Adds an index located in Polypheny to a table * * @param catalogTable the table * @param indexMethodName name of the index method * @param columnNames names of the columns * @param indexName name of the index * @param isUnique whether the index is unique * @param statement the initial query statement */ - public abstract void addPolyphenyIndex( LogicalTable catalogTable, String indexMethodName, List<String> columnNames, String indexName, boolean isUnique, Statement statement ) throws UnknownColumnException, UnknownIndexMethodException, GenericCatalogException, UnknownTableException, UnknownUserException, UnknownSchemaException, UnknownKeyException, UnknownDatabaseException, TransactionException, AlterSourceException, IndexExistsException, MissingColumnPlacementException; + public abstract void addPolyphenyIndex( LogicalTable catalogTable, String indexMethodName, List<String> columnNames, String indexName, boolean isUnique, Statement statement ) throws UnknownColumnException, UnknownIndexMethodException, GenericCatalogException, UnknownTableException, UnknownUserException, UnknownSchemaException, UnknownKeyException, TransactionException, AlterSourceException, IndexExistsException, MissingColumnPlacementException; /** * Adds new column placements to a table @@ -483,7 +472,7 @@ public static DdlManager getInstance() { * * @param partitionInfo the information concerning the partition */ - public abstract void addPartitioning( PartitionInformation partitionInfo, List<DataStore> stores, Statement statement ) throws GenericCatalogException, UnknownPartitionTypeException, UnknownColumnException, PartitionGroupNamesNotUniqueException, UnknownDatabaseException, UnknownTableException, TransactionException, UnknownSchemaException, UnknownUserException, UnknownKeyException; + public abstract void addPartitioning( PartitionInformation partitionInfo, List<DataStore> stores, Statement statement ) throws GenericCatalogException, UnknownPartitionTypeException, UnknownColumnException, PartitionGroupNamesNotUniqueException, UnknownTableException, TransactionException, UnknownSchemaException, UnknownUserException, UnknownKeyException; /** * Removes partitioning from Table * * @param catalogTable the table to be merged * @param statement the 
used Statement */ - public abstract void removePartitioning( LogicalTable catalogTable, Statement statement ) throws UnknownDatabaseException, GenericCatalogException, UnknownTableException, TransactionException, UnknownSchemaException, UnknownUserException, UnknownKeyException; + public abstract void removePartitioning( LogicalTable catalogTable, Statement statement ) throws GenericCatalogException, UnknownTableException, TransactionException, UnknownSchemaException, UnknownUserException, UnknownKeyException; /** * Adds a new constraint to a table diff --git a/core/src/main/java/org/polypheny/db/docker/DockerInstance.java b/core/src/main/java/org/polypheny/db/docker/DockerInstance.java index 46198af95a..66c2a00ab8 100644 --- a/core/src/main/java/org/polypheny/db/docker/DockerInstance.java +++ b/core/src/main/java/org/polypheny/db/docker/DockerInstance.java @@ -523,7 +523,7 @@ public void stopAll( int adapterId ) { @Override - public void destroyAll( int adapterId ) { + public void destroyAll( long adapterId ) { if ( containersOnAdapter.containsKey( adapterId ) ) { containersOnAdapter.get( adapterId ).forEach( containerName -> availableContainers.get( containerName ).destroy() ); } diff --git a/core/src/main/java/org/polypheny/db/docker/DockerManager.java b/core/src/main/java/org/polypheny/db/docker/DockerManager.java index 5adbe3cb57..56c24e3270 100644 --- a/core/src/main/java/org/polypheny/db/docker/DockerManager.java +++ b/core/src/main/java/org/polypheny/db/docker/DockerManager.java @@ -96,7 +96,7 @@ public static DockerManager getInstance() { * * @param adapterId the id of the adapter */ - public abstract void destroyAll( int adapterId ); + public abstract void destroyAll( long adapterId ); public abstract List getUsedNames(); diff --git a/core/src/main/java/org/polypheny/db/docker/DockerManagerImpl.java b/core/src/main/java/org/polypheny/db/docker/DockerManagerImpl.java index e53a79c107..f927871cf2 100644 --- a/core/src/main/java/org/polypheny/db/docker/DockerManagerImpl.java +++ b/core/src/main/java/org/polypheny/db/docker/DockerManagerImpl.java @@ -121,7 +121,7 @@ public void stopAll( int adapterId ) { @Override - public void destroyAll( int adapterId ) { + public void destroyAll( long adapterId ) { dockerInstances.values().forEach( dockerInstance -> dockerInstance.destroyAll( adapterId ) ); } diff --git a/core/src/test/java/org/polypheny/db/catalog/MockCatalog.java b/core/src/test/java/org/polypheny/db/catalog/MockCatalog.java index 1de0b08d96..847c6b9a8c 100644 --- a/core/src/test/java/org/polypheny/db/catalog/MockCatalog.java +++ b/core/src/test/java/org/polypheny/db/catalog/MockCatalog.java @@ -16,67 +16,26 @@ package org.polypheny.db.catalog; -import com.google.common.collect.ImmutableList; -import com.google.common.collect.ImmutableMap; import java.util.List; import java.util.Map; import lombok.NonNull; import org.apache.commons.lang3.NotImplementedException; -import org.polypheny.db.adapter.DataStore; -import org.polypheny.db.algebra.AlgCollation; import org.polypheny.db.algebra.AlgNode; -import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.catalog.entity.CatalogAdapter; import org.polypheny.db.catalog.entity.CatalogAdapter.AdapterType; -import org.polypheny.db.catalog.entity.CatalogCollectionMapping; -import org.polypheny.db.catalog.entity.CatalogCollectionPlacement; -import org.polypheny.db.catalog.entity.CatalogColumnPlacement; -import org.polypheny.db.catalog.entity.CatalogConstraint; -import org.polypheny.db.catalog.entity.CatalogDataPlacement; 
import org.polypheny.db.catalog.entity.CatalogDatabase; -import org.polypheny.db.catalog.entity.CatalogForeignKey; -import org.polypheny.db.catalog.entity.CatalogGraphMapping; -import org.polypheny.db.catalog.entity.CatalogGraphPlacement; -import org.polypheny.db.catalog.entity.CatalogIndex; -import org.polypheny.db.catalog.entity.CatalogKey; -import org.polypheny.db.catalog.entity.CatalogPartition; -import org.polypheny.db.catalog.entity.CatalogPartitionGroup; -import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; -import org.polypheny.db.catalog.entity.CatalogPrimaryKey; import org.polypheny.db.catalog.entity.CatalogQueryInterface; import org.polypheny.db.catalog.entity.CatalogUser; -import org.polypheny.db.catalog.entity.CatalogView; import org.polypheny.db.catalog.entity.LogicalNamespace; -import org.polypheny.db.catalog.entity.MaterializedCriteria; -import org.polypheny.db.catalog.entity.logical.LogicalCollection; -import org.polypheny.db.catalog.entity.logical.LogicalColumn; -import org.polypheny.db.catalog.entity.logical.LogicalGraph; -import org.polypheny.db.catalog.entity.logical.LogicalTable; -import org.polypheny.db.catalog.exceptions.GenericCatalogException; +import org.polypheny.db.catalog.entity.physical.PhysicalEntity; import org.polypheny.db.catalog.exceptions.NoTablePrimaryKeyException; import org.polypheny.db.catalog.exceptions.UnknownAdapterException; -import org.polypheny.db.catalog.exceptions.UnknownColumnException; -import org.polypheny.db.catalog.exceptions.UnknownConstraintException; -import org.polypheny.db.catalog.exceptions.UnknownDatabaseException; -import org.polypheny.db.catalog.exceptions.UnknownForeignKeyException; -import org.polypheny.db.catalog.exceptions.UnknownIndexException; import org.polypheny.db.catalog.exceptions.UnknownQueryInterfaceException; import org.polypheny.db.catalog.exceptions.UnknownSchemaException; -import org.polypheny.db.catalog.exceptions.UnknownTableException; import org.polypheny.db.catalog.exceptions.UnknownUserException; -import org.polypheny.db.catalog.logistic.Collation; -import org.polypheny.db.catalog.logistic.DataPlacementRole; -import org.polypheny.db.catalog.logistic.EntityType; -import org.polypheny.db.catalog.logistic.ForeignKeyOption; -import org.polypheny.db.catalog.logistic.IndexType; import org.polypheny.db.catalog.logistic.NamespaceType; -import org.polypheny.db.catalog.logistic.PartitionType; import org.polypheny.db.catalog.logistic.Pattern; -import org.polypheny.db.catalog.logistic.PlacementType; -import org.polypheny.db.languages.QueryLanguage; -import org.polypheny.db.partition.properties.PartitionProperty; import org.polypheny.db.transaction.Transaction; -import org.polypheny.db.type.PolyType; /** @@ -87,23 +46,6 @@ */ public abstract class MockCatalog extends Catalog { - @Override - public void dropCollectionPlacement( long id, int adapterId ) { - throw new NotImplementedException(); - } - - - @Override - public List getGraphPlacements( int adapterId ) { - throw new NotImplementedException(); - } - - - @Override - public void deleteCollection( long id ) { - throw new NotImplementedException(); - } - @Override public void commit() throws NoTablePrimaryKeyException { @@ -141,12 +83,6 @@ public Map getNodeInfo() { } - @Override - public Map getAlgTypeInfo() { - throw new NotImplementedException(); - } - - @Override public void restoreViews( Transaction transaction ) { throw new NotImplementedException(); @@ -158,73 +94,7 @@ private List getDatabases( Pattern pattern ) { } - private CatalogDatabase 
getDatabase( String databaseName ) throws UnknownDatabaseException { - throw new NotImplementedException(); - } - - - @Override - public long addGraph( String name, List stores, boolean modifiable, boolean ifNotExists, boolean replace ) { - throw new NotImplementedException(); - } - - - @Override - public void deleteGraph( long id ) { - throw new NotImplementedException(); - } - - - @Override - public void addGraphAlias( long graphId, String alias, boolean ifNotExists ) { - throw new NotImplementedException(); - } - - - @Override - public void removeGraphAlias( long graphId, String alias, boolean ifNotExists ) { - throw new NotImplementedException(); - } - - - @Override - public CatalogGraphMapping getGraphMapping( long graphId ) { - throw new NotImplementedException(); - } - - - @Override - public void addGraphLogistics( long id, List stores, boolean onlyPlacement ) throws GenericCatalogException { - throw new NotImplementedException(); - } - - - @Override - public long addGraphPlacement( int adapterId, long graphId ) { - throw new NotImplementedException(); - } - - - @Override - public void deleteGraphPlacement( int adapterId, long graphId ) { - throw new NotImplementedException(); - } - - - @Override - public CatalogGraphPlacement getGraphPlacement( long graphId, int adapterId ) { - throw new NotImplementedException(); - } - - - @Override - public LogicalGraph getGraph( long id ) { - throw new NotImplementedException(); - } - - - @Override - public List getGraphs( Pattern graphName ) { + private CatalogDatabase getDatabase( String databaseName ) { throw new NotImplementedException(); } @@ -287,679 +157,91 @@ public void deleteNamespace( long id ) { @Override - public List getTables( long schemaId, Pattern tableNamePattern ) { - throw new NotImplementedException(); - } - - - @Override - public List getTables( Pattern schemaNamePattern, Pattern tableNamePattern ) { - throw new NotImplementedException(); - } - - - @Override - public LogicalTable getTable( String schemaName, String tableName ) throws UnknownTableException, UnknownSchemaException { - throw new NotImplementedException(); - } - - - @Override - public LogicalTable getTableFromPartition( long partitionId ) { - throw new NotImplementedException(); - } - - - @Override - public List getTables( Pattern schemaNamePattern, Pattern tableNamePattern ) { - throw new NotImplementedException(); - } - - - @Override - public LogicalTable getTable( long tableId ) { - throw new NotImplementedException(); - } - - - @Override - public LogicalTable getTable( long schemaId, String tableName ) throws UnknownTableException { - throw new NotImplementedException(); - } - - - @Override - public LogicalTable getTable( String schemaName, String tableName ) throws UnknownTableException { - throw new NotImplementedException(); - } - - - @Override - public long addTable( String name, long namespaceId, int ownerId, EntityType entityType, boolean modifiable ) { - throw new NotImplementedException(); - } - - - @Override - public long addView( String name, long namespaceId, int ownerId, EntityType entityType, boolean modifiable, AlgNode definition, AlgCollation algCollation, Map> underlyingTables, AlgDataType fieldList, String query, QueryLanguage language ) { - throw new NotImplementedException(); - } - - - @Override - public long addMaterializedView( String name, long namespaceId, int ownerId, EntityType entityType, boolean modifiable, AlgNode definition, AlgCollation algCollation, Map> underlyingTables, AlgDataType fieldList, MaterializedCriteria 
materializedCriteria, String query, QueryLanguage language, boolean ordered ) { - throw new NotImplementedException(); - } - - - @Override - public void deleteViewDependencies( CatalogView catalogView ) { - throw new NotImplementedException(); - } - - - @Override - public boolean checkIfExistsEntity( long namespaceId, String entityName ) { - throw new NotImplementedException(); - } - - - @Override - public boolean checkIfExistsEntity( long tableId ) { - throw new NotImplementedException(); - } - - - @Override - public void renameTable( long tableId, String name ) { - throw new NotImplementedException(); - } - - - @Override - public void deleteTable( long tableId ) { - throw new NotImplementedException(); - } - - - @Override - public void setTableOwner( long tableId, int ownerId ) { - throw new NotImplementedException(); - } - - - @Override - public void setPrimaryKey( long tableId, Long keyId ) { - throw new NotImplementedException(); - } - - - @Override - public void addColumnPlacement( int adapterId, long columnId, PlacementType placementType, String physicalSchemaName, String physicalTableName, String physicalColumnName ) { - throw new NotImplementedException(); - } - - - @Override - public void deleteColumnPlacement( int adapterId, long columnId, boolean columnOnly ) { - throw new NotImplementedException(); - } - - - @Override - public CatalogColumnPlacement getColumnPlacement( int adapterId, long columnId ) { - throw new NotImplementedException(); - } - - - @Override - public boolean checkIfExistsColumnPlacement( int adapterId, long columnId ) { - throw new NotImplementedException(); - } - - - @Override - public List getColumnPlacement( long columnId ) { - throw new NotImplementedException(); - } - - - @Override - public List getColumnPlacementsOnAdapterPerTable( int adapterId, long tableId ) { - throw new NotImplementedException(); - } - - - @Override - public List getColumnPlacementsOnAdapter( int adapterId ) { - throw new NotImplementedException(); - } - - - @Override - public List getColumnPlacementsByColumn( long columnId ) { - throw new NotImplementedException(); - } - - - @Override - public List getKeys() { - throw new NotImplementedException(); - } - - - @Override - public List getTableKeys( long tableId ) { - throw new NotImplementedException(); - } - - - @Override - public List getColumnPlacementsOnAdapterAndSchema( int adapterId, long schemaId ) { - throw new NotImplementedException(); - } - - - @Override - public void updateColumnPlacementType( int adapterId, long columnId, PlacementType placementType ) { - throw new NotImplementedException(); - } - - - @Override - public void updateColumnPlacementPhysicalPosition( int adapterId, long columnId, long position ) { - throw new NotImplementedException(); - } - - - @Override - public void updateColumnPlacementPhysicalPosition( int adapterId, long columnId ) { - throw new NotImplementedException(); - } - - - @Override - public void updateColumnPlacementPhysicalNames( int adapterId, long columnId, String physicalSchemaName, String physicalColumnName, boolean updatePhysicalColumnPosition ) { - throw new NotImplementedException(); - } - - - @Override - public void updateMaterializedViewRefreshTime( long materializedId ) { + public CatalogUser getUser( String name ) throws UnknownUserException { throw new NotImplementedException(); } @Override - public LogicalCollection getCollection( long collectionId ) { + public CatalogUser getUser( long id ) { throw new NotImplementedException(); } @Override - public List getCollections( long 
namespaceId, Pattern namePattern ) { + public List getAdapters() { throw new NotImplementedException(); } @Override - public long addCollection( Long id, String name, long schemaId, int currentUserId, EntityType entity, boolean modifiable ) { + public CatalogAdapter getAdapter( String uniqueName ) throws UnknownAdapterException { throw new NotImplementedException(); } @Override - public long addCollectionPlacement( long namespaceId, int adapterId, long collectionId, PlacementType placementType ) { + public CatalogAdapter getAdapter( long id ) { throw new NotImplementedException(); } @Override - public CatalogCollectionMapping getCollectionMapping( long id ) { + public boolean checkIfExistsAdapter( long id ) { throw new NotImplementedException(); } @Override - public long addCollectionLogistics( long schemaId, String name, List stores, boolean onlyPlacement ) throws GenericCatalogException { + public long addAdapter( String uniqueName, String clazz, AdapterType type, Map settings ) { throw new NotImplementedException(); } @Override - public List getCollectionPlacementsByAdapter( int adapterId ) { + public void updateAdapterSettings( long adapterId, Map newSettings ) { throw new NotImplementedException(); } @Override - public CatalogCollectionPlacement getCollectionPlacement( long collectionId, int adapterId ) { + public void deleteAdapter( long id ) { throw new NotImplementedException(); } @Override - public List getColumns( long tableId ) { + public List getQueryInterfaces() { throw new NotImplementedException(); } @Override - public List getColumns( Pattern schemaNamePattern, Pattern tableNamePattern, Pattern columnNamePattern ) { + public CatalogQueryInterface getQueryInterface( String uniqueName ) throws UnknownQueryInterfaceException { throw new NotImplementedException(); } @Override - public LogicalColumn getColumn( long columnId ) { + public CatalogQueryInterface getQueryInterface( long id ) { throw new NotImplementedException(); } @Override - public LogicalColumn getColumn( long tableId, String columnName ) throws UnknownColumnException { + public long addQueryInterface( String uniqueName, String clazz, Map settings ) { throw new NotImplementedException(); } @Override - public LogicalColumn getColumn( String schemaName, String tableName, String columnName ) throws UnknownColumnException, UnknownSchemaException, UnknownTableException { + public void deleteQueryInterface( long id ) { throw new NotImplementedException(); } @Override - public long addColumn( String name, long tableId, int position, PolyType type, PolyType collectionsType, Integer length, Integer scale, Integer dimension, Integer cardinality, boolean nullable, Collation collation ) { - throw new NotImplementedException(); - } - - - @Override - public void renameColumn( long columnId, String name ) { - throw new NotImplementedException(); - } - - - @Override - public void setColumnPosition( long columnId, int position ) { - throw new NotImplementedException(); - } - - - @Override - public void setColumnType( long columnId, PolyType type, PolyType collectionsType, Integer length, Integer precision, Integer dimension, Integer cardinality ) throws GenericCatalogException { - throw new NotImplementedException(); - } - - - @Override - public void setNullable( long columnId, boolean nullable ) throws GenericCatalogException { - throw new NotImplementedException(); - } - - - @Override - public void setCollation( long columnId, Collation collation ) { - throw new NotImplementedException(); - } - - - @Override - public boolean 
checkIfExistsColumn( long tableId, String columnName ) { - throw new NotImplementedException(); - } - - - @Override - public void deleteColumn( long columnId ) { - throw new NotImplementedException(); - } - - - @Override - public void setDefaultValue( long columnId, PolyType type, String defaultValue ) { - throw new NotImplementedException(); - } - - - @Override - public void deleteDefaultValue( long columnId ) { - throw new NotImplementedException(); - } - - - @Override - public CatalogPrimaryKey getPrimaryKey( long key ) { - throw new NotImplementedException(); - } - - - @Override - public boolean isPrimaryKey( long keyId ) { - throw new NotImplementedException(); - } - - - @Override - public boolean isForeignKey( long keyId ) { - throw new NotImplementedException(); - } - - - @Override - public boolean isIndex( long keyId ) { - throw new NotImplementedException(); - } - - - @Override - public boolean isConstraint( long keyId ) { - throw new NotImplementedException(); - } - - - @Override - public void addPrimaryKey( long tableId, List columnIds ) throws GenericCatalogException { - throw new NotImplementedException(); - } - - - @Override - public List getForeignKeys( long tableId ) { - throw new NotImplementedException(); - } - - - @Override - public List getExportedKeys( long tableId ) { - throw new NotImplementedException(); - } - - - @Override - public List getConstraints( long tableId ) { - throw new NotImplementedException(); - } - - - @Override - public List getIndexes( CatalogKey key ) { - throw new NotImplementedException(); - } - - - @Override - public List getForeignKeys( CatalogKey key ) { - throw new NotImplementedException(); - } - - - @Override - public List getConstraints( CatalogKey key ) { - throw new NotImplementedException(); - } - - - @Override - public CatalogConstraint getConstraint( long tableId, String constraintName ) throws UnknownConstraintException { - throw new NotImplementedException(); - } - - - @Override - public CatalogForeignKey getForeignKey( long tableId, String foreignKeyName ) throws UnknownForeignKeyException { - throw new NotImplementedException(); - } - - - @Override - public void addForeignKey( long tableId, List columnIds, long referencesTableId, List referencesIds, String constraintName, ForeignKeyOption onUpdate, ForeignKeyOption onDelete ) throws GenericCatalogException { - throw new NotImplementedException(); - } - - - @Override - public void addUniqueConstraint( long tableId, String constraintName, List columnIds ) throws GenericCatalogException { - throw new NotImplementedException(); - } - - - @Override - public List getIndexes( long tableId, boolean onlyUnique ) { - throw new NotImplementedException(); - } - - - @Override - public CatalogIndex getIndex( long tableId, String indexName ) throws UnknownIndexException { - throw new NotImplementedException(); - } - - - @Override - public boolean checkIfExistsIndex( long tableId, String indexName ) { - throw new NotImplementedException(); - } - - - @Override - public CatalogIndex getIndex( long indexId ) { - throw new NotImplementedException(); - } - - - @Override - public List getIndexes() { - throw new NotImplementedException(); - } - - - @Override - public long addIndex( long tableId, List columnIds, boolean unique, String method, String methodDisplayName, int location, IndexType type, String indexName ) throws GenericCatalogException { - throw new NotImplementedException(); - } - - - @Override - public void setIndexPhysicalName( long indexId, String physicalName ) { - throw new 
NotImplementedException(); - } - - - @Override - public void deleteIndex( long indexId ) { - throw new NotImplementedException(); - } - - - @Override - public void deletePrimaryKey( long tableId ) throws GenericCatalogException { - throw new NotImplementedException(); - } - - - @Override - public void deleteForeignKey( long foreignKeyId ) throws GenericCatalogException { - throw new NotImplementedException(); - } - - - @Override - public void deleteConstraint( long constraintId ) throws GenericCatalogException { - throw new NotImplementedException(); - } - - - @Override - public CatalogUser getUser( String name ) throws UnknownUserException { - throw new NotImplementedException(); - } - - - @Override - public CatalogUser getUser( long id ) { - throw new NotImplementedException(); - } - - - @Override - public List getAdapters() { - throw new NotImplementedException(); - } - - - @Override - public CatalogAdapter getAdapter( String uniqueName ) throws UnknownAdapterException { - throw new NotImplementedException(); - } - - - @Override - public CatalogAdapter getAdapter( long id ) { - throw new NotImplementedException(); - } - - - @Override - public boolean checkIfExistsAdapter( long id ) { - throw new NotImplementedException(); - } - - - @Override - public long addAdapter( String uniqueName, String clazz, AdapterType type, Map settings ) { - throw new NotImplementedException(); - } - - - @Override - public void updateAdapterSettings( long adapterId, Map newSettings ) { - throw new NotImplementedException(); - } - - - @Override - public void deleteAdapter( long id ) { - throw new NotImplementedException(); - } - - - @Override - public List getQueryInterfaces() { - throw new NotImplementedException(); - } - - - @Override - public CatalogQueryInterface getQueryInterface( String uniqueName ) throws UnknownQueryInterfaceException { - throw new NotImplementedException(); - } - - - @Override - public CatalogQueryInterface getQueryInterface( long id ) { - throw new NotImplementedException(); - } - - - @Override - public long addQueryInterface( String uniqueName, String clazz, Map settings ) { - throw new NotImplementedException(); - } - - - @Override - public void deleteQueryInterface( long id ) { - throw new NotImplementedException(); - } - - - @Override - public long addPartitionGroup( long tableId, String partitionGroupName, long schemaId, PartitionType partitionType, long numberOfInternalPartitions, List effectivePartitionGroupQualifier, boolean isUnbound ) throws GenericCatalogException { - throw new NotImplementedException(); - } - - - @Override - public void deletePartitionGroup( long tableId, long schemaId, long partitionGroupId ) { - throw new NotImplementedException(); - } - - - @Override - public CatalogPartitionGroup getPartitionGroup( long partitionGroupId ) { - throw new NotImplementedException(); - } - - - @Override - public void partitionTable( long tableId, PartitionType partitionType, long partitionColumnId, int numPartitionGroups, List partitionGroupIds, PartitionProperty partitionProperty ) { - throw new NotImplementedException(); - } - - - @Override - public void mergeTable( long tableId ) { - throw new NotImplementedException(); - } - - - @Override - public List getPartitionGroups( long tableId ) { - throw new NotImplementedException(); - } - - - @Override - public List getPartitionGroups( Pattern schemaNamePattern, Pattern tableNamePattern ) { - throw new NotImplementedException(); - } - - - @Override - public List getPartitionGroupNames( long tableId ) { - throw new 
NotImplementedException(); - } - - - @Override - public List getColumnPlacementsByPartitionGroup( long tableId, long partitionGroupId, long columnId ) { - throw new NotImplementedException(); - } - - - @Override - public List getAdaptersByPartitionGroup( long tableId, long partitionGroupId ) { - throw new NotImplementedException(); - } - - - @Override - public List getPartitionGroupsOnDataPlacement( int adapterId, long tableId ) { - throw new NotImplementedException(); - } - - - @Override - public List getPartitionGroupsIndexOnDataPlacement( int adapterId, long tableId ) { - throw new NotImplementedException(); - } - - - @Override - public void flagTableForDeletion( long tableId, boolean flag ) { - throw new NotImplementedException(); - } - - - @Override - public boolean isTableFlaggedForDeletion( long tableId ) { - throw new NotImplementedException(); - } - - - @Override - public void close() { + public void close() { throw new NotImplementedException(); } @@ -970,354 +252,8 @@ public void clear() { } - /** - * Adds a partition to the catalog - * - * @param tableId The unique id of the table - * @param schemaId The unique id of the schema - * @param partitionGroupId The partition group to which the partition should initially be added - * @return The id of the created partition - */ - @Override - public long addPartition( long tableId, long schemaId, long partitionGroupId, List effectivePartitionGroupQualifier, boolean isUnbound ) throws GenericCatalogException { - throw new NotImplementedException(); - } - - - /** - * Deletes a single partition and all references. - * - * @param tableId The unique id of the table - * @param schemaId The unique id of the schema - * @param partitionId The partitionId to be deleted - */ - @Override - public void deletePartition( long tableId, long schemaId, long partitionId ) { - throw new NotImplementedException(); - } - - - /** - * Get a partition object by its unique id - * - * @param partitionId The unique id of the partition - * @return A catalog partition - */ - @Override - public CatalogPartition getPartition( long partitionId ) { - throw new NotImplementedException(); - } - - - /** - * Updates the partition properties of a table - * - * @param tableId Table to be partitioned - * @param partitionProperty Partition properties - */ - @Override - public void updateTablePartitionProperties( long tableId, PartitionProperty partitionProperty ) { - throw new NotImplementedException(); - } - - - /** - * Get a list of all partitions belonging to a specific partition group - * - * @param partitionGroupId The partition group to be queried - * @return list of all partitions in this partition group - */ - @Override - public List getPartitions( long partitionGroupId ) { - throw new NotImplementedException(); - } - - - /** - * Get all partitions of the specified database which match the given filter patterns. - * Invoking it with null patterns returns all partitions of the database. - * - * @param schemaNamePattern Pattern for the schema name. null returns all. - * @param tableNamePattern Pattern for the table name. null returns all. - * @return List of partitions which match the specified filters. If no partition meets the criteria, an empty list is returned. 
- */ - @Override - public List getPartitions( Pattern schemaNamePattern, Pattern tableNamePattern ) { - throw new NotImplementedException(); - } - - - /** - * Get all partitions of a DataPlacement (identified by adapterId and tableId) - * - * @param adapterId The unique id of the adapter - * @param tableId The unique id of the table - * @return List of partitionIds - */ - @Override - public List getPartitionsOnDataPlacement( int adapterId, long tableId ) { - throw new NotImplementedException(); - } - - - /** - * Adds a placement for a partition. - * - * @param namespaceId The id of the namespace - * @param adapterId The adapter on which the partition should be placed - * @param placementType The type of placement - * @param physicalSchemaName The schema name on the adapter - * @param physicalTableName The table name on the adapter - */ - @Override - public void addPartitionPlacement( long namespaceId, int adapterId, long tableId, long partitionId, PlacementType placementType, String physicalSchemaName, String physicalTableName, DataPlacementRole role ) { - throw new NotImplementedException(); - } - - - /** - * Deletes a placement for a partition. - * - * @param adapterId The adapter from which the partition placement should be removed - */ - @Override - public void deletePartitionPlacement( int adapterId, long partitionId ) { - throw new NotImplementedException(); - } - - - @Override - public CatalogPartitionPlacement getPartitionPlacement( int adapterId, long partitionId ) { - throw new NotImplementedException(); - } - - - @Override - public List getPartitionPlacementsByAdapter( int adapterId ) { - throw new NotImplementedException(); - } - - - @Override - public List getPartitionPlacementsByTableOnAdapter( int adapterId, long tableId ) { - throw new NotImplementedException(); - } - - - @Override - public List getAllPartitionPlacementsByTable( long tableId ) { - throw new NotImplementedException(); - } - - - @Override - public List getPartitionPlacements( long partitionId ) { - throw new NotImplementedException(); - } - - - @Override - public boolean checkIfExistsPartitionPlacement( int adapterId, long partitionId ) { - throw new NotImplementedException(); - } - - - @Override - public void removeTableFromPeriodicProcessing( long tableId ) { - throw new NotImplementedException(); - } - - - @Override - public void addTableToPeriodicProcessing( long tableId ) { - throw new NotImplementedException(); - } - - - @Override - public List getTablesForPeriodicProcessing() { - throw new NotImplementedException(); - } - - - @Override - public List getPartitionsByTable( long tableId ) { - throw new NotImplementedException(); - } - - - /** - * Updates the specified partition group with the attached partitionIds - * - * @param partitionIds List of new partitionIds - */ - @Override - public void updatePartitionGroup( long partitionGroupId, List partitionIds ) { - throw new NotImplementedException(); - } - - - @Override - public void addPartitionToGroup( long partitionGroupId, Long partitionId ) { - throw new NotImplementedException(); - } - - - @Override - public void removePartitionFromGroup( long partitionGroupId, Long partitionId ) { - throw new NotImplementedException(); - } - - - /** - * Assigns the partition to a new partition group - */ - @Override - public void updatePartition( long partitionId, Long partitionGroupId ) { - throw new NotImplementedException(); - } - - - @Override - public CatalogDataPlacement getDataPlacement( int adapterId, long tableId ) { - throw new NotImplementedException(); - } - - - @Override - public List 
getDataPlacements( long tableId ) { - throw new NotImplementedException(); - } - - - @Override - public List getDataPlacementsByRole( long tableId, DataPlacementRole role ) { - throw new NotImplementedException(); - } - - - @Override - public List getPartitionPlacementsByRole( long tableId, DataPlacementRole role ) { - throw new NotImplementedException(); - } - - - @Override - public void addDataPlacement( int adapterId, long tableId ) { - throw new NotImplementedException(); - } - - - @Override - public CatalogDataPlacement addDataPlacementIfNotExists( int adapterId, long tableId ) { - throw new NotImplementedException(); - } - - - @Override - protected void modifyDataPlacement( int adapterId, long tableId, CatalogDataPlacement catalogDataPlacement ) { - throw new NotImplementedException(); - } - - - @Override - public void removeDataPlacement( int adapterId, long tableId ) { - throw new NotImplementedException(); - } - - - @Override - protected void addSingleDataPlacementToTable( Integer adapterId, long tableId ) { - throw new NotImplementedException(); - } - - - @Override - protected void removeSingleDataPlacementFromTable( Integer adapterId, long tableId ) { - throw new NotImplementedException(); - } - - - @Override - public void updateDataPlacementsOnTable( long tableId, List newDataPlacements ) { - throw new NotImplementedException(); - } - - - @Override - protected void addColumnsToDataPlacement( int adapterId, long tableId, List columnIds ) { - throw new NotImplementedException(); - } - - - @Override - protected void removeColumnsFromDataPlacement( int adapterId, long tableId, List columnIds ) { - throw new NotImplementedException(); - } - - - @Override - protected void addPartitionsToDataPlacement( int adapterId, long tableId, List partitionIds ) { - throw new NotImplementedException(); - } - - - @Override - protected void removePartitionsFromDataPlacement( int adapterId, long tableId, List partitionIds ) { - throw new NotImplementedException(); - } - - - @Override - public void updateDataPlacement( int adapterId, long tableId, List columnIds, List partitionIds ) { - throw new NotImplementedException(); - } - - - @Override - public ImmutableMap> getColumnPlacementsByAdapter( long tableId ) { - throw new NotImplementedException(); - } - - - @Override - public ImmutableMap> getPartitionPlacementsByAdapter( long tableId ) { - throw new NotImplementedException(); - } - - - @Override - public boolean validateDataPlacementsConstraints( long tableId, long adapterId, List columnIdsToBeRemoved, List partitionsIdsToBeRemoved ) { - throw new NotImplementedException(); - } - - - @Override - public List getAllFullDataPlacements( long tableId ) { - throw new NotImplementedException(); - } - - - @Override - public List getAllColumnFullDataPlacements( long tableId ) { - throw new NotImplementedException(); - } - - - @Override - public List getAllPartitionFullDataPlacements( long tableId ) { - throw new NotImplementedException(); - } - - - @Override - public long getPartitionGroupByPartition( long partitionId ) { - throw new NotImplementedException(); - } - - @Override - public List getPartitionPlacementsByIdAndRole( long tableId, long partitionId, DataPlacementRole role ) { + public List> getPhysicalsOnAdapter( long tableId ) { throw new NotImplementedException(); } diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java index 0c0a0803f6..0bfb490ebb 100644 --- 
a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java @@ -82,7 +82,6 @@ import org.polypheny.db.catalog.exceptions.UnknownCollationException; import org.polypheny.db.catalog.exceptions.UnknownColumnException; import org.polypheny.db.catalog.exceptions.UnknownConstraintException; -import org.polypheny.db.catalog.exceptions.UnknownDatabaseException; import org.polypheny.db.catalog.exceptions.UnknownForeignKeyException; import org.polypheny.db.catalog.exceptions.UnknownGraphException; import org.polypheny.db.catalog.exceptions.UnknownIndexException; @@ -160,21 +159,21 @@ private void checkViewDependencies( LogicalTable catalogTable ) { if ( catalogTable.connectedViews.size() > 0 ) { List views = new ArrayList<>(); for ( Long id : catalogTable.connectedViews ) { - views.add( catalog.getTable( id ).name ); + views.add( catalog.getLogicalRel( catalogTable.namespaceId ).getTable( id ).name ); } throw new PolyphenyDbException( "Cannot alter table because of underlying View " + views.stream().map( String::valueOf ).collect( Collectors.joining( (", ") ) ) ); } } - private void addDefaultValue( String defaultValue, long addedColumnId ) { + private void addDefaultValue( long namespaceId, String defaultValue, long addedColumnId ) { if ( defaultValue != null ) { // TODO: String is only a temporary solution for default values String v = defaultValue; if ( v.startsWith( "'" ) ) { v = v.substring( 1, v.length() - 1 ); } - catalog.setDefaultValue( addedColumnId, PolyType.VARCHAR, v ); + catalog.getLogicalRel( namespaceId ).setDefaultValue( addedColumnId, PolyType.VARCHAR, v ); } } @@ -195,9 +194,9 @@ protected DataStore getDataStoreInstance( int storeId ) throws DdlOnSourceExcept } - private LogicalColumn getCatalogColumn( long tableId, String columnName ) throws ColumnNotExistsException { + private LogicalColumn getCatalogColumn( long namespaceId, long tableId, String columnName ) throws ColumnNotExistsException { try { - return catalog.getColumn( tableId, columnName ); + return catalog.getLogicalRel( namespaceId ).getColumn( tableId, columnName ); } catch ( UnknownColumnException e ) { throw new ColumnNotExistsException( tableId, columnName ); } @@ -212,7 +211,7 @@ public long createNamespace( String name, long databaseId, NamespaceType type, i if ( ifNotExists ) { // It is ok that there is already a schema with this name because "IF NOT EXISTS" was specified try { - return catalog.getSchema( Catalog.defaultDatabaseId, name ).id; + return catalog.getNamespace( name ).id; } catch ( UnknownSchemaException e ) { throw new RuntimeException( "The catalog seems to be corrupt, as it was impossible to retrieve an existing namespace." ); } @@ -232,84 +231,93 @@ public void addAdapter( String uniqueName, String adapterName, AdapterType adapt uniqueName = uniqueName.toLowerCase(); Adapter adapter = AdapterManager.getInstance().addAdapter( adapterName, uniqueName, adapterType, config ); if ( adapter instanceof DataSource ) { - Map> exportedColumns; - try { - exportedColumns = ((DataSource) adapter).getExportedColumns(); - } catch ( Exception e ) { - AdapterManager.getInstance().removeAdapter( adapter.getAdapterId() ); - throw new RuntimeException( "Could not deploy adapter", e ); - } - // Create table, columns etc. 
- for ( Map.Entry> entry : exportedColumns.entrySet() ) { - // Make sure the table name is unique - String tableName = entry.getKey(); - if ( catalog.checkIfExistsEntity( 1, tableName ) ) { - int i = 0; - while ( catalog.checkIfExistsEntity( 1, tableName + i ) ) { - i++; - } - tableName += i; - } + handleSource( (DataSource) adapter ); + } + } - long tableId = catalog.addTable( tableName, 1, 1, EntityType.SOURCE, !((DataSource) adapter).isDataReadOnly() ); - List primaryKeyColIds = new ArrayList<>(); - int colPos = 1; - String physicalSchemaName = null; - String physicalTableName = null; - for ( ExportedColumn exportedColumn : entry.getValue() ) { - long columnId = catalog.addColumn( - exportedColumn.name, - tableId, - colPos++, - exportedColumn.type, - exportedColumn.collectionsType, - exportedColumn.length, - exportedColumn.scale, - exportedColumn.dimension, - exportedColumn.cardinality, - exportedColumn.nullable, - Collation.getDefaultCollation() ); - catalog.addColumnPlacement( - adapter.getAdapterId(), - columnId, - PlacementType.STATIC, - exportedColumn.physicalSchemaName, - exportedColumn.physicalTableName, - exportedColumn.physicalColumnName - ); // Not a valid partitionGroupID --> placeholder - catalog.updateColumnPlacementPhysicalPosition( adapter.getAdapterId(), columnId, exportedColumn.physicalPosition ); - if ( exportedColumn.primary ) { - primaryKeyColIds.add( columnId ); - } - if ( physicalSchemaName == null ) { - physicalSchemaName = exportedColumn.physicalSchemaName; - } - if ( physicalTableName == null ) { - physicalTableName = exportedColumn.physicalTableName; - } + + private void handleSource( DataSource adapter ) { + long defaultNamespaceId = 1; + Map> exportedColumns; + try { + exportedColumns = adapter.getExportedColumns(); + } catch ( Exception e ) { + AdapterManager.getInstance().removeAdapter( adapter.getAdapterId() ); + throw new RuntimeException( "Could not deploy adapter", e ); + } + // Create table, columns etc. + for ( Map.Entry> entry : exportedColumns.entrySet() ) { + // Make sure the table name is unique + String tableName = entry.getKey(); + if ( catalog.getLogicalRel( defaultNamespaceId ).checkIfExistsEntity( tableName ) ) { // apparently we put them all into 1? 
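+ // defaultNamespaceId is hard-coded to 1 above, so every exported source table is created in the default namespace; the uniqueness check on the previous line therefore only considers that single namespace.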
+ int i = 0; + while ( catalog.getLogicalRel( defaultNamespaceId ).checkIfExistsEntity( tableName + i ) ) { + i++; } - try { - catalog.addPrimaryKey( tableId, primaryKeyColIds ); - LogicalTable catalogTable = catalog.getTable( tableId ); - catalog.addPartitionPlacement( - catalogTable.namespaceId, - adapter.getAdapterId(), - catalogTable.id, - catalogTable.partitionProperty.partitionIds.get( 0 ), - PlacementType.AUTOMATIC, - physicalSchemaName, - physicalTableName, - DataPlacementRole.UPTODATE ); - } catch ( GenericCatalogException e ) { - throw new RuntimeException( "Exception while adding primary key" ); + tableName += i; + } + + long tableId = catalog.getLogicalRel( defaultNamespaceId ).addTable( tableName, 1, EntityType.SOURCE, !(adapter).isDataReadOnly() ); + List primaryKeyColIds = new ArrayList<>(); + int colPos = 1; + String physicalSchemaName = null; + String physicalTableName = null; + for ( ExportedColumn exportedColumn : entry.getValue() ) { + long columnId = catalog.getLogicalRel( defaultNamespaceId ).addColumn( + exportedColumn.name, + tableId, + colPos++, + exportedColumn.type, + exportedColumn.collectionsType, + exportedColumn.length, + exportedColumn.scale, + exportedColumn.dimension, + exportedColumn.cardinality, + exportedColumn.nullable, + Collation.getDefaultCollation() ); + catalog.getAllocRel( defaultNamespaceId ).addColumnPlacement( + adapter.getAdapterId(), + columnId, + PlacementType.STATIC, + exportedColumn.physicalSchemaName, + exportedColumn.physicalTableName, + exportedColumn.physicalColumnName + ); // Not a valid partitionGroupID --> placeholder + catalog.getAllocRel( defaultNamespaceId ).updateColumnPlacementPhysicalPosition( adapter.getAdapterId(), columnId, exportedColumn.physicalPosition ); + if ( exportedColumn.primary ) { + primaryKeyColIds.add( columnId ); } + if ( physicalSchemaName == null ) { + physicalSchemaName = exportedColumn.physicalSchemaName; + } + if ( physicalTableName == null ) { + physicalTableName = exportedColumn.physicalTableName; + } + } + try { + catalog.getLogicalRel( defaultNamespaceId ).addPrimaryKey( tableId, primaryKeyColIds ); + LogicalTable catalogTable = catalog.getLogicalRel( defaultNamespaceId ).getTable( tableId ); + catalog.getAllocRel( defaultNamespaceId ) + .addPartitionPlacement( + catalogTable.namespaceId, + adapter.getAdapterId(), + catalogTable.id, + catalogTable.partitionProperty.partitionIds.get( 0 ), + PlacementType.AUTOMATIC, + physicalSchemaName, + physicalTableName, + DataPlacementRole.UPTODATE ); + } catch ( GenericCatalogException e ) { + throw new RuntimeException( "Exception while adding primary key" ); } } + } @Override public void dropAdapter( String name, Statement statement ) throws UnknownAdapterException { + long defaultNamespaceId = 1; if ( name.startsWith( "'" ) ) { name = name.substring( 1 ); } @@ -321,12 +329,12 @@ public void dropAdapter( String name, Statement statement ) throws UnknownAdapte if ( catalogAdapter.type == AdapterType.SOURCE ) { // Remove collection Set collectionsToDrop = new HashSet<>(); - for ( CatalogCollectionPlacement collectionPlacement : catalog.getCollectionPlacementsByAdapter( catalogAdapter.id ) ) { + for ( CatalogCollectionPlacement collectionPlacement : catalog.getAllocDoc( defaultNamespaceId ).getCollectionPlacementsByAdapter( catalogAdapter.id ) ) { collectionsToDrop.add( collectionPlacement.collectionId ); } for ( long id : collectionsToDrop ) { - LogicalCollection collection = catalog.getCollection( id ); + LogicalCollection collection = catalog.getLogicalDoc( 1 
).getCollection( id ); // Make sure that there is only one adapter if ( collection.placements.size() != 1 ) { @@ -339,21 +347,21 @@ public void dropAdapter( String name, Statement statement ) throws UnknownAdapte // Remove table Set tablesToDrop = new HashSet<>(); - for ( CatalogColumnPlacement ccp : catalog.getColumnPlacementsOnAdapter( catalogAdapter.id ) ) { + for ( CatalogColumnPlacement ccp : catalog.getAllocRel( defaultNamespaceId ).getColumnPlacementsOnAdapter( catalogAdapter.id ) ) { tablesToDrop.add( ccp.tableId ); } for ( Long id : tablesToDrop ) { - if ( catalog.getTable( id ).entityType != EntityType.MATERIALIZED_VIEW ) { + if ( catalog.getLogicalRel( defaultNamespaceId ).getTable( id ).entityType != EntityType.MATERIALIZED_VIEW ) { tablesToDrop.add( id ); } } // Remove foreign keys for ( Long tableId : tablesToDrop ) { - for ( CatalogForeignKey fk : catalog.getForeignKeys( tableId ) ) { + for ( CatalogForeignKey fk : catalog.getLogicalRel( defaultNamespaceId ).getForeignKeys( tableId ) ) { try { - catalog.deleteForeignKey( fk.id ); + catalog.getLogicalRel( defaultNamespaceId ).deleteForeignKey( fk.id ); } catch ( GenericCatalogException e ) { throw new PolyphenyDbContextException( "Exception while dropping foreign key", e ); } @@ -361,7 +369,7 @@ public void dropAdapter( String name, Statement statement ) throws UnknownAdapte } // Drop tables for ( Long tableId : tablesToDrop ) { - LogicalTable table = catalog.getTable( tableId ); + LogicalTable table = catalog.getLogicalRel( defaultNamespaceId ).getTable( tableId ); // Make sure that there is only one adapter if ( table.dataPlacements.size() != 1 ) { @@ -374,26 +382,26 @@ public void dropAdapter( String name, Statement statement ) throws UnknownAdapte } // Delete column placement in catalog - for ( Long columnId : table.fieldIds ) { - if ( catalog.checkIfExistsColumnPlacement( catalogAdapter.id, columnId ) ) { - catalog.deleteColumnPlacement( catalogAdapter.id, columnId, false ); + for ( LogicalColumn column : table.columns ) { + if ( catalog.getAllocRel( defaultNamespaceId ).checkIfExistsColumnPlacement( catalogAdapter.id, column.id ) ) { + catalog.getAllocRel( defaultNamespaceId ).deleteColumnPlacement( catalogAdapter.id, column.id, false ); } } // Remove primary keys try { - catalog.deletePrimaryKey( table.id ); + catalog.getLogicalRel( defaultNamespaceId ).deletePrimaryKey( table.id ); } catch ( GenericCatalogException e ) { throw new PolyphenyDbContextException( "Exception while dropping primary key", e ); } // Delete columns - for ( Long columnId : table.fieldIds ) { - catalog.deleteColumn( columnId ); + for ( LogicalColumn column : table.columns ) { + catalog.getLogicalRel( defaultNamespaceId ).deleteColumn( column.id ); } // Delete the table - catalog.deleteTable( table.id ); + catalog.getLogicalRel( defaultNamespaceId ).deleteTable( table.id ); } // Reset plan cache implementation cache & routing cache @@ -404,20 +412,12 @@ public void dropAdapter( String name, Statement statement ) throws UnknownAdapte @Override - public void alterSchemaOwner( String schemaName, String ownerName, long databaseId ) throws UnknownUserException, UnknownSchemaException { - LogicalNamespace logicalNamespace = catalog.getSchema( databaseId, schemaName ); - CatalogUser catalogUser = catalog.getUser( ownerName ); - // catalog.setNamespaceOwner( logicalNamespace.id, catalogUser.id ); - } - - - @Override - public void renameSchema( String newName, String oldName, long databaseId ) throws NamespaceAlreadyExistsException, UnknownSchemaException 
{ + public void renameSchema( String newName, String oldName ) throws NamespaceAlreadyExistsException, UnknownSchemaException { newName = newName.toLowerCase(); if ( catalog.checkIfExistsNamespace( newName ) ) { throw new NamespaceAlreadyExistsException(); } - LogicalNamespace logicalNamespace = catalog.getSchema( databaseId, oldName ); + LogicalNamespace logicalNamespace = catalog.getNamespace( oldName ); catalog.renameNamespace( logicalNamespace.id, newName ); // Update Name in statistics @@ -428,12 +428,12 @@ public void renameSchema( String newName, String oldName, long databaseId ) thro @Override public void addColumnToSourceTable( LogicalTable catalogTable, String columnPhysicalName, String columnLogicalName, String beforeColumnName, String afterColumnName, String defaultValue, Statement statement ) throws ColumnAlreadyExistsException, DdlOnSourceException, ColumnNotExistsException { - if ( catalog.checkIfExistsColumn( catalogTable.id, columnLogicalName ) ) { + if ( catalog.getLogicalRel( catalogTable.namespaceId ).checkIfExistsColumn( catalogTable.id, columnLogicalName ) ) { throw new ColumnAlreadyExistsException( columnLogicalName, catalogTable.name ); } - LogicalColumn beforeColumn = beforeColumnName == null ? null : getCatalogColumn( catalogTable.id, beforeColumnName ); - LogicalColumn afterColumn = afterColumnName == null ? null : getCatalogColumn( catalogTable.id, afterColumnName ); + LogicalColumn beforeColumn = beforeColumnName == null ? null : getCatalogColumn( catalogTable.namespaceId, catalogTable.id, beforeColumnName ); + LogicalColumn afterColumn = afterColumnName == null ? null : getCatalogColumn( catalogTable.namespaceId, catalogTable.id, afterColumnName ); // Make sure that the table is of table type SOURCE if ( catalogTable.entityType != EntityType.SOURCE ) { @@ -441,14 +441,14 @@ public void addColumnToSourceTable( LogicalTable catalogTable, String columnPhys } // Make sure there is only one adapter - if ( catalog.getColumnPlacement( catalogTable.fieldIds.get( 0 ) ).size() != 1 ) { + if ( catalog.getAllocRel( catalogTable.namespaceId ).getColumnPlacement( catalogTable.columns.get( 0 ).id ).size() != 1 ) { throw new RuntimeException( "The table has an unexpected number of placements!" 
); } - int adapterId = catalog.getColumnPlacement( catalogTable.fieldIds.get( 0 ) ).get( 0 ).adapterId; + int adapterId = catalog.getAllocRel( catalogTable.namespaceId ).getColumnPlacement( catalogTable.columns.get( 0 ).id ).get( 0 ).adapterId; DataSource dataSource = (DataSource) AdapterManager.getInstance().getAdapter( adapterId ); - String physicalTableName = catalog.getPartitionPlacement( adapterId, catalogTable.partitionProperty.partitionIds.get( 0 ) ).physicalTableName; + String physicalTableName = catalog.getAllocRel( catalogTable.namespaceId ).getPartitionPlacement( adapterId, catalogTable.partitionProperty.partitionIds.get( 0 ) ).physicalTableName; List exportedColumns = dataSource.getExportedColumns().get( physicalTableName ); // Check if physicalColumnName is valid @@ -463,7 +463,7 @@ public void addColumnToSourceTable( LogicalTable catalogTable, String columnPhys } // Make sure this physical column has not already been added to this table - for ( CatalogColumnPlacement ccp : catalog.getColumnPlacementsOnAdapterPerTable( adapterId, catalogTable.id ) ) { + for ( CatalogColumnPlacement ccp : catalog.getAllocRel( catalogTable.namespaceId ).getColumnPlacementsOnAdapterPerTable( adapterId, catalogTable.id ) ) { if ( ccp.physicalColumnName.equalsIgnoreCase( columnPhysicalName ) ) { throw new RuntimeException( "The physical column '" + columnPhysicalName + "' has already been added to this table!" ); } @@ -471,7 +471,7 @@ public void addColumnToSourceTable( LogicalTable catalogTable, String columnPhys int position = updateAdjacentPositions( catalogTable, beforeColumn, afterColumn ); - long columnId = catalog.addColumn( + long columnId = catalog.getLogicalRel( catalogTable.namespaceId ).addColumn( columnLogicalName, catalogTable.id, position, @@ -486,11 +486,11 @@ public void addColumnToSourceTable( LogicalTable catalogTable, String columnPhys ); // Add default value - addDefaultValue( defaultValue, columnId ); - LogicalColumn addedColumn = catalog.getColumn( columnId ); + addDefaultValue( catalogTable.namespaceId, defaultValue, columnId ); + LogicalColumn addedColumn = catalog.getLogicalRel( catalogTable.namespaceId ).getColumn( columnId ); // Add column placement - catalog.addColumnPlacement( + catalog.getAllocRel( catalogTable.namespaceId ).addColumnPlacement( adapterId, addedColumn.id, PlacementType.STATIC, @@ -500,7 +500,7 @@ public void addColumnToSourceTable( LogicalTable catalogTable, String columnPhys );//Not a valid partitionID --> placeholder // Set column position - catalog.updateColumnPlacementPhysicalPosition( adapterId, columnId, exportedColumn.physicalPosition ); + catalog.getAllocRel( catalogTable.namespaceId ).updateColumnPlacementPhysicalPosition( adapterId, columnId, exportedColumn.physicalPosition ); // Reset plan cache implementation cache & routing cache statement.getQueryProcessor().resetCaches(); @@ -508,7 +508,7 @@ public void addColumnToSourceTable( LogicalTable catalogTable, String columnPhys private int updateAdjacentPositions( LogicalTable catalogTable, LogicalColumn beforeColumn, LogicalColumn afterColumn ) { - List columns = catalog.getColumns( catalogTable.id ); + List columns = catalog.getLogicalRel( catalogTable.namespaceId ).getColumns( catalogTable.id ); int position = columns.size() + 1; if ( beforeColumn != null || afterColumn != null ) { if ( beforeColumn != null ) { @@ -518,7 +518,7 @@ private int updateAdjacentPositions( LogicalTable catalogTable, LogicalColumn be } // Update position of the other columns for ( int i = columns.size(); i >= 
position; i-- ) { - catalog.setColumnPosition( columns.get( i - 1 ).id, i + 1 ); + catalog.getLogicalRel( catalogTable.namespaceId ).setColumnPosition( columns.get( i - 1 ).id, i + 1 ); } } return position; @@ -533,16 +533,16 @@ public void addColumn( String columnName, LogicalTable catalogTable, String befo throw new NotNullAndDefaultValueException(); } - if ( catalog.checkIfExistsColumn( catalogTable.id, columnName ) ) { + if ( catalog.getLogicalRel( catalogTable.namespaceId ).checkIfExistsColumn( catalogTable.id, columnName ) ) { throw new ColumnAlreadyExistsException( columnName, catalogTable.name ); } // - LogicalColumn beforeColumn = beforeColumnName == null ? null : getCatalogColumn( catalogTable.id, beforeColumnName ); - LogicalColumn afterColumn = afterColumnName == null ? null : getCatalogColumn( catalogTable.id, afterColumnName ); + LogicalColumn beforeColumn = beforeColumnName == null ? null : getCatalogColumn( catalogTable.namespaceId, catalogTable.id, beforeColumnName ); + LogicalColumn afterColumn = afterColumnName == null ? null : getCatalogColumn( catalogTable.namespaceId, catalogTable.id, afterColumnName ); int position = updateAdjacentPositions( catalogTable, beforeColumn, afterColumn ); - long columnId = catalog.addColumn( + long columnId = catalog.getLogicalRel( catalogTable.namespaceId ).addColumn( columnName, catalogTable.id, position, @@ -557,15 +557,15 @@ public void addColumn( String columnName, LogicalTable catalogTable, String befo ); // Add default value - addDefaultValue( defaultValue, columnId ); - LogicalColumn addedColumn = catalog.getColumn( columnId ); + addDefaultValue( catalogTable.namespaceId, defaultValue, columnId ); + LogicalColumn addedColumn = catalog.getLogicalRel( catalogTable.namespaceId ).getColumn( columnId ); // Ask router on which stores this column shall be placed List stores = RoutingManager.getInstance().getCreatePlacementStrategy().getDataStoresForNewColumn( addedColumn ); // Add column on underlying data stores and insert default value for ( DataStore store : stores ) { - catalog.addColumnPlacement( + catalog.getAllocRel( catalogTable.namespaceId ).addColumnPlacement( store.getAdapterId(), addedColumn.id, PlacementType.AUTOMATIC, @@ -585,23 +585,23 @@ public void addColumn( String columnName, LogicalTable catalogTable, String befo public void addForeignKey( LogicalTable catalogTable, LogicalTable refTable, List columnNames, List refColumnNames, String constraintName, ForeignKeyOption onUpdate, ForeignKeyOption onDelete ) throws UnknownColumnException, GenericCatalogException { List columnIds = new LinkedList<>(); for ( String columnName : columnNames ) { - LogicalColumn logicalColumn = catalog.getColumn( catalogTable.id, columnName ); + LogicalColumn logicalColumn = catalog.getLogicalRel( catalogTable.namespaceId ).getColumn( catalogTable.id, columnName ); columnIds.add( logicalColumn.id ); } List referencesIds = new LinkedList<>(); for ( String columnName : refColumnNames ) { - LogicalColumn logicalColumn = catalog.getColumn( refTable.id, columnName ); + LogicalColumn logicalColumn = catalog.getLogicalRel( catalogTable.namespaceId ).getColumn( refTable.id, columnName ); referencesIds.add( logicalColumn.id ); } - catalog.addForeignKey( catalogTable.id, columnIds, refTable.id, referencesIds, constraintName, onUpdate, onDelete ); + catalog.getLogicalRel( catalogTable.namespaceId ).addForeignKey( catalogTable.id, columnIds, refTable.id, referencesIds, constraintName, onUpdate, onDelete ); } @Override - public void addIndex( LogicalTable 
catalogTable, String indexMethodName, List columnNames, String indexName, boolean isUnique, DataStore location, Statement statement ) throws UnknownColumnException, UnknownIndexMethodException, GenericCatalogException, UnknownTableException, UnknownUserException, UnknownSchemaException, UnknownKeyException, UnknownDatabaseException, TransactionException, AlterSourceException, IndexExistsException, MissingColumnPlacementException { + public void addIndex( LogicalTable catalogTable, String indexMethodName, List columnNames, String indexName, boolean isUnique, DataStore location, Statement statement ) throws UnknownColumnException, UnknownIndexMethodException, GenericCatalogException, UnknownTableException, UnknownUserException, UnknownSchemaException, UnknownKeyException, TransactionException, AlterSourceException, IndexExistsException, MissingColumnPlacementException { List columnIds = new LinkedList<>(); for ( String columnName : columnNames ) { - LogicalColumn logicalColumn = catalog.getColumn( catalogTable.id, columnName ); + LogicalColumn logicalColumn = catalog.getLogicalRel( catalogTable.namespaceId ).getColumn( catalogTable.id, columnName ); columnIds.add( logicalColumn.id ); } @@ -613,7 +613,7 @@ public void addIndex( LogicalTable catalogTable, String indexMethodName, List 0 ) { for ( long columnId : columnIds ) { - if ( !catalog.checkIfExistsColumnPlacement( dataPlacement.adapterId, columnId ) ) { + if ( !catalog.getAllocRel( catalogTable.namespaceId ).checkIfExistsColumnPlacement( dataPlacement.adapterId, columnId ) ) { hasAllColumns = false; } } @@ -648,11 +648,11 @@ public void addIndex( LogicalTable catalogTable, String indexMethodName, List 0 ) { for ( long columnId : columnIds ) { - if ( !catalog.checkIfExistsColumnPlacement( dataPlacement.adapterId, columnId ) ) { + if ( !catalog.getAllocRel( catalogTable.namespaceId ).checkIfExistsColumnPlacement( dataPlacement.adapterId, columnId ) ) { hasAllColumns = false; } } @@ -661,7 +661,7 @@ public void addIndex( LogicalTable catalogTable, String indexMethodName, List columnIds, IndexType type ) throws MissingColumnPlacementException, UnknownIndexMethodException, GenericCatalogException { // Check if all required columns are present on this store for ( long columnId : columnIds ) { - if ( !catalog.checkIfExistsColumnPlacement( location.getAdapterId(), columnId ) ) { - throw new MissingColumnPlacementException( catalog.getColumn( columnId ).name ); + if ( !catalog.getAllocRel( catalogTable.namespaceId ).checkIfExistsColumnPlacement( location.getAdapterId(), columnId ) ) { + throw new MissingColumnPlacementException( catalog.getLogicalRel( catalogTable.namespaceId ).getColumn( columnId ).name ); } } @@ -706,7 +706,7 @@ private void addDataStoreIndex( LogicalTable catalogTable, String indexMethodNam methodDisplayName = location.getDefaultIndexMethod().displayName; } - long indexId = catalog.addIndex( + long indexId = catalog.getLogicalRel( catalogTable.namespaceId ).addIndex( catalogTable.id, columnIds, isUnique, @@ -718,16 +718,16 @@ private void addDataStoreIndex( LogicalTable catalogTable, String indexMethodNam location.addIndex( statement.getPrepareContext(), - catalog.getIndex( indexId ), - catalog.getPartitionsOnDataPlacement( location.getAdapterId(), catalogTable.id ) ); + catalog.getLogicalRel( catalogTable.namespaceId ).getIndex( indexId ), + catalog.getAllocRel( catalogTable.namespaceId ).getPartitionsOnDataPlacement( location.getAdapterId(), catalogTable.id ) ); } - public void addPolyphenyIndex( LogicalTable catalogTable, 
String indexMethodName, List columnNames, String indexName, boolean isUnique, Statement statement ) throws UnknownColumnException, UnknownIndexMethodException, GenericCatalogException, UnknownTableException, UnknownUserException, UnknownSchemaException, UnknownKeyException, UnknownDatabaseException, TransactionException, AlterSourceException, IndexExistsException, MissingColumnPlacementException { + public void addPolyphenyIndex( LogicalTable catalogTable, String indexMethodName, List columnNames, String indexName, boolean isUnique, Statement statement ) throws UnknownColumnException, UnknownIndexMethodException, GenericCatalogException, UnknownTableException, UnknownUserException, UnknownSchemaException, UnknownKeyException, TransactionException, AlterSourceException, IndexExistsException, MissingColumnPlacementException { indexName = indexName.toLowerCase(); List columnIds = new LinkedList<>(); for ( String columnName : columnNames ) { - LogicalColumn logicalColumn = catalog.getColumn( catalogTable.id, columnName ); + LogicalColumn logicalColumn = catalog.getLogicalRel( catalogTable.namespaceId ).getColumn( catalogTable.id, columnName ); columnIds.add( logicalColumn.id ); } @@ -739,7 +739,7 @@ public void addPolyphenyIndex( LogicalTable catalogTable, String indexMethodName } // Check if there is already an index with this name for this table - if ( catalog.checkIfExistsIndex( catalogTable.id, indexName ) ) { + if ( catalog.getLogicalRel( catalogTable.namespaceId ).checkIfExistsIndex( catalogTable.id, indexName ) ) { throw new IndexExistsException(); } @@ -762,7 +762,7 @@ public void addPolyphenyIndex( LogicalTable catalogTable, String indexMethodName methodDisplayName = IndexManager.getDefaultIndexMethod().displayName; } - long indexId = catalog.addIndex( + long indexId = catalog.getLogicalRel( catalogTable.namespaceId ).addIndex( catalogTable.id, columnIds, isUnique, @@ -772,7 +772,7 @@ public void addPolyphenyIndex( LogicalTable catalogTable, String indexMethodName type, indexName ); - IndexManager.getInstance().addIndex( catalog.getIndex( indexId ), statement ); + IndexManager.getInstance().addIndex( catalog.getLogicalRel( catalogTable.namespaceId ).getIndex( indexId ), statement ); } @@ -785,12 +785,12 @@ public void addDataPlacement( LogicalTable catalogTable, List columnIds, L if ( catalogTable.dataPlacements.contains( dataStore.getAdapterId() ) ) { throw new PlacementAlreadyExistsException(); } else { - catalog.addDataPlacement( dataStore.getAdapterId(), catalogTable.id ); + catalog.getAllocRel( catalogTable.namespaceId ).addDataPlacement( dataStore.getAdapterId(), catalogTable.id ); } // Check whether the list is empty (this is a shorthand for a full placement) if ( columnIds.size() == 0 ) { - columnIds = ImmutableList.copyOf( catalogTable.fieldIds ); + columnIds = ImmutableList.copyOf( catalogTable.getColumnIds() ); } // Select partitions to create on this placement diff --git a/dbms/src/test/java/org/polypheny/db/misc/VerticalPartitioningTest.java b/dbms/src/test/java/org/polypheny/db/misc/VerticalPartitioningTest.java index d02a1527a2..f6209f60cc 100644 --- a/dbms/src/test/java/org/polypheny/db/misc/VerticalPartitioningTest.java +++ b/dbms/src/test/java/org/polypheny/db/misc/VerticalPartitioningTest.java @@ -228,12 +228,12 @@ public void dataPlacementTest() throws SQLException { Assert.assertEquals( 2, dp.columnPlacementsOnAdapter.size() ); Assert.assertEquals( 2, Catalog.getInstance().getColumnPlacementsOnAdapterPerTable( adapterId, table.id ).size() ); Assert.assertEquals( 
2, Catalog.getInstance().getColumnPlacementsByAdapter( table.id ).get( adapterId ).size() ); - Assert.assertEquals( 1, Catalog.getInstance().getPartitionPlacementsByAdapter( table.id ).get( adapterId ).size() ); + Assert.assertEquals( 1, Catalog.getInstance().getPhysicalsOnAdapter( table.id ).get( adapterId ).size() ); } else if ( dp.adapterId == initialAdapterId ) { Assert.assertEquals( 2, dp.columnPlacementsOnAdapter.size() ); Assert.assertEquals( 2, Catalog.getInstance().getColumnPlacementsOnAdapterPerTable( initialAdapterId, table.id ).size() ); Assert.assertEquals( 2, Catalog.getInstance().getColumnPlacementsByAdapter( table.id ).get( initialAdapterId ).size() ); - Assert.assertEquals( 1, Catalog.getInstance().getPartitionPlacementsByAdapter( table.id ).get( initialAdapterId ).size() ); + Assert.assertEquals( 1, Catalog.getInstance().getPhysicalsOnAdapter( table.id ).get( initialAdapterId ).size() ); } } diff --git a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java index 111585c588..423f879ed7 100644 --- a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java +++ b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java @@ -41,9 +41,9 @@ import org.polypheny.db.adapter.DeployMode; import org.polypheny.db.adapter.csv.CsvTable.Flavor; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.entity.CatalogColumnPlacement; import org.polypheny.db.catalog.entity.allocation.AllocationTable; import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.catalog.entity.physical.PhysicalEntity; import org.polypheny.db.catalog.entity.physical.PhysicalTable; import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.information.InformationGroup; @@ -159,12 +159,12 @@ public Map> getExportedColumns() { Map> exportedColumnCache = new HashMap<>(); Set fileNames; if ( csvDir.getProtocol().equals( "jar" ) ) { - List placements = Catalog + List> placements = Catalog .getInstance() - .getColumnPlacementsOnAdapter( getAdapterId() ); + .getPhysicalsOnAdapter( getAdapterId() ); fileNames = new HashSet<>(); - for ( CatalogColumnPlacement ccp : placements ) { - fileNames.add( ccp.physicalSchemaName ); + for ( PhysicalEntity ccp : placements ) { + fileNames.add( ccp.namespaceName ); } } else if ( Sources.of( csvDir ).file().isFile() ) { // single files diff --git a/plugins/mapdb-catalog/src/main/java/org/polypheny/db/catalog/CatalogImpl.java b/plugins/mapdb-catalog/src/main/java/org/polypheny/db/catalog/CatalogImpl.java index 46c597e3f4..d3bc381ca5 100644 --- a/plugins/mapdb-catalog/src/main/java/org/polypheny/db/catalog/CatalogImpl.java +++ b/plugins/mapdb-catalog/src/main/java/org/polypheny/db/catalog/CatalogImpl.java @@ -88,6 +88,7 @@ import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.catalog.entity.physical.PhysicalEntity; import org.polypheny.db.catalog.exceptions.GenericCatalogException; import org.polypheny.db.catalog.exceptions.GraphAlreadyExistsException; import org.polypheny.db.catalog.exceptions.NoTablePrimaryKeyException; @@ -2362,7 +2363,7 @@ public ImmutableMap> getColumnPlacementsByAdapter( * {@inheritDoc} */ @Override - public ImmutableMap> getPartitionPlacementsByAdapter( long tableId ) { + public List getPhysicalsOnAdapter( long 
tableId ) { LogicalTable table = getTable( tableId ); Map> partitionPlacementsByAdapter = new HashMap<>(); diff --git a/plugins/mapdb-catalog/src/main/java/org/polypheny/db/catalog/CatalogImplBackup.java b/plugins/mapdb-catalog/src/main/java/org/polypheny/db/catalog/CatalogImplBackup.java index dcbcc81add..6957786946 100644 --- a/plugins/mapdb-catalog/src/main/java/org/polypheny/db/catalog/CatalogImplBackup.java +++ b/plugins/mapdb-catalog/src/main/java/org/polypheny/db/catalog/CatalogImplBackup.java @@ -90,6 +90,7 @@ import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.catalog.entity.physical.PhysicalEntity; import org.polypheny.db.catalog.exceptions.GenericCatalogException; import org.polypheny.db.catalog.exceptions.GraphAlreadyExistsException; import org.polypheny.db.catalog.exceptions.NoTablePrimaryKeyException; @@ -2450,7 +2451,7 @@ public ImmutableMap> getColumnPlacementsByAdapter( * {@inheritDoc} */ @Override - public ImmutableMap> getPartitionPlacementsByAdapter( long tableId ) { + public List getPhysicalsOnAdapter( long tableId ) { LogicalTable table = getTable( tableId ); Map> partitionPlacementsByAdapter = new HashMap<>(); diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java index 52b8556fc4..d05ae664c8 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java @@ -30,12 +30,23 @@ import lombok.NonNull; import lombok.extern.slf4j.Slf4j; import org.polypheny.db.algebra.AlgNode; +import org.polypheny.db.catalog.catalogs.AllocationCatalog; +import org.polypheny.db.catalog.catalogs.AllocationDocumentCatalog; +import org.polypheny.db.catalog.catalogs.AllocationGraphCatalog; +import org.polypheny.db.catalog.catalogs.AllocationRelationalCatalog; import org.polypheny.db.catalog.catalogs.LogicalCatalog; +import org.polypheny.db.catalog.catalogs.LogicalDocumentCatalog; +import org.polypheny.db.catalog.catalogs.LogicalGraphCatalog; +import org.polypheny.db.catalog.catalogs.LogicalRelationalCatalog; +import org.polypheny.db.catalog.catalogs.PhysicalCatalog; import org.polypheny.db.catalog.entity.CatalogAdapter; import org.polypheny.db.catalog.entity.CatalogAdapter.AdapterType; +import org.polypheny.db.catalog.entity.CatalogIndex; import org.polypheny.db.catalog.entity.CatalogQueryInterface; import org.polypheny.db.catalog.entity.CatalogUser; import org.polypheny.db.catalog.entity.LogicalNamespace; +import org.polypheny.db.catalog.entity.allocation.AllocationEntity; +import org.polypheny.db.catalog.entity.physical.PhysicalEntity; import org.polypheny.db.catalog.exceptions.UnknownAdapterException; import org.polypheny.db.catalog.exceptions.UnknownQueryInterfaceException; import org.polypheny.db.catalog.exceptions.UnknownSchemaException; @@ -64,7 +75,13 @@ public class PolyCatalog extends Catalog implements Serializable { public final BinarySerializer serializer = Serializable.builder.get().build( PolyCatalog.class ); @Serialize - public final Map catalogs; + public final Map logicalCatalogs; + + @Serialize + public final Map allocationCatalogs; + + @Serialize + public final Map physicalCatalogs; @Serialize public final Map users; @@ -82,18 +99,28 @@ public class PolyCatalog extends Catalog implements Serializable 
{ public PolyCatalog() { - this( new ConcurrentHashMap<>(), new ConcurrentHashMap<>(), new ConcurrentHashMap<>(), new ConcurrentHashMap<>() ); + this( + new ConcurrentHashMap<>(), + new ConcurrentHashMap<>(), + new ConcurrentHashMap<>(), + new ConcurrentHashMap<>(), + new ConcurrentHashMap<>(), + new ConcurrentHashMap<>() ); } public PolyCatalog( @Deserialize("users") Map users, - @Deserialize("catalogs") Map catalogs, + @Deserialize("logicalCatalogs") Map logicalCatalogs, + @Deserialize("allocationCatalogs") Map allocationCatalogs, + @Deserialize("physicalCatalogs") Map physicalCatalogs, @Deserialize("adapters") Map adapters, @Deserialize("interfaces") Map interfaces ) { this.users = users; - this.catalogs = catalogs; + this.logicalCatalogs = logicalCatalogs; + this.allocationCatalogs = allocationCatalogs; + this.physicalCatalogs = physicalCatalogs; this.adapters = adapters; this.interfaces = interfaces; updateSnapshot(); @@ -101,7 +128,7 @@ public PolyCatalog( private void updateSnapshot() { - this.fullSnapshot = new FullSnapshot( idBuilder.getNewSnapshotId(), catalogs ); + this.fullSnapshot = new FullSnapshot( idBuilder.getNewSnapshotId(), logicalCatalogs ); } @@ -121,6 +148,55 @@ public void rollback() { } + private void validateNamespaceType( long id, NamespaceType type ) { + if ( logicalCatalogs.get( id ).getLogicalNamespace().namespaceType != type ) { + throw new RuntimeException( "error while retrieving catalog" ); + } + } + + + @Override + public LogicalRelationalCatalog getLogicalRel( long id ) { + validateNamespaceType( id, NamespaceType.RELATIONAL ); + return (LogicalRelationalCatalog) logicalCatalogs.get( id ); + } + + + @Override + public LogicalDocumentCatalog getLogicalDoc( long id ) { + validateNamespaceType( id, NamespaceType.DOCUMENT ); + return (LogicalDocumentCatalog) logicalCatalogs.get( id ); + } + + + @Override + public LogicalGraphCatalog getLogicalGraph( long id ) { + validateNamespaceType( id, NamespaceType.GRAPH ); + return (LogicalGraphCatalog) logicalCatalogs.get( id ); + } + + + @Override + public AllocationRelationalCatalog getAllocRel( long id ) { + validateNamespaceType( id, NamespaceType.RELATIONAL ); + return (AllocationRelationalCatalog) allocationCatalogs.get( id ); + } + + + @Override + public AllocationDocumentCatalog getAllocDoc( long id ) { + validateNamespaceType( id, NamespaceType.DOCUMENT ); + return (AllocationDocumentCatalog) allocationCatalogs.get( id ); + } + + + @Override + public AllocationGraphCatalog getAllocGraph( long id ) { + validateNamespaceType( id, NamespaceType.GRAPH ); + return (AllocationGraphCatalog) allocationCatalogs.get( id ); + } + + @Override @Deprecated public Map getNodeInfo() { @@ -168,13 +244,13 @@ public long addNamespace( String name, NamespaceType namespaceType, boolean case switch ( namespaceType ) { case RELATIONAL: - catalogs.put( id, new RelationalCatalog( namespace, idBuilder ) ); + logicalCatalogs.put( id, new RelationalCatalog( namespace, idBuilder ) ); break; case DOCUMENT: - catalogs.put( id, new DocumentCatalog( namespace, idBuilder ) ); + logicalCatalogs.put( id, new DocumentCatalog( namespace, idBuilder ) ); break; case GRAPH: - catalogs.put( id, new GraphCatalog( namespace, idBuilder ) ); + logicalCatalogs.put( id, new GraphCatalog( namespace, idBuilder ) ); break; } change(); @@ -184,7 +260,7 @@ public long addNamespace( String name, NamespaceType namespaceType, boolean case @Override public @NonNull List getNamespaces( Pattern name ) { - return catalogs.values().stream().filter( c -> + return 
logicalCatalogs.values().stream().filter( c -> c.getLogicalNamespace().caseSensitive ? c.getLogicalNamespace().name.toLowerCase( Locale.ROOT ).matches( name.pattern ) : c.getLogicalNamespace().name.matches( name.pattern ) ) @@ -194,7 +270,7 @@ public long addNamespace( String name, NamespaceType namespaceType, boolean case @Override public LogicalNamespace getNamespace( long id ) { - return catalogs.get( id ).getLogicalNamespace(); + return logicalCatalogs.get( id ).getLogicalNamespace(); } @@ -219,10 +295,10 @@ public boolean checkIfExistsNamespace( String name ) { @Override public void renameNamespace( long id, String name ) { - if ( catalogs.get( id ) == null ) { + if ( logicalCatalogs.get( id ) == null ) { return; } - catalogs.get( id ).withLogicalNamespace( catalogs.get( id ).getLogicalNamespace().withName( name ) ); + logicalCatalogs.get( id ).withLogicalNamespace( logicalCatalogs.get( id ).getLogicalNamespace().withName( name ) ); change(); } @@ -230,7 +306,7 @@ public void renameNamespace( long id, String name ) { @Override public void deleteNamespace( long id ) { - catalogs.remove( id ); + logicalCatalogs.remove( id ); change(); } @@ -347,6 +423,24 @@ public Snapshot getSnapshot( long id ) { } + @Override + public List> getAllocationsOnAdapter( long id ) { + return allocationCatalogs.values().stream().flatMap( c -> c.getAllocationsOnAdapter( id ).stream() ).collect( Collectors.toList() ); + } + + + @Override + public List> getPhysicalsOnAdapter( long id ) { + return physicalCatalogs.values().stream().flatMap( c -> c.getPhysicalsOnAdapter( id ).stream() ).collect( Collectors.toList() ); + } + + + @Override + public List getIndexes() { + return null; + } + + @Override public PolyCatalog copy() { return deserialize( serialize(), PolyCatalog.class ); diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/document/DocumentCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/document/DocumentCatalog.java index dfaad81503..61f2c0b1d7 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/document/DocumentCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/document/DocumentCatalog.java @@ -109,7 +109,7 @@ public DocumentCatalog copy() { @Override - public boolean checkIfExistsEntity( long namespaceId, String entityName ) { + public boolean checkIfExistsEntity( String entityName ) { return false; } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/graph/GraphCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/graph/GraphCatalog.java index f06fe4e3a8..8e28105759 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/graph/GraphCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/graph/GraphCatalog.java @@ -89,7 +89,7 @@ public GraphCatalog copy() { @Override - public boolean checkIfExistsEntity( long namespaceId, String entityName ) { + public boolean checkIfExistsEntity( String entityName ) { return false; } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/relational/RelationalCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/relational/RelationalCatalog.java index 750a14475a..302b9f2020 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/relational/RelationalCatalog.java +++ 
b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/relational/RelationalCatalog.java @@ -156,7 +156,7 @@ public RelationalCatalog copy() { @Override - public boolean checkIfExistsEntity( long namespaceId, String entityName ) { + public boolean checkIfExistsEntity( String entityName ) { return false; } @@ -204,7 +204,7 @@ public LogicalTable getTableFromPartition( long partitionId ) { @Override - public long addTable( String name, long namespaceId, int ownerId, EntityType entityType, boolean modifiable ) { + public long addTable( String name, int ownerId, EntityType entityType, boolean modifiable ) { return 0; } @@ -308,7 +308,7 @@ public List getIndexes() { @Override - public long addIndex( long tableId, List columnIds, boolean unique, String method, String methodDisplayName, int location, IndexType type, String indexName ) throws GenericCatalogException { + public long addIndex( long tableId, List columnIds, boolean unique, String method, String methodDisplayName, long adapterId, IndexType type, String indexName ) throws GenericCatalogException { long keyId = getOrAddKey( tableId, columnIds, EnforcementTime.ON_QUERY ); if ( unique ) { // TODO: Check if the current values are unique @@ -322,7 +322,7 @@ public long addIndex( long tableId, List columnIds, boolean unique, String method, methodDisplayName, type, - location, + adapterId, keyId, Objects.requireNonNull( keys.get( keyId ) ), null ) ); diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/alterschema/SqlAlterSchemaOwner.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/alterschema/SqlAlterSchemaOwner.java index 1f6f253a46..df0ba88eb7 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/alterschema/SqlAlterSchemaOwner.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/alterschema/SqlAlterSchemaOwner.java @@ -17,13 +17,8 @@ package org.polypheny.db.sql.language.ddl.alterschema; -import static org.polypheny.db.util.Static.RESOURCE; - import java.util.List; import java.util.Objects; -import org.polypheny.db.catalog.exceptions.UnknownSchemaException; -import org.polypheny.db.catalog.exceptions.UnknownUserException; -import org.polypheny.db.ddl.DdlManager; import org.polypheny.db.languages.ParserPos; import org.polypheny.db.languages.QueryParameters; import org.polypheny.db.nodes.Node; @@ -33,7 +28,6 @@ import org.polypheny.db.sql.language.SqlWriter; import org.polypheny.db.sql.language.ddl.SqlAlterSchema; import org.polypheny.db.transaction.Statement; -import org.polypheny.db.util.CoreUtil; import org.polypheny.db.util.ImmutableNullableList; @@ -81,13 +75,8 @@ public void unparse( SqlWriter writer, int leftPrec, int rightPrec ) { @Override public void execute( Context context, Statement statement, QueryParameters parameters ) { - try { - DdlManager.getInstance().alterSchemaOwner( schema.getSimple(), owner.getSimple(), context.getDatabaseId() ); - } catch ( UnknownSchemaException e ) { - throw CoreUtil.newContextException( schema.getPos(), RESOURCE.schemaNotFound( schema.getSimple() ) ); - } catch ( UnknownUserException e ) { - throw CoreUtil.newContextException( owner.getPos(), RESOURCE.userNotFound( owner.getSimple() ) ); - } + // DdlManager.getInstance().alterSchemaOwner( schema.getSimple(), owner.getSimple(), context.getDatabaseId() ); + throw new UnsupportedOperationException( "This functionality is at the moment not supported" ); } } diff --git 
a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/alterschema/SqlAlterSchemaRename.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/alterschema/SqlAlterSchemaRename.java index bd654d39f9..b9fdca20ba 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/alterschema/SqlAlterSchemaRename.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/alterschema/SqlAlterSchemaRename.java @@ -79,7 +79,7 @@ public void unparse( SqlWriter writer, int leftPrec, int rightPrec ) { @Override public void execute( Context context, Statement statement, QueryParameters parameters ) { try { - DdlManager.getInstance().renameSchema( newName.getSimple(), oldName.getSimple(), context.getDatabaseId() ); + DdlManager.getInstance().renameSchema( newName.getSimple(), oldName.getSimple() ); } catch ( NamespaceAlreadyExistsException e ) { throw CoreUtil.newContextException( newName.getPos(), RESOURCE.schemaExists( newName.getSimple() ) ); } catch ( UnknownSchemaException e ) { From ad5e0f8bd57539ca3c86a6985497714e3c5926d8 Mon Sep 17 00:00:00 2001 From: datomo Date: Wed, 1 Mar 2023 23:26:34 +0100 Subject: [PATCH 032/436] temp transform of DdlManagerImpl --- .../org/polypheny/db/adapter/DataStore.java | 24 +- .../db/adapter/index/IndexManager.java | 2 +- .../org/polypheny/db/catalog/Catalog.java | 6 + .../catalogs/AllocationDocumentCatalog.java | 10 + .../catalogs/AllocationGraphCatalog.java | 8 +- .../catalogs/AllocationRelationalCatalog.java | 2 +- .../catalogs/LogicalDocumentCatalog.java | 9 +- .../db/catalog/entity/CatalogEntity.java | 4 +- .../entity/allocation/AllocationEntity.java | 4 +- .../entity/allocation/AllocationTable.java | 4 +- .../entity/logical/LogicalCollection.java | 24 +- .../catalog/entity/logical/LogicalEntity.java | 6 +- .../catalog/entity/logical/LogicalGraph.java | 9 +- .../catalog/entity/logical/LogicalTable.java | 2 +- .../entity/physical/PhysicalCollection.java | 4 +- .../entity/physical/PhysicalEntity.java | 4 +- .../entity/physical/PhysicalGraph.java | 2 +- .../entity/physical/PhysicalTable.java | 6 +- .../java/org/polypheny/db/ddl/DdlManager.java | 24 +- .../db/partition/PartitionManager.java | 2 +- .../java/org/polypheny/db/schema/Entity.java | 10 +- .../db/transaction/TransactionManager.java | 5 +- .../db/view/MaterializedViewManager.java | 2 +- .../java/org/polypheny/db/PolyphenyDb.java | 6 +- .../org/polypheny/db/ddl/DdlManagerImpl.java | 648 +-- .../partition/AbstractPartitionManager.java | 16 +- .../db/partition/FrequencyMapImpl.java | 152 +- .../TemperatureAwarePartitionManager.java | 2 +- .../processing/ConstraintEnforceAttacher.java | 2 +- .../db/routing/routers/BaseRouter.java | 110 +- .../db/routing/routers/DmlRouterImpl.java | 137 +- .../transaction/TransactionManagerImpl.java | 4 +- .../db/view/MaterializedViewManagerImpl.java | 1 - .../java/org/polypheny/db/TestHelper.java | 2 +- .../statistics/StatisticQueryProcessor.java | 2 +- .../statistics/StatisticsManagerImpl.java | 2 +- .../polypheny/db/adapter/csv/CsvSchema.java | 3 +- .../db/cypher/CypherProcessorImpl.java | 3 +- .../db/cypher/admin/CypherCreateDatabase.java | 5 +- .../db/cypher/admin/CypherDropDatabase.java | 2 +- .../ExploreQueryProcessor.java | 2 +- .../org/polypheny/db/catalog/CatalogImpl.java | 5017 ---------------- .../db/catalog/CatalogImplBackup.java | 5107 ----------------- .../db/languages/mql/MqlAddPlacement.java | 6 +- .../db/languages/mql/MqlDropDatabase.java | 2 +- 
.../org/polypheny/db/catalog/PolyCatalog.java | 7 + .../logical/document/DocumentCatalog.java | 4 +- .../java/org/polypheny/db/restapi/Rest.java | 2 +- .../db/sql/language/ddl/SqlDropSchema.java | 2 +- .../java/org/polypheny/db/webui/Crud.java | 2 +- 50 files changed, 601 insertions(+), 10820 deletions(-) delete mode 100644 plugins/mapdb-catalog/src/main/java/org/polypheny/db/catalog/CatalogImpl.java delete mode 100644 plugins/mapdb-catalog/src/main/java/org/polypheny/db/catalog/CatalogImplBackup.java diff --git a/core/src/main/java/org/polypheny/db/adapter/DataStore.java b/core/src/main/java/org/polypheny/db/adapter/DataStore.java index f5ceb95b66..38d26bad57 100644 --- a/core/src/main/java/org/polypheny/db/adapter/DataStore.java +++ b/core/src/main/java/org/polypheny/db/adapter/DataStore.java @@ -29,9 +29,7 @@ import org.pf4j.ExtensionPoint; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogAdapter.AdapterType; -import org.polypheny.db.catalog.entity.CatalogCollectionMapping; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; -import org.polypheny.db.catalog.entity.CatalogGraphMapping; import org.polypheny.db.catalog.entity.CatalogGraphPlacement; import org.polypheny.db.catalog.entity.CatalogIndex; import org.polypheny.db.catalog.entity.allocation.AllocationTable; @@ -115,7 +113,7 @@ public void dropGraph( Context context, CatalogGraphPlacement graphPlacement ) { * as if the data model would be {@link NamespaceType#RELATIONAL}. */ private void createGraphSubstitution( Context context, LogicalGraph graphDatabase ) { - CatalogGraphMapping mapping = Catalog.getInstance().getGraphMapping( graphDatabase.id ); + /*CatalogGraphMapping mapping = Catalog.getInstance().getGraphMapping( graphDatabase.id ); LogicalTable nodes = Catalog.getInstance().getTable( mapping.nodesId ); createPhysicalTable( context, nodes, null ); @@ -127,7 +125,8 @@ private void createGraphSubstitution( Context context, LogicalGraph graphDatabas createPhysicalTable( context, edges, null ); LogicalTable edgeProperty = Catalog.getInstance().getTable( mapping.edgesPropertyId ); - createPhysicalTable( context, edgeProperty, null ); + createPhysicalTable( context, edgeProperty, null );*/ + // todo dl } @@ -136,7 +135,7 @@ private void createGraphSubstitution( Context context, LogicalGraph graphDatabas * as if the data model would be {@link NamespaceType#RELATIONAL}. */ private void dropGraphSubstitution( Context context, CatalogGraphPlacement graphPlacement ) { - Catalog catalog = Catalog.getInstance(); + /*Catalog catalog = Catalog.getInstance(); CatalogGraphMapping mapping = catalog.getGraphMapping( graphPlacement.graphId ); LogicalTable nodes = catalog.getTable( mapping.nodesId ); @@ -149,7 +148,8 @@ private void dropGraphSubstitution( Context context, CatalogGraphPlacement graph dropTable( context, edges, edges.partitionProperty.partitionIds ); LogicalTable edgeProperty = catalog.getTable( mapping.edgesPropertyId ); - dropTable( context, edgeProperty, edgeProperty.partitionProperty.partitionIds ); + dropTable( context, edgeProperty, edgeProperty.partitionProperty.partitionIds );*/ + // todo dl } @@ -169,11 +169,12 @@ public void createCollection( Context prepareContext, LogicalCollection catalogC * as if the data model would be {@link NamespaceType#RELATIONAL}. 
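 * <p>
 * Once the namespace-scoped catalogs are wired up, the substitution will presumably
 * resolve the backing relational entity through the split accessors introduced in this
 * patch series; a hedged sketch of that shape, mirroring the commented-out body below
 * rather than the final implementation:
 * <pre>{@code
 * Catalog catalog = Catalog.getInstance();
 * CatalogCollectionMapping mapping = catalog
 *         .getAllocDoc( catalogCollection.namespaceId )
 *         .getCollectionMapping( catalogCollection.id );
 * LogicalTable substitution = catalog
 *         .getLogicalRel( catalogCollection.namespaceId )
 *         .getTable( mapping.collectionId );
 * createPhysicalTable( prepareContext, substitution, null );
 * }</pre>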
*/ private void createCollectionSubstitution( Context prepareContext, LogicalCollection catalogCollection ) { - Catalog catalog = Catalog.getInstance(); + /*Catalog catalog = Catalog.getInstance(); CatalogCollectionMapping mapping = catalog.getCollectionMapping( catalogCollection.id ); LogicalTable collectionEntity = catalog.getTable( mapping.collectionId ); - createPhysicalTable( prepareContext, collectionEntity, null ); + createPhysicalTable( prepareContext, collectionEntity, null );*/ + // todo dl } @@ -193,11 +194,12 @@ public void dropCollection( Context prepareContext, LogicalCollection catalogCol * as if the data model would be {@link NamespaceType#RELATIONAL}. */ private void dropCollectionSubstitution( Context prepareContext, LogicalCollection catalogCollection ) { - Catalog catalog = Catalog.getInstance(); + /*Catalog catalog = Catalog.getInstance(); CatalogCollectionMapping mapping = catalog.getCollectionMapping( catalogCollection.id ); LogicalTable collectionEntity = catalog.getTable( mapping.collectionId ); - dropTable( prepareContext, collectionEntity, collectionEntity.partitionProperty.partitionIds ); + dropTable( prepareContext, collectionEntity, collectionEntity.partitionProperty.partitionIds );*/ + // todo dl } @@ -220,7 +222,7 @@ public static class FunctionalIndexInfo { public List getColumnNames() { List columnNames = new ArrayList<>( columnIds.size() ); for ( long columnId : columnIds ) { - columnNames.add( Catalog.getInstance().getColumn( columnId ).name ); + columnNames.add( Catalog.getInstance().getLogicalRel( names ).getColumn( columnId ).name ); } return columnNames; } diff --git a/core/src/main/java/org/polypheny/db/adapter/index/IndexManager.java b/core/src/main/java/org/polypheny/db/adapter/index/IndexManager.java index 4835821fde..f89a57209d 100644 --- a/core/src/main/java/org/polypheny/db/adapter/index/IndexManager.java +++ b/core/src/main/java/org/polypheny/db/adapter/index/IndexManager.java @@ -189,7 +189,7 @@ protected void addIndex( final long id, final String name, final CatalogKey key, indexByName.put( name, index ); final Transaction tx = statement != null ? 
statement.getTransaction() - : transactionManager.startTransaction( Catalog.defaultUserId, Catalog.defaultDatabaseId, false, "Index Manager" ); + : transactionManager.startTransaction( Catalog.defaultUserId, false, "Index Manager" ); try { index.rebuild( tx ); if ( statement == null ) { diff --git a/core/src/main/java/org/polypheny/db/catalog/Catalog.java b/core/src/main/java/org/polypheny/db/catalog/Catalog.java index 020877d289..37a1164227 100644 --- a/core/src/main/java/org/polypheny/db/catalog/Catalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/Catalog.java @@ -39,6 +39,7 @@ import org.polypheny.db.catalog.entity.CatalogUser; import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.allocation.AllocationEntity; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.entity.physical.PhysicalEntity; import org.polypheny.db.catalog.exceptions.NoTablePrimaryKeyException; import org.polypheny.db.catalog.exceptions.UnknownAdapterException; @@ -101,6 +102,8 @@ public static Catalog getInstance() { public abstract AllocationGraphCatalog getAllocGraph( long id ); + public abstract PhysicalEntity getPhysicalEntity( long id ); + public abstract Map getNodeInfo(); @@ -355,4 +358,7 @@ protected final boolean isValidIdentifier( final String str ) { public abstract List getIndexes(); + + public abstract List getTablesForPeriodicProcessing(); + } diff --git a/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationDocumentCatalog.java b/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationDocumentCatalog.java index 1f6005d28b..9ece5287e6 100644 --- a/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationDocumentCatalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationDocumentCatalog.java @@ -18,8 +18,10 @@ import java.util.List; import org.polypheny.db.adapter.DataStore; +import org.polypheny.db.catalog.entity.CatalogCollectionMapping; import org.polypheny.db.catalog.entity.CatalogCollectionPlacement; import org.polypheny.db.catalog.exceptions.GenericCatalogException; +import org.polypheny.db.catalog.logistic.PlacementType; public interface AllocationDocumentCatalog extends AllocationCatalog { @@ -36,4 +38,12 @@ public interface AllocationDocumentCatalog extends AllocationCatalog { List getCollectionPlacementsByAdapter( long id ); + void addCollectionPlacement( long namespaceId, long adapterId, long id, PlacementType placementType ); + + CatalogCollectionMapping getCollectionMapping( long id ); + + void dropCollectionPlacement( long id, long adapterId ); + + CatalogCollectionPlacement getCollectionPlacement( long id, int placementId ); + } diff --git a/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationGraphCatalog.java b/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationGraphCatalog.java index 6a0843505b..8379f33ac3 100644 --- a/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationGraphCatalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationGraphCatalog.java @@ -29,7 +29,7 @@ public interface AllocationGraphCatalog extends AllocationCatalog { * @param graphId The id of the graph for which a new placement is added * @return The id of the new placement */ - public abstract long addGraphPlacement( int adapterId, long graphId ); + public abstract long addGraphPlacement( long adapterId, long graphId ); /** * Gets a collection of graph placements for a given adapter. 
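 * <p>
 * Adapter ids are widened from {@code int} to {@code long} throughout this interface.
 * A hedged usage sketch against the new signatures, with identifiers assumed from
 * elsewhere in this patch:
 * <pre>{@code
 * AllocationGraphCatalog alloc = Catalog.getInstance().getAllocGraph( graph.namespaceId );
 * long placementId = alloc.addGraphPlacement( store.getAdapterId(), graph.id );
 * List<CatalogGraphPlacement> placements = alloc.getGraphPlacements( store.getAdapterId() );
 * }</pre>
 * Widening here keeps the placement API consistent with the other catalog methods in
 * this series, which already address adapters by {@code long} ids.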
@@ -37,7 +37,7 @@ public interface AllocationGraphCatalog extends AllocationCatalog { * @param adapterId The id of the adapter on which the placements are placed * @return The collection of graph placements */ - public abstract List getGraphPlacements( int adapterId ); + public abstract List getGraphPlacements( long adapterId ); /** * Deletes a specific graph placement for a given graph and adapter. @@ -45,7 +45,7 @@ public interface AllocationGraphCatalog extends AllocationCatalog { * @param adapterId The id of the adapter on which the placement is removed * @param graphId The id of the graph for which the placement is removed */ - public abstract void deleteGraphPlacement( int adapterId, long graphId ); + public abstract void deleteGraphPlacement( long adapterId, long graphId ); /** * Gets a specific placement for a graph on a given adapter. @@ -54,6 +54,6 @@ public interface AllocationGraphCatalog extends AllocationCatalog { * @param adapterId The id of the adapter on which the placement is placed * @return The placement matching the conditions */ - public abstract CatalogGraphPlacement getGraphPlacement( long graphId, int adapterId ); + public abstract CatalogGraphPlacement getGraphPlacement( long graphId, long adapterId ); } diff --git a/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationRelationalCatalog.java b/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationRelationalCatalog.java index fe4dd42af3..6b2eb2b889 100644 --- a/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationRelationalCatalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationRelationalCatalog.java @@ -90,7 +90,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param adapterId The id of the adapter * @return List of column placements of the table on the specified adapter */ - public abstract List getColumnPlacementsOnAdapterPerTable( int adapterId, long tableId ); + public abstract List getColumnPlacementsOnAdapterPerTable( long adapterId, long tableId ); /** * Get column placements on a adapter. On column detail level diff --git a/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalDocumentCatalog.java b/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalDocumentCatalog.java index c7d1f9d2e8..587d02ebfc 100644 --- a/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalDocumentCatalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalDocumentCatalog.java @@ -17,6 +17,7 @@ package org.polypheny.db.catalog.catalogs; import java.util.List; +import org.polypheny.db.adapter.DataStore; import org.polypheny.db.catalog.entity.logical.LogicalCollection; import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.logistic.Pattern; @@ -34,24 +35,22 @@ public interface LogicalDocumentCatalog extends LogicalCatalog { /** * Get a collection of collections which match the given naming pattern. * - * @param namespaceId The id of the namespace to which the collection belongs * @param namePattern The naming pattern of the collection itself, null if all are matched * @return collection of collections matching conditions */ - public abstract List getCollections( long namespaceId, Pattern namePattern ); + public abstract List getCollections( Pattern namePattern ); /** * Add a new collection with the given parameters. 
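 * <p>
 * Since this catalog is already scoped to a single namespace, the former
 * {@code schemaId} parameter is gone; a hedged calling sketch, identifiers assumed:
 * <pre>{@code
 * LogicalDocumentCatalog doc = Catalog.getInstance().getLogicalDoc( namespaceId );
 * long collectionId = doc.addCollection( null, "customers", userId, EntityType.ENTITY, true );
 * }</pre>
 * This keeps callers from passing a namespace id that disagrees with the catalog
 * instance they already hold.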
* * @param id ID of the collection to add, null if a new one needs to be generated * @param name The name of the collection - * @param schemaId The id of the namespace to which the collection is added * @param currentUserId The user, which adds the collection * @param entity The type of entity of the collection * @param modifiable If the collection is modifiable * @return The id of the added collection */ - public abstract long addCollection( Long id, String name, long schemaId, int currentUserId, EntityType entity, boolean modifiable ); + public abstract long addCollection( Long id, String name, int currentUserId, EntityType entity, boolean modifiable ); /** @@ -61,4 +60,6 @@ public interface LogicalDocumentCatalog extends LogicalCatalog { */ public abstract void deleteCollection( long id ); + long addCollectionLogistics( String name, List stores, boolean placementOnly ); + } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogEntity.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogEntity.java index 67c08571db..22473d6191 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogEntity.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogEntity.java @@ -44,10 +44,12 @@ public abstract class CatalogEntity implements CatalogObject, Wrapper, Serializa public EntityType entityType; public NamespaceType namespaceType; public String name; + public long namespaceId; - protected CatalogEntity( long id, String name, EntityType type, NamespaceType namespaceType ) { + protected CatalogEntity( long id, String name, long namespaceId, EntityType type, NamespaceType namespaceType ) { this.id = id; + this.namespaceId = namespaceId; this.name = name; this.entityType = type; this.namespaceType = namespaceType; diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationEntity.java b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationEntity.java index f41ccf8e6f..0620c4f238 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationEntity.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationEntity.java @@ -32,8 +32,8 @@ public abstract class AllocationEntity extends LogicalE public L logical; - protected AllocationEntity( L logical, long id, String name, String namespaceName, EntityType type, NamespaceType namespaceType, long adapterId ) { - super( id, name, namespaceName, type, namespaceType ); + protected AllocationEntity( L logical, long id, String name, long namespaceId, String namespaceName, EntityType type, NamespaceType namespaceType, long adapterId ) { + super( id, name, namespaceId, namespaceName, type, namespaceType ); this.adapterId = adapterId; this.logical = logical; } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationTable.java b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationTable.java index 59109053b0..0895f34490 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationTable.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationTable.java @@ -42,8 +42,8 @@ public class AllocationTable extends AllocationEntity { public LogicalTable logicalTable; - public AllocationTable( LogicalTable logicalTable, long id, long logicalId, String name, String namespaceName, long adapterId, List placements ) { - super( logicalTable, id, name, namespaceName, EntityType.ENTITY, NamespaceType.RELATIONAL, adapterId ); + public 
AllocationTable( LogicalTable logicalTable, long id, long logicalId, String name, long namespaceId, String namespaceName, long adapterId, List placements ) { + super( logicalTable, id, name, namespaceId, namespaceName, EntityType.ENTITY, NamespaceType.RELATIONAL, adapterId ); this.logicalTable = logicalTable; this.logicalId = logicalId; this.adapterId = adapterId; diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalCollection.java b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalCollection.java index d8e4ce0254..410835c91b 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalCollection.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalCollection.java @@ -25,19 +25,19 @@ import lombok.EqualsAndHashCode; import lombok.Getter; import lombok.NonNull; -import lombok.SneakyThrows; import lombok.Value; +import lombok.With; import org.apache.calcite.linq4j.tree.Expression; import org.apache.calcite.linq4j.tree.Expressions; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.catalog.entity.CatalogObject; import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.logistic.NamespaceType; @EqualsAndHashCode(callSuper = true) @Value -public class LogicalCollection extends CatalogEntity implements CatalogObject, LogicalEntity { +@With +public class LogicalCollection extends LogicalEntity implements CatalogObject { private static final long serialVersionUID = -6490762948368178584L; @@ -45,23 +45,21 @@ public class LogicalCollection extends CatalogEntity implements CatalogObject, L public long id; public ImmutableList placements; public String name; - public long databaseId; public long namespaceId; public EntityType entityType; public String physicalName; public LogicalCollection( - long databaseId, long namespaceId, + String namespaceName, long id, String name, @NonNull Collection placements, EntityType type, String physicalName ) { - super( id, name, EntityType.ENTITY, NamespaceType.DOCUMENT ); + super( id, name, namespaceId, namespaceName, EntityType.ENTITY, NamespaceType.DOCUMENT ); this.id = id; - this.databaseId = databaseId; this.namespaceId = namespaceId; this.name = name; this.placements = ImmutableList.copyOf( placements ); @@ -79,25 +77,17 @@ public Serializable[] getParameterArray() { public LogicalCollection addPlacement( int adapterId ) { List placements = new ArrayList<>( this.placements ); placements.add( adapterId ); - return new LogicalCollection( databaseId, namespaceId, id, name, placements, EntityType.ENTITY, physicalName ); + return new LogicalCollection( id, name, namespaceId, namespaceName, placements, EntityType.ENTITY, physicalName ); } public LogicalCollection removePlacement( int adapterId ) { List placements = this.placements.stream().filter( id -> id != adapterId ).collect( Collectors.toList() ); - return new LogicalCollection( databaseId, namespaceId, id, name, placements, EntityType.ENTITY, physicalName ); + return new LogicalCollection( id, name, namespaceId, namespaceName, placements, EntityType.ENTITY, physicalName ); } - @SneakyThrows - public String getNamespaceName() { - return Catalog.getInstance().getNamespace( namespaceId ).name; - } - - public LogicalCollection setPhysicalName( String physicalCollectionName ) { - return new LogicalCollection( databaseId, namespaceId, id, name, placements, entityType, physicalCollectionName ); - } @Override diff --git 
a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalEntity.java b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalEntity.java index db727f095a..41fb68897a 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalEntity.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalEntity.java @@ -29,11 +29,13 @@ public abstract class LogicalEntity extends CatalogEntity { public String namespaceName; + public long namespaceId; - protected LogicalEntity( long id, String name, String namespaceName, EntityType type, NamespaceType namespaceType ) { - super( id, name, type, namespaceType ); + protected LogicalEntity( long id, String name, long namespaceId, String namespaceName, EntityType type, NamespaceType namespaceType ) { + super( id, name, namespaceId, type, namespaceType ); this.namespaceName = namespaceName; + this.namespaceId = namespaceId; } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalGraph.java b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalGraph.java index bf6e4b8158..2c35a7ccb2 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalGraph.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalGraph.java @@ -30,14 +30,13 @@ import org.apache.calcite.linq4j.tree.Expression; import org.apache.calcite.linq4j.tree.Expressions; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.logistic.NamespaceType; @SuperBuilder(toBuilder = true) @EqualsAndHashCode(callSuper = true) @Value -public class LogicalGraph extends CatalogEntity implements Comparable, LogicalEntity { +public class LogicalGraph extends LogicalEntity implements Comparable { private static final long serialVersionUID = 7343856827901459672L; @@ -48,8 +47,8 @@ public class LogicalGraph extends CatalogEntity implements Comparable placements, boolean caseSensitive ) { - super( id, name, EntityType.ENTITY, NamespaceType.GRAPH ); + public LogicalGraph( long id, String name, long namespaceId, String namespaceName, int ownerId, boolean modifiable, @NonNull Collection placements, boolean caseSensitive ) { + super( id, name, namespaceId, namespaceName, EntityType.ENTITY, NamespaceType.GRAPH ); this.ownerId = ownerId; this.modifiable = modifiable; this.placements = ImmutableList.copyOf( placements ); @@ -58,7 +57,7 @@ public LogicalGraph( long id, String name, int ownerId, boolean modifiable, @Non public LogicalGraph( LogicalGraph graph ) { - this( graph.id, graph.name, graph.ownerId, graph.modifiable, graph.placements, graph.caseSensitive ); + this( graph.id, graph.name, graph.namespaceId, graph.namespaceName, graph.ownerId, graph.modifiable, graph.placements, graph.caseSensitive ); } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalTable.java b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalTable.java index 6e7d5795cd..a995d87907 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalTable.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalTable.java @@ -97,7 +97,7 @@ public LogicalTable( @Deserialize("modifiable") boolean modifiable, @Deserialize("partitionProperty") PartitionProperty partitionProperty, @Deserialize("connectedViews") List connectedViews ) { - super( id, name, namespaceName, type, NamespaceType.RELATIONAL ); + super( id, name, 
namespaceId, namespaceName, type, NamespaceType.RELATIONAL ); this.id = id; this.name = name; this.columns = ImmutableList.copyOf( columns ); diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalCollection.java b/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalCollection.java index 0f94b43465..63c91d1602 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalCollection.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalCollection.java @@ -32,8 +32,8 @@ @NonFinal public class PhysicalCollection extends PhysicalEntity { - public PhysicalCollection( LogicalCollection logical, long id, String name, String namespaceName, EntityType type, NamespaceType namespaceType, long adapterId ) { - super( logical, id, name, namespaceName, type, namespaceType, adapterId ); + public PhysicalCollection( LogicalCollection logical, long id, long namespaceId, String name, String namespaceName, EntityType type, NamespaceType namespaceType, long adapterId ) { + super( logical, id, name, namespaceId, namespaceName, type, namespaceType, adapterId ); } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalEntity.java b/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalEntity.java index 18257f0c7f..69701b89ed 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalEntity.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalEntity.java @@ -23,8 +23,8 @@ public abstract class PhysicalEntity extends AllocationEntity { - protected PhysicalEntity( L logical, long id, String name, String namespaceName, EntityType type, NamespaceType namespaceType, long adapterId ) { - super( logical, id, name, namespaceName, type, namespaceType, adapterId ); + protected PhysicalEntity( L logical, long id, String name, long namespaceId, String namespaceName, EntityType type, NamespaceType namespaceType, long adapterId ) { + super( logical, id, name, namespaceId, namespaceName, type, namespaceType, adapterId ); } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalGraph.java b/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalGraph.java index 454e7aeddf..4c5eb254cf 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalGraph.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalGraph.java @@ -33,7 +33,7 @@ public class PhysicalGraph extends PhysicalEntity { public PhysicalGraph( LogicalGraph logical, long id, String name, EntityType type, NamespaceType namespaceType, long adapterId ) { - super( logical, id, name, name, type, namespaceType, adapterId ); // for graph both name and namespaceName are the same + super( logical, id, name, id, name, type, namespaceType, adapterId ); // for graph both name and namespaceName are the same } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalTable.java b/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalTable.java index 8f15bf26b2..bb21e4d861 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalTable.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalTable.java @@ -52,8 +52,8 @@ public class PhysicalTable extends PhysicalEntity { public AllocationTable allocation; - public PhysicalTable( AllocationTable allocation, long id, String name, String namespaceName, EntityType type, NamespaceType namespaceType, List 
placements, List columnNames ) { - super( allocation.logical, id, name, namespaceName, type, namespaceType, allocation.adapterId ); + public PhysicalTable( AllocationTable allocation, long id, String name, long namespaceId, String namespaceName, EntityType type, NamespaceType namespaceType, List placements, List columnNames ) { + super( allocation.logical, id, name, namespaceId, namespaceName, type, namespaceType, allocation.adapterId ); this.allocation = allocation; this.namespaceName = namespaceName; this.placements = ImmutableList.copyOf( placements ); @@ -63,7 +63,7 @@ public PhysicalTable( AllocationTable allocation, long id, String name, String n public PhysicalTable( AllocationTable table, String name, String namespaceName, List columnNames ) { - this( table, table.id, name, namespaceName, table.entityType, table.namespaceType, table.placements, columnNames ); + this( table, table.id, name, table.namespaceId, namespaceName, table.entityType, table.namespaceType, table.placements, columnNames ); } diff --git a/core/src/main/java/org/polypheny/db/ddl/DdlManager.java b/core/src/main/java/org/polypheny/db/ddl/DdlManager.java index 9048f8ff26..e374299f15 100644 --- a/core/src/main/java/org/polypheny/db/ddl/DdlManager.java +++ b/core/src/main/java/org/polypheny/db/ddl/DdlManager.java @@ -426,12 +426,12 @@ public static DdlManager getInstance() { */ public abstract void renameColumn( LogicalTable catalogTable, String columnName, String newColumnName, Statement statement ) throws ColumnAlreadyExistsException, ColumnNotExistsException; - public abstract void removeGraphDatabase( long graphId, boolean ifExists, Statement statement ); + public abstract void removeGraph( long graphId, boolean ifExists, Statement statement ); /** * Create a new table * - * @param schemaId the id of the schema to which the table belongs + * @param namespaceId the id of the schema to which the table belongs * @param tableName the name of the new table * @param columns all columns of the table * @param constraints all constraints for the table @@ -440,30 +440,30 @@ public static DdlManager getInstance() { * @param placementType which placement type should be used for the initial placements * @param statement the used statement */ - public abstract void createTable( long schemaId, String tableName, List columns, List constraints, boolean ifNotExists, List stores, PlacementType placementType, Statement statement ) throws EntityAlreadyExistsException, ColumnNotExistsException, UnknownPartitionTypeException, UnknownColumnException, PartitionGroupNamesNotUniqueException; + public abstract void createTable( long namespaceId, String tableName, List columns, List constraints, boolean ifNotExists, List stores, PlacementType placementType, Statement statement ) throws EntityAlreadyExistsException, ColumnNotExistsException, UnknownPartitionTypeException, UnknownColumnException, PartitionGroupNamesNotUniqueException; /** * Create a new view * * @param viewName the name of the new view - * @param schemaId the id of the schema to which the view belongs + * @param namespaceId the id of the schema to which the view belongs * @param algNode the algNode which was built form the Select part of the view * @param statement the used Statement */ - public abstract void createView( String viewName, long schemaId, AlgNode algNode, AlgCollation algCollation, boolean replace, Statement statement, PlacementType placementType, List projectedColumns, String query, QueryLanguage language ) throws EntityAlreadyExistsException, 
GenericCatalogException, UnknownColumnException; + public abstract void createView( String viewName, long namespaceId, AlgNode algNode, AlgCollation algCollation, boolean replace, Statement statement, PlacementType placementType, List projectedColumns, String query, QueryLanguage language ) throws EntityAlreadyExistsException, GenericCatalogException, UnknownColumnException; /** * Create a new materialized view * * @param viewName the name of the new view - * @param schemaId the id of the schema to which the view belongs + * @param namespaceId the id of the schema to which the view belongs * @param algRoot the relNode which was built form the Select part of the view * @param statement the used Statement */ - public abstract void createMaterializedView( String viewName, long schemaId, AlgRoot algRoot, boolean replace, Statement statement, List stores, PlacementType placementType, List projectedColumns, MaterializedCriteria materializedCriteria, String query, QueryLanguage language, boolean ifNotExists, boolean ordered ) throws EntityAlreadyExistsException, GenericCatalogException, UnknownColumnException, ColumnNotExistsException, ColumnAlreadyExistsException; + public abstract void createMaterializedView( String viewName, long namespaceId, AlgRoot algRoot, boolean replace, Statement statement, List stores, PlacementType placementType, List projectedColumns, MaterializedCriteria materializedCriteria, String query, QueryLanguage language, boolean ifNotExists, boolean ordered ) throws EntityAlreadyExistsException, GenericCatalogException, UnknownColumnException, ColumnNotExistsException, ColumnAlreadyExistsException; - public abstract void createCollection( long schemaId, String name, boolean ifNotExists, List stores, PlacementType placementType, Statement statement ) throws EntityAlreadyExistsException; + public abstract void createCollection( long namespaceId, String name, boolean ifNotExists, List stores, PlacementType placementType, Statement statement ) throws EntityAlreadyExistsException; public abstract void addCollectionPlacement( long namespaceId, String name, List stores, Statement statement ); @@ -485,22 +485,22 @@ public static DdlManager getInstance() { /** * Adds a new constraint to a table * + * @param namespaceId * @param constraintName the name of the constraint * @param constraintType the type of the constraint * @param columnNames the names of the columns for which to create the constraint * @param tableId the id of the table */ - public abstract void addConstraint( String constraintName, ConstraintType constraintType, List columnNames, long tableId ) throws UnknownColumnException, GenericCatalogException; + public abstract void addConstraint( long namespaceId, String constraintName, ConstraintType constraintType, List columnNames, long tableId ) throws UnknownColumnException, GenericCatalogException; /** * Drop a schema * - * @param databaseId the id of the database the schema belongs * @param schemaName the name of the schema to drop * @param ifExists whether to silently ignore if the schema does not exist * @param statement the used statement */ - public abstract void dropSchema( long databaseId, String schemaName, boolean ifExists, Statement statement ) throws SchemaNotExistException, DdlOnSourceException; + public abstract void dropNamespace( String schemaName, boolean ifExists, Statement statement ) throws SchemaNotExistException, DdlOnSourceException; /** * Drop a table @@ -556,7 +556,7 @@ public static DdlManager getInstance() { public abstract void refreshView( 
Statement statement, Long materializedId ); - public abstract long createGraph( long databaseId, String namespaceName, boolean modifiable, @Nullable List stores, boolean ifNotExists, boolean replace, Statement statement ); + public abstract long createGraph( String namespaceName, boolean modifiable, @Nullable List stores, boolean ifNotExists, boolean replace, boolean caseSensitive, Statement statement ); public abstract void addGraphAlias( long graphId, String alias, boolean ifNotExists ); diff --git a/core/src/main/java/org/polypheny/db/partition/PartitionManager.java b/core/src/main/java/org/polypheny/db/partition/PartitionManager.java index 7766d99ef9..be115016a6 100644 --- a/core/src/main/java/org/polypheny/db/partition/PartitionManager.java +++ b/core/src/main/java/org/polypheny/db/partition/PartitionManager.java @@ -44,7 +44,7 @@ public interface PartitionManager { * @param partitionIds List of all requested partitions ids * @return Returns map of AdapterId {@code ->} [Map PartitionsId {@code ->}needed Columns Placements] */ - Map>> getAllPlacements( LogicalTable catalogTable, List partitionIds ); + Map>> getAllPlacements( LogicalTable catalogTable, List partitionIds ); int getNumberOfPartitionsPerGroup( int numberOfPartitions ); diff --git a/core/src/main/java/org/polypheny/db/schema/Entity.java b/core/src/main/java/org/polypheny/db/schema/Entity.java index 8cb513dbd4..6f27987b91 100644 --- a/core/src/main/java/org/polypheny/db/schema/Entity.java +++ b/core/src/main/java/org/polypheny/db/schema/Entity.java @@ -36,9 +36,9 @@ import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; +import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.nodes.Call; import org.polypheny.db.nodes.Node; import org.polypheny.db.prepare.JavaTypeFactoryImpl; @@ -95,11 +95,11 @@ default CatalogEntity getCatalogEntity() { } switch ( getNamespaceType() ) { case RELATIONAL: - return Catalog.getInstance().getTable( getId() ); + return Catalog.getInstance().getLogicalRel( -1 ).getTable( getId() ); case DOCUMENT: - return Catalog.getInstance().getCollection( getId() ); + return Catalog.getInstance().getLogicalDoc( -1 ).getCollection( getId() ); case GRAPH: - return Catalog.getInstance().getGraph( getId() ); + return Catalog.getInstance().getLogicalGraph( -1 ).getGraph( getId() ); } return null; } @@ -113,7 +113,7 @@ default CatalogPartitionPlacement getPartitionPlacement() { if ( getAdapterId() == null || getPartitionId() == null ) { return null; } - return Catalog.getInstance().getPartitionPlacement( Math.toIntExact( getAdapterId() ), getPartitionId() ); + return Catalog.getInstance().getAllocRel( -1 ).getPartitionPlacement( Math.toIntExact( getAdapterId() ), getPartitionId() ); } /** diff --git a/core/src/main/java/org/polypheny/db/transaction/TransactionManager.java b/core/src/main/java/org/polypheny/db/transaction/TransactionManager.java index 48a5601e1a..1631a0b583 100644 --- a/core/src/main/java/org/polypheny/db/transaction/TransactionManager.java +++ b/core/src/main/java/org/polypheny/db/transaction/TransactionManager.java @@ -21,7 +21,6 @@ import org.polypheny.db.catalog.entity.CatalogUser; import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.exceptions.GenericCatalogException; 
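
// Editor's annotation (not part of the patch): the Entity.getCatalogEntity() and
// DdlManagerImpl hunks in this patch all follow one refactoring pattern -- the
// monolithic Catalog API is split into model-specific sub-catalogs
// (getLogicalRel/getLogicalDoc/getLogicalGraph for logical entities,
// getAllocRel/getAllocGraph for allocations), selected per namespace. A minimal
// before/after sketch, using only methods that appear in this diff; `table`,
// `adapterId` and the column name "age" are illustrative placeholders:

    // before: one flat catalog for every data model
    LogicalColumn col = catalog.getColumn( table.id, "age" );
    catalog.addColumnPlacement( adapterId, col.id, PlacementType.MANUAL, null, null, null );

    // after: logical reads go through the relational sub-catalog of the table's
    // namespace; placement writes go through the matching allocation catalog
    LogicalColumn col2 = catalog.getLogicalRel( table.namespaceId ).getColumn( table.id, "age" );
    catalog.getAllocRel( table.namespaceId ).addColumnPlacement( adapterId, col2.id, PlacementType.MANUAL, null, null, null );
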
-import org.polypheny.db.catalog.exceptions.UnknownDatabaseException;
 import org.polypheny.db.catalog.exceptions.UnknownSchemaException;
 import org.polypheny.db.catalog.exceptions.UnknownUserException;
 import org.polypheny.db.transaction.Transaction.MultimediaFlavor;
@@ -33,9 +32,9 @@ public interface TransactionManager {

     Transaction startTransaction( CatalogUser user, LogicalNamespace defaultSchema, CatalogDatabase database, boolean analyze, String origin, MultimediaFlavor flavor );

-    Transaction startTransaction( long userId, long databaseId, boolean analyze, String origin ) throws GenericCatalogException, UnknownUserException, UnknownDatabaseException, UnknownSchemaException;
+    Transaction startTransaction( long userId, boolean analyze, String origin ) throws GenericCatalogException, UnknownUserException, UnknownSchemaException;

-    Transaction startTransaction( long userId, long databaseId, boolean analyze, String origin, MultimediaFlavor flavor ) throws UnknownUserException, UnknownDatabaseException, UnknownSchemaException;
+    Transaction startTransaction( long userId, boolean analyze, String origin, MultimediaFlavor flavor ) throws UnknownUserException, UnknownSchemaException;

     void removeTransaction( PolyXid xid );

diff --git a/core/src/main/java/org/polypheny/db/view/MaterializedViewManager.java b/core/src/main/java/org/polypheny/db/view/MaterializedViewManager.java
index fb0f572952..28971b1831 100644
--- a/core/src/main/java/org/polypheny/db/view/MaterializedViewManager.java
+++ b/core/src/main/java/org/polypheny/db/view/MaterializedViewManager.java
@@ -64,7 +64,7 @@ public static MaterializedViewManager getInstance() {
     public abstract void addData(
             Transaction transaction,
             List<DataStore> stores,
-            Map<Integer, List<LogicalColumn>> addedColumns,
+            Map<Long, List<LogicalColumn>> addedColumns,
             AlgRoot algRoot,
             CatalogMaterializedView materializedView );

diff --git a/dbms/src/main/java/org/polypheny/db/PolyphenyDb.java b/dbms/src/main/java/org/polypheny/db/PolyphenyDb.java
index 2eee906ecb..a662f7f17f 100644
--- a/dbms/src/main/java/org/polypheny/db/PolyphenyDb.java
+++ b/dbms/src/main/java/org/polypheny/db/PolyphenyDb.java
@@ -34,7 +34,6 @@
 import org.polypheny.db.adapter.index.IndexManager;
 import org.polypheny.db.catalog.Adapter;
 import org.polypheny.db.catalog.Catalog;
-import org.polypheny.db.catalog.logistic.NamespaceType;
 import org.polypheny.db.catalog.entity.CatalogAdapter.AdapterType;
 import org.polypheny.db.catalog.exceptions.GenericCatalogException;
 import org.polypheny.db.catalog.exceptions.UnknownDatabaseException;
@@ -42,6 +41,7 @@
 import org.polypheny.db.catalog.exceptions.UnknownSchemaException;
 import org.polypheny.db.catalog.exceptions.UnknownTableException;
 import org.polypheny.db.catalog.exceptions.UnknownUserException;
+import org.polypheny.db.catalog.logistic.NamespaceType;
 import org.polypheny.db.config.ConfigManager;
 import org.polypheny.db.config.RuntimeConfig;
 import org.polypheny.db.ddl.DdlManager;
@@ -346,12 +346,12 @@ public void join( final long millis ) throws InterruptedException {
                 throw new RuntimeException( "There was no catalog submitted, aborting."
); } - trx = transactionManager.startTransaction( Catalog.defaultUserId, Catalog.defaultDatabaseId, false, "Catalog Startup" ); + trx = transactionManager.startTransaction( Catalog.defaultUserId, false, "Catalog Startup" ); AdapterManager.getInstance().restoreAdapters(); loadDefaults(); QueryInterfaceManager.getInstance().restoreInterfaces( catalog ); trx.commit(); - trx = transactionManager.startTransaction( Catalog.defaultUserId, Catalog.defaultDatabaseId, false, "Catalog Startup" ); + trx = transactionManager.startTransaction( Catalog.defaultUserId, false, "Catalog Startup" ); catalog.restoreColumnPlacements( trx ); catalog.restoreViews( trx ); trx.commit(); diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java index 0bfb490ebb..5477533b28 100644 --- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java @@ -58,7 +58,6 @@ import org.polypheny.db.catalog.entity.CatalogConstraint; import org.polypheny.db.catalog.entity.CatalogDataPlacement; import org.polypheny.db.catalog.entity.CatalogForeignKey; -import org.polypheny.db.catalog.entity.CatalogGraphMapping; import org.polypheny.db.catalog.entity.CatalogGraphPlacement; import org.polypheny.db.catalog.entity.CatalogIndex; import org.polypheny.db.catalog.entity.CatalogKey; @@ -178,7 +177,7 @@ private void addDefaultValue( long namespaceId, String defaultValue, long addedC } - protected DataStore getDataStoreInstance( int storeId ) throws DdlOnSourceException { + protected DataStore getDataStoreInstance( long storeId ) throws DdlOnSourceException { Adapter adapterInstance = AdapterManager.getInstance().getAdapter( storeId ); if ( adapterInstance == null ) { throw new RuntimeException( "Unknown store id: " + storeId ); @@ -799,7 +798,7 @@ public void addDataPlacement( LogicalTable catalogTable, List columnIds, L // Needed to ensure that column placements on the same store contain all the same partitions // Check if this column placement is the first on the data placement // If this returns null this means that this is the first placement and partition list can therefore be specified - List currentPartList = catalog.getPartitionGroupsOnDataPlacement( dataStore.getAdapterId(), catalogTable.id ); + List currentPartList = catalog.getAllocRel( catalogTable.namespaceId ).getPartitionGroupsOnDataPlacement( dataStore.getAdapterId(), catalogTable.id ); isDataPlacementPartitioned = !currentPartList.isEmpty(); @@ -829,7 +828,7 @@ public void addDataPlacement( LogicalTable catalogTable, List columnIds, L + dataStore.getUniqueName() + "' already contains manually specified partitions: " + currentPartList + ". Use 'ALTER TABLE ... MODIFY PARTITIONS...' instead" ); } - List catalogPartitionGroups = catalog.getPartitionGroups( tableId ); + List catalogPartitionGroups = catalog.getAllocRel( catalogTable.namespaceId ).getPartitionGroups( tableId ); for ( String partitionName : partitionGroupNames ) { boolean isPartOfTable = false; for ( CatalogPartitionGroup catalogPartitionGroup : catalogPartitionGroups ) { @@ -841,7 +840,7 @@ public void addDataPlacement( LogicalTable catalogTable, List columnIds, L } if ( !isPartOfTable ) { throw new RuntimeException( "Specified Partition-Name: '" + partitionName + "' is not part of table '" - + catalogTable.name + "'. Available partitions: " + String.join( ",", catalog.getPartitionGroupNames( tableId ) ) ); + + catalogTable.name + "'. 
Available partitions: " + String.join( ",", catalog.getAllocRel( catalogTable.namespaceId ).getPartitionGroupNames( tableId ) ) ); } } @@ -863,11 +862,11 @@ else if ( partitionGroupIds.isEmpty() && partitionGroupNames.isEmpty() ) { List partitionIds = new ArrayList<>(); // Gather all partitions relevant to add depending on the specified partitionGroup - tempPartitionGroupList.forEach( pg -> catalog.getPartitions( pg ).forEach( p -> partitionIds.add( p.id ) ) ); + tempPartitionGroupList.forEach( pg -> catalog.getAllocRel( catalogTable.namespaceId ).getPartitions( pg ).forEach( p -> partitionIds.add( p.id ) ) ); // Create column placements for ( long cid : columnIds ) { - catalog.addColumnPlacement( + catalog.getAllocRel( catalogTable.namespaceId ).addColumnPlacement( dataStore.getAdapterId(), cid, PlacementType.MANUAL, @@ -875,13 +874,13 @@ else if ( partitionGroupIds.isEmpty() && partitionGroupNames.isEmpty() ) { null, null ); - addedColumns.add( catalog.getColumn( cid ) ); + addedColumns.add( catalog.getLogicalRel( catalogTable.namespaceId ).getColumn( cid ) ); } // Check if placement includes primary key columns - CatalogPrimaryKey primaryKey = catalog.getPrimaryKey( catalogTable.primaryKey ); + CatalogPrimaryKey primaryKey = catalog.getLogicalRel( catalogTable.namespaceId ).getPrimaryKey( catalogTable.primaryKey ); for ( long cid : primaryKey.columnIds ) { if ( !columnIds.contains( cid ) ) { - catalog.addColumnPlacement( + catalog.getAllocRel( catalogTable.namespaceId ).addColumnPlacement( dataStore.getAdapterId(), cid, PlacementType.AUTOMATIC, @@ -889,13 +888,13 @@ else if ( partitionGroupIds.isEmpty() && partitionGroupNames.isEmpty() ) { null, null ); - addedColumns.add( catalog.getColumn( cid ) ); + addedColumns.add( catalog.getLogicalRel( catalogTable.namespaceId ).getColumn( cid ) ); } } // Need to create partitionPlacements first in order to trigger schema creation on PolySchemaBuilder for ( long partitionId : partitionIds ) { - catalog.addPartitionPlacement( + catalog.getAllocRel( catalogTable.namespaceId ).addPartitionPlacement( catalogTable.namespaceId, dataStore.getAdapterId(), catalogTable.id, partitionId, @@ -927,22 +926,22 @@ public void addPrimaryKey( LogicalTable catalogTable, List columnNames, checkModelLogic( catalogTable ); try { - CatalogPrimaryKey oldPk = catalog.getPrimaryKey( catalogTable.primaryKey ); + CatalogPrimaryKey oldPk = catalog.getLogicalRel( catalogTable.namespaceId ).getPrimaryKey( catalogTable.primaryKey ); List columnIds = new LinkedList<>(); for ( String columnName : columnNames ) { - LogicalColumn logicalColumn = catalog.getColumn( catalogTable.id, columnName ); + LogicalColumn logicalColumn = catalog.getLogicalRel( catalogTable.namespaceId ).getColumn( catalogTable.id, columnName ); columnIds.add( logicalColumn.id ); } - catalog.addPrimaryKey( catalogTable.id, columnIds ); + catalog.getLogicalRel( catalogTable.namespaceId ).addPrimaryKey( catalogTable.id, columnIds ); // Add new column placements long pkColumnId = oldPk.columnIds.get( 0 ); // It is sufficient to check for one because all get replicated on all stores - List oldPkPlacements = catalog.getColumnPlacement( pkColumnId ); + List oldPkPlacements = catalog.getAllocRel( catalogTable.namespaceId ).getColumnPlacement( pkColumnId ); for ( CatalogColumnPlacement ccp : oldPkPlacements ) { for ( long columnId : columnIds ) { - if ( !catalog.checkIfExistsColumnPlacement( ccp.adapterId, columnId ) ) { - catalog.addColumnPlacement( + if ( !catalog.getAllocRel( catalogTable.namespaceId 
).checkIfExistsColumnPlacement( ccp.adapterId, columnId ) ) { + catalog.getAllocRel( catalogTable.namespaceId ).addColumnPlacement( ccp.adapterId, columnId, PlacementType.AUTOMATIC, @@ -952,8 +951,8 @@ public void addPrimaryKey( LogicalTable catalogTable, List columnNames, ); AdapterManager.getInstance().getStore( ccp.adapterId ).addColumn( statement.getPrepareContext(), - catalog.getTable( ccp.tableId ), - catalog.getColumn( columnId ) ); + catalog.getLogicalRel( catalogTable.namespaceId ).getTable( ccp.tableId ), + catalog.getLogicalRel( catalogTable.namespaceId ).getColumn( columnId ) ); } } } @@ -973,10 +972,10 @@ public void addUniqueConstraint( LogicalTable catalogTable, List columnN try { List columnIds = new LinkedList<>(); for ( String columnName : columnNames ) { - LogicalColumn logicalColumn = catalog.getColumn( catalogTable.id, columnName ); + LogicalColumn logicalColumn = catalog.getLogicalRel( catalogTable.namespaceId ).getColumn( catalogTable.id, columnName ); columnIds.add( logicalColumn.id ); } - catalog.addUniqueConstraint( catalogTable.id, constraintName, columnIds ); + catalog.getLogicalRel( catalogTable.namespaceId ).addUniqueConstraint( catalogTable.id, constraintName, columnIds ); } catch ( GenericCatalogException | UnknownColumnException e ) { throw new RuntimeException( e ); } @@ -985,7 +984,7 @@ public void addUniqueConstraint( LogicalTable catalogTable, List columnN @Override public void dropColumn( LogicalTable catalogTable, String columnName, Statement statement ) throws ColumnNotExistsException { - if ( catalogTable.fieldIds.size() < 2 ) { + if ( catalogTable.columns.size() < 2 ) { throw new RuntimeException( "Cannot drop sole column of table " + catalogTable.name ); } @@ -995,39 +994,39 @@ public void dropColumn( LogicalTable catalogTable, String columnName, Statement //check if views are dependent from this view checkViewDependencies( catalogTable ); - LogicalColumn column = getCatalogColumn( catalogTable.id, columnName ); + LogicalColumn column = getCatalogColumn( catalogTable.namespaceId, catalogTable.id, columnName ); // Check if column is part of a key - for ( CatalogKey key : catalog.getTableKeys( catalogTable.id ) ) { + for ( CatalogKey key : catalog.getLogicalRel( catalogTable.namespaceId ).getTableKeys( catalogTable.id ) ) { if ( key.columnIds.contains( column.id ) ) { - if ( catalog.isPrimaryKey( key.id ) ) { + if ( catalog.getLogicalRel( catalogTable.namespaceId ).isPrimaryKey( key.id ) ) { throw new PolyphenyDbException( "Cannot drop column '" + column.name + "' because it is part of the primary key." ); - } else if ( catalog.isIndex( key.id ) ) { - throw new PolyphenyDbException( "Cannot drop column '" + column.name + "' because it is part of the index with the name: '" + catalog.getIndexes( key ).get( 0 ).name + "'." ); - } else if ( catalog.isForeignKey( key.id ) ) { - throw new PolyphenyDbException( "Cannot drop column '" + column.name + "' because it is part of the foreign key with the name: '" + catalog.getForeignKeys( key ).get( 0 ).name + "'." ); - } else if ( catalog.isConstraint( key.id ) ) { - throw new PolyphenyDbException( "Cannot drop column '" + column.name + "' because it is part of the constraint with the name: '" + catalog.getConstraints( key ).get( 0 ).name + "'." 
); + } else if ( catalog.getLogicalRel( catalogTable.namespaceId ).isIndex( key.id ) ) { + throw new PolyphenyDbException( "Cannot drop column '" + column.name + "' because it is part of the index with the name: '" + catalog.getLogicalRel( catalogTable.namespaceId ).getIndexes( key ).get( 0 ).name + "'." ); + } else if ( catalog.getLogicalRel( catalogTable.namespaceId ).isForeignKey( key.id ) ) { + throw new PolyphenyDbException( "Cannot drop column '" + column.name + "' because it is part of the foreign key with the name: '" + catalog.getLogicalRel( catalogTable.namespaceId ).getForeignKeys( key ).get( 0 ).name + "'." ); + } else if ( catalog.getLogicalRel( catalogTable.namespaceId ).isConstraint( key.id ) ) { + throw new PolyphenyDbException( "Cannot drop column '" + column.name + "' because it is part of the constraint with the name: '" + catalog.getLogicalRel( catalogTable.namespaceId ).getConstraints( key ).get( 0 ).name + "'." ); } throw new PolyphenyDbException( "Ok, strange... Something is going wrong here!" ); } } // Delete column from underlying data stores - for ( CatalogColumnPlacement dp : catalog.getColumnPlacementsByColumn( column.id ) ) { + for ( CatalogColumnPlacement dp : catalog.getAllocRel( catalogTable.namespaceId ).getColumnPlacementsByColumn( column.id ) ) { if ( catalogTable.entityType == EntityType.ENTITY ) { AdapterManager.getInstance().getStore( dp.adapterId ).dropColumn( statement.getPrepareContext(), dp ); } - catalog.deleteColumnPlacement( dp.adapterId, dp.columnId, true ); + catalog.getAllocRel( catalogTable.namespaceId ).deleteColumnPlacement( dp.adapterId, dp.columnId, true ); } // Delete from catalog - List columns = catalog.getColumns( catalogTable.id ); - catalog.deleteColumn( column.id ); + List columns = catalog.getLogicalRel( catalogTable.namespaceId ).getColumns( catalogTable.id ); + catalog.getLogicalRel( catalogTable.namespaceId ).deleteColumn( column.id ); if ( column.position != columns.size() ) { // Update position of the other columns for ( int i = column.position; i < columns.size(); i++ ) { - catalog.setColumnPosition( columns.get( i ).id, i ); + catalog.getLogicalRel( catalogTable.namespaceId ).setColumnPosition( columns.get( i ).id, i ); } } @@ -1060,8 +1059,8 @@ public void dropConstraint( LogicalTable catalogTable, String constraintName ) t checkIfDdlPossible( catalogTable.entityType ); try { - CatalogConstraint constraint = catalog.getConstraint( catalogTable.id, constraintName ); - catalog.deleteConstraint( constraint.id ); + CatalogConstraint constraint = catalog.getLogicalRel( catalogTable.namespaceId ).getConstraint( catalogTable.id, constraintName ); + catalog.getLogicalRel( catalogTable.namespaceId ).deleteConstraint( constraint.id ); } catch ( GenericCatalogException | UnknownConstraintException e ) { throw new RuntimeException( e ); } @@ -1074,8 +1073,8 @@ public void dropForeignKey( LogicalTable catalogTable, String foreignKeyName ) t checkIfDdlPossible( catalogTable.entityType ); try { - CatalogForeignKey foreignKey = catalog.getForeignKey( catalogTable.id, foreignKeyName ); - catalog.deleteForeignKey( foreignKey.id ); + CatalogForeignKey foreignKey = catalog.getLogicalRel( catalogTable.namespaceId ).getForeignKey( catalogTable.id, foreignKeyName ); + catalog.getLogicalRel( catalogTable.namespaceId ).deleteForeignKey( foreignKey.id ); } catch ( GenericCatalogException | UnknownForeignKeyException e ) { throw new RuntimeException( e ); } @@ -1088,16 +1087,16 @@ public void dropIndex( LogicalTable catalogTable, String indexName, 
Statement st checkIfDdlPossible( catalogTable.entityType ); try { - CatalogIndex index = catalog.getIndex( catalogTable.id, indexName ); + CatalogIndex index = catalog.getLogicalRel( catalogTable.namespaceId ).getIndex( catalogTable.id, indexName ); if ( index.location == 0 ) { IndexManager.getInstance().deleteIndex( index ); } else { DataStore storeInstance = AdapterManager.getInstance().getStore( index.location ); - storeInstance.dropIndex( statement.getPrepareContext(), index, catalog.getPartitionsOnDataPlacement( index.location, catalogTable.id ) ); + storeInstance.dropIndex( statement.getPrepareContext(), index, catalog.getAllocRel( catalogTable.namespaceId ).getPartitionsOnDataPlacement( index.location, catalogTable.id ) ); } - catalog.deleteIndex( index.id ); + catalog.getLogicalRel( catalogTable.namespaceId ).deleteIndex( index.id ); } catch ( UnknownIndexException e ) { throw new RuntimeException( e ); } @@ -1111,15 +1110,15 @@ public void dropDataPlacement( LogicalTable catalogTable, DataStore storeInstanc throw new PlacementNotExistsException(); } - CatalogDataPlacement dataPlacement = catalog.getDataPlacement( storeInstance.getAdapterId(), catalogTable.id ); - if ( !catalog.validateDataPlacementsConstraints( catalogTable.id, storeInstance.getAdapterId(), + CatalogDataPlacement dataPlacement = catalog.getAllocRel( catalogTable.namespaceId ).getDataPlacement( storeInstance.getAdapterId(), catalogTable.id ); + if ( !catalog.getAllocRel( catalogTable.namespaceId ).validateDataPlacementsConstraints( catalogTable.id, storeInstance.getAdapterId(), dataPlacement.columnPlacementsOnAdapter, dataPlacement.getAllPartitionIds() ) ) { throw new LastPlacementException(); } // Drop all indexes on this store - for ( CatalogIndex index : catalog.getIndexes( catalogTable.id, false ) ) { + for ( CatalogIndex index : catalog.getLogicalRel( catalogTable.namespaceId ).getIndexes( catalogTable.id, false ) ) { if ( index.location == storeInstance.getAdapterId() ) { if ( index.location == 0 ) { // Delete polystore index @@ -1129,17 +1128,17 @@ public void dropDataPlacement( LogicalTable catalogTable, DataStore storeInstanc AdapterManager.getInstance().getStore( index.location ).dropIndex( statement.getPrepareContext(), index, - catalog.getPartitionsOnDataPlacement( index.location, catalogTable.id ) ); + catalog.getAllocRel( catalogTable.namespaceId ).getPartitionsOnDataPlacement( index.location, catalogTable.id ) ); } // Delete index in catalog - catalog.deleteIndex( index.id ); + catalog.getLogicalRel( catalogTable.namespaceId ).deleteIndex( index.id ); } } // Physically delete the data from the store - storeInstance.dropTable( statement.getPrepareContext(), catalogTable, catalog.getPartitionsOnDataPlacement( storeInstance.getAdapterId(), catalogTable.id ) ); + storeInstance.dropTable( statement.getPrepareContext(), catalogTable, catalog.getAllocRel( catalogTable.namespaceId ).getPartitionsOnDataPlacement( storeInstance.getAdapterId(), catalogTable.id ) ); // Remove physical stores afterwards - catalog.removeDataPlacement( storeInstance.getAdapterId(), catalogTable.id ); + catalog.getAllocRel( catalogTable.namespaceId ).removeDataPlacement( storeInstance.getAdapterId(), catalogTable.id ); // Reset query plan cache, implementation cache & routing cache statement.getQueryProcessor().resetCaches(); @@ -1151,7 +1150,7 @@ public void dropPrimaryKey( LogicalTable catalogTable ) throws DdlOnSourceExcept try { // Make sure that this is a table of type TABLE (and not SOURCE) checkIfDdlPossible( 
catalogTable.entityType ); - catalog.deletePrimaryKey( catalogTable.id ); + catalog.getLogicalRel( catalogTable.namespaceId ).deletePrimaryKey( catalogTable.id ); } catch ( GenericCatalogException e ) { throw new RuntimeException( e ); } @@ -1166,9 +1165,9 @@ public void setColumnType( LogicalTable catalogTable, String columnName, ColumnT // check if model permits operation checkModelLogic( catalogTable, columnName ); - LogicalColumn logicalColumn = getCatalogColumn( catalogTable.id, columnName ); + LogicalColumn logicalColumn = getCatalogColumn( catalogTable.namespaceId, catalogTable.id, columnName ); - catalog.setColumnType( + catalog.getLogicalRel( catalogTable.namespaceId ).setColumnType( logicalColumn.id, type.type, type.collectionType, @@ -1176,11 +1175,11 @@ public void setColumnType( LogicalTable catalogTable, String columnName, ColumnT type.scale, type.dimension, type.cardinality ); - for ( CatalogColumnPlacement placement : catalog.getColumnPlacement( logicalColumn.id ) ) { + for ( CatalogColumnPlacement placement : catalog.getAllocRel( catalogTable.namespaceId ).getColumnPlacement( logicalColumn.id ) ) { AdapterManager.getInstance().getStore( placement.adapterId ).updateColumnType( statement.getPrepareContext(), placement, - catalog.getColumn( logicalColumn.id ), + catalog.getLogicalRel( catalogTable.namespaceId ).getColumn( logicalColumn.id ), logicalColumn.type ); } @@ -1191,7 +1190,7 @@ public void setColumnType( LogicalTable catalogTable, String columnName, ColumnT @Override public void setColumnNullable( LogicalTable catalogTable, String columnName, boolean nullable, Statement statement ) throws ColumnNotExistsException, DdlOnSourceException, GenericCatalogException { - LogicalColumn logicalColumn = getCatalogColumn( catalogTable.id, columnName ); + LogicalColumn logicalColumn = getCatalogColumn( catalogTable.namespaceId, catalogTable.id, columnName ); // Make sure that this is a table of type TABLE (and not SOURCE) checkIfDdlPossible( catalogTable.entityType ); @@ -1199,7 +1198,7 @@ public void setColumnNullable( LogicalTable catalogTable, String columnName, boo // Check if model permits operation checkModelLogic( catalogTable, columnName ); - catalog.setNullable( logicalColumn.id, nullable ); + catalog.getLogicalRel( catalogTable.namespaceId ).setNullable( logicalColumn.id, nullable ); // Reset plan cache implementation cache & routing cache statement.getQueryProcessor().resetCaches(); @@ -1211,42 +1210,42 @@ public void setColumnPosition( LogicalTable catalogTable, String columnName, Str // Check if model permits operation checkModelLogic( catalogTable, columnName ); - LogicalColumn logicalColumn = getCatalogColumn( catalogTable.id, columnName ); + LogicalColumn logicalColumn = getCatalogColumn( catalogTable.namespaceId, catalogTable.id, columnName ); int targetPosition; LogicalColumn refColumn; if ( beforeColumnName != null ) { - refColumn = getCatalogColumn( catalogTable.id, beforeColumnName ); + refColumn = getCatalogColumn( catalogTable.namespaceId, catalogTable.id, beforeColumnName ); targetPosition = refColumn.position; } else { - refColumn = getCatalogColumn( catalogTable.id, afterColumnName ); + refColumn = getCatalogColumn( catalogTable.namespaceId, catalogTable.id, afterColumnName ); targetPosition = refColumn.position + 1; } if ( logicalColumn.id == refColumn.id ) { throw new RuntimeException( "Same column!" 
); } - List columns = catalog.getColumns( catalogTable.id ); + List columns = catalog.getLogicalRel( catalogTable.namespaceId ).getColumns( catalogTable.id ); if ( targetPosition < logicalColumn.position ) { // Walk from last column to first column for ( int i = columns.size(); i >= 1; i-- ) { if ( i < logicalColumn.position && i >= targetPosition ) { - catalog.setColumnPosition( columns.get( i - 1 ).id, i + 1 ); + catalog.getLogicalRel( catalogTable.namespaceId ).setColumnPosition( columns.get( i - 1 ).id, i + 1 ); } else if ( i == logicalColumn.position ) { - catalog.setColumnPosition( logicalColumn.id, columns.size() + 1 ); + catalog.getLogicalRel( catalogTable.namespaceId ).setColumnPosition( logicalColumn.id, columns.size() + 1 ); } if ( i == targetPosition ) { - catalog.setColumnPosition( logicalColumn.id, targetPosition ); + catalog.getLogicalRel( catalogTable.namespaceId ).setColumnPosition( logicalColumn.id, targetPosition ); } } } else if ( targetPosition > logicalColumn.position ) { // Walk from first column to last column targetPosition--; for ( int i = 1; i <= columns.size(); i++ ) { if ( i > logicalColumn.position && i <= targetPosition ) { - catalog.setColumnPosition( columns.get( i - 1 ).id, i - 1 ); + catalog.getLogicalRel( catalogTable.namespaceId ).setColumnPosition( columns.get( i - 1 ).id, i - 1 ); } else if ( i == logicalColumn.position ) { - catalog.setColumnPosition( logicalColumn.id, columns.size() + 1 ); + catalog.getLogicalRel( catalogTable.namespaceId ).setColumnPosition( logicalColumn.id, columns.size() + 1 ); } if ( i == targetPosition ) { - catalog.setColumnPosition( logicalColumn.id, targetPosition ); + catalog.getLogicalRel( catalogTable.namespaceId ).setColumnPosition( logicalColumn.id, targetPosition ); } } } @@ -1259,7 +1258,7 @@ public void setColumnPosition( LogicalTable catalogTable, String columnName, Str @Override public void setColumnCollation( LogicalTable catalogTable, String columnName, Collation collation, Statement statement ) throws ColumnNotExistsException, DdlOnSourceException { - LogicalColumn logicalColumn = getCatalogColumn( catalogTable.id, columnName ); + LogicalColumn logicalColumn = getCatalogColumn( catalogTable.namespaceId, catalogTable.id, columnName ); // Check if model permits operation checkModelLogic( catalogTable, columnName ); @@ -1267,7 +1266,7 @@ public void setColumnCollation( LogicalTable catalogTable, String columnName, Co // Make sure that this is a table of type TABLE (and not SOURCE) checkIfDdlPossible( catalogTable.entityType ); - catalog.setCollation( logicalColumn.id, collation ); + catalog.getLogicalRel( catalogTable.namespaceId ).setCollation( logicalColumn.id, collation ); // Reset plan cache implementation cache & routing cache statement.getQueryProcessor().resetCaches(); @@ -1276,12 +1275,12 @@ public void setColumnCollation( LogicalTable catalogTable, String columnName, Co @Override public void setDefaultValue( LogicalTable catalogTable, String columnName, String defaultValue, Statement statement ) throws ColumnNotExistsException { - LogicalColumn logicalColumn = getCatalogColumn( catalogTable.id, columnName ); + LogicalColumn logicalColumn = getCatalogColumn( catalogTable.namespaceId, catalogTable.id, columnName ); // Check if model permits operation checkModelLogic( catalogTable, columnName ); - addDefaultValue( defaultValue, logicalColumn.id ); + addDefaultValue( catalogTable.namespaceId, defaultValue, logicalColumn.id ); // Reset plan cache implementation cache & routing cache 
statement.getQueryProcessor().resetCaches(); @@ -1290,12 +1289,12 @@ public void setDefaultValue( LogicalTable catalogTable, String columnName, Strin @Override public void dropDefaultValue( LogicalTable catalogTable, String columnName, Statement statement ) throws ColumnNotExistsException { - LogicalColumn logicalColumn = getCatalogColumn( catalogTable.id, columnName ); + LogicalColumn logicalColumn = getCatalogColumn( catalogTable.namespaceId, catalogTable.id, columnName ); // check if model permits operation checkModelLogic( catalogTable, columnName ); - catalog.deleteDefaultValue( logicalColumn.id ); + catalog.getLogicalRel( catalogTable.namespaceId ).deleteDefaultValue( logicalColumn.id ); // Reset plan cache implementation cache & routing cache statement.getQueryProcessor().resetCaches(); @@ -1318,21 +1317,21 @@ public void modifyDataPlacement( LogicalTable catalogTable, List columnIds // Checks before physically removing of placement that the partition distribution is still valid and sufficient // Identifies which columns need to be removed - for ( CatalogColumnPlacement placement : catalog.getColumnPlacementsOnAdapterPerTable( storeInstance.getAdapterId(), catalogTable.id ) ) { + for ( CatalogColumnPlacement placement : catalog.getAllocRel( catalogTable.namespaceId ).getColumnPlacementsOnAdapterPerTable( storeInstance.getAdapterId(), catalogTable.id ) ) { if ( !columnIds.contains( placement.columnId ) ) { // Check whether there are any indexes located on the store requiring this column - for ( CatalogIndex index : catalog.getIndexes( catalogTable.id, false ) ) { + for ( CatalogIndex index : catalog.getLogicalRel( catalogTable.namespaceId ).getIndexes( catalogTable.id, false ) ) { if ( index.location == storeInstance.getAdapterId() && index.key.columnIds.contains( placement.columnId ) ) { - throw new IndexPreventsRemovalException( index.name, catalog.getColumn( placement.columnId ).name ); + throw new IndexPreventsRemovalException( index.name, catalog.getLogicalRel( catalogTable.namespaceId ).getColumn( placement.columnId ).name ); } } // Check whether the column is a primary key column - CatalogPrimaryKey primaryKey = catalog.getPrimaryKey( catalogTable.primaryKey ); + CatalogPrimaryKey primaryKey = catalog.getLogicalRel( catalogTable.namespaceId ).getPrimaryKey( catalogTable.primaryKey ); if ( primaryKey.columnIds.contains( placement.columnId ) ) { // Check if the placement type is manual. 
If so, change to automatic if ( placement.placementType == PlacementType.MANUAL ) { // Make placement manual - catalog.updateColumnPlacementType( + catalog.getAllocRel( catalogTable.namespaceId ).updateColumnPlacementType( storeInstance.getAdapterId(), placement.columnId, PlacementType.AUTOMATIC ); @@ -1344,7 +1343,7 @@ public void modifyDataPlacement( LogicalTable catalogTable, List columnIds } } - if ( !catalog.validateDataPlacementsConstraints( catalogTable.id, storeInstance.getAdapterId(), columnsToRemove, new ArrayList<>() ) ) { + if ( !catalog.getAllocRel( catalogTable.namespaceId ).validateDataPlacementsConstraints( catalogTable.id, storeInstance.getAdapterId(), columnsToRemove, new ArrayList<>() ) ) { throw new LastPlacementException(); } @@ -1352,9 +1351,9 @@ public void modifyDataPlacement( LogicalTable catalogTable, List columnIds // Remove columns physically for ( long columnId : columnsToRemove ) { // Drop Column on store - storeInstance.dropColumn( statement.getPrepareContext(), catalog.getColumnPlacement( storeInstance.getAdapterId(), columnId ) ); + storeInstance.dropColumn( statement.getPrepareContext(), catalog.getAllocRel( catalogTable.namespaceId ).getColumnPlacement( storeInstance.getAdapterId(), columnId ) ); // Drop column placement - catalog.deleteColumnPlacement( storeInstance.getAdapterId(), columnId, true ); + catalog.getAllocRel( catalogTable.namespaceId ).deleteColumnPlacement( storeInstance.getAdapterId(), columnId, true ); } List tempPartitionGroupList = new ArrayList<>(); @@ -1365,7 +1364,7 @@ public void modifyDataPlacement( LogicalTable catalogTable, List columnIds // If index partitions are specified if ( !partitionGroupIds.isEmpty() && partitionGroupNames.isEmpty() ) { // First convert specified index to correct partitionGroupId - for ( int partitionGroupId : partitionGroupIds ) { + for ( long partitionGroupId : partitionGroupIds ) { // Check if specified partition index is even part of table and if so get corresponding uniquePartId try { int index = catalogTable.partitionProperty.partitionGroupIds.indexOf( partitionGroupId ); @@ -1378,7 +1377,7 @@ public void modifyDataPlacement( LogicalTable catalogTable, List columnIds } // If name partitions are specified else if ( !partitionGroupNames.isEmpty() && partitionGroupIds.isEmpty() ) { - List catalogPartitionGroups = catalog.getPartitionGroups( tableId ); + List catalogPartitionGroups = catalog.getAllocRel( catalogTable.namespaceId ).getPartitionGroups( tableId ); for ( String partitionName : partitionGroupNames ) { boolean isPartOfTable = false; for ( CatalogPartitionGroup catalogPartitionGroup : catalogPartitionGroups ) { @@ -1390,7 +1389,7 @@ else if ( !partitionGroupNames.isEmpty() && partitionGroupIds.isEmpty() ) { } if ( !isPartOfTable ) { throw new RuntimeException( "Specified partition name: '" + partitionName + "' is not part of table '" - + catalogTable.name + "'. Available partitions: " + String.join( ",", catalog.getPartitionGroupNames( tableId ) ) ); + + catalogTable.name + "'. 
Available partitions: " + String.join( ",", catalog.getAllocRel( catalogTable.namespaceId ).getPartitionGroupNames( tableId ) ) ); } } } else if ( partitionGroupNames.isEmpty() && partitionGroupIds.isEmpty() ) { @@ -1407,21 +1406,21 @@ else if ( !partitionGroupNames.isEmpty() && partitionGroupIds.isEmpty() ) { List intendedPartitionIds = new ArrayList<>(); // Gather all partitions relevant to add depending on the specified partitionGroup - tempPartitionGroupList.forEach( pg -> catalog.getPartitions( pg ).forEach( p -> intendedPartitionIds.add( p.id ) ) ); + tempPartitionGroupList.forEach( pg -> catalog.getAllocRel( catalogTable.namespaceId ).getPartitions( pg ).forEach( p -> intendedPartitionIds.add( p.id ) ) ); // Which columns to add List addedColumns = new LinkedList<>(); for ( long cid : columnIds ) { - if ( catalog.checkIfExistsColumnPlacement( storeInstance.getAdapterId(), cid ) ) { - CatalogColumnPlacement placement = catalog.getColumnPlacement( storeInstance.getAdapterId(), cid ); + if ( catalog.getAllocRel( catalogTable.namespaceId ).checkIfExistsColumnPlacement( storeInstance.getAdapterId(), cid ) ) { + CatalogColumnPlacement placement = catalog.getAllocRel( catalogTable.namespaceId ).getColumnPlacement( storeInstance.getAdapterId(), cid ); if ( placement.placementType == PlacementType.AUTOMATIC ) { // Make placement manual - catalog.updateColumnPlacementType( storeInstance.getAdapterId(), cid, PlacementType.MANUAL ); + catalog.getAllocRel( catalogTable.namespaceId ).updateColumnPlacementType( storeInstance.getAdapterId(), cid, PlacementType.MANUAL ); } } else { // Create column placement - catalog.addColumnPlacement( + catalog.getAllocRel( catalogTable.namespaceId ).addColumnPlacement( storeInstance.getAdapterId(), cid, PlacementType.MANUAL, @@ -1430,13 +1429,13 @@ else if ( !partitionGroupNames.isEmpty() && partitionGroupIds.isEmpty() ) { null ); // Add column on store - storeInstance.addColumn( statement.getPrepareContext(), catalogTable, catalog.getColumn( cid ) ); + storeInstance.addColumn( statement.getPrepareContext(), catalogTable, catalog.getLogicalRel( catalogTable.namespaceId ).getColumn( cid ) ); // Add to list of columns for which we need to copy data - addedColumns.add( catalog.getColumn( cid ) ); + addedColumns.add( catalog.getLogicalRel( catalogTable.namespaceId ).getColumn( cid ) ); } } - CatalogDataPlacement dataPlacement = catalog.getDataPlacement( storeInstance.getAdapterId(), catalogTable.id ); + CatalogDataPlacement dataPlacement = catalog.getAllocRel( catalogTable.namespaceId ).getDataPlacement( storeInstance.getAdapterId(), catalogTable.id ); List removedPartitionIdsFromDataPlacement = new ArrayList<>(); // Removed Partition Ids for ( long partitionId : dataPlacement.getAllPartitionIds() ) { @@ -1458,7 +1457,7 @@ else if ( !partitionGroupNames.isEmpty() && partitionGroupIds.isEmpty() ) { } if ( newPartitionIdsOnDataPlacement.size() > 0 ) { - newPartitionIdsOnDataPlacement.forEach( partitionId -> catalog.addPartitionPlacement( + newPartitionIdsOnDataPlacement.forEach( partitionId -> catalog.getAllocRel( catalogTable.namespaceId ).addPartitionPlacement( catalogTable.namespaceId, storeInstance.getAdapterId(), catalogTable.id, partitionId, @@ -1483,27 +1482,27 @@ else if ( !partitionGroupNames.isEmpty() && partitionGroupIds.isEmpty() ) { @Override public void modifyPartitionPlacement( LogicalTable catalogTable, List partitionGroupIds, DataStore storeInstance, Statement statement ) throws LastPlacementException { - int storeId = storeInstance.getAdapterId(); + 
long storeId = storeInstance.getAdapterId(); List newPartitions = new ArrayList<>(); List removedPartitions = new ArrayList<>(); - List currentPartitionGroupsOnStore = catalog.getPartitionGroupsOnDataPlacement( storeId, catalogTable.id ); + List currentPartitionGroupsOnStore = catalog.getAllocRel( catalogTable.namespaceId ).getPartitionGroupsOnDataPlacement( storeId, catalogTable.id ); // Get PartitionGroups that have been removed for ( long partitionGroupId : currentPartitionGroupsOnStore ) { if ( !partitionGroupIds.contains( partitionGroupId ) ) { - catalog.getPartitions( partitionGroupId ).forEach( p -> removedPartitions.add( p.id ) ); + catalog.getAllocRel( catalogTable.namespaceId ).getPartitions( partitionGroupId ).forEach( p -> removedPartitions.add( p.id ) ); } } - if ( !catalog.validateDataPlacementsConstraints( catalogTable.id, storeInstance.getAdapterId(), new ArrayList<>(), removedPartitions ) ) { + if ( !catalog.getAllocRel( catalogTable.namespaceId ).validateDataPlacementsConstraints( catalogTable.id, storeInstance.getAdapterId(), new ArrayList<>(), removedPartitions ) ) { throw new LastPlacementException(); } // Get PartitionGroups that have been newly added for ( Long partitionGroupId : partitionGroupIds ) { if ( !currentPartitionGroupsOnStore.contains( partitionGroupId ) ) { - catalog.getPartitions( partitionGroupId ).forEach( p -> newPartitions.add( p.id ) ); + catalog.getAllocRel( catalogTable.namespaceId ).getPartitions( partitionGroupId ).forEach( p -> newPartitions.add( p.id ) ); } } @@ -1512,7 +1511,7 @@ public void modifyPartitionPlacement( LogicalTable catalogTable, List part if ( newPartitions.size() > 0 ) { // Need to create partitionPlacements first in order to trigger schema creation on PolySchemaBuilder for ( long partitionId : newPartitions ) { - catalog.addPartitionPlacement( + catalog.getAllocRel( catalogTable.namespaceId ).addPartitionPlacement( catalogTable.namespaceId, storeInstance.getAdapterId(), catalogTable.id, partitionId, @@ -1526,11 +1525,11 @@ public void modifyPartitionPlacement( LogicalTable catalogTable, List part // Get only columns that are actually on that store List necessaryColumns = new LinkedList<>(); - catalog.getColumnPlacementsOnAdapterPerTable( storeInstance.getAdapterId(), catalogTable.id ).forEach( cp -> necessaryColumns.add( catalog.getColumn( cp.columnId ) ) ); + catalog.getAllocRel( catalogTable.namespaceId ).getColumnPlacementsOnAdapterPerTable( storeInstance.getAdapterId(), catalogTable.id ).forEach( cp -> necessaryColumns.add( catalog.getLogicalRel( catalogTable.namespaceId ).getColumn( cp.columnId ) ) ); dataMigrator.copyData( statement.getTransaction(), catalog.getAdapter( storeId ), necessaryColumns, newPartitions ); // Add indexes on this new Partition Placement if there is already an index - for ( CatalogIndex currentIndex : catalog.getIndexes( catalogTable.id, false ) ) { + for ( CatalogIndex currentIndex : catalog.getLogicalRel( catalogTable.namespaceId ).getIndexes( catalogTable.id, false ) ) { if ( currentIndex.location == storeId ) { storeInstance.addIndex( statement.getPrepareContext(), currentIndex, newPartitions ); } @@ -1539,7 +1538,7 @@ public void modifyPartitionPlacement( LogicalTable catalogTable, List part if ( removedPartitions.size() > 0 ) { // Remove indexes - for ( CatalogIndex currentIndex : catalog.getIndexes( catalogTable.id, false ) ) { + for ( CatalogIndex currentIndex : catalog.getLogicalRel( catalogTable.namespaceId ).getIndexes( catalogTable.id, false ) ) { if ( currentIndex.location == storeId 
) { storeInstance.dropIndex( statement.getPrepareContext(), currentIndex, removedPartitions ); } @@ -1564,14 +1563,14 @@ public void addColumnPlacement( LogicalTable catalogTable, String columnName, Da throw new PlacementNotExistsException(); } - LogicalColumn logicalColumn = getCatalogColumn( catalogTable.id, columnName ); + LogicalColumn logicalColumn = getCatalogColumn( catalogTable.namespaceId, catalogTable.id, columnName ); // Make sure that this store does not contain a placement of this column - if ( catalog.checkIfExistsColumnPlacement( storeInstance.getAdapterId(), logicalColumn.id ) ) { - CatalogColumnPlacement placement = catalog.getColumnPlacement( storeInstance.getAdapterId(), logicalColumn.id ); + if ( catalog.getAllocRel( catalogTable.namespaceId ).checkIfExistsColumnPlacement( storeInstance.getAdapterId(), logicalColumn.id ) ) { + CatalogColumnPlacement placement = catalog.getAllocRel( catalogTable.namespaceId ).getColumnPlacement( storeInstance.getAdapterId(), logicalColumn.id ); if ( placement.placementType == PlacementType.AUTOMATIC ) { // Make placement manual - catalog.updateColumnPlacementType( + catalog.getAllocRel( catalogTable.namespaceId ).updateColumnPlacementType( storeInstance.getAdapterId(), logicalColumn.id, PlacementType.MANUAL ); @@ -1580,7 +1579,7 @@ public void addColumnPlacement( LogicalTable catalogTable, String columnName, Da } } else { // Create column placement - catalog.addColumnPlacement( + catalog.getAllocRel( catalogTable.namespaceId ).addColumnPlacement( storeInstance.getAdapterId(), logicalColumn.id, PlacementType.MANUAL, @@ -1593,7 +1592,7 @@ public void addColumnPlacement( LogicalTable catalogTable, String columnName, Da // Copy the data to the newly added column placements DataMigrator dataMigrator = statement.getTransaction().getDataMigrator(); dataMigrator.copyData( statement.getTransaction(), catalog.getAdapter( storeInstance.getAdapterId() ), - ImmutableList.of( logicalColumn ), catalog.getPartitionsOnDataPlacement( storeInstance.getAdapterId(), catalogTable.id ) ); + ImmutableList.of( logicalColumn ), catalog.getAllocRel( catalogTable.namespaceId ).getPartitionsOnDataPlacement( storeInstance.getAdapterId(), catalogTable.id ) ); } // Reset query plan cache, implementation cache & routing cache @@ -1611,32 +1610,32 @@ public void dropColumnPlacement( LogicalTable catalogTable, String columnName, D throw new PlacementNotExistsException(); } - LogicalColumn logicalColumn = getCatalogColumn( catalogTable.id, columnName ); + LogicalColumn logicalColumn = getCatalogColumn( catalogTable.namespaceId, catalogTable.id, columnName ); // Check whether this store actually contains a placement of this column - if ( !catalog.checkIfExistsColumnPlacement( storeInstance.getAdapterId(), logicalColumn.id ) ) { + if ( !catalog.getAllocRel( catalogTable.namespaceId ).checkIfExistsColumnPlacement( storeInstance.getAdapterId(), logicalColumn.id ) ) { throw new PlacementNotExistsException(); } // Check whether there are any indexes located on the store requiring this column - for ( CatalogIndex index : catalog.getIndexes( catalogTable.id, false ) ) { + for ( CatalogIndex index : catalog.getLogicalRel( catalogTable.namespaceId ).getIndexes( catalogTable.id, false ) ) { if ( index.location == storeInstance.getAdapterId() && index.key.columnIds.contains( logicalColumn.id ) ) { throw new IndexPreventsRemovalException( index.name, columnName ); } } - if ( !catalog.validateDataPlacementsConstraints( logicalColumn.tableId, storeInstance.getAdapterId(), Arrays.asList( 
logicalColumn.id ), new ArrayList<>() ) ) { + if ( !catalog.getAllocRel( catalogTable.namespaceId ).validateDataPlacementsConstraints( logicalColumn.tableId, storeInstance.getAdapterId(), Arrays.asList( logicalColumn.id ), new ArrayList<>() ) ) { throw new LastPlacementException(); } // Check whether the column to drop is a primary key - CatalogPrimaryKey primaryKey = catalog.getPrimaryKey( catalogTable.primaryKey ); + CatalogPrimaryKey primaryKey = catalog.getLogicalRel( catalogTable.namespaceId ).getPrimaryKey( catalogTable.primaryKey ); if ( primaryKey.columnIds.contains( logicalColumn.id ) ) { throw new PlacementIsPrimaryException(); } // Drop Column on store - storeInstance.dropColumn( statement.getPrepareContext(), catalog.getColumnPlacement( storeInstance.getAdapterId(), logicalColumn.id ) ); + storeInstance.dropColumn( statement.getPrepareContext(), catalog.getAllocRel( catalogTable.namespaceId ).getColumnPlacement( storeInstance.getAdapterId(), logicalColumn.id ) ); // Drop column placement - catalog.deleteColumnPlacement( storeInstance.getAdapterId(), logicalColumn.id, false ); + catalog.getAllocRel( catalogTable.namespaceId ).deleteColumnPlacement( storeInstance.getAdapterId(), logicalColumn.id, false ); // Reset query plan cache, implementation cache & routing cache statement.getQueryProcessor().resetCaches(); @@ -1646,13 +1645,13 @@ public void dropColumnPlacement( LogicalTable catalogTable, String columnName, D @Override public void alterTableOwner( LogicalTable catalogTable, String newOwnerName ) throws UnknownUserException { CatalogUser catalogUser = catalog.getUser( newOwnerName ); - catalog.setTableOwner( catalogTable.id, catalogUser.id ); + catalog.getLogicalRel( catalogTable.namespaceId ).setTableOwner( catalogTable.id, catalogUser.id ); } @Override public void renameTable( LogicalTable catalogTable, String newTableName, Statement statement ) throws EntityAlreadyExistsException { - if ( catalog.checkIfExistsEntity( catalogTable.namespaceId, newTableName ) ) { + if ( catalog.getLogicalRel( catalogTable.namespaceId ).checkIfExistsEntity( newTableName ) ) { throw new EntityAlreadyExistsException(); } // Check if views are dependent from this view @@ -1662,7 +1661,7 @@ public void renameTable( LogicalTable catalogTable, String newTableName, Stateme newTableName = newTableName.toLowerCase(); } - catalog.renameTable( catalogTable.id, newTableName ); + catalog.getLogicalRel( catalogTable.namespaceId ).renameTable( catalogTable.id, newTableName ); // Update Name in statistics StatisticsManager.getInstance().updateTableName( catalogTable, newTableName ); @@ -1674,15 +1673,15 @@ public void renameTable( LogicalTable catalogTable, String newTableName, Stateme @Override public void renameColumn( LogicalTable catalogTable, String columnName, String newColumnName, Statement statement ) throws ColumnAlreadyExistsException, ColumnNotExistsException { - LogicalColumn logicalColumn = getCatalogColumn( catalogTable.id, columnName ); + LogicalColumn logicalColumn = getCatalogColumn( catalogTable.namespaceId, catalogTable.id, columnName ); - if ( catalog.checkIfExistsColumn( logicalColumn.tableId, newColumnName ) ) { + if ( catalog.getLogicalRel( catalogTable.namespaceId ).checkIfExistsColumn( logicalColumn.tableId, newColumnName ) ) { throw new ColumnAlreadyExistsException( newColumnName, logicalColumn.getTableName() ); } // Check if views are dependent from this view checkViewDependencies( catalogTable ); - catalog.renameColumn( logicalColumn.id, newColumnName ); + catalog.getLogicalRel( 
catalogTable.namespaceId ).renameColumn( logicalColumn.id, newColumnName ); // Update Name in statistics StatisticsManager.getInstance().updateColumnName( logicalColumn, newColumnName ); @@ -1693,13 +1692,13 @@ public void renameColumn( LogicalTable catalogTable, String columnName, String n @Override - public void createView( String viewName, long schemaId, AlgNode algNode, AlgCollation algCollation, boolean replace, Statement statement, PlacementType placementType, List projectedColumns, String query, QueryLanguage language ) throws EntityAlreadyExistsException { - viewName = adjustNameIfNeeded( viewName, schemaId ); + public void createView( String viewName, long namespaceId, AlgNode algNode, AlgCollation algCollation, boolean replace, Statement statement, PlacementType placementType, List projectedColumns, String query, QueryLanguage language ) throws EntityAlreadyExistsException { + viewName = adjustNameIfNeeded( viewName, namespaceId ); - if ( catalog.checkIfExistsEntity( schemaId, viewName ) ) { + if ( catalog.getLogicalRel( namespaceId ).checkIfExistsEntity( viewName ) ) { if ( replace ) { try { - dropView( catalog.getTable( schemaId, viewName ), statement ); + dropView( catalog.getLogicalRel( namespaceId ).getTable( namespaceId, viewName ), statement ); } catch ( UnknownTableException | DdlOnSourceException e ) { throw new RuntimeException( "Unable tp drop the existing View with this name." ); } @@ -1717,11 +1716,11 @@ public void createView( String viewName, long schemaId, AlgNode algNode, AlgColl findUnderlyingTablesOfView( algNode, underlyingTables, fieldList ); // add check if underlying table is of model document -> mql, relational -> sql - underlyingTables.keySet().forEach( tableId -> checkModelLangCompatibility( language, tableId ) ); + underlyingTables.keySet().forEach( tableId -> checkModelLangCompatibility( language, namespaceId, tableId ) ); - long tableId = catalog.addView( + long tableId = catalog.getLogicalRel( namespaceId ).addView( viewName, - schemaId, + namespaceId, statement.getPrepareContext().getCurrentUserId(), EntityType.VIEW, false, @@ -1734,7 +1733,7 @@ public void createView( String viewName, long schemaId, AlgNode algNode, AlgColl ); for ( FieldInformation column : columns ) { - catalog.addColumn( + catalog.getLogicalRel( namespaceId ).addColumn( column.name, tableId, column.position, @@ -1759,10 +1758,10 @@ private String adjustNameIfNeeded( String name, long namespaceId ) { @Override - public void createMaterializedView( String viewName, long schemaId, AlgRoot algRoot, boolean replace, Statement statement, List stores, PlacementType placementType, List projectedColumns, MaterializedCriteria materializedCriteria, String query, QueryLanguage language, boolean ifNotExists, boolean ordered ) throws EntityAlreadyExistsException, GenericCatalogException { - viewName = adjustNameIfNeeded( viewName, schemaId ); + public void createMaterializedView( String viewName, long namespaceId, AlgRoot algRoot, boolean replace, Statement statement, List stores, PlacementType placementType, List projectedColumns, MaterializedCriteria materializedCriteria, String query, QueryLanguage language, boolean ifNotExists, boolean ordered ) throws EntityAlreadyExistsException, GenericCatalogException { + viewName = adjustNameIfNeeded( viewName, namespaceId ); // Check if there is already a table with this name - if ( assertEntityExists( schemaId, viewName, ifNotExists ) ) { + if ( assertEntityExists( namespaceId, viewName, ifNotExists ) ) { return; } @@ -1777,19 +1776,19 @@ public 
void createMaterializedView( String viewName, long schemaId, AlgRoot algR Map> underlying = findUnderlyingTablesOfView( algRoot.alg, underlyingTables, fieldList ); // add check if underlying table is of model document -> mql, relational -> sql - underlying.keySet().forEach( tableId -> checkModelLangCompatibility( language, tableId ) ); + underlying.keySet().forEach( tableId -> checkModelLangCompatibility( language, namespaceId, tableId ) ); if ( materializedCriteria.getCriteriaType() == CriteriaType.UPDATE ) { List entityTypes = new ArrayList<>(); - underlying.keySet().forEach( t -> entityTypes.add( catalog.getTable( t ).entityType ) ); + underlying.keySet().forEach( t -> entityTypes.add( catalog.getLogicalRel( namespaceId ).getTable( t ).entityType ) ); if ( !(entityTypes.contains( EntityType.ENTITY )) ) { throw new GenericCatalogException( "Not possible to use Materialized View with Update Freshness if underlying table does not include a modifiable table." ); } } - long tableId = catalog.addMaterializedView( + long tableId = catalog.getLogicalRel( namespaceId ).addMaterializedView( viewName, - schemaId, + namespaceId, statement.getPrepareContext().getCurrentUserId(), EntityType.MATERIALIZED_VIEW, false, @@ -1805,12 +1804,12 @@ public void createMaterializedView( String viewName, long schemaId, AlgRoot algR // Creates a list with all columns, tableId is needed to create the primary key List columns = getColumnInformation( projectedColumns, fieldList, true, tableId ); - Map> addedColumns = new HashMap<>(); + Map> addedColumns = new HashMap<>(); List columnIds = new ArrayList<>(); for ( FieldInformation column : columns ) { - long columnId = catalog.addColumn( + long columnId = catalog.getLogicalRel( namespaceId ).addColumn( column.name, tableId, column.position, @@ -1829,8 +1828,8 @@ public void createMaterializedView( String viewName, long schemaId, AlgRoot algR } for ( DataStore s : stores ) { - int adapterId = s.getAdapterId(); - catalog.addColumnPlacement( + long adapterId = s.getAdapterId(); + catalog.getAllocRel( namespaceId ).addColumnPlacement( s.getAdapterId(), columnId, placementType, @@ -1845,19 +1844,19 @@ public void createMaterializedView( String viewName, long schemaId, AlgRoot algR } else { logicalColumns = new ArrayList<>(); } - logicalColumns.add( catalog.getColumn( columnId ) ); + logicalColumns.add( catalog.getLogicalRel( namespaceId ).getColumn( columnId ) ); addedColumns.put( adapterId, logicalColumns ); } } // Sets previously created primary key - catalog.addPrimaryKey( tableId, columnIds ); + catalog.getLogicalRel( namespaceId ).addPrimaryKey( tableId, columnIds ); - CatalogMaterializedView catalogMaterializedView = (CatalogMaterializedView) catalog.getTable( tableId ); + CatalogMaterializedView catalogMaterializedView = (CatalogMaterializedView) catalog.getLogicalRel( namespaceId ).getTable( tableId ); Catalog.getInstance().getSnapshot( 0 ); for ( DataStore store : stores ) { - catalog.addPartitionPlacement( + catalog.getAllocRel( namespaceId ).addPartitionPlacement( catalogMaterializedView.namespaceId, store.getAdapterId(), tableId, @@ -1876,8 +1875,8 @@ public void createMaterializedView( String viewName, long schemaId, AlgRoot algR } - private void checkModelLangCompatibility( QueryLanguage language, Long tableId ) { - LogicalTable catalogTable = catalog.getTable( tableId ); + private void checkModelLangCompatibility( QueryLanguage language, long namespaceId, Long tableId ) { + LogicalTable catalogTable = catalog.getLogicalRel( namespaceId ).getTable( tableId ); 
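
// Editor's annotation (not part of the patch): the createMaterializedView hunk
// above keeps a per-adapter map of the columns that were physically placed, so
// the data migrator can later fill exactly those placements; together with the
// adapter ids, the patch widens the map key from Integer to Long. A minimal
// sketch of that bookkeeping under the new key type, written with
// computeIfAbsent instead of the explicit containsKey branch used in the hunk;
// `stores`, `columnId` and `namespaceId` are assumed in scope, and java.util
// imports are assumed present:

    Map<Long, List<LogicalColumn>> addedColumns = new HashMap<>();
    for ( DataStore s : stores ) {
        long adapterId = s.getAdapterId(); // now long instead of int
        // remember which logical column ended up on which adapter
        addedColumns.computeIfAbsent( adapterId, k -> new ArrayList<>() )
                .add( catalog.getLogicalRel( namespaceId ).getColumn( columnId ) );
    }
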
if ( catalogTable.getNamespaceType() != language.getNamespaceType() ) { throw new RuntimeException( String.format( @@ -1896,10 +1895,10 @@ public void refreshView( Statement statement, Long materializedId ) { @Override - public long createGraph( long databaseId, String graphName, boolean modifiable, @Nullable List stores, boolean ifNotExists, boolean replace, Statement statement ) { + public long createGraph( String graphName, boolean modifiable, @Nullable List stores, boolean ifNotExists, boolean replace, boolean caseSensitive, Statement statement ) { assert !replace : "Graphs cannot be replaced yet."; - graphName = adjustNameIfNeeded( graphName, databaseId ); + graphName = caseSensitive ? graphName : graphName.toLowerCase(); if ( stores == null ) { // Ask router on which store(s) the graph should be placed @@ -1907,7 +1906,8 @@ public long createGraph( long databaseId, String graphName, boolean modifiable, } // add general graph - long graphId = catalog.addGraph( graphName, stores, modifiable, ifNotExists, replace ); + long graphId = catalog.addNamespace( graphName, NamespaceType.GRAPH, caseSensitive ); + catalog.getLogicalGraph( graphId ).addGraph( graphName, stores, modifiable, ifNotExists, replace ); addGraphPlacement( graphId, stores, false, statement ); @@ -1918,12 +1918,12 @@ public long createGraph( long databaseId, String graphName, boolean modifiable, @Override public long addGraphPlacement( long graphId, List stores, boolean onlyPlacement, Statement statement ) { try { - catalog.addGraphLogistics( graphId, stores, onlyPlacement ); + catalog.getLogicalGraph( graphId ).addGraphLogistics( graphId, stores, onlyPlacement ); } catch ( GenericCatalogException | UnknownTableException | UnknownColumnException e ) { throw new RuntimeException(); } - LogicalGraph graph = catalog.getGraph( graphId ); + LogicalGraph graph = catalog.getLogicalGraph( graphId ).getGraph( graphId ); Catalog.getInstance().getSnapshot( 0 ); List preExistingPlacements = graph.placements @@ -1934,7 +1934,7 @@ public long addGraphPlacement( long graphId, List stores, boolean onl Integer existingAdapterId = preExistingPlacements.isEmpty() ? 
null : preExistingPlacements.get( 0 ); for ( DataStore store : stores ) { - catalog.addGraphPlacement( store.getAdapterId(), graphId ); + catalog.getAllocGraph( graphId ).addGraphPlacement( store.getAdapterId(), graphId ); afterGraphPlacementAddLogistics( store, graphId ); @@ -1954,13 +1954,13 @@ public long addGraphPlacement( long graphId, List stores, boolean onl @Override public void removeGraphDatabasePlacement( long graphId, DataStore store, Statement statement ) { - CatalogGraphPlacement placement = catalog.getGraphPlacement( graphId, store.getAdapterId() ); + CatalogGraphPlacement placement = catalog.getAllocGraph( graphId ).getGraphPlacement( graphId, store.getAdapterId() ); store.dropGraph( statement.getPrepareContext(), placement ); afterGraphDropLogistics( store, graphId ); - catalog.deleteGraphPlacement( store.getAdapterId(), graphId ); + catalog.getAllocGraph( graphId ).deleteGraphPlacement( store.getAdapterId(), graphId ); Catalog.getInstance().getSnapshot( 0 ); @@ -1968,28 +1968,28 @@ public void removeGraphDatabasePlacement( long graphId, DataStore store, Stateme private void afterGraphDropLogistics( DataStore store, long graphId ) { - CatalogGraphMapping mapping = catalog.getGraphMapping( graphId ); + /*CatalogGraphMapping mapping = catalog.getLogicalRel( graphId ).getGraphMapping( graphId ); - catalog.removeDataPlacement( store.getAdapterId(), mapping.nodesId ); - catalog.removeDataPlacement( store.getAdapterId(), mapping.nodesPropertyId ); - catalog.removeDataPlacement( store.getAdapterId(), mapping.edgesId ); - catalog.removeDataPlacement( store.getAdapterId(), mapping.edgesPropertyId ); + catalog.getAllocGraph( graphId ).removeDataPlacement( store.getAdapterId(), mapping.nodesId ); + catalog.getAllocGraph( graphId ).removeDataPlacement( store.getAdapterId(), mapping.nodesPropertyId ); + catalog.getAllocGraph( catalogTable.namespaceId ).removeDataPlacement( store.getAdapterId(), mapping.edgesId ); + catalog.getAllocGraph( catalogTable.namespaceId ).removeDataPlacement( store.getAdapterId(), mapping.edgesPropertyId );*/ // replace } private void afterGraphPlacementAddLogistics( DataStore store, long graphId ) { - CatalogGraphMapping mapping = catalog.getGraphMapping( graphId ); - LogicalTable nodes = catalog.getTable( mapping.nodesId ); - LogicalTable nodeProperty = catalog.getTable( mapping.nodesPropertyId ); - LogicalTable edges = catalog.getTable( mapping.edgesId ); - LogicalTable edgeProperty = catalog.getTable( mapping.edgesPropertyId ); - - catalog.addDataPlacement( store.getAdapterId(), mapping.nodesId ); - catalog.addDataPlacement( store.getAdapterId(), mapping.nodesPropertyId ); - catalog.addDataPlacement( store.getAdapterId(), mapping.edgesId ); - catalog.addDataPlacement( store.getAdapterId(), mapping.edgesPropertyId ); - - catalog.addPartitionPlacement( + /*CatalogGraphMapping mapping = catalog.getLogicalRel( catalogTable.namespaceId ).getGraphMapping( graphId ); + LogicalTable nodes = catalog.getLogicalRel( catalogTable.namespaceId ).getTable( mapping.nodesId ); + LogicalTable nodeProperty = catalog.getLogicalRel( catalogTable.namespaceId ).getTable( mapping.nodesPropertyId ); + LogicalTable edges = catalog.getLogicalRel( catalogTable.namespaceId ).getTable( mapping.edgesId ); + LogicalTable edgeProperty = catalog.getLogicalRel( catalogTable.namespaceId ).getTable( mapping.edgesPropertyId ); + + catalog.getLogicalRel( catalogTable.namespaceId ).addDataPlacement( store.getAdapterId(), mapping.nodesId ); + catalog.getLogicalRel( catalogTable.namespaceId 
).addDataPlacement( store.getAdapterId(), mapping.nodesPropertyId ); + catalog.getLogicalRel( catalogTable.namespaceId ).addDataPlacement( store.getAdapterId(), mapping.edgesId ); + catalog.getLogicalRel( catalogTable.namespaceId ).addDataPlacement( store.getAdapterId(), mapping.edgesPropertyId ); + + catalog.getLogicalRel( catalogTable.namespaceId ).addPartitionPlacement( nodes.namespaceId, store.getAdapterId(), nodes.id, @@ -2000,7 +2000,7 @@ private void afterGraphPlacementAddLogistics( DataStore store, long graphId ) { DataPlacementRole.UPTODATE ); - catalog.addPartitionPlacement( + catalog.getLogicalRel( catalogTable.namespaceId ).addPartitionPlacement( nodeProperty.namespaceId, store.getAdapterId(), nodeProperty.id, @@ -2011,7 +2011,7 @@ private void afterGraphPlacementAddLogistics( DataStore store, long graphId ) { DataPlacementRole.UPTODATE ); - catalog.addPartitionPlacement( + catalog.getLogicalRel( catalogTable.namespaceId ).addPartitionPlacement( edges.namespaceId, store.getAdapterId(), edges.id, @@ -2022,7 +2022,7 @@ private void afterGraphPlacementAddLogistics( DataStore store, long graphId ) { DataPlacementRole.UPTODATE ); - catalog.addPartitionPlacement( + catalog.getLogicalRel( catalogTable.namespaceId ).addPartitionPlacement( edgeProperty.namespaceId, store.getAdapterId(), edgeProperty.id, @@ -2032,21 +2032,21 @@ private void afterGraphPlacementAddLogistics( DataStore store, long graphId ) { null, DataPlacementRole.UPTODATE ); - +*/// todo dl replace } @Override public void addGraphAlias( long graphId, String alias, boolean ifNotExists ) { - catalog.addGraphAlias( graphId, alias, ifNotExists ); + catalog.getLogicalGraph( graphId ).addGraphAlias( graphId, alias, ifNotExists ); } @Override public void removeGraphAlias( long graphId, String alias, boolean ifNotExists ) { alias = alias.toLowerCase(); - catalog.removeGraphAlias( graphId, alias, ifNotExists ); + catalog.getLogicalGraph( graphId ).removeGraphAlias( graphId, alias, ifNotExists ); } @@ -2054,14 +2054,14 @@ public void removeGraphAlias( long graphId, String alias, boolean ifNotExists ) public void replaceGraphAlias( long graphId, String oldAlias, String alias ) { alias = alias.toLowerCase(); oldAlias = oldAlias.toLowerCase(); - catalog.removeGraphAlias( graphId, oldAlias, true ); - catalog.addGraphAlias( graphId, alias, true ); + catalog.getLogicalGraph( graphId ).removeGraphAlias( graphId, oldAlias, true ); + catalog.getLogicalGraph( graphId ).addGraphAlias( graphId, alias, true ); } @Override - public void removeGraphDatabase( long graphId, boolean ifExists, Statement statement ) { - LogicalGraph graph = catalog.getGraph( graphId ); + public void removeGraph( long graphId, boolean ifExists, Statement statement ) { + LogicalGraph graph = catalog.getLogicalGraph( graphId ).getGraph( graphId ); if ( graph == null ) { if ( !ifExists ) { @@ -2071,11 +2071,11 @@ public void removeGraphDatabase( long graphId, boolean ifExists, Statement state } for ( int adapterId : graph.placements ) { - CatalogGraphPlacement placement = Catalog.getInstance().getGraphPlacement( graphId, adapterId ); + CatalogGraphPlacement placement = catalog.getAllocGraph( graphId ).getGraphPlacement( graphId, adapterId ); AdapterManager.getInstance().getStore( adapterId ).dropGraph( statement.getPrepareContext(), placement ); } - catalog.deleteGraph( graphId ); + catalog.getLogicalGraph( graphId ).deleteGraph( graphId ); } @@ -2091,7 +2091,7 @@ private List getColumnInformation( List projectedColum for ( AlgDataTypeField alg : fieldList.getFieldList() 
) { AlgDataType type = alg.getValue(); if ( alg.getType().getPolyType() == PolyType.ARRAY ) { - type = ((ArrayType) alg.getValue()).getComponentType(); + type = alg.getValue().getComponentType(); } String colName = alg.getName(); if ( projectedColumns != null ) { @@ -2156,14 +2156,14 @@ private Map> findUnderlyingTablesOfView( AlgNode algNode, Map getUnderlyingColumns( AlgNode algNode, AlgDataType fieldList ) { LogicalTable table = algNode.getEntity().unwrap( LogicalTable.class ); - List columnIds = table.fieldIds; + List columns = table.columns; List logicalColumnNames = table.getColumnNames(); List underlyingColumns = new ArrayList<>(); - for ( int i = 0; i < columnIds.size(); i++ ) { + for ( int i = 0; i < columns.size(); i++ ) { for ( AlgDataTypeField algDataTypeField : fieldList.getFieldList() ) { String name = logicalColumnNames.get( i ); if ( algDataTypeField.getName().equals( name ) ) { - underlyingColumns.add( columnIds.get( i ) ); + underlyingColumns.add( columns.get( i ).id ); } } } @@ -2172,19 +2172,19 @@ private List getUnderlyingColumns( AlgNode algNode, AlgDataType fieldList @Override - public void createTable( long schemaId, String name, List fields, List constraints, boolean ifNotExists, List stores, PlacementType placementType, Statement statement ) throws EntityAlreadyExistsException { - name = adjustNameIfNeeded( name, schemaId ); + public void createTable( long namespaceId, String name, List fields, List constraints, boolean ifNotExists, List stores, PlacementType placementType, Statement statement ) throws EntityAlreadyExistsException { + name = adjustNameIfNeeded( name, namespaceId ); try { // Check if there is already an entity with this name - if ( assertEntityExists( schemaId, name, ifNotExists ) ) { + if ( assertEntityExists( namespaceId, name, ifNotExists ) ) { return; } fields = new ArrayList<>( fields ); constraints = new ArrayList<>( constraints ); - checkDocumentModel( schemaId, fields, constraints ); + checkDocumentModel( namespaceId, fields, constraints ); boolean foundPk = false; for ( ConstraintInformation constraintInformation : constraints ) { @@ -2205,32 +2205,31 @@ public void createTable( long schemaId, String name, List fiel stores = RoutingManager.getInstance().getCreatePlacementStrategy().getDataStoresForNewTable(); } - long tableId = catalog.addTable( + long tableId = catalog.getLogicalRel( namespaceId ).addTable( name, - schemaId, statement.getPrepareContext().getCurrentUserId(), EntityType.ENTITY, true ); // Initially create DataPlacement containers on every store the table should be placed. 
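Condensed, the createTable flow after this change reduces to four namespace-scoped steps; the sketch below only restates the hunk above, assuming the new helper signatures in which addColumn and addConstraint take the namespace id as their first argument:

    long tableId = catalog.getLogicalRel( namespaceId )
            .addTable( name, statement.getPrepareContext().getCurrentUserId(), EntityType.ENTITY, true );

    // Allocation containers on every store the table should be placed on
    stores.forEach( store -> catalog.getAllocRel( namespaceId ).addDataPlacement( store.getAdapterId(), tableId ) );

    for ( FieldInformation field : fields ) {
        addColumn( namespaceId, field.name, field.typeInformation, field.collation,
                field.defaultValue, tableId, field.position, stores, placementType );
    }
    for ( ConstraintInformation constraint : constraints ) {
        addConstraint( namespaceId, constraint.name, constraint.type, constraint.columnNames, tableId );
    }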
- stores.forEach( store -> catalog.addDataPlacement( store.getAdapterId(), tableId ) ); + stores.forEach( store -> catalog.getAllocRel( namespaceId ).addDataPlacement( store.getAdapterId(), tableId ) ); for ( FieldInformation information : fields ) { - addColumn( information.name, information.typeInformation, information.collation, information.defaultValue, tableId, information.position, stores, placementType ); + addColumn( namespaceId, information.name, information.typeInformation, information.collation, information.defaultValue, tableId, information.position, stores, placementType ); } for ( ConstraintInformation constraint : constraints ) { - addConstraint( constraint.name, constraint.type, constraint.columnNames, tableId ); + addConstraint( namespaceId, constraint.name, constraint.type, constraint.columnNames, tableId ); } - //catalog.updateTablePartitionProperties(tableId, partitionProperty); - LogicalTable catalogTable = catalog.getTable( tableId ); + //catalog.getLogicalRel( catalogTable.namespaceId ).updateTablePartitionProperties(tableId, partitionProperty); + LogicalTable catalogTable = catalog.getLogicalRel( namespaceId ).getTable( tableId ); // Trigger rebuild of schema; triggers schema creation on adapters Catalog.getInstance().getSnapshot( 0 ); for ( DataStore store : stores ) { - catalog.addPartitionPlacement( + catalog.getAllocRel( catalogTable.namespaceId ).addPartitionPlacement( catalogTable.namespaceId, store.getAdapterId(), catalogTable.id, catalogTable.partitionProperty.partitionIds.get( 0 ), @@ -2249,10 +2248,10 @@ public void createTable( long schemaId, String name, List fiel @Override - public void createCollection( long schemaId, String name, boolean ifNotExists, List stores, PlacementType placementType, Statement statement ) throws EntityAlreadyExistsException { - name = adjustNameIfNeeded( name, schemaId ); + public void createCollection( long namespaceId, String name, boolean ifNotExists, List stores, PlacementType placementType, Statement statement ) throws EntityAlreadyExistsException { + name = adjustNameIfNeeded( name, namespaceId ); - if ( assertEntityExists( schemaId, name, ifNotExists ) ) { + if ( assertEntityExists( namespaceId, name, ifNotExists ) ) { return; } @@ -2262,30 +2261,27 @@ public void createCollection( long schemaId, String name, boolean ifNotExists, L } long collectionId; - long partitionId; try { - collectionId = catalog.addCollectionLogistics( schemaId, name, stores, false ); - partitionId = catalog.getPartitionGroups( collectionId ).get( 0 ).id; + collectionId = catalog.getAllocDoc( namespaceId ).addCollectionLogistics( namespaceId, name, stores, false ); } catch ( GenericCatalogException e ) { throw new RuntimeException( e ); } - catalog.addCollection( + catalog.getLogicalDoc( namespaceId ).addCollection( collectionId, name, - schemaId, statement.getPrepareContext().getCurrentUserId(), EntityType.ENTITY, true ); // Initially create DataPlacement containers on every store the table should be placed. 
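The document model follows the same layering: collection logistics and placements live in the allocation catalog, the collection itself in the logical catalog. A condensed sketch of the creation path from this hunk and its continuation below, where userId stands in for statement.getPrepareContext().getCurrentUserId():

    long collectionId = catalog.getAllocDoc( namespaceId )
            .addCollectionLogistics( namespaceId, name, stores, false );

    catalog.getLogicalDoc( namespaceId )
            .addCollection( collectionId, name, userId, EntityType.ENTITY, true );

    // Placements are then registered per store through the allocation catalog
    catalog.getAllocDoc( namespaceId ).addCollectionPlacement(
            namespaceId, store.getAdapterId(), collectionId, PlacementType.AUTOMATIC );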
- LogicalCollection catalogCollection = catalog.getCollection( collectionId ); + LogicalCollection catalogCollection = catalog.getLogicalDoc( namespaceId ).getCollection( collectionId ); // Trigger rebuild of schema; triggers schema creation on adapters Catalog.getInstance().getSnapshot( 0 ); for ( DataStore store : stores ) { - catalog.addCollectionPlacement( + catalog.getAllocDoc( namespaceId ).addCollectionPlacement( catalogCollection.namespaceId, store.getAdapterId(), catalogCollection.id, @@ -2301,7 +2297,7 @@ public void createCollection( long schemaId, String name, boolean ifNotExists, L private boolean assertEntityExists( long namespaceId, String name, boolean ifNotExists ) throws EntityAlreadyExistsException { // Check if there is already an entity with this name - if ( catalog.checkIfExistsEntity( namespaceId, name ) ) { + if ( catalog.getLogicalRel( namespaceId ).checkIfExistsEntity( name ) ) { if ( ifNotExists ) { // It is ok that there is already a table with this name because "IF NOT EXISTS" was specified return true; @@ -2322,35 +2318,31 @@ public void dropCollection( LogicalCollection catalogCollection, Statement state store.dropCollection( statement.getPrepareContext(), catalogCollection ); } - catalog.deleteCollection( catalogCollection.id ); + catalog.getLogicalDoc( catalogCollection.namespaceId ).deleteCollection( catalogCollection.id ); removeDocumentLogistics( catalogCollection, statement ); } public void removeDocumentLogistics( LogicalCollection catalogCollection, Statement statement ) { - CatalogCollectionMapping mapping = catalog.getCollectionMapping( catalogCollection.id ); - LogicalTable table = catalog.getTable( mapping.collectionId ); - catalog.deleteTable( table.id ); + CatalogCollectionMapping mapping = catalog.getAllocDoc( catalogCollection.namespaceId ).getCollectionMapping( catalogCollection.id ); + LogicalTable table = catalog.getLogicalRel( catalogCollection.namespaceId ).getTable( mapping.collectionId ); + catalog.getLogicalRel( catalogCollection.namespaceId ).deleteTable( table.id ); } @Override public void addCollectionPlacement( long namespaceId, String name, List stores, Statement statement ) { long collectionId; - try { - collectionId = catalog.addCollectionLogistics( namespaceId, name, stores, true ); - } catch ( GenericCatalogException e ) { - throw new RuntimeException( e ); - } + collectionId = catalog.getLogicalDoc( namespaceId ).addCollectionLogistics( name, stores, true ); // Initially create DataPlacement containers on every store the table should be placed. 
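The two drop paths nearby run the creation steps in reverse: the physical collection is removed from the store before the catalog entry, again through the namespace-scoped accessors. A condensed restatement of the dropCollectionPlacement and dropCollection hunks, not additional behavior:

    // Drop a single placement
    store.dropCollection( statement.getPrepareContext(), collection );
    catalog.getAllocDoc( namespaceId ).dropCollectionPlacement( collection.id, store.getAdapterId() );

    // Drop the whole collection
    for ( DataStore store : stores ) {
        store.dropCollection( statement.getPrepareContext(), collection );
    }
    catalog.getLogicalDoc( collection.namespaceId ).deleteCollection( collection.id );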
- LogicalCollection catalogCollection = catalog.getCollection( collectionId ); + LogicalCollection catalogCollection = catalog.getLogicalDoc( namespaceId ).getCollection( collectionId ); // Trigger rebuild of schema; triggers schema creation on adapters Catalog.getInstance().getSnapshot( 0 ); for ( DataStore store : stores ) { - catalog.addCollectionPlacement( + catalog.getAllocDoc( namespaceId ).addCollectionPlacement( catalogCollection.namespaceId, store.getAdapterId(), catalogCollection.id, PlacementType.AUTOMATIC ); @@ -2367,7 +2359,7 @@ public void dropCollectionPlacement( long namespaceId, LogicalCollection collect for ( DataStore store : dataStores ) { store.dropCollection( statement.getPrepareContext(), collection ); - catalog.dropCollectionPlacement( collection.id, store.getAdapterId() ); + catalog.getAllocDoc( namespaceId ).dropCollectionPlacement( collection.id, store.getAdapterId() ); if ( !store.getSupportedSchemaType().contains( NamespaceType.DOCUMENT ) ) { removeDocumentPlacementLogistics( collection, store, statement ); @@ -2379,23 +2371,23 @@ public void dropCollectionPlacement( long namespaceId, LogicalCollection collect private void removeDocumentPlacementLogistics( LogicalCollection collection, DataStore store, Statement statement ) { - CatalogCollectionMapping mapping = catalog.getCollectionMapping( collection.id ); - LogicalTable table = catalog.getTable( mapping.collectionId ); + /*CatalogCollectionMapping mapping = catalog.getAllocDoc( collection.namespaceId ).getCollectionMapping( collection.id ); + LogicalTable table = catalog.getLogicalRel( catalogTable.namespaceId ).getTable( mapping.collectionId ); try { dropDataPlacement( table, store, statement ); } catch ( PlacementNotExistsException | LastPlacementException e ) { throw new RuntimeException( e ); - } + }*/ } private void afterDocumentLogistics( DataStore store, long collectionId ) { - CatalogCollectionMapping mapping = catalog.getCollectionMapping( collectionId ); - LogicalTable table = catalog.getTable( mapping.collectionId ); + /*CatalogCollectionMapping mapping = catalog.getLogicalRel( catalogTable.namespaceId ).getCollectionMapping( collectionId ); + LogicalTable table = catalog.getLogicalRel( catalogTable.namespaceId ).getTable( mapping.collectionId ); - catalog.addDataPlacement( store.getAdapterId(), collectionId ); + catalog.getLogicalRel( catalogTable.namespaceId ).addDataPlacement( store.getAdapterId(), collectionId ); - catalog.addPartitionPlacement( + catalog.getLogicalRel( catalogTable.namespaceId ).addPartitionPlacement( table.namespaceId, store.getAdapterId(), table.id, @@ -2404,12 +2396,12 @@ private void afterDocumentLogistics( DataStore store, long collectionId ) { null, null, DataPlacementRole.UPTODATE - ); + );*/ } - private void checkDocumentModel( long schemaId, List columns, List constraints ) { - if ( catalog.getNamespace( schemaId ).namespaceType == NamespaceType.DOCUMENT ) { + private void checkDocumentModel( long namespaceId, List columns, List constraints ) { + if ( catalog.getNamespace( namespaceId ).namespaceType == NamespaceType.DOCUMENT ) { List names = columns.stream().map( c -> c.name ).collect( Collectors.toList() ); if ( names.contains( "_id" ) ) { @@ -2452,8 +2444,8 @@ private void checkDocumentModel( long schemaId, List columns, @Override - public void addPartitioning( PartitionInformation partitionInfo, List stores, Statement statement ) throws GenericCatalogException, UnknownPartitionTypeException, UnknownColumnException, PartitionGroupNamesNotUniqueException, 
UnknownDatabaseException, UnknownTableException, TransactionException, UnknownSchemaException, UnknownUserException, UnknownKeyException { - LogicalColumn logicalColumn = catalog.getColumn( partitionInfo.table.id, partitionInfo.columnName ); + public void addPartitioning( PartitionInformation partitionInfo, List stores, Statement statement ) throws GenericCatalogException, UnknownPartitionTypeException, UnknownColumnException, PartitionGroupNamesNotUniqueException, UnknownTableException, TransactionException, UnknownSchemaException, UnknownUserException, UnknownKeyException { + LogicalColumn logicalColumn = catalog.getLogicalRel( partitionInfo.table.namespaceId ).getColumn( partitionInfo.table.id, partitionInfo.columnName ); PartitionType actualPartitionType = PartitionType.getByName( partitionInfo.typeName ); @@ -2471,7 +2463,7 @@ public void addPartitioning( PartitionInformation partitionInfo, List log.debug( "Creating partition group for table: {} with id {} on schema: {} on column: {}", partitionInfo.table.name, partitionInfo.table.id, partitionInfo.table.getNamespaceName(), logicalColumn.id ); } - LogicalTable unPartitionedTable = catalog.getTable( partitionInfo.table.id ); + LogicalTable unPartitionedTable = partitionInfo.table; // Get partition manager PartitionManagerFactory partitionManagerFactory = PartitionManagerFactory.getInstance(); @@ -2510,7 +2502,7 @@ public void addPartitioning( PartitionInformation partitionInfo, List // Make last partition unbound partition if ( partitionManager.requiresUnboundPartitionGroup() && i == numberOfPartitionGroups - 1 ) { - partId = catalog.addPartitionGroup( + partId = catalog.getAllocRel( partitionInfo.table.namespaceId ).addPartitionGroup( partitionInfo.table.id, "Unbound", partitionInfo.table.namespaceId, @@ -2528,7 +2520,7 @@ public void addPartitioning( PartitionInformation partitionInfo, List // Mainly needed for HASH if ( partitionInfo.qualifiers.isEmpty() ) { - partId = catalog.addPartitionGroup( + partId = catalog.getAllocRel( partitionInfo.table.namespaceId ).addPartitionGroup( partitionInfo.table.id, partitionGroupName, partitionInfo.table.namespaceId, @@ -2537,7 +2529,7 @@ public void addPartitioning( PartitionInformation partitionInfo, List new ArrayList<>(), false ); } else { - partId = catalog.addPartitionGroup( + partId = catalog.getAllocRel( partitionInfo.table.namespaceId ).addPartitionGroup( partitionInfo.table.id, partitionGroupName, partitionInfo.table.namespaceId, @@ -2552,8 +2544,8 @@ public void addPartitioning( PartitionInformation partitionInfo, List List partitionIds = new ArrayList<>(); //get All PartitionGroups and then get all partitionIds for each PG and add them to completeList of partitionIds - //catalog.getPartitionGroups( partitionInfo.table.id ).forEach( pg -> partitionIds.forEach( p -> partitionIds.add( p ) ) ); - partitionGroupIds.forEach( pg -> catalog.getPartitions( pg ).forEach( p -> partitionIds.add( p.id ) ) ); + //catalog.getLogicalRel( catalogTable.namespaceId ).getPartitionGroups( partitionInfo.table.id ).forEach( pg -> partitionIds.forEach( p -> partitionIds.add( p ) ) ); + partitionGroupIds.forEach( pg -> catalog.getAllocRel( partitionInfo.table.namespaceId ).getPartitions( pg ).forEach( p -> partitionIds.add( p.id ) ) ); PartitionProperty partitionProperty; if ( actualPartitionType == PartitionType.TEMPERATURE ) { @@ -2572,11 +2564,11 @@ public void addPartitioning( PartitionInformation partitionInfo, List break; } - int hotPercentageIn = Integer.valueOf( ((RawTemperaturePartitionInformation) 
partitionInfo.rawPartitionInformation).getHotAccessPercentageIn().toString() ); - int hotPercentageOut = Integer.valueOf( ((RawTemperaturePartitionInformation) partitionInfo.rawPartitionInformation).getHotAccessPercentageOut().toString() ); + int hotPercentageIn = Integer.parseInt( ((RawTemperaturePartitionInformation) partitionInfo.rawPartitionInformation).getHotAccessPercentageIn().toString() ); + int hotPercentageOut = Integer.parseInt( ((RawTemperaturePartitionInformation) partitionInfo.rawPartitionInformation).getHotAccessPercentageOut().toString() ); //Initially distribute partitions as intended in a running system - long numberOfPartitionsInHot = numberOfPartitions * hotPercentageIn / 100; + long numberOfPartitionsInHot = (long) numberOfPartitions * hotPercentageIn / 100; if ( numberOfPartitionsInHot == 0 ) { numberOfPartitionsInHot = 1; } @@ -2585,30 +2577,30 @@ public void addPartitioning( PartitionInformation partitionInfo, List // -1 because one partition is already created in COLD List partitionsForHot = new ArrayList<>(); - catalog.getPartitions( partitionGroupIds.get( 0 ) ).forEach( p -> partitionsForHot.add( p.id ) ); + catalog.getAllocRel( partitionInfo.table.namespaceId ).getPartitions( partitionGroupIds.get( 0 ) ).forEach( p -> partitionsForHot.add( p.id ) ); // -1 because one partition is already created in HOT for ( int i = 0; i < numberOfPartitionsInHot - 1; i++ ) { long tempId; - tempId = catalog.addPartition( partitionInfo.table.id, partitionInfo.table.namespaceId, partitionGroupIds.get( 0 ), partitionInfo.qualifiers.get( 0 ), false ); + tempId = catalog.getAllocRel( partitionInfo.table.namespaceId ).addPartition( partitionInfo.table.id, partitionInfo.table.namespaceId, partitionGroupIds.get( 0 ), partitionInfo.qualifiers.get( 0 ), false ); partitionIds.add( tempId ); partitionsForHot.add( tempId ); } - catalog.updatePartitionGroup( partitionGroupIds.get( 0 ), partitionsForHot ); + catalog.getAllocRel( partitionInfo.table.namespaceId ).updatePartitionGroup( partitionGroupIds.get( 0 ), partitionsForHot ); // -1 because one partition is already created in COLD List partitionsForCold = new ArrayList<>(); - catalog.getPartitions( partitionGroupIds.get( 1 ) ).forEach( p -> partitionsForCold.add( p.id ) ); + catalog.getAllocRel( partitionInfo.table.namespaceId ).getPartitions( partitionGroupIds.get( 1 ) ).forEach( p -> partitionsForCold.add( p.id ) ); for ( int i = 0; i < numberOfPartitionsInCold - 1; i++ ) { long tempId; - tempId = catalog.addPartition( partitionInfo.table.id, partitionInfo.table.namespaceId, partitionGroupIds.get( 1 ), partitionInfo.qualifiers.get( 1 ), false ); + tempId = catalog.getAllocRel( partitionInfo.table.namespaceId ).addPartition( partitionInfo.table.id, partitionInfo.table.namespaceId, partitionGroupIds.get( 1 ), partitionInfo.qualifiers.get( 1 ), false ); partitionIds.add( tempId ); partitionsForCold.add( tempId ); } - catalog.updatePartitionGroup( partitionGroupIds.get( 1 ), partitionsForCold ); + catalog.getAllocRel( partitionInfo.table.namespaceId ).updatePartitionGroup( partitionGroupIds.get( 1 ), partitionsForCold ); partitionProperty = TemperaturePartitionProperty.builder() .partitionType( actualPartitionType ) @@ -2639,13 +2631,13 @@ public void addPartitioning( PartitionInformation partitionInfo, List } // Update catalog table - catalog.partitionTable( partitionInfo.table.id, actualPartitionType, logicalColumn.id, numberOfPartitionGroups, partitionGroupIds, partitionProperty ); + catalog.getAllocRel( partitionInfo.table.namespaceId 
).partitionTable( partitionInfo.table.id, actualPartitionType, logicalColumn.id, numberOfPartitionGroups, partitionGroupIds, partitionProperty ); // Get primary key of table and use PK to find all DataPlacements of table long pkid = partitionInfo.table.primaryKey; - List pkColumnIds = catalog.getPrimaryKey( pkid ).columnIds; + List pkColumnIds = catalog.getLogicalRel( partitionInfo.table.namespaceId ).getPrimaryKey( pkid ).columnIds; // Basically get first part of PK even if its compound of PK it is sufficient - LogicalColumn pkColumn = catalog.getColumn( pkColumnIds.get( 0 ) ); + LogicalColumn pkColumn = catalog.getLogicalRel( partitionInfo.table.namespaceId ).getColumn( pkColumnIds.get( 0 ) ); // This gets us only one ccp per store (first part of PK) boolean fillStores = false; @@ -2653,7 +2645,7 @@ public void addPartitioning( PartitionInformation partitionInfo, List stores = new ArrayList<>(); fillStores = true; } - List catalogColumnPlacements = catalog.getColumnPlacement( pkColumn.id ); + List catalogColumnPlacements = catalog.getAllocRel( partitionInfo.table.namespaceId ).getColumnPlacement( pkColumn.id ); for ( CatalogColumnPlacement ccp : catalogColumnPlacements ) { if ( fillStores ) { // Ask router on which store(s) the table should be placed @@ -2665,11 +2657,11 @@ public void addPartitioning( PartitionInformation partitionInfo, List } // Now get the partitioned table, partitionInfo still contains the basic/unpartitioned table. - LogicalTable partitionedTable = catalog.getTable( partitionInfo.table.id ); + LogicalTable partitionedTable = catalog.getLogicalRel( partitionInfo.table.namespaceId ).getTable( partitionInfo.table.id ); DataMigrator dataMigrator = statement.getTransaction().getDataMigrator(); for ( DataStore store : stores ) { for ( long partitionId : partitionIds ) { - catalog.addPartitionPlacement( + catalog.getAllocRel( partitionInfo.table.namespaceId ).addPartitionPlacement( partitionedTable.namespaceId, store.getAdapterId(), partitionedTable.id, @@ -2687,7 +2679,7 @@ public void addPartitioning( PartitionInformation partitionInfo, List // Get only columns that are actually on that store // Every store of a newly partitioned table, initially will hold all partitions List necessaryColumns = new LinkedList<>(); - catalog.getColumnPlacementsOnAdapterPerTable( store.getAdapterId(), partitionedTable.id ).forEach( cp -> necessaryColumns.add( catalog.getColumn( cp.columnId ) ) ); + catalog.getAllocRel( partitionInfo.table.namespaceId ).getColumnPlacementsOnAdapterPerTable( store.getAdapterId(), partitionedTable.id ).forEach( cp -> necessaryColumns.add( catalog.getLogicalRel( partitionInfo.table.namespaceId ).getColumn( cp.columnId ) ) ); // Copy data from the old partition to new partitions dataMigrator.copyPartitionData( @@ -2701,14 +2693,14 @@ public void addPartitioning( PartitionInformation partitionInfo, List } // Adjust indexes - List indexes = catalog.getIndexes( unPartitionedTable.id, false ); + List indexes = catalog.getLogicalRel( partitionInfo.table.namespaceId ).getIndexes( unPartitionedTable.id, false ); for ( CatalogIndex index : indexes ) { // Remove old index DataStore ds = ((DataStore) AdapterManager.getInstance().getAdapter( index.location )); ds.dropIndex( statement.getPrepareContext(), index, unPartitionedTable.partitionProperty.partitionIds ); - catalog.deleteIndex( index.id ); + catalog.getLogicalRel( partitionInfo.table.namespaceId ).deleteIndex( index.id ); // Add new index - long newIndexId = catalog.addIndex( + long newIndexId = 
catalog.getLogicalRel( partitionInfo.table.namespaceId ).addIndex( partitionedTable.id, index.key.columnIds, index.unique, @@ -2718,18 +2710,18 @@ public void addPartitioning( PartitionInformation partitionInfo, List index.type, index.name ); if ( index.location == 0 ) { - IndexManager.getInstance().addIndex( catalog.getIndex( newIndexId ), statement ); + IndexManager.getInstance().addIndex( catalog.getLogicalRel( partitionInfo.table.namespaceId ).getIndex( newIndexId ), statement ); } else { ds.addIndex( statement.getPrepareContext(), - catalog.getIndex( newIndexId ), - catalog.getPartitionsOnDataPlacement( ds.getAdapterId(), unPartitionedTable.id ) ); + catalog.getLogicalRel( partitionInfo.table.namespaceId ).getIndex( newIndexId ), + catalog.getAllocRel( partitionInfo.table.namespaceId ).getPartitionsOnDataPlacement( ds.getAdapterId(), unPartitionedTable.id ) ); } } // Remove old tables stores.forEach( store -> store.dropTable( statement.getPrepareContext(), unPartitionedTable, unPartitionedTable.partitionProperty.partitionIds ) ); - catalog.deletePartitionGroup( unPartitionedTable.id, unPartitionedTable.namespaceId, unPartitionedTable.partitionProperty.partitionGroupIds.get( 0 ) ); + catalog.getAllocRel( partitionInfo.table.namespaceId ).deletePartitionGroup( unPartitionedTable.id, unPartitionedTable.namespaceId, unPartitionedTable.partitionProperty.partitionGroupIds.get( 0 ) ); // Reset plan cache implementation cache & routing cache statement.getQueryProcessor().resetCaches(); @@ -2737,7 +2729,7 @@ public void addPartitioning( PartitionInformation partitionInfo, List @Override - public void removePartitioning( LogicalTable partitionedTable, Statement statement ) throws UnknownDatabaseException, GenericCatalogException, UnknownTableException, TransactionException, UnknownSchemaException, UnknownUserException, UnknownKeyException { + public void removePartitioning( LogicalTable partitionedTable, Statement statement ) throws GenericCatalogException, UnknownTableException, TransactionException, UnknownSchemaException, UnknownUserException, UnknownKeyException { long tableId = partitionedTable.id; if ( log.isDebugEnabled() ) { @@ -2753,20 +2745,20 @@ public void removePartitioning( LogicalTable partitionedTable, Statement stateme placementDistribution = partitionManager.getRelevantPlacements( partitionedTable, partitionedTable.partitionProperty.partitionIds, new ArrayList<>( Arrays.asList( -1 ) ) ); // Update catalog table - catalog.mergeTable( tableId ); + catalog.getAllocRel( partitionedTable.namespaceId ).mergeTable( tableId ); // Now get the merged table - LogicalTable mergedTable = catalog.getTable( tableId ); + LogicalTable mergedTable = catalog.getLogicalRel( partitionedTable.namespaceId ).getTable( tableId ); List stores = new ArrayList<>(); // Get primary key of table and use PK to find all DataPlacements of table long pkid = partitionedTable.primaryKey; - List pkColumnIds = catalog.getPrimaryKey( pkid ).columnIds; + List pkColumnIds = catalog.getLogicalRel( partitionedTable.namespaceId ).getPrimaryKey( pkid ).columnIds; // Basically get first part of PK even if its compound of PK it is sufficient - LogicalColumn pkColumn = catalog.getColumn( pkColumnIds.get( 0 ) ); + LogicalColumn pkColumn = catalog.getLogicalRel( partitionedTable.namespaceId ).getColumn( pkColumnIds.get( 0 ) ); // This gets us only one ccp per store (first part of PK) - List catalogColumnPlacements = catalog.getColumnPlacement( pkColumn.id ); + List catalogColumnPlacements = catalog.getAllocRel( 
partitionedTable.namespaceId ).getColumnPlacement( pkColumn.id ); for ( CatalogColumnPlacement ccp : catalogColumnPlacements ) { // Ask router on which store(s) the table should be placed Adapter adapter = AdapterManager.getInstance().getAdapter( ccp.adapterId ); @@ -2780,7 +2772,7 @@ public void removePartitioning( LogicalTable partitionedTable, Statement stateme // For merge create only full placements on the used stores. Otherwise partition constraints might not hold for ( DataStore store : stores ) { // Need to create partitionPlacements first in order to trigger schema creation on PolySchemaBuilder - catalog.addPartitionPlacement( + catalog.getAllocRel( partitionedTable.namespaceId ).addPartitionPlacement( mergedTable.namespaceId, store.getAdapterId(), mergedTable.id, @@ -2795,13 +2787,13 @@ public void removePartitioning( LogicalTable partitionedTable, Statement stateme // Get only columns that are actually on that store List necessaryColumns = new LinkedList<>(); - catalog.getColumnPlacementsOnAdapterPerTable( store.getAdapterId(), mergedTable.id ).forEach( cp -> necessaryColumns.add( catalog.getColumn( cp.columnId ) ) ); + catalog.getAllocRel( partitionedTable.namespaceId ).getColumnPlacementsOnAdapterPerTable( store.getAdapterId(), mergedTable.id ).forEach( cp -> necessaryColumns.add( catalog.getLogicalRel( partitionedTable.namespaceId ).getColumn( cp.columnId ) ) ); // TODO @HENNLO Check if this can be omitted - catalog.updateDataPlacement( + catalog.getAllocRel( partitionedTable.namespaceId ).updateDataPlacement( store.getAdapterId(), mergedTable.id, - catalog.getDataPlacement( store.getAdapterId(), mergedTable.id ).columnPlacementsOnAdapter, + catalog.getAllocRel( partitionedTable.namespaceId ).getDataPlacement( store.getAdapterId(), mergedTable.id ).columnPlacementsOnAdapter, mergedTable.partitionProperty.partitionIds ); // @@ -2816,14 +2808,14 @@ public void removePartitioning( LogicalTable partitionedTable, Statement stateme } // Adjust indexes - List indexes = catalog.getIndexes( partitionedTable.id, false ); + List indexes = catalog.getLogicalRel( partitionedTable.namespaceId ).getIndexes( partitionedTable.id, false ); for ( CatalogIndex index : indexes ) { // Remove old index DataStore ds = (DataStore) AdapterManager.getInstance().getAdapter( index.location ); ds.dropIndex( statement.getPrepareContext(), index, partitionedTable.partitionProperty.partitionIds ); - catalog.deleteIndex( index.id ); + catalog.getLogicalRel( partitionedTable.namespaceId ).deleteIndex( index.id ); // Add new index - long newIndexId = catalog.addIndex( + long newIndexId = catalog.getLogicalRel( partitionedTable.namespaceId ).addIndex( mergedTable.id, index.key.columnIds, index.unique, @@ -2833,19 +2825,19 @@ public void removePartitioning( LogicalTable partitionedTable, Statement stateme index.type, index.name ); if ( index.location == 0 ) { - IndexManager.getInstance().addIndex( catalog.getIndex( newIndexId ), statement ); + IndexManager.getInstance().addIndex( catalog.getLogicalRel( partitionedTable.namespaceId ).getIndex( newIndexId ), statement ); } else { ds.addIndex( statement.getPrepareContext(), - catalog.getIndex( newIndexId ), - catalog.getPartitionsOnDataPlacement( ds.getAdapterId(), mergedTable.id ) ); + catalog.getLogicalRel( partitionedTable.namespaceId ).getIndex( newIndexId ), + catalog.getAllocRel( partitionedTable.namespaceId ).getPartitionsOnDataPlacement( ds.getAdapterId(), mergedTable.id ) ); } } // Needs to be separated from loop above. 
Otherwise we loose data for ( DataStore store : stores ) { List partitionIdsOnStore = new ArrayList<>(); - catalog.getPartitionPlacementsByTableOnAdapter( store.getAdapterId(), partitionedTable.id ).forEach( p -> partitionIdsOnStore.add( p.partitionId ) ); + catalog.getAllocRel( partitionedTable.namespaceId ).getPartitionPlacementsByTableOnAdapter( store.getAdapterId(), partitionedTable.id ).forEach( p -> partitionIdsOnStore.add( p.partitionId ) ); // Otherwise everything will be dropped again, leaving the table inaccessible partitionIdsOnStore.remove( mergedTable.partitionProperty.partitionIds.get( 0 ) ); @@ -2855,7 +2847,7 @@ public void removePartitioning( LogicalTable partitionedTable, Statement stateme // Loop over **old.partitionIds** to delete all partitions which are part of table // Needs to be done separately because partitionPlacements will be recursively dropped in `deletePartitionGroup` but are needed in dropTable for ( long partitionGroupId : partitionedTable.partitionProperty.partitionGroupIds ) { - catalog.deletePartitionGroup( tableId, partitionedTable.namespaceId, partitionGroupId ); + catalog.getAllocRel( partitionedTable.namespaceId ).deletePartitionGroup( tableId, partitionedTable.namespaceId, partitionGroupId ); } // Reset query plan cache, implementation cache & routing cache @@ -2863,9 +2855,9 @@ public void removePartitioning( LogicalTable partitionedTable, Statement stateme } - private void addColumn( String columnName, ColumnTypeInformation typeInformation, Collation collation, String defaultValue, long tableId, int position, List stores, PlacementType placementType ) throws GenericCatalogException, UnknownCollationException, UnknownColumnException { - columnName = adjustNameIfNeeded( columnName, catalog.getTable( tableId ).namespaceId ); - long addedColumnId = catalog.addColumn( + private void addColumn( long namespaceId, String columnName, ColumnTypeInformation typeInformation, Collation collation, String defaultValue, long tableId, int position, List stores, PlacementType placementType ) throws GenericCatalogException, UnknownCollationException, UnknownColumnException { + columnName = adjustNameIfNeeded( columnName, catalog.getLogicalRel( namespaceId ).getTable( tableId ).namespaceId ); + long addedColumnId = catalog.getLogicalRel( namespaceId ).addColumn( columnName, tableId, position, @@ -2880,10 +2872,10 @@ private void addColumn( String columnName, ColumnTypeInformation typeInformation ); // Add default value - addDefaultValue( defaultValue, addedColumnId ); + addDefaultValue( namespaceId, defaultValue, addedColumnId ); for ( DataStore s : stores ) { - catalog.addColumnPlacement( + catalog.getAllocRel( namespaceId ).addColumnPlacement( s.getAdapterId(), addedColumnId, placementType, @@ -2896,39 +2888,39 @@ private void addColumn( String columnName, ColumnTypeInformation typeInformation @Override - public void addConstraint( String constraintName, ConstraintType constraintType, List columnNames, long tableId ) throws UnknownColumnException, GenericCatalogException { + public void addConstraint( long namespaceId, String constraintName, ConstraintType constraintType, List columnNames, long tableId ) throws UnknownColumnException, GenericCatalogException { List columnIds = new LinkedList<>(); for ( String columnName : columnNames ) { - LogicalColumn logicalColumn = catalog.getColumn( tableId, columnName ); + LogicalColumn logicalColumn = catalog.getLogicalRel( namespaceId ).getColumn( tableId, columnName ); columnIds.add( logicalColumn.id ); } if ( 
constraintType == ConstraintType.PRIMARY ) { - catalog.addPrimaryKey( tableId, columnIds ); + catalog.getLogicalRel( namespaceId ).addPrimaryKey( tableId, columnIds ); } else if ( constraintType == ConstraintType.UNIQUE ) { if ( constraintName == null ) { constraintName = NameGenerator.generateConstraintName(); } - catalog.addUniqueConstraint( tableId, constraintName, columnIds ); + catalog.getLogicalRel( namespaceId ).addUniqueConstraint( tableId, constraintName, columnIds ); } } @Override - public void dropSchema( long databaseId, String schemaName, boolean ifExists, Statement statement ) throws SchemaNotExistException, DdlOnSourceException { + public void dropNamespace( String schemaName, boolean ifExists, Statement statement ) throws SchemaNotExistException, DdlOnSourceException { try { schemaName = schemaName.toLowerCase(); // Check if there is a schema with this name if ( catalog.checkIfExistsNamespace( schemaName ) ) { - LogicalNamespace logicalNamespace = catalog.getSchema( databaseId, schemaName ); + LogicalNamespace logicalNamespace = catalog.getNamespace( schemaName ); // Drop all collections in this namespace - List collections = catalog.getCollections( logicalNamespace.id, null ); + List collections = catalog.getLogicalDoc( logicalNamespace.id ).getCollections( null ); for ( LogicalCollection collection : collections ) { dropCollection( collection, statement ); } // Drop all tables in this schema - List catalogEntities = catalog.getTables( logicalNamespace.id, null ); + List catalogEntities = catalog.getLogicalRel( logicalNamespace.id ).getTables( logicalNamespace.id, null ); for ( LogicalTable catalogTable : catalogEntities ) { dropTable( catalogTable, statement ); } @@ -2952,25 +2944,23 @@ public void dropSchema( long databaseId, String schemaName, boolean ifExists, St @Override public void dropView( LogicalTable catalogView, Statement statement ) throws DdlOnSourceException { // Make sure that this is a table of type VIEW - if ( catalogView.entityType == EntityType.VIEW ) { - // Empty on purpose - } else { + if ( catalogView.entityType != EntityType.VIEW ) { throw new NotViewException(); } // Check if views are dependent from this view checkViewDependencies( catalogView ); - catalog.flagTableForDeletion( catalogView.id, true ); - catalog.deleteViewDependencies( (CatalogView) catalogView ); + catalog.getLogicalRel( catalogView.namespaceId ).flagTableForDeletion( catalogView.id, true ); + catalog.getLogicalRel( catalogView.namespaceId ).deleteViewDependencies( (CatalogView) catalogView ); // Delete columns - for ( Long columnId : catalogView.fieldIds ) { - catalog.deleteColumn( columnId ); + for ( LogicalColumn column : catalogView.columns ) { + catalog.getLogicalRel( catalogView.namespaceId ).deleteColumn( column.id ); } // Delete the view - catalog.deleteTable( catalogView.id ); + catalog.getLogicalRel( catalogView.namespaceId ).deleteTable( catalogView.id ); // Reset plan cache implementation cache & routing cache statement.getQueryProcessor().resetCaches(); @@ -2988,9 +2978,9 @@ public void dropMaterializedView( LogicalTable materializedView, Statement state // Check if views are dependent from this view checkViewDependencies( materializedView ); - catalog.flagTableForDeletion( materializedView.id, true ); + catalog.getLogicalRel( materializedView.namespaceId ).flagTableForDeletion( materializedView.id, true ); - catalog.deleteViewDependencies( (CatalogView) materializedView ); + catalog.getLogicalRel( materializedView.namespaceId ).deleteViewDependencies( (CatalogView) 
materializedView ); dropTable( materializedView, statement ); @@ -3009,7 +2999,7 @@ public void dropTable( LogicalTable catalogTable, Statement statement ) throws D // Check if there are foreign keys referencing this table List selfRefsToDelete = new LinkedList<>(); - List exportedKeys = catalog.getExportedKeys( catalogTable.id ); + List exportedKeys = catalog.getLogicalRel( catalogTable.namespaceId ).getExportedKeys( catalogTable.id ); if ( exportedKeys.size() > 0 ) { for ( CatalogForeignKey foreignKey : exportedKeys ) { if ( foreignKey.tableId == catalogTable.id ) { @@ -3022,12 +3012,12 @@ public void dropTable( LogicalTable catalogTable, Statement statement ) throws D } // Make sure that all adapters are of type store (and not source) - for ( int storeId : catalogTable.dataPlacements ) { + for ( long storeId : catalogTable.dataPlacements ) { getDataStoreInstance( storeId ); } // Delete all indexes - for ( CatalogIndex index : catalog.getIndexes( catalogTable.id, false ) ) { + for ( CatalogIndex index : catalog.getLogicalRel( catalogTable.namespaceId ).getIndexes( catalogTable.id, false ) ) { if ( index.location == 0 ) { // Delete polystore index IndexManager.getInstance().deleteIndex( index ); @@ -3036,24 +3026,24 @@ public void dropTable( LogicalTable catalogTable, Statement statement ) throws D AdapterManager.getInstance().getStore( index.location ).dropIndex( statement.getPrepareContext(), index, - catalog.getPartitionsOnDataPlacement( index.location, catalogTable.id ) ); + catalog.getAllocRel( catalogTable.namespaceId ).getPartitionsOnDataPlacement( index.location, catalogTable.id ) ); } // Delete index in catalog - catalog.deleteIndex( index.id ); + catalog.getLogicalRel( catalogTable.namespaceId ).deleteIndex( index.id ); } // Delete data from the stores and remove the column placement - catalog.flagTableForDeletion( catalogTable.id, true ); - for ( int storeId : catalogTable.dataPlacements ) { + catalog.getLogicalRel( catalogTable.namespaceId ).flagTableForDeletion( catalogTable.id, true ); + for ( long storeId : catalogTable.dataPlacements ) { // Delete table on store List partitionIdsOnStore = new ArrayList<>(); - catalog.getPartitionPlacementsByTableOnAdapter( storeId, catalogTable.id ).forEach( p -> partitionIdsOnStore.add( p.partitionId ) ); + catalog.getAllocRel( catalogTable.namespaceId ).getPartitionPlacementsByTableOnAdapter( storeId, catalogTable.id ).forEach( p -> partitionIdsOnStore.add( p.partitionId ) ); AdapterManager.getInstance().getStore( storeId ).dropTable( statement.getPrepareContext(), catalogTable, partitionIdsOnStore ); // Delete column placement in catalog - for ( Long columnId : catalogTable.fieldIds ) { - if ( catalog.checkIfExistsColumnPlacement( storeId, columnId ) ) { - catalog.deleteColumnPlacement( storeId, columnId, false ); + for ( LogicalColumn column : catalogTable.columns ) { + if ( catalog.getAllocRel( catalogTable.namespaceId ).checkIfExistsColumnPlacement( storeId, column.id ) ) { + catalog.getAllocRel( catalogTable.namespaceId ).deleteColumnPlacement( storeId, column.id, false ); } } } @@ -3061,45 +3051,45 @@ public void dropTable( LogicalTable catalogTable, Statement statement ) throws D // Delete the self-referencing foreign keys try { for ( CatalogForeignKey foreignKey : selfRefsToDelete ) { - catalog.deleteForeignKey( foreignKey.id ); + catalog.getLogicalRel( catalogTable.namespaceId ).deleteForeignKey( foreignKey.id ); } } catch ( GenericCatalogException e ) { - catalog.flagTableForDeletion( catalogTable.id, true ); + 
catalog.getLogicalRel( catalogTable.namespaceId ).flagTableForDeletion( catalogTable.id, true ); throw new PolyphenyDbContextException( "Exception while deleting self-referencing foreign key constraints.", e ); } // Delete indexes of this table - List indexes = catalog.getIndexes( catalogTable.id, false ); + List indexes = catalog.getLogicalRel( catalogTable.namespaceId ).getIndexes( catalogTable.id, false ); for ( CatalogIndex index : indexes ) { - catalog.deleteIndex( index.id ); + catalog.getLogicalRel( catalogTable.namespaceId ).deleteIndex( index.id ); IndexManager.getInstance().deleteIndex( index ); } // Delete keys and constraints try { // Remove primary key - catalog.deletePrimaryKey( catalogTable.id ); + catalog.getLogicalRel( catalogTable.namespaceId ).deletePrimaryKey( catalogTable.id ); // Delete all foreign keys of the table - List foreignKeys = catalog.getForeignKeys( catalogTable.id ); + List foreignKeys = catalog.getLogicalRel( catalogTable.namespaceId ).getForeignKeys( catalogTable.id ); for ( CatalogForeignKey foreignKey : foreignKeys ) { - catalog.deleteForeignKey( foreignKey.id ); + catalog.getLogicalRel( catalogTable.namespaceId ).deleteForeignKey( foreignKey.id ); } // Delete all constraints of the table - for ( CatalogConstraint constraint : catalog.getConstraints( catalogTable.id ) ) { - catalog.deleteConstraint( constraint.id ); + for ( CatalogConstraint constraint : catalog.getLogicalRel( catalogTable.namespaceId ).getConstraints( catalogTable.id ) ) { + catalog.getLogicalRel( catalogTable.namespaceId ).deleteConstraint( constraint.id ); } } catch ( GenericCatalogException e ) { - catalog.flagTableForDeletion( catalogTable.id, true ); + catalog.getLogicalRel( catalogTable.namespaceId ).flagTableForDeletion( catalogTable.id, true ); throw new PolyphenyDbContextException( "Exception while dropping keys.", e ); } // Delete columns - for ( Long columnId : catalogTable.fieldIds ) { - catalog.deleteColumn( columnId ); + for ( LogicalColumn column : catalogTable.columns ) { + catalog.getLogicalRel( catalogTable.namespaceId ).deleteColumn( column.id ); } // Delete the table - catalog.deleteTable( catalogTable.id ); + catalog.getLogicalRel( catalogTable.namespaceId ).deleteTable( catalogTable.id ); // Monitor dropTables for statistics prepareMonitoring( statement, Kind.DROP_TABLE, catalogTable ); diff --git a/dbms/src/main/java/org/polypheny/db/partition/AbstractPartitionManager.java b/dbms/src/main/java/org/polypheny/db/partition/AbstractPartitionManager.java index 457de00f4c..17e629e86f 100644 --- a/dbms/src/main/java/org/polypheny/db/partition/AbstractPartitionManager.java +++ b/dbms/src/main/java/org/polypheny/db/partition/AbstractPartitionManager.java @@ -45,7 +45,7 @@ public abstract class AbstractPartitionManager implements PartitionManager { public boolean probePartitionGroupDistributionChange( LogicalTable catalogTable, int storeId, long columnId, int threshold ) { // Check for the specified columnId if we still have a ColumnPlacement for every partitionGroup for ( Long partitionGroupId : catalogTable.partitionProperty.partitionGroupIds ) { - List ccps = catalog.getColumnPlacementsByPartitionGroup( catalogTable.id, partitionGroupId, columnId ); + List ccps = catalog.getAllocRel( catalogTable.namespaceId ).getColumnPlacementsByPartitionGroup( catalogTable.id, partitionGroupId, columnId ); if ( ccps.size() <= threshold ) { for ( CatalogColumnPlacement placement : ccps ) { if ( placement.adapterId == storeId ) { @@ -66,11 +66,11 @@ public Map> getRelevantPlacements( 
LogicalTab if ( partitionIds != null ) { for ( long partitionId : partitionIds ) { - CatalogPartition catalogPartition = catalog.getPartition( partitionId ); + CatalogPartition catalogPartition = catalog.getAllocRel( catalogTable.namespaceId ).getPartition( partitionId ); List relevantCcps = new ArrayList<>(); - for ( long columnId : catalogTable.fieldIds ) { - List ccps = catalog.getColumnPlacementsByPartitionGroup( catalogTable.id, catalogPartition.partitionGroupId, columnId ); + for ( LogicalColumn column : catalogTable.columns ) { + List ccps = catalog.getAllocRel( catalogTable.namespaceId ).getColumnPlacementsByPartitionGroup( catalogTable.id, catalogPartition.partitionGroupId, column.id ); ccps.removeIf( ccp -> excludedAdapters.contains( ccp.adapterId ) ); if ( !ccps.isEmpty() ) { // Get first column placement which contains partition @@ -127,17 +127,17 @@ public String getUnifiedNullValue() { @Override - public Map>> getAllPlacements( LogicalTable catalogTable, List partitionIds ) { - Map>> adapterPlacements = new HashMap<>(); // adapterId -> partitionId ; placements + public Map>> getAllPlacements( LogicalTable catalogTable, List partitionIds ) { + Map>> adapterPlacements = new HashMap<>(); // adapterId -> partitionId ; placements if ( partitionIds != null ) { for ( long partitionId : partitionIds ) { - List adapters = catalog.getAdaptersByPartitionGroup( catalogTable.id, partitionId ); + List adapters = catalog.getAllocRel( catalogTable.namespaceId ).getAdaptersByPartitionGroup( catalogTable.id, partitionId ); for ( CatalogAdapter adapter : adapters ) { if ( !adapterPlacements.containsKey( adapter.id ) ) { adapterPlacements.put( adapter.id, new HashMap<>() ); } - List placements = catalog.getColumnPlacementsOnAdapterPerTable( adapter.id, catalogTable.id ); + List placements = catalog.getAllocRel( catalogTable.namespaceId ).getColumnPlacementsOnAdapterPerTable( adapter.id, catalogTable.id ); adapterPlacements.get( adapter.id ).put( partitionId, placements ); } } diff --git a/dbms/src/main/java/org/polypheny/db/partition/FrequencyMapImpl.java b/dbms/src/main/java/org/polypheny/db/partition/FrequencyMapImpl.java index 66af0b72fd..a64e5b05de 100644 --- a/dbms/src/main/java/org/polypheny/db/partition/FrequencyMapImpl.java +++ b/dbms/src/main/java/org/polypheny/db/partition/FrequencyMapImpl.java @@ -36,7 +36,6 @@ import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.exceptions.GenericCatalogException; -import org.polypheny.db.catalog.exceptions.UnknownDatabaseException; import org.polypheny.db.catalog.exceptions.UnknownSchemaException; import org.polypheny.db.catalog.exceptions.UnknownUserException; import org.polypheny.db.catalog.logistic.DataPlacementRole; @@ -217,7 +216,7 @@ private void determinePartitionDistribution( LogicalTable table ) { // Which of those are currently in cold --> action needed - List currentHotPartitions = Catalog.INSTANCE.getPartitions( ((TemperaturePartitionProperty) table.partitionProperty).getHotPartitionGroupId() ); + List currentHotPartitions = Catalog.getInstance().getAllocRel( table.namespaceId ).getPartitions( ((TemperaturePartitionProperty) table.partitionProperty).getHotPartitionGroupId() ); for ( CatalogPartition catalogPartition : currentHotPartitions ) { // Remove partitions from List if they are already in HOT (not necessary to send to DataMigrator) @@ -261,13 +260,13 @@ private void redistributePartitions( LogicalTable table, List partitionsFr 
Transaction transaction = null; try { - transaction = transactionManager.startTransaction( Catalog.defaultUserId, table.databaseId, false, "FrequencyMap" ); + transaction = transactionManager.startTransaction( Catalog.defaultUserId, false, "FrequencyMap" ); Statement statement = transaction.createStatement(); DataMigrator dataMigrator = statement.getTransaction().getDataMigrator(); - List adaptersWithHot = Catalog.getInstance().getAdaptersByPartitionGroup( table.id, ((TemperaturePartitionProperty) table.partitionProperty).getHotPartitionGroupId() ); - List adaptersWithCold = Catalog.getInstance().getAdaptersByPartitionGroup( table.id, ((TemperaturePartitionProperty) table.partitionProperty).getColdPartitionGroupId() ); + List adaptersWithHot = Catalog.getInstance().getAllocRel( table.namespaceId ).getAdaptersByPartitionGroup( table.id, ((TemperaturePartitionProperty) table.partitionProperty).getHotPartitionGroupId() ); + List adaptersWithCold = Catalog.getInstance().getAllocRel( table.namespaceId ).getAdaptersByPartitionGroup( table.id, ((TemperaturePartitionProperty) table.partitionProperty).getColdPartitionGroupId() ); log.debug( "Get adapters to create physical tables" ); // Validate that partition does not already exist on store @@ -281,52 +280,7 @@ private void redistributePartitions( LogicalTable table, List partitionsFr } // First create new HOT tables - Adapter adapter = AdapterManager.getInstance().getAdapter( catalogAdapter.id ); - if ( adapter instanceof DataStore ) { - DataStore store = (DataStore) adapter; - - List hotPartitionsToCreate = filterList( catalogAdapter.id, table.id, partitionsFromColdToHot ); - //List coldPartitionsToDelete = filterList( catalogAdapter.id, table.id, partitionsFromHotToCold ); - - // If this store contains both Groups HOT {@literal &} COLD do nothing - if ( hotPartitionsToCreate.size() != 0 ) { - Catalog.getInstance().getPartitionsOnDataPlacement( store.getAdapterId(), table.id ); - - for ( long partitionId : hotPartitionsToCreate ) { - catalog.addPartitionPlacement( - table.namespaceId, - store.getAdapterId(), - table.id, - partitionId, - PlacementType.AUTOMATIC, - null, - null, - DataPlacementRole.UPTODATE ); - } - - store.createPhysicalTable( statement.getPrepareContext(), table, null ); - - List logicalColumns = new ArrayList<>(); - catalog.getColumnPlacementsOnAdapterPerTable( store.getAdapterId(), table.id ).forEach( cp -> logicalColumns.add( catalog.getColumn( cp.columnId ) ) ); - - dataMigrator.copyData( - statement.getTransaction(), - catalog.getAdapter( store.getAdapterId() ), - logicalColumns, - hotPartitionsToCreate ); - - if ( !partitionsToRemoveFromStore.containsKey( store ) ) { - partitionsToRemoveFromStore.put( store, partitionsFromHotToCold ); - } else { - partitionsToRemoveFromStore.replace( - store, - Stream.of( partitionsToRemoveFromStore.get( store ), partitionsFromHotToCold ) - .flatMap( Collection::stream ) - .collect( Collectors.toList() ) - ); - } - } - } + createHotTables( table, partitionsFromColdToHot, partitionsFromHotToCold, partitionsToRemoveFromStore, statement, dataMigrator, catalogAdapter ); } for ( CatalogAdapter catalogAdapter : adaptersWithCold ) { @@ -335,40 +289,7 @@ private void redistributePartitions( LogicalTable table, List partitionsFr continue; } // First create new HOT tables - Adapter adapter = AdapterManager.getInstance().getAdapter( catalogAdapter.id ); - if ( adapter instanceof DataStore ) { - DataStore store = (DataStore) adapter; - List coldPartitionsToCreate = filterList( catalogAdapter.id, 
table.id, partitionsFromHotToCold ); - if ( coldPartitionsToCreate.size() != 0 ) { - Catalog.getInstance().getPartitionsOnDataPlacement( store.getAdapterId(), table.id ); - - for ( long partitionId : coldPartitionsToCreate ) { - catalog.addPartitionPlacement( - table.namespaceId, - store.getAdapterId(), - table.id, - partitionId, - PlacementType.AUTOMATIC, - null, - null, DataPlacementRole.UPTODATE ); - } - store.createPhysicalTable( statement.getPrepareContext(), table, null ); - - List logicalColumns = new ArrayList<>(); - catalog.getColumnPlacementsOnAdapterPerTable( store.getAdapterId(), table.id ).forEach( cp -> logicalColumns.add( catalog.getColumn( cp.columnId ) ) ); - - dataMigrator.copyData( statement.getTransaction(), catalog.getAdapter( store.getAdapterId() ), logicalColumns, coldPartitionsToCreate ); - - if ( !partitionsToRemoveFromStore.containsKey( store ) ) { - partitionsToRemoveFromStore.put( store, partitionsFromColdToHot ); - } else { - partitionsToRemoveFromStore.replace( - store, - Stream.of( partitionsToRemoveFromStore.get( store ), partitionsFromColdToHot ).flatMap( Collection::stream ).collect( Collectors.toList() ) - ); - } - } - } + createHotTables( table, partitionsFromHotToCold, partitionsFromColdToHot, partitionsToRemoveFromStore, statement, dataMigrator, catalogAdapter ); } // DROP all partitions on each store @@ -377,8 +298,8 @@ private void redistributePartitions( LogicalTable table, List partitionsFr long coldPartitionGroupId = ((TemperaturePartitionProperty) table.partitionProperty).getColdPartitionGroupId(); // Update catalogInformation - partitionsFromColdToHot.forEach( p -> Catalog.getInstance().updatePartition( p, hotPartitionGroupId ) ); - partitionsFromHotToCold.forEach( p -> Catalog.getInstance().updatePartition( p, coldPartitionGroupId ) ); + partitionsFromColdToHot.forEach( p -> Catalog.getInstance().getAllocRel( table.namespaceId ).updatePartition( p, hotPartitionGroupId ) ); + partitionsFromHotToCold.forEach( p -> Catalog.getInstance().getAllocRel( table.namespaceId ).updatePartition( p, coldPartitionGroupId ) ); // Remove all tables that have been moved for ( DataStore store : partitionsToRemoveFromStore.keySet() ) { @@ -386,7 +307,7 @@ private void redistributePartitions( LogicalTable table, List partitionsFr } transaction.commit(); - } catch ( GenericCatalogException | UnknownUserException | UnknownDatabaseException | UnknownSchemaException | TransactionException e ) { + } catch ( GenericCatalogException | UnknownUserException | UnknownSchemaException | TransactionException e ) { log.error( "Error while reassigning new location for temperature-based partitions", e ); if ( transaction != null ) { try { @@ -399,18 +320,69 @@ private void redistributePartitions( LogicalTable table, List partitionsFr } + private void createHotTables( LogicalTable table, List partitionsFromColdToHot, List partitionsFromHotToCold, Map> partitionsToRemoveFromStore, Statement statement, DataMigrator dataMigrator, CatalogAdapter catalogAdapter ) { + Adapter adapter = AdapterManager.getInstance().getAdapter( catalogAdapter.id ); + if ( adapter instanceof DataStore ) { + DataStore store = (DataStore) adapter; + + List hotPartitionsToCreate = filterList( table.namespaceId, catalogAdapter.id, table.id, partitionsFromColdToHot ); + //List coldPartitionsToDelete = filterList( catalogAdapter.id, table.id, partitionsFromHotToCold ); + + // If this store contains both Groups HOT {@literal &} COLD do nothing + if ( hotPartitionsToCreate.size() != 0 ) { + 
Catalog.getInstance().getAllocRel( table.namespaceId ).getPartitionsOnDataPlacement( store.getAdapterId(), table.id ); + + for ( long partitionId : hotPartitionsToCreate ) { + catalog.getAllocRel( table.namespaceId ).addPartitionPlacement( + table.namespaceId, + store.getAdapterId(), + table.id, + partitionId, + PlacementType.AUTOMATIC, + null, + null, + DataPlacementRole.UPTODATE ); + } + + store.createPhysicalTable( statement.getPrepareContext(), table, null ); + + List<LogicalColumn> logicalColumns = new ArrayList<>(); + catalog.getAllocRel( table.namespaceId ).getColumnPlacementsOnAdapterPerTable( store.getAdapterId(), table.id ).forEach( cp -> logicalColumns.add( catalog.getLogicalRel( table.namespaceId ).getColumn( cp.columnId ) ) ); + + dataMigrator.copyData( + statement.getTransaction(), + catalog.getAdapter( store.getAdapterId() ), + logicalColumns, + hotPartitionsToCreate ); + + if ( !partitionsToRemoveFromStore.containsKey( store ) ) { + partitionsToRemoveFromStore.put( store, partitionsFromHotToCold ); + } else { + partitionsToRemoveFromStore.replace( + store, + Stream.of( partitionsToRemoveFromStore.get( store ), partitionsFromHotToCold ) + .flatMap( Collection::stream ) + .collect( Collectors.toList() ) + ); + } + } + } + } + + + /** * Cleanses the list if physical partitions already reside on the store. Happens if the partition groups HOT and COLD logically reside on the same store. * Therefore, no actual data distribution has to take place. * + * @param namespaceId * @param adapterId Adapter which is to receive the new tables * @param tableId Id of the temperature-partitioned table * @param partitionsToFilter List of partitions to be filtered * @return The filtered and cleansed list */ - private List<Long> filterList( int adapterId, long tableId, List<Long> partitionsToFilter ) { + private List<Long> filterList( long namespaceId, long adapterId, long tableId, List<Long> partitionsToFilter ) { // Remove partition from list if it's already contained on the store - for ( long partitionId : Catalog.getInstance().getPartitionsOnDataPlacement( adapterId, tableId ) ) { + for ( long partitionId : Catalog.getInstance().getAllocRel( namespaceId ).getPartitionsOnDataPlacement( adapterId, tableId ) ) { if ( partitionsToFilter.contains( partitionId ) ) { partitionsToFilter.remove( partitionId ); } diff --git a/dbms/src/main/java/org/polypheny/db/partition/TemperatureAwarePartitionManager.java b/dbms/src/main/java/org/polypheny/db/partition/TemperatureAwarePartitionManager.java index ee2c47aa87..b11de38836 100644 --- a/dbms/src/main/java/org/polypheny/db/partition/TemperatureAwarePartitionManager.java +++ b/dbms/src/main/java/org/polypheny/db/partition/TemperatureAwarePartitionManager.java @@ -62,7 +62,7 @@ public Map<Long, List<CatalogColumnPlacement>> getRelevantPlacements( LogicalTab @Override - public Map<Integer, Map<Long, List<CatalogColumnPlacement>>> getAllPlacements( LogicalTable catalogTable, List<Long> partitionIds ) { + public Map<Long, Map<Long, List<CatalogColumnPlacement>>> getAllPlacements( LogicalTable catalogTable, List<Long> partitionIds ) { // Get partition manager PartitionManagerFactory partitionManagerFactory = PartitionManagerFactory.getInstance(); PartitionManager partitionManager = partitionManagerFactory.getPartitionManager( diff --git a/dbms/src/main/java/org/polypheny/db/processing/ConstraintEnforceAttacher.java b/dbms/src/main/java/org/polypheny/db/processing/ConstraintEnforceAttacher.java index 83ea071f9b..f8b4346bc6 100644 --- a/dbms/src/main/java/org/polypheny/db/processing/ConstraintEnforceAttacher.java +++ b/dbms/src/main/java/org/polypheny/db/processing/ConstraintEnforceAttacher.java @@ -658,7 +658,7 @@ private boolean
testConstraintsValid() { .stream() .filter( t -> t.entityType == EntityType.ENTITY && t.getNamespaceType() == NamespaceType.RELATIONAL ) .collect( Collectors.toList() ); - Transaction transaction = this.manager.startTransaction( Catalog.defaultUserId, Catalog.defaultDatabaseId, false, "ConstraintEnforcement" ); + Transaction transaction = this.manager.startTransaction( Catalog.defaultUserId, false, "ConstraintEnforcement" ); Statement statement = transaction.createStatement(); QueryProcessor processor = statement.getQueryProcessor(); List infos = ConstraintEnforceAttacher diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java b/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java index d66f651926..e239a88013 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java @@ -55,17 +55,15 @@ import org.polypheny.db.algebra.type.AlgRecordType; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogAdapter; -import org.polypheny.db.catalog.entity.CatalogCollectionPlacement; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; import org.polypheny.db.catalog.entity.CatalogEntity; -import org.polypheny.db.catalog.entity.CatalogGraphMapping; -import org.polypheny.db.catalog.entity.CatalogGraphPlacement; import org.polypheny.db.catalog.entity.CatalogNamespace; import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; import org.polypheny.db.catalog.entity.logical.LogicalCollection; import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.catalog.entity.physical.PhysicalEntity; import org.polypheny.db.catalog.entity.physical.PhysicalGraph; import org.polypheny.db.catalog.entity.physical.PhysicalTable; import org.polypheny.db.catalog.logistic.NamespaceType; @@ -83,7 +81,6 @@ import org.polypheny.db.routing.LogicalQueryInformation; import org.polypheny.db.routing.Router; import org.polypheny.db.schema.ModelTrait; -import org.polypheny.db.schema.PolySchemaBuilder; import org.polypheny.db.tools.RoutedAlgBuilder; import org.polypheny.db.transaction.Statement; import org.polypheny.db.type.PolyType; @@ -116,7 +113,7 @@ protected static Map> selectPlacement( Logica // Find the adapter with the most column placements int adapterIdWithMostPlacements = -1; int numOfPlacements = 0; - for ( Entry> entry : catalog.getColumnPlacementsByAdapter( table.id ).entrySet() ) { + for ( Entry> entry : catalog.getAllocRel( table.namespaceId ).getColumnPlacementsByAdapter( table.id ).entrySet() ) { if ( entry.getValue().size() > numOfPlacements ) { adapterIdWithMostPlacements = entry.getKey(); numOfPlacements = entry.getValue().size(); @@ -125,11 +122,11 @@ protected static Map> selectPlacement( Logica // Take the adapter with most placements as base and add missing column placements List placementList = new LinkedList<>(); - for ( long cid : table.fieldIds ) { - if ( catalog.getDataPlacement( adapterIdWithMostPlacements, table.id ).columnPlacementsOnAdapter.contains( cid ) ) { - placementList.add( Catalog.getInstance().getColumnPlacement( adapterIdWithMostPlacements, cid ) ); + for ( LogicalColumn column : table.columns ) { + if ( catalog.getAllocRel( table.namespaceId ).getDataPlacement( adapterIdWithMostPlacements, table.id ).columnPlacementsOnAdapter.contains( column.id ) ) { + 
placementList.add( Catalog.getInstance().getAllocRel( table.namespaceId ).getColumnPlacement( adapterIdWithMostPlacements, column.id ) ); } else { - placementList.add( Catalog.getInstance().getColumnPlacement( cid ).get( 0 ) ); + placementList.add( Catalog.getInstance().getAllocRel( table.namespaceId ).getColumnPlacement( column.id ).get( 0 ) ); } } @@ -176,22 +173,14 @@ public AlgNode recursiveCopy( AlgNode node ) { public RoutedAlgBuilder handleScan( RoutedAlgBuilder builder, Statement statement, - long tableId, - String storeUniqueName, - String logicalSchemaName, - String logicalTableName, - String physicalSchemaName, - String physicalTableName, - long partitionId, - NamespaceType namespaceType ) { - - AlgNode node = builder.scan( ImmutableList.of( - PolySchemaBuilder.buildAdapterSchemaName( storeUniqueName, logicalSchemaName, physicalSchemaName ), - logicalTableName + "_" + partitionId ) ).build(); + long partitionId ) { + + PhysicalEntity physical = catalog.getPhysicalEntity( partitionId ); + AlgNode node = builder.scan( catalog.getPhysicalEntity( partitionId ) ).build(); builder.push( node ); - if ( namespaceType == NamespaceType.DOCUMENT + if ( physical.namespaceType == NamespaceType.DOCUMENT && node.getRowType().getFieldCount() == 1 && node.getRowType().getFieldList().get( 0 ).getName().equals( "d" ) && node.getRowType().getFieldList().get( 0 ).getType().getPolyType() == PolyType.DOCUMENT ) { @@ -283,21 +272,14 @@ public AlgNode buildJoinedScan( Statement statement, AlgOptCluster cluster, Map< } if ( placementsByAdapter.size() == 1 ) { - List ccps = placementsByAdapter.values().iterator().next(); - CatalogColumnPlacement ccp = ccps.get( 0 ); - CatalogPartitionPlacement cpp = catalog.getPartitionPlacement( ccp.adapterId, partitionId ); + // List ccps = placementsByAdapter.values().iterator().next(); + // CatalogColumnPlacement ccp = ccps.get( 0 ); + // CatalogPartitionPlacement cpp = catalog.getPartitionPlacement( ccp.adapterId, partitionId ); builder = handleScan( builder, statement, - ccp.tableId, - ccp.adapterUniqueName, - ccp.getLogicalSchemaName(), - ccp.getLogicalTableName(), - ccp.physicalSchemaName, - cpp.physicalTableName, - cpp.partitionId, - catalog.getTable( ccp.tableId ).getNamespaceType() ); + partitionId ); // Final project buildFinalProject( builder, currentPlacements ); @@ -305,17 +287,17 @@ public AlgNode buildJoinedScan( Statement statement, AlgOptCluster cluster, Map< // We need to join placements on different adapters // Get primary key - long pkid = catalog.getTable( currentPlacements.get( 0 ).tableId ).primaryKey; - List pkColumnIds = catalog.getPrimaryKey( pkid ).columnIds; + long pkid = catalog.getLogicalRel( currentPlacements.get( 0 ).namespaceId ).getTable( currentPlacements.get( 0 ).tableId ).primaryKey; + List pkColumnIds = catalog.getLogicalRel( currentPlacements.get( 0 ).namespaceId ).getPrimaryKey( pkid ).columnIds; List pkColumns = new LinkedList<>(); for ( long pkColumnId : pkColumnIds ) { - pkColumns.add( catalog.getColumn( pkColumnId ) ); + pkColumns.add( catalog.getLogicalRel( currentPlacements.get( 0 ).namespaceId ).getColumn( pkColumnId ) ); } // Add primary key for ( Entry> entry : placementsByAdapter.entrySet() ) { for ( LogicalColumn pkColumn : pkColumns ) { - CatalogColumnPlacement pkPlacement = catalog.getColumnPlacement( entry.getKey(), pkColumn.id ); + CatalogColumnPlacement pkPlacement = catalog.getAllocRel( currentPlacements.get( 0 ).namespaceId ).getColumnPlacement( entry.getKey(), pkColumn.id ); if ( !entry.getValue().contains( 
pkPlacement ) ) { entry.getValue().add( pkPlacement ); } @@ -326,19 +308,13 @@ public AlgNode buildJoinedScan( Statement statement, AlgOptCluster cluster, Map< boolean first = true; for ( List ccps : placementsByAdapter.values() ) { CatalogColumnPlacement ccp = ccps.get( 0 ); - CatalogPartitionPlacement cpp = catalog.getPartitionPlacement( ccp.adapterId, partitionId ); + CatalogPartitionPlacement cpp = catalog.getAllocRel( currentPlacements.get( 0 ).namespaceId ).getPartitionPlacement( ccp.adapterId, partitionId ); handleScan( builder, statement, - ccp.tableId, - ccp.adapterUniqueName, - ccp.getLogicalSchemaName(), - ccp.getLogicalTableName(), - ccp.physicalSchemaName, - cpp.physicalTableName, - cpp.partitionId, - catalog.getTable( ccp.tableId ).getNamespaceType() ); + cpp.partitionId + ); if ( first ) { first = false; } else { @@ -380,7 +356,7 @@ public AlgNode buildJoinedScan( Statement statement, AlgOptCluster cluster, Map< CatalogColumnPlacement placement = new ArrayList<>( placements.values() ).get( 0 ).get( 0 ); // todo dl: remove after RowType refactor - if ( catalog.getTable( placement.tableId ).namespaceType == NamespaceType.DOCUMENT ) { + if ( catalog.getNamespace( placement.namespaceId ).namespaceType == NamespaceType.DOCUMENT ) { AlgDataType rowType = new AlgRecordType( List.of( new AlgDataTypeFieldImpl( "d", 0, cluster.getTypeFactory().createPolyType( PolyType.DOCUMENT ) ) ) ); builder.push( new LogicalTransformer( node.getCluster(), @@ -401,7 +377,7 @@ public AlgNode buildJoinedScan( Statement statement, AlgOptCluster cluster, Map< private void buildFinalProject( RoutedAlgBuilder builder, List currentPlacements ) { List rexNodes = new ArrayList<>(); List placementList = currentPlacements.stream() - .map( col -> catalog.getColumn( col.columnId ) ) + .map( col -> catalog.getLogicalRel( currentPlacements.get( 0 ).namespaceId ).getColumn( col.columnId ) ) .sorted( Comparator.comparingInt( col -> col.position ) ) .collect( Collectors.toList() ); for ( LogicalColumn logicalColumn : placementList ) { @@ -432,11 +408,7 @@ public AlgNode handleGraphScan( LogicalLpgScan alg, Statement statement, @Nullab placements = List.of( placementId ); } - for ( int adapterId : placements ) { - CatalogAdapter adapter = catalog.getAdapter( adapterId ); - CatalogGraphPlacement graphPlacement = catalog.getGraphPlacement( catalogGraph.id, adapterId ); - String name = PolySchemaBuilder.buildAdapterSchemaName( adapter.uniqueName, catalogGraph.name, graphPlacement.physicalName ); - + for ( long adapterId : placements ) { PhysicalGraph graph = snapshot.getPhysicalGraph( catalogGraph.id, adapterId ); if ( !(graph instanceof TranslatableEntity) ) { @@ -459,7 +431,7 @@ public AlgNode handleGraphScan( LogicalLpgScan alg, Statement statement, @Nullab private AlgNode handleGraphOnRelational( LogicalLpgScan alg, CatalogNamespace namespace, Statement statement, Integer placementId ) { AlgOptCluster cluster = alg.getCluster(); - List tables = catalog.getTables( new Pattern( namespace.name ), null ); + List tables = catalog.getLogicalRel( namespace.id ).getTables( new Pattern( namespace.name ), null ); List> scans = tables.stream() .map( t -> Pair.of( t.name, buildJoinedScan( statement, cluster, selectPlacement( t ) ) ) ) .collect( Collectors.toList() ); @@ -473,7 +445,7 @@ private AlgNode handleGraphOnRelational( LogicalLpgScan alg, CatalogNamespace na private AlgNode handleGraphOnDocument( LogicalLpgScan alg, CatalogNamespace namespace, Statement statement, Integer placementId ) { AlgOptCluster cluster = 
alg.getCluster(); - List collections = catalog.getCollections( namespace.id, null ); + List collections = catalog.getLogicalDoc( namespace.id ).getCollections( null ); List> scans = collections.stream() .map( t -> { RoutedAlgBuilder algBuilder = RoutedAlgBuilder.create( statement, alg.getCluster() ); @@ -490,8 +462,8 @@ private AlgNode handleGraphOnDocument( LogicalLpgScan alg, CatalogNamespace name } - public AlgNode getRelationalScan( LogicalLpgScan alg, int adapterId, Statement statement ) { - CatalogGraphMapping mapping = Catalog.getInstance().getGraphMapping( alg.entity.id ); + public AlgNode getRelationalScan( LogicalLpgScan alg, long adapterId, Statement statement ) { + /*CatalogGraphMapping mapping = Catalog.getInstance().getLogicalGraph( alg.entity.namespaceId ).getGraphMapping( alg.entity.id ); PhysicalTable nodesTable = statement.getDataContext().getSnapshot().getLogicalTable( mapping.nodesId ).unwrap( PhysicalTable.class ); PhysicalTable nodePropertiesTable = statement.getDataContext().getSnapshot().getLogicalTable( mapping.nodesPropertyId ).unwrap( PhysicalTable.class ); @@ -503,12 +475,13 @@ public AlgNode getRelationalScan( LogicalLpgScan alg, int adapterId, Statement s AlgNode edge = buildSubstitutionJoin( alg, edgesTable, edgePropertiesTable ); return LogicalTransformer.create( List.of( node, edge ), alg.getTraitSet().replace( ModelTrait.RELATIONAL ), ModelTrait.RELATIONAL, ModelTrait.GRAPH, alg.getRowType() ); - + */ // todo dl + return null; } protected CatalogEntity getSubstitutionTable( Statement statement, long tableId, long columnId, int adapterId ) { - LogicalTable nodes = Catalog.getInstance().getTable( tableId ); + /*LogicalTable nodes = Catalog.getInstance().getTable( tableId ); CatalogColumnPlacement placement = Catalog.getInstance().getColumnPlacement( adapterId, columnId ); List qualifiedTableName = ImmutableList.of( PolySchemaBuilder.buildAdapterSchemaName( @@ -519,6 +492,8 @@ protected CatalogEntity getSubstitutionTable( Statement statement, long tableId, nodes.name + "_" + nodes.partitionProperty.partitionIds.get( 0 ) ); return statement.getDataContext().getSnapshot().getLogicalTable( qualifiedTableName ); + */ // todo dl + return null; } @@ -564,12 +539,12 @@ protected RoutedAlgBuilder handleDocumentScan( DocumentScan alg, Statement st if ( !adapter.supportedNamespaces.contains( sourceModel ) ) { // document on relational - scans.add( handleDocumentOnRelational( alg, placementId, statement, builder ) ); + scans.add( handleDocumentOnRelational( (DocumentScan) alg, placementId, statement, builder ) ); continue; } - CatalogCollectionPlacement placement = catalog.getCollectionPlacement( collection.id, placementId ); - String namespaceName = PolySchemaBuilder.buildAdapterSchemaName( adapter.uniqueName, collection.getNamespaceName(), placement.physicalNamespaceName ); - String collectionName = collection.name + "_" + placement.id; + // CatalogCollectionPlacement placement = catalog.getAllocDoc( alg.entity ).getCollectionPlacement( collection.id, placementId ); + // String namespaceName = PolySchemaBuilder.buildAdapterSchemaName( adapter.uniqueName, collection.getNamespaceName(), placement.physicalNamespaceName ); + // String collectionName = collection.name + "_" + placement.id; PhysicalTable collectionTable = snapshot.getPhysicalTable( collection.id, adapterId ); // we might previously have pushed the non-native transformer builder.clear(); @@ -586,18 +561,21 @@ protected RoutedAlgBuilder handleDocumentScan( DocumentScan alg, Statement st private 
RoutedAlgBuilder handleTransformerDocScan( DocumentScan alg, Statement statement, RoutedAlgBuilder builder ) { - AlgNode scan = buildJoinedScan( statement, alg.getCluster(), selectPlacement( catalog.getTable( alg.entity.id ) ) ); + /*AlgNode scan = buildJoinedScan( statement, alg.getCluster(), selectPlacement( alg.entity ) ); builder.push( scan ); AlgTraitSet out = alg.getTraitSet().replace( ModelTrait.RELATIONAL ); builder.push( new LogicalTransformer( builder.getCluster(), List.of( builder.build() ), null, out.replace( ModelTrait.DOCUMENT ), ModelTrait.RELATIONAL, ModelTrait.DOCUMENT, alg.getRowType(), false ) ); + + return builder; + */// todo dl return builder; } @NotNull - private RoutedAlgBuilder handleDocumentOnRelational( DocumentScan node, Integer adapterId, Statement statement, RoutedAlgBuilder builder ) { - List columns = catalog.getColumns( node.entity.id ); + private RoutedAlgBuilder handleDocumentOnRelational( DocumentScan node, Integer adapterId, Statement statement, RoutedAlgBuilder builder ) { + List columns = node.entity.columns; AlgTraitSet out = node.getTraitSet().replace( ModelTrait.RELATIONAL ); CatalogEntity subTable = getSubstitutionTable( statement, node.entity.id, columns.get( 0 ).id, adapterId ); builder.scan( subTable ); diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java b/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java index 8efac6664c..2102d0526d 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java @@ -44,9 +44,7 @@ import org.polypheny.db.algebra.core.document.DocumentProject; import org.polypheny.db.algebra.core.document.DocumentScan; import org.polypheny.db.algebra.core.document.DocumentValues; -import org.polypheny.db.algebra.core.lpg.LpgProject; import org.polypheny.db.algebra.core.lpg.LpgScan; -import org.polypheny.db.algebra.core.lpg.LpgValues; import org.polypheny.db.algebra.core.relational.RelModify; import org.polypheny.db.algebra.logical.common.LogicalBatchIterator; import org.polypheny.db.algebra.logical.common.LogicalConditionalExecute; @@ -65,7 +63,6 @@ import org.polypheny.db.algebra.logical.lpg.LogicalLpgProject; import org.polypheny.db.algebra.logical.lpg.LogicalLpgScan; import org.polypheny.db.algebra.logical.lpg.LogicalLpgTransformer; -import org.polypheny.db.algebra.logical.lpg.LogicalLpgValues; import org.polypheny.db.algebra.logical.relational.LogicalFilter; import org.polypheny.db.algebra.logical.relational.LogicalModifyCollect; import org.polypheny.db.algebra.logical.relational.LogicalProject; @@ -79,11 +76,9 @@ import org.polypheny.db.algebra.type.AlgRecordType; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogAdapter; -import org.polypheny.db.catalog.entity.CatalogCollectionMapping; import org.polypheny.db.catalog.entity.CatalogCollectionPlacement; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; import org.polypheny.db.catalog.entity.CatalogEntity; -import org.polypheny.db.catalog.entity.CatalogGraphMapping; import org.polypheny.db.catalog.entity.CatalogGraphPlacement; import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; import org.polypheny.db.catalog.entity.logical.LogicalCollection; @@ -113,7 +108,6 @@ import org.polypheny.db.routing.RoutingManager; import org.polypheny.db.schema.Entity; import org.polypheny.db.schema.ModelTrait; -import org.polypheny.db.schema.PolySchemaBuilder; import 
org.polypheny.db.schema.graph.ModifiableGraph; import org.polypheny.db.tools.AlgBuilder; import org.polypheny.db.tools.RoutedAlgBuilder; @@ -151,11 +145,11 @@ public AlgNode routeDml( LogicalRelModify modify, Statement statement ) { } long pkid = catalogTable.primaryKey; - List pkColumnIds = catalog.getPrimaryKey( pkid ).columnIds; - LogicalColumn pkColumn = catalog.getColumn( pkColumnIds.get( 0 ) ); + List pkColumnIds = catalog.getLogicalRel( modify.entity.namespaceId ).getPrimaryKey( pkid ).columnIds; + LogicalColumn pkColumn = catalog.getLogicalRel( modify.entity.namespaceId ).getColumn( pkColumnIds.get( 0 ) ); // Essentially gets a list of all stores where this table resides - List pkPlacements = catalog.getColumnPlacement( pkColumn.id ); + List pkPlacements = catalog.getAllocRel( modify.entity.namespaceId ).getColumnPlacement( pkColumn.id ); if ( catalogTable.partitionProperty.isPartitioned && log.isDebugEnabled() ) { log.debug( "\nListing all relevant stores for table: '{}' and all partitions: {}", catalogTable.name, catalogTable.partitionProperty.partitionGroupIds ); @@ -163,8 +157,8 @@ public AlgNode routeDml( LogicalRelModify modify, Statement statement ) { log.debug( "\t\t -> '{}' {}\t{}", dataPlacement.adapterUniqueName, - catalog.getPartitionGroupsOnDataPlacement( dataPlacement.adapterId, dataPlacement.tableId ), - catalog.getPartitionGroupsIndexOnDataPlacement( dataPlacement.adapterId, dataPlacement.tableId ) ); + catalog.getAllocRel( modify.entity.namespaceId ).getPartitionGroupsOnDataPlacement( dataPlacement.adapterId, dataPlacement.tableId ), + catalog.getAllocRel( modify.entity.namespaceId ).getPartitionGroupsIndexOnDataPlacement( dataPlacement.adapterId, dataPlacement.tableId ) ); } } @@ -185,12 +179,12 @@ public AlgNode routeDml( LogicalRelModify modify, Statement statement ) { Snapshot snapshot = statement.getTransaction().getSnapshot(); // Get placements on store - List placementsOnAdapter = catalog.getColumnPlacementsOnAdapterPerTable( pkPlacement.adapterId, catalogTable.id ); + List placementsOnAdapter = catalog.getAllocRel( modify.entity.namespaceId ).getColumnPlacementsOnAdapterPerTable( pkPlacement.adapterId, catalogTable.id ); // If this is an update, check whether we need to execute on this store at all List updateColumnList = modify.getUpdateColumnList(); List sourceExpressionList = modify.getSourceExpressionList(); - if ( placementsOnAdapter.size() != catalogTable.fieldIds.size() ) { + if ( placementsOnAdapter.size() != catalogTable.columns.size() ) { if ( modify.getOperation() == Modify.Operation.UPDATE ) { updateColumnList = new LinkedList<>( modify.getUpdateColumnList() ); @@ -201,8 +195,8 @@ public AlgNode routeDml( LogicalRelModify modify, Statement statement ) { String columnName = updateColumnListIterator.next(); sourceExpressionListIterator.next(); try { - LogicalColumn logicalColumn = catalog.getColumn( catalogTable.id, columnName ); - if ( !catalog.checkIfExistsColumnPlacement( pkPlacement.adapterId, logicalColumn.id ) ) { + LogicalColumn logicalColumn = catalog.getLogicalRel( catalogTable.namespaceId ).getColumn( catalogTable.id, columnName ); + if ( !catalog.getAllocRel( catalogTable.namespaceId ).checkIfExistsColumnPlacement( pkPlacement.adapterId, logicalColumn.id ) ) { updateColumnListIterator.remove(); sourceExpressionListIterator.remove(); } @@ -227,7 +221,7 @@ public AlgNode routeDml( LogicalRelModify modify, Statement statement ) { PartitionManagerFactory partitionManagerFactory = PartitionManagerFactory.getInstance(); PartitionManager 
partitionManager = partitionManagerFactory.getPartitionManager( catalogTable.partitionProperty.partitionType ); - WhereClauseVisitor whereClauseVisitor = new WhereClauseVisitor( statement, catalogTable.fieldIds.indexOf( catalogTable.partitionProperty.partitionColumnId ) ); + WhereClauseVisitor whereClauseVisitor = new WhereClauseVisitor( statement, catalogTable.getColumnIds().indexOf( catalogTable.partitionProperty.partitionColumnId ) ); modify.accept( new AlgShuttleImpl() { @Override public AlgNode visit( LogicalFilter filter ) { @@ -268,7 +262,7 @@ public AlgNode visit( LogicalFilter filter ) { for ( String cn : updateColumnList ) { try { - if ( catalog.getColumn( catalogTable.id, cn ).id == catalogTable.partitionProperty.partitionColumnId ) { + if ( catalog.getLogicalRel( catalogTable.namespaceId ).getColumn( catalogTable.id, cn ).id == catalogTable.partitionProperty.partitionColumnId ) { if ( log.isDebugEnabled() ) { log.debug( " UPDATE: Found PartitionColumnID Match: '{}' at index: {}", catalogTable.partitionProperty.partitionColumnId, index ); } @@ -345,10 +339,10 @@ else if ( identifiedPartitionForSetValue != -1 ) { // Retrieve columnId of fieldName and map it to its fieldList location of INSERT Stmt int columnIndex = catalogTable.getColumnNames().indexOf( columnFieldName ); - resultColMapping.put( catalogTable.fieldIds.get( columnIndex ), j ); + resultColMapping.put( catalogTable.getColumnIds().get( columnIndex ), j ); // Determine location of partitionColumn in fieldList - if ( catalogTable.fieldIds.get( columnIndex ) == catalogTable.partitionProperty.partitionColumnId ) { + if ( catalogTable.getColumnIds().get( columnIndex ) == catalogTable.partitionProperty.partitionColumnId ) { partitionColumnIndex = columnIndex; if ( log.isDebugEnabled() ) { log.debug( "INSERT: Found PartitionColumnID: '{}' at column index: {}", catalogTable.partitionProperty.partitionColumnId, j ); @@ -381,7 +375,7 @@ else if ( identifiedPartitionForSetValue != -1 ) { for ( Map.Entry>> partitionMapping : tuplesOnPartition.entrySet() ) { Long currentPartitionId = partitionMapping.getKey(); - if ( !catalog.getPartitionsOnDataPlacement( pkPlacement.adapterId, catalogTable.id ).contains( currentPartitionId ) ) { + if ( !catalog.getAllocRel( catalogTable.namespaceId ).getPartitionsOnDataPlacement( pkPlacement.adapterId, catalogTable.id ).contains( currentPartitionId ) ) { continue; } @@ -397,19 +391,12 @@ else if ( identifiedPartitionForSetValue != -1 ) { RoutedAlgBuilder.create( statement, cluster ), catalogTable, placementsOnAdapter, - catalog.getPartitionPlacement( pkPlacement.adapterId, currentPartitionId ), + catalog.getAllocRel( catalogTable.namespaceId ).getPartitionPlacement( pkPlacement.adapterId, currentPartitionId ), statement, cluster, true, statement.getDataContext().getParameterValues() ).build(); - List qualifiedTableName = ImmutableList.of( - PolySchemaBuilder.buildAdapterSchemaName( - pkPlacement.adapterUniqueName, - catalogTable.getNamespaceName(), - pkPlacement.physicalSchemaName - ), - catalogTable.name + "_" + currentPartitionId ); PhysicalTable physical = snapshot.getPhysicalTable( currentPartitionId ); ModifiableEntity modifiableTable = physical.unwrap( ModifiableEntity.class ); @@ -432,7 +419,7 @@ else if ( identifiedPartitionForSetValue != -1 ) { } else if ( modify.getInput() instanceof LogicalProject && ((LogicalProject) modify.getInput()).getInput() instanceof LogicalValues ) { - String partitionColumnName = catalog.getColumn( catalogTable.partitionProperty.partitionColumnId ).name; + 
String partitionColumnName = catalog.getLogicalRel( catalogTable.namespaceId ).getColumn( catalogTable.partitionProperty.partitionColumnId ).name; List fieldNames = modify.getInput().getRowType().getFieldNames(); LogicalRelModify ltm = modify; @@ -461,7 +448,7 @@ else if ( identifiedPartitionForSetValue != -1 ) { tempPartitionId = partitionManager.getTargetPartitionId( catalogTable, currentRow.get( partitionValueIndex ).toString() ); - if ( !catalog.getPartitionsOnDataPlacement( pkPlacement.adapterId, catalogTable.id ).contains( tempPartitionId ) ) { + if ( !catalog.getAllocRel( catalogTable.namespaceId ).getPartitionsOnDataPlacement( pkPlacement.adapterId, catalogTable.id ).contains( tempPartitionId ) ) { continue; } @@ -487,19 +474,12 @@ else if ( identifiedPartitionForSetValue != -1 ) { RoutedAlgBuilder.create( statement, cluster ), catalogTable, placementsOnAdapter, - catalog.getPartitionPlacement( pkPlacement.adapterId, entry.getKey() ), + catalog.getAllocRel( catalogTable.namespaceId ).getPartitionPlacement( pkPlacement.adapterId, entry.getKey() ), statement, cluster, false, entry.getValue() ).build(); - List qualifiedTableName = ImmutableList.of( - PolySchemaBuilder.buildAdapterSchemaName( - pkPlacement.adapterUniqueName, - catalogTable.getNamespaceName(), - pkPlacement.physicalSchemaName - ), - catalogTable.name + "_" + entry.getKey() ); PhysicalTable physical = snapshot.getPhysicalTable( entry.getKey() ); ModifiableEntity modifiableTable = physical.unwrap( ModifiableEntity.class ); @@ -540,8 +520,8 @@ else if ( identifiedPartitionForSetValue != -1 ) { } if ( log.isDebugEnabled() ) { - String partitionColumnName = catalog.getColumn( catalogTable.partitionProperty.partitionColumnId ).name; - String partitionName = catalog.getPartitionGroup( identPart ).partitionGroupName; + String partitionColumnName = catalog.getLogicalRel( catalogTable.namespaceId ).getColumn( catalogTable.partitionProperty.partitionColumnId ).name; + String partitionName = catalog.getAllocRel( catalogTable.namespaceId ).getPartitionGroup( identPart ).partitionGroupName; log.debug( "INSERT: partitionColumn-value: '{}' should be put on partition: {} ({}), which is partitioned with column {}", partitionValue, identPart, partitionName, partitionColumnName ); } @@ -577,17 +557,10 @@ else if ( identifiedPartitionForSetValue != -1 ) { for ( long partitionId : accessedPartitionList ) { - if ( !catalog.getPartitionsOnDataPlacement( pkPlacement.adapterId, catalogTable.id ).contains( partitionId ) ) { + if ( !catalog.getAllocRel( catalogTable.namespaceId ).getPartitionsOnDataPlacement( pkPlacement.adapterId, catalogTable.id ).contains( partitionId ) ) { continue; } - List qualifiedTableName = ImmutableList.of( - PolySchemaBuilder.buildAdapterSchemaName( - pkPlacement.adapterUniqueName, - catalogTable.getNamespaceName(), - pkPlacement.physicalSchemaName - ), - catalogTable.name + "_" + partitionId ); PhysicalTable physical = snapshot.getPhysicalTable( partitionId ); // Build DML @@ -597,7 +570,7 @@ else if ( identifiedPartitionForSetValue != -1 ) { RoutedAlgBuilder.create( statement, cluster ), catalogTable, placementsOnAdapter, - catalog.getPartitionPlacement( pkPlacement.adapterId, partitionId ), + catalog.getAllocRel( catalogTable.namespaceId ).getPartitionPlacement( pkPlacement.adapterId, partitionId ), statement, cluster, false, @@ -725,10 +698,7 @@ public AlgNode routeDocumentDml( LogicalDocumentModify alg, Statement statement, for ( int placementId : placements ) { CatalogAdapter adapter = 
Catalog.getInstance().getAdapter( placementId ); - CatalogCollectionPlacement placement = Catalog.getInstance().getCollectionPlacement( collection.id, placementId ); - String namespaceName = PolySchemaBuilder.buildAdapterSchemaName( adapter.uniqueName, collection.getNamespaceName(), placement.physicalNamespaceName ); - - String collectionName = collection.name + "_" + placement.id; + CatalogCollectionPlacement placement = Catalog.getInstance().getAllocDoc( alg.entity.namespaceId ).getCollectionPlacement( collection.id, placementId ); PhysicalCollection document = snapshot.getPhysicalCollection( placement.id ); if ( !adapter.supportedNamespaces.contains( NamespaceType.DOCUMENT ) ) { @@ -772,8 +742,7 @@ public AlgNode routeGraphDml( LogicalLpgModify alg, Statement statement, Logical for ( int adapterId : placements ) { CatalogAdapter adapter = Catalog.getInstance().getAdapter( adapterId ); - CatalogGraphPlacement graphPlacement = Catalog.getInstance().getGraphPlacement( catalogGraph.id, adapterId ); - String name = PolySchemaBuilder.buildAdapterSchemaName( adapter.uniqueName, catalogGraph.name, graphPlacement.physicalName ); + CatalogGraphPlacement graphPlacement = Catalog.getInstance().getAllocGraph( alg.entity.namespaceId ).getGraphPlacement( catalogGraph.id, adapterId ); PhysicalGraph graph = snapshot.getPhysicalGraph( catalogGraph.id, adapterId ); if ( graph == null ) { @@ -819,7 +788,7 @@ public AlgNode routeGraphDml( LogicalLpgModify alg, Statement statement, Logical private AlgNode buildDocumentDml( AlgNode node, Statement statement, LogicalQueryInformation queryInformation ) { if ( node instanceof DocumentScan ) { - return super.handleDocumentScan( (DocumentScan) node, statement, RoutedAlgBuilder.create( statement, node.getCluster() ), null ).build(); + return super.handleDocumentScan( (DocumentScan) node, statement, RoutedAlgBuilder.create( statement, node.getCluster() ), null ).build(); } int i = 0; List inputs = new ArrayList<>(); @@ -846,7 +815,6 @@ private AlgNode buildGraphDml( AlgNode node, Statement statement, int adapterId private AlgNode attachRelationalModify( LogicalDocumentModify alg, Statement statement, int adapterId, LogicalQueryInformation queryInformation ) { - CatalogCollectionMapping mapping = Catalog.getInstance().getCollectionMapping( alg.entity.id ); switch ( alg.operation ) { case INSERT: @@ -956,7 +924,7 @@ private List attachRelationalDocInsert( LogicalDocumentModify alg, Stat private AlgNode attachRelationalModify( LogicalLpgModify alg, int adapterId, Statement statement ) { - CatalogGraphMapping mapping = Catalog.getInstance().getGraphMapping( alg.entity.id ); + /*CatalogGraphMapping mapping = Catalog.getInstance().getGraphMapping( alg.entity.id ); PhysicalTable nodesTable = getSubstitutionTable( statement, mapping.nodesId, mapping.idNodeId, adapterId ).unwrap( PhysicalTable.class ); PhysicalTable nodePropertiesTable = getSubstitutionTable( statement, mapping.nodesPropertyId, mapping.idNodesPropertyId, adapterId ).unwrap( PhysicalTable.class ); @@ -1004,6 +972,8 @@ private AlgNode attachRelationalModify( LogicalLpgModify alg, int adapterId, Sta } return new LogicalModifyCollect( alg.getCluster(), alg.getTraitSet().replace( ModelTrait.GRAPH ), modifies, true ); + */// todo dl + return null; } @@ -1229,20 +1199,13 @@ private AlgBuilder buildDml( builder = super.handleScan( builder, statement, - placements.get( 0 ).tableId, - placements.get( 0 ).adapterUniqueName, - catalogTable.getNamespaceName(), - catalogTable.name, - placements.get( 0 
).physicalSchemaName, - partitionPlacement.physicalTableName, - partitionPlacement.partitionId, - catalogTable.getNamespaceType() ); + partitionPlacement.partitionId + ); LogicalRelScan scan = (LogicalRelScan) builder.build(); builder.push( scan.copy( scan.getTraitSet().replace( ModelTrait.DOCUMENT ), scan.getInputs() ) ); return builder; } else if ( node instanceof LogicalRelScan && node.getEntity() != null ) { - // Special handling for INSERT INTO foo SELECT * FROM foo2 if ( false ) { return handleSelectFromOtherTable( builder, catalogTable, statement ); @@ -1251,14 +1214,8 @@ private AlgBuilder buildDml( builder = super.handleScan( builder, statement, - placements.get( 0 ).tableId, - placements.get( 0 ).adapterUniqueName, - catalogTable.getNamespaceName(), - catalogTable.name, - placements.get( 0 ).physicalSchemaName, - partitionPlacement.physicalTableName, - partitionPlacement.partitionId, - catalogTable.getNamespaceType() ); + partitionPlacement.partitionId + ); return builder; @@ -1272,7 +1229,7 @@ private AlgBuilder buildDml( builder = super.handleValues( values, builder ); - if ( catalogTable.fieldIds.size() == placements.size() ) { // full placement, no additional checks required + if ( catalogTable.columns.size() == placements.size() ) { // full placement, no additional checks required return builder; } else if ( node.getRowType().toString().equals( "RecordType(INTEGER ZERO)" ) ) { // This is a prepared statement. Actual values are in the project. Do nothing @@ -1285,7 +1242,7 @@ private AlgBuilder buildDml( return builder.project( rexNodes ); } } else if ( node instanceof LogicalProject ) { - if ( catalogTable.fieldIds.size() == placements.size() ) { // full placement, generic handling is sufficient + if ( catalogTable.columns.size() == placements.size() ) { // full placement, generic handling is sufficient if ( catalogTable.partitionProperty.isPartitioned && remapParameterValues ) { // && ((LogicalProject) node).getInput().getRowType().toString().equals( "RecordType(INTEGER ZERO)" ) return remapParameterizedDml( node, builder, statement, parameterValues ); } else { @@ -1316,7 +1273,7 @@ private AlgBuilder buildDml( } } } else if ( node instanceof LogicalFilter ) { - if ( catalogTable.fieldIds.size() != placements.size() ) { // partitioned, check if there is a illegal condition + if ( catalogTable.columns.size() != placements.size() ) { // partitioned, check if there is a illegal condition RexCall call = ((RexCall) ((LogicalFilter) node).getCondition()); for ( RexNode operand : call.operands ) { @@ -1338,28 +1295,22 @@ private AlgBuilder handleSelectFromOtherTable( RoutedAlgBuilder builder, Logical } long pkid = fromTable.primaryKey; - List pkColumnIds = catalog.getPrimaryKey( pkid ).columnIds; - LogicalColumn pkColumn = catalog.getColumn( pkColumnIds.get( 0 ) ); - List pkPlacements = catalog.getColumnPlacement( pkColumn.id ); + List pkColumnIds = catalog.getLogicalRel( catalogTable.namespaceId ).getPrimaryKey( pkid ).columnIds; + LogicalColumn pkColumn = catalog.getLogicalRel( catalogTable.namespaceId ).getColumn( pkColumnIds.get( 0 ) ); + List pkPlacements = catalog.getAllocRel( catalogTable.namespaceId ).getColumnPlacement( pkColumn.id ); List nodes = new ArrayList<>(); for ( CatalogColumnPlacement pkPlacement : pkPlacements ) { - catalog.getColumnPlacementsOnAdapterPerTable( pkPlacement.adapterId, fromTable.id ); + catalog.getAllocRel( catalogTable.namespaceId ).getColumnPlacementsOnAdapterPerTable( pkPlacement.adapterId, fromTable.id ); - CatalogPartitionPlacement partition 
= catalog.getPartitionPlacement( pkPlacement.adapterId, fromTable.partitionProperty.partitionIds.get( 0 ) ); + CatalogPartitionPlacement partition = catalog.getAllocRel( catalogTable.namespaceId ).getPartitionPlacement( pkPlacement.adapterId, fromTable.partitionProperty.partitionIds.get( 0 ) ); nodes.add( super.handleScan( builder, statement, - pkPlacements.get( 0 ).tableId, - pkPlacements.get( 0 ).adapterUniqueName, - fromTable.getNamespaceName(), - fromTable.name, - pkPlacements.get( 0 ).physicalSchemaName, - partition.physicalTableName, - partition.partitionId, - fromTable.namespaceType ).build() ); + partition.partitionId + ).build() ); } @@ -1397,11 +1348,11 @@ private void dmlConditionCheck( LogicalFilter node, LogicalTable catalogTable, L } else { throw new RuntimeException( "Invalid column name: " + field.getName() ); } - column = catalog.getColumn( catalogTable.id, columnName ); + column = catalog.getLogicalRel( catalogTable.namespaceId ).getColumn( catalogTable.id, columnName ); } catch ( UnknownColumnException e ) { throw new RuntimeException( e ); } - if ( !catalog.checkIfExistsColumnPlacement( placements.get( 0 ).adapterId, column.id ) ) { + if ( !catalog.getAllocRel( catalogTable.namespaceId ).checkIfExistsColumnPlacement( placements.get( 0 ).adapterId, column.id ) ) { throw new RuntimeException( "Current implementation of vertical partitioning does not allow conditions on partitioned columns. " ); // !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! // TODO: Use indexes diff --git a/dbms/src/main/java/org/polypheny/db/transaction/TransactionManagerImpl.java b/dbms/src/main/java/org/polypheny/db/transaction/TransactionManagerImpl.java index dbed8b0700..e66497f938 100644 --- a/dbms/src/main/java/org/polypheny/db/transaction/TransactionManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/transaction/TransactionManagerImpl.java @@ -101,7 +101,7 @@ public Transaction startTransaction( CatalogUser user, LogicalNamespace defaultS @Override - public Transaction startTransaction( long userId, long databaseId, boolean analyze, String origin, MultimediaFlavor flavor ) throws UnknownUserException, UnknownDatabaseException, UnknownSchemaException { + public Transaction startTransaction( long userId, boolean analyze, String origin, MultimediaFlavor flavor ) throws UnknownUserException, UnknownSchemaException { Catalog catalog = Catalog.getInstance(); CatalogUser catalogUser = catalog.getUser( (int) userId ); CatalogDatabase catalogDatabase = catalog.getDatabase( databaseId ); @@ -111,7 +111,7 @@ public Transaction startTransaction( long userId, long databaseId, boolean analy @Override - public Transaction startTransaction( long userId, long databaseId, boolean analyze, String origin ) throws GenericCatalogException, UnknownUserException, UnknownDatabaseException, UnknownSchemaException { + public Transaction startTransaction( long userId, boolean analyze, String origin ) throws GenericCatalogException, UnknownUserException, UnknownDatabaseException, UnknownSchemaException { return startTransaction( userId, databaseId, analyze, origin, MultimediaFlavor.DEFAULT ); } diff --git a/dbms/src/main/java/org/polypheny/db/view/MaterializedViewManagerImpl.java b/dbms/src/main/java/org/polypheny/db/view/MaterializedViewManagerImpl.java index 12af517d90..b8d4e32118 100644 --- a/dbms/src/main/java/org/polypheny/db/view/MaterializedViewManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/view/MaterializedViewManagerImpl.java @@ -279,7 +279,6 @@ public void prepareToUpdate( Long materializedId ) { try { 
Transaction transaction = getTransactionManager().startTransaction( catalogTable.ownerId, - catalogTable.databaseId, false, "Materialized View" ); diff --git a/dbms/src/test/java/org/polypheny/db/TestHelper.java b/dbms/src/test/java/org/polypheny/db/TestHelper.java index 21e4633cd9..816b4bde37 100644 --- a/dbms/src/test/java/org/polypheny/db/TestHelper.java +++ b/dbms/src/test/java/org/polypheny/db/TestHelper.java @@ -127,7 +127,7 @@ private TestHelper() { public Transaction getTransaction() { try { - return transactionManager.startTransaction( Catalog.defaultUserId, Catalog.defaultDatabaseId, true, "Test Helper" ); + return transactionManager.startTransaction( Catalog.defaultUserId, true, "Test Helper" ); } catch ( GenericCatalogException | UnknownUserException | UnknownDatabaseException | UnknownSchemaException e ) { throw new RuntimeException( "Error while starting transaction", e ); } diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticQueryProcessor.java b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticQueryProcessor.java index 722ec38890..a9308790b2 100644 --- a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticQueryProcessor.java +++ b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticQueryProcessor.java @@ -202,7 +202,7 @@ private StatisticResult executeColStat( AlgNode node, Transaction transaction, S private Transaction getTransaction() { try { - return transactionManager.startTransaction( userId, databaseId, false, "Statistics", MultimediaFlavor.FILE ); + return transactionManager.startTransaction( userId, false, "Statistics", MultimediaFlavor.FILE ); } catch ( UnknownUserException | UnknownDatabaseException | UnknownSchemaException e ) { throw new RuntimeException( "Error while starting transaction", e ); } diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticsManagerImpl.java b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticsManagerImpl.java index adb5544761..928fc458ac 100644 --- a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticsManagerImpl.java +++ b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticsManagerImpl.java @@ -193,7 +193,7 @@ public void updateSchemaName( LogicalNamespace logicalNamespace, String newName private Transaction getTransaction() { Transaction transaction; try { - transaction = statisticQueryInterface.getTransactionManager().startTransaction( Catalog.defaultUserId, Catalog.defaultDatabaseId, false, "Statistic Manager" ); + transaction = statisticQueryInterface.getTransactionManager().startTransaction( Catalog.defaultUserId, false, "Statistic Manager" ); } catch ( GenericCatalogException | UnknownUserException | UnknownDatabaseException | UnknownSchemaException e ) { throw new RuntimeException( e ); } diff --git a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSchema.java b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSchema.java index 34bf1588aa..b4de346abe 100644 --- a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSchema.java +++ b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSchema.java @@ -89,7 +89,7 @@ public PhysicalTable createCsvTable( LogicalTable catalogTable, AllocationTable List fieldTypes = new LinkedList<>(); List fieldIds = new ArrayList<>( allocationTable.placements.size() ); for ( CatalogColumnPlacement placement : allocationTable.placements ) { - 
LogicalColumn logicalColumn = Catalog.getInstance().getColumn( placement.columnId ); + LogicalColumn logicalColumn = Catalog.getInstance().getLogicalRel( allocationTable.namespaceId ).getColumn( placement.columnId ); AlgDataType sqlType = sqlType( typeFactory, logicalColumn.type, logicalColumn.length, logicalColumn.scale, null ); fieldInfo.add( logicalColumn.name, placement.physicalColumnName, sqlType ).nullable( logicalColumn.nullable ); fieldTypes.add( CsvFieldType.getCsvFieldType( logicalColumn.type ) ); @@ -98,6 +98,7 @@ public PhysicalTable createCsvTable( LogicalTable catalogTable, AllocationTable String csvFileName = Catalog .getInstance() + .getAllocRel( allocationTable.namespaceId ) .getColumnPlacementsOnAdapterPerTable( csvSource.getAdapterId(), catalogTable.id ).iterator().next() .physicalSchemaName; Source source; diff --git a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/CypherProcessorImpl.java b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/CypherProcessorImpl.java index f291ae6526..c6eeb5f983 100644 --- a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/CypherProcessorImpl.java +++ b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/CypherProcessorImpl.java @@ -24,7 +24,6 @@ import org.polypheny.db.algebra.constant.ExplainFormat; import org.polypheny.db.algebra.constant.ExplainLevel; import org.polypheny.db.algebra.type.AlgDataType; -import org.polypheny.db.catalog.Catalog; import org.polypheny.db.cypher.cypher2alg.CypherToAlgConverter; import org.polypheny.db.cypher.parser.CypherParser; import org.polypheny.db.cypher.parser.CypherParser.CypherParserConfig; @@ -159,7 +158,7 @@ public void autoGenerateDDL( Statement statement, Node node, QueryParameters par try { DdlManager ddlManager = DdlManager.getInstance(); long namespaceId = ddlManager.createGraph( - Catalog.defaultDatabaseId, ((ExtendedQueryParameters) parameters).getDatabaseName(), true, null, true, false, statement ); + ((ExtendedQueryParameters) parameters).getDatabaseName(), true, null, true, false, true, statement ); statement.getTransaction().commit(); ((ExtendedQueryParameters) parameters).setDatabaseId( namespaceId ); diff --git a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherCreateDatabase.java b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherCreateDatabase.java index c1845aa553..4afe6d8371 100644 --- a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherCreateDatabase.java +++ b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherCreateDatabase.java @@ -21,7 +21,6 @@ import lombok.Getter; import org.polypheny.db.adapter.AdapterManager; import org.polypheny.db.adapter.DataStore; -import org.polypheny.db.catalog.Catalog; import org.polypheny.db.cypher.CypherParameter; import org.polypheny.db.cypher.CypherSimpleEither; import org.polypheny.db.cypher.clause.CypherWaitClause; @@ -82,13 +81,11 @@ public void execute( Context context, Statement statement, QueryParameters param } DdlManager.getInstance().createGraph( - Catalog.defaultDatabaseId, databaseName, true, dataStore, ifNotExists, - replace, - statement ); + replace, true, statement ); } diff --git a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherDropDatabase.java b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherDropDatabase.java index 51a04e264c..7f470c0b75 100644 --- 
a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherDropDatabase.java +++ b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherDropDatabase.java @@ -73,7 +73,7 @@ public void execute( Context context, Statement statement, QueryParameters param return; } - DdlManager.getInstance().removeGraphDatabase( databases.get( 0 ).id, ifExists, statement ); + DdlManager.getInstance().removeGraph( databases.get( 0 ).id, ifExists, statement ); } diff --git a/plugins/explore-by-example/src/main/java/org/polypheny/db/exploreByExample/ExploreQueryProcessor.java b/plugins/explore-by-example/src/main/java/org/polypheny/db/exploreByExample/ExploreQueryProcessor.java index f983fb9634..fe3b0c67a2 100644 --- a/plugins/explore-by-example/src/main/java/org/polypheny/db/exploreByExample/ExploreQueryProcessor.java +++ b/plugins/explore-by-example/src/main/java/org/polypheny/db/exploreByExample/ExploreQueryProcessor.java @@ -66,7 +66,7 @@ public ExploreQueryProcessor( final TransactionManager transactionManager, Authe private Transaction getTransaction() { try { - return transactionManager.startTransaction( userId, databaseId, false, "Explore-by-Example", MultimediaFlavor.FILE ); + return transactionManager.startTransaction( userId, false, "Explore-by-Example", MultimediaFlavor.FILE ); } catch ( UnknownUserException | UnknownDatabaseException | UnknownSchemaException e ) { throw new RuntimeException( "Error while starting transaction", e ); } diff --git a/plugins/mapdb-catalog/src/main/java/org/polypheny/db/catalog/CatalogImpl.java b/plugins/mapdb-catalog/src/main/java/org/polypheny/db/catalog/CatalogImpl.java deleted file mode 100644 index d3bc381ca5..0000000000 --- a/plugins/mapdb-catalog/src/main/java/org/polypheny/db/catalog/CatalogImpl.java +++ /dev/null @@ -1,5017 +0,0 @@ -/* - * Copyright 2019-2023 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.polypheny.db.catalog; - - -import com.google.common.collect.ImmutableList; -import com.google.common.collect.ImmutableMap; -import java.io.File; -import java.io.IOException; -import java.sql.Timestamp; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.Comparator; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; -import java.util.Objects; -import java.util.Set; -import java.util.concurrent.atomic.AtomicInteger; -import java.util.concurrent.atomic.AtomicLong; -import java.util.stream.Collectors; -import java.util.stream.Stream; -import lombok.Getter; -import lombok.NonNull; -import lombok.extern.slf4j.Slf4j; -import org.mapdb.BTreeMap; -import org.mapdb.DB; -import org.mapdb.DBException.SerializationError; -import org.mapdb.DBMaker; -import org.mapdb.HTreeMap; -import org.mapdb.Serializer; -import org.mapdb.serializer.SerializerArrayTuple; -import org.pf4j.Extension; -import org.polypheny.db.StatusService; -import org.polypheny.db.StatusService.ErrorConfig; -import org.polypheny.db.adapter.Adapter; -import org.polypheny.db.adapter.AdapterManager; -import org.polypheny.db.adapter.DataStore; -import org.polypheny.db.algebra.AlgCollation; -import org.polypheny.db.algebra.AlgCollations; -import org.polypheny.db.algebra.AlgNode; -import org.polypheny.db.algebra.AlgRoot; -import org.polypheny.db.algebra.constant.Kind; -import org.polypheny.db.algebra.core.Sort; -import org.polypheny.db.algebra.type.AlgDataType; -import org.polypheny.db.catalog.entity.CatalogAdapter; -import org.polypheny.db.catalog.entity.CatalogAdapter.AdapterType; -import org.polypheny.db.catalog.entity.CatalogColumnPlacement; -import org.polypheny.db.catalog.entity.CatalogConstraint; -import org.polypheny.db.catalog.entity.CatalogDataPlacement; -import org.polypheny.db.catalog.entity.CatalogDatabase; -import org.polypheny.db.catalog.entity.CatalogDefaultValue; -import org.polypheny.db.catalog.entity.CatalogForeignKey; -import org.polypheny.db.catalog.entity.CatalogGraphMapping; -import org.polypheny.db.catalog.entity.CatalogGraphPlacement; -import org.polypheny.db.catalog.entity.CatalogIndex; -import org.polypheny.db.catalog.entity.CatalogKey; -import org.polypheny.db.catalog.entity.CatalogKey.EnforcementTime; -import org.polypheny.db.catalog.entity.CatalogMaterializedView; -import org.polypheny.db.catalog.entity.CatalogPartition; -import org.polypheny.db.catalog.entity.CatalogPartitionGroup; -import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; -import org.polypheny.db.catalog.entity.CatalogPrimaryKey; -import org.polypheny.db.catalog.entity.CatalogQueryInterface; -import org.polypheny.db.catalog.entity.CatalogUser; -import org.polypheny.db.catalog.entity.CatalogView; -import org.polypheny.db.catalog.entity.LogicalNamespace; -import org.polypheny.db.catalog.entity.MaterializedCriteria; -import org.polypheny.db.catalog.entity.logical.LogicalCollection; -import org.polypheny.db.catalog.entity.logical.LogicalColumn; -import org.polypheny.db.catalog.entity.logical.LogicalGraph; -import org.polypheny.db.catalog.entity.logical.LogicalTable; -import org.polypheny.db.catalog.entity.physical.PhysicalEntity; -import org.polypheny.db.catalog.exceptions.GenericCatalogException; -import org.polypheny.db.catalog.exceptions.GraphAlreadyExistsException; -import org.polypheny.db.catalog.exceptions.NoTablePrimaryKeyException; -import 
org.polypheny.db.catalog.exceptions.UnknownAdapterException;
-import org.polypheny.db.catalog.exceptions.UnknownAdapterIdRuntimeException;
-import org.polypheny.db.catalog.exceptions.UnknownColumnException;
-import org.polypheny.db.catalog.exceptions.UnknownColumnIdRuntimeException;
-import org.polypheny.db.catalog.exceptions.UnknownColumnPlacementRuntimeException;
-import org.polypheny.db.catalog.exceptions.UnknownConstraintException;
-import org.polypheny.db.catalog.exceptions.UnknownDatabaseIdRuntimeException;
-import org.polypheny.db.catalog.exceptions.UnknownForeignKeyException;
-import org.polypheny.db.catalog.exceptions.UnknownGraphException;
-import org.polypheny.db.catalog.exceptions.UnknownGraphPlacementsException;
-import org.polypheny.db.catalog.exceptions.UnknownIndexException;
-import org.polypheny.db.catalog.exceptions.UnknownIndexIdRuntimeException;
-import org.polypheny.db.catalog.exceptions.UnknownKeyIdRuntimeException;
-import org.polypheny.db.catalog.exceptions.UnknownPartitionGroupIdRuntimeException;
-import org.polypheny.db.catalog.exceptions.UnknownPartitionPlacementException;
-import org.polypheny.db.catalog.exceptions.UnknownQueryInterfaceException;
-import org.polypheny.db.catalog.exceptions.UnknownQueryInterfaceRuntimeException;
-import org.polypheny.db.catalog.exceptions.UnknownSchemaException;
-import org.polypheny.db.catalog.exceptions.UnknownSchemaIdRuntimeException;
-import org.polypheny.db.catalog.exceptions.UnknownTableException;
-import org.polypheny.db.catalog.exceptions.UnknownTableIdRuntimeException;
-import org.polypheny.db.catalog.exceptions.UnknownUserException;
-import org.polypheny.db.catalog.exceptions.UnknownUserIdRuntimeException;
-import org.polypheny.db.catalog.logistic.Collation;
-import org.polypheny.db.catalog.logistic.ConstraintType;
-import org.polypheny.db.catalog.logistic.DataPlacementRole;
-import org.polypheny.db.catalog.logistic.EntityType;
-import org.polypheny.db.catalog.logistic.ForeignKeyOption;
-import org.polypheny.db.catalog.logistic.IndexType;
-import org.polypheny.db.catalog.logistic.NamespaceType;
-import org.polypheny.db.catalog.logistic.PartitionType;
-import org.polypheny.db.catalog.logistic.Pattern;
-import org.polypheny.db.catalog.logistic.PlacementType;
-import org.polypheny.db.catalog.snapshot.Snapshot;
-import org.polypheny.db.config.RuntimeConfig;
-import org.polypheny.db.iface.QueryInterfaceManager;
-import org.polypheny.db.languages.QueryLanguage;
-import org.polypheny.db.languages.QueryParameters;
-import org.polypheny.db.nodes.Node;
-import org.polypheny.db.partition.FrequencyMap;
-import org.polypheny.db.partition.properties.PartitionProperty;
-import org.polypheny.db.processing.ExtendedQueryParameters;
-import org.polypheny.db.processing.Processor;
-import org.polypheny.db.transaction.Statement;
-import org.polypheny.db.transaction.Transaction;
-import org.polypheny.db.type.PolyType;
-import org.polypheny.db.type.PolyTypeFamily;
-import org.polypheny.db.util.ImmutableIntList;
-import org.polypheny.db.util.Pair;
-import org.polypheny.db.util.PolyphenyHomeDirManager;
-import org.polypheny.db.view.MaterializedViewManager;
-
-@Extension
-@Slf4j
-public class CatalogImpl extends Catalog {
-
-    private static final String FILE_PATH = "mapDB";
-    private static DB db;
-
-    private static HTreeMap<Integer, CatalogUser> users;
-    private static HTreeMap<String, CatalogUser> userNames;
-
-    private static BTreeMap<Long, CatalogDatabase> databases;
-    private static BTreeMap<String, CatalogDatabase> databaseNames;
-    private static HTreeMap<Long, ImmutableList<Long>> databaseChildren;
-
-    private static BTreeMap<Long, LogicalNamespace> schemas;
-    private static BTreeMap<Object[], LogicalNamespace> schemaNames;
-    private static HTreeMap<Long, ImmutableList<Long>> schemaChildren;
-
-    private static BTreeMap<Long, LogicalTable> tables;
-    private static BTreeMap<Object[], LogicalTable> tableNames;
-    private static HTreeMap<Long, ImmutableList<Long>> tableChildren;
-
-    private static BTreeMap<Long, LogicalCollection> collections;
-    private static BTreeMap<Object[], LogicalCollection> collectionNames;
-
-    private static BTreeMap<Long, LogicalColumn> columns;
-    private static BTreeMap<Object[], LogicalColumn> columnNames;
-    private static BTreeMap<Object[], CatalogColumnPlacement> columnPlacements;
-
-    private static HTreeMap<Integer, CatalogAdapter> adapters;
-    private static HTreeMap<String, CatalogAdapter> adapterNames;
-
-    private static HTreeMap<Integer, CatalogQueryInterface> queryInterfaces;
-    private static HTreeMap<String, CatalogQueryInterface> queryInterfaceNames;
-
-    private static HTreeMap<Long, CatalogKey> keys;
-    private static HTreeMap<long[], Long> keyColumns;
-
-    private static HTreeMap<Long, CatalogPrimaryKey> primaryKeys;
-    private static HTreeMap<Long, CatalogForeignKey> foreignKeys;
-    private static HTreeMap<Long, CatalogConstraint> constraints;
-    private static HTreeMap<Long, CatalogIndex> indexes;
-
-    private static BTreeMap<Long, CatalogPartitionGroup> partitionGroups;
-    private static BTreeMap<Long, CatalogPartition> partitions;
-    private static BTreeMap<Object[], CatalogPartitionPlacement> partitionPlacements; // (AdapterId, Partition)
-
-    // Container Object that contains all other placements
-    private static BTreeMap<Object[], CatalogDataPlacement> dataPlacements; // (AdapterId, TableId) -> CatalogDataPlacement
-
-    private static BTreeMap<Long, LogicalGraph> graphs;
-    private static BTreeMap<String, LogicalGraph> graphAliases;
-    private static BTreeMap<Object[], LogicalGraph> graphNames;
-    private static BTreeMap<Object[], CatalogGraphPlacement> graphPlacements;
-
-    private static Long openTable;
-
-    private static final AtomicInteger adapterIdBuilder = new AtomicInteger( 1 );
-    private static final AtomicInteger queryInterfaceIdBuilder = new AtomicInteger( 1 );
-    private static final AtomicInteger userIdBuilder = new AtomicInteger( 1 );
-
-    private static final AtomicLong databaseIdBuilder = new AtomicLong( 1 );
-    private static final AtomicLong namespaceIdBuilder = new AtomicLong( 1 );
-    private static final AtomicLong entityIdBuilder = new AtomicLong( 1 );
-    private static final AtomicLong columnIdBuilder = new AtomicLong( 1 );
-
-    private static final AtomicLong partitionGroupIdBuilder = new AtomicLong( 1 );
-    private static final AtomicLong partitionIdBuilder = new AtomicLong( 1000 );
-
-    private static final AtomicLong keyIdBuilder = new AtomicLong( 1 );
-    private static final AtomicLong constraintIdBuilder = new AtomicLong( 1 );
-    private static final AtomicLong indexIdBuilder = new AtomicLong( 1 );
-    private static final AtomicLong foreignKeyIdBuilder = new AtomicLong( 1 );
-
-    private static final AtomicLong physicalPositionBuilder = new AtomicLong();
-
-    private static Set<Long> frequencyDependentTables = new HashSet<>(); // All tables to consider for periodic processing
-
-    // Keeps a list of all tableIDs which are going to be deleted. This is required to avoid constraints when recursively
-    // removing a table and all placements and partitions. Otherwise validatePartitionDistribution() inside the Catalog
-    // would throw an error.
-    private static final List<Long> tablesFlaggedForDeletion = new ArrayList<>();
-
-    Comparator<LogicalColumn> columnComparator = Comparator.comparingInt( o -> o.position );
-
-    // {@link AlgNode} used to create view and materialized view
-    @Getter
-    private final Map<Long, AlgNode> nodeInfo = new HashMap<>();
-
-
-    public CatalogImpl() {
-        this( FILE_PATH, true, true, false );
-    }
-
-
-    /**
-     * Creates a new catalog according to the given parameters
-     *
-     * @param fileName name of the persistent catalog file
-     * @param doInitSchema whether the default schema should be initialized
-     * @param doInitInformationPage whether a new information page should be created
-     * @param deleteAfter whether the file is deleted when the catalog is closed
-     */
-    public CatalogImpl( String fileName, boolean doInitSchema, boolean doInitInformationPage, boolean deleteAfter ) {
-        super();
-
-        if ( db != null ) {
-            db.close();
-        }
-        synchronized ( this ) {
-
-            if ( Catalog.memoryCatalog || Catalog.testMode ) {
-                isPersistent = false;
-            } else {
-                isPersistent = isPersistent();
-            }
-
-            if ( isPersistent ) {
-                StatusService.printInfo( "Making the catalog persistent." );
-                File folder = PolyphenyHomeDirManager.getInstance().registerNewFolder( "catalog" );
-
-                if ( Catalog.resetCatalog ) {
-                    StatusService.printInfo( "Resetting catalog on startup." );
-                    if ( new File( folder, fileName ).exists() ) {
-                        //noinspection ResultOfMethodCallIgnored
-                        new File( folder, fileName ).delete();
-                    }
-                }
-
-                if ( !deleteAfter ) {
-                    db = DBMaker
-                            .fileDB( new File( folder, fileName ) )
-                            .closeOnJvmShutdown()
-                            .transactionEnable()
-                            .fileMmapEnableIfSupported()
-                            .fileMmapPreclearDisable()
-                            .make();
-                } else {
-                    db = DBMaker
-                            .fileDB( new File( folder, fileName ) )
-                            .closeOnJvmShutdown()
-                            .fileDeleteAfterClose()
-                            .transactionEnable()
-                            .fileMmapEnableIfSupported()
-                            .fileMmapPreclearDisable()
-                            .make();
-                }
-                db.getStore().fileLoad();
-
-            } else {
-                StatusService.printInfo( "Making the catalog in-memory."
); - db = DBMaker - .memoryDB() - .transactionEnable() - .closeOnJvmShutdown() - .make(); - } - - initDBLayout( db ); - - // mirrors default data from old sql file - restoreAllIdBuilders(); - try { - - if ( doInitSchema ) { - insertDefaultData(); - } - - } catch ( GenericCatalogException | UnknownUserException | UnknownTableException | - UnknownSchemaException | UnknownAdapterException | UnknownColumnException e ) { - throw new RuntimeException( e ); - } - if ( doInitInformationPage ) { - new CatalogInfoPage( this ); - } - - new CatalogValidator().startCheck(); - } - } - - - @Override - public void commit() throws NoTablePrimaryKeyException { - if ( openTable != null ) { - throw new NoTablePrimaryKeyException(); - } - db.commit(); - } - - - @Override - public void rollback() { - db.rollback(); - } - - - /** - * Checks if a file can be created on the system, accessed and changed - * - * @return if it was possible - */ - private boolean isPersistent() { - File file = PolyphenyHomeDirManager.getInstance().registerNewFile( "testfile" ); - try { - if ( !file.exists() ) { - boolean res = file.createNewFile(); - if ( !res ) { - return false; - } - } - } catch ( IOException e ) { - return false; - } - if ( !file.canRead() || !file.canWrite() ) { - return false; - } - file.delete(); - - return true; - } - - - /** - * Initializes the default catalog layout - * - * @param db the databases object on which the layout is created - */ - private void initDBLayout( DB db ) { - try { - initUserInfo( db ); - initDatabaseInfo( db ); - initSchemaInfo( db ); - initTableInfo( db ); - initGraphInfo( db ); - initDocumentInfo( db ); - initColumnInfo( db ); - initKeysAndConstraintsInfo( db ); - initAdapterInfo( db ); - initQueryInterfaceInfo( db ); - } catch ( SerializationError e ) { - log.error( "!!!!!!!!!!! Error while restoring the catalog !!!!!!!!!!!" ); - log.error( "This usually means that there have been changes to the internal structure of the catalog with the last update of Polypheny-DB." ); - log.error( "To fix this, you must reset the catalog. To do this, please start Polypheny-DB once with the argument \"-resetCatalog\"." ); - StatusService.printError( - "Unsupported version of catalog! 
Unable to restore the schema.", - ErrorConfig.builder().func( ErrorConfig.DO_NOTHING ).doExit( true ).showButton( true ).buttonMessage( "Exit" ).build() ); - } - } - - - @Override - public void restoreColumnPlacements( Transaction transaction ) { - AdapterManager manager = AdapterManager.getInstance(); - - Map> restoredTables = new HashMap<>(); - - for ( LogicalColumn c : columns.values() ) { - List placements = getColumnPlacement( c.id ); - LogicalTable catalogTable = getTable( c.tableId ); - - // No column placements need to be restored if it is a view - if ( catalogTable.entityType != EntityType.VIEW ) { - if ( placements.size() == 0 ) { - // No placements shouldn't happen - throw new RuntimeException( "There seems to be no placement for the column with the id " + c.id ); - } else if ( placements.size() == 1 ) { - Adapter adapter = manager.getAdapter( placements.get( 0 ).adapterId ); - if ( adapter instanceof DataStore ) { - DataStore store = (DataStore) adapter; - if ( !store.isPersistent() ) { - - // TODO only full placements atm here - - if ( !restoredTables.containsKey( store.getAdapterId() ) ) { - store.createPhysicalTable( transaction.createStatement().getPrepareContext(), catalogTable, null ); - restoredTables.put( store.getAdapterId(), Collections.singletonList( catalogTable.id ) ); - - } else if ( !(restoredTables.containsKey( store.getAdapterId() ) && restoredTables.get( store.getAdapterId() ).contains( catalogTable.id )) ) { - store.createPhysicalTable( transaction.createStatement().getPrepareContext(), catalogTable, null ); - List ids = new ArrayList<>( restoredTables.get( store.getAdapterId() ) ); - ids.add( catalogTable.id ); - restoredTables.put( store.getAdapterId(), ids ); - } - } - } - } else { - Map persistent = placements.stream().collect( Collectors.toMap( p -> p.adapterId, p -> manager.getStore( p.adapterId ).isPersistent() ) ); - - if ( !persistent.containsValue( true ) ) { // no persistent placement for this column - LogicalTable table = getTable( c.tableId ); - for ( CatalogColumnPlacement p : placements ) { - DataStore store = manager.getStore( p.adapterId ); - - if ( !restoredTables.containsKey( store.getAdapterId() ) ) { - store.createPhysicalTable( transaction.createStatement().getPrepareContext(), table, null ); - List ids = new ArrayList<>(); - ids.add( table.id ); - restoredTables.put( store.getAdapterId(), ids ); - - } else if ( !(restoredTables.containsKey( store.getAdapterId() ) && restoredTables.get( store.getAdapterId() ).contains( table.id )) ) { - store.createPhysicalTable( transaction.createStatement().getPrepareContext(), table, null ); - List ids = new ArrayList<>( restoredTables.get( store.getAdapterId() ) ); - ids.add( table.id ); - restoredTables.put( store.getAdapterId(), ids ); - } - } - } else if ( persistent.containsValue( true ) && persistent.containsValue( false ) ) { - // TODO DL change so column gets copied - for ( Entry p : persistent.entrySet() ) { - if ( !p.getValue() ) { - deleteColumnPlacement( p.getKey(), c.id, false ); - } - } - } - } - } - } - } - - - /** - * {@inheritDoc} - */ - @Override - public void restoreViews( Transaction transaction ) { - Statement statement = transaction.createStatement(); - - for ( LogicalTable c : tables.values() ) { - if ( c.entityType == EntityType.VIEW || c.entityType == EntityType.MATERIALIZED_VIEW ) { - String query; - QueryLanguage language; - if ( c.entityType == EntityType.VIEW ) { - query = ((CatalogView) c).getQuery(); - language = ((CatalogView) c).getLanguage(); - } else { - query = 
((CatalogMaterializedView) c).getQuery(); - language = ((CatalogMaterializedView) c).getLanguage(); - } - - switch ( language.getSerializedName() ) { - case "sql": - Processor sqlProcessor = statement.getTransaction().getProcessor( QueryLanguage.from( "rel" ) ); - Node sqlNode = sqlProcessor.parse( query ).get( 0 ); - AlgRoot algRoot = sqlProcessor.translate( - statement, - sqlProcessor.validate( statement.getTransaction(), sqlNode, RuntimeConfig.ADD_DEFAULT_VALUES_IN_INSERTS.getBoolean() ).left, - new QueryParameters( query, c.getNamespaceType() ) ); - nodeInfo.put( c.id, algRoot.alg ); - break; - - case "rel": - Processor jsonRelProcessor = statement.getTransaction().getProcessor( QueryLanguage.from( "rel" ) ); - AlgNode result = jsonRelProcessor.translate( statement, null, new QueryParameters( query, c.getNamespaceType() ) ).alg; - - final AlgDataType rowType = result.getRowType(); - final List> fields = Pair.zip( ImmutableIntList.identity( rowType.getFieldCount() ), rowType.getFieldNames() ); - final AlgCollation collation = - result instanceof Sort - ? ((Sort) result).collation - : AlgCollations.EMPTY; - AlgRoot root = new AlgRoot( result, result.getRowType(), Kind.SELECT, fields, collation ); - - nodeInfo.put( c.id, root.alg ); - break; - - case "mongo": - Processor mqlProcessor = statement.getTransaction().getProcessor( QueryLanguage.from( "mongo" ) ); - Node mqlNode = mqlProcessor.parse( query ).get( 0 ); - - AlgRoot mqlRel = mqlProcessor.translate( - statement, - mqlNode, - new ExtendedQueryParameters( query, NamespaceType.DOCUMENT, getNamespace( defaultDatabaseId ).name ) ); - nodeInfo.put( c.id, mqlRel.alg ); - break; - } - if ( c.entityType == EntityType.MATERIALIZED_VIEW ) { - log.info( "Updating materialized view: {}", c.getNamespaceName() + "." 
                            + c.name );
-                    MaterializedViewManager materializedManager = MaterializedViewManager.getInstance();
-                    materializedManager.addMaterializedInfo( c.id, ((CatalogMaterializedView) c).getMaterializedCriteria() );
-                    materializedManager.updateData( statement.getTransaction(), c.id );
-                    materializedManager.updateMaterializedTime( c.id );
-                }
-            }
-        }
-    }
-
-
-    /**
-     * Sets the idBuilder for a given map to the new starting position
-     *
-     * @param map the map to which the idBuilder belongs
-     * @param idBuilder the id builder which creates new unique ids
-     */
-    private void restoreIdBuilder( Map<Integer, ?> map, AtomicInteger idBuilder ) {
-        if ( !map.isEmpty() ) {
-            idBuilder.set( Collections.max( map.keySet() ) + 1 );
-        }
-    }
-
-
-    private void restoreIdBuilder( Map<Long, ?> map, AtomicLong idBuilder ) {
-        if ( !map.isEmpty() ) {
-            idBuilder.set( Collections.max( map.keySet() ) + 1 );
-        }
-    }
-
-
-    private void restoreAllIdBuilders() {
-        restoreIdBuilder( schemas, namespaceIdBuilder );
-        restoreIdBuilder( databases, databaseIdBuilder );
-        restoreIdBuilder( tables, entityIdBuilder );
-        restoreIdBuilder( columns, columnIdBuilder );
-        restoreIdBuilder( users, userIdBuilder );
-        restoreIdBuilder( keys, keyIdBuilder );
-        restoreIdBuilder( constraints, constraintIdBuilder );
-        restoreIdBuilder( indexes, indexIdBuilder );
-        restoreIdBuilder( adapters, adapterIdBuilder );
-        restoreIdBuilder( queryInterfaces, queryInterfaceIdBuilder );
-        restoreIdBuilder( foreignKeys, foreignKeyIdBuilder );
-        restoreIdBuilder( partitionGroups, partitionGroupIdBuilder );
-        restoreIdBuilder( partitions, partitionIdBuilder );
-
-        // Restore physical position builder
-        if ( columnPlacements.size() > 0 ) {
-            long highestPosition = 0;
-            for ( CatalogColumnPlacement placement : columnPlacements.values() ) {
-                if ( placement.physicalPosition > highestPosition ) {
-                    highestPosition = placement.physicalPosition;
-                }
-            }
-            physicalPositionBuilder.set( highestPosition + 1 );
-        }
-    }
-
-
-    /**
-     * Initializes all needed maps for adapters
-     *
-     * adapters: adapterId {@code ->} CatalogAdapter
-     * adapterNames: adapterName {@code ->} CatalogAdapter
-     */
-    private void initAdapterInfo( DB db ) {
-        adapters = db.hashMap( "adapters", Serializer.INTEGER, new GenericSerializer<>() ).createOrOpen();
-        adapterNames = db.hashMap( "adapterNames", Serializer.STRING, new GenericSerializer<>() ).createOrOpen();
-    }
-
-
-    /**
-     * Initializes all needed maps for query interfaces
-     *
-     * queryInterfaces: ifaceId {@code ->} CatalogQueryInterface
-     * queryInterfaceNames: ifaceName {@code ->} CatalogQueryInterface
-     */
-    private void initQueryInterfaceInfo( DB db ) {
-        queryInterfaces = db.hashMap( "queryInterfaces", Serializer.INTEGER, new GenericSerializer<>() ).createOrOpen();
-        queryInterfaceNames = db.hashMap( "queryInterfaceNames", Serializer.STRING, new GenericSerializer<>() ).createOrOpen();
-    }
-
-
-    /**
-     * Creates all needed maps for keys and constraints
-     *
-     * keyColumns: [columnId1, columnId2,...] {@code ->} keyId
-     * keys: keyId {@code ->} CatalogKey
-     * primaryKeys: keyId {@code ->} CatalogPrimaryKey
-     * foreignKeys: keyId {@code ->} CatalogForeignKey
-     * constraints: constraintId {@code ->} CatalogConstraint
-     * indexes: indexId {@code ->} CatalogIndex
-     */
-    private void initKeysAndConstraintsInfo( DB db ) {
-        keyColumns = db.hashMap( "keyColumns", Serializer.LONG_ARRAY, Serializer.LONG ).createOrOpen();
-        keys = db.hashMap( "keys", Serializer.LONG, new GenericSerializer<>() ).createOrOpen();
-        primaryKeys = db.hashMap( "primaryKeys", Serializer.LONG, new GenericSerializer<>() ).createOrOpen();
-        foreignKeys = db.hashMap( "foreignKeys", Serializer.LONG, new GenericSerializer<>() ).createOrOpen();
-        constraints = db.hashMap( "constraints", Serializer.LONG, new GenericSerializer<>() ).createOrOpen();
-        indexes = db.hashMap( "indexes", Serializer.LONG, new GenericSerializer<>() ).createOrOpen();
-    }
-
-
-    /**
-     * Creates all needed maps for users
-     *
-     * users: userId {@code ->} CatalogUser
-     * userNames: name {@code ->} CatalogUser
-     */
-    private void initUserInfo( DB db ) {
-        users = db.hashMap( "users", Serializer.INTEGER, new GenericSerializer<>() ).createOrOpen();
-        userNames = db.hashMap( "usersNames", Serializer.STRING, new GenericSerializer<>() ).createOrOpen();
-    }
-
-
-    /**
-     * Initializes the column maps
-     *
-     * columns: columnId {@code ->} CatalogColumn
-     * columnNames: new Object[]{databaseId, schemaId, tableId, columnName} {@code ->} CatalogColumn
-     * columnPlacements: new Object[]{adapterId, columnId} {@code ->} CatalogColumnPlacement
-     */
-    private void initColumnInfo( DB db ) {
-        //noinspection unchecked
-        columns = db.treeMap( "columns", Serializer.LONG, Serializer.JAVA ).createOrOpen();
-        //noinspection unchecked
-        columnNames = db.treeMap( "columnNames", new SerializerArrayTuple( Serializer.LONG, Serializer.LONG, Serializer.LONG, Serializer.STRING ), Serializer.JAVA ).createOrOpen();
-        //noinspection unchecked
-        columnPlacements = db.treeMap( "columnPlacement", new SerializerArrayTuple( Serializer.INTEGER, Serializer.LONG ), Serializer.JAVA ).createOrOpen();
-    }
-
-
-    /**
-     * Creates all maps needed for tables
-     *
-     * tables: tableId {@code ->} CatalogEntity
-     * tableChildren: tableId {@code ->} [columnId, columnId,..]
- * tableNames: new Object[]{databaseId, schemaId, tableName} {@code ->} CatalogEntity - */ - private void initTableInfo( DB db ) { - //noinspection unchecked - tables = db.treeMap( "tables", Serializer.LONG, Serializer.JAVA ).createOrOpen(); - tableChildren = db.hashMap( "tableChildren", Serializer.LONG, new GenericSerializer>() ).createOrOpen(); - //noinspection unchecked - tableNames = db.treeMap( "tableNames" ) - .keySerializer( new SerializerArrayTuple( Serializer.LONG, Serializer.LONG, Serializer.STRING ) ) - .valueSerializer( Serializer.JAVA ) - .createOrOpen(); - dataPlacements = db.treeMap( "dataPlacement", new SerializerArrayTuple( Serializer.INTEGER, Serializer.LONG ), Serializer.JAVA ).createOrOpen(); - partitionGroups = db.treeMap( "partitionGroups", Serializer.LONG, Serializer.JAVA ).createOrOpen(); - partitions = db.treeMap( "partitions", Serializer.LONG, Serializer.JAVA ).createOrOpen(); - - partitionPlacements = db.treeMap( "partitionPlacements", new SerializerArrayTuple( Serializer.INTEGER, Serializer.LONG ), Serializer.JAVA ).createOrOpen(); - - // Restores all Tables dependent on periodic checks like TEMPERATURE Partitioning - frequencyDependentTables = tables.values().stream().filter( t -> t.partitionProperty.reliesOnPeriodicChecks ).map( t -> t.id ).collect( Collectors.toSet() ); - } - - - @SuppressWarnings("unchecked") - private void initGraphInfo( DB db ) { - graphs = db.treeMap( "graphs", Serializer.LONG, Serializer.JAVA ).createOrOpen(); - graphNames = db.treeMap( "graphNames", new SerializerArrayTuple( Serializer.LONG, Serializer.STRING ), Serializer.JAVA ).createOrOpen(); - graphPlacements = db.treeMap( "graphPlacements", new SerializerArrayTuple( Serializer.LONG, Serializer.INTEGER ), Serializer.JAVA ).createOrOpen(); - - graphAliases = db.treeMap( "graphAliases", Serializer.STRING, Serializer.JAVA ).createOrOpen(); - } - - - @SuppressWarnings("unchecked") - private void initDocumentInfo( DB db ) { - collections = db.treeMap( "collections", Serializer.LONG, Serializer.JAVA ).createOrOpen(); - collectionNames = db.treeMap( "collectionNames", new SerializerArrayTuple( Serializer.LONG, Serializer.LONG, Serializer.STRING ), Serializer.JAVA ).createOrOpen(); - - } - - - /** - * Creates all needed maps for schemas - * - * schemas: schemaId {@code ->} CatalogNamespace - * schemaChildren: schemaId {@code ->} [tableId, tableId, etc] - * schemaNames: new Object[]{databaseId, schemaName} {@code ->} CatalogNamespace - */ - private void initSchemaInfo( DB db ) { - //noinspection unchecked - schemas = db.treeMap( "schemas", Serializer.LONG, Serializer.JAVA ).createOrOpen(); - schemaChildren = db.hashMap( "schemaChildren", Serializer.LONG, new GenericSerializer>() ).createOrOpen(); - //noinspection unchecked - schemaNames = db.treeMap( "schemaNames", new SerializerArrayTuple( Serializer.LONG, Serializer.STRING ), Serializer.JAVA ).createOrOpen(); - } - - - /** - * Creates maps for databases - * - * databases: databaseId {@code ->} CatalogDatabase - * databaseNames: databaseName {@code ->} CatalogDatabase - * databaseChildren: databaseId {@code ->} [tableId, tableId,...] 
- */ - private void initDatabaseInfo( DB db ) { - //noinspection unchecked - databases = db.treeMap( "databases", Serializer.LONG, Serializer.JAVA ).createOrOpen(); - //noinspection unchecked - databaseNames = db.treeMap( "databaseNames", Serializer.STRING, Serializer.JAVA ).createOrOpen(); - databaseChildren = db.hashMap( "databaseChildren", Serializer.LONG, new GenericSerializer>() ).createOrOpen(); - } - - - /** - * Fills the catalog database with default data, skips if data is already inserted - */ - private void insertDefaultData() throws GenericCatalogException, UnknownUserException, UnknownTableException, UnknownSchemaException, UnknownAdapterException, UnknownColumnException { - - ////////////// - // init users - int systemId; - if ( !userNames.containsKey( "system" ) ) { - systemId = addUser( "system", "" ); - } else { - systemId = getUser( "system" ).id; - } - - if ( !userNames.containsKey( "pa" ) ) { - addUser( "pa", "" ); - } - Catalog.defaultUserId = systemId; - - ////////////// - // init schema - - long schemaId; - if ( !schemaNames.containsKey( new Object[]{ "public" } ) ) { - schemaId = addNamespace( "public", NamespaceType.getDefault(), false ); - } else { - schemaId = getNamespace( "public" ).id; - } - - ////////////// - // init adapters - if ( adapterNames.size() == 0 ) { - // Deploy default store - addAdapter( "hsqldb", defaultStore.getAdapterName(), AdapterType.STORE, defaultStore.getDefaultSettings() ); - - // Deploy default CSV view - addAdapter( "hr", defaultSource.getAdapterName(), AdapterType.SOURCE, defaultSource.getDefaultSettings() ); - - // init schema - CatalogAdapter csv = getAdapter( "hr" ); - if ( !testMode ) { - if ( !tableNames.containsKey( new Object[]{ schemaId, "depts" } ) ) { - addTable( "depts", schemaId, systemId, EntityType.SOURCE, false ); - } - if ( !tableNames.containsKey( new Object[]{ schemaId, "emps" } ) ) { - addTable( "emps", schemaId, systemId, EntityType.SOURCE, false ); - } - if ( !tableNames.containsKey( new Object[]{ schemaId, "emp" } ) ) { - addTable( "emp", schemaId, systemId, EntityType.SOURCE, false ); - } - if ( !tableNames.containsKey( new Object[]{ schemaId, "work" } ) ) { - addTable( "work", schemaId, systemId, EntityType.SOURCE, false ); - addDefaultCsvColumns( csv ); - } - } - } - - try { - commit(); - } catch ( NoTablePrimaryKeyException e ) { - throw new RuntimeException( e ); - } - - } - - - @Override - public void restoreInterfacesIfNecessary() { - //////////////////////// - // init query interfaces - if ( queryInterfaceNames.size() == 0 ) { - QueryInterfaceManager.getREGISTER().values().forEach( i -> addQueryInterface( i.interfaceName, i.clazz.getName(), i.defaultSettings ) ); - - try { - commit(); - } catch ( NoTablePrimaryKeyException e ) { - throw new RuntimeException( e ); - } - } - } - - - /** - * Initiates default columns for csv files - */ - private void addDefaultCsvColumns( CatalogAdapter csv ) throws UnknownSchemaException, UnknownTableException, GenericCatalogException, UnknownColumnException { - LogicalNamespace schema = getNamespace( "public" ); - LogicalTable depts = getTable( schema.id, "depts" ); - - addDefaultCsvColumn( csv, depts, "deptno", PolyType.INTEGER, null, 1, null ); - addDefaultCsvColumn( csv, depts, "name", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 2, 20 ); - - LogicalTable emps = getTable( schema.id, "emps" ); - addDefaultCsvColumn( csv, emps, "empid", PolyType.INTEGER, null, 1, null ); - addDefaultCsvColumn( csv, emps, "deptno", PolyType.INTEGER, null, 2, null ); - addDefaultCsvColumn( 
csv, emps, "name", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 3, 20 ); - addDefaultCsvColumn( csv, emps, "salary", PolyType.INTEGER, null, 4, null ); - addDefaultCsvColumn( csv, emps, "commission", PolyType.INTEGER, null, 5, null ); - - LogicalTable emp = getTable( schema.id, "emp" ); - addDefaultCsvColumn( csv, emp, "employeeno", PolyType.INTEGER, null, 1, null ); - addDefaultCsvColumn( csv, emp, "age", PolyType.INTEGER, null, 2, null ); - addDefaultCsvColumn( csv, emp, "gender", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 3, 20 ); - addDefaultCsvColumn( csv, emp, "maritalstatus", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 4, 20 ); - addDefaultCsvColumn( csv, emp, "worklifebalance", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 5, 20 ); - addDefaultCsvColumn( csv, emp, "education", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 6, 20 ); - addDefaultCsvColumn( csv, emp, "monthlyincome", PolyType.INTEGER, null, 7, null ); - addDefaultCsvColumn( csv, emp, "relationshipjoy", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 8, 20 ); - addDefaultCsvColumn( csv, emp, "workingyears", PolyType.INTEGER, null, 9, null ); - addDefaultCsvColumn( csv, emp, "yearsatcompany", PolyType.INTEGER, null, 10, null ); - - LogicalTable work = getTable( schema.id, "work" ); - addDefaultCsvColumn( csv, work, "employeeno", PolyType.INTEGER, null, 1, null ); - addDefaultCsvColumn( csv, work, "educationfield", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 2, 20 ); - addDefaultCsvColumn( csv, work, "jobinvolvement", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 3, 20 ); - addDefaultCsvColumn( csv, work, "joblevel", PolyType.INTEGER, null, 4, null ); - addDefaultCsvColumn( csv, work, "jobrole", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 5, 30 ); - addDefaultCsvColumn( csv, work, "businesstravel", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 6, 20 ); - addDefaultCsvColumn( csv, work, "department", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 7, 25 ); - addDefaultCsvColumn( csv, work, "attrition", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 8, 20 ); - addDefaultCsvColumn( csv, work, "dailyrate", PolyType.INTEGER, null, 9, null ); - - // set all needed primary keys - addPrimaryKey( depts.id, Collections.singletonList( getColumn( depts.id, "deptno" ).id ) ); - addPrimaryKey( emps.id, Collections.singletonList( getColumn( emps.id, "empid" ).id ) ); - addPrimaryKey( emp.id, Collections.singletonList( getColumn( emp.id, "employeeno" ).id ) ); - addPrimaryKey( work.id, Collections.singletonList( getColumn( work.id, "employeeno" ).id ) ); - - // set foreign keys - addForeignKey( - emps.id, - ImmutableList.of( getColumn( emps.id, "deptno" ).id ), - depts.id, - ImmutableList.of( getColumn( depts.id, "deptno" ).id ), - "fk_emps_depts", - ForeignKeyOption.NONE, - ForeignKeyOption.NONE ); - addForeignKey( - work.id, - ImmutableList.of( getColumn( work.id, "employeeno" ).id ), - emp.id, - ImmutableList.of( getColumn( emp.id, "employeeno" ).id ), - "fk_work_emp", - ForeignKeyOption.NONE, - ForeignKeyOption.NONE ); - } - - - private void addDefaultCsvColumn( CatalogAdapter csv, LogicalTable table, String name, PolyType type, Collation collation, int position, Integer length ) { - if ( !checkIfExistsColumn( table.id, name ) ) { - long colId = addColumn( name, table.id, position, type, null, length, null, null, null, false, collation ); - String filename = table.name + ".csv"; - if ( table.name.equals( "emp" ) || table.name.equals( "work" ) ) { - filename += ".gz"; - } - - addColumnPlacement( csv.id, colId, 
                    PlacementType.AUTOMATIC, filename, table.name, name );
-            updateColumnPlacementPhysicalPosition( csv.id, colId, position );
-
-            long partitionId = table.partitionProperty.partitionIds.get( 0 );
-            addPartitionPlacement( table.namespaceId, csv.id, table.id, partitionId, PlacementType.AUTOMATIC, filename, table.name, DataPlacementRole.UPTODATE );
-        }
-    }
-
-
-    private void addDefaultColumn( CatalogAdapter adapter, LogicalTable table, String name, PolyType type, Collation collation, int position, Integer length ) {
-        if ( !checkIfExistsColumn( table.id, name ) ) {
-            long colId = addColumn( name, table.id, position, type, null, length, null, null, null, false, collation );
-            addColumnPlacement( adapter.id, colId, PlacementType.AUTOMATIC, "col" + colId, table.name, name );
-            updateColumnPlacementPhysicalPosition( adapter.id, colId, position );
-        }
-    }
-
-
-    /**
-     * {@inheritDoc}
-     */
-    @Override
-    public void validateColumns() {
-        CatalogValidator validator = new CatalogValidator();
-        db.rollback();
-        try {
-            validator.validate();
-        } catch ( GenericCatalogException e ) {
-            throw new RuntimeException( e );
-        }
-    }
-
-
-    /**
-     * {@inheritDoc}
-     */
-    @Override
-    public void close() {
-        db.close();
-    }
-
-
-    /**
-     * {@inheritDoc}
-     */
-    @Override
-    public void clear() {
-        db.getAll().clear();
-        initDBLayout( db );
-        restoreAllIdBuilders();
-    }
-
-
-    @Override
-    public Snapshot getSnapshot( long id ) {
-        return null;
-    }
-
-
-    /**
-     * {@inheritDoc}
-     */
-    public long addDatabase( String name, int ownerId, String ownerName, long defaultSchemaId, String defaultSchemaName ) {
-        long id = databaseIdBuilder.getAndIncrement();
-        CatalogDatabase database = new CatalogDatabase( id, name, ownerId, ownerName, defaultSchemaId, defaultSchemaName );
-        synchronized ( this ) {
-            databases.put( id, database );
-            databaseNames.put( name, database );
-            databaseChildren.put( id, ImmutableList.<Long>builder().build() );
-        }
-        listeners.firePropertyChange( "database", null, database );
-        return id;
-    }
-
-
-    /**
-     * {@inheritDoc}
-     */
-    public void deleteDatabase( long databaseId ) {
-        CatalogDatabase database = getDatabase( databaseId );
-        if ( database != null ) {
-            synchronized ( this ) {
-                databases.remove( databaseId );
-                databaseNames.remove( database.name );
-                databaseChildren.remove( databaseId );
-            }
-        }
-    }
-
-
-    /**
-     * {@inheritDoc}
-     */
-    @Override
-    public int addUser( String name, String password ) {
-        CatalogUser user = new CatalogUser( userIdBuilder.getAndIncrement(), name, password );
-        synchronized ( this ) {
-            users.put( user.id, user );
-            userNames.put( user.name, user );
-        }
-        listeners.firePropertyChange( "user", null, user );
-        return user.id;
-    }
-
-
-    /**
-     * {@inheritDoc}
-     */
-    public List<CatalogDatabase> getDatabases( Pattern pattern ) {
-        if ( pattern != null ) {
-            if ( pattern.containsWildcards ) {
-                return databaseNames.entrySet().stream().filter( e -> e.getKey().matches( pattern.toRegex() ) ).map( Entry::getValue ).sorted().collect( Collectors.toList() );
-            } else {
-                if ( databaseNames.containsKey( pattern.pattern ) ) {
-                    return Collections.singletonList( databaseNames.get( pattern.pattern ) );
-                } else {
-                    return new ArrayList<>();
-                }
-            }
-        } else {
-            return new ArrayList<>( databases.values() );
-        }
-    }
-
-
-    /**
-     * {@inheritDoc}
-     */
-    private CatalogDatabase getDatabase( long databaseId ) {
-        try {
-            return Objects.requireNonNull( databases.get( databaseId ) );
-        } catch ( NullPointerException e ) {
-            throw new UnknownDatabaseIdRuntimeException( databaseId );
-        }
-    }
-    /**
-     * {@inheritDoc}
-     */
-    @Override
-    public @NonNull List<LogicalNamespace> getNamespaces( Pattern name ) {
-        if ( name != null ) {
-            return schemaNames.values().stream().filter( s -> s.name.matches( name.toRegex() ) ).collect( Collectors.toList() );
-        }
-        return new ArrayList<>();
-    }
-
-
-    /**
-     * {@inheritDoc}
-     */
-    @Override
-    public LogicalNamespace getNamespace( long id ) {
-        try {
-            return Objects.requireNonNull( schemas.get( id ) );
-        } catch ( NullPointerException e ) {
-            throw new UnknownSchemaIdRuntimeException( id );
-        }
-    }
-
-
-    /**
-     * {@inheritDoc}
-     */
-    @Override
-    public LogicalNamespace getNamespace( final String schemaName ) throws UnknownSchemaException {
-        String name = schemaName.toLowerCase();
-        try {
-            return Objects.requireNonNull( schemaNames.get( new Object[]{ name } ) );
-        } catch ( NullPointerException e ) {
-            throw new UnknownSchemaException( schemaName );
-        }
-    }
-
-
-    /**
-     * {@inheritDoc}
-     */
-    @Override
-    public long addNamespace( String name, NamespaceType namespaceType, boolean caseSensitive ) {
-        name = name.toLowerCase();
-        int ownerId = Catalog.defaultUserId;
-        CatalogUser owner = getUser( ownerId );
-        long id = namespaceIdBuilder.getAndIncrement();
-        LogicalNamespace schema = new LogicalNamespace( id, name, ownerId, owner.name, namespaceType, namespaceType == NamespaceType.DOCUMENT || namespaceType == NamespaceType.GRAPH );
-        synchronized ( this ) {
-            schemas.put( id, schema );
-            schemaNames.put( new Object[]{ name }, schema );
-            schemaChildren.put( id, ImmutableList.<Long>builder().build() );
-        }
-        listeners.firePropertyChange( "namespace", null, schema );
-        return id;
-    }
-
-
-    /**
-     * {@inheritDoc}
-     */
-    @Override
-    public boolean checkIfExistsNamespace( String name ) {
-        name = name.toLowerCase();
-        return schemaNames.containsKey( new Object[]{ name } );
-    }
-
-
-    /**
-     * {@inheritDoc}
-     */
-    @Override
-    public void renameNamespace( long schemaId, String name ) {
-        name = name.toLowerCase();
-        try {
-            LogicalNamespace old = Objects.requireNonNull( schemas.get( schemaId ) );
-            LogicalNamespace schema = new LogicalNamespace( old.id, name, old.ownerId, old.ownerName, old.namespaceType, old.caseSensitive );
-
-            synchronized ( this ) {
-                schemas.replace( schemaId, schema );
-                schemaNames.remove( new Object[]{ old.name } );
-                schemaNames.put( new Object[]{ name }, schema );
-            }
-            listeners.firePropertyChange( "schema", old, schema );
-        } catch ( NullPointerException e ) {
-            throw new UnknownSchemaIdRuntimeException( schemaId );
-        }
-    }
-
-
-    /**
-     * {@inheritDoc}
-     */
-    public void setNamespaceOwner( long schemaId, long ownerId ) {
-        try {
-            LogicalNamespace old = Objects.requireNonNull( schemas.get( schemaId ) );
-            LogicalNamespace schema = new LogicalNamespace( old.id, old.name, (int) ownerId, old.ownerName, old.namespaceType, old.caseSensitive );
-            synchronized ( this ) {
-                schemas.replace( schemaId, schema );
-                schemaNames.replace( new Object[]{ schema.name }, schema );
-            }
-            listeners.firePropertyChange( "schema", old, schema );
-        } catch ( NullPointerException e ) {
-            throw new UnknownSchemaIdRuntimeException( schemaId );
-        }
-    }
-
-
-    /**
-     * {@inheritDoc}
-     */
-    @Override
-    public long addGraph( String name, List<DataStore> stores, boolean modifiable, boolean ifNotExists, boolean replace ) {
-        if ( getGraphs( new Pattern( name ) ).size() != 0 && !ifNotExists ) {
-            throw new GraphAlreadyExistsException( name );
-        }
-
-        long id = addNamespace( name, NamespaceType.GRAPH, false );
-
-        LogicalGraph graph = new LogicalGraph( id, name, Catalog.defaultUserId, modifiable, ImmutableList.of(), true );
-
-        synchronized ( this ) {
-            graphs.put( id, graph );
graphNames.put( new Object[]{ name }, graph ); - } - - listeners.firePropertyChange( "graph", null, graph ); - return id; - } - - - /** - * {@inheritDoc} - */ - @Override - public void addGraphAlias( long graphId, String alias, boolean ifNotExists ) { - LogicalGraph graph = Objects.requireNonNull( getGraph( graphId ) ); - - if ( graphAliases.containsKey( alias ) ) { - if ( !ifNotExists ) { - throw new RuntimeException( "Error while creating alias: " + alias ); - } - return; - } - - synchronized ( this ) { - graphAliases.put( alias, graph ); - } - listeners.firePropertyChange( "graphAlias", null, alias ); - } - - - /** - * {@inheritDoc} - */ - @Override - public void removeGraphAlias( long graphId, String alias, boolean ifExists ) { - if ( !graphAliases.containsKey( alias ) ) { - if ( !ifExists ) { - throw new RuntimeException( "Error while removing alias: " + alias ); - } - return; - } - synchronized ( this ) { - graphAliases.remove( alias ); - } - listeners.firePropertyChange( "graphAlias", alias, null ); - } - - - - /** - * {@inheritDoc} - */ - @Override - public void addGraphLogistics( long id, List stores, boolean onlyPlacement ) throws GenericCatalogException, UnknownTableException, UnknownColumnException { - /// --- nodes - // table id nodes -> id, node, labels - long nodesId; - if ( !onlyPlacement ) { - nodesId = addTable( "_nodes_", id, Catalog.defaultUserId, EntityType.ENTITY, true ); - } else { - nodesId = getTable( id, "_nodes_" ).id; - } - - stores.forEach( store -> addDataPlacement( store.getAdapterId(), nodesId ) ); - - long idNodeId; - long labelNodeId; - if ( !onlyPlacement ) { - idNodeId = addColumn( "_id_", nodesId, 0, PolyType.VARCHAR, null, 255, null, null, null, false, Collation.getDefaultCollation() ); - labelNodeId = addColumn( "_label_", nodesId, 1, PolyType.VARCHAR, null, 255, null, null, null, false, Collation.getDefaultCollation() ); - } else { - idNodeId = getColumn( nodesId, "_id_" ).id; - labelNodeId = getColumn( nodesId, "_label_" ).id; - } - - for ( DataStore s : stores ) { - addColumnPlacement( - s.getAdapterId(), - idNodeId, - PlacementType.AUTOMATIC, - null, - null, - null - ); - - addColumnPlacement( - s.getAdapterId(), - labelNodeId, - PlacementType.AUTOMATIC, - null, - null, - null - ); - } - - if ( !onlyPlacement ) { - addPrimaryKey( nodesId, List.of( idNodeId, labelNodeId ) ); - } - - /// --- node properties - - // table id nodes -> id, node, labels - long nodesPropertyId; - if ( !onlyPlacement ) { - nodesPropertyId = addTable( "_n_properties_", id, Catalog.defaultUserId, EntityType.ENTITY, true ); - } else { - nodesPropertyId = getTable( id, "_n_properties_" ).id; - } - - stores.forEach( store -> addDataPlacement( store.getAdapterId(), nodesPropertyId ) ); - - long idNodesPropertyId; - long keyNodePropertyId; - long valueNodePropertyId; - - if ( !onlyPlacement ) { - idNodesPropertyId = addColumn( "_id_", nodesPropertyId, 0, PolyType.VARCHAR, null, 255, null, null, null, false, Collation.getDefaultCollation() ); - keyNodePropertyId = addColumn( "_key_", nodesPropertyId, 1, PolyType.VARCHAR, null, 255, null, null, null, false, Collation.getDefaultCollation() ); - valueNodePropertyId = addColumn( "_value_", nodesPropertyId, 2, PolyType.VARCHAR, null, 255, null, null, null, false, Collation.getDefaultCollation() ); - } else { - idNodesPropertyId = getColumn( nodesPropertyId, "_id_" ).id; - keyNodePropertyId = getColumn( nodesPropertyId, "_key_" ).id; - valueNodePropertyId = getColumn( nodesPropertyId, "_value_" ).id; - } - - for ( DataStore s : stores ) 
{ - addColumnPlacement( - s.getAdapterId(), - idNodesPropertyId, - PlacementType.AUTOMATIC, - null, - null, - null - ); - - addColumnPlacement( - s.getAdapterId(), - keyNodePropertyId, - PlacementType.AUTOMATIC, - null, - null, - null - ); - - addColumnPlacement( - s.getAdapterId(), - valueNodePropertyId, - PlacementType.AUTOMATIC, - null, - null, - null - ); - } - - if ( !onlyPlacement ) { - addPrimaryKey( nodesPropertyId, List.of( idNodesPropertyId, keyNodePropertyId ) ); - } - - /// --- edges - - // table id relationships -> id, rel, labels - long edgesId; - if ( !onlyPlacement ) { - edgesId = addTable( "_edges_", id, Catalog.defaultUserId, EntityType.ENTITY, true ); - } else { - edgesId = getTable( id, "_edges_" ).id; - } - - stores.forEach( store -> addDataPlacement( store.getAdapterId(), edgesId ) ); - - long idEdgeId; - long labelEdgeId; - long sourceEdgeId; - long targetEdgeId; - - if ( !onlyPlacement ) { - idEdgeId = addColumn( - "_id_", - edgesId, - 0, - PolyType.VARCHAR, - null, - 36, - null, - null, - null, - false, - Collation.getDefaultCollation() ); - labelEdgeId = addColumn( - "_label_", - edgesId, - 1, - PolyType.VARCHAR, - null, - 255, - null, - null, - null, - false, - Collation.getDefaultCollation() ); - sourceEdgeId = addColumn( - "_l_id_", - edgesId, - 2, - PolyType.VARCHAR, - null, - 36, - null, - null, - null, - false, - Collation.getDefaultCollation() ); - targetEdgeId = addColumn( - "_r_id_", - edgesId, - 3, - PolyType.VARCHAR, - null, - 36, - null, - null, - null, - false, - Collation.getDefaultCollation() ); - } else { - idEdgeId = getColumn( edgesId, "_id_" ).id; - labelEdgeId = getColumn( edgesId, "_label_" ).id; - sourceEdgeId = getColumn( edgesId, "_l_id_" ).id; - targetEdgeId = getColumn( edgesId, "_r_id_" ).id; - } - - for ( DataStore store : stores ) { - addColumnPlacement( - store.getAdapterId(), - idEdgeId, - PlacementType.AUTOMATIC, - null, - null, - null - ); - - addColumnPlacement( - store.getAdapterId(), - labelEdgeId, - PlacementType.AUTOMATIC, - null, - null, - null - ); - - addColumnPlacement( - store.getAdapterId(), - sourceEdgeId, - PlacementType.AUTOMATIC, - null, - null, - null - ); - - addColumnPlacement( - store.getAdapterId(), - targetEdgeId, - PlacementType.AUTOMATIC, - null, - null, - null - ); - } - - if ( !onlyPlacement ) { - addPrimaryKey( edgesId, Collections.singletonList( idEdgeId ) ); - } - - /// --- edge properties - - // table id nodes -> id, node, labels - long edgesPropertyId; - if ( !onlyPlacement ) { - edgesPropertyId = addTable( "_properties_", id, Catalog.defaultUserId, EntityType.ENTITY, true ); - } else { - edgesPropertyId = getTable( id, "_properties_" ).id; - } - - stores.forEach( store -> addDataPlacement( store.getAdapterId(), edgesPropertyId ) ); - - long idEdgePropertyId; - long keyEdgePropertyId; - long valueEdgePropertyId; - - if ( !onlyPlacement ) { - idEdgePropertyId = addColumn( - "_id_", - edgesPropertyId, - 0, - PolyType.VARCHAR, - null, - 255, - null, - null, - null, - false, - Collation.getDefaultCollation() ); - keyEdgePropertyId = addColumn( - "_key_", - edgesPropertyId, - 1, - PolyType.VARCHAR, - null, - 255, - null, - null, - null, - false, - Collation.getDefaultCollation() ); - valueEdgePropertyId = addColumn( - "_value_", - edgesPropertyId, - 2, - PolyType.VARCHAR, - null, - 255, - null, - null, - null, - false, - Collation.getDefaultCollation() ); - } else { - idEdgePropertyId = getColumn( edgesPropertyId, "_id_" ).id; - keyEdgePropertyId = getColumn( edgesPropertyId, "_key_" ).id; - 
valueEdgePropertyId = getColumn( edgesPropertyId, "_value_" ).id; - } - - for ( DataStore s : stores ) { - addColumnPlacement( - s.getAdapterId(), - idEdgePropertyId, - PlacementType.AUTOMATIC, - null, - null, - null - ); - - addColumnPlacement( - s.getAdapterId(), - keyEdgePropertyId, - PlacementType.AUTOMATIC, - null, - null, - null - ); - - addColumnPlacement( - s.getAdapterId(), - valueEdgePropertyId, - PlacementType.AUTOMATIC, - null, - null, - null - ); - } - - if ( !onlyPlacement ) { - addPrimaryKey( edgesPropertyId, List.of( idEdgePropertyId, keyEdgePropertyId ) ); - - CatalogGraphMapping mapping = new CatalogGraphMapping( - id, - nodesId, - idNodeId, - labelNodeId, - nodesPropertyId, - idNodesPropertyId, - keyNodePropertyId, - valueNodePropertyId, - edgesId, - idEdgeId, - labelEdgeId, - sourceEdgeId, - targetEdgeId, - edgesPropertyId, - idEdgePropertyId, - keyEdgePropertyId, - valueEdgePropertyId ); - - } - - } - - - private void removeGraphLogistics( long graphId ) { - - deleteNamespace( graphId ); - } - - - /** - * {@inheritDoc} - */ - @Override - public void deleteGraph( long id ) { - if ( !graphs.containsKey( id ) ) { - throw new UnknownGraphException( id ); - } - - LogicalGraph old = Objects.requireNonNull( graphs.get( id ) ); - - removeGraphLogistics( id ); - - synchronized ( this ) { - old.placements.forEach( a -> graphPlacements.remove( new Object[]{ old.id, a } ) ); - graphs.remove( id ); - graphNames.remove( new Object[]{ old.name } ); - } - listeners.firePropertyChange( "graph", old, null ); - } - - - /** - * {@inheritDoc} - */ - @Override - public LogicalGraph getGraph( long id ) { - if ( !graphs.containsKey( id ) ) { - throw new UnknownGraphException( id ); - } - return graphs.get( id ); - } - - - /** - * {@inheritDoc} - */ - @Override - public List getGraphs( Pattern graphName ) { - if ( graphName != null ) { - return ImmutableList.copyOf( - Stream.concat( - graphAliases.values().stream(), - graphs.values().stream() ).filter( g -> g.name.matches( graphName.pattern.toLowerCase() ) ) - .collect( Collectors.toList() ) ); - } else { - return ImmutableList.copyOf( graphs.values() ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public void deleteNamespace( long schemaId ) { - LogicalNamespace schema = getNamespace( schemaId ); - synchronized ( this ) { - schemaNames.remove( new Object[]{ schema.name } ); - - for ( Long id : Objects.requireNonNull( schemaChildren.get( schemaId ) ) ) { - deleteTable( id ); - } - schemaChildren.remove( schemaId ); - schemas.remove( schemaId ); - - } - listeners.firePropertyChange( "Schema", schema, null ); - } - - - /** - * {@inheritDoc} - */ - @Override - public List getTables( long schemaId, Pattern tableNamePattern ) { - if ( schemas.containsKey( schemaId ) ) { - - LogicalNamespace schema = Objects.requireNonNull( schemas.get( schemaId ) ); - if ( tableNamePattern != null ) { - return Collections.singletonList( tableNames.get( new Object[]{ schemaId, tableNamePattern.pattern } ) ); - } else { - return new ArrayList<>( tableNames.prefixSubMap( new Object[]{ schemaId } ).values() ); - } - } - return new ArrayList<>(); - } - - - /** - * {@inheritDoc} - */ - @Override - public List getTables( Pattern schemaNamePattern, Pattern tableNamePattern ) { - if ( schemaNamePattern != null && tableNamePattern != null ) { - LogicalNamespace schema = schemaNames.get( new Object[]{ schemaNamePattern.pattern } ); - if ( schema != null ) { - return Collections.singletonList( Objects.requireNonNull( tableNames.get( new Object[]{ schema.id, 
tableNamePattern.pattern } ) ) ); - } - } else if ( schemaNamePattern != null ) { - LogicalNamespace schema = schemaNames.get( new Object[]{ schemaNamePattern.pattern } ); - if ( schema != null ) { - return new ArrayList<>( tableNames.prefixSubMap( new Object[]{ schema.id } ).values() ); - } - } else { - return new ArrayList<>( tableNames.values() ); - } - - return new ArrayList<>(); - } - - - /** - * {@inheritDoc} - */ - @Override - public LogicalTable getTable( long tableId ) { - try { - return Objects.requireNonNull( tables.get( tableId ) ); - } catch ( NullPointerException e ) { - throw new UnknownTableIdRuntimeException( tableId ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public LogicalTable getTable( long schemaId, String tableName ) throws UnknownTableException { - try { - LogicalNamespace schema = getNamespace( schemaId ); - if ( !schema.caseSensitive ) { - tableName = tableName.toLowerCase(); - } - return Objects.requireNonNull( tableNames.get( new Object[]{ schemaId, tableName } ) ); - } catch ( NullPointerException e ) { - throw new UnknownTableException( schemaId, tableName ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public LogicalTable getTableFromPartition( long partitionId ) { - return getTable( getPartition( partitionId ).tableId ); - } - - - /** - * {@inheritDoc} - */ - @Override - public LogicalTable getTable( String schemaName, String tableName ) throws UnknownTableException, UnknownSchemaException { - try { - LogicalNamespace schema = getNamespace( schemaName ); - if ( !schema.caseSensitive ) { - tableName = tableName.toLowerCase(); - } - - return Objects.requireNonNull( tableNames.get( new Object[]{ schema.id, tableName } ) ); - } catch ( NullPointerException e ) { - throw new UnknownTableException( schemaName, tableName ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public long addTable( String name, long namespaceId, int ownerId, EntityType entityType, boolean modifiable ) { - long id = entityIdBuilder.getAndIncrement(); - LogicalNamespace schema = getNamespace( namespaceId ); - if ( !schema.caseSensitive ) { - name = name.toLowerCase(); - } - - try { - //Technically every Table is partitioned. 
But tables classified as UNPARTITIONED only consist of one PartitionGroup and one large partition - List partitionGroupIds = new ArrayList<>(); - partitionGroupIds.add( addPartitionGroup( id, "full", namespaceId, PartitionType.NONE, 1, new ArrayList<>(), true ) ); - //get All(only one) PartitionGroups and then get all partitionIds for each PG and add them to completeList of partitionIds - CatalogPartitionGroup defaultUnpartitionedGroup = getPartitionGroup( partitionGroupIds.get( 0 ) ); - - PartitionProperty partitionProperty = PartitionProperty.builder() - .partitionType( PartitionType.NONE ) - .isPartitioned( false ) - .partitionGroupIds( ImmutableList.copyOf( partitionGroupIds ) ) - .partitionIds( ImmutableList.copyOf( defaultUnpartitionedGroup.partitionIds ) ) - .reliesOnPeriodicChecks( false ) - .build(); - - LogicalTable table = new LogicalTable( - id, - name, - ImmutableList.of(), - namespaceId, - ownerId, - entityType, - null, - ImmutableList.of(), - modifiable, - partitionProperty, - ImmutableList.of() ); - - updateEntityLogistics( name, namespaceId, id, schema, table ); - if ( schema.namespaceType != NamespaceType.DOCUMENT ) { - openTable = id; - } - - } catch ( GenericCatalogException e ) { - throw new RuntimeException( "Error when adding table " + name, e ); - } - return id; - } - - - /** - * {@inheritDoc} - */ - @Override - public long addView( String name, long namespaceId, int ownerId, EntityType entityType, boolean modifiable, AlgNode definition, AlgCollation algCollation, Map> underlyingTables, AlgDataType fieldList, String query, QueryLanguage language ) { - long id = entityIdBuilder.getAndIncrement(); - LogicalNamespace schema = getNamespace( namespaceId ); - - if ( !schema.caseSensitive ) { - name = name.toLowerCase(); - } - - PartitionProperty partitionProperty = PartitionProperty.builder() - .partitionType( PartitionType.NONE ) - .reliesOnPeriodicChecks( false ) - .partitionIds( ImmutableList.copyOf( new ArrayList<>() ) ) - .partitionGroupIds( ImmutableList.copyOf( new ArrayList<>() ) ) - .build(); - - if ( entityType != EntityType.VIEW ) { - // Should not happen, addViewTable is only called with EntityType.View - throw new RuntimeException( "addViewTable is only possible with EntityType = VIEW" ); - } - CatalogView viewTable = new CatalogView( - id, - name, - ImmutableList.of(), - namespaceId, - ownerId, - entityType, - query,//definition, - null, - ImmutableList.of(), - modifiable, - partitionProperty, - algCollation, - ImmutableList.of(), - underlyingTables, - language.getSerializedName() //fieldList - ); - addConnectedViews( underlyingTables, viewTable.id ); - updateEntityLogistics( name, namespaceId, id, schema, viewTable ); - nodeInfo.put( id, definition ); - - return id; - } - - - /** - * {@inheritDoc} - */ - @Override - public long addMaterializedView( String name, long namespaceId, int ownerId, EntityType entityType, boolean modifiable, AlgNode definition, AlgCollation algCollation, Map> underlyingTables, AlgDataType fieldList, MaterializedCriteria materializedCriteria, String query, QueryLanguage language, boolean ordered ) throws GenericCatalogException { - long id = entityIdBuilder.getAndIncrement(); - LogicalNamespace schema = getNamespace( namespaceId ); - - if ( !schema.caseSensitive ) { - name = name.toLowerCase(); - } - - // Technically every Table is partitioned. 
But tables classified as UNPARTITIONED only consist of one PartitionGroup and one large partition - List partitionGroupIds = new ArrayList<>(); - partitionGroupIds.add( addPartitionGroup( id, "full", namespaceId, PartitionType.NONE, 1, new ArrayList<>(), true ) ); - - // Get the single PartitionGroup and consequently retrieve all contained partitionIds to add them to completeList of partitionIds in the partitionProperty - CatalogPartitionGroup defaultUnpartitionedGroup = getPartitionGroup( partitionGroupIds.get( 0 ) ); - - PartitionProperty partitionProperty = PartitionProperty.builder() - .partitionType( PartitionType.NONE ) - .isPartitioned( false ) - .partitionGroupIds( ImmutableList.copyOf( partitionGroupIds ) ) - .partitionIds( ImmutableList.copyOf( defaultUnpartitionedGroup.partitionIds ) ) - .reliesOnPeriodicChecks( false ) - .build(); - - if ( entityType == EntityType.MATERIALIZED_VIEW ) { - Map> map = new HashMap<>(); - for ( Entry> e : underlyingTables.entrySet() ) { - if ( map.put( e.getKey(), ImmutableList.copyOf( e.getValue() ) ) != null ) { - throw new IllegalStateException( "Duplicate key" ); - } - } - CatalogMaterializedView materializedViewTable = new CatalogMaterializedView( - id, - name, - List.of(), - namespaceId, - ownerId, - entityType, - query, - null, - List.of(), - modifiable, - partitionProperty, - algCollation, - List.of(), - Map.copyOf( map ), - language.getSerializedName(), - materializedCriteria, - ordered - ); - addConnectedViews( underlyingTables, materializedViewTable.id ); - updateEntityLogistics( name, namespaceId, id, schema, materializedViewTable ); - - nodeInfo.put( id, definition ); - } else { - // Should not happen, addViewTable is only called with EntityType.View - throw new RuntimeException( "addMaterializedViewTable is only possible with EntityType = MATERIALIZED_VIEW" ); - } - return id; - } - - - /** - * Update all information after the addition of all kind of tables - */ - private void updateEntityLogistics( String name, long namespaceId, long id, LogicalNamespace schema, LogicalTable entity ) { - synchronized ( this ) { - tables.put( id, entity ); - tableChildren.put( id, ImmutableList.builder().build() ); - tableNames.put( new Object[]{ namespaceId, name }, entity ); - List children = new ArrayList<>( Objects.requireNonNull( schemaChildren.get( namespaceId ) ) ); - children.add( id ); - schemaChildren.replace( namespaceId, ImmutableList.copyOf( children ) ); - } - - listeners.firePropertyChange( "entity", null, entity ); - } - - - /** - * Add additional Information to Table, what Views are connected to table - */ - public void addConnectedViews( Map> underlyingTables, long viewId ) { - for ( long id : underlyingTables.keySet() ) { - LogicalTable old = getTable( id ); - List connectedViews; - connectedViews = new ArrayList<>( old.connectedViews ); - connectedViews.add( viewId ); - LogicalTable table = old.withConnectedViews( ImmutableList.copyOf( connectedViews ) ); - synchronized ( this ) { - tables.replace( id, table ); - assert table != null; - tableNames.replace( new Object[]{ table.namespaceId, old.name }, table ); - } - listeners.firePropertyChange( "table", old, table ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public void deleteViewDependencies( CatalogView catalogView ) { - for ( long id : catalogView.getUnderlyingTables().keySet() ) { - LogicalTable old = getTable( id ); - List connectedViews = old.connectedViews.stream().filter( e -> e != catalogView.id ).collect( Collectors.toList() ); - - LogicalTable table = 
old.withConnectedViews( ImmutableList.copyOf( connectedViews ) ); - - synchronized ( this ) { - tables.replace( id, table ); - assert table != null; - tableNames.replace( new Object[]{ table.namespaceId, old.name }, table ); - } - listeners.firePropertyChange( "table", old, table ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public boolean checkIfExistsEntity( long namespaceId, String entityName ) { - LogicalNamespace schema = getNamespace( namespaceId ); - if ( !schema.caseSensitive ) { - entityName = entityName.toLowerCase(); - } - return tableNames.containsKey( new Object[]{ namespaceId, entityName } ); - } - - - /** - * {@inheritDoc} - */ - @Override - public boolean checkIfExistsEntity( long tableId ) { - return tables.containsKey( tableId ); - } - - - /** - * {@inheritDoc} - */ - @Override - public void renameTable( long tableId, String name ) { - LogicalTable old = getTable( tableId ); - if ( !getNamespace( old.namespaceId ).caseSensitive ) { - name = name.toLowerCase(); - } - - LogicalTable table = old.withName( name ); - synchronized ( this ) { - tables.replace( tableId, table ); - tableNames.remove( new Object[]{ table.namespaceId, old.name } ); - tableNames.put( new Object[]{ table.namespaceId, name }, table ); - } - listeners.firePropertyChange( "table", old, table ); - } - - - /** - * {@inheritDoc} - */ - @Override - public void deleteTable( long tableId ) { - LogicalTable table = getTable( tableId ); - List children = new ArrayList<>( Objects.requireNonNull( schemaChildren.get( table.namespaceId ) ) ); - children.remove( tableId ); - synchronized ( this ) { - schemaChildren.replace( table.namespaceId, ImmutableList.copyOf( children ) ); - - if ( table.partitionProperty.reliesOnPeriodicChecks ) { - removeTableFromPeriodicProcessing( tableId ); - } - - if ( table.partitionProperty.isPartitioned ) { - for ( Long partitionGroupId : Objects.requireNonNull( table.partitionProperty.partitionGroupIds ) ) { - deletePartitionGroup( table.id, table.namespaceId, partitionGroupId ); - } - } - - for ( Long columnId : Objects.requireNonNull( tableChildren.get( tableId ) ) ) { - deleteColumn( columnId ); - } - - // Remove all placement containers along with all placements - table.dataPlacements.forEach( adapterId -> removeDataPlacement( adapterId, tableId ) ); - - tableChildren.remove( tableId ); - tables.remove( tableId ); - tableNames.remove( new Object[]{ table.namespaceId, table.name } ); - flagTableForDeletion( table.id, false ); - // primary key was deleted and open table has to be closed - if ( openTable != null && openTable == tableId ) { - openTable = null; - } - - } - listeners.firePropertyChange( "table", table, null ); - } - - - /** - * {@inheritDoc} - */ - @Override - public void setTableOwner( long tableId, int ownerId ) { - LogicalTable old = getTable( tableId ); - LogicalTable table = old.withOwnerId( ownerId ); - - synchronized ( this ) { - tables.replace( tableId, table ); - tableNames.replace( new Object[]{ table.namespaceId, table.name }, table ); - } - listeners.firePropertyChange( "table", old, table ); - } - - - /** - * {@inheritDoc} - */ - @Override - public void setPrimaryKey( long tableId, Long keyId ) { - LogicalTable old = getTable( tableId ); - - LogicalTable table = old.withPrimaryKey( keyId ); - - synchronized ( this ) { - tables.replace( tableId, table ); - tableNames.replace( new Object[]{ table.namespaceId, table.name }, table ); - - if ( keyId == null ) { - openTable = tableId; - } else { - primaryKeys.put( keyId, new CatalogPrimaryKey( 
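// A sketch of the rename path above: name lookups are keyed by (namespaceId, name),
// and case-insensitive namespaces normalize to lower case before touching the map.
// A HashMap with a record key stands in for the persistent Object[]-keyed map.
import java.util.HashMap;
import java.util.Map;

final class RenameTableSketch {

    record NameKey( long namespaceId, String name ) {}

    record Table( long id, long namespaceId, String name ) {
        Table withName( String newName ) { return new Table( id, namespaceId, newName ); }
    }

    private final Map<NameKey, Table> tableNames = new HashMap<>();

    void renameTable( Table old, String name, boolean caseSensitive ) {
        if ( !caseSensitive ) {
            name = name.toLowerCase();  // same normalization as in renameTable above
        }
        Table table = old.withName( name );
        synchronized ( this ) {         // drop the old key, publish the new one
            tableNames.remove( new NameKey( old.namespaceId(), old.name() ) );
            tableNames.put( new NameKey( table.namespaceId(), table.name() ), table );
        }
    }

    public static void main( String[] args ) {
        RenameTableSketch sketch = new RenameTableSketch();
        Table t = new Table( 1, 0, "old_name" );
        sketch.tableNames.put( new NameKey( 0, "old_name" ), t );
        sketch.renameTable( t, "NewName", false );
        System.out.println( sketch.tableNames.keySet() );  // [NameKey[namespaceId=0, name=newname]]
    }
}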
Objects.requireNonNull( keys.get( keyId ) ) ) ); - openTable = null; - } - } - listeners.firePropertyChange( "table", old, table ); - } - - - /** - * {@inheritDoc} - */ - @Override - public void addColumnPlacement( int adapterId, long columnId, PlacementType placementType, String physicalSchemaName, String physicalTableName, String physicalColumnName ) { - LogicalColumn column = Objects.requireNonNull( columns.get( columnId ) ); - CatalogAdapter store = Objects.requireNonNull( adapters.get( adapterId ) ); - - CatalogColumnPlacement columnPlacement = new CatalogColumnPlacement( - column.schemaId, - column.tableId, - columnId, - adapterId, - store.uniqueName, - placementType, - physicalSchemaName, - physicalColumnName, - physicalPositionBuilder.getAndIncrement() ); - - synchronized ( this ) { - columnPlacements.put( new Object[]{ adapterId, columnId }, columnPlacement ); - - // Adds this ColumnPlacement to existing DataPlacement container - addColumnsToDataPlacement( adapterId, column.tableId, List.of( columnId ) ); - } - listeners.firePropertyChange( "columnPlacement", null, columnPlacement ); - } - - - /** - * {@inheritDoc} - */ - @Override - public void updateMaterializedViewRefreshTime( long materializedViewId ) { - CatalogMaterializedView old = (CatalogMaterializedView) getTable( materializedViewId ); - - MaterializedCriteria materializedCriteria = old.getMaterializedCriteria(); - materializedCriteria.setLastUpdate( new Timestamp( System.currentTimeMillis() ) ); - - CatalogMaterializedView view = old.withMaterializedCriteria( materializedCriteria ); - - synchronized ( this ) { - tables.replace( materializedViewId, view ); - tableNames.replace( - new Object[]{ view.namespaceId, view.name }, - view ); - } - listeners.firePropertyChange( "table", old, view ); - } - - - /** - * {@inheritDoc} - */ - @Override - public LogicalCollection getCollection( long id ) { - if ( !collections.containsKey( id ) ) { - throw new UnknownTableIdRuntimeException( id ); - } - return Objects.requireNonNull( collections.get( id ) ); - } - - - /** - * {@inheritDoc} - */ - @Override - public List getCollections( long namespaceId, Pattern namePattern ) { - if ( schemas.containsKey( namespaceId ) ) { - LogicalNamespace schema = Objects.requireNonNull( schemas.get( namespaceId ) ); - if ( namePattern != null ) { - LogicalCollection collection = collectionNames.get( new Object[]{ namespaceId, namePattern.pattern } ); - if ( collection == null ) { - return new ArrayList<>(); - } - return Collections.singletonList( collection ); - } else { - return new ArrayList<>( collectionNames.prefixSubMap( new Object[]{ namespaceId } ).values() ); - } - } - return new ArrayList<>(); - } - - - /** - * {@inheritDoc} - */ - @Override - public long addCollection( Long id, String name, long schemaId, int currentUserId, EntityType entity, boolean modifiable ) { - long collectionId = entityIdBuilder.getAndIncrement(); - if ( id != null ) { - collectionId = id; - } - - LogicalNamespace namespace = getNamespace( schemaId ); - LogicalCollection collection = new LogicalCollection( - Catalog.defaultDatabaseId, - schemaId, - collectionId, - name, - List.of(), - EntityType.ENTITY, - null ); - - synchronized ( this ) { - collections.put( collectionId, collection ); - collectionNames.put( new Object[]{ schemaId, name }, collection ); - } - listeners.firePropertyChange( "collection", null, entity ); - - return collectionId; - } - - - /** - * {@inheritDoc} - */ - @Override - public long addCollectionLogistics( long schemaId, String name, List 
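// A sketch of the getCollections lookup above: with a name pattern the map is
// probed for an exact (namespaceId, name) hit; without one, every collection of the
// namespace is returned. A stream filter stands in for MapDB's prefixSubMap; the
// record names are illustrative.
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

final class CollectionLookupSketch {

    record Key( long namespaceId, String name ) {}

    record DocCollection( long id, String name ) {}

    static List<DocCollection> getCollections( Map<Key, DocCollection> byName, long namespaceId, String pattern ) {
        if ( pattern != null ) {
            DocCollection hit = byName.get( new Key( namespaceId, pattern ) );  // exact match only
            return hit == null ? List.of() : List.of( hit );
        }
        return byName.entrySet().stream()
                .filter( e -> e.getKey().namespaceId() == namespaceId )         // "prefix" scan
                .map( Map.Entry::getValue )
                .collect( Collectors.toList() );
    }

    public static void main( String[] args ) {
        Map<Key, DocCollection> byName = Map.of( new Key( 0, "users" ), new DocCollection( 1, "users" ) );
        System.out.println( getCollections( byName, 0, null ) );     // all collections of namespace 0
        System.out.println( getCollections( byName, 0, "users" ) );  // exact hit
    }
}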
stores, boolean onlyPlacement ) throws GenericCatalogException { - long tableId; - if ( onlyPlacement ) { - try { - tableId = getTable( schemaId, name ).id; - } catch ( UnknownTableException e ) { - throw new RuntimeException( e ); - } - } else { - tableId = addTable( name, schemaId, Catalog.defaultUserId, EntityType.ENTITY, true ); - } - - stores.forEach( store -> addDataPlacement( store.getAdapterId(), tableId ) ); - - long idId; - long dataId; - if ( !onlyPlacement ) { - idId = addColumn( "_id_", tableId, 0, PolyType.VARCHAR, null, 255, null, null, null, false, Collation.getDefaultCollation() ); - dataId = addColumn( "_data_", tableId, 1, PolyType.JSON, null, null, null, null, null, false, Collation.getDefaultCollation() ); - } else { - try { - idId = getColumn( tableId, "_id_" ).id; - dataId = getColumn( tableId, "_data_" ).id; - } catch ( UnknownColumnException e ) { - throw new RuntimeException( "Error while adding a document placement." ); - } - } - - for ( DataStore s : stores ) { - addColumnPlacement( - s.getAdapterId(), - idId, - PlacementType.AUTOMATIC, - null, - null, - null - ); - - addColumnPlacement( - s.getAdapterId(), - dataId, - PlacementType.AUTOMATIC, - null, - null, - null - ); - } - - addPrimaryKey( tableId, List.of( idId, dataId ) ); - - return tableId; - } - - - /** - * {@inheritDoc} - */ - @Override - public void deleteCollection( long id ) { - LogicalCollection collection = getCollection( id ); - - synchronized ( this ) { - collections.remove( collection.namespaceId ); - collectionNames.remove( new Object[]{ collection.databaseId, collection.namespaceId, collection.name } ); - } - listeners.firePropertyChange( "collection", null, null ); - } - - - /** - * {@inheritDoc} - */ - public List getGraphPlacements( int adapterId ) { - return graphPlacements.entrySet().stream().filter( e -> e.getKey()[1].equals( adapterId ) ).map( Entry::getValue ).collect( Collectors.toList() ); - } - - - /** - * {@inheritDoc} - */ - @Override - public void deleteColumnPlacement( int adapterId, long columnId, boolean columnOnly ) { - LogicalTable oldTable = getTable( getColumn( columnId ).tableId ); - - synchronized ( this ) { - if ( log.isDebugEnabled() ) { - log.debug( "Is flagged for deletion {}", isTableFlaggedForDeletion( oldTable.id ) ); - } - - if ( oldTable.partitionProperty.isPartitioned ) { - if ( !isTableFlaggedForDeletion( oldTable.id ) ) { - if ( !columnOnly ) { - if ( !validateDataPlacementsConstraints( oldTable.id, adapterId, Arrays.asList( columnId ), new ArrayList<>() ) ) { - throw new RuntimeException( "Partition Distribution failed" ); - } - } - } - } - - removeColumnsFromDataPlacement( adapterId, oldTable.id, Arrays.asList( columnId ) ); - columnPlacements.remove( new Object[]{ adapterId, columnId } ); - } - listeners.firePropertyChange( "columnPlacement", oldTable, null ); - } - - - /** - * {@inheritDoc} - */ - @Override - public CatalogColumnPlacement getColumnPlacement( int adapterId, long columnId ) { - try { - return Objects.requireNonNull( columnPlacements.get( new Object[]{ adapterId, columnId } ) ); - } catch ( NullPointerException e ) { - getAdapter( adapterId ); - getColumn( columnId ); - throw new UnknownColumnPlacementRuntimeException( adapterId, columnId ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public boolean checkIfExistsColumnPlacement( int adapterId, long columnId ) { - CatalogColumnPlacement placement = columnPlacements.get( new Object[]{ adapterId, columnId } ); - return placement != null; - } - - - /** - * {@inheritDoc} - */ - 
@Override - public List getColumnPlacementsOnAdapter( int adapterId ) { - return new ArrayList<>( columnPlacements.prefixSubMap( new Object[]{ adapterId } ).values() ); - } - - - /** - * {@inheritDoc} - */ - @Override - public List getColumnPlacementsOnAdapterPerTable( int adapterId, long tableId ) { - final Comparator columnPlacementComparator = Comparator.comparingInt( p -> getColumn( p.columnId ).position ); - return getColumnPlacementsOnAdapter( adapterId ) - .stream() - .filter( p -> p.tableId == tableId ) - .sorted( columnPlacementComparator ) - .collect( Collectors.toList() ); - } - - - /** - * {@inheritDoc} - */ - @Override - public List getColumnPlacementsByColumn( long columnId ) { - return columnPlacements.values() - .stream() - .filter( p -> p.columnId == columnId ) - .collect( Collectors.toList() ); - } - - - /** - * {@inheritDoc} - */ - @Override - public ImmutableMap> getColumnPlacementsByAdapter( long tableId ) { - LogicalTable table = getTable( tableId ); - Map> columnPlacementsByAdapter = new HashMap<>(); - - table.dataPlacements.forEach( adapterId -> columnPlacementsByAdapter.put( - adapterId, - ImmutableList.copyOf( - getDataPlacement( adapterId, tableId ).columnPlacementsOnAdapter ) - ) - ); - - return ImmutableMap.copyOf( columnPlacementsByAdapter ); - } - - - /** - * {@inheritDoc} - */ - @Override - public List getPhysicalsOnAdapter( long tableId ) { - LogicalTable table = getTable( tableId ); - Map> partitionPlacementsByAdapter = new HashMap<>(); - - table.dataPlacements.forEach( adapterId -> partitionPlacementsByAdapter.put( - adapterId, - ImmutableList.copyOf( - getDataPlacement( adapterId, tableId ).getAllPartitionIds() ) - ) - ); - - return ImmutableMap.copyOf( partitionPlacementsByAdapter ); - } - - - /** - * {@inheritDoc} - */ - @Override - public long getPartitionGroupByPartition( long partitionId ) { - return getPartition( partitionId ).partitionGroupId; - } - - - /** - * {@inheritDoc} - */ - @Override - public List getColumnPlacement( long columnId ) { - return columnPlacements.values() - .stream() - .filter( p -> p.columnId == columnId ) - .collect( Collectors.toList() ); - } - - - /** - * {@inheritDoc} - */ - @Override - public List getColumnPlacementsOnAdapterAndSchema( int adapterId, long schemaId ) { - try { - return getColumnPlacementsOnAdapter( adapterId ).stream().filter( p -> Objects.requireNonNull( columns.get( p.columnId ) ).schemaId == schemaId ).collect( Collectors.toList() ); - } catch ( NullPointerException e ) { - getAdapter( adapterId ); - getNamespace( schemaId ); - return new ArrayList<>(); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public void updateColumnPlacementType( int adapterId, long columnId, PlacementType placementType ) { - try { - CatalogColumnPlacement old = Objects.requireNonNull( columnPlacements.get( new Object[]{ adapterId, columnId } ) ); - CatalogColumnPlacement placement = new CatalogColumnPlacement( - old.namespaceId, - old.tableId, - old.columnId, - old.adapterId, - old.adapterUniqueName, - placementType, - old.physicalSchemaName, - old.physicalColumnName, - old.physicalPosition ); - synchronized ( this ) { - columnPlacements.replace( new Object[]{ adapterId, columnId }, placement ); - } - listeners.firePropertyChange( "columnPlacement", old, placement ); - } catch ( NullPointerException e ) { - getAdapter( adapterId ); - getColumn( columnId ); - throw new UnknownColumnPlacementRuntimeException( adapterId, columnId ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public void 
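// A sketch of getColumnPlacementsOnAdapterPerTable above: an adapter's placements
// are narrowed to one table and sorted by the logical column position, so callers
// see columns in declaration order. Records and the position map are stand-ins.
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

final class PlacementOrderSketch {

    record Placement( long tableId, long columnId ) {}

    static List<Placement> onAdapterPerTable( List<Placement> onAdapter, long tableId,
                                              Map<Long, Integer> positionOfColumn ) {
        Comparator<Placement> byPosition =
                Comparator.comparingInt( p -> positionOfColumn.get( p.columnId() ) );
        return onAdapter.stream()
                .filter( p -> p.tableId() == tableId )  // only this table's placements ...
                .sorted( byPosition )                   // ... ordered by column position
                .collect( Collectors.toList() );
    }

    public static void main( String[] args ) {
        List<Placement> placements = List.of( new Placement( 1, 11 ), new Placement( 1, 10 ) );
        System.out.println( onAdapterPerTable( placements, 1, Map.of( 10L, 0, 11L, 1 ) ) );
    }
}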
updateColumnPlacementPhysicalPosition( int adapterId, long columnId, long position ) { - try { - CatalogColumnPlacement old = Objects.requireNonNull( columnPlacements.get( new Object[]{ adapterId, columnId } ) ); - CatalogColumnPlacement placement = new CatalogColumnPlacement( - old.namespaceId, - old.tableId, - old.columnId, - old.adapterId, - old.adapterUniqueName, - old.placementType, - old.physicalSchemaName, - old.physicalColumnName, - position ); - synchronized ( this ) { - columnPlacements.replace( new Object[]{ adapterId, columnId }, placement ); - } - listeners.firePropertyChange( "columnPlacement", old, placement ); - } catch ( NullPointerException e ) { - getAdapter( adapterId ); - getColumn( columnId ); - throw new UnknownColumnPlacementRuntimeException( adapterId, columnId ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public void updateColumnPlacementPhysicalPosition( int adapterId, long columnId ) { - try { - CatalogColumnPlacement old = Objects.requireNonNull( columnPlacements.get( new Object[]{ adapterId, columnId } ) ); - CatalogColumnPlacement placement = new CatalogColumnPlacement( - old.namespaceId, - old.tableId, - old.columnId, - old.adapterId, - old.adapterUniqueName, - old.placementType, - old.physicalSchemaName, - old.physicalColumnName, - physicalPositionBuilder.getAndIncrement() ); - synchronized ( this ) { - columnPlacements.replace( new Object[]{ adapterId, columnId }, placement ); - } - listeners.firePropertyChange( "columnPlacement", old, placement ); - } catch ( NullPointerException e ) { - getAdapter( adapterId ); - getColumn( columnId ); - throw new UnknownColumnPlacementRuntimeException( adapterId, columnId ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public void updateColumnPlacementPhysicalNames( int adapterId, long columnId, String physicalSchemaName, String physicalColumnName, boolean updatePhysicalColumnPosition ) { - try { - CatalogColumnPlacement old = Objects.requireNonNull( columnPlacements.get( new Object[]{ adapterId, columnId } ) ); - CatalogColumnPlacement placement = new CatalogColumnPlacement( - old.namespaceId, - old.tableId, - old.columnId, - old.adapterId, - old.adapterUniqueName, - old.placementType, - physicalSchemaName, - physicalColumnName, - updatePhysicalColumnPosition ? 
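// The overloads of updateColumnPlacementPhysicalPosition above either take an
// explicit position or draw a fresh one from a shared counter. A process-wide
// AtomicLong, as sketched here, keeps newly assigned physical positions unique and
// monotonically increasing without extra locking.
import java.util.concurrent.atomic.AtomicLong;

final class PhysicalPositionSketch {

    // Stand-in for the catalog's physicalPositionBuilder
    private static final AtomicLong PHYSICAL_POSITION_BUILDER = new AtomicLong( 1 );

    static long nextPhysicalPosition() {
        return PHYSICAL_POSITION_BUILDER.getAndIncrement();
    }

    public static void main( String[] args ) {
        System.out.println( nextPhysicalPosition() );  // 1
        System.out.println( nextPhysicalPosition() );  // 2
    }
}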
physicalPositionBuilder.getAndIncrement() : old.physicalPosition ); - synchronized ( this ) { - columnPlacements.replace( new Object[]{ adapterId, columnId }, placement ); - } - listeners.firePropertyChange( "columnPlacement", old, placement ); - } catch ( NullPointerException e ) { - getAdapter( adapterId ); - getColumn( columnId ); - throw new UnknownColumnPlacementRuntimeException( adapterId, columnId ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public List getColumns( long tableId ) { - try { - LogicalTable table = Objects.requireNonNull( tables.get( tableId ) ); - return columnNames.prefixSubMap( new Object[]{ table.namespaceId, table.id } ).values().stream().sorted( columnComparator ).collect( Collectors.toList() ); - } catch ( NullPointerException e ) { - return new ArrayList<>(); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public List getColumns( Pattern schemaNamePattern, Pattern tableNamePattern, Pattern columnNamePattern ) { - List catalogEntities = getTables( schemaNamePattern, tableNamePattern ); - - if ( catalogEntities.size() > 0 ) { - Stream catalogColumns = catalogEntities.stream().filter( t -> tableChildren.containsKey( t.id ) ).flatMap( t -> Objects.requireNonNull( tableChildren.get( t.id ) ).stream() ).map( columns::get ); - - if ( columnNamePattern != null ) { - catalogColumns = catalogColumns.filter( c -> c.name.matches( columnNamePattern.toRegex() ) ); - } - return catalogColumns.collect( Collectors.toList() ); - } - - return new ArrayList<>(); - } - - - /** - * {@inheritDoc} - */ - @Override - public LogicalColumn getColumn( long columnId ) { - try { - return Objects.requireNonNull( columns.get( columnId ) ); - } catch ( NullPointerException e ) { - throw new UnknownColumnIdRuntimeException( columnId ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public LogicalColumn getColumn( long tableId, String columnName ) throws UnknownColumnException { - try { - LogicalTable table = getTable( tableId ); - if ( !getNamespace( table.namespaceId ).caseSensitive ) { - columnName = columnName.toLowerCase(); - } - return Objects.requireNonNull( columnNames.get( new Object[]{ table.namespaceId, table.id, columnName } ) ); - } catch ( NullPointerException e ) { - throw new UnknownColumnException( tableId, columnName ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public LogicalColumn getColumn( String schemaName, String tableName, String columnName ) throws UnknownColumnException, UnknownSchemaException, UnknownTableException { - try { - LogicalTable table = getTable( schemaName, tableName ); - return Objects.requireNonNull( columnNames.get( new Object[]{ table.namespaceId, table.id, columnName } ) ); - } catch ( NullPointerException e ) { - throw new UnknownColumnException( schemaName, tableName, columnName ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public long addColumn( String name, long tableId, int position, PolyType type, PolyType collectionsType, Integer length, Integer scale, Integer dimension, Integer cardinality, boolean nullable, Collation collation ) { - LogicalTable table = getTable( tableId ); - - if ( !getNamespace( table.namespaceId ).caseSensitive ) { - name = name.toLowerCase(); - } - - if ( type.getFamily() == PolyTypeFamily.CHARACTER && collation == null ) { - throw new RuntimeException( "Collation is not allowed to be null for char types." ); - } - if ( scale != null && length != null ) { - if ( scale > length ) { - throw new RuntimeException( "Invalid scale! Scale can not be larger than length." 
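// A sketch of the addColumn preconditions above: character-family columns must
// carry a collation, and a declared scale may not exceed the declared length.
// Enum, parameters, and messages are simplified stand-ins.
final class ColumnValidationSketch {

    enum Family { CHARACTER, NUMERIC, OTHER }

    static void validate( Family family, String collation, Integer length, Integer scale ) {
        if ( family == Family.CHARACTER && collation == null ) {
            throw new RuntimeException( "Collation is not allowed to be null for char types." );
        }
        if ( scale != null && length != null && scale > length ) {
            throw new RuntimeException( "Invalid scale! Scale can not be larger than length." );
        }
    }

    public static void main( String[] args ) {
        validate( Family.NUMERIC, null, 10, 2 );  // passes: scale <= length, no collation needed
        try {
            validate( Family.CHARACTER, null, 255, null );
        } catch ( RuntimeException e ) {
            System.out.println( e.getMessage() );  // collation missing for a char column
        }
    }
}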
); - } - } - - long id = columnIdBuilder.getAndIncrement(); - LogicalColumn column = new LogicalColumn( - id, - name, - tableId, - table.namespaceId, - position, - type, - collectionsType, - length, - scale, - dimension, - cardinality, - nullable, - collation, - null ); - - synchronized ( this ) { - columns.put( id, column ); - columnNames.put( new Object[]{ table.namespaceId, table.id, name }, column ); - List children = new ArrayList<>( Objects.requireNonNull( tableChildren.get( tableId ) ) ); - children.add( id ); - tableChildren.replace( tableId, ImmutableList.copyOf( children ) ); - - List columnIds = new ArrayList<>( table.fieldIds ); - columnIds.add( id ); - - LogicalTable updatedTable; - - updatedTable = table.withConnectedViews( ImmutableList.copyOf( columnIds ) ); - tables.replace( tableId, updatedTable ); - tableNames.replace( new Object[]{ updatedTable.namespaceId, updatedTable.name }, updatedTable ); - } - listeners.firePropertyChange( "column", null, column ); - return id; - } - - - /** - * {@inheritDoc} - */ - @Override - public void renameColumn( long columnId, String name ) { - LogicalColumn old = getColumn( columnId ); - - if ( !getNamespace( old.schemaId ).caseSensitive ) { - name = name.toLowerCase(); - } - - LogicalColumn column = new LogicalColumn( old.id, name, old.tableId, old.schemaId, old.position, old.type, old.collectionsType, old.length, old.scale, old.dimension, old.cardinality, old.nullable, old.collation, old.defaultValue ); - synchronized ( this ) { - columns.replace( columnId, column ); - columnNames.remove( new Object[]{ column.schemaId, column.tableId, old.name } ); - columnNames.put( new Object[]{ column.schemaId, column.tableId, name }, column ); - } - listeners.firePropertyChange( "column", old, column ); - } - - - /** - * {@inheritDoc} - */ - @Override - public void setColumnPosition( long columnId, int position ) { - LogicalColumn old = getColumn( columnId ); - LogicalColumn column = new LogicalColumn( old.id, old.name, old.tableId, old.schemaId, position, old.type, old.collectionsType, old.length, old.scale, old.dimension, old.cardinality, old.nullable, old.collation, old.defaultValue ); - synchronized ( this ) { - columns.replace( columnId, column ); - columnNames.replace( new Object[]{ column.schemaId, column.tableId, column.name }, column ); - } - listeners.firePropertyChange( "column", old, column ); - } - - - /** - * {@inheritDoc} - */ - @Override - public void setColumnType( long columnId, PolyType type, PolyType collectionsType, Integer length, Integer scale, Integer dimension, Integer cardinality ) throws GenericCatalogException { - try { - LogicalColumn old = Objects.requireNonNull( columns.get( columnId ) ); - - if ( scale != null && scale > length ) { - throw new RuntimeException( "Invalid scale! Scale can not be larger than length." ); - } - - // Check that the column is not part of a key - for ( CatalogKey key : getKeys() ) { - if ( key.columnIds.contains( columnId ) ) { - String name = "UNKNOWN"; - if ( key instanceof CatalogPrimaryKey ) { - name = "PRIMARY KEY"; - } else if ( key instanceof CatalogForeignKey ) { - name = ((CatalogForeignKey) key).name; - } else { - List constraints = getConstraints( key ); - if ( constraints.size() > 0 ) { - name = constraints.get( 0 ).name; - } - } - throw new GenericCatalogException( "The column \"" + old.name + "\" is part of the key \"" + name + "\". Unable to change the type of a column that is part of a key." ); - } - } - - Collation collation = type.getFamily() == PolyTypeFamily.CHARACTER - ? 
Collation.getById( RuntimeConfig.DEFAULT_COLLATION.getInteger() ) - : null; - LogicalColumn column = new LogicalColumn( old.id, old.name, old.tableId, old.schemaId, old.position, type, collectionsType, length, scale, dimension, cardinality, old.nullable, collation, old.defaultValue ); - synchronized ( this ) { - columns.replace( columnId, column ); - columnNames.replace( new Object[]{ old.schemaId, old.tableId, old.name }, column ); - } - listeners.firePropertyChange( "column", old, column ); - } catch ( NullPointerException e ) { - throw new GenericCatalogException( e ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public void setNullable( long columnId, boolean nullable ) throws GenericCatalogException { - try { - LogicalColumn old = Objects.requireNonNull( columns.get( columnId ) ); - if ( nullable ) { - // Check if the column is part of a primary key (pk's are not allowed to contain null values) - LogicalTable table = Objects.requireNonNull( tables.get( old.tableId ) ); - if ( table.primaryKey != null ) { - CatalogKey catalogKey = getPrimaryKey( table.primaryKey ); - if ( catalogKey.columnIds.contains( columnId ) ) { - throw new GenericCatalogException( "Unable to allow null values in a column that is part of the primary key." ); - } - } - } else { - // TODO: Check that the column does not contain any null values - getColumnPlacement( columnId ); - } - LogicalColumn column = new LogicalColumn( - old.id, - old.name, - old.tableId, - old.schemaId, - old.position, - old.type, - old.collectionsType, - old.length, - old.scale, - old.dimension, - old.cardinality, - nullable, - old.collation, - old.defaultValue ); - synchronized ( this ) { - columns.replace( columnId, column ); - columnNames.replace( new Object[]{ old.schemaId, old.tableId, old.name }, column ); - } - listeners.firePropertyChange( "column", old, column ); - } catch ( NullPointerException e ) { - throw new GenericCatalogException( e ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public void setCollation( long columnId, Collation collation ) { - LogicalColumn old = getColumn( columnId ); - - if ( old.type.getFamily() != PolyTypeFamily.CHARACTER ) { - throw new RuntimeException( "Illegal attempt to set collation for a non-char column!" ); - } - LogicalColumn column = new LogicalColumn( - old.id, - old.name, - old.tableId, - old.schemaId, - old.position, - old.type, - old.collectionsType, - old.length, - old.scale, - old.dimension, - old.cardinality, - old.nullable, - collation, - old.defaultValue ); - synchronized ( this ) { - columns.replace( columnId, column ); - columnNames.replace( new Object[]{ old.schemaId, old.tableId, old.name }, column ); - } - listeners.firePropertyChange( "column", old, column ); - } - - - /** - * {@inheritDoc} - */ - @Override - public boolean checkIfExistsColumn( long tableId, String columnName ) { - LogicalTable table = getTable( tableId ); - return columnNames.containsKey( new Object[]{ table.namespaceId, tableId, columnName } ); - } - - - /** - * {@inheritDoc} - */ - @Override - public void deleteColumn( long columnId ) { - //TODO also delete keys with that column? 
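// A sketch of the setNullable guard above: a column may only become nullable when
// it is not part of the table's primary key, because primary keys must not contain
// null values. The record is an illustrative stand-in for CatalogPrimaryKey.
import java.util.List;

final class NullableGuardSketch {

    record PrimaryKey( List<Long> columnIds ) {}

    static void checkNullable( Long primaryKeyId, PrimaryKey primaryKey, long columnId ) {
        if ( primaryKeyId != null && primaryKey.columnIds().contains( columnId ) ) {
            throw new IllegalStateException(
                    "Unable to allow null values in a column that is part of the primary key." );
        }
    }

    public static void main( String[] args ) {
        checkNullable( null, new PrimaryKey( List.of( 1L ) ), 1L );  // no PK set: allowed
        checkNullable( 7L, new PrimaryKey( List.of( 1L ) ), 2L );    // not a PK column: allowed
    }
}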
- LogicalColumn column = getColumn( columnId ); - - List children = new ArrayList<>( Objects.requireNonNull( tableChildren.get( column.tableId ) ) ); - children.remove( columnId ); - - LogicalTable old = getTable( column.tableId ); - List columnIds = new ArrayList<>( old.fieldIds ); - columnIds.remove( columnId ); - - LogicalTable table = old.withFieldIds( ImmutableList.copyOf( columnIds ) ); - - synchronized ( this ) { - columnNames.remove( new Object[]{ column.schemaId, column.tableId, column.name } ); - tableChildren.replace( column.tableId, ImmutableList.copyOf( children ) ); - - deleteDefaultValue( columnId ); - for ( CatalogColumnPlacement p : getColumnPlacement( columnId ) ) { - deleteColumnPlacement( p.adapterId, p.columnId, false ); - } - tables.replace( column.tableId, table ); - tableNames.replace( new Object[]{ table.namespaceId, table.name }, table ); - - columns.remove( columnId ); - } - listeners.firePropertyChange( "column", column, null ); - } - - - /** - * {@inheritDoc} - * - * TODO: String is only a temporary solution - */ - @Override - public void setDefaultValue( long columnId, PolyType type, String defaultValue ) { - LogicalColumn old = getColumn( columnId ); - LogicalColumn column = new LogicalColumn( - old.id, - old.name, - old.tableId, - old.schemaId, - old.position, - old.type, - old.collectionsType, - old.length, - old.scale, - old.dimension, - old.cardinality, - old.nullable, - old.collation, - new CatalogDefaultValue( columnId, type, defaultValue, "defaultValue" ) ); - synchronized ( this ) { - columns.replace( columnId, column ); - columnNames.replace( new Object[]{ column.schemaId, column.tableId, column.name }, column ); - } - listeners.firePropertyChange( "column", old, column ); - } - - - /** - * {@inheritDoc} - */ - @Override - public void deleteDefaultValue( long columnId ) { - LogicalColumn old = getColumn( columnId ); - LogicalColumn column = new LogicalColumn( - old.id, - old.name, - old.tableId, - old.schemaId, - old.position, - old.type, - old.collectionsType, - old.length, - old.scale, - old.dimension, - old.cardinality, - old.nullable, - old.collation, - null ); - if ( old.defaultValue != null ) { - synchronized ( this ) { - columns.replace( columnId, column ); - columnNames.replace( new Object[]{ old.schemaId, old.tableId, old.name }, column ); - } - listeners.firePropertyChange( "column", old, column ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public CatalogPrimaryKey getPrimaryKey( long key ) { - try { - return Objects.requireNonNull( primaryKeys.get( key ) ); - } catch ( NullPointerException e ) { - throw new UnknownKeyIdRuntimeException( key ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public boolean isPrimaryKey( long key ) { - try { - Long primary = getTable( Objects.requireNonNull( keys.get( key ) ).tableId ).primaryKey; - return primary != null && primary == key; - } catch ( NullPointerException e ) { - throw new UnknownKeyIdRuntimeException( key ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public void addPrimaryKey( long tableId, List columnIds ) throws GenericCatalogException { - try { - // Check if the columns are set 'not null' - List nullableColumns = columnIds.stream().map( columns::get ).filter( Objects::nonNull ).filter( c -> c.nullable ).collect( Collectors.toList() ); - for ( LogicalColumn col : nullableColumns ) { - throw new GenericCatalogException( "Primary key is not allowed to contain null values but the column '" + col.name + "' is declared nullable." 
); - } - - // TODO: Check if the current values are unique - - // Check if there is already a primary key defined for this table and if so, delete it. - LogicalTable table = getTable( tableId ); - - if ( table.primaryKey != null ) { - // CatalogCombinedKey combinedKey = getCombinedKey( table.primaryKey ); - if ( getKeyUniqueCount( table.primaryKey ) == 1 && isForeignKey( tableId ) ) { - // This primary key is the only constraint for the uniqueness of this key. - throw new GenericCatalogException( "This key is referenced by at least one foreign key which requires this key to be unique. To drop this primary key, first drop the foreign keys or create a unique constraint." ); - } - synchronized ( this ) { - setPrimaryKey( tableId, null ); - deleteKeyIfNoLongerUsed( table.primaryKey ); - } - } - long keyId = getOrAddKey( tableId, columnIds, EnforcementTime.ON_QUERY ); - setPrimaryKey( tableId, keyId ); - } catch ( NullPointerException e ) { - throw new GenericCatalogException( e ); - } - } - - - private int getKeyUniqueCount( long keyId ) { - CatalogKey key = keys.get( keyId ); - int count = 0; - if ( isPrimaryKey( keyId ) ) { - count++; - } - - for ( CatalogConstraint constraint : getConstraints( key ) ) { - if ( constraint.type == ConstraintType.UNIQUE ) { - count++; - } - } - - for ( CatalogIndex index : getIndexes( key ) ) { - if ( index.unique ) { - count++; - } - } - - return count; - } - - - /** - * {@inheritDoc} - */ - @Override - public List getForeignKeys( long tableId ) { - return foreignKeys.values().stream().filter( f -> f.tableId == tableId ).collect( Collectors.toList() ); - } - - - /** - * {@inheritDoc} - */ - @Override - public List getExportedKeys( long tableId ) { - return foreignKeys.values().stream().filter( k -> k.referencedKeyTableId == tableId ).collect( Collectors.toList() ); - } - - - /** - * {@inheritDoc} - */ - @Override - public List getConstraints( long tableId ) { - List keysOfTable = keys.values().stream().filter( k -> k.tableId == tableId ).map( k -> k.id ).collect( Collectors.toList() ); - return constraints.values().stream().filter( c -> keysOfTable.contains( c.keyId ) ).collect( Collectors.toList() ); - } - - - /** - * {@inheritDoc} - */ - @Override - public CatalogConstraint getConstraint( long tableId, String constraintName ) throws UnknownConstraintException { - try { - return constraints.values().stream() - .filter( c -> c.key.tableId == tableId && c.name.equals( constraintName ) ) - .findFirst() - .orElseThrow( NullPointerException::new ); - } catch ( NullPointerException e ) { - throw new UnknownConstraintException( tableId, constraintName ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public CatalogForeignKey getForeignKey( long tableId, String foreignKeyName ) throws UnknownForeignKeyException { - try { - return foreignKeys.values().stream() - .filter( f -> f.tableId == tableId && f.name.equals( foreignKeyName ) ) - .findFirst() - .orElseThrow( NullPointerException::new ); - } catch ( NullPointerException e ) { - throw new UnknownForeignKeyException( tableId, foreignKeyName ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public void addForeignKey( long tableId, List columnIds, long referencesTableId, List referencesIds, String constraintName, ForeignKeyOption onUpdate, ForeignKeyOption onDelete ) throws GenericCatalogException { - try { - LogicalTable table = Objects.requireNonNull( tables.get( tableId ) ); - List childKeys = keys.values().stream().filter( k -> k.tableId == referencesTableId ).collect( Collectors.toList() ); - - 
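// getKeyUniqueCount above tallies every guarantee of uniqueness on a key: being the
// primary key, UNIQUE constraints, and unique indexes. Dropping one such guarantee
// is only safe while the count stays above the number needed by referencing foreign
// keys. Records here are illustrative stand-ins.
import java.util.List;

final class UniqueCountSketch {

    record Constraint( boolean unique ) {}

    record Index( boolean unique ) {}

    static int uniqueCount( boolean isPrimaryKey, List<Constraint> constraints, List<Index> indexes ) {
        int count = isPrimaryKey ? 1 : 0;
        count += (int) constraints.stream().filter( Constraint::unique ).count();
        count += (int) indexes.stream().filter( Index::unique ).count();
        return count;
    }

    public static void main( String[] args ) {
        int count = uniqueCount( true, List.of( new Constraint( true ) ), List.of() );
        System.out.println( count );  // 2: primary key + one unique constraint
    }
}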
for ( CatalogKey refKey : childKeys ) { - if ( refKey.columnIds.size() == referencesIds.size() && refKey.columnIds.containsAll( referencesIds ) && referencesIds.containsAll( refKey.columnIds ) ) { - - // CatalogKey combinedKey = getCombinedKey( refKey.id ); - - int i = 0; - for ( long referencedColumnId : refKey.columnIds ) { - LogicalColumn referencingColumn = getColumn( columnIds.get( i++ ) ); - LogicalColumn referencedColumn = getColumn( referencedColumnId ); - if ( referencedColumn.type != referencingColumn.type ) { - throw new GenericCatalogException( "The data type of the referenced columns does not match the data type of the referencing column: " + referencingColumn.type.name() + " != " + referencedColumn.type ); - } - } - // TODO same keys for key and foreign key - if ( getKeyUniqueCount( refKey.id ) > 0 ) { - long keyId = getOrAddKey( tableId, columnIds, EnforcementTime.ON_COMMIT ); - CatalogForeignKey key = new CatalogForeignKey( - keyId, - constraintName, - tableId, - table.namespaceId, - refKey.id, - refKey.tableId, - refKey.schemaId, - columnIds, - referencesIds, - onUpdate, - onDelete ); - synchronized ( this ) { - foreignKeys.put( keyId, key ); - } - listeners.firePropertyChange( "foreignKey", null, key ); - return; - } - } - } - throw new GenericCatalogException( "There is no key over the referenced columns." ); - } catch ( NullPointerException e ) { - throw new GenericCatalogException( e ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public void addUniqueConstraint( long tableId, String constraintName, List columnIds ) throws GenericCatalogException { - try { - long keyId = getOrAddKey( tableId, columnIds, EnforcementTime.ON_QUERY ); - // Check if there is already a unique constraint - List catalogConstraints = constraints.values().stream() - .filter( c -> c.keyId == keyId && c.type == ConstraintType.UNIQUE ) - .collect( Collectors.toList() ); - if ( catalogConstraints.size() > 0 ) { - throw new GenericCatalogException( "There is already a unique constraint!" 
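// A sketch of the referential check inside addForeignKey above: the referenced key
// must cover exactly the referenced columns, and each referencing column must match
// the type of its referenced counterpart pairwise, in order. Stand-in records only.
import java.util.List;

final class ForeignKeyTypeCheckSketch {

    record Column( long id, String type ) {}

    static void checkTypes( List<Column> referencing, List<Column> referenced ) {
        if ( referencing.size() != referenced.size() ) {
            throw new IllegalArgumentException( "Referencing and referenced column lists differ in length." );
        }
        for ( int i = 0; i < referenced.size(); i++ ) {
            if ( !referenced.get( i ).type().equals( referencing.get( i ).type() ) ) {
                throw new IllegalArgumentException(
                        "The data type of the referenced column does not match the data type of the referencing column: "
                                + referencing.get( i ).type() + " != " + referenced.get( i ).type() );
            }
        }
    }

    public static void main( String[] args ) {
        checkTypes( List.of( new Column( 1, "BIGINT" ) ), List.of( new Column( 9, "BIGINT" ) ) );  // ok
    }
}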
); - } - long id = constraintIdBuilder.getAndIncrement(); - synchronized ( this ) { - constraints.put( id, new CatalogConstraint( id, keyId, ConstraintType.UNIQUE, constraintName, Objects.requireNonNull( keys.get( keyId ) ) ) ); - } - listeners.firePropertyChange( "constraint", null, keyId ); - } catch ( NullPointerException e ) { - throw new GenericCatalogException( e ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public List getIndexes( long tableId, boolean onlyUnique ) { - if ( !onlyUnique ) { - return indexes.values().stream().filter( i -> i.key.tableId == tableId ).collect( Collectors.toList() ); - } else { - return indexes.values().stream().filter( i -> i.key.tableId == tableId && i.unique ).collect( Collectors.toList() ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public CatalogIndex getIndex( long tableId, String indexName ) throws UnknownIndexException { - try { - return indexes.values().stream() - .filter( i -> i.key.tableId == tableId && i.name.equals( indexName ) ) - .findFirst() - .orElseThrow( NullPointerException::new ); - } catch ( NullPointerException e ) { - throw new UnknownIndexException( tableId, indexName ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public boolean checkIfExistsIndex( long tableId, String indexName ) { - try { - LogicalTable table = getTable( tableId ); - getIndex( table.id, indexName ); - return true; - } catch ( UnknownIndexException e ) { - return false; - } - } - - - /** - * {@inheritDoc} - */ - @Override - public CatalogIndex getIndex( long indexId ) { - try { - return Objects.requireNonNull( indexes.get( indexId ) ); - } catch ( NullPointerException e ) { - throw new UnknownIndexIdRuntimeException( indexId ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public List getIndexes() { - return new ArrayList<>( indexes.values() ); - } - - - /** - * {@inheritDoc} - */ - @Override - public long addIndex( long tableId, List columnIds, boolean unique, String method, String methodDisplayName, int location, IndexType type, String indexName ) throws GenericCatalogException { - long keyId = getOrAddKey( tableId, columnIds, EnforcementTime.ON_QUERY ); - if ( unique ) { - // TODO: Check if the current values are unique - } - long id = indexIdBuilder.getAndIncrement(); - synchronized ( this ) { - indexes.put( id, new CatalogIndex( - id, - indexName, - unique, - method, - methodDisplayName, - type, - location, - keyId, - Objects.requireNonNull( keys.get( keyId ) ), - null ) ); - } - listeners.firePropertyChange( "index", null, keyId ); - return id; - } - - - /** - * {@inheritDoc} - */ - @Override - public void setIndexPhysicalName( long indexId, String physicalName ) { - try { - CatalogIndex oldEntry = Objects.requireNonNull( indexes.get( indexId ) ); - CatalogIndex newEntry = new CatalogIndex( - oldEntry.id, - oldEntry.name, - oldEntry.unique, - oldEntry.method, - oldEntry.methodDisplayName, - oldEntry.type, - oldEntry.location, - oldEntry.keyId, - oldEntry.key, - physicalName ); - synchronized ( this ) { - indexes.replace( indexId, newEntry ); - } - listeners.firePropertyChange( "index", oldEntry, newEntry ); - } catch ( NullPointerException e ) { - throw new UnknownIndexIdRuntimeException( indexId ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public void deleteIndex( long indexId ) { - CatalogIndex index = getIndex( indexId ); - if ( index.unique ) { - if ( getKeyUniqueCount( index.keyId ) == 1 && isForeignKey( index.keyId ) ) { - // This unique index is the only constraint for the uniqueness of this 
key. - //throw new GenericCatalogException( "This key is referenced by at least one foreign key which requires this key to be unique. To delete this index, first add a unique constraint." ); - } - } - synchronized ( this ) { - indexes.remove( indexId ); - } - listeners.firePropertyChange( "index", index.key, null ); - deleteKeyIfNoLongerUsed( index.keyId ); - } - - - /** - * {@inheritDoc} - */ - @Override - public void deletePrimaryKey( long tableId ) throws GenericCatalogException { - LogicalTable table = getTable( tableId ); - - // TODO: Check if the currently stored values are unique - if ( table.primaryKey != null ) { - // Check if this primary key is required to maintain to uniqueness - // CatalogCombinedKey key = getCombinedKey( table.primaryKey ); - if ( isForeignKey( table.primaryKey ) ) { - if ( getKeyUniqueCount( table.primaryKey ) < 2 ) { - throw new GenericCatalogException( "This key is referenced by at least one foreign key which requires this key to be unique. To drop this primary key either drop the foreign key or create a unique constraint." ); - } - } - - setPrimaryKey( tableId, null ); - deleteKeyIfNoLongerUsed( table.primaryKey ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public void deleteForeignKey( long foreignKeyId ) throws GenericCatalogException { - try { - CatalogForeignKey catalogForeignKey = Objects.requireNonNull( foreignKeys.get( foreignKeyId ) ); - synchronized ( this ) { - foreignKeys.remove( catalogForeignKey.id ); - deleteKeyIfNoLongerUsed( catalogForeignKey.id ); - } - listeners.firePropertyChange( "foreignKey", foreignKeyId, null ); - } catch ( NullPointerException e ) { - throw new GenericCatalogException( e ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public void deleteConstraint( long constraintId ) throws GenericCatalogException { - try { - CatalogConstraint catalogConstraint = Objects.requireNonNull( constraints.get( constraintId ) ); - - //CatalogCombinedKey key = getCombinedKey( catalogConstraint.keyId ); - if ( catalogConstraint.type == ConstraintType.UNIQUE && isForeignKey( catalogConstraint.keyId ) ) { - if ( getKeyUniqueCount( catalogConstraint.keyId ) < 2 ) { - throw new GenericCatalogException( "This key is referenced by at least one foreign key which requires this key to be unique. Unable to drop unique constraint." 
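// A sketch of the drop guard shared by deleteIndex, deletePrimaryKey, and
// deleteConstraint above: a uniqueness guarantee may only be removed when it is not
// the last one backing a foreign key reference, i.e. the unique count stays >= 2.
final class DropGuardSketch {

    static void checkDroppable( boolean referencedByForeignKey, int uniqueCount ) {
        if ( referencedByForeignKey && uniqueCount < 2 ) {
            throw new IllegalStateException(
                    "This key is referenced by at least one foreign key which requires this key to be unique." );
        }
    }

    public static void main( String[] args ) {
        checkDroppable( true, 2 );   // ok: one guarantee remains after the drop
        try {
            checkDroppable( true, 1 );
        } catch ( IllegalStateException e ) {
            System.out.println( e.getMessage() );
        }
    }
}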
); - } - } - synchronized ( this ) { - constraints.remove( catalogConstraint.id ); - } - listeners.firePropertyChange( "constraint", catalogConstraint, null ); - deleteKeyIfNoLongerUsed( catalogConstraint.keyId ); - } catch ( NullPointerException e ) { - throw new GenericCatalogException( e ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public CatalogUser getUser( String name ) throws UnknownUserException { - try { - return Objects.requireNonNull( userNames.get( name ) ); - } catch ( NullPointerException e ) { - throw new UnknownUserException( name ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public CatalogUser getUser( long id ) { - try { - return Objects.requireNonNull( users.get( id ) ); - } catch ( NullPointerException e ) { - throw new UnknownUserIdRuntimeException( id ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public List getAdapters() { - return new ArrayList<>( adapters.values() ); - } - - - /** - * {@inheritDoc} - */ - @Override - public CatalogAdapter getAdapter( String uniqueName ) throws UnknownAdapterException { - uniqueName = uniqueName.toLowerCase(); - try { - return Objects.requireNonNull( adapterNames.get( uniqueName ) ); - } catch ( NullPointerException e ) { - throw new UnknownAdapterException( uniqueName ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public CatalogAdapter getAdapter( long id ) { - try { - return Objects.requireNonNull( adapters.get( id ) ); - } catch ( NullPointerException e ) { - throw new UnknownAdapterIdRuntimeException( id ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public boolean checkIfExistsAdapter( long id ) { - return adapters.containsKey( id ); - } - - - /** - * {@inheritDoc} - */ - @Override - public long addAdapter( String uniqueName, String adapterName, AdapterType type, Map settings ) { - uniqueName = uniqueName.toLowerCase(); - - int id = adapterIdBuilder.getAndIncrement(); - Map temp = new HashMap<>( settings ); - CatalogAdapter adapter = new CatalogAdapter( id, uniqueName, adapterName, type, temp ); - synchronized ( this ) { - adapters.put( id, adapter ); - adapterNames.put( uniqueName, adapter ); - } - try { - commit(); - } catch ( NoTablePrimaryKeyException e ) { - throw new RuntimeException( "An error occurred while creating the adapter." ); - } - listeners.firePropertyChange( "adapter", null, adapter ); - return id; - } - - - /** - * {@inheritDoc} - */ - @Override - public void updateAdapterSettings( long adapterId, Map newSettings ) { - CatalogAdapter old = getAdapter( adapterId ); - Map temp = new HashMap<>(); - newSettings.forEach( temp::put ); - CatalogAdapter adapter = new CatalogAdapter( old.id, old.uniqueName, old.adapterName, old.type, temp ); - synchronized ( this ) { - adapters.put( adapter.id, adapter ); - adapterNames.put( adapter.uniqueName, adapter ); - } - listeners.firePropertyChange( "adapter", old, adapter ); - } - - - /** - * {@inheritDoc} - */ - @Override - public void deleteAdapter( long id ) { - try { - CatalogAdapter adapter = Objects.requireNonNull( adapters.get( id ) ); - synchronized ( this ) { - adapters.remove( id ); - adapterNames.remove( adapter.uniqueName ); - } - try { - commit(); - } catch ( NoTablePrimaryKeyException e ) { - throw new RuntimeException( "An error occurred while deleting the adapter." 
); - } - try { - commit(); - } catch ( NoTablePrimaryKeyException e ) { - throw new RuntimeException( "Could not delete adapter" ); - } - listeners.firePropertyChange( "adapter", adapter, null ); - } catch ( NullPointerException e ) { - throw new UnknownAdapterIdRuntimeException( id ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public List getQueryInterfaces() { - return new ArrayList<>( queryInterfaces.values() ); - } - - - /** - * {@inheritDoc} - */ - @Override - public CatalogQueryInterface getQueryInterface( String uniqueName ) throws UnknownQueryInterfaceException { - uniqueName = uniqueName.toLowerCase(); - try { - return Objects.requireNonNull( queryInterfaceNames.get( uniqueName ) ); - } catch ( NullPointerException e ) { - throw new UnknownQueryInterfaceException( uniqueName ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public CatalogQueryInterface getQueryInterface( long id ) { - try { - return Objects.requireNonNull( queryInterfaces.get( id ) ); - } catch ( NullPointerException e ) { - throw new UnknownQueryInterfaceRuntimeException( id ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public long addQueryInterface( String uniqueName, String clazz, Map settings ) { - uniqueName = uniqueName.toLowerCase(); - - int id = queryInterfaceIdBuilder.getAndIncrement(); - Map temp = new HashMap<>( settings ); - CatalogQueryInterface queryInterface = new CatalogQueryInterface( id, uniqueName, clazz, temp ); - synchronized ( this ) { - queryInterfaces.put( id, queryInterface ); - queryInterfaceNames.put( uniqueName, queryInterface ); - } - try { - commit(); - } catch ( NoTablePrimaryKeyException e ) { - throw new RuntimeException( "An error occurred while creating the query interface." ); - } - listeners.firePropertyChange( "queryInterface", null, queryInterface ); - return id; - } - - - /** - * {@inheritDoc} - */ - @Override - public void deleteQueryInterface( long id ) { - try { - CatalogQueryInterface queryInterface = Objects.requireNonNull( queryInterfaces.get( id ) ); - synchronized ( this ) { - queryInterfaces.remove( id ); - queryInterfaceNames.remove( queryInterface.name ); - } - try { - commit(); - } catch ( NoTablePrimaryKeyException e ) { - throw new RuntimeException( "An error occurred while deleting the query interface." 
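// A sketch of the add/delete pattern for adapters and query interfaces above:
// unique names are normalized to lower case, the id comes from a shared counter,
// both maps are mutated under the catalog lock, and the change is committed
// eagerly. The commit() body is a placeholder for the persistent store's commit.
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;

final class RegistrySketch {

    record Adapter( int id, String uniqueName ) {}

    private final Map<Integer, Adapter> byId = new HashMap<>();
    private final Map<String, Adapter> byName = new HashMap<>();
    private final AtomicInteger adapterIds = new AtomicInteger();

    int addAdapter( String uniqueName ) {
        uniqueName = uniqueName.toLowerCase();          // unique names are case-insensitive
        int id = adapterIds.getAndIncrement();
        Adapter adapter = new Adapter( id, uniqueName );
        synchronized ( this ) {
            byId.put( id, adapter );
            byName.put( uniqueName, adapter );
        }
        commit();                                       // persist right after the structural change
        return id;
    }

    private void commit() { /* placeholder for the underlying store's commit */ }

    public static void main( String[] args ) {
        RegistrySketch registry = new RegistrySketch();
        System.out.println( registry.addAdapter( "HSQLDB" ) );  // stored under "hsqldb"
    }
}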
); - } - listeners.firePropertyChange( "queryInterface", queryInterface, null ); - } catch ( NullPointerException e ) { - throw new UnknownQueryInterfaceRuntimeException( id ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public long addPartitionGroup( long tableId, String partitionGroupName, long schemaId, PartitionType partitionType, long numberOfInternalPartitions, List effectivePartitionGroupQualifier, boolean isUnbound ) throws GenericCatalogException { - try { - long id = partitionGroupIdBuilder.getAndIncrement(); - if ( log.isDebugEnabled() ) { - log.debug( "Creating partitionGroup of type '{}' with id '{}'", partitionType, id ); - } - LogicalNamespace schema = Objects.requireNonNull( schemas.get( schemaId ) ); - - List partitionIds = new ArrayList<>(); - for ( int i = 0; i < numberOfInternalPartitions; i++ ) { - long partId = addPartition( tableId, schemaId, id, effectivePartitionGroupQualifier, isUnbound ); - partitionIds.add( partId ); - } - - CatalogPartitionGroup partitionGroup = new CatalogPartitionGroup( - id, - partitionGroupName, - tableId, - schemaId, - 0, - null, - ImmutableList.copyOf( partitionIds ), - isUnbound ); - - synchronized ( this ) { - partitionGroups.put( id, partitionGroup ); - } - //listeners.firePropertyChange( "partitionGroups", null, partitionGroup ); - return id; - } catch ( NullPointerException e ) { - throw new GenericCatalogException( e ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public void deletePartitionGroup( long tableId, long schemaId, long partitionGroupId ) throws UnknownPartitionGroupIdRuntimeException { - if ( log.isDebugEnabled() ) { - log.debug( "Deleting partitionGroup with id '{}' on table with id '{}'", partitionGroupId, tableId ); - } - // Check whether there this partition id exists - CatalogPartitionGroup partitionGroup = getPartitionGroup( partitionGroupId ); - synchronized ( this ) { - for ( long partitionId : partitionGroup.partitionIds ) { - deletePartition( tableId, schemaId, partitionId ); - } - partitionGroups.remove( partitionGroupId ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public void updatePartitionGroup( long partitionGroupId, List partitionIds ) throws UnknownPartitionGroupIdRuntimeException { - - // Check whether there this partition id exists - CatalogPartitionGroup partitionGroup = getPartitionGroup( partitionGroupId ); - - CatalogPartitionGroup updatedCatalogPartitionGroup = new CatalogPartitionGroup( - partitionGroup.id, - partitionGroup.partitionGroupName, - partitionGroup.tableId, - partitionGroup.schemaId, - partitionGroup.partitionKey, - partitionGroup.partitionQualifiers, - ImmutableList.copyOf( partitionIds ), - partitionGroup.isUnbound ); - - synchronized ( this ) { - partitionGroups.replace( partitionGroupId, updatedCatalogPartitionGroup ); - listeners.firePropertyChange( "partitionGroup", partitionGroup, updatedCatalogPartitionGroup ); - } - - } - - - /** - * {@inheritDoc} - */ - @Override - public void addPartitionToGroup( long partitionGroupId, Long partitionId ) { - // Check whether there this partition id exists - getPartition( partitionId ); - - CatalogPartitionGroup partitionGroup = getPartitionGroup( partitionGroupId ); - List newPartitionIds = new ArrayList<>( partitionGroup.partitionIds ); - - if ( !newPartitionIds.contains( partitionId ) ) { - newPartitionIds.add( partitionId ); - updatePartitionGroup( partitionGroupId, newPartitionIds ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public void removePartitionFromGroup( long partitionGroupId, Long 
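// addPartitionGroup above eagerly materializes its internal partitions: it loops
// numberOfInternalPartitions times, creates one partition per iteration, and
// freezes the collected ids into the new group. Stand-in record and counter only.
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.atomic.AtomicLong;

final class PartitionGroupSketch {

    record PartitionGroup( long id, List<Long> partitionIds, boolean isUnbound ) {}

    private static final AtomicLong PARTITION_IDS = new AtomicLong();

    static PartitionGroup addPartitionGroup( long groupId, long numberOfInternalPartitions, boolean isUnbound ) {
        List<Long> partitionIds = new ArrayList<>();
        for ( int i = 0; i < numberOfInternalPartitions; i++ ) {
            partitionIds.add( PARTITION_IDS.getAndIncrement() );  // stands in for addPartition( ... )
        }
        return new PartitionGroup( groupId, List.copyOf( partitionIds ), isUnbound );
    }

    public static void main( String[] args ) {
        System.out.println( addPartitionGroup( 0, 4, false ) );  // group 0 with partitions [0..3]
    }
}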
partitionId ) { - // Check whether there this partition id exists - CatalogPartitionGroup partitionGroup = getPartitionGroup( partitionGroupId ); - List newPartitionIds = new ArrayList<>( partitionGroup.partitionIds ); - - if ( newPartitionIds.contains( partitionId ) ) { - newPartitionIds.remove( partitionId ); - updatePartitionGroup( partitionGroupId, newPartitionIds ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public void updatePartition( long partitionId, Long partitionGroupId ) { - // Check whether there this partition id exists - CatalogPartitionGroup partitionGroup = getPartitionGroup( partitionGroupId ); - List newPartitionIds = new ArrayList<>( partitionGroup.partitionIds ); - - CatalogPartition oldPartition = getPartition( partitionId ); - - if ( !newPartitionIds.contains( partitionId ) ) { - newPartitionIds.add( partitionId ); - - addPartitionToGroup( partitionGroupId, partitionId ); - removePartitionFromGroup( oldPartition.partitionGroupId, partitionId ); - - CatalogPartition updatedPartition = new CatalogPartition( - oldPartition.id, - oldPartition.tableId, - oldPartition.schemaId, - oldPartition.partitionQualifiers, - oldPartition.isUnbound, - partitionGroupId - ); - - synchronized ( this ) { - partitions.put( updatedPartition.id, updatedPartition ); - } - listeners.firePropertyChange( "partition", oldPartition, updatedPartition ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public CatalogPartitionGroup getPartitionGroup( long partitionGroupId ) throws UnknownPartitionGroupIdRuntimeException { - try { - return Objects.requireNonNull( partitionGroups.get( partitionGroupId ) ); - } catch ( NullPointerException e ) { - throw new UnknownPartitionGroupIdRuntimeException( partitionGroupId ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public long addPartition( long tableId, long schemaId, long partitionGroupId, List effectivePartitionQualifier, boolean isUnbound ) throws GenericCatalogException { - try { - long id = partitionIdBuilder.getAndIncrement(); - if ( log.isDebugEnabled() ) { - log.debug( "Creating partition with id '{}'", id ); - } - LogicalNamespace schema = Objects.requireNonNull( schemas.get( schemaId ) ); - - CatalogPartition partition = new CatalogPartition( - id, - tableId, - schemaId, - effectivePartitionQualifier, - isUnbound, - partitionGroupId ); - - synchronized ( this ) { - partitions.put( id, partition ); - } - listeners.firePropertyChange( "partition", null, partition ); - return id; - } catch ( NullPointerException e ) { - throw new GenericCatalogException( e ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public void deletePartition( long tableId, long schemaId, long partitionId ) { - if ( log.isDebugEnabled() ) { - log.debug( "Deleting partition with id '{}' on table with id '{}'", partitionId, tableId ); - } - // Check whether there this partition id exists - getPartition( partitionId ); - synchronized ( this ) { - for ( CatalogPartitionPlacement partitionPlacement : getPartitionPlacements( partitionId ) ) { - deletePartitionPlacement( partitionPlacement.adapterId, partitionId ); - } - partitions.remove( partitionId ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public CatalogPartition getPartition( long partitionId ) { - try { - return Objects.requireNonNull( partitions.get( partitionId ) ); - } catch ( NullPointerException e ) { - throw new UnknownPartitionGroupIdRuntimeException( partitionId ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public List getPartitionsByTable( long tableId ) 
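// updatePartition above moves a partition between groups in three steps: add its id
// to the target group, remove it from the old group, then republish the partition
// with the new partitionGroupId back-reference. Hypothetical record, same order.
final class MovePartitionSketch {

    record Partition( long id, long partitionGroupId ) {
        Partition movedTo( long groupId ) { return new Partition( id, groupId ); }
    }

    public static void main( String[] args ) {
        Partition old = new Partition( 7, 1 );
        // 1) addPartitionToGroup( 2, old.id() )      -- join the target group
        // 2) removePartitionFromGroup( 1, old.id() ) -- leave the previous group
        Partition updated = old.movedTo( 2 );       // 3) fix the back-reference
        System.out.println( updated );              // Partition[id=7, partitionGroupId=2]
    }
}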
{ - return partitions.values() - .stream() - .filter( p -> p.tableId == tableId ) - .collect( Collectors.toList() ); - } - - - /** - * {@inheritDoc} - */ - @Override - public void partitionTable( long tableId, PartitionType partitionType, long partitionColumnId, int numPartitionGroups, List partitionGroupIds, PartitionProperty partitionProperty ) { - LogicalTable old = Objects.requireNonNull( tables.get( tableId ) ); - - LogicalTable table = new LogicalTable( - old.id, - old.name, - old.fieldIds, - old.namespaceId, - old.ownerId, - old.entityType, - old.primaryKey, - old.dataPlacements, - old.modifiable, - partitionProperty, - old.connectedViews ); - - synchronized ( this ) { - tables.replace( tableId, table ); - tableNames.replace( new Object[]{ table.namespaceId, old.name }, table ); - - if ( table.partitionProperty.reliesOnPeriodicChecks ) { - addTableToPeriodicProcessing( tableId ); - } - } - - listeners.firePropertyChange( "table", old, table ); - } - - - /** - * {@inheritDoc} - */ - @Override - public void mergeTable( long tableId ) { - LogicalTable old = Objects.requireNonNull( tables.get( tableId ) ); - - if ( old.partitionProperty.reliesOnPeriodicChecks ) { - removeTableFromPeriodicProcessing( tableId ); - } - - //Technically every Table is partitioned. But tables classified as UNPARTITIONED only consist of one PartitionGroup and one large partition - List partitionGroupIds = new ArrayList<>(); - try { - partitionGroupIds.add( addPartitionGroup( tableId, "full", old.namespaceId, PartitionType.NONE, 1, new ArrayList<>(), true ) ); - } catch ( GenericCatalogException e ) { - throw new RuntimeException( e ); - } - - // Get All(only one) PartitionGroups and then get all partitionIds for each PG and add them to completeList of partitionIds - CatalogPartitionGroup defaultUnpartitionedGroup = getPartitionGroup( partitionGroupIds.get( 0 ) ); - PartitionProperty partitionProperty = PartitionProperty.builder() - .partitionType( PartitionType.NONE ) - .isPartitioned( false ) - .partitionGroupIds( ImmutableList.copyOf( partitionGroupIds ) ) - .partitionIds( ImmutableList.copyOf( defaultUnpartitionedGroup.partitionIds ) ) - .reliesOnPeriodicChecks( false ) - .build(); - - LogicalTable table = new LogicalTable( - old.id, - old.name, - old.fieldIds, - old.namespaceId, - old.ownerId, - old.entityType, - old.primaryKey, - old.dataPlacements, - old.modifiable, - partitionProperty, - old.connectedViews ); - - synchronized ( this ) { - tables.replace( tableId, table ); - tableNames.replace( new Object[]{ table.namespaceId, old.name }, table ); - } - listeners.firePropertyChange( "table", old, table ); - } - - - /** - * {@inheritDoc} - */ - @Override - public void updateTablePartitionProperties( long tableId, PartitionProperty partitionProperty ) { - LogicalTable old = Objects.requireNonNull( tables.get( tableId ) ); - - LogicalTable table = new LogicalTable( - old.id, - old.name, - old.fieldIds, - old.namespaceId, - old.ownerId, - old.entityType, - old.primaryKey, - old.dataPlacements, - old.modifiable, - partitionProperty, - old.connectedViews ); - - synchronized ( this ) { - tables.replace( tableId, table ); - tableNames.replace( new Object[]{ table.namespaceId, old.name }, table ); - } - - listeners.firePropertyChange( "table", old, table ); - } - - - /** - * {@inheritDoc} - */ - @Override - public List getPartitionGroups( long tableId ) { - try { - LogicalTable table = Objects.requireNonNull( tables.get( tableId ) ); - List partitionGroups = new ArrayList<>(); - if ( 
table.partitionProperty.partitionGroupIds == null ) { - return new ArrayList<>(); - } - for ( long partId : table.partitionProperty.partitionGroupIds ) { - partitionGroups.add( getPartitionGroup( partId ) ); - } - return partitionGroups; - } catch ( UnknownPartitionGroupIdRuntimeException e ) { - return new ArrayList<>(); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public List getPartitionGroups( Pattern schemaNamePattern, Pattern tableNamePattern ) { - List catalogEntities = getTables( schemaNamePattern, tableNamePattern ); - Stream partitionGroupStream = Stream.of(); - for ( LogicalTable catalogTable : catalogEntities ) { - partitionGroupStream = Stream.concat( partitionGroupStream, getPartitionGroups( catalogTable.id ).stream() ); - } - return partitionGroupStream.collect( Collectors.toList() ); - } - - - /** - * {@inheritDoc} - */ - @Override - public List getPartitions( long partitionGroupId ) { - try { - CatalogPartitionGroup partitionGroup = Objects.requireNonNull( partitionGroups.get( partitionGroupId ) ); - List partitions = new ArrayList<>(); - if ( partitionGroup.partitionIds == null ) { - return new ArrayList<>(); - } - for ( long partId : partitionGroup.partitionIds ) { - partitions.add( getPartition( partId ) ); - } - return partitions; - } catch ( UnknownPartitionGroupIdRuntimeException e ) { - return new ArrayList<>(); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public List getPartitions( Pattern schemaNamePattern, Pattern tableNamePattern ) { - List catalogPartitionGroups = getPartitionGroups( schemaNamePattern, tableNamePattern ); - Stream partitionStream = Stream.of(); - for ( CatalogPartitionGroup catalogPartitionGroup : catalogPartitionGroups ) { - partitionStream = Stream.concat( partitionStream, getPartitions( catalogPartitionGroup.id ).stream() ); - } - return partitionStream.collect( Collectors.toList() ); - } - - - /** - * {@inheritDoc} - */ - @Override - public List getPartitionGroupNames( long tableId ) { - List partitionGroupNames = new ArrayList<>(); - for ( CatalogPartitionGroup catalogPartitionGroup : getPartitionGroups( tableId ) ) { - partitionGroupNames.add( catalogPartitionGroup.partitionGroupName ); - } - return partitionGroupNames; - } - - - /** - * {@inheritDoc} - */ - @Override - public List getColumnPlacementsByPartitionGroup( long tableId, long partitionGroupId, long columnId ) { - List catalogColumnPlacements = new ArrayList<>(); - for ( CatalogColumnPlacement ccp : getColumnPlacement( columnId ) ) { - if ( getPartitionGroupsOnDataPlacement( ccp.adapterId, tableId ).contains( partitionGroupId ) ) { - catalogColumnPlacements.add( ccp ); - } - } - - return catalogColumnPlacements; - } - - - /** - * {@inheritDoc} - */ - @Override - public List getAdaptersByPartitionGroup( long tableId, long partitionGroupId ) { - Set catalogAdapters = new HashSet<>(); - - for ( CatalogDataPlacement dataPlacement : getDataPlacements( tableId ) ) { - for ( long partitionId : dataPlacement.getAllPartitionIds() ) { - long partitionGroup = getPartitionGroupByPartition( partitionId ); - if ( partitionGroup == partitionGroupId ) { - catalogAdapters.add( getAdapter( dataPlacement.adapterId ) ); - } - } - } - - return new ArrayList<>( catalogAdapters ); - } - - - /** - * {@inheritDoc} - */ - @Override - public List getPartitionGroupsOnDataPlacement( int adapterId, long tableId ) { - Set partitionGroups = new HashSet<>(); - CatalogDataPlacement dataPlacement = getDataPlacement( adapterId, tableId ); - - dataPlacement.getAllPartitionIds().forEach( - 
partitionId -> partitionGroups.add( getPartitionGroupByPartition( partitionId ) - ) - ); - - return new ArrayList<>( partitionGroups ); - } - - - /** - * {@inheritDoc} - */ - @Override - public List getPartitionsOnDataPlacement( int adapterId, long tableId ) { - return getDataPlacement( adapterId, tableId ).getAllPartitionIds(); - } - - - /** - * {@inheritDoc} - */ - @Override - public List getPartitionGroupsIndexOnDataPlacement( int adapterId, long tableId ) { - List partitionGroups = getPartitionGroupsOnDataPlacement( adapterId, tableId ); - if ( partitionGroups == null ) { - return new ArrayList<>(); - } - - List partitionGroupIndexList = new ArrayList<>(); - LogicalTable catalogTable = getTable( tableId ); - for ( int index = 0; index < catalogTable.partitionProperty.partitionGroupIds.size(); index++ ) { - if ( partitionGroups.contains( catalogTable.partitionProperty.partitionGroupIds.get( index ) ) ) { - partitionGroupIndexList.add( (long) index ); - } - } - return partitionGroupIndexList; - } - - - /** - * {@inheritDoc} - */ - @Override - public CatalogDataPlacement getDataPlacement( int adapterId, long tableId ) { - return dataPlacements.get( new Object[]{ adapterId, tableId } ); - } - - - /** - * {@inheritDoc} - */ - @Override - public List getDataPlacements( long tableId ) { - List catalogDataPlacements = new ArrayList<>(); - - getTable( tableId ).dataPlacements.forEach( adapterId -> catalogDataPlacements.add( getDataPlacement( adapterId, tableId ) ) ); - - return catalogDataPlacements; - } - - - /** - * {@inheritDoc} - */ - @Override - public List getAllFullDataPlacements( long tableId ) { - List dataPlacements = new ArrayList<>(); - List allDataPlacements = getDataPlacements( tableId ); - - for ( CatalogDataPlacement dataPlacement : allDataPlacements ) { - if ( dataPlacement.hasFullPlacement() ) { - dataPlacements.add( dataPlacement ); - } - } - return dataPlacements; - } - - - /** - * {@inheritDoc} - */ - @Override - public List getAllColumnFullDataPlacements( long tableId ) { - List dataPlacements = new ArrayList<>(); - List allDataPlacements = getDataPlacements( tableId ); - - for ( CatalogDataPlacement dataPlacement : allDataPlacements ) { - if ( dataPlacement.hasColumnFullPlacement() ) { - dataPlacements.add( dataPlacement ); - } - } - return dataPlacements; - } - - - /** - * {@inheritDoc} - */ - @Override - public List getAllPartitionFullDataPlacements( long tableId ) { - List dataPlacements = new ArrayList<>(); - List allDataPlacements = getDataPlacements( tableId ); - - for ( CatalogDataPlacement dataPlacement : allDataPlacements ) { - if ( dataPlacement.hasPartitionFullPlacement() ) { - dataPlacements.add( dataPlacement ); - } - } - return dataPlacements; - } - - - /** - * {@inheritDoc} - */ - @Override - public List getDataPlacementsByRole( long tableId, DataPlacementRole role ) { - List catalogDataPlacements = new ArrayList<>(); - for ( CatalogDataPlacement dataPlacement : getDataPlacements( tableId ) ) { - if ( dataPlacement.dataPlacementRole.equals( role ) ) { - catalogDataPlacements.add( dataPlacement ); - } - } - return catalogDataPlacements; - } - - - /** - * {@inheritDoc} - */ - @Override - public List getPartitionPlacementsByRole( long tableId, DataPlacementRole role ) { - List partitionPlacements = new ArrayList<>(); - for ( CatalogDataPlacement dataPlacement : getDataPlacementsByRole( tableId, role ) ) { - if ( dataPlacement.partitionPlacementsOnAdapterByRole.containsKey( role ) ) { - dataPlacement.partitionPlacementsOnAdapterByRole.get( role ) - .forEach( - 
partitionId -> partitionPlacements.add( getPartitionPlacement( dataPlacement.adapterId, partitionId ) ) - ); - } - } - return partitionPlacements; - } - - - /** - * {@inheritDoc} - */ - @Override - public List getPartitionPlacementsByIdAndRole( long tableId, long partitionId, DataPlacementRole role ) { - List partitionPlacements = new ArrayList<>(); - for ( CatalogPartitionPlacement partitionPlacement : getPartitionPlacements( partitionId ) ) { - if ( partitionPlacement.role.equals( role ) ) { - partitionPlacements.add( partitionPlacement ); - } - } - return partitionPlacements; - } - - - /** - * {@inheritDoc} - */ - @Override - public boolean validateDataPlacementsConstraints( long tableId, long adapterId, List columnIdsToBeRemoved, List partitionsIdsToBeRemoved ) { - if ( (columnIdsToBeRemoved.isEmpty() && partitionsIdsToBeRemoved.isEmpty()) || isTableFlaggedForDeletion( tableId ) ) { - log.warn( "Invoked validation with two empty lists of columns and partitions to be revoked. Is therefore always true..." ); - return true; - } - - // TODO @HENNLO Focus on PartitionPlacements that are labeled as UPTODATE nodes. The outdated nodes do not - // necessarily need placement constraints - - LogicalTable table = getTable( tableId ); - List dataPlacements = getDataPlacements( tableId ); - - // Checks for every column on every DataPlacement if each column is placed with all partitions - for ( long columnId : table.fieldIds ) { - List partitionsToBeCheckedForColumn = table.partitionProperty.partitionIds.stream().collect( Collectors.toList() ); - // Check for every column if it has every partition - for ( CatalogDataPlacement dataPlacement : dataPlacements ) { - // Can instantly return because we still have a full placement somewhere - if ( dataPlacement.hasFullPlacement() && dataPlacement.adapterId != adapterId ) { - return true; - } - - List effectiveColumnsOnStore = dataPlacement.columnPlacementsOnAdapter.stream().collect( Collectors.toList() ); - List effectivePartitionsOnStore = dataPlacement.getAllPartitionIds(); - - // Remove columns and partitions from store to not evaluate them - if ( dataPlacement.adapterId == adapterId ) { - - // Skips columns that shall be removed - if ( columnIdsToBeRemoved.contains( columnId ) ) { - continue; - } - - // Only process those parts that shall be present after change - effectiveColumnsOnStore.removeAll( columnIdsToBeRemoved ); - effectivePartitionsOnStore.removeAll( partitionsIdsToBeRemoved ); - } - - if ( effectiveColumnsOnStore.contains( columnId ) ) { - partitionsToBeCheckedForColumn.removeAll( effectivePartitionsOnStore ); - } else { - continue; - } - - // Found all partitions for column, continue with next column - if ( partitionsToBeCheckedForColumn.isEmpty() ) { - break; - } - } - - if ( !partitionsToBeCheckedForColumn.isEmpty() ) { - return false; - } - } - - return true; - } - - - /** - * {@inheritDoc} - */ - @Override - public void flagTableForDeletion( long tableId, boolean flag ) { - if ( flag && !tablesFlaggedForDeletion.contains( tableId ) ) { - tablesFlaggedForDeletion.add( tableId ); - } else if ( !flag && tablesFlaggedForDeletion.contains( tableId ) ) { - tablesFlaggedForDeletion.remove( tableId ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public boolean isTableFlaggedForDeletion( long tableId ) { - return tablesFlaggedForDeletion.contains( tableId ); - } - - - /** - * {@inheritDoc} - */ - @Override - public void addPartitionPlacement( long namespaceId, int adapterId, long tableId, long partitionId, PlacementType placementType, 
String physicalSchemaName, String physicalTableName, DataPlacementRole role ) { - if ( !checkIfExistsPartitionPlacement( adapterId, partitionId ) ) { - CatalogAdapter store = Objects.requireNonNull( adapters.get( adapterId ) ); - CatalogPartitionPlacement partitionPlacement = new CatalogPartitionPlacement( - namespaceId, - tableId, - adapterId, - store.uniqueName, - placementType, - physicalSchemaName, - physicalTableName, - partitionId, - role ); - - synchronized ( this ) { - partitionPlacements.put( new Object[]{ adapterId, partitionId }, partitionPlacement ); - - // Adds this PartitionPlacement to existing DataPlacement container - addPartitionsToDataPlacement( adapterId, tableId, List.of( partitionId ) ); - - listeners.firePropertyChange( "partitionPlacement", null, partitionPlacements ); - } - } - } - - - /** - * {@inheritDoc} - */ - @Override - public CatalogDataPlacement addDataPlacementIfNotExists( int adapterId, long tableId ) { - CatalogDataPlacement dataPlacement; - if ( (dataPlacement = getDataPlacement( adapterId, tableId )) == null ) { - if ( log.isDebugEnabled() ) { - log.debug( "No DataPlacement exists on adapter '{}' for entity '{}'. Creating a new one.", getAdapter( adapterId ), getTable( tableId ) ); - } - addDataPlacement( adapterId, tableId ); - dataPlacement = getDataPlacement( adapterId, tableId ); - } - - return dataPlacement; - } - - - /** - * {@inheritDoc} - */ - @Override - public void updateDataPlacementsOnTable( long tableId, List newDataPlacements ) { - LogicalTable old = Objects.requireNonNull( tables.get( tableId ) ); - - LogicalTable newTable = old.withDataPlacements( ImmutableList.copyOf( newDataPlacements ) ); - - synchronized ( this ) { - tables.replace( tableId, newTable ); - tableNames.replace( new Object[]{ newTable.namespaceId, newTable.name }, newTable ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public void addDataPlacement( int adapterId, long tableId ) { - if ( log.isDebugEnabled() ) { - log.debug( "Creating DataPlacement on adapter '{}' for entity '{}'", getAdapter( adapterId ), getTable( tableId ) ); - } - - if ( !dataPlacements.containsKey( new Object[]{ adapterId, tableId } ) ) { - CatalogDataPlacement dataPlacement = new CatalogDataPlacement( - tableId, - adapterId, - PlacementType.AUTOMATIC, - DataPlacementRole.UPTODATE, - ImmutableList.of(), - ImmutableList.of() ); - - synchronized ( this ) { - dataPlacements.put( new Object[]{ adapterId, tableId }, dataPlacement ); - addSingleDataPlacementToTable( adapterId, tableId ); - } - listeners.firePropertyChange( "dataPlacement", null, dataPlacement ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - protected void modifyDataPlacement( int adapterId, long tableId, CatalogDataPlacement catalogDataPlacement ) { - - try { - CatalogDataPlacement oldDataPlacement = getDataPlacement( adapterId, tableId ); - synchronized ( this ) { - dataPlacements.replace( new Object[]{ adapterId, tableId }, catalogDataPlacement ); - } - listeners.firePropertyChange( "dataPlacement", oldDataPlacement, catalogDataPlacement ); - } catch ( NullPointerException e ) { - e.printStackTrace(); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public long addGraphPlacement( int adapterId, long graphId ) { - long id = partitionIdBuilder.getAndIncrement(); - CatalogGraphPlacement placement = new CatalogGraphPlacement( adapterId, graphId, null, id ); - LogicalGraph old = graphs.get( graphId ); - if ( old == null ) { - throw new UnknownGraphException( graphId ); - } - - LogicalGraph graph = old.addPlacement( 
adapterId ); - - synchronized ( this ) { - graphPlacements.put( new Object[]{ graph.id, adapterId }, placement ); - graphs.replace( graph.id, graph ); - graphNames.replace( new Object[]{ graph.name }, graph ); - } - listeners.firePropertyChange( "graphPlacement", null, placement ); - return id; - } - - - /** - * {@inheritDoc} - */ - @Override - public void deleteGraphPlacement( int adapterId, long graphId ) { - if ( !graphPlacements.containsKey( new Object[]{ graphId, adapterId } ) ) { - throw new UnknownGraphPlacementsException( graphId, adapterId ); - } - CatalogGraphPlacement placement = Objects.requireNonNull( graphPlacements.get( new Object[]{ graphId, adapterId } ) ); - - deleteGraphPlacementLogistics( placement.graphId, adapterId ); - - LogicalGraph old = Objects.requireNonNull( graphs.get( placement.graphId ) ); - - LogicalGraph graph = old.removePlacement( adapterId ); - - synchronized ( this ) { - graphPlacements.remove( new Object[]{ graphId, adapterId } ); - graphs.replace( graphId, graph ); - graphNames.replace( new Object[]{ Catalog.defaultDatabaseId, graph.name }, graph ); - } - listeners.firePropertyChange( "graphPlacements", null, null ); - } - - - private void deleteGraphPlacementLogistics( long graphId, int adapterId ) { - /* - CatalogGraphMapping mapping = Objects.requireNonNull( graphMappings.get( graphId ) ); - if ( !graphPlacements.containsKey( new Object[]{ graphId, adapterId } ) ) { - throw new UnknownGraphPlacementsException( graphId, adapterId ); - } - CatalogGraphPlacement placement = Objects.requireNonNull( graphPlacements.get( new Object[]{ graphId, adapterId } ) ); - - removeSingleDataPlacementFromTable( placement.adapterId, mapping.nodesId ); - removeSingleDataPlacementFromTable( placement.adapterId, mapping.nodesPropertyId ); - removeSingleDataPlacementFromTable( placement.adapterId, mapping.edgesId ); - */ - } - - - /** - * {@inheritDoc} - */ - @Override - public CatalogGraphPlacement getGraphPlacement( long graphId, int adapterId ) { - if ( !graphPlacements.containsKey( new Object[]{ graphId, adapterId } ) ) { - throw new UnknownGraphPlacementsException( graphId, adapterId ); - } - - return graphPlacements.get( new Object[]{ graphId, adapterId } ); - } - - - /** - * {@inheritDoc} - */ - @Override - public void removeDataPlacement( int adapterId, long tableId ) { - CatalogDataPlacement dataPlacement = getDataPlacement( adapterId, tableId ); - - if ( log.isDebugEnabled() ) { - log.debug( "Removing DataPlacement on adapter '{}' for entity '{}'", getAdapter( adapterId ), getTable( tableId ) ); - } - - // Make sure that all columnPlacements and partitionPlacements are correctly dropped. - // Although, they should've been dropped earlier. 
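-        // Defensive cleanup: the catch blocks in the two loops below only log at
-        // debug level, so removing the DataPlacement itself still succeeds when a
-        // column or partition placement has already been dropped by an earlier step.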
- - // Recursively removing columns that exist on this placement - for ( Long columnId : dataPlacement.columnPlacementsOnAdapter ) { - try { - deleteColumnPlacement( adapterId, columnId, false ); - } catch ( UnknownColumnIdRuntimeException e ) { - log.debug( "Column has been removed before the placement" ); - } - } - - // Recursively removing partitions that exist on this placement - for ( Long partitionId : dataPlacement.getAllPartitionIds() ) { - try { - deletePartitionPlacement( adapterId, partitionId ); - } catch ( UnknownColumnIdRuntimeException e ) { - log.debug( "Partition has been removed before the placement" ); - } - } - - synchronized ( this ) { - dataPlacements.remove( new Object[]{ adapterId, tableId } ); - removeSingleDataPlacementFromTable( adapterId, tableId ); - } - listeners.firePropertyChange( "dataPlacement", dataPlacement, null ); - } - - - /** - * {@inheritDoc} - */ - @Override - protected void addSingleDataPlacementToTable( Integer adapterId, long tableId ) { - LogicalTable old = getTable( tableId ); - List updatedPlacements = new ArrayList<>( old.dataPlacements ); - - if ( !updatedPlacements.contains( adapterId ) ) { - updatedPlacements.add( adapterId ); - updateDataPlacementsOnTable( tableId, updatedPlacements ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - protected void removeSingleDataPlacementFromTable( Integer adapterId, long tableId ) { - LogicalTable old = getTable( tableId ); - List updatedPlacements = new ArrayList<>( old.dataPlacements ); - - if ( updatedPlacements.contains( adapterId ) ) { - updatedPlacements.remove( adapterId ); - updateDataPlacementsOnTable( tableId, updatedPlacements ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - protected void addColumnsToDataPlacement( int adapterId, long tableId, List columnIds ) { - CatalogDataPlacement oldDataPlacement = addDataPlacementIfNotExists( adapterId, tableId ); - - Set columnPlacementsOnAdapter = new HashSet<>( oldDataPlacement.columnPlacementsOnAdapter ); - - // Merges new columnIds to list of already existing placements - columnPlacementsOnAdapter.addAll( columnIds ); - - CatalogDataPlacement newDataPlacement = new CatalogDataPlacement( - oldDataPlacement.tableId, - oldDataPlacement.adapterId, - oldDataPlacement.placementType, - oldDataPlacement.dataPlacementRole, - ImmutableList.copyOf( new ArrayList<>( columnPlacementsOnAdapter ) ), - ImmutableList.copyOf( oldDataPlacement.getAllPartitionIds() ) - ); - - modifyDataPlacement( adapterId, tableId, newDataPlacement ); - - if ( log.isDebugEnabled() ) { - log.debug( "Added columns: {} of table {}, to placement on adapter {}.", columnIds, tableId, adapterId ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - protected void removeColumnsFromDataPlacement( int adapterId, long tableId, List columnIds ) { - CatalogDataPlacement oldDataPlacement = getDataPlacement( adapterId, tableId ); - - Set columnPlacementsOnAdapter = new HashSet<>( oldDataPlacement.columnPlacementsOnAdapter ); - columnPlacementsOnAdapter.removeAll( columnIds ); - - CatalogDataPlacement newDataPlacement = new CatalogDataPlacement( - oldDataPlacement.tableId, - oldDataPlacement.adapterId, - oldDataPlacement.placementType, - oldDataPlacement.dataPlacementRole, - ImmutableList.copyOf( new ArrayList<>( columnPlacementsOnAdapter ) ), - ImmutableList.copyOf( oldDataPlacement.getAllPartitionIds() ) - ); - - modifyDataPlacement( adapterId, tableId, newDataPlacement ); - - if ( log.isDebugEnabled() ) { - log.debug( "Removed columns: {} from table {}, to placement on 
adapter {}.", columnIds, tableId, adapterId ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - protected void addPartitionsToDataPlacement( int adapterId, long tableId, List partitionIds ) { - CatalogDataPlacement oldDataPlacement = addDataPlacementIfNotExists( adapterId, tableId ); - - Set partitionPlacementsOnAdapter = new HashSet<>( oldDataPlacement.getAllPartitionIds() ); - partitionPlacementsOnAdapter.addAll( partitionIds ); - - CatalogDataPlacement newDataPlacement = new CatalogDataPlacement( - oldDataPlacement.tableId, - oldDataPlacement.adapterId, - oldDataPlacement.placementType, - oldDataPlacement.dataPlacementRole, - oldDataPlacement.columnPlacementsOnAdapter, - ImmutableList.copyOf( new ArrayList<>( partitionPlacementsOnAdapter ) ) ); - - modifyDataPlacement( adapterId, tableId, newDataPlacement ); - - if ( log.isDebugEnabled() ) { - log.debug( "Added partitions: {} of table {}, to placement on adapter {}.", partitionIds, tableId, adapterId ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - protected void removePartitionsFromDataPlacement( int adapterId, long tableId, List partitionIds ) { - CatalogDataPlacement oldDataPlacement = getDataPlacement( adapterId, tableId ); - - Set partitionPlacementsOnAdapter = new HashSet<>( oldDataPlacement.getAllPartitionIds() ); - partitionIds.forEach( partitionPlacementsOnAdapter::remove ); - - CatalogDataPlacement newDataPlacement = new CatalogDataPlacement( - oldDataPlacement.tableId, - oldDataPlacement.adapterId, - oldDataPlacement.placementType, - oldDataPlacement.dataPlacementRole, - oldDataPlacement.columnPlacementsOnAdapter, - ImmutableList.copyOf( new ArrayList<>( partitionPlacementsOnAdapter ) ) ); - - modifyDataPlacement( adapterId, tableId, newDataPlacement ); - - if ( log.isDebugEnabled() ) { - log.debug( "Removed partitions: {} from table {}, to placement on adapter {}.", partitionIds, tableId, adapterId ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public void updateDataPlacement( int adapterId, long tableId, List columnIds, List partitionIds ) { - CatalogDataPlacement oldDataPlacement = getDataPlacement( adapterId, tableId ); - - CatalogDataPlacement newDataPlacement = new CatalogDataPlacement( - oldDataPlacement.tableId, - oldDataPlacement.adapterId, - oldDataPlacement.placementType, - oldDataPlacement.dataPlacementRole, - ImmutableList.copyOf( columnIds ), - ImmutableList.copyOf( partitionIds ) ); - - modifyDataPlacement( adapterId, tableId, newDataPlacement ); - - if ( log.isDebugEnabled() ) { - log.debug( "Added columns {} & partitions: {} of table {}, to placement on adapter {}.", columnIds, partitionIds, tableId, adapterId ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public void deletePartitionPlacement( int adapterId, long partitionId ) { - if ( checkIfExistsPartitionPlacement( adapterId, partitionId ) ) { - synchronized ( this ) { - partitionPlacements.remove( new Object[]{ adapterId, partitionId } ); - removePartitionsFromDataPlacement( adapterId, getTableFromPartition( partitionId ).id, Arrays.asList( partitionId ) ); - } - } - } - - - /** - * {@inheritDoc} - */ - @Override - public CatalogPartitionPlacement getPartitionPlacement( int adapterId, long partitionId ) { - try { - return Objects.requireNonNull( partitionPlacements.get( new Object[]{ adapterId, partitionId } ) ); - } catch ( NullPointerException e ) { - getAdapter( adapterId ); - getPartition( partitionId ); - throw new UnknownPartitionPlacementException( adapterId, partitionId ); - } - } - - - /** - * {@inheritDoc} - 
*/
-    @Override
-    public List getPartitionPlacementsByAdapter( int adapterId ) {
-        return new ArrayList<>( partitionPlacements.prefixSubMap( new Object[]{ adapterId } ).values() );
-    }
-
-
-    /**
-     * {@inheritDoc}
-     */
-    @Override
-    public List getPartitionPlacementsByTableOnAdapter( int adapterId, long tableId ) {
-        return getPartitionPlacementsByAdapter( adapterId )
-                .stream()
-                .filter( p -> p.tableId == tableId )
-                .collect( Collectors.toList() );
-    }
-
-
-    /**
-     * {@inheritDoc}
-     */
-    @Override
-    public List getAllPartitionPlacementsByTable( long tableId ) {
-        return partitionPlacements.values()
-                .stream()
-                .filter( p -> p.tableId == tableId )
-                .collect( Collectors.toList() );
-    }
-
-
-    /**
-     * {@inheritDoc}
-     */
-    @Override
-    public List getPartitionPlacements( long partitionId ) {
-        return partitionPlacements.values()
-                .stream()
-                .filter( p -> p.partitionId == partitionId )
-                .collect( Collectors.toList() );
-    }
-
-
-    /**
-     * {@inheritDoc}
-     */
-    @Override
-    public List getTablesForPeriodicProcessing() {
-        List procTables = new ArrayList<>();
-        for ( Iterator iterator = frequencyDependentTables.iterator(); iterator.hasNext(); ) {
-            long tableId = -1;
-            try {
-                tableId = iterator.next();
-                procTables.add( getTable( tableId ) );
-            } catch ( UnknownTableIdRuntimeException e ) {
-                iterator.remove();
-            }
-        }
-
-        return procTables;
-    }
-
-
-    /**
-     * {@inheritDoc}
-     */
-    @Override
-    public void addTableToPeriodicProcessing( long tableId ) {
-        int beforeSize = frequencyDependentTables.size();
-        getTable( tableId );
-        if ( !frequencyDependentTables.contains( tableId ) ) {
-            frequencyDependentTables.add( tableId );
-        }
-        // Initially starts the periodic job if this was the first table to enable periodic processing
-        if ( beforeSize == 0 && frequencyDependentTables.size() == 1 ) {
-            // Start Job for periodic processing
-            FrequencyMap.INSTANCE.initialize();
-        }
-    }
-
-
-    /**
-     * {@inheritDoc}
-     */
-    @Override
-    public void removeTableFromPeriodicProcessing( long tableId ) {
-        getTable( tableId );
-        if ( frequencyDependentTables.contains( tableId ) ) {
-            frequencyDependentTables.remove( tableId );
-        }
-
-        // Terminates the periodic job if this was the last table with periodic processing
-        if ( frequencyDependentTables.size() == 0 ) {
-            // Terminate Job for periodic processing
-            FrequencyMap.INSTANCE.terminate();
-        }
-    }
-
-
-    /**
-     * {@inheritDoc}
-     */
-    @Override
-    public boolean checkIfExistsPartitionPlacement( int adapterId, long partitionId ) {
-        CatalogPartitionPlacement placement = partitionPlacements.get( new Object[]{ adapterId, partitionId } );
-        return placement != null;
-    }
-
-
-    /**
-     * {@inheritDoc}
-     */
-    @Override
-    public List getTableKeys( long tableId ) {
-        return keys.values().stream().filter( k -> k.tableId == tableId ).collect( Collectors.toList() );
-    }
-
-
-    /**
-     * {@inheritDoc}
-     */
-    @Override
-    public List getIndexes( CatalogKey key ) {
-        return indexes.values().stream().filter( i -> i.keyId == key.id ).collect( Collectors.toList() );
-    }
-
-
-    /**
-     * {@inheritDoc}
-     */
-    @Override
-    public List getForeignKeys( CatalogKey key ) {
-        return foreignKeys.values().stream().filter( f -> f.referencedKeyId == key.id ).collect( Collectors.toList() );
-    }
-
-
-    /**
-     * {@inheritDoc}
-     */
-    @Override
-    public List getConstraints( CatalogKey key ) {
-        return constraints.values().stream().filter( c -> c.keyId == key.id ).collect( Collectors.toList() );
-    }
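The three predicates that follow (isIndex, isConstraint, isForeignKey) are linear scans over the corresponding maps, and deleteKeyIfNoLongerUsed further down chains the same scans to decide whether a key may be dropped. Note the asymmetry: deleteKeyIfNoLongerUsed matches foreign keys by f.id, while isForeignKey matches by f.referencedKeyId. Conceptually the guard is a single disjunction; a sketch under that reading, where keyIsStillUsed is an illustrative name and the parameters mirror the maps of this class:

    import java.util.Collection;

    // Sketch: a key may only be deleted once nothing references it anymore,
    // i.e. it is neither the table's primary key nor backing a constraint,
    // foreign key, or index.
    static boolean keyIsStillUsed(
            Long primaryKeyId,
            Collection<CatalogConstraint> constraints,
            Collection<CatalogForeignKey> foreignKeys,
            Collection<CatalogIndex> indexes,
            long keyId ) {
        return ( primaryKeyId != null && primaryKeyId == keyId )
                || constraints.stream().anyMatch( c -> c.keyId == keyId )
                || foreignKeys.stream().anyMatch( f -> f.id == keyId )
                || indexes.stream().anyMatch( i -> i.keyId == keyId );
    }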
-
-
-    /**
-     * {@inheritDoc}
-     */
-    @Override
-    public boolean isIndex( long keyId ) {
-        return indexes.values().stream().anyMatch( i -> i.keyId == keyId );
-    }
-
-
-    /**
-     * {@inheritDoc}
-     */
-    @Override
-    public boolean isConstraint( long keyId ) {
-        return constraints.values().stream().anyMatch( c -> c.keyId == keyId );
-    }
-
-
-    /**
-     * {@inheritDoc}
-     */
-    @Override
-    public boolean isForeignKey( long keyId ) {
-        return foreignKeys.values().stream().anyMatch( f -> f.referencedKeyId == keyId );
-    }
-
-
-    /**
-     * Checks whether the specified key is used as primary key, constraint, foreign key, or index. If so, this is a no-op. If it is not used, the key is deleted.
-     */
-    private void deleteKeyIfNoLongerUsed( Long keyId ) {
-        if ( keyId == null ) {
-            return;
-        }
-        CatalogKey key = getKey( keyId );
-        LogicalTable table = getTable( key.tableId );
-        if ( table.primaryKey != null && table.primaryKey.equals( keyId ) ) {
-            return;
-        }
-        if ( constraints.values().stream().anyMatch( c -> c.keyId == keyId ) ) {
-            return;
-        }
-        if ( foreignKeys.values().stream().anyMatch( f -> f.id == keyId ) ) {
-            return;
-        }
-        if ( indexes.values().stream().anyMatch( i -> i.keyId == keyId ) ) {
-            return;
-        }
-        synchronized ( this ) {
-            keys.remove( keyId );
-            keyColumns.remove( key.columnIds.stream().mapToLong( Long::longValue ).toArray() );
-        }
-        listeners.firePropertyChange( "key", key, null );
-    }
-
-
-    /**
-     * Returns the id of the key defined by the specified column ids. If this key does not yet exist, it is created.
-     *
-     * @param tableId on which the key is defined
-     * @param columnIds all involved columns
-     * @param enforcementTime at which point during execution the key should be enforced
-     * @return the id of the key
-     * @throws GenericCatalogException if the key could not be created
-     */
-    private long getOrAddKey( long tableId, List columnIds, EnforcementTime enforcementTime ) throws GenericCatalogException {
-        Long keyId = keyColumns.get( columnIds.stream().mapToLong( Long::longValue ).toArray() );
-        if ( keyId != null ) {
-            return keyId;
-        }
-        return addKey( tableId, columnIds, enforcementTime );
-    }
-
-
-    private long addKey( long tableId, List columnIds, EnforcementTime enforcementTime ) throws GenericCatalogException {
-        try {
-            LogicalTable table = Objects.requireNonNull( tables.get( tableId ) );
-            long id = keyIdBuilder.getAndIncrement();
-            CatalogKey key = new CatalogKey( id, table.id, table.namespaceId, columnIds, enforcementTime );
-            synchronized ( this ) {
-                keys.put( id, key );
-                keyColumns.put( columnIds.stream().mapToLong( Long::longValue ).toArray(), id );
-            }
-            listeners.firePropertyChange( "key", null, key );
-            return id;
-        } catch ( NullPointerException e ) {
-            throw new GenericCatalogException( e );
-        }
-    }
-
-
-    /**
-     * {@inheritDoc}
-     */
-    @Override
-    public List getKeys() {
-        return new ArrayList<>( keys.values() );
-    }
-
-
-    /**
-     * Get a key by its id
-     *
-     * @return The key
-     */
-    private CatalogKey getKey( long keyId ) {
-        try {
-            return Objects.requireNonNull( keys.get( keyId ) );
-        } catch ( NullPointerException e ) {
-            throw new UnknownKeyIdRuntimeException( keyId );
-        }
-    }
-
-
-    static class CatalogValidator {
-
-        public void validate() throws GenericCatalogException {
-
-        }
-
-
-        public void startCheck() {
-            columns.forEach( ( key, column ) -> {
-                assert (schemas.containsKey( column.schemaId ));
-                assert (Objects.requireNonNull( schemaChildren.get( column.schemaId ) ).contains( column.tableId ));
-
-                assert (tables.containsKey( column.tableId ));
-                assert (Objects.requireNonNull( tableChildren.get( column.tableId ) ).contains( column.id ));
-
-                assert (columnNames.containsKey( new Object[]{ column.schemaId, column.tableId, column.name
} )); - } ); - - columnPlacements.forEach( ( key, placement ) -> { - assert (columns.containsKey( placement.columnId )); - assert (adapters.containsKey( placement.adapterId )); - } ); - } - - } - -} diff --git a/plugins/mapdb-catalog/src/main/java/org/polypheny/db/catalog/CatalogImplBackup.java b/plugins/mapdb-catalog/src/main/java/org/polypheny/db/catalog/CatalogImplBackup.java deleted file mode 100644 index 6957786946..0000000000 --- a/plugins/mapdb-catalog/src/main/java/org/polypheny/db/catalog/CatalogImplBackup.java +++ /dev/null @@ -1,5107 +0,0 @@ -/* - * Copyright 2019-2023 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.catalog; - - -import com.google.common.collect.ImmutableList; -import com.google.common.collect.ImmutableMap; -import java.io.File; -import java.io.IOException; -import java.sql.Timestamp; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.Comparator; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; -import java.util.Objects; -import java.util.Set; -import java.util.concurrent.atomic.AtomicInteger; -import java.util.concurrent.atomic.AtomicLong; -import java.util.stream.Collectors; -import java.util.stream.Stream; -import lombok.Getter; -import lombok.NonNull; -import lombok.extern.slf4j.Slf4j; -import org.mapdb.BTreeMap; -import org.mapdb.DB; -import org.mapdb.DBException.SerializationError; -import org.mapdb.DBMaker; -import org.mapdb.HTreeMap; -import org.mapdb.Serializer; -import org.mapdb.serializer.SerializerArrayTuple; -import org.pf4j.Extension; -import org.polypheny.db.StatusService; -import org.polypheny.db.StatusService.ErrorConfig; -import org.polypheny.db.adapter.Adapter; -import org.polypheny.db.adapter.AdapterManager; -import org.polypheny.db.adapter.DataStore; -import org.polypheny.db.algebra.AlgCollation; -import org.polypheny.db.algebra.AlgCollations; -import org.polypheny.db.algebra.AlgNode; -import org.polypheny.db.algebra.AlgRoot; -import org.polypheny.db.algebra.constant.Kind; -import org.polypheny.db.algebra.core.Sort; -import org.polypheny.db.algebra.type.AlgDataType; -import org.polypheny.db.catalog.entity.CatalogAdapter; -import org.polypheny.db.catalog.entity.CatalogAdapter.AdapterType; -import org.polypheny.db.catalog.entity.CatalogCollectionMapping; -import org.polypheny.db.catalog.entity.CatalogCollectionPlacement; -import org.polypheny.db.catalog.entity.CatalogColumnPlacement; -import org.polypheny.db.catalog.entity.CatalogConstraint; -import org.polypheny.db.catalog.entity.CatalogDataPlacement; -import org.polypheny.db.catalog.entity.CatalogDatabase; -import org.polypheny.db.catalog.entity.CatalogDefaultValue; -import org.polypheny.db.catalog.entity.CatalogForeignKey; -import org.polypheny.db.catalog.entity.CatalogGraphMapping; -import org.polypheny.db.catalog.entity.CatalogGraphPlacement; -import 
org.polypheny.db.catalog.entity.CatalogIndex; -import org.polypheny.db.catalog.entity.CatalogKey; -import org.polypheny.db.catalog.entity.CatalogKey.EnforcementTime; -import org.polypheny.db.catalog.entity.CatalogMaterializedView; -import org.polypheny.db.catalog.entity.CatalogPartition; -import org.polypheny.db.catalog.entity.CatalogPartitionGroup; -import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; -import org.polypheny.db.catalog.entity.CatalogPrimaryKey; -import org.polypheny.db.catalog.entity.CatalogQueryInterface; -import org.polypheny.db.catalog.entity.CatalogUser; -import org.polypheny.db.catalog.entity.CatalogView; -import org.polypheny.db.catalog.entity.LogicalNamespace; -import org.polypheny.db.catalog.entity.MaterializedCriteria; -import org.polypheny.db.catalog.entity.logical.LogicalCollection; -import org.polypheny.db.catalog.entity.logical.LogicalColumn; -import org.polypheny.db.catalog.entity.logical.LogicalGraph; -import org.polypheny.db.catalog.entity.logical.LogicalTable; -import org.polypheny.db.catalog.entity.physical.PhysicalEntity; -import org.polypheny.db.catalog.exceptions.GenericCatalogException; -import org.polypheny.db.catalog.exceptions.GraphAlreadyExistsException; -import org.polypheny.db.catalog.exceptions.NoTablePrimaryKeyException; -import org.polypheny.db.catalog.exceptions.UnknownAdapterException; -import org.polypheny.db.catalog.exceptions.UnknownAdapterIdRuntimeException; -import org.polypheny.db.catalog.exceptions.UnknownCollectionException; -import org.polypheny.db.catalog.exceptions.UnknownCollectionPlacementException; -import org.polypheny.db.catalog.exceptions.UnknownColumnException; -import org.polypheny.db.catalog.exceptions.UnknownColumnIdRuntimeException; -import org.polypheny.db.catalog.exceptions.UnknownColumnPlacementRuntimeException; -import org.polypheny.db.catalog.exceptions.UnknownConstraintException; -import org.polypheny.db.catalog.exceptions.UnknownDatabaseIdRuntimeException; -import org.polypheny.db.catalog.exceptions.UnknownForeignKeyException; -import org.polypheny.db.catalog.exceptions.UnknownGraphException; -import org.polypheny.db.catalog.exceptions.UnknownGraphPlacementsException; -import org.polypheny.db.catalog.exceptions.UnknownIndexException; -import org.polypheny.db.catalog.exceptions.UnknownIndexIdRuntimeException; -import org.polypheny.db.catalog.exceptions.UnknownKeyIdRuntimeException; -import org.polypheny.db.catalog.exceptions.UnknownPartitionGroupIdRuntimeException; -import org.polypheny.db.catalog.exceptions.UnknownPartitionPlacementException; -import org.polypheny.db.catalog.exceptions.UnknownQueryInterfaceException; -import org.polypheny.db.catalog.exceptions.UnknownQueryInterfaceRuntimeException; -import org.polypheny.db.catalog.exceptions.UnknownSchemaException; -import org.polypheny.db.catalog.exceptions.UnknownSchemaIdRuntimeException; -import org.polypheny.db.catalog.exceptions.UnknownTableException; -import org.polypheny.db.catalog.exceptions.UnknownTableIdRuntimeException; -import org.polypheny.db.catalog.exceptions.UnknownUserException; -import org.polypheny.db.catalog.exceptions.UnknownUserIdRuntimeException; -import org.polypheny.db.catalog.logistic.Collation; -import org.polypheny.db.catalog.logistic.ConstraintType; -import org.polypheny.db.catalog.logistic.DataPlacementRole; -import org.polypheny.db.catalog.logistic.EntityType; -import org.polypheny.db.catalog.logistic.ForeignKeyOption; -import org.polypheny.db.catalog.logistic.IndexType; -import 
org.polypheny.db.catalog.logistic.NamespaceType; -import org.polypheny.db.catalog.logistic.PartitionType; -import org.polypheny.db.catalog.logistic.Pattern; -import org.polypheny.db.catalog.logistic.PlacementType; -import org.polypheny.db.catalog.snapshot.Snapshot; -import org.polypheny.db.config.RuntimeConfig; -import org.polypheny.db.iface.QueryInterfaceManager; -import org.polypheny.db.languages.QueryLanguage; -import org.polypheny.db.languages.QueryParameters; -import org.polypheny.db.nodes.Node; -import org.polypheny.db.partition.FrequencyMap; -import org.polypheny.db.partition.properties.PartitionProperty; -import org.polypheny.db.processing.ExtendedQueryParameters; -import org.polypheny.db.processing.Processor; -import org.polypheny.db.transaction.Statement; -import org.polypheny.db.transaction.Transaction; -import org.polypheny.db.type.PolyType; -import org.polypheny.db.type.PolyTypeFamily; -import org.polypheny.db.util.ImmutableIntList; -import org.polypheny.db.util.Pair; -import org.polypheny.db.util.PolyphenyHomeDirManager; -import org.polypheny.db.view.MaterializedViewManager; - -@Extension -@Slf4j -public class CatalogImplBackup extends Catalog { - - private static final String FILE_PATH = "mapDB"; - private static DB db; - - private static HTreeMap users; - private static HTreeMap userNames; - - private static BTreeMap databases; - private static BTreeMap databaseNames; - private static HTreeMap> databaseChildren; - - private static BTreeMap schemas; - private static BTreeMap schemaNames; - private static HTreeMap> schemaChildren; - - private static BTreeMap tables; - private static BTreeMap tableNames; - private static HTreeMap> tableChildren; - - private static BTreeMap collections; - private static BTreeMap collectionNames; - - private static BTreeMap collectionPlacements; - - private static BTreeMap documentMappings; - - private static BTreeMap columns; - private static BTreeMap columnNames; - private static BTreeMap columnPlacements; - - private static HTreeMap adapters; - private static HTreeMap adapterNames; - - private static HTreeMap queryInterfaces; - private static HTreeMap queryInterfaceNames; - - private static HTreeMap keys; - private static HTreeMap keyColumns; - - private static HTreeMap primaryKeys; - private static HTreeMap foreignKeys; - private static HTreeMap constraints; - private static HTreeMap indexes; - - private static BTreeMap partitionGroups; - private static BTreeMap partitions; - private static BTreeMap partitionPlacements; // (AdapterId, Partition) - - // Container Object that contains all other placements - private static BTreeMap dataPlacements; // (AdapterId, TableId) -> CatalogDataPlacement - - private static BTreeMap graphs; - private static BTreeMap graphAliases; - private static BTreeMap graphNames; - private static BTreeMap graphPlacements; - - private static BTreeMap graphMappings; - - private static Long openTable; - - private static final AtomicInteger adapterIdBuilder = new AtomicInteger( 1 ); - private static final AtomicInteger queryInterfaceIdBuilder = new AtomicInteger( 1 ); - private static final AtomicInteger userIdBuilder = new AtomicInteger( 1 ); - - private static final AtomicLong databaseIdBuilder = new AtomicLong( 1 ); - private static final AtomicLong namespaceIdBuilder = new AtomicLong( 1 ); - private static final AtomicLong entityIdBuilder = new AtomicLong( 1 ); - private static final AtomicLong columnIdBuilder = new AtomicLong( 1 ); - - private static final AtomicLong partitionGroupIdBuilder = new AtomicLong( 1 ); - 
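Most placement maps declared above are keyed by composite tuples, for example (adapterId, partitionId) for partitionPlacements and (adapterId, tableId) for dataPlacements. With MapDB this is achieved through a SerializerArrayTuple key serializer, which keeps the keys ordered and enables the prefix scans used by methods such as getPartitionPlacementsByAdapter() via prefixSubMap( new Object[]{ adapterId } ). A minimal, self-contained sketch of the pattern; map and value names are illustrative:

    import org.mapdb.BTreeMap;
    import org.mapdb.DB;
    import org.mapdb.DBMaker;
    import org.mapdb.Serializer;
    import org.mapdb.serializer.SerializerArrayTuple;

    public class CompositeKeyDemo {

        public static void main( String[] args ) {
            DB db = DBMaker.memoryDB().make();

            // Keys are (adapterId, partitionId) tuples; values are plain strings here.
            BTreeMap<Object[], String> placements = db
                    .treeMap( "placements", new SerializerArrayTuple( Serializer.INTEGER, Serializer.LONG ), Serializer.STRING )
                    .createOrOpen();

            placements.put( new Object[]{ 1, 100L }, "placement-a" );
            placements.put( new Object[]{ 1, 101L }, "placement-b" );
            placements.put( new Object[]{ 2, 100L }, "placement-c" );

            // Prefix scan: every placement of adapter 1, regardless of partition.
            System.out.println( placements.prefixSubMap( new Object[]{ 1 } ).values() );

            db.close();
        }

    }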
private static final AtomicLong partitionIdBuilder = new AtomicLong( 1000 );
-
-    private static final AtomicLong keyIdBuilder = new AtomicLong( 1 );
-    private static final AtomicLong constraintIdBuilder = new AtomicLong( 1 );
-    private static final AtomicLong indexIdBuilder = new AtomicLong( 1 );
-    private static final AtomicLong foreignKeyIdBuilder = new AtomicLong( 1 );
-
-    private static final AtomicLong physicalPositionBuilder = new AtomicLong();
-
-    private static Set frequencyDependentTables = new HashSet<>(); // All tables to consider for periodic processing
-
-    // Keeps a list of all tableIDs which are going to be deleted. This is required to avoid constraints when recursively
-    // removing a table and all placements and partitions. Otherwise **validatePartitionDistribution()** inside the Catalog
-    // would throw an error.
-    private static final List tablesFlaggedForDeletion = new ArrayList<>();
-
-    Comparator columnComparator = Comparator.comparingInt( o -> o.position );
-
-    // {@link AlgNode} used to create view and materialized view
-    @Getter
-    private final Map nodeInfo = new HashMap<>();
-
-
-    public CatalogImplBackup() {
-        this( FILE_PATH, true, true, false );
-    }
-
-
-    /**
-     * Creates a new catalog according to the given parameters
-     *
-     * @param fileName name of the persistent catalog file
-     * @param doInitSchema whether the default schema should be initialized
-     * @param doInitInformationPage whether a new information page should be created
-     * @param deleteAfter whether the file should be deleted when the catalog is closed
-     */
-    public CatalogImplBackup( String fileName, boolean doInitSchema, boolean doInitInformationPage, boolean deleteAfter ) {
-        super();
-
-        if ( db != null ) {
-            db.close();
-        }
-        synchronized ( this ) {
-
-            if ( Catalog.memoryCatalog || Catalog.testMode ) {
-                isPersistent = false;
-            } else {
-                isPersistent = isPersistent();
-            }
-
-            if ( isPersistent ) {
-                StatusService.printInfo( "Making the catalog persistent." );
-                File folder = PolyphenyHomeDirManager.getInstance().registerNewFolder( "catalog" );
-
-                if ( Catalog.resetCatalog ) {
-                    StatusService.printInfo( "Resetting catalog on startup." );
-                    if ( new File( folder, fileName ).exists() ) {
-                        //noinspection ResultOfMethodCallIgnored
-                        new File( folder, fileName ).delete();
-                    }
-                }
-
-                if ( !deleteAfter ) {
-                    db = DBMaker
-                            .fileDB( new File( folder, fileName ) )
-                            .closeOnJvmShutdown()
-                            .transactionEnable()
-                            .fileMmapEnableIfSupported()
-                            .fileMmapPreclearDisable()
-                            .make();
-                } else {
-                    db = DBMaker
-                            .fileDB( new File( folder, fileName ) )
-                            .closeOnJvmShutdown()
-                            .fileDeleteAfterClose()
-                            .transactionEnable()
-                            .fileMmapEnableIfSupported()
-                            .fileMmapPreclearDisable()
-                            .make();
-                }
-                db.getStore().fileLoad();
-
-            } else {
-                StatusService.printInfo( "Making the catalog in-memory."
); - db = DBMaker - .memoryDB() - .transactionEnable() - .closeOnJvmShutdown() - .make(); - } - - initDBLayout( db ); - - // mirrors default data from old sql file - restoreAllIdBuilders(); - try { - - if ( doInitSchema ) { - insertDefaultData(); - } - - } catch ( GenericCatalogException | UnknownUserException | UnknownTableException | - UnknownSchemaException | UnknownAdapterException | UnknownColumnException e ) { - throw new RuntimeException( e ); - } - if ( doInitInformationPage ) { - new CatalogInfoPage( this ); - } - - new CatalogValidator().startCheck(); - } - } - - - @Override - public void commit() throws NoTablePrimaryKeyException { - if ( openTable != null ) { - throw new NoTablePrimaryKeyException(); - } - db.commit(); - } - - - @Override - public void rollback() { - db.rollback(); - } - - - /** - * Checks if a file can be created on the system, accessed and changed - * - * @return if it was possible - */ - private boolean isPersistent() { - File file = PolyphenyHomeDirManager.getInstance().registerNewFile( "testfile" ); - try { - if ( !file.exists() ) { - boolean res = file.createNewFile(); - if ( !res ) { - return false; - } - } - } catch ( IOException e ) { - return false; - } - if ( !file.canRead() || !file.canWrite() ) { - return false; - } - file.delete(); - - return true; - } - - - /** - * Initializes the default catalog layout - * - * @param db the databases object on which the layout is created - */ - private void initDBLayout( DB db ) { - try { - initUserInfo( db ); - initDatabaseInfo( db ); - initSchemaInfo( db ); - initTableInfo( db ); - initGraphInfo( db ); - initDocumentInfo( db ); - initColumnInfo( db ); - initKeysAndConstraintsInfo( db ); - initAdapterInfo( db ); - initQueryInterfaceInfo( db ); - } catch ( SerializationError e ) { - log.error( "!!!!!!!!!!! Error while restoring the catalog !!!!!!!!!!!" ); - log.error( "This usually means that there have been changes to the internal structure of the catalog with the last update of Polypheny-DB." ); - log.error( "To fix this, you must reset the catalog. To do this, please start Polypheny-DB once with the argument \"-resetCatalog\"." ); - StatusService.printError( - "Unsupported version of catalog! 
Unable to restore the schema.", - ErrorConfig.builder().func( ErrorConfig.DO_NOTHING ).doExit( true ).showButton( true ).buttonMessage( "Exit" ).build() ); - } - } - - - @Override - public void restoreColumnPlacements( Transaction transaction ) { - AdapterManager manager = AdapterManager.getInstance(); - - Map> restoredTables = new HashMap<>(); - - for ( LogicalColumn c : columns.values() ) { - List placements = getColumnPlacement( c.id ); - LogicalTable catalogTable = getTable( c.tableId ); - - // No column placements need to be restored if it is a view - if ( catalogTable.entityType != EntityType.VIEW ) { - if ( placements.size() == 0 ) { - // No placements shouldn't happen - throw new RuntimeException( "There seems to be no placement for the column with the id " + c.id ); - } else if ( placements.size() == 1 ) { - Adapter adapter = manager.getAdapter( placements.get( 0 ).adapterId ); - if ( adapter instanceof DataStore ) { - DataStore store = (DataStore) adapter; - if ( !store.isPersistent() ) { - - // TODO only full placements atm here - - if ( !restoredTables.containsKey( store.getAdapterId() ) ) { - store.createPhysicalTable( transaction.createStatement().getPrepareContext(), catalogTable, null ); - restoredTables.put( store.getAdapterId(), Collections.singletonList( catalogTable.id ) ); - - } else if ( !(restoredTables.containsKey( store.getAdapterId() ) && restoredTables.get( store.getAdapterId() ).contains( catalogTable.id )) ) { - store.createPhysicalTable( transaction.createStatement().getPrepareContext(), catalogTable, null ); - List ids = new ArrayList<>( restoredTables.get( store.getAdapterId() ) ); - ids.add( catalogTable.id ); - restoredTables.put( store.getAdapterId(), ids ); - } - } - } - } else { - Map persistent = placements.stream().collect( Collectors.toMap( p -> p.adapterId, p -> manager.getStore( p.adapterId ).isPersistent() ) ); - - if ( !persistent.containsValue( true ) ) { // no persistent placement for this column - LogicalTable table = getTable( c.tableId ); - for ( CatalogColumnPlacement p : placements ) { - DataStore store = manager.getStore( p.adapterId ); - - if ( !restoredTables.containsKey( store.getAdapterId() ) ) { - store.createPhysicalTable( transaction.createStatement().getPrepareContext(), table, null ); - List ids = new ArrayList<>(); - ids.add( table.id ); - restoredTables.put( store.getAdapterId(), ids ); - - } else if ( !(restoredTables.containsKey( store.getAdapterId() ) && restoredTables.get( store.getAdapterId() ).contains( table.id )) ) { - store.createPhysicalTable( transaction.createStatement().getPrepareContext(), table, null ); - List ids = new ArrayList<>( restoredTables.get( store.getAdapterId() ) ); - ids.add( table.id ); - restoredTables.put( store.getAdapterId(), ids ); - } - } - } else if ( persistent.containsValue( true ) && persistent.containsValue( false ) ) { - // TODO DL change so column gets copied - for ( Entry p : persistent.entrySet() ) { - if ( !p.getValue() ) { - deleteColumnPlacement( p.getKey(), c.id, false ); - } - } - } - } - } - } - } - - - /** - * {@inheritDoc} - */ - @Override - public void restoreViews( Transaction transaction ) { - Statement statement = transaction.createStatement(); - - for ( LogicalTable c : tables.values() ) { - if ( c.entityType == EntityType.VIEW || c.entityType == EntityType.MATERIALIZED_VIEW ) { - String query; - QueryLanguage language; - if ( c.entityType == EntityType.VIEW ) { - query = ((CatalogView) c).getQuery(); - language = ((CatalogView) c).getLanguage(); - } else { - query = 
((CatalogMaterializedView) c).getQuery(); - language = ((CatalogMaterializedView) c).getLanguage(); - } - - switch ( language.getSerializedName() ) { - case "sql": - Processor sqlProcessor = statement.getTransaction().getProcessor( QueryLanguage.from( "rel" ) ); - Node sqlNode = sqlProcessor.parse( query ).get( 0 ); - AlgRoot algRoot = sqlProcessor.translate( - statement, - sqlProcessor.validate( statement.getTransaction(), sqlNode, RuntimeConfig.ADD_DEFAULT_VALUES_IN_INSERTS.getBoolean() ).left, - new QueryParameters( query, c.getNamespaceType() ) ); - nodeInfo.put( c.id, algRoot.alg ); - break; - - case "rel": - Processor jsonRelProcessor = statement.getTransaction().getProcessor( QueryLanguage.from( "rel" ) ); - AlgNode result = jsonRelProcessor.translate( statement, null, new QueryParameters( query, c.getNamespaceType() ) ).alg; - - final AlgDataType rowType = result.getRowType(); - final List> fields = Pair.zip( ImmutableIntList.identity( rowType.getFieldCount() ), rowType.getFieldNames() ); - final AlgCollation collation = - result instanceof Sort - ? ((Sort) result).collation - : AlgCollations.EMPTY; - AlgRoot root = new AlgRoot( result, result.getRowType(), Kind.SELECT, fields, collation ); - - nodeInfo.put( c.id, root.alg ); - break; - - case "mongo": - Processor mqlProcessor = statement.getTransaction().getProcessor( QueryLanguage.from( "mongo" ) ); - Node mqlNode = mqlProcessor.parse( query ).get( 0 ); - - AlgRoot mqlRel = mqlProcessor.translate( - statement, - mqlNode, - new ExtendedQueryParameters( query, NamespaceType.DOCUMENT, getNamespace( defaultDatabaseId ).name ) ); - nodeInfo.put( c.id, mqlRel.alg ); - break; - } - if ( c.entityType == EntityType.MATERIALIZED_VIEW ) { - log.info( "Updating materialized view: {}", c.getNamespaceName() + "." 
+ c.name );
-                    MaterializedViewManager materializedManager = MaterializedViewManager.getInstance();
-                    materializedManager.addMaterializedInfo( c.id, ((CatalogMaterializedView) c).getMaterializedCriteria() );
-                    materializedManager.updateData( statement.getTransaction(), c.id );
-                    materializedManager.updateMaterializedTime( c.id );
-                }
-            }
-        }
-    }
-
-
-    /**
-     * Sets the idBuilder for a given map to the new starting position
-     *
-     * @param map the map to which the idBuilder belongs
-     * @param idBuilder the builder which creates new unique ids
-     */
-    private void restoreIdBuilder( Map map, AtomicInteger idBuilder ) {
-        if ( !map.isEmpty() ) {
-            idBuilder.set( Collections.max( map.keySet() ) + 1 );
-        }
-    }
-
-
-    private void restoreIdBuilder( Map map, AtomicLong idBuilder ) {
-        if ( !map.isEmpty() ) {
-            idBuilder.set( Collections.max( map.keySet() ) + 1 );
-        }
-    }
-
-
-    private void restoreAllIdBuilders() {
-        restoreIdBuilder( schemas, namespaceIdBuilder );
-        restoreIdBuilder( databases, databaseIdBuilder );
-        restoreIdBuilder( tables, entityIdBuilder );
-        restoreIdBuilder( columns, columnIdBuilder );
-        restoreIdBuilder( users, userIdBuilder );
-        restoreIdBuilder( keys, keyIdBuilder );
-        restoreIdBuilder( constraints, constraintIdBuilder );
-        restoreIdBuilder( indexes, indexIdBuilder );
-        restoreIdBuilder( adapters, adapterIdBuilder );
-        restoreIdBuilder( queryInterfaces, queryInterfaceIdBuilder );
-        restoreIdBuilder( foreignKeys, foreignKeyIdBuilder );
-        restoreIdBuilder( partitionGroups, partitionGroupIdBuilder );
-        restoreIdBuilder( partitions, partitionIdBuilder );
-
-        // Restore physical position builder
-        if ( columnPlacements.size() > 0 ) {
-            long highestPosition = 0;
-            for ( CatalogColumnPlacement placement : columnPlacements.values() ) {
-                if ( placement.physicalPosition > highestPosition ) {
-                    highestPosition = placement.physicalPosition;
-                }
-            }
-            physicalPositionBuilder.set( highestPosition + 1 );
-        }
-    }
-
-
-    /**
-     * Initiates all needed maps for adapters
-     *
-     * adapters: adapterId {@code ->} CatalogAdapter
-     * adapterName: adapterName {@code ->} CatalogAdapter
-     */
-    private void initAdapterInfo( DB db ) {
-        adapters = db.hashMap( "adapters", Serializer.INTEGER, new GenericSerializer() ).createOrOpen();
-        adapterNames = db.hashMap( "adapterNames", Serializer.STRING, new GenericSerializer() ).createOrOpen();
-    }
-
-
-    /**
-     * Initiates all needed maps for query interfaces
-     *
-     * queryInterfaces: ifaceId {@code ->} CatalogQueryInterface
-     * queryInterfaceNames: ifaceName {@code ->} CatalogQueryInterface
-     */
-    private void initQueryInterfaceInfo( DB db ) {
-        queryInterfaces = db.hashMap( "queryInterfaces", Serializer.INTEGER, new GenericSerializer() ).createOrOpen();
-        queryInterfaceNames = db.hashMap( "queryInterfaceNames", Serializer.STRING, new GenericSerializer() ).createOrOpen();
-    }
-
-
-    /**
-     * Creates all needed maps for keys and constraints
-     *
-     * keyColumns: [columnId1, columnId2,...]
keyId - * keys: keyId CatalogKey - * primaryKeys: keyId CatalogPrimaryKey - * foreignKeys: keyId CatalogForeignKey - * constraints: constraintId CatalogConstraint - * indexes: indexId {@code ->} CatalogIndex - */ - private void initKeysAndConstraintsInfo( DB db ) { - keyColumns = db.hashMap( "keyColumns", Serializer.LONG_ARRAY, Serializer.LONG ).createOrOpen(); - keys = db.hashMap( "keys", Serializer.LONG, new GenericSerializer() ).createOrOpen(); - primaryKeys = db.hashMap( "primaryKeys", Serializer.LONG, new GenericSerializer() ).createOrOpen(); - foreignKeys = db.hashMap( "foreignKeys", Serializer.LONG, new GenericSerializer() ).createOrOpen(); - constraints = db.hashMap( "constraints", Serializer.LONG, new GenericSerializer() ).createOrOpen(); - indexes = db.hashMap( "indexes", Serializer.LONG, new GenericSerializer() ).createOrOpen(); - } - - - /** - * Creates all needed maps for users - * - * users: userId {@code ->} CatalogUser - * userNames: name {@code ->} CatalogUser - */ - private void initUserInfo( DB db ) { - users = db.hashMap( "users", Serializer.INTEGER, new GenericSerializer() ).createOrOpen(); - userNames = db.hashMap( "usersNames", Serializer.STRING, new GenericSerializer() ).createOrOpen(); - } - - - /** - * Initialize the column maps - * - * columns: columnId {@code ->} CatalogColumn - * columnNames: new Object[]{databaseId, schemaId, tableId, columnName} {@code ->} CatalogColumn - * columnPlacements: new Object[]{adapterId, columnId} {@code ->} CatalogPlacement - */ - private void initColumnInfo( DB db ) { - //noinspection unchecked - columns = db.treeMap( "columns", Serializer.LONG, Serializer.JAVA ).createOrOpen(); - //noinspection unchecked - columnNames = db.treeMap( "columnNames", new SerializerArrayTuple( Serializer.LONG, Serializer.LONG, Serializer.LONG, Serializer.STRING ), Serializer.JAVA ).createOrOpen(); - //noinspection unchecked - columnPlacements = db.treeMap( "columnPlacement", new SerializerArrayTuple( Serializer.INTEGER, Serializer.LONG ), Serializer.JAVA ).createOrOpen(); - } - - - /** - * Creates all maps needed for tables - * - * tables: tableId {@code ->} CatalogEntity - * tableChildren: tableId {@code ->} [columnId, columnId,..] 
-    /**
-     * Creates all maps needed for tables
-     *
-     * tables: tableId {@code ->} CatalogEntity
-     * tableChildren: tableId {@code ->} [columnId, columnId,..]
-     * tableNames: new Object[]{databaseId, schemaId, tableName} {@code ->} CatalogEntity
-     */
-    private void initTableInfo( DB db ) {
-        //noinspection unchecked
-        tables = db.treeMap( "tables", Serializer.LONG, Serializer.JAVA ).createOrOpen();
-        tableChildren = db.hashMap( "tableChildren", Serializer.LONG, new GenericSerializer<ImmutableList<Long>>() ).createOrOpen();
-        //noinspection unchecked
-        tableNames = db.treeMap( "tableNames" )
-                .keySerializer( new SerializerArrayTuple( Serializer.LONG, Serializer.LONG, Serializer.STRING ) )
-                .valueSerializer( Serializer.JAVA )
-                .createOrOpen();
-        dataPlacements = db.treeMap( "dataPlacement", new SerializerArrayTuple( Serializer.INTEGER, Serializer.LONG ), Serializer.JAVA ).createOrOpen();
-        partitionGroups = db.treeMap( "partitionGroups", Serializer.LONG, Serializer.JAVA ).createOrOpen();
-        partitions = db.treeMap( "partitions", Serializer.LONG, Serializer.JAVA ).createOrOpen();
-
-        partitionPlacements = db.treeMap( "partitionPlacements", new SerializerArrayTuple( Serializer.INTEGER, Serializer.LONG ), Serializer.JAVA ).createOrOpen();
-
-        // Restores all Tables dependent on periodic checks like TEMPERATURE Partitioning
-        frequencyDependentTables = tables.values().stream().filter( t -> t.partitionProperty.reliesOnPeriodicChecks ).map( t -> t.id ).collect( Collectors.toSet() );
-    }
-
-
-    @SuppressWarnings("unchecked")
-    private void initGraphInfo( DB db ) {
-        graphs = db.treeMap( "graphs", Serializer.LONG, Serializer.JAVA ).createOrOpen();
-        graphNames = db.treeMap( "graphNames", new SerializerArrayTuple( Serializer.LONG, Serializer.STRING ), Serializer.JAVA ).createOrOpen();
-        graphPlacements = db.treeMap( "graphPlacements", new SerializerArrayTuple( Serializer.LONG, Serializer.INTEGER ), Serializer.JAVA ).createOrOpen();
-
-        graphMappings = db.treeMap( "graphMappings", Serializer.LONG, Serializer.JAVA ).createOrOpen();
-        graphAliases = db.treeMap( "graphAliases", Serializer.STRING, Serializer.JAVA ).createOrOpen();
-    }
-
-
-    @SuppressWarnings("unchecked")
-    private void initDocumentInfo( DB db ) {
-        collections = db.treeMap( "collections", Serializer.LONG, Serializer.JAVA ).createOrOpen();
-        collectionNames = db.treeMap( "collectionNames", new SerializerArrayTuple( Serializer.LONG, Serializer.LONG, Serializer.STRING ), Serializer.JAVA ).createOrOpen();
-
-        documentMappings = db.treeMap( "documentMappings", Serializer.LONG, Serializer.JAVA ).createOrOpen();
-
-        collectionPlacements = db.treeMap( "collectionPlacements", new SerializerArrayTuple( Serializer.LONG, Serializer.INTEGER ), Serializer.JAVA ).createOrOpen();
-    }
-
-
-    /**
-     * Creates all needed maps for schemas
-     *
-     * schemas: schemaId {@code ->} CatalogNamespace
-     * schemaChildren: schemaId {@code ->} [tableId, tableId, etc]
-     * schemaNames: new Object[]{databaseId, schemaName} {@code ->} CatalogNamespace
-     */
-    private void initSchemaInfo( DB db ) {
-        //noinspection unchecked
-        schemas = db.treeMap( "schemas", Serializer.LONG, Serializer.JAVA ).createOrOpen();
-        schemaChildren = db.hashMap( "schemaChildren", Serializer.LONG, new GenericSerializer<ImmutableList<Long>>() ).createOrOpen();
-        //noinspection unchecked
-        schemaNames = db.treeMap( "schemaNames", new SerializerArrayTuple( Serializer.LONG, Serializer.STRING ), Serializer.JAVA ).createOrOpen();
-    }
-
-
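// A sketch of the restore step at the end of initTableInfo(...) above: the set
// of tables needing periodic checks (e.g., TEMPERATURE partitioning) is not
// stored separately but rebuilt by filtering the persisted tables on startup.
// Stand-in record type; only the stream shape mirrors the removed code.
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;

class FrequencyRestoreSketch {
    record Table( long id, boolean reliesOnPeriodicChecks ) {}

    static Set<Long> frequencyDependentTables( Map<Long, Table> tables ) {
        return tables.values().stream()
                .filter( Table::reliesOnPeriodicChecks )
                .map( Table::id )
                .collect( Collectors.toSet() );
    }
}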
-    /**
-     * Creates maps for databases
-     *
-     * databases: databaseId {@code ->} CatalogDatabase
-     * databaseNames: databaseName {@code ->} CatalogDatabase
-     * databaseChildren: databaseId {@code ->} [schemaId, schemaId,...]
-     */
-    private void initDatabaseInfo( DB db ) {
-        //noinspection unchecked
-        databases = db.treeMap( "databases", Serializer.LONG, Serializer.JAVA ).createOrOpen();
-        //noinspection unchecked
-        databaseNames = db.treeMap( "databaseNames", Serializer.STRING, Serializer.JAVA ).createOrOpen();
-        databaseChildren = db.hashMap( "databaseChildren", Serializer.LONG, new GenericSerializer<ImmutableList<Long>>() ).createOrOpen();
-    }
-
-
-    /**
-     * Fills the catalog database with default data, skips if data is already inserted
-     */
-    private void insertDefaultData() throws GenericCatalogException, UnknownUserException, UnknownTableException, UnknownSchemaException, UnknownAdapterException, UnknownColumnException {
-
-        //////////////
-        // init users
-        int systemId;
-        if ( !userNames.containsKey( "system" ) ) {
-            systemId = addUser( "system", "" );
-        } else {
-            systemId = getUser( "system" ).id;
-        }
-
-        if ( !userNames.containsKey( "pa" ) ) {
-            addUser( "pa", "" );
-        }
-        Catalog.defaultUserId = systemId;
-
-        //////////////
-        // init schema
-
-        long schemaId;
-        if ( !schemaNames.containsKey( new Object[]{ "public" } ) ) {
-            schemaId = addNamespace( "public", NamespaceType.getDefault(), false );
-        } else {
-            schemaId = getNamespace( "public" ).id;
-        }
-
-        //////////////
-        // init adapters
-        if ( adapterNames.size() == 0 ) {
-            // Deploy default store
-            addAdapter( "hsqldb", defaultStore.getAdapterName(), AdapterType.STORE, defaultStore.getDefaultSettings() );
-
-            // Deploy default CSV source
-            addAdapter( "hr", defaultSource.getAdapterName(), AdapterType.SOURCE, defaultSource.getDefaultSettings() );
-
-            // init schema
-            CatalogAdapter csv = getAdapter( "hr" );
-            if ( !testMode ) {
-                if ( !tableNames.containsKey( new Object[]{ schemaId, "depts" } ) ) {
-                    addTable( "depts", schemaId, systemId, EntityType.SOURCE, false );
-                }
-                if ( !tableNames.containsKey( new Object[]{ schemaId, "emps" } ) ) {
-                    addTable( "emps", schemaId, systemId, EntityType.SOURCE, false );
-                }
-                if ( !tableNames.containsKey( new Object[]{ schemaId, "emp" } ) ) {
-                    addTable( "emp", schemaId, systemId, EntityType.SOURCE, false );
-                }
-                if ( !tableNames.containsKey( new Object[]{ schemaId, "work" } ) ) {
-                    addTable( "work", schemaId, systemId, EntityType.SOURCE, false );
-                    addDefaultCsvColumns( csv );
-                }
-            }
-        }
-
-        try {
-            commit();
-        } catch ( NoTablePrimaryKeyException e ) {
-            throw new RuntimeException( e );
-        }
-
-    }
-
-
-    @Override
-    public void restoreInterfacesIfNecessary() {
-        ////////////////////////
-        // init query interfaces
-        if ( queryInterfaceNames.size() == 0 ) {
-            QueryInterfaceManager.getREGISTER().values().forEach( i -> addQueryInterface( i.interfaceName, i.clazz.getName(), i.defaultSettings ) );
-
-            try {
-                commit();
-            } catch ( NoTablePrimaryKeyException e ) {
-                throw new RuntimeException( e );
-            }
-        }
-    }
-
-
-    /**
-     * Initializes the default columns for the csv files
-     */
-    private void addDefaultCsvColumns( CatalogAdapter csv ) throws UnknownSchemaException, UnknownTableException, GenericCatalogException, UnknownColumnException {
-        LogicalNamespace schema = getNamespace( "public" );
-        LogicalTable depts = getTable( schema.id, "depts" );
-
-        addDefaultCsvColumn( csv, depts, "deptno", PolyType.INTEGER, null, 1, null );
-        addDefaultCsvColumn( csv, depts, "name", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 2, 20 );
-
-        LogicalTable emps = getTable( schema.id, "emps" );
-        addDefaultCsvColumn( csv, emps, "empid", PolyType.INTEGER, null, 1, null );
-        addDefaultCsvColumn( csv, emps, "deptno", PolyType.INTEGER, null, 2, null );
-        addDefaultCsvColumn(
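// insertDefaultData() above is deliberately idempotent: every object is looked
// up first and only created when missing, so re-running the seeding after a
// restart is a no-op. The recurring guard-then-create shape, as a hypothetical
// generic helper (illustration only, not part of the catalog API):
import java.util.HashMap;
import java.util.Map;
import java.util.function.Supplier;

class SeedSketch {
    static <K, V> V getOrSeed( Map<K, V> map, K key, Supplier<V> create ) {
        return map.computeIfAbsent( key, k -> create.get() );  // create only if absent
    }

    public static void main( String[] args ) {
        Map<String, Integer> users = new HashMap<>();
        int systemId = getOrSeed( users, "system", () -> 0 );
        int secondRun = getOrSeed( users, "system", () -> 99 );  // unchanged on re-run
        System.out.println( systemId + " " + secondRun );        // 0 0
    }
}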
csv, emps, "name", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 3, 20 ); - addDefaultCsvColumn( csv, emps, "salary", PolyType.INTEGER, null, 4, null ); - addDefaultCsvColumn( csv, emps, "commission", PolyType.INTEGER, null, 5, null ); - - LogicalTable emp = getTable( schema.id, "emp" ); - addDefaultCsvColumn( csv, emp, "employeeno", PolyType.INTEGER, null, 1, null ); - addDefaultCsvColumn( csv, emp, "age", PolyType.INTEGER, null, 2, null ); - addDefaultCsvColumn( csv, emp, "gender", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 3, 20 ); - addDefaultCsvColumn( csv, emp, "maritalstatus", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 4, 20 ); - addDefaultCsvColumn( csv, emp, "worklifebalance", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 5, 20 ); - addDefaultCsvColumn( csv, emp, "education", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 6, 20 ); - addDefaultCsvColumn( csv, emp, "monthlyincome", PolyType.INTEGER, null, 7, null ); - addDefaultCsvColumn( csv, emp, "relationshipjoy", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 8, 20 ); - addDefaultCsvColumn( csv, emp, "workingyears", PolyType.INTEGER, null, 9, null ); - addDefaultCsvColumn( csv, emp, "yearsatcompany", PolyType.INTEGER, null, 10, null ); - - LogicalTable work = getTable( schema.id, "work" ); - addDefaultCsvColumn( csv, work, "employeeno", PolyType.INTEGER, null, 1, null ); - addDefaultCsvColumn( csv, work, "educationfield", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 2, 20 ); - addDefaultCsvColumn( csv, work, "jobinvolvement", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 3, 20 ); - addDefaultCsvColumn( csv, work, "joblevel", PolyType.INTEGER, null, 4, null ); - addDefaultCsvColumn( csv, work, "jobrole", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 5, 30 ); - addDefaultCsvColumn( csv, work, "businesstravel", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 6, 20 ); - addDefaultCsvColumn( csv, work, "department", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 7, 25 ); - addDefaultCsvColumn( csv, work, "attrition", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 8, 20 ); - addDefaultCsvColumn( csv, work, "dailyrate", PolyType.INTEGER, null, 9, null ); - - // set all needed primary keys - addPrimaryKey( depts.id, Collections.singletonList( getColumn( depts.id, "deptno" ).id ) ); - addPrimaryKey( emps.id, Collections.singletonList( getColumn( emps.id, "empid" ).id ) ); - addPrimaryKey( emp.id, Collections.singletonList( getColumn( emp.id, "employeeno" ).id ) ); - addPrimaryKey( work.id, Collections.singletonList( getColumn( work.id, "employeeno" ).id ) ); - - // set foreign keys - addForeignKey( - emps.id, - ImmutableList.of( getColumn( emps.id, "deptno" ).id ), - depts.id, - ImmutableList.of( getColumn( depts.id, "deptno" ).id ), - "fk_emps_depts", - ForeignKeyOption.NONE, - ForeignKeyOption.NONE ); - addForeignKey( - work.id, - ImmutableList.of( getColumn( work.id, "employeeno" ).id ), - emp.id, - ImmutableList.of( getColumn( emp.id, "employeeno" ).id ), - "fk_work_emp", - ForeignKeyOption.NONE, - ForeignKeyOption.NONE ); - } - - - private void addDefaultCsvColumn( CatalogAdapter csv, LogicalTable table, String name, PolyType type, Collation collation, int position, Integer length ) { - if ( !checkIfExistsColumn( table.id, name ) ) { - long colId = addColumn( name, table.id, position, type, null, length, null, null, null, false, collation ); - String filename = table.name + ".csv"; - if ( table.name.equals( "emp" ) || table.name.equals( "work" ) ) { - filename += ".gz"; - } - - addColumnPlacement( csv.id, colId, 
PlacementType.AUTOMATIC, filename, table.name, name ); - updateColumnPlacementPhysicalPosition( csv.id, colId, position ); - - long partitionId = table.partitionProperty.partitionIds.get( 0 ); - addPartitionPlacement( table.namespaceId, csv.id, table.id, partitionId, PlacementType.AUTOMATIC, filename, table.name, DataPlacementRole.UPTODATE ); - } - } - - - private void addDefaultColumn( CatalogAdapter adapter, LogicalTable table, String name, PolyType type, Collation collation, int position, Integer length ) { - if ( !checkIfExistsColumn( table.id, name ) ) { - long colId = addColumn( name, table.id, position, type, null, length, null, null, null, false, collation ); - addColumnPlacement( adapter.id, colId, PlacementType.AUTOMATIC, "col" + colId, table.name, name ); - updateColumnPlacementPhysicalPosition( adapter.id, colId, position ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public void validateColumns() { - CatalogValidator validator = new CatalogValidator(); - db.rollback(); - try { - validator.validate(); - } catch ( GenericCatalogException e ) { - throw new RuntimeException( e ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public void close() { - db.close(); - } - - - /** - * {@inheritDoc} - */ - @Override - public void clear() { - db.getAll().clear(); - initDBLayout( db ); - restoreAllIdBuilders(); - } - - - @Override - public Snapshot getSnapshot( long id ) { - return null; - } - - - /** - * {@inheritDoc} - */ - public long addDatabase( String name, int ownerId, String ownerName, long defaultSchemaId, String defaultSchemaName ) { - long id = databaseIdBuilder.getAndIncrement(); - CatalogDatabase database = new CatalogDatabase( id, name, ownerId, ownerName, defaultSchemaId, defaultSchemaName ); - synchronized ( this ) { - databases.put( id, database ); - databaseNames.put( name, database ); - databaseChildren.put( id, ImmutableList.builder().build() ); - } - listeners.firePropertyChange( "database", null, database ); - return id; - } - - - /** - * {@inheritDoc} - */ - public void deleteDatabase( long databaseId ) { - CatalogDatabase database = getDatabase( databaseId ); - if ( database != null ) { - synchronized ( this ) { - databases.remove( databaseId ); - databaseNames.remove( database.name ); - databaseChildren.remove( databaseId ); - } - } - } - - - /** - * {@inheritDoc} - */ - @Override - public int addUser( String name, String password ) { - CatalogUser user = new CatalogUser( userIdBuilder.getAndIncrement(), name, password ); - synchronized ( this ) { - users.put( user.id, user ); - userNames.put( user.name, user ); - } - listeners.firePropertyChange( "user", null, user ); - return user.id; - } - - - /** - * {@inheritDoc} - */ - public List getDatabases( Pattern pattern ) { - if ( pattern != null ) { - if ( pattern.containsWildcards ) { - return databaseNames.entrySet().stream().filter( e -> e.getKey().matches( pattern.toRegex() ) ).map( Entry::getValue ).sorted().collect( Collectors.toList() ); - } else { - if ( databaseNames.containsKey( pattern.pattern ) ) { - return Collections.singletonList( databaseNames.get( pattern.pattern ) ); - } else { - return new ArrayList<>(); - } - } - } else { - return new ArrayList<>( databases.values() ); - } - } - - - /** - * {@inheritDoc} - */ - private CatalogDatabase getDatabase( long databaseId ) { - try { - return Objects.requireNonNull( databases.get( databaseId ) ); - } catch ( NullPointerException e ) { - throw new UnknownDatabaseIdRuntimeException( databaseId ); - } - } - - - /** - * {@inheritDoc} - */ - 
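// Every mutator above (addDatabase, deleteDatabase, addUser, ...) shares one
// recipe: allocate an id from an atomic counter, update the primary map and its
// name index inside a single synchronized block so they never diverge, then
// notify listeners. A self-contained sketch with stand-in types:
import java.beans.PropertyChangeSupport;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;

class CatalogMutationSketch {
    record User( int id, String name ) {}

    private final Map<Integer, User> users = new HashMap<>();
    private final Map<String, User> userNames = new HashMap<>();
    private final AtomicInteger userIdBuilder = new AtomicInteger();
    private final PropertyChangeSupport listeners = new PropertyChangeSupport( this );

    int addUser( String name ) {
        User user = new User( userIdBuilder.getAndIncrement(), name );
        synchronized ( this ) {    // keep the id map and the name index in step
            users.put( user.id(), user );
            userNames.put( user.name(), user );
        }
        listeners.firePropertyChange( "user", null, user );
        return user.id();
    }
}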
@Override - public @NonNull List getNamespaces( Pattern name ) { - if ( name != null ) { - return schemaNames.values().stream().filter( s -> s.name.matches( name.toRegex() ) ).collect( Collectors.toList() ); - } - return new ArrayList<>(); - } - - - /** - * {@inheritDoc} - */ - @Override - public LogicalNamespace getNamespace( long id ) { - try { - return Objects.requireNonNull( schemas.get( id ) ); - } catch ( NullPointerException e ) { - throw new UnknownSchemaIdRuntimeException( id ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public LogicalNamespace getNamespace( final String schemaName ) throws UnknownSchemaException { - String name = schemaName.toLowerCase(); - try { - return Objects.requireNonNull( schemaNames.get( new Object[]{ name } ) ); - } catch ( NullPointerException e ) { - throw new UnknownSchemaException( schemaName ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public long addNamespace( String name, NamespaceType namespaceType, boolean caseSensitive ) { - name = name.toLowerCase(); - CatalogUser owner = getUser( ownerId ); - long id = namespaceIdBuilder.getAndIncrement(); - LogicalNamespace schema = new LogicalNamespace( id, name, ownerId, owner.name, namespaceType, namespaceType == NamespaceType.DOCUMENT || namespaceType == NamespaceType.GRAPH ); - synchronized ( this ) { - schemas.put( id, schema ); - schemaNames.put( new Object[]{ name }, schema ); - schemaChildren.put( id, ImmutableList.builder().build() ); - } - listeners.firePropertyChange( "namespace", null, schema ); - return id; - } - - - /** - * {@inheritDoc} - */ - @Override - public boolean checkIfExistsNamespace( String name ) { - name = name.toLowerCase(); - return schemaNames.containsKey( new Object[]{ name } ); - } - - - /** - * {@inheritDoc} - */ - @Override - public void renameNamespace( long schemaId, String name ) { - name = name.toLowerCase(); - try { - LogicalNamespace old = Objects.requireNonNull( schemas.get( schemaId ) ); - LogicalNamespace schema = new LogicalNamespace( old.id, name, old.ownerId, old.ownerName, old.namespaceType, false ); - - synchronized ( this ) { - schemas.replace( schemaId, schema ); - schemaNames.remove( new Object[]{ old.name } ); - schemaNames.put( new Object[]{ name }, schema ); - } - listeners.firePropertyChange( "schema", old, schema ); - } catch ( NullPointerException e ) { - throw new UnknownSchemaIdRuntimeException( schemaId ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public long addGraph( String name, List stores, boolean modifiable, boolean ifNotExists, boolean replace ) { - if ( getGraphs( new Pattern( name ) ).size() != 0 && !ifNotExists ) { - throw new GraphAlreadyExistsException( name ); - } - - long id = addNamespace( name, NamespaceType.GRAPH, false ); - - LogicalGraph graph = new LogicalGraph( id, name, Catalog.defaultUserId, modifiable, ImmutableList.of(), true ); - - synchronized ( this ) { - graphs.put( id, graph ); - graphNames.put( new Object[]{ name }, graph ); - } - - listeners.firePropertyChange( "graph", null, graph ); - return id; - } - - - /** - * {@inheritDoc} - */ - @Override - public void addGraphAlias( long graphId, String alias, boolean ifNotExists ) { - LogicalGraph graph = Objects.requireNonNull( getGraph( graphId ) ); - - if ( graphAliases.containsKey( alias ) ) { - if ( !ifNotExists ) { - throw new RuntimeException( "Error while creating alias: " + alias ); - } - return; - } - - synchronized ( this ) { - graphAliases.put( alias, graph ); - } - listeners.firePropertyChange( "graphAlias", null, alias ); - 
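// getDatabases(...) and getNamespaces(...) above branch on whether the Pattern
// carries wildcards: regex filtering when it does, a direct (and much cheaper)
// map lookup when it does not. A sketch assuming a '%'-style wildcard syntax;
// the real Pattern.toRegex() translation is not shown in this patch:
import java.util.List;
import java.util.Map;

class PatternLookupSketch {
    static List<String> lookup( Map<String, String> byName, String pattern ) {
        if ( pattern.contains( "%" ) ) {                       // assumed wildcard marker
            String regex = pattern.replace( "%", ".*" );
            return byName.keySet().stream().filter( n -> n.matches( regex ) ).toList();
        }
        return byName.containsKey( pattern ) ? List.of( pattern ) : List.of();
    }
}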
} - - - /** - * {@inheritDoc} - */ - @Override - public void removeGraphAlias( long graphId, String alias, boolean ifExists ) { - if ( !graphAliases.containsKey( alias ) ) { - if ( !ifExists ) { - throw new RuntimeException( "Error while removing alias: " + alias ); - } - return; - } - synchronized ( this ) { - graphAliases.remove( alias ); - } - listeners.firePropertyChange( "graphAlias", alias, null ); - } - - - /** - * {@inheritDoc} - */ - @Override - public CatalogGraphMapping getGraphMapping( long graphId ) { - return Objects.requireNonNull( graphMappings.get( graphId ) ); - } - - - /** - * {@inheritDoc} - */ - @Override - public void addGraphLogistics( long id, List stores, boolean onlyPlacement ) throws GenericCatalogException, UnknownTableException, UnknownColumnException { - /// --- nodes - // table id nodes -> id, node, labels - long nodesId; - if ( !onlyPlacement ) { - nodesId = addTable( "_nodes_", id, Catalog.defaultUserId, EntityType.ENTITY, true ); - } else { - nodesId = getTable( id, "_nodes_" ).id; - } - - stores.forEach( store -> addDataPlacement( store.getAdapterId(), nodesId ) ); - - long idNodeId; - long labelNodeId; - if ( !onlyPlacement ) { - idNodeId = addColumn( "_id_", nodesId, 0, PolyType.VARCHAR, null, 255, null, null, null, false, Collation.getDefaultCollation() ); - labelNodeId = addColumn( "_label_", nodesId, 1, PolyType.VARCHAR, null, 255, null, null, null, false, Collation.getDefaultCollation() ); - } else { - idNodeId = getColumn( nodesId, "_id_" ).id; - labelNodeId = getColumn( nodesId, "_label_" ).id; - } - - for ( DataStore s : stores ) { - addColumnPlacement( - s.getAdapterId(), - idNodeId, - PlacementType.AUTOMATIC, - null, - null, - null - ); - - addColumnPlacement( - s.getAdapterId(), - labelNodeId, - PlacementType.AUTOMATIC, - null, - null, - null - ); - } - - if ( !onlyPlacement ) { - addPrimaryKey( nodesId, List.of( idNodeId, labelNodeId ) ); - } - - /// --- node properties - - // table id nodes -> id, node, labels - long nodesPropertyId; - if ( !onlyPlacement ) { - nodesPropertyId = addTable( "_n_properties_", id, Catalog.defaultUserId, EntityType.ENTITY, true ); - } else { - nodesPropertyId = getTable( id, "_n_properties_" ).id; - } - - stores.forEach( store -> addDataPlacement( store.getAdapterId(), nodesPropertyId ) ); - - long idNodesPropertyId; - long keyNodePropertyId; - long valueNodePropertyId; - - if ( !onlyPlacement ) { - idNodesPropertyId = addColumn( "_id_", nodesPropertyId, 0, PolyType.VARCHAR, null, 255, null, null, null, false, Collation.getDefaultCollation() ); - keyNodePropertyId = addColumn( "_key_", nodesPropertyId, 1, PolyType.VARCHAR, null, 255, null, null, null, false, Collation.getDefaultCollation() ); - valueNodePropertyId = addColumn( "_value_", nodesPropertyId, 2, PolyType.VARCHAR, null, 255, null, null, null, false, Collation.getDefaultCollation() ); - } else { - idNodesPropertyId = getColumn( nodesPropertyId, "_id_" ).id; - keyNodePropertyId = getColumn( nodesPropertyId, "_key_" ).id; - valueNodePropertyId = getColumn( nodesPropertyId, "_value_" ).id; - } - - for ( DataStore s : stores ) { - addColumnPlacement( - s.getAdapterId(), - idNodesPropertyId, - PlacementType.AUTOMATIC, - null, - null, - null - ); - - addColumnPlacement( - s.getAdapterId(), - keyNodePropertyId, - PlacementType.AUTOMATIC, - null, - null, - null - ); - - addColumnPlacement( - s.getAdapterId(), - valueNodePropertyId, - PlacementType.AUTOMATIC, - null, - null, - null - ); - } - - if ( !onlyPlacement ) { - addPrimaryKey( nodesPropertyId, List.of( 
idNodesPropertyId, keyNodePropertyId ) ); - } - - /// --- edges - - // table id relationships -> id, rel, labels - long edgesId; - if ( !onlyPlacement ) { - edgesId = addTable( "_edges_", id, Catalog.defaultUserId, EntityType.ENTITY, true ); - } else { - edgesId = getTable( id, "_edges_" ).id; - } - - stores.forEach( store -> addDataPlacement( store.getAdapterId(), edgesId ) ); - - long idEdgeId; - long labelEdgeId; - long sourceEdgeId; - long targetEdgeId; - - if ( !onlyPlacement ) { - idEdgeId = addColumn( - "_id_", - edgesId, - 0, - PolyType.VARCHAR, - null, - 36, - null, - null, - null, - false, - Collation.getDefaultCollation() ); - labelEdgeId = addColumn( - "_label_", - edgesId, - 1, - PolyType.VARCHAR, - null, - 255, - null, - null, - null, - false, - Collation.getDefaultCollation() ); - sourceEdgeId = addColumn( - "_l_id_", - edgesId, - 2, - PolyType.VARCHAR, - null, - 36, - null, - null, - null, - false, - Collation.getDefaultCollation() ); - targetEdgeId = addColumn( - "_r_id_", - edgesId, - 3, - PolyType.VARCHAR, - null, - 36, - null, - null, - null, - false, - Collation.getDefaultCollation() ); - } else { - idEdgeId = getColumn( edgesId, "_id_" ).id; - labelEdgeId = getColumn( edgesId, "_label_" ).id; - sourceEdgeId = getColumn( edgesId, "_l_id_" ).id; - targetEdgeId = getColumn( edgesId, "_r_id_" ).id; - } - - for ( DataStore store : stores ) { - addColumnPlacement( - store.getAdapterId(), - idEdgeId, - PlacementType.AUTOMATIC, - null, - null, - null - ); - - addColumnPlacement( - store.getAdapterId(), - labelEdgeId, - PlacementType.AUTOMATIC, - null, - null, - null - ); - - addColumnPlacement( - store.getAdapterId(), - sourceEdgeId, - PlacementType.AUTOMATIC, - null, - null, - null - ); - - addColumnPlacement( - store.getAdapterId(), - targetEdgeId, - PlacementType.AUTOMATIC, - null, - null, - null - ); - } - - if ( !onlyPlacement ) { - addPrimaryKey( edgesId, Collections.singletonList( idEdgeId ) ); - } - - /// --- edge properties - - // table id nodes -> id, node, labels - long edgesPropertyId; - if ( !onlyPlacement ) { - edgesPropertyId = addTable( "_properties_", id, Catalog.defaultUserId, EntityType.ENTITY, true ); - } else { - edgesPropertyId = getTable( id, "_properties_" ).id; - } - - stores.forEach( store -> addDataPlacement( store.getAdapterId(), edgesPropertyId ) ); - - long idEdgePropertyId; - long keyEdgePropertyId; - long valueEdgePropertyId; - - if ( !onlyPlacement ) { - idEdgePropertyId = addColumn( - "_id_", - edgesPropertyId, - 0, - PolyType.VARCHAR, - null, - 255, - null, - null, - null, - false, - Collation.getDefaultCollation() ); - keyEdgePropertyId = addColumn( - "_key_", - edgesPropertyId, - 1, - PolyType.VARCHAR, - null, - 255, - null, - null, - null, - false, - Collation.getDefaultCollation() ); - valueEdgePropertyId = addColumn( - "_value_", - edgesPropertyId, - 2, - PolyType.VARCHAR, - null, - 255, - null, - null, - null, - false, - Collation.getDefaultCollation() ); - } else { - idEdgePropertyId = getColumn( edgesPropertyId, "_id_" ).id; - keyEdgePropertyId = getColumn( edgesPropertyId, "_key_" ).id; - valueEdgePropertyId = getColumn( edgesPropertyId, "_value_" ).id; - } - - for ( DataStore s : stores ) { - addColumnPlacement( - s.getAdapterId(), - idEdgePropertyId, - PlacementType.AUTOMATIC, - null, - null, - null - ); - - addColumnPlacement( - s.getAdapterId(), - keyEdgePropertyId, - PlacementType.AUTOMATIC, - null, - null, - null - ); - - addColumnPlacement( - s.getAdapterId(), - valueEdgePropertyId, - PlacementType.AUTOMATIC, - null, - null, 
- null - ); - } - - if ( !onlyPlacement ) { - addPrimaryKey( edgesPropertyId, List.of( idEdgePropertyId, keyEdgePropertyId ) ); - - CatalogGraphMapping mapping = new CatalogGraphMapping( - id, - nodesId, - idNodeId, - labelNodeId, - nodesPropertyId, - idNodesPropertyId, - keyNodePropertyId, - valueNodePropertyId, - edgesId, - idEdgeId, - labelEdgeId, - sourceEdgeId, - targetEdgeId, - edgesPropertyId, - idEdgePropertyId, - keyEdgePropertyId, - valueEdgePropertyId ); - - graphMappings.put( id, mapping ); - } - - } - - - private void removeGraphLogistics( long graphId ) { - if ( !graphMappings.containsKey( graphId ) ) { - throw new UnknownGraphException( graphId ); - } - - deleteNamespace( graphId ); - } - - - /** - * {@inheritDoc} - */ - @Override - public void deleteGraph( long id ) { - if ( !graphs.containsKey( id ) ) { - throw new UnknownGraphException( id ); - } - - LogicalGraph old = Objects.requireNonNull( graphs.get( id ) ); - - removeGraphLogistics( id ); - - synchronized ( this ) { - old.placements.forEach( a -> graphPlacements.remove( new Object[]{ old.id, a } ) ); - graphs.remove( id ); - graphNames.remove( new Object[]{ old.name } ); - graphMappings.remove( id ); - } - listeners.firePropertyChange( "graph", old, null ); - } - - - /** - * {@inheritDoc} - */ - @Override - public LogicalGraph getGraph( long id ) { - if ( !graphs.containsKey( id ) ) { - throw new UnknownGraphException( id ); - } - return graphs.get( id ); - } - - - /** - * {@inheritDoc} - */ - @Override - public List getGraphs( Pattern graphName ) { - if ( graphName != null ) { - return ImmutableList.copyOf( - Stream.concat( - graphAliases.values().stream(), - graphs.values().stream() ).filter( g -> g.name.matches( graphName.pattern.toLowerCase() ) ) - .collect( Collectors.toList() ) ); - } else { - return ImmutableList.copyOf( graphs.values() ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public void deleteNamespace( long schemaId ) { - LogicalNamespace schema = getNamespace( schemaId ); - synchronized ( this ) { - schemaNames.remove( new Object[]{ schema.name } ); - - for ( Long id : Objects.requireNonNull( schemaChildren.get( schemaId ) ) ) { - deleteTable( id ); - } - schemaChildren.remove( schemaId ); - schemas.remove( schemaId ); - - } - listeners.firePropertyChange( "Schema", schema, null ); - } - - - /** - * {@inheritDoc} - */ - @Override - public List getTables( long schemaId, Pattern tableNamePattern ) { - if ( schemas.containsKey( schemaId ) ) { - - LogicalNamespace schema = Objects.requireNonNull( schemas.get( schemaId ) ); - if ( tableNamePattern != null ) { - return Collections.singletonList( tableNames.get( new Object[]{ schemaId, tableNamePattern.pattern } ) ); - } else { - return new ArrayList<>( tableNames.prefixSubMap( new Object[]{ schemaId } ).values() ); - } - } - return new ArrayList<>(); - } - - - /** - * {@inheritDoc} - */ - @Override - public List getTables( Pattern schemaNamePattern, Pattern tableNamePattern ) { - if ( schemaNamePattern != null && tableNamePattern != null ) { - LogicalNamespace schema = schemaNames.get( new Object[]{ schemaNamePattern.pattern } ); - if ( schema != null ) { - return Collections.singletonList( Objects.requireNonNull( tableNames.get( new Object[]{ schema.id, tableNamePattern.pattern } ) ) ); - } - } else if ( schemaNamePattern != null ) { - LogicalNamespace schema = schemaNames.get( new Object[]{ schemaNamePattern.pattern } ); - if ( schema != null ) { - return new ArrayList<>( tableNames.prefixSubMap( new Object[]{ schema.id } ).values() ); - } - } 
else { - return new ArrayList<>( tableNames.values() ); - } - - return new ArrayList<>(); - } - - - /** - * {@inheritDoc} - */ - @Override - public LogicalTable getTable( long tableId ) { - try { - return Objects.requireNonNull( tables.get( tableId ) ); - } catch ( NullPointerException e ) { - throw new UnknownTableIdRuntimeException( tableId ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public LogicalTable getTable( long schemaId, String tableName ) throws UnknownTableException { - try { - LogicalNamespace schema = getNamespace( schemaId ); - if ( !schema.caseSensitive ) { - tableName = tableName.toLowerCase(); - } - return Objects.requireNonNull( tableNames.get( new Object[]{ schemaId, tableName } ) ); - } catch ( NullPointerException e ) { - throw new UnknownTableException( schemaId, tableName ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public LogicalTable getTableFromPartition( long partitionId ) { - return getTable( getPartition( partitionId ).tableId ); - } - - - /** - * {@inheritDoc} - */ - @Override - public LogicalTable getTable( String schemaName, String tableName ) throws UnknownTableException, UnknownSchemaException { - try { - LogicalNamespace schema = getNamespace( schemaName ); - if ( !schema.caseSensitive ) { - tableName = tableName.toLowerCase(); - } - - return Objects.requireNonNull( tableNames.get( new Object[]{ schema.id, tableName } ) ); - } catch ( NullPointerException e ) { - throw new UnknownTableException( schemaName, tableName ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public long addTable( String name, long namespaceId, int ownerId, EntityType entityType, boolean modifiable ) { - long id = entityIdBuilder.getAndIncrement(); - LogicalNamespace schema = getNamespace( namespaceId ); - if ( !schema.caseSensitive ) { - name = name.toLowerCase(); - } - - try { - //Technically every Table is partitioned. 
But tables classified as UNPARTITIONED only consist of one PartitionGroup and one large partition - List partitionGroupIds = new ArrayList<>(); - partitionGroupIds.add( addPartitionGroup( id, "full", namespaceId, PartitionType.NONE, 1, new ArrayList<>(), true ) ); - //get All(only one) PartitionGroups and then get all partitionIds for each PG and add them to completeList of partitionIds - CatalogPartitionGroup defaultUnpartitionedGroup = getPartitionGroup( partitionGroupIds.get( 0 ) ); - - PartitionProperty partitionProperty = PartitionProperty.builder() - .partitionType( PartitionType.NONE ) - .isPartitioned( false ) - .partitionGroupIds( ImmutableList.copyOf( partitionGroupIds ) ) - .partitionIds( ImmutableList.copyOf( defaultUnpartitionedGroup.partitionIds ) ) - .reliesOnPeriodicChecks( false ) - .build(); - - LogicalTable table = new LogicalTable( - id, - name, - ImmutableList.of(), - namespaceId, - ownerId, - entityType, - null, - ImmutableList.of(), - modifiable, - partitionProperty, - ImmutableList.of() ); - - updateEntityLogistics( name, namespaceId, id, schema, table ); - if ( schema.namespaceType != NamespaceType.DOCUMENT ) { - openTable = id; - } - - } catch ( GenericCatalogException e ) { - throw new RuntimeException( "Error when adding table " + name, e ); - } - return id; - } - - - /** - * {@inheritDoc} - */ - @Override - public long addView( String name, long namespaceId, int ownerId, EntityType entityType, boolean modifiable, AlgNode definition, AlgCollation algCollation, Map> underlyingTables, AlgDataType fieldList, String query, QueryLanguage language ) { - long id = entityIdBuilder.getAndIncrement(); - LogicalNamespace schema = getNamespace( namespaceId ); - - if ( !schema.caseSensitive ) { - name = name.toLowerCase(); - } - - PartitionProperty partitionProperty = PartitionProperty.builder() - .partitionType( PartitionType.NONE ) - .reliesOnPeriodicChecks( false ) - .partitionIds( ImmutableList.copyOf( new ArrayList<>() ) ) - .partitionGroupIds( ImmutableList.copyOf( new ArrayList<>() ) ) - .build(); - - if ( entityType != EntityType.VIEW ) { - // Should not happen, addViewTable is only called with EntityType.View - throw new RuntimeException( "addViewTable is only possible with EntityType = VIEW" ); - } - CatalogView viewTable = new CatalogView( - id, - name, - ImmutableList.of(), - namespaceId, - ownerId, - entityType, - query,//definition, - null, - ImmutableList.of(), - modifiable, - partitionProperty, - algCollation, - ImmutableList.of(), - underlyingTables, - language.getSerializedName() //fieldList - ); - addConnectedViews( underlyingTables, viewTable.id ); - updateEntityLogistics( name, namespaceId, id, schema, viewTable ); - nodeInfo.put( id, definition ); - - return id; - } - - - /** - * {@inheritDoc} - */ - @Override - public long addMaterializedView( String name, long namespaceId, int ownerId, EntityType entityType, boolean modifiable, AlgNode definition, AlgCollation algCollation, Map> underlyingTables, AlgDataType fieldList, MaterializedCriteria materializedCriteria, String query, QueryLanguage language, boolean ordered ) throws GenericCatalogException { - long id = entityIdBuilder.getAndIncrement(); - LogicalNamespace schema = getNamespace( namespaceId ); - - if ( !schema.caseSensitive ) { - name = name.toLowerCase(); - } - - // Technically every Table is partitioned. 
But tables classified as UNPARTITIONED only consist of one PartitionGroup and one large partition - List partitionGroupIds = new ArrayList<>(); - partitionGroupIds.add( addPartitionGroup( id, "full", namespaceId, PartitionType.NONE, 1, new ArrayList<>(), true ) ); - - // Get the single PartitionGroup and consequently retrieve all contained partitionIds to add them to completeList of partitionIds in the partitionProperty - CatalogPartitionGroup defaultUnpartitionedGroup = getPartitionGroup( partitionGroupIds.get( 0 ) ); - - PartitionProperty partitionProperty = PartitionProperty.builder() - .partitionType( PartitionType.NONE ) - .isPartitioned( false ) - .partitionGroupIds( ImmutableList.copyOf( partitionGroupIds ) ) - .partitionIds( ImmutableList.copyOf( defaultUnpartitionedGroup.partitionIds ) ) - .reliesOnPeriodicChecks( false ) - .build(); - - if ( entityType == EntityType.MATERIALIZED_VIEW ) { - Map> map = new HashMap<>(); - for ( Entry> e : underlyingTables.entrySet() ) { - if ( map.put( e.getKey(), ImmutableList.copyOf( e.getValue() ) ) != null ) { - throw new IllegalStateException( "Duplicate key" ); - } - } - CatalogMaterializedView materializedViewTable = new CatalogMaterializedView( - id, - name, - List.of(), - namespaceId, - ownerId, - entityType, - query, - null, - List.of(), - modifiable, - partitionProperty, - algCollation, - List.of(), - Map.copyOf( map ), - language.getSerializedName(), - materializedCriteria, - ordered - ); - addConnectedViews( underlyingTables, materializedViewTable.id ); - updateEntityLogistics( name, namespaceId, id, schema, materializedViewTable ); - - nodeInfo.put( id, definition ); - } else { - // Should not happen, addViewTable is only called with EntityType.View - throw new RuntimeException( "addMaterializedViewTable is only possible with EntityType = MATERIALIZED_VIEW" ); - } - return id; - } - - - /** - * Update all information after the addition of all kind of tables - */ - private void updateEntityLogistics( String name, long namespaceId, long id, LogicalNamespace schema, LogicalTable entity ) { - synchronized ( this ) { - tables.put( id, entity ); - tableChildren.put( id, ImmutableList.builder().build() ); - tableNames.put( new Object[]{ namespaceId, name }, entity ); - List children = new ArrayList<>( Objects.requireNonNull( schemaChildren.get( namespaceId ) ) ); - children.add( id ); - schemaChildren.replace( namespaceId, ImmutableList.copyOf( children ) ); - } - - listeners.firePropertyChange( "entity", null, entity ); - } - - - /** - * Add additional Information to Table, what Views are connected to table - */ - public void addConnectedViews( Map> underlyingTables, long viewId ) { - for ( long id : underlyingTables.keySet() ) { - LogicalTable old = getTable( id ); - List connectedViews; - connectedViews = new ArrayList<>( old.connectedViews ); - connectedViews.add( viewId ); - LogicalTable table = old.withConnectedViews( ImmutableList.copyOf( connectedViews ) ); - synchronized ( this ) { - tables.replace( id, table ); - assert table != null; - tableNames.replace( new Object[]{ table.namespaceId, old.name }, table ); - } - listeners.firePropertyChange( "table", old, table ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public void deleteViewDependencies( CatalogView catalogView ) { - for ( long id : catalogView.getUnderlyingTables().keySet() ) { - LogicalTable old = getTable( id ); - List connectedViews = old.connectedViews.stream().filter( e -> e != catalogView.id ).collect( Collectors.toList() ); - - LogicalTable table = 
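// As the comments in addTable(...) and addMaterializedView(...) above note, an
// UNPARTITIONED table still gets one partition group ("full") holding a single
// partition, so downstream code can treat every table uniformly. A minimal
// stand-in for the PartitionProperty shape built there:
import java.util.List;

class PartitionPropertySketch {
    record PartitionProperty( boolean isPartitioned, List<Long> partitionGroupIds, List<Long> partitionIds ) {}

    static PartitionProperty unpartitioned( long fullGroupId, long singlePartitionId ) {
        // one group, one partition, no periodic checks
        return new PartitionProperty( false, List.of( fullGroupId ), List.of( singlePartitionId ) );
    }
}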
old.withConnectedViews( ImmutableList.copyOf( connectedViews ) ); - - synchronized ( this ) { - tables.replace( id, table ); - assert table != null; - tableNames.replace( new Object[]{ table.namespaceId, old.name }, table ); - } - listeners.firePropertyChange( "table", old, table ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public boolean checkIfExistsEntity( long namespaceId, String entityName ) { - LogicalNamespace schema = getNamespace( namespaceId ); - if ( !schema.caseSensitive ) { - entityName = entityName.toLowerCase(); - } - return tableNames.containsKey( new Object[]{ namespaceId, entityName } ); - } - - - /** - * {@inheritDoc} - */ - @Override - public boolean checkIfExistsEntity( long tableId ) { - return tables.containsKey( tableId ); - } - - - /** - * {@inheritDoc} - */ - @Override - public void renameTable( long tableId, String name ) { - LogicalTable old = getTable( tableId ); - if ( !getNamespace( old.namespaceId ).caseSensitive ) { - name = name.toLowerCase(); - } - - LogicalTable table = old.withName( name ); - synchronized ( this ) { - tables.replace( tableId, table ); - tableNames.remove( new Object[]{ table.namespaceId, old.name } ); - tableNames.put( new Object[]{ table.namespaceId, name }, table ); - } - listeners.firePropertyChange( "table", old, table ); - } - - - /** - * {@inheritDoc} - */ - @Override - public void deleteTable( long tableId ) { - LogicalTable table = getTable( tableId ); - List children = new ArrayList<>( Objects.requireNonNull( schemaChildren.get( table.namespaceId ) ) ); - children.remove( tableId ); - synchronized ( this ) { - schemaChildren.replace( table.namespaceId, ImmutableList.copyOf( children ) ); - - if ( table.partitionProperty.reliesOnPeriodicChecks ) { - removeTableFromPeriodicProcessing( tableId ); - } - - if ( table.partitionProperty.isPartitioned ) { - for ( Long partitionGroupId : Objects.requireNonNull( table.partitionProperty.partitionGroupIds ) ) { - deletePartitionGroup( table.id, table.namespaceId, partitionGroupId ); - } - } - - for ( Long columnId : Objects.requireNonNull( tableChildren.get( tableId ) ) ) { - deleteColumn( columnId ); - } - - // Remove all placement containers along with all placements - table.dataPlacements.forEach( adapterId -> removeDataPlacement( adapterId, tableId ) ); - - tableChildren.remove( tableId ); - tables.remove( tableId ); - tableNames.remove( new Object[]{ table.namespaceId, table.name } ); - flagTableForDeletion( table.id, false ); - // primary key was deleted and open table has to be closed - if ( openTable != null && openTable == tableId ) { - openTable = null; - } - - } - listeners.firePropertyChange( "table", table, null ); - } - - - /** - * {@inheritDoc} - */ - @Override - public void setTableOwner( long tableId, int ownerId ) { - LogicalTable old = getTable( tableId ); - LogicalTable table = old.withOwnerId( ownerId ); - - synchronized ( this ) { - tables.replace( tableId, table ); - tableNames.replace( new Object[]{ table.namespaceId, table.name }, table ); - } - listeners.firePropertyChange( "table", old, table ); - } - - - /** - * {@inheritDoc} - */ - @Override - public void setPrimaryKey( long tableId, Long keyId ) { - LogicalTable old = getTable( tableId ); - - LogicalTable table = old.withPrimaryKey( keyId ); - - synchronized ( this ) { - tables.replace( tableId, table ); - tableNames.replace( new Object[]{ table.namespaceId, table.name }, table ); - - if ( keyId == null ) { - openTable = tableId; - } else { - primaryKeys.put( keyId, new CatalogPrimaryKey( 
Objects.requireNonNull( keys.get( keyId ) ) ) ); - openTable = null; - } - } - listeners.firePropertyChange( "table", old, table ); - } - - - /** - * {@inheritDoc} - */ - @Override - public void addColumnPlacement( int adapterId, long columnId, PlacementType placementType, String physicalSchemaName, String physicalTableName, String physicalColumnName ) { - LogicalColumn column = Objects.requireNonNull( columns.get( columnId ) ); - CatalogAdapter store = Objects.requireNonNull( adapters.get( adapterId ) ); - - CatalogColumnPlacement columnPlacement = new CatalogColumnPlacement( - column.schemaId, - column.tableId, - columnId, - adapterId, - store.uniqueName, - placementType, - physicalSchemaName, - physicalColumnName, - physicalPositionBuilder.getAndIncrement() ); - - synchronized ( this ) { - columnPlacements.put( new Object[]{ adapterId, columnId }, columnPlacement ); - - // Adds this ColumnPlacement to existing DataPlacement container - addColumnsToDataPlacement( adapterId, column.tableId, List.of( columnId ) ); - } - listeners.firePropertyChange( "columnPlacement", null, columnPlacement ); - } - - - /** - * {@inheritDoc} - */ - @Override - public void updateMaterializedViewRefreshTime( long materializedViewId ) { - CatalogMaterializedView old = (CatalogMaterializedView) getTable( materializedViewId ); - - MaterializedCriteria materializedCriteria = old.getMaterializedCriteria(); - materializedCriteria.setLastUpdate( new Timestamp( System.currentTimeMillis() ) ); - - CatalogMaterializedView view = old.withMaterializedCriteria( materializedCriteria ); - - synchronized ( this ) { - tables.replace( materializedViewId, view ); - tableNames.replace( - new Object[]{ view.namespaceId, view.name }, - view ); - } - listeners.firePropertyChange( "table", old, view ); - } - - - /** - * {@inheritDoc} - */ - @Override - public LogicalCollection getCollection( long id ) { - if ( !collections.containsKey( id ) ) { - throw new UnknownTableIdRuntimeException( id ); - } - return Objects.requireNonNull( collections.get( id ) ); - } - - - /** - * {@inheritDoc} - */ - @Override - public List getCollections( long namespaceId, Pattern namePattern ) { - if ( schemas.containsKey( namespaceId ) ) { - LogicalNamespace schema = Objects.requireNonNull( schemas.get( namespaceId ) ); - if ( namePattern != null ) { - LogicalCollection collection = collectionNames.get( new Object[]{ namespaceId, namePattern.pattern } ); - if ( collection == null ) { - return new ArrayList<>(); - } - return Collections.singletonList( collection ); - } else { - return new ArrayList<>( collectionNames.prefixSubMap( new Object[]{ namespaceId } ).values() ); - } - } - return new ArrayList<>(); - } - - - /** - * {@inheritDoc} - */ - @Override - public long addCollection( Long id, String name, long schemaId, int currentUserId, EntityType entity, boolean modifiable ) { - long collectionId = entityIdBuilder.getAndIncrement(); - if ( id != null ) { - collectionId = id; - } - - LogicalNamespace namespace = getNamespace( schemaId ); - LogicalCollection collection = new LogicalCollection( - Catalog.defaultDatabaseId, - schemaId, - collectionId, - name, - List.of(), - EntityType.ENTITY, - null ); - - synchronized ( this ) { - collections.put( collectionId, collection ); - collectionNames.put( new Object[]{ schemaId, name }, collection ); - } - listeners.firePropertyChange( "collection", null, entity ); - - return collectionId; - } - - - /** - * {@inheritDoc} - */ - @Override - public long addCollectionPlacement( long namespaceId, int adapterId, long 
collectionId, PlacementType placementType ) { - long id = partitionIdBuilder.getAndIncrement(); - CatalogCollectionPlacement placement = new CatalogCollectionPlacement( namespaceId, adapterId, collectionId, null, null, id ); - LogicalCollection old = collections.get( collectionId ); - if ( old == null ) { - throw new UnknownCollectionException( collectionId ); - } - - LogicalCollection collection = old.addPlacement( adapterId ); - - synchronized ( this ) { - collectionPlacements.put( new Object[]{ collectionId, adapterId }, placement ); - collections.replace( collectionId, collection ); - collectionNames.replace( new Object[]{ collection.databaseId, collection.namespaceId, collection.name }, collection ); - } - listeners.firePropertyChange( "collectionPlacement", null, placement ); - return id; - } - - - /** - * {@inheritDoc} - */ - @Override - public CatalogCollectionMapping getCollectionMapping( long id ) { - if ( !documentMappings.containsKey( id ) ) { - throw new UnknownTableIdRuntimeException( id ); - } - return Objects.requireNonNull( documentMappings.get( id ) ); - } - - - /** - * {@inheritDoc} - */ - @Override - public long addCollectionLogistics( long schemaId, String name, List stores, boolean onlyPlacement ) throws GenericCatalogException { - long tableId; - if ( onlyPlacement ) { - try { - tableId = getTable( schemaId, name ).id; - } catch ( UnknownTableException e ) { - throw new RuntimeException( e ); - } - } else { - tableId = addTable( name, schemaId, Catalog.defaultUserId, EntityType.ENTITY, true ); - } - - stores.forEach( store -> addDataPlacement( store.getAdapterId(), tableId ) ); - - long idId; - long dataId; - if ( !onlyPlacement ) { - idId = addColumn( "_id_", tableId, 0, PolyType.VARCHAR, null, 255, null, null, null, false, Collation.getDefaultCollation() ); - dataId = addColumn( "_data_", tableId, 1, PolyType.JSON, null, null, null, null, null, false, Collation.getDefaultCollation() ); - } else { - try { - idId = getColumn( tableId, "_id_" ).id; - dataId = getColumn( tableId, "_data_" ).id; - } catch ( UnknownColumnException e ) { - throw new RuntimeException( "Error while adding a document placement." 
); - } - } - - for ( DataStore s : stores ) { - addColumnPlacement( - s.getAdapterId(), - idId, - PlacementType.AUTOMATIC, - null, - null, - null - ); - - addColumnPlacement( - s.getAdapterId(), - dataId, - PlacementType.AUTOMATIC, - null, - null, - null - ); - } - - addPrimaryKey( tableId, List.of( idId, dataId ) ); - - if ( !onlyPlacement ) { - CatalogCollectionMapping mapping = new CatalogCollectionMapping( tableId, idId, dataId ); - documentMappings.put( tableId, mapping ); - } - - return tableId; - } - - - /** - * {@inheritDoc} - */ - @Override - public void deleteCollection( long id ) { - LogicalCollection collection = getCollection( id ); - - synchronized ( this ) { - collections.remove( collection.namespaceId ); - collectionNames.remove( new Object[]{ collection.databaseId, collection.namespaceId, collection.name } ); - collection.placements.forEach( p -> collectionPlacements.remove( new Object[]{ collection.id, p } ) ); - } - listeners.firePropertyChange( "collection", null, null ); - } - - - /** - * {@inheritDoc} - */ - @Override - public void dropCollectionPlacement( long id, int adapterId ) { - LogicalCollection oldCollection = Objects.requireNonNull( collections.get( id ) ); - LogicalCollection collection = oldCollection.removePlacement( adapterId ); - - synchronized ( this ) { - collectionPlacements.remove( new Object[]{ id, adapterId } ); - collections.replace( id, collection ); - collectionNames.replace( new Object[]{ collection.databaseId, collection.namespaceId, collection.name }, collection ); - } - listeners.firePropertyChange( "collectionPlacement", null, null ); - } - - - /** - * {@inheritDoc} - */ - public List getGraphPlacements( int adapterId ) { - return graphPlacements.entrySet().stream().filter( e -> e.getKey()[1].equals( adapterId ) ).map( Entry::getValue ).collect( Collectors.toList() ); - } - - - /** - * {@inheritDoc} - */ - @Override - public List getCollectionPlacementsByAdapter( int adapterId ) { - return collectionPlacements.values().stream().filter( p -> p.adapter == adapterId ).collect( Collectors.toList() ); - } - - - /** - * {@inheritDoc} - */ - @Override - public CatalogCollectionPlacement getCollectionPlacement( long collectionId, int adapterId ) { - if ( !collectionPlacements.containsKey( new Object[]{ collectionId, adapterId } ) ) { - throw new UnknownCollectionPlacementException( collectionId, adapterId ); - } - - return collectionPlacements.get( new Object[]{ collectionId, adapterId } ); - } - - - /** - * {@inheritDoc} - */ - @Override - public void deleteColumnPlacement( int adapterId, long columnId, boolean columnOnly ) { - LogicalTable oldTable = getTable( getColumn( columnId ).tableId ); - - synchronized ( this ) { - if ( log.isDebugEnabled() ) { - log.debug( "Is flagged for deletion {}", isTableFlaggedForDeletion( oldTable.id ) ); - } - - if ( oldTable.partitionProperty.isPartitioned ) { - if ( !isTableFlaggedForDeletion( oldTable.id ) ) { - if ( !columnOnly ) { - if ( !validateDataPlacementsConstraints( oldTable.id, adapterId, Arrays.asList( columnId ), new ArrayList<>() ) ) { - throw new RuntimeException( "Partition Distribution failed" ); - } - } - } - } - - removeColumnsFromDataPlacement( adapterId, oldTable.id, Arrays.asList( columnId ) ); - columnPlacements.remove( new Object[]{ adapterId, columnId } ); - } - listeners.firePropertyChange( "columnPlacement", oldTable, null ); - } - - - /** - * {@inheritDoc} - */ - @Override - public CatalogColumnPlacement getColumnPlacement( int adapterId, long columnId ) { - try { - return 
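// addCollectionLogistics(...) above backs a document collection with a plain
// two-column table: _id_ (VARCHAR 255) plus the whole document as _data_ (JSON),
// letting collections reuse relational placements and primary keys. A model of
// one stored row (illustrative types):
class DocMappingSketch {
    record DocRow( String id, String dataJson ) {}

    public static void main( String[] args ) {
        // { "_id": "abc", "name": "Ann" } is stored as:
        DocRow row = new DocRow( "abc", "{\"name\":\"Ann\"}" );
        System.out.println( row );
    }
}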
Objects.requireNonNull( columnPlacements.get( new Object[]{ adapterId, columnId } ) ); - } catch ( NullPointerException e ) { - getAdapter( adapterId ); - getColumn( columnId ); - throw new UnknownColumnPlacementRuntimeException( adapterId, columnId ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public boolean checkIfExistsColumnPlacement( int adapterId, long columnId ) { - CatalogColumnPlacement placement = columnPlacements.get( new Object[]{ adapterId, columnId } ); - return placement != null; - } - - - /** - * {@inheritDoc} - */ - @Override - public List getColumnPlacementsOnAdapter( int adapterId ) { - return new ArrayList<>( columnPlacements.prefixSubMap( new Object[]{ adapterId } ).values() ); - } - - - /** - * {@inheritDoc} - */ - @Override - public List getColumnPlacementsOnAdapterPerTable( int adapterId, long tableId ) { - final Comparator columnPlacementComparator = Comparator.comparingInt( p -> getColumn( p.columnId ).position ); - return getColumnPlacementsOnAdapter( adapterId ) - .stream() - .filter( p -> p.tableId == tableId ) - .sorted( columnPlacementComparator ) - .collect( Collectors.toList() ); - } - - - /** - * {@inheritDoc} - */ - @Override - public List getColumnPlacementsByColumn( long columnId ) { - return columnPlacements.values() - .stream() - .filter( p -> p.columnId == columnId ) - .collect( Collectors.toList() ); - } - - - /** - * {@inheritDoc} - */ - @Override - public ImmutableMap> getColumnPlacementsByAdapter( long tableId ) { - LogicalTable table = getTable( tableId ); - Map> columnPlacementsByAdapter = new HashMap<>(); - - table.dataPlacements.forEach( adapterId -> columnPlacementsByAdapter.put( - adapterId, - ImmutableList.copyOf( - getDataPlacement( adapterId, tableId ).columnPlacementsOnAdapter ) - ) - ); - - return ImmutableMap.copyOf( columnPlacementsByAdapter ); - } - - - /** - * {@inheritDoc} - */ - @Override - public List getPhysicalsOnAdapter( long tableId ) { - LogicalTable table = getTable( tableId ); - Map> partitionPlacementsByAdapter = new HashMap<>(); - - table.dataPlacements.forEach( adapterId -> partitionPlacementsByAdapter.put( - adapterId, - ImmutableList.copyOf( - getDataPlacement( adapterId, tableId ).getAllPartitionIds() ) - ) - ); - - return ImmutableMap.copyOf( partitionPlacementsByAdapter ); - } - - - /** - * {@inheritDoc} - */ - @Override - public long getPartitionGroupByPartition( long partitionId ) { - return getPartition( partitionId ).partitionGroupId; - } - - - /** - * {@inheritDoc} - */ - @Override - public List getColumnPlacement( long columnId ) { - return columnPlacements.values() - .stream() - .filter( p -> p.columnId == columnId ) - .collect( Collectors.toList() ); - } - - - /** - * {@inheritDoc} - */ - @Override - public List getColumnPlacementsOnAdapterAndSchema( int adapterId, long schemaId ) { - try { - return getColumnPlacementsOnAdapter( adapterId ).stream().filter( p -> Objects.requireNonNull( columns.get( p.columnId ) ).schemaId == schemaId ).collect( Collectors.toList() ); - } catch ( NullPointerException e ) { - getAdapter( adapterId ); - getNamespace( schemaId ); - return new ArrayList<>(); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public void updateColumnPlacementType( int adapterId, long columnId, PlacementType placementType ) { - try { - CatalogColumnPlacement old = Objects.requireNonNull( columnPlacements.get( new Object[]{ adapterId, columnId } ) ); - CatalogColumnPlacement placement = new CatalogColumnPlacement( - old.namespaceId, - old.tableId, - old.columnId, - old.adapterId, - 
old.adapterUniqueName, - placementType, - old.physicalSchemaName, - old.physicalColumnName, - old.physicalPosition ); - synchronized ( this ) { - columnPlacements.replace( new Object[]{ adapterId, columnId }, placement ); - } - listeners.firePropertyChange( "columnPlacement", old, placement ); - } catch ( NullPointerException e ) { - getAdapter( adapterId ); - getColumn( columnId ); - throw new UnknownColumnPlacementRuntimeException( adapterId, columnId ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public void updateColumnPlacementPhysicalPosition( int adapterId, long columnId, long position ) { - try { - CatalogColumnPlacement old = Objects.requireNonNull( columnPlacements.get( new Object[]{ adapterId, columnId } ) ); - CatalogColumnPlacement placement = new CatalogColumnPlacement( - old.namespaceId, - old.tableId, - old.columnId, - old.adapterId, - old.adapterUniqueName, - old.placementType, - old.physicalSchemaName, - old.physicalColumnName, - position ); - synchronized ( this ) { - columnPlacements.replace( new Object[]{ adapterId, columnId }, placement ); - } - listeners.firePropertyChange( "columnPlacement", old, placement ); - } catch ( NullPointerException e ) { - getAdapter( adapterId ); - getColumn( columnId ); - throw new UnknownColumnPlacementRuntimeException( adapterId, columnId ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public void updateColumnPlacementPhysicalPosition( int adapterId, long columnId ) { - try { - CatalogColumnPlacement old = Objects.requireNonNull( columnPlacements.get( new Object[]{ adapterId, columnId } ) ); - CatalogColumnPlacement placement = new CatalogColumnPlacement( - old.namespaceId, - old.tableId, - old.columnId, - old.adapterId, - old.adapterUniqueName, - old.placementType, - old.physicalSchemaName, - old.physicalColumnName, - physicalPositionBuilder.getAndIncrement() ); - synchronized ( this ) { - columnPlacements.replace( new Object[]{ adapterId, columnId }, placement ); - } - listeners.firePropertyChange( "columnPlacement", old, placement ); - } catch ( NullPointerException e ) { - getAdapter( adapterId ); - getColumn( columnId ); - throw new UnknownColumnPlacementRuntimeException( adapterId, columnId ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public void updateColumnPlacementPhysicalNames( int adapterId, long columnId, String physicalSchemaName, String physicalColumnName, boolean updatePhysicalColumnPosition ) { - try { - CatalogColumnPlacement old = Objects.requireNonNull( columnPlacements.get( new Object[]{ adapterId, columnId } ) ); - CatalogColumnPlacement placement = new CatalogColumnPlacement( - old.namespaceId, - old.tableId, - old.columnId, - old.adapterId, - old.adapterUniqueName, - old.placementType, - physicalSchemaName, - physicalColumnName, - updatePhysicalColumnPosition ? 
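// The updateColumnPlacementPhysicalPosition(...) overloads around this point all
// draw from one monotonically increasing counter: "move to a fresh position" is
// just getAndIncrement(), and the restore path seeds the counter with
// max(existing position) + 1, so positions stay unique across restarts. Sketch:
import java.util.concurrent.atomic.AtomicLong;

class PhysicalPositionSketch {
    private final AtomicLong physicalPositionBuilder = new AtomicLong();

    long appendPosition() {
        return physicalPositionBuilder.getAndIncrement();  // always past every existing position
    }
}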
physicalPositionBuilder.getAndIncrement() : old.physicalPosition ); - synchronized ( this ) { - columnPlacements.replace( new Object[]{ adapterId, columnId }, placement ); - } - listeners.firePropertyChange( "columnPlacement", old, placement ); - } catch ( NullPointerException e ) { - getAdapter( adapterId ); - getColumn( columnId ); - throw new UnknownColumnPlacementRuntimeException( adapterId, columnId ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public List getColumns( long tableId ) { - try { - LogicalTable table = Objects.requireNonNull( tables.get( tableId ) ); - return columnNames.prefixSubMap( new Object[]{ table.namespaceId, table.id } ).values().stream().sorted( columnComparator ).collect( Collectors.toList() ); - } catch ( NullPointerException e ) { - return new ArrayList<>(); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public List getColumns( Pattern schemaNamePattern, Pattern tableNamePattern, Pattern columnNamePattern ) { - List catalogEntities = getTables( schemaNamePattern, tableNamePattern ); - - if ( catalogEntities.size() > 0 ) { - Stream catalogColumns = catalogEntities.stream().filter( t -> tableChildren.containsKey( t.id ) ).flatMap( t -> Objects.requireNonNull( tableChildren.get( t.id ) ).stream() ).map( columns::get ); - - if ( columnNamePattern != null ) { - catalogColumns = catalogColumns.filter( c -> c.name.matches( columnNamePattern.toRegex() ) ); - } - return catalogColumns.collect( Collectors.toList() ); - } - - return new ArrayList<>(); - } - - - /** - * {@inheritDoc} - */ - @Override - public LogicalColumn getColumn( long columnId ) { - try { - return Objects.requireNonNull( columns.get( columnId ) ); - } catch ( NullPointerException e ) { - throw new UnknownColumnIdRuntimeException( columnId ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public LogicalColumn getColumn( long tableId, String columnName ) throws UnknownColumnException { - try { - LogicalTable table = getTable( tableId ); - if ( !getNamespace( table.namespaceId ).caseSensitive ) { - columnName = columnName.toLowerCase(); - } - return Objects.requireNonNull( columnNames.get( new Object[]{ table.namespaceId, table.id, columnName } ) ); - } catch ( NullPointerException e ) { - throw new UnknownColumnException( tableId, columnName ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public LogicalColumn getColumn( String schemaName, String tableName, String columnName ) throws UnknownColumnException, UnknownSchemaException, UnknownTableException { - try { - LogicalTable table = getTable( schemaName, tableName ); - return Objects.requireNonNull( columnNames.get( new Object[]{ table.namespaceId, table.id, columnName } ) ); - } catch ( NullPointerException e ) { - throw new UnknownColumnException( schemaName, tableName, columnName ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public long addColumn( String name, long tableId, int position, PolyType type, PolyType collectionsType, Integer length, Integer scale, Integer dimension, Integer cardinality, boolean nullable, Collation collation ) { - LogicalTable table = getTable( tableId ); - - if ( !getNamespace( table.namespaceId ).caseSensitive ) { - name = name.toLowerCase(); - } - - if ( type.getFamily() == PolyTypeFamily.CHARACTER && collation == null ) { - throw new RuntimeException( "Collation is not allowed to be null for char types." ); - } - if ( scale != null && length != null ) { - if ( scale > length ) { - throw new RuntimeException( "Invalid scale! Scale can not be larger than length." 
); - } - } - - long id = columnIdBuilder.getAndIncrement(); - LogicalColumn column = new LogicalColumn( - id, - name, - tableId, - table.namespaceId, - position, - type, - collectionsType, - length, - scale, - dimension, - cardinality, - nullable, - collation, - null ); - - synchronized ( this ) { - columns.put( id, column ); - columnNames.put( new Object[]{ table.namespaceId, table.id, name }, column ); - List children = new ArrayList<>( Objects.requireNonNull( tableChildren.get( tableId ) ) ); - children.add( id ); - tableChildren.replace( tableId, ImmutableList.copyOf( children ) ); - - List columnIds = new ArrayList<>( table.fieldIds ); - columnIds.add( id ); - - LogicalTable updatedTable; - - updatedTable = table.withConnectedViews( ImmutableList.copyOf( columnIds ) ); - tables.replace( tableId, updatedTable ); - tableNames.replace( new Object[]{ updatedTable.namespaceId, updatedTable.name }, updatedTable ); - } - listeners.firePropertyChange( "column", null, column ); - return id; - } - - - /** - * {@inheritDoc} - */ - @Override - public void renameColumn( long columnId, String name ) { - LogicalColumn old = getColumn( columnId ); - - if ( !getNamespace( old.schemaId ).caseSensitive ) { - name = name.toLowerCase(); - } - - LogicalColumn column = new LogicalColumn( old.id, name, old.tableId, old.schemaId, old.position, old.type, old.collectionsType, old.length, old.scale, old.dimension, old.cardinality, old.nullable, old.collation, old.defaultValue ); - synchronized ( this ) { - columns.replace( columnId, column ); - columnNames.remove( new Object[]{ column.schemaId, column.tableId, old.name } ); - columnNames.put( new Object[]{ column.schemaId, column.tableId, name }, column ); - } - listeners.firePropertyChange( "column", old, column ); - } - - - /** - * {@inheritDoc} - */ - @Override - public void setColumnPosition( long columnId, int position ) { - LogicalColumn old = getColumn( columnId ); - LogicalColumn column = new LogicalColumn( old.id, old.name, old.tableId, old.schemaId, position, old.type, old.collectionsType, old.length, old.scale, old.dimension, old.cardinality, old.nullable, old.collation, old.defaultValue ); - synchronized ( this ) { - columns.replace( columnId, column ); - columnNames.replace( new Object[]{ column.schemaId, column.tableId, column.name }, column ); - } - listeners.firePropertyChange( "column", old, column ); - } - - - /** - * {@inheritDoc} - */ - @Override - public void setColumnType( long columnId, PolyType type, PolyType collectionsType, Integer length, Integer scale, Integer dimension, Integer cardinality ) throws GenericCatalogException { - try { - LogicalColumn old = Objects.requireNonNull( columns.get( columnId ) ); - - if ( scale != null && scale > length ) { - throw new RuntimeException( "Invalid scale! Scale can not be larger than length." ); - } - - // Check that the column is not part of a key - for ( CatalogKey key : getKeys() ) { - if ( key.columnIds.contains( columnId ) ) { - String name = "UNKNOWN"; - if ( key instanceof CatalogPrimaryKey ) { - name = "PRIMARY KEY"; - } else if ( key instanceof CatalogForeignKey ) { - name = ((CatalogForeignKey) key).name; - } else { - List constraints = getConstraints( key ); - if ( constraints.size() > 0 ) { - name = constraints.get( 0 ).name; - } - } - throw new GenericCatalogException( "The column \"" + old.name + "\" is part of the key \"" + name + "\". Unable to change the type of a column that is part of a key." ); - } - } - - Collation collation = type.getFamily() == PolyTypeFamily.CHARACTER - ? 
Collation.getById( RuntimeConfig.DEFAULT_COLLATION.getInteger() ) - : null; - LogicalColumn column = new LogicalColumn( old.id, old.name, old.tableId, old.schemaId, old.position, type, collectionsType, length, scale, dimension, cardinality, old.nullable, collation, old.defaultValue ); - synchronized ( this ) { - columns.replace( columnId, column ); - columnNames.replace( new Object[]{ old.schemaId, old.tableId, old.name }, column ); - } - listeners.firePropertyChange( "column", old, column ); - } catch ( NullPointerException e ) { - throw new GenericCatalogException( e ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public void setNullable( long columnId, boolean nullable ) throws GenericCatalogException { - try { - LogicalColumn old = Objects.requireNonNull( columns.get( columnId ) ); - if ( nullable ) { - // Check if the column is part of a primary key (pk's are not allowed to contain null values) - LogicalTable table = Objects.requireNonNull( tables.get( old.tableId ) ); - if ( table.primaryKey != null ) { - CatalogKey catalogKey = getPrimaryKey( table.primaryKey ); - if ( catalogKey.columnIds.contains( columnId ) ) { - throw new GenericCatalogException( "Unable to allow null values in a column that is part of the primary key." ); - } - } - } else { - // TODO: Check that the column does not contain any null values - getColumnPlacement( columnId ); - } - LogicalColumn column = new LogicalColumn( - old.id, - old.name, - old.tableId, - old.schemaId, - old.position, - old.type, - old.collectionsType, - old.length, - old.scale, - old.dimension, - old.cardinality, - nullable, - old.collation, - old.defaultValue ); - synchronized ( this ) { - columns.replace( columnId, column ); - columnNames.replace( new Object[]{ old.schemaId, old.tableId, old.name }, column ); - } - listeners.firePropertyChange( "column", old, column ); - } catch ( NullPointerException e ) { - throw new GenericCatalogException( e ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public void setCollation( long columnId, Collation collation ) { - LogicalColumn old = getColumn( columnId ); - - if ( old.type.getFamily() != PolyTypeFamily.CHARACTER ) { - throw new RuntimeException( "Illegal attempt to set collation for a non-char column!" ); - } - LogicalColumn column = new LogicalColumn( - old.id, - old.name, - old.tableId, - old.schemaId, - old.position, - old.type, - old.collectionsType, - old.length, - old.scale, - old.dimension, - old.cardinality, - old.nullable, - collation, - old.defaultValue ); - synchronized ( this ) { - columns.replace( columnId, column ); - columnNames.replace( new Object[]{ old.schemaId, old.tableId, old.name }, column ); - } - listeners.firePropertyChange( "column", old, column ); - } - - - /** - * {@inheritDoc} - */ - @Override - public boolean checkIfExistsColumn( long tableId, String columnName ) { - LogicalTable table = getTable( tableId ); - return columnNames.containsKey( new Object[]{ table.namespaceId, tableId, columnName } ); - } - - - /** - * {@inheritDoc} - */ - @Override - public void deleteColumn( long columnId ) { - //TODO also delete keys with that column? 
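// Editor's sketch for the TODO above (not part of the original patch): one option is to
// reject the drop while a key still references the column, reusing getKeys() and the
// columnIds field exactly as setColumnType does a few methods earlier. A minimal,
// hypothetical guard:
//
//     if ( getKeys().stream().anyMatch( k -> k.columnIds.contains( columnId ) ) ) {
//         throw new RuntimeException( "The column is still part of a key and cannot be deleted." );
//     }
//
// Dropping the affected keys implicitly would be the alternative; the TODO leaves the choice open.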
- LogicalColumn column = getColumn( columnId ); - - List children = new ArrayList<>( Objects.requireNonNull( tableChildren.get( column.tableId ) ) ); - children.remove( columnId ); - - LogicalTable old = getTable( column.tableId ); - List columnIds = new ArrayList<>( old.fieldIds ); - columnIds.remove( columnId ); - - LogicalTable table = old.withFieldIds( ImmutableList.copyOf( columnIds ) ); - - synchronized ( this ) { - columnNames.remove( new Object[]{ column.schemaId, column.tableId, column.name } ); - tableChildren.replace( column.tableId, ImmutableList.copyOf( children ) ); - - deleteDefaultValue( columnId ); - for ( CatalogColumnPlacement p : getColumnPlacement( columnId ) ) { - deleteColumnPlacement( p.adapterId, p.columnId, false ); - } - tables.replace( column.tableId, table ); - tableNames.replace( new Object[]{ table.namespaceId, table.name }, table ); - - columns.remove( columnId ); - } - listeners.firePropertyChange( "column", column, null ); - } - - - /** - * {@inheritDoc} - * - * TODO: String is only a temporary solution - */ - @Override - public void setDefaultValue( long columnId, PolyType type, String defaultValue ) { - LogicalColumn old = getColumn( columnId ); - LogicalColumn column = new LogicalColumn( - old.id, - old.name, - old.tableId, - old.schemaId, - old.position, - old.type, - old.collectionsType, - old.length, - old.scale, - old.dimension, - old.cardinality, - old.nullable, - old.collation, - new CatalogDefaultValue( columnId, type, defaultValue, "defaultValue" ) ); - synchronized ( this ) { - columns.replace( columnId, column ); - columnNames.replace( new Object[]{ column.schemaId, column.tableId, column.name }, column ); - } - listeners.firePropertyChange( "column", old, column ); - } - - - /** - * {@inheritDoc} - */ - @Override - public void deleteDefaultValue( long columnId ) { - LogicalColumn old = getColumn( columnId ); - LogicalColumn column = new LogicalColumn( - old.id, - old.name, - old.tableId, - old.schemaId, - old.position, - old.type, - old.collectionsType, - old.length, - old.scale, - old.dimension, - old.cardinality, - old.nullable, - old.collation, - null ); - if ( old.defaultValue != null ) { - synchronized ( this ) { - columns.replace( columnId, column ); - columnNames.replace( new Object[]{ old.schemaId, old.tableId, old.name }, column ); - } - listeners.firePropertyChange( "column", old, column ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public CatalogPrimaryKey getPrimaryKey( long key ) { - try { - return Objects.requireNonNull( primaryKeys.get( key ) ); - } catch ( NullPointerException e ) { - throw new UnknownKeyIdRuntimeException( key ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public boolean isPrimaryKey( long key ) { - try { - Long primary = getTable( Objects.requireNonNull( keys.get( key ) ).tableId ).primaryKey; - return primary != null && primary == key; - } catch ( NullPointerException e ) { - throw new UnknownKeyIdRuntimeException( key ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public void addPrimaryKey( long tableId, List columnIds ) throws GenericCatalogException { - try { - // Check if the columns are set 'not null' - List nullableColumns = columnIds.stream().map( columns::get ).filter( Objects::nonNull ).filter( c -> c.nullable ).collect( Collectors.toList() ); - for ( LogicalColumn col : nullableColumns ) { - throw new GenericCatalogException( "Primary key is not allowed to contain null values but the column '" + col.name + "' is declared nullable." 
); - } - - // TODO: Check if the current values are unique - - // Check if there is already a primary key defined for this table and if so, delete it. - LogicalTable table = getTable( tableId ); - - if ( table.primaryKey != null ) { - // CatalogCombinedKey combinedKey = getCombinedKey( table.primaryKey ); - if ( getKeyUniqueCount( table.primaryKey ) == 1 && isForeignKey( tableId ) ) { - // This primary key is the only constraint for the uniqueness of this key. - throw new GenericCatalogException( "This key is referenced by at least one foreign key which requires this key to be unique. To drop this primary key, first drop the foreign keys or create a unique constraint." ); - } - synchronized ( this ) { - setPrimaryKey( tableId, null ); - deleteKeyIfNoLongerUsed( table.primaryKey ); - } - } - long keyId = getOrAddKey( tableId, columnIds, EnforcementTime.ON_QUERY ); - setPrimaryKey( tableId, keyId ); - } catch ( NullPointerException e ) { - throw new GenericCatalogException( e ); - } - } - - - private int getKeyUniqueCount( long keyId ) { - CatalogKey key = keys.get( keyId ); - int count = 0; - if ( isPrimaryKey( keyId ) ) { - count++; - } - - for ( CatalogConstraint constraint : getConstraints( key ) ) { - if ( constraint.type == ConstraintType.UNIQUE ) { - count++; - } - } - - for ( CatalogIndex index : getIndexes( key ) ) { - if ( index.unique ) { - count++; - } - } - - return count; - } - - - /** - * {@inheritDoc} - */ - @Override - public List getForeignKeys( long tableId ) { - return foreignKeys.values().stream().filter( f -> f.tableId == tableId ).collect( Collectors.toList() ); - } - - - /** - * {@inheritDoc} - */ - @Override - public List getExportedKeys( long tableId ) { - return foreignKeys.values().stream().filter( k -> k.referencedKeyTableId == tableId ).collect( Collectors.toList() ); - } - - - /** - * {@inheritDoc} - */ - @Override - public List getConstraints( long tableId ) { - List keysOfTable = keys.values().stream().filter( k -> k.tableId == tableId ).map( k -> k.id ).collect( Collectors.toList() ); - return constraints.values().stream().filter( c -> keysOfTable.contains( c.keyId ) ).collect( Collectors.toList() ); - } - - - /** - * {@inheritDoc} - */ - @Override - public CatalogConstraint getConstraint( long tableId, String constraintName ) throws UnknownConstraintException { - try { - return constraints.values().stream() - .filter( c -> c.key.tableId == tableId && c.name.equals( constraintName ) ) - .findFirst() - .orElseThrow( NullPointerException::new ); - } catch ( NullPointerException e ) { - throw new UnknownConstraintException( tableId, constraintName ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public CatalogForeignKey getForeignKey( long tableId, String foreignKeyName ) throws UnknownForeignKeyException { - try { - return foreignKeys.values().stream() - .filter( f -> f.tableId == tableId && f.name.equals( foreignKeyName ) ) - .findFirst() - .orElseThrow( NullPointerException::new ); - } catch ( NullPointerException e ) { - throw new UnknownForeignKeyException( tableId, foreignKeyName ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public void addForeignKey( long tableId, List columnIds, long referencesTableId, List referencesIds, String constraintName, ForeignKeyOption onUpdate, ForeignKeyOption onDelete ) throws GenericCatalogException { - try { - LogicalTable table = Objects.requireNonNull( tables.get( tableId ) ); - List childKeys = keys.values().stream().filter( k -> k.tableId == referencesTableId ).collect( Collectors.toList() ); - - 
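// The loop below searches the referenced table for a key whose column set matches
// referencesIds. The size comparison combined with the two containsAll checks is
// equivalent to a set-equality test such as (sketch, not part of the patch):
//
//     new HashSet<>( refKey.columnIds ).equals( new HashSet<>( referencesIds ) )
//
// A matching key is then verified pairwise for type compatibility, and it must be backed
// by at least one uniqueness guarantee (primary key, unique constraint, or unique index;
// see getKeyUniqueCount, defined earlier) before the foreign key is created.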
for ( CatalogKey refKey : childKeys ) {
- if ( refKey.columnIds.size() == referencesIds.size() && refKey.columnIds.containsAll( referencesIds ) && referencesIds.containsAll( refKey.columnIds ) ) {
-
- // CatalogKey combinedKey = getCombinedKey( refKey.id );
-
- int i = 0;
- for ( long referencedColumnId : refKey.columnIds ) {
- LogicalColumn referencingColumn = getColumn( columnIds.get( i++ ) );
- LogicalColumn referencedColumn = getColumn( referencedColumnId );
- if ( referencedColumn.type != referencingColumn.type ) {
- throw new GenericCatalogException( "The data type of the referenced column does not match the data type of the referencing column: " + referencingColumn.type.name() + " != " + referencedColumn.type );
- }
- }
- // TODO same keys for key and foreign key
- if ( getKeyUniqueCount( refKey.id ) > 0 ) {
- long keyId = getOrAddKey( tableId, columnIds, EnforcementTime.ON_COMMIT );
- CatalogForeignKey key = new CatalogForeignKey(
- keyId,
- constraintName,
- tableId,
- table.namespaceId,
- refKey.id,
- refKey.tableId,
- refKey.schemaId,
- columnIds,
- referencesIds,
- onUpdate,
- onDelete );
- synchronized ( this ) {
- foreignKeys.put( keyId, key );
- }
- listeners.firePropertyChange( "foreignKey", null, key );
- return;
- }
- }
- }
- throw new GenericCatalogException( "There is no key over the referenced columns." );
- } catch ( NullPointerException e ) {
- throw new GenericCatalogException( e );
- }
- }
-
-
- /**
- * {@inheritDoc}
- */
- @Override
- public void addUniqueConstraint( long tableId, String constraintName, List<Long> columnIds ) throws GenericCatalogException {
- try {
- long keyId = getOrAddKey( tableId, columnIds, EnforcementTime.ON_QUERY );
- // Check if there is already a unique constraint
- List<CatalogConstraint> catalogConstraints = constraints.values().stream()
- .filter( c -> c.keyId == keyId && c.type == ConstraintType.UNIQUE )
- .collect( Collectors.toList() );
- if ( catalogConstraints.size() > 0 ) {
- throw new GenericCatalogException( "There is already a unique constraint!"
); - } - long id = constraintIdBuilder.getAndIncrement(); - synchronized ( this ) { - constraints.put( id, new CatalogConstraint( id, keyId, ConstraintType.UNIQUE, constraintName, Objects.requireNonNull( keys.get( keyId ) ) ) ); - } - listeners.firePropertyChange( "constraint", null, keyId ); - } catch ( NullPointerException e ) { - throw new GenericCatalogException( e ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public List getIndexes( long tableId, boolean onlyUnique ) { - if ( !onlyUnique ) { - return indexes.values().stream().filter( i -> i.key.tableId == tableId ).collect( Collectors.toList() ); - } else { - return indexes.values().stream().filter( i -> i.key.tableId == tableId && i.unique ).collect( Collectors.toList() ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public CatalogIndex getIndex( long tableId, String indexName ) throws UnknownIndexException { - try { - return indexes.values().stream() - .filter( i -> i.key.tableId == tableId && i.name.equals( indexName ) ) - .findFirst() - .orElseThrow( NullPointerException::new ); - } catch ( NullPointerException e ) { - throw new UnknownIndexException( tableId, indexName ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public boolean checkIfExistsIndex( long tableId, String indexName ) { - try { - LogicalTable table = getTable( tableId ); - getIndex( table.id, indexName ); - return true; - } catch ( UnknownIndexException e ) { - return false; - } - } - - - /** - * {@inheritDoc} - */ - @Override - public CatalogIndex getIndex( long indexId ) { - try { - return Objects.requireNonNull( indexes.get( indexId ) ); - } catch ( NullPointerException e ) { - throw new UnknownIndexIdRuntimeException( indexId ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public List getIndexes() { - return new ArrayList<>( indexes.values() ); - } - - - /** - * {@inheritDoc} - */ - @Override - public long addIndex( long tableId, List columnIds, boolean unique, String method, String methodDisplayName, int location, IndexType type, String indexName ) throws GenericCatalogException { - long keyId = getOrAddKey( tableId, columnIds, EnforcementTime.ON_QUERY ); - if ( unique ) { - // TODO: Check if the current values are unique - } - long id = indexIdBuilder.getAndIncrement(); - synchronized ( this ) { - indexes.put( id, new CatalogIndex( - id, - indexName, - unique, - method, - methodDisplayName, - type, - location, - keyId, - Objects.requireNonNull( keys.get( keyId ) ), - null ) ); - } - listeners.firePropertyChange( "index", null, keyId ); - return id; - } - - - /** - * {@inheritDoc} - */ - @Override - public void setIndexPhysicalName( long indexId, String physicalName ) { - try { - CatalogIndex oldEntry = Objects.requireNonNull( indexes.get( indexId ) ); - CatalogIndex newEntry = new CatalogIndex( - oldEntry.id, - oldEntry.name, - oldEntry.unique, - oldEntry.method, - oldEntry.methodDisplayName, - oldEntry.type, - oldEntry.location, - oldEntry.keyId, - oldEntry.key, - physicalName ); - synchronized ( this ) { - indexes.replace( indexId, newEntry ); - } - listeners.firePropertyChange( "index", oldEntry, newEntry ); - } catch ( NullPointerException e ) { - throw new UnknownIndexIdRuntimeException( indexId ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public void deleteIndex( long indexId ) { - CatalogIndex index = getIndex( indexId ); - if ( index.unique ) { - if ( getKeyUniqueCount( index.keyId ) == 1 && isForeignKey( index.keyId ) ) { - // This unique index is the only constraint for the uniqueness of this 
key.
- //throw new GenericCatalogException( "This key is referenced by at least one foreign key which requires this key to be unique. To delete this index, first add a unique constraint." );
- }
- }
- synchronized ( this ) {
- indexes.remove( indexId );
- }
- listeners.firePropertyChange( "index", index.key, null );
- deleteKeyIfNoLongerUsed( index.keyId );
- }
-
-
- /**
- * {@inheritDoc}
- */
- @Override
- public void deletePrimaryKey( long tableId ) throws GenericCatalogException {
- LogicalTable table = getTable( tableId );
-
- // TODO: Check if the currently stored values are unique
- if ( table.primaryKey != null ) {
- // Check if this primary key is required to maintain uniqueness
- // CatalogCombinedKey key = getCombinedKey( table.primaryKey );
- if ( isForeignKey( table.primaryKey ) ) {
- if ( getKeyUniqueCount( table.primaryKey ) < 2 ) {
- throw new GenericCatalogException( "This key is referenced by at least one foreign key which requires this key to be unique. To drop this primary key, either drop the foreign key or create a unique constraint." );
- }
- }
-
- setPrimaryKey( tableId, null );
- deleteKeyIfNoLongerUsed( table.primaryKey );
- }
- }
-
-
- /**
- * {@inheritDoc}
- */
- @Override
- public void deleteForeignKey( long foreignKeyId ) throws GenericCatalogException {
- try {
- CatalogForeignKey catalogForeignKey = Objects.requireNonNull( foreignKeys.get( foreignKeyId ) );
- synchronized ( this ) {
- foreignKeys.remove( catalogForeignKey.id );
- deleteKeyIfNoLongerUsed( catalogForeignKey.id );
- }
- listeners.firePropertyChange( "foreignKey", foreignKeyId, null );
- } catch ( NullPointerException e ) {
- throw new GenericCatalogException( e );
- }
- }
-
-
- /**
- * {@inheritDoc}
- */
- @Override
- public void deleteConstraint( long constraintId ) throws GenericCatalogException {
- try {
- CatalogConstraint catalogConstraint = Objects.requireNonNull( constraints.get( constraintId ) );
-
- //CatalogCombinedKey key = getCombinedKey( catalogConstraint.keyId );
- if ( catalogConstraint.type == ConstraintType.UNIQUE && isForeignKey( catalogConstraint.keyId ) ) {
- if ( getKeyUniqueCount( catalogConstraint.keyId ) < 2 ) {
- throw new GenericCatalogException( "This key is referenced by at least one foreign key which requires this key to be unique. Unable to drop unique constraint."
); - } - } - synchronized ( this ) { - constraints.remove( catalogConstraint.id ); - } - listeners.firePropertyChange( "constraint", catalogConstraint, null ); - deleteKeyIfNoLongerUsed( catalogConstraint.keyId ); - } catch ( NullPointerException e ) { - throw new GenericCatalogException( e ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public CatalogUser getUser( String name ) throws UnknownUserException { - try { - return Objects.requireNonNull( userNames.get( name ) ); - } catch ( NullPointerException e ) { - throw new UnknownUserException( name ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public CatalogUser getUser( long id ) { - try { - return Objects.requireNonNull( users.get( id ) ); - } catch ( NullPointerException e ) { - throw new UnknownUserIdRuntimeException( id ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public List getAdapters() { - return new ArrayList<>( adapters.values() ); - } - - - /** - * {@inheritDoc} - */ - @Override - public CatalogAdapter getAdapter( String uniqueName ) throws UnknownAdapterException { - uniqueName = uniqueName.toLowerCase(); - try { - return Objects.requireNonNull( adapterNames.get( uniqueName ) ); - } catch ( NullPointerException e ) { - throw new UnknownAdapterException( uniqueName ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public CatalogAdapter getAdapter( long id ) { - try { - return Objects.requireNonNull( adapters.get( id ) ); - } catch ( NullPointerException e ) { - throw new UnknownAdapterIdRuntimeException( id ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public boolean checkIfExistsAdapter( long id ) { - return adapters.containsKey( id ); - } - - - /** - * {@inheritDoc} - */ - @Override - public long addAdapter( String uniqueName, String adapterName, AdapterType type, Map settings ) { - uniqueName = uniqueName.toLowerCase(); - - int id = adapterIdBuilder.getAndIncrement(); - Map temp = new HashMap<>( settings ); - CatalogAdapter adapter = new CatalogAdapter( id, uniqueName, adapterName, type, temp ); - synchronized ( this ) { - adapters.put( id, adapter ); - adapterNames.put( uniqueName, adapter ); - } - try { - commit(); - } catch ( NoTablePrimaryKeyException e ) { - throw new RuntimeException( "An error occurred while creating the adapter." ); - } - listeners.firePropertyChange( "adapter", null, adapter ); - return id; - } - - - /** - * {@inheritDoc} - */ - @Override - public void updateAdapterSettings( long adapterId, Map newSettings ) { - CatalogAdapter old = getAdapter( adapterId ); - Map temp = new HashMap<>(); - newSettings.forEach( temp::put ); - CatalogAdapter adapter = new CatalogAdapter( old.id, old.uniqueName, old.adapterName, old.type, temp ); - synchronized ( this ) { - adapters.put( adapter.id, adapter ); - adapterNames.put( adapter.uniqueName, adapter ); - } - listeners.firePropertyChange( "adapter", old, adapter ); - } - - - /** - * {@inheritDoc} - */ - @Override - public void deleteAdapter( long id ) { - try { - CatalogAdapter adapter = Objects.requireNonNull( adapters.get( id ) ); - synchronized ( this ) { - adapters.remove( id ); - adapterNames.remove( adapter.uniqueName ); - } - try { - commit(); - } catch ( NoTablePrimaryKeyException e ) { - throw new RuntimeException( "An error occurred while deleting the adapter." 
);
- }
- listeners.firePropertyChange( "adapter", adapter, null );
- } catch ( NullPointerException e ) {
- throw new UnknownAdapterIdRuntimeException( id );
- }
- }
-
-
- /**
- * {@inheritDoc}
- */
- @Override
- public List<CatalogQueryInterface> getQueryInterfaces() {
- return new ArrayList<>( queryInterfaces.values() );
- }
-
-
- /**
- * {@inheritDoc}
- */
- @Override
- public CatalogQueryInterface getQueryInterface( String uniqueName ) throws UnknownQueryInterfaceException {
- uniqueName = uniqueName.toLowerCase();
- try {
- return Objects.requireNonNull( queryInterfaceNames.get( uniqueName ) );
- } catch ( NullPointerException e ) {
- throw new UnknownQueryInterfaceException( uniqueName );
- }
- }
-
-
- /**
- * {@inheritDoc}
- */
- @Override
- public CatalogQueryInterface getQueryInterface( long id ) {
- try {
- return Objects.requireNonNull( queryInterfaces.get( id ) );
- } catch ( NullPointerException e ) {
- throw new UnknownQueryInterfaceRuntimeException( id );
- }
- }
-
-
- /**
- * {@inheritDoc}
- */
- @Override
- public long addQueryInterface( String uniqueName, String clazz, Map<String, String> settings ) {
- uniqueName = uniqueName.toLowerCase();
-
- int id = queryInterfaceIdBuilder.getAndIncrement();
- Map<String, String> temp = new HashMap<>( settings );
- CatalogQueryInterface queryInterface = new CatalogQueryInterface( id, uniqueName, clazz, temp );
- synchronized ( this ) {
- queryInterfaces.put( id, queryInterface );
- queryInterfaceNames.put( uniqueName, queryInterface );
- }
- try {
- commit();
- } catch ( NoTablePrimaryKeyException e ) {
- throw new RuntimeException( "An error occurred while creating the query interface." );
- }
- listeners.firePropertyChange( "queryInterface", null, queryInterface );
- return id;
- }
-
-
- /**
- * {@inheritDoc}
- */
- @Override
- public void deleteQueryInterface( long id ) {
- try {
- CatalogQueryInterface queryInterface = Objects.requireNonNull( queryInterfaces.get( id ) );
- synchronized ( this ) {
- queryInterfaces.remove( id );
- queryInterfaceNames.remove( queryInterface.name );
- }
- try {
- commit();
- } catch ( NoTablePrimaryKeyException e ) {
- throw new RuntimeException( "An error occurred while deleting the query interface." );
- }
- listeners.firePropertyChange( "queryInterface", queryInterface, null );
- } catch ( NullPointerException e ) {
- throw new UnknownQueryInterfaceRuntimeException( id );
- }
- }
-
-
- /**
- * {@inheritDoc}
- */
- @Override
- public long addPartitionGroup( long tableId, String partitionGroupName, long schemaId, PartitionType partitionType, long numberOfInternalPartitions, List<String> effectivePartitionGroupQualifier, boolean isUnbound ) throws GenericCatalogException {
- try {
- long id = partitionGroupIdBuilder.getAndIncrement();
- if ( log.isDebugEnabled() ) {
- log.debug( "Creating partitionGroup of type '{}' with id '{}'", partitionType, id );
- }
- LogicalNamespace schema = Objects.requireNonNull( schemas.get( schemaId ) );
-
- List<Long> partitionIds = new ArrayList<>();
- for ( int i = 0; i < numberOfInternalPartitions; i++ ) {
- long partId = addPartition( tableId, schemaId, id, effectivePartitionGroupQualifier, isUnbound );
- partitionIds.add( partId );
- }
-
- CatalogPartitionGroup partitionGroup = new CatalogPartitionGroup(
- id,
- partitionGroupName,
- tableId,
- schemaId,
- 0,
- null,
- ImmutableList.copyOf( partitionIds ),
- isUnbound );
-
- synchronized ( this ) {
- partitionGroups.put( id, partitionGroup );
- }
- //listeners.firePropertyChange( "partitionGroups", null, partitionGroup );
- return id;
- } catch ( NullPointerException e ) {
- throw new GenericCatalogException( e );
- }
- }
-
-
- /**
- * {@inheritDoc}
- */
- @Override
- public void deletePartitionGroup( long tableId, long schemaId, long partitionGroupId ) throws UnknownPartitionGroupIdRuntimeException {
- if ( log.isDebugEnabled() ) {
- log.debug( "Deleting partitionGroup with id '{}' on table with id '{}'", partitionGroupId, tableId );
- }
- // Check whether this partition group id exists
- CatalogPartitionGroup partitionGroup = getPartitionGroup( partitionGroupId );
- synchronized ( this ) {
- for ( long partitionId : partitionGroup.partitionIds ) {
- deletePartition( tableId, schemaId, partitionId );
- }
- partitionGroups.remove( partitionGroupId );
- }
- }
-
-
- /**
- * {@inheritDoc}
- */
- @Override
- public void updatePartitionGroup( long partitionGroupId, List<Long> partitionIds ) throws UnknownPartitionGroupIdRuntimeException {
-
- // Check whether this partition group id exists
- CatalogPartitionGroup partitionGroup = getPartitionGroup( partitionGroupId );
-
- CatalogPartitionGroup updatedCatalogPartitionGroup = new CatalogPartitionGroup(
- partitionGroup.id,
- partitionGroup.partitionGroupName,
- partitionGroup.tableId,
- partitionGroup.schemaId,
- partitionGroup.partitionKey,
- partitionGroup.partitionQualifiers,
- ImmutableList.copyOf( partitionIds ),
- partitionGroup.isUnbound );
-
- synchronized ( this ) {
- partitionGroups.replace( partitionGroupId, updatedCatalogPartitionGroup );
- listeners.firePropertyChange( "partitionGroup", partitionGroup, updatedCatalogPartitionGroup );
- }
-
- }
-
-
- /**
- * {@inheritDoc}
- */
- @Override
- public void addPartitionToGroup( long partitionGroupId, Long partitionId ) {
- // Check whether this partition id exists
- getPartition( partitionId );
-
- CatalogPartitionGroup partitionGroup = getPartitionGroup( partitionGroupId );
- List<Long> newPartitionIds = new ArrayList<>( partitionGroup.partitionIds );
-
- if ( !newPartitionIds.contains( partitionId ) ) {
- newPartitionIds.add( partitionId );
- updatePartitionGroup( partitionGroupId, newPartitionIds );
- }
- }
-
-
- /**
- * {@inheritDoc}
- */
- @Override
- public void removePartitionFromGroup( long partitionGroupId, Long partitionId ) {
- // Check whether this partition group id exists
- CatalogPartitionGroup partitionGroup = getPartitionGroup( partitionGroupId );
- List<Long> newPartitionIds = new ArrayList<>( partitionGroup.partitionIds );
-
- if ( newPartitionIds.contains( partitionId ) ) {
- newPartitionIds.remove( partitionId );
- updatePartitionGroup( partitionGroupId, newPartitionIds );
- }
- }
-
-
- /**
- * {@inheritDoc}
- */
- @Override
- public void updatePartition( long partitionId, Long partitionGroupId ) {
- // Check whether this partition group id exists
- CatalogPartitionGroup partitionGroup = getPartitionGroup( partitionGroupId );
- List<Long> newPartitionIds = new ArrayList<>( partitionGroup.partitionIds );
-
- CatalogPartition oldPartition = getPartition( partitionId );
-
- if ( !newPartitionIds.contains( partitionId ) ) {
- newPartitionIds.add( partitionId );
-
- addPartitionToGroup( partitionGroupId, partitionId );
- removePartitionFromGroup( oldPartition.partitionGroupId, partitionId );
-
- CatalogPartition updatedPartition = new CatalogPartition(
- oldPartition.id,
- oldPartition.tableId,
- oldPartition.schemaId,
- oldPartition.partitionQualifiers,
- oldPartition.isUnbound,
- partitionGroupId
- );
-
- synchronized ( this ) {
- partitions.put( updatedPartition.id, updatedPartition );
- }
- listeners.firePropertyChange( "partition", oldPartition, updatedPartition );
- }
- }
-
-
- /**
- * {@inheritDoc}
- */
- @Override
- public CatalogPartitionGroup getPartitionGroup( long partitionGroupId ) throws UnknownPartitionGroupIdRuntimeException {
- try {
- return Objects.requireNonNull( partitionGroups.get( partitionGroupId ) );
- } catch ( NullPointerException e ) {
- throw new UnknownPartitionGroupIdRuntimeException( partitionGroupId );
- }
- }
-
-
- /**
- * {@inheritDoc}
- */
- @Override
- public long addPartition( long tableId, long schemaId, long partitionGroupId, List<String> effectivePartitionQualifier, boolean isUnbound ) throws GenericCatalogException {
- try {
- long id = partitionIdBuilder.getAndIncrement();
- if ( log.isDebugEnabled() ) {
- log.debug( "Creating partition with id '{}'", id );
- }
- LogicalNamespace schema = Objects.requireNonNull( schemas.get( schemaId ) );
-
- CatalogPartition partition = new CatalogPartition(
- id,
- tableId,
- schemaId,
- effectivePartitionQualifier,
- isUnbound,
- partitionGroupId );
-
- synchronized ( this ) {
- partitions.put( id, partition );
- }
- listeners.firePropertyChange( "partition", null, partition );
- return id;
- } catch ( NullPointerException e ) {
- throw new GenericCatalogException( e );
- }
- }
-
-
- /**
- * {@inheritDoc}
- */
- @Override
- public void deletePartition( long tableId, long schemaId, long partitionId ) {
- if ( log.isDebugEnabled() ) {
- log.debug( "Deleting partition with id '{}' on table with id '{}'", partitionId, tableId );
- }
- // Check whether this partition id exists
- getPartition( partitionId );
- synchronized ( this ) {
- for ( CatalogPartitionPlacement partitionPlacement : getPartitionPlacements( partitionId ) ) {
- deletePartitionPlacement( partitionPlacement.adapterId, partitionId );
- }
- partitions.remove( partitionId );
- }
- }
-
-
- /**
- * {@inheritDoc}
- */
- @Override
- public CatalogPartition getPartition( long partitionId ) {
- try {
- return Objects.requireNonNull( partitions.get( partitionId ) );
- } catch ( NullPointerException e ) {
- throw new UnknownPartitionGroupIdRuntimeException( partitionId );
- }
- }
-
-
- /**
- * {@inheritDoc}
- */
- @Override
- public List<CatalogPartition> getPartitionsByTable( long tableId ) {
- return partitions.values()
- .stream()
- .filter( p -> p.tableId == tableId )
- .collect( Collectors.toList() );
- }
-
-
- /**
- * {@inheritDoc}
- */
- @Override
- public void partitionTable( long tableId, PartitionType partitionType, long partitionColumnId, int numPartitionGroups, List<Long> partitionGroupIds, PartitionProperty partitionProperty ) {
- LogicalTable old = Objects.requireNonNull( tables.get( tableId ) );
-
- LogicalTable table = new LogicalTable(
- old.id,
- old.name,
- old.fieldIds,
- old.namespaceId,
- old.ownerId,
- old.entityType,
- old.primaryKey,
- old.dataPlacements,
- old.modifiable,
- partitionProperty,
- old.connectedViews );
-
- synchronized ( this ) {
- tables.replace( tableId, table );
- tableNames.replace( new Object[]{ table.namespaceId, old.name }, table );
-
- if ( table.partitionProperty.reliesOnPeriodicChecks ) {
- addTableToPeriodicProcessing( tableId );
- }
- }
-
- listeners.firePropertyChange( "table", old, table );
- }
-
-
- /**
- * {@inheritDoc}
- */
- @Override
- public void mergeTable( long tableId ) {
- LogicalTable old = Objects.requireNonNull( tables.get( tableId ) );
-
- if ( old.partitionProperty.reliesOnPeriodicChecks ) {
- removeTableFromPeriodicProcessing( tableId );
- }
-
- // Technically every table is partitioned, but tables classified as UNPARTITIONED consist of a single partition group holding one large partition
- List<Long> partitionGroupIds = new ArrayList<>();
- try {
- partitionGroupIds.add( addPartitionGroup( tableId, "full", old.namespaceId, PartitionType.NONE, 1, new ArrayList<>(), true ) );
- } catch ( GenericCatalogException e ) {
- throw new RuntimeException( e );
- }
-
- // Get the (single) partition group, collect its partition ids, and use them for the new partition property
- CatalogPartitionGroup defaultUnpartitionedGroup = getPartitionGroup( partitionGroupIds.get( 0 ) );
- PartitionProperty partitionProperty = PartitionProperty.builder()
- .partitionType( PartitionType.NONE )
- .isPartitioned( false )
- .partitionGroupIds( ImmutableList.copyOf( partitionGroupIds ) )
- .partitionIds( ImmutableList.copyOf( defaultUnpartitionedGroup.partitionIds ) )
- .reliesOnPeriodicChecks( false )
- .build();
-
- LogicalTable table = new LogicalTable(
- old.id,
- old.name,
- old.fieldIds,
- old.namespaceId,
- old.ownerId,
- old.entityType,
- old.primaryKey,
- old.dataPlacements,
- old.modifiable,
- partitionProperty,
- old.connectedViews );
-
- synchronized ( this ) {
- tables.replace( tableId, table );
- tableNames.replace( new Object[]{ table.namespaceId, old.name }, table );
- }
- listeners.firePropertyChange( "table", old, table );
- }
-
-
- /**
- * {@inheritDoc}
- */
- @Override
- public void updateTablePartitionProperties( long tableId, PartitionProperty partitionProperty ) {
- LogicalTable old = Objects.requireNonNull( tables.get( tableId ) );
-
- LogicalTable table = new LogicalTable(
- old.id,
- old.name,
- old.fieldIds,
- old.namespaceId,
- old.ownerId,
- old.entityType,
- old.primaryKey,
- old.dataPlacements,
- old.modifiable,
- partitionProperty,
- old.connectedViews );
-
- synchronized ( this ) {
- tables.replace( tableId, table );
- tableNames.replace( new Object[]{ table.namespaceId, old.name }, table );
- }
-
- listeners.firePropertyChange( "table", old, table );
- }
-
-
- /**
- * {@inheritDoc}
- */
- @Override
- public List<CatalogPartitionGroup> getPartitionGroups( long tableId ) {
- try {
- LogicalTable table = Objects.requireNonNull( tables.get( tableId ) );
- List<CatalogPartitionGroup> partitionGroups = new ArrayList<>();
- if (
table.partitionProperty.partitionGroupIds == null ) { - return new ArrayList<>(); - } - for ( long partId : table.partitionProperty.partitionGroupIds ) { - partitionGroups.add( getPartitionGroup( partId ) ); - } - return partitionGroups; - } catch ( UnknownPartitionGroupIdRuntimeException e ) { - return new ArrayList<>(); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public List getPartitionGroups( Pattern schemaNamePattern, Pattern tableNamePattern ) { - List catalogEntities = getTables( schemaNamePattern, tableNamePattern ); - Stream partitionGroupStream = Stream.of(); - for ( LogicalTable catalogTable : catalogEntities ) { - partitionGroupStream = Stream.concat( partitionGroupStream, getPartitionGroups( catalogTable.id ).stream() ); - } - return partitionGroupStream.collect( Collectors.toList() ); - } - - - /** - * {@inheritDoc} - */ - @Override - public List getPartitions( long partitionGroupId ) { - try { - CatalogPartitionGroup partitionGroup = Objects.requireNonNull( partitionGroups.get( partitionGroupId ) ); - List partitions = new ArrayList<>(); - if ( partitionGroup.partitionIds == null ) { - return new ArrayList<>(); - } - for ( long partId : partitionGroup.partitionIds ) { - partitions.add( getPartition( partId ) ); - } - return partitions; - } catch ( UnknownPartitionGroupIdRuntimeException e ) { - return new ArrayList<>(); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public List getPartitions( Pattern schemaNamePattern, Pattern tableNamePattern ) { - List catalogPartitionGroups = getPartitionGroups( schemaNamePattern, tableNamePattern ); - Stream partitionStream = Stream.of(); - for ( CatalogPartitionGroup catalogPartitionGroup : catalogPartitionGroups ) { - partitionStream = Stream.concat( partitionStream, getPartitions( catalogPartitionGroup.id ).stream() ); - } - return partitionStream.collect( Collectors.toList() ); - } - - - /** - * {@inheritDoc} - */ - @Override - public List getPartitionGroupNames( long tableId ) { - List partitionGroupNames = new ArrayList<>(); - for ( CatalogPartitionGroup catalogPartitionGroup : getPartitionGroups( tableId ) ) { - partitionGroupNames.add( catalogPartitionGroup.partitionGroupName ); - } - return partitionGroupNames; - } - - - /** - * {@inheritDoc} - */ - @Override - public List getColumnPlacementsByPartitionGroup( long tableId, long partitionGroupId, long columnId ) { - List catalogColumnPlacements = new ArrayList<>(); - for ( CatalogColumnPlacement ccp : getColumnPlacement( columnId ) ) { - if ( getPartitionGroupsOnDataPlacement( ccp.adapterId, tableId ).contains( partitionGroupId ) ) { - catalogColumnPlacements.add( ccp ); - } - } - - return catalogColumnPlacements; - } - - - /** - * {@inheritDoc} - */ - @Override - public List getAdaptersByPartitionGroup( long tableId, long partitionGroupId ) { - Set catalogAdapters = new HashSet<>(); - - for ( CatalogDataPlacement dataPlacement : getDataPlacements( tableId ) ) { - for ( long partitionId : dataPlacement.getAllPartitionIds() ) { - long partitionGroup = getPartitionGroupByPartition( partitionId ); - if ( partitionGroup == partitionGroupId ) { - catalogAdapters.add( getAdapter( dataPlacement.adapterId ) ); - } - } - } - - return new ArrayList<>( catalogAdapters ); - } - - - /** - * {@inheritDoc} - */ - @Override - public List getPartitionGroupsOnDataPlacement( int adapterId, long tableId ) { - Set partitionGroups = new HashSet<>(); - CatalogDataPlacement dataPlacement = getDataPlacement( adapterId, tableId ); - - dataPlacement.getAllPartitionIds().forEach( - 
partitionId -> partitionGroups.add( getPartitionGroupByPartition( partitionId ) - ) - ); - - return new ArrayList<>( partitionGroups ); - } - - - /** - * {@inheritDoc} - */ - @Override - public List getPartitionsOnDataPlacement( int adapterId, long tableId ) { - return getDataPlacement( adapterId, tableId ).getAllPartitionIds(); - } - - - /** - * {@inheritDoc} - */ - @Override - public List getPartitionGroupsIndexOnDataPlacement( int adapterId, long tableId ) { - List partitionGroups = getPartitionGroupsOnDataPlacement( adapterId, tableId ); - if ( partitionGroups == null ) { - return new ArrayList<>(); - } - - List partitionGroupIndexList = new ArrayList<>(); - LogicalTable catalogTable = getTable( tableId ); - for ( int index = 0; index < catalogTable.partitionProperty.partitionGroupIds.size(); index++ ) { - if ( partitionGroups.contains( catalogTable.partitionProperty.partitionGroupIds.get( index ) ) ) { - partitionGroupIndexList.add( (long) index ); - } - } - return partitionGroupIndexList; - } - - - /** - * {@inheritDoc} - */ - @Override - public CatalogDataPlacement getDataPlacement( int adapterId, long tableId ) { - return dataPlacements.get( new Object[]{ adapterId, tableId } ); - } - - - /** - * {@inheritDoc} - */ - @Override - public List getDataPlacements( long tableId ) { - List catalogDataPlacements = new ArrayList<>(); - - getTable( tableId ).dataPlacements.forEach( adapterId -> catalogDataPlacements.add( getDataPlacement( adapterId, tableId ) ) ); - - return catalogDataPlacements; - } - - - /** - * {@inheritDoc} - */ - @Override - public List getAllFullDataPlacements( long tableId ) { - List dataPlacements = new ArrayList<>(); - List allDataPlacements = getDataPlacements( tableId ); - - for ( CatalogDataPlacement dataPlacement : allDataPlacements ) { - if ( dataPlacement.hasFullPlacement() ) { - dataPlacements.add( dataPlacement ); - } - } - return dataPlacements; - } - - - /** - * {@inheritDoc} - */ - @Override - public List getAllColumnFullDataPlacements( long tableId ) { - List dataPlacements = new ArrayList<>(); - List allDataPlacements = getDataPlacements( tableId ); - - for ( CatalogDataPlacement dataPlacement : allDataPlacements ) { - if ( dataPlacement.hasColumnFullPlacement() ) { - dataPlacements.add( dataPlacement ); - } - } - return dataPlacements; - } - - - /** - * {@inheritDoc} - */ - @Override - public List getAllPartitionFullDataPlacements( long tableId ) { - List dataPlacements = new ArrayList<>(); - List allDataPlacements = getDataPlacements( tableId ); - - for ( CatalogDataPlacement dataPlacement : allDataPlacements ) { - if ( dataPlacement.hasPartitionFullPlacement() ) { - dataPlacements.add( dataPlacement ); - } - } - return dataPlacements; - } - - - /** - * {@inheritDoc} - */ - @Override - public List getDataPlacementsByRole( long tableId, DataPlacementRole role ) { - List catalogDataPlacements = new ArrayList<>(); - for ( CatalogDataPlacement dataPlacement : getDataPlacements( tableId ) ) { - if ( dataPlacement.dataPlacementRole.equals( role ) ) { - catalogDataPlacements.add( dataPlacement ); - } - } - return catalogDataPlacements; - } - - - /** - * {@inheritDoc} - */ - @Override - public List getPartitionPlacementsByRole( long tableId, DataPlacementRole role ) { - List partitionPlacements = new ArrayList<>(); - for ( CatalogDataPlacement dataPlacement : getDataPlacementsByRole( tableId, role ) ) { - if ( dataPlacement.partitionPlacementsOnAdapterByRole.containsKey( role ) ) { - dataPlacement.partitionPlacementsOnAdapterByRole.get( role ) - .forEach( - 
partitionId -> partitionPlacements.add( getPartitionPlacement( dataPlacement.adapterId, partitionId ) )
- );
- }
- }
- return partitionPlacements;
- }
-
-
- /**
- * {@inheritDoc}
- */
- @Override
- public List<CatalogPartitionPlacement> getPartitionPlacementsByIdAndRole( long tableId, long partitionId, DataPlacementRole role ) {
- List<CatalogPartitionPlacement> partitionPlacements = new ArrayList<>();
- for ( CatalogPartitionPlacement partitionPlacement : getPartitionPlacements( partitionId ) ) {
- if ( partitionPlacement.role.equals( role ) ) {
- partitionPlacements.add( partitionPlacement );
- }
- }
- return partitionPlacements;
- }
-
-
- /**
- * {@inheritDoc}
- */
- @Override
- public boolean validateDataPlacementsConstraints( long tableId, long adapterId, List<Long> columnIdsToBeRemoved, List<Long> partitionsIdsToBeRemoved ) {
- if ( (columnIdsToBeRemoved.isEmpty() && partitionsIdsToBeRemoved.isEmpty()) || isTableFlaggedForDeletion( tableId ) ) {
- log.warn( "Validation was invoked with empty lists of columns and partitions to be removed; the placement constraints trivially hold." );
- return true;
- }
-
- // TODO @HENNLO Focus on PartitionPlacements that are labeled as UPTODATE nodes. The outdated nodes do not
- // necessarily need placement constraints
-
- LogicalTable table = getTable( tableId );
- List<CatalogDataPlacement> dataPlacements = getDataPlacements( tableId );
-
- // Check for every column whether it is still placed with all partitions on some DataPlacement
- for ( long columnId : table.fieldIds ) {
- List<Long> partitionsToBeCheckedForColumn = table.partitionProperty.partitionIds.stream().collect( Collectors.toList() );
- // Check for every column if it has every partition
- for ( CatalogDataPlacement dataPlacement : dataPlacements ) {
- // Can instantly return because we still have a full placement somewhere
- if ( dataPlacement.hasFullPlacement() && dataPlacement.adapterId != adapterId ) {
- return true;
- }
-
- List<Long> effectiveColumnsOnStore = dataPlacement.columnPlacementsOnAdapter.stream().collect( Collectors.toList() );
- List<Long> effectivePartitionsOnStore = dataPlacement.getAllPartitionIds();
-
- // Remove columns and partitions from store to not evaluate them
- if ( dataPlacement.adapterId == adapterId ) {
-
- // Skips columns that shall be removed
- if ( columnIdsToBeRemoved.contains( columnId ) ) {
- continue;
- }
-
- // Only process those parts that shall be present after change
- effectiveColumnsOnStore.removeAll( columnIdsToBeRemoved );
- effectivePartitionsOnStore.removeAll( partitionsIdsToBeRemoved );
- }
-
- if ( effectiveColumnsOnStore.contains( columnId ) ) {
- partitionsToBeCheckedForColumn.removeAll( effectivePartitionsOnStore );
- } else {
- continue;
- }
-
- // Found all partitions for column, continue with next column
- if ( partitionsToBeCheckedForColumn.isEmpty() ) {
- break;
- }
- }
-
- if ( !partitionsToBeCheckedForColumn.isEmpty() ) {
- return false;
- }
- }
-
- return true;
- }
-
-
- /**
- * {@inheritDoc}
- */
- @Override
- public void flagTableForDeletion( long tableId, boolean flag ) {
- if ( flag && !tablesFlaggedForDeletion.contains( tableId ) ) {
- tablesFlaggedForDeletion.add( tableId );
- } else if ( !flag && tablesFlaggedForDeletion.contains( tableId ) ) {
- tablesFlaggedForDeletion.remove( tableId );
- }
- }
-
-
- /**
- * {@inheritDoc}
- */
- @Override
- public boolean isTableFlaggedForDeletion( long tableId ) {
- return tablesFlaggedForDeletion.contains( tableId );
- }
-
-
- /**
- * {@inheritDoc}
- */
- @Override
- public void addPartitionPlacement( long namespaceId, int adapterId, long tableId, long partitionId, PlacementType placementType,
String physicalSchemaName, String physicalTableName, DataPlacementRole role ) { - if ( !checkIfExistsPartitionPlacement( adapterId, partitionId ) ) { - CatalogAdapter store = Objects.requireNonNull( adapters.get( adapterId ) ); - CatalogPartitionPlacement partitionPlacement = new CatalogPartitionPlacement( - namespaceId, - tableId, - adapterId, - store.uniqueName, - placementType, - physicalSchemaName, - physicalTableName, - partitionId, - role ); - - synchronized ( this ) { - partitionPlacements.put( new Object[]{ adapterId, partitionId }, partitionPlacement ); - - // Adds this PartitionPlacement to existing DataPlacement container - addPartitionsToDataPlacement( adapterId, tableId, List.of( partitionId ) ); - - listeners.firePropertyChange( "partitionPlacement", null, partitionPlacements ); - } - } - } - - - /** - * {@inheritDoc} - */ - @Override - public CatalogDataPlacement addDataPlacementIfNotExists( int adapterId, long tableId ) { - CatalogDataPlacement dataPlacement; - if ( (dataPlacement = getDataPlacement( adapterId, tableId )) == null ) { - if ( log.isDebugEnabled() ) { - log.debug( "No DataPlacement exists on adapter '{}' for entity '{}'. Creating a new one.", getAdapter( adapterId ), getTable( tableId ) ); - } - addDataPlacement( adapterId, tableId ); - dataPlacement = getDataPlacement( adapterId, tableId ); - } - - return dataPlacement; - } - - - /** - * {@inheritDoc} - */ - @Override - public void updateDataPlacementsOnTable( long tableId, List newDataPlacements ) { - LogicalTable old = Objects.requireNonNull( tables.get( tableId ) ); - - LogicalTable newTable = old.withDataPlacements( ImmutableList.copyOf( newDataPlacements ) ); - - synchronized ( this ) { - tables.replace( tableId, newTable ); - tableNames.replace( new Object[]{ newTable.namespaceId, newTable.name }, newTable ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public void addDataPlacement( int adapterId, long tableId ) { - if ( log.isDebugEnabled() ) { - log.debug( "Creating DataPlacement on adapter '{}' for entity '{}'", getAdapter( adapterId ), getTable( tableId ) ); - } - - if ( !dataPlacements.containsKey( new Object[]{ adapterId, tableId } ) ) { - CatalogDataPlacement dataPlacement = new CatalogDataPlacement( - tableId, - adapterId, - PlacementType.AUTOMATIC, - DataPlacementRole.UPTODATE, - ImmutableList.of(), - ImmutableList.of() ); - - synchronized ( this ) { - dataPlacements.put( new Object[]{ adapterId, tableId }, dataPlacement ); - addSingleDataPlacementToTable( adapterId, tableId ); - } - listeners.firePropertyChange( "dataPlacement", null, dataPlacement ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - protected void modifyDataPlacement( int adapterId, long tableId, CatalogDataPlacement catalogDataPlacement ) { - - try { - CatalogDataPlacement oldDataPlacement = getDataPlacement( adapterId, tableId ); - synchronized ( this ) { - dataPlacements.replace( new Object[]{ adapterId, tableId }, catalogDataPlacement ); - } - listeners.firePropertyChange( "dataPlacement", oldDataPlacement, catalogDataPlacement ); - } catch ( NullPointerException e ) { - e.printStackTrace(); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public long addGraphPlacement( int adapterId, long graphId ) { - long id = partitionIdBuilder.getAndIncrement(); - CatalogGraphPlacement placement = new CatalogGraphPlacement( adapterId, graphId, null, id ); - LogicalGraph old = graphs.get( graphId ); - if ( old == null ) { - throw new UnknownGraphException( graphId ); - } - - LogicalGraph graph = old.addPlacement( 
adapterId ); - - synchronized ( this ) { - graphPlacements.put( new Object[]{ graph.id, adapterId }, placement ); - graphs.replace( graph.id, graph ); - graphNames.replace( new Object[]{ graph.name }, graph ); - } - listeners.firePropertyChange( "graphPlacement", null, placement ); - return id; - } - - - /** - * {@inheritDoc} - */ - @Override - public void deleteGraphPlacement( int adapterId, long graphId ) { - if ( !graphPlacements.containsKey( new Object[]{ graphId, adapterId } ) ) { - throw new UnknownGraphPlacementsException( graphId, adapterId ); - } - CatalogGraphPlacement placement = Objects.requireNonNull( graphPlacements.get( new Object[]{ graphId, adapterId } ) ); - - deleteGraphPlacementLogistics( placement.graphId, adapterId ); - - LogicalGraph old = Objects.requireNonNull( graphs.get( placement.graphId ) ); - - LogicalGraph graph = old.removePlacement( adapterId ); - - synchronized ( this ) { - graphPlacements.remove( new Object[]{ graphId, adapterId } ); - graphs.replace( graphId, graph ); - graphNames.replace( new Object[]{ Catalog.defaultDatabaseId, graph.name }, graph ); - } - listeners.firePropertyChange( "graphPlacements", null, null ); - } - - - private void deleteGraphPlacementLogistics( long graphId, int adapterId ) { - if ( !graphMappings.containsKey( graphId ) ) { - throw new UnknownGraphException( graphId ); - } - CatalogGraphMapping mapping = Objects.requireNonNull( graphMappings.get( graphId ) ); - if ( !graphPlacements.containsKey( new Object[]{ graphId, adapterId } ) ) { - throw new UnknownGraphPlacementsException( graphId, adapterId ); - } - CatalogGraphPlacement placement = Objects.requireNonNull( graphPlacements.get( new Object[]{ graphId, adapterId } ) ); - - removeSingleDataPlacementFromTable( placement.adapterId, mapping.nodesId ); - removeSingleDataPlacementFromTable( placement.adapterId, mapping.nodesPropertyId ); - removeSingleDataPlacementFromTable( placement.adapterId, mapping.edgesId ); - removeSingleDataPlacementFromTable( placement.adapterId, mapping.edgesPropertyId ); - } - - - /** - * {@inheritDoc} - */ - @Override - public CatalogGraphPlacement getGraphPlacement( long graphId, int adapterId ) { - if ( !graphPlacements.containsKey( new Object[]{ graphId, adapterId } ) ) { - throw new UnknownGraphPlacementsException( graphId, adapterId ); - } - - return graphPlacements.get( new Object[]{ graphId, adapterId } ); - } - - - /** - * {@inheritDoc} - */ - @Override - public void removeDataPlacement( int adapterId, long tableId ) { - CatalogDataPlacement dataPlacement = getDataPlacement( adapterId, tableId ); - - if ( log.isDebugEnabled() ) { - log.debug( "Removing DataPlacement on adapter '{}' for entity '{}'", getAdapter( adapterId ), getTable( tableId ) ); - } - - // Make sure that all columnPlacements and partitionPlacements are correctly dropped. - // Although, they should've been dropped earlier. 
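// The two loops below are defensive cleanup: callers are expected to have dropped the
// column and partition placements beforehand, which is why a missing placement only
// produces a debug log. Note that the partition loop catches
// UnknownColumnIdRuntimeException as well, apparently carried over from the column loop.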
-
- // Recursively removing columns that exist on this placement
- for ( Long columnId : dataPlacement.columnPlacementsOnAdapter ) {
- try {
- deleteColumnPlacement( adapterId, columnId, false );
- } catch ( UnknownColumnIdRuntimeException e ) {
- log.debug( "Column has been removed before the placement" );
- }
- }
-
- // Recursively removing partitions that exist on this placement
- for ( Long partitionId : dataPlacement.getAllPartitionIds() ) {
- try {
- deletePartitionPlacement( adapterId, partitionId );
- } catch ( UnknownColumnIdRuntimeException e ) {
- log.debug( "Partition has been removed before the placement" );
- }
- }
-
- synchronized ( this ) {
- dataPlacements.remove( new Object[]{ adapterId, tableId } );
- removeSingleDataPlacementFromTable( adapterId, tableId );
- }
- listeners.firePropertyChange( "dataPlacement", dataPlacement, null );
- }
-
-
- /**
- * {@inheritDoc}
- */
- @Override
- protected void addSingleDataPlacementToTable( Integer adapterId, long tableId ) {
- LogicalTable old = getTable( tableId );
- List<Integer> updatedPlacements = new ArrayList<>( old.dataPlacements );
-
- if ( !updatedPlacements.contains( adapterId ) ) {
- updatedPlacements.add( adapterId );
- updateDataPlacementsOnTable( tableId, updatedPlacements );
- }
- }
-
-
- /**
- * {@inheritDoc}
- */
- @Override
- protected void removeSingleDataPlacementFromTable( Integer adapterId, long tableId ) {
- LogicalTable old = getTable( tableId );
- List<Integer> updatedPlacements = new ArrayList<>( old.dataPlacements );
-
- if ( updatedPlacements.contains( adapterId ) ) {
- updatedPlacements.remove( adapterId );
- updateDataPlacementsOnTable( tableId, updatedPlacements );
- }
- }
-
-
- /**
- * {@inheritDoc}
- */
- @Override
- protected void addColumnsToDataPlacement( int adapterId, long tableId, List<Long> columnIds ) {
- CatalogDataPlacement oldDataPlacement = addDataPlacementIfNotExists( adapterId, tableId );
-
- Set<Long> columnPlacementsOnAdapter = new HashSet<>( oldDataPlacement.columnPlacementsOnAdapter );
-
- // Merges new columnIds to list of already existing placements
- columnPlacementsOnAdapter.addAll( columnIds );
-
- CatalogDataPlacement newDataPlacement = new CatalogDataPlacement(
- oldDataPlacement.tableId,
- oldDataPlacement.adapterId,
- oldDataPlacement.placementType,
- oldDataPlacement.dataPlacementRole,
- ImmutableList.copyOf( new ArrayList<>( columnPlacementsOnAdapter ) ),
- ImmutableList.copyOf( oldDataPlacement.getAllPartitionIds() )
- );
-
- modifyDataPlacement( adapterId, tableId, newDataPlacement );
-
- if ( log.isDebugEnabled() ) {
- log.debug( "Added columns: {} of table {}, to placement on adapter {}.", columnIds, tableId, adapterId );
- }
- }
-
-
- /**
- * {@inheritDoc}
- */
- @Override
- protected void removeColumnsFromDataPlacement( int adapterId, long tableId, List<Long> columnIds ) {
- CatalogDataPlacement oldDataPlacement = getDataPlacement( adapterId, tableId );
-
- Set<Long> columnPlacementsOnAdapter = new HashSet<>( oldDataPlacement.columnPlacementsOnAdapter );
- columnPlacementsOnAdapter.removeAll( columnIds );
-
- CatalogDataPlacement newDataPlacement = new CatalogDataPlacement(
- oldDataPlacement.tableId,
- oldDataPlacement.adapterId,
- oldDataPlacement.placementType,
- oldDataPlacement.dataPlacementRole,
- ImmutableList.copyOf( new ArrayList<>( columnPlacementsOnAdapter ) ),
- ImmutableList.copyOf( oldDataPlacement.getAllPartitionIds() )
- );
-
- modifyDataPlacement( adapterId, tableId, newDataPlacement );
-
- if ( log.isDebugEnabled() ) {
- log.debug( "Removed columns: {} of table {} from placement on adapter {}.", columnIds, tableId, adapterId );
- }
- }
-
-
- /**
- * {@inheritDoc}
- */
- @Override
- protected void addPartitionsToDataPlacement( int adapterId, long tableId, List<Long> partitionIds ) {
- CatalogDataPlacement oldDataPlacement = addDataPlacementIfNotExists( adapterId, tableId );
-
- Set<Long> partitionPlacementsOnAdapter = new HashSet<>( oldDataPlacement.getAllPartitionIds() );
- partitionPlacementsOnAdapter.addAll( partitionIds );
-
- CatalogDataPlacement newDataPlacement = new CatalogDataPlacement(
- oldDataPlacement.tableId,
- oldDataPlacement.adapterId,
- oldDataPlacement.placementType,
- oldDataPlacement.dataPlacementRole,
- oldDataPlacement.columnPlacementsOnAdapter,
- ImmutableList.copyOf( new ArrayList<>( partitionPlacementsOnAdapter ) ) );
-
- modifyDataPlacement( adapterId, tableId, newDataPlacement );
-
- if ( log.isDebugEnabled() ) {
- log.debug( "Added partitions: {} of table {}, to placement on adapter {}.", partitionIds, tableId, adapterId );
- }
- }
-
-
- /**
- * {@inheritDoc}
- */
- @Override
- protected void removePartitionsFromDataPlacement( int adapterId, long tableId, List<Long> partitionIds ) {
- CatalogDataPlacement oldDataPlacement = getDataPlacement( adapterId, tableId );
-
- Set<Long> partitionPlacementsOnAdapter = new HashSet<>( oldDataPlacement.getAllPartitionIds() );
- partitionIds.forEach( partitionPlacementsOnAdapter::remove );
-
- CatalogDataPlacement newDataPlacement = new CatalogDataPlacement(
- oldDataPlacement.tableId,
- oldDataPlacement.adapterId,
- oldDataPlacement.placementType,
- oldDataPlacement.dataPlacementRole,
- oldDataPlacement.columnPlacementsOnAdapter,
- ImmutableList.copyOf( new ArrayList<>( partitionPlacementsOnAdapter ) ) );
-
- modifyDataPlacement( adapterId, tableId, newDataPlacement );
-
- if ( log.isDebugEnabled() ) {
- log.debug( "Removed partitions: {} of table {} from placement on adapter {}.", partitionIds, tableId, adapterId );
- }
- }
-
-
- /**
- * {@inheritDoc}
- */
- @Override
- public void updateDataPlacement( int adapterId, long tableId, List<Long> columnIds, List<Long> partitionIds ) {
- CatalogDataPlacement oldDataPlacement = getDataPlacement( adapterId, tableId );
-
- CatalogDataPlacement newDataPlacement = new CatalogDataPlacement(
- oldDataPlacement.tableId,
- oldDataPlacement.adapterId,
- oldDataPlacement.placementType,
- oldDataPlacement.dataPlacementRole,
- ImmutableList.copyOf( columnIds ),
- ImmutableList.copyOf( partitionIds ) );
-
- modifyDataPlacement( adapterId, tableId, newDataPlacement );
-
- if ( log.isDebugEnabled() ) {
- log.debug( "Updated columns: {} and partitions: {} of table {} on placement on adapter {}.", columnIds, partitionIds, tableId, adapterId );
- }
- }
-
-
- /**
- * {@inheritDoc}
- */
- @Override
- public void deletePartitionPlacement( int adapterId, long partitionId ) {
- if ( checkIfExistsPartitionPlacement( adapterId, partitionId ) ) {
- synchronized ( this ) {
- partitionPlacements.remove( new Object[]{ adapterId, partitionId } );
- removePartitionsFromDataPlacement( adapterId, getTableFromPartition( partitionId ).id, Arrays.asList( partitionId ) );
- }
- }
- }
-
-
- /**
- * {@inheritDoc}
- */
- @Override
- public CatalogPartitionPlacement getPartitionPlacement( int adapterId, long partitionId ) {
- try {
- return Objects.requireNonNull( partitionPlacements.get( new Object[]{ adapterId, partitionId } ) );
- } catch ( NullPointerException e ) {
- getAdapter( adapterId );
- getPartition( partitionId );
- throw new UnknownPartitionPlacementException( adapterId, partitionId );
- }
- }
-
-
- /**
- * {@inheritDoc}
-
*/ - @Override - public List getPartitionPlacementsByAdapter( int adapterId ) { - return new ArrayList<>( partitionPlacements.prefixSubMap( new Object[]{ adapterId } ).values() ); - } - - - /** - * {@inheritDoc} - */ - @Override - public List getPartitionPlacementsByTableOnAdapter( int adapterId, long tableId ) { - return getPartitionPlacementsByAdapter( adapterId ) - .stream() - .filter( p -> p.tableId == tableId ) - .collect( Collectors.toList() ); - } - - - /** - * {@inheritDoc} - */ - @Override - public List getAllPartitionPlacementsByTable( long tableId ) { - return partitionPlacements.values() - .stream() - .filter( p -> p.tableId == tableId ) - .collect( Collectors.toList() ); - } - - - /** - * {@inheritDoc} - */ - @Override - public List getPartitionPlacements( long partitionId ) { - return partitionPlacements.values() - .stream() - .filter( p -> p.partitionId == partitionId ) - .collect( Collectors.toList() ); - } - - - /** - * {@inheritDoc} - */ - @Override - public List getTablesForPeriodicProcessing() { - List procTables = new ArrayList<>(); - for ( Iterator iterator = frequencyDependentTables.iterator(); iterator.hasNext(); ) { - long tableId = -1; - try { - tableId = iterator.next(); - procTables.add( getTable( tableId ) ); - } catch ( UnknownTableIdRuntimeException e ) { - iterator.remove(); - } - } - - return procTables; - } - - - /** - * {@inheritDoc} - */ - @Override - public void addTableToPeriodicProcessing( long tableId ) { - int beforeSize = frequencyDependentTables.size(); - getTable( tableId ); - if ( !frequencyDependentTables.contains( tableId ) ) { - frequencyDependentTables.add( tableId ); - } - // Initially starts the periodic job if this was the first table to enable periodic processing - if ( beforeSize == 0 && frequencyDependentTables.size() == 1 ) { - // Start Job for periodic processing - FrequencyMap.INSTANCE.initialize(); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public void removeTableFromPeriodicProcessing( long tableId ) { - getTable( tableId ); - if ( !frequencyDependentTables.contains( tableId ) ) { - frequencyDependentTables.remove( tableId ); - } - - // Terminates the periodic job if this was the last table with periodic processing - if ( frequencyDependentTables.size() == 0 ) { - // Terminate Job for periodic processing - FrequencyMap.INSTANCE.terminate(); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public boolean checkIfExistsPartitionPlacement( int adapterId, long partitionId ) { - CatalogPartitionPlacement placement = partitionPlacements.get( new Object[]{ adapterId, partitionId } ); - return placement != null; - } - - - /** - * {@inheritDoc} - */ - @Override - public List getTableKeys( long tableId ) { - return keys.values().stream().filter( k -> k.tableId == tableId ).collect( Collectors.toList() ); - } - - - /** - * {@inheritDoc} - */ - @Override - public List getIndexes( CatalogKey key ) { - return indexes.values().stream().filter( i -> i.keyId == key.id ).collect( Collectors.toList() ); - } - - - /** - * {@inheritDoc} - */ - @Override - public List getForeignKeys( CatalogKey key ) { - return indexes.values().stream().filter( i -> i.keyId == key.id ).collect( Collectors.toList() ); - } - - - /** - * {@inheritDoc} - */ - @Override - public List getConstraints( CatalogKey key ) { - return constraints.values().stream().filter( c -> c.keyId == key.id ).collect( Collectors.toList() ); - } - - - /** - * {@inheritDoc} - */ - @Override - public boolean isIndex( long keyId ) { - return indexes.values().stream().anyMatch( i -> 
i.keyId == keyId ); - } - - - /** - * {@inheritDoc} - */ - @Override - public boolean isConstraint( long keyId ) { - return constraints.values().stream().anyMatch( c -> c.keyId == keyId ); - } - - - /** - * {@inheritDoc} - */ - @Override - public boolean isForeignKey( long keyId ) { - return foreignKeys.values().stream().anyMatch( f -> f.referencedKeyId == keyId ); - } - - - /** - * Check if the specified key is used as primary key, index or constraint. If so, this is a NoOp. If it is not used, the key is deleted. - */ - private void deleteKeyIfNoLongerUsed( Long keyId ) { - if ( keyId == null ) { - return; - } - CatalogKey key = getKey( keyId ); - LogicalTable table = getTable( key.tableId ); - if ( table.primaryKey != null && table.primaryKey.equals( keyId ) ) { - return; - } - if ( constraints.values().stream().anyMatch( c -> c.keyId == keyId ) ) { - return; - } - if ( foreignKeys.values().stream().anyMatch( f -> f.id == keyId ) ) { - return; - } - if ( indexes.values().stream().anyMatch( i -> i.keyId == keyId ) ) { - return; - } - synchronized ( this ) { - keys.remove( keyId ); - keyColumns.remove( key.columnIds.stream().mapToLong( Long::longValue ).toArray() ); - } - listeners.firePropertyChange( "key", key, null ); - } - - - /** - * Returns the id of the key defined by the specified column ids. If this key does not yet exist, create it. - * - * @param tableId on which the key is defined - * @param columnIds all involved columns - * @param enforcementTime at which point during execution the key should be enforced - * @return the id of the key - * @throws GenericCatalogException if the key does not exist - */ - private long getOrAddKey( long tableId, List columnIds, EnforcementTime enforcementTime ) throws GenericCatalogException { - Long keyId = keyColumns.get( columnIds.stream().mapToLong( Long::longValue ).toArray() ); - if ( keyId != null ) { - return keyId; - } - return addKey( tableId, columnIds, enforcementTime ); - } - - - private long addKey( long tableId, List columnIds, EnforcementTime enforcementTime ) throws GenericCatalogException { - try { - LogicalTable table = Objects.requireNonNull( tables.get( tableId ) ); - long id = keyIdBuilder.getAndIncrement(); - CatalogKey key = new CatalogKey( id, table.id, table.namespaceId, columnIds, enforcementTime ); - synchronized ( this ) { - keys.put( id, key ); - keyColumns.put( columnIds.stream().mapToLong( Long::longValue ).toArray(), id ); - } - listeners.firePropertyChange( "key", null, key ); - return id; - } catch ( NullPointerException e ) { - throw new GenericCatalogException( e ); - } - } - - - /** - * {@inheritDoc} - */ - @Override - public List getKeys() { - return new ArrayList<>( keys.values() ); - } - - - /** - * Get a key by its id - * - * @return The key - */ - private CatalogKey getKey( long keyId ) { - try { - return Objects.requireNonNull( keys.get( keyId ) ); - } catch ( NullPointerException e ) { - throw new UnknownKeyIdRuntimeException( keyId ); - } - } - - - static class CatalogValidator { - - public void validate() throws GenericCatalogException { - - } - - - public void startCheck() { - columns.forEach( ( key, column ) -> { - assert (schemas.containsKey( column.schemaId )); - assert (Objects.requireNonNull( schemaChildren.get( column.schemaId ) ).contains( column.tableId )); - - assert (tables.containsKey( column.tableId )); - assert (Objects.requireNonNull( tableChildren.get( column.tableId ) ).contains( column.id )); - - assert (columnNames.containsKey( new Object[]{ column.schemaId, column.tableId, column.name
} )); - } ); - - columnPlacements.forEach( ( key, placement ) -> { - assert (columns.containsKey( placement.columnId )); - assert (adapters.containsKey( placement.adapterId )); - } ); - } - - } - -} diff --git a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlAddPlacement.java b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlAddPlacement.java index c9998296b2..1c80652cbe 100644 --- a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlAddPlacement.java +++ b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlAddPlacement.java @@ -22,9 +22,9 @@ import org.polypheny.db.adapter.AdapterManager; import org.polypheny.db.adapter.DataStore; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.logistic.Pattern; import org.polypheny.db.catalog.entity.logical.LogicalCollection; import org.polypheny.db.catalog.exceptions.UnknownSchemaException; +import org.polypheny.db.catalog.logistic.Pattern; import org.polypheny.db.ddl.DdlManager; import org.polypheny.db.languages.ParserPos; import org.polypheny.db.languages.QueryParameters; @@ -48,12 +48,12 @@ public void execute( Context context, Statement statement, QueryParameters param long namespaceId; try { - namespaceId = catalog.getSchema( Catalog.defaultDatabaseId, ((MqlQueryParameters) parameters).getDatabase() ).id; + namespaceId = catalog.getNamespace( ((MqlQueryParameters) parameters).getDatabase() ).id; } catch ( UnknownSchemaException e ) { throw new RuntimeException( "The used document database (Polypheny Schema) is not available." ); } - List collections = catalog.getCollections( namespaceId, new Pattern( getCollection() ) ); + List collections = catalog.getLogicalDoc( namespaceId ).getCollections( new Pattern( getCollection() ) ); if ( collections.size() != 1 ) { throw new RuntimeException( "Error while adding new collection placement, collection not found." 
); diff --git a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlDropDatabase.java b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlDropDatabase.java index ee917d2ae1..61e767d9fa 100644 --- a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlDropDatabase.java +++ b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlDropDatabase.java @@ -41,7 +41,7 @@ public void execute( Context context, Statement statement, QueryParameters param String database = ((MqlQueryParameters) parameters).getDatabase(); try { - DdlManager.getInstance().dropSchema( Catalog.defaultDatabaseId, database, true, statement ); + DdlManager.getInstance().dropNamespace( database, true, statement ); } catch ( SchemaNotExistException | DdlOnSourceException e ) { throw new RuntimeException( e ); } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java index d05ae664c8..8e94f810c8 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java @@ -46,6 +46,7 @@ import org.polypheny.db.catalog.entity.CatalogUser; import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.allocation.AllocationEntity; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.entity.physical.PhysicalEntity; import org.polypheny.db.catalog.exceptions.UnknownAdapterException; import org.polypheny.db.catalog.exceptions.UnknownQueryInterfaceException; @@ -441,6 +442,12 @@ public List getIndexes() { } + @Override + public List getTablesForPeriodicProcessing() { + return null; + } + + @Override public PolyCatalog copy() { return deserialize( serialize(), PolyCatalog.class ); diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/document/DocumentCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/document/DocumentCatalog.java index 61f2c0b1d7..a9c50c47d8 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/document/DocumentCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/document/DocumentCatalog.java @@ -127,13 +127,13 @@ public LogicalCollection getCollection( long collectionId ) { @Override - public List getCollections( long namespaceId, Pattern namePattern ) { + public List getCollections( Pattern namePattern ) { return null; } @Override - public long addCollection( Long id, String name, long schemaId, int currentUserId, EntityType entity, boolean modifiable ) { + public long addCollection( Long id, String name, int currentUserId, EntityType entity, boolean modifiable ) { return 0; } diff --git a/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/Rest.java b/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/Rest.java index 9f98639989..c4b54cf27d 100644 --- a/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/Rest.java +++ b/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/Rest.java @@ -550,7 +550,7 @@ AlgBuilder sort( AlgBuilder algBuilder, RexBuilder rexBuilder, List Date: Thu, 2 Mar 2023 15:11:38 +0100 Subject: [PATCH 033/436] added cacheMaps functionality --- .../org/polypheny/db/catalog/Catalog.java | 15 +- .../catalog/catalogs/AllocationCatalog.java | 2 +- .../catalogs/AllocationRelationalCatalog.java | 140 
++--- .../db/catalog/catalogs/PhysicalCatalog.java | 2 + .../entity/CatalogColumnPlacement.java | 4 +- .../db/catalog/entity/CatalogIndex.java | 4 +- .../entity/allocation/AllocationEntity.java | 4 +- .../entity/allocation/AllocationTable.java | 25 +- .../entity/physical/PhysicalEntity.java | 2 +- .../org/polypheny/db/ddl/DdlManagerImpl.java | 62 +- .../db/routing/routers/BaseRouter.java | 8 +- .../db/routing/routers/DmlRouterImpl.java | 4 +- .../polypheny/db/catalog/ConnectedMap.java | 115 ++++ .../org/polypheny/db/catalog/IdBuilder.java | 33 +- .../org/polypheny/db/catalog/NCatalog.java | 6 +- .../org/polypheny/db/catalog/PolyCatalog.java | 63 +- .../catalog/allocation/AllocationCatalog.java | 106 ---- .../allocation/HorizontalPartition.java | 40 -- .../allocation/PolyAllocDocCatalog.java | 84 +++ .../allocation/PolyAllocGraphCatalog.java | 68 +++ .../allocation/PolyAllocRelCatalog.java | 545 ++++++++++++++++++ .../catalog/allocation/VerticalPartition.java | 40 -- .../{document => }/DocumentCatalog.java | 42 +- .../logical/{graph => }/GraphCatalog.java | 31 +- .../{relational => }/RelationalCatalog.java | 32 +- 25 files changed, 1028 insertions(+), 449 deletions(-) create mode 100644 plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/ConnectedMap.java delete mode 100644 plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/AllocationCatalog.java delete mode 100644 plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/HorizontalPartition.java create mode 100644 plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocDocCatalog.java create mode 100644 plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocGraphCatalog.java create mode 100644 plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocRelCatalog.java delete mode 100644 plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/VerticalPartition.java rename plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/{document => }/DocumentCatalog.java (82%) rename plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/{graph => }/GraphCatalog.java (83%) rename plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/{relational => }/RelationalCatalog.java (96%) diff --git a/core/src/main/java/org/polypheny/db/catalog/Catalog.java b/core/src/main/java/org/polypheny/db/catalog/Catalog.java index 37a1164227..3bb1a21248 100644 --- a/core/src/main/java/org/polypheny/db/catalog/Catalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/Catalog.java @@ -32,6 +32,7 @@ import org.polypheny.db.catalog.catalogs.LogicalDocumentCatalog; import org.polypheny.db.catalog.catalogs.LogicalGraphCatalog; import org.polypheny.db.catalog.catalogs.LogicalRelationalCatalog; +import org.polypheny.db.catalog.catalogs.PhysicalCatalog; import org.polypheny.db.catalog.entity.CatalogAdapter; import org.polypheny.db.catalog.entity.CatalogAdapter.AdapterType; import org.polypheny.db.catalog.entity.CatalogIndex; @@ -89,18 +90,20 @@ public static Catalog getInstance() { public abstract void rollback(); - public abstract LogicalRelationalCatalog getLogicalRel( long id ); + public abstract LogicalRelationalCatalog getLogicalRel( long namespaceId ); - public abstract LogicalDocumentCatalog getLogicalDoc( long id ); + public abstract LogicalDocumentCatalog getLogicalDoc( long namespaceId ); - public abstract LogicalGraphCatalog getLogicalGraph( long id ); + public abstract LogicalGraphCatalog 
getLogicalGraph( long namespaceId ); - public abstract AllocationRelationalCatalog getAllocRel( long id ); + public abstract AllocationRelationalCatalog getAllocRel( long namespaceId ); - public abstract AllocationDocumentCatalog getAllocDoc( long id ); + public abstract AllocationDocumentCatalog getAllocDoc( long namespaceId ); - public abstract AllocationGraphCatalog getAllocGraph( long id ); + public abstract AllocationGraphCatalog getAllocGraph( long namespaceId ); + + public abstract PhysicalCatalog getPhysical( long namespaceId ); public abstract PhysicalEntity getPhysicalEntity( long id ); diff --git a/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationCatalog.java b/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationCatalog.java index 3163b8731a..ffd165da6f 100644 --- a/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationCatalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationCatalog.java @@ -21,6 +21,6 @@ public interface AllocationCatalog { - List> getAllocationsOnAdapter( long id ); + List> getAllocationsOnAdapter( long id ); } diff --git a/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationRelationalCatalog.java b/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationRelationalCatalog.java index 6b2eb2b889..24b8f2f4a2 100644 --- a/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationRelationalCatalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationRelationalCatalog.java @@ -45,7 +45,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param physicalTableName The table name on the adapter * @param physicalColumnName The column name on the adapter */ - public abstract void addColumnPlacement( long adapterId, long columnId, PlacementType placementType, String physicalSchemaName, String physicalTableName, String physicalColumnName ); + void addColumnPlacement( long adapterId, long columnId, PlacementType placementType, String physicalSchemaName, String physicalTableName, String physicalColumnName ); /** * Deletes all dependent column placements * @@ -54,7 +54,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param adapterId The id of the adapter * @param columnId The id of the column * @param columnOnly If the delete originates from a dropColumn */ - public abstract void deleteColumnPlacement( long adapterId, long columnId, boolean columnOnly ); + void deleteColumnPlacement( long adapterId, long columnId, boolean columnOnly ); /** * Gets a collective list of column placements per column on an adapter. * @@ -64,7 +64,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param columnId The id of the column * @return The specific column placement */ - public abstract CatalogColumnPlacement getColumnPlacement( long adapterId, long columnId ); + CatalogColumnPlacement getColumnPlacement( long adapterId, long columnId ); /** * Checks if there is a placement of the specified column on the specified adapter. * @@ -73,7 +73,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param columnId The id of the column * @return true if there is a column placement, false if not.
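// ---------------------------------------------------------------------------
// Editor's sketch (not part of this patch): how a caller might combine the two
// placement methods above. `catalog`, `table`, `adapterId` and `columnId` are
// assumed to be in scope; the guard mirrors the pattern DdlManagerImpl uses
// further down in this patch before creating a placement.
if ( !catalog.getAllocRel( table.namespaceId ).checkIfExistsColumnPlacement( adapterId, columnId ) ) {
    catalog.getAllocRel( table.namespaceId ).addColumnPlacement(
            adapterId,
            columnId,
            PlacementType.AUTOMATIC,
            null, // physical schema name, set later
            null, // physical table name, set later
            null ); // physical column name, set later
}
// ---------------------------------------------------------------------------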
*/ - public abstract boolean checkIfExistsColumnPlacement( long adapterId, long columnId ); + boolean checkIfExistsColumnPlacement( long adapterId, long columnId ); /** * Get all column placements of a column @@ -81,7 +81,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param columnId The id of the specific column * @return List of column placements of specific column */ - public abstract List getColumnPlacement( long columnId ); + List getColumnPlacements( long columnId ); /** * Get column placements of a specific table on a specific adapter on column detail level. @@ -90,7 +90,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param adapterId The id of the adapter * @return List of column placements of the table on the specified adapter */ - public abstract List getColumnPlacementsOnAdapterPerTable( long adapterId, long tableId ); + List getColumnPlacementsOnAdapterPerTable( long adapterId, long tableId ); /** * Get column placements on an adapter, on column detail level @@ -99,7 +99,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param adapterId The id of the adapter * @return List of column placements on the specified adapter */ - public abstract List getColumnPlacementsOnAdapter( long adapterId ); + List getColumnPlacementsOnAdapter( long adapterId ); /** * Gets a collection of column placements for a given column. @@ -107,7 +107,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param columnId The id of the column of requested column placements * @return The collection of placements sorted */ - public abstract List getColumnPlacementsByColumn( long columnId ); + List getColumnPlacementsByColumn( long columnId ); /** * Gets all column placements of a table structured by the id of the adapters. @@ -115,14 +115,14 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param tableId The id of the table for the requested column placements * @return The requested collection */ - public abstract ImmutableMap> getColumnPlacementsByAdapter( long tableId ); + ImmutableMap> getColumnPlacementsByAdapter( long tableId ); /** * Gets the id of the partition group to which a given partition belongs. * * @param partitionId The id of the partition */ - public abstract long getPartitionGroupByPartition( long partitionId ); + long getPartitionGroupByPartition( long partitionId ); /** @@ -132,7 +132,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param schemaId The id of the schema * @return List of column placements on this adapter and schema */ - public abstract List getColumnPlacementsOnAdapterAndSchema( long adapterId, long schemaId ); + List getColumnPlacementsOnAdapterAndSchema( long adapterId, long schemaId ); /** * Update the type of a placement. @@ -141,7 +141,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param columnId The id of the column * @param placementType The new type of placement */ - public abstract void updateColumnPlacementType( long adapterId, long columnId, PlacementType placementType ); + void updateColumnPlacementType( long adapterId, long columnId, PlacementType placementType ); /** * Update physical position of a column placement on a specified adapter.
@@ -150,7 +150,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param columnId The id of the column * @param position The physical position to set */ - public abstract void updateColumnPlacementPhysicalPosition( long adapterId, long columnId, long position ); + void updateColumnPlacementPhysicalPosition( long adapterId, long columnId, long position ); /** * Update physical position of a column placement on a specified adapter. Uses auto-increment to get the globally increasing number. @@ -158,7 +158,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param adapterId The id of the adapter * @param columnId The id of the column */ - public abstract void updateColumnPlacementPhysicalPosition( long adapterId, long columnId ); + void updateColumnPlacementPhysicalPosition( long adapterId, long columnId ); /** * Change physical names of all column placements. @@ -169,7 +169,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param physicalColumnName The physical column name * @param updatePhysicalColumnPosition Whether to reset the column position (the highest number in the table; represents that the column is now at the last position) */ - public abstract void updateColumnPlacementPhysicalNames( long adapterId, long columnId, String physicalSchemaName, String physicalColumnName, boolean updatePhysicalColumnPosition ); + void updateColumnPlacementPhysicalNames( long adapterId, long columnId, String physicalSchemaName, String physicalColumnName, boolean updatePhysicalColumnPosition ); /** @@ -180,7 +180,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param partitionType partition Type of the added partition * @return The id of the created partitionGroup */ - public abstract long addPartitionGroup( long tableId, String partitionGroupName, long schemaId, PartitionType partitionType, long numberOfInternalPartitions, List effectivePartitionGroupQualifier, boolean isUnbound ) throws GenericCatalogException; + long addPartitionGroup( long tableId, String partitionGroupName, long schemaId, PartitionType partitionType, long numberOfInternalPartitions, List effectivePartitionGroupQualifier, boolean isUnbound ) throws GenericCatalogException; /** * Should only be called from mergePartitions(). Deletes a single partition and all references. 
@@ -189,7 +189,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param schemaId The unique id of the table * @param partitionGroupId The partitionId to be deleted */ - public abstract void deletePartitionGroup( long tableId, long schemaId, long partitionGroupId ); + void deletePartitionGroup( long tableId, long schemaId, long partitionGroupId ); /** * Get a partition object by its unique id @@ -197,7 +197,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param partitionGroupId The unique id of the partition * @return A catalog partitionGroup */ - public abstract CatalogPartitionGroup getPartitionGroup( long partitionGroupId ); + CatalogPartitionGroup getPartitionGroup( long partitionGroupId ); /** * Adds a partition to the catalog @@ -207,7 +207,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param partitionGroupId partitionGroupId where the partition should be initially added to * @return The id of the created partition */ - public abstract long addPartition( long tableId, long schemaId, long partitionGroupId, List effectivePartitionGroupQualifier, boolean isUnbound ) throws GenericCatalogException; + long addPartition( long tableId, long schemaId, long partitionGroupId, List effectivePartitionGroupQualifier, boolean isUnbound ) throws GenericCatalogException; /** * Deletes a single partition and all references. @@ -216,7 +216,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param schemaId The unique id of the table * @param partitionId The partitionId to be deleted */ - public abstract void deletePartition( long tableId, long schemaId, long partitionId ); + void deletePartition( long tableId, long schemaId, long partitionId ); /** * Get a partition object by its unique id @@ -224,7 +224,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param partitionId The unique id of the partition * @return A catalog partition */ - public abstract CatalogPartition getPartition( long partitionId ); + CatalogPartition getPartition( long partitionId ); /** * Retrieves a list of partitions which are associated with a specific table @@ -232,7 +232,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param tableId Table for which partitions shall be gathered * @return List of all partitions associated with that table */ - public abstract List getPartitionsByTable( long tableId ); + List getPartitionsByTable( long tableId ); /** * Effectively partitions a table with the specified partitionType @@ -243,7 +243,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param numPartitionGroups Explicit number of partitions * @param partitionGroupIds List of ids of the catalog partitions */ - public abstract void partitionTable( long tableId, PartitionType partitionType, long partitionColumnId, int numPartitionGroups, List partitionGroupIds, PartitionProperty partitionProperty ); + void partitionTable( long tableId, PartitionType partitionType, long partitionColumnId, int numPartitionGroups, List partitionGroupIds, PartitionProperty partitionProperty ); /** * Merges a partitioned table. 
@@ -251,7 +251,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * * @param tableId Table to be merged */ - public abstract void mergeTable( long tableId ); + void mergeTable( long tableId ); /** * Updates partitionProperties on table @@ -259,7 +259,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param tableId Table to be partitioned * @param partitionProperty Partition properties */ - public abstract void updateTablePartitionProperties( long tableId, PartitionProperty partitionProperty ); + void updateTablePartitionProperties( long tableId, PartitionProperty partitionProperty ); /** * Get a List of all partitions belonging to a specific table @@ -267,7 +267,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param tableId Table to be queried * @return list of all partitions on this table */ - public abstract List getPartitionGroups( long tableId ); + List getPartitionGroups( long tableId ); /** * Get all partitions of the specified database which fit to the specified filter patterns. @@ -277,7 +277,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param tableNamePattern Pattern for the table name. null returns all. * @return List of columns which fit to the specified filters. If there is no column which meets the criteria, an empty list is returned. */ - public abstract List getPartitionGroups( Pattern schemaNamePattern, Pattern tableNamePattern ); + List getPartitionGroups( Pattern schemaNamePattern, Pattern tableNamePattern ); /** * Updates the specified partition group with the attached partitionIds @@ -285,7 +285,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param partitionGroupId Partition Group to be updated * @param partitionIds List of new partitionIds */ - public abstract void updatePartitionGroup( long partitionGroupId, List partitionIds ); + void updatePartitionGroup( long partitionGroupId, List partitionIds ); /** * Adds a partition to an already existing partition Group @@ -293,7 +293,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param partitionGroupId Group to add to * @param partitionId Partition to add */ - public abstract void addPartitionToGroup( long partitionGroupId, Long partitionId ); + void addPartitionToGroup( long partitionGroupId, Long partitionId ); /** * Removes a partition from an already existing partition Group @@ -301,7 +301,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param partitionGroupId Group to remove the partition from * @param partitionId Partition to remove */ - public abstract void removePartitionFromGroup( long partitionGroupId, Long partitionId ); + void removePartitionFromGroup( long partitionGroupId, Long partitionId ); /** * Assign the partition to a new partitionGroup @@ -309,7 +309,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param partitionId Partition to move * @param partitionGroupId New target group to move the partition to */ - public abstract void updatePartition( long partitionId, Long partitionGroupId ); + void updatePartition( long partitionId, Long partitionGroupId ); /** * Get a List of all partitions belonging to a specific table @@ -317,7 +317,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param partitionGroupId Partition group to be queried * @return list of all partitions on this table
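// ---------------------------------------------------------------------------
// Editor's sketch (not part of this patch): walking from a table to its
// partitions group by group via the two getters above. `catalog` and `table`
// are assumed to be in scope; CatalogPartitionGroup is assumed to expose its
// identifier as a public `id` field, like the other catalog entities here.
for ( CatalogPartitionGroup group : catalog.getAllocRel( table.namespaceId ).getPartitionGroups( table.id ) ) {
    List<CatalogPartition> partitions = catalog.getAllocRel( table.namespaceId ).getPartitions( group.id );
    // ... inspect or place each partition of this group
}
// ---------------------------------------------------------------------------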
*/ - public abstract List getPartitions( long partitionGroupId ); + List getPartitions( long partitionGroupId ); /** * Get all partitions of the specified database which fit to the specified filter patterns. @@ -327,7 +327,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param tableNamePattern Pattern for the table name. null returns all. * @return List of columns which fit to the specified filters. If there is no column which meets the criteria, an empty list is returned. */ - public abstract List getPartitions( Pattern schemaNamePattern, Pattern tableNamePattern ); + List getPartitions( Pattern schemaNamePattern, Pattern tableNamePattern ); /** * Get a list of all partition names belonging to a specific table @@ -335,7 +335,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param tableId Table to be queried * @return list of all partition names on this table */ - public abstract List getPartitionGroupNames( long tableId ); + List getPartitionGroupNames( long tableId ); /** * Get placements by partition. Identify the location of partitions. @@ -346,7 +346,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param columnId The id of the column * @return List of CatalogColumnPlacements */ - public abstract List getColumnPlacementsByPartitionGroup( long tableId, long partitionGroupId, long columnId ); + List getColumnPlacementsByPartitionGroup( long tableId, long partitionGroupId, long columnId ); /** * Get adapters by partition. Identify the location of partitions/replicas @@ -356,7 +356,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param partitionGroupId The unique id of the partition * @return List of CatalogAdapters */ - public abstract List getAdaptersByPartitionGroup( long tableId, long partitionGroupId ); + List getAdaptersByPartitionGroup( long tableId, long partitionGroupId ); /** * Get all partitions of a DataPlacement (identified by adapterId and tableId) @@ -365,7 +365,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param tableId The unique id of the table * @return List of partitionIds */ - public abstract List getPartitionGroupsOnDataPlacement( long adapterId, long tableId ); + List getPartitionGroupsOnDataPlacement( long adapterId, long tableId ); /** * Get all partitions of a DataPlacement (identified by adapterId and tableId) @@ -374,7 +374,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param tableId The unique id of the table * @return List of partitionIds */ - public abstract List getPartitionsOnDataPlacement( long adapterId, long tableId ); + List getPartitionsOnDataPlacement( long adapterId, long tableId ); /** * Returns list with the index of the partitions on this store from 0..numPartitions @@ -383,7 +383,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param tableId The unique id of the table * @return List of partitionId Indices */ - public abstract List getPartitionGroupsIndexOnDataPlacement( int adapterId, long tableId ); + List getPartitionGroupsIndexOnDataPlacement( int adapterId, long tableId ); /** * Returns a specific DataPlacement of a given table.
@@ -392,7 +392,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param tableId table to retrieve the placement from * @return DataPlacement of a table placed on a specific store */ - public abstract CatalogDataPlacement getDataPlacement( long adapterId, long tableId ); + CatalogDataPlacement getDataPlacement( long adapterId, long tableId ); /** * Returns all DataPlacements of a given table. @@ -400,7 +400,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param tableId table to retrieve the placements from * @return List of all DataPlacements for the table */ - public abstract List getDataPlacements( long tableId ); + List getDataPlacements( long tableId ); /** * Returns a list of all DataPlacements that contain all columns as well as all partitions @@ -408,7 +408,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param tableId table to retrieve the list from * @return list of all full DataPlacements */ - public abstract List getAllFullDataPlacements( long tableId ); + List getAllFullDataPlacements( long tableId ); /** * Returns a list of all DataPlacements that contain all columns @@ -416,7 +416,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param tableId table to retrieve the list from * @return list of all full DataPlacements */ - public abstract List getAllColumnFullDataPlacements( long tableId ); + List getAllColumnFullDataPlacements( long tableId ); /** * Returns a list of all DataPlacements that contain all partitions @@ -424,7 +424,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param tableId table to retrieve the list from * @return list of all full DataPlacements */ - public abstract List getAllPartitionFullDataPlacements( long tableId ); + List getAllPartitionFullDataPlacements( long tableId ); /** * Returns all DataPlacements of a given table that are associated with a given role. @@ -433,7 +433,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param role role to specifically filter * @return List of all DataPlacements for the table that are associated with a specific role */ - public abstract List getDataPlacementsByRole( long tableId, DataPlacementRole role ); + List getDataPlacementsByRole( long tableId, DataPlacementRole role ); /** * Returns all PartitionPlacements of a given table that are associated with a given role. @@ -442,7 +442,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param role role to specifically filter * @return List of all PartitionPlacements for the table that are associated with a specific role */ - public abstract List getPartitionPlacementsByRole( long tableId, DataPlacementRole role ); + List getPartitionPlacementsByRole( long tableId, DataPlacementRole role ); /** * Returns all PartitionPlacements of a given table with a given ID that are associated with a given role. @@ -452,7 +452,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param partitionId filter by ID * @return List of all PartitionPlacements for the table that are associated with a specific role for a specific partitionId */ - public abstract List getPartitionPlacementsByIdAndRole( long tableId, long partitionId, DataPlacementRole role ); + List getPartitionPlacementsByIdAndRole( long tableId, long partitionId, DataPlacementRole role ); /** * Checks if the planned changes are allowed in terms of placements that need to be present. 
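// ---------------------------------------------------------------------------
// Editor's sketch (not part of this patch): the intended use of the constraint
// check above as a guard before shrinking a placement. `catalog`, `table`,
// `adapterId`, `columnsToRemove` and `partitionsToRemove` are assumed local
// variables; the boolean result follows the signature declared below.
if ( !catalog.getAllocRel( table.namespaceId ).validateDataPlacementsConstraints(
        table.id, adapterId, columnsToRemove, partitionsToRemove ) ) {
    throw new RuntimeException( "The planned changes would remove the last placement of a column or partition." );
}
// ---------------------------------------------------------------------------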
@@ -464,7 +464,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param partitionsIdsToBeRemoved partitions that shall be removed * @return true if these changes can be made to the data placement, false if not */ - public abstract boolean validateDataPlacementsConstraints( long tableId, long adapterId, List columnIdsToBeRemoved, List partitionsIdsToBeRemoved ); + boolean validateDataPlacementsConstraints( long tableId, long adapterId, List columnIdsToBeRemoved, List partitionsIdsToBeRemoved ); /** @@ -478,7 +478,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param physicalSchemaName The schema name on the adapter * @param physicalTableName The table name on the adapter */ - public abstract void addPartitionPlacement( long namespaceId, long adapterId, long tableId, long partitionId, PlacementType placementType, String physicalSchemaName, String physicalTableName, DataPlacementRole role ); + void addPartitionPlacement( long namespaceId, long adapterId, long tableId, long partitionId, PlacementType placementType, String physicalSchemaName, String physicalTableName, DataPlacementRole role ); /** * Adds a new DataPlacement for a given table on a specific store @@ -486,7 +486,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param adapterId adapter where placement should be located * @param tableId table to retrieve the placement from */ - public abstract void addDataPlacement( long adapterId, long tableId ); + void addDataPlacement( long adapterId, long tableId ); /** * Adds a new DataPlacement for a given table on a specific store. @@ -496,7 +496,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param tableId table to retrieve the placement from * @return DataPlacement of a table placed on a specific store */ - public abstract CatalogDataPlacement addDataPlacementIfNotExists( long adapterId, long tableId ); + CatalogDataPlacement addDataPlacementIfNotExists( long adapterId, long tableId ); /** * Modifies a specific DataPlacement of a given table. 
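// ---------------------------------------------------------------------------
// Editor's sketch (not part of this patch): CatalogDataPlacement is treated as
// immutable, so modifyDataPlacement( ... ) receives a rebuilt copy, exactly as
// the removed CatalogImpl methods earlier in this document do. The constructor
// argument order follows those methods; `newPartitionIds` is an assumed list.
CatalogDataPlacement old = getDataPlacement( adapterId, tableId );
CatalogDataPlacement updated = new CatalogDataPlacement(
        old.tableId,
        old.adapterId,
        old.placementType,
        old.dataPlacementRole,
        old.columnPlacementsOnAdapter,
        ImmutableList.copyOf( newPartitionIds ) );
modifyDataPlacement( adapterId, tableId, updated );
// ---------------------------------------------------------------------------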
@@ -505,7 +505,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param tableId table to retrieve the placement from * @param catalogDataPlacement new dataPlacement to be written */ - abstract void modifyDataPlacement( long adapterId, long tableId, CatalogDataPlacement catalogDataPlacement ); + void modifyDataPlacement( long adapterId, long tableId, CatalogDataPlacement catalogDataPlacement ); /** @@ -514,7 +514,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param adapterId adapter where placement should be removed from * @param tableId table to retrieve the placement from */ - public abstract void removeDataPlacement( long adapterId, long tableId ); + void removeDataPlacement( long adapterId, long tableId ); /** * Adds a single dataPlacement on a store for a specific table @@ -522,7 +522,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param adapterId adapter id corresponding to a new DataPlacements * @param tableId table to be updated */ - abstract void addSingleDataPlacementToTable( long adapterId, long tableId ); + void addSingleDataPlacementToTable( long adapterId, long tableId ); /** * Removes a single dataPlacement from a store for a specific table @@ -530,7 +530,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param adapterId adapter id corresponding to a new DataPlacements * @param tableId table to be updated */ - abstract void removeSingleDataPlacementFromTable( long adapterId, long tableId ); + void removeSingleDataPlacementFromTable( long adapterId, long tableId ); /** * Updates the list of data placements on a table @@ -538,7 +538,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param tableId table to be updated * @param newDataPlacements list of new DataPlacements that shall replace the old ones */ - public abstract void updateDataPlacementsOnTable( long tableId, List newDataPlacements ); + void updateDataPlacementsOnTable( long tableId, List newDataPlacements ); /** * Adds columns to dataPlacement on a store for a specific table @@ -547,7 +547,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param tableId table to be updated * @param columnIds List of columnIds to add to a specific store for the table */ - abstract void addColumnsToDataPlacement( long adapterId, long tableId, List columnIds ); + void addColumnsToDataPlacement( long adapterId, long tableId, List columnIds ); /** * Remove columns to dataPlacement on a store for a specific table @@ -556,7 +556,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param tableId table to be updated * @param columnIds List of columnIds to remove from a specific store for the table */ - abstract void removeColumnsFromDataPlacement( long adapterId, long tableId, List columnIds ); + void removeColumnsFromDataPlacement( long adapterId, long tableId, List columnIds ); /** * Adds partitions to dataPlacement on a store for a specific table @@ -565,7 +565,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param tableId table to be updated * @param partitionIds List of partitionIds to add to a specific store for the table */ - abstract void addPartitionsToDataPlacement( long adapterId, long tableId, List partitionIds ); + void addPartitionsToDataPlacement( long adapterId, long tableId, List partitionIds ); /** * Remove partitions to dataPlacement on a store for a specific table @@ -574,7 +574,7 
@@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param tableId table to be updated * @param partitionIds List of partitionIds to remove from a specific store for the table */ - abstract void removePartitionsFromDataPlacement( long adapterId, long tableId, List partitionIds ); + void removePartitionsFromDataPlacement( long adapterId, long tableId, List partitionIds ); /** * Updates and overrides list of associated columnPlacements {@code &} partitionPlacements for a given data placement @@ -584,7 +584,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param columnIds List of columnIds to be located on a specific store for the table * @param partitionIds List of partitionIds to be located on a specific store for the table */ - public abstract void updateDataPlacement( long adapterId, long tableId, List columnIds, List partitionIds ); + void updateDataPlacement( long adapterId, long tableId, List columnIds, List partitionIds ); /** @@ -593,7 +593,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param adapterId The adapter on which the table should be placed * @param partitionId The id of a partition which shall be removed from that store. */ - public abstract void deletePartitionPlacement( long adapterId, long partitionId ); + void deletePartitionPlacement( long adapterId, long partitionId ); /** * Returns a specific partition entity which is placed on a store. @@ -602,7 +602,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param partitionId The id of the requested partition * @return The requested PartitionPlacement on that store for a given id */ - public abstract CatalogPartitionPlacement getPartitionPlacement( long adapterId, long partitionId ); + CatalogPartitionPlacement getPartitionPlacement( long adapterId, long partitionId ); /** * Returns a list of all Partition Placements which currently reside on an adapter, regardless of the table. @@ -610,7 +610,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param adapterId The adapter on which the requested partition placements reside * @return A list of all Partition Placements that are currently located on that specific store */ - public abstract List getPartitionPlacementsByAdapter( long adapterId ); + List getPartitionPlacementsByAdapter( long adapterId ); /** * Returns a list of all Partition Placements which currently reside on an adapter, for a specific table. @@ -619,7 +619,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param tableId The table for which all partition placements on an adapter should be considered * @return A list of all Partition Placements that are currently located on that specific store for an individual table */ - public abstract List getPartitionPlacementsByTableOnAdapter( long adapterId, long tableId ); + List getPartitionPlacementsByTableOnAdapter( long adapterId, long tableId ); /** * Returns a list of all Partition Placements which are currently associated with a table. @@ -627,7 +627,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param tableId The table with which the requested partition placements are currently associated.
* @return A list of all Partition Placements that belong to the desired table */ - public abstract List getAllPartitionPlacementsByTable( long tableId ); + List getAllPartitionPlacementsByTable( long tableId ); /** * Get all Partition Placements which are associated with an individual partition ID. @@ -636,21 +636,21 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param partitionId The requested partition ID * @return A list of Partition Placements which are physically responsible for that partition */ - public abstract List getPartitionPlacements( long partitionId ); + List getPartitionPlacements( long partitionId ); /** * Registers a table to be considered for periodic processing * * @param tableId ID of table to be considered for periodic processing */ - public abstract void addTableToPeriodicProcessing( long tableId ); + void addTableToPeriodicProcessing( long tableId ); /** * Remove a table from periodic background processing * * @param tableId ID of table to be removed for periodic processing */ - public abstract void removeTableFromPeriodicProcessing( long tableId ); + void removeTableFromPeriodicProcessing( long tableId ); /** * Probes if a Partition Placement on an adapter for a specific partition already exists. @@ -659,7 +659,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param partitionId Partition to check * @return the response of the probe */ - public abstract boolean checkIfExistsPartitionPlacement( long adapterId, long partitionId ); + boolean checkIfExistsPartitionPlacement( long adapterId, long partitionId ); } diff --git a/core/src/main/java/org/polypheny/db/catalog/catalogs/PhysicalCatalog.java b/core/src/main/java/org/polypheny/db/catalog/catalogs/PhysicalCatalog.java index a155335c8e..831471c9ff 100644 --- a/core/src/main/java/org/polypheny/db/catalog/catalogs/PhysicalCatalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/catalogs/PhysicalCatalog.java @@ -23,4 +23,6 @@ public interface PhysicalCatalog { List> getPhysicalsOnAdapter( long id ); + PhysicalEntity getPhysicalEntity( long id ); + } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogColumnPlacement.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogColumnPlacement.java index 92dfc5de30..ad6d322d23 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogColumnPlacement.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogColumnPlacement.java @@ -32,7 +32,7 @@ public class CatalogColumnPlacement implements CatalogObject { public final long namespaceId; public final long tableId; public final long columnId; - public final int adapterId; + public final long adapterId; public final String adapterUniqueName; public final PlacementType placementType; @@ -46,7 +46,7 @@ public CatalogColumnPlacement( final long namespaceId, final long tableId, final long columnId, - final int adapterId, + final long adapterId, @NonNull final String adapterUniqueName, @NonNull final PlacementType placementType, final String physicalSchemaName, diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogIndex.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogIndex.java index 22af8c0e7b..4ea85591bc 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogIndex.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogIndex.java @@ -37,7 +37,7 @@ public final class CatalogIndex implements Serializable { public final String physicalName; public
final boolean unique; public final IndexType type; - public final Integer location; + public final long location; public final String method; public final String methodDisplayName; @@ -52,7 +52,7 @@ public CatalogIndex( final String method, final String methodDisplayName, final IndexType type, - final Integer location, + final Long location, final long keyId, final CatalogKey key, final String physicalName ) { diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationEntity.java b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationEntity.java index 0620c4f238..aa1dffecd9 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationEntity.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationEntity.java @@ -32,8 +32,8 @@ public abstract class AllocationEntity extends LogicalE public L logical; - protected AllocationEntity( L logical, long id, String name, long namespaceId, String namespaceName, EntityType type, NamespaceType namespaceType, long adapterId ) { - super( id, name, namespaceId, namespaceName, type, namespaceType ); + protected AllocationEntity( L logical, long id, String name, EntityType type, NamespaceType namespaceType, long adapterId ) { + super( id, name, logical.namespaceId, logical.namespaceName, type, namespaceType ); this.adapterId = adapterId; this.logical = logical; } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationTable.java b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationTable.java index 0895f34490..a8a2efff38 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationTable.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationTable.java @@ -17,12 +17,14 @@ package org.polypheny.db.catalog.entity.allocation; import java.io.Serializable; +import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.stream.Collectors; import lombok.EqualsAndHashCode; import lombok.Value; +import lombok.With; import org.apache.calcite.linq4j.tree.Expression; import org.apache.calcite.linq4j.tree.Expressions; import org.polypheny.db.catalog.Catalog; @@ -31,23 +33,25 @@ import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.logistic.NamespaceType; +import org.polypheny.db.catalog.logistic.PlacementType; @EqualsAndHashCode(callSuper = true) @Value +@With public class AllocationTable extends AllocationEntity { public List placements; public long adapterId; - public long logicalId; public LogicalTable logicalTable; + public String adapterName; - public AllocationTable( LogicalTable logicalTable, long id, long logicalId, String name, long namespaceId, String namespaceName, long adapterId, List placements ) { - super( logicalTable, id, name, namespaceId, namespaceName, EntityType.ENTITY, NamespaceType.RELATIONAL, adapterId ); + public AllocationTable( LogicalTable logicalTable, long id, String name, long adapterId, String adapterName, List placements ) { + super( logicalTable, id, name, EntityType.ENTITY, NamespaceType.RELATIONAL, adapterId ); this.logicalTable = logicalTable; - this.logicalId = logicalId; this.adapterId = adapterId; this.placements = placements; + this.adapterName = adapterName; } @@ -83,4 +87,17 @@ public String getNamespaceName() { } + public AllocationTable withAddedColumn( long columnId, PlacementType 
placementType, String physicalSchemaName, String physicalTableName, String physicalColumnName ) { + List placements = new ArrayList<>( this.placements ); + placements.add( new CatalogColumnPlacement( logical.namespaceId, id, columnId, adapterId, adapterName, placementType, physicalSchemaName, physicalColumnName, 0 ) ); + + return withPlacements( placements ); + } + + + public AllocationTable withRemovedColumn( long columnId ) { + List placements = new ArrayList<>( this.placements ); + return withPlacements( placements.stream().filter( p -> p.columnId != columnId ).collect( Collectors.toList() ) ); + } + } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalEntity.java b/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalEntity.java index 69701b89ed..46b0e9260e 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalEntity.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalEntity.java @@ -24,7 +24,7 @@ public abstract class PhysicalEntity extends AllocationEntity { protected PhysicalEntity( L logical, long id, String name, long namespaceId, String namespaceName, EntityType type, NamespaceType namespaceType, long adapterId ) { - super( logical, id, name, namespaceId, namespaceName, type, namespaceType, adapterId ); + super( logical, id, name, type, namespaceType, adapterId ); } diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java index 5477533b28..06d560dbd6 100644 --- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java @@ -280,8 +280,7 @@ private void handleSource( DataSource adapter ) { PlacementType.STATIC, exportedColumn.physicalSchemaName, exportedColumn.physicalTableName, - exportedColumn.physicalColumnName - ); // Not a valid partitionGroupID --> placeholder + exportedColumn.physicalColumnName ); // Not a valid partitionGroupID --> placeholder catalog.getAllocRel( defaultNamespaceId ).updateColumnPlacementPhysicalPosition( adapter.getAdapterId(), columnId, exportedColumn.physicalPosition ); if ( exportedColumn.primary ) { primaryKeyColIds.add( columnId ); @@ -440,11 +439,11 @@ public void addColumnToSourceTable( LogicalTable catalogTable, String columnPhys } // Make sure there is only one adapter - if ( catalog.getAllocRel( catalogTable.namespaceId ).getColumnPlacement( catalogTable.columns.get( 0 ).id ).size() != 1 ) { + if ( catalog.getAllocRel( catalogTable.namespaceId ).getColumnPlacements( catalogTable.columns.get( 0 ).id ).size() != 1 ) { throw new RuntimeException( "The table has an unexpected number of placements!" 
); } - int adapterId = catalog.getAllocRel( catalogTable.namespaceId ).getColumnPlacement( catalogTable.columns.get( 0 ).id ).get( 0 ).adapterId; + int adapterId = (int) catalog.getAllocRel( catalogTable.namespaceId ).getColumnPlacements( catalogTable.columns.get( 0 ).id ).get( 0 ).adapterId; DataSource dataSource = (DataSource) AdapterManager.getInstance().getAdapter( adapterId ); String physicalTableName = catalog.getAllocRel( catalogTable.namespaceId ).getPartitionPlacement( adapterId, catalogTable.partitionProperty.partitionIds.get( 0 ) ).physicalTableName; @@ -495,8 +494,7 @@ public void addColumnToSourceTable( LogicalTable catalogTable, String columnPhys PlacementType.STATIC, exportedColumn.physicalSchemaName, exportedColumn.physicalTableName, - exportedColumn.physicalColumnName - );//Not a valid partitionID --> placeholder + exportedColumn.physicalColumnName );//Not a valid partitionID --> placeholder // Set column position catalog.getAllocRel( catalogTable.namespaceId ).updateColumnPlacementPhysicalPosition( adapterId, columnId, exportedColumn.physicalPosition ); @@ -567,11 +565,10 @@ public void addColumn( String columnName, LogicalTable catalogTable, String befo catalog.getAllocRel( catalogTable.namespaceId ).addColumnPlacement( store.getAdapterId(), addedColumn.id, - PlacementType.AUTOMATIC, - null, // Will be set later - null, // Will be set later - null // Will be set later - );//Not a valid partitionID --> placeholder + PlacementType.AUTOMATIC, + null, // Will be set later + null, // Will be set later + null ); // Will be set later; not a valid partitionID --> placeholder AdapterManager.getInstance().getStore( store.getAdapterId() ).addColumn( statement.getPrepareContext(), catalogTable, addedColumn ); } @@ -872,8 +869,7 @@ else if ( partitionGroupIds.isEmpty() && partitionGroupNames.isEmpty() ) { PlacementType.MANUAL, null, null, - null - ); + null ); addedColumns.add( catalog.getLogicalRel( catalogTable.namespaceId ).getColumn( cid ) ); } // Check if placement includes primary key columns @@ -886,8 +882,7 @@ else if ( partitionGroupIds.isEmpty() && partitionGroupNames.isEmpty() ) { PlacementType.AUTOMATIC, null, null, - null - ); + null ); addedColumns.add( catalog.getLogicalRel( catalogTable.namespaceId ).getColumn( cid ) ); } } @@ -937,18 +932,17 @@ public void addPrimaryKey( LogicalTable catalogTable, List columnNames, // Add new column placements long pkColumnId = oldPk.columnIds.get( 0 ); // It is sufficient to check for one because all get replicated on all stores - List<CatalogColumnPlacement> oldPkPlacements = catalog.getAllocRel( catalogTable.namespaceId ).getColumnPlacement( pkColumnId ); + List<CatalogColumnPlacement> oldPkPlacements = catalog.getAllocRel( catalogTable.namespaceId ).getColumnPlacements( pkColumnId ); for ( CatalogColumnPlacement ccp : oldPkPlacements ) { for ( long columnId : columnIds ) { if ( !catalog.getAllocRel( catalogTable.namespaceId ).checkIfExistsColumnPlacement( ccp.adapterId, columnId ) ) { catalog.getAllocRel( catalogTable.namespaceId ).addColumnPlacement( ccp.adapterId, columnId, - PlacementType.AUTOMATIC, - null, // Will be set later - null, // Will be set later - null // Will be set later - ); + PlacementType.AUTOMATIC, + null, // Will be set later + null, // Will be set later + null ); // Will be set later AdapterManager.getInstance().getStore( ccp.adapterId ).addColumn( statement.getPrepareContext(), catalog.getLogicalRel( catalogTable.namespaceId ).getTable( ccp.tableId ), @@ -1175,7 +1169,7 @@ public void setColumnType( LogicalTable catalogTable, String columnName, ColumnT
type.scale, type.dimension, type.cardinality ); - for ( CatalogColumnPlacement placement : catalog.getAllocRel( catalogTable.namespaceId ).getColumnPlacement( logicalColumn.id ) ) { + for ( CatalogColumnPlacement placement : catalog.getAllocRel( catalogTable.namespaceId ).getColumnPlacements( logicalColumn.id ) ) { AdapterManager.getInstance().getStore( placement.adapterId ).updateColumnType( statement.getPrepareContext(), placement, @@ -1351,7 +1345,7 @@ public void modifyDataPlacement( LogicalTable catalogTable, List columnIds // Remove columns physically for ( long columnId : columnsToRemove ) { // Drop Column on store - storeInstance.dropColumn( statement.getPrepareContext(), catalog.getAllocRel( catalogTable.namespaceId ).getColumnPlacement( storeInstance.getAdapterId(), columnId ) ); + storeInstance.dropColumn( statement.getPrepareContext(), catalog.getAllocRel( catalogTable.namespaceId ).getColumnPlacements( columnId ).stream().filter( p -> p.adapterId == storeInstance.getAdapterId() ).findFirst().orElseThrow() ); // Drop column placement catalog.getAllocRel( catalogTable.namespaceId ).deleteColumnPlacement( storeInstance.getAdapterId(), columnId, true ); } @@ -1413,7 +1407,7 @@ else if ( !partitionGroupNames.isEmpty() && partitionGroupIds.isEmpty() ) { for ( long cid : columnIds ) { if ( catalog.getAllocRel( catalogTable.namespaceId ).checkIfExistsColumnPlacement( storeInstance.getAdapterId(), cid ) ) { - CatalogColumnPlacement placement = catalog.getAllocRel( catalogTable.namespaceId ).getColumnPlacement( storeInstance.getAdapterId(), cid ); + CatalogColumnPlacement placement = catalog.getAllocRel( catalogTable.namespaceId ).getColumnPlacements( cid ).stream().filter( p -> p.adapterId == storeInstance.getAdapterId() ).findFirst().orElseThrow(); if ( placement.placementType == PlacementType.AUTOMATIC ) { // Make placement manual catalog.getAllocRel( catalogTable.namespaceId ).updateColumnPlacementType( storeInstance.getAdapterId(), cid, PlacementType.MANUAL ); @@ -1426,8 +1420,7 @@ else if ( !partitionGroupNames.isEmpty() && partitionGroupIds.isEmpty() ) { PlacementType.MANUAL, null, null, - null - ); + null ); // Add column on store storeInstance.addColumn( statement.getPrepareContext(), catalogTable, catalog.getLogicalRel( catalogTable.namespaceId ).getColumn( cid ) ); // Add to list of columns for which we need to copy data @@ -1567,7 +1560,7 @@ public void addColumnPlacement( LogicalTable catalogTable, String columnName, Da // Make sure that this store does not contain a placement of this column if ( catalog.getAllocRel( catalogTable.namespaceId ).checkIfExistsColumnPlacement( storeInstance.getAdapterId(), logicalColumn.id ) ) { - CatalogColumnPlacement placement = catalog.getAllocRel( catalogTable.namespaceId ).getColumnPlacement( storeInstance.getAdapterId(), logicalColumn.id ); + CatalogColumnPlacement placement = catalog.getAllocRel( catalogTable.namespaceId ).getColumnPlacements( logicalColumn.id ).stream().filter( p -> p.adapterId == storeInstance.getAdapterId() ).findFirst().orElseThrow(); if ( placement.placementType == PlacementType.AUTOMATIC ) { // Make placement manual catalog.getAllocRel( catalogTable.namespaceId ).updateColumnPlacementType( @@ -1585,8 +1578,7 @@ public void addColumnPlacement( LogicalTable catalogTable, String columnName, Da PlacementType.MANUAL, null, null, - null - ); + null ); // Add column on store storeInstance.addColumn( statement.getPrepareContext(), catalogTable, logicalColumn ); // Copy the data to the newly added column placements @@ -1633,7 +1625,7 @@ public void dropColumnPlacement( LogicalTable catalogTable, String columnName, D throw new PlacementIsPrimaryException(); } // Drop Column on store - storeInstance.dropColumn( statement.getPrepareContext(), catalog.getAllocRel( catalogTable.namespaceId
).getColumnPlacement( storeInstance.getAdapterId(), logicalColumn.id ) ); + storeInstance.dropColumn( statement.getPrepareContext(), catalog.getAllocRel( catalogTable.namespaceId ).getColumnPlacements( logicalColumn.id ).stream().filter( p -> p.adapterId == storeInstance.getAdapterId() ).findFirst().orElseThrow() ); // Drop column placement catalog.getAllocRel( catalogTable.namespaceId ).deleteColumnPlacement( storeInstance.getAdapterId(), logicalColumn.id, false ); @@ -1835,8 +1827,7 @@ public void createMaterializedView( String viewName, long namespaceId, AlgRoot a placementType, null, null, - null - ); + null ); List logicalColumns; if ( addedColumns.containsKey( adapterId ) ) { @@ -2645,7 +2636,7 @@ public void addPartitioning( PartitionInformation partitionInfo, List stores = new ArrayList<>(); fillStores = true; } - List catalogColumnPlacements = catalog.getAllocRel( partitionInfo.table.namespaceId ).getColumnPlacement( pkColumn.id ); + List catalogColumnPlacements = catalog.getAllocRel( partitionInfo.table.namespaceId ).getColumnPlacements( pkColumn.id ); for ( CatalogColumnPlacement ccp : catalogColumnPlacements ) { if ( fillStores ) { // Ask router on which store(s) the table should be placed @@ -2758,7 +2749,7 @@ public void removePartitioning( LogicalTable partitionedTable, Statement stateme LogicalColumn pkColumn = catalog.getLogicalRel( partitionedTable.namespaceId ).getColumn( pkColumnIds.get( 0 ) ); // This gets us only one ccp per store (first part of PK) - List catalogColumnPlacements = catalog.getAllocRel( partitionedTable.namespaceId ).getColumnPlacement( pkColumn.id ); + List catalogColumnPlacements = catalog.getAllocRel( partitionedTable.namespaceId ).getColumnPlacements( pkColumn.id ); for ( CatalogColumnPlacement ccp : catalogColumnPlacements ) { // Ask router on which store(s) the table should be placed Adapter adapter = AdapterManager.getInstance().getAdapter( ccp.adapterId ); @@ -2881,8 +2872,7 @@ private void addColumn( long namespaceId, String columnName, ColumnTypeInformati placementType, null, null, - null - ); + null ); } } diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java b/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java index e239a88013..c00c6de6c7 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java @@ -124,9 +124,9 @@ protected static Map> selectPlacement( Logica List placementList = new LinkedList<>(); for ( LogicalColumn column : table.columns ) { if ( catalog.getAllocRel( table.namespaceId ).getDataPlacement( adapterIdWithMostPlacements, table.id ).columnPlacementsOnAdapter.contains( column.id ) ) { - placementList.add( Catalog.getInstance().getAllocRel( table.namespaceId ).getColumnPlacement( adapterIdWithMostPlacements, column.id ) ); + placementList.add( Catalog.getInstance().getAllocRel( table.namespaceId ).getColumnPlacements( column.id ).stream().filter( p -> p.adapterId == adapterIdWithMostPlacements ).findFirst().orElseThrow() ); } else { - placementList.add( Catalog.getInstance().getAllocRel( table.namespaceId ).getColumnPlacement( column.id ).get( 0 ) ); + placementList.add( Catalog.getInstance().getAllocRel( table.namespaceId ).getColumnPlacements( column.id ).get( 0 ) ); } } @@ -297,7 +297,7 @@ public AlgNode buildJoinedScan( Statement statement, AlgOptCluster cluster, Map< // Add primary key for ( Entry> entry : placementsByAdapter.entrySet() ) { for ( LogicalColumn pkColumn : pkColumns ) { - CatalogColumnPlacement pkPlacement = catalog.getAllocRel( currentPlacements.get( 0 ).namespaceId ).getColumnPlacement( entry.getKey(), pkColumn.id ); + CatalogColumnPlacement
pkPlacement = catalog.getAllocRel( currentPlacements.get( 0 ).namespaceId ).getColumnPlacements( pkColumn.id ).stream().filter( p -> p.adapterId == entry.getKey() ).findFirst().orElseThrow(); if ( !entry.getValue().contains( pkPlacement ) ) { entry.getValue().add( pkPlacement ); } @@ -482,7 +482,7 @@ public AlgNode getRelationalScan( LogicalLpgScan alg, long adapterId, Statement protected CatalogEntity getSubstitutionTable( Statement statement, long tableId, long columnId, int adapterId ) { /*LogicalTable nodes = Catalog.getInstance().getTable( tableId ); - CatalogColumnPlacement placement = Catalog.getInstance().getColumnPlacement( adapterId, columnId ); + CatalogColumnPlacement placement = Catalog.getInstance().getColumnPlacements( adapterId, columnId ); List qualifiedTableName = ImmutableList.of( PolySchemaBuilder.buildAdapterSchemaName( placement.adapterUniqueName, diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java b/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java index 2102d0526d..21c43984e5 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java @@ -149,7 +149,7 @@ public AlgNode routeDml( LogicalRelModify modify, Statement statement ) { LogicalColumn pkColumn = catalog.getLogicalRel( modify.entity.namespaceId ).getColumn( pkColumnIds.get( 0 ) ); // Essentially gets a list of all stores where this table resides - List pkPlacements = catalog.getAllocRel( modify.entity.namespaceId ).getColumnPlacement( pkColumn.id ); + List pkPlacements = catalog.getAllocRel( modify.entity.namespaceId ).getColumnPlacements( pkColumn.id ); if ( catalogTable.partitionProperty.isPartitioned && log.isDebugEnabled() ) { log.debug( "\nListing all relevant stores for table: '{}' and all partitions: {}", catalogTable.name, catalogTable.partitionProperty.partitionGroupIds ); @@ -1297,7 +1297,7 @@ private AlgBuilder handleSelectFromOtherTable( RoutedAlgBuilder builder, Logical long pkid = fromTable.primaryKey; List pkColumnIds = catalog.getLogicalRel( catalogTable.namespaceId ).getPrimaryKey( pkid ).columnIds; LogicalColumn pkColumn = catalog.getLogicalRel( catalogTable.namespaceId ).getColumn( pkColumnIds.get( 0 ) ); - List pkPlacements = catalog.getAllocRel( catalogTable.namespaceId ).getColumnPlacement( pkColumn.id ); + List pkPlacements = catalog.getAllocRel( catalogTable.namespaceId ).getColumnPlacements( pkColumn.id ); List nodes = new ArrayList<>(); for ( CatalogColumnPlacement pkPlacement : pkPlacements ) { diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/ConnectedMap.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/ConnectedMap.java new file mode 100644 index 0000000000..d2717a6aad --- /dev/null +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/ConnectedMap.java @@ -0,0 +1,115 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
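// Note on the getColumnPlacement( adapterId, columnId ) -> getColumnPlacements( columnId )
// migration above: wherever a call site needs exactly one placement for a specific adapter,
// the returned list has to be narrowed again. The pattern used at those sites, as a minimal
// standalone sketch (variable names are illustrative, not taken from the patch):
CatalogColumnPlacement placement = catalog.getAllocRel( table.namespaceId )
        .getColumnPlacements( columnId ).stream()
        .filter( p -> p.adapterId == store.getAdapterId() )
        .findFirst()
        .orElseThrow();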
+ */ + +package org.polypheny.db.catalog; + +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentLinkedQueue; +import java.util.function.BiFunction; +import java.util.function.Consumer; +import org.jetbrains.annotations.NotNull; + +public class ConnectedMap<K, V> extends ConcurrentHashMap<K, V> { + + ConcurrentLinkedQueue<Consumer<ConnectedMap<K, V>>> onChange = new ConcurrentLinkedQueue<>(); + + + public void change() { + onChange.forEach( p -> p.accept( this ) ); + } + + + public ConnectedMap( Map<K, V> allocations ) { + super( allocations ); + change(); + } + + + @Override + public V put( @NotNull K key, @NotNull V value ) { + V v = super.put( key, value ); + change(); + return v; + } + + + @Override + public void putAll( Map<? extends K, ? extends V> m ) { + super.putAll( m ); + change(); + } + + + @Override + public void clear() { + super.clear(); + change(); + } + + + @Override + public V putIfAbsent( K key, V value ) { + V v = super.putIfAbsent( key, value ); + change(); + return v; + } + + + @Override + public boolean remove( Object key, Object value ) { + boolean b = super.remove( key, value ); + change(); + return b; + } + + + @Override + public boolean replace( K key, V oldValue, V newValue ) { + boolean b = super.replace( key, oldValue, newValue ); + change(); + return b; + } + + + @Override + public V replace( K key, V value ) { + V v = super.replace( key, value ); + change(); + return v; + } + + + @Override + public void replaceAll( BiFunction<? super K, ? super V, ? extends V> function ) { + super.replaceAll( function ); + change(); + } + + + public void addConnection( Consumer<ConnectedMap<K, V>> onChange ) { + this.onChange.add( onChange ); + } + + + public <K2, V2> void addRowConnection( Map<K2, V2> target, BiFunction<K, V, K2> keyTransformer, BiFunction<K, V, V2> valueTransformer ) { + addConnection( o -> { + target.clear(); + this.forEach( ( key, value ) -> target.put( keyTransformer.apply( key, value ), valueTransformer.apply( key, value ) ) ); + } ); + } + +} diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/IdBuilder.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/IdBuilder.java index c589d42435..c66b81014c 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/IdBuilder.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/IdBuilder.java @@ -24,13 +24,12 @@ public class IdBuilder { private final AtomicLong databaseId; private final AtomicLong namespaceId; private final AtomicLong entityId; + + private final AtomicLong allocId; private final AtomicLong fieldId; private final AtomicLong userId; - private final AtomicLong verticalId; - - private final AtomicLong horizontalId; private final AtomicLong indexId; private final AtomicLong keyId; @@ -39,8 +38,18 @@ public class IdBuilder { private final AtomicLong interfaceId; + private static IdBuilder INSTANCE; + + + public static IdBuilder getInstance() { + if ( INSTANCE == null ) { + INSTANCE = new IdBuilder(); + } + return INSTANCE; + } + - public IdBuilder() { + private IdBuilder() { this( new AtomicLong( 0 ), new AtomicLong( 0 ), @@ -52,7 +61,6 @@ public IdBuilder() { new AtomicLong( 0 ), new AtomicLong( 0 ), new AtomicLong( 0 ), new AtomicLong( 0 ), new AtomicLong( 0 ), new AtomicLong( 0 ), new AtomicLong( 0 ), - new AtomicLong( 0 ), new AtomicLong( 0 ) ); } @@ -64,8 +72,7 @@ public IdBuilder( AtomicLong entityId, AtomicLong fieldId, AtomicLong userId, - AtomicLong verticalId, - AtomicLong horizontalId, + AtomicLong allocId, AtomicLong indexId, AtomicLong keyId, AtomicLong adapterId, @@ -80,8 +87,7 @@ public IdBuilder( this.indexId = indexId; this.keyId = keyId; this.userId = userId; - this.verticalId = verticalId; -
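// A minimal, self-contained usage sketch for the ConnectedMap introduced above: every mutation
// that goes through one of the overridden methods calls change(), which re-runs all registered
// connections and rebuilds the derived maps from scratch. Note that remove( Object ), compute
// and merge are not overridden, so mutations through those paths would leave derived maps stale.
ConnectedMap<Long, String> tables = new ConnectedMap<>( new HashMap<>() );
Map<String, Long> idsByName = new ConcurrentHashMap<>();
tables.addRowConnection( idsByName, ( id, name ) -> name, ( id, name ) -> id );
tables.put( 42L, "emps" ); // triggers change(); idsByName now maps "emps" -> 42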
this.horizontalId = horizontalId; + this.allocId = allocId; this.adapterId = adapterId; this.interfaceId = interfaceId; @@ -118,13 +124,8 @@ public long getNewUserId() { } - public long getNewVerticalId() { - return verticalId.getAndIncrement(); - } - - - public long getNewHorizontalId() { - return horizontalId.getAndIncrement(); + public long getNewAllocId() { + return allocId.getAndIncrement(); } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/NCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/NCatalog.java index 7092e8037c..6621d6bcb5 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/NCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/NCatalog.java @@ -17,9 +17,9 @@ package org.polypheny.db.catalog; import io.activej.serializer.annotations.SerializeClass; -import org.polypheny.db.catalog.logical.document.DocumentCatalog; -import org.polypheny.db.catalog.logical.graph.GraphCatalog; -import org.polypheny.db.catalog.logical.relational.RelationalCatalog; +import org.polypheny.db.catalog.logical.DocumentCatalog; +import org.polypheny.db.catalog.logical.GraphCatalog; +import org.polypheny.db.catalog.logical.RelationalCatalog; import org.polypheny.db.catalog.logistic.NamespaceType; @SerializeClass(subclasses = { GraphCatalog.class, RelationalCatalog.class, DocumentCatalog.class }) // required for deserialization diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java index 8e94f810c8..53c2965960 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java @@ -52,9 +52,9 @@ import org.polypheny.db.catalog.exceptions.UnknownQueryInterfaceException; import org.polypheny.db.catalog.exceptions.UnknownSchemaException; import org.polypheny.db.catalog.exceptions.UnknownUserException; -import org.polypheny.db.catalog.logical.document.DocumentCatalog; -import org.polypheny.db.catalog.logical.graph.GraphCatalog; -import org.polypheny.db.catalog.logical.relational.RelationalCatalog; +import org.polypheny.db.catalog.logical.DocumentCatalog; +import org.polypheny.db.catalog.logical.GraphCatalog; +import org.polypheny.db.catalog.logical.RelationalCatalog; import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.catalog.logistic.Pattern; import org.polypheny.db.catalog.snapshot.FullSnapshot; @@ -93,7 +93,7 @@ public class PolyCatalog extends Catalog implements Serializable { @Serialize public final Map interfaces; - private final IdBuilder idBuilder = new IdBuilder(); + private final IdBuilder idBuilder = IdBuilder.getInstance(); private FullSnapshot fullSnapshot; protected final PropertyChangeSupport listeners = new PropertyChangeSupport( this ); @@ -157,44 +157,63 @@ private void validateNamespaceType( long id, NamespaceType type ) { @Override - public LogicalRelationalCatalog getLogicalRel( long id ) { - validateNamespaceType( id, NamespaceType.RELATIONAL ); - return (LogicalRelationalCatalog) logicalCatalogs.get( id ); + public LogicalRelationalCatalog getLogicalRel( long namespaceId ) { + validateNamespaceType( namespaceId, NamespaceType.RELATIONAL ); + return (LogicalRelationalCatalog) logicalCatalogs.get( namespaceId ); } @Override - public LogicalDocumentCatalog getLogicalDoc( long id ) { - validateNamespaceType( id, NamespaceType.DOCUMENT ); - return 
(LogicalDocumentCatalog) logicalCatalogs.get( id ); + public LogicalDocumentCatalog getLogicalDoc( long namespaceId ) { + validateNamespaceType( namespaceId, NamespaceType.DOCUMENT ); + return (LogicalDocumentCatalog) logicalCatalogs.get( namespaceId ); } @Override - public LogicalGraphCatalog getLogicalGraph( long id ) { - validateNamespaceType( id, NamespaceType.GRAPH ); - return (LogicalGraphCatalog) logicalCatalogs.get( id ); + public LogicalGraphCatalog getLogicalGraph( long namespaceId ) { + validateNamespaceType( namespaceId, NamespaceType.GRAPH ); + return (LogicalGraphCatalog) logicalCatalogs.get( namespaceId ); } @Override - public AllocationRelationalCatalog getAllocRel( long id ) { - validateNamespaceType( id, NamespaceType.RELATIONAL ); - return (AllocationRelationalCatalog) allocationCatalogs.get( id ); + public AllocationRelationalCatalog getAllocRel( long namespaceId ) { + validateNamespaceType( namespaceId, NamespaceType.RELATIONAL ); + return (AllocationRelationalCatalog) allocationCatalogs.get( namespaceId ); } @Override - public AllocationDocumentCatalog getAllocDoc( long id ) { - validateNamespaceType( id, NamespaceType.DOCUMENT ); - return (AllocationDocumentCatalog) allocationCatalogs.get( id ); + public AllocationDocumentCatalog getAllocDoc( long namespaceId ) { + validateNamespaceType( namespaceId, NamespaceType.DOCUMENT ); + return (AllocationDocumentCatalog) allocationCatalogs.get( namespaceId ); } @Override - public AllocationGraphCatalog getAllocGraph( long id ) { - validateNamespaceType( id, NamespaceType.GRAPH ); - return (AllocationGraphCatalog) allocationCatalogs.get( id ); + public AllocationGraphCatalog getAllocGraph( long namespaceId ) { + validateNamespaceType( namespaceId, NamespaceType.GRAPH ); + return (AllocationGraphCatalog) allocationCatalogs.get( namespaceId ); + } + + + @Override + public PhysicalCatalog getPhysical( long namespaceId ) { + return physicalCatalogs.get( namespaceId ); + } + + + // move to Snapshot + @Override + public PhysicalEntity getPhysicalEntity( long id ) { + for ( PhysicalCatalog catalog : physicalCatalogs.values() ) { + PhysicalEntity entity = catalog.getPhysicalEntity( id ); + if ( entity != null ) { + return entity; + } + } + return null; } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/AllocationCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/AllocationCatalog.java deleted file mode 100644 index b37e7c9044..0000000000 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/AllocationCatalog.java +++ /dev/null @@ -1,106 +0,0 @@ -/* - * Copyright 2019-2023 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
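// The getPhysicalEntity lookup above scans every registered PhysicalCatalog and returns null on
// a miss (the "move to Snapshot" note hints at a future indexed lookup). Callers therefore need
// a guard; a minimal sketch, with the exception chosen purely for illustration:
PhysicalEntity physical = catalog.getPhysicalEntity( entityId );
if ( physical == null ) {
    throw new RuntimeException( "No physical entity with id " + entityId );
}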
- */ - -package org.polypheny.db.catalog.allocation; - -import io.activej.serializer.BinarySerializer; -import io.activej.serializer.annotations.Deserialize; -import io.activej.serializer.annotations.Serialize; -import java.util.HashMap; -import java.util.Map; -import java.util.concurrent.ConcurrentHashMap; -import lombok.Getter; -import org.polypheny.db.catalog.logistic.NamespaceType; -import org.polypheny.db.catalog.IdBuilder; -import org.polypheny.db.catalog.NCatalog; -import org.polypheny.db.catalog.Serializable; - -public class AllocationCatalog implements NCatalog, Serializable { - - - @Getter - public BinarySerializer serializer = Serializable.builder.get().build( AllocationCatalog.class ); - - - @Serialize - public final Map horizontals; // "rows" 1,1,1;2,2,2 -> 1,1,1 + 2,2,2 - - @Serialize - public final Map verticals; // "split-placements" a,b,c -> a,b + (a,)c - - - public final IdBuilder idBuilder = new IdBuilder(); - - - public AllocationCatalog() { - this( new HashMap<>(), new HashMap<>() ); - } - - - public AllocationCatalog( - @Deserialize("horizontals") Map horizontals, - @Deserialize("verticals") Map verticals ) { - this.horizontals = new ConcurrentHashMap<>( horizontals ); - this.verticals = new ConcurrentHashMap<>( verticals ); - } - - - @Override - public void commit() { - - } - - - @Override - public void rollback() { - - } - - - @Override - public boolean hasUncommittedChanges() { - return false; - } - - - @Override - public NamespaceType getType() { - return null; - } - - - @Override - public AllocationCatalog copy() { - return deserialize( serialize(), AllocationCatalog.class ); - } - - - public long addVerticalPlacement( long logicalId ) { - long id = idBuilder.getNewVerticalId(); - verticals.put( id, new VerticalPartition( id, logicalId ) ); - return id; - } - - - public long addHorizontalPlacement( long logicalId ) { - long id = idBuilder.getNewHorizontalId(); - - horizontals.put( id, new HorizontalPartition( id, logicalId ) ); - - return id; - } - -} diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/HorizontalPartition.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/HorizontalPartition.java deleted file mode 100644 index 454e17fe71..0000000000 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/HorizontalPartition.java +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Copyright 2019-2023 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
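// The AllocationCatalog deleted above kept vertical and horizontal partitions in two separate
// id spaces; in the new model a single AllocationTable (see the AllocationTable changes earlier
// in this patch) carries its column placements and is updated copy-on-write through its with*
// helpers. A rough sketch of the replacement flow, with illustrative values:
long allocId = IdBuilder.getInstance().getNewAllocId();
AllocationTable alloc = new AllocationTable( logicalTable, allocId, "emps", adapterId, "hsqldb", List.of() );
alloc = alloc.withAddedColumn( columnId, PlacementType.AUTOMATIC, null, null, null );
alloc = alloc.withRemovedColumn( columnId ); // each call returns a new immutable instance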
- */ - -package org.polypheny.db.catalog.allocation; - -import io.activej.serializer.annotations.Deserialize; -import io.activej.serializer.annotations.Serialize; -import lombok.Value; - -@Value -public class HorizontalPartition { - - @Serialize - public long id; - - @Serialize - public long logicalId; - - - public HorizontalPartition( - @Deserialize("id") long id, - @Deserialize("logicalId") long logicalId ) { - this.id = id; - this.logicalId = logicalId; - } - -} diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocDocCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocDocCatalog.java new file mode 100644 index 0000000000..e0485d0289 --- /dev/null +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocDocCatalog.java @@ -0,0 +1,84 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.catalog.allocation; + +import io.activej.serializer.BinarySerializer; +import java.util.List; +import lombok.Getter; +import org.polypheny.db.adapter.DataStore; +import org.polypheny.db.catalog.Serializable; +import org.polypheny.db.catalog.catalogs.AllocationDocumentCatalog; +import org.polypheny.db.catalog.entity.CatalogCollectionMapping; +import org.polypheny.db.catalog.entity.CatalogCollectionPlacement; +import org.polypheny.db.catalog.entity.allocation.AllocationEntity; +import org.polypheny.db.catalog.exceptions.GenericCatalogException; +import org.polypheny.db.catalog.logistic.PlacementType; + +public class PolyAllocDocCatalog implements Serializable, AllocationDocumentCatalog { + + @Getter + public BinarySerializer serializer = Serializable.builder.get().build( PolyAllocDocCatalog.class ); + + + @Override + public PolyAllocDocCatalog copy() { + return deserialize( serialize(), PolyAllocDocCatalog.class ); + } + + + @Override + public List> getAllocationsOnAdapter( long id ) { + return null; + } + + + @Override + public long addCollectionLogistics( long schemaId, String name, List stores, boolean onlyPlacement ) throws GenericCatalogException { + return 0; + } + + + @Override + public List getCollectionPlacementsByAdapter( long id ) { + return null; + } + + + @Override + public void addCollectionPlacement( long namespaceId, long adapterId, long id, PlacementType placementType ) { + + } + + + @Override + public CatalogCollectionMapping getCollectionMapping( long id ) { + return null; + } + + + @Override + public void dropCollectionPlacement( long id, long adapterId ) { + + } + + + @Override + public CatalogCollectionPlacement getCollectionPlacement( long id, int placementId ) { + return null; + } + +} diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocGraphCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocGraphCatalog.java new file mode 100644 index 0000000000..ea823e045d --- /dev/null +++ 
b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocGraphCatalog.java @@ -0,0 +1,68 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.catalog.allocation; + +import io.activej.serializer.BinarySerializer; +import java.util.List; +import lombok.Getter; +import org.polypheny.db.catalog.Serializable; +import org.polypheny.db.catalog.catalogs.AllocationGraphCatalog; +import org.polypheny.db.catalog.entity.CatalogGraphPlacement; +import org.polypheny.db.catalog.entity.allocation.AllocationEntity; + +public class PolyAllocGraphCatalog implements Serializable, AllocationGraphCatalog { + + @Getter + public BinarySerializer serializer = Serializable.builder.get().build( PolyAllocGraphCatalog.class ); + + + @Override + public long addGraphPlacement( long adapterId, long graphId ) { + return 0; + } + + + @Override + public List getGraphPlacements( long adapterId ) { + return null; + } + + + @Override + public void deleteGraphPlacement( long adapterId, long graphId ) { + + } + + + @Override + public CatalogGraphPlacement getGraphPlacement( long graphId, long adapterId ) { + return null; + } + + + @Override + public PolyAllocGraphCatalog copy() { + return deserialize( serialize(), PolyAllocGraphCatalog.class ); + } + + + @Override + public List> getAllocationsOnAdapter( long id ) { + return null; + } + +} diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocRelCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocRelCatalog.java new file mode 100644 index 0000000000..ef0b2b7b87 --- /dev/null +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocRelCatalog.java @@ -0,0 +1,545 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
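// The copy() implementations of these allocation catalogs all deep-copy through a serializer
// round trip ( deserialize( serialize(), ... ) ), so the copy shares no mutable state with the
// original; a sketch of the property this is presumably meant to provide:
PolyAllocGraphCatalog snapshot = catalog.copy();
// later mutations of `catalog` do not show up in `snapshot`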
+ */ + +package org.polypheny.db.catalog.allocation; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import io.activej.serializer.BinarySerializer; +import io.activej.serializer.annotations.Deserialize; +import io.activej.serializer.annotations.Serialize; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; +import lombok.Getter; +import lombok.extern.slf4j.Slf4j; +import org.jetbrains.annotations.Nullable; +import org.polypheny.db.catalog.ConnectedMap; +import org.polypheny.db.catalog.IdBuilder; +import org.polypheny.db.catalog.Serializable; +import org.polypheny.db.catalog.catalogs.AllocationRelationalCatalog; +import org.polypheny.db.catalog.entity.CatalogAdapter; +import org.polypheny.db.catalog.entity.CatalogColumnPlacement; +import org.polypheny.db.catalog.entity.CatalogDataPlacement; +import org.polypheny.db.catalog.entity.CatalogPartition; +import org.polypheny.db.catalog.entity.CatalogPartitionGroup; +import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; +import org.polypheny.db.catalog.entity.allocation.AllocationTable; +import org.polypheny.db.catalog.exceptions.GenericCatalogException; +import org.polypheny.db.catalog.logistic.DataPlacementRole; +import org.polypheny.db.catalog.logistic.PartitionType; +import org.polypheny.db.catalog.logistic.Pattern; +import org.polypheny.db.catalog.logistic.PlacementType; +import org.polypheny.db.partition.properties.PartitionProperty; +import org.polypheny.db.util.Pair; + +@Slf4j +public class PolyAllocRelCatalog implements AllocationRelationalCatalog, Serializable { + + + private final IdBuilder idBuilder = IdBuilder.getInstance(); + @Getter + public BinarySerializer<PolyAllocRelCatalog> serializer = Serializable.builder.get().build( PolyAllocRelCatalog.class ); + + @Serialize + public final ConnectedMap<Long, AllocationTable> allocations; + + private final ConcurrentHashMap<Pair<Long, Long>, Long> adapterLogicalToAllocId; + private final ConcurrentHashMap<Pair<Long, Long>, AllocationTable> adapterLogicalToAlloc; + + private final ConcurrentHashMap<Long, List<CatalogColumnPlacement>> logicalIdToPlacements; + + + public PolyAllocRelCatalog( + @Deserialize("allocations") Map<Long, AllocationTable> allocations ) { + this.allocations = new ConnectedMap<>( allocations ); + this.adapterLogicalToAllocId = new ConcurrentHashMap<>(); + this.allocations.addRowConnection( this.adapterLogicalToAllocId, ( k, v ) -> Pair.of( v.adapterId, v.logical.id ), ( k, v ) -> k ); + this.adapterLogicalToAlloc = new ConcurrentHashMap<>(); + this.allocations.addRowConnection( this.adapterLogicalToAlloc, ( k, v ) -> Pair.of( v.adapterId, v.logical.id ), ( k, v ) -> v ); + this.logicalIdToPlacements = new ConcurrentHashMap<>(); + this.allocations.addConnection( a -> { + logicalIdToPlacements.clear(); + a.forEach( ( k, v ) -> { + v.placements.forEach( p -> { + if ( logicalIdToPlacements.containsKey( p.columnId ) ) { + logicalIdToPlacements.get( p.columnId ).add( p ); + } else { + logicalIdToPlacements.put( p.columnId, new ArrayList<>( List.of( p ) ) ); + } + } ); + } ); + } ); + } + + + @Override + public PolyAllocRelCatalog copy() { + return deserialize( serialize(), PolyAllocRelCatalog.class ); + } + + + // move to Snapshot + @Override + public List getAllocationsOnAdapter( long id ) { + return null; + } + + + @Nullable + private Long getAllocId( long adapterId, long tableId ) { + Long allocId = adapterLogicalToAllocId.get( Pair.of( adapterId, tableId ) ); + if ( allocId == null ) { + log.warn( "AllocationEntity does not yet exist."
); + return null; + } + return allocId; + } + + + @Override + public void addColumnPlacement( long adapterId, long columnId, PlacementType placementType, String physicalSchemaName, String physicalTableName, String physicalColumnName ) { + allocations.put( adapterLogicalToAllocId.get( Pair.of( adapterId, columnId ) ), adapterLogicalToAlloc.get( Pair.of( adapterId, columnId ) ).withAddedColumn( columnId, placementType, physicalSchemaName, physicalTableName, physicalColumnName ) ); + } + + + // might replace above one with this + private void addColumnPlacementAlloc( long allocTableId, long columnId, PlacementType placementType, String physicalSchemaName, String physicalTableName, String physicalColumnName ) { + allocations.put( allocTableId, allocations.get( allocTableId ).withAddedColumn( columnId, placementType, physicalSchemaName, physicalTableName, physicalColumnName ) ); + } + + + @Override + public void deleteColumnPlacement( long adapterId, long columnId, boolean columnOnly ) { + allocations.put( adapterLogicalToAllocId.get( Pair.of( adapterId, columnId ) ), adapterLogicalToAlloc.get( Pair.of( adapterId, columnId ) ).withRemovedColumn( columnId ) ); + } + + + // might replace above one with this + private void deleteColumnPlacementAlloc( long allocTableId, long columnId, boolean columnOnly ) { + allocations.put( allocTableId, allocations.get( allocTableId ).withRemovedColumn( columnId ) ); + } + + + @Override + public CatalogColumnPlacement getColumnPlacement( long adapterId, long columnId ) { + return allocations.get( adapterLogicalToAllocId.get( Pair.of( adapterId, columnId ) ) ).placements.stream().filter( p -> p.columnId == columnId ).findFirst().orElse( null ); + } + + + @Override + public boolean checkIfExistsColumnPlacement( long adapterId, long columnId ) { + return allocations.get( adapterLogicalToAllocId.get( Pair.of( adapterId, columnId ) ) ).placements.stream().anyMatch( p -> p.columnId == columnId ); + } + + + @Override + public List getColumnPlacements( long columnId ) { + return logicalIdToPlacements.get( columnId ); + } + + + @Override + public List getColumnPlacementsOnAdapterPerTable( long adapterId, long tableId ) { + return null; + } + + + @Override + public List getColumnPlacementsOnAdapter( long adapterId ) { + return null; + } + + + @Override + public List getColumnPlacementsByColumn( long columnId ) { + return null; + } + + + @Override + public ImmutableMap> getColumnPlacementsByAdapter( long tableId ) { + return null; + } + + + @Override + public long getPartitionGroupByPartition( long partitionId ) { + return 0; + } + + + @Override + public List getColumnPlacementsOnAdapterAndSchema( long adapterId, long schemaId ) { + return null; + } + + + @Override + public void updateColumnPlacementType( long adapterId, long columnId, PlacementType placementType ) { + + } + + + @Override + public void updateColumnPlacementPhysicalPosition( long adapterId, long columnId, long position ) { + + } + + + @Override + public void updateColumnPlacementPhysicalPosition( long adapterId, long columnId ) { + + } + + + @Override + public void updateColumnPlacementPhysicalNames( long adapterId, long columnId, String physicalSchemaName, String physicalColumnName, boolean updatePhysicalColumnPosition ) { + + } + + + @Override + public long addPartitionGroup( long tableId, String partitionGroupName, long schemaId, PartitionType partitionType, long numberOfInternalPartitions, List effectivePartitionGroupQualifier, boolean isUnbound ) throws GenericCatalogException { + return 0; + } + + + 
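// Two caveats in the placement lookups above: the derived index adapterLogicalToAllocId is
// keyed by ( adapterId, logical table id ) in the constructor, while addColumnPlacement,
// deleteColumnPlacement and getColumnPlacement query it with ( adapterId, columnId ), so those
// lookups only hit when the two ids happen to coincide; and a missing key makes the outer
// allocations.get( null ) throw. A defensive variant of getColumnPlacement built on the
// getAllocId helper (note: it takes the table id, not the column id):
Long allocId = getAllocId( adapterId, tableId );
if ( allocId == null ) {
    return null;
}
return allocations.get( allocId ).placements.stream()
        .filter( p -> p.columnId == columnId )
        .findFirst()
        .orElse( null );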
@Override + public void deletePartitionGroup( long tableId, long schemaId, long partitionGroupId ) { + + } + + + @Override + public CatalogPartitionGroup getPartitionGroup( long partitionGroupId ) { + return null; + } + + + @Override + public long addPartition( long tableId, long schemaId, long partitionGroupId, List effectivePartitionGroupQualifier, boolean isUnbound ) throws GenericCatalogException { + return 0; + } + + + @Override + public void deletePartition( long tableId, long schemaId, long partitionId ) { + + } + + + @Override + public CatalogPartition getPartition( long partitionId ) { + return null; + } + + + @Override + public List getPartitionsByTable( long tableId ) { + return null; + } + + + @Override + public void partitionTable( long tableId, PartitionType partitionType, long partitionColumnId, int numPartitionGroups, List partitionGroupIds, PartitionProperty partitionProperty ) { + + } + + + @Override + public void mergeTable( long tableId ) { + + } + + + @Override + public void updateTablePartitionProperties( long tableId, PartitionProperty partitionProperty ) { + + } + + + @Override + public List getPartitionGroups( long tableId ) { + return null; + } + + + @Override + public List getPartitionGroups( Pattern schemaNamePattern, Pattern tableNamePattern ) { + return null; + } + + + @Override + public void updatePartitionGroup( long partitionGroupId, List partitionIds ) { + + } + + + @Override + public void addPartitionToGroup( long partitionGroupId, Long partitionId ) { + + } + + + @Override + public void removePartitionFromGroup( long partitionGroupId, Long partitionId ) { + + } + + + @Override + public void updatePartition( long partitionId, Long partitionGroupId ) { + + } + + + @Override + public List getPartitions( long partitionGroupId ) { + return null; + } + + + @Override + public List getPartitions( Pattern schemaNamePattern, Pattern tableNamePattern ) { + return null; + } + + + @Override + public List getPartitionGroupNames( long tableId ) { + return null; + } + + + @Override + public List getColumnPlacementsByPartitionGroup( long tableId, long partitionGroupId, long columnId ) { + return null; + } + + + @Override + public List getAdaptersByPartitionGroup( long tableId, long partitionGroupId ) { + return null; + } + + + @Override + public List getPartitionGroupsOnDataPlacement( long adapterId, long tableId ) { + return null; + } + + + @Override + public List getPartitionsOnDataPlacement( long adapterId, long tableId ) { + return null; + } + + + @Override + public List getPartitionGroupsIndexOnDataPlacement( int adapterId, long tableId ) { + return null; + } + + + @Override + public CatalogDataPlacement getDataPlacement( long adapterId, long tableId ) { + return null; + } + + + @Override + public List getDataPlacements( long tableId ) { + return null; + } + + + @Override + public List getAllFullDataPlacements( long tableId ) { + return null; + } + + + @Override + public List getAllColumnFullDataPlacements( long tableId ) { + return null; + } + + + @Override + public List getAllPartitionFullDataPlacements( long tableId ) { + return null; + } + + + @Override + public List getDataPlacementsByRole( long tableId, DataPlacementRole role ) { + return null; + } + + + @Override + public List getPartitionPlacementsByRole( long tableId, DataPlacementRole role ) { + return null; + } + + + @Override + public List getPartitionPlacementsByIdAndRole( long tableId, long partitionId, DataPlacementRole role ) { + return null; + } + + + @Override + public boolean 
validateDataPlacementsConstraints( long tableId, long adapterId, List columnIdsToBeRemoved, List partitionsIdsToBeRemoved ) { + return false; + } + + + @Override + public void addPartitionPlacement( long namespaceId, long adapterId, long tableId, long partitionId, PlacementType placementType, String physicalSchemaName, String physicalTableName, DataPlacementRole role ) { + + } + + + @Override + public void addDataPlacement( long adapterId, long tableId ) { + + } + + + @Override + public CatalogDataPlacement addDataPlacementIfNotExists( long adapterId, long tableId ) { + return null; + } + + + @Override + public void modifyDataPlacement( long adapterId, long tableId, CatalogDataPlacement catalogDataPlacement ) { + + } + + + @Override + public void removeDataPlacement( long adapterId, long tableId ) { + + } + + + @Override + public void addSingleDataPlacementToTable( long adapterId, long tableId ) { + + } + + + @Override + public void removeSingleDataPlacementFromTable( long adapterId, long tableId ) { + + } + + + @Override + public void updateDataPlacementsOnTable( long tableId, List newDataPlacements ) { + + } + + + @Override + public void addColumnsToDataPlacement( long adapterId, long tableId, List columnIds ) { + + } + + + @Override + public void removeColumnsFromDataPlacement( long adapterId, long tableId, List columnIds ) { + + } + + + @Override + public void addPartitionsToDataPlacement( long adapterId, long tableId, List partitionIds ) { + + } + + + @Override + public void removePartitionsFromDataPlacement( long adapterId, long tableId, List partitionIds ) { + + } + + + @Override + public void updateDataPlacement( long adapterId, long tableId, List columnIds, List partitionIds ) { + + } + + + @Override + public void deletePartitionPlacement( long adapterId, long partitionId ) { + + } + + + @Override + public CatalogPartitionPlacement getPartitionPlacement( long adapterId, long partitionId ) { + return null; + } + + + @Override + public List getPartitionPlacementsByAdapter( long adapterId ) { + return null; + } + + + @Override + public List getPartitionPlacementsByTableOnAdapter( long adapterId, long tableId ) { + return null; + } + + + @Override + public List getAllPartitionPlacementsByTable( long tableId ) { + return null; + } + + + @Override + public List getPartitionPlacements( long partitionId ) { + return null; + } + + + @Override + public void addTableToPeriodicProcessing( long tableId ) { + + } + + + @Override + public void removeTableFromPeriodicProcessing( long tableId ) { + + } + + + @Override + public boolean checkIfExistsPartitionPlacement( long adapterId, long partitionId ) { + return false; + } + +} diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/VerticalPartition.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/VerticalPartition.java deleted file mode 100644 index 467b8a3488..0000000000 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/VerticalPartition.java +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Copyright 2019-2023 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.catalog.allocation; - -import io.activej.serializer.annotations.Deserialize; -import io.activej.serializer.annotations.Serialize; -import lombok.Value; - -@Value -public class VerticalPartition { - - @Serialize - public long id; - - @Serialize - public long logicalId; - - - public VerticalPartition( - @Deserialize("id") long id, - @Deserialize("logicalId") long logicalId ) { - this.id = id; - this.logicalId = logicalId; - } - -} diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/document/DocumentCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/DocumentCatalog.java similarity index 82% rename from plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/document/DocumentCatalog.java rename to plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/DocumentCatalog.java index a9c50c47d8..235523b900 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/document/DocumentCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/DocumentCatalog.java @@ -14,7 +14,7 @@ * limitations under the License. */ -package org.polypheny.db.catalog.logical.document; +package org.polypheny.db.catalog.logical; import io.activej.serializer.BinarySerializer; import io.activej.serializer.annotations.Deserialize; @@ -26,19 +26,18 @@ import lombok.Value; import lombok.With; import lombok.experimental.NonFinal; +import org.polypheny.db.adapter.DataStore; import org.polypheny.db.catalog.IdBuilder; -import org.polypheny.db.catalog.NCatalog; import org.polypheny.db.catalog.Serializable; import org.polypheny.db.catalog.catalogs.LogicalDocumentCatalog; import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalCollection; import org.polypheny.db.catalog.logistic.EntityType; -import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.catalog.logistic.Pattern; @Value @With -public class DocumentCatalog implements NCatalog, Serializable, LogicalDocumentCatalog { +public class DocumentCatalog implements Serializable, LogicalDocumentCatalog { @Getter public BinarySerializer serializer = Serializable.builder.get().build( DocumentCatalog.class ); @@ -72,35 +71,6 @@ public DocumentCatalog( boolean openChanges = false; - @Override - public void commit() { - openChanges = false; - } - - - @Override - public void rollback() { - - openChanges = false; - } - - - @Override - public boolean hasUncommittedChanges() { - return false; - } - - - @Override - public NamespaceType getType() { - return NamespaceType.DOCUMENT; - } - - - public void addCollection( long id, String name, long namespaceId ) { - - } - @Override public DocumentCatalog copy() { @@ -143,4 +113,10 @@ public void deleteCollection( long id ) { } + + @Override + public long addCollectionLogistics( String name, List stores, boolean placementOnly ) { + return 0; + } + } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/graph/GraphCatalog.java 
b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/GraphCatalog.java similarity index 83% rename from plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/graph/GraphCatalog.java rename to plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/GraphCatalog.java index 8e28105759..40753a4e2a 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/graph/GraphCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/GraphCatalog.java @@ -14,7 +14,7 @@ * limitations under the License. */ -package org.polypheny.db.catalog.logical.graph; +package org.polypheny.db.catalog.logical; import io.activej.serializer.BinarySerializer; import java.util.List; @@ -24,7 +24,6 @@ import lombok.experimental.NonFinal; import org.polypheny.db.adapter.DataStore; import org.polypheny.db.catalog.IdBuilder; -import org.polypheny.db.catalog.NCatalog; import org.polypheny.db.catalog.Serializable; import org.polypheny.db.catalog.catalogs.LogicalGraphCatalog; import org.polypheny.db.catalog.entity.LogicalNamespace; @@ -32,12 +31,11 @@ import org.polypheny.db.catalog.exceptions.GenericCatalogException; import org.polypheny.db.catalog.exceptions.UnknownColumnException; import org.polypheny.db.catalog.exceptions.UnknownTableException; -import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.catalog.logistic.Pattern; @Value @With -public class GraphCatalog implements NCatalog, Serializable, LogicalGraphCatalog { +public class GraphCatalog implements Serializable, LogicalGraphCatalog { @Getter public BinarySerializer serializer = Serializable.builder.get().build( GraphCatalog.class ); @@ -57,31 +55,6 @@ public GraphCatalog( LogicalNamespace logicalNamespace, IdBuilder idBuilder ) { } - @Override - public void commit() { - openChanges = false; - } - - - @Override - public void rollback() { - - openChanges = false; - } - - - @Override - public boolean hasUncommittedChanges() { - return openChanges; - } - - - @Override - public NamespaceType getType() { - return NamespaceType.GRAPH; - } - - @Override public GraphCatalog copy() { return deserialize( serialize(), GraphCatalog.class ); diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/relational/RelationalCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/RelationalCatalog.java similarity index 96% rename from plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/relational/RelationalCatalog.java rename to plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/RelationalCatalog.java index 302b9f2020..a9dc42f859 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/relational/RelationalCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/RelationalCatalog.java @@ -14,7 +14,7 @@ * limitations under the License. 
*/ -package org.polypheny.db.catalog.logical.relational; +package org.polypheny.db.catalog.logical; import io.activej.serializer.BinarySerializer; import io.activej.serializer.annotations.Deserialize; @@ -34,7 +34,6 @@ import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.catalog.IdBuilder; -import org.polypheny.db.catalog.NCatalog; import org.polypheny.db.catalog.Serializable; import org.polypheny.db.catalog.catalogs.LogicalRelationalCatalog; import org.polypheny.db.catalog.entity.CatalogConstraint; @@ -60,14 +59,13 @@ import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.logistic.ForeignKeyOption; import org.polypheny.db.catalog.logistic.IndexType; -import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.catalog.logistic.Pattern; import org.polypheny.db.languages.QueryLanguage; import org.polypheny.db.type.PolyType; @Value @With -public class RelationalCatalog implements NCatalog, Serializable, LogicalRelationalCatalog { +public class RelationalCatalog implements Serializable, LogicalRelationalCatalog { @Getter public BinarySerializer serializer = Serializable.builder.get().build( RelationalCatalog.class ); @@ -118,37 +116,11 @@ public RelationalCatalog( LogicalNamespace namespace, IdBuilder idBuilder ) { } - @Override - public void commit() { - - openChanges = false; - } - - - @Override - public void rollback() { - - openChanges = false; - } - - public void change() { openChanges = true; } - @Override - public boolean hasUncommittedChanges() { - return openChanges; - } - - - @Override - public NamespaceType getType() { - return NamespaceType.RELATIONAL; - } - - @Override public RelationalCatalog copy() { return deserialize( serialize(), RelationalCatalog.class ); From aef1c1a4816a6f18c8be0b79b2baaeb4a561390a Mon Sep 17 00:00:00 2001 From: datomo Date: Thu, 2 Mar 2023 23:02:18 +0100 Subject: [PATCH 034/436] adjusting existing catalog calls --- .../org/polypheny/db/adapter/Adapter.java | 2 +- .../org/polypheny/db/catalog/Catalog.java | 7 +- .../catalogs/AllocationRelationalCatalog.java | 3 + .../db/catalog/catalogs/LogicalCatalog.java | 3 + .../catalogs/LogicalRelationalCatalog.java | 32 ++----- .../org/polypheny/db/catalog/MockCatalog.java | 2 +- .../org/polypheny/db/ddl/DdlManagerImpl.java | 77 ++++++++-------- .../db/routing/routers/BaseRouter.java | 3 +- .../db/languages/mql/MqlAddPlacement.java | 7 +- .../org/polypheny/db/catalog/PolyCatalog.java | 20 ++++- .../allocation/PolyAllocRelCatalog.java | 88 ++++++++++++++----- .../db/catalog/logical/DocumentCatalog.java | 18 +++- .../db/catalog/logical/GraphCatalog.java | 8 +- .../db/catalog/logical/RelationalCatalog.java | 53 +++++++---- .../polypheny/db/sql/SqlProcessorImpl.java | 35 +++----- .../org/polypheny/db/sql/language/SqlDdl.java | 34 +++---- .../ddl/SqlCreateMaterializedView.java | 23 ++--- .../db/sql/language/ddl/SqlCreateTable.java | 24 +++-- .../db/sql/language/ddl/SqlCreateView.java | 23 ++--- .../java/org/polypheny/db/webui/Crud.java | 82 ++++++++--------- 20 files changed, 295 insertions(+), 249 deletions(-) diff --git a/core/src/main/java/org/polypheny/db/adapter/Adapter.java b/core/src/main/java/org/polypheny/db/adapter/Adapter.java index 83d3e55ffa..45623842ab 100644 --- a/core/src/main/java/org/polypheny/db/adapter/Adapter.java +++ b/core/src/main/java/org/polypheny/db/adapter/Adapter.java @@ -413,7 +413,7 @@ public Map getCurrentSettings() { if ( deployMode == DeployMode.DOCKER ) { Map dockerSettings = 
RuntimeConfig.DOCKER_INSTANCES .getWithId( ConfigDocker.class, Integer.parseInt( settings.get( "instanceId" ) ) ).getSettings(); - settings.forEach( dockerSettings::put ); + dockerSettings.putAll( settings ); return dockerSettings; } return settings; diff --git a/core/src/main/java/org/polypheny/db/catalog/Catalog.java b/core/src/main/java/org/polypheny/db/catalog/Catalog.java index 3bb1a21248..49749125c2 100644 --- a/core/src/main/java/org/polypheny/db/catalog/Catalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/Catalog.java @@ -40,6 +40,7 @@ import org.polypheny.db.catalog.entity.CatalogUser; import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.allocation.AllocationEntity; +import org.polypheny.db.catalog.entity.logical.LogicalEntity; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.entity.physical.PhysicalEntity; import org.polypheny.db.catalog.exceptions.NoTablePrimaryKeyException; @@ -103,6 +104,9 @@ public static Catalog getInstance() { public abstract AllocationGraphCatalog getAllocGraph( long namespaceId ); + // move to Snapshot + public abstract LogicalEntity getLogicalEntity( String entityName ); + public abstract PhysicalCatalog getPhysical( long namespaceId ); public abstract PhysicalEntity getPhysicalEntity( long id ); @@ -193,7 +197,7 @@ protected final boolean isValidIdentifier( final String str ) { * @return The schema * @throws UnknownSchemaException If there is no schema with this name in the specified database. */ - public abstract LogicalNamespace getNamespace( String name ) throws UnknownSchemaException; + public abstract LogicalNamespace getNamespace( String name ); /** * Adds a schema in a specified database @@ -364,4 +368,5 @@ protected final boolean isValidIdentifier( final String str ) { public abstract List getTablesForPeriodicProcessing(); + } diff --git a/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationRelationalCatalog.java b/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationRelationalCatalog.java index 24b8f2f4a2..a290ffa2f6 100644 --- a/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationRelationalCatalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationRelationalCatalog.java @@ -25,6 +25,7 @@ import org.polypheny.db.catalog.entity.CatalogPartition; import org.polypheny.db.catalog.entity.CatalogPartitionGroup; import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; +import org.polypheny.db.catalog.entity.allocation.AllocationTable; import org.polypheny.db.catalog.exceptions.GenericCatalogException; import org.polypheny.db.catalog.logistic.DataPlacementRole; import org.polypheny.db.catalog.logistic.PartitionType; @@ -662,4 +663,6 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { boolean checkIfExistsPartitionPlacement( long adapterId, long partitionId ); + List getAllocationsFromLogical( long logicalId ); + } diff --git a/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalCatalog.java b/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalCatalog.java index e160ecde2e..2eee28e9a6 100644 --- a/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalCatalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalCatalog.java @@ -17,6 +17,7 @@ package org.polypheny.db.catalog.catalogs; import org.polypheny.db.catalog.entity.LogicalNamespace; +import org.polypheny.db.catalog.entity.logical.LogicalEntity; public interface LogicalCatalog 
{ @@ -41,4 +42,6 @@ public interface LogicalCatalog { LogicalCatalog withLogicalNamespace( LogicalNamespace logicalNamespace ); + LogicalEntity getEntity( String name ); + } diff --git a/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalRelationalCatalog.java b/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalRelationalCatalog.java index c171db0581..58a493385c 100644 --- a/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalRelationalCatalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalRelationalCatalog.java @@ -18,6 +18,7 @@ import java.util.List; import java.util.Map; +import javax.annotation.Nullable; import org.polypheny.db.algebra.AlgCollation; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.type.AlgDataType; @@ -52,31 +53,10 @@ public interface LogicalRelationalCatalog extends LogicalCatalog { * Get all tables of the specified schema which fit to the specified filters. * getTables(xid, databaseName, null, null, null) returns all tables of the database. * - * @param schemaId The id of the schema - * @param tableNamePattern Pattern for the table name. null returns all. - * @return List of tables which fit to the specified filters. If there is no table which meets the criteria, an empty list is returned. - */ - public abstract List getTables( long schemaId, Pattern tableNamePattern ); - - /** - * Get all tables of the specified database which fit to the specified filters. - * getTables(xid, databaseName, null, null, null) returns all tables of the database. - * - * @param schemaNamePattern Pattern for the schema name. null returns all. - * @param tableNamePattern Pattern for the table name. null returns all. + * @param name Pattern for the table name. null returns all. * @return List of tables which fit to the specified filters. If there is no table which meets the criteria, an empty list is returned. */ - public abstract List getTables( Pattern schemaNamePattern, Pattern tableNamePattern ); - - /** - * Returns the table with the given name in the specified database and schema. - * - * @param schemaName The name of the schema - * @param tableName The name of the table - * @return The table - */ - public abstract LogicalTable getTable( String schemaName, String tableName ) throws UnknownTableException, UnknownSchemaException; - + public abstract List getTables( @Nullable Pattern name ); /** * Returns the table with the given id @@ -89,12 +69,11 @@ public interface LogicalRelationalCatalog extends LogicalCatalog { /** * Returns the table with the given name in the specified schema. * - * @param schemaId The id of the schema * @param tableName The name of the table * @return The table * @throws UnknownTableException If there is no table with this name in the specified database and schema. */ - public abstract LogicalTable getTable( long schemaId, String tableName ) throws UnknownTableException; + public abstract LogicalTable getTable( String tableName ) throws UnknownTableException; /** * Returns the table which is associated with a given partitionId @@ -212,12 +191,11 @@ public interface LogicalRelationalCatalog extends LogicalCatalog { * Get all columns of the specified database which fit to the specified filter patterns. * getColumns(xid, databaseName, null, null, null) returns all columns of the database. * - * @param schemaNamePattern Pattern for the schema name. null returns all. * @param tableNamePattern Pattern for the table name. null returns all. * @param columnNamePattern Pattern for the column name. 
null returns all. * @return List of columns which fit to the specified filters. If there is no column which meets the criteria, an empty list is returned. */ - public abstract List getColumns( Pattern schemaNamePattern, Pattern tableNamePattern, Pattern columnNamePattern ); + public abstract List getColumns( @Nullable Pattern tableNamePattern, @Nullable Pattern columnNamePattern ); /** * Returns the column with the specified id. diff --git a/core/src/test/java/org/polypheny/db/catalog/MockCatalog.java b/core/src/test/java/org/polypheny/db/catalog/MockCatalog.java index 847c6b9a8c..90316bf7fe 100644 --- a/core/src/test/java/org/polypheny/db/catalog/MockCatalog.java +++ b/core/src/test/java/org/polypheny/db/catalog/MockCatalog.java @@ -122,7 +122,7 @@ public LogicalNamespace getNamespace( long id ) { @Override - public LogicalNamespace getNamespace( String name ) throws UnknownSchemaException { + public LogicalNamespace getNamespace( String name ) { throw new NotImplementedException(); } diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java index 06d560dbd6..028780dc37 100644 --- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java @@ -69,6 +69,7 @@ import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.MaterializedCriteria; import org.polypheny.db.catalog.entity.MaterializedCriteria.CriteriaType; +import org.polypheny.db.catalog.entity.allocation.AllocationTable; import org.polypheny.db.catalog.entity.logical.LogicalCollection; import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalGraph; @@ -209,11 +210,7 @@ public long createNamespace( String name, long databaseId, NamespaceType type, i if ( catalog.checkIfExistsNamespace( name ) ) { if ( ifNotExists ) { // It is ok that there is already a schema with this name because "IF NOT EXISTS" was specified - try { - return catalog.getNamespace( name ).id; - } catch ( UnknownSchemaException e ) { - throw new RuntimeException( "The catalog seems to be corrupt, as it was impossible to retrieve an existing namespace." ); - } + return catalog.getNamespace( name ).id; } else if ( replace ) { throw new RuntimeException( "Replacing namespace is not yet supported." ); } else { @@ -443,7 +440,7 @@ public void addColumnToSourceTable( LogicalTable catalogTable, String columnPhys throw new RuntimeException( "The table has an unexpected number of placements!" 
); } - int adapterId = catalog.getAllocRel( catalogTable.namespaceId ).getColumnPlacements( catalogTable.columns.get( 0 ).id ).get( 0 ).adapterId; + long adapterId = catalog.getAllocRel( catalogTable.namespaceId ).getAllocationsFromLogical( catalogTable.id ).get( 0 ).adapterId; DataSource dataSource = (DataSource) AdapterManager.getInstance().getAdapter( adapterId ); String physicalTableName = catalog.getAllocRel( catalogTable.namespaceId ).getPartitionPlacement( adapterId, catalogTable.partitionProperty.partitionIds.get( 0 ) ).physicalTableName; @@ -1007,11 +1004,19 @@ public void dropColumn( LogicalTable catalogTable, String columnName, Statement } // Delete column from underlying data stores - for ( CatalogColumnPlacement dp : catalog.getAllocRel( catalogTable.namespaceId ).getColumnPlacementsByColumn( column.id ) ) { + /*for ( CatalogColumnPlacement dp : catalog.getAllocRel( catalogTable.namespaceId ).getColumnPlacementsByColumn( column.id ) ) { if ( catalogTable.entityType == EntityType.ENTITY ) { AdapterManager.getInstance().getStore( dp.adapterId ).dropColumn( statement.getPrepareContext(), dp ); } catalog.getAllocRel( catalogTable.namespaceId ).deleteColumnPlacement( dp.adapterId, dp.columnId, true ); + }*/ + for ( AllocationTable table : catalog.getAllocRel( catalogTable.namespaceId ).getAllocationsFromLogical( catalogTable.id ) ) { + for ( CatalogColumnPlacement placement : table.placements ) { + if ( catalogTable.entityType == EntityType.ENTITY ) { + AdapterManager.getInstance().getStore( table.adapterId ).dropColumn( statement.getPrepareContext(), placement ); + } + catalog.getAllocRel( catalogTable.namespaceId ).deleteColumnPlacement( placement.adapterId, placement.columnId, true ); + } } // Delete from catalog @@ -1345,7 +1350,7 @@ public void modifyDataPlacement( LogicalTable catalogTable, List columnIds // Remove columns physically for ( long columnId : columnsToRemove ) { // Drop Column on store - storeInstance.dropColumn( statement.getPrepareContext(), catalog.getAllocRel( catalogTable.namespaceId ).getColumnPlacements( columnId ) ); + storeInstance.dropColumn( statement.getPrepareContext(), catalog.getAllocRel( catalogTable.namespaceId ).getColumnPlacement( storeInstance.getAdapterId(), columnId ) ); // Drop column placement catalog.getAllocRel( catalogTable.namespaceId ).deleteColumnPlacement( storeInstance.getAdapterId(), columnId, true ); } @@ -1407,7 +1412,7 @@ else if ( !partitionGroupNames.isEmpty() && partitionGroupIds.isEmpty() ) { for ( long cid : columnIds ) { if ( catalog.getAllocRel( catalogTable.namespaceId ).checkIfExistsColumnPlacement( storeInstance.getAdapterId(), cid ) ) { - CatalogColumnPlacement placement = catalog.getAllocRel( catalogTable.namespaceId ).getColumnPlacements( cid ); + CatalogColumnPlacement placement = catalog.getAllocRel( catalogTable.namespaceId ).getColumnPlacement( storeInstance.getAdapterId(), cid ); if ( placement.placementType == PlacementType.AUTOMATIC ) { // Make placement manual catalog.getAllocRel( catalogTable.namespaceId ).updateColumnPlacementType( storeInstance.getAdapterId(), cid, PlacementType.MANUAL ); @@ -1560,7 +1565,7 @@ public void addColumnPlacement( LogicalTable catalogTable, String columnName, Da // Make sure that this store does not contain a placement of this column if ( catalog.getAllocRel( catalogTable.namespaceId ).checkIfExistsColumnPlacement( storeInstance.getAdapterId(), logicalColumn.id ) ) { - CatalogColumnPlacement placement = catalog.getAllocRel( catalogTable.namespaceId ).getColumnPlacements( 
logicalColumn.id ); + CatalogColumnPlacement placement = catalog.getAllocRel( catalogTable.namespaceId ).getColumnPlacement( storeInstance.getAdapterId(), logicalColumn.id ); if ( placement.placementType == PlacementType.AUTOMATIC ) { // Make placement manual catalog.getAllocRel( catalogTable.namespaceId ).updateColumnPlacementType( @@ -1625,7 +1630,7 @@ public void dropColumnPlacement( LogicalTable catalogTable, String columnName, D throw new PlacementIsPrimaryException(); } // Drop Column on store - storeInstance.dropColumn( statement.getPrepareContext(), catalog.getAllocRel( catalogTable.namespaceId ).getColumnPlacements( logicalColumn.id ) ); + storeInstance.dropColumn( statement.getPrepareContext(), catalog.getAllocRel( catalogTable.namespaceId ).getColumnPlacement( storeInstance.getAdapterId(), logicalColumn.id ) ); // Drop column placement catalog.getAllocRel( catalogTable.namespaceId ).deleteColumnPlacement( storeInstance.getAdapterId(), logicalColumn.id, false ); @@ -1690,7 +1695,7 @@ public void createView( String viewName, long namespaceId, AlgNode algNode, AlgC if ( catalog.getLogicalRel( namespaceId ).checkIfExistsEntity( viewName ) ) { if ( replace ) { try { - dropView( catalog.getLogicalRel( namespaceId ).getTable( namespaceId, viewName ), statement ); + dropView( catalog.getLogicalRel( namespaceId ).getTable( viewName ), statement ); } catch ( UnknownTableException | DdlOnSourceException e ) { throw new RuntimeException( "Unable to drop the existing View with this name." ); } @@ -2897,36 +2902,32 @@ public void addConstraint( long namespaceId, String constraintName, ConstraintTy @Override public void dropNamespace( String schemaName, boolean ifExists, Statement statement ) throws SchemaNotExistException, DdlOnSourceException { - try { - schemaName = schemaName.toLowerCase(); - // Check if there is a schema with this name - if ( catalog.checkIfExistsNamespace( schemaName ) ) { - LogicalNamespace logicalNamespace = catalog.getNamespace( schemaName ); - - // Drop all collections in this namespace - List collections = catalog.getLogicalDoc( logicalNamespace.id ).getCollections( null ); - for ( LogicalCollection collection : collections ) { - dropCollection( collection, statement ); - } + schemaName = schemaName.toLowerCase(); + // Check if there is a schema with this name + if ( catalog.checkIfExistsNamespace( schemaName ) ) { + LogicalNamespace logicalNamespace = catalog.getNamespace( schemaName ); + + // Drop all collections in this namespace + List collections = catalog.getLogicalDoc( logicalNamespace.id ).getCollections( null ); + for ( LogicalCollection collection : collections ) { + dropCollection( collection, statement ); + } - // Drop all tables in this schema - List catalogEntities = catalog.getLogicalRel( logicalNamespace.id ).getTables( logicalNamespace.id, null ); - for ( LogicalTable catalogTable : catalogEntities ) { - dropTable( catalogTable, statement ); - } + // Drop all tables in this schema + List catalogEntities = catalog.getLogicalRel( logicalNamespace.id ).getTables( null ); + for ( LogicalTable catalogTable : catalogEntities ) { + dropTable( catalogTable, statement ); + } - // Drop schema - catalog.deleteNamespace( logicalNamespace.id ); + // Drop schema + catalog.deleteNamespace( logicalNamespace.id ); + } else { + if ( ifExists ) { + // This is ok because "IF EXISTS" was specified + return; } else { - if ( ifExists ) { - // This is ok because "IF EXISTS" was specified - return; - } else { - throw new
SchemaNotExistException(); } - } catch ( UnknownSchemaException e ) { - throw new RuntimeException( e ); } } diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java b/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java index c00c6de6c7..924727ace9 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java @@ -67,7 +67,6 @@ import org.polypheny.db.catalog.entity.physical.PhysicalGraph; import org.polypheny.db.catalog.entity.physical.PhysicalTable; import org.polypheny.db.catalog.logistic.NamespaceType; -import org.polypheny.db.catalog.logistic.Pattern; import org.polypheny.db.catalog.refactor.TranslatableEntity; import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.config.RuntimeConfig; @@ -431,7 +430,7 @@ public AlgNode handleGraphScan( LogicalLpgScan alg, Statement statement, @Nullab private AlgNode handleGraphOnRelational( LogicalLpgScan alg, CatalogNamespace namespace, Statement statement, Integer placementId ) { AlgOptCluster cluster = alg.getCluster(); - List tables = catalog.getLogicalRel( namespace.id ).getTables( new Pattern( namespace.name ), null ); + List tables = catalog.getLogicalRel( namespace.id ).getTables( null ); List> scans = tables.stream() .map( t -> Pair.of( t.name, buildJoinedScan( statement, cluster, selectPlacement( t ) ) ) ) .collect( Collectors.toList() ); diff --git a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlAddPlacement.java b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlAddPlacement.java index 1c80652cbe..0790c259e2 100644 --- a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlAddPlacement.java +++ b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlAddPlacement.java @@ -23,7 +23,6 @@ import org.polypheny.db.adapter.DataStore; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.logical.LogicalCollection; -import org.polypheny.db.catalog.exceptions.UnknownSchemaException; import org.polypheny.db.catalog.logistic.Pattern; import org.polypheny.db.ddl.DdlManager; import org.polypheny.db.languages.ParserPos; @@ -47,11 +46,7 @@ public void execute( Context context, Statement statement, QueryParameters param AdapterManager adapterManager = AdapterManager.getInstance(); long namespaceId; - try { - namespaceId = catalog.getNamespace( ((MqlQueryParameters) parameters).getDatabase() ).id; - } catch ( UnknownSchemaException e ) { - throw new RuntimeException( "The used document database (Polypheny Schema) is not available." 
); - } + namespaceId = catalog.getNamespace( ((MqlQueryParameters) parameters).getDatabase() ).id; List collections = catalog.getLogicalDoc( namespaceId ).getCollections( new Pattern( getCollection() ) ); diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java index 53c2965960..f0067c47e6 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java @@ -46,11 +46,11 @@ import org.polypheny.db.catalog.entity.CatalogUser; import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.allocation.AllocationEntity; +import org.polypheny.db.catalog.entity.logical.LogicalEntity; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.entity.physical.PhysicalEntity; import org.polypheny.db.catalog.exceptions.UnknownAdapterException; import org.polypheny.db.catalog.exceptions.UnknownQueryInterfaceException; -import org.polypheny.db.catalog.exceptions.UnknownSchemaException; import org.polypheny.db.catalog.exceptions.UnknownUserException; import org.polypheny.db.catalog.logical.DocumentCatalog; import org.polypheny.db.catalog.logical.GraphCatalog; @@ -198,6 +198,18 @@ public AllocationGraphCatalog getAllocGraph( long namespaceId ) { } + @Override + public LogicalEntity getLogicalEntity( String entityName ) { + for ( LogicalCatalog catalog : logicalCatalogs.values() ) { + LogicalEntity entity = catalog.getEntity( entityName ); + if ( entity != null ) { + return entity; + } + } + return null; + } + + @Override public PhysicalCatalog getPhysical( long namespaceId ) { return physicalCatalogs.get( namespaceId ); @@ -280,6 +292,10 @@ public long addNamespace( String name, NamespaceType namespaceType, boolean case @Override public @NonNull List getNamespaces( Pattern name ) { + if ( name == null ) { + return logicalCatalogs.values().stream().map( LogicalCatalog::getLogicalNamespace ).collect( Collectors.toList() ); + } + return logicalCatalogs.values().stream().filter( c -> c.getLogicalNamespace().caseSensitive ? 
c.getLogicalNamespace().name.toLowerCase( Locale.ROOT ).matches( name.pattern ) @@ -295,7 +311,7 @@ public LogicalNamespace getNamespace( long id ) { @Override - public LogicalNamespace getNamespace( String name ) throws UnknownSchemaException { + public LogicalNamespace getNamespace( String name ) { List namespaces = getNamespaces( Pattern.of( name ) ); if ( namespaces.isEmpty() ) { return null; diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocRelCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocRelCatalog.java index ef0b2b7b87..3f89294872 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocRelCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocRelCatalog.java @@ -25,6 +25,7 @@ import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; +import java.util.stream.Collectors; import lombok.Getter; import lombok.extern.slf4j.Slf4j; import org.jetbrains.annotations.Nullable; @@ -52,6 +53,7 @@ public class PolyAllocRelCatalog implements AllocationRelationalCatalog, Seriali private final IdBuilder idBuilder = IdBuilder.getInstance(); + @Getter public BinarySerializer serializer = Serializable.builder.get().build( PolyAllocRelCatalog.class ); @@ -59,9 +61,13 @@ public class PolyAllocRelCatalog implements AllocationRelationalCatalog, Seriali public final ConnectedMap allocations; private final ConcurrentHashMap, Long> adapterLogicalToAllocId; - private final ConcurrentHashMap, AllocationTable> adapterLogicalToAlloc; + private final ConcurrentHashMap, AllocationTable> adapterLogicalColumnToAlloc; + private final ConcurrentHashMap> logicalColumnToPlacements; + private final ConcurrentHashMap, List> adapterLogicalTableToAllocs; + + private final ConcurrentHashMap> adapterToAllocs; - private final ConcurrentHashMap> logicalIdToPlacements; + private final ConcurrentHashMap> logicalTableToAllocs; public PolyAllocRelCatalog( @@ -69,20 +75,55 @@ public PolyAllocRelCatalog( this.allocations = new ConnectedMap<>( allocations ); this.adapterLogicalToAllocId = new ConcurrentHashMap<>(); this.allocations.addRowConnection( this.adapterLogicalToAllocId, ( k, v ) -> Pair.of( v.adapterId, v.logical.id ), ( k, v ) -> k ); - this.adapterLogicalToAlloc = new ConcurrentHashMap<>(); - this.allocations.addRowConnection( this.adapterLogicalToAlloc, ( k, v ) -> Pair.of( v.adapterId, v.logical.id ), ( k, v ) -> v ); - this.logicalIdToPlacements = new ConcurrentHashMap<>(); + this.adapterLogicalColumnToAlloc = new ConcurrentHashMap<>(); + this.allocations.addRowConnection( this.adapterLogicalColumnToAlloc, ( k, v ) -> Pair.of( v.adapterId, v.logical.id ), ( k, v ) -> v ); + //// + this.logicalColumnToPlacements = new ConcurrentHashMap<>(); + this.allocations.addConnection( a -> { + logicalColumnToPlacements.clear(); + a.forEach( ( k, v ) -> v.placements.forEach( p -> { + if ( logicalColumnToPlacements.containsKey( p.columnId ) ) { + logicalColumnToPlacements.get( p.columnId ).add( p ); + } else { + logicalColumnToPlacements.put( p.columnId, new ArrayList<>( List.of( p ) ) ); + } + } ) ); + } ); + + //// + this.adapterLogicalTableToAllocs = new ConcurrentHashMap<>(); + this.allocations.addConnection( a -> a.forEach( ( k, v ) -> { + if ( adapterLogicalTableToAllocs.containsKey( Pair.of( v.adapterId, v.logical.id ) ) ) { + adapterLogicalTableToAllocs.get( Pair.of( v.adapterId, v.logical.id ) ).add( v ); + } else { + 
adapterLogicalTableToAllocs.put( Pair.of( v.adapterId, v.logical.id ), new ArrayList<>( List.of( v ) ) ); + } + } ) ); + + //// + this.adapterToAllocs = new ConcurrentHashMap<>(); + this.allocations.addConnection( a -> { + adapterToAllocs.clear(); + for ( AllocationTable value : a.values() ) { + if ( adapterToAllocs.containsKey( value.adapterId ) ) { + adapterToAllocs.get( value.adapterId ).add( value ); + } else { + adapterToAllocs.put( value.adapterId, new ArrayList<>( List.of( value ) ) ); + } + } + } ); + + //// + this.logicalTableToAllocs = new ConcurrentHashMap<>(); this.allocations.addConnection( a -> { - logicalIdToPlacements.clear(); - a.forEach( ( k, v ) -> { - v.placements.forEach( p -> { - if ( logicalIdToPlacements.containsKey( p.columnId ) ) { - logicalIdToPlacements.get( p.columnId ).add( p ); - } else { - logicalIdToPlacements.put( p.columnId, new ArrayList<>( List.of( p ) ) ); - } - } ); - } ); + logicalTableToAllocs.clear(); + for ( AllocationTable table : a.values() ) { + if ( logicalTableToAllocs.containsKey( table.logical.id ) ) { + logicalTableToAllocs.get( table.logical.id ).add( table ); + } else { + logicalTableToAllocs.put( table.logical.id, new ArrayList<>( List.of( table ) ) ); + } + } } ); } @@ -113,7 +154,7 @@ private Long getAllocId( long adapterId, long tableId ) { @Override public void addColumnPlacement( long adapterId, long columnId, PlacementType placementType, String physicalSchemaName, String physicalTableName, String physicalColumnName ) { - allocations.put( adapterLogicalToAllocId.get( Pair.of( adapterId, columnId ) ), adapterLogicalToAlloc.get( Pair.of( adapterId, columnId ) ).withAddedColumn( columnId, placementType, physicalSchemaName, physicalTableName, physicalColumnName ) ); + allocations.put( adapterLogicalToAllocId.get( Pair.of( adapterId, columnId ) ), adapterLogicalColumnToAlloc.get( Pair.of( adapterId, columnId ) ).withAddedColumn( columnId, placementType, physicalSchemaName, physicalTableName, physicalColumnName ) ); } @@ -125,7 +166,7 @@ private void addColumnPlacementAlloc( long allocTableId, long columnId, Placemen @Override public void deleteColumnPlacement( long adapterId, long columnId, boolean columnOnly ) { - allocations.put( adapterLogicalToAllocId.get( Pair.of( adapterId, columnId ) ), adapterLogicalToAlloc.get( Pair.of( adapterId, columnId ) ).withRemovedColumn( columnId ) ); + allocations.put( adapterLogicalToAllocId.get( Pair.of( adapterId, columnId ) ), adapterLogicalColumnToAlloc.get( Pair.of( adapterId, columnId ) ).withRemovedColumn( columnId ) ); } @@ -149,19 +190,19 @@ public boolean checkIfExistsColumnPlacement( long adapterId, long columnId ) { @Override public List getColumnPlacements( long columnId ) { - return logicalIdToPlacements.get( columnId ); + return logicalColumnToPlacements.get( columnId ); } @Override public List getColumnPlacementsOnAdapterPerTable( long adapterId, long tableId ) { - return null; + return adapterLogicalTableToAllocs.get( Pair.of( adapterId, tableId ) ).stream().flatMap( a -> a.placements.stream() ).collect( Collectors.toList() ); } @Override public List getColumnPlacementsOnAdapter( long adapterId ) { - return null; + return adapterToAllocs.get( adapterId ).stream().flatMap( a -> a.placements.stream() ).collect( Collectors.toList() ); } @@ -542,4 +583,11 @@ public boolean checkIfExistsPartitionPlacement( long adapterId, long partitionId return false; } + + @Override + public List getAllocationsFromLogical( long logicalId ) { + return logicalTableToAllocs.get( logicalId ); + } + + } diff 
--git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/DocumentCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/DocumentCatalog.java index 235523b900..c75b238963 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/DocumentCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/DocumentCatalog.java @@ -27,11 +27,13 @@ import lombok.With; import lombok.experimental.NonFinal; import org.polypheny.db.adapter.DataStore; +import org.polypheny.db.catalog.ConnectedMap; import org.polypheny.db.catalog.IdBuilder; import org.polypheny.db.catalog.Serializable; import org.polypheny.db.catalog.catalogs.LogicalDocumentCatalog; import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalCollection; +import org.polypheny.db.catalog.entity.logical.LogicalEntity; import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.logistic.Pattern; @@ -45,7 +47,9 @@ public class DocumentCatalog implements Serializable, LogicalDocumentCatalog { @Serialize public IdBuilder idBuilder; @Serialize - public Map collections; + public ConnectedMap collections; + + private ConcurrentHashMap names; @Getter @Serialize public LogicalNamespace logicalNamespace; @@ -61,9 +65,12 @@ public DocumentCatalog( @Deserialize("idBuilder") IdBuilder idBuilder, @Deserialize("collections") Map collections ) { this.logicalNamespace = logicalNamespace; - this.collections = collections; + this.collections = new ConnectedMap<>( collections ); this.idBuilder = idBuilder; + + this.names = new ConcurrentHashMap<>(); + this.collections.addRowConnection( this.names, ( k, v ) -> logicalNamespace.caseSensitive ? v.name : v.name.toLowerCase(), ( k, v ) -> v ); } @@ -71,7 +78,6 @@ public DocumentCatalog( boolean openChanges = false; - @Override public DocumentCatalog copy() { return deserialize( serialize(), DocumentCatalog.class ); @@ -90,6 +96,12 @@ public boolean checkIfExistsEntity( long tableId ) { } + @Override + public LogicalEntity getEntity( String name ) { + return names.get( name ); + } + + @Override public LogicalCollection getCollection( long collectionId ) { return null; diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/GraphCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/GraphCatalog.java index 40753a4e2a..8e6559b863 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/GraphCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/GraphCatalog.java @@ -27,6 +27,7 @@ import org.polypheny.db.catalog.Serializable; import org.polypheny.db.catalog.catalogs.LogicalGraphCatalog; import org.polypheny.db.catalog.entity.LogicalNamespace; +import org.polypheny.db.catalog.entity.logical.LogicalEntity; import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.catalog.exceptions.GenericCatalogException; import org.polypheny.db.catalog.exceptions.UnknownColumnException; @@ -49,7 +50,6 @@ public class GraphCatalog implements Serializable, LogicalGraphCatalog { public GraphCatalog( LogicalNamespace logicalNamespace, IdBuilder idBuilder ) { - this.logicalNamespace = logicalNamespace; this.idBuilder = idBuilder; } @@ -73,6 +73,12 @@ public boolean checkIfExistsEntity( long tableId ) { } + @Override + public LogicalEntity getEntity( String name ) { + return null; + } + + @Override public void addGraphAlias( long graphId, 
String alias, boolean ifNotExists ) { diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/RelationalCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/RelationalCatalog.java index a9dc42f859..74053be322 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/RelationalCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/RelationalCatalog.java @@ -25,7 +25,9 @@ import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.concurrent.ConcurrentHashMap; import java.util.stream.Collectors; +import javax.annotation.Nullable; import lombok.Getter; import lombok.Value; import lombok.With; @@ -33,6 +35,7 @@ import org.polypheny.db.algebra.AlgCollation; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.type.AlgDataType; +import org.polypheny.db.catalog.ConnectedMap; import org.polypheny.db.catalog.IdBuilder; import org.polypheny.db.catalog.Serializable; import org.polypheny.db.catalog.catalogs.LogicalRelationalCatalog; @@ -46,6 +49,7 @@ import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.MaterializedCriteria; import org.polypheny.db.catalog.entity.logical.LogicalColumn; +import org.polypheny.db.catalog.entity.logical.LogicalEntity; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.exceptions.GenericCatalogException; import org.polypheny.db.catalog.exceptions.UnknownColumnException; @@ -71,7 +75,10 @@ public class RelationalCatalog implements Serializable, LogicalRelationalCatalog public BinarySerializer serializer = Serializable.builder.get().build( RelationalCatalog.class ); @Serialize - public Map tables; + public ConnectedMap tables; + + @Serialize + public ConnectedMap columns; @Getter public LogicalNamespace logicalNamespace; @@ -86,6 +93,7 @@ public class RelationalCatalog implements Serializable, LogicalRelationalCatalog @Serialize public IdBuilder idBuilder; + ConcurrentHashMap names; @NonFinal boolean openChanges = false; @@ -97,22 +105,27 @@ public RelationalCatalog( @Deserialize("logicalNamespace") LogicalNamespace logicalNamespace, @Deserialize("idBuilder") IdBuilder idBuilder, @Deserialize("tables") Map tables, + @Deserialize("columns") Map columns, @Deserialize("indexes") Map indexes, @Deserialize("keys") Map keys, @Deserialize("keyColumns") Map keyColumns ) { this.logicalNamespace = logicalNamespace; - this.tables = new HashMap<>( tables ); + this.tables = new ConnectedMap<>( tables ); + this.columns = new ConnectedMap<>( columns ); this.indexes = indexes; this.keys = keys; this.keyColumns = keyColumns; + this.names = new ConcurrentHashMap<>(); + this.tables.addRowConnection( this.names, ( k, v ) -> logicalNamespace.caseSensitive ? 
v.name : v.name.toLowerCase(), ( k, v ) -> v ); + this.idBuilder = idBuilder; } public RelationalCatalog( LogicalNamespace namespace, IdBuilder idBuilder ) { - this( namespace, idBuilder, new HashMap<>(), new HashMap<>(), new HashMap<>(), new HashMap<>() ); + this( namespace, idBuilder, new HashMap<>(), new HashMap<>(), new HashMap<>(), new HashMap<>(), new HashMap<>() ); } @@ -140,20 +153,22 @@ public boolean checkIfExistsEntity( long tableId ) { @Override - public List getTables( long schemaId, Pattern tableNamePattern ) { - return null; - } - - - @Override - public List getTables( Pattern schemaNamePattern, Pattern tableNamePattern ) { - return null; + public LogicalEntity getEntity( String name ) { + return names.get( name ); } @Override - public LogicalTable getTable( String schemaName, String tableName ) throws UnknownTableException, UnknownSchemaException { - return null; + public List getTables( @Nullable Pattern name ) { + if ( name == null ) { + return List.copyOf( tables.values() ); + } + return tables + .values() + .stream() + .filter( t -> logicalNamespace.caseSensitive ? + t.name.toLowerCase().matches( name.toRegex() ) : + t.name.matches( name.toRegex() ) ).collect( Collectors.toList() ); } @@ -164,7 +179,7 @@ public LogicalTable getTable( long tableId ) { @Override - public LogicalTable getTable( long schemaId, String tableName ) throws UnknownTableException { + public LogicalTable getTable( String tableName ) throws UnknownTableException { return null; } @@ -365,14 +380,18 @@ public List getColumns( long tableId ) { @Override - public List getColumns( Pattern schemaNamePattern, Pattern tableNamePattern, Pattern columnNamePattern ) { - return null; + public List getColumns( @Nullable Pattern tableNamePattern, @Nullable Pattern columnNamePattern ) { + List tables = getTables( tableNamePattern ); + if ( columnNamePattern == null ) { + return tables.stream().flatMap( t -> t.columns.stream() ).collect( Collectors.toList() ); + } + return tables.stream().flatMap( t -> t.columns.stream() ).filter( c -> c.name.matches( columnNamePattern.toRegex() ) ).collect( Collectors.toList() ); } @Override public LogicalColumn getColumn( long columnId ) { - return null; + return columns.get( columnId ); } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/SqlProcessorImpl.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/SqlProcessorImpl.java index 7ac29a4cf1..9da57ca390 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/SqlProcessorImpl.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/SqlProcessorImpl.java @@ -17,8 +17,6 @@ package org.polypheny.db.sql; -import static org.polypheny.db.util.Static.RESOURCE; - import com.google.common.collect.ImmutableList; import java.util.ArrayList; import java.util.Collections; @@ -40,8 +38,6 @@ import org.polypheny.db.catalog.entity.CatalogDefaultValue; import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalTable; -import org.polypheny.db.catalog.exceptions.UnknownSchemaException; -import org.polypheny.db.catalog.exceptions.UnknownTableException; import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.config.RuntimeConfig; @@ -78,7 +74,6 @@ import org.polypheny.db.transaction.Transaction; import org.polypheny.db.transaction.TransactionImpl; import org.polypheny.db.util.Conformance; -import org.polypheny.db.util.CoreUtil; import 
org.polypheny.db.util.DeadlockException; import org.polypheny.db.util.Pair; import org.polypheny.db.util.SourceStringReader; @@ -362,25 +357,19 @@ private void addDefaultValues( Transaction transaction, SqlInsert insert ) { private LogicalTable getCatalogTable( Transaction transaction, SqlIdentifier tableName ) { LogicalTable catalogTable; - try { - long schemaId; - String tableOldName; - if ( tableName.names.size() == 3 ) { // DatabaseName.SchemaName.TableName - schemaId = Catalog.getInstance().getNamespace( tableName.names.get( 1 ) ).id; - tableOldName = tableName.names.get( 2 ); - } else if ( tableName.names.size() == 2 ) { // SchemaName.TableName - schemaId = Catalog.getInstance().getSchema( transaction.getDefaultSchema().databaseId, tableName.names.get( 0 ) ).id; - tableOldName = tableName.names.get( 1 ); - } else { // TableName - schemaId = Catalog.getInstance().getSchema( transaction.getDefaultSchema().databaseId, transaction.getDefaultSchema().name ).id; - tableOldName = tableName.names.get( 0 ); - } - catalogTable = Catalog.getInstance().getTable( schemaId, tableOldName ); - } catch ( UnknownSchemaException e ) { - throw CoreUtil.newContextException( tableName.getPos(), RESOURCE.schemaNotFound( tableName.toString() ) ); - } catch ( UnknownTableException e ) { - throw CoreUtil.newContextException( tableName.getPos(), RESOURCE.tableNotFound( tableName.toString() ) ); + long schemaId; + String tableOldName; + if ( tableName.names.size() == 3 ) { // DatabaseName.SchemaName.TableName + schemaId = Catalog.getInstance().getNamespace( tableName.names.get( 1 ) ).id; + tableOldName = tableName.names.get( 2 ); + } else if ( tableName.names.size() == 2 ) { // SchemaName.TableName + schemaId = Catalog.getInstance().getSchema( transaction.getDefaultSchema().databaseId, tableName.names.get( 0 ) ).id; + tableOldName = tableName.names.get( 1 ); + } else { // TableName + schemaId = Catalog.getInstance().getSchema( transaction.getDefaultSchema().databaseId, transaction.getDefaultSchema().name ).id; + tableOldName = tableName.names.get( 0 ); } + catalogTable = Catalog.getInstance().getTable( schemaId, tableOldName ); return catalogTable; } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlDdl.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlDdl.java index 854e6de21f..9c74aefada 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlDdl.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlDdl.java @@ -29,8 +29,6 @@ import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.exceptions.UnknownColumnException; -import org.polypheny.db.catalog.exceptions.UnknownSchemaException; -import org.polypheny.db.catalog.exceptions.UnknownTableException; import org.polypheny.db.languages.ParserPos; import org.polypheny.db.nodes.Operator; import org.polypheny.db.prepare.Context; @@ -67,26 +65,20 @@ public Operator getOperator() { protected LogicalTable getCatalogTable( Context context, SqlIdentifier tableName ) { LogicalTable catalogTable; - try { - long schemaId; - String tableOldName; - Catalog catalog = Catalog.getInstance(); - if ( tableName.names.size() == 3 ) { // DatabaseName.SchemaName.TableName - schemaId = catalog.getNamespace( tableName.names.get( 1 ) ).id; - tableOldName = tableName.names.get( 2 ); - } else if ( tableName.names.size() == 2 ) { // SchemaName.TableName - schemaId = catalog.getSchema( 
context.getDatabaseId(), tableName.names.get( 0 ) ).id; - tableOldName = tableName.names.get( 1 ); - } else { // TableName - schemaId = catalog.getSchema( context.getDatabaseId(), context.getDefaultSchemaName() ).id; - tableOldName = tableName.names.get( 0 ); - } - catalogTable = catalog.getTable( schemaId, tableOldName ); - } catch ( UnknownSchemaException e ) { - throw CoreUtil.newContextException( tableName.getPos(), RESOURCE.schemaNotFound( tableName.toString() ) ); - } catch ( UnknownTableException e ) { - throw CoreUtil.newContextException( tableName.getPos(), RESOURCE.tableNotFound( tableName.toString() ) ); + long schemaId; + String tableOldName; + Catalog catalog = Catalog.getInstance(); + if ( tableName.names.size() == 3 ) { // DatabaseName.SchemaName.TableName + schemaId = catalog.getNamespace( tableName.names.get( 1 ) ).id; + tableOldName = tableName.names.get( 2 ); + } else if ( tableName.names.size() == 2 ) { // SchemaName.TableName + schemaId = catalog.getSchema( context.getDatabaseId(), tableName.names.get( 0 ) ).id; + tableOldName = tableName.names.get( 1 ); + } else { // TableName + schemaId = catalog.getSchema( context.getDatabaseId(), context.getDefaultSchemaName() ).id; + tableOldName = tableName.names.get( 0 ); } + catalogTable = catalog.getTable( schemaId, tableOldName ); return catalogTable; } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateMaterializedView.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateMaterializedView.java index 8e3961b27a..d81947f6f0 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateMaterializedView.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateMaterializedView.java @@ -35,7 +35,6 @@ import org.polypheny.db.catalog.exceptions.EntityAlreadyExistsException; import org.polypheny.db.catalog.exceptions.GenericCatalogException; import org.polypheny.db.catalog.exceptions.UnknownColumnException; -import org.polypheny.db.catalog.exceptions.UnknownSchemaException; import org.polypheny.db.catalog.logistic.PlacementType; import org.polypheny.db.config.RuntimeConfig; import org.polypheny.db.ddl.DdlManager; @@ -120,19 +119,15 @@ public void execute( Context context, Statement statement, QueryParameters param MaterializedViewManager.getInstance().isCreatingMaterialized = true; - try { - if ( name.names.size() == 3 ) { // DatabaseName.SchemaName.TableName - schemaId = catalog.getNamespace( name.names.get( 1 ) ).id; - viewName = name.names.get( 2 ); - } else if ( name.names.size() == 2 ) { // SchemaName.TableName - schemaId = catalog.getSchema( context.getDatabaseId(), name.names.get( 0 ) ).id; - viewName = name.names.get( 1 ); - } else { // TableName - schemaId = catalog.getSchema( context.getDatabaseId(), context.getDefaultSchemaName() ).id; - viewName = name.names.get( 0 ); - } - } catch ( UnknownSchemaException e ) { - throw CoreUtil.newContextException( name.getPos(), RESOURCE.schemaNotFound( name.toString() ) ); + if ( name.names.size() == 3 ) { // DatabaseName.SchemaName.TableName + schemaId = catalog.getNamespace( name.names.get( 1 ) ).id; + viewName = name.names.get( 2 ); + } else if ( name.names.size() == 2 ) { // SchemaName.TableName + schemaId = catalog.getSchema( context.getDatabaseId(), name.names.get( 0 ) ).id; + viewName = name.names.get( 1 ); + } else { // TableName + schemaId = catalog.getSchema( context.getDatabaseId(), context.getDefaultSchemaName() ).id; + viewName = 
name.names.get( 0 ); } List stores; diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateTable.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateTable.java index a999b0727f..56a92ca84a 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateTable.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateTable.java @@ -208,20 +208,16 @@ public void execute( Context context, Statement statement, QueryParameters param String tableName; long schemaId; - try { - // Cannot use getLogicalTable() here since table does not yet exist - if ( name.names.size() == 3 ) { // DatabaseName.SchemaName.TableName - schemaId = catalog.getNamespace( name.names.get( 1 ) ).id; - tableName = name.names.get( 2 ); - } else if ( name.names.size() == 2 ) { // SchemaName.TableName - schemaId = catalog.getSchema( context.getDatabaseId(), name.names.get( 0 ) ).id; - tableName = name.names.get( 1 ); - } else { // TableName - schemaId = catalog.getSchema( context.getDatabaseId(), context.getDefaultSchemaName() ).id; - tableName = name.names.get( 0 ); - } - } catch ( UnknownSchemaException e ) { - throw CoreUtil.newContextException( name.getPos(), RESOURCE.schemaNotFound( name.toString() ) ); + // Cannot use getLogicalTable() here since table does not yet exist + if ( name.names.size() == 3 ) { // DatabaseName.SchemaName.TableName + schemaId = catalog.getNamespace( name.names.get( 1 ) ).id; + tableName = name.names.get( 2 ); + } else if ( name.names.size() == 2 ) { // SchemaName.TableName + schemaId = catalog.getSchema( context.getDatabaseId(), name.names.get( 0 ) ).id; + tableName = name.names.get( 1 ); + } else { // TableName + schemaId = catalog.getSchema( context.getDatabaseId(), context.getDefaultSchemaName() ).id; + tableName = name.names.get( 0 ); } List stores = store != null ? 
ImmutableList.of( getDataStoreInstance( store ) ) : null; diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateView.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateView.java index 9116684ef4..b4c6de00c3 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateView.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateView.java @@ -32,7 +32,6 @@ import org.polypheny.db.catalog.exceptions.EntityAlreadyExistsException; import org.polypheny.db.catalog.exceptions.GenericCatalogException; import org.polypheny.db.catalog.exceptions.UnknownColumnException; -import org.polypheny.db.catalog.exceptions.UnknownSchemaException; import org.polypheny.db.catalog.logistic.PlacementType; import org.polypheny.db.config.RuntimeConfig; import org.polypheny.db.ddl.DdlManager; @@ -103,19 +102,15 @@ public void execute( Context context, Statement statement, QueryParameters param String viewName; long schemaId; - try { - if ( name.names.size() == 3 ) { // DatabaseName.SchemaName.TableName - schemaId = catalog.getNamespace( name.names.get( 1 ) ).id; - viewName = name.names.get( 2 ); - } else if ( name.names.size() == 2 ) { // SchemaName.TableName - schemaId = catalog.getSchema( context.getDatabaseId(), name.names.get( 0 ) ).id; - viewName = name.names.get( 1 ); - } else { // TableName - schemaId = catalog.getSchema( context.getDatabaseId(), context.getDefaultSchemaName() ).id; - viewName = name.names.get( 0 ); - } - } catch ( UnknownSchemaException e ) { - throw CoreUtil.newContextException( name.getPos(), RESOURCE.schemaNotFound( name.toString() ) ); + if ( name.names.size() == 3 ) { // DatabaseName.SchemaName.TableName + schemaId = catalog.getNamespace( name.names.get( 1 ) ).id; + viewName = name.names.get( 2 ); + } else if ( name.names.size() == 2 ) { // SchemaName.TableName + schemaId = catalog.getSchema( context.getDatabaseId(), name.names.get( 0 ) ).id; + viewName = name.names.get( 1 ); + } else { // TableName + schemaId = catalog.getSchema( context.getDatabaseId(), context.getDefaultSchemaName() ).id; + viewName = name.names.get( 0 ); } PlacementType placementType = PlacementType.AUTOMATIC; diff --git a/webui/src/main/java/org/polypheny/db/webui/Crud.java b/webui/src/main/java/org/polypheny/db/webui/Crud.java index 6be4cf825f..bcbda3c343 100644 --- a/webui/src/main/java/org/polypheny/db/webui/Crud.java +++ b/webui/src/main/java/org/polypheny/db/webui/Crud.java @@ -111,13 +111,13 @@ import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.MaterializedCriteria; import org.polypheny.db.catalog.entity.MaterializedCriteria.CriteriaType; +import org.polypheny.db.catalog.entity.allocation.AllocationTable; import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.exceptions.ColumnAlreadyExistsException; import org.polypheny.db.catalog.exceptions.EntityAlreadyExistsException; import org.polypheny.db.catalog.exceptions.GenericCatalogException; import org.polypheny.db.catalog.exceptions.UnknownColumnException; -import org.polypheny.db.catalog.exceptions.UnknownDatabaseException; import org.polypheny.db.catalog.exceptions.UnknownPartitionTypeException; import org.polypheny.db.catalog.exceptions.UnknownQueryInterfaceException; import org.polypheny.db.catalog.exceptions.UnknownSchemaException; @@ -302,7 +302,7 @@ Result getTable( final UIRequest 
request ) { // determine if it is a view or a table LogicalTable catalogTable; try { - catalogTable = catalog.getTable( t[0], t[1] ); + catalogTable = catalog.getLogicalRel( catalog.getNamespace( t[0] ).id ).getTable( t[1] ); result.setNamespaceType( catalogTable.getNamespaceType() ); if ( catalogTable.modifiable ) { result.setType( ResultType.TABLE ); @@ -312,20 +312,18 @@ Result getTable( final UIRequest request ) { } catch ( UnknownTableException e ) { log.error( "Caught exception", e ); return result.setError( "Could not retrieve type of Result (table/view)." ); - } catch ( UnknownSchemaException e ) { - throw new RuntimeException( e ); } //get headers with default values ArrayList cols = new ArrayList<>(); ArrayList primaryColumns; if ( catalogTable.primaryKey != null ) { - CatalogPrimaryKey primaryKey = catalog.getPrimaryKey( catalogTable.primaryKey ); + CatalogPrimaryKey primaryKey = catalog.getLogicalRel( catalogTable.namespaceId ).getPrimaryKey( catalogTable.primaryKey ); primaryColumns = new ArrayList<>( primaryKey.getColumnNames() ); } else { primaryColumns = new ArrayList<>(); } - for ( LogicalColumn logicalColumn : catalog.getColumns( catalogTable.id ) ) { + for ( LogicalColumn logicalColumn : catalog.getLogicalRel( catalogTable.namespaceId ).getColumns( catalogTable.id ) ) { String defaultValue = logicalColumn.defaultValue == null ? null : logicalColumn.defaultValue.value; String collectionsType = logicalColumn.collectionsType == null ? "" : logicalColumn.collectionsType.getName(); cols.add( @@ -376,7 +374,7 @@ void getSchemaTree( final Context ctx ) { ctx.json( new ArrayList<>() ); } - List schemas = catalog.getNamespaces( databaseId, null ); + List schemas = catalog.getNamespaces( null ); // remove unwanted namespaces schemas = schemas.stream().filter( s -> request.dataModels.contains( s.namespaceType ) ).collect( Collectors.toList() ); for ( LogicalNamespace schema : schemas ) { @@ -386,7 +384,7 @@ void getSchemaTree( final Context ctx ) { ArrayList tableTree = new ArrayList<>(); ArrayList viewTree = new ArrayList<>(); ArrayList collectionTree = new ArrayList<>(); - List tables = catalog.getTables( schema.id, null ); + List tables = catalog.getLogicalRel( schema.id ).getTables( null ); for ( LogicalTable table : tables ) { String icon = "fa fa-table"; if ( table.entityType == EntityType.SOURCE ) { @@ -400,7 +398,7 @@ void getSchemaTree( final Context ctx ) { SidebarElement tableElement = new SidebarElement( schema.name + "." + table.name, table.name, schema.namespaceType, request.routerLinkRoot, icon ); if ( request.depth > 2 ) { - List columns = catalog.getColumns( table.id ); + List columns = catalog.getLogicalRel( table.namespaceId ).getColumns( table.id ); for ( LogicalColumn column : columns ) { tableElement.addChild( new SidebarElement( schema.name + "." + table.name + "." 
+ column.name, column.name, schema.namespaceType, request.routerLinkRoot, icon ).setCssClass( "sidebarColumn" ) ); } @@ -473,7 +471,7 @@ void getTables( final Context ctx ) { } } - List tables = catalog.getTables( new org.polypheny.db.catalog.logistic.Pattern( requestedSchema ), null ); + List tables = catalog.getLogicalRel( schemaId ).getTables( null ); ArrayList result = new ArrayList<>(); for ( LogicalTable t : tables ) { result.add( new DbTable( t.name, t.getNamespaceName(), t.modifiable, t.entityType ) ); @@ -659,7 +657,7 @@ void insertRow( final Context ctx ) { StringJoiner columns = new StringJoiner( ",", "(", ")" ); StringJoiner values = new StringJoiner( ",", "(", ")" ); - List logicalColumns = catalog.getColumns( new org.polypheny.db.catalog.logistic.Pattern( split[0] ), new org.polypheny.db.catalog.logistic.Pattern( split[1] ), null ); + List logicalColumns = catalog.getLogicalRel( catalog.getLogicalEntity( tableId ).namespaceId ).getColumns( new org.polypheny.db.catalog.logistic.Pattern( split[1] ), null ); try { int i = 0; for ( LogicalColumn logicalColumn : logicalColumns ) { @@ -950,24 +948,18 @@ private String computeWherePK( final String tableName, final String columnName, StringJoiner joiner = new StringJoiner( " AND ", "", "" ); Map catalogColumns = getCatalogColumns( tableName, columnName ); LogicalTable catalogTable; - try { - catalogTable = catalog.getTable( tableName, columnName ); - CatalogPrimaryKey pk = catalog.getPrimaryKey( catalogTable.primaryKey ); - for ( long colId : pk.columnIds ) { - String colName = catalog.getColumn( colId ).name; - String condition; - if ( filter.containsKey( colName ) ) { - String val = filter.get( colName ); - LogicalColumn col = catalogColumns.get( colName ); - condition = uiValueToSql( val, col.type, col.collectionsType ); - condition = String.format( "\"%s\" = %s", colName, condition ); - joiner.add( condition ); - } + catalogTable = catalog.getLogicalEntity( tableName ).unwrap( LogicalTable.class ); + CatalogPrimaryKey pk = catalog.getLogicalRel( catalogTable.namespaceId ).getPrimaryKey( catalogTable.primaryKey ); + for ( long colId : pk.columnIds ) { + String colName = catalog.getLogicalRel( catalogTable.namespaceId ).getColumn( colId ).name; + String condition; + if ( filter.containsKey( colName ) ) { + String val = filter.get( colName ); + LogicalColumn col = catalogColumns.get( colName ); + condition = uiValueToSql( val, col.type, col.collectionsType ); + condition = String.format( "\"%s\" = %s", colName, condition ); + joiner.add( condition ); } - } catch ( UnknownTableException e ) { - throw new RuntimeException( "Error while deriving PK WHERE condition", e ); - } catch ( UnknownSchemaException e ) { - throw new RuntimeException( e ); } return " WHERE " + joiner.toString(); } @@ -1029,7 +1021,9 @@ void updateRow( final Context ctx ) throws ServletException, IOException { Statement statement = transaction.createStatement(); StringJoiner setStatements = new StringJoiner( ",", "", "" ); - List logicalColumns = catalog.getColumns( new org.polypheny.db.catalog.logistic.Pattern( split[0] ), new org.polypheny.db.catalog.logistic.Pattern( split[1] ), null ); + LogicalNamespace namespace = catalog.getNamespace( split[0] ); + + List logicalColumns = catalog.getLogicalRel( namespace.id ).getColumns( new org.polypheny.db.catalog.logistic.Pattern( split[1] ), null ); int i = 0; for ( LogicalColumn logicalColumn : logicalColumns ) { @@ -1126,15 +1120,16 @@ void getColumns( final Context ctx ) { ArrayList cols = new ArrayList<>(); try { - 
LogicalTable catalogTable = catalog.getTable( t[0], t[1] ); + LogicalNamespace namespace = catalog.getNamespace( t[0] ); + LogicalTable catalogTable = catalog.getLogicalRel( namespace.id ).getTable( t[1] ); ArrayList primaryColumns; if ( catalogTable.primaryKey != null ) { - CatalogPrimaryKey primaryKey = catalog.getPrimaryKey( catalogTable.primaryKey ); + CatalogPrimaryKey primaryKey = catalog.getLogicalRel( namespace.id ).getPrimaryKey( catalogTable.primaryKey ); primaryColumns = new ArrayList<>( primaryKey.getColumnNames() ); } else { primaryColumns = new ArrayList<>(); } - for ( LogicalColumn logicalColumn : catalog.getColumns( catalogTable.id ) ) { + for ( LogicalColumn logicalColumn : catalog.getLogicalRel( namespace.id ).getColumns( catalogTable.id ) ) { String defaultValue = logicalColumn.defaultValue == null ? null : logicalColumn.defaultValue.value; String collectionsType = logicalColumn.collectionsType == null ? "" : logicalColumn.collectionsType.getName(); cols.add( @@ -1162,25 +1157,23 @@ void getColumns( final Context ctx ) { log.error( "Caught exception while getting a column", e ); ctx.status( 400 ).json( new Result( e ) ); return; - } catch ( UnknownSchemaException e ) { - throw new RuntimeException( e ); } ctx.json( result ); } - void getDataSourceColumns( final Context ctx ) throws UnknownDatabaseException, UnknownTableException, UnknownSchemaException { + void getDataSourceColumns( final Context ctx ) throws UnknownTableException, UnknownSchemaException { UIRequest request = ctx.bodyAsClass( UIRequest.class ); - LogicalTable catalogTable = catalog.getTable( request.getSchemaName(), request.getTableName() ); + LogicalNamespace namespace = catalog.getNamespace( request.getSchemaName() ); + LogicalTable catalogTable = catalog.getLogicalRel( namespace.id ).getTable( request.getTableName() ); if ( catalogTable.entityType == EntityType.VIEW ) { ImmutableMap> underlyingTable = ((CatalogView) catalogTable).getUnderlyingTables(); List columns = new ArrayList<>(); - for ( Long columnIds : catalogTable.fieldIds ) { - LogicalColumn col = catalog.getColumn( columnIds ); + for ( LogicalColumn col : catalogTable.columns ) { columns.add( new DbColumn( col.name, col.type.getName(), @@ -1197,16 +1190,17 @@ void getDataSourceColumns( final Context ctx ) throws UnknownDatabaseException, } ctx.json( new Result( columns.toArray( new DbColumn[0] ), null ).setType( ResultType.VIEW ) ); } else { - if ( catalog.getColumnPlacement( catalogTable.fieldIds.get( 0 ) ).size() != 1 ) { + List allocs = catalog.getAllocRel( catalogTable.namespaceId ).getAllocationsFromLogical( catalogTable.id ); + if ( catalog.getAllocRel( catalogTable.namespaceId ).getAllocationsFromLogical( catalogTable.id ).size() != 1 ) { throw new RuntimeException( "The table has an unexpected number of placements!" 
); } - int adapterId = catalog.getColumnPlacement( catalogTable.fieldIds.get( 0 ) ).get( 0 ).adapterId; - CatalogPrimaryKey primaryKey = catalog.getPrimaryKey( catalogTable.primaryKey ); + long adapterId = allocs.get( 0 ).adapterId; + CatalogPrimaryKey primaryKey = catalog.getLogicalRel( catalogTable.namespaceId ).getPrimaryKey( catalogTable.primaryKey ); List pkColumnNames = primaryKey.getColumnNames(); List columns = new ArrayList<>(); - for ( CatalogColumnPlacement ccp : catalog.getColumnPlacementsOnAdapterPerTable( adapterId, catalogTable.id ) ) { - LogicalColumn col = catalog.getColumn( ccp.columnId ); + for ( CatalogColumnPlacement ccp : catalog.getAllocRel( catalogTable.namespaceId ).getColumnPlacementsOnAdapterPerTable( adapterId, catalogTable.id ) ) { + LogicalColumn col = catalog.getLogicalRel( namespace.id ).getColumn( ccp.columnId ); columns.add( new DbColumn( col.name, col.type.getName(), @@ -1789,7 +1783,7 @@ void getIndexes( final Context ctx ) { } // Get functional indexes - for ( Integer storeId : catalogTable.dataPlacements ) { + for ( long storeId : catalogTable.dataPlacements ) { Adapter adapter = AdapterManager.getInstance().getAdapter( storeId ); DataStore store; if ( adapter instanceof DataStore ) { From b8761a3c59762e3479ff59ff2f91955c92ec3119 Mon Sep 17 00:00:00 2001 From: datomo Date: Mon, 6 Mar 2023 21:30:24 +0100 Subject: [PATCH 035/436] fixing startup of new builder methods --- .../org/polypheny/db/adapter/DataStore.java | 3 +- .../db/adapter/java/ReflectiveSchema.java | 4 +- .../db/algebra/AlgCollationImpl.java | 2 +- .../common/LogicalConstraintEnforcer.java | 13 +-- .../org/polypheny/db/catalog/Adapter.java | 2 +- .../org/polypheny/db/catalog/Catalog.java | 2 + .../catalogs/AllocationRelationalCatalog.java | 4 +- .../db/catalog/catalogs/LogicalCatalog.java | 2 + .../db/catalog/catalogs/PhysicalCatalog.java | 5 ++ .../entity/CatalogColumnPlacement.java | 19 +++-- .../catalog/entity/CatalogDataPlacement.java | 14 ++-- .../db/catalog/entity/CatalogEntity.java | 7 +- .../db/catalog/entity/CatalogForeignKey.java | 14 ++-- .../db/catalog/entity/CatalogKey.java | 9 +- .../entity/CatalogMaterializedView.java | 15 ++-- .../catalog/entity/CatalogPartitionGroup.java | 2 +- .../db/catalog/entity/CatalogPrimaryKey.java | 7 +- .../db/catalog/entity/CatalogView.java | 24 ++++-- .../allocation/AllocationCollection.java | 7 +- .../entity/allocation/AllocationGraph.java | 4 +- .../entity/allocation/AllocationTable.java | 15 +++- .../catalog/entity/logical/LogicalEntity.java | 17 +++- .../catalog/entity/logical/LogicalTable.java | 42 ++++++---- .../exception/ColumnNotExistsException.java | 4 +- .../db/iface/QueryInterfaceManager.java | 2 +- .../processing/LogicalAlgAnalyzeShuttle.java | 11 +-- .../db/schema/LogicalCollection.java | 4 +- .../polypheny/db/schema/LogicalEntity.java | 3 +- .../polypheny/db/schema/LogicalRelView.java | 3 +- .../db/schema/PolyphenyDbSchema.java | 28 ++++--- .../org/polypheny/db/tools/AlgBuilder.java | 2 +- .../polypheny/db/tools/RoutedAlgBuilder.java | 9 +- .../org/polypheny/db/ddl/DdlManagerImpl.java | 31 ++----- .../db/partition/FrequencyMapImpl.java | 2 - .../CreateAllPlacementStrategy.java | 2 +- .../strategies/CreatePlacementStrategy.java | 2 +- .../CreateSinglePlacementStrategy.java | 2 +- .../polypheny/db/catalog/CatalogPlugin.java | 20 +---- .../org/polypheny/db/catalog/NCatalog.java | 12 --- .../org/polypheny/db/catalog/PolyCatalog.java | 24 +++++- .../{ConnectedMap.java => PusherMap.java} | 8 +- .../allocation/PolyAllocRelCatalog.java | 11 
++- .../db/catalog/logical/DocumentCatalog.java | 12 ++- .../db/catalog/logical/GraphCatalog.java | 6 ++ .../db/catalog/logical/RelationalCatalog.java | 16 ++-- .../catalog/physical/PolyPhysicalCatalog.java | 82 +++++++++++++++++++ .../db/catalog/snapshot/FullSnapshot.java | 7 ++ .../db/sql/language/ddl/SqlCreateTable.java | 12 +-- settings.gradle | 2 +- 49 files changed, 365 insertions(+), 185 deletions(-) rename plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/{ConnectedMap.java => PusherMap.java} (90%) create mode 100644 plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/physical/PolyPhysicalCatalog.java diff --git a/core/src/main/java/org/polypheny/db/adapter/DataStore.java b/core/src/main/java/org/polypheny/db/adapter/DataStore.java index 38d26bad57..5248b194fe 100644 --- a/core/src/main/java/org/polypheny/db/adapter/DataStore.java +++ b/core/src/main/java/org/polypheny/db/adapter/DataStore.java @@ -222,7 +222,8 @@ public static class FunctionalIndexInfo { public List getColumnNames() { List columnNames = new ArrayList<>( columnIds.size() ); for ( long columnId : columnIds ) { - columnNames.add( Catalog.getInstance().getLogicalRel( names ).getColumn( columnId ).name ); + // columnNames.add( Catalog.getInstance().getLogicalRel( columnNames ).getColumn( columnId ).name ); + // todo dl } return columnNames; } diff --git a/core/src/main/java/org/polypheny/db/adapter/java/ReflectiveSchema.java b/core/src/main/java/org/polypheny/db/adapter/java/ReflectiveSchema.java index 65135baaf8..bc42e9740d 100644 --- a/core/src/main/java/org/polypheny/db/adapter/java/ReflectiveSchema.java +++ b/core/src/main/java/org/polypheny/db/adapter/java/ReflectiveSchema.java @@ -51,6 +51,7 @@ import org.apache.calcite.linq4j.tree.Expressions; import org.apache.calcite.linq4j.tree.Primitive; import org.apache.calcite.linq4j.tree.Types; +import org.apache.commons.lang.NotImplementedException; import org.polypheny.db.adapter.DataContext; import org.polypheny.db.algebra.AlgReferentialConstraint; import org.polypheny.db.catalog.entity.CatalogEntity; @@ -247,9 +248,10 @@ private static class ReflectiveEntity extends LogicalTable implements ScannableE ReflectiveEntity( Type elementType, Enumerable enumerable, Long id, Long partitionId, Long adapterId ) { //super( elementType, id, partitionId, adapterId ); - super( id, "test", null, -1, -1, -1, EntityType.ENTITY, null, ImmutableList.of(), false, null ); + super( id, "test", List.of(), -1, "", EntityType.ENTITY, null, ImmutableList.of(), false, null ); this.elementType = elementType; this.enumerable = enumerable; + throw new NotImplementedException(); } diff --git a/core/src/main/java/org/polypheny/db/algebra/AlgCollationImpl.java b/core/src/main/java/org/polypheny/db/algebra/AlgCollationImpl.java index cac772bbd1..adf424d0f8 100644 --- a/core/src/main/java/org/polypheny/db/algebra/AlgCollationImpl.java +++ b/core/src/main/java/org/polypheny/db/algebra/AlgCollationImpl.java @@ -64,7 +64,7 @@ protected AlgCollationImpl( ImmutableList fieldCollations ) { @Override - public AlgTraitDef getTraitDef() { + public AlgTraitDef getTraitDef() { return AlgCollationTraitDef.INSTANCE; } diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalConstraintEnforcer.java b/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalConstraintEnforcer.java index 7446fbfca1..0ffafa276a 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalConstraintEnforcer.java +++ 
b/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalConstraintEnforcer.java @@ -94,23 +94,25 @@ private static EnforcementInformation getControl( AlgNode node, Statement statem final RexBuilder rexBuilder = modify.getCluster().getRexBuilder(); EnforcementTime enforcementTime = EnforcementTime.ON_QUERY; - final List constraints = new ArrayList<>( Catalog.getInstance().getConstraints( table.id ) ) + final List constraints = new ArrayList<>( Catalog.getInstance().getLogicalRel( table.namespaceId ).getConstraints( table.id ) ) .stream() .filter( f -> f.key.enforcementTime == enforcementTime ) .collect( Collectors.toCollection( ArrayList::new ) ); final List foreignKeys = Catalog.getInstance() + .getLogicalRel( table.namespaceId ) .getForeignKeys( table.id ) .stream() .filter( f -> f.enforcementTime == enforcementTime ) .collect( Collectors.toList() ); final List exportedKeys = Catalog.getInstance() + .getLogicalRel( table.namespaceId ) .getExportedKeys( table.id ) .stream() .filter( f -> f.enforcementTime == enforcementTime ) .collect( Collectors.toList() ); // Turn primary key into an artificial unique constraint - CatalogPrimaryKey pk = Catalog.getInstance().getPrimaryKey( table.primaryKey ); + CatalogPrimaryKey pk = Catalog.getInstance().getLogicalRel( table.namespaceId ).getPrimaryKey( table.primaryKey ); if ( pk.enforcementTime == enforcementTime ) { final CatalogConstraint pkc = new CatalogConstraint( 0L, pk.id, ConstraintType.UNIQUE, "PRIMARY KEY", pk ); constraints.add( pkc ); @@ -222,21 +224,22 @@ public static EnforcementInformation getControl( LogicalTable table, Statement s final RexBuilder rexBuilder = builder.getRexBuilder(); final List constraints = Catalog.getInstance() + .getLogicalRel( table.namespaceId ) .getConstraints( table.id ) .stream() .filter( c -> c.key.enforcementTime == enforcementTime ) .collect( Collectors.toCollection( ArrayList::new ) ); - final List foreignKeys = Catalog.getInstance().getForeignKeys( table.id ) + final List foreignKeys = Catalog.getInstance().getLogicalRel( table.namespaceId ).getForeignKeys( table.id ) .stream() .filter( c -> c.enforcementTime == enforcementTime ) .collect( Collectors.toCollection( ArrayList::new ) ); - final List exportedKeys = Catalog.getInstance().getExportedKeys( table.id ) + final List exportedKeys = Catalog.getInstance().getLogicalRel( table.namespaceId ).getExportedKeys( table.id ) .stream() .filter( c -> c.enforcementTime == enforcementTime ) .collect( Collectors.toCollection( ArrayList::new ) ); // Turn primary key into an artificial unique constraint - CatalogPrimaryKey pk = Catalog.getInstance().getPrimaryKey( table.primaryKey ); + CatalogPrimaryKey pk = Catalog.getInstance().getLogicalRel( table.namespaceId ).getPrimaryKey( table.primaryKey ); if ( pk.enforcementTime == enforcementTime ) { final CatalogConstraint pkc = new CatalogConstraint( 0L, pk.id, ConstraintType.UNIQUE, "PRIMARY KEY", pk ); constraints.add( pkc ); diff --git a/core/src/main/java/org/polypheny/db/catalog/Adapter.java b/core/src/main/java/org/polypheny/db/catalog/Adapter.java index f1b0413aa8..301fcbd537 100644 --- a/core/src/main/java/org/polypheny/db/catalog/Adapter.java +++ b/core/src/main/java/org/polypheny/db/catalog/Adapter.java @@ -75,7 +75,7 @@ public static void addAdapter( Class clazz, String adapterName, Map clazz, String adapterName ) { - if ( Catalog.getInstance().getAdapters().stream().anyMatch( a -> a.getAdapterTypeName().equals( adapterName ) ) ) { + if ( Catalog.getInstance().getAdapters().stream().anyMatch( a 
-> a.adapterName.equals( adapterName ) ) ) { throw new RuntimeException( "Adapter is still deployed!" ); } REGISTER.remove( getKey( clazz, adapterName ) ); diff --git a/core/src/main/java/org/polypheny/db/catalog/Catalog.java b/core/src/main/java/org/polypheny/db/catalog/Catalog.java index 49749125c2..072c6e44b6 100644 --- a/core/src/main/java/org/polypheny/db/catalog/Catalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/Catalog.java @@ -107,6 +107,8 @@ public static Catalog getInstance() { // move to Snapshot public abstract LogicalEntity getLogicalEntity( String entityName ); + public abstract LogicalEntity getLogicalEntity( long id ); + public abstract PhysicalCatalog getPhysical( long namespaceId ); public abstract PhysicalEntity getPhysicalEntity( long id ); diff --git a/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationRelationalCatalog.java b/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationRelationalCatalog.java index a290ffa2f6..4eec0f9fc7 100644 --- a/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationRelationalCatalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationRelationalCatalog.java @@ -476,10 +476,8 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param tableId The table for which a partition placement shall be created * @param partitionId The id of a specific partition that shall create a new placement * @param placementType The type of placement - * @param physicalSchemaName The schema name on the adapter - * @param physicalTableName The table name on the adapter */ - void addPartitionPlacement( long namespaceId, long adapterId, long tableId, long partitionId, PlacementType placementType, String physicalSchemaName, String physicalTableName, DataPlacementRole role ); + void addPartitionPlacement( long namespaceId, long adapterId, long tableId, long partitionId, PlacementType placementType, DataPlacementRole role ); /** * Adds a new DataPlacement for a given table on a specific store diff --git a/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalCatalog.java b/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalCatalog.java index 2eee28e9a6..77c70c358e 100644 --- a/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalCatalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalCatalog.java @@ -44,4 +44,6 @@ public interface LogicalCatalog { LogicalEntity getEntity( String name ); + LogicalEntity getEntity( long id ); + } diff --git a/core/src/main/java/org/polypheny/db/catalog/catalogs/PhysicalCatalog.java b/core/src/main/java/org/polypheny/db/catalog/catalogs/PhysicalCatalog.java index 831471c9ff..c1fadc1536 100644 --- a/core/src/main/java/org/polypheny/db/catalog/catalogs/PhysicalCatalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/catalogs/PhysicalCatalog.java @@ -17,6 +17,7 @@ package org.polypheny.db.catalog.catalogs; import java.util.List; +import org.polypheny.db.catalog.entity.logical.LogicalEntity; import org.polypheny.db.catalog.entity.physical.PhysicalEntity; public interface PhysicalCatalog { @@ -25,4 +26,8 @@ public interface PhysicalCatalog { PhysicalEntity getPhysicalEntity( long id ); + void addPhysicalEntity( PhysicalEntity physicalEntity ); + +
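A minimal sketch of the lookup chain these new PhysicalCatalog methods are meant to close, assuming a running Catalog instance; the method name resolvePhysical and the tableId parameter are illustrative, not part of this patch:

    // Sketch: logical id -> namespace-scoped physical catalog -> physical entity.
    static PhysicalEntity resolvePhysical( long tableId ) {
        Catalog catalog = Catalog.getInstance();
        LogicalTable logical = catalog.getLogicalEntity( tableId ).unwrap( LogicalTable.class );
        return catalog.getPhysical( logical.namespaceId ).getFromLogical( logical.id );
    }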
      PhysicalEntity getFromLogical( long id ); + } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogColumnPlacement.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogColumnPlacement.java index ad6d322d23..0ef2a838e9 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogColumnPlacement.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogColumnPlacement.java @@ -16,10 +16,12 @@ package org.polypheny.db.catalog.entity; +import io.activej.serializer.annotations.Serialize; import java.io.Serializable; import lombok.EqualsAndHashCode; import lombok.NonNull; import lombok.SneakyThrows; +import org.apache.commons.lang.NotImplementedException; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.logistic.PlacementType; @@ -29,16 +31,23 @@ public class CatalogColumnPlacement implements CatalogObject { private static final long serialVersionUID = -1909757888176291095L; + @Serialize public final long namespaceId; + @Serialize public final long tableId; + @Serialize public final long columnId; + @Serialize public final long adapterId; + @Serialize public final String adapterUniqueName; + @Serialize public final PlacementType placementType; - + @Serialize public final long physicalPosition; - + @Serialize public final String physicalSchemaName; + @Serialize public final String physicalColumnName; @@ -66,19 +75,19 @@ public CatalogColumnPlacement( @SneakyThrows public String getLogicalSchemaName() { - return Catalog.getInstance().getTable( tableId ).getNamespaceName(); + throw new NotImplementedException(); } @SneakyThrows public String getLogicalTableName() { - return Catalog.getInstance().getTable( tableId ).name; + return Catalog.getInstance().getLogicalRel( namespaceId ).getTable( tableId ).name; } @SneakyThrows public String getLogicalColumnName() { - return Catalog.getInstance().getColumn( columnId ).name; + return Catalog.getInstance().getLogicalRel( namespaceId ).getColumn( columnId ).name; } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogDataPlacement.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogDataPlacement.java index de4abb657d..8623a927a1 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogDataPlacement.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogDataPlacement.java @@ -30,6 +30,7 @@ import lombok.Value; import lombok.With; import lombok.experimental.NonFinal; +import org.apache.commons.lang.NotImplementedException; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.logistic.DataPlacementRole; import org.polypheny.db.catalog.logistic.PlacementType; @@ -89,7 +90,7 @@ public CatalogDataPlacement( @SneakyThrows public String getTableName() { - return Catalog.getInstance().getTable( tableId ).name; + throw new RuntimeException( "" ); } @@ -105,12 +106,14 @@ public boolean hasFullPlacement() { public boolean hasColumnFullPlacement() { - return Catalog.getInstance().getTable( this.tableId ).fieldIds.size() == columnPlacementsOnAdapter.size(); + throw new NotImplementedException(); + // return Catalog.getInstance().getTable( this.tableId ).fieldIds.size() == columnPlacementsOnAdapter.size(); } public boolean hasPartitionFullPlacement() { - return Catalog.getInstance().getTable( this.tableId ).partitionProperty.partitionIds.size() == getAllPartitionIds().size(); + throw new NotImplementedException(); + //return Catalog.getInstance().getTable( this.tableId ).partitionProperty.partitionIds.size() == 
getAllPartitionIds().size(); } @@ -131,7 +134,7 @@ public Serializable[] getParameterArray() { private Map> structurizeDataPlacements( @NonNull final List unsortedPartitionIds ) { // Since this shall only be called after initialization of dataPlacement object, // we need to clear the contents of partitionPlacementsOnAdapterByRole - Map> partitionsPerRole = new HashMap<>(); + /*Map> partitionsPerRole = new HashMap<>(); try { Catalog catalog = Catalog.getInstance(); @@ -160,7 +163,8 @@ private Map> structurizeDataPlacements( @ } // Finally, overwrite entire partitionPlacementsOnAdapterByRole at Once - return partitionsPerRole; + return partitionsPerRole;*/ + throw new NotImplementedException(); } } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogEntity.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogEntity.java index 22473d6191..02afdf2729 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogEntity.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogEntity.java @@ -16,6 +16,7 @@ package org.polypheny.db.catalog.entity; +import io.activej.serializer.annotations.Serialize; import java.io.Serializable; import java.util.List; import lombok.Value; @@ -39,11 +40,15 @@ @Value @NonFinal public abstract class CatalogEntity implements CatalogObject, Wrapper, Serializable, CatalogType, Expressible { - + @Serialize public long id; + @Serialize public EntityType entityType; + @Serialize public NamespaceType namespaceType; + @Serialize public String name; + @Serialize public long namespaceId; diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogForeignKey.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogForeignKey.java index 2e13345823..cf9f684eb1 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogForeignKey.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogForeignKey.java @@ -25,6 +25,7 @@ import lombok.NonNull; import lombok.RequiredArgsConstructor; import lombok.SneakyThrows; +import org.apache.commons.lang.NotImplementedException; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.logistic.ForeignKeyOption; @@ -72,18 +73,19 @@ public String getReferencedKeySchemaName() { @SneakyThrows public String getReferencedKeyTableName() { - return Catalog.getInstance().getTable( referencedKeyTableId ).name; + return Catalog.getInstance().getLogicalRel( referencedKeySchemaId ).getTable( referencedKeyTableId ).name; } @SneakyThrows public List getReferencedKeyColumnNames() { - Catalog catalog = Catalog.getInstance(); + /*Catalog catalog = Catalog.getInstance(); List columnNames = new LinkedList<>(); for ( long columnId : referencedKeyColumnIds ) { columnNames.add( catalog.getColumn( columnId ).name ); } - return columnNames; + return columnNames;*/ + throw new NotImplementedException(); } @@ -134,9 +136,11 @@ public static class CatalogForeignKeyColumn implements CatalogObject { @SneakyThrows @Override public Serializable[] getParameterArray() { - return Catalog.getInstance() + /*return Catalog.getInstance() + .getLogicalRel( ) .getForeignKey( tableId, foreignKeyName ) - .getParameterArray( referencedKeyColumnName, foreignKeyColumnName, keySeq ); + .getParameterArray( referencedKeyColumnName, foreignKeyColumnName, keySeq );*/ + throw new NotImplementedException(); } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogKey.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogKey.java index e421a72ed1..1a5cc2efef 100644 
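The CatalogKey helpers below are stubbed with NotImplementedException in this patch. A possible namespace-scoped re-implementation, sketched only under the assumption that getLogicalRel exposes getColumn as it does elsewhere in this patch set (usual java.util.stream imports assumed):

    // Sketch: resolve the key's column names through the namespace-scoped relational catalog.
    public List<String> getColumnNames() {
        var rel = Catalog.getInstance().getLogicalRel( schemaId );
        return columnIds.stream()
                .map( columnId -> rel.getColumn( columnId ).name )
                .collect( Collectors.toList() );
    }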
--- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogKey.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogKey.java @@ -22,6 +22,7 @@ import java.util.List; import lombok.EqualsAndHashCode; import lombok.SneakyThrows; +import org.apache.commons.lang.NotImplementedException; import org.polypheny.db.catalog.Catalog; @@ -59,18 +60,20 @@ public String getSchemaName() { @SneakyThrows public String getTableName() { - return Catalog.getInstance().getTable( tableId ).name; + // return Catalog.getInstance().getTable( tableId ).name; + throw new NotImplementedException(); } @SneakyThrows public List getColumnNames() { - Catalog catalog = Catalog.getInstance(); + /*Catalog catalog = Catalog.getInstance(); List columnNames = new LinkedList<>(); for ( long columnId : columnIds ) { columnNames.add( catalog.getColumn( columnId ).name ); } - return columnNames; + return columnNames;*/ + throw new NotImplementedException(); } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogMaterializedView.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogMaterializedView.java index ea10a0da0f..95ec61473b 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogMaterializedView.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogMaterializedView.java @@ -25,6 +25,7 @@ import org.polypheny.db.algebra.AlgCollation; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.languages.QueryLanguage; import org.polypheny.db.partition.properties.PartitionProperty; @@ -50,13 +51,13 @@ public class CatalogMaterializedView extends CatalogView { public CatalogMaterializedView( long id, String name, - List columnIds, - long schemaId, - int ownerId, + List columns, + long namespaceId, + String namespaceName, EntityType entityType, String query, Long primaryKey, - @NonNull List dataPlacements, + @NonNull List dataPlacements, boolean modifiable, PartitionProperty partitionProperty, AlgCollation algCollation, @@ -69,9 +70,9 @@ public CatalogMaterializedView( super( id, name, - columnIds, - schemaId, - ownerId, + namespaceName, + columns, + namespaceId, entityType, query, primaryKey, diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogPartitionGroup.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogPartitionGroup.java index fc82a2a760..804de5b715 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogPartitionGroup.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogPartitionGroup.java @@ -68,7 +68,7 @@ public CatalogPartitionGroup( @SneakyThrows public String getTableName() { - return Catalog.getInstance().getTable( tableId ).name; + return Catalog.getInstance().getLogicalRel( schemaId ).getTable( tableId ).name; } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogPrimaryKey.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogPrimaryKey.java index 351748037b..e765ed08bc 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogPrimaryKey.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogPrimaryKey.java @@ -23,6 +23,7 @@ import lombok.EqualsAndHashCode; import lombok.NonNull; import lombok.RequiredArgsConstructor; +import org.apache.commons.lang.NotImplementedException; import org.polypheny.db.catalog.Catalog; @@ -35,7 +36,6 @@ public 
CatalogPrimaryKey( @NonNull final CatalogKey catalogKey ) { catalogKey.id, catalogKey.tableId, catalogKey.schemaId, - catalogKey.databaseId, catalogKey.columnIds, EnforcementTime.ON_QUERY ); } @@ -53,7 +53,7 @@ public List getCatalogPrimaryKeyColumns() { public Serializable[] getParameterArray( String columnName, int keySeq ) { - return new Serializable[]{ getDatabaseName(), getSchemaName(), getTableName(), columnName, keySeq, null }; + return new Serializable[]{ getSchemaName(), getTableName(), columnName, keySeq, null }; } @@ -71,7 +71,8 @@ public static class CatalogPrimaryKeyColumn implements CatalogObject { @Override public Serializable[] getParameterArray() { - return Catalog.getInstance().getPrimaryKey( pkId ).getParameterArray( columnName, keySeq ); + throw new NotImplementedException(); + //return Catalog.getInstance().getPrimaryKey( pkId ).getParameterArray( columnName, keySeq ); } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogView.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogView.java index b129c184d8..612acd73cb 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogView.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogView.java @@ -34,6 +34,8 @@ import org.polypheny.db.algebra.SingleAlg; import org.polypheny.db.algebra.logical.relational.LogicalRelViewScan; import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.entity.logical.LogicalCollection; +import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.languages.QueryLanguage; @@ -58,21 +60,31 @@ public class CatalogView extends LogicalTable { public CatalogView( long id, String name, - List columnIds, - long schemaId, - int ownerId, + String namespaceName, + List columns, + long namespaceId, EntityType entityType, String query, Long primaryKey, - List dataPlacements, + List dataPlacements, boolean modifiable, PartitionProperty partitionProperty, AlgCollation algCollation, List connectedViews, Map> underlyingTables, String language ) { - super( id, name, columnIds, schemaId, ownerId, entityType, primaryKey, dataPlacements, - modifiable, partitionProperty, connectedViews ); + super( + id, + name, + columns, + namespaceId, + namespaceName, + entityType, + primaryKey, + dataPlacements, + modifiable, + partitionProperty, + connectedViews ); this.query = query; this.algCollation = algCollation; this.underlyingTables = ImmutableMap.copyOf( underlyingTables.entrySet().stream().collect( Collectors.toMap( Entry::getKey, t -> ImmutableList.copyOf( t.getValue() ) ) ) ); diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationCollection.java b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationCollection.java index 9951bdd5a3..ea6fbb4a8f 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationCollection.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationCollection.java @@ -23,16 +23,17 @@ import org.apache.calcite.linq4j.tree.Expression; import org.apache.calcite.linq4j.tree.Expressions; import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.entity.logical.LogicalCollection; import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.logistic.NamespaceType; @EqualsAndHashCode(callSuper = true) @Value @NonFinal -public class AllocationCollection 
extends AllocationEntity { +public class AllocationCollection extends AllocationEntity { - public AllocationCollection( long id, String name, EntityType type, NamespaceType namespaceType, long adapterId ) { - super( id, name, type, namespaceType, adapterId ); + public AllocationCollection( LogicalCollection collection, long id, String name, EntityType type, NamespaceType namespaceType, long adapterId ) { + super( collection, id, name, type, namespaceType, adapterId ); } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationGraph.java b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationGraph.java index 24c7dbc480..742f636171 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationGraph.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationGraph.java @@ -28,7 +28,7 @@ @EqualsAndHashCode(callSuper = true) @Value @NonFinal -public class AllocationGraph extends AllocationEntity { +public class AllocationGraph extends AllocationEntity { public LogicalGraph logical; @@ -36,7 +36,7 @@ public class AllocationGraph extends AllocationEntity { public AllocationGraph( long id, LogicalGraph graph, long adapterId ) { - super( id, graph.name, graph.entityType, graph.namespaceType, adapterId ); + super( graph, id, graph.name, graph.entityType, graph.namespaceType, adapterId ); this.id = id; this.logical = graph; } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationTable.java b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationTable.java index a8a2efff38..219a226a68 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationTable.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationTable.java @@ -16,6 +16,8 @@ package org.polypheny.db.catalog.entity.allocation; +import io.activej.serializer.annotations.Deserialize; +import io.activej.serializer.annotations.Serialize; import java.io.Serializable; import java.util.ArrayList; import java.util.List; @@ -39,14 +41,23 @@ @Value @With public class AllocationTable extends AllocationEntity { - + @Serialize public List placements; + @Serialize public long adapterId; + @Serialize public LogicalTable logicalTable; + @Serialize public String adapterName; - public AllocationTable( LogicalTable logicalTable, long id, String name, long adapterId, String adapterName, List placements ) { + public AllocationTable( + @Deserialize("logicalTable") LogicalTable logicalTable, + @Deserialize( "id" ) long id, + @Deserialize( "name" ) String name, + @Deserialize( "adapterId" ) long adapterId, + @Deserialize( "adapterName" ) String adapterName, + @Deserialize( "placements" ) List placements ) { super( logicalTable, id, name, EntityType.ENTITY, NamespaceType.RELATIONAL, adapterId ); this.logicalTable = logicalTable; this.adapterId = adapterId; diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalEntity.java b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalEntity.java index 41fb68897a..9acd83d85d 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalEntity.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalEntity.java @@ -16,23 +16,36 @@ package org.polypheny.db.catalog.entity.logical; +import io.activej.serializer.annotations.Deserialize; +import io.activej.serializer.annotations.Serialize; +import io.activej.serializer.annotations.SerializeClass; import 
lombok.EqualsAndHashCode; import lombok.Value; import lombok.experimental.NonFinal; +import lombok.experimental.SuperBuilder; import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.logistic.NamespaceType; +@SuperBuilder(toBuilder = true) @EqualsAndHashCode(callSuper = true) @Value @NonFinal +@SerializeClass(subclasses = { LogicalTable.class }) public abstract class LogicalEntity extends CatalogEntity { - + @Serialize public String namespaceName; + @Serialize public long namespaceId; - protected LogicalEntity( long id, String name, long namespaceId, String namespaceName, EntityType type, NamespaceType namespaceType ) { + protected LogicalEntity( + @Deserialize( "id" ) long id, + @Deserialize( "name" ) String name, + @Deserialize( "namespaceId" ) long namespaceId, + @Deserialize( "namespaceName" ) String namespaceName, + @Deserialize( "type" ) EntityType type, + @Deserialize( "namespaceType" ) NamespaceType namespaceType ) { super( id, name, namespaceId, type, namespaceType ); this.namespaceName = namespaceName; this.namespaceId = namespaceId; diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalTable.java b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalTable.java index a995d87907..a18a5267f8 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalTable.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalTable.java @@ -20,6 +20,7 @@ import com.google.common.collect.ImmutableList; import io.activej.serializer.annotations.Deserialize; import io.activej.serializer.annotations.Serialize; +import io.activej.serializer.annotations.SerializeClass; import java.io.Serializable; import java.util.List; import java.util.stream.Collectors; @@ -40,50 +41,42 @@ import org.polypheny.db.partition.properties.PartitionProperty; import org.polypheny.db.schema.ColumnStrategy; -@Value +//@Value @With @EqualsAndHashCode(callSuper = false) @NonFinal public class LogicalTable extends LogicalEntity implements Comparable { private static final long serialVersionUID = 4653390333258552102L; - @Serialize public long id; @Serialize + public EntityType entityType; + @Serialize public String name; + @Serialize public ImmutableList columns; + @Serialize public long namespaceId; - @Serialize - public EntityType entityType; + @Serialize public Long primaryKey; + @Serialize public boolean modifiable; + @Serialize public PartitionProperty partitionProperty; + @Serialize public ImmutableList dataPlacements; + @Serialize public ImmutableList connectedViews; - public LogicalTable( - final long id, - @NonNull final String name, - final ImmutableList fieldIds, - final long namespaceId, - final String namespaceName, - @NonNull final EntityType type, - final Long primaryKey, - @NonNull final List dataPlacements, - boolean modifiable, - PartitionProperty partitionProperty ) { - this( id, name, fieldIds, namespaceId, namespaceName, type, primaryKey, dataPlacements, modifiable, partitionProperty, ImmutableList.of() ); - } - public LogicalTable( @Deserialize("id") final long id, @@ -117,6 +110,19 @@ public LogicalTable( } } + public LogicalTable( + final long id, + @NonNull final String name, + final List fieldIds, + final long namespaceId, + final String namespaceName, + @NonNull final EntityType type, + final Long primaryKey, + @NonNull final List dataPlacements, + boolean modifiable, + PartitionProperty partitionProperty ) { + this( id, name, fieldIds, 
namespaceId, namespaceName, type, primaryKey, dataPlacements, modifiable, partitionProperty, ImmutableList.of() ); + } public List getColumnNames() { return columns.stream().map( c -> c.name ).collect( Collectors.toList() ); diff --git a/core/src/main/java/org/polypheny/db/ddl/exception/ColumnNotExistsException.java b/core/src/main/java/org/polypheny/db/ddl/exception/ColumnNotExistsException.java index a5201a97f1..6625409480 100644 --- a/core/src/main/java/org/polypheny/db/ddl/exception/ColumnNotExistsException.java +++ b/core/src/main/java/org/polypheny/db/ddl/exception/ColumnNotExistsException.java @@ -16,6 +16,7 @@ package org.polypheny.db.ddl.exception; +import org.apache.commons.lang.NotImplementedException; import org.polypheny.db.catalog.Catalog; public class ColumnNotExistsException extends Exception { @@ -31,8 +32,9 @@ public ColumnNotExistsException( String tableName, String columnName ) { public ColumnNotExistsException( long tableId, String columnName ) { - this.tableName = Catalog.getInstance().getTable( tableId ).name; + //this.tableName = Catalog.getInstance().getTable( tableId ).name; this.columnName = columnName; + throw new NotImplementedException(); } } diff --git a/core/src/main/java/org/polypheny/db/iface/QueryInterfaceManager.java b/core/src/main/java/org/polypheny/db/iface/QueryInterfaceManager.java index 780738b54d..ed83036050 100644 --- a/core/src/main/java/org/polypheny/db/iface/QueryInterfaceManager.java +++ b/core/src/main/java/org/polypheny/db/iface/QueryInterfaceManager.java @@ -164,7 +164,7 @@ public QueryInterface addQueryInterface( Catalog catalog, String clazzName, Stri throw new RuntimeException( "There is already a query interface with this unique name" ); } QueryInterface instance; - int ifaceId = -1; + long ifaceId = -1; try { String[] split = clazzName.split( "\\$" ); split = split[split.length - 1].split( "\\." ); diff --git a/core/src/main/java/org/polypheny/db/processing/LogicalAlgAnalyzeShuttle.java b/core/src/main/java/org/polypheny/db/processing/LogicalAlgAnalyzeShuttle.java index 126e145b32..dabb956157 100644 --- a/core/src/main/java/org/polypheny/db/processing/LogicalAlgAnalyzeShuttle.java +++ b/core/src/main/java/org/polypheny/db/processing/LogicalAlgAnalyzeShuttle.java @@ -60,6 +60,7 @@ import org.polypheny.db.algebra.logical.relational.LogicalSort; import org.polypheny.db.algebra.logical.relational.LogicalUnion; import org.polypheny.db.catalog.entity.CatalogEntity; +import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.prepare.AlgOptEntityImpl; import org.polypheny.db.transaction.Statement; @@ -393,13 +394,13 @@ private void getAvailableColumns( AlgNode scan ) { this.entityId.add( scan.getEntity().id ); final LogicalTable table = scan.getEntity().unwrap( LogicalTable.class ); if ( table != null ) { - final List ids = table.fieldIds; + final List columns = table.columns; final List names = table.getColumnNames(); final String baseName = table.getNamespaceName() + "." 
+ table.name + "."; - for ( int i = 0; i < ids.size(); i++ ) { - this.availableColumns.putIfAbsent( ids.get( i ), baseName + names.get( i ) ); - this.availableColumnsWithTable.putIfAbsent( ids.get( i ), table.id ); + for ( int i = 0; i < columns.size(); i++ ) { + this.availableColumns.putIfAbsent( columns.get( i ).id, baseName + names.get( i ) ); + this.availableColumnsWithTable.putIfAbsent( columns.get( i ).id, table.id ); } } } @@ -420,7 +421,7 @@ private void handleIfPartitioned( AlgNode node, LogicalTable catalogTable ) { if ( catalogTable.partitionProperty.isPartitioned ) { WhereClauseVisitor whereClauseVisitor = new WhereClauseVisitor( statement, - catalogTable.fieldIds.indexOf( catalogTable.partitionProperty.partitionColumnId ) ); + catalogTable.columns.stream().map( c -> c.id ).collect( Collectors.toList()).indexOf( catalogTable.partitionProperty.partitionColumnId ) ); node.accept( whereClauseVisitor ); int scanId = node.getInput( 0 ).getId(); diff --git a/core/src/main/java/org/polypheny/db/schema/LogicalCollection.java b/core/src/main/java/org/polypheny/db/schema/LogicalCollection.java index b3a5e82efe..fc8be17c4e 100644 --- a/core/src/main/java/org/polypheny/db/schema/LogicalCollection.java +++ b/core/src/main/java/org/polypheny/db/schema/LogicalCollection.java @@ -23,8 +23,8 @@ public class LogicalCollection extends LogicalEntity { - protected LogicalCollection( long tableId, String logicalSchemaName, String logicalTableName, AlgProtoDataType protoRowType ) { - super( tableId, logicalSchemaName, logicalTableName, List.of( 0L ), List.of( "d" ), protoRowType, NamespaceType.DOCUMENT ); + protected LogicalCollection( long tableId, String logicalSchemaName, long collectionId, String logicalTableName, AlgProtoDataType protoRowType ) { + super( tableId, logicalSchemaName, logicalTableName, List.of( 0L ), List.of( "d" ), collectionId, protoRowType, NamespaceType.DOCUMENT ); } } diff --git a/core/src/main/java/org/polypheny/db/schema/LogicalEntity.java b/core/src/main/java/org/polypheny/db/schema/LogicalEntity.java index 1bbf515605..d69a030900 100644 --- a/core/src/main/java/org/polypheny/db/schema/LogicalEntity.java +++ b/core/src/main/java/org/polypheny/db/schema/LogicalEntity.java @@ -69,9 +69,10 @@ public LogicalEntity( String logicalTableName, List columnIds, List logicalColumnNames, + long namespaceId, AlgProtoDataType protoRowType, NamespaceType namespaceType ) { - super( tableId, logicalTableName, EntityType.ENTITY, NamespaceType.RELATIONAL ); + super( tableId, logicalTableName,namespaceId, EntityType.ENTITY, NamespaceType.RELATIONAL ); this.logicalSchemaName = logicalSchemaName; this.logicalTableName = logicalTableName; this.columnIds = columnIds; diff --git a/core/src/main/java/org/polypheny/db/schema/LogicalRelView.java b/core/src/main/java/org/polypheny/db/schema/LogicalRelView.java index e2119f8f5f..8bbc16127d 100644 --- a/core/src/main/java/org/polypheny/db/schema/LogicalRelView.java +++ b/core/src/main/java/org/polypheny/db/schema/LogicalRelView.java @@ -27,10 +27,11 @@ protected LogicalRelView( long tableId, String logicalSchemaName, String logicalTableName, + long namespaceId, List columnIds, List logicalColumnNames, AlgProtoDataType protoRowType ) { - super( tableId, logicalSchemaName, logicalTableName, columnIds, logicalColumnNames, protoRowType, NamespaceType.RELATIONAL ); + super( tableId, logicalSchemaName, logicalTableName, columnIds, logicalColumnNames, namespaceId, protoRowType, NamespaceType.RELATIONAL ); } } diff --git 
a/core/src/main/java/org/polypheny/db/schema/PolyphenyDbSchema.java b/core/src/main/java/org/polypheny/db/schema/PolyphenyDbSchema.java index 0dc79999d9..019caccfa2 100644 --- a/core/src/main/java/org/polypheny/db/schema/PolyphenyDbSchema.java +++ b/core/src/main/java/org/polypheny/db/schema/PolyphenyDbSchema.java @@ -18,6 +18,7 @@ import java.util.List; import java.util.stream.Collectors; +import org.apache.commons.lang.NotImplementedException; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogNamespace; import org.polypheny.db.catalog.entity.allocation.AllocationCollection; @@ -36,7 +37,8 @@ public interface PolyphenyDbSchema { default LogicalTable getTable( List names ) { - switch ( names.size() ) { + throw new NotImplementedException(); + /*switch ( names.size() ) { case 3: return Catalog.getInstance().getTables( Pattern.of( names.get( 1 ) ), Pattern.of( names.get( 2 ) ) ).get( 0 ); case 2: @@ -45,11 +47,12 @@ default LogicalTable getTable( List names ) { return Catalog.getInstance().getTables( null, Pattern.of( names.get( 0 ) ) ).get( 0 ); default: return null; - } + }*/ } default LogicalTable getTable( long id ) { - return Catalog.getInstance().getTable( id ); + throw new NotImplementedException(); + //return Catalog.getInstance().getTable( id ); } default AllocationTable getAllocTable( long id ){ @@ -62,7 +65,7 @@ default PhysicalTable getPhysicalTable( long id ){ default LogicalCollection getCollection( List names ) { - CatalogNamespace namespace; + /*CatalogNamespace namespace; switch ( names.size() ) { case 3: namespace = Catalog.getInstance().getNamespaces( Pattern.of( names.get( 1 ) ) ).get( 0 ); @@ -76,11 +79,13 @@ default LogicalCollection getCollection( List names ) { return Catalog.getInstance().getCollections( namespace.id, Pattern.of( names.get( 0 ) ) ).get( 0 ); default: return null; - } + }*/ + throw new NotImplementedException(); } default LogicalCollection getCollection( long id ) { - return Catalog.getInstance().getCollection( id ); + //return Catalog.getInstance().getCollection( id ); + throw new NotImplementedException(); } default AllocationCollection getAllocCollection( long id ){ @@ -92,14 +97,16 @@ default PhysicalCollection getPhysicalCollection( long id ){ } default LogicalGraph getGraph( List names ) { - if ( names.size() == 1 ) {// TODO add methods + /*if ( names.size() == 1 ) {// TODO add methods return Catalog.getInstance().getGraphs( Pattern.of( names.get( 0 ) ) ).get( 0 ); } - return null; + return null;*/ + throw new NotImplementedException(); } default LogicalGraph getGraph( long id ) { - return Catalog.getInstance().getGraph( id ); + // return Catalog.getInstance().getGraph( id ); + throw new NotImplementedException(); } default AllocationGraph getAllocGraph( long id ){ @@ -111,7 +118,8 @@ default PhysicalGraph getPhysicalGraph( long id ){ } default List getNamespaceNames() { - return Catalog.getInstance().getNamespaces( Catalog.defaultDatabaseId, null ).stream().map( t -> t.name ).collect( Collectors.toList() ); + // return Catalog.getInstance().getNamespaces( Catalog.defaultDatabaseId, null ).stream().map( t -> t.name ).collect( Collectors.toList() ); + throw new NotImplementedException(); } default boolean isPartitioned( long id ){ diff --git a/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java b/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java index d6b523371e..b5881132e0 100644 --- a/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java +++ 
b/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java @@ -1375,7 +1375,7 @@ public AlgBuilder documentProject( List projects, List>> physicalPlacementsOfPartitions = new HashMap<>(); // PartitionId -> List + protected Map>> physicalPlacementsOfPartitions = new HashMap<>(); // PartitionId -> List public RoutedAlgBuilder( Context context, AlgOptCluster cluster, Snapshot snapshot ) { @@ -98,13 +99,13 @@ public RoutedAlgBuilder documents( ImmutableList tuples, AlgDataType public void addPhysicalInfo( Map> physicalPlacements ) { - final Map>> map = physicalPlacements.entrySet().stream() - .collect( Collectors.toMap( Map.Entry::getKey, entry -> map( entry.getValue() ) ) ); + final Map>> map = physicalPlacements.entrySet().stream() + .collect( Collectors.toMap( Entry::getKey, entry -> map( entry.getValue() ) ) ); physicalPlacementsOfPartitions.putAll( map ); } - private List> map( List catalogCols ) { + private List> map( List catalogCols ) { return catalogCols.stream().map( col -> new Pair<>( col.adapterId, col.columnId ) ).collect( Collectors.toList() ); } diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java index 028780dc37..9361a9f75c 100644 --- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java @@ -299,8 +299,6 @@ private void handleSource( DataSource adapter ) { catalogTable.id, catalogTable.partitionProperty.partitionIds.get( 0 ), PlacementType.AUTOMATIC, - physicalSchemaName, - physicalTableName, DataPlacementRole.UPTODATE ); } catch ( GenericCatalogException e ) { throw new RuntimeException( "Exception while adding primary key" ); @@ -891,8 +889,6 @@ else if ( partitionGroupIds.isEmpty() && partitionGroupNames.isEmpty() ) { catalogTable.id, partitionId, PlacementType.AUTOMATIC, - null, - null, DataPlacementRole.UPTODATE ); } @@ -1460,8 +1456,6 @@ else if ( !partitionGroupNames.isEmpty() && partitionGroupIds.isEmpty() ) { catalogTable.id, partitionId, PlacementType.MANUAL, - null, - null, DataPlacementRole.UPTODATE ) ); storeInstance.createPhysicalTable( statement.getPrepareContext(), catalogTable, null ); @@ -1514,8 +1508,6 @@ public void modifyPartitionPlacement( LogicalTable catalogTable, List part catalogTable.id, partitionId, PlacementType.AUTOMATIC, - null, - null, DataPlacementRole.UPTODATE ); } @@ -1764,7 +1756,7 @@ public void createMaterializedView( String viewName, long namespaceId, AlgRoot a if ( stores == null ) { // Ask router on which store(s) the table should be placed - stores = RoutingManager.getInstance().getCreatePlacementStrategy().getDataStoresForNewTable(); + stores = RoutingManager.getInstance().getCreatePlacementStrategy().getDataStoresForNewEntity(); } AlgDataType fieldList = algRoot.alg.getRowType(); @@ -1858,8 +1850,6 @@ public void createMaterializedView( String viewName, long namespaceId, AlgRoot a tableId, catalogMaterializedView.partitionProperty.partitionIds.get( 0 ), PlacementType.AUTOMATIC, - null, - null, DataPlacementRole.UPTODATE ); store.createPhysicalTable( statement.getPrepareContext(), catalogMaterializedView, null ); @@ -1898,7 +1888,7 @@ public long createGraph( String graphName, boolean modifiable, @Nullable List f if ( stores == null ) { // Ask router on which store(s) the table should be placed - stores = RoutingManager.getInstance().getCreatePlacementStrategy().getDataStoresForNewTable(); + stores = 
RoutingManager.getInstance().getCreatePlacementStrategy().getDataStoresForNewEntity(); } long tableId = catalog.getLogicalRel( namespaceId ).addTable( @@ -2218,7 +2208,6 @@ public void createTable( long namespaceId, String name, List f addConstraint( namespaceId, constraint.name, constraint.type, constraint.columnNames, tableId ); } - //catalog.getLogicalRel( catalogTable.namespaceId ).updateTablePartitionProperties(tableId, partitionProperty); LogicalTable catalogTable = catalog.getLogicalRel( namespaceId ).getTable( tableId ); // Trigger rebuild of schema; triggers schema creation on adapters @@ -2226,15 +2215,15 @@ public void createTable( long namespaceId, String name, List f for ( DataStore store : stores ) { catalog.getAllocRel( catalogTable.namespaceId ).addPartitionPlacement( - catalogTable.namespaceId, store.getAdapterId(), + catalogTable.namespaceId, + store.getAdapterId(), catalogTable.id, catalogTable.partitionProperty.partitionIds.get( 0 ), PlacementType.AUTOMATIC, - null, - null, DataPlacementRole.UPTODATE ); - store.createPhysicalTable( statement.getPrepareContext(), catalogTable, null ); + catalog.getPhysical( catalogTable.namespaceId ).addPhysicalEntity( + store.createPhysicalTable( statement.getPrepareContext(), catalogTable, null ) ); } } catch ( GenericCatalogException | UnknownColumnException | UnknownCollationException e ) { @@ -2253,7 +2242,7 @@ public void createCollection( long namespaceId, String name, boolean ifNotExists if ( stores == null ) { // Ask router on which store(s) the table should be placed - stores = RoutingManager.getInstance().getCreatePlacementStrategy().getDataStoresForNewTable(); + stores = RoutingManager.getInstance().getCreatePlacementStrategy().getDataStoresForNewEntity(); } long collectionId; @@ -2663,8 +2652,6 @@ public void addPartitioning( PartitionInformation partitionInfo, List partitionedTable.id, partitionId, PlacementType.AUTOMATIC, - null, - null, DataPlacementRole.UPTODATE ); } @@ -2774,8 +2761,6 @@ public void removePartitioning( LogicalTable partitionedTable, Statement stateme mergedTable.id, mergedTable.partitionProperty.partitionIds.get( 0 ), PlacementType.AUTOMATIC, - null, - null, DataPlacementRole.UPTODATE ); // First create new tables diff --git a/dbms/src/main/java/org/polypheny/db/partition/FrequencyMapImpl.java b/dbms/src/main/java/org/polypheny/db/partition/FrequencyMapImpl.java index a64e5b05de..283da82977 100644 --- a/dbms/src/main/java/org/polypheny/db/partition/FrequencyMapImpl.java +++ b/dbms/src/main/java/org/polypheny/db/partition/FrequencyMapImpl.java @@ -339,8 +339,6 @@ private void createHotTables( LogicalTable table, List partitionsFromColdT table.id, partitionId, PlacementType.AUTOMATIC, - null, - null, DataPlacementRole.UPTODATE ); } diff --git a/dbms/src/main/java/org/polypheny/db/routing/strategies/CreateAllPlacementStrategy.java b/dbms/src/main/java/org/polypheny/db/routing/strategies/CreateAllPlacementStrategy.java index e5e9911d74..74a6a94f60 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/strategies/CreateAllPlacementStrategy.java +++ b/dbms/src/main/java/org/polypheny/db/routing/strategies/CreateAllPlacementStrategy.java @@ -40,7 +40,7 @@ public List getDataStoresForNewColumn( LogicalColumn addedColumn ) { @Override - public List getDataStoresForNewTable() { + public List getDataStoresForNewEntity() { return AdapterManager.getInstance().getStores().values().asList(); } diff --git a/dbms/src/main/java/org/polypheny/db/routing/strategies/CreatePlacementStrategy.java 
b/dbms/src/main/java/org/polypheny/db/routing/strategies/CreatePlacementStrategy.java index 1969633648..0b181fdb17 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/strategies/CreatePlacementStrategy.java +++ b/dbms/src/main/java/org/polypheny/db/routing/strategies/CreatePlacementStrategy.java @@ -28,6 +28,6 @@ public interface CreatePlacementStrategy { List getDataStoresForNewColumn( LogicalColumn addedColumn ); - List getDataStoresForNewTable(); + List getDataStoresForNewEntity(); } diff --git a/dbms/src/main/java/org/polypheny/db/routing/strategies/CreateSinglePlacementStrategy.java b/dbms/src/main/java/org/polypheny/db/routing/strategies/CreateSinglePlacementStrategy.java index efc70f25c4..25741413c9 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/strategies/CreateSinglePlacementStrategy.java +++ b/dbms/src/main/java/org/polypheny/db/routing/strategies/CreateSinglePlacementStrategy.java @@ -36,7 +36,7 @@ public List getDataStoresForNewColumn( LogicalColumn addedColumn ) { @Override - public List getDataStoresForNewTable() { + public List getDataStoresForNewEntity() { Map availableStores = AdapterManager.getInstance().getStores(); for ( DataStore store : availableStores.values() ) { return ImmutableList.of( store ); diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/CatalogPlugin.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/CatalogPlugin.java index caa8ba4fd2..db4f20966c 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/CatalogPlugin.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/CatalogPlugin.java @@ -18,7 +18,7 @@ import org.pf4j.Plugin; import org.pf4j.PluginWrapper; -import org.polypheny.db.catalog.logistic.NamespaceType; +import org.polypheny.db.plugins.PolyPluginManager; public class CatalogPlugin extends Plugin { @@ -36,22 +36,8 @@ public CatalogPlugin( PluginWrapper wrapper ) { @Override public void start() { - PolyCatalog catalog = new PolyCatalog(); - - catalog.addNamespace( "test", NamespaceType.RELATIONAL, false ); - long namespaceId = catalog.addNamespace( "test2", NamespaceType.RELATIONAL, false ); - - long tableId = catalog.addTable( "testTable", namespaceId ); - catalog.addColumn( "testColumn", namespaceId, tableId, null ); - catalog.commit(); - - byte[] buffer = catalog.serialize(); - - PolyCatalog catalog1 = catalog.deserialize( buffer, PolyCatalog.class ); - - catalog1.addColumn( "testColumn2", namespaceId, tableId, null ); - catalog1.rollback(); - + log.info( "PolyCatalog was loaded" ); + PolyPluginManager.setCatalogsSupplier( () -> Catalog.setAndGetInstance( new PolyCatalog() ) ); } } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/NCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/NCatalog.java index 6621d6bcb5..ea3b6e2d49 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/NCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/NCatalog.java @@ -33,18 +33,6 @@ public interface NCatalog { NamespaceType getType(); - default RelationalCatalog asRelational() { - return unwrap( RelationalCatalog.class ); - } - - default DocumentCatalog asDocument() { - return unwrap( DocumentCatalog.class ); - } - - default GraphCatalog asGraph() { - return unwrap( GraphCatalog.class ); - } - default T unwrap( Class clazz ) { if ( !this.getClass().isAssignableFrom( clazz ) ) { throw new RuntimeException( String.format( "Error while retrieving the %s catalog.", clazz.getSimpleName() )
); diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java index f0067c47e6..120ad43206 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java @@ -30,6 +30,9 @@ import lombok.NonNull; import lombok.extern.slf4j.Slf4j; import org.polypheny.db.algebra.AlgNode; +import org.polypheny.db.catalog.allocation.PolyAllocDocCatalog; +import org.polypheny.db.catalog.allocation.PolyAllocGraphCatalog; +import org.polypheny.db.catalog.allocation.PolyAllocRelCatalog; import org.polypheny.db.catalog.catalogs.AllocationCatalog; import org.polypheny.db.catalog.catalogs.AllocationDocumentCatalog; import org.polypheny.db.catalog.catalogs.AllocationGraphCatalog; @@ -57,6 +60,7 @@ import org.polypheny.db.catalog.logical.RelationalCatalog; import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.catalog.logistic.Pattern; +import org.polypheny.db.catalog.physical.PolyPhysicalCatalog; import org.polypheny.db.catalog.snapshot.FullSnapshot; import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.transaction.Transaction; @@ -210,6 +214,18 @@ public LogicalEntity getLogicalEntity( String entityName ) { } + @Override + public LogicalEntity getLogicalEntity( long id ) { + for ( LogicalCatalog catalog : logicalCatalogs.values() ) { + LogicalEntity entity = catalog.getEntity( id ); + if( entity != null ) { + return entity; + } + } + return null; + } + + @Override public PhysicalCatalog getPhysical( long namespaceId ) { return physicalCatalogs.get( namespaceId ); @@ -277,14 +293,18 @@ public long addNamespace( String name, NamespaceType namespaceType, boolean case switch ( namespaceType ) { case RELATIONAL: logicalCatalogs.put( id, new RelationalCatalog( namespace, idBuilder ) ); + allocationCatalogs.put( id, new PolyAllocRelCatalog() ); break; case DOCUMENT: logicalCatalogs.put( id, new DocumentCatalog( namespace, idBuilder ) ); + allocationCatalogs.put( id, new PolyAllocDocCatalog() ); break; case GRAPH: logicalCatalogs.put( id, new GraphCatalog( namespace, idBuilder ) ); + allocationCatalogs.put( id, new PolyAllocGraphCatalog() ); break; } + physicalCatalogs.put( id, new PolyPhysicalCatalog() ); change(); return id; } @@ -473,13 +493,13 @@ public List> getPhysicalsOnAdapter( long id ) { @Override public List getIndexes() { - return null; + return List.of(); } @Override public List getTablesForPeriodicProcessing() { - return null; + return List.of(); } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/ConnectedMap.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PusherMap.java similarity index 90% rename from plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/ConnectedMap.java rename to plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PusherMap.java index d2717a6aad..58d68c77a3 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/ConnectedMap.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PusherMap.java @@ -23,9 +23,9 @@ import java.util.function.Consumer; import org.jetbrains.annotations.NotNull; -public class ConnectedMap extends ConcurrentHashMap { +public class PusherMap extends ConcurrentHashMap { - ConcurrentLinkedQueue>> onChange = new ConcurrentLinkedQueue<>(); + ConcurrentLinkedQueue>> onChange = new ConcurrentLinkedQueue<>(); public void 
change() { @@ -33,7 +33,7 @@ public void change() { } - public ConnectedMap( Map allocations ) { + public PusherMap( Map allocations ) { super( allocations ); change(); } @@ -100,7 +100,7 @@ public void replaceAll( BiFunction function ) } - public void addConnection( Consumer> onChange ) { + public void addConnection( Consumer> onChange ) { this.onChange.add( onChange ); } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocRelCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocRelCatalog.java index 3f89294872..e921cc7081 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocRelCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocRelCatalog.java @@ -29,7 +29,7 @@ import lombok.Getter; import lombok.extern.slf4j.Slf4j; import org.jetbrains.annotations.Nullable; -import org.polypheny.db.catalog.ConnectedMap; +import org.polypheny.db.catalog.PusherMap; import org.polypheny.db.catalog.IdBuilder; import org.polypheny.db.catalog.Serializable; import org.polypheny.db.catalog.catalogs.AllocationRelationalCatalog; @@ -58,7 +58,7 @@ public class PolyAllocRelCatalog implements AllocationRelationalCatalog, Seriali public BinarySerializer serializer = Serializable.builder.get().build( PolyAllocRelCatalog.class ); @Serialize - public final ConnectedMap allocations; + public final PusherMap allocations; private final ConcurrentHashMap, Long> adapterLogicalToAllocId; private final ConcurrentHashMap, AllocationTable> adapterLogicalColumnToAlloc; @@ -69,10 +69,13 @@ public class PolyAllocRelCatalog implements AllocationRelationalCatalog, Seriali private final ConcurrentHashMap> logicalTableToAllocs; + public PolyAllocRelCatalog(){ + this( new ConcurrentHashMap<>() ); + } public PolyAllocRelCatalog( @Deserialize("allocations") Map allocations ) { - this.allocations = new ConnectedMap<>( allocations ); + this.allocations = new PusherMap<>( allocations ); this.adapterLogicalToAllocId = new ConcurrentHashMap<>(); this.allocations.addRowConnection( this.adapterLogicalToAllocId, ( k, v ) -> Pair.of( v.adapterId, v.logical.id ), ( k, v ) -> k ); this.adapterLogicalColumnToAlloc = new ConcurrentHashMap<>(); @@ -453,7 +456,7 @@ public boolean validateDataPlacementsConstraints( long tableId, long adapterId, @Override - public void addPartitionPlacement( long namespaceId, long adapterId, long tableId, long partitionId, PlacementType placementType, String physicalSchemaName, String physicalTableName, DataPlacementRole role ) { + public void addPartitionPlacement( long namespaceId, long adapterId, long tableId, long partitionId, PlacementType placementType, DataPlacementRole role ) { } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/DocumentCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/DocumentCatalog.java index c75b238963..262a827a60 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/DocumentCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/DocumentCatalog.java @@ -27,7 +27,7 @@ import lombok.With; import lombok.experimental.NonFinal; import org.polypheny.db.adapter.DataStore; -import org.polypheny.db.catalog.ConnectedMap; +import org.polypheny.db.catalog.PusherMap; import org.polypheny.db.catalog.IdBuilder; import org.polypheny.db.catalog.Serializable; import org.polypheny.db.catalog.catalogs.LogicalDocumentCatalog; 
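A note on the ConnectedMap to PusherMap rename a few hunks up: a PusherMap is a ConcurrentHashMap that re-runs registered callbacks whenever its contents change, which is how the catalogs in this patch keep secondary indexes in sync with the primary map. A minimal sketch of the pattern, using only the addRowConnection/addConnection call shapes visible in this patch; the value type and lambdas are illustrative, not the real catalog wiring:

    // Sketch only: the primary map pushes every change into derived structures.
    // Assumes imports of PusherMap, AllocationTable, Pair and java.util.concurrent.ConcurrentHashMap.
    PusherMap<Long, AllocationTable> allocations = new PusherMap<>( new ConcurrentHashMap<>() );

    // Derived index, kept current on every change: (adapterId, logicalTableId) -> allocation id.
    ConcurrentHashMap<Pair<Long, Long>, Long> adapterLogicalToAllocId = new ConcurrentHashMap<>();
    allocations.addRowConnection( adapterLogicalToAllocId, ( k, v ) -> Pair.of( v.adapterId, v.logical.id ), ( k, v ) -> k );

    // Coarser hook: a consumer of the whole map, re-run after each change.
    allocations.addConnection( m -> System.out.println( "allocations changed, size " + m.size() ) );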
@@ -47,7 +47,7 @@ public class DocumentCatalog implements Serializable, LogicalDocumentCatalog { @Serialize public IdBuilder idBuilder; @Serialize - public ConnectedMap collections; + public PusherMap collections; private ConcurrentHashMap names; @Getter @@ -65,7 +65,7 @@ public DocumentCatalog( @Deserialize("idBuilder") IdBuilder idBuilder, @Deserialize("collections") Map collections ) { this.logicalNamespace = logicalNamespace; - this.collections = new ConnectedMap<>( collections ); + this.collections = new PusherMap<>( collections ); this.idBuilder = idBuilder; @@ -102,6 +102,12 @@ public LogicalEntity getEntity( String name ) { } + @Override + public LogicalEntity getEntity( long id ) { + return collections.get( id ); + } + + @Override public LogicalCollection getCollection( long collectionId ) { return null; diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/GraphCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/GraphCatalog.java index 8e6559b863..ec5cf1fe68 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/GraphCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/GraphCatalog.java @@ -79,6 +79,12 @@ public LogicalEntity getEntity( String name ) { } + @Override + public LogicalEntity getEntity( long id ) { + return null; + } + + @Override public void addGraphAlias( long graphId, String alias, boolean ifNotExists ) { diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/RelationalCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/RelationalCatalog.java index 74053be322..fbcd224eff 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/RelationalCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/RelationalCatalog.java @@ -35,7 +35,7 @@ import org.polypheny.db.algebra.AlgCollation; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.type.AlgDataType; -import org.polypheny.db.catalog.ConnectedMap; +import org.polypheny.db.catalog.PusherMap; import org.polypheny.db.catalog.IdBuilder; import org.polypheny.db.catalog.Serializable; import org.polypheny.db.catalog.catalogs.LogicalRelationalCatalog; @@ -75,10 +75,10 @@ public class RelationalCatalog implements Serializable, LogicalRelationalCatalog public BinarySerializer serializer = Serializable.builder.get().build( RelationalCatalog.class ); @Serialize - public ConnectedMap tables; + public PusherMap tables; @Serialize - public ConnectedMap columns; + public PusherMap columns; @Getter public LogicalNamespace logicalNamespace; @@ -111,8 +111,8 @@ public RelationalCatalog( @Deserialize("keyColumns") Map keyColumns ) { this.logicalNamespace = logicalNamespace; - this.tables = new ConnectedMap<>( tables ); - this.columns = new ConnectedMap<>( columns ); + this.tables = new PusherMap<>( tables ); + this.columns = new PusherMap<>( columns ); this.indexes = indexes; this.keys = keys; this.keyColumns = keyColumns; @@ -158,6 +158,12 @@ public LogicalEntity getEntity( String name ) { } + @Override + public LogicalEntity getEntity( long id ) { + return tables.get( id ); + } + + @Override public List getTables( @Nullable Pattern name ) { if ( name == null ) { diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/physical/PolyPhysicalCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/physical/PolyPhysicalCatalog.java new file mode 100644 index 
0000000000..44900b4e2a --- /dev/null +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/physical/PolyPhysicalCatalog.java @@ -0,0 +1,82 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.catalog.physical; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; +import org.polypheny.db.catalog.PusherMap; +import org.polypheny.db.catalog.catalogs.PhysicalCatalog; +import org.polypheny.db.catalog.entity.physical.PhysicalEntity; + +public class PolyPhysicalCatalog implements PhysicalCatalog { + + private final PusherMap> physicals; + + private final ConcurrentHashMap> logicalPhysical; + private final ConcurrentHashMap>> physicalsPerAdapter; + + + public PolyPhysicalCatalog() { + this( new ConcurrentHashMap<>() ); + } + + + public PolyPhysicalCatalog( Map> physicals ) { + this.physicals = new PusherMap<>( physicals ); + + this.logicalPhysical = new ConcurrentHashMap<>(); + this.physicals.addRowConnection( this.logicalPhysical, ( k, v ) -> v.logical.id, ( k, v ) -> v ); + this.physicalsPerAdapter = new ConcurrentHashMap<>(); + this.physicals.addConnection( m -> { + physicalsPerAdapter.clear(); + m.forEach( ( k, v ) -> { + if ( physicalsPerAdapter.containsKey( v.adapterId ) ) { + physicalsPerAdapter.get( v.adapterId ).add( v ); + } else { + physicalsPerAdapter.put( v.adapterId, new ArrayList<>( List.of( v ) ) ); + } + } ); + } ); + } + + + @Override + public List> getPhysicalsOnAdapter( long id ) { + return physicalsPerAdapter.get( id ); + } + + + @Override + public PhysicalEntity getPhysicalEntity( long id ) { + return physicals.get( id ); + } + + + @Override + public void addPhysicalEntity( PhysicalEntity physicalEntity ) { + physicals.put( physicalEntity.id, physicalEntity ); + } + + + @Override + public PhysicalEntity getFromLogical( long id ) { + return logicalPhysical.get( id ); + } + +} diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/FullSnapshot.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/FullSnapshot.java index 1a61afa23b..95b80cde95 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/FullSnapshot.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/FullSnapshot.java @@ -26,6 +26,7 @@ import org.polypheny.db.catalog.entity.allocation.AllocationGraph; import org.polypheny.db.catalog.entity.allocation.AllocationTable; import org.polypheny.db.catalog.entity.logical.LogicalCollection; +import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.entity.physical.PhysicalCollection; @@ -124,6 +125,12 @@ public List getLogicalTables( long namespaceId, Pattern name ) { } + @Override + public LogicalColumn getLogicalColumn( long id ) { + return null; 
+ } + + @Override public LogicalCollection getLogicalCollection( long id ) { return null; diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateTable.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateTable.java index 56a92ca84a..61aab71300 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateTable.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateTable.java @@ -32,7 +32,6 @@ import org.polypheny.db.catalog.exceptions.EntityAlreadyExistsException; import org.polypheny.db.catalog.exceptions.GenericCatalogException; import org.polypheny.db.catalog.exceptions.UnknownColumnException; -import org.polypheny.db.catalog.exceptions.UnknownDatabaseException; import org.polypheny.db.catalog.exceptions.UnknownKeyException; import org.polypheny.db.catalog.exceptions.UnknownPartitionTypeException; import org.polypheny.db.catalog.exceptions.UnknownSchemaException; @@ -209,14 +208,11 @@ public void execute( Context context, Statement statement, QueryParameters param long schemaId; // Cannot use getLogicalTable() here since table does not yet exist - if ( name.names.size() == 3 ) { // DatabaseName.SchemaName.TableName - schemaId = catalog.getNamespace( name.names.get( 1 ) ).id; - tableName = name.names.get( 2 ); - } else if ( name.names.size() == 2 ) { // SchemaName.TableName - schemaId = catalog.getSchema( context.getDatabaseId(), name.names.get( 0 ) ).id; + if ( name.names.size() == 2 ) { // SchemaName.TableName + schemaId = catalog.getNamespace( name.names.get( 0 ) ).id; tableName = name.names.get( 1 ); } else { // TableName - schemaId = catalog.getSchema( context.getDatabaseId(), context.getDefaultSchemaName() ).id; + schemaId = catalog.getNamespace( context.getDefaultSchemaName() ).id; tableName = name.names.get( 0 ); } @@ -270,7 +266,7 @@ public void execute( Context context, Statement statement, QueryParameters param } catch ( GenericCatalogException | UnknownColumnException e ) { // We just added the table/column so it has to exist or we have an internal problem throw new RuntimeException( e ); - } catch ( UnknownDatabaseException | UnknownTableException | TransactionException | UnknownSchemaException | UnknownUserException | UnknownKeyException e ) { + } catch ( UnknownTableException | TransactionException | UnknownSchemaException | UnknownUserException | UnknownKeyException e ) { throw new RuntimeException( e ); } } diff --git a/settings.gradle b/settings.gradle index c80a44a38e..e3214d0cda 100644 --- a/settings.gradle +++ b/settings.gradle @@ -12,7 +12,7 @@ include 'dbms' include 'plugins' // catalog -include 'plugins:mapdb-catalog' +// include 'plugins:mapdb-catalog' include 'plugins:poly-catalog' include 'plugins:mapdb-monitoring' From a85776b91a746500f19a00f5dc6ac7977b4c1239 Mon Sep 17 00:00:00 2001 From: datomo Date: Tue, 7 Mar 2023 18:08:50 +0100 Subject: [PATCH 036/436] adjusting types of catalog classes and tests, implementing some needed methods --- .../org/polypheny/db/adapter/Adapter.java | 2 +- .../polypheny/db/adapter/AdapterManager.java | 2 +- .../org/polypheny/db/adapter/DataSource.java | 2 +- .../org/polypheny/db/adapter/DataStore.java | 2 +- .../db/adapter/index/IndexManager.java | 6 +- .../db/adapter/java/ReflectiveSchema.java | 2 +- .../org/polypheny/db/catalog/Catalog.java | 4 +- .../catalogs/AllocationDocumentCatalog.java | 2 +- .../catalogs/AllocationRelationalCatalog.java | 9 +- .../db/catalog/catalogs/LogicalCatalog.java | 5 +-
.../catalogs/LogicalDocumentCatalog.java | 3 +- .../catalogs/LogicalRelationalCatalog.java | 14 +- .../db/catalog/entity/CatalogAdapter.java | 22 ++- .../entity/CatalogColumnPlacement.java | 40 ++-- .../catalog/entity/CatalogDataPlacement.java | 3 +- .../catalog/entity/CatalogDefaultValue.java | 14 +- .../db/catalog/entity/CatalogEntity.java | 4 +- .../db/catalog/entity/CatalogIndex.java | 56 +++--- .../db/catalog/entity/CatalogKey.java | 39 ++-- .../entity/CatalogMaterializedView.java | 3 +- .../db/catalog/entity/CatalogPrimaryKey.java | 3 +- .../catalog/entity/CatalogQueryInterface.java | 15 +- .../db/catalog/entity/CatalogUser.java | 19 +- .../db/catalog/entity/CatalogView.java | 3 +- .../db/catalog/entity/LogicalNamespace.java | 15 +- .../entity/allocation/AllocationEntity.java | 4 + .../entity/allocation/AllocationTable.java | 23 +-- .../entity/logical/LogicalCollection.java | 17 +- .../catalog/entity/logical/LogicalColumn.java | 10 +- .../catalog/entity/logical/LogicalEntity.java | 21 +-- .../catalog/entity/logical/LogicalGraph.java | 8 +- .../catalog/entity/logical/LogicalTable.java | 47 ++--- .../db/catalog/snapshot/Snapshot.java | 4 + .../java/org/polypheny/db/ddl/DdlManager.java | 4 +- .../db/partition/PartitionManager.java | 2 +- .../properties/PartitionProperty.java | 53 +++++- .../org/polypheny/db/prepare/Context.java | 4 +- .../org/polypheny/db/prepare/ContextImpl.java | 7 +- .../polypheny/db/processing/DataMigrator.java | 2 +- .../org/polypheny/db/routing/DmlRouter.java | 4 +- .../db/routing/ProposedRoutingPlan.java | 2 +- .../org/polypheny/db/routing/RoutingPlan.java | 2 +- .../java/org/polypheny/db/schema/Schemas.java | 8 +- .../org/polypheny/db/tools/Frameworks.java | 1 - .../db/transaction/TransactionManager.java | 5 +- .../org/polypheny/db/catalog/MockCatalog.java | 2 +- .../java/org/polypheny/db/PolyphenyDb.java | 19 +- .../org/polypheny/db/ddl/DdlManagerImpl.java | 80 ++++---- .../partition/AbstractPartitionManager.java | 4 +- .../db/partition/ListPartitionManager.java | 2 +- .../db/partition/RangePartitionManager.java | 2 +- .../TemperatureAwarePartitionManager.java | 2 +- .../db/processing/AbstractQueryProcessor.java | 2 +- .../processing/ConstraintEnforceAttacher.java | 26 +-- .../db/processing/DataMigratorImpl.java | 81 +++----- .../db/routing/UiRoutingPageUtil.java | 13 +- .../dto/CachedProposedRoutingPlan.java | 8 +- .../routing/dto/ProposedRoutingPlanImpl.java | 9 +- .../db/routing/routers/AbstractDqlRouter.java | 2 +- .../db/routing/routers/BaseRouter.java | 32 ++-- .../db/routing/routers/CachedPlanRouter.java | 2 +- .../db/routing/routers/DmlRouterImpl.java | 26 +-- .../routers/FullPlacementQueryRouter.java | 8 +- .../db/routing/routers/IcarusRouter.java | 16 +- .../CreateAllPlacementStrategy.java | 2 +- .../CreateSinglePlacementStrategy.java | 2 +- .../db/transaction/EntityAccessMap.java | 2 +- .../db/transaction/StatementImpl.java | 1 - .../db/transaction/TransactionImpl.java | 4 - .../transaction/TransactionManagerImpl.java | 24 ++- .../db/view/MaterializedViewManagerImpl.java | 53 +++--- .../statistics/AlphabeticStatisticColumn.java | 2 +- .../statistics/NumericalStatisticColumn.java | 2 +- .../db/monitoring/statistics/QueryResult.java | 2 +- .../statistics/StatisticColumn.java | 6 +- .../statistics/StatisticQueryProcessor.java | 44 ++--- .../statistics/StatisticRepository.java | 10 +- .../monitoring/statistics/StatisticTable.java | 6 +- .../statistics/StatisticsManagerImpl.java | 75 ++++---- .../statistics/TemporalStatisticColumn.java | 2 +- 
.../org/polypheny/db/avatica/DbmsMeta.java | 71 +++---- .../avatica/PolyphenyDbConnectionHandle.java | 2 +- .../org/polypheny/db/cql/ColumnIndex.java | 7 +- .../polypheny/db/cql/Cql2RelConverter.java | 9 +- .../org/polypheny/db/cql/CqlQueryBuilder.java | 2 +- .../java/org/polypheny/db/cql/TableIndex.java | 9 +- .../polypheny/db/cql/utils/CombinerTest.java | 4 +- .../org/polypheny/db/cql/utils/IndexTest.java | 4 +- .../cql/utils/helper/AlgBuildTestHelper.java | 4 +- .../polypheny/db/adapter/csv/CsvSource.java | 2 +- .../admin/CypherAlterDatabaseAlias.java | 3 +- .../admin/CypherCreateDatabaseAlias.java | 3 +- .../db/cypher/admin/CypherDropAlias.java | 3 +- .../db/cypher/admin/CypherDropDatabase.java | 3 +- .../cypher2alg/CypherToAlgConverter.java | 10 +- .../db/cypher/ddl/CypherAddPlacement.java | 3 +- .../db/cypher/ddl/CypherDropPlacement.java | 3 +- .../ExploreQueryProcessor.java | 3 +- .../db/hsqldb/stores/HsqldbStore.java | 10 +- .../jdbc/sources/AbstractJdbcSource.java | 4 +- .../jdbc/stores/AbstractJdbcStore.java | 16 +- .../db/languages/MqlProcessorImpl.java | 4 +- .../db/languages/mql/MqlCreateCollection.java | 7 +- .../db/languages/mql/MqlCreateView.java | 7 +- .../db/languages/mql/MqlDeletePlacement.java | 9 +- .../polypheny/db/languages/mql/MqlDrop.java | 6 +- .../db/languages/mql/MqlRenameCollection.java | 6 +- .../db/languages/mql/MqlUseDatabase.java | 3 +- .../org/polypheny/db/catalog/IdBuilder.java | 24 +-- .../org/polypheny/db/catalog/PolyCatalog.java | 178 +++++++++++++++++- .../org/polypheny/db/catalog/PusherMap.java | 4 +- .../allocation/PolyAllocDocCatalog.java | 2 +- .../allocation/PolyAllocRelCatalog.java | 27 ++- .../db/catalog/logical/DocumentCatalog.java | 29 +-- .../db/catalog/logical/GraphCatalog.java | 17 +- .../db/catalog/logical/RelationalCatalog.java | 54 ++++-- .../db/catalog/snapshot/FullSnapshot.java | 6 + .../polypheny/db/restapi/RequestParser.java | 26 +-- .../java/org/polypheny/db/restapi/Rest.java | 54 ++++-- .../polypheny/db/sql/SqlProcessorImpl.java | 16 +- .../org/polypheny/db/sql/language/SqlDdl.java | 17 +- .../ddl/SqlCreateMaterializedView.java | 4 +- .../db/sql/language/ddl/SqlCreateSchema.java | 2 +- .../db/sql/language/ddl/SqlCreateView.java | 4 +- .../SqlAlterMaterializedViewAddIndex.java | 3 +- .../altertable/SqlAlterTableAddColumn.java | 2 +- .../ddl/altertable/SqlAlterTableAddIndex.java | 3 +- .../SqlAlterTableAddPartitions.java | 3 +- .../altertable/SqlAlterTableAddPlacement.java | 2 +- .../SqlAlterTableMergePartitions.java | 3 +- .../SqlAlterTableModifyPartitions.java | 8 +- .../SqlAlterTableModifyPlacement.java | 4 +- .../language/validate/EntityNamespace.java | 5 - .../db/sql/web/SchemaToJsonMapper.java | 4 +- .../java/org/polypheny/db/webui/Crud.java | 102 +++++----- .../org/polypheny/db/webui/HttpServer.java | 2 - .../org/polypheny/db/webui/WebSocket.java | 2 +- .../polypheny/db/webui/crud/LanguageCrud.java | 47 +++-- .../db/webui/crud/StatisticCrud.java | 6 +- .../polypheny/db/webui/models/Placement.java | 4 +- .../models/requests/BatchUpdateRequest.java | 9 +- 141 files changed, 1099 insertions(+), 881 deletions(-) diff --git a/core/src/main/java/org/polypheny/db/adapter/Adapter.java b/core/src/main/java/org/polypheny/db/adapter/Adapter.java index 45623842ab..dfa17fd5a3 100644 --- a/core/src/main/java/org/polypheny/db/adapter/Adapter.java +++ b/core/src/main/java/org/polypheny/db/adapter/Adapter.java @@ -291,7 +291,7 @@ public abstract class Adapter { private ConfigListener listener; - public Adapter( int adapterId, String 
uniqueName, Map settings ) { + public Adapter( long adapterId, String uniqueName, Map settings ) { this.properties = getClass().getAnnotation( AdapterProperties.class ); if ( getClass().getAnnotation( AdapterProperties.class ) == null ) { throw new RuntimeException( "The used adapter does not annotate its properties correctly." ); diff --git a/core/src/main/java/org/polypheny/db/adapter/AdapterManager.java b/core/src/main/java/org/polypheny/db/adapter/AdapterManager.java index ef1181780c..4dbdd6e1ad 100644 --- a/core/src/main/java/org/polypheny/db/adapter/AdapterManager.java +++ b/core/src/main/java/org/polypheny/db/adapter/AdapterManager.java @@ -117,7 +117,7 @@ public DataSource getSource( String uniqueName ) { } - public DataSource getSource( int id ) { + public DataSource getSource( long id ) { Adapter adapter = getAdapter( id ); if ( adapter instanceof DataSource ) { return (DataSource) adapter; diff --git a/core/src/main/java/org/polypheny/db/adapter/DataSource.java b/core/src/main/java/org/polypheny/db/adapter/DataSource.java index c236ccd28a..0a2efb50cc 100644 --- a/core/src/main/java/org/polypheny/db/adapter/DataSource.java +++ b/core/src/main/java/org/polypheny/db/adapter/DataSource.java @@ -35,7 +35,7 @@ public abstract class DataSource extends Adapter implements ExtensionPoint { private final boolean dataReadOnly; - protected DataSource( final int adapterId, final String uniqueName, final Map settings, boolean dataReadOnly ) { + protected DataSource( final long adapterId, final String uniqueName, final Map settings, boolean dataReadOnly ) { super( adapterId, uniqueName, settings ); this.dataReadOnly = dataReadOnly; diff --git a/core/src/main/java/org/polypheny/db/adapter/DataStore.java b/core/src/main/java/org/polypheny/db/adapter/DataStore.java index 5248b194fe..f7791fe442 100644 --- a/core/src/main/java/org/polypheny/db/adapter/DataStore.java +++ b/core/src/main/java/org/polypheny/db/adapter/DataStore.java @@ -51,7 +51,7 @@ public abstract class DataStore extends Adapter implements ExtensionPoint { protected final transient Catalog catalog = Catalog.getInstance(); - public DataStore( final int adapterId, final String uniqueName, final Map settings, final boolean persistent ) { + public DataStore( final long adapterId, final String uniqueName, final Map settings, final boolean persistent ) { super( adapterId, uniqueName, settings ); this.persistent = persistent; diff --git a/core/src/main/java/org/polypheny/db/adapter/index/IndexManager.java b/core/src/main/java/org/polypheny/db/adapter/index/IndexManager.java index f89a57209d..d40dfc38d2 100644 --- a/core/src/main/java/org/polypheny/db/adapter/index/IndexManager.java +++ b/core/src/main/java/org/polypheny/db/adapter/index/IndexManager.java @@ -173,15 +173,15 @@ protected void addIndex( final long id, final String name, final CatalogKey key, .filter( it -> it.canProvide( method, unique, persistent ) ) .findFirst() .orElseThrow( IllegalArgumentException::new ); - final LogicalTable table = Catalog.getInstance().getLogicalRel( key.schemaId ).getTable( key.tableId ); - final CatalogPrimaryKey pk = Catalog.getInstance().getLogicalRel( key.schemaId ).getPrimaryKey( table.primaryKey ); + final LogicalTable table = Catalog.getInstance().getLogicalRel( key.namespaceId ).getTable( key.tableId ); + final CatalogPrimaryKey pk = Catalog.getInstance().getLogicalRel( key.namespaceId ).getPrimaryKey( table.primaryKey ); final Index index = factory.create( id, name, method, unique, persistent, - Catalog.getInstance().getNamespace( 
key.schemaId ), + Catalog.getInstance().getNamespace( key.namespaceId ), table, key.getColumnNames(), pk.getColumnNames() ); diff --git a/core/src/main/java/org/polypheny/db/adapter/java/ReflectiveSchema.java b/core/src/main/java/org/polypheny/db/adapter/java/ReflectiveSchema.java index bc42e9740d..5d81c9354d 100644 --- a/core/src/main/java/org/polypheny/db/adapter/java/ReflectiveSchema.java +++ b/core/src/main/java/org/polypheny/db/adapter/java/ReflectiveSchema.java @@ -248,7 +248,7 @@ private static class ReflectiveEntity extends LogicalTable implements ScannableE ReflectiveEntity( Type elementType, Enumerable enumerable, Long id, Long partitionId, Long adapterId ) { //super( elementType, id, partitionId, adapterId ); - super( id, "test", List.of(), -1, "", EntityType.ENTITY, null, ImmutableList.of(), false, null ); + super( id, "test", List.of(), -1, "", EntityType.ENTITY, null, ImmutableList.of(), false, null, List.of() ); this.elementType = elementType; this.enumerable = enumerable; throw new NotImplementedException(); diff --git a/core/src/main/java/org/polypheny/db/catalog/Catalog.java b/core/src/main/java/org/polypheny/db/catalog/Catalog.java index 072c6e44b6..16b380a8a0 100644 --- a/core/src/main/java/org/polypheny/db/catalog/Catalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/Catalog.java @@ -57,7 +57,7 @@ public abstract class Catalog implements ExtensionPoint { public static Adapter defaultStore; public static Adapter defaultSource; - public static int defaultUserId = 0; + public static long defaultUserId = 0; public static long defaultDatabaseId = 0; public static boolean resetDocker; protected final PropertyChangeSupport listeners = new PropertyChangeSupport( this ); @@ -172,7 +172,7 @@ protected final boolean isValidIdentifier( final String str ) { * @param password of the user * @return the id of the created user */ - public abstract int addUser( String name, String password ); + public abstract long addUser( String name, String password ); /** diff --git a/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationDocumentCatalog.java b/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationDocumentCatalog.java index 9ece5287e6..d0b78c2f54 100644 --- a/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationDocumentCatalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationDocumentCatalog.java @@ -44,6 +44,6 @@ public interface AllocationDocumentCatalog extends AllocationCatalog { void dropCollectionPlacement( long id, long adapterId ); - CatalogCollectionPlacement getCollectionPlacement( long id, int placementId ); + CatalogCollectionPlacement getCollectionPlacement( long id, long placementId ); } diff --git a/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationRelationalCatalog.java b/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationRelationalCatalog.java index 4eec0f9fc7..e03d196706 100644 --- a/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationRelationalCatalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationRelationalCatalog.java @@ -26,6 +26,7 @@ import org.polypheny.db.catalog.entity.CatalogPartitionGroup; import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; import org.polypheny.db.catalog.entity.allocation.AllocationTable; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.exceptions.GenericCatalogException; import org.polypheny.db.catalog.logistic.DataPlacementRole; import 
org.polypheny.db.catalog.logistic.PartitionType; @@ -39,14 +40,16 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { /** * Adds a placement for a column. * + * @param table The logical table the placed column belongs to * @param adapterId The adapter on which the table should be placed * @param columnId The id of the column to be placed * @param placementType The type of placement * @param physicalSchemaName The schema name on the adapter * @param physicalTableName The table name on the adapter * @param physicalColumnName The column name on the adapter + * @param position The physical position of the column on the adapter */ - void addColumnPlacement( long adapterId, long columnId, PlacementType placementType, String physicalSchemaName, String physicalTableName, String physicalColumnName ); + void addColumnPlacement( LogicalTable table, long adapterId, long columnId, PlacementType placementType, String physicalSchemaName, String physicalTableName, String physicalColumnName, int position ); /** * Deletes all dependent column placements * @@ -116,7 +119,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param tableId The id of the table for the requested column placements * @return The requested collection */ - ImmutableMap> getColumnPlacementsByAdapter( long tableId ); + ImmutableMap> getColumnPlacementsByAdapter( long tableId ); /** * Gets the partition group sorted by partition. * @@ -384,7 +387,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param tableId The unique id of the table * @return List of partitionId indices */ - List getPartitionGroupsIndexOnDataPlacement( int adapterId, long tableId ); + List getPartitionGroupsIndexOnDataPlacement( long adapterId, long tableId ); /** * Returns a specific DataPlacement of a given table. diff --git a/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalCatalog.java b/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalCatalog.java index 77c70c358e..228f1b1cdb 100644 --- a/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalCatalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalCatalog.java @@ -19,6 +19,7 @@ import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalEntity; + public interface LogicalCatalog { /** @@ -40,10 +41,10 @@ public interface LogicalCatalog { LogicalNamespace getLogicalNamespace(); - LogicalCatalog withLogicalNamespace( LogicalNamespace logicalNamespace ); - LogicalEntity getEntity( String name ); LogicalEntity getEntity( long id ); + LogicalCatalog withLogicalNamespace( LogicalNamespace namespace ); } diff --git a/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalDocumentCatalog.java b/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalDocumentCatalog.java index 587d02ebfc..b31cac25e6 100644 --- a/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalDocumentCatalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalDocumentCatalog.java @@ -45,12 +45,11 @@ public interface LogicalDocumentCatalog extends LogicalCatalog { * * @param id ID of the collection to add, null if a new one needs to be generated * @param name The name of the collection - * @param currentUserId The user, which adds the collection * @param entity The type of entity of the collection * @param modifiable If the collection is modifiable * @return The id of the added collection */ - public abstract long addCollection( Long id, String name, int currentUserId, EntityType entity, boolean modifiable ); + public abstract long
addCollection( Long id, String name, EntityType entity, boolean modifiable ); /** diff --git a/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalRelationalCatalog.java b/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalRelationalCatalog.java index 58a493385c..422d0d6c11 100644 --- a/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalRelationalCatalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalRelationalCatalog.java @@ -87,12 +87,11 @@ public interface LogicalRelationalCatalog extends LogicalCatalog { * Adds a table to a specified schema. * * @param name The name of the table to add - * @param ownerId The if of the owner * @param entityType The table type * @param modifiable Whether the content of the table can be modified * @return The id of the inserted table */ - public abstract long addTable( String name, int ownerId, EntityType entityType, boolean modifiable ); + public abstract long addTable( String name, EntityType entityType, boolean modifiable ); /** @@ -100,7 +99,6 @@ public interface LogicalRelationalCatalog extends LogicalCatalog { * * @param name The name of the view to add * @param namespaceId The id of the schema - * @param ownerId The if of the owner * @param entityType The table type * @param modifiable Whether the content of the table can be modified * @param definition {@link AlgNode} used to create Views @@ -108,14 +106,13 @@ public interface LogicalRelationalCatalog extends LogicalCatalog { * @param fieldList all columns used within the View * @return The id of the inserted table */ - public abstract long addView( String name, long namespaceId, int ownerId, EntityType entityType, boolean modifiable, AlgNode definition, AlgCollation algCollation, Map> underlyingTables, AlgDataType fieldList, String query, QueryLanguage language ); + public abstract long addView( String name, long namespaceId, EntityType entityType, boolean modifiable, AlgNode definition, AlgCollation algCollation, Map> underlyingTables, AlgDataType fieldList, String query, QueryLanguage language ); /** * Adds a materialized view to a specified schema. 
* * @param name of the view to add * @param namespaceId id of the schema - * @param ownerId id of the owner * @param entityType type of table * @param modifiable Whether the content of the table can be modified * @param definition {@link AlgNode} used to create Views * @param algCollation relCollation used for materialized view * @param fieldList all columns used within the View * @param materializedCriteria Information like freshness and last updated * @param query used to define materialized view * @param language query language used to define materialized view * @param ordered if materialized view is ordered or not * @return id of the inserted materialized view */ - public abstract long addMaterializedView( String name, long namespaceId, int ownerId, EntityType entityType, boolean modifiable, AlgNode definition, AlgCollation algCollation, Map> underlyingTables, AlgDataType fieldList, MaterializedCriteria materializedCriteria, String query, QueryLanguage language, boolean ordered ) throws GenericCatalogException; + public abstract long addMaterializedView( String name, long namespaceId, EntityType entityType, boolean modifiable, AlgNode definition, AlgCollation algCollation, Map> underlyingTables, AlgDataType fieldList, MaterializedCriteria materializedCriteria, String query, QueryLanguage language, boolean ordered ) throws GenericCatalogException; /** * Renames a table @@ -151,7 +148,7 @@ public interface LogicalRelationalCatalog extends LogicalCatalog { * @param tableId The id of the table * @param ownerId ID of the new owner */ - public abstract void setTableOwner( long tableId, int ownerId ); + public abstract void setTableOwner( long tableId, long ownerId ); /** * Set the primary key of a table @@ -218,12 +215,11 @@ public interface LogicalRelationalCatalog extends LogicalCatalog { /** * Returns the column with the specified name in the specified table. * - * @param schemaName The name of the schema * @param tableName The name of the table * @param columnName The name of the column * @return A CatalogColumn */ - public abstract LogicalColumn getColumn( String schemaName, String tableName, String columnName ) throws UnknownColumnException, UnknownSchemaException, UnknownTableException; + public abstract LogicalColumn getColumn( String tableName, String columnName ) throws UnknownColumnException, UnknownSchemaException, UnknownTableException; /** * Adds a column.
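Taken together, the LogicalRelationalCatalog changes above drop the ownerId and schemaName arguments from the create and lookup methods; ownership is now assigned separately through setTableOwner with a long id. A rough sketch of the resulting call shapes, using only signatures shown in this patch (the relCatalog handle and the string literals are hypothetical):

    // Hypothetical usage of the narrowed interface; not code from this patch.
    long tableId = relCatalog.addTable( "depts", EntityType.ENTITY, true ); // ownerId parameter removed
    relCatalog.setTableOwner( tableId, Catalog.defaultUserId );             // owner ids are long now
    try {
        LogicalColumn column = relCatalog.getColumn( "depts", "name" );     // schemaName parameter removed
    } catch ( UnknownColumnException | UnknownSchemaException | UnknownTableException e ) {
        throw new RuntimeException( e );
    }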
diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogAdapter.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogAdapter.java index 588dbbacb7..088b12b389 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogAdapter.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogAdapter.java @@ -19,6 +19,8 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; +import io.activej.serializer.annotations.Deserialize; +import io.activej.serializer.annotations.Serialize; import java.io.Serializable; import java.util.List; import java.util.Map; @@ -26,24 +28,32 @@ import lombok.NonNull; import lombok.Value; import lombok.With; +import lombok.experimental.SuperBuilder; import org.polypheny.db.adapter.Adapter.AdapterProperties; import org.polypheny.db.catalog.Adapter; import org.polypheny.db.catalog.logistic.NamespaceType; @EqualsAndHashCode @Value -@With +@SuperBuilder(toBuilder = true) public class CatalogAdapter implements CatalogObject { private static final long serialVersionUID = -6140489767408917639L; + @Serialize public long id; + @Serialize public String uniqueName; + @Serialize public String adapterName; + @Serialize public AdapterType type; + @Serialize public ImmutableMap settings; + @Serialize public ImmutableList supportedNamespaces; + @Serialize public String adapterTypeName; @@ -51,11 +61,11 @@ public enum AdapterType {STORE, SOURCE} public CatalogAdapter( - final long id, - @NonNull final String uniqueName, - @NonNull final String adapterName, - @NonNull final AdapterType adapterType, - @NonNull final Map settings ) { + @Deserialize("id") final long id, + @Deserialize("uniqueName") @NonNull final String uniqueName, + @Deserialize("adapterName") @NonNull final String adapterName, + @Deserialize("type") @NonNull final AdapterType adapterType, + @Deserialize("settings") @NonNull final Map settings ) { this.id = id; this.uniqueName = uniqueName; this.adapterName = adapterName; diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogColumnPlacement.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogColumnPlacement.java index 0ef2a838e9..09fb31f851 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogColumnPlacement.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogColumnPlacement.java @@ -16,56 +16,55 @@ package org.polypheny.db.catalog.entity; +import io.activej.serializer.annotations.Deserialize; import io.activej.serializer.annotations.Serialize; import java.io.Serializable; import lombok.EqualsAndHashCode; import lombok.NonNull; import lombok.SneakyThrows; +import lombok.Value; import org.apache.commons.lang.NotImplementedException; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.logistic.PlacementType; @EqualsAndHashCode +@Value public class CatalogColumnPlacement implements CatalogObject { private static final long serialVersionUID = -1909757888176291095L; @Serialize - public final long namespaceId; + public long namespaceId; @Serialize - public final long tableId; + public long tableId; @Serialize - public final long columnId; + public long columnId; @Serialize - public final long adapterId; + public long adapterId; @Serialize - public final String adapterUniqueName; + public PlacementType placementType; @Serialize - public final PlacementType placementType; + public long physicalPosition; @Serialize - public final long physicalPosition; + public String physicalSchemaName; @Serialize - public 
final String physicalSchemaName; - @Serialize - public final String physicalColumnName; + public String physicalColumnName; public CatalogColumnPlacement( - final long namespaceId, - final long tableId, - final long columnId, - final long adapterId, - @NonNull final String adapterUniqueName, - @NonNull final PlacementType placementType, - final String physicalSchemaName, - final String physicalColumnName, - final long physicalPosition ) { + @Deserialize("namespaceId") final long namespaceId, + @Deserialize("tableId") final long tableId, + @Deserialize("columnId") final long columnId, + @Deserialize("adapterId") final long adapterId, + @Deserialize("placementType") @NonNull final PlacementType placementType, + @Deserialize("physicalSchemaName") final String physicalSchemaName, + @Deserialize("physicalColumnName") final String physicalColumnName, + @Deserialize("physicalPosition") final long physicalPosition ) { this.namespaceId = namespaceId; this.tableId = tableId; this.columnId = columnId; this.adapterId = adapterId; - this.adapterUniqueName = adapterUniqueName; this.placementType = placementType; this.physicalSchemaName = physicalSchemaName; this.physicalColumnName = physicalColumnName; @@ -102,7 +101,6 @@ public String getAdapterUniqueName() { public Serializable[] getParameterArray() { return new Serializable[]{ getLogicalTableName(), - adapterUniqueName, placementType.name(), physicalSchemaName, physicalColumnName }; diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogDataPlacement.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogDataPlacement.java index 8623a927a1..d6f7934d4d 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogDataPlacement.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogDataPlacement.java @@ -30,6 +30,7 @@ import lombok.Value; import lombok.With; import lombok.experimental.NonFinal; +import lombok.experimental.SuperBuilder; import org.apache.commons.lang.NotImplementedException; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.logistic.DataPlacementRole; @@ -39,7 +40,7 @@ /** * Serves as a container, which holds all information related to a table entity placed on physical store. 
*/ -@With +@SuperBuilder(toBuilder = true) @Value public class CatalogDataPlacement implements CatalogObject { diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogDefaultValue.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogDefaultValue.java index 259c4ea38d..a745cffff0 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogDefaultValue.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogDefaultValue.java @@ -17,6 +17,8 @@ package org.polypheny.db.catalog.entity; +import io.activej.serializer.annotations.Deserialize; +import io.activej.serializer.annotations.Serialize; import java.io.Serializable; import lombok.EqualsAndHashCode; import lombok.NonNull; @@ -28,17 +30,21 @@ public class CatalogDefaultValue implements Serializable { private static final long serialVersionUID = 6085682952587659184L; + @Serialize public final long columnId; + @Serialize public final PolyType type; + @Serialize public final String value; + @Serialize public final String functionName; public CatalogDefaultValue( - final long columnId, - @NonNull final PolyType type, - final String value, - final String functionName ) { + @Deserialize("columnId") final long columnId, + @Deserialize("type") @NonNull final PolyType type, + @Deserialize("value") final String value, + @Deserialize("functionName") final String functionName ) { this.columnId = columnId; this.type = type; this.value = value; diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogEntity.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogEntity.java index 02afdf2729..f4f3f17cfb 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogEntity.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogEntity.java @@ -19,6 +19,8 @@ import io.activej.serializer.annotations.Serialize; import java.io.Serializable; import java.util.List; +import lombok.AllArgsConstructor; +import lombok.NoArgsConstructor; import lombok.Value; import lombok.experimental.NonFinal; import lombok.experimental.SuperBuilder; @@ -52,7 +54,7 @@ public abstract class CatalogEntity implements CatalogObject, Wrapper, Serializa public long namespaceId; - protected CatalogEntity( long id, String name, long namespaceId, EntityType type, NamespaceType namespaceType ) { + public CatalogEntity( long id, String name, long namespaceId, EntityType type, NamespaceType namespaceType ) { this.id = id; this.namespaceId = namespaceId; this.name = name; diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogIndex.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogIndex.java index 4ea85591bc..398b413750 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogIndex.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogIndex.java @@ -17,6 +17,8 @@ package org.polypheny.db.catalog.entity; +import io.activej.serializer.annotations.Deserialize; +import io.activej.serializer.annotations.Serialize; import java.io.Serializable; import java.util.LinkedList; import java.util.List; @@ -28,34 +30,44 @@ @EqualsAndHashCode(callSuper = false) -public final class CatalogIndex implements Serializable { +@Value +public class CatalogIndex implements Serializable { private static final long serialVersionUID = -318228681682792406L; - public final long id; - public final String name; - public final String physicalName; - public final boolean unique; - public final IndexType type; - public final long location; - public final String method; - public 
final String methodDisplayName; - - public final CatalogKey key; - public final long keyId; + @Serialize + public long id; + @Serialize + public String name; + @Serialize + public String physicalName; + @Serialize + public boolean unique; + @Serialize + public IndexType type; + @Serialize + public long location; + @Serialize + public String method; + @Serialize + public String methodDisplayName; + @Serialize + public CatalogKey key; + @Serialize + public long keyId; public CatalogIndex( - final long id, - @NonNull final String name, - final boolean unique, - final String method, - final String methodDisplayName, - final IndexType type, - final Long location, - final long keyId, - final CatalogKey key, - final String physicalName ) { + @Deserialize("id") final long id, + @Deserialize("name") @NonNull final String name, + @Deserialize("unique") final boolean unique, + @Deserialize("method") final String method, + @Deserialize("methodDisplayName") final String methodDisplayName, + @Deserialize("type") final IndexType type, + @Deserialize("location") final Long location, + @Deserialize("keyId") final long keyId, + @Deserialize("key") final CatalogKey key, + @Deserialize("physicalName") final String physicalName ) { this.id = id; this.name = name; this.unique = unique; diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogKey.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogKey.java index 1a5cc2efef..af639785ee 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogKey.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogKey.java @@ -17,36 +17,47 @@ package org.polypheny.db.catalog.entity; import com.google.common.collect.ImmutableList; +import io.activej.serializer.annotations.Deserialize; +import io.activej.serializer.annotations.Serialize; import java.io.Serializable; -import java.util.LinkedList; import java.util.List; import lombok.EqualsAndHashCode; +import lombok.NonNull; import lombok.SneakyThrows; +import lombok.Value; +import lombok.experimental.NonFinal; import org.apache.commons.lang.NotImplementedException; import org.polypheny.db.catalog.Catalog; @EqualsAndHashCode +@Value +@NonFinal public class CatalogKey implements CatalogObject, Comparable { private static final long serialVersionUID = -5803762884192662540L; - public final long id; - public final long tableId; - public final long schemaId; - public final ImmutableList columnIds; - public final EnforcementTime enforcementTime; + @Serialize + public long id; + @Serialize + public long tableId; + @Serialize + public long namespaceId; + @Serialize + public ImmutableList columnIds; + @Serialize + public EnforcementTime enforcementTime; public CatalogKey( - final long id, - final long tableId, - final long schemaId, - final List columnIds, - EnforcementTime enforcementTime ) { + @Deserialize("id") final long id, + @Deserialize("tableId") final long tableId, + @Deserialize("namespaceId") final long namespaceId, + @Deserialize("columnIds") final List columnIds, + @Deserialize("enforcementTime") EnforcementTime enforcementTime ) { this.id = id; this.tableId = tableId; - this.schemaId = schemaId; + this.namespaceId = namespaceId; this.columnIds = ImmutableList.copyOf( columnIds ); this.enforcementTime = enforcementTime; } @@ -54,7 +65,7 @@ public CatalogKey( @SneakyThrows public String getSchemaName() { - return Catalog.getInstance().getNamespace( schemaId ).name; + return Catalog.getInstance().getNamespace( namespaceId ).name; } @@ -79,7 +90,7 @@ public List 
getColumnNames() { @Override public Serializable[] getParameterArray() { - return new Serializable[]{ id, tableId, getTableName(), schemaId, getSchemaName(), null, null }; + return new Serializable[]{ id, tableId, getTableName(), namespaceId, getSchemaName(), null, null }; } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogMaterializedView.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogMaterializedView.java index 95ec61473b..1843fbb39c 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogMaterializedView.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogMaterializedView.java @@ -22,6 +22,7 @@ import lombok.NonNull; import lombok.Value; import lombok.With; +import lombok.experimental.SuperBuilder; import org.polypheny.db.algebra.AlgCollation; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.catalog.Catalog; @@ -31,7 +32,7 @@ import org.polypheny.db.partition.properties.PartitionProperty; @EqualsAndHashCode(callSuper = true) -@With +@SuperBuilder(toBuilder = true) @Value public class CatalogMaterializedView extends CatalogView { diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogPrimaryKey.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogPrimaryKey.java index e765ed08bc..44f146bf3e 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogPrimaryKey.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogPrimaryKey.java @@ -24,7 +24,6 @@ import lombok.NonNull; import lombok.RequiredArgsConstructor; import org.apache.commons.lang.NotImplementedException; -import org.polypheny.db.catalog.Catalog; @EqualsAndHashCode(callSuper = true) @@ -35,7 +34,7 @@ public CatalogPrimaryKey( @NonNull final CatalogKey catalogKey ) { super( catalogKey.id, catalogKey.tableId, - catalogKey.schemaId, + catalogKey.namespaceId, catalogKey.columnIds, EnforcementTime.ON_QUERY ); } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogQueryInterface.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogQueryInterface.java index 5227502381..c00643c800 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogQueryInterface.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogQueryInterface.java @@ -18,28 +18,39 @@ import com.google.common.collect.ImmutableMap; +import io.activej.serializer.annotations.Deserialize; +import io.activej.serializer.annotations.Serialize; import java.io.Serializable; import java.util.Map; import lombok.EqualsAndHashCode; import lombok.NonNull; import lombok.Value; import lombok.With; +import lombok.experimental.SuperBuilder; @EqualsAndHashCode @Value -@With +@SuperBuilder(toBuilder = true) public class CatalogQueryInterface implements CatalogObject { private static final long serialVersionUID = 7212289724539530050L; + @Serialize public long id; + @Serialize public String name; + @Serialize public String clazz; + @Serialize public ImmutableMap settings; - public CatalogQueryInterface( final long id, @NonNull final String uniqueName, @NonNull final String clazz, @NonNull final Map settings ) { + public CatalogQueryInterface( + @Deserialize("id") final long id, + @Deserialize("name") @NonNull final String uniqueName, + @Deserialize("clazz") @NonNull final String clazz, + @Deserialize("settings") @NonNull final Map settings ) { this.id = id; this.name = uniqueName; this.clazz = clazz; diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogUser.java 
b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogUser.java index 311b8a633b..16acf5d69b 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogUser.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogUser.java @@ -17,21 +17,28 @@ package org.polypheny.db.catalog.entity; +import io.activej.serializer.annotations.Deserialize; +import io.activej.serializer.annotations.Serialize; import java.io.Serializable; import lombok.EqualsAndHashCode; +import lombok.Value; @EqualsAndHashCode -public final class CatalogUser implements CatalogObject, Comparable { +@Value +public class CatalogUser implements CatalogObject, Comparable { private static final long serialVersionUID = 5022567585804699491L; - public final int id; - public final String name; - public final String password; + @Serialize + public long id; + @Serialize + public String name; + @Serialize + public String password; - public CatalogUser( final int id, final String name, final String password ) { + public CatalogUser( @Deserialize("id") final long id, @Deserialize("name") final String name, @Deserialize("password") final String password ) { this.id = id; this.name = name; this.password = password; @@ -48,7 +55,7 @@ public Serializable[] getParameterArray() { @Override public int compareTo( CatalogUser o ) { if ( o != null ) { - return this.id - o.id; + return Math.toIntExact( this.id - o.id ); } return -1; } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogView.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogView.java index 612acd73cb..50a8602c25 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogView.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogView.java @@ -27,6 +27,7 @@ import lombok.Value; import lombok.With; import lombok.experimental.NonFinal; +import lombok.experimental.SuperBuilder; import org.polypheny.db.algebra.AbstractAlgNode; import org.polypheny.db.algebra.AlgCollation; import org.polypheny.db.algebra.AlgNode; @@ -44,7 +45,7 @@ import org.polypheny.db.view.ViewManager.ViewVisitor; @EqualsAndHashCode(callSuper = true) -@With +@SuperBuilder(toBuilder = true) @Value @NonFinal public class CatalogView extends LogicalTable { diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/LogicalNamespace.java b/core/src/main/java/org/polypheny/db/catalog/entity/LogicalNamespace.java index 724a2f0eb2..7c38ff66cd 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/LogicalNamespace.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/LogicalNamespace.java @@ -17,6 +17,8 @@ package org.polypheny.db.catalog.entity; +import io.activej.serializer.annotations.Deserialize; +import io.activej.serializer.annotations.Serialize; import java.io.Serializable; import lombok.EqualsAndHashCode; import lombok.Getter; @@ -34,21 +36,24 @@ public class LogicalNamespace extends CatalogNamespace implements CatalogObject, private static final long serialVersionUID = 3090632164988970558L; + @Serialize public long id; + @Serialize @Getter public String name; + @Serialize @Getter @EqualsAndHashCode.Exclude public NamespaceType namespaceType; - + @Serialize public boolean caseSensitive; public LogicalNamespace( - final long id, - @NonNull final String name, - @NonNull final NamespaceType namespaceType, - boolean caseSensitive ) { + @Deserialize("id") final long id, + @Deserialize("name") @NonNull final String name, + @Deserialize("namespaceType") @NonNull final NamespaceType namespaceType, + 
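
Widening the user id from int to long forces compareTo above to stop returning this.id - o.id directly; the chosen Math.toIntExact( this.id - o.id ) will, however, throw ArithmeticException once the difference overflows an int (LogicalColumn.compareTo further down uses the same subtraction idiom with a plain cast). If overflow matters, a drop-in alternative is:

    @Override
    public int compareTo( CatalogUser o ) {
        if ( o != null ) {
            // Long.compare never overflows and gives the same ordering semantics.
            return Long.compare( this.id, o.id );
        }
        return -1;
    }
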
@Deserialize("caseSensitive") boolean caseSensitive ) { super( id, name, namespaceType ); this.id = id; this.name = name; diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationEntity.java b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationEntity.java index aa1dffecd9..cd78db8ef1 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationEntity.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationEntity.java @@ -16,9 +16,12 @@ package org.polypheny.db.catalog.entity.allocation; +import lombok.AllArgsConstructor; import lombok.EqualsAndHashCode; import lombok.Value; +import lombok.With; import lombok.experimental.NonFinal; +import lombok.experimental.SuperBuilder; import org.polypheny.db.catalog.entity.logical.LogicalEntity; import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.logistic.NamespaceType; @@ -26,6 +29,7 @@ @EqualsAndHashCode(callSuper = true) @Value @NonFinal +@SuperBuilder(toBuilder = true) public abstract class AllocationEntity extends LogicalEntity { public long adapterId; diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationTable.java b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationTable.java index 219a226a68..2857586ef1 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationTable.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationTable.java @@ -26,7 +26,7 @@ import java.util.stream.Collectors; import lombok.EqualsAndHashCode; import lombok.Value; -import lombok.With; +import lombok.experimental.SuperBuilder; import org.apache.calcite.linq4j.tree.Expression; import org.apache.calcite.linq4j.tree.Expressions; import org.polypheny.db.catalog.Catalog; @@ -39,30 +39,27 @@ @EqualsAndHashCode(callSuper = true) @Value -@With +@SuperBuilder(toBuilder = true) public class AllocationTable extends AllocationEntity { + @Serialize public List placements; @Serialize public long adapterId; @Serialize public LogicalTable logicalTable; - @Serialize - public String adapterName; public AllocationTable( @Deserialize("logicalTable") LogicalTable logicalTable, - @Deserialize( "id" ) long id, - @Deserialize( "name" ) String name, - @Deserialize( "adapterId" ) long adapterId, - @Deserialize( "adapterName" ) String adapterName, - @Deserialize( "placements" ) List placements ) { + @Deserialize("id") long id, + @Deserialize("name") String name, + @Deserialize("adapterId") long adapterId, + @Deserialize("placements") List placements ) { super( logicalTable, id, name, EntityType.ENTITY, NamespaceType.RELATIONAL, adapterId ); this.logicalTable = logicalTable; this.adapterId = adapterId; this.placements = placements; - this.adapterName = adapterName; } @@ -100,15 +97,15 @@ public String getNamespaceName() { public AllocationTable withAddedColumn( long columnId, PlacementType placementType, String physicalSchemaName, String physicalTableName, String physicalColumnName ) { List placements = new ArrayList<>( this.placements ); - placements.add( new CatalogColumnPlacement( logical.namespaceId, id, columnId, adapterId, adapterName, placementType, physicalSchemaName, physicalColumnName, 0 ) ); + placements.add( new CatalogColumnPlacement( logical.namespaceId, id, columnId, adapterId, placementType, physicalSchemaName, physicalColumnName, 0 ) ); - return withPlacements( placements ); + return toBuilder().placements( placements ).build(); } public 
AllocationTable withRemovedColumn( long columnId ) { List placements = new ArrayList<>( this.placements ); - return withPlacements( placements.stream().filter( p -> p.columnId != columnId ).collect( Collectors.toList() ) ); + return toBuilder().placements( placements.stream().filter( p -> p.columnId != columnId ).collect( Collectors.toList() ) ).build(); } } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalCollection.java b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalCollection.java index 410835c91b..e2bacf4dc8 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalCollection.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalCollection.java @@ -36,14 +36,14 @@ @EqualsAndHashCode(callSuper = true) @Value -@With +//@With public class LogicalCollection extends LogicalEntity implements CatalogObject { private static final long serialVersionUID = -6490762948368178584L; @Getter public long id; - public ImmutableList placements; + public ImmutableList placements; public String name; public long namespaceId; public EntityType entityType; @@ -55,7 +55,7 @@ public LogicalCollection( String namespaceName, long id, String name, - @NonNull Collection placements, + @NonNull Collection placements, EntityType type, String physicalName ) { super( id, name, namespaceId, namespaceName, EntityType.ENTITY, NamespaceType.DOCUMENT ); @@ -74,22 +74,19 @@ public Serializable[] getParameterArray() { } - public LogicalCollection addPlacement( int adapterId ) { - List placements = new ArrayList<>( this.placements ); + public LogicalCollection addPlacement( Long adapterId ) { + List placements = new ArrayList<>( this.placements ); placements.add( adapterId ); return new LogicalCollection( id, name, namespaceId, namespaceName, placements, EntityType.ENTITY, physicalName ); } - public LogicalCollection removePlacement( int adapterId ) { - List placements = this.placements.stream().filter( id -> id != adapterId ).collect( Collectors.toList() ); + public LogicalCollection removePlacement( long adapterId ) { + List placements = this.placements.stream().filter( id -> id != adapterId ).collect( Collectors.toList() ); return new LogicalCollection( id, name, namespaceId, namespaceName, placements, EntityType.ENTITY, physicalName ); } - - - @Override public Expression asExpression() { return Expressions.call( Catalog.CATALOG_EXPRESSION, "getCollection", Expressions.constant( id ) ); diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalColumn.java b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalColumn.java index c8154b832e..e6419a08be 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalColumn.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalColumn.java @@ -52,7 +52,7 @@ public class LogicalColumn implements CatalogObject, Comparable { public long tableId; @Serialize - public long schemaId; + public long namespaceId; @Serialize public int position; @@ -95,7 +95,7 @@ public LogicalColumn( @Deserialize("id") final long id, @Deserialize("name") @NonNull final String name, @Deserialize("tableId") final long tableId, - @Deserialize("schemaId") final long schemaId, + @Deserialize("namespaceId") final long namespaceId, @Deserialize("position") final int position, @Deserialize("type") @NonNull final PolyType type, @Deserialize("collectionsType") final PolyType collectionsType, @@ -109,7 +109,7 @@ public LogicalColumn( this.id = id; 
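
withAddedColumn and withRemovedColumn above rebuild the immutable AllocationTable through toBuilder()...build() rather than Lombok's @With, matching the move to @SuperBuilder(toBuilder = true) on CatalogView, CatalogMaterializedView and AllocationEntity; unlike @With, @SuperBuilder also carries fields inherited from superclasses. A self-contained sketch of the pattern, with hypothetical Base/Derived classes:

    import lombok.EqualsAndHashCode;
    import lombok.Value;
    import lombok.experimental.NonFinal;
    import lombok.experimental.SuperBuilder;

    @Value
    @NonFinal
    @SuperBuilder(toBuilder = true)
    class Base {
        long id;
        String name;
    }

    @EqualsAndHashCode(callSuper = true)
    @Value
    @SuperBuilder(toBuilder = true)
    class Derived extends Base {
        String physicalName;
    }

    class Demo {
        static Derived rename( Derived d, String newName ) {
            // toBuilder() pre-fills every field, including the inherited id and name.
            return d.toBuilder().name( newName ).build();
        }

        public static void main( String[] args ) {
            Derived d = Derived.builder().id( 1L ).name( "t" ).physicalName( "t_phys" ).build();
            System.out.println( rename( d, "t2" ).getName() );  // t2
        }
    }
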
this.name = name; this.tableId = tableId; - this.schemaId = schemaId; + this.namespaceId = namespaceId; this.position = position; this.type = type; this.collectionsType = collectionsType; @@ -145,7 +145,7 @@ public AlgDataType getAlgDataType( final AlgDataTypeFactory typeFactory ) { @SneakyThrows public String getSchemaName() { - return Catalog.getInstance().getNamespace( schemaId ).name; + return Catalog.getInstance().getNamespace( namespaceId ).name; } @@ -181,7 +181,7 @@ public Serializable[] getParameterArray() { @Override public int compareTo( LogicalColumn o ) { - int comp = (int) (this.schemaId - o.schemaId); + int comp = (int) (this.namespaceId - o.namespaceId); if ( comp == 0 ) { comp = (int) (this.tableId - o.tableId); if ( comp == 0 ) { diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalEntity.java b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalEntity.java index 9acd83d85d..953034a73f 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalEntity.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalEntity.java @@ -19,7 +19,9 @@ import io.activej.serializer.annotations.Deserialize; import io.activej.serializer.annotations.Serialize; import io.activej.serializer.annotations.SerializeClass; +import lombok.AllArgsConstructor; import lombok.EqualsAndHashCode; +import lombok.NoArgsConstructor; import lombok.Value; import lombok.experimental.NonFinal; import lombok.experimental.SuperBuilder; @@ -31,24 +33,21 @@ @EqualsAndHashCode(callSuper = true) @Value @NonFinal -@SerializeClass(subclasses = { LogicalTable.class }) public abstract class LogicalEntity extends CatalogEntity { + @Serialize public String namespaceName; - @Serialize - public long namespaceId; - protected LogicalEntity( - @Deserialize( "id" ) long id, - @Deserialize( "name" ) String name, - @Deserialize( "namespaceId" ) long namespaceId, - @Deserialize( "namespaceName" ) String namespaceName, - @Deserialize( "type" ) EntityType type, - @Deserialize( "namespaceType" ) NamespaceType namespaceType ) { + public LogicalEntity( + long id, + String name, + long namespaceId, + String namespaceName, + EntityType type, + NamespaceType namespaceType ) { super( id, name, namespaceId, type, namespaceType ); this.namespaceName = namespaceName; - this.namespaceId = namespaceId; } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalGraph.java b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalGraph.java index 2c35a7ccb2..9196c2c0d6 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalGraph.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalGraph.java @@ -40,14 +40,14 @@ public class LogicalGraph extends LogicalEntity implements Comparable placements; + public ImmutableList placements; public int ownerId; public boolean modifiable; public boolean caseSensitive; - public LogicalGraph( long id, String name, long namespaceId, String namespaceName, int ownerId, boolean modifiable, @NonNull Collection placements, boolean caseSensitive ) { + public LogicalGraph( long id, String name, long namespaceId, String namespaceName, int ownerId, boolean modifiable, @NonNull Collection placements, boolean caseSensitive ) { super( id, name, namespaceId, namespaceName, EntityType.ENTITY, NamespaceType.GRAPH ); this.ownerId = ownerId; this.modifiable = modifiable; @@ -76,8 +76,8 @@ public int compareTo( @NotNull LogicalGraph o ) { } - public LogicalGraph addPlacement( 
int adapterId ) { - List placements = new ArrayList<>( this.placements ); + public LogicalGraph addPlacement( long adapterId ) { + List placements = new ArrayList<>( this.placements ); placements.add( adapterId ); return toBuilder().placements( ImmutableList.copyOf( placements ) ).build(); } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalTable.java b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalTable.java index a18a5267f8..7e48d88666 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalTable.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalTable.java @@ -20,16 +20,15 @@ import com.google.common.collect.ImmutableList; import io.activej.serializer.annotations.Deserialize; import io.activej.serializer.annotations.Serialize; -import io.activej.serializer.annotations.SerializeClass; import java.io.Serializable; +import java.util.ArrayList; import java.util.List; import java.util.stream.Collectors; import lombok.EqualsAndHashCode; import lombok.NonNull; import lombok.RequiredArgsConstructor; -import lombok.Value; -import lombok.With; import lombok.experimental.NonFinal; +import lombok.experimental.SuperBuilder; import org.apache.calcite.linq4j.tree.Expression; import org.apache.calcite.linq4j.tree.Expressions; import org.polypheny.db.algebra.type.AlgDataType; @@ -41,25 +40,16 @@ import org.polypheny.db.partition.properties.PartitionProperty; import org.polypheny.db.schema.ColumnStrategy; -//@Value -@With @EqualsAndHashCode(callSuper = false) @NonFinal +@SuperBuilder(toBuilder = true) public class LogicalTable extends LogicalEntity implements Comparable { private static final long serialVersionUID = 4653390333258552102L; - @Serialize - public long id; - @Serialize - public EntityType entityType; - @Serialize - public String name; @Serialize public ImmutableList columns; - @Serialize - public long namespaceId; @Serialize public Long primaryKey; @@ -84,18 +74,14 @@ public LogicalTable( @Deserialize("columns") final List columns, @Deserialize("namespaceId") final long namespaceId, @Deserialize("namespaceName") final String namespaceName, - @Deserialize("type") @NonNull final EntityType type, + @Deserialize("entityType") @NonNull final EntityType type, @Deserialize("primaryKey") final Long primaryKey, @Deserialize("dataPlacements") @NonNull final List dataPlacements, @Deserialize("modifiable") boolean modifiable, @Deserialize("partitionProperty") PartitionProperty partitionProperty, @Deserialize("connectedViews") List connectedViews ) { super( id, name, namespaceId, namespaceName, type, NamespaceType.RELATIONAL ); - this.id = id; - this.name = name; this.columns = ImmutableList.copyOf( columns ); - this.namespaceId = namespaceId; - this.entityType = type; this.primaryKey = primaryKey; this.modifiable = modifiable; @@ -110,19 +96,6 @@ public LogicalTable( } } - public LogicalTable( - final long id, - @NonNull final String name, - final List fieldIds, - final long namespaceId, - final String namespaceName, - @NonNull final EntityType type, - final Long primaryKey, - @NonNull final List dataPlacements, - boolean modifiable, - PartitionProperty partitionProperty ) { - this( id, name, fieldIds, namespaceId, namespaceName, type, primaryKey, dataPlacements, modifiable, partitionProperty, ImmutableList.of() ); - } public List getColumnNames() { return columns.stream().map( c -> c.name ).collect( Collectors.toList() ); @@ -190,6 +163,18 @@ public List getColumnStrategies() { } + public List 
getConnectedViews() { + return null; + } + + + public LogicalTable withAddedColumn( LogicalColumn column ) { + List columns = new ArrayList<>( this.columns ); + columns.add( column ); + return toBuilder().columns( ImmutableList.copyOf( columns ) ).build(); + } + + @RequiredArgsConstructor public static class PrimitiveCatalogTable { diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/Snapshot.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/Snapshot.java index 38b64cf48f..c730fd08ae 100644 --- a/core/src/main/java/org/polypheny/db/catalog/snapshot/Snapshot.java +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/Snapshot.java @@ -25,6 +25,7 @@ import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.catalog.entity.CatalogNamespace; +import org.polypheny.db.catalog.entity.CatalogPartition; import org.polypheny.db.catalog.entity.allocation.AllocationCollection; import org.polypheny.db.catalog.entity.allocation.AllocationGraph; import org.polypheny.db.catalog.entity.allocation.AllocationTable; @@ -141,4 +142,7 @@ default List getOperatorList() { return null; } + LogicalColumn getColumn( long columnId ); + + } diff --git a/core/src/main/java/org/polypheny/db/ddl/DdlManager.java b/core/src/main/java/org/polypheny/db/ddl/DdlManager.java index e374299f15..22faf1b019 100644 --- a/core/src/main/java/org/polypheny/db/ddl/DdlManager.java +++ b/core/src/main/java/org/polypheny/db/ddl/DdlManager.java @@ -111,13 +111,11 @@ public static DdlManager getInstance() { * Creates a schema with the provided options. * * @param name name of the new schema - * @param databaseId id of the database, to which the schema belongs * @param type the schema type, RELATIONAL, DOCUMENT, etc. 
- * @param userId the owner of the new schema * @param ifNotExists whether to silently ignore if the schema does already exist * @param replace whether the replace a existing schema */ - public abstract long createNamespace( String name, long databaseId, NamespaceType type, int userId, boolean ifNotExists, boolean replace ) throws NamespaceAlreadyExistsException; + public abstract long createNamespace( String name, NamespaceType type, boolean ifNotExists, boolean replace ) throws NamespaceAlreadyExistsException; /** * Adds a new adapter (data store or data source) diff --git a/core/src/main/java/org/polypheny/db/partition/PartitionManager.java b/core/src/main/java/org/polypheny/db/partition/PartitionManager.java index be115016a6..91d2d394e8 100644 --- a/core/src/main/java/org/polypheny/db/partition/PartitionManager.java +++ b/core/src/main/java/org/polypheny/db/partition/PartitionManager.java @@ -33,7 +33,7 @@ public interface PartitionManager { boolean probePartitionGroupDistributionChange( LogicalTable catalogTable, int storeId, long columnId, int threshold ); - Map> getRelevantPlacements( LogicalTable catalogTable, List partitionIds, List excludedAdapters ); + Map> getRelevantPlacements( LogicalTable catalogTable, List partitionIds, List excludedAdapters ); boolean validatePartitionGroupSetup( List> partitionGroupQualifiers, long numPartitionGroups, List partitionGroupNames, LogicalColumn partitionColumn ); diff --git a/core/src/main/java/org/polypheny/db/partition/properties/PartitionProperty.java b/core/src/main/java/org/polypheny/db/partition/properties/PartitionProperty.java index 935428a915..372299d235 100644 --- a/core/src/main/java/org/polypheny/db/partition/properties/PartitionProperty.java +++ b/core/src/main/java/org/polypheny/db/partition/properties/PartitionProperty.java @@ -17,25 +17,62 @@ package org.polypheny.db.partition.properties; import com.google.common.collect.ImmutableList; +import io.activej.serializer.annotations.Deserialize; +import io.activej.serializer.annotations.Serialize; import java.io.Serializable; +import java.util.List; +import lombok.AllArgsConstructor; import lombok.Getter; +import lombok.Value; +import lombok.experimental.NonFinal; import lombok.experimental.SuperBuilder; import org.polypheny.db.catalog.logistic.PartitionType; @SuperBuilder @Getter +@Value +@NonFinal public class PartitionProperty implements Serializable { - public final PartitionType partitionType; - public final boolean isPartitioned; - public final ImmutableList partitionGroupIds; - public final ImmutableList partitionIds; - public final long partitionColumnId; + @Serialize + public PartitionType partitionType; + @Serialize + public boolean isPartitioned; - public final long numPartitionGroups; - public final long numPartitions; + @Serialize + public ImmutableList partitionGroupIds; + @Serialize + public ImmutableList partitionIds; + @Serialize + public long partitionColumnId; - public final boolean reliesOnPeriodicChecks; + @Serialize + public long numPartitionGroups; + @Serialize + public long numPartitions; + + @Serialize + public boolean reliesOnPeriodicChecks; + + + public PartitionProperty( + @Deserialize("partitionType") PartitionType partitionType, + @Deserialize("isPartitioned") boolean isPartitioned, + @Deserialize("partitionGroupIds") List partitionGroupIds, + @Deserialize("partitionIds") List partitionIds, + @Deserialize("partitionColumnId") long partitionColumnId, + @Deserialize("numPartitionGroups") long numPartitionGroups, + @Deserialize("numPartitions") long 
numPartitions, + @Deserialize("reliesOnPeriodicChecks") boolean reliesOnPeriodicChecks ) { + this.partitionType = partitionType; + this.isPartitioned = isPartitioned; + this.partitionGroupIds = ImmutableList.copyOf( partitionGroupIds ); + this.partitionIds = ImmutableList.copyOf( partitionIds ); + this.partitionColumnId = partitionColumnId; + this.numPartitionGroups = numPartitionGroups; + this.numPartitions = numPartitions; + this.reliesOnPeriodicChecks = reliesOnPeriodicChecks; + } } diff --git a/core/src/main/java/org/polypheny/db/prepare/Context.java b/core/src/main/java/org/polypheny/db/prepare/Context.java index 5fbb51a40d..aca0d13e24 100644 --- a/core/src/main/java/org/polypheny/db/prepare/Context.java +++ b/core/src/main/java/org/polypheny/db/prepare/Context.java @@ -55,8 +55,6 @@ public interface Context { Statement getStatement(); - long getDatabaseId(); - - int getCurrentUserId(); + long getCurrentUserId(); } diff --git a/core/src/main/java/org/polypheny/db/prepare/ContextImpl.java b/core/src/main/java/org/polypheny/db/prepare/ContextImpl.java index e09bfcf0d9..26fa52b605 100644 --- a/core/src/main/java/org/polypheny/db/prepare/ContextImpl.java +++ b/core/src/main/java/org/polypheny/db/prepare/ContextImpl.java @@ -42,19 +42,16 @@ public class ContextImpl implements Context { @Getter private final String defaultSchemaName; @Getter - private final long databaseId; - @Getter - private final int currentUserId; + private final long currentUserId; - public ContextImpl( Snapshot snapshot, DataContext dataContext, String defaultSchemaName, long databaseId, int currentUserId, Statement statement ) { + public ContextImpl( Snapshot snapshot, DataContext dataContext, String defaultSchemaName, long currentUserId, Statement statement ) { this.snapshot = snapshot; this.typeFactory = dataContext.getTypeFactory(); this.dataContext = dataContext; this.defaultSchemaName = defaultSchemaName; this.statement = statement; this.currentUserId = currentUserId; - this.databaseId = databaseId; } diff --git a/core/src/main/java/org/polypheny/db/processing/DataMigrator.java b/core/src/main/java/org/polypheny/db/processing/DataMigrator.java index af7d2d9172..48a716a758 100644 --- a/core/src/main/java/org/polypheny/db/processing/DataMigrator.java +++ b/core/src/main/java/org/polypheny/db/processing/DataMigrator.java @@ -86,6 +86,6 @@ void copyPartitionData( AlgRoot getSourceIterator( Statement statement, Map> placementDistribution ); - void copyGraphData( LogicalGraph graph, Transaction transaction, Integer existingAdapterId, CatalogAdapter adapter ); + void copyGraphData( LogicalGraph graph, Transaction transaction, Long existingAdapterId, CatalogAdapter adapter ); } diff --git a/core/src/main/java/org/polypheny/db/routing/DmlRouter.java b/core/src/main/java/org/polypheny/db/routing/DmlRouter.java index 3908e86917..1453a6db95 100644 --- a/core/src/main/java/org/polypheny/db/routing/DmlRouter.java +++ b/core/src/main/java/org/polypheny/db/routing/DmlRouter.java @@ -46,8 +46,8 @@ public interface DmlRouter { AlgNode handleBatchIterator( AlgNode alg, Statement statement, LogicalQueryInformation queryInformation ); - AlgNode routeDocumentDml( LogicalDocumentModify alg, Statement statement, LogicalQueryInformation queryInformation, Integer adapterId ); + AlgNode routeDocumentDml( LogicalDocumentModify alg, Statement statement, LogicalQueryInformation queryInformation, Long adapterId ); - AlgNode routeGraphDml( LogicalLpgModify alg, Statement statement, LogicalGraph catalogGraph, List placements ); + AlgNode 
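
The PartitionProperty constructor above accepts plain List parameters from the deserializer and immediately wraps them with ImmutableList.copyOf, so the stored fields stay immutable regardless of what List implementation ActiveJ materializes. The copy is also cheap to repeat, because Guava's copyOf returns the same instance when handed an already-immutable list:

    import com.google.common.collect.ImmutableList;
    import java.util.List;

    class CopyDemo {
        public static void main( String[] args ) {
            List<Long> ids = List.of( 1L, 2L );
            ImmutableList<Long> first = ImmutableList.copyOf( ids );
            // copyOf is idempotent: copying an ImmutableList returns the same instance.
            ImmutableList<Long> second = ImmutableList.copyOf( first );
            System.out.println( first == second );  // true
        }
    }
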
routeGraphDml( LogicalLpgModify alg, Statement statement, LogicalGraph catalogGraph, List placements ); } diff --git a/core/src/main/java/org/polypheny/db/routing/ProposedRoutingPlan.java b/core/src/main/java/org/polypheny/db/routing/ProposedRoutingPlan.java index ec76be4aed..652ed65194 100644 --- a/core/src/main/java/org/polypheny/db/routing/ProposedRoutingPlan.java +++ b/core/src/main/java/org/polypheny/db/routing/ProposedRoutingPlan.java @@ -63,7 +63,7 @@ public interface ProposedRoutingPlan extends RoutingPlan { * @return The physical placements of the necessary partitions: {@code PartitionId -> List} */ @Override - Map>> getPhysicalPlacementsOfPartitions(); // PartitionId -> List + Map>> getPhysicalPlacementsOfPartitions(); // PartitionId -> List /** * @return Optional pre costs. diff --git a/core/src/main/java/org/polypheny/db/routing/RoutingPlan.java b/core/src/main/java/org/polypheny/db/routing/RoutingPlan.java index 13b524cd2f..960b3f9de2 100644 --- a/core/src/main/java/org/polypheny/db/routing/RoutingPlan.java +++ b/core/src/main/java/org/polypheny/db/routing/RoutingPlan.java @@ -33,6 +33,6 @@ public interface RoutingPlan { Class getRouter(); // PartitionId -> List - Map>> getPhysicalPlacementsOfPartitions(); + Map>> getPhysicalPlacementsOfPartitions(); } diff --git a/core/src/main/java/org/polypheny/db/schema/Schemas.java b/core/src/main/java/org/polypheny/db/schema/Schemas.java index 643a1a421f..f94994371f 100644 --- a/core/src/main/java/org/polypheny/db/schema/Schemas.java +++ b/core/src/main/java/org/polypheny/db/schema/Schemas.java @@ -326,13 +326,7 @@ public Statement getStatement() { @Override - public long getDatabaseId() { - return 0; - } - - - @Override - public int getCurrentUserId() { + public long getCurrentUserId() { return 0; } diff --git a/core/src/main/java/org/polypheny/db/tools/Frameworks.java b/core/src/main/java/org/polypheny/db/tools/Frameworks.java index e20445ca03..9bba938195 100644 --- a/core/src/main/java/org/polypheny/db/tools/Frameworks.java +++ b/core/src/main/java/org/polypheny/db/tools/Frameworks.java @@ -161,7 +161,6 @@ public JavaTypeFactory getTypeFactory() { }, "", 0, - 0, null ) ) .build(); return withPlanner( action, config ); diff --git a/core/src/main/java/org/polypheny/db/transaction/TransactionManager.java b/core/src/main/java/org/polypheny/db/transaction/TransactionManager.java index 1631a0b583..b67b6e56bb 100644 --- a/core/src/main/java/org/polypheny/db/transaction/TransactionManager.java +++ b/core/src/main/java/org/polypheny/db/transaction/TransactionManager.java @@ -17,7 +17,6 @@ package org.polypheny.db.transaction; -import org.polypheny.db.catalog.entity.CatalogDatabase; import org.polypheny.db.catalog.entity.CatalogUser; import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.exceptions.GenericCatalogException; @@ -28,9 +27,9 @@ public interface TransactionManager { - Transaction startTransaction( CatalogUser user, LogicalNamespace defaultSchema, CatalogDatabase database, boolean analyze, String origin ); + Transaction startTransaction( CatalogUser user, LogicalNamespace defaultSchema, boolean analyze, String origin ); - Transaction startTransaction( CatalogUser user, LogicalNamespace defaultSchema, CatalogDatabase database, boolean analyze, String origin, MultimediaFlavor flavor ); + Transaction startTransaction( CatalogUser user, LogicalNamespace defaultSchema, boolean analyze, String origin, MultimediaFlavor flavor ); Transaction startTransaction( long userId, boolean analyze, String origin ) 
throws GenericCatalogException, UnknownUserException, UnknownSchemaException; diff --git a/core/src/test/java/org/polypheny/db/catalog/MockCatalog.java b/core/src/test/java/org/polypheny/db/catalog/MockCatalog.java index 90316bf7fe..9231e40c2a 100644 --- a/core/src/test/java/org/polypheny/db/catalog/MockCatalog.java +++ b/core/src/test/java/org/polypheny/db/catalog/MockCatalog.java @@ -54,7 +54,7 @@ public void commit() throws NoTablePrimaryKeyException { @Override - public int addUser( String name, String password ) { + public long addUser( String name, String password ) { throw new NotImplementedException(); } diff --git a/dbms/src/main/java/org/polypheny/db/PolyphenyDb.java b/dbms/src/main/java/org/polypheny/db/PolyphenyDb.java index a662f7f17f..5ebaa23469 100644 --- a/dbms/src/main/java/org/polypheny/db/PolyphenyDb.java +++ b/dbms/src/main/java/org/polypheny/db/PolyphenyDb.java @@ -36,7 +36,6 @@ import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogAdapter.AdapterType; import org.polypheny.db.catalog.exceptions.GenericCatalogException; -import org.polypheny.db.catalog.exceptions.UnknownDatabaseException; import org.polypheny.db.catalog.exceptions.UnknownKeyException; import org.polypheny.db.catalog.exceptions.UnknownSchemaException; import org.polypheny.db.catalog.exceptions.UnknownTableException; @@ -346,22 +345,20 @@ public void join( final long millis ) throws InterruptedException { throw new RuntimeException( "There was no catalog submitted, aborting." ); } - trx = transactionManager.startTransaction( Catalog.defaultUserId, false, "Catalog Startup" ); + trx = transactionManager.startTransaction( Catalog.getInstance().getUser( Catalog.defaultUserId ), Catalog.getInstance().getNamespace( 0 ), false, "Catalog Startup" ); AdapterManager.getInstance().restoreAdapters(); loadDefaults(); QueryInterfaceManager.getInstance().restoreInterfaces( catalog ); trx.commit(); - trx = transactionManager.startTransaction( Catalog.defaultUserId, false, "Catalog Startup" ); + trx = transactionManager.startTransaction( Catalog.getInstance().getUser( Catalog.defaultUserId ), Catalog.getInstance().getNamespace( 0 ), false, "Catalog Startup" ); catalog.restoreColumnPlacements( trx ); catalog.restoreViews( trx ); trx.commit(); - } catch ( UnknownDatabaseException | UnknownUserException | UnknownSchemaException | TransactionException e ) { - if ( trx != null ) { - try { - trx.rollback(); - } catch ( TransactionException ex ) { - log.error( "Error while rolling back the transaction", e ); - } + } catch ( TransactionException e ) { + try { + trx.rollback(); + } catch ( TransactionException ex ) { + log.error( "Error while rolling back the transaction", e ); } throw new RuntimeException( "Something went wrong while restoring stores from the catalog.", e ); } @@ -399,7 +396,7 @@ public void join( final long millis ) throws InterruptedException { try { IndexManager.getInstance().initialize( transactionManager ); IndexManager.getInstance().restoreIndexes(); - } catch ( UnknownUserException | UnknownDatabaseException | UnknownSchemaException | UnknownTableException | TransactionException | UnknownKeyException e ) { + } catch ( UnknownUserException | UnknownSchemaException | UnknownTableException | TransactionException | UnknownKeyException e ) { throw new RuntimeException( "Something went wrong while initializing index manager.", e ); } diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java index 
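
One detail in the simplified restore path above: the rollback handler logs the outer exception e rather than the rollback failure ex, so a failure during rollback itself is never reported. The presumably intended form would be:

    try {
        trx.rollback();
    } catch ( TransactionException ex ) {
        // Log the rollback failure itself, not the exception that triggered the rollback.
        log.error( "Error while rolling back the transaction", ex );
    }
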
9361a9f75c..576efa0493 100644 --- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java @@ -204,7 +204,7 @@ private LogicalColumn getCatalogColumn( long namespaceId, long tableId, String c @Override - public long createNamespace( String name, long databaseId, NamespaceType type, int userId, boolean ifNotExists, boolean replace ) throws NamespaceAlreadyExistsException { + public long createNamespace( String name, NamespaceType type, boolean ifNotExists, boolean replace ) throws NamespaceAlreadyExistsException { name = name.toLowerCase(); // Check if there is already a schema with this name if ( catalog.checkIfExistsNamespace( name ) ) { @@ -253,7 +253,7 @@ private void handleSource( DataSource adapter ) { tableName += i; } - long tableId = catalog.getLogicalRel( defaultNamespaceId ).addTable( tableName, 1, EntityType.SOURCE, !(adapter).isDataReadOnly() ); + long tableId = catalog.getLogicalRel( defaultNamespaceId ).addTable( tableName, EntityType.SOURCE, !(adapter).isDataReadOnly() ); List primaryKeyColIds = new ArrayList<>(); int colPos = 1; String physicalSchemaName = null; @@ -271,13 +271,12 @@ private void handleSource( DataSource adapter ) { exportedColumn.cardinality, exportedColumn.nullable, Collation.getDefaultCollation() ); - catalog.getAllocRel( defaultNamespaceId ).addColumnPlacement( + catalog.getAllocRel( defaultNamespaceId ).addColumnPlacement( catalog.getLogicalEntity( tableId ).unwrap( LogicalTable.class ), adapter.getAdapterId(), columnId, PlacementType.STATIC, exportedColumn.physicalSchemaName, - exportedColumn.physicalTableName, - exportedColumn.physicalColumnName ); // Not a valid partitionGroupID --> placeholder + exportedColumn.physicalTableName, exportedColumn.physicalColumnName, exportedColumn.physicalPosition ); // Not a valid partitionGroupID --> placeholder catalog.getAllocRel( defaultNamespaceId ).updateColumnPlacementPhysicalPosition( adapter.getAdapterId(), columnId, exportedColumn.physicalPosition ); if ( exportedColumn.primary ) { primaryKeyColIds.add( columnId ); @@ -483,13 +482,12 @@ public void addColumnToSourceTable( LogicalTable catalogTable, String columnPhys LogicalColumn addedColumn = catalog.getLogicalRel( catalogTable.namespaceId ).getColumn( columnId ); // Add column placement - catalog.getAllocRel( catalogTable.namespaceId ).addColumnPlacement( + catalog.getAllocRel( catalogTable.namespaceId ).addColumnPlacement( catalogTable, adapterId, addedColumn.id, PlacementType.STATIC, exportedColumn.physicalSchemaName, - exportedColumn.physicalTableName, - exportedColumn.physicalColumnName );//Not a valid partitionID --> placeholder + exportedColumn.physicalTableName, exportedColumn.physicalColumnName, position );//Not a valid partitionID --> placeholder // Set column position catalog.getAllocRel( catalogTable.namespaceId ).updateColumnPlacementPhysicalPosition( adapterId, columnId, exportedColumn.physicalPosition ); @@ -557,13 +555,12 @@ public void addColumn( String columnName, LogicalTable catalogTable, String befo // Add column on underlying data stores and insert default value for ( DataStore store : stores ) { - catalog.getAllocRel( catalogTable.namespaceId ).addColumnPlacement( + catalog.getAllocRel( catalogTable.namespaceId ).addColumnPlacement( catalogTable, store.getAdapterId(), - addedColumn.id, - PlacementType.AUTOMATIC, // Will be set later - null, // Will be set later - null, // Will be set later - null );//Not a valid partitionID --> placeholder + addedColumn.id, // 
Will be set later + PlacementType.AUTOMATIC, // Will be set later + null, // Will be set later + null, null, position );//Not a valid partitionID --> placeholder AdapterManager.getInstance().getStore( store.getAdapterId() ).addColumn( statement.getPrepareContext(), catalogTable, addedColumn ); } @@ -858,26 +855,24 @@ else if ( partitionGroupIds.isEmpty() && partitionGroupNames.isEmpty() ) { // Create column placements for ( long cid : columnIds ) { - catalog.getAllocRel( catalogTable.namespaceId ).addColumnPlacement( + catalog.getAllocRel( catalogTable.namespaceId ).addColumnPlacement( catalogTable, dataStore.getAdapterId(), cid, PlacementType.MANUAL, null, - null, - null ); + null, null, 0 ); addedColumns.add( catalog.getLogicalRel( catalogTable.namespaceId ).getColumn( cid ) ); } // Check if placement includes primary key columns CatalogPrimaryKey primaryKey = catalog.getLogicalRel( catalogTable.namespaceId ).getPrimaryKey( catalogTable.primaryKey ); for ( long cid : primaryKey.columnIds ) { if ( !columnIds.contains( cid ) ) { - catalog.getAllocRel( catalogTable.namespaceId ).addColumnPlacement( + catalog.getAllocRel( catalogTable.namespaceId ).addColumnPlacement( catalogTable, dataStore.getAdapterId(), cid, PlacementType.AUTOMATIC, null, - null, - null ); + null, null, 0 ); addedColumns.add( catalog.getLogicalRel( catalogTable.namespaceId ).getColumn( cid ) ); } } @@ -929,13 +924,12 @@ public void addPrimaryKey( LogicalTable catalogTable, List columnNames, for ( CatalogColumnPlacement ccp : oldPkPlacements ) { for ( long columnId : columnIds ) { if ( !catalog.getAllocRel( catalogTable.namespaceId ).checkIfExistsColumnPlacement( ccp.adapterId, columnId ) ) { - catalog.getAllocRel( catalogTable.namespaceId ).addColumnPlacement( + catalog.getAllocRel( catalogTable.namespaceId ).addColumnPlacement( catalogTable, ccp.adapterId, - columnId, - PlacementType.AUTOMATIC, // Will be set later - null, // Will be set later - null, // Will be set later - null ); + columnId, // Will be set later + PlacementType.AUTOMATIC, // Will be set later + null, // Will be set later + null, null, 0 ); AdapterManager.getInstance().getStore( ccp.adapterId ).addColumn( statement.getPrepareContext(), catalog.getLogicalRel( catalogTable.namespaceId ).getTable( ccp.tableId ), @@ -1415,13 +1409,12 @@ else if ( !partitionGroupNames.isEmpty() && partitionGroupIds.isEmpty() ) { } } else { // Create column placement - catalog.getAllocRel( catalogTable.namespaceId ).addColumnPlacement( + catalog.getAllocRel( catalogTable.namespaceId ).addColumnPlacement( catalogTable, storeInstance.getAdapterId(), cid, PlacementType.MANUAL, null, - null, - null ); + null, null, 0 ); // Add column on store storeInstance.addColumn( statement.getPrepareContext(), catalogTable, catalog.getLogicalRel( catalogTable.namespaceId ).getColumn( cid ) ); // Add to list of columns for which we need to copy data @@ -1569,13 +1562,12 @@ public void addColumnPlacement( LogicalTable catalogTable, String columnName, Da } } else { // Create column placement - catalog.getAllocRel( catalogTable.namespaceId ).addColumnPlacement( + catalog.getAllocRel( catalogTable.namespaceId ).addColumnPlacement( catalogTable, storeInstance.getAdapterId(), logicalColumn.id, PlacementType.MANUAL, null, - null, - null ); + null, null, 0 ); // Add column on store storeInstance.addColumn( statement.getPrepareContext(), catalogTable, logicalColumn ); // Copy the data to the newly added column placements @@ -1710,7 +1702,6 @@ public void createView( String viewName, long namespaceId, 
AlgNode algNode, AlgC long tableId = catalog.getLogicalRel( namespaceId ).addView( viewName, namespaceId, - statement.getPrepareContext().getCurrentUserId(), EntityType.VIEW, false, algNode, @@ -1778,7 +1769,6 @@ public void createMaterializedView( String viewName, long namespaceId, AlgRoot a long tableId = catalog.getLogicalRel( namespaceId ).addMaterializedView( viewName, namespaceId, - statement.getPrepareContext().getCurrentUserId(), EntityType.MATERIALIZED_VIEW, false, algRoot.alg, @@ -1818,13 +1808,12 @@ public void createMaterializedView( String viewName, long namespaceId, AlgRoot a for ( DataStore s : stores ) { long adapterId = s.getAdapterId(); - catalog.getAllocRel( namespaceId ).addColumnPlacement( + catalog.getAllocRel( namespaceId ).addColumnPlacement( catalog.getLogicalEntity( tableId ).unwrap( LogicalTable.class ), s.getAdapterId(), columnId, placementType, null, - null, - null ); + null, null, 0 ); List logicalColumns; if ( addedColumns.containsKey( adapterId ) ) { @@ -1912,12 +1901,12 @@ public long addGraphPlacement( long graphId, List stores, boolean onl LogicalGraph graph = catalog.getLogicalGraph( graphId ).getGraph( graphId ); Catalog.getInstance().getSnapshot( 0 ); - List preExistingPlacements = graph.placements + List preExistingPlacements = graph.placements .stream() .filter( p -> !stores.stream().map( Adapter::getAdapterId ).collect( Collectors.toList() ).contains( p ) ) .collect( Collectors.toList() ); - Integer existingAdapterId = preExistingPlacements.isEmpty() ? null : preExistingPlacements.get( 0 ); + Long existingAdapterId = preExistingPlacements.isEmpty() ? null : preExistingPlacements.get( 0 ); for ( DataStore store : stores ) { catalog.getAllocGraph( graphId ).addGraphPlacement( store.getAdapterId(), graphId ); @@ -2056,7 +2045,7 @@ public void removeGraph( long graphId, boolean ifExists, Statement statement ) { return; } - for ( int adapterId : graph.placements ) { + for ( long adapterId : graph.placements ) { CatalogGraphPlacement placement = catalog.getAllocGraph( graphId ).getGraphPlacement( graphId, adapterId ); AdapterManager.getInstance().getStore( adapterId ).dropGraph( statement.getPrepareContext(), placement ); } @@ -2193,7 +2182,6 @@ public void createTable( long namespaceId, String name, List f long tableId = catalog.getLogicalRel( namespaceId ).addTable( name, - statement.getPrepareContext().getCurrentUserId(), EntityType.ENTITY, true ); @@ -2255,7 +2243,6 @@ public void createCollection( long namespaceId, String name, boolean ifNotExists catalog.getLogicalDoc( namespaceId ).addCollection( collectionId, name, - statement.getPrepareContext().getCurrentUserId(), EntityType.ENTITY, true ); @@ -2298,7 +2285,7 @@ private boolean assertEntityExists( long namespaceId, String name, boolean ifNot public void dropCollection( LogicalCollection catalogCollection, Statement statement ) { AdapterManager manager = AdapterManager.getInstance(); - for ( Integer adapterId : catalogCollection.placements ) { + for ( long adapterId : catalogCollection.placements ) { DataStore store = (DataStore) manager.getAdapter( adapterId ); store.dropCollection( statement.getPrepareContext(), catalogCollection ); @@ -2725,7 +2712,7 @@ public void removePartitioning( LogicalTable partitionedTable, Statement stateme Map> placementDistribution = new HashMap<>(); PartitionManagerFactory partitionManagerFactory = PartitionManagerFactory.getInstance(); PartitionManager partitionManager = partitionManagerFactory.getPartitionManager( partitionedTable.partitionProperty.partitionType 
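
addGraphPlacement above chooses the source for copying graph data by filtering out placements that live on the stores currently being added and taking the first survivor; with adapter ids now Long, null doubles as the "no pre-existing placement" marker. The same selection in miniature, with made-up ids:

    import java.util.List;
    import java.util.stream.Collectors;

    class PlacementDemo {
        public static void main( String[] args ) {
            List<Long> placements = List.of( 1L, 2L );
            List<Long> newStoreIds = List.of( 2L, 3L );
            // Placements already on other stores survive the filter;
            // the first survivor seeds the data copy.
            List<Long> preExisting = placements.stream()
                    .filter( p -> !newStoreIds.contains( p ) )
                    .collect( Collectors.toList() );
            Long existingAdapterId = preExisting.isEmpty() ? null : preExisting.get( 0 );
            System.out.println( existingAdapterId );  // 1
        }
    }
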
); - placementDistribution = partitionManager.getRelevantPlacements( partitionedTable, partitionedTable.partitionProperty.partitionIds, new ArrayList<>( Arrays.asList( -1 ) ) ); + placementDistribution = partitionManager.getRelevantPlacements( partitionedTable, partitionedTable.partitionProperty.partitionIds, new ArrayList<>( List.of( -1L ) ) ); // Update catalog table catalog.getAllocRel( partitionedTable.namespaceId ).mergeTable( tableId ); @@ -2856,13 +2843,12 @@ private void addColumn( long namespaceId, String columnName, ColumnTypeInformati addDefaultValue( namespaceId, defaultValue, addedColumnId ); for ( DataStore s : stores ) { - catalog.getAllocRel( namespaceId ).addColumnPlacement( + catalog.getAllocRel( namespaceId ).addColumnPlacement( catalog.getLogicalEntity( tableId ).unwrap( LogicalTable.class ), s.getAdapterId(), addedColumnId, placementType, null, - null, - null ); + null, null, position ); } } diff --git a/dbms/src/main/java/org/polypheny/db/partition/AbstractPartitionManager.java b/dbms/src/main/java/org/polypheny/db/partition/AbstractPartitionManager.java index 17e629e86f..9860b90452 100644 --- a/dbms/src/main/java/org/polypheny/db/partition/AbstractPartitionManager.java +++ b/dbms/src/main/java/org/polypheny/db/partition/AbstractPartitionManager.java @@ -59,7 +59,7 @@ public boolean probePartitionGroupDistributionChange( LogicalTable catalogTable, @Override - public Map> getRelevantPlacements( LogicalTable catalogTable, List partitionIds, List excludedAdapters ) { + public Map> getRelevantPlacements( LogicalTable catalogTable, List partitionIds, List excludedAdapters ) { Catalog catalog = Catalog.getInstance(); Map> placementDistribution = new HashMap<>(); @@ -76,7 +76,7 @@ public Map> getRelevantPlacements( LogicalTab // Get first column placement which contains partition relevantCcps.add( ccps.get( 0 ) ); if ( log.isDebugEnabled() ) { - log.debug( "{} {} with part. {}", ccps.get( 0 ).adapterUniqueName, ccps.get( 0 ).getLogicalColumnName(), partitionId ); + log.debug( "{} with part. 
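
The switch from Arrays.asList( -1 ) to List.of( -1L ) in removePartitioning above is more than cosmetic: excludedAdapters is now a List<Long>, and an unsuffixed -1 autoboxes to Integer, which never equals a Long, so the sentinel would silently stop matching. A quick JDK-only demonstration:

    import java.util.Arrays;
    import java.util.List;

    class SentinelDemo {
        public static void main( String[] args ) {
            List<Long> excluded = List.of( -1L );
            System.out.println( excluded.contains( -1 ) );   // false: Integer never equals Long
            System.out.println( excluded.contains( -1L ) );  // true
            // Arrays.asList( -1 ) produces a List<Integer>, not a List<Long>.
            System.out.println( Arrays.asList( -1 ) );
        }
    }
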
{}", ccps.get( 0 ).getLogicalColumnName(), partitionId ); } } } diff --git a/dbms/src/main/java/org/polypheny/db/partition/ListPartitionManager.java b/dbms/src/main/java/org/polypheny/db/partition/ListPartitionManager.java index 67630ac829..01a1ab153f 100644 --- a/dbms/src/main/java/org/polypheny/db/partition/ListPartitionManager.java +++ b/dbms/src/main/java/org/polypheny/db/partition/ListPartitionManager.java @@ -45,7 +45,7 @@ public long getTargetPartitionId( LogicalTable catalogTable, String columnValue long selectedPartitionId = -1; // Process all accumulated CatalogPartitions - for ( CatalogPartition catalogPartition : Catalog.getInstance().getPartitionsByTable( catalogTable.id ) ) { + for ( CatalogPartition catalogPartition : Catalog.getInstance().getAllocRel( catalogTable.namespaceId ).getPartitionsByTable( catalogTable.id ) ) { if ( unboundPartitionId == -1 && catalogPartition.isUnbound ) { unboundPartitionId = catalogPartition.id; break; diff --git a/dbms/src/main/java/org/polypheny/db/partition/RangePartitionManager.java b/dbms/src/main/java/org/polypheny/db/partition/RangePartitionManager.java index 89b5e96231..63b9937576 100644 --- a/dbms/src/main/java/org/polypheny/db/partition/RangePartitionManager.java +++ b/dbms/src/main/java/org/polypheny/db/partition/RangePartitionManager.java @@ -47,7 +47,7 @@ public long getTargetPartitionId( LogicalTable catalogTable, String columnValue long selectedPartitionId = -1; // Process all accumulated CatalogPartitions - for ( CatalogPartition catalogPartition : Catalog.getInstance().getPartitionsByTable( catalogTable.id ) ) { + for ( CatalogPartition catalogPartition : Catalog.getInstance().getAllocRel( catalogTable.namespaceId ).getPartitionsByTable( catalogTable.id ) ) { if ( unboundPartitionId == -1 && catalogPartition.isUnbound ) { unboundPartitionId = catalogPartition.id; break; diff --git a/dbms/src/main/java/org/polypheny/db/partition/TemperatureAwarePartitionManager.java b/dbms/src/main/java/org/polypheny/db/partition/TemperatureAwarePartitionManager.java index b11de38836..0d51715c8b 100644 --- a/dbms/src/main/java/org/polypheny/db/partition/TemperatureAwarePartitionManager.java +++ b/dbms/src/main/java/org/polypheny/db/partition/TemperatureAwarePartitionManager.java @@ -50,7 +50,7 @@ public long getTargetPartitionId( LogicalTable catalogTable, String columnValue @Override - public Map> getRelevantPlacements( LogicalTable catalogTable, List partitionIds, List excludedAdapters ) { + public Map> getRelevantPlacements( LogicalTable catalogTable, List partitionIds, List excludedAdapters ) { // Get partition manager PartitionManagerFactory partitionManagerFactory = PartitionManagerFactory.getInstance(); PartitionManager partitionManager = partitionManagerFactory.getPartitionManager( diff --git a/dbms/src/main/java/org/polypheny/db/processing/AbstractQueryProcessor.java b/dbms/src/main/java/org/polypheny/db/processing/AbstractQueryProcessor.java index 9029c67274..cc9fcc0472 100644 --- a/dbms/src/main/java/org/polypheny/db/processing/AbstractQueryProcessor.java +++ b/dbms/src/main/java/org/polypheny/db/processing/AbstractQueryProcessor.java @@ -1328,7 +1328,7 @@ private Map> getAccessedPartitionsPerScan( AlgNode alg, Map< "TableID: {} is partitioned on column: {} - {}", catalogTable.id, catalogTable.partitionProperty.partitionColumnId, - Catalog.getInstance().getColumn( catalogTable.partitionProperty.partitionColumnId ).name ); + Catalog.getInstance().getLogicalRel( catalogTable.namespaceId ).getColumn( 
catalogTable.partitionProperty.partitionColumnId ).name ); } List identifiedPartitions = new ArrayList<>(); for ( String partitionValue : partitionValues ) { diff --git a/dbms/src/main/java/org/polypheny/db/processing/ConstraintEnforceAttacher.java b/dbms/src/main/java/org/polypheny/db/processing/ConstraintEnforceAttacher.java index f8b4346bc6..bf76dd2343 100644 --- a/dbms/src/main/java/org/polypheny/db/processing/ConstraintEnforceAttacher.java +++ b/dbms/src/main/java/org/polypheny/db/processing/ConstraintEnforceAttacher.java @@ -59,7 +59,6 @@ import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.exceptions.GenericCatalogException; import org.polypheny.db.catalog.exceptions.UnknownColumnException; -import org.polypheny.db.catalog.exceptions.UnknownDatabaseException; import org.polypheny.db.catalog.exceptions.UnknownSchemaException; import org.polypheny.db.catalog.exceptions.UnknownUserException; import org.polypheny.db.catalog.logistic.ConstraintType; @@ -203,12 +202,12 @@ public static AlgRoot enforceConstraintBeforeQuery( AlgRoot logicalRoot, Stateme final List foreignKeys; final List exportedKeys; table = root.getEntity().unwrap( LogicalTable.class ); - primaryKey = catalog.getPrimaryKey( table.primaryKey ); - constraints = new ArrayList<>( Catalog.getInstance().getConstraints( table.id ) ); - foreignKeys = Catalog.getInstance().getForeignKeys( table.id ); - exportedKeys = Catalog.getInstance().getExportedKeys( table.id ); + primaryKey = catalog.getLogicalRel( table.namespaceId ).getPrimaryKey( table.primaryKey ); + constraints = new ArrayList<>( Catalog.getInstance().getLogicalRel( table.namespaceId ).getConstraints( table.id ) ); + foreignKeys = Catalog.getInstance().getLogicalRel( table.namespaceId ).getForeignKeys( table.id ); + exportedKeys = Catalog.getInstance().getLogicalRel( table.namespaceId ).getExportedKeys( table.id ); // Turn primary key into an artificial unique constraint - CatalogPrimaryKey pk = Catalog.getInstance().getPrimaryKey( table.primaryKey ); + CatalogPrimaryKey pk = Catalog.getInstance().getLogicalRel( table.namespaceId ).getPrimaryKey( table.primaryKey ); final CatalogConstraint pkc = new CatalogConstraint( 0L, pk.id, ConstraintType.UNIQUE, "PRIMARY KEY", pk ); constraints.add( pkc ); @@ -481,14 +480,14 @@ public static AlgRoot enforceConstraintBeforeQuery( AlgRoot logicalRoot, Stateme AlgNode input = root.getInput().accept( new DeepCopyShuttle() ); final List projects = new ArrayList<>( foreignKey.columnIds.size() ); final List foreignProjects = new ArrayList<>( foreignKey.columnIds.size() ); - final LogicalTable foreignTable = Catalog.getInstance().getTable( foreignKey.referencedKeyTableId ); + final LogicalTable foreignTable = Catalog.getInstance().getLogicalRel( table.namespaceId ).getTable( foreignKey.referencedKeyTableId ); builder.push( input ); for ( int i = 0; i < foreignKey.columnIds.size(); ++i ) { final String columnName = foreignKey.getColumnNames().get( i ); final String foreignColumnName = foreignKey.getReferencedKeyColumnNames().get( i ); final LogicalColumn foreignColumn; try { - foreignColumn = Catalog.getInstance().getColumn( foreignTable.id, foreignColumnName ); + foreignColumn = Catalog.getInstance().getLogicalRel( table.namespaceId ).getColumn( foreignTable.id, foreignColumnName ); } catch ( UnknownColumnException e ) { throw new RuntimeException( e ); } @@ -557,14 +556,14 @@ public RexNode visitFieldAccess( RexFieldAccess fieldAccess ) { } final List projects = new ArrayList<>( 
foreignKey.columnIds.size() ); final List foreignProjects = new ArrayList<>( foreignKey.columnIds.size() ); - final LogicalTable foreignTable = Catalog.getInstance().getTable( foreignKey.tableId ); + final LogicalTable foreignTable = Catalog.getInstance().getLogicalRel( table.namespaceId ).getTable( foreignKey.tableId ); for ( int i = 0; i < foreignKey.columnIds.size(); ++i ) { final String columnName = foreignKey.getReferencedKeyColumnNames().get( i ); final String foreignColumnName = foreignKey.getColumnNames().get( i ); final LogicalColumn column, foreignColumn; try { - column = Catalog.getInstance().getColumn( table.id, columnName ); - foreignColumn = Catalog.getInstance().getColumn( foreignTable.id, foreignColumnName ); + column = Catalog.getInstance().getLogicalRel( table.namespaceId ).getColumn( table.id, columnName ); + foreignColumn = Catalog.getInstance().getLogicalRel( table.namespaceId ).getColumn( foreignTable.id, foreignColumnName ); } catch ( UnknownColumnException e ) { throw new RuntimeException( e ); } @@ -654,8 +653,9 @@ private boolean testConstraintsValid() { try { List tables = Catalog .getInstance() - .getTables( null, null ) + .getNamespaces( null ) .stream() + .flatMap( n -> Catalog.getInstance().getLogicalRel( n.id ).getTables( null ).stream() ) .filter( t -> t.entityType == EntityType.ENTITY && t.getNamespaceType() == NamespaceType.RELATIONAL ) .collect( Collectors.toList() ); Transaction transaction = this.manager.startTransaction( Catalog.defaultUserId, false, "ConstraintEnforcement" ); @@ -683,7 +683,7 @@ private boolean testConstraintsValid() { } - } catch ( UnknownDatabaseException | UnknownSchemaException | UnknownUserException | TransactionException | GenericCatalogException e ) { + } catch ( UnknownSchemaException | UnknownUserException | TransactionException | GenericCatalogException e ) { return false; } } diff --git a/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java b/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java index f7103e818f..039a305182 100644 --- a/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java +++ b/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java @@ -65,7 +65,6 @@ import org.polypheny.db.rex.RexNode; import org.polypheny.db.routing.RoutingManager; import org.polypheny.db.schema.ModelTrait; -import org.polypheny.db.schema.PolySchemaBuilder; import org.polypheny.db.schema.graph.PolyGraph; import org.polypheny.db.tools.AlgBuilder; import org.polypheny.db.transaction.Statement; @@ -79,7 +78,7 @@ public class DataMigratorImpl implements DataMigrator { @Override - public void copyGraphData( LogicalGraph target, Transaction transaction, Integer existingAdapterId, CatalogAdapter to ) { + public void copyGraphData( LogicalGraph target, Transaction transaction, Long existingAdapterId, CatalogAdapter to ) { Statement statement = transaction.createStatement(); AlgBuilder builder = AlgBuilder.create( statement ); @@ -164,20 +163,20 @@ private static LogicalLpgValues getLogicalLpgValues( AlgBuilder builder, PolyGra @Override public void copyData( Transaction transaction, CatalogAdapter store, List columns, List partitionIds ) { - LogicalTable table = Catalog.getInstance().getTable( columns.get( 0 ).tableId ); - CatalogPrimaryKey primaryKey = Catalog.getInstance().getPrimaryKey( table.primaryKey ); + LogicalTable table = Catalog.getInstance().getLogicalRel( columns.get( 0 ).namespaceId ).getTable( columns.get( 0 ).tableId ); + CatalogPrimaryKey primaryKey = 
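
Since the catalog no longer offers a global getTables, testConstraintsValid above streams every namespace and flat-maps it into its relational tables before filtering. The same traversal in miniature, with hypothetical data (Java 16+ for the record syntax):

    import java.util.List;
    import java.util.stream.Collectors;

    class FlatMapDemo {
        record Namespace( long id, List<String> tables ) {}

        public static void main( String[] args ) {
            List<Namespace> namespaces = List.of(
                    new Namespace( 0, List.of( "depts", "emps" ) ),
                    new Namespace( 1, List.of( "doc_users" ) ) );
            // Mirrors the testConstraintsValid() change: per-namespace table lists
            // are flattened into one stream before filtering.
            List<String> all = namespaces.stream()
                    .flatMap( n -> n.tables().stream() )
                    .collect( Collectors.toList() );
            System.out.println( all );  // [depts, emps, doc_users]
        }
    }
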
Catalog.getInstance().getLogicalRel( table.namespaceId ).getPrimaryKey( table.primaryKey ); // Check Lists List targetColumnPlacements = new LinkedList<>(); for ( LogicalColumn logicalColumn : columns ) { - targetColumnPlacements.add( Catalog.getInstance().getColumnPlacement( store.id, logicalColumn.id ) ); + targetColumnPlacements.add( Catalog.getInstance().getAllocRel( logicalColumn.namespaceId ).getColumnPlacement( store.id, logicalColumn.id ) ); } List selectColumnList = new LinkedList<>( columns ); // Add primary keys to select column list for ( long cid : primaryKey.columnIds ) { - LogicalColumn logicalColumn = Catalog.getInstance().getColumn( cid ); + LogicalColumn logicalColumn = Catalog.getInstance().getLogicalRel( table.namespaceId ).getColumn( cid ); if ( !selectColumnList.contains( logicalColumn ) ) { selectColumnList.add( logicalColumn ); } @@ -203,7 +202,7 @@ public void copyData( Transaction transaction, CatalogAdapter store, List selectColumnList, AlgRoot sourceAl @Override public AlgRoot buildDeleteStatement( Statement statement, List to, long partitionId ) { - List qualifiedTableName = ImmutableList.of( - PolySchemaBuilder.buildAdapterSchemaName( - to.get( 0 ).adapterUniqueName, - to.get( 0 ).getLogicalSchemaName(), - to.get( 0 ).physicalSchemaName ), - to.get( 0 ).getLogicalTableName() + "_" + partitionId ); PhysicalTable physical = statement.getTransaction().getSnapshot().getPhysicalTable( partitionId ); ModifiableEntity modifiableTable = physical.unwrap( ModifiableEntity.class ); @@ -332,7 +325,7 @@ public AlgRoot buildDeleteStatement( Statement statement, List columnNames = new LinkedList<>(); List values = new LinkedList<>(); for ( CatalogColumnPlacement ccp : to ) { - LogicalColumn logicalColumn = Catalog.getInstance().getColumn( ccp.columnId ); + LogicalColumn logicalColumn = Catalog.getInstance().getLogicalRel( ccp.namespaceId ).getColumn( ccp.columnId ); columnNames.add( ccp.getLogicalColumnName() ); values.add( new RexDynamicParam( logicalColumn.getAlgDataType( typeFactory ), (int) logicalColumn.id ) ); } @@ -356,12 +349,6 @@ public AlgRoot buildDeleteStatement( Statement statement, List to, long partitionId ) { - List qualifiedTableName = ImmutableList.of( - PolySchemaBuilder.buildAdapterSchemaName( - to.get( 0 ).adapterUniqueName, - to.get( 0 ).getLogicalSchemaName(), - to.get( 0 ).physicalSchemaName ), - to.get( 0 ).getLogicalTableName() + "_" + partitionId ); PhysicalTable physical = statement.getTransaction().getSnapshot().getPhysicalTable( partitionId ); ModifiableEntity modifiableTable = physical.unwrap( ModifiableEntity.class ); @@ -377,7 +364,7 @@ public AlgRoot buildInsertStatement( Statement statement, List columnNames = new LinkedList<>(); List values = new LinkedList<>(); for ( CatalogColumnPlacement ccp : placements ) { - LogicalColumn logicalColumn = Catalog.getInstance().getColumn( ccp.columnId ); + LogicalColumn logicalColumn = Catalog.getInstance().getLogicalRel( ccp.namespaceId ).getColumn( ccp.columnId ); columnNames.add( ccp.getLogicalColumnName() ); values.add( new RexDynamicParam( logicalColumn.getAlgDataType( typeFactory ), (int) logicalColumn.id ) ); } @@ -399,12 +386,6 @@ public AlgRoot buildInsertStatement( Statement statement, List to, long partitionId ) { - List qualifiedTableName = ImmutableList.of( - PolySchemaBuilder.buildAdapterSchemaName( - to.get( 0 ).adapterUniqueName, - to.get( 0 ).getLogicalSchemaName(), - to.get( 0 ).physicalSchemaName ), - to.get( 0 ).getLogicalTableName() + "_" + partitionId ); PhysicalTable physical = 
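
The copyData rewrite above separates two concerns the old flat catalog mixed together: logical metadata (getLogicalRel: tables, columns, keys) versus allocation metadata (getAllocRel: where columns are physically placed), and it widens adapter ids from int/Integer to long/Long. A hedged sketch of the split; method names are taken from the diff, but these interfaces and bodies are placeholders:

    interface LogicalRelSketch {
        Object getColumn( long columnId );                           // what a column is
    }

    interface AllocRelSketch {
        Object getColumnPlacement( long adapterId, long columnId );  // where it lives
    }

    class CatalogSplit {
        LogicalRelSketch getLogicalRel( long namespaceId ) { throw new UnsupportedOperationException(); }
        AllocRelSketch getAllocRel( long namespaceId ) { throw new UnsupportedOperationException(); }
    }
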
statement.getTransaction().getSnapshot().getPhysicalTable( partitionId ); ModifiableEntity modifiableTable = physical.unwrap( ModifiableEntity.class ); @@ -414,15 +395,15 @@ private AlgRoot buildUpdateStatement( Statement statement, List columnNames = new LinkedList<>(); List values = new LinkedList<>(); for ( CatalogColumnPlacement ccp : to ) { - LogicalColumn logicalColumn = Catalog.getInstance().getColumn( ccp.columnId ); + LogicalColumn logicalColumn = Catalog.getInstance().getLogicalRel( ccp.namespaceId ).getColumn( ccp.columnId ); columnNames.add( ccp.getLogicalColumnName() ); values.add( new RexDynamicParam( logicalColumn.getAlgDataType( typeFactory ), (int) logicalColumn.id ) ); } @@ -477,12 +458,12 @@ public AlgRoot getSourceIterator( Statement statement, Map selectSourcePlacements( LogicalTable table, List columns, int excludingAdapterId ) { + public static List selectSourcePlacements( LogicalTable table, List columns, long excludingAdapterId ) { // Find the adapter with the most column placements Catalog catalog = Catalog.getInstance(); - int adapterIdWithMostPlacements = -1; + long adapterIdWithMostPlacements = -1; int numOfPlacements = 0; - for ( Entry> entry : catalog.getColumnPlacementsByAdapter( table.id ).entrySet() ) { + for ( Entry> entry : catalog.getAllocRel( table.namespaceId ).getColumnPlacementsByAdapter( table.id ).entrySet() ) { if ( entry.getKey() != excludingAdapterId && entry.getValue().size() > numOfPlacements ) { adapterIdWithMostPlacements = entry.getKey(); numOfPlacements = entry.getValue().size(); @@ -496,12 +477,12 @@ public static List selectSourcePlacements( LogicalTable // Take the adapter with most placements as base and add missing column placements List placementList = new LinkedList<>(); - for ( long cid : table.fieldIds ) { - if ( columnIds.contains( cid ) ) { - if ( catalog.getDataPlacement( adapterIdWithMostPlacements, table.id ).columnPlacementsOnAdapter.contains( cid ) ) { - placementList.add( catalog.getColumnPlacement( adapterIdWithMostPlacements, cid ) ); + for ( LogicalColumn column : table.columns ) { + if ( columnIds.contains( column.id ) ) { + if ( catalog.getAllocRel( column.namespaceId ).getDataPlacement( adapterIdWithMostPlacements, table.id ).columnPlacementsOnAdapter.contains( column.id ) ) { + placementList.add( catalog.getAllocRel( column.namespaceId ).getColumnPlacement( adapterIdWithMostPlacements, column.id ) ); } else { - for ( CatalogColumnPlacement placement : catalog.getColumnPlacement( cid ) ) { + for ( CatalogColumnPlacement placement : catalog.getAllocRel( column.namespaceId ).getColumnPlacements( column.id ) ) { if ( placement.adapterId != excludingAdapterId ) { placementList.add( placement ); break; @@ -529,19 +510,19 @@ public static List selectSourcePlacements( LogicalTable */ @Override public void copySelectiveData( Transaction transaction, CatalogAdapter store, LogicalTable sourceTable, LogicalTable targetTable, List columns, Map> placementDistribution, List targetPartitionIds ) { - CatalogPrimaryKey sourcePrimaryKey = Catalog.getInstance().getPrimaryKey( sourceTable.primaryKey ); + CatalogPrimaryKey sourcePrimaryKey = Catalog.getInstance().getLogicalRel( sourceTable.namespaceId ).getPrimaryKey( sourceTable.primaryKey ); // Check Lists List targetColumnPlacements = new LinkedList<>(); for ( LogicalColumn logicalColumn : columns ) { - targetColumnPlacements.add( Catalog.getInstance().getColumnPlacement( store.id, logicalColumn.id ) ); + targetColumnPlacements.add( Catalog.getInstance().getAllocRel( 
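
selectSourcePlacements, shown above with its new long adapter ids, keeps its old heuristic: start from the adapter that already holds the most of the table's columns (skipping the adapter being excluded), then patch in any still-missing columns from other adapters. A standalone restatement of the first step, with simplified map types:

    import java.util.List;
    import java.util.Map;

    class MostPlacementsFirst {

        // columnsByAdapter: adapterId -> ids of the columns placed on that adapter.
        static long pickBaseAdapter( Map<Long, List<Long>> columnsByAdapter, long excludedAdapterId ) {
            long best = -1;
            int bestCount = 0;
            for ( Map.Entry<Long, List<Long>> e : columnsByAdapter.entrySet() ) {
                if ( e.getKey() != excludedAdapterId && e.getValue().size() > bestCount ) {
                    best = e.getKey();
                    bestCount = e.getValue().size();
                }
            }
            return best;   // stays -1 if no other adapter holds any placement
        }
    }
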
sourceTable.namespaceId ).getColumnPlacement( store.id, logicalColumn.id ) ); } List selectColumnList = new LinkedList<>( columns ); // Add primary keys to select column list for ( long cid : sourcePrimaryKey.columnIds ) { - LogicalColumn logicalColumn = Catalog.getInstance().getColumn( cid ); + LogicalColumn logicalColumn = Catalog.getInstance().getLogicalRel( sourceTable.namespaceId ).getColumn( cid ); if ( !selectColumnList.contains( logicalColumn ) ) { selectColumnList.add( logicalColumn ); } @@ -552,7 +533,7 @@ public void copySelectiveData( Transaction transaction, CatalogAdapter store, Lo AlgRoot sourceAlg = getSourceIterator( sourceStatement, placementDistribution ); AlgRoot targetAlg; - if ( Catalog.getInstance().getColumnPlacementsOnAdapterPerTable( store.id, targetTable.id ).size() == columns.size() ) { + if ( Catalog.getInstance().getAllocRel( targetTable.namespaceId ).getColumnPlacementsOnAdapterPerTable( store.id, targetTable.id ).size() == columns.size() ) { // There have been no placements for this table on this store before. Build insert statement targetAlg = buildInsertStatement( targetStatement, targetColumnPlacements, targetPartitionIds.get( 0 ) ); } else { @@ -631,19 +612,19 @@ public void copyPartitionData( Transaction transaction, CatalogAdapter store, Lo throw new RuntimeException( "Unsupported migration scenario. Table ID mismatch" ); } - CatalogPrimaryKey primaryKey = Catalog.getInstance().getPrimaryKey( sourceTable.primaryKey ); + CatalogPrimaryKey primaryKey = Catalog.getInstance().getLogicalRel( sourceTable.namespaceId ).getPrimaryKey( sourceTable.primaryKey ); // Check Lists List targetColumnPlacements = new LinkedList<>(); for ( LogicalColumn logicalColumn : columns ) { - targetColumnPlacements.add( Catalog.getInstance().getColumnPlacement( store.id, logicalColumn.id ) ); + targetColumnPlacements.add( Catalog.getInstance().getAllocRel( sourceTable.namespaceId ).getColumnPlacement( store.id, logicalColumn.id ) ); } List selectColumnList = new LinkedList<>( columns ); // Add primary keys to select column list for ( long cid : primaryKey.columnIds ) { - LogicalColumn logicalColumn = Catalog.getInstance().getColumn( cid ); + LogicalColumn logicalColumn = Catalog.getInstance().getLogicalRel( sourceTable.namespaceId ).getColumn( cid ); if ( !selectColumnList.contains( logicalColumn ) ) { selectColumnList.add( logicalColumn ); } @@ -651,7 +632,7 @@ public void copyPartitionData( Transaction transaction, CatalogAdapter store, Lo // Add partition columns to select column list long partitionColumnId = targetTable.partitionProperty.partitionColumnId; - LogicalColumn partitionColumn = Catalog.getInstance().getColumn( partitionColumnId ); + LogicalColumn partitionColumn = Catalog.getInstance().getLogicalRel( sourceTable.namespaceId ).getColumn( partitionColumnId ); if ( !selectColumnList.contains( partitionColumn ) ) { selectColumnList.add( partitionColumn ); } @@ -675,7 +656,7 @@ public void copyPartitionData( Transaction transaction, CatalogAdapter store, Lo Map targetAlgs = new HashMap<>(); AlgRoot sourceAlg = getSourceIterator( sourceStatement, placementDistribution ); - if ( Catalog.getInstance().getColumnPlacementsOnAdapterPerTable( store.id, sourceTable.id ).size() == columns.size() ) { + if ( Catalog.getInstance().getAllocRel( sourceTable.namespaceId ).getColumnPlacementsOnAdapterPerTable( store.id, sourceTable.id ).size() == columns.size() ) { // There have been no placements for this table on this store before. 
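
copySelectiveData above (and copyPartitionData further down) picks between a fresh INSERT and an UPDATE by comparing the number of column placements already on the store with the number of columns being copied; the in-code comment reads the equal case as "no placements before". Reduced to just that branch, under the same assumption:

    class CopyMode {
        // Mirrors the branch in the diff: equal counts are treated as a first-time copy.
        static String choose( int placementsOnStore, int columnsToCopy ) {
            return placementsOnStore == columnsToCopy ? "INSERT" : "UPDATE";
        }
    }
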
Build insert statement targetPartitionIds.forEach( id -> targetAlgs.put( id, buildInsertStatement( targetStatements.get( id ), targetColumnPlacements, id ) ) ); } else { diff --git a/dbms/src/main/java/org/polypheny/db/routing/UiRoutingPageUtil.java b/dbms/src/main/java/org/polypheny/db/routing/UiRoutingPageUtil.java index 60229a88a7..d085482b0e 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/UiRoutingPageUtil.java +++ b/dbms/src/main/java/org/polypheny/db/routing/UiRoutingPageUtil.java @@ -91,13 +91,14 @@ private static void addSelectedAdapterTable( InformationManager queryAnalyzer, P ImmutableList.of( "Entity", "Field", "Partition (Group --> ID)", "Adapter", "Physical Name" ) ); if ( proposedRoutingPlan.getPhysicalPlacementsOfPartitions() != null ) { proposedRoutingPlan.getPhysicalPlacementsOfPartitions().forEach( ( k, v ) -> { - CatalogPartition catalogPartition = Catalog.getInstance().getPartition( k ); - CatalogPartitionGroup catalogPartitionGroup = Catalog.getInstance().getPartitionGroup( catalogPartition.partitionGroupId ); - LogicalTable catalogTable = Catalog.getInstance().getTable( catalogPartition.tableId ); + CatalogPartition catalogPartition = Catalog.getInstance().getAllocRel( k ).getPartition( k ); + LogicalTable catalogTable = Catalog.getInstance().getLogicalEntity( catalogPartition.tableId ).unwrap( LogicalTable.class ); + CatalogPartitionGroup catalogPartitionGroup = Catalog.getInstance().getAllocRel( catalogTable.namespaceId ).getPartitionGroup( catalogPartition.partitionGroupId ); + v.forEach( p -> { - CatalogColumnPlacement catalogColumnPlacement = Catalog.getInstance().getColumnPlacement( p.left, p.right ); - CatalogPartitionPlacement catalogPartitionPlacement = Catalog.getInstance().getPartitionPlacement( p.left, k ); - LogicalColumn logicalColumn = Catalog.getInstance().getColumn( catalogColumnPlacement.columnId ); + CatalogColumnPlacement catalogColumnPlacement = Catalog.getInstance().getAllocRel( catalogTable.namespaceId ).getColumnPlacement( p.left, p.right ); + CatalogPartitionPlacement catalogPartitionPlacement = Catalog.getInstance().getAllocRel( catalogTable.namespaceId ).getPartitionPlacement( p.left, k ); + LogicalColumn logicalColumn = Catalog.getInstance().getLogicalRel( catalogTable.namespaceId ).getColumn( catalogColumnPlacement.columnId ); table.addRow( catalogTable.getNamespaceName() + "." 
                        + catalogTable.name,
                logicalColumn.name,
diff --git a/dbms/src/main/java/org/polypheny/db/routing/dto/CachedProposedRoutingPlan.java b/dbms/src/main/java/org/polypheny/db/routing/dto/CachedProposedRoutingPlan.java
index af53dd9608..ce36d634f7 100644
--- a/dbms/src/main/java/org/polypheny/db/routing/dto/CachedProposedRoutingPlan.java
+++ b/dbms/src/main/java/org/polypheny/db/routing/dto/CachedProposedRoutingPlan.java
@@ -35,7 +35,8 @@
 @Setter
 public class CachedProposedRoutingPlan implements RoutingPlan {
 
-    public Map<Long, List<Pair<Integer, Long>>> physicalPlacementsOfPartitions; // PartitionId -> List<Pair<AdapterId, ColumnPlacementId>>
+    @Getter
+    public Map<Long, List<Pair<Long, Long>>> physicalPlacementsOfPartitions; // PartitionId -> List<Pair<AdapterId, ColumnPlacementId>>
     protected String queryClass;
     protected String physicalQueryClass;
     protected AlgOptCost preCosts;
@@ -51,9 +52,4 @@ public CachedProposedRoutingPlan( ProposedRoutingPlan routingPlan, AlgOptCost ap
     }
 
-
-    @Override
-    public Map<Long, List<Pair<Integer, Long>>> getPhysicalPlacementsOfPartitions() {
-        return this.physicalPlacementsOfPartitions;
-    }
-
 }
diff --git a/dbms/src/main/java/org/polypheny/db/routing/dto/ProposedRoutingPlanImpl.java b/dbms/src/main/java/org/polypheny/db/routing/dto/ProposedRoutingPlanImpl.java
index dfe1031e8e..577b8ba137 100644
--- a/dbms/src/main/java/org/polypheny/db/routing/dto/ProposedRoutingPlanImpl.java
+++ b/dbms/src/main/java/org/polypheny/db/routing/dto/ProposedRoutingPlanImpl.java
@@ -17,6 +17,7 @@ package org.polypheny.db.routing.dto;
 
 import java.util.Collection;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import lombok.Getter;
@@ -42,7 +43,7 @@ public class ProposedRoutingPlanImpl implements ProposedRoutingPlan {
     protected String queryClass;
     protected String physicalQueryClass;
     protected Class<? extends Router> router;
-    protected Map<Long, List<Pair<Integer, Long>>> physicalPlacementsOfPartitions; // PartitionId -> List<Pair<AdapterId, ColumnPlacementId>>
+    protected Map<Long, List<Pair<Long, Long>>> physicalPlacementsOfPartitions; // PartitionId -> List<Pair<AdapterId, ColumnPlacementId>>
     protected AlgOptCost preCosts;
@@ -113,14 +114,14 @@ public boolean equals( Object obj ) {
             return true;
         }
 
-        for ( Map.Entry<Long, List<Pair<Integer, Long>>> entry : this.physicalPlacementsOfPartitions.entrySet() ) {
+        for ( Map.Entry<Long, List<Pair<Long, Long>>> entry : this.physicalPlacementsOfPartitions.entrySet() ) {
             final Long id = entry.getKey();
-            List<Pair<Integer, Long>> values = entry.getValue();
+            List<Pair<Long, Long>> values = entry.getValue();
             if ( !other.physicalPlacementsOfPartitions.containsKey( id ) ) {
                 return false;
             } else {
-                if ( !values.containsAll( other.physicalPlacementsOfPartitions.get( id ) ) ) {
+                if ( !new HashSet<>( values ).containsAll( other.physicalPlacementsOfPartitions.get( id ) ) ) {
                     return false;
                 }
             }
diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/AbstractDqlRouter.java b/dbms/src/main/java/org/polypheny/db/routing/routers/AbstractDqlRouter.java
index 1cbaed275f..3119b5da4a 100644
--- a/dbms/src/main/java/org/polypheny/db/routing/routers/AbstractDqlRouter.java
+++ b/dbms/src/main/java/org/polypheny/db/routing/routers/AbstractDqlRouter.java
@@ -249,7 +249,7 @@ private List<RoutedAlgBuilder> handleRelationalOnGraphScan( AlgNode node, Statem
         AlgBuilder algBuilder = AlgBuilder.create( statement );
         RexBuilder rexBuilder = algBuilder.getRexBuilder();
 
-        algBuilder.lpgScan( catalog.getNamespaces( Catalog.defaultDatabaseId, new Pattern( logicalTable.getLogicalSchemaName() ) ).get( 0 ).id );
+        algBuilder.lpgScan( catalog.getNamespaces( new Pattern( logicalTable.getLogicalSchemaName() ) ).get( 0 ).id );
         algBuilder.lpgMatch( List.of( algBuilder.lpgNodeMatch( List.of( logicalTable.getLogicalTableName() ) ) ), List.of( "n" ) );
         algBuilder.lpgProject(
                 List.of(
                         rexBuilder.makeLpgGetId(),
                        rexBuilder.makeLpgPropertiesExtract(),
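
The equals() change just above is a classic complexity fix: List.containsAll performs a linear scan per probed element, so comparing two placement lists directly costs O(n * m); copying one side into a HashSet first makes each membership test expected O(1), for O(n + m) overall. It relies on Pair implementing hashCode and equals consistently, which Polypheny's Pair does. Reduced to its core:

    import java.util.HashSet;
    import java.util.List;

    class ContainsAllCost {
        static <T> boolean fastContainsAll( List<T> values, List<T> other ) {
            return new HashSet<>( values ).containsAll( other );   // expected O(n + m)
        }
    }
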
rexBuilder.makeLpgLabels() ), diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java b/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java index 924727ace9..923111a4d0 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java @@ -110,9 +110,9 @@ public abstract class BaseRouter implements Router { */ protected static Map> selectPlacement( LogicalTable table ) { // Find the adapter with the most column placements - int adapterIdWithMostPlacements = -1; + long adapterIdWithMostPlacements = -1; int numOfPlacements = 0; - for ( Entry> entry : catalog.getAllocRel( table.namespaceId ).getColumnPlacementsByAdapter( table.id ).entrySet() ) { + for ( Entry> entry : catalog.getAllocRel( table.namespaceId ).getColumnPlacementsByAdapter( table.id ).entrySet() ) { if ( entry.getValue().size() > numOfPlacements ) { adapterIdWithMostPlacements = entry.getKey(); numOfPlacements = entry.getValue().size(); @@ -123,7 +123,7 @@ protected static Map> selectPlacement( Logica List placementList = new LinkedList<>(); for ( LogicalColumn column : table.columns ) { if ( catalog.getAllocRel( table.namespaceId ).getDataPlacement( adapterIdWithMostPlacements, table.id ).columnPlacementsOnAdapter.contains( column.id ) ) { - placementList.add( Catalog.getInstance().getAllocRel( table.namespaceId ).getColumnPlacements( column.id ) ); + placementList.add( Catalog.getInstance().getAllocRel( table.namespaceId ).getColumnPlacements( column.id ).get( 0 ) ); } else { placementList.add( Catalog.getInstance().getAllocRel( table.namespaceId ).getColumnPlacements( column.id ).get( 0 ) ); } @@ -262,7 +262,7 @@ public AlgNode buildJoinedScan( Statement statement, AlgOptCluster cluster, Map< long partitionId = partitionToPlacement.getKey(); List currentPlacements = partitionToPlacement.getValue(); // Sort by adapter - Map> placementsByAdapter = new HashMap<>(); + Map> placementsByAdapter = new HashMap<>(); for ( CatalogColumnPlacement placement : currentPlacements ) { if ( !placementsByAdapter.containsKey( placement.adapterId ) ) { placementsByAdapter.put( placement.adapterId, new LinkedList<>() ); @@ -294,9 +294,9 @@ public AlgNode buildJoinedScan( Statement statement, AlgOptCluster cluster, Map< } // Add primary key - for ( Entry> entry : placementsByAdapter.entrySet() ) { + for ( Entry> entry : placementsByAdapter.entrySet() ) { for ( LogicalColumn pkColumn : pkColumns ) { - CatalogColumnPlacement pkPlacement = catalog.getAllocRel( currentPlacements.get( 0 ).namespaceId ).getColumnPlacements( pkColumn.id ); + CatalogColumnPlacement pkPlacement = catalog.getAllocRel( currentPlacements.get( 0 ).namespaceId ).getColumnPlacements( pkColumn.id ).get( 0 ); if ( !entry.getValue().contains( pkPlacement ) ) { entry.getValue().add( pkPlacement ); } @@ -320,7 +320,7 @@ public AlgNode buildJoinedScan( Statement statement, AlgOptCluster cluster, Map< ArrayList rexNodes = new ArrayList<>(); for ( CatalogColumnPlacement p : ccps ) { if ( pkColumnIds.contains( p.columnId ) ) { - String alias = ccps.get( 0 ).adapterUniqueName + "_" + p.getLogicalColumnName(); + String alias = ccps.get( 0 ).adapterId + "_" + p.getLogicalColumnName(); rexNodes.add( builder.alias( builder.field( p.getLogicalColumnName() ), alias ) ); queue.addFirst( alias ); queue.addFirst( p.getLogicalColumnName() ); @@ -386,7 +386,7 @@ private void buildFinalProject( RoutedAlgBuilder builder, List scans = new ArrayList<>(); - List placements = 
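
buildJoinedScan above buckets a partition's column placements per adapter id (now a long) before emitting one scan per adapter. The diff keeps the explicit containsKey/put idiom; the same grouping written with computeIfAbsent, using a placeholder record instead of CatalogColumnPlacement:

    import java.util.HashMap;
    import java.util.LinkedList;
    import java.util.List;
    import java.util.Map;

    class GroupPlacements {

        record Placement( long adapterId, long columnId ) {}

        static Map<Long, List<Placement>> byAdapter( List<Placement> placements ) {
            Map<Long, List<Placement>> out = new HashMap<>();
            for ( Placement p : placements ) {
                out.computeIfAbsent( p.adapterId(), k -> new LinkedList<>() ).add( p );
            }
            return out;
        }
    }
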
catalogGraph.placements; + List placements = catalogGraph.placements; if ( placementId != null ) { placements = List.of( placementId ); } @@ -428,7 +428,7 @@ public AlgNode handleGraphScan( LogicalLpgScan alg, Statement statement, @Nullab } - private AlgNode handleGraphOnRelational( LogicalLpgScan alg, CatalogNamespace namespace, Statement statement, Integer placementId ) { + private AlgNode handleGraphOnRelational( LogicalLpgScan alg, CatalogNamespace namespace, Statement statement, Long placementId ) { AlgOptCluster cluster = alg.getCluster(); List tables = catalog.getLogicalRel( namespace.id ).getTables( null ); List> scans = tables.stream() @@ -442,7 +442,7 @@ private AlgNode handleGraphOnRelational( LogicalLpgScan alg, CatalogNamespace na } - private AlgNode handleGraphOnDocument( LogicalLpgScan alg, CatalogNamespace namespace, Statement statement, Integer placementId ) { + private AlgNode handleGraphOnDocument( LogicalLpgScan alg, CatalogNamespace namespace, Statement statement, Long placementId ) { AlgOptCluster cluster = alg.getCluster(); List collections = catalog.getLogicalDoc( namespace.id ).getCollections( null ); List> scans = collections.stream() @@ -479,7 +479,7 @@ public AlgNode getRelationalScan( LogicalLpgScan alg, long adapterId, Statement } - protected CatalogEntity getSubstitutionTable( Statement statement, long tableId, long columnId, int adapterId ) { + protected CatalogEntity getSubstitutionTable( Statement statement, long tableId, long columnId, long adapterId ) { /*LogicalTable nodes = Catalog.getInstance().getTable( tableId ); CatalogColumnPlacement placement = Catalog.getInstance().getColumnPlacements( adapterId, columnId ); List qualifiedTableName = ImmutableList.of( @@ -512,7 +512,7 @@ protected AlgNode buildSubstitutionJoin( AlgNode alg, CatalogEntity nodesTable, } - protected RoutedAlgBuilder handleDocumentScan( DocumentScan alg, Statement statement, RoutedAlgBuilder builder, Integer adapterId ) { + protected RoutedAlgBuilder handleDocumentScan( DocumentScan alg, Statement statement, RoutedAlgBuilder builder, Long adapterId ) { Snapshot snapshot = statement.getTransaction().getSnapshot(); if ( alg.entity.namespaceType != NamespaceType.DOCUMENT ) { @@ -527,12 +527,12 @@ protected RoutedAlgBuilder handleDocumentScan( DocumentScan alg, Statement st List scans = new ArrayList<>(); - List placements = collection.placements; + List placements = collection.placements; if ( adapterId != null ) { placements = List.of( adapterId ); } - for ( Integer placementId : placements ) { + for ( Long placementId : placements ) { CatalogAdapter adapter = catalog.getAdapter( placementId ); NamespaceType sourceModel = collection.namespaceType; @@ -573,7 +573,7 @@ private RoutedAlgBuilder handleTransformerDocScan( DocumentScan alg, Statemen @NotNull - private RoutedAlgBuilder handleDocumentOnRelational( DocumentScan node, Integer adapterId, Statement statement, RoutedAlgBuilder builder ) { + private RoutedAlgBuilder handleDocumentOnRelational( DocumentScan node, Long adapterId, Statement statement, RoutedAlgBuilder builder ) { List columns = node.entity.columns; AlgTraitSet out = node.getTraitSet().replace( ModelTrait.RELATIONAL ); CatalogEntity subTable = getSubstitutionTable( statement, node.entity.id, columns.get( 0 ).id, adapterId ); diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/CachedPlanRouter.java b/dbms/src/main/java/org/polypheny/db/routing/routers/CachedPlanRouter.java index b9653e87fe..457692a07e 100644 --- 
a/dbms/src/main/java/org/polypheny/db/routing/routers/CachedPlanRouter.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/CachedPlanRouter.java @@ -68,7 +68,7 @@ private RoutedAlgBuilder buildCachedSelect( AlgNode node, RoutedAlgBuilder build for ( long partition : partitionIds ) { if ( cachedPlan.physicalPlacementsOfPartitions.get( partition ) != null ) { List colPlacements = cachedPlan.physicalPlacementsOfPartitions.get( partition ).stream() - .map( placementInfo -> catalog.getColumnPlacement( placementInfo.left, placementInfo.right ) ) + .map( placementInfo -> catalog.getAllocRel( catalogTable.namespaceId ).getColumnPlacement( placementInfo.left, placementInfo.right ) ) .collect( Collectors.toList() ); placement.put( partition, colPlacements ); } diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java b/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java index 21c43984e5..c38849801c 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java @@ -156,7 +156,7 @@ public AlgNode routeDml( LogicalRelModify modify, Statement statement ) { for ( CatalogColumnPlacement dataPlacement : pkPlacements ) { log.debug( "\t\t -> '{}' {}\t{}", - dataPlacement.adapterUniqueName, + dataPlacement.adapterId, catalog.getAllocRel( modify.entity.namespaceId ).getPartitionGroupsOnDataPlacement( dataPlacement.adapterId, dataPlacement.tableId ), catalog.getAllocRel( modify.entity.namespaceId ).getPartitionGroupsIndexOnDataPlacement( dataPlacement.adapterId, dataPlacement.tableId ) ); } @@ -684,19 +684,19 @@ public AlgNode handleBatchIterator( AlgNode alg, Statement statement, LogicalQue @Override - public AlgNode routeDocumentDml( LogicalDocumentModify alg, Statement statement, LogicalQueryInformation queryInformation, Integer adapterId ) { + public AlgNode routeDocumentDml( LogicalDocumentModify alg, Statement statement, LogicalQueryInformation queryInformation, Long adapterId ) { Snapshot snapshot = statement.getTransaction().getSnapshot(); LogicalCollection collection = alg.entity.unwrap( LogicalCollection.class ); List modifies = new ArrayList<>(); - List placements = collection.placements; + List placements = collection.placements; if ( adapterId != null ) { placements = List.of( adapterId ); } - for ( int placementId : placements ) { + for ( long placementId : placements ) { CatalogAdapter adapter = Catalog.getInstance().getAdapter( placementId ); CatalogCollectionPlacement placement = Catalog.getInstance().getAllocDoc( alg.entity.namespaceId ).getCollectionPlacement( collection.id, placementId ); @@ -733,14 +733,14 @@ public AlgNode routeGraphDml( LogicalLpgModify alg, Statement statement ) { @Override - public AlgNode routeGraphDml( LogicalLpgModify alg, Statement statement, LogicalGraph catalogGraph, List placements ) { + public AlgNode routeGraphDml( LogicalLpgModify alg, Statement statement, LogicalGraph catalogGraph, List placements ) { Snapshot snapshot = statement.getTransaction().getSnapshot(); List modifies = new ArrayList<>(); boolean usedSubstitution = false; - for ( int adapterId : placements ) { + for ( long adapterId : placements ) { CatalogAdapter adapter = Catalog.getInstance().getAdapter( adapterId ); CatalogGraphPlacement graphPlacement = Catalog.getInstance().getAllocGraph( alg.entity.namespaceId ).getGraphPlacement( catalogGraph.id, adapterId ); @@ -800,7 +800,7 @@ private AlgNode buildDocumentDml( AlgNode node, Statement 
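
routeDocumentDml above (like handleGraphScan and handleDocumentScan earlier in BaseRouter) follows one routing convention after the id widening: modify every placement of the entity unless a single adapter id is pinned, in which case only that adapter is targeted. Isolated into a one-liner:

    import java.util.List;

    class PinnedOrAll {
        // pinnedAdapterId == null -> route to all placements; otherwise route to just that one.
        static List<Long> resolve( List<Long> placements, Long pinnedAdapterId ) {
            return pinnedAdapterId != null ? List.of( pinnedAdapterId ) : placements;
        }
    }
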
statement, LogicalQuer } - private AlgNode buildGraphDml( AlgNode node, Statement statement, int adapterId ) { + private AlgNode buildGraphDml( AlgNode node, Statement statement, long adapterId ) { if ( node instanceof LpgScan ) { return super.handleGraphScan( (LogicalLpgScan) node, statement, adapterId ); } @@ -814,7 +814,7 @@ private AlgNode buildGraphDml( AlgNode node, Statement statement, int adapterId } - private AlgNode attachRelationalModify( LogicalDocumentModify alg, Statement statement, int adapterId, LogicalQueryInformation queryInformation ) { + private AlgNode attachRelationalModify( LogicalDocumentModify alg, Statement statement, long adapterId, LogicalQueryInformation queryInformation ) { switch ( alg.operation ) { case INSERT: @@ -831,7 +831,7 @@ private AlgNode attachRelationalModify( LogicalDocumentModify alg, Statement sta } - private List attachRelationalDoc( LogicalDocumentModify alg, Statement statement, CatalogEntity collectionTable, LogicalQueryInformation queryInformation, int adapterId ) { + private List attachRelationalDoc( LogicalDocumentModify alg, Statement statement, CatalogEntity collectionTable, LogicalQueryInformation queryInformation, long adapterId ) { RoutedAlgBuilder builder = attachDocUpdate( alg.getInput(), statement, collectionTable, RoutedAlgBuilder.create( statement, alg.getCluster() ), queryInformation, adapterId ); RexBuilder rexBuilder = alg.getCluster().getRexBuilder(); AlgBuilder algBuilder = AlgBuilder.create( statement ); @@ -875,7 +875,7 @@ private AlgNode createDocumentTransform( AlgNode query, RexBuilder rexBuilder ) } - private RoutedAlgBuilder attachDocUpdate( AlgNode alg, Statement statement, CatalogEntity collectionTable, RoutedAlgBuilder builder, LogicalQueryInformation information, int adapterId ) { + private RoutedAlgBuilder attachDocUpdate( AlgNode alg, Statement statement, CatalogEntity collectionTable, RoutedAlgBuilder builder, LogicalQueryInformation information, long adapterId ) { switch ( ((DocumentAlg) alg).getDocType() ) { case SCAN: @@ -912,7 +912,7 @@ private RoutedAlgBuilder attachDocUpdate( AlgNode alg, Statement statement, Cata } - private List attachRelationalDocInsert( LogicalDocumentModify alg, Statement statement, CatalogEntity collectionTable, LogicalQueryInformation queryInformation, int adapterId ) { + private List attachRelationalDocInsert( LogicalDocumentModify alg, Statement statement, CatalogEntity collectionTable, LogicalQueryInformation queryInformation, long adapterId ) { if ( alg.getInput() instanceof DocumentValues ) { // simple value insert AlgNode values = ((LogicalDocumentValues) alg.getInput()).getRelationalEquivalent( List.of(), List.of( collectionTable ), statement.getTransaction().getSnapshot() ).get( 0 ); @@ -923,7 +923,7 @@ private List attachRelationalDocInsert( LogicalDocumentModify alg, Stat } - private AlgNode attachRelationalModify( LogicalLpgModify alg, int adapterId, Statement statement ) { + private AlgNode attachRelationalModify( LogicalLpgModify alg, long adapterId, Statement statement ) { /*CatalogGraphMapping mapping = Catalog.getInstance().getGraphMapping( alg.entity.id ); PhysicalTable nodesTable = getSubstitutionTable( statement, mapping.nodesId, mapping.idNodeId, adapterId ).unwrap( PhysicalTable.class ); @@ -1191,7 +1191,7 @@ private AlgBuilder buildDml( if ( log.isDebugEnabled() ) { log.debug( "List of Store specific ColumnPlacements: " ); for ( CatalogColumnPlacement ccp : placements ) { - log.debug( "{}.{}", ccp.adapterUniqueName, ccp.getLogicalColumnName() ); + log.debug( 
"{}.{}", ccp.adapterId, ccp.getLogicalColumnName() ); } } diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/FullPlacementQueryRouter.java b/dbms/src/main/java/org/polypheny/db/routing/routers/FullPlacementQueryRouter.java index e05de2b83e..8abd3617cc 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/FullPlacementQueryRouter.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/FullPlacementQueryRouter.java @@ -132,7 +132,7 @@ protected Collection>> selectPlacementHor // Utilize scanId to retrieve Partitions being accessed List partitionIds = queryInformation.getAccessedPartitions().get( node.getId() ); - Map>> allPlacements = partitionManager.getAllPlacements( catalogTable, partitionIds ); + Map>> allPlacements = partitionManager.getAllPlacements( catalogTable, partitionIds ); return allPlacements.values(); } @@ -143,16 +143,16 @@ protected Set> selectPlacement( LogicalTable catalo List usedColumns = queryInformation.getAllColumnsPerTable( catalogTable.id ); // Filter for placements by adapters - List adapters = catalog.getColumnPlacementsByAdapter( catalogTable.id ).entrySet() + List adapters = catalog.getAllocRel( catalogTable.namespaceId ).getColumnPlacementsByAdapter( catalogTable.id ).entrySet() .stream() .filter( elem -> elem.getValue().containsAll( usedColumns ) ) .map( Entry::getKey ) .collect( Collectors.toList() ); final Set> result = new HashSet<>(); - for ( int adapterId : adapters ) { + for ( long adapterId : adapters ) { List placements = usedColumns.stream() - .map( colId -> catalog.getColumnPlacement( adapterId, colId ) ) + .map( colId -> catalog.getAllocRel( catalogTable.namespaceId ).getColumnPlacement( adapterId, colId ) ) .collect( Collectors.toList() ); if ( !placements.isEmpty() ) { diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/IcarusRouter.java b/dbms/src/main/java/org/polypheny/db/routing/routers/IcarusRouter.java index 7874f11f0c..281141d3d0 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/IcarusRouter.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/IcarusRouter.java @@ -91,17 +91,17 @@ protected List handleNonePartitioning( AlgNode node, LogicalTa currentPlacementDistribution.put( catalogTable.partitionProperty.partitionIds.get( 0 ), currentPlacement ); // AdapterId for all col placements same - final int adapterId = currentPlacement.get( 0 ).adapterId; + final long adapterId = currentPlacement.get( 0 ).adapterId; // Find corresponding builder: final RoutedAlgBuilder builder = builders.stream().filter( b -> { - final List> listPairs = b.getPhysicalPlacementsOfPartitions().values().stream() - .flatMap( Collection::stream ) - .collect( Collectors.toList() ); - final Optional found = listPairs.stream() - .map( elem -> elem.left ) - .filter( elem -> elem == adapterId ) - .findFirst(); + final List> listPairs = b.getPhysicalPlacementsOfPartitions().values().stream() + .flatMap( Collection::stream ) + .collect( Collectors.toList() ); + final Optional found = listPairs.stream() + .map( elem -> elem.left ) + .filter( elem -> elem == adapterId ) + .findFirst(); return found.isPresent(); } ).findAny().orElse( null ); diff --git a/dbms/src/main/java/org/polypheny/db/routing/strategies/CreateAllPlacementStrategy.java b/dbms/src/main/java/org/polypheny/db/routing/strategies/CreateAllPlacementStrategy.java index 74a6a94f60..5036dc2be4 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/strategies/CreateAllPlacementStrategy.java +++ 
b/dbms/src/main/java/org/polypheny/db/routing/strategies/CreateAllPlacementStrategy.java @@ -32,7 +32,7 @@ public class CreateAllPlacementStrategy implements CreatePlacementStrategy { @Override public List getDataStoresForNewColumn( LogicalColumn addedColumn ) { - LogicalTable catalogTable = Catalog.getInstance().getTable( addedColumn.tableId ); + LogicalTable catalogTable = Catalog.getInstance().getLogicalRel( addedColumn.namespaceId ).getTable( addedColumn.tableId ); return catalogTable.dataPlacements.stream() .map( elem -> AdapterManager.getInstance().getStore( elem ) ) .collect( Collectors.toList() ); diff --git a/dbms/src/main/java/org/polypheny/db/routing/strategies/CreateSinglePlacementStrategy.java b/dbms/src/main/java/org/polypheny/db/routing/strategies/CreateSinglePlacementStrategy.java index 25741413c9..828b35519f 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/strategies/CreateSinglePlacementStrategy.java +++ b/dbms/src/main/java/org/polypheny/db/routing/strategies/CreateSinglePlacementStrategy.java @@ -30,7 +30,7 @@ public class CreateSinglePlacementStrategy implements CreatePlacementStrategy { @Override public List getDataStoresForNewColumn( LogicalColumn addedColumn ) { - LogicalTable catalogTable = Catalog.getInstance().getTable( addedColumn.tableId ); + LogicalTable catalogTable = Catalog.getInstance().getLogicalRel( addedColumn.namespaceId ).getTable( addedColumn.tableId ); return ImmutableList.of( AdapterManager.getInstance().getStore( catalogTable.dataPlacements.get( 0 ) ) ); } diff --git a/dbms/src/main/java/org/polypheny/db/transaction/EntityAccessMap.java b/dbms/src/main/java/org/polypheny/db/transaction/EntityAccessMap.java index 60a8c67461..112952c426 100644 --- a/dbms/src/main/java/org/polypheny/db/transaction/EntityAccessMap.java +++ b/dbms/src/main/java/org/polypheny/db/transaction/EntityAccessMap.java @@ -324,7 +324,7 @@ private void extractWriteConstraints( LogicalEntity logicalTable ) { for ( long constraintTable : logicalTable.getConstraintIds() ) { for ( long constraintPartitionIds - : Catalog.getInstance().getTable( constraintTable ).partitionProperty.partitionIds ) { + : Catalog.getInstance().getLogicalRel( logicalTable.namespaceId ).getTable( constraintTable ).partitionProperty.partitionIds ) { EntityIdentifier id = new EntityIdentifier( constraintTable, constraintPartitionIds, NamespaceLevel.ENTITY_LEVEL ); if ( !accessMap.containsKey( id ) ) { diff --git a/dbms/src/main/java/org/polypheny/db/transaction/StatementImpl.java b/dbms/src/main/java/org/polypheny/db/transaction/StatementImpl.java index f3e9415b7a..5b43839dcd 100644 --- a/dbms/src/main/java/org/polypheny/db/transaction/StatementImpl.java +++ b/dbms/src/main/java/org/polypheny/db/transaction/StatementImpl.java @@ -104,7 +104,6 @@ public ContextImpl getPrepareContext() { transaction.getSnapshot(), getDataContext(), transaction.getDefaultSchema().name, - transaction.getDatabase().id, transaction.getUser().id, this ); } diff --git a/dbms/src/main/java/org/polypheny/db/transaction/TransactionImpl.java b/dbms/src/main/java/org/polypheny/db/transaction/TransactionImpl.java index 015e87e039..a64c74a98b 100644 --- a/dbms/src/main/java/org/polypheny/db/transaction/TransactionImpl.java +++ b/dbms/src/main/java/org/polypheny/db/transaction/TransactionImpl.java @@ -77,8 +77,6 @@ public class TransactionImpl implements Transaction, Comparable { private final CatalogUser user; @Getter private final LogicalNamespace defaultSchema; - @Getter - private final CatalogDatabase database; private final 
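
The two CreatePlacementStrategy implementations above differ only in fan-out: CreateAllPlacementStrategy puts the new column on every data placement of the table, while CreateSinglePlacementStrategy uses only the first. Stripped to that decision:

    import java.util.List;

    class PlacementFanOut {
        static List<Long> all( List<Long> dataPlacements ) {
            return List.copyOf( dataPlacements );
        }

        static List<Long> single( List<Long> dataPlacements ) {
            return List.of( dataPlacements.get( 0 ) );   // assumes at least one placement exists
        }
    }
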
TransactionManagerImpl transactionManager; @@ -118,7 +116,6 @@ public class TransactionImpl implements Transaction, Comparable { TransactionManagerImpl transactionManager, CatalogUser user, LogicalNamespace defaultSchema, - CatalogDatabase database, boolean analyze, String origin, MultimediaFlavor flavor ) { @@ -127,7 +124,6 @@ public class TransactionImpl implements Transaction, Comparable { this.transactionManager = transactionManager; this.user = user; this.defaultSchema = defaultSchema; - this.database = database; this.analyze = analyze; this.origin = origin; this.flavor = flavor; diff --git a/dbms/src/main/java/org/polypheny/db/transaction/TransactionManagerImpl.java b/dbms/src/main/java/org/polypheny/db/transaction/TransactionManagerImpl.java index e66497f938..7eb0cb45cf 100644 --- a/dbms/src/main/java/org/polypheny/db/transaction/TransactionManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/transaction/TransactionManagerImpl.java @@ -22,12 +22,9 @@ import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; import org.polypheny.db.adapter.Adapter; -import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.entity.CatalogDatabase; import org.polypheny.db.catalog.entity.CatalogUser; import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.exceptions.GenericCatalogException; -import org.polypheny.db.catalog.exceptions.UnknownDatabaseException; import org.polypheny.db.catalog.exceptions.UnknownSchemaException; import org.polypheny.db.catalog.exceptions.UnknownUserException; import org.polypheny.db.information.InformationGroup; @@ -84,35 +81,36 @@ public static TransactionManager getInstance() { @Override - public Transaction startTransaction( CatalogUser user, LogicalNamespace defaultSchema, CatalogDatabase database, boolean analyze, String origin, MultimediaFlavor flavor ) { + public Transaction startTransaction( CatalogUser user, LogicalNamespace defaultSchema, boolean analyze, String origin, MultimediaFlavor flavor ) { final NodeId nodeId = (NodeId) PUID.randomPUID( Type.NODE ); // TODO: get real node id -- configuration.get("nodeid") final UserId userId = (UserId) PUID.randomPUID( Type.USER ); // TODO: use real user id final ConnectionId connectionId = (ConnectionId) PUID.randomPUID( Type.CONNECTION ); // TODO PolyXid xid = generateNewTransactionId( nodeId, userId, connectionId ); - transactions.put( xid, new TransactionImpl( xid, this, user, defaultSchema, database, analyze, origin, flavor ) ); + transactions.put( xid, new TransactionImpl( xid, this, user, defaultSchema, analyze, origin, flavor ) ); return transactions.get( xid ); } @Override - public Transaction startTransaction( CatalogUser user, LogicalNamespace defaultSchema, CatalogDatabase database, boolean analyze, String origin ) { - return startTransaction( user, defaultSchema, database, analyze, origin, MultimediaFlavor.DEFAULT ); + public Transaction startTransaction( CatalogUser user, LogicalNamespace defaultSchema, boolean analyze, String origin ) { + return startTransaction( user, defaultSchema, analyze, origin, MultimediaFlavor.DEFAULT ); } @Override public Transaction startTransaction( long userId, boolean analyze, String origin, MultimediaFlavor flavor ) throws UnknownUserException, UnknownSchemaException { - Catalog catalog = Catalog.getInstance(); + /*Catalog catalog = Catalog.getInstance(); CatalogUser catalogUser = catalog.getUser( (int) userId ); - CatalogDatabase catalogDatabase = catalog.getDatabase( databaseId ); - LogicalNamespace 
logicalNamespace = catalog.getSchema( catalogDatabase.id, catalogDatabase.defaultNamespaceName ); - return startTransaction( catalogUser, logicalNamespace, catalogDatabase, analyze, origin, flavor ); + LogicalNamespace logicalNamespace = catalog.getNamespace( catalogDatabase.defaultNamespaceName ); + return startTransaction( catalogUser, logicalNamespace, catalogDatabase, analyze, origin, flavor );*/ + throw new RuntimeException(); } @Override - public Transaction startTransaction( long userId, boolean analyze, String origin ) throws GenericCatalogException, UnknownUserException, UnknownDatabaseException, UnknownSchemaException { - return startTransaction( userId, databaseId, analyze, origin, MultimediaFlavor.DEFAULT ); + public Transaction startTransaction( long userId, boolean analyze, String origin ) throws GenericCatalogException, UnknownUserException, UnknownSchemaException { + throw new RuntimeException(); + // return startTransaction( userId, databaseId, analyze, origin, MultimediaFlavor.DEFAULT ); } diff --git a/dbms/src/main/java/org/polypheny/db/view/MaterializedViewManagerImpl.java b/dbms/src/main/java/org/polypheny/db/view/MaterializedViewManagerImpl.java index b8d4e32118..8021d1fe2e 100644 --- a/dbms/src/main/java/org/polypheny/db/view/MaterializedViewManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/view/MaterializedViewManagerImpl.java @@ -43,12 +43,12 @@ import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; import org.polypheny.db.catalog.entity.CatalogMaterializedView; +import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.MaterializedCriteria; import org.polypheny.db.catalog.entity.MaterializedCriteria.CriteriaType; import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.exceptions.GenericCatalogException; -import org.polypheny.db.catalog.exceptions.UnknownDatabaseException; import org.polypheny.db.catalog.exceptions.UnknownSchemaException; import org.polypheny.db.catalog.exceptions.UnknownTableException; import org.polypheny.db.catalog.exceptions.UnknownUserException; @@ -109,7 +109,7 @@ public MaterializedViewManagerImpl( TransactionManager transactionManager ) { public synchronized Map updateMaterializedViewInfo() { List toRemove = new ArrayList<>(); for ( Long id : materializedInfo.keySet() ) { - if ( Catalog.getInstance().getTable( id ) == null ) { + if ( Catalog.getInstance().getLogicalEntity( id ) == null ) { toRemove.add( id ); } } @@ -138,7 +138,7 @@ public synchronized void deleteMaterializedViewFromInfo( Long materializedId ) { public synchronized void updateMaterializedTime( Long materializedId ) { if ( materializedInfo.containsKey( materializedId ) ) { materializedInfo.get( materializedId ).setLastUpdate( new Timestamp( System.currentTimeMillis() ) ); - Catalog.getInstance().updateMaterializedViewRefreshTime( materializedId ); + Catalog.getInstance().getLogicalRel( 0 ).updateMaterializedViewRefreshTime( materializedId ); } } @@ -178,15 +178,14 @@ public synchronized void addMaterializedInfo( Long materializedId, MaterializedC public void addTables( Transaction transaction, List tableNames ) { if ( tableNames.size() > 1 ) { try { - LogicalTable catalogTable = Catalog.getInstance().getTable( tableNames.get( 0 ), tableNames.get( 1 ) ); + LogicalNamespace namespace = Catalog.getInstance().getNamespace( tableNames.get( 0 ) ); + LogicalTable catalogTable = 
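
With CatalogDatabase removed, the surviving startTransaction overloads above take only user, default namespace, analyze flag, origin, and optionally flavor; the userId-based overloads are stubbed out with bare RuntimeExceptions while the catalog rework is in flight. A hedged outline of the slimmed-down entry point, with type parameters standing in for the real catalog classes:

    // U = CatalogUser, N = LogicalNamespace, T = Transaction in the real code.
    interface TransactionStarterSketch<U, N, T> {

        T startTransaction( U user, N defaultNamespace, boolean analyze, String origin );

        default T startTransaction( U user, N defaultNamespace, String origin ) {
            return startTransaction( user, defaultNamespace, false, origin );
        }
    }
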
Catalog.getInstance().getLogicalRel( namespace.id ).getTable( tableNames.get( 1 ) ); long id = catalogTable.id; if ( !catalogTable.getConnectedViews().isEmpty() ) { updateCandidates.put( transaction.getXid(), id ); } } catch ( UnknownTableException e ) { throw new RuntimeException( "Not possible to getLogicalTable to update which Tables were changed.", e ); - } catch ( UnknownSchemaException e ) { - throw new RuntimeException( e ); } } } @@ -213,11 +212,11 @@ public void updateCommittedXid( PolyXid xid ) { */ public void materializedUpdate( Long potentialInteresting ) { Catalog catalog = Catalog.getInstance(); - LogicalTable catalogTable = catalog.getTable( potentialInteresting ); + LogicalTable catalogTable = catalog.getLogicalEntity( potentialInteresting ).unwrap( LogicalTable.class ); List connectedViews = catalogTable.getConnectedViews(); - for ( Long id : connectedViews ) { - LogicalTable view = catalog.getTable( id ); + for ( long id : connectedViews ) { + LogicalTable view = catalog.getLogicalRel( catalogTable.namespaceId ).getTable( id ); if ( view.entityType == EntityType.MATERIALIZED_VIEW ) { MaterializedCriteria materializedCriteria = materializedInfo.get( view.id ); if ( materializedCriteria.getCriteriaType() == CriteriaType.UPDATE ) { @@ -274,11 +273,11 @@ private void updatingIntervalMaterialized() { */ public void prepareToUpdate( Long materializedId ) { Catalog catalog = Catalog.getInstance(); - LogicalTable catalogTable = catalog.getTable( materializedId ); + LogicalTable catalogTable = catalog.getLogicalEntity( materializedId ).unwrap( LogicalTable.class ); try { Transaction transaction = getTransactionManager().startTransaction( - catalogTable.ownerId, + Catalog.defaultUserId, false, "Materialized View" ); @@ -296,7 +295,7 @@ public void prepareToUpdate( Long materializedId ) { } updateData( transaction, materializedId ); commitTransaction( transaction ); - } catch ( GenericCatalogException | UnknownUserException | UnknownDatabaseException | UnknownSchemaException e ) { + } catch ( GenericCatalogException | UnknownUserException | UnknownSchemaException e ) { throw new RuntimeException( "Not possible to create Transaction for Materialized View update", e ); } updateMaterializedTime( materializedId ); @@ -307,21 +306,21 @@ public void prepareToUpdate( Long materializedId ) { * Is used if a materialized view is created in order to add the data from the underlying tables to the materialized view */ @Override - public void addData( Transaction transaction, List stores, Map> columns, AlgRoot algRoot, CatalogMaterializedView materializedView ) { + public void addData( Transaction transaction, List stores, Map> columns, AlgRoot algRoot, CatalogMaterializedView materializedView ) { addMaterializedInfo( materializedView.id, materializedView.getMaterializedCriteria() ); List columnPlacements = new LinkedList<>(); DataMigrator dataMigrator = transaction.getDataMigrator(); - for ( int id : materializedView.dataPlacements ) { + for ( long id : materializedView.dataPlacements ) { Statement sourceStatement = transaction.createStatement(); prepareSourceRel( sourceStatement, materializedView.getAlgCollation(), algRoot.alg ); Statement targetStatement = transaction.createStatement(); columnPlacements.clear(); - columns.get( id ).forEach( column -> columnPlacements.add( Catalog.getInstance().getColumnPlacement( id, column.id ) ) ); + columns.get( id ).forEach( column -> columnPlacements.add( Catalog.getInstance().getAllocRel( materializedView.namespaceId ).getColumnPlacement( id, column.id 
) ) ); // If partitions should be allowed for materialized views this needs to be changed that all partitions are considered - AlgRoot targetRel = dataMigrator.buildInsertStatement( targetStatement, columnPlacements, Catalog.getInstance().getPartitionsOnDataPlacement( id, materializedView.id ).get( 0 ) ); + AlgRoot targetRel = dataMigrator.buildInsertStatement( targetStatement, columnPlacements, Catalog.getInstance().getAllocRel( materializedView.namespaceId ).getPartitionsOnDataPlacement( id, materializedView.id ).get( 0 ) ); dataMigrator.executeQuery( columns.get( id ), algRoot, sourceStatement, targetStatement, targetRel, true, materializedView.isOrdered() ); } @@ -341,26 +340,26 @@ public void updateData( Transaction transaction, Long materializedId ) { DataMigrator dataMigrator = transaction.getDataMigrator(); List columnPlacements = new LinkedList<>(); - Map> columns = new HashMap<>(); + Map> columns = new HashMap<>(); - List ids = new ArrayList<>(); - if ( catalog.checkIfExistsEntity( materializedId ) && materializedInfo.containsKey( materializedId ) ) { - CatalogMaterializedView catalogMaterializedView = (CatalogMaterializedView) catalog.getTable( materializedId ); - for ( int id : catalogMaterializedView.dataPlacements ) { + List ids = new ArrayList<>(); + if ( catalog.getLogicalEntity( materializedId ) != null && materializedInfo.containsKey( materializedId ) ) { + CatalogMaterializedView catalogMaterializedView = catalog.getLogicalEntity( materializedId ).unwrap( CatalogMaterializedView.class ); + for ( long id : catalogMaterializedView.dataPlacements ) { ids.add( id ); List logicalColumns = new ArrayList<>(); int localAdapterIndex = catalogMaterializedView.dataPlacements.indexOf( id ); - catalog.getDataPlacement( catalogMaterializedView.dataPlacements.get( localAdapterIndex ), catalogMaterializedView.id ) + catalog.getAllocRel( catalogMaterializedView.namespaceId ).getDataPlacement( catalogMaterializedView.dataPlacements.get( localAdapterIndex ), catalogMaterializedView.id ) .columnPlacementsOnAdapter.forEach( col -> - logicalColumns.add( catalog.getColumn( col ) ) + logicalColumns.add( catalog.getLogicalRel( catalogMaterializedView.namespaceId ).getColumn( col ) ) ); columns.put( id, logicalColumns ); } AlgRoot targetRel; - for ( int id : ids ) { + for ( long id : ids ) { Statement sourceStatement = transaction.createStatement(); Statement deleteStatement = transaction.createStatement(); Statement insertStatement = transaction.createStatement(); @@ -368,7 +367,7 @@ public void updateData( Transaction transaction, Long materializedId ) { columnPlacements.clear(); - columns.get( id ).forEach( column -> columnPlacements.add( Catalog.getInstance().getColumnPlacement( id, column.id ) ) ); + columns.get( id ).forEach( column -> columnPlacements.add( Catalog.getInstance().getAllocRel( column.namespaceId ).getColumnPlacement( id, column.id ) ) ); // Build {@link AlgNode} to build delete Statement from materialized view AlgBuilder deleteAlgBuilder = AlgBuilder.create( deleteStatement ); @@ -383,7 +382,7 @@ public void updateData( Transaction transaction, Long materializedId ) { targetRel = dataMigrator.buildDeleteStatement( targetStatementDelete, columnPlacements, - Catalog.getInstance().getPartitionsOnDataPlacement( id, catalogMaterializedView.id ).get( 0 ) ); + Catalog.getInstance().getAllocRel( catalogMaterializedView.namespaceId ).getPartitionsOnDataPlacement( id, catalogMaterializedView.id ).get( 0 ) ); dataMigrator.executeQuery( columns.get( id ), AlgRoot.of( deleteRel, 
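
updateData above refreshes a materialized view placement by placement: for each adapter it prepares a DELETE over the view's current rows and an INSERT fed by the view's defining query, then runs both through the DataMigrator. The ordering (clear before repopulate) is the point; as a bare skeleton:

    import java.util.List;

    class RefreshSkeleton {
        static void refresh( List<Long> adapterIds ) {
            for ( long adapterId : adapterIds ) {
                // 1. build a DELETE against the view's rows on this adapter
                // 2. build an INSERT from the view's defining query
                // 3. execute both via the data migrator within one transaction
            }
        }
    }
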
Kind.SELECT ), @@ -399,7 +398,7 @@ public void updateData( Transaction transaction, Long materializedId ) { targetRel = dataMigrator.buildInsertStatement( targetStatementInsert, columnPlacements, - Catalog.getInstance().getPartitionsOnDataPlacement( id, catalogMaterializedView.id ).get( 0 ) ); + Catalog.getInstance().getAllocRel( catalogMaterializedView.namespaceId ).getPartitionsOnDataPlacement( id, catalogMaterializedView.id ).get( 0 ) ); dataMigrator.executeQuery( columns.get( id ), AlgRoot.of( insertRel, Kind.SELECT ), diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/AlphabeticStatisticColumn.java b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/AlphabeticStatisticColumn.java index 9e49ed2129..77bb2af312 100644 --- a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/AlphabeticStatisticColumn.java +++ b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/AlphabeticStatisticColumn.java @@ -37,7 +37,7 @@ public class AlphabeticStatisticColumn> extends Statisti public AlphabeticStatisticColumn( QueryResult column ) { - super( column.getColumn().schemaId, column.getColumn().tableId, column.getColumn().id, column.getColumn().type, StatisticType.ALPHABETICAL ); + super( column.getColumn().namespaceId, column.getColumn().tableId, column.getColumn().id, column.getColumn().type, StatisticType.ALPHABETICAL ); } diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/NumericalStatisticColumn.java b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/NumericalStatisticColumn.java index 4709a5a3da..08adff0bca 100644 --- a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/NumericalStatisticColumn.java +++ b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/NumericalStatisticColumn.java @@ -51,7 +51,7 @@ public class NumericalStatisticColumn extends StatisticColumn { public NumericalStatisticColumn( QueryResult column ) { - super( column.getColumn().schemaId, column.getEntity().id, column.getColumn().id, column.getColumn().type, StatisticType.NUMERICAL ); + super( column.getColumn().namespaceId, column.getEntity().id, column.getColumn().id, column.getColumn().type, StatisticType.NUMERICAL ); } diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/QueryResult.java b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/QueryResult.java index c08d602c0b..6907f4e79a 100644 --- a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/QueryResult.java +++ b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/QueryResult.java @@ -42,7 +42,7 @@ class QueryResult { public static QueryResult fromCatalogColumn( LogicalColumn column ) { - return new QueryResult( Catalog.getInstance().getTable( column.tableId ), column ); + return new QueryResult( Catalog.getInstance().getLogicalRel( column.namespaceId ).getTable( column.tableId ), column ); } } diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticColumn.java b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticColumn.java index 690b8af738..ee60e8592a 100644 --- a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticColumn.java +++ b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticColumn.java @@ -86,10 +86,10 @@ public StatisticColumn( long schemaId, long tableId, long columnId, PolyType typ this.columnType = columnType; Catalog catalog = Catalog.getInstance(); - if ( 
catalog.checkIfExistsEntity( tableId ) ) { + if ( catalog.getLogicalEntity( tableId ) != null ) { this.schema = catalog.getNamespace( schemaId ).name; - this.table = catalog.getTable( tableId ).name; - this.column = catalog.getColumn( columnId ).name; + this.table = catalog.getLogicalRel( schemaId ).getTable( tableId ).name; + this.column = catalog.getLogicalRel( schemaId ).getColumn( columnId ).name; } this.qualifiedColumnName = String.format( "%s.%s.%s", this.schema, this.table, this.column ); } diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticQueryProcessor.java b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticQueryProcessor.java index a9308790b2..9cd702bc72 100644 --- a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticQueryProcessor.java +++ b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticQueryProcessor.java @@ -30,7 +30,6 @@ import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalTable; -import org.polypheny.db.catalog.exceptions.UnknownDatabaseException; import org.polypheny.db.catalog.exceptions.UnknownSchemaException; import org.polypheny.db.catalog.exceptions.UnknownUserException; import org.polypheny.db.catalog.logistic.EntityType; @@ -90,14 +89,13 @@ public List> getSchemaTree() { Catalog catalog = Catalog.getInstance(); List> result = new ArrayList<>(); List schemaTree = new ArrayList<>(); - List schemas = catalog.getNamespaces( databaseId, null ); + List schemas = catalog.getNamespaces( null ); for ( LogicalNamespace schema : schemas ) { List tables = new ArrayList<>(); - List childTables = catalog.getTables( schema.id, null ); + List childTables = catalog.getLogicalRel( schema.id ).getTables( null ); for ( LogicalTable childTable : childTables ) { List table = new ArrayList<>(); - List childColumns = catalog.getColumns( childTable.id ); - for ( LogicalColumn logicalColumn : childColumns ) { + for ( LogicalColumn logicalColumn : childTable.columns ) { table.add( schema.name + "." + childTable.name + "." 
+ logicalColumn.name ); } if ( childTable.entityType == EntityType.ENTITY ) { @@ -118,21 +116,12 @@ public List<List<String>> getSchemaTree() { */ public List<QueryResult> getAllColumns() { Catalog catalog = Catalog.getInstance(); - List<LogicalColumn> logicalColumns = catalog.getColumns( - null, - null, - null ) + return catalog.getNamespaces( null ) .stream() - .filter( c -> c.getNamespaceType() == NamespaceType.RELATIONAL ) + .filter( n -> n.namespaceType == NamespaceType.RELATIONAL ) + .flatMap( n -> catalog.getLogicalRel( n.id ).getTables( null ).stream().filter( t -> t.entityType != EntityType.VIEW ).flatMap( t -> t.columns.stream() ) ) + .map( QueryResult::fromCatalogColumn ) .collect( Collectors.toList() ); - List<QueryResult> allColumns = new ArrayList<>(); - - for ( LogicalColumn logicalColumn : logicalColumns ) { - if ( catalog.getTable( logicalColumn.tableId ).entityType != EntityType.VIEW ) { - allColumns.add( QueryResult.fromCatalogColumn( logicalColumn ) ); - } - } - return allColumns; } @@ -143,17 +132,8 @@ public List<QueryResult> getAllColumns() { */ public List<LogicalTable> getAllTable() { Catalog catalog = Catalog.getInstance(); - List<LogicalTable> catalogEntities = catalog.getTables( - null, - null ); - List<LogicalTable> allTables = new ArrayList<>(); - - for ( LogicalTable catalogTable : catalogEntities ) { - if ( catalogTable.entityType != EntityType.VIEW ) { - allTables.add( catalogTable ); - } - } - return allTables; + return catalog.getNamespaces( null ).stream().filter( n -> n.namespaceType == NamespaceType.RELATIONAL ) .flatMap( n -> catalog.getLogicalRel( n.id ).getTables( null ).stream().filter( t -> t.entityType != EntityType.VIEW ) ).collect( Collectors.toList() ); } @@ -164,9 +144,7 @@ public List<LogicalTable> getAllTable() { */ public List<QueryResult> getAllColumns( Long tableId ) { Catalog catalog = Catalog.getInstance(); - List<QueryResult> columns = new ArrayList<>(); - catalog.getColumns( tableId ).forEach( c -> columns.add( QueryResult.fromCatalogColumn( c ) ) ); - return columns; + return catalog.getNamespaces( null ).stream().flatMap( n -> catalog.getLogicalRel( n.id ).getTable( tableId ).columns.stream() ).map( QueryResult::fromCatalogColumn ).collect( Collectors.toList() ); } @@ -203,7 +181,7 @@ private StatisticResult executeColStat( AlgNode node, Transaction transaction, S private Transaction getTransaction() { try { return transactionManager.startTransaction( userId, false, "Statistics", MultimediaFlavor.FILE ); - } catch ( UnknownUserException | UnknownDatabaseException | UnknownSchemaException e ) { + } catch ( UnknownUserException | UnknownSchemaException e ) { throw new RuntimeException( "Error while starting transaction", e ); } } diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticRepository.java b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticRepository.java index 10c295451b..5b610ce9d6 100644 --- a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticRepository.java +++ b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticRepository.java @@ -86,7 +86,7 @@ private void updateQueryStatistics( QueryDataPointImpl dataPoint, StatisticsMana if ( isOneTable ) { long tableId = values.stream().findFirst().get(); - if ( catalog.checkIfExistsEntity( tableId ) ) { + if ( catalog.getLogicalEntity( tableId ) != null ) { statisticsManager.setTableCalls( tableId, dataPoint.getMonitoringType() ); // RowCount from UI is only used if there is no other possibility @@ -100,7 +100,7 @@ private void updateQueryStatistics( QueryDataPointImpl dataPoint, StatisticsMana } } else { for ( long id : 
values ) { - if ( catalog.checkIfExistsEntity( id ) ) { + if ( catalog.getLogicalEntity( id ) != null ) { statisticsManager.setTableCalls( id, dataPoint.getMonitoringType() ); } } @@ -119,14 +119,14 @@ private void updateDmlStatistics( DmlDataPoint dataPoint, StatisticsManager stat long tableId = values.stream().findFirst().get(); statisticsManager.setTableCalls( tableId, dataPoint.getMonitoringType() ); - if ( catalog.checkIfExistsEntity( tableId ) ) { + if ( catalog.getLogicalEntity( tableId ) != null ) { if ( dataPoint.getMonitoringType().equals( "INSERT" ) ) { int added = dataPoint.getRowCount(); statisticsManager.tablesToUpdate( tableId, dataPoint.getChangedValues(), dataPoint.getMonitoringType(), - catalog.getTable( tableId ).namespaceId ); + catalog.getLogicalEntity( tableId ).namespaceId ); statisticsManager.updateRowCountPerTable( tableId, added, dataPoint.getMonitoringType() ); } else if ( dataPoint.getMonitoringType().equals( "DELETE" ) ) { int deleted = dataPoint.getRowCount(); @@ -137,7 +137,7 @@ private void updateDmlStatistics( DmlDataPoint dataPoint, StatisticsManager stat } } else { for ( long id : values ) { - if ( catalog.checkIfExistsEntity( id ) ) { + if ( catalog.getLogicalEntity( id ) != null ) { statisticsManager.setTableCalls( id, dataPoint.getMonitoringType() ); } diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticTable.java b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticTable.java index 0dd8f89e59..783cbe18a1 100644 --- a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticTable.java +++ b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticTable.java @@ -47,7 +47,7 @@ public class StatisticTable<T extends Comparable<T>> { private NamespaceType namespaceType; @Getter - private ImmutableList<Integer> dataPlacements; + private ImmutableList<Long> dataPlacements; @Getter private final List availableAdapters = new ArrayList<>(); @@ -75,8 +75,8 @@ public StatisticTable( Long tableId ) { this.tableId = tableId; Catalog catalog = Catalog.getInstance(); - if ( catalog.checkIfExistsEntity( tableId ) ) { - LogicalTable catalogTable = catalog.getTable( tableId ); + if ( catalog.getLogicalEntity( tableId ) != null ) { + LogicalTable catalogTable = catalog.getLogicalEntity( tableId ).unwrap( LogicalTable.class ); this.table = catalogTable.name; this.namespaceType = catalogTable.getNamespaceType(); this.dataPlacements = catalogTable.dataPlacements; diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticsManagerImpl.java b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticsManagerImpl.java index 928fc458ac..dc6644e09f 100644 --- a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticsManagerImpl.java +++ b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticsManagerImpl.java @@ -17,6 +17,7 @@ package org.polypheny.db.monitoring.statistics; +import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; import java.beans.PropertyChangeEvent; import java.beans.PropertyChangeSupport; @@ -35,6 +36,7 @@ import java.util.concurrent.ConcurrentLinkedQueue; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; +import java.util.stream.Collectors; import javax.annotation.Nonnull; import javax.annotation.Nullable; import lombok.Getter; @@ -55,9 +57,9 @@ import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.LogicalNamespace; import 
org.polypheny.db.catalog.entity.logical.LogicalColumn; +import org.polypheny.db.catalog.entity.logical.LogicalEntity; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.exceptions.GenericCatalogException; -import org.polypheny.db.catalog.exceptions.UnknownDatabaseException; import org.polypheny.db.catalog.exceptions.UnknownSchemaException; import org.polypheny.db.catalog.exceptions.UnknownUserException; import org.polypheny.db.catalog.logistic.EntityType; @@ -146,12 +148,12 @@ public void initializeStatisticSettings() { @Override public void updateColumnName( LogicalColumn logicalColumn, String newName ) { - if ( statisticSchemaMap.containsKey( logicalColumn.schemaId ) - && statisticSchemaMap.get( logicalColumn.schemaId ).containsKey( logicalColumn.tableId ) - && statisticSchemaMap.get( logicalColumn.schemaId ).get( logicalColumn.tableId ).containsKey( logicalColumn.id ) ) { - StatisticColumn statisticColumn = statisticSchemaMap.get( logicalColumn.schemaId ).get( logicalColumn.tableId ).get( logicalColumn.id ); + if ( statisticSchemaMap.containsKey( logicalColumn.namespaceId ) + && statisticSchemaMap.get( logicalColumn.namespaceId ).containsKey( logicalColumn.tableId ) + && statisticSchemaMap.get( logicalColumn.namespaceId ).get( logicalColumn.tableId ).containsKey( logicalColumn.id ) ) { + StatisticColumn statisticColumn = statisticSchemaMap.get( logicalColumn.namespaceId ).get( logicalColumn.tableId ).get( logicalColumn.id ); statisticColumn.updateColumnName( newName ); - statisticSchemaMap.get( logicalColumn.schemaId ).get( logicalColumn.tableId ).put( logicalColumn.id, statisticColumn ); + statisticSchemaMap.get( logicalColumn.namespaceId ).get( logicalColumn.tableId ).put( logicalColumn.id, statisticColumn ); } } @@ -192,11 +194,7 @@ public void updateSchemaName( LogicalNamespace logicalNamespace, String newName private Transaction getTransaction() { Transaction transaction; - try { - transaction = statisticQueryInterface.getTransactionManager().startTransaction( Catalog.defaultUserId, false, "Statistic Manager" ); - } catch ( GenericCatalogException | UnknownUserException | UnknownDatabaseException | UnknownSchemaException e ) { - throw new RuntimeException( e ); - } + transaction = statisticQueryInterface.getTransactionManager().startTransaction( Catalog.getInstance().getUser( Catalog.defaultUserId ), Catalog.getInstance().getNamespace( 0 ), false, "Statistic Manager" ); return transaction; } @@ -240,7 +238,7 @@ public void restart( Config c ) { private void resetAllIsFull() { this.statisticSchemaMap.values().forEach( s -> s.values().forEach( t -> t.values().forEach( c -> { - assignUnique( c, this.prepareNode( QueryResult.fromCatalogColumn( Catalog.getInstance().getColumn( c.getColumnId() ) ), NodeType.UNIQUE_VALUE ) ); + assignUnique( c, this.prepareNode( QueryResult.fromCatalogColumn( Catalog.getInstance().getSnapshot( 0 ).getColumn( c.getColumnId() ) ), NodeType.UNIQUE_VALUE ) ); } ) ) ); } @@ -281,7 +279,7 @@ private void reevaluateRowCount() { log.debug( "Reevaluate Row Count." 
); statisticQueryInterface.getAllTable().forEach( table -> { - int rowCount = getNumberColumnCount( this.prepareNode( new QueryResult( Catalog.getInstance().getTable( table.id ), null ), NodeType.ROW_COUNT_TABLE ) ); + int rowCount = getNumberColumnCount( this.prepareNode( new QueryResult( Catalog.getInstance().getLogicalEntity( table.id ), null ), NodeType.ROW_COUNT_TABLE ) ); updateRowCountPerTable( table.id, rowCount, "SET-ROW-COUNT" ); } ); } @@ -301,8 +299,9 @@ public void reevaluateTable( long tableId ) { if ( statisticQueryInterface == null ) { return; } - if ( Catalog.getInstance().checkIfExistsEntity( tableId ) ) { - deleteTable( Catalog.getInstance().getTable( tableId ).namespaceId, tableId ); + LogicalEntity entity = Catalog.getInstance().getLogicalEntity( tableId ); + if ( entity != null ) { + deleteTable( entity.namespaceId, tableId ); List<QueryResult> res = statisticQueryInterface.getAllColumns( tableId ); @@ -512,7 +511,7 @@ private void put( private StatisticQueryResult prepareNode( QueryResult queryResult, NodeType nodeType ) { StatisticQueryResult statisticQueryColumn = null; - if ( Catalog.getInstance().checkIfExistsEntity( queryResult.getEntity().id ) ) { + if ( Catalog.getInstance().getLogicalEntity( queryResult.getEntity().id ) != null ) { AlgNode queryNode = getQueryNode( queryResult, nodeType ); //queryNode = getQueryNode( queryResult, nodeType ); statisticQueryColumn = statisticQueryInterface.selectOneColumnStat( queryNode, transaction, statement, queryResult ); @@ -834,7 +833,7 @@ public void propertyChange( PropertyChangeEvent evt ) { private void workQueue() { while ( !this.tablesToUpdate.isEmpty() ) { long tableId = this.tablesToUpdate.poll(); - if ( Catalog.getInstance().checkIfExistsEntity( tableId ) ) { + if ( Catalog.getInstance().getLogicalEntity( tableId ) != null ) { reevaluateTable( tableId ); } tableStatistic.remove( tableId ); @@ -870,7 +869,7 @@ public void tablesToUpdate( long tableId ) { @Override public void tablesToUpdate( long tableId, Map<Long, List<Object>> changedValues, String type, long schemaId ) { Catalog catalog = Catalog.getInstance(); - if ( catalog.checkIfExistsEntity( tableId ) ) { + if ( catalog.getLogicalEntity( tableId ) != null ) { switch ( type ) { case "INSERT": handleInsert( tableId, changedValues, schemaId, catalog ); @@ -898,11 +897,11 @@ private void handleDrop( long tableId, Map<Long, List<Object>> changedValues, lo private void handleTruncate( long tableId, long schemaId, Catalog catalog ) { - LogicalTable catalogTable = catalog.getTable( tableId ); - for ( int i = 0; i < catalogTable.fieldIds.size(); i++ ) { - PolyType polyType = catalog.getColumn( catalogTable.fieldIds.get( i ) ).type; - QueryResult queryResult = new QueryResult( catalogTable, Catalog.getInstance().getColumn( catalogTable.fieldIds.get( i ) ) ); - if ( this.statisticSchemaMap.get( schemaId ).get( tableId ).get( catalogTable.fieldIds.get( i ) ) != null ) { + LogicalTable catalogTable = catalog.getLogicalEntity( tableId ).unwrap( LogicalTable.class ); + for ( LogicalColumn column : catalogTable.columns ) { + PolyType polyType = column.type; + QueryResult queryResult = new QueryResult( catalogTable, column ); + if ( this.statisticSchemaMap.get( schemaId ).get( tableId ).get( column.id ) != null ) { StatisticColumn statisticColumn = createNewStatisticColumns( polyType, queryResult ); if ( statisticColumn != null ) { put( queryResult, statisticColumn ); @@ -926,24 +925,23 @@ private <T extends Comparable<T>> StatisticColumn<T> createNewStatisticColumns( private void handleInsert( long tableId, Map<Long, List<Object>> changedValues, 
long schemaId, Catalog catalog ) { - LogicalTable catalogTable = catalog.getTable( tableId ); - List<Long> columns = catalogTable.fieldIds; + LogicalTable catalogTable = catalog.getLogicalEntity( tableId ).unwrap( LogicalTable.class ); if ( this.statisticSchemaMap.get( schemaId ) != null ) { if ( this.statisticSchemaMap.get( schemaId ).get( tableId ) != null ) { - for ( int i = 0; i < columns.size(); i++ ) { - PolyType polyType = catalog.getColumn( columns.get( i ) ).type; - QueryResult queryResult = new QueryResult( catalogTable, Catalog.getInstance().getColumn( columns.get( i ) ) ); - if ( this.statisticSchemaMap.get( schemaId ).get( tableId ).get( columns.get( i ) ) != null && changedValues.get( (long) i ) != null ) { - handleInsertColumn( tableId, changedValues, schemaId, columns, i, queryResult ); + for ( LogicalColumn column : catalogTable.columns ) { + PolyType polyType = column.type; + QueryResult queryResult = new QueryResult( catalogTable, column ); + if ( this.statisticSchemaMap.get( schemaId ).get( tableId ).get( column.id ) != null && changedValues.get( (long) column.position ) != null ) { + handleInsertColumn( tableId, changedValues, schemaId, catalogTable.columns.stream().map( c -> c.id ).collect( Collectors.toList() ), column.position, queryResult ); } else { - addNewColumnStatistics( changedValues, i, polyType, queryResult ); + addNewColumnStatistics( changedValues, column.position, polyType, queryResult ); } } } else { - addInserts( changedValues, catalog, catalogTable, columns ); + addInserts( changedValues, catalogTable, catalogTable.columns ); } } else { - addInserts( changedValues, catalog, catalogTable, columns ); + addInserts( changedValues, catalogTable, catalogTable.columns ); } } @@ -951,11 +949,10 @@ private void handleInsert( long tableId, Map<Long, List<Object>> changedValues, /** * Creates new StatisticColumns and inserts the values. 
*/ - private void addInserts( Map<Long, List<Object>> changedValues, Catalog catalog, LogicalTable catalogTable, List<Long> columns ) { - for ( int i = 0; i < columns.size(); i++ ) { - PolyType polyType = catalog.getColumn( columns.get( i ) ).type; - QueryResult queryResult = new QueryResult( catalogTable, Catalog.getInstance().getColumn( columns.get( i ) ) ); - addNewColumnStatistics( changedValues, i, polyType, queryResult ); + private void addInserts( Map<Long, List<Object>> changedValues, LogicalTable catalogTable, ImmutableList<LogicalColumn> columns ) { + for ( LogicalColumn column : columns ) { + QueryResult queryResult = new QueryResult( catalogTable, column ); + addNewColumnStatistics( changedValues, column.position, column.type, queryResult ); } } @@ -1192,7 +1189,7 @@ public <T extends Comparable<T>> Object getTableStatistic( long schemaId, long t } else if ( v.getType().getFamily() == PolyTypeFamily.CHARACTER ) { alphabeticInfo.add( (AlphabeticStatisticColumn) v ); statisticTable.setAlphabeticColumn( alphabeticInfo ); - } else if ( PolyType.DATETIME_TYPES.contains( Catalog.getInstance().getColumn( k ).type ) ) { + } else if ( PolyType.DATETIME_TYPES.contains( Catalog.getInstance().getSnapshot( 0 ).getColumn( k ).type ) ) { temporalInfo.add( (TemporalStatisticColumn) v ); statisticTable.setTemporalColumn( temporalInfo ); } diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/TemporalStatisticColumn.java b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/TemporalStatisticColumn.java index 354ea887db..698d5305f0 100644 --- a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/TemporalStatisticColumn.java +++ b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/TemporalStatisticColumn.java @@ -74,7 +74,7 @@ public void setMax( T max ) { public TemporalStatisticColumn( QueryResult column ) { - super( column.getColumn().schemaId, column.getEntity().id, column.getColumn().id, column.getColumn().type, StatisticType.TEMPORAL ); + super( column.getColumn().namespaceId, column.getEntity().id, column.getColumn().id, column.getColumn().type, StatisticType.TEMPORAL ); temporalType = column.getColumn().type.getFamily().name(); } diff --git a/plugins/avatica-interface/src/main/java/org/polypheny/db/avatica/DbmsMeta.java b/plugins/avatica-interface/src/main/java/org/polypheny/db/avatica/DbmsMeta.java index cc16bd7c05..3d73a3f26c 100644 --- a/plugins/avatica-interface/src/main/java/org/polypheny/db/avatica/DbmsMeta.java +++ b/plugins/avatica-interface/src/main/java/org/polypheny/db/avatica/DbmsMeta.java @@ -41,6 +41,7 @@ import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.atomic.AtomicInteger; +import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; import org.apache.calcite.avatica.AvaticaParameter; import org.apache.calcite.avatica.AvaticaSeverity; @@ -59,6 +60,7 @@ import org.apache.calcite.avatica.util.Unsafe; import org.apache.calcite.linq4j.Enumerable; import org.apache.calcite.linq4j.Linq4j; +import org.jetbrains.annotations.NotNull; import org.polypheny.db.PolyImplementation; import org.polypheny.db.adapter.DataContext; import org.polypheny.db.algebra.AlgRoot; @@ -86,8 +88,6 @@ import org.polypheny.db.catalog.entity.logical.LogicalColumn.PrimitiveCatalogColumn; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.entity.logical.LogicalTable.PrimitiveCatalogTable; -import org.polypheny.db.catalog.exceptions.UnknownDatabaseException; -import 
org.polypheny.db.catalog.exceptions.UnknownSchemaException; import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.logistic.EntityType.PrimitiveTableType; import org.polypheny.db.catalog.logistic.NamespaceType; @@ -269,10 +269,7 @@ public MetaResultSet getTables( final ConnectionHandle ch, final String database log.trace( "getTables( ConnectionHandle {}, String {}, Pat {}, Pat {}, List {} )", ch, database, schemaPattern, tablePattern, typeList ); } - final List<LogicalTable> tables = catalog.getTables( - (schemaPattern == null || schemaPattern.s == null) ? null : new Pattern( schemaPattern.s ), - (tablePattern == null || tablePattern.s == null) ? null : new Pattern( tablePattern.s ) - ); + final List<LogicalTable> tables = getLogicalTables( schemaPattern, tablePattern ); StatementHandle statementHandle = createStatement( ch ); return createMetaResultSet( ch, @@ -297,6 +294,24 @@ public MetaResultSet getTables( final ConnectionHandle ch, final String database } + @NotNull + private List<LogicalTable> getLogicalTables( Pat schemaPattern, Pat tablePattern ) { + return getLogicalTables( (schemaPattern == null || schemaPattern.s == null) ? null : new Pattern( schemaPattern.s ), + (tablePattern == null || tablePattern.s == null) ? null : new Pattern( tablePattern.s ) ); + } + + + @NotNull + private List<LogicalTable> getLogicalTables( Pattern schemaPattern, Pattern tablePattern ) { + List<LogicalNamespace> namespaces = catalog.getNamespaces( schemaPattern ); + + return namespaces + .stream() + .flatMap( + n -> catalog.getLogicalRel( n.id ).getTables( tablePattern ).stream() ).collect( Collectors.toList() ); + } + + @Override public MetaResultSet getColumns( final ConnectionHandle ch, final String database, final Pat schemaPattern, final Pat tablePattern, final Pat columnPattern ) { final PolyphenyDbConnectionHandle connection = getPolyphenyDbConnectionHandle( ch.id ); if ( log.isTraceEnabled() ) { log.trace( "getColumns( ConnectionHandle {}, String {}, Pat {}, Pat {}, Pat {} )", ch, database, schemaPattern, tablePattern, columnPattern ); } - final List<LogicalColumn> columns = catalog.getColumns( - (schemaPattern == null || schemaPattern.s == null) ? null : new Pattern( schemaPattern.s ), + final List<LogicalColumn> columns = getLogicalTables( schemaPattern, tablePattern ).stream().flatMap( t -> catalog.getLogicalRel( t.namespaceId ).getColumns( (tablePattern == null || tablePattern.s == null) ? null : new Pattern( tablePattern.s ), (columnPattern == null || columnPattern.s == null) ? null : new Pattern( columnPattern.s ) - ); + ).stream() ).collect( Collectors.toList() ); StatementHandle statementHandle = createStatement( ch ); return createMetaResultSet( ch, @@ -375,7 +389,7 @@ public MetaResultSet getCatalogs( final ConnectionHandle ch ) { if ( log.isTraceEnabled() ) { log.trace( "getCatalogs( ConnectionHandle {} )", ch ); } - final List<CatalogDatabase> databases = catalog.getDatabases( null ); + final List<CatalogDatabase> databases = List.of(); StatementHandle statementHandle = createStatement( ch ); return createMetaResultSet( ch, @@ -514,11 +528,11 @@ public MetaResultSet getPrimaryKeys( final ConnectionHandle ch, final String dat final Pattern tablePattern = table == null ? null : new Pattern( table ); final Pattern schemaPattern = schema == null ? null : new Pattern( schema ); final Pattern databasePattern = database == null ? 
null : new Pattern( database ); - final List<LogicalTable> catalogEntities = catalog.getTables( schemaPattern, tablePattern ); + final List<LogicalTable> catalogEntities = getLogicalTables( schemaPattern, tablePattern ); List<CatalogPrimaryKeyColumn> primaryKeyColumns = new LinkedList<>(); for ( LogicalTable catalogTable : catalogEntities ) { if ( catalogTable.primaryKey != null ) { - final CatalogPrimaryKey primaryKey = catalog.getPrimaryKey( catalogTable.primaryKey ); + final CatalogPrimaryKey primaryKey = catalog.getLogicalRel( catalogTable.namespaceId ).getPrimaryKey( catalogTable.primaryKey ); primaryKeyColumns.addAll( primaryKey.getCatalogPrimaryKeyColumns() ); } } @@ -551,10 +565,10 @@ public MetaResultSet getImportedKeys( final ConnectionHandle ch, final String da final Pattern tablePattern = table == null ? null : new Pattern( table ); final Pattern schemaPattern = schema == null ? null : new Pattern( schema ); final Pattern databasePattern = database == null ? null : new Pattern( database ); - final List<LogicalTable> catalogEntities = catalog.getTables( schemaPattern, tablePattern ); + final List<LogicalTable> catalogEntities = getLogicalTables( schemaPattern, tablePattern ); List<CatalogForeignKeyColumn> foreignKeyColumns = new LinkedList<>(); for ( LogicalTable catalogTable : catalogEntities ) { - List<CatalogForeignKey> importedKeys = catalog.getForeignKeys( catalogTable.id ); + List<CatalogForeignKey> importedKeys = catalog.getLogicalRel( catalogTable.namespaceId ).getForeignKeys( catalogTable.id ); importedKeys.forEach( catalogForeignKey -> foreignKeyColumns.addAll( catalogForeignKey.getCatalogForeignKeyColumns() ) ); } StatementHandle statementHandle = createStatement( ch ); @@ -593,11 +607,11 @@ public MetaResultSet getExportedKeys( final ConnectionHandle ch, final String da } final Pattern tablePattern = table == null ? null : new Pattern( table ); final Pattern schemaPattern = schema == null ? null : new Pattern( schema ); - final Pattern databasePattern = database == null ? null : new Pattern( database ); - final List<LogicalTable> catalogEntities = catalog.getTables( schemaPattern, tablePattern ); + + final List<LogicalTable> catalogEntities = getLogicalTables( schemaPattern, tablePattern ); List<CatalogForeignKeyColumn> foreignKeyColumns = new LinkedList<>(); for ( LogicalTable catalogTable : catalogEntities ) { - List<CatalogForeignKey> exportedKeys = catalog.getExportedKeys( catalogTable.id ); + List<CatalogForeignKey> exportedKeys = catalog.getLogicalRel( catalogTable.namespaceId ).getExportedKeys( catalogTable.id ); exportedKeys.forEach( catalogForeignKey -> foreignKeyColumns.addAll( catalogForeignKey.getCatalogForeignKeyColumns() ) ); } StatementHandle statementHandle = createStatement( ch ); @@ -710,11 +724,10 @@ public MetaResultSet getIndexInfo( final ConnectionHandle ch, final String datab } final Pattern tablePattern = table == null ? null : new Pattern( table ); final Pattern schemaPattern = schema == null ? null : new Pattern( schema ); - final Pattern databasePattern = database == null ? 
null : new Pattern( database ); - final List<LogicalTable> catalogEntities = catalog.getTables( schemaPattern, tablePattern ); + final List<LogicalTable> catalogEntities = getLogicalTables( schemaPattern, tablePattern ); List<CatalogIndexColumn> catalogIndexColumns = new LinkedList<>(); for ( LogicalTable catalogTable : catalogEntities ) { - List<CatalogIndex> catalogIndexInfos = catalog.getIndexes( catalogTable.id, unique ); + List<CatalogIndex> catalogIndexInfos = catalog.getLogicalRel( catalogTable.namespaceId ).getIndexes( catalogTable.id, unique ); catalogIndexInfos.forEach( info -> catalogIndexColumns.addAll( info.getCatalogIndexColumns() ) ); } StatementHandle statementHandle = createStatement( ch ); @@ -1401,26 +1414,14 @@ public void openConnection( final ConnectionHandle ch, final Map } // Create transaction - Transaction transaction = transactionManager.startTransaction( user, null, null, false, "AVATICA Interface" ); + Transaction transaction = transactionManager.startTransaction( user, null, false, "AVATICA Interface" ); - // Check database access - final CatalogDatabase database; - try { - database = catalog.getDatabase( databaseName ); - } catch ( UnknownDatabaseException e ) { - throw new AvaticaRuntimeException( e.getLocalizedMessage(), -1, "", AvaticaSeverity.ERROR ); - } - assert database != null; // Authorizer.hasAccess( user, database ); // Check schema access final LogicalNamespace schema; - try { - schema = catalog.getSchema( database.id, defaultSchemaName ); - } catch ( UnknownSchemaException e ) { - throw new AvaticaRuntimeException( e.getLocalizedMessage(), -1, "", AvaticaSeverity.ERROR ); - } + schema = catalog.getNamespace( defaultSchemaName ); assert schema != null; // Authorizer.hasAccess( user, schema ); @@ -1432,7 +1433,7 @@ public void openConnection( final ConnectionHandle ch, final Map throw new AvaticaRuntimeException( e.getLocalizedMessage(), -1, "", AvaticaSeverity.ERROR ); } - openConnections.put( ch.id, new PolyphenyDbConnectionHandle( ch, user, ch.id, database, schema, transactionManager ) ); + openConnections.put( ch.id, new PolyphenyDbConnectionHandle( ch, user, ch.id, null, schema, transactionManager ) ); } diff --git a/plugins/avatica-interface/src/main/java/org/polypheny/db/avatica/PolyphenyDbConnectionHandle.java b/plugins/avatica-interface/src/main/java/org/polypheny/db/avatica/PolyphenyDbConnectionHandle.java index 56f28158b5..1acbe0b9ba 100644 --- a/plugins/avatica-interface/src/main/java/org/polypheny/db/avatica/PolyphenyDbConnectionHandle.java +++ b/plugins/avatica-interface/src/main/java/org/polypheny/db/avatica/PolyphenyDbConnectionHandle.java @@ -103,7 +103,7 @@ public Transaction endCurrentTransaction() { public Transaction getCurrentOrCreateNewTransaction() { synchronized ( this ) { if ( currentTransaction == null || !currentTransaction.isActive() ) { - currentTransaction = transactionManager.startTransaction( user, schema, database, false, "AVATICA Interface" ); + currentTransaction = transactionManager.startTransaction( user, schema, false, "AVATICA Interface" ); } return currentTransaction; } diff --git a/plugins/cql-language/src/main/java/org/polypheny/db/cql/ColumnIndex.java b/plugins/cql-language/src/main/java/org/polypheny/db/cql/ColumnIndex.java index 968361b88f..9905746388 100644 --- a/plugins/cql-language/src/main/java/org/polypheny/db/cql/ColumnIndex.java +++ b/plugins/cql-language/src/main/java/org/polypheny/db/cql/ColumnIndex.java @@ -18,9 +18,9 @@ import lombok.extern.slf4j.Slf4j; import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.entity.LogicalNamespace; import 
org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.exceptions.UnknownColumnException; -import org.polypheny.db.catalog.exceptions.UnknownDatabaseException; import org.polypheny.db.catalog.exceptions.UnknownSchemaException; import org.polypheny.db.catalog.exceptions.UnknownTableException; import org.polypheny.db.cql.exception.UnknownIndexException; @@ -56,9 +56,10 @@ public static ColumnIndex createIndex( String inDatabase, String schemaName, Str try { log.debug( "Creating ColumnIndex." ); Catalog catalog = Catalog.getInstance(); - LogicalColumn column = catalog.getColumn( schemaName, tableName, columnName ); + LogicalNamespace namespace = catalog.getNamespace( schemaName ); + LogicalColumn column = catalog.getLogicalRel( namespace.id ).getColumn( tableName, columnName ); return new ColumnIndex( column, schemaName, tableName, columnName ); - } catch ( UnknownTableException | UnknownDatabaseException | UnknownSchemaException | UnknownColumnException e ) { + } catch ( UnknownTableException | UnknownSchemaException | UnknownColumnException e ) { log.error( "Cannot find an underlying column for the specified column name: {}.{}.{}.", schemaName, tableName, columnName, e ); throw new UnknownIndexException( "Cannot find an underlying column for the specified column name: " + schemaName + "." + tableName + "." + columnName + "." ); } diff --git a/plugins/cql-language/src/main/java/org/polypheny/db/cql/Cql2RelConverter.java b/plugins/cql-language/src/main/java/org/polypheny/db/cql/Cql2RelConverter.java index 347d351faf..fefbd74a52 100644 --- a/plugins/cql-language/src/main/java/org/polypheny/db/cql/Cql2RelConverter.java +++ b/plugins/cql-language/src/main/java/org/polypheny/db/cql/Cql2RelConverter.java @@ -119,8 +119,8 @@ private void setScanColumnOrdinalities() { cqlQuery.queryRelation.traverse( TraversalType.INORDER, ( treeNode, nodeType, direction, frame ) -> { if ( nodeType == NodeType.DESTINATION_NODE && treeNode.isLeaf() ) { TableIndex tableIndex = treeNode.getExternalNode(); - for ( Long columnId : tableIndex.catalogTable.fieldIds ) { - tableScanColumnOrdinalities.put( columnId, tableScanColumnOrdinalities.size() ); + for ( LogicalColumn column : tableIndex.catalogTable.columns ) { + tableScanColumnOrdinalities.put( column.id, tableScanColumnOrdinalities.size() ); } } return true; @@ -195,13 +195,12 @@ private AlgBuilder generateProjections( AlgBuilder algBuilder, RexBuilder rexBui TableIndex tableIndex = treeNode.getExternalNode(); String columnNamePrefix = tableIndex.fullyQualifiedName + "."; LogicalTable catalogTable = tableIndex.catalogTable; - for ( Long columnId : catalogTable.fieldIds ) { + for ( LogicalColumn column : catalogTable.columns ) { int ordinal = tableScanColumnOrdinalities.size(); RexNode inputRef = rexBuilder.makeInputRef( baseNode, ordinal ); inputRefs.add( inputRef ); - LogicalColumn column = catalog.getColumn( columnId ); columnNames.add( columnNamePrefix + column.name ); - tableScanColumnOrdinalities.put( columnId, ordinal ); + tableScanColumnOrdinalities.put( column.id, ordinal ); } } catch ( UnexpectedTypeException e ) { throw new RuntimeException( "This exception will never be thrown since checks have been" diff --git a/plugins/cql-language/src/main/java/org/polypheny/db/cql/CqlQueryBuilder.java b/plugins/cql-language/src/main/java/org/polypheny/db/cql/CqlQueryBuilder.java index 84567a86ad..65fb8ccfae 100644 --- a/plugins/cql-language/src/main/java/org/polypheny/db/cql/CqlQueryBuilder.java +++ 
b/plugins/cql-language/src/main/java/org/polypheny/db/cql/CqlQueryBuilder.java @@ -156,7 +156,7 @@ public TableIndex addTableIndex( String schemaName, String tableName ) throws Un String fullyQualifiedTableName = schemaName + "." + tableName; if ( !this.tableIndexMapping.containsKey( fullyQualifiedTableName ) ) { - TableIndex tableIndex = TableIndex.createIndex( databaseName, schemaName, tableName ); + TableIndex tableIndex = TableIndex.createIndex( schemaName, tableName ); this.tableIndexMapping.put( tableIndex.fullyQualifiedName, tableIndex ); } return this.tableIndexMapping.get( fullyQualifiedTableName ); diff --git a/plugins/cql-language/src/main/java/org/polypheny/db/cql/TableIndex.java b/plugins/cql-language/src/main/java/org/polypheny/db/cql/TableIndex.java index 09096f59d0..8f344eb0ae 100644 --- a/plugins/cql-language/src/main/java/org/polypheny/db/cql/TableIndex.java +++ b/plugins/cql-language/src/main/java/org/polypheny/db/cql/TableIndex.java @@ -18,8 +18,8 @@ import lombok.extern.slf4j.Slf4j; import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalTable; -import org.polypheny.db.catalog.exceptions.UnknownDatabaseException; import org.polypheny.db.catalog.exceptions.UnknownSchemaException; import org.polypheny.db.catalog.exceptions.UnknownTableException; import org.polypheny.db.cql.exception.UnknownIndexException; @@ -45,13 +45,14 @@ public TableIndex( final LogicalTable catalogTable, final String schemaName, fin } - public static TableIndex createIndex( String inDatabase, String schemaName, String tableName ) throws UnknownIndexException { + public static TableIndex createIndex( String schemaName, String tableName ) throws UnknownIndexException { try { log.debug( "Creating TableIndex." ); Catalog catalog = Catalog.getInstance(); - LogicalTable table = catalog.getTable( schemaName, tableName ); + LogicalNamespace namespace = catalog.getNamespace( schemaName ); + LogicalTable table = catalog.getLogicalRel( namespace.id ).getTable( tableName ); return new TableIndex( table, schemaName, tableName ); - } catch ( UnknownTableException | UnknownDatabaseException | UnknownSchemaException e ) { + } catch ( UnknownTableException e ) { throw new UnknownIndexException( "Cannot find an underlying table for the specified table name: " + schemaName + "." + tableName + "." 
); } } diff --git a/plugins/cql-language/src/test/java/org/polypheny/db/cql/utils/CombinerTest.java b/plugins/cql-language/src/test/java/org/polypheny/db/cql/utils/CombinerTest.java index c8d8164e59..476dd37cb0 100644 --- a/plugins/cql-language/src/test/java/org/polypheny/db/cql/utils/CombinerTest.java +++ b/plugins/cql-language/src/test/java/org/polypheny/db/cql/utils/CombinerTest.java @@ -43,8 +43,8 @@ public class CombinerTest extends AlgBuildTestHelper { public CombinerTest() throws UnknownIndexException { super( AlgBuildLevel.TABLE_SCAN ); - employee = TableIndex.createIndex( "APP", "test", "employee" ); - dept = TableIndex.createIndex( "APP", "test", "dept" ); + employee = TableIndex.createIndex( "test", "employee" ); + dept = TableIndex.createIndex( "test", "dept" ); } diff --git a/plugins/cql-language/src/test/java/org/polypheny/db/cql/utils/IndexTest.java b/plugins/cql-language/src/test/java/org/polypheny/db/cql/utils/IndexTest.java index e6e84a4c9e..8d37debc9d 100644 --- a/plugins/cql-language/src/test/java/org/polypheny/db/cql/utils/IndexTest.java +++ b/plugins/cql-language/src/test/java/org/polypheny/db/cql/utils/IndexTest.java @@ -41,14 +41,14 @@ public void testCreateColumnIndexThrowsUnknownIndexException() throws UnknownInd @Test public void testCreateTableIndex() throws UnknownIndexException { - TableIndex index = TableIndex.createIndex( "APP", "test", "testtable" ); + TableIndex index = TableIndex.createIndex( "test", "testtable" ); Assert.assertEquals( index.fullyQualifiedName, "test.testtable" ); } @Test(expected = UnknownIndexException.class) public void testCreateTableIndexThrowsUnknownIndexException() throws UnknownIndexException { - TableIndex.createIndex( "APP", "hello", "world" ); + TableIndex.createIndex( "hello", "world" ); } } diff --git a/plugins/cql-language/src/test/java/org/polypheny/db/cql/utils/helper/AlgBuildTestHelper.java b/plugins/cql-language/src/test/java/org/polypheny/db/cql/utils/helper/AlgBuildTestHelper.java index 62b6412aed..373fefa862 100644 --- a/plugins/cql-language/src/test/java/org/polypheny/db/cql/utils/helper/AlgBuildTestHelper.java +++ b/plugins/cql-language/src/test/java/org/polypheny/db/cql/utils/helper/AlgBuildTestHelper.java @@ -73,8 +73,8 @@ public AlgBuildTestHelper( AlgBuildLevel algBuildLevel ) throws UnknownIndexExce List inputRefs = new ArrayList<>(); List columnNames = new ArrayList<>(); List tableIndices = new ArrayList<>(); - tableIndices.add( TableIndex.createIndex( "APP", "test", "employee" ) ); - tableIndices.add( TableIndex.createIndex( "APP", "test", "dept" ) ); + tableIndices.add( TableIndex.createIndex( "test", "employee" ) ); + tableIndices.add( TableIndex.createIndex( "test", "dept" ) ); Catalog catalog = Catalog.getInstance(); for ( TableIndex tableIndex : tableIndices ) { diff --git a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java index 423f879ed7..4ffb2d2fb4 100644 --- a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java +++ b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java @@ -78,7 +78,7 @@ public class CsvSource extends DataSource { private Map> exportedColumnCache; - public CsvSource( final int storeId, final String uniqueName, final Map settings ) { + public CsvSource( final long storeId, final String uniqueName, final Map settings ) { super( storeId, uniqueName, settings, true ); setCsvDir( settings ); diff --git 
a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherAlterDatabaseAlias.java b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherAlterDatabaseAlias.java index 2f23e907a1..f00b8a366f 100644 --- a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherAlterDatabaseAlias.java +++ b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherAlterDatabaseAlias.java @@ -19,6 +19,7 @@ import java.util.List; import lombok.Getter; import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.catalog.logistic.Pattern; import org.polypheny.db.cypher.CypherParameter; @@ -53,7 +54,7 @@ public CypherAlterDatabaseAlias( @Override public void execute( Context context, Statement statement, QueryParameters parameters ) { - List<LogicalGraph> graphs = Catalog.getInstance().getGraphs( new Pattern( targetName ) ); + List<LogicalNamespace> graphs = Catalog.getInstance().getNamespaces( new Pattern( targetName ) ); if ( graphs.size() != 1 ) { if ( !ifExists ) { diff --git a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherCreateDatabaseAlias.java b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherCreateDatabaseAlias.java index 0f5bcb9ef9..c0c4af6e5e 100644 --- a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherCreateDatabaseAlias.java +++ b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherCreateDatabaseAlias.java @@ -19,6 +19,7 @@ import java.util.List; import lombok.Getter; import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.catalog.logistic.Pattern; import org.polypheny.db.cypher.CypherParameter; @@ -56,7 +57,7 @@ public CypherCreateDatabaseAlias( @Override public void execute( Context context, Statement statement, QueryParameters parameters ) { - List<LogicalGraph> graphs = Catalog.getInstance().getGraphs( new Pattern( targetName ) ); + List<LogicalNamespace> graphs = Catalog.getInstance().getNamespaces( new Pattern( targetName ) ); if ( graphs.size() != 1 ) { throw new RuntimeException( "Error while creating a new graph database alias." ); } diff --git a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherDropAlias.java b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherDropAlias.java index 62ccf2f2f2..3ab3e51e70 100644 --- a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherDropAlias.java +++ b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherDropAlias.java @@ -19,6 +19,7 @@ import java.util.List; import lombok.Getter; import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.catalog.logistic.Pattern; import org.polypheny.db.cypher.CypherParameter; @@ -47,7 +48,7 @@ public CypherDropAlias( ParserPos pos, CypherSimpleEither - List<LogicalGraph> graphs = Catalog.getInstance().getGraphs( new Pattern( aliasName ) ); + List<LogicalNamespace> graphs = Catalog.getInstance().getNamespaces( new Pattern( aliasName ) ); if ( graphs.size() != 1 ) { throw new RuntimeException( "Error while dropping a graph database alias." 
); } diff --git a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherDropDatabase.java b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherDropDatabase.java index 7f470c0b75..6b5e84b0b2 100644 --- a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherDropDatabase.java +++ b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherDropDatabase.java @@ -19,6 +19,7 @@ import java.util.List; import java.util.concurrent.TimeUnit; import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.catalog.logistic.Pattern; import org.polypheny.db.cypher.CypherParameter; @@ -64,7 +65,7 @@ public void execute( Context context, Statement statement, QueryParameters param } } - List<LogicalGraph> databases = Catalog.getInstance().getGraphs( new Pattern( databaseName ) ); + List<LogicalNamespace> databases = Catalog.getInstance().getNamespaces( new Pattern( databaseName ) ); if ( databases.size() != 1 ) { if ( !ifExists ) { diff --git a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/cypher2alg/CypherToAlgConverter.java b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/cypher2alg/CypherToAlgConverter.java index cc7857913a..025d641500 100644 --- a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/cypher2alg/CypherToAlgConverter.java +++ b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/cypher2alg/CypherToAlgConverter.java @@ -109,7 +109,7 @@ public AlgRoot convert( CypherNode query, ExtendedQueryParameters parameters, Al databaseId = parameters.getDatabaseId(); } - LogicalGraph graph = Catalog.getInstance().getGraph( databaseId ); + LogicalGraph graph = Catalog.getInstance().getLogicalGraph( databaseId ).getGraph( databaseId ); if ( parameters.isFullGraph() ) { // simple full graph scan @@ -138,13 +138,7 @@ private AlgNode buildFullScan( LogicalGraph graph ) { private long getDatabaseId( ExtendedQueryParameters parameters ) { - long databaseId; - try { - databaseId = Catalog.getInstance().getSchema( Catalog.defaultDatabaseId, parameters.getDatabaseName() ).id; - } catch ( UnknownSchemaException e ) { - throw new RuntimeException( "Error on retrieving the used namespace" ); - } - return databaseId; + return Catalog.getInstance().getNamespace( parameters.getDatabaseName() ).id; } diff --git a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/ddl/CypherAddPlacement.java b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/ddl/CypherAddPlacement.java index 9faa6ed7a6..a6236c6e09 100644 --- a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/ddl/CypherAddPlacement.java +++ b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/ddl/CypherAddPlacement.java @@ -23,6 +23,7 @@ import org.polypheny.db.adapter.AdapterManager; import org.polypheny.db.adapter.DataStore; import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.catalog.logistic.Pattern; import org.polypheny.db.cypher.CypherParameter; @@ -64,7 +65,7 @@ public void execute( Context context, Statement statement, QueryParameters param Catalog catalog = Catalog.getInstance(); AdapterManager adapterManager = AdapterManager.getInstance(); - List<LogicalGraph> graphs = catalog.getGraphs( new Pattern( this.database ) ); + List<LogicalGraph> graphs = catalog.getNamespaces( new Pattern( this.database ) 
).stream().map( g -> catalog.getLogicalGraph( g.id ).getGraph( g.id ) ).collect( Collectors.toList() ); List<DataStore> dataStores = Stream.of( store ) .map( store -> (DataStore) adapterManager.getAdapter( store ) ) diff --git a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/ddl/CypherDropPlacement.java b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/ddl/CypherDropPlacement.java index d44d84bc74..b1d7b64a3d 100644 --- a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/ddl/CypherDropPlacement.java +++ b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/ddl/CypherDropPlacement.java @@ -22,6 +22,7 @@ import org.polypheny.db.adapter.AdapterManager; import org.polypheny.db.adapter.DataStore; import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.catalog.logistic.Pattern; import org.polypheny.db.cypher.CypherParameter; @@ -55,7 +56,7 @@ public void execute( Context context, Statement statement, QueryParameters param Catalog catalog = Catalog.getInstance(); AdapterManager adapterManager = AdapterManager.getInstance(); - List<LogicalGraph> graphs = catalog.getGraphs( new Pattern( this.databaseName ) ); + List<LogicalNamespace> graphs = catalog.getNamespaces( new Pattern( this.databaseName ) ); DataStore dataStore = Stream.of( storeName ) .map( store -> (DataStore) adapterManager.getAdapter( storeName ) ) diff --git a/plugins/explore-by-example/src/main/java/org/polypheny/db/exploreByExample/ExploreQueryProcessor.java b/plugins/explore-by-example/src/main/java/org/polypheny/db/exploreByExample/ExploreQueryProcessor.java index fe3b0c67a2..33dc5360db 100644 --- a/plugins/explore-by-example/src/main/java/org/polypheny/db/exploreByExample/ExploreQueryProcessor.java +++ b/plugins/explore-by-example/src/main/java/org/polypheny/db/exploreByExample/ExploreQueryProcessor.java @@ -27,7 +27,6 @@ import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.exceptions.UnknownDatabaseException; import org.polypheny.db.catalog.exceptions.UnknownSchemaException; import org.polypheny.db.catalog.exceptions.UnknownUserException; import org.polypheny.db.config.RuntimeConfig; @@ -67,7 +66,7 @@ public ExploreQueryProcessor( final TransactionManager transactionManager, Authe private Transaction getTransaction() { try { return transactionManager.startTransaction( userId, false, "Explore-by-Example", MultimediaFlavor.FILE ); - } catch ( UnknownUserException | UnknownDatabaseException | UnknownSchemaException e ) { + } catch ( UnknownUserException | UnknownSchemaException e ) { throw new RuntimeException( "Error while starting transaction", e ); } } diff --git a/plugins/hsqldb-adapter/src/main/java/org/polypheny/db/hsqldb/stores/HsqldbStore.java b/plugins/hsqldb-adapter/src/main/java/org/polypheny/db/hsqldb/stores/HsqldbStore.java index d85af4a71a..673d20fa29 100644 --- a/plugins/hsqldb-adapter/src/main/java/org/polypheny/db/hsqldb/stores/HsqldbStore.java +++ b/plugins/hsqldb-adapter/src/main/java/org/polypheny/db/hsqldb/stores/HsqldbStore.java @@ -69,7 +69,7 @@ public class HsqldbStore extends AbstractJdbcStore { - public HsqldbStore( final int storeId, final String uniqueName, final Map<String, String> settings ) { + public HsqldbStore( final long storeId, final String uniqueName, final Map<String, String> settings ) { super( storeId, uniqueName, settings, HsqldbSqlDialect.DEFAULT, settings.get( 
"type" ).equals( "File" ) ); } @@ -119,9 +119,9 @@ public Namespace getCurrentSchema() { @Override public void addIndex( Context context, CatalogIndex catalogIndex, List partitionIds ) { - List ccps = Catalog.getInstance().getColumnPlacementsOnAdapterPerTable( getAdapterId(), catalogIndex.key.tableId ); + List ccps = Catalog.getInstance().getAllocRel( catalogIndex.key.namespaceId ).getColumnPlacementsOnAdapterPerTable( getAdapterId(), catalogIndex.key.tableId ); List partitionPlacements = new ArrayList<>(); - partitionIds.forEach( id -> partitionPlacements.add( catalog.getPartitionPlacement( getAdapterId(), id ) ) ); + partitionIds.forEach( id -> partitionPlacements.add( catalog.getAllocRel( catalogIndex.key.namespaceId ).getPartitionPlacement( getAdapterId(), id ) ) ); String physicalIndexName = getPhysicalIndexName( catalogIndex.key.tableId, catalogIndex.id ); for ( CatalogPartitionPlacement partitionPlacement : partitionPlacements ) { @@ -152,14 +152,14 @@ public void addIndex( Context context, CatalogIndex catalogIndex, List par builder.append( ")" ); executeUpdate( builder, context ); } - Catalog.getInstance().setIndexPhysicalName( catalogIndex.id, physicalIndexName ); + Catalog.getInstance().getLogicalRel( catalogIndex.key.namespaceId ).setIndexPhysicalName( catalogIndex.id, physicalIndexName ); } @Override public void dropIndex( Context context, CatalogIndex catalogIndex, List partitionIds ) { List partitionPlacements = new ArrayList<>(); - partitionIds.forEach( id -> partitionPlacements.add( catalog.getPartitionPlacement( getAdapterId(), id ) ) ); + partitionIds.forEach( id -> partitionPlacements.add( catalog.getAllocRel( catalogIndex.key.namespaceId ).getPartitionPlacement( getAdapterId(), id ) ) ); for ( CatalogPartitionPlacement partitionPlacement : partitionPlacements ) { StringBuilder builder = new StringBuilder(); diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java index be0acf6839..50d8541a4a 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java @@ -123,8 +123,8 @@ public void truncate( Context context, LogicalTable catalogTable ) { // We get the physical schema / table name by checking existing column placements of the same logical table placed on this store. // This works because there is only one physical table for each logical table on JDBC stores. The reason for choosing this // approach rather than using the default physical schema / table names is that this approach allows truncating linked tables. 
- String physicalTableName = Catalog.getInstance().getPartitionPlacementsByTableOnAdapter( getAdapterId(), catalogTable.id ).get( 0 ).physicalTableName; - String physicalSchemaName = Catalog.getInstance().getPartitionPlacementsByTableOnAdapter( getAdapterId(), catalogTable.id ).get( 0 ).physicalSchemaName; + String physicalTableName = Catalog.getInstance().getAllocRel( catalogTable.namespaceId ).getPartitionPlacementsByTableOnAdapter( getAdapterId(), catalogTable.id ).get( 0 ).physicalTableName; + String physicalSchemaName = Catalog.getInstance().getAllocRel( catalogTable.namespaceId ).getPartitionPlacementsByTableOnAdapter( getAdapterId(), catalogTable.id ).get( 0 ).physicalSchemaName; StringBuilder builder = new StringBuilder(); builder.append( "TRUNCATE TABLE " ) .append( dialect.quoteIdentifier( physicalSchemaName ) ) diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/stores/AbstractJdbcStore.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/stores/AbstractJdbcStore.java index 4dd054bdec..4caa099c7b 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/stores/AbstractJdbcStore.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/stores/AbstractJdbcStore.java @@ -60,7 +60,7 @@ public abstract class AbstractJdbcStore extends DataStore implements ExtensionPo public AbstractJdbcStore( - int storeId, + long storeId, String uniqueName, Map settings, SqlDialect dialect, @@ -167,7 +167,7 @@ protected StringBuilder buildCreateTableQuery( String schemaName, String physica @Override public void addColumn( Context context, LogicalTable catalogTable, LogicalColumn logicalColumn ) { String physicalColumnName = getPhysicalColumnName( logicalColumn.id ); - for ( CatalogPartitionPlacement partitionPlacement : catalog.getPartitionPlacementsByTableOnAdapter( this.getAdapterId(), catalogTable.id ) ) { + for ( CatalogPartitionPlacement partitionPlacement : catalog.getAllocRel( catalogTable.namespaceId ).getPartitionPlacementsByTableOnAdapter( this.getAdapterId(), catalogTable.id ) ) { String physicalTableName = partitionPlacement.physicalTableName; String physicalSchemaName = partitionPlacement.physicalSchemaName; StringBuilder query = buildAddColumnQuery( physicalSchemaName, physicalTableName, physicalColumnName, catalogTable, logicalColumn ); @@ -178,7 +178,7 @@ public void addColumn( Context context, LogicalTable catalogTable, LogicalColumn executeUpdate( query, context ); } // Add physical name to placement - catalog.updateColumnPlacementPhysicalNames( + catalog.getAllocRel( catalogTable.namespaceId ).updateColumnPlacementPhysicalNames( getAdapterId(), logicalColumn.id, physicalSchemaName, @@ -266,7 +266,7 @@ public void updateColumnType( Context context, CatalogColumnPlacement columnPlac if ( !this.dialect.supportsNestedArrays() && logicalColumn.collectionsType != null ) { return; } - for ( CatalogPartitionPlacement partitionPlacement : catalog.getPartitionPlacementsByTableOnAdapter( columnPlacement.adapterId, columnPlacement.tableId ) ) { + for ( CatalogPartitionPlacement partitionPlacement : catalog.getAllocRel( logicalColumn.namespaceId ).getPartitionPlacementsByTableOnAdapter( columnPlacement.adapterId, columnPlacement.tableId ) ) { StringBuilder builder = new StringBuilder(); builder.append( "ALTER TABLE " ) .append( dialect.quoteIdentifier( partitionPlacement.physicalSchemaName ) ) @@ -296,10 +296,10 @@ public void dropTable( Context context, LogicalTable 
catalogTable, List<Long> pa String physicalSchemaName; List<CatalogPartitionPlacement> partitionPlacements = new ArrayList<>(); - partitionIds.forEach( id -> partitionPlacements.add( catalog.getPartitionPlacement( getAdapterId(), id ) ) ); + partitionIds.forEach( id -> partitionPlacements.add( catalog.getAllocRel( catalogTable.namespaceId ).getPartitionPlacement( getAdapterId(), id ) ) ); for ( CatalogPartitionPlacement partitionPlacement : partitionPlacements ) { - catalog.deletePartitionPlacement( getAdapterId(), partitionPlacement.partitionId ); + catalog.getAllocRel( catalogTable.namespaceId ).deletePartitionPlacement( getAdapterId(), partitionPlacement.partitionId ); physicalSchemaName = partitionPlacement.physicalSchemaName; physicalTableName = partitionPlacement.physicalTableName; @@ -320,7 +320,7 @@ public void dropTable( Context context, LogicalTable catalogTable, List<Long> pa @Override public void dropColumn( Context context, CatalogColumnPlacement columnPlacement ) { - for ( CatalogPartitionPlacement partitionPlacement : catalog.getPartitionPlacementsByTableOnAdapter( columnPlacement.adapterId, columnPlacement.tableId ) ) { + for ( CatalogPartitionPlacement partitionPlacement : catalog.getAllocRel( columnPlacement.namespaceId ).getPartitionPlacementsByTableOnAdapter( columnPlacement.adapterId, columnPlacement.tableId ) ) { StringBuilder builder = new StringBuilder(); builder.append( "ALTER TABLE " ) .append( dialect.quoteIdentifier( partitionPlacement.physicalSchemaName ) ) @@ -337,7 +337,7 @@ public void truncate( Context context, LogicalTable catalogTable ) { // We get the physical schema / table name by checking existing column placements of the same logical table placed on this store. // This works because there is only one physical table for each logical table on JDBC stores. The reason for choosing this // approach rather than using the default physical schema / table names is that this approach allows truncating linked tables. 
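
The same pattern generalizes to the whole truncate: one TRUNCATE statement per partition placement of the table on this adapter. A minimal sketch under the same assumptions (dialect, executeUpdate and context as in the surrounding class; the SQL assembly mirrors the builder code elsewhere in this patch):

// Hedged sketch: truncate every physical table backing catalogTable on this adapter.
for ( CatalogPartitionPlacement placement : catalog
        .getAllocRel( catalogTable.namespaceId )
        .getPartitionPlacementsByTableOnAdapter( getAdapterId(), catalogTable.id ) ) {
    StringBuilder builder = new StringBuilder();
    builder.append( "TRUNCATE TABLE " )
            .append( dialect.quoteIdentifier( placement.physicalSchemaName ) )
            .append( "." )
            .append( dialect.quoteIdentifier( placement.physicalTableName ) );
    executeUpdate( builder, context );
}
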
- for ( CatalogPartitionPlacement partitionPlacement : catalog.getPartitionPlacementsByTableOnAdapter( getAdapterId(), catalogTable.id ) ) { + for ( CatalogPartitionPlacement partitionPlacement : catalog.getAllocRel( catalogTable.namespaceId ).getPartitionPlacementsByTableOnAdapter( getAdapterId(), catalogTable.id ) ) { String physicalTableName = partitionPlacement.physicalTableName; String physicalSchemaName = partitionPlacement.physicalSchemaName; StringBuilder builder = new StringBuilder(); diff --git a/plugins/mql-language/src/main/java/org/polypheny/db/languages/MqlProcessorImpl.java b/plugins/mql-language/src/main/java/org/polypheny/db/languages/MqlProcessorImpl.java index 95129a4f00..43624f723a 100644 --- a/plugins/mql-language/src/main/java/org/polypheny/db/languages/MqlProcessorImpl.java +++ b/plugins/mql-language/src/main/java/org/polypheny/db/languages/MqlProcessorImpl.java @@ -104,8 +104,8 @@ public Pair validate( Transaction transaction, Node parsed, b public boolean needsDdlGeneration( Node query, QueryParameters parameters ) { if ( query instanceof MqlCollectionStatement ) { return Catalog.getInstance() - .getTables( new Pattern( ((MqlQueryParameters) parameters).getDatabase() ), null ) - .stream() + .getNamespaces( Pattern.of( ((MqlQueryParameters) parameters).getDatabase() ) ) + .stream().flatMap( n -> Catalog.getInstance().getLogicalDoc( n.id ).getCollections( null ).stream() ) .noneMatch( t -> t.name.equals( ((MqlCollectionStatement) query).getCollection() ) ); } return false; diff --git a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlCreateCollection.java b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlCreateCollection.java index 0ca58622b9..ec37168a47 100644 --- a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlCreateCollection.java +++ b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlCreateCollection.java @@ -66,12 +66,7 @@ public void execute( Context context, Statement statement, QueryParameters param Catalog catalog = Catalog.getInstance(); AdapterManager adapterManager = AdapterManager.getInstance(); - long schemaId; - try { - schemaId = catalog.getSchema( Catalog.defaultDatabaseId, ((MqlQueryParameters) parameters).getDatabase() ).id; - } catch ( UnknownSchemaException e ) { - throw new RuntimeException( "The used document database (Polypheny Schema) is not available." ); - } + long schemaId = catalog.getNamespace( ((MqlQueryParameters) parameters).getDatabase() ).id; PlacementType placementType = PlacementType.AUTOMATIC; diff --git a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlCreateView.java b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlCreateView.java index fba45b0963..7f8c3bf7c0 100644 --- a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlCreateView.java +++ b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlCreateView.java @@ -58,12 +58,7 @@ public void execute( Context context, Statement statement, QueryParameters param Catalog catalog = Catalog.getInstance(); String database = ((MqlQueryParameters) parameters).getDatabase(); - long schemaId; - try { - schemaId = catalog.getSchema( context.getDatabaseId(), database ).id; - } catch ( UnknownSchemaException e ) { - throw new RuntimeException( "Poly schema was not found." 
);
-        }
+        long schemaId = catalog.getNamespace( database ).id;
 
         Node mqlNode = statement.getTransaction()
                 .getProcessor( QueryLanguage.from( "mongo" ) )
diff --git a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlDeletePlacement.java b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlDeletePlacement.java
index fcd547930e..b3ee6474b7 100644
--- a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlDeletePlacement.java
+++ b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlDeletePlacement.java
@@ -46,14 +46,9 @@ public void execute( Context context, Statement statement, QueryParameters param
         final Catalog catalog = Catalog.getInstance();
         AdapterManager adapterManager = AdapterManager.getInstance();
 
-        long namespaceId;
-        try {
-            namespaceId = catalog.getSchema( Catalog.defaultDatabaseId, ((MqlQueryParameters) parameters).getDatabase() ).id;
-        } catch ( UnknownSchemaException e ) {
-            throw new RuntimeException( "The used document database (Polypheny Schema) is not available." );
-        }
+        long namespaceId = catalog.getNamespace( ((MqlQueryParameters) parameters).getDatabase() ).id;
 
-        List collections = catalog.getCollections( namespaceId, new Pattern( getCollection() ) );
+        List collections = catalog.getLogicalDoc( namespaceId ).getCollections( new Pattern( getCollection() ) );
 
         if ( collections.size() != 1 ) {
             throw new RuntimeException( "Error while adding new collection placement, collection not found." );
diff --git a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlDrop.java b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlDrop.java
index f18bf46d62..2ce57c64e2 100644
--- a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlDrop.java
+++ b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlDrop.java
@@ -49,13 +49,13 @@ public void execute( Context context, Statement statement, QueryParameters param
         Catalog catalog = Catalog.getInstance();
         String database = ((MqlQueryParameters) parameters).getDatabase();
 
-        if ( catalog.getNamespaces( Catalog.defaultDatabaseId, new Pattern( database ) ).size() != 1 ) {
+        if ( catalog.getNamespaces( new Pattern( database ) ).size() != 1 ) {
             // dropping a document database (Polyschema) that does not exist is a no-op
             return;
         }
 
-        LogicalNamespace namespace = catalog.getNamespaces( Catalog.defaultDatabaseId, new Pattern( database ) ).get( 0 );
-        List collections = catalog.getCollections( namespace.id, new Pattern( getCollection() ) );
+        LogicalNamespace namespace = catalog.getNamespaces( new Pattern( database ) ).get( 0 );
+        List collections = catalog.getLogicalDoc( namespace.id ).getCollections( new Pattern( getCollection() ) );
         if ( collections.size() != 1 ) {
             // dropping a collection that does not exist is a no-op
             return;
diff --git a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlRenameCollection.java b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlRenameCollection.java
index 20d8ed9096..2e23d48534 100644
--- a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlRenameCollection.java
+++ b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlRenameCollection.java
@@ -58,8 +58,8 @@ public void execute( Context context, Statement statement, QueryParameters param
         String database = ((MqlQueryParameters) parameters).getDatabase();
 
         try {
-            LogicalNamespace schema = catalog.getSchema( Catalog.defaultDatabaseId, database 
); - List tables = catalog.getTables( schema.id, null ); + LogicalNamespace schema = catalog.getNamespace( database ); + List tables = catalog.getLogicalRel( schema.id ).getTables( null ); if ( dropTarget ) { Optional newTable = tables.stream() @@ -80,7 +80,7 @@ public void execute( Context context, Statement statement, QueryParameters param } DdlManager.getInstance().renameTable( table.get(), newName, statement ); - } catch ( DdlOnSourceException | EntityAlreadyExistsException | UnknownSchemaException e ) { + } catch ( DdlOnSourceException | EntityAlreadyExistsException e ) { throw new RuntimeException( "The rename was not successful, due to an error: " + e.getMessage() ); } } diff --git a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlUseDatabase.java b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlUseDatabase.java index e2df3ec3bf..948377dabd 100644 --- a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlUseDatabase.java +++ b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlUseDatabase.java @@ -17,7 +17,6 @@ package org.polypheny.db.languages.mql; import lombok.Getter; -import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.catalog.exceptions.NamespaceAlreadyExistsException; import org.polypheny.db.ddl.DdlManager; @@ -44,7 +43,7 @@ public MqlUseDatabase( ParserPos pos, String database ) { @Override public void execute( Context context, Statement statement, QueryParameters parameters ) { try { - DdlManager.getInstance().createNamespace( this.database, Catalog.defaultDatabaseId, NamespaceType.DOCUMENT, Catalog.defaultUserId, true, false ); + DdlManager.getInstance().createNamespace( this.database, NamespaceType.DOCUMENT, true, false ); } catch ( NamespaceAlreadyExistsException e ) { throw new RuntimeException( "The schema creation failed" ); } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/IdBuilder.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/IdBuilder.java index c66b81014c..89838c4521 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/IdBuilder.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/IdBuilder.java @@ -17,26 +17,28 @@ package org.polypheny.db.catalog; import java.util.concurrent.atomic.AtomicLong; +import lombok.Value; +@Value public class IdBuilder { - private final AtomicLong snapshotId; - private final AtomicLong databaseId; - private final AtomicLong namespaceId; - private final AtomicLong entityId; + AtomicLong snapshotId; + AtomicLong databaseId; + AtomicLong namespaceId; + AtomicLong entityId; - private final AtomicLong allocId; - private final AtomicLong fieldId; + AtomicLong allocId; + AtomicLong fieldId; - private final AtomicLong userId; + AtomicLong userId; - private final AtomicLong indexId; + AtomicLong indexId; - private final AtomicLong keyId; + AtomicLong keyId; - private final AtomicLong adapterId; + AtomicLong adapterId; - private final AtomicLong interfaceId; + AtomicLong interfaceId; private static IdBuilder INSTANCE; diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java index 120ad43206..a7c8f3938b 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java @@ -16,11 +16,13 @@ package 
org.polypheny.db.catalog;
 
+import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
 import io.activej.serializer.BinarySerializer;
 import io.activej.serializer.annotations.Deserialize;
 import io.activej.serializer.annotations.Serialize;
 import java.beans.PropertyChangeSupport;
+import java.util.Collections;
 import java.util.List;
 import java.util.Locale;
 import java.util.Map;
@@ -52,18 +54,26 @@ import org.polypheny.db.catalog.entity.logical.LogicalEntity;
 import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.catalog.entity.physical.PhysicalEntity;
+import org.polypheny.db.catalog.exceptions.GenericCatalogException;
 import org.polypheny.db.catalog.exceptions.UnknownAdapterException;
+import org.polypheny.db.catalog.exceptions.UnknownColumnException;
 import org.polypheny.db.catalog.exceptions.UnknownQueryInterfaceException;
+import org.polypheny.db.catalog.exceptions.UnknownTableException;
 import org.polypheny.db.catalog.exceptions.UnknownUserException;
 import org.polypheny.db.catalog.logical.DocumentCatalog;
 import org.polypheny.db.catalog.logical.GraphCatalog;
 import org.polypheny.db.catalog.logical.RelationalCatalog;
+import org.polypheny.db.catalog.logistic.Collation;
+import org.polypheny.db.catalog.logistic.EntityType;
+import org.polypheny.db.catalog.logistic.ForeignKeyOption;
 import org.polypheny.db.catalog.logistic.NamespaceType;
 import org.polypheny.db.catalog.logistic.Pattern;
+import org.polypheny.db.catalog.logistic.PlacementType;
 import org.polypheny.db.catalog.physical.PolyPhysicalCatalog;
 import org.polypheny.db.catalog.snapshot.FullSnapshot;
 import org.polypheny.db.catalog.snapshot.Snapshot;
 import org.polypheny.db.transaction.Transaction;
+import org.polypheny.db.type.PolyType;
 
 
 /**
@@ -111,6 +121,12 @@ public PolyCatalog() {
             new ConcurrentHashMap<>(),
             new ConcurrentHashMap<>(),
             new ConcurrentHashMap<>() );
+
+        try {
+            insertDefaultData();
+        } catch ( UnknownAdapterException e ) {
+            throw new RuntimeException( e );
+        }
     }
 
 
@@ -132,6 +148,149 @@ public PolyCatalog(
     }
 
 
+    /**
+     * Fills the catalog database with default data; skips if the data has already been inserted
+     */
+    private void insertDefaultData() throws UnknownAdapterException {
+
+        //////////////
+        // init users
+        long systemId = addUser( "system", "" );
+
+        addUser( "pa", "" );
+
+        Catalog.defaultUserId = systemId;
+
+        //////////////
+        // init schema
+
+        long namespaceId = addNamespace( "public", NamespaceType.getDefault(), false );
+
+        //////////////
+        // init adapters
+        if ( adapters.size() == 0 ) {
+            // Deploy default store
+            addAdapter( "hsqldb", defaultStore.getAdapterName(), AdapterType.STORE, defaultStore.getDefaultSettings() );
+
+            // Deploy default CSV source
+            long adapter = addAdapter( "hr", defaultSource.getAdapterName(), AdapterType.SOURCE, defaultSource.getDefaultSettings() );
+
+            // init csv tables
+            CatalogAdapter csv = getAdapter( "hr" );
+
+            long id = getLogicalRel( namespaceId ).addTable( "depts", EntityType.SOURCE, false );
+
+            id = getLogicalRel( namespaceId ).addTable( "emps", EntityType.SOURCE, false );
+
+            id = getLogicalRel( namespaceId ).addTable( "emp", EntityType.SOURCE, false );
+
+            id = getLogicalRel( namespaceId ).addTable( "work", EntityType.SOURCE, false );
+            try {
+                addDefaultCsvColumns( csv );
+            } catch ( UnknownTableException | GenericCatalogException | UnknownColumnException e ) {
+                throw new RuntimeException( e );
+            }
+
+
+        }
+
+        commit();
+
+    }
+
+
+    /**
+     * Initializes the default columns for the CSV files
+     */
+    private void 
addDefaultCsvColumns( CatalogAdapter csv ) throws UnknownTableException, GenericCatalogException, UnknownColumnException {
+        LogicalNamespace schema = getNamespace( "public" );
+        LogicalTable depts = getLogicalRel( schema.id ).getTable( "depts" );
+
+        addDefaultCsvColumn( csv, depts, "deptno", PolyType.INTEGER, null, 1, null );
+        addDefaultCsvColumn( csv, depts, "name", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 2, 20 );
+
+        LogicalTable emps = getLogicalRel( schema.id ).getTable( "emps" );
+        addDefaultCsvColumn( csv, emps, "empid", PolyType.INTEGER, null, 1, null );
+        addDefaultCsvColumn( csv, emps, "deptno", PolyType.INTEGER, null, 2, null );
+        addDefaultCsvColumn( csv, emps, "name", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 3, 20 );
+        addDefaultCsvColumn( csv, emps, "salary", PolyType.INTEGER, null, 4, null );
+        addDefaultCsvColumn( csv, emps, "commission", PolyType.INTEGER, null, 5, null );
+
+        LogicalTable emp = getLogicalRel( schema.id ).getTable( "emp" );
+        addDefaultCsvColumn( csv, emp, "employeeno", PolyType.INTEGER, null, 1, null );
+        addDefaultCsvColumn( csv, emp, "age", PolyType.INTEGER, null, 2, null );
+        addDefaultCsvColumn( csv, emp, "gender", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 3, 20 );
+        addDefaultCsvColumn( csv, emp, "maritalstatus", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 4, 20 );
+        addDefaultCsvColumn( csv, emp, "worklifebalance", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 5, 20 );
+        addDefaultCsvColumn( csv, emp, "education", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 6, 20 );
+        addDefaultCsvColumn( csv, emp, "monthlyincome", PolyType.INTEGER, null, 7, null );
+        addDefaultCsvColumn( csv, emp, "relationshipjoy", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 8, 20 );
+        addDefaultCsvColumn( csv, emp, "workingyears", PolyType.INTEGER, null, 9, null );
+        addDefaultCsvColumn( csv, emp, "yearsatcompany", PolyType.INTEGER, null, 10, null );
+
+        LogicalTable work = getLogicalRel( schema.id ).getTable( "work" );
+        addDefaultCsvColumn( csv, work, "employeeno", PolyType.INTEGER, null, 1, null );
+        addDefaultCsvColumn( csv, work, "educationfield", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 2, 20 );
+        addDefaultCsvColumn( csv, work, "jobinvolvement", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 3, 20 );
+        addDefaultCsvColumn( csv, work, "joblevel", PolyType.INTEGER, null, 4, null );
+        addDefaultCsvColumn( csv, work, "jobrole", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 5, 30 );
+        addDefaultCsvColumn( csv, work, "businesstravel", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 6, 20 );
+        addDefaultCsvColumn( csv, work, "department", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 7, 25 );
+        addDefaultCsvColumn( csv, work, "attrition", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 8, 20 );
+        addDefaultCsvColumn( csv, work, "dailyrate", PolyType.INTEGER, null, 9, null );
+
+        // set all needed primary keys
+        getLogicalRel( schema.id ).addPrimaryKey( depts.id, Collections.singletonList( getLogicalRel( schema.id ).getColumn( depts.id, "deptno" ).id ) );
+        getLogicalRel( schema.id ).addPrimaryKey( emps.id, Collections.singletonList( getLogicalRel( schema.id ).getColumn( emps.id, "empid" ).id ) );
+        getLogicalRel( schema.id ).addPrimaryKey( emp.id, Collections.singletonList( getLogicalRel( schema.id ).getColumn( emp.id, "employeeno" ).id ) );
+        getLogicalRel( schema.id ).addPrimaryKey( work.id, Collections.singletonList( getLogicalRel( schema.id ).getColumn( work.id, "employeeno" ).id ) 
); + + // set foreign keys + getLogicalRel( schema.id ).addForeignKey( + emps.id, + ImmutableList.of( getLogicalRel( schema.id ).getColumn( emps.id, "deptno" ).id ), + depts.id, + ImmutableList.of( getLogicalRel( schema.id ).getColumn( depts.id, "deptno" ).id ), + "fk_emps_depts", + ForeignKeyOption.NONE, + ForeignKeyOption.NONE ); + getLogicalRel( schema.id ).addForeignKey( + work.id, + ImmutableList.of( getLogicalRel( schema.id ).getColumn( work.id, "employeeno" ).id ), + emp.id, + ImmutableList.of( getLogicalRel( schema.id ).getColumn( emp.id, "employeeno" ).id ), + "fk_work_emp", + ForeignKeyOption.NONE, + ForeignKeyOption.NONE ); + } + + + private void addDefaultCsvColumn( CatalogAdapter csv, LogicalTable table, String name, PolyType type, Collation collation, int position, Integer length ) { + if ( !getLogicalRel( table.namespaceId ).checkIfExistsColumn( table.id, name ) ) { + long colId = getLogicalRel( table.namespaceId ).addColumn( name, table.id, position, type, null, length, null, null, null, false, collation ); + String filename = table.name + ".csv"; + if ( table.name.equals( "emp" ) || table.name.equals( "work" ) ) { + filename += ".gz"; + } + + getAllocRel( table.namespaceId ).addColumnPlacement( table, csv.id, colId, PlacementType.AUTOMATIC, filename, table.name, name, position ); + getAllocRel( table.namespaceId ).updateColumnPlacementPhysicalPosition( csv.id, colId, position ); + + // long partitionId = table.partitionProperty.partitionIds.get( 0 ); + // getAllocRel( table.namespaceId ).addPartitionPlacement( table.namespaceId, csv.id, table.id, partitionId, PlacementType.AUTOMATIC, DataPlacementRole.UPTODATE ); + } + } + + + private void addDefaultColumn( CatalogAdapter adapter, LogicalTable table, String name, PolyType type, Collation collation, int position, Integer length ) { + if ( !getLogicalRel( table.namespaceId ).checkIfExistsColumn( table.id, name ) ) { + long colId = getLogicalRel( table.namespaceId ).addColumn( name, table.id, position, type, null, length, null, null, null, false, collation ); + getAllocRel( table.namespaceId ).addColumnPlacement( table, adapter.id, colId, PlacementType.AUTOMATIC, "col" + colId, table.name, name, position ); + getAllocRel( table.namespaceId ).updateColumnPlacementPhysicalPosition( adapter.id, colId, position ); + } + } + + private void updateSnapshot() { this.fullSnapshot = new FullSnapshot( idBuilder.getNewSnapshotId(), logicalCatalogs ); } @@ -218,7 +377,7 @@ public LogicalEntity getLogicalEntity( String entityName ) { public LogicalEntity getLogicalEntity( long id ) { for ( LogicalCatalog catalog : logicalCatalogs.values() ) { LogicalEntity entity = catalog.getEntity( id ); - if( entity != null ) { + if ( entity != null ) { return entity; } } @@ -281,8 +440,10 @@ public void restoreViews( Transaction transaction ) { @Override - public int addUser( String name, String password ) { - return 0; + public long addUser( String name, String password ) { + long id = idBuilder.getNewUserId(); + users.put( id, new CatalogUser( id, name, password ) ); + return id; } @@ -292,15 +453,15 @@ public long addNamespace( String name, NamespaceType namespaceType, boolean case switch ( namespaceType ) { case RELATIONAL: - logicalCatalogs.put( id, new RelationalCatalog( namespace, idBuilder ) ); + logicalCatalogs.put( id, new RelationalCatalog( namespace ) ); allocationCatalogs.put( id, new PolyAllocRelCatalog() ); break; case DOCUMENT: - logicalCatalogs.put( id, new DocumentCatalog( namespace, idBuilder ) ); + logicalCatalogs.put( id, new 
DocumentCatalog( namespace ) ); allocationCatalogs.put( id, new PolyAllocDocCatalog() ); break; case GRAPH: - logicalCatalogs.put( id, new GraphCatalog( namespace, idBuilder ) ); + logicalCatalogs.put( id, new GraphCatalog( namespace ) ); allocationCatalogs.put( id, new PolyAllocGraphCatalog() ); break; } @@ -354,7 +515,8 @@ public void renameNamespace( long id, String name ) { if ( logicalCatalogs.get( id ) == null ) { return; } - logicalCatalogs.get( id ).withLogicalNamespace( logicalCatalogs.get( id ).getLogicalNamespace().withName( name ) ); + + logicalCatalogs.put( id, logicalCatalogs.get( id ).withLogicalNamespace( logicalCatalogs.get( id ).getLogicalNamespace().withName( name ) ) ); change(); } @@ -417,7 +579,7 @@ public void updateAdapterSettings( long adapterId, Map newSettin if ( !adapters.containsKey( adapterId ) ) { return; } - adapters.put( adapterId, adapters.get( adapterId ).withSettings( ImmutableMap.copyOf( newSettings ) ) ); + adapters.put( adapterId, adapters.get( adapterId ).toBuilder().settings( ImmutableMap.copyOf( newSettings ) ).build() ); } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PusherMap.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PusherMap.java index 58d68c77a3..e5028995ae 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PusherMap.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PusherMap.java @@ -29,7 +29,9 @@ public class PusherMap extends ConcurrentHashMap { public void change() { - onChange.forEach( p -> p.accept( this ) ); + if ( onChange != null ) { + onChange.forEach( p -> p.accept( this ) ); + } } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocDocCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocDocCatalog.java index e0485d0289..b5fe167759 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocDocCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocDocCatalog.java @@ -77,7 +77,7 @@ public void dropCollectionPlacement( long id, long adapterId ) { @Override - public CatalogCollectionPlacement getCollectionPlacement( long id, int placementId ) { + public CatalogCollectionPlacement getCollectionPlacement( long id, long placementId ) { return null; } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocRelCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocRelCatalog.java index e921cc7081..ab7285942a 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocRelCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocRelCatalog.java @@ -29,8 +29,8 @@ import lombok.Getter; import lombok.extern.slf4j.Slf4j; import org.jetbrains.annotations.Nullable; -import org.polypheny.db.catalog.PusherMap; import org.polypheny.db.catalog.IdBuilder; +import org.polypheny.db.catalog.PusherMap; import org.polypheny.db.catalog.Serializable; import org.polypheny.db.catalog.catalogs.AllocationRelationalCatalog; import org.polypheny.db.catalog.entity.CatalogAdapter; @@ -40,6 +40,7 @@ import org.polypheny.db.catalog.entity.CatalogPartitionGroup; import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; import org.polypheny.db.catalog.entity.allocation.AllocationTable; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import 
org.polypheny.db.catalog.exceptions.GenericCatalogException; import org.polypheny.db.catalog.logistic.DataPlacementRole; import org.polypheny.db.catalog.logistic.PartitionType; @@ -69,10 +70,12 @@ public class PolyAllocRelCatalog implements AllocationRelationalCatalog, Seriali private final ConcurrentHashMap> logicalTableToAllocs; - public PolyAllocRelCatalog(){ + + public PolyAllocRelCatalog() { this( new ConcurrentHashMap<>() ); } + public PolyAllocRelCatalog( @Deserialize("allocations") Map allocations ) { this.allocations = new PusherMap<>( allocations ); @@ -156,8 +159,20 @@ private Long getAllocId( long adapterId, long tableId ) { @Override - public void addColumnPlacement( long adapterId, long columnId, PlacementType placementType, String physicalSchemaName, String physicalTableName, String physicalColumnName ) { - allocations.put( adapterLogicalToAllocId.get( Pair.of( adapterId, columnId ) ), adapterLogicalColumnToAlloc.get( Pair.of( adapterId, columnId ) ).withAddedColumn( columnId, placementType, physicalSchemaName, physicalTableName, physicalColumnName ) ); + public void addColumnPlacement( LogicalTable table, long adapterId, long columnId, PlacementType placementType, String physicalSchemaName, String physicalTableName, String physicalColumnName, int position ) { + Long allocationId = adapterLogicalToAllocId.get( Pair.of( adapterId, columnId ) ); + + AllocationTable allocationTable; + + if ( allocationId == null ) { + allocationId = idBuilder.getNewAllocId(); + allocationTable = new AllocationTable( table, allocationId, physicalTableName, adapterId, List.of( + new CatalogColumnPlacement( table.namespaceId, table.id, columnId, adapterId, placementType, physicalSchemaName, physicalColumnName, position ) ) ); + } else { + allocationTable = adapterLogicalColumnToAlloc.get( Pair.of( adapterId, columnId ) ).withAddedColumn( columnId, placementType, physicalSchemaName, physicalTableName, physicalColumnName ); + } + + allocations.put( allocationId, allocationTable ); } @@ -216,7 +231,7 @@ public List getColumnPlacementsByColumn( long columnId ) @Override - public ImmutableMap> getColumnPlacementsByAdapter( long tableId ) { + public ImmutableMap> getColumnPlacementsByAdapter( long tableId ) { return null; } @@ -396,7 +411,7 @@ public List getPartitionsOnDataPlacement( long adapterId, long tableId ) { @Override - public List getPartitionGroupsIndexOnDataPlacement( int adapterId, long tableId ) { + public List getPartitionGroupsIndexOnDataPlacement( long adapterId, long tableId ) { return null; } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/DocumentCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/DocumentCatalog.java index 262a827a60..8fffd0ee19 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/DocumentCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/DocumentCatalog.java @@ -22,14 +22,16 @@ import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; +import lombok.Builder; import lombok.Getter; import lombok.Value; -import lombok.With; import lombok.experimental.NonFinal; +import lombok.experimental.SuperBuilder; import org.polypheny.db.adapter.DataStore; -import org.polypheny.db.catalog.PusherMap; import org.polypheny.db.catalog.IdBuilder; +import org.polypheny.db.catalog.PusherMap; import org.polypheny.db.catalog.Serializable; +import org.polypheny.db.catalog.catalogs.LogicalCatalog; import 
org.polypheny.db.catalog.catalogs.LogicalDocumentCatalog; import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalCollection; @@ -38,43 +40,40 @@ import org.polypheny.db.catalog.logistic.Pattern; @Value -@With +@SuperBuilder(toBuilder = true) public class DocumentCatalog implements Serializable, LogicalDocumentCatalog { @Getter public BinarySerializer serializer = Serializable.builder.get().build( DocumentCatalog.class ); - @Serialize - public IdBuilder idBuilder; + IdBuilder idBuilder = IdBuilder.getInstance(); @Serialize public PusherMap collections; - private ConcurrentHashMap names; + ConcurrentHashMap names; @Getter @Serialize public LogicalNamespace logicalNamespace; - public DocumentCatalog( LogicalNamespace logicalNamespace, IdBuilder idBuilder ) { - this( logicalNamespace, idBuilder, new ConcurrentHashMap<>() ); + public DocumentCatalog( LogicalNamespace logicalNamespace ) { + this( logicalNamespace, new ConcurrentHashMap<>() ); } public DocumentCatalog( @Deserialize("logicalNamespace") LogicalNamespace logicalNamespace, - @Deserialize("idBuilder") IdBuilder idBuilder, @Deserialize("collections") Map collections ) { this.logicalNamespace = logicalNamespace; this.collections = new PusherMap<>( collections ); - this.idBuilder = idBuilder; - this.names = new ConcurrentHashMap<>(); this.collections.addRowConnection( this.names, ( k, v ) -> logicalNamespace.caseSensitive ? v.name : v.name.toLowerCase(), ( k, v ) -> v ); } @NonFinal + @Builder.Default boolean openChanges = false; @@ -121,7 +120,7 @@ public List getCollections( Pattern namePattern ) { @Override - public long addCollection( Long id, String name, int currentUserId, EntityType entity, boolean modifiable ) { + public long addCollection( Long id, String name, EntityType entity, boolean modifiable ) { return 0; } @@ -137,4 +136,10 @@ public long addCollectionLogistics( String name, List stores, boolean return 0; } + + @Override + public LogicalCatalog withLogicalNamespace( LogicalNamespace namespace ) { + return toBuilder().logicalNamespace( namespace ).build(); + } + } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/GraphCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/GraphCatalog.java index ec5cf1fe68..cd004c6c00 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/GraphCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/GraphCatalog.java @@ -18,13 +18,16 @@ import io.activej.serializer.BinarySerializer; import java.util.List; +import lombok.Builder; import lombok.Getter; import lombok.Value; import lombok.With; import lombok.experimental.NonFinal; +import lombok.experimental.SuperBuilder; import org.polypheny.db.adapter.DataStore; import org.polypheny.db.catalog.IdBuilder; import org.polypheny.db.catalog.Serializable; +import org.polypheny.db.catalog.catalogs.LogicalCatalog; import org.polypheny.db.catalog.catalogs.LogicalGraphCatalog; import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalEntity; @@ -35,23 +38,23 @@ import org.polypheny.db.catalog.logistic.Pattern; @Value -@With +@SuperBuilder(toBuilder = true) public class GraphCatalog implements Serializable, LogicalGraphCatalog { @Getter public BinarySerializer serializer = Serializable.builder.get().build( GraphCatalog.class ); @Getter public LogicalNamespace logicalNamespace; - public IdBuilder idBuilder; + public IdBuilder 
idBuilder = IdBuilder.getInstance(); @NonFinal + @Builder.Default boolean openChanges = false; - public GraphCatalog( LogicalNamespace logicalNamespace, IdBuilder idBuilder ) { + public GraphCatalog( LogicalNamespace logicalNamespace ) { this.logicalNamespace = logicalNamespace; - this.idBuilder = idBuilder; } @@ -85,6 +88,12 @@ public LogicalEntity getEntity( long id ) { } + @Override + public LogicalCatalog withLogicalNamespace( LogicalNamespace namespace ) { + return toBuilder().logicalNamespace( namespace ).build(); + } + + @Override public void addGraphAlias( long graphId, String alias, boolean ifNotExists ) { diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/RelationalCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/RelationalCatalog.java index fbcd224eff..4e2c5678a6 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/RelationalCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/RelationalCatalog.java @@ -28,16 +28,18 @@ import java.util.concurrent.ConcurrentHashMap; import java.util.stream.Collectors; import javax.annotation.Nullable; +import lombok.Builder; import lombok.Getter; import lombok.Value; -import lombok.With; import lombok.experimental.NonFinal; +import lombok.experimental.SuperBuilder; import org.polypheny.db.algebra.AlgCollation; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.type.AlgDataType; -import org.polypheny.db.catalog.PusherMap; import org.polypheny.db.catalog.IdBuilder; +import org.polypheny.db.catalog.PusherMap; import org.polypheny.db.catalog.Serializable; +import org.polypheny.db.catalog.catalogs.LogicalCatalog; import org.polypheny.db.catalog.catalogs.LogicalRelationalCatalog; import org.polypheny.db.catalog.entity.CatalogConstraint; import org.polypheny.db.catalog.entity.CatalogForeignKey; @@ -68,7 +70,7 @@ import org.polypheny.db.type.PolyType; @Value -@With +@SuperBuilder(toBuilder = true) public class RelationalCatalog implements Serializable, LogicalRelationalCatalog { @Getter @@ -80,6 +82,7 @@ public class RelationalCatalog implements Serializable, LogicalRelationalCatalog @Serialize public PusherMap columns; + @Serialize @Getter public LogicalNamespace logicalNamespace; @@ -89,13 +92,15 @@ public class RelationalCatalog implements Serializable, LogicalRelationalCatalog @Serialize public Map keys; + @Serialize public Map keyColumns; - @Serialize - public IdBuilder idBuilder; + + public IdBuilder idBuilder = IdBuilder.getInstance(); ConcurrentHashMap names; @NonFinal + @Builder.Default boolean openChanges = false; PropertyChangeSupport listeners = new PropertyChangeSupport( this ); @@ -103,7 +108,6 @@ public class RelationalCatalog implements Serializable, LogicalRelationalCatalog public RelationalCatalog( @Deserialize("logicalNamespace") LogicalNamespace logicalNamespace, - @Deserialize("idBuilder") IdBuilder idBuilder, @Deserialize("tables") Map tables, @Deserialize("columns") Map columns, @Deserialize("indexes") Map indexes, @@ -120,12 +124,11 @@ public RelationalCatalog( this.names = new ConcurrentHashMap<>(); this.tables.addRowConnection( this.names, ( k, v ) -> logicalNamespace.caseSensitive ? 
v.name : v.name.toLowerCase(), ( k, v ) -> v ); - this.idBuilder = idBuilder; } - public RelationalCatalog( LogicalNamespace namespace, IdBuilder idBuilder ) { - this( namespace, idBuilder, new HashMap<>(), new HashMap<>(), new HashMap<>(), new HashMap<>(), new HashMap<>() ); + public RelationalCatalog( LogicalNamespace namespace ) { + this( namespace, new HashMap<>(), new HashMap<>(), new HashMap<>(), new HashMap<>(), new HashMap<>() ); } @@ -164,6 +167,12 @@ public LogicalEntity getEntity( long id ) { } + @Override + public LogicalCatalog withLogicalNamespace( LogicalNamespace namespace ) { + return toBuilder().logicalNamespace( namespace ).build(); + } + + @Override public List getTables( @Nullable Pattern name ) { if ( name == null ) { @@ -180,13 +189,13 @@ public List getTables( @Nullable Pattern name ) { @Override public LogicalTable getTable( long tableId ) { - return null; + return tables.get( tableId ); } @Override public LogicalTable getTable( String tableName ) throws UnknownTableException { - return null; + return names.get( tableName ); } @@ -197,19 +206,22 @@ public LogicalTable getTableFromPartition( long partitionId ) { @Override - public long addTable( String name, int ownerId, EntityType entityType, boolean modifiable ) { - return 0; + public long addTable( String name, EntityType entityType, boolean modifiable ) { + long id = idBuilder.getNewEntityId(); + LogicalTable table = new LogicalTable( id, name, List.of(), logicalNamespace.id, logicalNamespace.name, entityType, null, List.of(), modifiable, null, List.of() ); + tables.put( id, table ); + return id; } @Override - public long addView( String name, long namespaceId, int ownerId, EntityType entityType, boolean modifiable, AlgNode definition, AlgCollation algCollation, Map> underlyingTables, AlgDataType fieldList, String query, QueryLanguage language ) { + public long addView( String name, long namespaceId, EntityType entityType, boolean modifiable, AlgNode definition, AlgCollation algCollation, Map> underlyingTables, AlgDataType fieldList, String query, QueryLanguage language ) { return 0; } @Override - public long addMaterializedView( String name, long namespaceId, int ownerId, EntityType entityType, boolean modifiable, AlgNode definition, AlgCollation algCollation, Map> underlyingTables, AlgDataType fieldList, MaterializedCriteria materializedCriteria, String query, QueryLanguage language, boolean ordered ) throws GenericCatalogException { + public long addMaterializedView( String name, long namespaceId, EntityType entityType, boolean modifiable, AlgNode definition, AlgCollation algCollation, Map> underlyingTables, AlgDataType fieldList, MaterializedCriteria materializedCriteria, String query, QueryLanguage language, boolean ordered ) throws GenericCatalogException { return 0; } @@ -227,7 +239,7 @@ public void deleteTable( long tableId ) { @Override - public void setTableOwner( long tableId, int ownerId ) { + public void setTableOwner( long tableId, long ownerId ) { } @@ -403,19 +415,23 @@ public LogicalColumn getColumn( long columnId ) { @Override public LogicalColumn getColumn( long tableId, String columnName ) throws UnknownColumnException { - return null; + return tables.get( tableId ).columns.stream().filter( c -> logicalNamespace.isCaseSensitive() ? 
c.name.equals( columnName ) : c.name.equalsIgnoreCase( columnName ) ).findFirst().orElse( null ); } @Override - public LogicalColumn getColumn( String schemaName, String tableName, String columnName ) throws UnknownColumnException, UnknownSchemaException, UnknownTableException { + public LogicalColumn getColumn( String tableName, String columnName ) throws UnknownColumnException, UnknownSchemaException, UnknownTableException { return null; } @Override public long addColumn( String name, long tableId, int position, PolyType type, PolyType collectionsType, Integer length, Integer scale, Integer dimension, Integer cardinality, boolean nullable, Collation collation ) { - return 0; + long id = idBuilder.getNewFieldId(); + LogicalColumn column = new LogicalColumn( id, name, tableId, logicalNamespace.id, position, type, collectionsType, length, scale, dimension, cardinality, nullable, collation, null ); + columns.put( id, column ); + tables.put( tableId, tables.get( tableId ).withAddedColumn( column ) ); + return id; } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/FullSnapshot.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/FullSnapshot.java index 95b80cde95..721c1a484d 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/FullSnapshot.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/FullSnapshot.java @@ -226,4 +226,10 @@ public boolean isPartitioned( long id ) { return false; } + + @Override + public LogicalColumn getColumn( long columnId ) { + return null; + } + } diff --git a/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/RequestParser.java b/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/RequestParser.java index da231b1002..2da5d927c3 100644 --- a/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/RequestParser.java +++ b/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/RequestParser.java @@ -43,10 +43,10 @@ import org.polypheny.db.algebra.operators.OperatorName; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogUser; +import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.exceptions.UnknownColumnException; -import org.polypheny.db.catalog.exceptions.UnknownDatabaseException; import org.polypheny.db.catalog.exceptions.UnknownSchemaException; import org.polypheny.db.catalog.exceptions.UnknownTableException; import org.polypheny.db.iface.AuthenticationException; @@ -259,12 +259,13 @@ LogicalTable parseCatalogTableName( String tableName ) throws ParserException { } try { - LogicalTable table = this.catalog.getTable( tableElements[0], tableElements[1] ); + LogicalNamespace namespace = catalog.getNamespace( tableElements[0] ); + LogicalTable table = this.catalog.getLogicalRel( namespace.id ).getTable( tableElements[1] ); if ( log.isDebugEnabled() ) { log.debug( "Finished parsing table \"{}\".", tableName ); } return table; - } catch ( UnknownTableException | UnknownDatabaseException | UnknownSchemaException e ) { + } catch ( UnknownTableException e ) { log.error( "Unable to fetch table: {}.", tableName, e ); throw new ParserException( ParserErrorCode.TABLE_LIST_UNKNOWN_TABLE, tableName ); } @@ -279,8 +280,8 @@ List newParseProjectionsAndAggregations( String[] possibleProject int columnOffset = 0; for ( LogicalTable table : tables ) { 
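            // Editor's hedged sketch (example values are assumptions, not taken from the
            // patch): for tables t1 with 3 columns and t2 with 2 columns, this loop
            // accumulates
            //   tableOffsets = { t1.id -> 0, t2.id -> 3 }, columnOffset = 5
            // so a column at position p of t2 later resolves to projection index
            // tableOffsets.get( t2.id ) + p - 1 = 3 + p - 1.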
tableOffsets.put( table.id, columnOffset ); - validColumns.addAll( table.fieldIds ); - columnOffset += table.fieldIds.size(); + validColumns.addAll( table.getColumnIds() ); + columnOffset += table.columns.size(); } List columns; @@ -308,8 +309,7 @@ List generateRequestColumnsWithoutProject( List tab List columns = new ArrayList<>(); long internalPosition = 0L; for ( LogicalTable table : tables ) { - for ( long columnId : table.fieldIds ) { - LogicalColumn column = this.catalog.getColumn( columnId ); + for ( LogicalColumn column : table.columns ) { int calculatedPosition = tableOffsets.get( table.id ) + column.position - 1; RequestColumn requestColumn = new RequestColumn( column, calculatedPosition, calculatedPosition, null, null, true ); columns.add( requestColumn ); @@ -337,7 +337,7 @@ List generateRequestColumnsWithProject( String projectionString, try { logicalColumn = this.getCatalogColumnFromString( columnName ); log.debug( "Fetched catalog column for projection key: {}.", columnName ); - } catch ( UnknownColumnException | UnknownDatabaseException | UnknownSchemaException | UnknownTableException e ) { + } catch ( UnknownColumnException | UnknownSchemaException | UnknownTableException e ) { log.warn( "Unable to fetch column: {}.", columnName, e ); throw new ParserException( ParserErrorCode.PROJECTION_MALFORMED, columnName ); } @@ -362,7 +362,7 @@ List generateRequestColumnsWithProject( String projectionString, Set notYetAdded = new HashSet<>( validColumns ); notYetAdded.removeAll( projectedColumns ); for ( long columnId : notYetAdded ) { - LogicalColumn column = this.catalog.getColumn( columnId ); + LogicalColumn column = this.catalog.getSnapshot( 0 ).getColumn( columnId ); int calculatedPosition = tableOffsets.get( column.tableId ) + column.position - 1; RequestColumn requestColumn = new RequestColumn( column, calculatedPosition, calculatedPosition, null, null, false ); columns.add( requestColumn ); @@ -412,14 +412,16 @@ AggFunction decodeAggregateFunction( String function ) { } - private LogicalColumn getCatalogColumnFromString( String name ) throws ParserException, UnknownColumnException, UnknownDatabaseException, UnknownSchemaException, UnknownTableException { + private LogicalColumn getCatalogColumnFromString( String name ) throws ParserException, UnknownColumnException, UnknownSchemaException, UnknownTableException { String[] splitString = name.split( "\\." ); if ( splitString.length != 3 ) { log.warn( "Column name is not 3 fields long. Got: {}", name ); throw new ParserException( ParserErrorCode.PROJECTION_MALFORMED, name ); } - return this.catalog.getColumn( splitString[0], splitString[1], splitString[2] ); + LogicalNamespace namespace = this.catalog.getNamespace( splitString[0] ); + + return this.catalog.getLogicalRel( namespace.id ).getColumn( splitString[1], splitString[2] ); } @@ -747,7 +749,7 @@ private List> parseInsertStatementValues( Map rowVal public Map generateNameMapping( List tables ) { Map nameMapping = new HashMap<>(); for ( LogicalTable table : tables ) { - for ( LogicalColumn column : this.catalog.getColumns( table.id ) ) { + for ( LogicalColumn column : this.catalog.getLogicalRel( table.namespaceId ).getColumns( table.id ) ) { nameMapping.put( column.getSchemaName() + "." + column.getTableName() + "." 
+ column.name, column ); } } diff --git a/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/Rest.java b/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/Rest.java index c4b54cf27d..9baf59aa1f 100644 --- a/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/Rest.java +++ b/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/Rest.java @@ -22,7 +22,6 @@ import io.javalin.http.Context; import java.io.InputStream; import java.util.ArrayList; -import java.util.Arrays; import java.util.HashMap; import java.util.Iterator; import java.util.List; @@ -47,16 +46,17 @@ import org.polypheny.db.algebra.operators.OperatorName; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeField; +import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalTable; -import org.polypheny.db.catalog.exceptions.UnknownDatabaseException; import org.polypheny.db.catalog.exceptions.UnknownSchemaException; +import org.polypheny.db.catalog.exceptions.UnknownTableException; import org.polypheny.db.catalog.exceptions.UnknownUserException; +import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.languages.OperatorRegistry; import org.polypheny.db.nodes.Operator; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgOptPlanner; -import org.polypheny.db.prepare.PolyphenyDbCatalogReader; -import org.polypheny.db.prepare.Prepare.PreparingEntity; import org.polypheny.db.restapi.RequestParser.Filters; import org.polypheny.db.restapi.exception.RestException; import org.polypheny.db.restapi.models.requests.ResourceDeleteRequest; @@ -156,8 +156,14 @@ String processPatchResource( final ResourcePatchRequest resourcePatchRequest, fi JavaTypeFactory typeFactory = transaction.getTypeFactory(); RexBuilder rexBuilder = new RexBuilder( typeFactory ); - PolyphenyDbCatalogReader catalogReader = statement.getTransaction().getSnapshot(); - PreparingEntity table = catalogReader.getTable( Arrays.asList( resourcePatchRequest.tables.get( 0 ).getNamespaceName(), resourcePatchRequest.tables.get( 0 ).name ) ); + Snapshot snapshot = statement.getTransaction().getSnapshot(); + LogicalNamespace namespace = Catalog.getInstance().getNamespace( resourcePatchRequest.tables.get( 0 ).getNamespaceName() ); + LogicalTable table = null; + try { + table = Catalog.getInstance().getLogicalRel( namespace.id ).getTable( resourcePatchRequest.tables.get( 0 ).name ); + } catch ( UnknownTableException e ) { + throw new RuntimeException( e ); + } // Table Scans algBuilder = this.tableScans( algBuilder, rexBuilder, resourcePatchRequest.tables ); @@ -173,7 +179,7 @@ String processPatchResource( final ResourcePatchRequest resourcePatchRequest, fi // Table Modify AlgOptPlanner planner = statement.getQueryProcessor().getPlanner(); - AlgOptCluster cluster = AlgOptCluster.create( planner, rexBuilder, traitSet, rootSchema ); + AlgOptCluster cluster = AlgOptCluster.create( planner, rexBuilder, null, Catalog.getInstance().getSnapshot( 0 ) ); // Values AlgDataType tableRowType = table.getRowType(); @@ -182,11 +188,10 @@ String processPatchResource( final ResourcePatchRequest resourcePatchRequest, fi List rexValues = this.valuesNode( statement, algBuilder, rexBuilder, resourcePatchRequest, tableRows, inputStreams ).get( 0 ); AlgNode algNode = algBuilder.build(); - RelModify modify = new LogicalRelModify( + RelModify modify = new LogicalRelModify( cluster, 
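                // Editor's note (hedged reading of this refactor): the former catalogReader
                // argument has been dropped; the modify node is now constructed directly
                // against the LogicalTable resolved from the catalog above.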
algNode.getTraitSet(), table, - catalogReader, algNode, Modify.Operation.UPDATE, valueColumnNames, @@ -215,8 +220,7 @@ String processDeleteResource( final ResourceDeleteRequest resourceDeleteRequest, JavaTypeFactory typeFactory = transaction.getTypeFactory(); RexBuilder rexBuilder = new RexBuilder( typeFactory ); - PolyphenyDbCatalogReader catalogReader = statement.getTransaction().getSnapshot(); - PreparingEntity table = catalogReader.getTable( Arrays.asList( resourceDeleteRequest.tables.get( 0 ).getNamespaceName(), resourceDeleteRequest.tables.get( 0 ).name ) ); + LogicalTable table = getLogicalTable( resourceDeleteRequest.tables.get( 0 ).getNamespaceName(), resourceDeleteRequest.tables.get( 0 ).getName() ); // Table Scans algBuilder = this.tableScans( algBuilder, rexBuilder, resourceDeleteRequest.tables ); @@ -232,14 +236,13 @@ String processDeleteResource( final ResourceDeleteRequest resourceDeleteRequest, // Table Modify AlgOptPlanner planner = statement.getQueryProcessor().getPlanner(); - AlgOptCluster cluster = AlgOptCluster.create( planner, rexBuilder, traitSet, rootSchema ); + AlgOptCluster cluster = AlgOptCluster.create( planner, rexBuilder, null, Catalog.getInstance().getSnapshot( 0 ) ); AlgNode algNode = algBuilder.build(); - RelModify modify = new LogicalRelModify( + RelModify modify = new LogicalRelModify( cluster, algNode.getTraitSet(), table, - catalogReader, algNode, Modify.Operation.DELETE, null, @@ -261,6 +264,19 @@ String processDeleteResource( final ResourceDeleteRequest resourceDeleteRequest, } + private static LogicalTable getLogicalTable( String namespaceName, String tableName ) { + Catalog catalog = Catalog.getInstance(); + LogicalNamespace namespace = catalog.getNamespace( namespaceName ); + LogicalTable table; + try { + table = catalog.getLogicalRel( namespace.id ).getTable( tableName ); + } catch ( UnknownTableException e ) { + throw new RuntimeException( e ); + } + return table; + } + + String processPostResource( final ResourcePostRequest insertValueRequest, final Context ctx, Map inputStreams ) throws RestException { Transaction transaction = getTransaction(); Statement statement = transaction.createStatement(); @@ -268,8 +284,7 @@ String processPostResource( final ResourcePostRequest insertValueRequest, final JavaTypeFactory typeFactory = transaction.getTypeFactory(); RexBuilder rexBuilder = new RexBuilder( typeFactory ); - PolyphenyDbCatalogReader catalogReader = statement.getTransaction().getSnapshot(); - PreparingEntity table = catalogReader.getTable( Arrays.asList( insertValueRequest.tables.get( 0 ).getNamespaceName(), insertValueRequest.tables.get( 0 ).name ) ); + LogicalTable table = getLogicalTable( insertValueRequest.tables.get( 0 ).getNamespaceName(), insertValueRequest.tables.get( 0 ).getName() ); // Values AlgDataType tableRowType = table.getRowType(); @@ -278,7 +293,7 @@ String processPostResource( final ResourcePostRequest insertValueRequest, final // List valueColumnNames = this.valuesColumnNames( updateResourceRequest.values ); AlgOptPlanner planner = statement.getQueryProcessor().getPlanner(); - AlgOptCluster cluster = AlgOptCluster.create( planner, rexBuilder, traitSet, rootSchema ); + AlgOptCluster cluster = AlgOptCluster.create( planner, rexBuilder, null, Catalog.getInstance().getSnapshot( 0 ) ); List valueColumnNames = this.valuesColumnNames( insertValueRequest.values ); List rexValues = this.valuesNode( statement, algBuilder, rexBuilder, insertValueRequest, tableRows, inputStreams ).get( 0 ); @@ -287,11 +302,10 @@ String 
processPostResource( final ResourcePostRequest insertValueRequest, final // Table Modify AlgNode algNode = algBuilder.build(); - RelModify modify = new LogicalRelModify( + RelModify modify = new LogicalRelModify( cluster, algNode.getTraitSet(), table, - catalogReader, algNode, Modify.Operation.INSERT, null, @@ -551,7 +565,7 @@ AlgBuilder sort( AlgBuilder algBuilder, RexBuilder rexBuilder, List columnNames = catalogTable.getColumnNames(); size += oldColumnList.getSqlList().stream().filter( column -> !columnNames.contains( ((SqlIdentifier) column).names.get( 0 ) ) ).count(); @@ -259,8 +260,7 @@ private void addDefaultValues( Transaction transaction, SqlInsert insert ) { SqlNode[][] newValues = new SqlNode[((SqlBasicCall) insert.getSource()).getOperands().length][size]; int pos = 0; - List columns = Catalog.getInstance().getColumns( catalogTable.id ); - for ( LogicalColumn column : columns ) { + for ( LogicalColumn column : catalogTable.columns ) { // Add column newColumnList.add( new SqlIdentifier( column.name, ParserPos.ZERO ) ); @@ -363,13 +363,17 @@ private LogicalTable getCatalogTable( Transaction transaction, SqlIdentifier tab schemaId = Catalog.getInstance().getNamespace( tableName.names.get( 1 ) ).id; tableOldName = tableName.names.get( 2 ); } else if ( tableName.names.size() == 2 ) { // SchemaName.TableName - schemaId = Catalog.getInstance().getSchema( transaction.getDefaultSchema().databaseId, tableName.names.get( 0 ) ).id; + schemaId = Catalog.getInstance().getNamespace( tableName.names.get( 0 ) ).id; tableOldName = tableName.names.get( 1 ); } else { // TableName - schemaId = Catalog.getInstance().getSchema( transaction.getDefaultSchema().databaseId, transaction.getDefaultSchema().name ).id; + schemaId = Catalog.getInstance().getNamespace( transaction.getDefaultSchema().name ).id; tableOldName = tableName.names.get( 0 ); } - catalogTable = Catalog.getInstance().getTable( schemaId, tableOldName ); + try { + catalogTable = Catalog.getInstance().getLogicalRel( schemaId ).getTable( tableOldName ); + } catch ( UnknownTableException e ) { + throw new RuntimeException( e ); + } return catalogTable; } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlDdl.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlDdl.java index 9c74aefada..92daff0848 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlDdl.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlDdl.java @@ -29,6 +29,7 @@ import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.exceptions.UnknownColumnException; +import org.polypheny.db.catalog.exceptions.UnknownTableException; import org.polypheny.db.languages.ParserPos; import org.polypheny.db.nodes.Operator; import org.polypheny.db.prepare.Context; @@ -72,21 +73,25 @@ protected LogicalTable getCatalogTable( Context context, SqlIdentifier tableName schemaId = catalog.getNamespace( tableName.names.get( 1 ) ).id; tableOldName = tableName.names.get( 2 ); } else if ( tableName.names.size() == 2 ) { // SchemaName.TableName - schemaId = catalog.getSchema( context.getDatabaseId(), tableName.names.get( 0 ) ).id; + schemaId = catalog.getNamespace( tableName.names.get( 0 ) ).id; tableOldName = tableName.names.get( 1 ); } else { // TableName - schemaId = catalog.getSchema( context.getDatabaseId(), context.getDefaultSchemaName() ).id; + schemaId = catalog.getNamespace( 
context.getDefaultSchemaName() ).id; tableOldName = tableName.names.get( 0 ); } - catalogTable = catalog.getTable( schemaId, tableOldName ); + try { + catalogTable = catalog.getLogicalRel( schemaId ).getTable( tableOldName ); + } catch ( UnknownTableException e ) { + throw new RuntimeException( e ); + } return catalogTable; } - protected LogicalColumn getCatalogColumn( long tableId, SqlIdentifier columnName ) { + protected LogicalColumn getCatalogColumn( long namespaceId, long tableId, SqlIdentifier columnName ) { LogicalColumn logicalColumn; try { - logicalColumn = Catalog.getInstance().getColumn( tableId, columnName.getSimple() ); + logicalColumn = Catalog.getInstance().getLogicalRel( namespaceId ).getColumn( tableId, columnName.getSimple() ); } catch ( UnknownColumnException e ) { throw CoreUtil.newContextException( columnName.getPos(), RESOURCE.columnNotFoundInTable( columnName.getSimple(), tableId + "" ) ); } @@ -110,7 +115,7 @@ protected DataStore getDataStoreInstance( SqlIdentifier storeName ) { } - protected DataStore getDataStoreInstance( int storeId ) { + protected DataStore getDataStoreInstance( long storeId ) { Adapter adapterInstance = AdapterManager.getInstance().getAdapter( storeId ); if ( adapterInstance == null ) { throw new RuntimeException( "Unknown store id: " + storeId ); diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateMaterializedView.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateMaterializedView.java index d81947f6f0..4c4dfe4fae 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateMaterializedView.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateMaterializedView.java @@ -123,10 +123,10 @@ public void execute( Context context, Statement statement, QueryParameters param schemaId = catalog.getNamespace( name.names.get( 1 ) ).id; viewName = name.names.get( 2 ); } else if ( name.names.size() == 2 ) { // SchemaName.TableName - schemaId = catalog.getSchema( context.getDatabaseId(), name.names.get( 0 ) ).id; + schemaId = catalog.getNamespace( name.names.get( 0 ) ).id; viewName = name.names.get( 1 ); } else { // TableName - schemaId = catalog.getSchema( context.getDatabaseId(), context.getDefaultSchemaName() ).id; + schemaId = catalog.getNamespace( context.getDefaultSchemaName() ).id; viewName = name.names.get( 0 ); } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateSchema.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateSchema.java index bec6b70891..7e5a442e06 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateSchema.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateSchema.java @@ -92,7 +92,7 @@ public void unparse( SqlWriter writer, int leftPrec, int rightPrec ) { @Override public void execute( Context context, Statement statement, QueryParameters parameters ) { try { - DdlManager.getInstance().createNamespace( name.getSimple(), context.getDatabaseId(), type, context.getCurrentUserId(), ifNotExists, replace ); + DdlManager.getInstance().createNamespace( name.getSimple(), type, ifNotExists, replace ); } catch ( NamespaceAlreadyExistsException e ) { throw CoreUtil.newContextException( name.getPos(), RESOURCE.schemaExists( name.getSimple() ) ); } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateView.java 
b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateView.java index b4c6de00c3..088639ff6e 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateView.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateView.java @@ -106,10 +106,10 @@ public void execute( Context context, Statement statement, QueryParameters param schemaId = catalog.getNamespace( name.names.get( 1 ) ).id; viewName = name.names.get( 2 ); } else if ( name.names.size() == 2 ) { // SchemaName.TableName - schemaId = catalog.getSchema( context.getDatabaseId(), name.names.get( 0 ) ).id; + schemaId = catalog.getNamespace( name.names.get( 0 ) ).id; viewName = name.names.get( 1 ); } else { // TableName - schemaId = catalog.getSchema( context.getDatabaseId(), context.getDefaultSchemaName() ).id; + schemaId = catalog.getNamespace( context.getDefaultSchemaName() ).id; viewName = name.names.get( 0 ); } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altermaterializedview/SqlAlterMaterializedViewAddIndex.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altermaterializedview/SqlAlterMaterializedViewAddIndex.java index b8b49abbed..9b937f7a07 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altermaterializedview/SqlAlterMaterializedViewAddIndex.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altermaterializedview/SqlAlterMaterializedViewAddIndex.java @@ -26,7 +26,6 @@ import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.exceptions.GenericCatalogException; import org.polypheny.db.catalog.exceptions.UnknownColumnException; -import org.polypheny.db.catalog.exceptions.UnknownDatabaseException; import org.polypheny.db.catalog.exceptions.UnknownKeyException; import org.polypheny.db.catalog.exceptions.UnknownSchemaException; import org.polypheny.db.catalog.exceptions.UnknownTableException; @@ -172,7 +171,7 @@ public void execute( Context context, Statement statement, QueryParameters param throw CoreUtil.newContextException( storeName.getPos(), RESOURCE.missingColumnPlacement( e.getColumnName() ) ); - } catch ( GenericCatalogException | UnknownKeyException | UnknownUserException | UnknownDatabaseException | TransactionException e ) { + } catch ( GenericCatalogException | UnknownKeyException | UnknownUserException | TransactionException e ) { throw new RuntimeException( e ); } } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddColumn.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddColumn.java index bd397af371..5a5715c499 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddColumn.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddColumn.java @@ -131,7 +131,7 @@ public void execute( Context context, Statement statement, QueryParameters param } // Make sure that all adapters are of type store (and not source) - for ( int storeId : catalogTable.dataPlacements ) { + for ( long storeId : catalogTable.dataPlacements ) { getDataStoreInstance( storeId ); } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddIndex.java 
b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddIndex.java index 689d84b757..d4f908474e 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddIndex.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddIndex.java @@ -27,7 +27,6 @@ import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.exceptions.GenericCatalogException; import org.polypheny.db.catalog.exceptions.UnknownColumnException; -import org.polypheny.db.catalog.exceptions.UnknownDatabaseException; import org.polypheny.db.catalog.exceptions.UnknownKeyException; import org.polypheny.db.catalog.exceptions.UnknownSchemaException; import org.polypheny.db.catalog.exceptions.UnknownTableException; @@ -176,7 +175,7 @@ public void execute( Context context, Statement statement, QueryParameters param throw CoreUtil.newContextException( storeName.getPos(), RESOURCE.missingColumnPlacement( e.getColumnName() ) ); - } catch ( GenericCatalogException | UnknownKeyException | UnknownUserException | UnknownDatabaseException | TransactionException e ) { + } catch ( GenericCatalogException | UnknownKeyException | UnknownUserException | TransactionException e ) { throw new RuntimeException( e ); } } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddPartitions.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddPartitions.java index a62751daed..88f44ad4eb 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddPartitions.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddPartitions.java @@ -28,7 +28,6 @@ import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.exceptions.GenericCatalogException; import org.polypheny.db.catalog.exceptions.UnknownColumnException; -import org.polypheny.db.catalog.exceptions.UnknownDatabaseException; import org.polypheny.db.catalog.exceptions.UnknownKeyException; import org.polypheny.db.catalog.exceptions.UnknownPartitionTypeException; import org.polypheny.db.catalog.exceptions.UnknownSchemaException; @@ -171,7 +170,7 @@ public void execute( Context context, Statement statement, QueryParameters param } else { throw new RuntimeException( "Table '" + catalogTable.name + "' is already partitioned" ); } - } catch ( UnknownPartitionTypeException | GenericCatalogException | UnknownDatabaseException | UnknownTableException | TransactionException | UnknownSchemaException | UnknownUserException | UnknownKeyException e ) { + } catch ( UnknownPartitionTypeException | GenericCatalogException | UnknownTableException | TransactionException | UnknownSchemaException | UnknownUserException | UnknownKeyException e ) { throw new RuntimeException( e ); } catch ( PartitionGroupNamesNotUniqueException e ) { throw CoreUtil.newContextException( partitionColumn.getPos(), RESOURCE.partitionNamesNotUnique() ); diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddPlacement.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddPlacement.java index 0df524c664..8ed1397b1a 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddPlacement.java +++ 
b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddPlacement.java @@ -131,7 +131,7 @@ public void execute( Context context, Statement statement, QueryParameters param List columnIds = new LinkedList<>(); for ( SqlNode node : columnList.getSqlList() ) { - LogicalColumn logicalColumn = getCatalogColumn( catalogTable.id, (SqlIdentifier) node ); + LogicalColumn logicalColumn = getCatalogColumn( catalogTable.namespaceId, catalogTable.id, (SqlIdentifier) node ); columnIds.add( logicalColumn.id ); } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableMergePartitions.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableMergePartitions.java index 2d62337b64..f24bac590a 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableMergePartitions.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableMergePartitions.java @@ -24,7 +24,6 @@ import org.polypheny.db.catalog.logistic.PartitionType; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.exceptions.GenericCatalogException; -import org.polypheny.db.catalog.exceptions.UnknownDatabaseException; import org.polypheny.db.catalog.exceptions.UnknownKeyException; import org.polypheny.db.catalog.exceptions.UnknownSchemaException; import org.polypheny.db.catalog.exceptions.UnknownTableException; @@ -97,7 +96,7 @@ public void execute( Context context, Statement statement, QueryParameters param try { DdlManager.getInstance().removePartitioning( catalogTable, statement ); - } catch ( UnknownDatabaseException | GenericCatalogException | UnknownTableException | TransactionException | UnknownSchemaException | UnknownUserException | UnknownKeyException e ) { + } catch ( GenericCatalogException | UnknownTableException | TransactionException | UnknownSchemaException | UnknownUserException | UnknownKeyException e ) { throw new RuntimeException( "Error while merging partitions", e ); } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyPartitions.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyPartitions.java index 3c7dc0e3c8..78b556d43c 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyPartitions.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyPartitions.java @@ -121,7 +121,7 @@ public void execute( Context context, Statement statement, QueryParameters param storeName.getPos(), RESOURCE.unknownStoreName( storeName.getSimple() ) ); } - int storeId = storeInstance.getAdapterId(); + long storeId = storeInstance.getAdapterId(); // Check whether this placement already exists if ( !catalogTable.dataPlacements.contains( storeId ) ) { throw CoreUtil.newContextException( @@ -146,7 +146,7 @@ public void execute( Context context, Statement statement, QueryParameters param } // If name partitions are specified else if ( !partitionGroupNamesList.isEmpty() && partitionGroupList.isEmpty() ) { - List catalogPartitionGroups = catalog.getPartitionGroups( tableId ); + List catalogPartitionGroups = catalog.getAllocRel( catalogTable.namespaceId ).getPartitionGroups( tableId ); for ( String partitionName : partitionGroupNamesList.stream().map( Object::toString ) 
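/* Editor's note, not part of the original patch: partition metadata now lives behind a
   namespace-scoped allocation catalog reached via catalog.getAllocRel( namespaceId ). A hedged
   sketch of the lookup pattern this hunk switches to; the variable names and the element type
   CatalogPartitionGroup are assumptions (the generic parameters were lost in this rendering):

   var alloc = catalog.getAllocRel( catalogTable.namespaceId );
   List<String> groupNames = alloc.getPartitionGroupNames( tableId );         // names only
   List<CatalogPartitionGroup> groups = alloc.getPartitionGroups( tableId );  // full entities
*/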
.collect( Collectors.toList() ) ) { boolean isPartOfTable = false; @@ -159,14 +159,14 @@ else if ( !partitionGroupNamesList.isEmpty() && partitionGroupList.isEmpty() ) { } if ( !isPartOfTable ) { throw new RuntimeException( "Specified Partition-Name: '" + partitionName + "' is not part of table '" - + catalogTable.name + "', has only " + catalog.getPartitionGroupNames( tableId ) + " partitions" ); + + catalogTable.name + "', has only " + catalog.getAllocRel( catalogTable.namespaceId ).getPartitionGroupNames( tableId ) + " partitions" ); } } } // Check if in-memory dataPartitionPlacement Map should even be changed and therefore start costly partitioning // Avoid unnecessary partitioning when the placement is already partitioned in the same way it has been specified - if ( tempPartitionList.equals( catalog.getPartitionGroupsOnDataPlacement( storeId, tableId ) ) ) { + if ( tempPartitionList.equals( catalog.getAllocRel( catalogTable.namespaceId ).getPartitionGroupsOnDataPlacement( storeId, tableId ) ) ) { log.info( "The data placement for table: '{}' on store: '{}' already contains all specified partitions of statement: {}", catalogTable.name, storeName, partitionGroupList ); return; diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyPlacement.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyPlacement.java index b0a3ff80b8..a980dc3561 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyPlacement.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyPlacement.java @@ -129,7 +129,7 @@ public void execute( Context context, Statement statement, QueryParameters param // Check if all columns exist for ( SqlNode node : columnList.getSqlList() ) { - getCatalogColumn( catalogTable.id, (SqlIdentifier) node ); + getCatalogColumn( catalogTable.namespaceId, catalogTable.id, (SqlIdentifier) node ); } DataStore storeInstance = getDataStoreInstance( storeName ); @@ -137,7 +137,7 @@ public void execute( Context context, Statement statement, QueryParameters param DdlManager.getInstance().modifyDataPlacement( catalogTable, columnList.getList().stream() - .map( c -> getCatalogColumn( catalogTable.id, (SqlIdentifier) c ).id ) + .map( c -> getCatalogColumn( catalogTable.namespaceId, catalogTable.id, (SqlIdentifier) c ).id ) .collect( Collectors.toList() ), partitionGroupList, partitionGroupNamesList.stream() diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/EntityNamespace.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/EntityNamespace.java index 35a0157ab4..57a6783983 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/EntityNamespace.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/EntityNamespace.java @@ -82,11 +82,6 @@ public SqlNode getNode() { } - @Override - public CatalogEntity getTable() { - return table; - } - @Override public Monotonicity getMonotonicity( String columnName ) { diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/web/SchemaToJsonMapper.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/web/SchemaToJsonMapper.java index 8d3c6a7ba5..0edfd3745d 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/web/SchemaToJsonMapper.java +++ 
b/plugins/sql-language/src/main/java/org/polypheny/db/sql/web/SchemaToJsonMapper.java @@ -41,7 +41,7 @@ public class SchemaToJsonMapper { public static String exportTableDefinitionAsJson( @NonNull LogicalTable catalogTable, boolean exportPrimaryKey, boolean exportDefaultValues ) { List columns = new LinkedList<>(); - for ( LogicalColumn logicalColumn : Catalog.getInstance().getColumns( catalogTable.id ) ) { + for ( LogicalColumn logicalColumn : Catalog.getInstance().getLogicalRel( catalogTable.namespaceId ).getColumns( catalogTable.id ) ) { String defaultValue = null; String defaultFunctionName = null; if ( exportDefaultValues ) { @@ -61,7 +61,7 @@ public static String exportTableDefinitionAsJson( @NonNull LogicalTable catalogT } List primaryKeyColumnNames = null; if ( exportPrimaryKey ) { - for ( CatalogKey catalogKey : Catalog.getInstance().getTableKeys( catalogTable.id ) ) { + for ( CatalogKey catalogKey : Catalog.getInstance().getLogicalRel( catalogTable.namespaceId ).getTableKeys( catalogTable.id ) ) { if ( catalogKey.id == catalogTable.primaryKey ) { primaryKeyColumnNames = catalogKey.getColumnNames(); break; diff --git a/webui/src/main/java/org/polypheny/db/webui/Crud.java b/webui/src/main/java/org/polypheny/db/webui/Crud.java index bcbda3c343..d3591e7850 100644 --- a/webui/src/main/java/org/polypheny/db/webui/Crud.java +++ b/webui/src/main/java/org/polypheny/db/webui/Crud.java @@ -1222,17 +1222,18 @@ void getDataSourceColumns( final Context ctx ) throws UnknownTableException, Unk /** * Get additional columns of the DataSource that are not mapped to the table. */ - void getAvailableSourceColumns( final Context ctx ) throws UnknownDatabaseException, UnknownTableException, UnknownSchemaException { + void getAvailableSourceColumns( final Context ctx ) throws UnknownTableException { UIRequest request = ctx.bodyAsClass( UIRequest.class ); - LogicalTable table = catalog.getTable( request.getSchemaName(), request.getTableName() ); - ImmutableMap> placements = catalog.getColumnPlacementsByAdapter( table.id ); - Set adapterIds = placements.keySet(); + LogicalNamespace namespace = catalog.getNamespace( request.getSchemaName() ); + LogicalTable table = catalog.getLogicalRel( namespace.id ).getTable( request.getTableName() ); + ImmutableMap> placements = catalog.getAllocRel( table.namespaceId ).getColumnPlacementsByAdapter( table.id ); + Set adapterIds = placements.keySet(); if ( adapterIds.size() > 1 ) { log.warn( String.format( "The number of sources of an entity should not be > 1 (%s.%s)", request.getSchemaName(), request.getTableName() ) ); } List exportedColumns = new ArrayList<>(); - for ( int adapterId : adapterIds ) { + for ( Long adapterId : adapterIds ) { Adapter adapter = AdapterManager.getInstance().getAdapter( adapterId ); if ( adapter instanceof DataSource ) { DataSource dataSource = (DataSource) adapter; @@ -1265,10 +1266,10 @@ void getAvailableSourceColumns( final Context ctx ) throws UnknownDatabaseExcept } - void getMaterializedInfo( final Context ctx ) throws UnknownDatabaseException, UnknownTableException, UnknownSchemaException { + void getMaterializedInfo( final Context ctx ) throws UnknownTableException, UnknownSchemaException { EditTableRequest request = ctx.bodyAsClass( EditTableRequest.class ); - LogicalTable catalogTable = catalog.getTable( request.schema, request.table ); + LogicalTable catalogTable = getLogicalTable( request.schema, request.table ); if ( catalogTable.entityType == EntityType.MATERIALIZED_VIEW ) { CatalogMaterializedView catalogMaterializedView = 
(CatalogMaterializedView) catalogTable;
@@ -1296,6 +1297,13 @@ void getMaterializedInfo( final Context ctx ) throws UnknownDatabaseException, U
 }
+ private LogicalTable getLogicalTable( String schema, String table ) throws UnknownTableException {
+ LogicalNamespace namespace = catalog.getNamespace( schema );
+
+ return catalog.getLogicalRel( namespace.id ).getTable( table );
+ }
+
+
 void updateMaterialized( final Context ctx ) {
 UIRequest request = ctx.bodyAsClass( UIRequest.class );
 Transaction transaction = getTransaction();
@@ -1592,11 +1600,11 @@ void getConstraints( final Context ctx ) {
 Map> temp = new HashMap<>();
 try {
- LogicalTable catalogTable = catalog.getTable( t[0], t[1] );
+ LogicalTable catalogTable = getLogicalTable( t[0], t[1] );
 // get primary key
 if ( catalogTable.primaryKey != null ) {
- CatalogPrimaryKey primaryKey = catalog.getPrimaryKey( catalogTable.primaryKey );
+ CatalogPrimaryKey primaryKey = catalog.getLogicalRel( catalogTable.namespaceId ).getPrimaryKey( catalogTable.primaryKey );
 for ( String columnName : primaryKey.getColumnNames() ) {
 if ( !temp.containsKey( "" ) ) {
 temp.put( "", new ArrayList<>() );
@@ -1610,7 +1618,7 @@ void getConstraints( final Context ctx ) {
 // get unique constraints.
 temp.clear();
- List constraints = catalog.getConstraints( catalogTable.id );
+ List constraints = catalog.getLogicalRel( catalogTable.namespaceId ).getConstraints( catalogTable.id );
 for ( CatalogConstraint catalogConstraint : constraints ) {
 if ( catalogConstraint.type == ConstraintType.UNIQUE ) {
 temp.put( catalogConstraint.name, new ArrayList<>( catalogConstraint.key.getColumnNames() ) );
@@ -1630,8 +1638,6 @@ void getConstraints( final Context ctx ) {
 } catch ( UnknownTableException e ) {
 log.error( "Caught exception while fetching constraints", e );
 result = new Result( e );
- } catch ( UnknownSchemaException e ) {
- throw new RuntimeException( e );
 }
 ctx.json( result );
@@ -1752,8 +1758,8 @@ void getIndexes( final Context ctx ) {
 EditTableRequest request = ctx.bodyAsClass( EditTableRequest.class );
 Result result;
 try {
- LogicalTable catalogTable = catalog.getTable( request.schema, request.table );
- List catalogIndexes = catalog.getIndexes( catalogTable.id, false );
+ LogicalTable catalogTable = getLogicalTable( request.schema, request.table );
+ List catalogIndexes = catalog.getLogicalRel( catalogTable.namespaceId ).getIndexes( catalogTable.id, false );
 DbColumn[] header = {
 new DbColumn( "Name" ),
@@ -1807,8 +1813,6 @@ void getIndexes( final Context ctx ) {
 } catch ( UnknownTableException e ) {
 log.error( "Caught exception while fetching indexes", e );
 result = new Result( e );
- } catch ( UnknownSchemaException e ) {
- throw new RuntimeException( e );
 }
 ctx.json( result );
 }
@@ -1878,11 +1882,11 @@ void createIndex( final Context ctx ) {
 }
- void getUnderlyingTable( final Context ctx ) throws UnknownDatabaseException, UnknownTableException, UnknownSchemaException {
+ void getUnderlyingTable( final Context ctx ) throws UnknownTableException {
 UIRequest request = ctx.bodyAsClass( UIRequest.class );
- LogicalTable catalogTable = catalog.getTable( request.getSchemaName(), request.getTableName() );
+ LogicalTable catalogTable = getLogicalTable( request.getSchemaName(), request.getTableName() );
 if ( catalogTable.entityType == EntityType.VIEW ) {
 ImmutableMap> underlyingTableOriginal = ((CatalogView) catalogTable).getUnderlyingTables();
@@ -1890,9 +1894,9 @@ void getUnderlyingTable( final Context ctx ) throws UnknownDatabaseException, Un
 for ( Entry> entry :
underlyingTableOriginal.entrySet() ) { List columns = new ArrayList<>(); for ( Long ids : entry.getValue() ) { - columns.add( catalog.getColumn( ids ).name ); + columns.add( catalog.getLogicalRel( catalogTable.namespaceId ).getColumn( ids ).name ); } - underlyingTable.put( catalog.getTable( entry.getKey() ).name, columns ); + underlyingTable.put( catalog.getLogicalRel( catalogTable.namespaceId ).getTable( entry.getKey() ).name, columns ); } ctx.json( new UnderlyingTables( underlyingTable ) ); } else { @@ -1914,23 +1918,23 @@ private Placement getPlacements( final Index index ) { String schemaName = index.getSchema(); String tableName = index.getTable(); try { - LogicalTable table = catalog.getTable( schemaName, tableName ); - Placement p = new Placement( table.partitionProperty.isPartitioned, catalog.getPartitionGroupNames( table.id ), table.entityType ); + LogicalTable table = getLogicalTable( schemaName, tableName ); + Placement p = new Placement( table.partitionProperty.isPartitioned, catalog.getAllocRel( table.namespaceId ).getPartitionGroupNames( table.id ), table.entityType ); if ( table.entityType == EntityType.VIEW ) { return p; } else { long pkid = table.primaryKey; - List pkColumnIds = Catalog.getInstance().getPrimaryKey( pkid ).columnIds; - LogicalColumn pkColumn = Catalog.getInstance().getColumn( pkColumnIds.get( 0 ) ); - List pkPlacements = catalog.getColumnPlacement( pkColumn.id ); + List pkColumnIds = Catalog.getInstance().getLogicalRel( table.namespaceId ).getPrimaryKey( pkid ).columnIds; + LogicalColumn pkColumn = Catalog.getInstance().getLogicalRel( table.namespaceId ).getColumn( pkColumnIds.get( 0 ) ); + List pkPlacements = catalog.getAllocRel( table.namespaceId ).getColumnPlacements( pkColumn.id ); for ( CatalogColumnPlacement placement : pkPlacements ) { Adapter adapter = AdapterManager.getInstance().getAdapter( placement.adapterId ); p.addAdapter( new RelationalStore( adapter.getUniqueName(), adapter.getUniqueName(), - catalog.getColumnPlacementsOnAdapterPerTable( adapter.getAdapterId(), table.id ), - catalog.getPartitionGroupsIndexOnDataPlacement( placement.adapterId, placement.tableId ), + catalog.getAllocRel( table.namespaceId ).getColumnPlacementsOnAdapterPerTable( adapter.getAdapterId(), table.id ), + catalog.getAllocRel( table.namespaceId ).getPartitionGroupsIndexOnDataPlacement( placement.adapterId, placement.tableId ), table.partitionProperty.numPartitionGroups, table.partitionProperty.partitionType ) ); } @@ -1939,8 +1943,6 @@ private Placement getPlacements( final Index index ) { } catch ( UnknownTableException e ) { log.error( "Caught exception while getting placements", e ); return new Placement( e ); - } catch ( UnknownSchemaException e ) { - throw new RuntimeException( e ); } } @@ -2047,7 +2049,7 @@ private List buildPartitionFunctionRow( PartitioningReq } - void getPartitionFunctionModel( final Context ctx ) throws UnknownDatabaseException, UnknownColumnException, UnknownTableException, UnknownSchemaException { + void getPartitionFunctionModel( final Context ctx ) throws UnknownColumnException, UnknownTableException, UnknownSchemaException { PartitioningRequest request = ctx.bodyAsClass( PartitioningRequest.class ); // Get correct partition function @@ -2057,7 +2059,9 @@ void getPartitionFunctionModel( final Context ctx ) throws UnknownDatabaseExcept // Check whether the selected partition function supports the selected partition column LogicalColumn partitionColumn; - partitionColumn = Catalog.getInstance().getColumn( request.schemaName, 
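/* Editor's note, not part of the original patch: column lookup becomes a two-step resolution,
   first the namespace by name, then the column inside the namespace-scoped relational catalog.
   A minimal sketch of the pattern introduced by this hunk, assuming getColumn( tableName, columnName )
   still signals a missing column with UnknownColumnException:

   LogicalNamespace ns = Catalog.getInstance().getNamespace( request.schemaName );
   LogicalColumn column = Catalog.getInstance()
           .getLogicalRel( ns.id )
           .getColumn( request.tableName, request.column );
*/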
request.tableName, request.column ); + LogicalNamespace namespace = Catalog.getInstance().getNamespace( request.schemaName ); + + partitionColumn = Catalog.getInstance().getLogicalRel( namespace.id ).getColumn( request.tableName, request.column ); if ( !partitionManager.supportsColumnOfType( partitionColumn.type ) ) { ctx.json( new PartitionFunctionModel( "The partition function " + request.method + " does not support columns of type " + partitionColumn.type ) ); @@ -2508,11 +2512,15 @@ void getUml( final Context ctx ) { ArrayList fKeys = new ArrayList<>(); ArrayList tables = new ArrayList<>(); - List catalogEntities = catalog.getTables( new org.polypheny.db.catalog.logistic.Pattern( request.schema ), null ); + List catalogEntities = Catalog.getInstance().getNamespaces( new org.polypheny.db.catalog.logistic.Pattern( request.schema ) ) + .stream() + .filter( s -> s.namespaceType == NamespaceType.RELATIONAL ) + .flatMap( s -> catalog.getLogicalRel( s.id ).getTables( null ).stream() ).collect( Collectors.toList() ); + for ( LogicalTable catalogTable : catalogEntities ) { if ( catalogTable.entityType == EntityType.ENTITY || catalogTable.entityType == EntityType.SOURCE ) { // get foreign keys - List foreignKeys = catalog.getForeignKeys( catalogTable.id ); + List foreignKeys = catalog.getLogicalRel( catalogTable.namespaceId ).getForeignKeys( catalogTable.id ); for ( CatalogForeignKey catalogForeignKey : foreignKeys ) { for ( int i = 0; i < catalogForeignKey.getReferencedKeyColumnNames().size(); i++ ) { fKeys.add( ForeignKey.builder() @@ -2537,14 +2545,14 @@ void getUml( final Context ctx ) { // get primary key with its columns if ( catalogTable.primaryKey != null ) { - CatalogPrimaryKey catalogPrimaryKey = catalog.getPrimaryKey( catalogTable.primaryKey ); + CatalogPrimaryKey catalogPrimaryKey = catalog.getLogicalRel( catalogTable.namespaceId ).getPrimaryKey( catalogTable.primaryKey ); for ( String columnName : catalogPrimaryKey.getColumnNames() ) { table.addPrimaryKeyField( columnName ); } } // get unique constraints - List catalogConstraints = catalog.getConstraints( catalogTable.id ); + List catalogConstraints = catalog.getLogicalRel( catalogTable.namespaceId ).getConstraints( catalogTable.id ); for ( CatalogConstraint catalogConstraint : catalogConstraints ) { if ( catalogConstraint.type == ConstraintType.UNIQUE ) { // TODO: unique constraints can be over multiple columns. @@ -2558,7 +2566,7 @@ void getUml( final Context ctx ) { } // get unique indexes - List catalogIndexes = catalog.getIndexes( catalogTable.id, true ); + List catalogIndexes = catalog.getLogicalRel( catalogTable.namespaceId ).getIndexes( catalogTable.id, true ); for ( CatalogIndex catalogIndex : catalogIndexes ) { // TODO: unique indexes can be over multiple columns. if ( catalogIndex.key.getColumnNames().size() == 1 && @@ -2890,7 +2898,7 @@ void schemaRequest( final Context ctx ) { // drop schema else if ( !schema.isCreate() && schema.isDrop() ) { if ( type == null ) { - List namespaces = catalog.getNamespaces( Catalog.defaultDatabaseId, new org.polypheny.db.catalog.logistic.Pattern( schema.getName() ) ); + List namespaces = catalog.getNamespaces( new org.polypheny.db.catalog.logistic.Pattern( schema.getName() ) ); assert namespaces.size() == 1; type = namespaces.get( 0 ).namespaceType; @@ -3188,12 +3196,12 @@ public static Result executeSqlSelect( final Statement statement, final UIReques if ( request.tableId != null ) { String[] t = request.tableId.split( "\\." 
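/* Editor's note, not part of the original patch: request.tableId arrives as "namespace.table",
   and String.split takes a regular expression, hence the escaped dot. Sketch of the two-step
   lookup this hunk rewrites the old catalog.getTable( schema, table ) call into:

   String[] parts = request.tableId.split( "\\." );  // e.g. { "public", "emps" }
   LogicalNamespace ns = crud.catalog.getNamespace( parts[0] );
   LogicalTable table = crud.catalog.getLogicalRel( ns.id ).getTable( parts[1] );
*/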
); try { - catalogTable = crud.catalog.getTable( t[0], t[1] ); + LogicalNamespace namespace = crud.catalog.getNamespace( t[0] ); + + catalogTable = crud.catalog.getLogicalRel( namespace.id ).getTable( t[1] ); entityType = catalogTable.entityType; } catch ( UnknownTableException e ) { log.error( "Caught exception", e ); - } catch ( UnknownSchemaException e ) { - throw new RuntimeException( e ); } } @@ -3224,8 +3232,8 @@ public static Result executeSqlSelect( final Statement statement, final UIReques // Get column default values if ( catalogTable != null ) { try { - if ( crud.catalog.checkIfExistsColumn( catalogTable.id, columnName ) ) { - LogicalColumn logicalColumn = crud.catalog.getColumn( catalogTable.id, columnName ); + LogicalColumn logicalColumn = crud.catalog.getLogicalRel( catalogTable.namespaceId ).getColumn( catalogTable.id, columnName ); + if ( logicalColumn != null ) { if ( logicalColumn.defaultValue != null ) { dbCol.defaultValue = logicalColumn.defaultValue.value; } @@ -3576,7 +3584,7 @@ public static Transaction getTransaction( boolean analyze, boolean useCache, Tra Transaction transaction = transactionManager.startTransaction( userId, analyze, origin, MultimediaFlavor.FILE ); transaction.setUseCache( useCache ); return transaction; - } catch ( UnknownUserException | UnknownDatabaseException | UnknownSchemaException e ) { + } catch ( UnknownUserException | UnknownSchemaException e ) { throw new RuntimeException( "Error while starting transaction", e ); } } @@ -3597,15 +3605,13 @@ public static Transaction getTransaction( boolean analyze, boolean useCache, Cru private Map getCatalogColumns( String schemaName, String tableName ) { Map dataTypes = new HashMap<>(); try { - LogicalTable table = catalog.getTable( schemaName, tableName ); - List logicalColumns = catalog.getColumns( table.id ); + LogicalTable table = getLogicalTable( schemaName, tableName ); + List logicalColumns = catalog.getLogicalRel( table.namespaceId ).getColumns( table.id ); for ( LogicalColumn logicalColumn : logicalColumns ) { dataTypes.put( logicalColumn.name, logicalColumn ); } } catch ( UnknownTableException e ) { log.error( "Caught exception", e ); - } catch ( UnknownSchemaException e ) { - throw new RuntimeException( e ); } return dataTypes; } @@ -3613,7 +3619,7 @@ private Map getCatalogColumns( String schemaName, String void getTypeSchemas( final Context ctx ) { ctx.json( catalog - .getNamespaces( 1, null ) + .getNamespaces( null ) .stream() .collect( Collectors.toMap( LogicalNamespace::getName, LogicalNamespace::getNamespaceType ) ) ); } diff --git a/webui/src/main/java/org/polypheny/db/webui/HttpServer.java b/webui/src/main/java/org/polypheny/db/webui/HttpServer.java index 91a7c5337c..f88292ead1 100644 --- a/webui/src/main/java/org/polypheny/db/webui/HttpServer.java +++ b/webui/src/main/java/org/polypheny/db/webui/HttpServer.java @@ -46,7 +46,6 @@ import org.polypheny.db.adapter.DataStore; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.exceptions.UnknownColumnException; -import org.polypheny.db.catalog.exceptions.UnknownDatabaseException; import org.polypheny.db.catalog.exceptions.UnknownSchemaException; import org.polypheny.db.catalog.exceptions.UnknownTableException; import org.polypheny.db.config.RuntimeConfig; @@ -201,7 +200,6 @@ private void attachExceptions( Javalin server ) { defaultException( IOException.class, server ); defaultException( ServletException.class, server ); - defaultException( UnknownDatabaseException.class, server ); defaultException( 
UnknownSchemaException.class, server ); defaultException( UnknownTableException.class, server ); defaultException( UnknownColumnException.class, server ); diff --git a/webui/src/main/java/org/polypheny/db/webui/WebSocket.java b/webui/src/main/java/org/polypheny/db/webui/WebSocket.java index 607a903802..58ed27dfba 100644 --- a/webui/src/main/java/org/polypheny/db/webui/WebSocket.java +++ b/webui/src/main/java/org/polypheny/db/webui/WebSocket.java @@ -146,7 +146,7 @@ public void onMessage( final WsMessageContext ctx ) { } else {//TableRequest, is equal to UIRequest UIRequest uiRequest = ctx.messageAsClass( UIRequest.class ); try { - LogicalNamespace namespace = Catalog.getInstance().getSchema( Catalog.defaultDatabaseId, uiRequest.getSchemaName() ); + LogicalNamespace namespace = Catalog.getInstance().getNamespace( uiRequest.getSchemaName() ); switch ( namespace.namespaceType ) { case RELATIONAL: result = crud.getTable( uiRequest ); diff --git a/webui/src/main/java/org/polypheny/db/webui/crud/LanguageCrud.java b/webui/src/main/java/org/polypheny/db/webui/crud/LanguageCrud.java index 3c10285a7d..b8fbaa4bd7 100644 --- a/webui/src/main/java/org/polypheny/db/webui/crud/LanguageCrud.java +++ b/webui/src/main/java/org/polypheny/db/webui/crud/LanguageCrud.java @@ -43,7 +43,6 @@ import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.exceptions.UnknownCollectionException; import org.polypheny.db.catalog.exceptions.UnknownColumnException; -import org.polypheny.db.catalog.exceptions.UnknownDatabaseException; import org.polypheny.db.catalog.exceptions.UnknownSchemaException; import org.polypheny.db.catalog.exceptions.UnknownTableException; import org.polypheny.db.catalog.logistic.EntityType; @@ -193,9 +192,10 @@ public static Result getResult( QueryLanguage language, Statement statement, Que LogicalTable catalogTable = null; if ( request.tableId != null ) { String[] t = request.tableId.split( "\\." 
); + LogicalNamespace namespace = catalog.getNamespace( t[0] ); try { - catalogTable = catalog.getTable( t[0], t[1] ); - } catch ( UnknownTableException | UnknownDatabaseException | UnknownSchemaException e ) { + catalogTable = catalog.getLogicalRel( namespace.id ).getTable( t[1] ); + } catch ( UnknownTableException e ) { log.error( "Caught exception", e ); } } @@ -226,15 +226,11 @@ public static Result getResult( QueryLanguage language, Statement statement, Que // Get column default values if ( catalogTable != null ) { - try { - if ( catalog.checkIfExistsColumn( catalogTable.id, columnName ) ) { - LogicalColumn logicalColumn = catalog.getColumn( catalogTable.id, columnName ); - if ( logicalColumn.defaultValue != null ) { - dbCol.defaultValue = logicalColumn.defaultValue.value; - } + LogicalColumn logicalColumn = catalogTable.columns.stream().filter( c -> c.name.equals( columnName ) ).findFirst().orElse( null ); + if ( logicalColumn != null ) { + if ( logicalColumn.defaultValue != null ) { + dbCol.defaultValue = logicalColumn.defaultValue.value; } - } catch ( UnknownColumnException e ) { - log.error( "Caught exception", e ); } } header.add( dbCol ); @@ -306,7 +302,7 @@ public void createCollection( final Context ctx ) { */ public void getDocumentDatabases( final Context ctx ) { Map names = Catalog.getInstance() - .getNamespaces( Catalog.defaultDatabaseId, null ) + .getNamespaces( null ) .stream() .collect( Collectors.toMap( LogicalNamespace::getName, s -> s.namespaceType.name() ) ); @@ -324,7 +320,11 @@ public void getGraphPlacements( final Context ctx ) { private Placement getPlacements( final Index index ) { Catalog catalog = Catalog.getInstance(); String graphName = index.getSchema(); - List graphs = catalog.getGraphs( new Pattern( graphName ) ); + List namespaces = catalog.getNamespaces( new Pattern( graphName ) ); + if ( namespaces.size() != 1 ) { + throw new RuntimeException(); + } + List graphs = catalog.getLogicalGraph( namespaces.get( 0 ).id ).getGraphs( new Pattern( graphName ) ); if ( graphs.size() != 1 ) { log.error( "The requested graph does not exist." ); return new Placement( new RuntimeException( "The requested graph does not exist." 
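/* Editor's note, not part of the original patch: a graph is now reached through its namespace,
   so the Pattern lookup must resolve to exactly one namespace before the graph catalog is
   consulted. Hedged sketch of the resolution chain used here, with an explicit message supplied
   where the hunk throws an otherwise message-less RuntimeException:

   List<LogicalNamespace> namespaces = catalog.getNamespaces( new Pattern( graphName ) );
   if ( namespaces.size() != 1 ) {
       throw new RuntimeException( "Expected exactly one namespace for graph '" + graphName + "'." );
   }
   List<LogicalGraph> graphs = catalog.getLogicalGraph( namespaces.get( 0 ).id ).getGraphs( new Pattern( graphName ) );
*/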
) ); @@ -336,13 +336,13 @@ private Placement getPlacements( final Index index ) { return p; } else { - for ( int adapterId : graph.placements ) { - CatalogGraphPlacement placement = catalog.getGraphPlacement( graph.id, adapterId ); + for ( long adapterId : graph.placements ) { + CatalogGraphPlacement placement = catalog.getAllocGraph( graph.id ).getGraphPlacement( graph.id, adapterId ); Adapter adapter = AdapterManager.getInstance().getAdapter( placement.adapterId ); p.addAdapter( new Placement.GraphStore( adapter.getUniqueName(), adapter.getUniqueName(), - catalog.getGraphPlacements( adapterId ), + catalog.getAllocGraph( graph.id ).getGraphPlacements( adapterId ), adapter.getSupportedNamespaceTypes().contains( NamespaceType.GRAPH ) ) ); } return p; @@ -369,26 +369,21 @@ public void getCollectionPlacements( Context context ) { String collectionName = index.getTable(); Catalog catalog = Catalog.getInstance(); long namespaceId; - try { - namespaceId = catalog.getSchema( Catalog.defaultDatabaseId, namespace ).id; - } catch ( UnknownSchemaException e ) { - context.json( new Placement( e ) ); - return; - } - List collections = catalog.getCollections( namespaceId, new Pattern( collectionName ) ); + namespaceId = catalog.getNamespace( namespace ).id; + List collections = catalog.getLogicalDoc( namespaceId ).getCollections( new Pattern( collectionName ) ); if ( collections.size() != 1 ) { context.json( new Placement( new UnknownCollectionException( 0 ) ) ); return; } - LogicalCollection collection = catalog.getCollection( collections.get( 0 ).id ); + LogicalCollection collection = collections.get( 0 ); Placement placement = new Placement( false, List.of(), EntityType.ENTITY ); - for ( Integer adapterId : collection.placements ) { + for ( long adapterId : collection.placements ) { Adapter adapter = AdapterManager.getInstance().getAdapter( adapterId ); - List placements = catalog.getCollectionPlacementsByAdapter( adapterId ); + List placements = catalog.getAllocDoc( collection.namespaceId ).getCollectionPlacementsByAdapter( adapterId ); placement.addAdapter( new DocumentStore( adapter.getUniqueName(), adapter.getUniqueName(), placements, adapter.getSupportedNamespaceTypes().contains( NamespaceType.DOCUMENT ) ) ); } diff --git a/webui/src/main/java/org/polypheny/db/webui/crud/StatisticCrud.java b/webui/src/main/java/org/polypheny/db/webui/crud/StatisticCrud.java index b3e0600c93..4357c4a0ef 100644 --- a/webui/src/main/java/org/polypheny/db/webui/crud/StatisticCrud.java +++ b/webui/src/main/java/org/polypheny/db/webui/crud/StatisticCrud.java @@ -89,11 +89,11 @@ public void getTableStatistics( Context ctx ) { long tableId; long schemaId; try { - schemaId = Catalog.getInstance().getSchema( 1, request.tableId.split( "\\." )[0] ).id; - tableId = Catalog.getInstance().getTable( schemaId, request.tableId.split( "\\." )[1] ).id; + schemaId = Catalog.getInstance().getNamespace( request.tableId.split( "\\." )[0] ).id; + tableId = Catalog.getInstance().getLogicalRel( schemaId ).getTable( request.tableId.split( "\\." )[1] ).id; ctx.json( statisticsManager.getTableStatistic( schemaId, tableId ) ); - } catch ( UnknownTableException | UnknownSchemaException e ) { + } catch ( UnknownTableException e ) { throw new RuntimeException( "Schema: " + request.tableId.split( "\\." )[0] + " or Table: " + request.tableId.split( "\\." )[1] + "is unknown." 
); } diff --git a/webui/src/main/java/org/polypheny/db/webui/models/Placement.java b/webui/src/main/java/org/polypheny/db/webui/models/Placement.java index b5bf330629..c8721d9340 100644 --- a/webui/src/main/java/org/polypheny/db/webui/models/Placement.java +++ b/webui/src/main/java/org/polypheny/db/webui/models/Placement.java @@ -184,7 +184,6 @@ private static class ColumnPlacement { private final long columnId; private final String columnName; private final int storeId; - private final String storeUniqueName; private final PlacementType placementType; private final String physicalSchemaName; private final String physicalColumnName; @@ -195,8 +194,7 @@ public ColumnPlacement( CatalogColumnPlacement catalogColumnPlacement ) { this.tableName = catalogColumnPlacement.getLogicalTableName(); this.columnId = catalogColumnPlacement.columnId; this.columnName = catalogColumnPlacement.getLogicalColumnName(); - this.storeId = catalogColumnPlacement.adapterId; - this.storeUniqueName = catalogColumnPlacement.adapterUniqueName; + this.storeId = (int) catalogColumnPlacement.adapterId; this.placementType = catalogColumnPlacement.placementType; this.physicalSchemaName = catalogColumnPlacement.physicalSchemaName; this.physicalColumnName = catalogColumnPlacement.physicalColumnName; diff --git a/webui/src/main/java/org/polypheny/db/webui/models/requests/BatchUpdateRequest.java b/webui/src/main/java/org/polypheny/db/webui/models/requests/BatchUpdateRequest.java index 4033b31f1e..7f9e689b3a 100644 --- a/webui/src/main/java/org/polypheny/db/webui/models/requests/BatchUpdateRequest.java +++ b/webui/src/main/java/org/polypheny/db/webui/models/requests/BatchUpdateRequest.java @@ -30,9 +30,10 @@ import lombok.extern.slf4j.Slf4j; import org.apache.commons.io.IOUtils; import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalColumn; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.exceptions.UnknownColumnException; -import org.polypheny.db.catalog.exceptions.UnknownDatabaseException; import org.polypheny.db.catalog.exceptions.UnknownSchemaException; import org.polypheny.db.catalog.exceptions.UnknownTableException; import org.polypheny.db.transaction.Statement; @@ -65,8 +66,10 @@ public String getQuery( String tableId, Statement statement, HttpServletRequest String[] split = tableId.split( "\\." 
); LogicalColumn logicalColumn; try { - logicalColumn = catalog.getColumn( catalog.getTable( split[0], split[1] ).id, entry.getKey() ); - } catch ( UnknownColumnException | UnknownTableException | UnknownDatabaseException | UnknownSchemaException e ) { + LogicalNamespace namespace = catalog.getNamespace( split[0] ); + LogicalTable table = catalog.getLogicalRel( namespace.id ).getTable( split[1] ); + logicalColumn = catalog.getLogicalRel( table.namespaceId ).getColumn( table.id, entry.getKey() ); + } catch ( UnknownColumnException | UnknownTableException e ) { log.error( "Could not determine column type", e ); return null; } From b68375607257f06362ef657987e76938005b9e35 Mon Sep 17 00:00:00 2001 From: datomo Date: Tue, 7 Mar 2023 22:42:08 +0100 Subject: [PATCH 037/436] started with fullsnapshot logic --- .../db/catalog/snapshot/Snapshot.java | 9 ++-- .../org/polypheny/db/util/BuiltInMethod.java | 11 ++-- .../db/routing/routers/BaseRouter.java | 3 +- .../org/polypheny/db/catalog/PolyCatalog.java | 4 +- .../db/catalog/snapshot/FullSnapshot.java | 53 ++++++++++++++++--- 5 files changed, 61 insertions(+), 19 deletions(-) diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/Snapshot.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/Snapshot.java index c730fd08ae..ee76453c9d 100644 --- a/core/src/main/java/org/polypheny/db/catalog/snapshot/Snapshot.java +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/Snapshot.java @@ -24,8 +24,7 @@ import org.polypheny.db.algebra.operators.OperatorTable; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogEntity; -import org.polypheny.db.catalog.entity.CatalogNamespace; -import org.polypheny.db.catalog.entity.CatalogPartition; +import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.allocation.AllocationCollection; import org.polypheny.db.catalog.entity.allocation.AllocationGraph; import org.polypheny.db.catalog.entity.allocation.AllocationTable; @@ -55,11 +54,11 @@ default Expression getSnapshotExpression( long id ) { //// NAMESPACES - CatalogNamespace getNamespace( long id ); + LogicalNamespace getNamespace( long id ); - CatalogNamespace getNamespace( String name ); + LogicalNamespace getNamespace( String name ); - List getNamespaces( Pattern name ); + List getNamespaces( Pattern name ); //// ENTITIES diff --git a/core/src/main/java/org/polypheny/db/util/BuiltInMethod.java b/core/src/main/java/org/polypheny/db/util/BuiltInMethod.java index 417fb1f70e..14a3d30e9c 100644 --- a/core/src/main/java/org/polypheny/db/util/BuiltInMethod.java +++ b/core/src/main/java/org/polypheny/db/util/BuiltInMethod.java @@ -108,6 +108,8 @@ import org.polypheny.db.algebra.metadata.BuiltInMetadata.TableReferences; import org.polypheny.db.algebra.metadata.BuiltInMetadata.UniqueKeys; import org.polypheny.db.algebra.metadata.Metadata; +import org.polypheny.db.catalog.refactor.QueryableEntity; +import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.interpreter.Context; import org.polypheny.db.interpreter.Row; import org.polypheny.db.interpreter.Scalar; @@ -128,7 +130,6 @@ import org.polypheny.db.schema.FilterableEntity; import org.polypheny.db.schema.Namespace; import org.polypheny.db.schema.ProjectableFilterableEntity; -import org.polypheny.db.schema.QueryableEntity; import org.polypheny.db.schema.ScannableEntity; import org.polypheny.db.schema.SchemaPlus; import org.polypheny.db.schema.Schemas; @@ -154,7 +155,7 @@ public enum BuiltInMethod { 
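/* Editor's note, not part of the original patch: each BuiltInMethod constant below captures a
   java.lang.reflect.Method by (class, name, parameter types), so a signature change here has to
   mirror the actual Java method, as in the Snapshot-based rewrites in this hunk. A self-contained
   sketch of that reflective-registry pattern, independent of Polypheny's enum:

   enum KnownMethod {
       STRING_LENGTH( String.class, "length" );

       final java.lang.reflect.Method method;

       KnownMethod( Class<?> clazz, String name, Class<?>... argTypes ) {
           try {
               this.method = clazz.getMethod( name, argTypes );
           } catch ( NoSuchMethodException e ) {
               throw new IllegalStateException( e );  // fail fast when the enum is loaded
           }
       }
   }
*/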
PARSE_ARRAY_FROM_TEXT( Functions.class, "reparse", PolyType.class, Long.class, String.class ), QUERYABLE_SELECT( Queryable.class, "select", FunctionExpression.class ), QUERYABLE_AS_ENUMERABLE( Queryable.class, "asEnumerable" ), - QUERYABLE_TABLE_AS_QUERYABLE( QueryableEntity.class, "asQueryable", DataContext.class, SchemaPlus.class, String.class ), + QUERYABLE_TABLE_AS_QUERYABLE( QueryableEntity.class, "asQueryable", DataContext.class, Snapshot.class, long.class ), AS_QUERYABLE( Enumerable.class, "asQueryable" ), ABSTRACT_ENUMERABLE_CTOR( AbstractEnumerable.class ), BATCH_ITERATOR_CTOR( BatchIteratorEnumerable.class ), @@ -162,16 +163,16 @@ public enum BuiltInMethod { INTO( ExtendedEnumerable.class, "into", Collection.class ), REMOVE_ALL( ExtendedEnumerable.class, "removeAll", Collection.class ), SCHEMA_GET_SUB_SCHEMA( Namespace.class, "getSubNamespace", String.class ), - SCHEMA_GET_TABLE( Namespace.class, "getLogicalTable", String.class ), + SCHEMA_GET_TABLE( Namespace.class, "getEntity", String.class ), SCHEMA_PLUS_UNWRAP( SchemaPlus.class, "unwrap", Class.class ), SCHEMAS_ENUMERABLE_SCANNABLE( Schemas.class, "enumerable", ScannableEntity.class, DataContext.class ), SCHEMAS_ENUMERABLE_FILTERABLE( Schemas.class, "enumerable", FilterableEntity.class, DataContext.class ), SCHEMAS_ENUMERABLE_PROJECTABLE_FILTERABLE( Schemas.class, "enumerable", ProjectableFilterableEntity.class, DataContext.class ), - SCHEMAS_QUERYABLE( Schemas.class, "queryable", DataContext.class, SchemaPlus.class, Class.class, String.class ), + SCHEMAS_QUERYABLE( Schemas.class, "queryable", DataContext.class, Snapshot.class, Class.class, String.class ), REFLECTIVE_SCHEMA_GET_TARGET( ReflectiveSchema.class, "getTarget" ), DATA_CONTEXT_GET( DataContext.class, "get", String.class ), DATA_CONTEXT_GET_PARAMETER_VALUE( DataContext.class, "getParameterValue", long.class ), - DATA_CONTEXT_GET_ROOT_SCHEMA( DataContext.class, "getRootSchema" ), + DATA_CONTEXT_GET_ROOT_SCHEMA( DataContext.class, "getSnapshot" ), //JDBC_SCHEMA_DATA_SOURCE( JdbcSchema.class, "getDataSource" ), ROW_VALUE( Row.class, "getObject", int.class ), ROW_AS_COPY( Row.class, "asCopy", Object[].class ), diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java b/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java index 923111a4d0..93fc6db1b0 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java @@ -59,6 +59,7 @@ import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.catalog.entity.CatalogNamespace; import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; +import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalCollection; import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalGraph; @@ -389,7 +390,7 @@ private void buildFinalProject( RoutedAlgBuilder builder, List physicalCatalogs; + Map logicalCatalogs; + Map allocationCatalogs; + ImmutableMap namespaceIds; - public FullSnapshot( long id, Map catalogs ) { + ImmutableMap namespaceNames; + + ImmutableMap tableIds; + + ImmutableMap, LogicalTable> tableNames; + + + ImmutableMap columnIds; + + ImmutableMap, LogicalColumn> columnNames; + + ImmutableMap collectionIds; + ImmutableMap, LogicalCollection> collectionNames; + + ImmutableMap graphId; + + ImmutableMap graphName; + + + public FullSnapshot( long id, Map logicalCatalogs, Map 
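/* Editor's note, not part of the original patch: the generic type parameters in this region were
   lost when the patch was rendered to text (everything between angle brackets reads as an HTML tag),
   so e.g. "Map logicalCatalogs" is presumably Map<Long, LogicalCatalog>. A hedged sketch of the
   index-building idea visible in this constructor: immutable id- and name-keyed lookups derived
   once from the mutable catalogs (ImmutableMap being Guava's com.google.common.collect.ImmutableMap):

   ImmutableMap<Long, LogicalNamespace> byId = ImmutableMap.copyOf(
           logicalCatalogs.values().stream()
                   .map( LogicalCatalog::getLogicalNamespace )
                   .collect( Collectors.toMap( n -> n.id, n -> n ) ) );
   ImmutableMap<String, LogicalNamespace> byName = ImmutableMap.copyOf(
           byId.values().stream().collect( Collectors.toMap( n -> n.name, n -> n ) ) );
*/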
allocationCatalogs, Map physicalCatalogs ) { this.id = id; + this.logicalCatalogs = logicalCatalogs; + this.allocationCatalogs = allocationCatalogs; + this.physicalCatalogs = physicalCatalogs; + namespaceIds = ImmutableMap.copyOf( logicalCatalogs.values().stream().map( LogicalCatalog::getLogicalNamespace ).collect( Collectors.toMap( n -> n.id, n -> n ) ) ); + namespaceNames = ImmutableMap.copyOf( namespaceIds.values().stream().collect( Collectors.toMap( n -> n.name, n -> n ) ) ); + tableIds = ImmutableMap.copyOf( logicalCatalogs.values().stream() + .filter( c -> c.getLogicalNamespace().namespaceType == NamespaceType.RELATIONAL ) + .map( c -> (LogicalRelationalCatalog) c ).flatMap( c -> c. ) ) } @Override - public CatalogNamespace getNamespace( long id ) { + public LogicalNamespace getNamespace( long id ) { return null; } @Override - public CatalogNamespace getNamespace( String name ) { + public LogicalNamespace getNamespace( String name ) { return null; } @Override - public List getNamespaces( Pattern name ) { + public List getNamespaces( Pattern name ) { return null; } From 7d8e2c30fb725e1a8cc28669fdc6fab2a34bbc3c Mon Sep 17 00:00:00 2001 From: datomo Date: Wed, 8 Mar 2023 15:36:26 +0100 Subject: [PATCH 038/436] moved into separate snapshots per model --- .../org/polypheny/db/catalog/Catalog.java | 149 ++------- .../db/catalog/catalogs/LogicalCatalog.java | 24 -- .../catalogs/LogicalDocumentCatalog.java | 18 -- .../catalog/catalogs/LogicalGraphCatalog.java | 17 - .../catalogs/LogicalRelationalCatalog.java | 276 +--------------- .../db/catalog/snapshot/AllocSnapshot.java | 40 +++ .../catalog/snapshot/LogicalDocSnapshot.java | 53 +++ .../snapshot/LogicalGraphSnapshot.java | 53 +++ .../catalog/snapshot/LogicalRelSnapshot.java | 301 ++++++++++++++++++ .../db/catalog/snapshot/PhysicalSnapshot.java | 40 +++ .../db/prepare/PolyphenyDbCatalogReader.java | 14 +- .../org/polypheny/db/catalog/MockCatalog.java | 127 +++++--- .../db/docker/MockCatalogDocker.java | 2 +- .../org/polypheny/db/catalog/PolyCatalog.java | 4 +- .../db/catalog/logical/DocumentCatalog.java | 42 --- .../db/catalog/snapshot/FullSnapshot.java | 14 +- 16 files changed, 601 insertions(+), 573 deletions(-) create mode 100644 core/src/main/java/org/polypheny/db/catalog/snapshot/AllocSnapshot.java create mode 100644 core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalDocSnapshot.java create mode 100644 core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalGraphSnapshot.java create mode 100644 core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalRelSnapshot.java create mode 100644 core/src/main/java/org/polypheny/db/catalog/snapshot/PhysicalSnapshot.java diff --git a/core/src/main/java/org/polypheny/db/catalog/Catalog.java b/core/src/main/java/org/polypheny/db/catalog/Catalog.java index 16b380a8a0..5de6e24595 100644 --- a/core/src/main/java/org/polypheny/db/catalog/Catalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/Catalog.java @@ -19,9 +19,7 @@ import java.beans.PropertyChangeListener; import java.beans.PropertyChangeSupport; -import java.util.List; import java.util.Map; -import lombok.NonNull; import org.apache.calcite.linq4j.tree.Expression; import org.apache.calcite.linq4j.tree.Expressions; import org.pf4j.ExtensionPoint; @@ -33,23 +31,16 @@ import org.polypheny.db.catalog.catalogs.LogicalGraphCatalog; import org.polypheny.db.catalog.catalogs.LogicalRelationalCatalog; import org.polypheny.db.catalog.catalogs.PhysicalCatalog; -import org.polypheny.db.catalog.entity.CatalogAdapter; import 
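/* Editor's note, not part of the original patch: this commit splits the single Snapshot into
   per-model views, and the abstract Catalog below gains one accessor per data model. Sketch of
   the resulting facade; the signatures are taken from the added lines further down in this diff:

   public abstract Snapshot getSnapshot( long namespaceId );
   public abstract LogicalRelSnapshot getRelSnapshot( long namespaceId );
   public abstract LogicalDocSnapshot getDocSnapshot( long namespaceId );
   public abstract LogicalGraphSnapshot getGraphSnapshot( long namespaceId );
   public abstract PhysicalSnapshot getPhysicalSnapshot();
   public abstract AllocSnapshot getAllocSnapshot();
*/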
org.polypheny.db.catalog.entity.CatalogAdapter.AdapterType; -import org.polypheny.db.catalog.entity.CatalogIndex; -import org.polypheny.db.catalog.entity.CatalogQueryInterface; -import org.polypheny.db.catalog.entity.CatalogUser; -import org.polypheny.db.catalog.entity.LogicalNamespace; -import org.polypheny.db.catalog.entity.allocation.AllocationEntity; import org.polypheny.db.catalog.entity.logical.LogicalEntity; -import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.entity.physical.PhysicalEntity; import org.polypheny.db.catalog.exceptions.NoTablePrimaryKeyException; -import org.polypheny.db.catalog.exceptions.UnknownAdapterException; -import org.polypheny.db.catalog.exceptions.UnknownQueryInterfaceException; -import org.polypheny.db.catalog.exceptions.UnknownSchemaException; -import org.polypheny.db.catalog.exceptions.UnknownUserException; import org.polypheny.db.catalog.logistic.NamespaceType; -import org.polypheny.db.catalog.logistic.Pattern; +import org.polypheny.db.catalog.snapshot.AllocSnapshot; +import org.polypheny.db.catalog.snapshot.LogicalDocSnapshot; +import org.polypheny.db.catalog.snapshot.LogicalGraphSnapshot; +import org.polypheny.db.catalog.snapshot.LogicalRelSnapshot; +import org.polypheny.db.catalog.snapshot.PhysicalSnapshot; import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.transaction.Transaction; @@ -166,7 +157,7 @@ protected final boolean isValidIdentifier( final String str ) { /** * Inserts a new user, - * if a user with the same name already exists, it throws an error // TODO should it? + * if a user with the same name already exists, it throws an error * * @param name of the user * @param password of the user @@ -175,50 +166,6 @@ protected final boolean isValidIdentifier( final String str ) { public abstract long addUser( String name, String password ); - /** - * Get all schemas which fit to the specified filter pattern. - * getNamespaces(xid, null, null) returns all schemas of all databases. - * - * @param name Pattern for the schema name. null returns all. - * @return List of schemas which fit to the specified filter. If there is no schema which meets the criteria, an empty list is returned. - */ - public abstract @NonNull List getNamespaces( Pattern name ); - - /** - * Returns the schema with the specified id. - * - * @param id The id of the schema - * @return The schema - */ - public abstract LogicalNamespace getNamespace( long id ); - - /** - * Returns the schema with the given name in the specified database. - * - * @param name The name of the schema - * @return The schema - * @throws UnknownSchemaException If there is no schema with this name in the specified database. - */ - public abstract LogicalNamespace getNamespace( String name ); - - /** - * Adds a schema in a specified database - * - * @param name The name of the schema - * @param namespaceType The type of this schema - * @param caseSensitive - * @return The id of the inserted schema - */ - public abstract long addNamespace( String name, NamespaceType namespaceType, boolean caseSensitive ); - - /** - * Checks weather a schema with the specified name exists in a database. - * - * @param name The name of the schema to check - * @return True if there is a schema with this name. False if not. 
- */ - public abstract boolean checkIfExistsNamespace( String name ); - /** * Renames a schema * @@ -237,50 +184,14 @@ protected final boolean isValidIdentifier( final String str ) { /** - * Get the user with the specified name - * - * @param name The name of the user - * @return The user - * @throws UnknownUserException If there is no user with the specified name - */ - public abstract CatalogUser getUser( String name ) throws UnknownUserException; - - /** - * Get the user with the specified id. - * - * @param id The id of the user - * @return The user - */ - public abstract CatalogUser getUser( long id ); - - /** - * Get list of all adapters - * - * @return List of adapters - */ - public abstract List getAdapters(); - - /** - * Get an adapter by its unique name - * - * @return The adapter - */ - public abstract CatalogAdapter getAdapter( String uniqueName ) throws UnknownAdapterException; - - /** - * Get an adapter by its id - * - * @return The adapter - */ - public abstract CatalogAdapter getAdapter( long id ); - - /** - * Check if an adapter with the given id exists + * Adds a schema in a specified database * - * @param id the id of the adapter - * @return if the adapter exists + * @param name The name of the schema + * @param namespaceType The type of this schema + * @param caseSensitive + * @return The id of the inserted schema */ - public abstract boolean checkIfExistsAdapter( long id ); + public abstract long addNamespace( String name, NamespaceType namespaceType, boolean caseSensitive ); /** * Add an adapter @@ -308,28 +219,6 @@ protected final boolean isValidIdentifier( final String str ) { */ public abstract void deleteAdapter( long id ); - /* - * Get list of all query interfaces - * - * @return List of query interfaces - */ - public abstract List getQueryInterfaces(); - - /** - * Get a query interface by its unique name - * - * @param uniqueName The unique name of the query interface - * @return The CatalogQueryInterface - */ - public abstract CatalogQueryInterface getQueryInterface( String uniqueName ) throws UnknownQueryInterfaceException; - - /** - * Get a query interface by its id - * - * @param id The id of the query interface - * @return The CatalogQueryInterface - */ - public abstract CatalogQueryInterface getQueryInterface( long id ); /** * Add a query interface @@ -354,21 +243,19 @@ protected final boolean isValidIdentifier( final String str ) { public abstract void clear(); - public abstract Snapshot getSnapshot( long id ); - - //// todo move into snapshot - + public abstract Snapshot getSnapshot( long namespaceId ); - public abstract List> getAllocationsOnAdapter( long id ); + public abstract LogicalDocSnapshot getDocSnapshot( long namespaceId ); + public abstract LogicalGraphSnapshot getGraphSnapshot( long namespaceId ); - public abstract List> getPhysicalsOnAdapter( long adapterId ); + public abstract LogicalRelSnapshot getRelSnapshot( long namespaceId ); - public abstract List getIndexes(); + public abstract PhysicalSnapshot getPhysicalSnapshot(); - public abstract List getTablesForPeriodicProcessing(); + public abstract AllocSnapshot getAllocSnapshot(); } diff --git a/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalCatalog.java b/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalCatalog.java index 228f1b1cdb..1ee874c5e2 100644 --- a/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalCatalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalCatalog.java @@ -17,34 +17,10 @@ package 
org.polypheny.db.catalog.catalogs; import org.polypheny.db.catalog.entity.LogicalNamespace; -import org.polypheny.db.catalog.entity.logical.LogicalEntity; public interface LogicalCatalog { - /** - * Checks if there is a table with the specified name in the specified schema. - * - * @param entityName The name to check for - * @return true if there is a table with this name, false if not. - */ - public abstract boolean checkIfExistsEntity( String entityName ); - - /** - * Checks if there is a table with the specified id. - * - * @param tableId id of the table - * @return true if there is a table with this id, false if not. - */ - public abstract boolean checkIfExistsEntity( long tableId ); - - LogicalNamespace getLogicalNamespace(); - - - LogicalEntity getEntity( String name ); - - LogicalEntity getEntity( long id ); - LogicalCatalog withLogicalNamespace( LogicalNamespace namespace ); } diff --git a/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalDocumentCatalog.java b/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalDocumentCatalog.java index b31cac25e6..711cb97f65 100644 --- a/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalDocumentCatalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalDocumentCatalog.java @@ -18,28 +18,10 @@ import java.util.List; import org.polypheny.db.adapter.DataStore; -import org.polypheny.db.catalog.entity.logical.LogicalCollection; import org.polypheny.db.catalog.logistic.EntityType; -import org.polypheny.db.catalog.logistic.Pattern; public interface LogicalDocumentCatalog extends LogicalCatalog { - /** - * Get the collection with the given id. - * - * @param collectionId The id of the collection - * @return The requested collection - */ - public abstract LogicalCollection getCollection( long collectionId ); - - /** - * Get a collection of collections which match the given naming pattern. - * - * @param namePattern The naming pattern of the collection itself, null if all are matched - * @return collection of collections matching conditions - */ - public abstract List getCollections( Pattern namePattern ); - /** * Add a new collection with the given parameters. * diff --git a/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalGraphCatalog.java b/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalGraphCatalog.java index 4ff400a941..1f2f3028bd 100644 --- a/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalGraphCatalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalGraphCatalog.java @@ -18,11 +18,9 @@ import java.util.List; import org.polypheny.db.adapter.DataStore; -import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.catalog.exceptions.GenericCatalogException; import org.polypheny.db.catalog.exceptions.UnknownColumnException; import org.polypheny.db.catalog.exceptions.UnknownTableException; -import org.polypheny.db.catalog.logistic.Pattern; public interface LogicalGraphCatalog extends LogicalCatalog { @@ -63,21 +61,6 @@ public interface LogicalGraphCatalog extends LogicalCatalog { */ public abstract void deleteGraph( long id ); - /** - * Returns an existing graph. - * - * @param id The id of the graph to return - * @return The graph entity with the provided id - */ - public abstract LogicalGraph getGraph( long id ); - - /** - * Get a collection of all graphs, which match the given conditions. 
- * - * @param graphName The pattern to which the name has to match, null if every name is matched - * @return A collection of all graphs matching - */ - public abstract List getGraphs( Pattern graphName ); /** diff --git a/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalRelationalCatalog.java b/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalRelationalCatalog.java index 422d0d6c11..a3d432403c 100644 --- a/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalRelationalCatalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalRelationalCatalog.java @@ -18,71 +18,21 @@ import java.util.List; import java.util.Map; -import javax.annotation.Nullable; import org.polypheny.db.algebra.AlgCollation; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.type.AlgDataType; -import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.entity.CatalogConstraint; -import org.polypheny.db.catalog.entity.CatalogForeignKey; -import org.polypheny.db.catalog.entity.CatalogIndex; -import org.polypheny.db.catalog.entity.CatalogKey; -import org.polypheny.db.catalog.entity.CatalogPrimaryKey; import org.polypheny.db.catalog.entity.CatalogView; import org.polypheny.db.catalog.entity.MaterializedCriteria; -import org.polypheny.db.catalog.entity.logical.LogicalColumn; -import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.exceptions.GenericCatalogException; -import org.polypheny.db.catalog.exceptions.UnknownColumnException; -import org.polypheny.db.catalog.exceptions.UnknownConstraintException; -import org.polypheny.db.catalog.exceptions.UnknownForeignKeyException; -import org.polypheny.db.catalog.exceptions.UnknownIndexException; -import org.polypheny.db.catalog.exceptions.UnknownSchemaException; -import org.polypheny.db.catalog.exceptions.UnknownTableException; import org.polypheny.db.catalog.logistic.Collation; import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.logistic.ForeignKeyOption; import org.polypheny.db.catalog.logistic.IndexType; -import org.polypheny.db.catalog.logistic.Pattern; import org.polypheny.db.languages.QueryLanguage; import org.polypheny.db.type.PolyType; public interface LogicalRelationalCatalog extends LogicalCatalog { - /** - * Get all tables of the specified schema which fit to the specified filters. - * getTables(xid, databaseName, null, null, null) returns all tables of the database. - * - * @param name Pattern for the table name. null returns all. - * @return List of tables which fit to the specified filters. If there is no table which meets the criteria, an empty list is returned. - */ - public abstract List getTables( @Nullable Pattern name ); - - /** - * Returns the table with the given id - * - * @param tableId The id of the table - * @return The table - */ - public abstract LogicalTable getTable( long tableId ); - - /** - * Returns the table with the given name in the specified schema. - * - * @param tableName The name of the table - * @return The table - * @throws UnknownTableException If there is no table with this name in the specified database and schema. 
- */ - public abstract LogicalTable getTable( String tableName ) throws UnknownTableException; - - /** - * Returns the table which is associated with a given partitionId - * - * @param partitionId to use for lookup - * @return CatalogEntity that contains partitionId - */ - public abstract LogicalTable getTableFromPartition( long partitionId ); - /** * Adds a table to a specified schema. * @@ -158,69 +108,6 @@ public interface LogicalRelationalCatalog extends LogicalCatalog { */ public abstract void setPrimaryKey( long tableId, Long keyId ); - - /** - * Gets a collection of all keys. - * - * @return The keys - */ - public abstract List getKeys(); - - - /** - * Get all keys for a given table. - * - * @param tableId The id of the table for which the keys are returned - * @return The collection of keys - */ - public abstract List getTableKeys( long tableId ); - - - /** - * Get all columns of the specified table. - * - * @param tableId The id of the table - * @return List of columns which fit to the specified filters. If there is no column which meets the criteria, an empty list is returned. - */ - public abstract List getColumns( long tableId ); - - /** - * Get all columns of the specified database which fit to the specified filter patterns. - * getColumns(xid, databaseName, null, null, null) returns all columns of the database. - * - * @param tableNamePattern Pattern for the table name. null returns all. - * @param columnNamePattern Pattern for the column name. null returns all. - * @return List of columns which fit to the specified filters. If there is no column which meets the criteria, an empty list is returned. - */ - public abstract List getColumns( @Nullable Pattern tableNamePattern, @Nullable Pattern columnNamePattern ); - - /** - * Returns the column with the specified id. - * - * @param columnId The id of the column - * @return A CatalogColumn - */ - public abstract LogicalColumn getColumn( long columnId ); - - /** - * Returns the column with the specified name in the specified table of the specified database and schema. - * - * @param tableId The id of the table - * @param columnName The name of the column - * @return A CatalogColumn - * @throws UnknownColumnException If there is no column with this name in the specified table of the database and schema. - */ - public abstract LogicalColumn getColumn( long tableId, String columnName ) throws UnknownColumnException; - - /** - * Returns the column with the specified name in the specified table of the specified database and schema. - * - * @param tableName The name of the table - * @param columnName The name of the column - * @return A CatalogColumn - */ - public abstract LogicalColumn getColumn( String tableName, String columnName ) throws UnknownColumnException, UnknownSchemaException, UnknownTableException; - /** * Adds a column. * @@ -236,6 +123,7 @@ public interface LogicalRelationalCatalog extends LogicalCatalog { */ public abstract long addColumn( String name, long tableId, int position, PolyType type, PolyType collectionsType, Integer length, Integer scale, Integer dimension, Integer cardinality, boolean nullable, Collation collation ); + /** * Renames a column * @@ -277,14 +165,6 @@ public interface LogicalRelationalCatalog extends LogicalCatalog { */ public abstract void setCollation( long columnId, Collation collation ); - /** - * Checks if there is a column with the specified name in the specified table. 
- * - * @param tableId The id of the table - * @param columnName The name to check for - * @return true if there is a column with this name, false if not. - */ - public abstract boolean checkIfExistsColumn( long tableId, String columnName ); /** * Delete the specified column. This also deletes a default value in case there is one defined for this column. @@ -309,45 +189,7 @@ public interface LogicalRelationalCatalog extends LogicalCatalog { */ public abstract void deleteDefaultValue( long columnId ); - /** - * Returns a specified primary key - * - * @param key The id of the primary key - * @return The primary key - */ - public abstract CatalogPrimaryKey getPrimaryKey( long key ); - - /** - * Check whether a key is a primary key - * - * @param keyId The id of the key - * @return Whether the key is a primary key - */ - public abstract boolean isPrimaryKey( long keyId ); - - /** - * Check whether a key is a foreign key - * - * @param keyId The id of the key - * @return Whether the key is a foreign key - */ - public abstract boolean isForeignKey( long keyId ); - - /** - * Check whether a key is an index - * - * @param keyId The id of the key - * @return Whether the key is an index - */ - public abstract boolean isIndex( long keyId ); - /** - * Check whether a key is a constraint - * - * @param keyId The id of the key - * @return Whether the key is a constraint - */ - public abstract boolean isConstraint( long keyId ); /** * Adds a primary key @@ -357,56 +199,6 @@ public interface LogicalRelationalCatalog extends LogicalCatalog { */ public abstract void addPrimaryKey( long tableId, List columnIds ) throws GenericCatalogException; - /** - * Returns all (imported) foreign keys of a specified table - * - * @param tableId The id of the table - * @return List of foreign keys - */ - public abstract List getForeignKeys( long tableId ); - - /** - * Returns all foreign keys that reference the specified table (exported keys). - * - * @param tableId The id of the table - * @return List of foreign keys - */ - public abstract List getExportedKeys( long tableId ); - - /** - * Get all constraints of the specified table - * - * @param tableId The id of the table - * @return List of constraints - */ - public abstract List getConstraints( long tableId ); - - - /** - * Gets a collection of constraints for a given key. - * - * @param key The key for which the collection is returned - * @return The collection of constraints - */ - public abstract List getConstraints( CatalogKey key ); - - /** - * Returns the constraint with the specified name in the specified table. - * - * @param tableId The id of the table - * @param constraintName The name of the constraint - * @return The constraint - */ - public abstract CatalogConstraint getConstraint( long tableId, String constraintName ) throws UnknownConstraintException; - - /** - * Return the foreign key with the specified name from the specified table - * - * @param tableId The id of the table - * @param foreignKeyName The name of the foreign key - * @return The foreign key - */ - public abstract CatalogForeignKey getForeignKey( long tableId, String foreignKeyName ) throws UnknownForeignKeyException; /** * Adds a unique foreign key constraint. @@ -468,14 +260,6 @@ public interface LogicalRelationalCatalog extends LogicalCatalog { public abstract void updateMaterializedViewRefreshTime( long materializedViewId ); - /** - * Returns all tables which are in need of special periodic treatment. 
- * - * @return List of tables which need to be periodically processed - */ - public abstract List getTablesForPeriodicProcessing(); - - /** * Flags the table for deletion. * This method should be executed on a partitioned table before we run a DROP TABLE statement. @@ -495,64 +279,6 @@ public interface LogicalRelationalCatalog extends LogicalCatalog { */ public abstract boolean isTableFlaggedForDeletion( long tableId ); - /** - * Gets a collection of index for the given key. - * - * @param key The key for which the collection is returned - * @return The collection of indexes - */ - public abstract List getIndexes( CatalogKey key ); - - /** - * Gets a collection of foreign keys for a given {@link Catalog Key}. - * - * @param key The key for which the collection is returned - * @return The collection foreign keys - */ - public abstract List getForeignKeys( CatalogKey key ); - - /** - * Returns all indexes of a table - * - * @param tableId The id of the table - * @param onlyUnique true if only indexes for unique values are returned. false if all indexes are returned. - * @return List of indexes - */ - public abstract List getIndexes( long tableId, boolean onlyUnique ); - - /** - * Returns the index with the specified name in the specified table - * - * @param tableId The id of the table - * @param indexName The name of the index - * @return The Index - */ - public abstract CatalogIndex getIndex( long tableId, String indexName ) throws UnknownIndexException; - - /** - * Checks if there is an index with the specified name in the specified table. - * - * @param tableId The id of the table - * @param indexName The name to check for - * @return true if there is an index with this name, false if not. - */ - public abstract boolean checkIfExistsIndex( long tableId, String indexName ); - - /** - * Returns the index with the specified id - * - * @param indexId The id of the index - * @return The Index - */ - public abstract CatalogIndex getIndex( long indexId ); - - /** - * Returns list of all indexes - * - * @return List of indexes - */ - public abstract List getIndexes(); - /** * Adds an index over the specified columns * diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/AllocSnapshot.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/AllocSnapshot.java new file mode 100644 index 0000000000..a09c0c4abe --- /dev/null +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/AllocSnapshot.java @@ -0,0 +1,40 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.db.catalog.snapshot; + +import org.polypheny.db.catalog.entity.allocation.AllocationEntity; + +public interface AllocSnapshot { + + //// ALLOCATION ENTITIES + + // AllocationTable getAllocTable( long id ); + + // AllocationCollection getAllocCollection( long id ); + + // AllocationGraph getAllocGraph( long id ); + + AllocationEntity getAllocEntity( long id ); + + //// LOGISTICS + + boolean isHorizontalPartitioned( long id ); + + + boolean isVerticalPartitioned( long id ); + +} diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalDocSnapshot.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalDocSnapshot.java new file mode 100644 index 0000000000..7459c54260 --- /dev/null +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalDocSnapshot.java @@ -0,0 +1,53 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.catalog.snapshot; + +import java.util.List; +import org.polypheny.db.catalog.entity.logical.LogicalCollection; +import org.polypheny.db.catalog.logistic.Pattern; + +public interface LogicalDocSnapshot { + //// DOCUMENT + + /** + * Get the collection with the given id. + * + * @param collectionId The id of the collection + * @return The requested collection + */ + public abstract LogicalCollection getCollection( long collectionId ); + + /** + * Get a collection of collections which match the given naming pattern. + * + * @param namePattern The naming pattern of the collection itself, null if all are matched + * @return collection of collections matching conditions + */ + public abstract List getCollections( Pattern namePattern ); + + + @Deprecated + LogicalCollection getLogicalCollection( List names ); + + LogicalCollection getLogicalCollection( long id ); + + LogicalCollection getLogicalCollection( long namespaceId, String name ); + + List getLogicalCollections( long namespaceId, Pattern name ); + + +} diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalGraphSnapshot.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalGraphSnapshot.java new file mode 100644 index 0000000000..e5ebb4488f --- /dev/null +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalGraphSnapshot.java @@ -0,0 +1,53 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.db.catalog.snapshot; + +import java.util.List; +import org.polypheny.db.catalog.entity.logical.LogicalGraph; +import org.polypheny.db.catalog.logistic.Pattern; + +public interface LogicalGraphSnapshot { + + //// GRAPH + + /** + * Returns an existing graph. + * + * @param id The id of the graph to return + * @return The graph entity with the provided id + */ + public abstract LogicalGraph getGraph( long id ); + + /** + * Get a collection of all graphs, which match the given conditions. + * + * @param graphName The pattern to which the name has to match, null if every name is matched + * @return A collection of all graphs matching + */ + public abstract List getGraphs( Pattern graphName ); + + @Deprecated + LogicalGraph getLogicalGraph( List names ); + + + LogicalGraph getLogicalGraph( long id ); + + LogicalGraph getLogicalGraph( long namespaceId, String name ); + + List getLogicalGraphs( long namespaceId, Pattern name ); + +} diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalRelSnapshot.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalRelSnapshot.java new file mode 100644 index 0000000000..27c66f19f0 --- /dev/null +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalRelSnapshot.java @@ -0,0 +1,301 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.catalog.snapshot; + +import java.util.List; +import javax.annotation.Nullable; +import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.entity.CatalogConstraint; +import org.polypheny.db.catalog.entity.CatalogForeignKey; +import org.polypheny.db.catalog.entity.CatalogIndex; +import org.polypheny.db.catalog.entity.CatalogKey; +import org.polypheny.db.catalog.entity.CatalogPrimaryKey; +import org.polypheny.db.catalog.entity.logical.LogicalColumn; +import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.catalog.exceptions.UnknownColumnException; +import org.polypheny.db.catalog.exceptions.UnknownConstraintException; +import org.polypheny.db.catalog.exceptions.UnknownForeignKeyException; +import org.polypheny.db.catalog.exceptions.UnknownIndexException; +import org.polypheny.db.catalog.exceptions.UnknownSchemaException; +import org.polypheny.db.catalog.exceptions.UnknownTableException; +import org.polypheny.db.catalog.logistic.Pattern; + +public interface LogicalRelSnapshot { + //// RELATIONAL + + /** + * Get all tables of the specified schema which fit to the specified filters. + * getTables(xid, databaseName, null, null, null) returns all tables of the database. + * + * @param name Pattern for the table name. null returns all. + * @return List of tables which fit to the specified filters. If there is no table which meets the criteria, an empty list is returned. 
+ */ + public abstract List getTables( @Nullable Pattern name ); + + /** + * Returns the table with the given id + * + * @param tableId The id of the table + * @return The table + */ + public abstract LogicalTable getTable( long tableId ); + + /** + * Returns the table with the given name in the specified schema. + * + * @param tableName The name of the table + * @return The table + * @throws UnknownTableException If there is no table with this name in the specified database and schema. + */ + public abstract LogicalTable getTable( String tableName ) throws UnknownTableException; + + /** + * Returns the table which is associated with a given partitionId + * + * @param partitionId to use for lookup + * @return CatalogEntity that contains partitionId + */ + public abstract LogicalTable getTableFromPartition( long partitionId ); + + + /** + * Gets a collection of all keys. + * + * @return The keys + */ + public abstract List getKeys(); + + + /** + * Get all keys for a given table. + * + * @param tableId The id of the table for which the keys are returned + * @return The collection of keys + */ + public abstract List getTableKeys( long tableId ); + + + /** + * Get all columns of the specified table. + * + * @param tableId The id of the table + * @return List of columns which fit to the specified filters. If there is no column which meets the criteria, an empty list is returned. + */ + public abstract List getColumns( long tableId ); + + /** + * Get all columns of the specified database which fit to the specified filter patterns. + * getColumns(xid, databaseName, null, null, null) returns all columns of the database. + * + * @param tableNamePattern Pattern for the table name. null returns all. + * @param columnNamePattern Pattern for the column name. null returns all. + * @return List of columns which fit to the specified filters. If there is no column which meets the criteria, an empty list is returned. + */ + public abstract List getColumns( @Nullable Pattern tableNamePattern, @Nullable Pattern columnNamePattern ); + + /** + * Returns the column with the specified id. + * + * @param columnId The id of the column + * @return A CatalogColumn + */ + public abstract LogicalColumn getColumn( long columnId ); + + /** + * Returns the column with the specified name in the specified table of the specified database and schema. + * + * @param tableId The id of the table + * @param columnName The name of the column + * @return A CatalogColumn + * @throws UnknownColumnException If there is no column with this name in the specified table of the database and schema. + */ + public abstract LogicalColumn getColumn( long tableId, String columnName ) throws UnknownColumnException; + + /** + * Returns the column with the specified name in the specified table of the specified database and schema. + * + * @param tableName The name of the table + * @param columnName The name of the column + * @return A CatalogColumn + */ + public abstract LogicalColumn getColumn( String tableName, String columnName ) throws UnknownColumnException, UnknownSchemaException, UnknownTableException; + + /** + * Checks if there is a column with the specified name in the specified table. + * + * @param tableId The id of the table + * @param columnName The name to check for + * @return true if there is a column with this name, false if not. 
+ */
+ public abstract boolean checkIfExistsColumn( long tableId, String columnName );
+
+ /**
+ * Returns a specified primary key
+ *
+ * @param key The id of the primary key
+ * @return The primary key
+ */
+ public abstract CatalogPrimaryKey getPrimaryKey( long key );
+
+ /**
+ * Check whether a key is a primary key
+ *
+ * @param keyId The id of the key
+ * @return Whether the key is a primary key
+ */
+ public abstract boolean isPrimaryKey( long keyId );
+
+ /**
+ * Check whether a key is a foreign key
+ *
+ * @param keyId The id of the key
+ * @return Whether the key is a foreign key
+ */
+ public abstract boolean isForeignKey( long keyId );
+
+ /**
+ * Check whether a key is an index
+ *
+ * @param keyId The id of the key
+ * @return Whether the key is an index
+ */
+ public abstract boolean isIndex( long keyId );
+
+ /**
+ * Check whether a key is a constraint
+ *
+ * @param keyId The id of the key
+ * @return Whether the key is a constraint
+ */
+ public abstract boolean isConstraint( long keyId );
+
+ /**
+ * Returns all (imported) foreign keys of a specified table
+ *
+ * @param tableId The id of the table
+ * @return List of foreign keys
+ */
+ public abstract List getForeignKeys( long tableId );
+
+ /**
+ * Returns all foreign keys that reference the specified table (exported keys).
+ *
+ * @param tableId The id of the table
+ * @return List of foreign keys
+ */
+ public abstract List getExportedKeys( long tableId );
+
+ /**
+ * Get all constraints of the specified table
+ *
+ * @param tableId The id of the table
+ * @return List of constraints
+ */
+ public abstract List getConstraints( long tableId );
+
+
+ /**
+ * Gets a collection of constraints for a given key.
+ *
+ * @param key The key for which the collection is returned
+ * @return The collection of constraints
+ */
+ public abstract List getConstraints( CatalogKey key );
+
+ /**
+ * Returns the constraint with the specified name in the specified table.
+ *
+ * @param tableId The id of the table
+ * @param constraintName The name of the constraint
+ * @return The constraint
+ */
+ public abstract CatalogConstraint getConstraint( long tableId, String constraintName ) throws UnknownConstraintException;
+
+ /**
+ * Return the foreign key with the specified name from the specified table
+ *
+ * @param tableId The id of the table
+ * @param foreignKeyName The name of the foreign key
+ * @return The foreign key
+ */
+ public abstract CatalogForeignKey getForeignKey( long tableId, String foreignKeyName ) throws UnknownForeignKeyException;
+
+ /**
+ * Gets a collection of indexes for the given key.
+ *
+ * @param key The key for which the collection is returned
+ * @return The collection of indexes
+ */
+ public abstract List getIndexes( CatalogKey key );
+
+ /**
+ * Gets a collection of foreign keys for a given {@link CatalogKey}.
+ *
+ * @param key The key for which the collection is returned
+ * @return The collection of foreign keys
+ */
+ public abstract List getForeignKeys( CatalogKey key );
+
+ /**
+ * Returns all indexes of a table
+ *
+ * @param tableId The id of the table
+ * @param onlyUnique true if only indexes for unique values are returned. false if all indexes are returned.
+ * @return List of indexes + */ + public abstract List getIndexes( long tableId, boolean onlyUnique ); + + /** + * Returns the index with the specified name in the specified table + * + * @param tableId The id of the table + * @param indexName The name of the index + * @return The Index + */ + public abstract CatalogIndex getIndex( long tableId, String indexName ) throws UnknownIndexException; + + /** + * Checks if there is an index with the specified name in the specified table. + * + * @param tableId The id of the table + * @param indexName The name to check for + * @return true if there is an index with this name, false if not. + */ + public abstract boolean checkIfExistsIndex( long tableId, String indexName ); + + /** + * Returns the index with the specified id + * + * @param indexId The id of the index + * @return The Index + */ + public abstract CatalogIndex getIndex( long indexId ); + + //// LOGICAL ENTITIES + @Deprecated + LogicalTable getLogicalTable( List names ); + + + LogicalTable getLogicalTable( long id ); + + LogicalTable getLogicalTable( long namespaceId, String name ); + + List getLogicalTables( long namespaceId, Pattern name ); + + LogicalColumn getLogicalColumn( long id ); + +} diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/PhysicalSnapshot.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/PhysicalSnapshot.java new file mode 100644 index 0000000000..2e3efa1b6f --- /dev/null +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/PhysicalSnapshot.java @@ -0,0 +1,40 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.db.catalog.snapshot; + +import org.polypheny.db.catalog.entity.physical.PhysicalCollection; +import org.polypheny.db.catalog.entity.physical.PhysicalGraph; +import org.polypheny.db.catalog.entity.physical.PhysicalTable; + +public interface PhysicalSnapshot { + + //// PHYSICAL ENTITIES + + PhysicalTable getPhysicalTable( long id ); + + PhysicalTable getPhysicalTable( long logicalId, long adapterId ); + + PhysicalCollection getPhysicalCollection( long id ); + + PhysicalCollection getPhysicalCollection( long logicalId, long adapterId ); + + + PhysicalGraph getPhysicalGraph( long id ); + + PhysicalGraph getPhysicalGraph( long logicalId, long adapterId ); + +} diff --git a/core/src/main/java/org/polypheny/db/prepare/PolyphenyDbCatalogReader.java b/core/src/main/java/org/polypheny/db/prepare/PolyphenyDbCatalogReader.java index 982cdbe9db..31af4164bc 100644 --- a/core/src/main/java/org/polypheny/db/prepare/PolyphenyDbCatalogReader.java +++ b/core/src/main/java/org/polypheny/db/prepare/PolyphenyDbCatalogReader.java @@ -37,6 +37,7 @@ import java.util.List; import java.util.stream.Collectors; import lombok.NonNull; +import org.apache.commons.lang.NotImplementedException; import org.polypheny.db.algebra.constant.FunctionCategory; import org.polypheny.db.algebra.constant.MonikerType; import org.polypheny.db.algebra.constant.Syntax; @@ -73,30 +74,25 @@ public PolyphenyDbCatalogReader( @NonNull Snapshot snapshot, AlgDataTypeFactory @Override public LogicalTable getTable( final List names ) { - return snapshot.getLogicalTable( names ); + throw new NotImplementedException(); } @Override public LogicalCollection getCollection( final List names ) { - return snapshot.getLogicalCollection( names ); + throw new NotImplementedException(); } @Override public LogicalGraph getGraph( final String name ) { - return snapshot.getLogicalGraph( List.of( name ) ); + throw new NotImplementedException(); } @Override public AlgDataType getNamedType( Identifier typeName ) { - LogicalTable table = snapshot.getLogicalTable( typeName.getNames() ); - if ( table != null ) { - return table.getRowType(); - } else { - return null; - } + throw new NotImplementedException(); } diff --git a/core/src/test/java/org/polypheny/db/catalog/MockCatalog.java b/core/src/test/java/org/polypheny/db/catalog/MockCatalog.java index 9231e40c2a..43ae377734 100644 --- a/core/src/test/java/org/polypheny/db/catalog/MockCatalog.java +++ b/core/src/test/java/org/polypheny/db/catalog/MockCatalog.java @@ -16,25 +16,33 @@ package org.polypheny.db.catalog; +import java.beans.PropertyChangeListener; import java.util.List; import java.util.Map; -import lombok.NonNull; import org.apache.commons.lang3.NotImplementedException; import org.polypheny.db.algebra.AlgNode; -import org.polypheny.db.catalog.entity.CatalogAdapter; +import org.polypheny.db.catalog.catalogs.AllocationDocumentCatalog; +import org.polypheny.db.catalog.catalogs.AllocationGraphCatalog; +import org.polypheny.db.catalog.catalogs.AllocationRelationalCatalog; +import org.polypheny.db.catalog.catalogs.LogicalDocumentCatalog; +import org.polypheny.db.catalog.catalogs.LogicalGraphCatalog; +import org.polypheny.db.catalog.catalogs.LogicalRelationalCatalog; +import org.polypheny.db.catalog.catalogs.PhysicalCatalog; import org.polypheny.db.catalog.entity.CatalogAdapter.AdapterType; import org.polypheny.db.catalog.entity.CatalogDatabase; -import org.polypheny.db.catalog.entity.CatalogQueryInterface; -import org.polypheny.db.catalog.entity.CatalogUser; import 
org.polypheny.db.catalog.entity.LogicalNamespace; +import org.polypheny.db.catalog.entity.logical.LogicalEntity; import org.polypheny.db.catalog.entity.physical.PhysicalEntity; import org.polypheny.db.catalog.exceptions.NoTablePrimaryKeyException; -import org.polypheny.db.catalog.exceptions.UnknownAdapterException; -import org.polypheny.db.catalog.exceptions.UnknownQueryInterfaceException; import org.polypheny.db.catalog.exceptions.UnknownSchemaException; -import org.polypheny.db.catalog.exceptions.UnknownUserException; import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.catalog.logistic.Pattern; +import org.polypheny.db.catalog.snapshot.AllocSnapshot; +import org.polypheny.db.catalog.snapshot.LogicalDocSnapshot; +import org.polypheny.db.catalog.snapshot.LogicalGraphSnapshot; +import org.polypheny.db.catalog.snapshot.LogicalRelSnapshot; +import org.polypheny.db.catalog.snapshot.PhysicalSnapshot; +import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.transaction.Transaction; @@ -46,214 +54,239 @@ */ public abstract class MockCatalog extends Catalog { - @Override - public void commit() throws NoTablePrimaryKeyException { + public LogicalRelationalCatalog getLogicalRel( long namespaceId ) { throw new NotImplementedException(); } @Override - public long addUser( String name, String password ) { + public LogicalDocumentCatalog getLogicalDoc( long namespaceId ) { throw new NotImplementedException(); } @Override - public void rollback() { + public LogicalGraphCatalog getLogicalGraph( long namespaceId ) { throw new NotImplementedException(); } @Override - public void validateColumns() { + public AllocationRelationalCatalog getAllocRel( long namespaceId ) { throw new NotImplementedException(); } @Override - public void restoreColumnPlacements( Transaction transaction ) { + public AllocationDocumentCatalog getAllocDoc( long namespaceId ) { throw new NotImplementedException(); } @Override - public Map getNodeInfo() { + public AllocationGraphCatalog getAllocGraph( long namespaceId ) { throw new NotImplementedException(); } @Override - public void restoreViews( Transaction transaction ) { + public LogicalEntity getLogicalEntity( String entityName ) { throw new NotImplementedException(); } - private List getDatabases( Pattern pattern ) { + @Override + public LogicalEntity getLogicalEntity( long id ) { throw new NotImplementedException(); } - private CatalogDatabase getDatabase( String databaseName ) { + @Override + public PhysicalCatalog getPhysical( long namespaceId ) { throw new NotImplementedException(); } - private CatalogDatabase getDatabase( long databaseId ) { + @Override + public PhysicalEntity getPhysicalEntity( long id ) { throw new NotImplementedException(); } @Override - public @NonNull List getNamespaces( Pattern name ) { - throw new NotImplementedException(); + public void addObserver( PropertyChangeListener listener ) { + super.addObserver( listener ); } - private List getSchemas( long databaseId, Pattern schemaNamePattern ) { - throw new NotImplementedException(); + @Override + public void removeObserver( PropertyChangeListener listener ) { + super.removeObserver( listener ); } @Override - public LogicalNamespace getNamespace( long id ) { + public Snapshot getSnapshot( long namespaceId ) { throw new NotImplementedException(); } @Override - public LogicalNamespace getNamespace( String name ) { + public LogicalDocSnapshot getDocSnapshot( long namespaceId ) { throw new NotImplementedException(); } - private LogicalNamespace getNamespace( long 
databaseId, String schemaName ) throws UnknownSchemaException { + @Override + public LogicalGraphSnapshot getGraphSnapshot( long namespaceId ) { throw new NotImplementedException(); } @Override - public long addNamespace( String name, NamespaceType namespaceType, boolean caseSensitive ) { + public LogicalRelSnapshot getRelSnapshot( long namespaceId ) { throw new NotImplementedException(); } @Override - public boolean checkIfExistsNamespace( String name ) { + public PhysicalSnapshot getPhysicalSnapshot() { throw new NotImplementedException(); } @Override - public void renameNamespace( long schemaId, String name ) { + public AllocSnapshot getAllocSnapshot() { throw new NotImplementedException(); } @Override - public void deleteNamespace( long id ) { + public void commit() throws NoTablePrimaryKeyException { throw new NotImplementedException(); } @Override - public CatalogUser getUser( String name ) throws UnknownUserException { + public long addUser( String name, String password ) { throw new NotImplementedException(); } @Override - public CatalogUser getUser( long id ) { + public void rollback() { throw new NotImplementedException(); } @Override - public List getAdapters() { + public void validateColumns() { throw new NotImplementedException(); } @Override - public CatalogAdapter getAdapter( String uniqueName ) throws UnknownAdapterException { + public void restoreColumnPlacements( Transaction transaction ) { throw new NotImplementedException(); } @Override - public CatalogAdapter getAdapter( long id ) { + public Map getNodeInfo() { throw new NotImplementedException(); } @Override - public boolean checkIfExistsAdapter( long id ) { + public void restoreViews( Transaction transaction ) { throw new NotImplementedException(); } - @Override - public long addAdapter( String uniqueName, String clazz, AdapterType type, Map settings ) { + private List getDatabases( Pattern pattern ) { + throw new NotImplementedException(); + } + + + private CatalogDatabase getDatabase( String databaseName ) { + throw new NotImplementedException(); + } + + + private CatalogDatabase getDatabase( long databaseId ) { + throw new NotImplementedException(); + } + + + + private List getSchemas( long databaseId, Pattern schemaNamePattern ) { + throw new NotImplementedException(); + } + + + private LogicalNamespace getNamespace( long databaseId, String schemaName ) throws UnknownSchemaException { throw new NotImplementedException(); } @Override - public void updateAdapterSettings( long adapterId, Map newSettings ) { + public long addNamespace( String name, NamespaceType namespaceType, boolean caseSensitive ) { throw new NotImplementedException(); } + @Override - public void deleteAdapter( long id ) { + public void renameNamespace( long schemaId, String name ) { throw new NotImplementedException(); } @Override - public List getQueryInterfaces() { + public void deleteNamespace( long id ) { throw new NotImplementedException(); } @Override - public CatalogQueryInterface getQueryInterface( String uniqueName ) throws UnknownQueryInterfaceException { + public long addAdapter( String uniqueName, String clazz, AdapterType type, Map settings ) { throw new NotImplementedException(); } @Override - public CatalogQueryInterface getQueryInterface( long id ) { + public void updateAdapterSettings( long adapterId, Map newSettings ) { throw new NotImplementedException(); } @Override - public long addQueryInterface( String uniqueName, String clazz, Map settings ) { + public void deleteAdapter( long id ) { throw new NotImplementedException(); } 
@Override - public void deleteQueryInterface( long id ) { + public long addQueryInterface( String uniqueName, String clazz, Map settings ) { throw new NotImplementedException(); } @Override - public void close() { + public void deleteQueryInterface( long id ) { throw new NotImplementedException(); } @Override - public void clear() { + public void close() { throw new NotImplementedException(); } @Override - public List> getPhysicalsOnAdapter( long tableId ) { + public void clear() { throw new NotImplementedException(); } diff --git a/core/src/test/java/org/polypheny/db/docker/MockCatalogDocker.java b/core/src/test/java/org/polypheny/db/docker/MockCatalogDocker.java index a1b70175b8..792edc792f 100644 --- a/core/src/test/java/org/polypheny/db/docker/MockCatalogDocker.java +++ b/core/src/test/java/org/polypheny/db/docker/MockCatalogDocker.java @@ -43,7 +43,7 @@ public long addAdapter( String uniqueName, String clazz, AdapterType type, Map collections; - - ConcurrentHashMap names; @Getter @Serialize public LogicalNamespace logicalNamespace; @@ -67,8 +63,6 @@ public DocumentCatalog( this.logicalNamespace = logicalNamespace; this.collections = new PusherMap<>( collections ); - this.names = new ConcurrentHashMap<>(); - this.collections.addRowConnection( this.names, ( k, v ) -> logicalNamespace.caseSensitive ? v.name : v.name.toLowerCase(), ( k, v ) -> v ); } @@ -83,42 +77,6 @@ public DocumentCatalog copy() { } - @Override - public boolean checkIfExistsEntity( String entityName ) { - return false; - } - - - @Override - public boolean checkIfExistsEntity( long tableId ) { - return false; - } - - - @Override - public LogicalEntity getEntity( String name ) { - return names.get( name ); - } - - - @Override - public LogicalEntity getEntity( long id ) { - return collections.get( id ); - } - - - @Override - public LogicalCollection getCollection( long collectionId ) { - return null; - } - - - @Override - public List getCollections( Pattern namePattern ) { - return null; - } - - @Override public long addCollection( Long id, String name, EntityType entity, boolean modifiable ) { return 0; diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/FullSnapshot.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/FullSnapshot.java index 65f4824c74..c402b9433b 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/FullSnapshot.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/FullSnapshot.java @@ -48,8 +48,8 @@ public class FullSnapshot implements Snapshot { @Getter long id; - Map physicalCatalogs; - Map logicalCatalogs; + PhysicalCatalog physicalCatalog; + LogicalCatalog logicalCatalog; Map allocationCatalogs; ImmutableMap namespaceIds; @@ -73,18 +73,18 @@ public class FullSnapshot implements Snapshot { ImmutableMap graphName; - public FullSnapshot( long id, Map logicalCatalogs, Map allocationCatalogs, Map physicalCatalogs ) { + public FullSnapshot( long id, LogicalCatalog logicalCatalog, Map allocationCatalogs, PhysicalCatalog physicalCatalog ) { this.id = id; - this.logicalCatalogs = logicalCatalogs; + this.logicalCatalog = logicalCatalog; this.allocationCatalogs = allocationCatalogs; - this.physicalCatalogs = physicalCatalogs; + this.physicalCatalog = physicalCatalog; namespaceIds = ImmutableMap.copyOf( logicalCatalogs.values().stream().map( LogicalCatalog::getLogicalNamespace ).collect( Collectors.toMap( n -> n.id, n -> n ) ) ); namespaceNames = ImmutableMap.copyOf( 
namespaceIds.values().stream().collect( Collectors.toMap( n -> n.name, n -> n ) ) ); - tableIds = ImmutableMap.copyOf( logicalCatalogs.values().stream() + tableIds = logicalCatalogs.values().stream() .filter( c -> c.getLogicalNamespace().namespaceType == NamespaceType.RELATIONAL ) - .map( c -> (LogicalRelationalCatalog) c ).flatMap( c -> c. ) ) + .map( c -> (LogicalRelationalCatalog) c ).flatMap( c -> c. ) } From b67a1c2c9d851a20063bd8d110be83777ec3d116 Mon Sep 17 00:00:00 2001 From: datomo Date: Wed, 8 Mar 2023 15:37:09 +0100 Subject: [PATCH 039/436] moved into separate snapshots per model --- .../db/catalog/snapshot/Snapshot.java | 223 ++++++++++++------ 1 file changed, 149 insertions(+), 74 deletions(-) diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/Snapshot.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/Snapshot.java index ee76453c9d..bff0b8463c 100644 --- a/core/src/main/java/org/polypheny/db/catalog/snapshot/Snapshot.java +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/Snapshot.java @@ -17,24 +17,26 @@ package org.polypheny.db.catalog.snapshot; import java.util.List; +import lombok.NonNull; import org.apache.calcite.linq4j.tree.Expression; import org.apache.calcite.linq4j.tree.Expressions; import org.polypheny.db.algebra.constant.FunctionCategory; import org.polypheny.db.algebra.constant.Syntax; import org.polypheny.db.algebra.operators.OperatorTable; import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.entity.CatalogAdapter; import org.polypheny.db.catalog.entity.CatalogEntity; +import org.polypheny.db.catalog.entity.CatalogIndex; +import org.polypheny.db.catalog.entity.CatalogQueryInterface; +import org.polypheny.db.catalog.entity.CatalogUser; import org.polypheny.db.catalog.entity.LogicalNamespace; -import org.polypheny.db.catalog.entity.allocation.AllocationCollection; -import org.polypheny.db.catalog.entity.allocation.AllocationGraph; -import org.polypheny.db.catalog.entity.allocation.AllocationTable; -import org.polypheny.db.catalog.entity.logical.LogicalCollection; -import org.polypheny.db.catalog.entity.logical.LogicalColumn; -import org.polypheny.db.catalog.entity.logical.LogicalGraph; +import org.polypheny.db.catalog.entity.allocation.AllocationEntity; +import org.polypheny.db.catalog.entity.logical.LogicalEntity; import org.polypheny.db.catalog.entity.logical.LogicalTable; -import org.polypheny.db.catalog.entity.physical.PhysicalCollection; -import org.polypheny.db.catalog.entity.physical.PhysicalGraph; -import org.polypheny.db.catalog.entity.physical.PhysicalTable; +import org.polypheny.db.catalog.entity.physical.PhysicalEntity; +import org.polypheny.db.catalog.exceptions.UnknownAdapterException; +import org.polypheny.db.catalog.exceptions.UnknownQueryInterfaceException; +import org.polypheny.db.catalog.exceptions.UnknownUserException; import org.polypheny.db.catalog.logistic.Pattern; import org.polypheny.db.config.RuntimeConfig; import org.polypheny.db.nodes.Identifier; @@ -52,17 +54,126 @@ default Expression getSnapshotExpression( long id ) { return Expressions.call( Catalog.CATALOG_EXPRESSION, "getSnapshot", Expressions.constant( id ) ); } - //// NAMESPACES - LogicalNamespace getNamespace( long id ); - - LogicalNamespace getNamespace( String name ); - - List getNamespaces( Pattern name ); + /** + * Get all schemas which fit to the specified filter pattern. + * getNamespaces(xid, null, null) returns all schemas of all databases. + * + * @param name Pattern for the schema name. null returns all. 
+ * @return List of schemas which fit to the specified filter. If there is no schema which meets the criteria, an empty list is returned.
+ */
+ public abstract @NonNull List getNamespaces( Pattern name );
+
+ /**
+ * Returns the schema with the specified id.
+ *
+ * @param id The id of the schema
+ * @return The schema
+ */
+ public abstract LogicalNamespace getNamespace( long id );
+
+ /**
+ * Returns the schema with the given name in the specified database.
+ *
+ * @param name The name of the schema
+ * @return The schema
+ */
+ public abstract LogicalNamespace getNamespace( String name );
+
+
+ /**
+ * Checks whether a schema with the specified name exists in a database.
+ *
+ * @param name The name of the schema to check
+ * @return True if there is a schema with this name. False if not.
+ */
+ public abstract boolean checkIfExistsNamespace( String name );
+
+
+ /**
+ * Get the user with the specified name
+ *
+ * @param name The name of the user
+ * @return The user
+ * @throws UnknownUserException If there is no user with the specified name
+ */
+ public abstract CatalogUser getUser( String name ) throws UnknownUserException;
+
+ /**
+ * Get the user with the specified id.
+ *
+ * @param id The id of the user
+ * @return The user
+ */
+ public abstract CatalogUser getUser( long id );
+
+ /**
+ * Get list of all adapters
+ *
+ * @return List of adapters
+ */
+ public abstract List getAdapters();
+
+ /**
+ * Get an adapter by its unique name
+ *
+ * @return The adapter
+ */
+ public abstract CatalogAdapter getAdapter( String uniqueName ) throws UnknownAdapterException;
+
+ /**
+ * Get an adapter by its id
+ *
+ * @return The adapter
+ */
+ public abstract CatalogAdapter getAdapter( long id );
+
+ /**
+ * Check if an adapter with the given id exists
+ *
+ * @param id the id of the adapter
+ * @return if the adapter exists
+ */
+ public abstract boolean checkIfExistsAdapter( long id );
+
+
+ /**
+ * Get list of all query interfaces
+ *
+ * @return List of query interfaces
+ */
+ public abstract List getQueryInterfaces();
+
+ /**
+ * Get a query interface by its unique name
+ *
+ * @param uniqueName The unique name of the query interface
+ * @return The CatalogQueryInterface
+ */
+ public abstract CatalogQueryInterface getQueryInterface( String uniqueName ) throws UnknownQueryInterfaceException;
+
+ /**
+ * Get a query interface by its id
+ *
+ * @param id The id of the query interface
+ * @return The CatalogQueryInterface
+ */
+ public abstract CatalogQueryInterface getQueryInterface( long id );
+
+
+ public abstract List> getAllocationsOnAdapter( long id );
+
+
+ public abstract List> getPhysicalsOnAdapter( long adapterId );
+
+
+ public abstract List getIndexes();
+
+
+ public abstract List getTablesForPeriodicProcessing();


    //// ENTITIES

-    CatalogEntity getEntity( long id );

    CatalogEntity getEntity( long namespaceId, String name );

@@ -72,62 +183,6 @@ default Expression getSnapshotExpression( long id ) {
    @Deprecated
    CatalogEntity getEntity( List names );

-    //// LOGICAL ENTITIES
-    @Deprecated
-    LogicalTable getLogicalTable( List names );
-
-    @Deprecated
-    LogicalCollection getLogicalCollection( List names );
-
-    @Deprecated
-    LogicalGraph getLogicalGraph( List names );
-
-    LogicalTable getLogicalTable( long id );
-
-    LogicalTable getLogicalTable( long namespaceId, String name );
-
-    List getLogicalTables( long namespaceId, Pattern name );
-
-    LogicalColumn getLogicalColumn( long id );
-
-    LogicalCollection getLogicalCollection( long id );
-
-    LogicalCollection getLogicalCollection( long
namespaceId, String name ); - - List getLogicalCollections( long namespaceId, Pattern name ); - - LogicalGraph getLogicalGraph( long id ); - - LogicalGraph getLogicalGraph( long namespaceId, String name ); - - List getLogicalGraphs( long namespaceId, Pattern name ); - - //// ALLOCATION ENTITIES - - AllocationTable getAllocTable( long id ); - - AllocationCollection getAllocCollection( long id ); - - AllocationGraph getAllocGraph( long id ); - - //// PHYSICAL ENTITIES - - PhysicalTable getPhysicalTable( long id ); - - PhysicalTable getPhysicalTable( long logicalId, long adapterId ); - - PhysicalCollection getPhysicalCollection( long id ); - - PhysicalCollection getPhysicalCollection( long logicalId, long adapterId ); - - - PhysicalGraph getPhysicalGraph( long id ); - - PhysicalGraph getPhysicalGraph( long logicalId, long adapterId ); - - //// LOGISTICS - - boolean isPartitioned( long id ); //// OTHERS @@ -138,10 +193,30 @@ default void lookupOperatorOverloads( Identifier opName, FunctionCategory catego @Override default List getOperatorList() { - return null; + return List.of(); } - LogicalColumn getColumn( long columnId ); + + /** + * Checks if there is a table with the specified name in the specified schema. + * + * @param entityName The name to check for + * @return true if there is a table with this name, false if not. + */ + public abstract boolean checkIfExistsEntity( String entityName ); + + /** + * Checks if there is a table with the specified id. + * + * @param tableId id of the table + * @return true if there is a table with this id, false if not. + */ + public abstract boolean checkIfExistsEntity( long tableId ); + + LogicalNamespace getLogicalNamespace(); + + + LogicalEntity getEntity( String name ); } From f8a6af945fd5f0a72dd0d791528e6379aa7e5fa2 Mon Sep 17 00:00:00 2001 From: datomo Date: Wed, 8 Mar 2023 21:04:24 +0100 Subject: [PATCH 040/436] removed unused getter methods from relational catalog --- .../db/catalog/logical/RelationalCatalog.java | 257 ------------------ 1 file changed, 257 deletions(-) diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/RelationalCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/RelationalCatalog.java index 4e2c5678a6..1f1eab7a84 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/RelationalCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/RelationalCatalog.java @@ -20,14 +20,11 @@ import io.activej.serializer.annotations.Deserialize; import io.activej.serializer.annotations.Serialize; import java.beans.PropertyChangeSupport; -import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.concurrent.ConcurrentHashMap; -import java.util.stream.Collectors; -import javax.annotation.Nullable; import lombok.Builder; import lombok.Getter; import lombok.Value; @@ -41,31 +38,19 @@ import org.polypheny.db.catalog.Serializable; import org.polypheny.db.catalog.catalogs.LogicalCatalog; import org.polypheny.db.catalog.catalogs.LogicalRelationalCatalog; -import org.polypheny.db.catalog.entity.CatalogConstraint; -import org.polypheny.db.catalog.entity.CatalogForeignKey; import org.polypheny.db.catalog.entity.CatalogIndex; import org.polypheny.db.catalog.entity.CatalogKey; import org.polypheny.db.catalog.entity.CatalogKey.EnforcementTime; -import org.polypheny.db.catalog.entity.CatalogPrimaryKey; import org.polypheny.db.catalog.entity.CatalogView; import 
From f8a6af945fd5f0a72dd0d791528e6379aa7e5fa2 Mon Sep 17 00:00:00 2001 From: datomo Date: Wed, 8 Mar 2023 21:04:24 +0100 Subject: [PATCH 040/436] removed unused getter methods from relational catalog --- .../db/catalog/logical/RelationalCatalog.java | 257 ------------------ 1 file changed, 257 deletions(-) diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/RelationalCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/RelationalCatalog.java index 4e2c5678a6..1f1eab7a84 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/RelationalCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/RelationalCatalog.java @@ -20,14 +20,11 @@ import io.activej.serializer.annotations.Deserialize; import io.activej.serializer.annotations.Serialize; import java.beans.PropertyChangeSupport; -import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.concurrent.ConcurrentHashMap; -import java.util.stream.Collectors; -import javax.annotation.Nullable; import lombok.Builder; import lombok.Getter; import lombok.Value; @@ -41,31 +38,19 @@ import org.polypheny.db.catalog.Serializable; import org.polypheny.db.catalog.catalogs.LogicalCatalog; import org.polypheny.db.catalog.catalogs.LogicalRelationalCatalog; -import org.polypheny.db.catalog.entity.CatalogConstraint; -import org.polypheny.db.catalog.entity.CatalogForeignKey; import org.polypheny.db.catalog.entity.CatalogIndex; import org.polypheny.db.catalog.entity.CatalogKey; import org.polypheny.db.catalog.entity.CatalogKey.EnforcementTime; -import org.polypheny.db.catalog.entity.CatalogPrimaryKey; import org.polypheny.db.catalog.entity.CatalogView; import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.MaterializedCriteria; import org.polypheny.db.catalog.entity.logical.LogicalColumn; -import org.polypheny.db.catalog.entity.logical.LogicalEntity; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.exceptions.GenericCatalogException; -import org.polypheny.db.catalog.exceptions.UnknownColumnException; -import org.polypheny.db.catalog.exceptions.UnknownConstraintException; -import org.polypheny.db.catalog.exceptions.UnknownForeignKeyException; -import org.polypheny.db.catalog.exceptions.UnknownIndexException; -import org.polypheny.db.catalog.exceptions.UnknownIndexIdRuntimeException; -import org.polypheny.db.catalog.exceptions.UnknownSchemaException; -import org.polypheny.db.catalog.exceptions.UnknownTableException; import org.polypheny.db.catalog.logistic.Collation; import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.logistic.ForeignKeyOption; import org.polypheny.db.catalog.logistic.IndexType; -import org.polypheny.db.catalog.logistic.Pattern; import org.polypheny.db.languages.QueryLanguage; import org.polypheny.db.type.PolyType; @@ -143,68 +128,12 @@ public RelationalCatalog copy() { } - @Override - public boolean checkIfExistsEntity( String entityName ) { - return false; - } - - - @Override - public boolean checkIfExistsEntity( long tableId ) { - return false; - } - - - @Override - public LogicalEntity getEntity( String name ) { - return names.get( name ); - } - - - @Override - public LogicalEntity getEntity( long id ) { - return tables.get( id ); - } - - @Override public LogicalCatalog withLogicalNamespace( LogicalNamespace namespace ) { return toBuilder().logicalNamespace( namespace ).build(); } - @Override - public List<LogicalTable> getTables( @Nullable Pattern name ) { - if ( name == null ) { - return List.copyOf( tables.values() ); - } - return tables - .values() - .stream() - .filter( t -> logicalNamespace.caseSensitive ? 
- t.name.toLowerCase().matches( name.toRegex() ) : - t.name.matches( name.toRegex() ) ).collect( Collectors.toList() ); - } - - - @Override - public LogicalTable getTable( long tableId ) { - return tables.get( tableId ); - } - - - @Override - public LogicalTable getTable( String tableName ) throws UnknownTableException { - return names.get( tableName ); - } - - - @Override - public LogicalTable getTableFromPartition( long partitionId ) { - return null; - } - - @Override public long addTable( String name, EntityType entityType, boolean modifiable ) { long id = idBuilder.getNewEntityId(); @@ -250,68 +179,6 @@ public void setPrimaryKey( long tableId, Long keyId ) { } - @Override - public List getIndexes( CatalogKey key ) { - return indexes.values().stream().filter( i -> i.keyId == key.id ).collect( Collectors.toList() ); - } - - - @Override - public List getForeignKeys( CatalogKey key ) { - return indexes.values().stream().filter( i -> i.keyId == key.id ).collect( Collectors.toList() ); - } - - - @Override - public List getIndexes( long tableId, boolean onlyUnique ) { - if ( !onlyUnique ) { - return indexes.values().stream().filter( i -> i.key.tableId == tableId ).collect( Collectors.toList() ); - } else { - return indexes.values().stream().filter( i -> i.key.tableId == tableId && i.unique ).collect( Collectors.toList() ); - } - } - - - @Override - public CatalogIndex getIndex( long tableId, String indexName ) throws UnknownIndexException { - try { - return indexes.values().stream() - .filter( i -> i.key.tableId == tableId && i.name.equals( indexName ) ) - .findFirst() - .orElseThrow( NullPointerException::new ); - } catch ( NullPointerException e ) { - throw new UnknownIndexException( tableId, indexName ); - } - } - - - @Override - public boolean checkIfExistsIndex( long tableId, String indexName ) { - try { - getIndex( tableId, indexName ); - return true; - } catch ( UnknownIndexException e ) { - return false; - } - } - - - @Override - public CatalogIndex getIndex( long indexId ) { - try { - return Objects.requireNonNull( indexes.get( indexId ) ); - } catch ( NullPointerException e ) { - throw new UnknownIndexIdRuntimeException( indexId ); - } - } - - - @Override - public List getIndexes() { - return new ArrayList<>( indexes.values() ); - } - - @Override public long addIndex( long tableId, List columnIds, boolean unique, String method, String methodDisplayName, long adapterId, IndexType type, String indexName ) throws GenericCatalogException { long keyId = getOrAddKey( tableId, columnIds, EnforcementTime.ON_QUERY ); @@ -379,52 +246,6 @@ public void deleteIndex( long indexId ) { } - @Override - public List getKeys() { - return null; - } - - - @Override - public List getTableKeys( long tableId ) { - return null; - } - - - @Override - public List getColumns( long tableId ) { - return null; - } - - - @Override - public List getColumns( @Nullable Pattern tableNamePattern, @Nullable Pattern columnNamePattern ) { - List tables = getTables( tableNamePattern ); - if ( columnNamePattern == null ) { - return tables.stream().flatMap( t -> t.columns.stream() ).collect( Collectors.toList() ); - } - return tables.stream().flatMap( t -> t.columns.stream() ).filter( c -> c.name.matches( columnNamePattern.toRegex() ) ).collect( Collectors.toList() ); - } - - - @Override - public LogicalColumn getColumn( long columnId ) { - return columns.get( columnId ); - } - - - @Override - public LogicalColumn getColumn( long tableId, String columnName ) throws UnknownColumnException { - return tables.get( tableId 
).columns.stream().filter( c -> logicalNamespace.isCaseSensitive() ? c.name.equals( columnName ) : c.name.equalsIgnoreCase( columnName ) ).findFirst().orElse( null ); - } - - - @Override - public LogicalColumn getColumn( String tableName, String columnName ) throws UnknownColumnException, UnknownSchemaException, UnknownTableException { - return null; - } - - @Override public long addColumn( String name, long tableId, int position, PolyType type, PolyType collectionsType, Integer length, Integer scale, Integer dimension, Integer cardinality, boolean nullable, Collation collation ) { long id = idBuilder.getNewFieldId(); @@ -465,12 +286,6 @@ public void setCollation( long columnId, Collation collation ) { } - @Override - public boolean checkIfExistsColumn( long tableId, String columnName ) { - return false; - } - - @Override public void deleteColumn( long columnId ) { @@ -489,78 +304,12 @@ public void deleteDefaultValue( long columnId ) { } - @Override - public CatalogPrimaryKey getPrimaryKey( long key ) { - return null; - } - - - @Override - public boolean isPrimaryKey( long keyId ) { - return false; - } - - - @Override - public boolean isForeignKey( long keyId ) { - return false; - } - - - @Override - public boolean isIndex( long keyId ) { - return false; - } - - - @Override - public boolean isConstraint( long keyId ) { - return false; - } - - @Override public void addPrimaryKey( long tableId, List<Long> columnIds ) throws GenericCatalogException { } - @Override - public List<CatalogForeignKey> getForeignKeys( long tableId ) { - return null; - } - - - @Override - public List<CatalogForeignKey> getExportedKeys( long tableId ) { - return null; - } - - - @Override - public List<CatalogConstraint> getConstraints( long tableId ) { - return null; - } - - - @Override - public List<CatalogConstraint> getConstraints( CatalogKey key ) { - return null; - } - - - @Override - public CatalogConstraint getConstraint( long tableId, String constraintName ) throws UnknownConstraintException { - return null; - } - - - @Override - public CatalogForeignKey getForeignKey( long tableId, String foreignKeyName ) throws UnknownForeignKeyException { - return null; - } - - @Override public void addForeignKey( long tableId, List<Long> columnIds, long referencesTableId, List<Long> referencesIds, String constraintName, ForeignKeyOption onUpdate, ForeignKeyOption onDelete ) throws GenericCatalogException { @@ -603,12 +352,6 @@ public void updateMaterializedViewRefreshTime( long materializedViewId ) { } - @Override - public List<LogicalTable> getTablesForPeriodicProcessing() { - return null; - } - - @Override public void flagTableForDeletion( long tableId, boolean flag ) {
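The patch below rewires call sites from the removed catalog getters onto snapshot reads. A condensed before/after sketch of the recurring substitution (namespaceId and tableId are hypothetical; both call shapes are taken from the diffs that follow):

import org.polypheny.db.catalog.Catalog;
import org.polypheny.db.catalog.entity.logical.LogicalTable;
import org.polypheny.db.catalog.snapshot.LogicalRelSnapshot;

public class SnapshotCallMigration {

    // Sketch of the call-site rewrite performed by the next patch;
    // namespaceId and tableId are hypothetical values.
    static LogicalTable lookupTable( long namespaceId, long tableId ) {
        // Before: navigate the mutable catalog directly.
        // LogicalTable table = Catalog.getInstance().getLogicalRel( namespaceId ).getTable( tableId );

        // After: read from an immutable snapshot of the relational namespace.
        LogicalRelSnapshot snapshot = Catalog.getInstance().getRelSnapshot( namespaceId );
        return snapshot.getTable( tableId );
    }
}

The same pattern repeats below for adapters, query interfaces, constraints, and indexes: writes stay on Catalog, reads move to Snapshot and its per-model sub-snapshots.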
From dfe715a333cbe4e90452403387afd56ce8844ad4 Mon Sep 17 00:00:00 2001 From: datomo Date: Wed, 8 Mar 2023 22:59:21 +0100 Subject: [PATCH 041/436] adjusting calls to snapshots --- .../org/polypheny/db/adapter/Adapter.java | 4 +- .../polypheny/db/adapter/AdapterManager.java | 6 +- .../db/adapter/index/IndexManager.java | 8 +- .../common/LogicalConstraintEnforcer.java | 27 +++--- .../org/polypheny/db/catalog/Adapter.java | 2 +- .../org/polypheny/db/catalog/Catalog.java | 3 +- .../catalog/catalogs/AllocationCatalog.java | 4 - .../entity/CatalogColumnPlacement.java | 15 +-- .../catalog/entity/CatalogDataPlacement.java | 7 +- .../db/catalog/entity/CatalogForeignKey.java | 7 +- .../db/catalog/entity/CatalogKey.java | 5 +- .../catalog/entity/CatalogPartitionGroup.java | 8 +- .../catalog/entity/logical/LogicalColumn.java | 6 +- .../entity/physical/PhysicalTable.java | 2 +- .../db/catalog/snapshot/AllocSnapshot.java | 3 + .../catalog/snapshot/LogicalRelSnapshot.java | 5 +- .../db/catalog/snapshot/Snapshot.java | 13 +++ .../polypheny/db/docker/DockerInstance.java | 5 +- .../db/iface/QueryInterfaceManager.java | 9 +- .../db/schema/AbstractPolyphenyDbSchema.java | 2 +- .../java/org/polypheny/db/schema/Entity.java | 27 ------ .../java/org/polypheny/db/schema/Schemas.java | 2 +- .../org/polypheny/db/tools/AlgBuilder.java | 9 +- .../polypheny/db/tools/RoutedAlgBuilder.java | 2 +- .../org/polypheny/db/catalog/MockCatalog.java | 2 +- .../db/docker/MockCatalogDocker.java | 2 +- .../java/org/polypheny/db/PolyphenyDb.java | 6 +- .../org/polypheny/db/ddl/DdlManagerImpl.java | 14 +-- .../db/transaction/TransactionImpl.java | 33 ++++++- .../statistics/DashboardInformation.java | 9 +- .../db/monitoring/statistics/QueryResult.java | 2 +- .../statistics/StatisticColumn.java | 11 ++- .../statistics/StatisticQueryProcessor.java | 23 ++--- .../statistics/StatisticsManagerImpl.java | 11 +-- .../org/polypheny/db/avatica/DbmsMeta.java | 19 ++-- .../org/polypheny/db/catalog/PolyCatalog.java | 2 +- .../polypheny/db/restapi/RequestParser.java | 2 +- .../java/org/polypheny/db/restapi/Rest.java | 6 +- .../java/org/polypheny/db/webui/Crud.java | 95 ++++++++++--------- .../polypheny/db/webui/crud/LanguageCrud.java | 16 ++-- .../models/requests/BatchUpdateRequest.java | 7 +- 41 files changed, 226 insertions(+), 215 deletions(-) diff --git a/core/src/main/java/org/polypheny/db/adapter/Adapter.java b/core/src/main/java/org/polypheny/db/adapter/Adapter.java index dfa17fd5a3..44830a311e 100644 --- a/core/src/main/java/org/polypheny/db/adapter/Adapter.java +++ b/core/src/main/java/org/polypheny/db/adapter/Adapter.java @@ -483,10 +483,10 @@ public void addInformationPhysicalNames() { ); informationElements.add( physicalColumnNames ); - Catalog catalog = Catalog.getInstance(); + Snapshot snapshot = Catalog.getInstance().getSnapshot(); group.setRefreshFunction( () -> { physicalColumnNames.reset(); - List<PhysicalEntity<?>> physicalsOnAdapter = catalog.getPhysicalsOnAdapter( adapterId ); + List<PhysicalEntity<?>> physicalsOnAdapter = snapshot.getPhysicalsOnAdapter( adapterId ); for ( PhysicalEntity entity : physicalsOnAdapter ) { if ( entity.namespaceType != NamespaceType.RELATIONAL ) { diff --git a/core/src/main/java/org/polypheny/db/adapter/AdapterManager.java b/core/src/main/java/org/polypheny/db/adapter/AdapterManager.java index 4dbdd6e1ad..9980c456bf 100644 --- a/core/src/main/java/org/polypheny/db/adapter/AdapterManager.java +++ b/core/src/main/java/org/polypheny/db/adapter/AdapterManager.java @@ -241,10 +241,10 @@ public void removeAdapter( long adapterId ) { if ( adapterInstance == null ) { throw new RuntimeException( "Unknown adapter instance with id: " + adapterId ); } - CatalogAdapter catalogAdapter = Catalog.getInstance().getAdapter( adapterId ); + CatalogAdapter catalogAdapter = Catalog.getInstance().getSnapshot().getAdapter( adapterId ); // Check if the store has any placements - List<AllocationEntity<?>> placements = Catalog.getInstance().getAllocationsOnAdapter( catalogAdapter.id ); + List<AllocationEntity<?>> placements = Catalog.getInstance().getAllocSnapshot().getAllocationsOnAdapter( catalogAdapter.id ); if ( placements.size() != 0 ) { throw new RuntimeException( "There is still data placed on this data store" ); } @@ -266,7 +266,7 @@ public void removeAdapter( long adapterId ) { */ public void restoreAdapters() { try { - List<CatalogAdapter> adapters = Catalog.getInstance().getAdapters(); + List<CatalogAdapter> adapters = Catalog.getInstance().getSnapshot().getAdapters(); for ( CatalogAdapter adapter : adapters ) { Adapter instance = instantiate( adapter.id, adapter.adapterName, 
adapter.uniqueName, adapter.type, adapter.settings ); adapterByName.put( instance.getUniqueName(), instance ); diff --git a/core/src/main/java/org/polypheny/db/adapter/index/IndexManager.java b/core/src/main/java/org/polypheny/db/adapter/index/IndexManager.java index d40dfc38d2..7ed0ffc1b9 100644 --- a/core/src/main/java/org/polypheny/db/adapter/index/IndexManager.java +++ b/core/src/main/java/org/polypheny/db/adapter/index/IndexManager.java @@ -149,7 +149,7 @@ public void initialize( final TransactionManager transactionManager ) { public void restoreIndexes() throws UnknownSchemaException, GenericCatalogException, UnknownTableException, UnknownKeyException, UnknownUserException, TransactionException { - for ( final CatalogIndex index : Catalog.getInstance().getIndexes() ) { + for ( final CatalogIndex index : Catalog.getInstance().getSnapshot().getIndexes() ) { if ( index.location == 0 ) { addIndex( index ); } @@ -173,15 +173,15 @@ protected void addIndex( final long id, final String name, final CatalogKey key, .filter( it -> it.canProvide( method, unique, persistent ) ) .findFirst() .orElseThrow( IllegalArgumentException::new ); - final LogicalTable table = Catalog.getInstance().getLogicalRel( key.namespaceId ).getTable( key.tableId ); - final CatalogPrimaryKey pk = Catalog.getInstance().getLogicalRel( key.namespaceId ).getPrimaryKey( table.primaryKey ); + final LogicalTable table = statement.getTransaction().getSnapshot().getRelSnapshot( key.namespaceId ).getTable( key.tableId ); + final CatalogPrimaryKey pk = statement.getTransaction().getSnapshot().getRelSnapshot( key.namespaceId ).getPrimaryKey( table.primaryKey ); final Index index = factory.create( id, name, method, unique, persistent, - Catalog.getInstance().getNamespace( key.namespaceId ), + Catalog.getInstance().getSnapshot().getNamespace( key.namespaceId ), table, key.getColumnNames(), pk.getColumnNames() ); diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalConstraintEnforcer.java b/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalConstraintEnforcer.java index 0ffafa276a..841a91de10 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalConstraintEnforcer.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalConstraintEnforcer.java @@ -44,6 +44,7 @@ import org.polypheny.db.catalog.entity.CatalogPrimaryKey; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.logistic.ConstraintType; +import org.polypheny.db.catalog.snapshot.LogicalRelSnapshot; import org.polypheny.db.config.RuntimeConfig; import org.polypheny.db.languages.OperatorRegistry; import org.polypheny.db.plan.AlgOptCluster; @@ -93,26 +94,26 @@ private static EnforcementInformation getControl( AlgNode node, Statement statem AlgBuilder builder = AlgBuilder.create( statement ); final RexBuilder rexBuilder = modify.getCluster().getRexBuilder(); + LogicalRelSnapshot snapshot = Catalog.getInstance().getRelSnapshot( table.namespaceId ); + EnforcementTime enforcementTime = EnforcementTime.ON_QUERY; - final List constraints = new ArrayList<>( Catalog.getInstance().getLogicalRel( table.namespaceId ).getConstraints( table.id ) ) + final List constraints = new ArrayList<>( snapshot.getConstraints( table.id ) ) .stream() .filter( f -> f.key.enforcementTime == enforcementTime ) .collect( Collectors.toCollection( ArrayList::new ) ); - final List foreignKeys = Catalog.getInstance() - .getLogicalRel( table.namespaceId ) + final List foreignKeys = snapshot 
.getForeignKeys( table.id ) .stream() .filter( f -> f.enforcementTime == enforcementTime ) .collect( Collectors.toList() ); - final List exportedKeys = Catalog.getInstance() - .getLogicalRel( table.namespaceId ) + final List exportedKeys = snapshot .getExportedKeys( table.id ) .stream() .filter( f -> f.enforcementTime == enforcementTime ) .collect( Collectors.toList() ); // Turn primary key into an artificial unique constraint - CatalogPrimaryKey pk = Catalog.getInstance().getLogicalRel( table.namespaceId ).getPrimaryKey( table.primaryKey ); + CatalogPrimaryKey pk = snapshot.getPrimaryKey( table.primaryKey ); if ( pk.enforcementTime == enforcementTime ) { final CatalogConstraint pkc = new CatalogConstraint( 0L, pk.id, ConstraintType.UNIQUE, "PRIMARY KEY", pk ); constraints.add( pkc ); @@ -162,8 +163,8 @@ private static EnforcementInformation getControl( AlgNode node, Statement statem if ( RuntimeConfig.FOREIGN_KEY_ENFORCEMENT.getBoolean() ) { for ( final CatalogForeignKey foreignKey : Stream.concat( foreignKeys.stream(), exportedKeys.stream() ).collect( Collectors.toList() ) ) { builder.clear(); - final LogicalTable scanOptTable = statement.getDataContext().getSnapshot().getLogicalTable( foreignKey.tableId ); - final LogicalTable refOptTable = statement.getDataContext().getSnapshot().getLogicalTable( foreignKey.referencedKeyTableId ); + final LogicalTable scanOptTable = snapshot.getLogicalTable( foreignKey.tableId ); + final LogicalTable refOptTable = snapshot.getLogicalTable( foreignKey.referencedKeyTableId ); final AlgNode scan = LogicalRelScan.create( modify.getCluster(), scanOptTable ); final LogicalRelScan ref = LogicalRelScan.create( modify.getCluster(), refOptTable ); @@ -222,24 +223,24 @@ public static EnforcementInformation getControl( LogicalTable table, Statement s AlgBuilder builder = AlgBuilder.create( statement ); final RexBuilder rexBuilder = builder.getRexBuilder(); + LogicalRelSnapshot snapshot = Catalog.getInstance().getRelSnapshot( table.namespaceId ); - final List constraints = Catalog.getInstance() - .getLogicalRel( table.namespaceId ) + final List constraints = snapshot .getConstraints( table.id ) .stream() .filter( c -> c.key.enforcementTime == enforcementTime ) .collect( Collectors.toCollection( ArrayList::new ) ); - final List foreignKeys = Catalog.getInstance().getLogicalRel( table.namespaceId ).getForeignKeys( table.id ) + final List foreignKeys = snapshot.getForeignKeys( table.id ) .stream() .filter( c -> c.enforcementTime == enforcementTime ) .collect( Collectors.toCollection( ArrayList::new ) ); - final List exportedKeys = Catalog.getInstance().getLogicalRel( table.namespaceId ).getExportedKeys( table.id ) + final List exportedKeys = snapshot.getExportedKeys( table.id ) .stream() .filter( c -> c.enforcementTime == enforcementTime ) .collect( Collectors.toCollection( ArrayList::new ) ); // Turn primary key into an artificial unique constraint - CatalogPrimaryKey pk = Catalog.getInstance().getLogicalRel( table.namespaceId ).getPrimaryKey( table.primaryKey ); + CatalogPrimaryKey pk = snapshot.getPrimaryKey( table.primaryKey ); if ( pk.enforcementTime == enforcementTime ) { final CatalogConstraint pkc = new CatalogConstraint( 0L, pk.id, ConstraintType.UNIQUE, "PRIMARY KEY", pk ); constraints.add( pkc ); diff --git a/core/src/main/java/org/polypheny/db/catalog/Adapter.java b/core/src/main/java/org/polypheny/db/catalog/Adapter.java index 301fcbd537..470175b7fd 100644 --- a/core/src/main/java/org/polypheny/db/catalog/Adapter.java +++ 
b/core/src/main/java/org/polypheny/db/catalog/Adapter.java @@ -75,7 +75,7 @@ public static void addAdapter( Class clazz, String adapterName, Map clazz, String adapterName ) { - if ( Catalog.getInstance().getAdapters().stream().anyMatch( a -> a.adapterName.equals( adapterName ) ) ) { + if ( Catalog.getInstance().getSnapshot().getAdapters().stream().anyMatch( a -> a.adapterName.equals( adapterName ) ) ) { throw new RuntimeException( "Adapter is still deployed!" ); } REGISTER.remove( getKey( clazz, adapterName ) ); diff --git a/core/src/main/java/org/polypheny/db/catalog/Catalog.java b/core/src/main/java/org/polypheny/db/catalog/Catalog.java index 5de6e24595..0b94d64dfa 100644 --- a/core/src/main/java/org/polypheny/db/catalog/Catalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/Catalog.java @@ -243,7 +243,7 @@ protected final boolean isValidIdentifier( final String str ) { public abstract void clear(); - public abstract Snapshot getSnapshot( long namespaceId ); + public abstract Snapshot getSnapshot(); public abstract LogicalDocSnapshot getDocSnapshot( long namespaceId ); @@ -252,7 +252,6 @@ protected final boolean isValidIdentifier( final String str ) { public abstract LogicalRelSnapshot getRelSnapshot( long namespaceId ); - public abstract PhysicalSnapshot getPhysicalSnapshot(); public abstract AllocSnapshot getAllocSnapshot(); diff --git a/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationCatalog.java b/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationCatalog.java index ffd165da6f..364256badc 100644 --- a/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationCatalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationCatalog.java @@ -16,11 +16,7 @@ package org.polypheny.db.catalog.catalogs; -import java.util.List; -import org.polypheny.db.catalog.entity.allocation.AllocationEntity; - public interface AllocationCatalog { - List> getAllocationsOnAdapter( long id ); } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogColumnPlacement.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogColumnPlacement.java index 09fb31f851..11fc0e8566 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogColumnPlacement.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogColumnPlacement.java @@ -23,8 +23,6 @@ import lombok.NonNull; import lombok.SneakyThrows; import lombok.Value; -import org.apache.commons.lang.NotImplementedException; -import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.logistic.PlacementType; @@ -72,27 +70,24 @@ public CatalogColumnPlacement( } - @SneakyThrows - public String getLogicalSchemaName() { - throw new NotImplementedException(); - } - @SneakyThrows public String getLogicalTableName() { - return Catalog.getInstance().getLogicalRel( namespaceId ).getTable( tableId ).name; + throw new org.apache.commons.lang3.NotImplementedException(); } @SneakyThrows public String getLogicalColumnName() { - return Catalog.getInstance().getLogicalRel( namespaceId ).getColumn( columnId ).name; + //return Catalog.getInstance().getLogicalRel( namespaceId ).getColumn( columnId ).name; + throw new org.apache.commons.lang3.NotImplementedException(); } @SneakyThrows public String getAdapterUniqueName() { - return Catalog.getInstance().getAdapter( adapterId ).uniqueName; + // return Catalog.getInstance().getAdapter( adapterId ).uniqueName; + throw new org.apache.commons.lang3.NotImplementedException(); } diff --git 
a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogDataPlacement.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogDataPlacement.java index d6f7934d4d..86254bd655 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogDataPlacement.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogDataPlacement.java @@ -20,19 +20,15 @@ import com.google.common.collect.ImmutableMap; import java.io.Serializable; import java.sql.Timestamp; -import java.util.ArrayList; -import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.stream.Collectors; import lombok.NonNull; import lombok.SneakyThrows; import lombok.Value; -import lombok.With; import lombok.experimental.NonFinal; import lombok.experimental.SuperBuilder; import org.apache.commons.lang.NotImplementedException; -import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.logistic.DataPlacementRole; import org.polypheny.db.catalog.logistic.PlacementType; @@ -97,7 +93,8 @@ public String getTableName() { @SneakyThrows public String getAdapterName() { - return Catalog.getInstance().getAdapter( adapterId ).uniqueName; + // return Catalog.getInstance().getAdapter( adapterId ).uniqueName; + throw new NotImplementedException(); } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogForeignKey.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogForeignKey.java index cf9f684eb1..a6e2b8979e 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogForeignKey.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogForeignKey.java @@ -26,7 +26,6 @@ import lombok.RequiredArgsConstructor; import lombok.SneakyThrows; import org.apache.commons.lang.NotImplementedException; -import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.logistic.ForeignKeyOption; @@ -67,13 +66,15 @@ public CatalogForeignKey( @SneakyThrows public String getReferencedKeySchemaName() { - return Catalog.getInstance().getNamespace( referencedKeySchemaId ).name; + // return Catalog.getInstance().getNamespace( referencedKeySchemaId ).name; + throw new NotImplementedException(); } @SneakyThrows public String getReferencedKeyTableName() { - return Catalog.getInstance().getLogicalRel( referencedKeySchemaId ).getTable( referencedKeyTableId ).name; + // return Catalog.getInstance().getLogicalRel( referencedKeySchemaId ).getTable( referencedKeyTableId ).name; + throw new NotImplementedException(); } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogKey.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogKey.java index af639785ee..c58e1f22f0 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogKey.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogKey.java @@ -22,12 +22,10 @@ import java.io.Serializable; import java.util.List; import lombok.EqualsAndHashCode; -import lombok.NonNull; import lombok.SneakyThrows; import lombok.Value; import lombok.experimental.NonFinal; import org.apache.commons.lang.NotImplementedException; -import org.polypheny.db.catalog.Catalog; @EqualsAndHashCode @@ -65,7 +63,8 @@ public CatalogKey( @SneakyThrows public String getSchemaName() { - return Catalog.getInstance().getNamespace( namespaceId ).name; + // return Catalog.getInstance().getNamespace( namespaceId ).name; + throw new NotImplementedException(); } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogPartitionGroup.java 
b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogPartitionGroup.java index 804de5b715..f44ea6f189 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogPartitionGroup.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogPartitionGroup.java @@ -21,7 +21,7 @@ import java.util.List; import lombok.EqualsAndHashCode; import lombok.SneakyThrows; -import org.polypheny.db.catalog.Catalog; +import org.apache.commons.lang.NotImplementedException; @EqualsAndHashCode @@ -68,14 +68,16 @@ public CatalogPartitionGroup( @SneakyThrows public String getTableName() { - return Catalog.getInstance().getLogicalRel( schemaId ).getTable( tableId ).name; + // return Catalog.getInstance().getLogicalRel( schemaId ).getTable( tableId ).name; + throw new NotImplementedException(); } @SneakyThrows public String getSchemaName() { - return Catalog.getInstance().getNamespace( schemaId ).name; + // return Catalog.getInstance().getNamespace( schemaId ).name; + throw new NotImplementedException(); } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalColumn.java b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalColumn.java index e6419a08be..d31487738d 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalColumn.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalColumn.java @@ -25,9 +25,9 @@ import lombok.SneakyThrows; import lombok.Value; import lombok.experimental.NonFinal; +import org.apache.commons.lang3.NotImplementedException; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; -import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogDefaultValue; import org.polypheny.db.catalog.entity.CatalogObject; import org.polypheny.db.catalog.logistic.Collation; @@ -145,13 +145,13 @@ public AlgDataType getAlgDataType( final AlgDataTypeFactory typeFactory ) { @SneakyThrows public String getSchemaName() { - return Catalog.getInstance().getNamespace( namespaceId ).name; + throw new NotImplementedException(); } @SneakyThrows public String getTableName() { - return Catalog.getInstance().getSnapshot( 0 ).getLogicalTable( tableId ).name; + throw new NotImplementedException(); } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalTable.java b/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalTable.java index bb21e4d861..212da38ef8 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalTable.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalTable.java @@ -78,7 +78,7 @@ public AlgProtoDataType buildProto() { final AlgDataTypeFactory.Builder fieldInfo = typeFactory.builder(); for ( CatalogColumnPlacement placement : placements ) { - LogicalColumn logicalColumn = Catalog.getInstance().getSnapshot( 0 ).getLogicalColumn( placement.columnId ); + LogicalColumn logicalColumn = Catalog.getInstance().getRelSnapshot( namespaceId ).getLogicalColumn( placement.columnId ); AlgDataType sqlType = logicalColumn.getAlgDataType( typeFactory ); fieldInfo.add( logicalColumn.name, placement.physicalColumnName, sqlType ).nullable( logicalColumn.nullable ); } diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/AllocSnapshot.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/AllocSnapshot.java index a09c0c4abe..39ff850c28 100644 --- a/core/src/main/java/org/polypheny/db/catalog/snapshot/AllocSnapshot.java +++ 
b/core/src/main/java/org/polypheny/db/catalog/snapshot/AllocSnapshot.java @@ -16,6 +16,7 @@ package org.polypheny.db.catalog.snapshot; +import java.util.List; import org.polypheny.db.catalog.entity.allocation.AllocationEntity; public interface AllocSnapshot { @@ -28,6 +29,8 @@ public interface AllocSnapshot { // AllocationGraph getAllocGraph( long id ); + List> getAllocationsOnAdapter( long id ); + AllocationEntity getAllocEntity( long id ); //// LOGISTICS diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalRelSnapshot.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalRelSnapshot.java index 27c66f19f0..4cf93cc914 100644 --- a/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalRelSnapshot.java +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalRelSnapshot.java @@ -24,6 +24,7 @@ import org.polypheny.db.catalog.entity.CatalogIndex; import org.polypheny.db.catalog.entity.CatalogKey; import org.polypheny.db.catalog.entity.CatalogPrimaryKey; +import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.exceptions.UnknownColumnException; @@ -292,10 +293,12 @@ public interface LogicalRelSnapshot { LogicalTable getLogicalTable( long id ); - LogicalTable getLogicalTable( long namespaceId, String name ); + LogicalTable getLogicalTable( String name ); List getLogicalTables( long namespaceId, Pattern name ); LogicalColumn getLogicalColumn( long id ); + LogicalNamespace getNamespace( long id ); + } diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/Snapshot.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/Snapshot.java index bff0b8463c..b18804a9c1 100644 --- a/core/src/main/java/org/polypheny/db/catalog/snapshot/Snapshot.java +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/Snapshot.java @@ -219,4 +219,17 @@ default List getOperatorList() { LogicalEntity getEntity( String name ); + public abstract LogicalDocSnapshot getDocSnapshot( long namespaceId ); + + public abstract LogicalGraphSnapshot getGraphSnapshot( long namespaceId ); + + + public abstract LogicalRelSnapshot getRelSnapshot( long namespaceId ); + + + public abstract PhysicalSnapshot getPhysicalSnapshot(); + + public abstract AllocSnapshot getAllocSnapshot(); + + } diff --git a/core/src/main/java/org/polypheny/db/docker/DockerInstance.java b/core/src/main/java/org/polypheny/db/docker/DockerInstance.java index 66c2a00ab8..9727541e7c 100644 --- a/core/src/main/java/org/polypheny/db/docker/DockerInstance.java +++ b/core/src/main/java/org/polypheny/db/docker/DockerInstance.java @@ -60,6 +60,7 @@ import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.time.StopWatch; import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.config.ConfigDocker; import org.polypheny.db.config.RuntimeConfig; import org.polypheny.db.docker.exceptions.NameExistsRuntimeException; @@ -127,7 +128,7 @@ private void updateUsedValues( DockerClient client ) { } ); Map idsToRemove = new HashMap<>(); - Catalog catalog = Catalog.getInstance(); + Snapshot snapshot = Catalog.getInstance().getSnapshot(); outer: for ( com.github.dockerjava.api.model.Container container : client.listContainersCmd().withShowAll( true ).exec() ) {// Docker returns the names with a prefixed "/", so we remove it @@ -150,7 +151,7 @@ private void updateUsedValues( DockerClient client ) { } int 
adapterId = Integer.parseInt( unparsedAdapterId ); - if ( !catalog.checkIfExistsAdapter( adapterId ) || !catalog.getAdapter( adapterId ).uniqueName.equals( splits[0] ) || isTestContainer || Catalog.resetDocker ) { + if ( !snapshot.checkIfExistsAdapter( adapterId ) || !snapshot.getAdapter( adapterId ).uniqueName.equals( splits[0] ) || isTestContainer || Catalog.resetDocker ) { idsToRemove.put( container.getId(), container.getState().equalsIgnoreCase( "running" ) ); // As we remove this container later we skip the name and port adding continue outer; diff --git a/core/src/main/java/org/polypheny/db/iface/QueryInterfaceManager.java b/core/src/main/java/org/polypheny/db/iface/QueryInterfaceManager.java index ed83036050..02ed05cbc3 100644 --- a/core/src/main/java/org/polypheny/db/iface/QueryInterfaceManager.java +++ b/core/src/main/java/org/polypheny/db/iface/QueryInterfaceManager.java @@ -37,6 +37,7 @@ import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogQueryInterface; import org.polypheny.db.catalog.exceptions.UnknownQueryInterfaceException; +import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.iface.QueryInterface.QueryInterfaceSetting; import org.polypheny.db.transaction.TransactionManager; @@ -88,7 +89,7 @@ public static void addInterfaceType( String interfaceName, Class clazz ) { - for ( CatalogQueryInterface queryInterface : Catalog.getInstance().getQueryInterfaces() ) { + for ( CatalogQueryInterface queryInterface : Catalog.getInstance().getSnapshot().getQueryInterfaces() ) { if ( queryInterface.clazz.equals( clazz.getName() ) ) { throw new RuntimeException( "Cannot remove the interface type, there is still a interface active." ); } @@ -129,9 +130,9 @@ public List getAvailableQueryInterfaceTypes() { /** * Restores query interfaces from catalog */ - public void restoreInterfaces( Catalog catalog ) { + public void restoreInterfaces( Snapshot snapshot ) { try { - List interfaces = catalog.getQueryInterfaces(); + List interfaces = snapshot.getQueryInterfaces(); for ( CatalogQueryInterface iface : interfaces ) { String[] split = iface.clazz.split( "\\$" ); split = split[split.length - 1].split( "\\." ); @@ -205,7 +206,7 @@ public void removeQueryInterface( Catalog catalog, String uniqueName ) throws Un if ( !interfaceByName.containsKey( uniqueName ) ) { throw new RuntimeException( "Unknown query interface: " + uniqueName ); } - CatalogQueryInterface catalogQueryInterface = catalog.getQueryInterface( uniqueName ); + CatalogQueryInterface catalogQueryInterface = catalog.getSnapshot().getQueryInterface( uniqueName ); // Shutdown interface interfaceByName.get( uniqueName ).shutdown(); diff --git a/core/src/main/java/org/polypheny/db/schema/AbstractPolyphenyDbSchema.java b/core/src/main/java/org/polypheny/db/schema/AbstractPolyphenyDbSchema.java index ca0ed3da09..453c8b32c8 100644 --- a/core/src/main/java/org/polypheny/db/schema/AbstractPolyphenyDbSchema.java +++ b/core/src/main/java/org/polypheny/db/schema/AbstractPolyphenyDbSchema.java @@ -83,7 +83,7 @@ public AbstractPolyphenyDbSchema( * Creates a root schema. 
*/ public static Snapshot createSnapshot() { - return Catalog.getInstance().getSnapshot( 0 ); + return Catalog.getInstance().getSnapshot(); } diff --git a/core/src/main/java/org/polypheny/db/schema/Entity.java b/core/src/main/java/org/polypheny/db/schema/Entity.java index 6f27987b91..d7207455fb 100644 --- a/core/src/main/java/org/polypheny/db/schema/Entity.java +++ b/core/src/main/java/org/polypheny/db/schema/Entity.java @@ -35,9 +35,6 @@ import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; -import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.entity.CatalogEntity; -import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.nodes.Call; import org.polypheny.db.nodes.Node; @@ -88,34 +85,10 @@ default AlgDataTypeFactory getTypeFactory() { */ Long getId(); - @Deprecated // whole entity might get replaced - default CatalogEntity getCatalogEntity() { - if ( getId() == null ) { - return null; - } - switch ( getNamespaceType() ) { - case RELATIONAL: - return Catalog.getInstance().getLogicalRel( -1 ).getTable( getId() ); - case DOCUMENT: - return Catalog.getInstance().getLogicalDoc( -1 ).getCollection( getId() ); - case GRAPH: - return Catalog.getInstance().getLogicalGraph( -1 ).getGraph( getId() ); - } - return null; - } - Long getPartitionId(); Long getAdapterId(); - @Deprecated // whole entity might get replaced - default CatalogPartitionPlacement getPartitionPlacement() { - if ( getAdapterId() == null || getPartitionId() == null ) { - return null; - } - return Catalog.getInstance().getAllocRel( -1 ).getPartitionPlacement( Math.toIntExact( getAdapterId() ), getPartitionId() ); - } - /** * Type of table. 
*/ diff --git a/core/src/main/java/org/polypheny/db/schema/Schemas.java b/core/src/main/java/org/polypheny/db/schema/Schemas.java index f94994371f..40d2d793dc 100644 --- a/core/src/main/java/org/polypheny/db/schema/Schemas.java +++ b/core/src/main/java/org/polypheny/db/schema/Schemas.java @@ -206,7 +206,7 @@ public static Queryable queryable( DataContext root, Class clazz, Iter */ public static Queryable queryable( DataContext root, Snapshot snapshot, Class clazz, String tableName ) { //QueryableEntity table = (QueryableEntity) schema.getEntity( tableName ); - LogicalTable table = snapshot.getLogicalTable( List.of( tableName ) ); + LogicalTable table = snapshot.getRelSnapshot( 0 ).getLogicalTable( List.of( tableName ) ); return table.unwrap( QueryableEntity.class ).asQueryable( root, snapshot, table.id ); } diff --git a/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java b/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java index b5881132e0..3f35fc213b 100644 --- a/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java +++ b/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java @@ -102,8 +102,8 @@ import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.algebra.type.AlgDataTypeFieldImpl; import org.polypheny.db.algebra.type.StructKind; -import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogEntity; +import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalCollection; import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.catalog.entity.logical.LogicalTable; @@ -1327,9 +1327,10 @@ public RexNode patternExclude( RexNode node ) { * * @param tableNames Name of table (can optionally be qualified) */ - public AlgBuilder scan( Iterable tableNames ) { + public AlgBuilder scan( List tableNames ) { final List names = ImmutableList.copyOf( tableNames ); - final LogicalTable entity = snapshot.getLogicalTable( names ); + LogicalNamespace namespace = snapshot.getNamespace( tableNames.get( 0 ) ); + final LogicalTable entity = snapshot.getRelSnapshot( namespace.id ).getLogicalTable( names.get( 1 ) ); if ( entity == null ) { throw RESOURCE.tableNotFound( String.join( ".", names ) ).ex(); } @@ -1375,7 +1376,7 @@ public AlgBuilder documentProject( List projects, List> tupleList, @Override - public RoutedAlgBuilder scan( Iterable tableNames ) { + public RoutedAlgBuilder scan( List tableNames ) { super.scan( tableNames ); return this; } diff --git a/core/src/test/java/org/polypheny/db/catalog/MockCatalog.java b/core/src/test/java/org/polypheny/db/catalog/MockCatalog.java index 43ae377734..af9f96e0b5 100644 --- a/core/src/test/java/org/polypheny/db/catalog/MockCatalog.java +++ b/core/src/test/java/org/polypheny/db/catalog/MockCatalog.java @@ -127,7 +127,7 @@ public void removeObserver( PropertyChangeListener listener ) { @Override - public Snapshot getSnapshot( long namespaceId ) { + public Snapshot getSnapshot() { throw new NotImplementedException(); } diff --git a/core/src/test/java/org/polypheny/db/docker/MockCatalogDocker.java b/core/src/test/java/org/polypheny/db/docker/MockCatalogDocker.java index 792edc792f..2fe662cfac 100644 --- a/core/src/test/java/org/polypheny/db/docker/MockCatalogDocker.java +++ b/core/src/test/java/org/polypheny/db/docker/MockCatalogDocker.java @@ -43,7 +43,7 @@ public long addAdapter( String uniqueName, String clazz, AdapterType type, Map stores, boolean onl } LogicalGraph graph = catalog.getLogicalGraph( graphId ).getGraph( 
graphId ); - Catalog.getInstance().getSnapshot( 0 ); + Catalog.getInstance().getSnapshot(); List preExistingPlacements = graph.placements .stream() @@ -1937,7 +1937,7 @@ public void removeGraphDatabasePlacement( long graphId, DataStore store, Stateme catalog.getAllocGraph( graphId ).deleteGraphPlacement( store.getAdapterId(), graphId ); - Catalog.getInstance().getSnapshot( 0 ); + Catalog.getInstance().getSnapshot(); } @@ -2199,7 +2199,7 @@ public void createTable( long namespaceId, String name, List f LogicalTable catalogTable = catalog.getLogicalRel( namespaceId ).getTable( tableId ); // Trigger rebuild of schema; triggers schema creation on adapters - Catalog.getInstance().getSnapshot( 0 ); + Catalog.getInstance().getSnapshot(); for ( DataStore store : stores ) { catalog.getAllocRel( catalogTable.namespaceId ).addPartitionPlacement( @@ -2250,7 +2250,7 @@ public void createCollection( long namespaceId, String name, boolean ifNotExists LogicalCollection catalogCollection = catalog.getLogicalDoc( namespaceId ).getCollection( collectionId ); // Trigger rebuild of schema; triggers schema creation on adapters - Catalog.getInstance().getSnapshot( 0 ); + Catalog.getInstance().getSnapshot(); for ( DataStore store : stores ) { catalog.getAllocDoc( namespaceId ).addCollectionPlacement( @@ -2311,7 +2311,7 @@ public void addCollectionPlacement( long namespaceId, String name, List { @Override public Snapshot getSnapshot() { - return Catalog.getInstance().getSnapshot( getId() ); + return Catalog.getInstance().getSnapshot(); + } + + + public LogicalRelSnapshot getRelSnapshot() { + return Catalog.getInstance().getRelSnapshot(); + } + + + public LogicalGraphSnapshot getGraphSnapshot() { + return Catalog.getInstance().getGraphSnapshot(); + } + + + public LogicalDocSnapshot getDocSnapshot() { + return Catalog.getInstance().getDocSnapshot(); + } + + + public PhysicalSnapshot getPhysicalSnapshot() { + return Catalog.getInstance().getPhysicalSnapshot(); + } + + + public AllocSnapshot getAllocSnapshot() { + return Catalog.getInstance().getAllocSnapshot(); } diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/DashboardInformation.java b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/DashboardInformation.java index d48adc2a89..afc62d774c 100644 --- a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/DashboardInformation.java +++ b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/DashboardInformation.java @@ -23,6 +23,7 @@ import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogAdapter.AdapterType; import org.polypheny.db.catalog.logistic.NamespaceType; +import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.monitoring.core.MonitoringServiceProvider; import org.polypheny.db.monitoring.events.metrics.DmlDataPoint; import org.polypheny.db.monitoring.events.metrics.QueryDataPointImpl; @@ -73,17 +74,17 @@ public DashboardInformation() { public void updatePolyphenyStatistic() { - Catalog catalog = Catalog.getInstance(); - this.catalogPersistent = catalog.isPersistent; + Snapshot snapshot = Catalog.getInstance().getSnapshot(); + this.catalogPersistent = Catalog.getInstance().isPersistent; this.numberOfQueries = MonitoringServiceProvider.getInstance().getAllDataPoints( QueryDataPointImpl.class ).size(); this.numberOfWorkloads = MonitoringServiceProvider.getInstance().getAllDataPoints( DmlDataPoint.class ).size(); this.numberOfPendingEvents = 
MonitoringServiceProvider.getInstance().getNumberOfElementsInQueue(); - catalog.getAdapters().forEach( v -> { + snapshot.getAdapters().forEach( v -> { this.availableAdapter.put( v.uniqueName, Pair.of( v.adapterTypeName, v.type ) ); } ); - catalog.getNamespaces( null ).forEach( v -> { + snapshot.getNamespaces( null ).forEach( v -> { availableSchemas.put( v.id, Pair.of( v.name, v.namespaceType ) ); } ); } diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/QueryResult.java b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/QueryResult.java index 6907f4e79a..f6ada03c13 100644 --- a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/QueryResult.java +++ b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/QueryResult.java @@ -42,7 +42,7 @@ class QueryResult { public static QueryResult fromCatalogColumn( LogicalColumn column ) { - return new QueryResult( Catalog.getInstance().getLogicalRel( column.namespaceId ).getTable( column.tableId ), column ); + return new QueryResult( Catalog.getInstance().getRelSnapshot( column.namespaceId ).getTable( column.tableId ), column ); } } diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticColumn.java b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticColumn.java index ee60e8592a..8e2d0a2e53 100644 --- a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticColumn.java +++ b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticColumn.java @@ -24,6 +24,7 @@ import lombok.Getter; import lombok.Setter; import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.snapshot.LogicalRelSnapshot; import org.polypheny.db.type.PolyType; @@ -85,11 +86,11 @@ public StatisticColumn( long schemaId, long tableId, long columnId, PolyType typ this.type = type; this.columnType = columnType; - Catalog catalog = Catalog.getInstance(); - if ( catalog.getLogicalEntity( tableId ) != null ) { - this.schema = catalog.getNamespace( schemaId ).name; - this.table = catalog.getLogicalRel( schemaId ).getTable( tableId ).name; - this.column = catalog.getLogicalRel( schemaId ).getColumn( columnId ).name; + LogicalRelSnapshot snapshot = Catalog.getInstance().getRelSnapshot( schemaId ); + if ( snapshot.getLogicalTable( tableId ) != null ) { + this.schema = snapshot.getNamespace( schemaId ).name; + this.table = snapshot.getTable( tableId ).name; + this.column = snapshot.getColumn( columnId ).name; } this.qualifiedColumnName = String.format( "%s.%s.%s", this.schema, this.table, this.column ); } diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticQueryProcessor.java b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticQueryProcessor.java index 9cd702bc72..ddfff66a06 100644 --- a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticQueryProcessor.java +++ b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticQueryProcessor.java @@ -34,6 +34,7 @@ import org.polypheny.db.catalog.exceptions.UnknownUserException; import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.logistic.NamespaceType; +import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.config.RuntimeConfig; import org.polypheny.db.iface.Authenticator; import org.polypheny.db.transaction.Statement; @@ -86,13 +87,13 @@ public StatisticQueryResult selectOneColumnStat( AlgNode node, Transaction trans * Method to get all schemas, 
tables, and their columns in a database */ public List> getSchemaTree() { - Catalog catalog = Catalog.getInstance(); + Snapshot snapshot = Catalog.getInstance().getSnapshot(); List> result = new ArrayList<>(); List schemaTree = new ArrayList<>(); - List schemas = catalog.getNamespaces( null ); + List schemas = snapshot.getNamespaces( null ); for ( LogicalNamespace schema : schemas ) { List tables = new ArrayList<>(); - List childTables = catalog.getLogicalRel( schema.id ).getTables( null ); + List childTables = snapshot.getRelSnapshot( schema.id ).getTables( null ); for ( LogicalTable childTable : childTables ) { List table = new ArrayList<>(); for ( LogicalColumn logicalColumn : childTable.columns ) { @@ -115,11 +116,11 @@ public List> getSchemaTree() { * @return all the columns */ public List getAllColumns() { - Catalog catalog = Catalog.getInstance(); - return catalog.getNamespaces( null ) + Snapshot snapshot = Catalog.getInstance().getSnapshot(); + return snapshot.getNamespaces( null ) .stream() .filter( n -> n.namespaceType == NamespaceType.RELATIONAL ) - .flatMap( n -> catalog.getLogicalRel( n.id ).getTables( null ).stream().filter( t -> t.entityType != EntityType.VIEW ).flatMap( t -> t.columns.stream() ) ) + .flatMap( n -> snapshot.getRelSnapshot( n.id ).getTables( null ).stream().filter( t -> t.entityType != EntityType.VIEW ).flatMap( t -> t.columns.stream() ) ) .map( QueryResult::fromCatalogColumn ) .collect( Collectors.toList() ); } @@ -131,9 +132,9 @@ public List getAllColumns() { * @return all the tables ids */ public List getAllTable() { - Catalog catalog = Catalog.getInstance(); - return catalog.getNamespaces( null ).stream().filter( n -> n.namespaceType == NamespaceType.RELATIONAL ) - .flatMap( n -> catalog.getLogicalRel( n.id ).getTables( null ).stream().filter( t -> t.entityType != EntityType.VIEW ) ).collect( Collectors.toList() ); + Snapshot snapshot = Catalog.getInstance().getSnapshot(); + return snapshot.getNamespaces( null ).stream().filter( n -> n.namespaceType == NamespaceType.RELATIONAL ) + .flatMap( n -> snapshot.getRelSnapshot( n.id ).getTables( null ).stream().filter( t -> t.entityType != EntityType.VIEW ) ).collect( Collectors.toList() ); } @@ -143,8 +144,8 @@ public List getAllTable() { * @return all columns */ public List getAllColumns( Long tableId ) { - Catalog catalog = Catalog.getInstance(); - return catalog.getNamespaces( null ).stream().flatMap( n -> catalog.getLogicalRel( n.id ).getTable( tableId ).columns.stream() ).map( QueryResult::fromCatalogColumn ).collect( Collectors.toList() ); + Snapshot snapshot = Catalog.getInstance().getSnapshot(); + return snapshot.getNamespaces( null ).stream().flatMap( n -> snapshot.getRelSnapshot( n.id ).getTable( tableId ).columns.stream() ).map( QueryResult::fromCatalogColumn ).collect( Collectors.toList() ); } diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticsManagerImpl.java b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticsManagerImpl.java index dc6644e09f..ce1deed7e5 100644 --- a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticsManagerImpl.java +++ b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticsManagerImpl.java @@ -59,9 +59,6 @@ import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalEntity; import org.polypheny.db.catalog.entity.logical.LogicalTable; -import org.polypheny.db.catalog.exceptions.GenericCatalogException; -import 
org.polypheny.db.catalog.exceptions.UnknownSchemaException; -import org.polypheny.db.catalog.exceptions.UnknownUserException; import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.config.Config; @@ -194,7 +191,7 @@ public void updateSchemaName( LogicalNamespace logicalNamespace, String newName private Transaction getTransaction() { Transaction transaction; - transaction = statisticQueryInterface.getTransactionManager().startTransaction( Catalog.getInstance().getUser( Catalog.defaultUserId ), Catalog.getInstance().getNamespace( 0 ), false, "Statistic Manager" ); + transaction = statisticQueryInterface.getTransactionManager().startTransaction( Catalog.getInstance().getSnapshot().getUser( Catalog.defaultUserId ), Catalog.getInstance().getSnapshot().getNamespace( 0 ), false, "Statistic Manager" ); return transaction; } @@ -238,7 +235,7 @@ public void restart( Config c ) { private void resetAllIsFull() { this.statisticSchemaMap.values().forEach( s -> s.values().forEach( t -> t.values().forEach( c -> { - assignUnique( c, this.prepareNode( QueryResult.fromCatalogColumn( Catalog.getInstance().getSnapshot( 0 ).getColumn( c.getColumnId() ) ), NodeType.UNIQUE_VALUE ) ); + assignUnique( c, this.prepareNode( QueryResult.fromCatalogColumn( Catalog.getInstance().getSnapshot().getRelSnapshot( c.getSchemaId() ).getColumn( c.getColumnId() ) ), NodeType.UNIQUE_VALUE ) ); } ) ) ); } @@ -554,7 +551,7 @@ private AlgNode getQueryNode( QueryResult queryResult, NodeType nodeType ) { * Gets a tableScan for a given table. */ private LogicalRelScan getLogicalScan( long tableId, Snapshot snapshot, AlgOptCluster cluster ) { - return LogicalRelScan.create( cluster, snapshot.getLogicalTable( tableId ) ); + return LogicalRelScan.create( cluster, snapshot.getEntity( tableId ) ); } @@ -1189,7 +1186,7 @@ public > Object getTableStatistic( long schemaId, long t } else if ( v.getType().getFamily() == PolyTypeFamily.CHARACTER ) { alphabeticInfo.add( (AlphabeticStatisticColumn) v ); statisticTable.setAlphabeticColumn( alphabeticInfo ); - } else if ( PolyType.DATETIME_TYPES.contains( Catalog.getInstance().getSnapshot( 0 ).getColumn( k ).type ) ) { + } else if ( PolyType.DATETIME_TYPES.contains( Catalog.getInstance().getSnapshot().getRelSnapshot( schemaId ).getColumn( k ).type ) ) { temporalInfo.add( (TemporalStatisticColumn) v ); statisticTable.setTemporalColumn( temporalInfo ); } diff --git a/plugins/avatica-interface/src/main/java/org/polypheny/db/avatica/DbmsMeta.java b/plugins/avatica-interface/src/main/java/org/polypheny/db/avatica/DbmsMeta.java index 3d73a3f26c..5523d906cf 100644 --- a/plugins/avatica-interface/src/main/java/org/polypheny/db/avatica/DbmsMeta.java +++ b/plugins/avatica-interface/src/main/java/org/polypheny/db/avatica/DbmsMeta.java @@ -303,12 +303,12 @@ private List getLogicalTables( Pat schemaPattern, Pat tablePattern @NotNull private List getLogicalTables( Pattern schemaPattern, Pattern tablePattern ) { - List namespaces = catalog.getNamespaces( schemaPattern ); + List namespaces = catalog.getSnapshot().getNamespaces( schemaPattern ); return namespaces .stream() .flatMap( - n -> catalog.getLogicalRel( n.id ).getTables( tablePattern ).stream() ).collect( Collectors.toList() ); + n -> catalog.getRelSnapshot( n.id ).getTables( tablePattern ).stream() ).collect( Collectors.toList() ); } @@ -319,7 +319,7 @@ public MetaResultSet getColumns( final ConnectionHandle ch, final String databas if ( log.isTraceEnabled() ) { log.trace( "getColumns( 
ConnectionHandle {}, String {}, Pat {}, Pat {}, Pat {} )", ch, database, schemaPattern, tablePattern, columnPattern ); } - final List columns = getLogicalTables( schemaPattern, tablePattern ).stream().flatMap( t -> catalog.getLogicalRel( t.namespaceId ).getColumns( + final List columns = getLogicalTables( schemaPattern, tablePattern ).stream().flatMap( t -> catalog.getRelSnapshot( t.namespaceId ).getColumns( (tablePattern == null || tablePattern.s == null) ? null : new Pattern( tablePattern.s ), (columnPattern == null || columnPattern.s == null) ? null : new Pattern( columnPattern.s ) ).stream() ).collect( Collectors.toList() ); @@ -362,7 +362,7 @@ public MetaResultSet getSchemas( final ConnectionHandle ch, final String databas if ( log.isTraceEnabled() ) { log.trace( "getNamespaces( ConnectionHandle {}, String {}, Pat {} )", ch, database, schemaPattern ); } - final List schemas = catalog.getNamespaces( + final List schemas = catalog.getSnapshot().getNamespaces( (schemaPattern == null || schemaPattern.s == null) ? null : new Pattern( schemaPattern.s ) ); StatementHandle statementHandle = createStatement( ch ); @@ -532,7 +532,7 @@ public MetaResultSet getPrimaryKeys( final ConnectionHandle ch, final String dat List primaryKeyColumns = new LinkedList<>(); for ( LogicalTable catalogTable : catalogEntities ) { if ( catalogTable.primaryKey != null ) { - final CatalogPrimaryKey primaryKey = catalog.getLogicalRel( catalogTable.namespaceId ).getPrimaryKey( catalogTable.primaryKey ); + final CatalogPrimaryKey primaryKey = catalog.getRelSnapshot( catalogTable.namespaceId ).getPrimaryKey( catalogTable.primaryKey ); primaryKeyColumns.addAll( primaryKey.getCatalogPrimaryKeyColumns() ); } } @@ -568,7 +568,7 @@ public MetaResultSet getImportedKeys( final ConnectionHandle ch, final String da final List catalogEntities = getLogicalTables( schemaPattern, tablePattern ); List foreignKeyColumns = new LinkedList<>(); for ( LogicalTable catalogTable : catalogEntities ) { - List importedKeys = catalog.getLogicalRel( catalogTable.namespaceId ).getForeignKeys( catalogTable.id ); + List importedKeys = catalog.getRelSnapshot( catalogTable.namespaceId ).getForeignKeys( catalogTable.id ); importedKeys.forEach( catalogForeignKey -> foreignKeyColumns.addAll( catalogForeignKey.getCatalogForeignKeyColumns() ) ); } StatementHandle statementHandle = createStatement( ch ); @@ -611,7 +611,7 @@ public MetaResultSet getExportedKeys( final ConnectionHandle ch, final String da final List catalogEntities = getLogicalTables( schemaPattern, tablePattern ); List foreignKeyColumns = new LinkedList<>(); for ( LogicalTable catalogTable : catalogEntities ) { - List exportedKeys = catalog.getLogicalRel( catalogTable.namespaceId ).getExportedKeys( catalogTable.id ); + List exportedKeys = catalog.getRelSnapshot( catalogTable.namespaceId ).getExportedKeys( catalogTable.id ); exportedKeys.forEach( catalogForeignKey -> foreignKeyColumns.addAll( catalogForeignKey.getCatalogForeignKeyColumns() ) ); } StatementHandle statementHandle = createStatement( ch ); @@ -727,7 +727,7 @@ public MetaResultSet getIndexInfo( final ConnectionHandle ch, final String datab final List catalogEntities = getLogicalTables( schemaPattern, tablePattern ); List catalogIndexColumns = new LinkedList<>(); for ( LogicalTable catalogTable : catalogEntities ) { - List catalogIndexInfos = catalog.getLogicalRel( catalogTable.namespaceId ).getIndexes( catalogTable.id, unique ); + List catalogIndexInfos = catalog.getRelSnapshot( catalogTable.namespaceId ).getIndexes( 
catalogTable.id, unique ); catalogIndexInfos.forEach( info -> catalogIndexColumns.addAll( info.getCatalogIndexColumns() ) ); } StatementHandle statementHandle = createStatement( ch ); @@ -1420,8 +1420,7 @@ public void openConnection( final ConnectionHandle ch, final Map // Authorizer.hasAccess( user, database ); // Check schema access - final LogicalNamespace schema; - schema = catalog.getNamespace( defaultSchemaName ); + final LogicalNamespace schema = catalog.getSnapshot().getNamespace( defaultSchemaName ); assert schema != null; // Authorizer.hasAccess( user, schema ); diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java index ad9f8ed239..f631f6ff58 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java @@ -636,7 +636,7 @@ public void clear() { @Override - public Snapshot getSnapshot( long namespaceId ) { + public Snapshot getSnapshot() { return new FullSnapshot( idBuilder.getNewSnapshotId(), logicalCatalogs.get( namespaceId ), allocationCatalogs, physicalCatalogs.get( namespaceId ) ); } diff --git a/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/RequestParser.java b/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/RequestParser.java index 2da5d927c3..812bb4f787 100644 --- a/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/RequestParser.java +++ b/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/RequestParser.java @@ -362,7 +362,7 @@ List generateRequestColumnsWithProject( String projectionString, Set notYetAdded = new HashSet<>( validColumns ); notYetAdded.removeAll( projectedColumns ); for ( long columnId : notYetAdded ) { - LogicalColumn column = this.catalog.getSnapshot( 0 ).getColumn( columnId ); + LogicalColumn column = this.catalog.getSnapshot().getColumn( columnId ); int calculatedPosition = tableOffsets.get( column.tableId ) + column.position - 1; RequestColumn requestColumn = new RequestColumn( column, calculatedPosition, calculatedPosition, null, null, false ); columns.add( requestColumn ); diff --git a/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/Rest.java b/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/Rest.java index 9baf59aa1f..74322d22a7 100644 --- a/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/Rest.java +++ b/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/Rest.java @@ -179,7 +179,7 @@ String processPatchResource( final ResourcePatchRequest resourcePatchRequest, fi // Table Modify AlgOptPlanner planner = statement.getQueryProcessor().getPlanner(); - AlgOptCluster cluster = AlgOptCluster.create( planner, rexBuilder, null, Catalog.getInstance().getSnapshot( 0 ) ); + AlgOptCluster cluster = AlgOptCluster.create( planner, rexBuilder, null, Catalog.getInstance().getSnapshot() ); // Values AlgDataType tableRowType = table.getRowType(); @@ -236,7 +236,7 @@ String processDeleteResource( final ResourceDeleteRequest resourceDeleteRequest, // Table Modify AlgOptPlanner planner = statement.getQueryProcessor().getPlanner(); - AlgOptCluster cluster = AlgOptCluster.create( planner, rexBuilder, null, Catalog.getInstance().getSnapshot( 0 ) ); + AlgOptCluster cluster = AlgOptCluster.create( planner, rexBuilder, null, Catalog.getInstance().getSnapshot() ); AlgNode algNode = algBuilder.build(); RelModify modify = new LogicalRelModify( @@ -293,7 
+293,7 @@ String processPostResource( final ResourcePostRequest insertValueRequest, final // List valueColumnNames = this.valuesColumnNames( updateResourceRequest.values ); AlgOptPlanner planner = statement.getQueryProcessor().getPlanner(); - AlgOptCluster cluster = AlgOptCluster.create( planner, rexBuilder, null, Catalog.getInstance().getSnapshot( 0 ) ); + AlgOptCluster cluster = AlgOptCluster.create( planner, rexBuilder, null, Catalog.getInstance().getSnapshot() ); List valueColumnNames = this.valuesColumnNames( insertValueRequest.values ); List rexValues = this.valuesNode( statement, algBuilder, rexBuilder, insertValueRequest, tableRows, inputStreams ).get( 0 ); diff --git a/webui/src/main/java/org/polypheny/db/webui/Crud.java b/webui/src/main/java/org/polypheny/db/webui/Crud.java index d3591e7850..bd5cbca23a 100644 --- a/webui/src/main/java/org/polypheny/db/webui/Crud.java +++ b/webui/src/main/java/org/polypheny/db/webui/Crud.java @@ -302,7 +302,7 @@ Result getTable( final UIRequest request ) { // determine if it is a view or a table LogicalTable catalogTable; try { - catalogTable = catalog.getLogicalRel( catalog.getNamespace( t[0] ).id ).getTable( t[1] ); + catalogTable = catalog.getRelSnapshot( catalog.getSnapshot().getNamespace( t[0] ).id ).getTable( t[1] ); result.setNamespaceType( catalogTable.getNamespaceType() ); if ( catalogTable.modifiable ) { result.setType( ResultType.TABLE ); @@ -318,12 +318,12 @@ Result getTable( final UIRequest request ) { ArrayList cols = new ArrayList<>(); ArrayList primaryColumns; if ( catalogTable.primaryKey != null ) { - CatalogPrimaryKey primaryKey = catalog.getLogicalRel( catalogTable.namespaceId ).getPrimaryKey( catalogTable.primaryKey ); + CatalogPrimaryKey primaryKey = catalog.getRelSnapshot( catalogTable.namespaceId ).getPrimaryKey( catalogTable.primaryKey ); primaryColumns = new ArrayList<>( primaryKey.getColumnNames() ); } else { primaryColumns = new ArrayList<>(); } - for ( LogicalColumn logicalColumn : catalog.getLogicalRel( catalogTable.namespaceId ).getColumns( catalogTable.id ) ) { + for ( LogicalColumn logicalColumn : catalog.getRelSnapshot( catalogTable.namespaceId ).getColumns( catalogTable.id ) ) { String defaultValue = logicalColumn.defaultValue == null ? null : logicalColumn.defaultValue.value; String collectionsType = logicalColumn.collectionsType == null ? "" : logicalColumn.collectionsType.getName(); cols.add( @@ -374,7 +374,7 @@ void getSchemaTree( final Context ctx ) { ctx.json( new ArrayList<>() ); } - List schemas = catalog.getNamespaces( null ); + List schemas = catalog.getSnapshot().getNamespaces( null ); // remove unwanted namespaces schemas = schemas.stream().filter( s -> request.dataModels.contains( s.namespaceType ) ).collect( Collectors.toList() ); for ( LogicalNamespace schema : schemas ) { @@ -384,7 +384,7 @@ void getSchemaTree( final Context ctx ) { ArrayList tableTree = new ArrayList<>(); ArrayList viewTree = new ArrayList<>(); ArrayList collectionTree = new ArrayList<>(); - List tables = catalog.getLogicalRel( schema.id ).getTables( null ); + List tables = catalog.getRelSnapshot( schema.id ).getTables( null ); for ( LogicalTable table : tables ) { String icon = "fa fa-table"; if ( table.entityType == EntityType.SOURCE ) { @@ -398,7 +398,7 @@ void getSchemaTree( final Context ctx ) { SidebarElement tableElement = new SidebarElement( schema.name + "." 
+ table.name, table.name, schema.namespaceType, request.routerLinkRoot, icon ); if ( request.depth > 2 ) { - List columns = catalog.getLogicalRel( table.namespaceId ).getColumns( table.id ); + List columns = catalog.getRelSnapshot( table.namespaceId ).getColumns( table.id ); for ( LogicalColumn column : columns ) { tableElement.addChild( new SidebarElement( schema.name + "." + table.name + "." + column.name, column.name, schema.namespaceType, request.routerLinkRoot, icon ).setCssClass( "sidebarColumn" ) ); } @@ -458,7 +458,7 @@ void getTables( final Context ctx ) { if ( request.schema != null ) { requestedSchema = request.schema; } else { - requestedSchema = catalog.getNamespace( schemaId ).name; + requestedSchema = catalog.getSnapshot().getNamespace( schemaId ).name; } try { @@ -471,7 +471,7 @@ void getTables( final Context ctx ) { } } - List tables = catalog.getLogicalRel( schemaId ).getTables( null ); + List tables = catalog.getRelSnapshot( schemaId ).getTables( null ); ArrayList result = new ArrayList<>(); for ( LogicalTable t : tables ) { result.add( new DbTable( t.name, t.getNamespaceName(), t.modifiable, t.entityType ) ); @@ -657,7 +657,7 @@ void insertRow( final Context ctx ) { StringJoiner columns = new StringJoiner( ",", "(", ")" ); StringJoiner values = new StringJoiner( ",", "(", ")" ); - List logicalColumns = catalog.getLogicalRel( catalog.getLogicalEntity( tableId ).namespaceId ).getColumns( new org.polypheny.db.catalog.logistic.Pattern( split[1] ), null ); + List logicalColumns = catalog.getRelSnapshot( catalog.getLogicalEntity( tableId ).namespaceId ).getColumns( new org.polypheny.db.catalog.logistic.Pattern( split[1] ), null ); try { int i = 0; for ( LogicalColumn logicalColumn : logicalColumns ) { @@ -949,9 +949,9 @@ private String computeWherePK( final String tableName, final String columnName, Map catalogColumns = getCatalogColumns( tableName, columnName ); LogicalTable catalogTable; catalogTable = catalog.getLogicalEntity( tableName ).unwrap( LogicalTable.class ); - CatalogPrimaryKey pk = catalog.getLogicalRel( catalogTable.namespaceId ).getPrimaryKey( catalogTable.primaryKey ); + CatalogPrimaryKey pk = catalog.getRelSnapshot( catalogTable.namespaceId ).getPrimaryKey( catalogTable.primaryKey ); for ( long colId : pk.columnIds ) { - String colName = catalog.getLogicalRel( catalogTable.namespaceId ).getColumn( colId ).name; + String colName = catalog.getRelSnapshot( catalogTable.namespaceId ).getColumn( colId ).name; String condition; if ( filter.containsKey( colName ) ) { String val = filter.get( colName ); @@ -1021,9 +1021,9 @@ void updateRow( final Context ctx ) throws ServletException, IOException { Statement statement = transaction.createStatement(); StringJoiner setStatements = new StringJoiner( ",", "", "" ); - LogicalNamespace namespace = catalog.getNamespace( split[0] ); + LogicalNamespace namespace = catalog.getSnapshot().getNamespace( split[0] ); - List logicalColumns = catalog.getLogicalRel( namespace.id ).getColumns( new org.polypheny.db.catalog.logistic.Pattern( split[1] ), null ); + List logicalColumns = catalog.getRelSnapshot( namespace.id ).getColumns( new org.polypheny.db.catalog.logistic.Pattern( split[1] ), null ); int i = 0; for ( LogicalColumn logicalColumn : logicalColumns ) { @@ -1120,16 +1120,16 @@ void getColumns( final Context ctx ) { ArrayList cols = new ArrayList<>(); try { - LogicalNamespace namespace = catalog.getNamespace( t[0] ); - LogicalTable catalogTable = catalog.getLogicalRel( namespace.id ).getTable( t[1] ); + LogicalNamespace 
namespace = catalog.getSnapshot().getNamespace( t[0] ); + LogicalTable catalogTable = catalog.getRelSnapshot( namespace.id ).getTable( t[1] ); ArrayList primaryColumns; if ( catalogTable.primaryKey != null ) { - CatalogPrimaryKey primaryKey = catalog.getLogicalRel( namespace.id ).getPrimaryKey( catalogTable.primaryKey ); + CatalogPrimaryKey primaryKey = catalog.getRelSnapshot( namespace.id ).getPrimaryKey( catalogTable.primaryKey ); primaryColumns = new ArrayList<>( primaryKey.getColumnNames() ); } else { primaryColumns = new ArrayList<>(); } - for ( LogicalColumn logicalColumn : catalog.getLogicalRel( namespace.id ).getColumns( catalogTable.id ) ) { + for ( LogicalColumn logicalColumn : catalog.getRelSnapshot( namespace.id ).getColumns( catalogTable.id ) ) { String defaultValue = logicalColumn.defaultValue == null ? null : logicalColumn.defaultValue.value; String collectionsType = logicalColumn.collectionsType == null ? "" : logicalColumn.collectionsType.getName(); cols.add( @@ -1166,8 +1166,8 @@ void getColumns( final Context ctx ) { void getDataSourceColumns( final Context ctx ) throws UnknownTableException, UnknownSchemaException { UIRequest request = ctx.bodyAsClass( UIRequest.class ); - LogicalNamespace namespace = catalog.getNamespace( request.getSchemaName() ); - LogicalTable catalogTable = catalog.getLogicalRel( namespace.id ).getTable( request.getTableName() ); + LogicalNamespace namespace = catalog.getSnapshot().getNamespace( request.getSchemaName() ); + LogicalTable catalogTable = catalog.getRelSnapshot( namespace.id ).getTable( request.getTableName() ); if ( catalogTable.entityType == EntityType.VIEW ) { ImmutableMap> underlyingTable = ((CatalogView) catalogTable).getUnderlyingTables(); @@ -1196,11 +1196,11 @@ void getDataSourceColumns( final Context ctx ) throws UnknownTableException, Unk } long adapterId = allocs.get( 0 ).adapterId; - CatalogPrimaryKey primaryKey = catalog.getLogicalRel( catalogTable.namespaceId ).getPrimaryKey( catalogTable.primaryKey ); + CatalogPrimaryKey primaryKey = catalog.getRelSnapshot( catalogTable.namespaceId ).getPrimaryKey( catalogTable.primaryKey ); List pkColumnNames = primaryKey.getColumnNames(); List columns = new ArrayList<>(); for ( CatalogColumnPlacement ccp : catalog.getAllocRel( catalogTable.namespaceId ).getColumnPlacementsOnAdapterPerTable( adapterId, catalogTable.id ) ) { - LogicalColumn col = catalog.getLogicalRel( namespace.id ).getColumn( ccp.columnId ); + LogicalColumn col = catalog.getRelSnapshot( namespace.id ).getColumn( ccp.columnId ); columns.add( new DbColumn( col.name, col.type.getName(), @@ -1225,8 +1225,8 @@ void getDataSourceColumns( final Context ctx ) throws UnknownTableException, Unk void getAvailableSourceColumns( final Context ctx ) throws UnknownTableException { UIRequest request = ctx.bodyAsClass( UIRequest.class ); - LogicalNamespace namespace = catalog.getNamespace( request.getSchemaName() ); - LogicalTable table = catalog.getLogicalRel( namespace.id ).getTable( request.getTableName() ); + LogicalNamespace namespace = catalog.getSnapshot().getNamespace( request.getSchemaName() ); + LogicalTable table = catalog.getRelSnapshot( namespace.id ).getTable( request.getTableName() ); ImmutableMap> placements = catalog.getAllocRel( table.namespaceId ).getColumnPlacementsByAdapter( table.id ); Set adapterIds = placements.keySet(); if ( adapterIds.size() > 1 ) { @@ -1298,9 +1298,9 @@ void getMaterializedInfo( final Context ctx ) throws UnknownTableException, Unkn private LogicalTable getLogicalTable( String schema, String 
table ) throws UnknownTableException { - LogicalNamespace namespace = catalog.getNamespace( schema ); + LogicalNamespace namespace = catalog.getSnapshot().getNamespace( schema ); - return catalog.getLogicalRel( namespace.id ).getTable( table ); + return catalog.getRelSnapshot( namespace.id ).getTable( table ); } @@ -1604,7 +1604,7 @@ void getConstraints( final Context ctx ) { // get primary key if ( catalogTable.primaryKey != null ) { - CatalogPrimaryKey primaryKey = catalog.getLogicalRel( catalogTable.namespaceId ).getPrimaryKey( catalogTable.primaryKey ); + CatalogPrimaryKey primaryKey = catalog.getRelSnapshot( catalogTable.namespaceId ).getPrimaryKey( catalogTable.primaryKey ); for ( String columnName : primaryKey.getColumnNames() ) { if ( !temp.containsKey( "" ) ) { temp.put( "", new ArrayList<>() ); @@ -1618,7 +1618,7 @@ void getConstraints( final Context ctx ) { // get unique constraints. temp.clear(); - List constraints = catalog.getLogicalRel( catalogTable.namespaceId ).getConstraints( catalogTable.id ); + List constraints = catalog.getRelSnapshot( catalogTable.namespaceId ).getConstraints( catalogTable.id ); for ( CatalogConstraint catalogConstraint : constraints ) { if ( catalogConstraint.type == ConstraintType.UNIQUE ) { temp.put( catalogConstraint.name, new ArrayList<>( catalogConstraint.key.getColumnNames() ) ); @@ -1759,7 +1759,7 @@ void getIndexes( final Context ctx ) { Result result; try { LogicalTable catalogTable = getLogicalTable( request.schema, request.table ); - List catalogIndexes = catalog.getLogicalRel( catalogTable.id ).getIndexes( catalogTable.id, false ); + List catalogIndexes = catalog.getRelSnapshot( catalogTable.id ).getIndexes( catalogTable.id, false ); DbColumn[] header = { new DbColumn( "Name" ), @@ -1778,7 +1778,7 @@ void getIndexes( final Context ctx ) { // a polystore index storeUniqueName = "Polypheny-DB"; } else { - storeUniqueName = catalog.getAdapter( catalogIndex.location ).uniqueName; + storeUniqueName = catalog.getSnapshot().getAdapter( catalogIndex.location ).uniqueName; } arr[0] = catalogIndex.name; arr[1] = String.join( ", ", catalogIndex.key.getColumnNames() ); @@ -1894,9 +1894,9 @@ void getUnderlyingTable( final Context ctx ) throws UnknownTableException { for ( Entry> entry : underlyingTableOriginal.entrySet() ) { List columns = new ArrayList<>(); for ( Long ids : entry.getValue() ) { - columns.add( catalog.getLogicalRel( catalogTable.namespaceId ).getColumn( ids ).name ); + columns.add( catalog.getRelSnapshot( catalogTable.namespaceId ).getColumn( ids ).name ); } - underlyingTable.put( catalog.getLogicalRel( catalogTable.namespaceId ).getTable( entry.getKey() ).name, columns ); + underlyingTable.put( catalog.getRelSnapshot( catalogTable.namespaceId ).getTable( entry.getKey() ).name, columns ); } ctx.json( new UnderlyingTables( underlyingTable ) ); } else { @@ -1925,8 +1925,8 @@ private Placement getPlacements( final Index index ) { return p; } else { long pkid = table.primaryKey; - List pkColumnIds = Catalog.getInstance().getLogicalRel( table.namespaceId ).getPrimaryKey( pkid ).columnIds; - LogicalColumn pkColumn = Catalog.getInstance().getLogicalRel( table.namespaceId ).getColumn( pkColumnIds.get( 0 ) ); + List pkColumnIds = Catalog.getInstance().getRelSnapshot( table.namespaceId ).getPrimaryKey( pkid ).columnIds; + LogicalColumn pkColumn = Catalog.getInstance().getRelSnapshot( table.namespaceId ).getColumn( pkColumnIds.get( 0 ) ); List pkPlacements = catalog.getAllocRel( table.namespaceId ).getColumnPlacements( pkColumn.id ); for ( 
CatalogColumnPlacement placement : pkPlacements ) { Adapter adapter = AdapterManager.getInstance().getAdapter( placement.adapterId ); @@ -2059,9 +2059,9 @@ void getPartitionFunctionModel( final Context ctx ) throws UnknownColumnExceptio // Check whether the selected partition function supports the selected partition column LogicalColumn partitionColumn; - LogicalNamespace namespace = Catalog.getInstance().getNamespace( request.schemaName ); + LogicalNamespace namespace = Catalog.getInstance().getSnapshot().getNamespace( request.schemaName ); - partitionColumn = Catalog.getInstance().getLogicalRel( namespace.id ).getColumn( request.tableName, request.column ); + partitionColumn = Catalog.getInstance().getRelSnapshot( namespace.id ).getColumn( request.tableName, request.column ); if ( !partitionManager.supportsColumnOfType( partitionColumn.type ) ) { ctx.json( new PartitionFunctionModel( "The partition function " + request.method + " does not support columns of type " + partitionColumn.type ) ); @@ -2512,15 +2512,15 @@ void getUml( final Context ctx ) { ArrayList fKeys = new ArrayList<>(); ArrayList tables = new ArrayList<>(); - List catalogEntities = Catalog.getInstance().getNamespaces( new org.polypheny.db.catalog.logistic.Pattern( request.schema ) ) + List catalogEntities = Catalog.getInstance().getSnapshot().getNamespaces( new org.polypheny.db.catalog.logistic.Pattern( request.schema ) ) .stream() .filter( s -> s.namespaceType == NamespaceType.RELATIONAL ) - .flatMap( s -> catalog.getLogicalRel( s.id ).getTables( null ).stream() ).collect( Collectors.toList() ); + .flatMap( s -> catalog.getRelSnapshot( s.id ).getTables( null ).stream() ).collect( Collectors.toList() ); for ( LogicalTable catalogTable : catalogEntities ) { if ( catalogTable.entityType == EntityType.ENTITY || catalogTable.entityType == EntityType.SOURCE ) { // get foreign keys - List foreignKeys = catalog.getLogicalRel( catalogTable.namespaceId ).getForeignKeys( catalogTable.id ); + List foreignKeys = catalog.getRelSnapshot( catalogTable.namespaceId ).getForeignKeys( catalogTable.id ); for ( CatalogForeignKey catalogForeignKey : foreignKeys ) { for ( int i = 0; i < catalogForeignKey.getReferencedKeyColumnNames().size(); i++ ) { fKeys.add( ForeignKey.builder() @@ -2545,14 +2545,14 @@ void getUml( final Context ctx ) { // get primary key with its columns if ( catalogTable.primaryKey != null ) { - CatalogPrimaryKey catalogPrimaryKey = catalog.getLogicalRel( catalogTable.namespaceId ).getPrimaryKey( catalogTable.primaryKey ); + CatalogPrimaryKey catalogPrimaryKey = catalog.getRelSnapshot( catalogTable.namespaceId ).getPrimaryKey( catalogTable.primaryKey ); for ( String columnName : catalogPrimaryKey.getColumnNames() ) { table.addPrimaryKeyField( columnName ); } } // get unique constraints - List catalogConstraints = catalog.getLogicalRel( catalogTable.namespaceId ).getConstraints( catalogTable.id ); + List catalogConstraints = catalog.getRelSnapshot( catalogTable.namespaceId ).getConstraints( catalogTable.id ); for ( CatalogConstraint catalogConstraint : catalogConstraints ) { if ( catalogConstraint.type == ConstraintType.UNIQUE ) { // TODO: unique constraints can be over multiple columns. 
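Every Crud.java hunk above and below follows the same mechanical rewrite: reads that used to go through the live catalog accessors (getNamespace, getLogicalRel) now go through snapshot objects (getSnapshot().getNamespace, getRelSnapshot). A minimal sketch of the resulting read path, not taken from the patch itself; the namespace and table names are placeholders, and the imports follow the paths visible in the surrounding hunks:

import org.polypheny.db.catalog.Catalog;
import org.polypheny.db.catalog.entity.LogicalNamespace;
import org.polypheny.db.catalog.entity.logical.LogicalColumn;
import org.polypheny.db.catalog.entity.logical.LogicalTable;
import org.polypheny.db.catalog.exceptions.UnknownTableException;

class SnapshotReadSketch {

    // Resolve "namespace.table" the way the reworked Crud methods do:
    // every lookup is answered by a snapshot, not by the mutable catalog state.
    static void printColumnNames( String namespaceName, String tableName ) throws UnknownTableException {
        Catalog catalog = Catalog.getInstance();
        LogicalNamespace namespace = catalog.getSnapshot().getNamespace( namespaceName );
        LogicalTable table = catalog.getRelSnapshot( namespace.id ).getTable( tableName );
        for ( LogicalColumn column : catalog.getRelSnapshot( table.namespaceId ).getColumns( table.id ) ) {
            System.out.println( column.name );
        }
    }

}

getTable( String ) can still throw UnknownTableException, which is why the Crud callers above keep their existing try/catch blocks.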
@@ -2566,7 +2566,7 @@ void getUml( final Context ctx ) { } // get unique indexes - List catalogIndexes = catalog.getLogicalRel( catalogTable.namespaceId ).getIndexes( catalogTable.id, true ); + List catalogIndexes = catalog.getRelSnapshot( catalogTable.namespaceId ).getIndexes( catalogTable.id, true ); for ( CatalogIndex catalogIndex : catalogIndexes ) { // TODO: unique indexes can be over multiple columns. if ( catalogIndex.key.getColumnNames().size() == 1 && @@ -2898,7 +2898,7 @@ void schemaRequest( final Context ctx ) { // drop schema else if ( !schema.isCreate() && schema.isDrop() ) { if ( type == null ) { - List namespaces = catalog.getNamespaces( new org.polypheny.db.catalog.logistic.Pattern( schema.getName() ) ); + List namespaces = catalog.getSnapshot().getNamespaces( new org.polypheny.db.catalog.logistic.Pattern( schema.getName() ) ); assert namespaces.size() == 1; type = namespaces.get( 0 ).namespaceType; @@ -3196,9 +3196,9 @@ public static Result executeSqlSelect( final Statement statement, final UIReques if ( request.tableId != null ) { String[] t = request.tableId.split( "\\." ); try { - LogicalNamespace namespace = crud.catalog.getNamespace( t[0] ); + LogicalNamespace namespace = crud.catalog.getSnapshot().getNamespace( t[0] ); - catalogTable = crud.catalog.getLogicalRel( namespace.id ).getTable( t[1] ); + catalogTable = crud.catalog.getRelSnapshot( namespace.id ).getTable( t[1] ); entityType = catalogTable.entityType; } catch ( UnknownTableException e ) { log.error( "Caught exception", e ); @@ -3232,7 +3232,7 @@ public static Result executeSqlSelect( final Statement statement, final UIReques // Get column default values if ( catalogTable != null ) { try { - LogicalColumn logicalColumn = crud.catalog.getLogicalRel( catalogTable.namespaceId ).getColumn( catalogTable.id, columnName ); + LogicalColumn logicalColumn = crud.catalog.getRelSnapshot( catalogTable.namespaceId ).getColumn( catalogTable.id, columnName ); if ( logicalColumn != null ) { if ( logicalColumn.defaultValue != null ) { dbCol.defaultValue = logicalColumn.defaultValue.value; @@ -3606,7 +3606,7 @@ private Map getCatalogColumns( String schemaName, String Map dataTypes = new HashMap<>(); try { LogicalTable table = getLogicalTable( schemaName, tableName ); - List logicalColumns = catalog.getLogicalRel( table.namespaceId ).getColumns( table.id ); + List logicalColumns = catalog.getRelSnapshot( table.namespaceId ).getColumns( table.id ); for ( LogicalColumn logicalColumn : logicalColumns ) { dataTypes.put( logicalColumn.name, logicalColumn ); } @@ -3619,7 +3619,8 @@ private Map getCatalogColumns( String schemaName, String void getTypeSchemas( final Context ctx ) { ctx.json( catalog - .getNamespaces( null ) + .getSnapshot(). 
+ getNamespaces( null ) .stream() .collect( Collectors.toMap( LogicalNamespace::getName, LogicalNamespace::getNamespaceType ) ) ); } diff --git a/webui/src/main/java/org/polypheny/db/webui/crud/LanguageCrud.java b/webui/src/main/java/org/polypheny/db/webui/crud/LanguageCrud.java index b8fbaa4bd7..49d96a51ac 100644 --- a/webui/src/main/java/org/polypheny/db/webui/crud/LanguageCrud.java +++ b/webui/src/main/java/org/polypheny/db/webui/crud/LanguageCrud.java @@ -42,8 +42,6 @@ import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.exceptions.UnknownCollectionException; -import org.polypheny.db.catalog.exceptions.UnknownColumnException; -import org.polypheny.db.catalog.exceptions.UnknownSchemaException; import org.polypheny.db.catalog.exceptions.UnknownTableException; import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.logistic.NamespaceType; @@ -192,9 +190,9 @@ public static Result getResult( QueryLanguage language, Statement statement, Que LogicalTable catalogTable = null; if ( request.tableId != null ) { String[] t = request.tableId.split( "\\." ); - LogicalNamespace namespace = catalog.getNamespace( t[0] ); + LogicalNamespace namespace = catalog.getSnapshot().getNamespace( t[0] ); try { - catalogTable = catalog.getLogicalRel( namespace.id ).getTable( t[1] ); + catalogTable = catalog.getRelSnapshot( namespace.id ).getTable( t[1] ); } catch ( UnknownTableException e ) { log.error( "Caught exception", e ); } @@ -301,7 +299,7 @@ public void createCollection( final Context ctx ) { * as a query result */ public void getDocumentDatabases( final Context ctx ) { - Map names = Catalog.getInstance() + Map names = Catalog.getInstance().getSnapshot() .getNamespaces( null ) .stream() .collect( Collectors.toMap( LogicalNamespace::getName, s -> s.namespaceType.name() ) ); @@ -320,11 +318,11 @@ public void getGraphPlacements( final Context ctx ) { private Placement getPlacements( final Index index ) { Catalog catalog = Catalog.getInstance(); String graphName = index.getSchema(); - List namespaces = catalog.getNamespaces( new Pattern( graphName ) ); + List namespaces = catalog.getSnapshot().getNamespaces( new Pattern( graphName ) ); if ( namespaces.size() != 1 ) { throw new RuntimeException(); } - List graphs = catalog.getLogicalGraph( namespaces.get( 0 ).id ).getGraphs( new Pattern( graphName ) ); + List graphs = catalog.getGraphSnapshot( namespaces.get( 0 ).id ).getGraphs( new Pattern( graphName ) ); if ( graphs.size() != 1 ) { log.error( "The requested graph does not exist." ); return new Placement( new RuntimeException( "The requested graph does not exist." 
) ); @@ -369,8 +367,8 @@ public void getCollectionPlacements( Context context ) { String collectionName = index.getTable(); Catalog catalog = Catalog.getInstance(); long namespaceId; - namespaceId = catalog.getNamespace( namespace ).id; - List collections = catalog.getLogicalDoc( namespaceId ).getCollections( new Pattern( collectionName ) ); + namespaceId = catalog.getSnapshot().getNamespace( namespace ).id; + List collections = catalog.getDocSnapshot( namespaceId ).getCollections( new Pattern( collectionName ) ); if ( collections.size() != 1 ) { context.json( new Placement( new UnknownCollectionException( 0 ) ) ); diff --git a/webui/src/main/java/org/polypheny/db/webui/models/requests/BatchUpdateRequest.java b/webui/src/main/java/org/polypheny/db/webui/models/requests/BatchUpdateRequest.java index 7f9e689b3a..56570a990a 100644 --- a/webui/src/main/java/org/polypheny/db/webui/models/requests/BatchUpdateRequest.java +++ b/webui/src/main/java/org/polypheny/db/webui/models/requests/BatchUpdateRequest.java @@ -34,7 +34,6 @@ import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.exceptions.UnknownColumnException; -import org.polypheny.db.catalog.exceptions.UnknownSchemaException; import org.polypheny.db.catalog.exceptions.UnknownTableException; import org.polypheny.db.transaction.Statement; import org.polypheny.db.type.PolyTypeFamily; @@ -66,9 +65,9 @@ public String getQuery( String tableId, Statement statement, HttpServletRequest String[] split = tableId.split( "\\." ); LogicalColumn logicalColumn; try { - LogicalNamespace namespace = catalog.getNamespace( split[0] ); - LogicalTable table = catalog.getLogicalRel( namespace.id ).getTable( split[1] ); - logicalColumn = catalog.getLogicalRel( table.namespaceId ).getColumn( table.id, entry.getKey() ); + LogicalNamespace namespace = catalog.getSnapshot().getNamespace( split[0] ); + LogicalTable table = catalog.getRelSnapshot( namespace.id ).getTable( split[1] ); + logicalColumn = catalog.getRelSnapshot( table.namespaceId ).getColumn( table.id, entry.getKey() ); } catch ( UnknownColumnException | UnknownTableException e ) { log.error( "Could not determine column type", e ); return null; From e4cde75c810a4c4bc8547b298c658f8dac2e72a9 Mon Sep 17 00:00:00 2001 From: datomo Date: Thu, 9 Mar 2023 15:59:17 +0100 Subject: [PATCH 042/436] added boilerplate for specific snapshots --- .../polypheny/db/adapter/AdapterManager.java | 2 +- .../common/LogicalConstraintEnforcer.java | 4 +- .../org/polypheny/db/catalog/Catalog.java | 16 - .../catalogs/AllocationRelationalCatalog.java | 318 ---------------- .../db/catalog/catalogs/LogicalCatalog.java | 2 + .../catalogs/LogicalDocumentCatalog.java | 4 + .../catalog/catalogs/LogicalGraphCatalog.java | 5 +- .../catalogs/LogicalRelationalCatalog.java | 17 + .../entity/physical/PhysicalTable.java | 2 +- .../db/catalog/snapshot/AllocSnapshot.java | 333 +++++++++++++++++ .../catalog/snapshot/LogicalRelSnapshot.java | 16 +- .../db/catalog/snapshot/PhysicalSnapshot.java | 4 + .../snapshot/impl/AllocSnapshotImpl.java | 293 +++++++++++++++ .../snapshot/impl/LogicalDocSnapshotImpl.java | 67 ++++ .../impl/LogicalGraphSnapshotImpl.java | 66 ++++ .../snapshot/impl/LogicalRelSnapshotImpl.java | 337 +++++++++++++++++ .../snapshot/impl/PhysicalSnapshotImpl.java | 75 ++++ .../snapshot/impl/SnapshotBuilder.java | 83 +++++ .../catalog/snapshot/impl/SnapshotImpl.java | 253 +++++++++++++ .../db/docker/MockCatalogDocker.java | 11 - 
.../org/polypheny/db/ddl/DdlManagerImpl.java | 344 +++++++++--------- .../partition/AbstractPartitionManager.java | 10 +- .../db/partition/FrequencyMapImpl.java | 20 +- .../db/partition/ListPartitionManager.java | 4 +- .../db/partition/RangePartitionManager.java | 2 +- .../db/processing/AbstractQueryProcessor.java | 4 +- .../db/processing/AuthenticatorImpl.java | 2 +- .../processing/ConstraintEnforceAttacher.java | 27 +- .../db/processing/DataMigratorImpl.java | 62 ++-- .../db/routing/UiRoutingPageUtil.java | 17 +- .../db/routing/routers/AbstractDqlRouter.java | 3 +- .../db/routing/routers/BaseRouter.java | 40 +- .../db/routing/routers/CachedPlanRouter.java | 2 +- .../db/routing/routers/DmlRouterImpl.java | 73 ++-- .../routers/FullPlacementQueryRouter.java | 4 +- .../CreateAllPlacementStrategy.java | 2 +- .../CreateSinglePlacementStrategy.java | 2 +- .../db/transaction/EntityAccessMap.java | 2 +- .../db/transaction/TransactionImpl.java | 30 -- .../db/view/MaterializedViewManagerImpl.java | 28 +- .../db/monitoring/statistics/QueryResult.java | 2 +- .../statistics/StatisticColumn.java | 2 +- .../org/polypheny/db/avatica/DbmsMeta.java | 12 +- .../org/polypheny/db/cql/ColumnIndex.java | 4 +- .../java/org/polypheny/db/cql/TableIndex.java | 5 +- .../polypheny/db/adapter/csv/CsvSchema.java | 5 +- .../polypheny/db/adapter/csv/CsvSource.java | 2 + .../admin/CypherAlterDatabaseAlias.java | 4 +- .../admin/CypherCreateDatabaseAlias.java | 4 +- .../db/cypher/admin/CypherDropAlias.java | 4 +- .../db/cypher/admin/CypherDropDatabase.java | 4 +- .../cypher2alg/CypherToAlgConverter.java | 6 +- .../db/cypher/ddl/CypherAddPlacement.java | 6 +- .../db/cypher/ddl/CypherDropPlacement.java | 5 +- .../db/hsqldb/stores/HsqldbStore.java | 6 +- .../jdbc/sources/AbstractJdbcSource.java | 5 +- .../jdbc/stores/AbstractJdbcStore.java | 10 +- .../db/languages/MqlProcessorImpl.java | 3 +- .../db/languages/mql/MqlAddPlacement.java | 6 +- .../db/languages/mql/MqlCreateCollection.java | 7 +- .../db/languages/mql/MqlCreateView.java | 7 +- .../db/languages/mql/MqlDeletePlacement.java | 9 +- .../polypheny/db/languages/mql/MqlDrop.java | 8 +- .../db/languages/mql/MqlRenameCollection.java | 7 +- .../org/polypheny/db/tools/PigAlgBuilder.java | 2 +- .../org/polypheny/db/catalog/PolyCatalog.java | 189 ++-------- .../allocation/PolyAllocDocCatalog.java | 6 - .../allocation/PolyAllocGraphCatalog.java | 5 - .../allocation/PolyAllocRelCatalog.java | 236 ------------ .../db/catalog/logical/DocumentCatalog.java | 1 + .../db/catalog/logical/GraphCatalog.java | 50 +-- .../db/catalog/logical/RelationalCatalog.java | 18 +- .../db/catalog/snapshot/FullSnapshot.java | 276 -------------- .../polypheny/db/restapi/RequestParser.java | 18 +- .../java/org/polypheny/db/restapi/Rest.java | 15 +- .../polypheny/db/sql/SqlProcessorImpl.java | 12 +- .../org/polypheny/db/sql/language/SqlDdl.java | 13 +- .../polypheny/db/sql/language/SqlUtil.java | 4 +- .../ddl/SqlCreateMaterializedView.java | 9 +- .../db/sql/language/ddl/SqlCreateTable.java | 8 +- .../db/sql/language/ddl/SqlCreateView.java | 11 +- .../SqlAlterTableModifyPartitions.java | 8 +- .../db/sql/language/validate/EmptyScope.java | 9 +- .../language/validate/SqlValidatorImpl.java | 2 +- .../language/validate/SqlValidatorUtil.java | 8 +- .../db/sql/language/validate/WithScope.java | 7 +- .../db/sql/web/SchemaToJsonMapper.java | 4 +- .../java/org/polypheny/db/webui/Crud.java | 86 ++--- .../polypheny/db/webui/QueryPlanBuilder.java | 3 +- .../org/polypheny/db/webui/WebSocket.java | 2 +- 
.../polypheny/db/webui/crud/LanguageCrud.java | 6 +- .../db/webui/crud/StatisticCrud.java | 5 +- .../models/requests/BatchUpdateRequest.java | 4 +- 93 files changed, 2122 insertions(+), 1614 deletions(-) create mode 100644 core/src/main/java/org/polypheny/db/catalog/snapshot/impl/AllocSnapshotImpl.java create mode 100644 core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalDocSnapshotImpl.java create mode 100644 core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalGraphSnapshotImpl.java create mode 100644 core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalRelSnapshotImpl.java create mode 100644 core/src/main/java/org/polypheny/db/catalog/snapshot/impl/PhysicalSnapshotImpl.java create mode 100644 core/src/main/java/org/polypheny/db/catalog/snapshot/impl/SnapshotBuilder.java create mode 100644 core/src/main/java/org/polypheny/db/catalog/snapshot/impl/SnapshotImpl.java delete mode 100644 plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/FullSnapshot.java diff --git a/core/src/main/java/org/polypheny/db/adapter/AdapterManager.java b/core/src/main/java/org/polypheny/db/adapter/AdapterManager.java index 9980c456bf..5d394a447a 100644 --- a/core/src/main/java/org/polypheny/db/adapter/AdapterManager.java +++ b/core/src/main/java/org/polypheny/db/adapter/AdapterManager.java @@ -244,7 +244,7 @@ public void removeAdapter( long adapterId ) { CatalogAdapter catalogAdapter = Catalog.getInstance().getSnapshot().getAdapter( adapterId ); // Check if the store has any placements - List> placements = Catalog.getInstance().getAllocSnapshot().getAllocationsOnAdapter( catalogAdapter.id ); + List> placements = Catalog.getInstance().getSnapshot().getAllocSnapshot().getAllocationsOnAdapter( catalogAdapter.id ); if ( placements.size() != 0 ) { throw new RuntimeException( "There is still data placed on this data store" ); } diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalConstraintEnforcer.java b/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalConstraintEnforcer.java index 841a91de10..7a862d81fd 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalConstraintEnforcer.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalConstraintEnforcer.java @@ -94,7 +94,7 @@ private static EnforcementInformation getControl( AlgNode node, Statement statem AlgBuilder builder = AlgBuilder.create( statement ); final RexBuilder rexBuilder = modify.getCluster().getRexBuilder(); - LogicalRelSnapshot snapshot = Catalog.getInstance().getRelSnapshot( table.namespaceId ); + LogicalRelSnapshot snapshot = Catalog.getInstance().getSnapshot().getRelSnapshot( table.namespaceId ); EnforcementTime enforcementTime = EnforcementTime.ON_QUERY; final List constraints = new ArrayList<>( snapshot.getConstraints( table.id ) ) @@ -223,7 +223,7 @@ public static EnforcementInformation getControl( LogicalTable table, Statement s AlgBuilder builder = AlgBuilder.create( statement ); final RexBuilder rexBuilder = builder.getRexBuilder(); - LogicalRelSnapshot snapshot = Catalog.getInstance().getRelSnapshot( table.namespaceId ); + LogicalRelSnapshot snapshot = Catalog.getInstance().getSnapshot().getRelSnapshot( table.namespaceId ); final List constraints = snapshot .getConstraints( table.id ) diff --git a/core/src/main/java/org/polypheny/db/catalog/Catalog.java b/core/src/main/java/org/polypheny/db/catalog/Catalog.java index 0b94d64dfa..fd42c64263 100644 --- 
a/core/src/main/java/org/polypheny/db/catalog/Catalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/Catalog.java @@ -36,11 +36,6 @@ import org.polypheny.db.catalog.entity.physical.PhysicalEntity; import org.polypheny.db.catalog.exceptions.NoTablePrimaryKeyException; import org.polypheny.db.catalog.logistic.NamespaceType; -import org.polypheny.db.catalog.snapshot.AllocSnapshot; -import org.polypheny.db.catalog.snapshot.LogicalDocSnapshot; -import org.polypheny.db.catalog.snapshot.LogicalGraphSnapshot; -import org.polypheny.db.catalog.snapshot.LogicalRelSnapshot; -import org.polypheny.db.catalog.snapshot.PhysicalSnapshot; import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.transaction.Transaction; @@ -245,16 +240,5 @@ protected final boolean isValidIdentifier( final String str ) { public abstract Snapshot getSnapshot(); - public abstract LogicalDocSnapshot getDocSnapshot( long namespaceId ); - - public abstract LogicalGraphSnapshot getGraphSnapshot( long namespaceId ); - - - public abstract LogicalRelSnapshot getRelSnapshot( long namespaceId ); - - public abstract PhysicalSnapshot getPhysicalSnapshot(); - - public abstract AllocSnapshot getAllocSnapshot(); - } diff --git a/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationRelationalCatalog.java b/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationRelationalCatalog.java index e03d196706..70e9b6e25b 100644 --- a/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationRelationalCatalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationRelationalCatalog.java @@ -16,21 +16,12 @@ package org.polypheny.db.catalog.catalogs; -import com.google.common.collect.ImmutableList; -import com.google.common.collect.ImmutableMap; import java.util.List; -import org.polypheny.db.catalog.entity.CatalogAdapter; -import org.polypheny.db.catalog.entity.CatalogColumnPlacement; import org.polypheny.db.catalog.entity.CatalogDataPlacement; -import org.polypheny.db.catalog.entity.CatalogPartition; -import org.polypheny.db.catalog.entity.CatalogPartitionGroup; -import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; -import org.polypheny.db.catalog.entity.allocation.AllocationTable; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.exceptions.GenericCatalogException; import org.polypheny.db.catalog.logistic.DataPlacementRole; import org.polypheny.db.catalog.logistic.PartitionType; -import org.polypheny.db.catalog.logistic.Pattern; import org.polypheny.db.catalog.logistic.PlacementType; import org.polypheny.db.partition.properties.PartitionProperty; @@ -60,83 +51,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { */ void deleteColumnPlacement( long adapterId, long columnId, boolean columnOnly ); - /** - * Gets a collective list of column placements per column on an adapter. - * Effectively used to retrieve all relevant placements including partitions. - * - * @param adapterId The id of the adapter - * @param columnId The id of the column - * @return The specific column placement - */ - CatalogColumnPlacement getColumnPlacement( long adapterId, long columnId ); - - /** - * Checks if there is a column with the specified name in the specified table. - * - * @param adapterId The id of the adapter - * @param columnId The id of the column - * @return true if there is a column placement, false if not. 
- */ - boolean checkIfExistsColumnPlacement( long adapterId, long columnId ); - - /** - * Get all column placements of a column - * - * @param columnId The id of the specific column - * @return List of column placements of specific column - */ - List getColumnPlacements( long columnId ); - - /** - * Get column placements of a specific table on a specific adapter on column detail level. - * Only returns one ColumnPlacement per column on adapter. Ignores multiplicity due to different partitionIds - * - * @param adapterId The id of the adapter - * @return List of column placements of the table on the specified adapter - */ - List getColumnPlacementsOnAdapterPerTable( long adapterId, long tableId ); - - /** - * Get column placements on an adapter. On column detail level - * Only returns one ColumnPlacement per column on adapter. Ignores multiplicity due to different partitionIds - * - * @param adapterId The id of the adapter - * @return List of column placements on the specified adapter - */ - List getColumnPlacementsOnAdapter( long adapterId ); - - /** - * Gets a collection of column placements for a given column. - * - * @param columnId The id of the column of requested column placements - * @return The collection of placements sorted - */ - List getColumnPlacementsByColumn( long columnId ); - - /** - * Gets all column placements of a table structured by the id of the adapters. - * - * @param tableId The id of the table for the requested column placements - * @return The requested collection - */ - ImmutableMap> getColumnPlacementsByAdapter( long tableId ); - - /** - * Gets the partition group to which the given partition belongs. - * - * @param partitionId The id of the partition - */ - long getPartitionGroupByPartition( long partitionId ); - - /** - * Get column placements in a specific schema on a specific adapter - * - * @param adapterId The id of the adapter - * @param schemaId The id of the schema - * @return List of column placements on this adapter and schema - */ - List getColumnPlacementsOnAdapterAndSchema( long adapterId, long schemaId ); /** * Update the type of a placement.
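The read-only lookups removed from AllocationRelationalCatalog in these hunks are not dropped; they move over to the new AllocSnapshot interface (see the AllocSnapshot.java hunk further below), leaving the allocation catalog with the mutating operations only. A minimal sketch of the resulting read path, not taken from the patch itself; the ids are placeholders:

import java.util.List;
import org.polypheny.db.catalog.Catalog;
import org.polypheny.db.catalog.entity.CatalogColumnPlacement;
import org.polypheny.db.catalog.snapshot.AllocSnapshot;

class PlacementReadSketch {

    // Placement reads now go through the allocation snapshot obtained from
    // the central snapshot, mirroring the AdapterManager hunk above.
    static void inspectPlacements( long adapterId, long columnId ) {
        AllocSnapshot alloc = Catalog.getInstance().getSnapshot().getAllocSnapshot();
        if ( alloc.checkIfExistsColumnPlacement( adapterId, columnId ) ) {
            // All placements of this column, across adapters and partitions
            List<CatalogColumnPlacement> placements = alloc.getColumnPlacements( columnId );
            placements.forEach( System.out::println );
        }
    }

}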
@@ -195,13 +110,6 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { */ void deletePartitionGroup( long tableId, long schemaId, long partitionGroupId ); - /** - * Get a partition object by its unique id - * - * @param partitionGroupId The unique id of the partition group - * @return A catalog partitionGroup - */ - CatalogPartitionGroup getPartitionGroup( long partitionGroupId ); /** * Adds a partition to the catalog @@ -222,21 +130,6 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { */ void deletePartition( long tableId, long schemaId, long partitionId ); - /** - * Get a partition object by its unique id - * - * @param partitionId The unique id of the partition - * @return A catalog partition - */ - CatalogPartition getPartition( long partitionId ); - - /** - * Retrieves a list of partitions which are associated with a specific table - * - * @param tableId Table for which partitions shall be gathered - * @return List of all partitions associated with that table - */ - List getPartitionsByTable( long tableId ); /** * Effectively partitions a table with the specified partitionType @@ -265,23 +158,6 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { */ void updateTablePartitionProperties( long tableId, PartitionProperty partitionProperty ); - /** - * Get a List of all partition groups belonging to a specific table - * - * @param tableId Table to be queried - * @return list of all partition groups on this table - */ - List getPartitionGroups( long tableId ); - - /** - * Get all partition groups of the specified database which fit to the specified filter patterns. - * getPartitionGroups(null, null) returns all partition groups of the database. - * - * @param schemaNamePattern Pattern for the schema name. null returns all. - * @param tableNamePattern Pattern for the table name. null returns all. - * @return List of partition groups which fit to the specified filters. If there is no partition group which meets the criteria, an empty list is returned. - */ - List getPartitionGroups( Pattern schemaNamePattern, Pattern tableNamePattern ); /** * Updates the specified partition group with the attached partitionIds @@ -315,148 +191,6 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { */ void updatePartition( long partitionId, Long partitionGroupId ); - /** - * Get a List of all partitions belonging to a specific partition group - * - * @param partitionGroupId The partition group to be queried - * @return list of all partitions in this partition group - */ - List getPartitions( long partitionGroupId ); - - /** - * Get all partitions of the specified database which fit to the specified filter patterns. - * getPartitions(null, null) returns all partitions of the database. - * - * @param schemaNamePattern Pattern for the schema name. null returns all. - * @param tableNamePattern Pattern for the table name. null returns all. - * @return List of partitions which fit to the specified filters. If there is no partition which meets the criteria, an empty list is returned. - */ - List getPartitions( Pattern schemaNamePattern, Pattern tableNamePattern ); - - /** - * Get a list of all partition names belonging to a specific table - * - * @param tableId Table to be queried - * @return list of all partition names on this table - */ - List getPartitionGroupNames( long tableId ); - - /** - * Get placements by partition. 
Identify the location of partitions. - * Essentially returns all ColumnPlacements which hold the specified partitionID. - * - * @param tableId The id of the table - * @param partitionGroupId The id of the partition group - * @param columnId The id of the column - * @return List of CatalogColumnPlacements - */ - List getColumnPlacementsByPartitionGroup( long tableId, long partitionGroupId, long columnId ); - - /** - * Get adapters by partition. Identify the location of partitions/replicas - * Essentially returns all adapters which hold the specified partitionID - * - * @param tableId The unique id of the table - * @param partitionGroupId The unique id of the partition group - * @return List of CatalogAdapters - */ - List getAdaptersByPartitionGroup( long tableId, long partitionGroupId ); - - /** - * Get all partition groups of a DataPlacement (identified by adapterId and tableId) - * - * @param adapterId The unique id of the adapter - * @param tableId The unique id of the table - * @return List of partitionGroupIds - */ - List getPartitionGroupsOnDataPlacement( long adapterId, long tableId ); - - /** - * Get all partitions of a DataPlacement (identified by adapterId and tableId) - * - * @param adapterId The unique id of the adapter - * @param tableId The unique id of the table - * @return List of partitionIds - */ - List getPartitionsOnDataPlacement( long adapterId, long tableId ); - - /** - * Returns a list with the index of the partitions on this store from 0..numPartitions - * - * @param adapterId The unique id of the adapter - * @param tableId The unique id of the table - * @return List of partitionId Indices - */ - List getPartitionGroupsIndexOnDataPlacement( long adapterId, long tableId ); - - /** - * Returns a specific DataPlacement of a given table. - * - * @param adapterId adapter where placement is located - * @param tableId table to retrieve the placement from - * @return DataPlacement of a table placed on a specific store - */ - CatalogDataPlacement getDataPlacement( long adapterId, long tableId ); - - /** - * Returns all DataPlacements of a given table. - * - * @param tableId table to retrieve the placements from - * @return List of all DataPlacements for the table - */ - List getDataPlacements( long tableId ); - - /** - * Returns a list of all DataPlacements that contain all columns as well as all partitions - * - * @param tableId table to retrieve the list from - * @return list of all full DataPlacements - */ - List getAllFullDataPlacements( long tableId ); - - /** - * Returns a list of all DataPlacements that contain all columns - * - * @param tableId table to retrieve the list from - * @return list of all full DataPlacements - */ - List getAllColumnFullDataPlacements( long tableId ); - - /** - * Returns a list of all DataPlacements that contain all partitions - * - * @param tableId table to retrieve the list from - * @return list of all full DataPlacements - */ - List getAllPartitionFullDataPlacements( long tableId ); - - /** - * Returns all DataPlacements of a given table that are associated with a given role. - * - * @param tableId table to retrieve the placements from - * @param role role to specifically filter - * @return List of all DataPlacements for the table that are associated with a specific role - */ - List getDataPlacementsByRole( long tableId, DataPlacementRole role ); - - /** - * Returns all PartitionPlacements of a given table that are associated with a given role. 
- * - * @param tableId table to retrieve the placements from - * @param role role to specifically filter - * @return List of all PartitionPlacements for the table that are associated with a specific role - */ - List getPartitionPlacementsByRole( long tableId, DataPlacementRole role ); - - /** - * Returns all PartitionPlacements of a given table with a given ID that are associated with a given role. - * - * @param tableId table to retrieve the placements from - * @param role role to specifically filter - * @param partitionId filter by ID - * @return List of all PartitionPlacements for the table that are associated with a specific role for a specific partitionId - */ - List getPartitionPlacementsByIdAndRole( long tableId, long partitionId, DataPlacementRole role ); /** * Checks if the planned changes are allowed in terms of placements that need to be present. @@ -597,48 +331,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { */ void deletePartitionPlacement( long adapterId, long partitionId ); - /** - * Returns a specific partition entity which is placed on a store. - * - * @param adapterId The adapter on which the requested partition placements reside - * @param partitionId The id of the requested partition - * @return The requested PartitionPlacement on that store for a given id - */ - CatalogPartitionPlacement getPartitionPlacement( long adapterId, long partitionId ); - - /** - * Returns a list of all Partition Placements which currently reside on an adapter, regardless of the table. - * - * @param adapterId The adapter on which the requested partition placements reside - * @return A list of all Partition Placements that are currently located on that specific store - */ - List getPartitionPlacementsByAdapter( long adapterId ); - - /** - * Returns a list of all Partition Placements which currently reside on an adapter, for a specific table. - * - * @param adapterId The adapter on which the requested partition placements reside - * @param tableId The table for which all partition placements on an adapter should be considered - * @return A list of all Partition Placements that are currently located on that specific store for an individual table - */ - List getPartitionPlacementsByTableOnAdapter( long adapterId, long tableId ); - /** - * Returns a list of all Partition Placements which are currently associated with a table. - * - * @param tableId The table with which the requested partition placements are currently associated. - * @return A list of all Partition Placements that belong to the desired table - */ - List getAllPartitionPlacementsByTable( long tableId ); - - /** - * Get all Partition Placements which are associated with an individual partition ID. - * Identifies on which locations and how often the individual partition is placed. - * - * @param partitionId The requested partition ID - * @return A list of Partition Placements which are physically responsible for that partition - */ - List getPartitionPlacements( long partitionId ); /** * Registers a table to be considered for periodic processing @@ -654,16 +347,5 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { */ void removeTableFromPeriodicProcessing( long tableId ); - /** - * Probes if a Partition Placement on an adapter for a specific partition already exists. 
- * - * @param adapterId Adapter on which to check - * @param partitionId Partition to check - * @return the response of the probe - */ - boolean checkIfExistsPartitionPlacement( long adapterId, long partitionId ); - - - List getAllocationsFromLogical( long logicalId ); } diff --git a/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalCatalog.java b/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalCatalog.java index 1ee874c5e2..682f90664f 100644 --- a/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalCatalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalCatalog.java @@ -23,4 +23,6 @@ public interface LogicalCatalog { LogicalCatalog withLogicalNamespace( LogicalNamespace namespace ); + LogicalNamespace getLogicalNamespace(); + } diff --git a/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalDocumentCatalog.java b/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalDocumentCatalog.java index 711cb97f65..bbe2a38bd5 100644 --- a/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalDocumentCatalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalDocumentCatalog.java @@ -17,7 +17,9 @@ package org.polypheny.db.catalog.catalogs; import java.util.List; +import java.util.Map; import org.polypheny.db.adapter.DataStore; +import org.polypheny.db.catalog.entity.logical.LogicalCollection; import org.polypheny.db.catalog.logistic.EntityType; public interface LogicalDocumentCatalog extends LogicalCatalog { @@ -43,4 +45,6 @@ public interface LogicalDocumentCatalog extends LogicalCatalog { long addCollectionLogistics( String name, List stores, boolean placementOnly ); + Map getCollections(); + } diff --git a/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalGraphCatalog.java b/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalGraphCatalog.java index 1f2f3028bd..3831b68247 100644 --- a/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalGraphCatalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalGraphCatalog.java @@ -17,7 +17,9 @@ package org.polypheny.db.catalog.catalogs; import java.util.List; +import java.util.Map; import org.polypheny.db.adapter.DataStore; +import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.catalog.exceptions.GenericCatalogException; import org.polypheny.db.catalog.exceptions.UnknownColumnException; import org.polypheny.db.catalog.exceptions.UnknownTableException; @@ -62,7 +64,6 @@ public interface LogicalGraphCatalog extends LogicalCatalog { public abstract void deleteGraph( long id ); - /** * Additional operations for the creation of a graph entity. 
* @@ -73,4 +74,6 @@ public interface LogicalGraphCatalog extends LogicalCatalog { public abstract void addGraphLogistics( long id, List stores, boolean onlyPlacement ) throws GenericCatalogException, UnknownTableException, UnknownColumnException; + Map getGraphs(); + } diff --git a/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalRelationalCatalog.java b/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalRelationalCatalog.java index a3d432403c..cfa5e6fbb2 100644 --- a/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalRelationalCatalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalRelationalCatalog.java @@ -16,13 +16,19 @@ package org.polypheny.db.catalog.catalogs; +import com.google.common.collect.ImmutableMap; import java.util.List; import java.util.Map; import org.polypheny.db.algebra.AlgCollation; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.type.AlgDataType; +import org.polypheny.db.catalog.entity.CatalogIndex; +import org.polypheny.db.catalog.entity.CatalogKey; import org.polypheny.db.catalog.entity.CatalogView; +import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.MaterializedCriteria; +import org.polypheny.db.catalog.entity.logical.LogicalColumn; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.exceptions.GenericCatalogException; import org.polypheny.db.catalog.logistic.Collation; import org.polypheny.db.catalog.logistic.EntityType; @@ -309,4 +315,15 @@ public interface LogicalRelationalCatalog extends LogicalCatalog { */ public abstract void deleteIndex( long indexId ); + ImmutableMap getTables(); + + ImmutableMap getColumns(); + + LogicalNamespace getLogicalNamespace(); + + ImmutableMap getIndexes(); + + ImmutableMap getKeys(); + + } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalTable.java b/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalTable.java index 212da38ef8..83574a65f8 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalTable.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalTable.java @@ -78,7 +78,7 @@ public AlgProtoDataType buildProto() { final AlgDataTypeFactory.Builder fieldInfo = typeFactory.builder(); for ( CatalogColumnPlacement placement : placements ) { - LogicalColumn logicalColumn = Catalog.getInstance().getRelSnapshot( namespaceId ).getLogicalColumn( placement.columnId ); + LogicalColumn logicalColumn = Catalog.getInstance().getSnapshot().getRelSnapshot( namespaceId ).getLogicalColumn( placement.columnId ); AlgDataType sqlType = logicalColumn.getAlgDataType( typeFactory ); fieldInfo.add( logicalColumn.name, placement.physicalColumnName, sqlType ).nullable( logicalColumn.nullable ); } diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/AllocSnapshot.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/AllocSnapshot.java index 39ff850c28..5727c7e778 100644 --- a/core/src/main/java/org/polypheny/db/catalog/snapshot/AllocSnapshot.java +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/AllocSnapshot.java @@ -16,8 +16,19 @@ package org.polypheny.db.catalog.snapshot; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; import java.util.List; +import org.polypheny.db.catalog.entity.CatalogAdapter; +import org.polypheny.db.catalog.entity.CatalogColumnPlacement; +import 
org.polypheny.db.catalog.entity.CatalogDataPlacement; +import org.polypheny.db.catalog.entity.CatalogPartition; +import org.polypheny.db.catalog.entity.CatalogPartitionGroup; +import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; import org.polypheny.db.catalog.entity.allocation.AllocationEntity; +import org.polypheny.db.catalog.entity.allocation.AllocationTable; +import org.polypheny.db.catalog.logistic.DataPlacementRole; +import org.polypheny.db.catalog.logistic.Pattern; @@ -33,6 +44,314 @@ public interface AllocSnapshot { AllocationEntity getAllocEntity( long id ); + /** + * Gets the placement of a specific column on a specific adapter. + * + * @param adapterId The id of the adapter + * @param columnId The id of the column + * @return The specific column placement + */ + CatalogColumnPlacement getColumnPlacement( long adapterId, long columnId ); + + /** + * Checks if there is a placement of the specified column on the specified adapter. + * + * @param adapterId The id of the adapter + * @param columnId The id of the column + * @return true if there is a column placement, false if not. + */ + boolean checkIfExistsColumnPlacement( long adapterId, long columnId ); + + /** + * Get all column placements of a column + * + * @param columnId The id of the specific column + * @return List of column placements of the specific column + */ + List getColumnPlacements( long columnId ); + + /** + * Get column placements of a specific table on a specific adapter on column detail level. + * Only returns one ColumnPlacement per column and adapter; ignores multiplicity due to different partitionIds. + * + * @param adapterId The id of the adapter + * @param tableId The id of the table + * @return List of column placements of the table on the specified adapter + */ + List getColumnPlacementsOnAdapterPerTable( long adapterId, long tableId ); + + /** + * Get column placements on an adapter, on column detail level. + * Only returns one ColumnPlacement per column and adapter; ignores multiplicity due to different partitionIds. + * + * @param adapterId The id of the adapter + * @return List of column placements on the specified adapter + */ + List getColumnPlacementsOnAdapter( long adapterId ); + + /** + * Gets a collection of column placements for a given column. + * + * @param columnId The id of the column of the requested column placements + * @return The sorted collection of placements + */ + List getColumnPlacementsByColumn( long columnId ); + + /** + * Gets all column placements of a table, structured by the id of the adapters. + * + * @param tableId The id of the table for the requested column placements + * @return The requested collection + */ + ImmutableMap> getColumnPlacementsByAdapter( long tableId ); + + /** + * Gets the id of the partition group to which the given partition belongs. + * + * @param partitionId The id of the partition + * @return The id of the owning partition group + */ + long getPartitionGroupByPartition( long partitionId ); + + + /** + * Get column placements in a specific schema on a specific adapter + * + * @param adapterId The id of the adapter + * @param schemaId The id of the schema + * @return List of column placements on this adapter and schema + */ + List getColumnPlacementsOnAdapterAndSchema( long adapterId, long schemaId ); + + /** + * Get a partition group object by its unique id + * + * @param partitionGroupId The unique id of the partition group + * @return A catalog partition group + */ + CatalogPartitionGroup getPartitionGroup( long partitionGroupId ); + + /** + * Get a partition object by its unique id + * + * @param partitionId The unique id of the partition + * @return A catalog partition + */ + CatalogPartition getPartition( long partitionId ); + + /** + * Retrieves a list of partitions which are associated with a specific table + * + * @param tableId Table for which partitions shall be gathered + * @return List of all partitions associated with that table + */ + List getPartitionsByTable( long tableId ); + + /** + * Get a List of all partition groups belonging to a specific table + * + * @param tableId Table to be queried + * @return list of all partition groups on this table + */ + List getPartitionGroups( long tableId ); + + /** + * Get all partition groups of the specified database which fit to the specified filter patterns. + * getPartitionGroups( null, null ) returns all partition groups of the database. + * + * @param schemaNamePattern Pattern for the schema name. null returns all. + * @param tableNamePattern Pattern for the table name. null returns all. + * @return List of partition groups which fit to the specified filters. If there is no partition group which meets the criteria, an empty list is returned. + */ + List getPartitionGroups( Pattern schemaNamePattern, Pattern tableNamePattern ); + + + /** + * Get a List of all partitions belonging to a specific partition group + * + * @param partitionGroupId Partition group to be queried + * @return list of all partitions in this partition group + */ + List getPartitions( long partitionGroupId ); + + /** + * Get all partitions of the specified database which fit to the specified filter patterns. + * getPartitions( null, null ) returns all partitions of the database. + * + * @param schemaNamePattern Pattern for the schema name. null returns all. + * @param tableNamePattern Pattern for the table name. null returns all. + * @return List of partitions which fit to the specified filters. If there is no partition which meets the criteria, an empty list is returned. + */ + List getPartitions( Pattern schemaNamePattern, Pattern tableNamePattern ); + + /** + * Get a list of all partition group names belonging to a specific table + * + * @param tableId Table to be queried + * @return list of all partition group names on this table + */ + List getPartitionGroupNames( long tableId ); + + /** + * Get placements by partition group. Identify the location of partitions. + * Essentially returns all ColumnPlacements which hold the specified partitionGroupId. + * + * @param tableId The id of the table + * @param partitionGroupId The id of the partition group + * @param columnId The id of the column + * @return List of CatalogColumnPlacements + */ + List getColumnPlacementsByPartitionGroup( long tableId, long partitionGroupId, long columnId );
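The accessors above make partition lookups available through the read-only AllocSnapshot. A minimal usage sketch, assuming the snapshot is obtained via Catalog.getInstance().getSnapshot() as elsewhere in this patch; the table id is a placeholder, and the public fields partition.id and placement.adapterId are assumed to follow the catalog entities' public-field style:

// Sketch: resolve where each partition of a table is physically placed.
AllocSnapshot alloc = Catalog.getInstance().getSnapshot().getAllocSnapshot();
long tableId = 42; // hypothetical table id
for ( CatalogPartition partition : alloc.getPartitionsByTable( tableId ) ) {
    // A partition may be placed redundantly on several adapters.
    for ( CatalogPartitionPlacement placement : alloc.getPartitionPlacements( partition.id ) ) {
        System.out.println( "partition " + partition.id + " -> adapter " + placement.adapterId );
    }
}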
+ + /** + * Get adapters by partition group. Identifies the location of partitions/replicas. + * Essentially returns all adapters which hold the specified partitionGroupId. + * + * @param tableId The unique id of the table + * @param partitionGroupId The unique id of the partition group + * @return List of CatalogAdapters + */ + List getAdaptersByPartitionGroup( long tableId, long partitionGroupId ); + + /** + * Get all partition groups of a DataPlacement (identified by adapterId and tableId) + * + * @param adapterId The unique id of the adapter + * @param tableId The unique id of the table + * @return List of partitionGroupIds + */ + List getPartitionGroupsOnDataPlacement( long adapterId, long tableId ); + + /** + * Get all partitions of a DataPlacement (identified by adapterId and tableId) + * + * @param adapterId The unique id of the adapter + * @param tableId The unique id of the table + * @return List of partitionIds + */ + List getPartitionsOnDataPlacement( long adapterId, long tableId ); + + /** + * Returns a list with the indices of the partition groups on this store, from 0..numPartitionGroups + * + * @param adapterId The unique id of the adapter + * @param tableId The unique id of the table + * @return List of partition group indices + */ + List getPartitionGroupsIndexOnDataPlacement( long adapterId, long tableId ); + + /** + * Returns a specific DataPlacement of a given table. + * + * @param adapterId adapter where placement is located + * @param tableId table to retrieve the placement from + * @return DataPlacement of a table placed on a specific store + */ + CatalogDataPlacement getDataPlacement( long adapterId, long tableId ); + + /** + * Returns all DataPlacements of a given table. + * + * @param tableId table to retrieve the placements from + * @return List of all DataPlacements for the table + */ + List getDataPlacements( long tableId ); + + /** + * Returns a list of all DataPlacements that contain all columns as well as all partitions + * + * @param tableId table to retrieve the list from + * @return list of all full DataPlacements + */ + List getAllFullDataPlacements( long tableId ); + + /** + * Returns a list of all DataPlacements that contain all columns + * + * @param tableId table to retrieve the list from + * @return list of all column-full DataPlacements + */ + List getAllColumnFullDataPlacements( long tableId ); + + /** + * Returns a list of all DataPlacements that contain all partitions + * + * @param tableId table to retrieve the list from + * @return list of all partition-full DataPlacements + */ + List getAllPartitionFullDataPlacements( long tableId ); + + /** + * Returns all DataPlacements of a given table that are associated with a given role. + * + * @param tableId table to retrieve the placements from + * @param role role to specifically filter + * @return List of all DataPlacements for the table that are associated with a specific role + */ + List getDataPlacementsByRole( long tableId, DataPlacementRole role ); + + /** + * Returns all PartitionPlacements of a given table that are associated with a given role. + * + * @param tableId table to retrieve the placements from + * @param role role to specifically filter + * @return List of all PartitionPlacements for the table that are associated with a specific role + */ + List getPartitionPlacementsByRole( long tableId, DataPlacementRole role ); + + /** + * Returns all PartitionPlacements of a given table for a given partition id that are associated with a given role.
+ * + * @param tableId table to retrieve the placements from + * @param role role to specifically filter + * @param partitionId filter by ID + * @return List of all PartitionPlacements for the table that are associated with a specific role for a specific partitionId + */ + List getPartitionPlacementsByIdAndRole( long tableId, long partitionId, DataPlacementRole role ); + + + /** + * Returns a specific partition entity which is placed on a store. + * + * @param adapterId The adapter on which the requested partition placements reside + * @param partitionId The id of the requested partition + * @return The requested PartitionPlacement on that store for a given id + */ + CatalogPartitionPlacement getPartitionPlacement( long adapterId, long partitionId ); + + /** + * Returns a list of all Partition Placements which currently reside on an adapter, regardless of the table. + * + * @param adapterId The adapter on which the requested partition placements reside + * @return A list of all Partition Placements that are currently located on that specific store + */ + List getPartitionPlacementsByAdapter( long adapterId ); + + /** + * Returns a list of all Partition Placements which currently reside on an adapter, for a specific table. + * + * @param adapterId The adapter on which the requested partition placements reside + * @param tableId The table for which all partition placements on an adapter should be considered + * @return A list of all Partition Placements that are currently located on that specific store for an individual table + */ + List getPartitionPlacementsByTableOnAdapter( long adapterId, long tableId ); + + /** + * Returns a list of all Partition Placements which are currently associated with a table. + * + * @param tableId The table with which the requested partition placements are associated + * @return A list of all Partition Placements that belong to the desired table + */ + List getAllPartitionPlacementsByTable( long tableId ); + + /** + * Get all Partition Placements which are associated with an individual partition ID. + * Identifies on which locations and how often the individual partition is placed. + * + * @param partitionId The requested partition ID + * @return A list of Partition Placements which are physically responsible for that partition + */ + List getPartitionPlacements( long partitionId ); + //// LOGISTICS boolean isHorizontalPartitioned( long id ); @@ -40,4 +359,18 @@ public interface AllocSnapshot { boolean isVerticalPartitioned( long id ); + /** + * Probes if a Partition Placement on an adapter for a specific partition already exists.
+ * + * @param adapterId Adapter on which to check + * @param partitionId Partition to check + * @return the result of the probe + */ + boolean checkIfExistsPartitionPlacement( long adapterId, long partitionId ); + + + List getAllocationsFromLogical( long logicalId ); + + boolean isPartitioned( long id ); + } diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalRelSnapshot.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalRelSnapshot.java index 4cf93cc914..4ad92dfe84 100644 --- a/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalRelSnapshot.java +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalRelSnapshot.java @@ -64,14 +64,6 @@ public interface LogicalRelSnapshot { */ public abstract LogicalTable getTable( String tableName ) throws UnknownTableException; - /** - * Returns the table which is associated with a given partitionId - * - * @param partitionId to use for lookup - * @return CatalogEntity that contains partitionId - */ - public abstract LogicalTable getTableFromPartition( long partitionId ); - /** * Gets a collection of all keys. @@ -102,11 +94,11 @@ public interface LogicalRelSnapshot { * Get all columns of the specified database which fit to the specified filter patterns. * getColumns(xid, databaseName, null, null, null) returns all columns of the database. * - * @param tableNamePattern Pattern for the table name. null returns all. - * @param columnNamePattern Pattern for the column name. null returns all. + * @param tableName Pattern for the table name. null returns all. + * @param columnName Pattern for the column name. null returns all. * @return List of columns which fit to the specified filters. If there is no column which meets the criteria, an empty list is returned. */ - public abstract List getColumns( @Nullable Pattern tableNamePattern, @Nullable Pattern columnNamePattern ); + public abstract List getColumns( @Nullable Pattern tableName, @Nullable Pattern columnName ); /** * Returns the column with the specified id.
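A minimal sketch of this pattern-based lookup, assuming Pattern wraps a plain string as in org.polypheny.db.catalog.logistic.Pattern; the namespace id and the wildcard syntax are illustrative assumptions:

// Sketch: fetch all columns named "id" in tables whose name starts with "emp".
LogicalRelSnapshot rel = Catalog.getInstance().getSnapshot().getRelSnapshot( namespaceId );
List<LogicalColumn> ids = rel.getColumns( new Pattern( "emp%" ), new Pattern( "id" ) );
// Passing null for both patterns returns every column of the namespace.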
@@ -301,4 +293,6 @@ public interface LogicalRelSnapshot { LogicalNamespace getNamespace( long id ); + boolean checkIfExistsEntity( String newName ); + } diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/PhysicalSnapshot.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/PhysicalSnapshot.java index 2e3efa1b6f..7cc2a09919 100644 --- a/core/src/main/java/org/polypheny/db/catalog/snapshot/PhysicalSnapshot.java +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/PhysicalSnapshot.java @@ -16,7 +16,9 @@ package org.polypheny.db.catalog.snapshot; +import java.util.List; import org.polypheny.db.catalog.entity.physical.PhysicalCollection; +import org.polypheny.db.catalog.entity.physical.PhysicalEntity; import org.polypheny.db.catalog.entity.physical.PhysicalGraph; import org.polypheny.db.catalog.entity.physical.PhysicalTable; @@ -37,4 +39,6 @@ public interface PhysicalSnapshot { PhysicalGraph getPhysicalGraph( long logicalId, long adapterId ); + List> getPhysicalsOnAdapter( long adapterId ); + } diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/AllocSnapshotImpl.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/AllocSnapshotImpl.java new file mode 100644 index 0000000000..76504b0024 --- /dev/null +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/AllocSnapshotImpl.java @@ -0,0 +1,293 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.db.catalog.snapshot.impl; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import java.util.List; +import java.util.Map; +import org.polypheny.db.catalog.catalogs.AllocationCatalog; +import org.polypheny.db.catalog.entity.CatalogAdapter; +import org.polypheny.db.catalog.entity.CatalogColumnPlacement; +import org.polypheny.db.catalog.entity.CatalogDataPlacement; +import org.polypheny.db.catalog.entity.CatalogPartition; +import org.polypheny.db.catalog.entity.CatalogPartitionGroup; +import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; +import org.polypheny.db.catalog.entity.allocation.AllocationEntity; +import org.polypheny.db.catalog.entity.allocation.AllocationTable; +import org.polypheny.db.catalog.logistic.DataPlacementRole; +import org.polypheny.db.catalog.logistic.Pattern; +import org.polypheny.db.catalog.snapshot.AllocSnapshot; + +public class AllocSnapshotImpl implements AllocSnapshot { + + public AllocSnapshotImpl( Map allocationCatalogs ) { + } + + + @Override + public List> getAllocationsOnAdapter( long id ) { + return null; + } + + + @Override + public AllocationEntity getAllocEntity( long id ) { + return null; + } + + + @Override + public CatalogColumnPlacement getColumnPlacement( long adapterId, long columnId ) { + return null; + } + + + @Override + public boolean checkIfExistsColumnPlacement( long adapterId, long columnId ) { + return false; + } + + + @Override + public List getColumnPlacements( long columnId ) { + return null; + } + + + @Override + public List getColumnPlacementsOnAdapterPerTable( long adapterId, long tableId ) { + return null; + } + + + @Override + public List getColumnPlacementsOnAdapter( long adapterId ) { + return null; + } + + + @Override + public List getColumnPlacementsByColumn( long columnId ) { + return null; + } + + + @Override + public ImmutableMap> getColumnPlacementsByAdapter( long tableId ) { + return null; + } + + + @Override + public long getPartitionGroupByPartition( long partitionId ) { + return 0; + } + + + @Override + public List getColumnPlacementsOnAdapterAndSchema( long adapterId, long schemaId ) { + return null; + } + + + @Override + public CatalogPartitionGroup getPartitionGroup( long partitionGroupId ) { + return null; + } + + + @Override + public CatalogPartition getPartition( long partitionId ) { + return null; + } + + + @Override + public List getPartitionsByTable( long tableId ) { + return null; + } + + + @Override + public List getPartitionGroups( long tableId ) { + return null; + } + + + @Override + public List getPartitionGroups( Pattern schemaNamePattern, Pattern tableNamePattern ) { + return null; + } + + + @Override + public List getPartitions( long partitionGroupId ) { + return null; + } + + + @Override + public List getPartitions( Pattern schemaNamePattern, Pattern tableNamePattern ) { + return null; + } + + + @Override + public List getPartitionGroupNames( long tableId ) { + return null; + } + + + @Override + public List getColumnPlacementsByPartitionGroup( long tableId, long partitionGroupId, long columnId ) { + return null; + } + + + @Override + public List getAdaptersByPartitionGroup( long tableId, long partitionGroupId ) { + return null; + } + + + @Override + public List getPartitionGroupsOnDataPlacement( long adapterId, long tableId ) { + return null; + } + + + @Override + public List getPartitionsOnDataPlacement( long adapterId, long tableId ) { + return null; + } + + + @Override + public List 
getPartitionGroupsIndexOnDataPlacement( long adapterId, long tableId ) { + return null; + } + + + @Override + public CatalogDataPlacement getDataPlacement( long adapterId, long tableId ) { + return null; + } + + + @Override + public List getDataPlacements( long tableId ) { + return null; + } + + + @Override + public List getAllFullDataPlacements( long tableId ) { + return null; + } + + + @Override + public List getAllColumnFullDataPlacements( long tableId ) { + return null; + } + + + @Override + public List getAllPartitionFullDataPlacements( long tableId ) { + return null; + } + + + @Override + public List getDataPlacementsByRole( long tableId, DataPlacementRole role ) { + return null; + } + + + @Override + public List getPartitionPlacementsByRole( long tableId, DataPlacementRole role ) { + return null; + } + + + @Override + public List getPartitionPlacementsByIdAndRole( long tableId, long partitionId, DataPlacementRole role ) { + return null; + } + + + @Override + public CatalogPartitionPlacement getPartitionPlacement( long adapterId, long partitionId ) { + return null; + } + + + @Override + public List getPartitionPlacementsByAdapter( long adapterId ) { + return null; + } + + + @Override + public List getPartitionPlacementsByTableOnAdapter( long adapterId, long tableId ) { + return null; + } + + + @Override + public List getAllPartitionPlacementsByTable( long tableId ) { + return null; + } + + + @Override + public List getPartitionPlacements( long partitionId ) { + return null; + } + + + @Override + public boolean isHorizontalPartitioned( long id ) { + return false; + } + + + @Override + public boolean isVerticalPartitioned( long id ) { + return false; + } + + + @Override + public boolean checkIfExistsPartitionPlacement( long adapterId, long partitionId ) { + return false; + } + + + @Override + public List getAllocationsFromLogical( long logicalId ) { + return null; + } + + + @Override + public boolean isPartitioned( long id ) { + return false; + } + +} diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalDocSnapshotImpl.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalDocSnapshotImpl.java new file mode 100644 index 0000000000..8164959d75 --- /dev/null +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalDocSnapshotImpl.java @@ -0,0 +1,67 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.db.catalog.snapshot.impl; + +import java.util.List; +import org.polypheny.db.catalog.catalogs.LogicalCatalog; +import org.polypheny.db.catalog.entity.logical.LogicalCollection; +import org.polypheny.db.catalog.logistic.Pattern; +import org.polypheny.db.catalog.snapshot.LogicalDocSnapshot; + +public class LogicalDocSnapshotImpl implements LogicalDocSnapshot { + + public LogicalDocSnapshotImpl( LogicalCatalog value ) { + + } + + + @Override + public LogicalCollection getCollection( long collectionId ) { + return null; + } + + + @Override + public List getCollections( Pattern namePattern ) { + return null; + } + + + @Override + public LogicalCollection getLogicalCollection( List names ) { + return null; + } + + + @Override + public LogicalCollection getLogicalCollection( long id ) { + return null; + } + + + @Override + public LogicalCollection getLogicalCollection( long namespaceId, String name ) { + return null; + } + + + @Override + public List getLogicalCollections( long namespaceId, Pattern name ) { + return null; + } + +} diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalGraphSnapshotImpl.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalGraphSnapshotImpl.java new file mode 100644 index 0000000000..7d6d5f767f --- /dev/null +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalGraphSnapshotImpl.java @@ -0,0 +1,66 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.db.catalog.snapshot.impl; + +import java.util.List; +import org.polypheny.db.catalog.catalogs.LogicalCatalog; +import org.polypheny.db.catalog.entity.logical.LogicalGraph; +import org.polypheny.db.catalog.logistic.Pattern; +import org.polypheny.db.catalog.snapshot.LogicalGraphSnapshot; + +public class LogicalGraphSnapshotImpl implements LogicalGraphSnapshot { + + public LogicalGraphSnapshotImpl( LogicalCatalog value ) { + } + + + @Override + public LogicalGraph getGraph( long id ) { + return null; + } + + + @Override + public List getGraphs( Pattern graphName ) { + return null; + } + + + @Override + public LogicalGraph getLogicalGraph( List names ) { + return null; + } + + + @Override + public LogicalGraph getLogicalGraph( long id ) { + return null; + } + + + @Override + public LogicalGraph getLogicalGraph( long namespaceId, String name ) { + return null; + } + + + @Override + public List getLogicalGraphs( long namespaceId, Pattern name ) { + return null; + } + +} diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalRelSnapshotImpl.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalRelSnapshotImpl.java new file mode 100644 index 0000000000..8d4420053d --- /dev/null +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalRelSnapshotImpl.java @@ -0,0 +1,337 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.db.catalog.snapshot.impl; + +import com.google.common.collect.ImmutableMap; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.stream.Collectors; +import lombok.Value; +import org.apache.commons.lang3.NotImplementedException; +import org.jetbrains.annotations.Nullable; +import org.polypheny.db.catalog.catalogs.LogicalRelationalCatalog; +import org.polypheny.db.catalog.entity.CatalogConstraint; +import org.polypheny.db.catalog.entity.CatalogForeignKey; +import org.polypheny.db.catalog.entity.CatalogIndex; +import org.polypheny.db.catalog.entity.CatalogKey; +import org.polypheny.db.catalog.entity.CatalogPrimaryKey; +import org.polypheny.db.catalog.entity.LogicalNamespace; +import org.polypheny.db.catalog.entity.logical.LogicalColumn; +import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.catalog.exceptions.UnknownColumnException; +import org.polypheny.db.catalog.exceptions.UnknownConstraintException; +import org.polypheny.db.catalog.exceptions.UnknownForeignKeyException; +import org.polypheny.db.catalog.exceptions.UnknownIndexException; +import org.polypheny.db.catalog.exceptions.UnknownSchemaException; +import org.polypheny.db.catalog.exceptions.UnknownTableException; +import org.polypheny.db.catalog.logistic.Pattern; +import org.polypheny.db.catalog.snapshot.LogicalRelSnapshot; +import org.polypheny.db.util.Pair; + +@Value +public class LogicalRelSnapshotImpl implements LogicalRelSnapshot { + + LogicalNamespace namespace; + + ImmutableMap tables; + + ImmutableMap tableNames; + + ImmutableMap> tableColumns; + ImmutableMap columns; + + ImmutableMap columnNames; + + ImmutableMap keys; + + ImmutableMap> tableKeys; + + ImmutableMap indexes; + + ImmutableMap, LogicalColumn> tableColumnIdColumn; + + ImmutableMap, LogicalColumn> tableColumnNameColumn; + + ImmutableMap, LogicalColumn> tableIdColumnNameColumn; + + + public LogicalRelSnapshotImpl( LogicalRelationalCatalog catalog ) { + namespace = catalog.getLogicalNamespace(); + tables = ImmutableMap.copyOf( catalog.getTables() ); + tableNames = ImmutableMap.copyOf( tables.entrySet().stream().collect( Collectors.toMap( e -> e.getValue().name, Entry::getValue ) ) ); + columns = ImmutableMap.copyOf( catalog.getColumns() ); + columnNames = ImmutableMap.copyOf( columns.entrySet().stream().collect( Collectors.toMap( e -> e.getValue().name, Entry::getValue ) ) ); + + Map> tableChildren = new HashMap<>(); + columns.forEach( ( k, v ) -> { + if ( tableChildren.containsKey( v.tableId ) ) { + tableChildren.get( v.tableId ).add( v ); + } else { + tableChildren.put( v.tableId, new ArrayList<>( List.of( v ) ) ); + } + } ); + this.tableColumns = ImmutableMap.copyOf( tableChildren ); + + keys = catalog.getKeys(); + + Map> tableKeys = new HashMap<>(); + keys.forEach( ( k, v ) -> { + if ( tableKeys.containsKey( v.tableId ) ) { + tableKeys.get( v.tableId ).add( v ); + } else { + tableKeys.put( v.tableId, new ArrayList<>( List.of( v ) ) ); + } + } ); + + this.tableKeys = ImmutableMap.copyOf( tableKeys ); + + this.tableColumnIdColumn = ImmutableMap.copyOf( columns.entrySet().stream().collect( Collectors.toMap( c -> Pair.of( c.getValue().tableId, c.getValue().id ), Entry::getValue ) ) ); + this.tableColumnNameColumn = ImmutableMap.copyOf( columns.entrySet().stream().collect( Collectors.toMap( c -> Pair.of( tables.get( c.getValue().tableId ).name, c.getValue().name ), Entry::getValue ) ) ); + 
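+ // The Pair-keyed maps above and below act as composite indices: they enable constant-time column lookups by (tableId, columnId), (tableName, columnName), and (tableId, columnName) without scanning all columns on every call.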
this.tableIdColumnNameColumn = ImmutableMap.copyOf( columns.entrySet().stream().collect( Collectors.toMap( c -> Pair.of( c.getValue().tableId, c.getValue().name ), Entry::getValue ) ) ); + + this.indexes = catalog.getIndexes(); + + } + + + @Override + public List getTables( @Nullable Pattern name ) { + if ( name == null ) { + return tables.values().asList(); + } + return tables.values().stream().filter( t -> namespace.caseSensitive ? t.name.matches( name.toRegex() ) : t.name.toLowerCase().matches( (name.toRegex().toLowerCase()) ) ).collect( Collectors.toList() ); + } + + + @Override + public LogicalTable getTable( long tableId ) { + return tables.get( tableId ); + } + + + @Override + public LogicalTable getTable( String tableName ) throws UnknownTableException { + return tableNames.get( tableName ); + } + + + @Override + public List getKeys() { + return keys.values().asList(); + } + + + @Override + public List getTableKeys( long tableId ) { + return tableKeys.get( tableId ); + } + + + @Override + public List getColumns( long tableId ) { + return tableColumns.get( tableId ); + } + + + @Override + public List getColumns( @Nullable Pattern tableName, @Nullable Pattern columnName ) { + List tables = getTables( tableName ); + if ( columnName == null ) { + return tables.stream().flatMap( t -> tableColumns.get( t.id ).stream() ).collect( Collectors.toList() ); + } + + return tables + .stream() + .flatMap( t -> tableColumns.get( t.id ).stream().filter( + c -> namespace.caseSensitive + ? c.name.matches( columnName.toRegex() ) + : c.name.toLowerCase().matches( columnName.toLowerCase().toRegex() ) ) ).collect( Collectors.toList() ); + + } + + + @Override + public LogicalColumn getColumn( long columnId ) { + return columns.get( columnId ); + } + + + @Override + public LogicalColumn getColumn( long tableId, String columnName ) throws UnknownColumnException { + return tableIdColumnNameColumn.get( Pair.of( tableId, columnName ) ); + } + + + @Override + public LogicalColumn getColumn( String tableName, String columnName ) throws UnknownColumnException, UnknownSchemaException, UnknownTableException { + return tableColumnNameColumn.get( Pair.of( tableName, columnName ) ); + } + + + @Override + public boolean checkIfExistsColumn( long tableId, String columnName ) { + return tableIdColumnNameColumn.containsKey( Pair.of( tableId, columnName ) ); + } + + + @Override + public CatalogPrimaryKey getPrimaryKey( long key ) { + return (CatalogPrimaryKey) keys.get( key ); + } + + + @Override + public boolean isPrimaryKey( long keyId ) { + throw new NotImplementedException(); + } + + + @Override + public boolean isForeignKey( long keyId ) { + throw new NotImplementedException(); + } + + + @Override + public boolean isIndex( long keyId ) { + throw new NotImplementedException(); + } + + + @Override + public boolean isConstraint( long keyId ) { + throw new NotImplementedException(); + } + + + @Override + public List getForeignKeys( long tableId ) { + throw new NotImplementedException(); + } + + + @Override + public List getExportedKeys( long tableId ) { + throw new NotImplementedException(); + } + + + @Override + public List getConstraints( long tableId ) { + throw new NotImplementedException(); + } + + + @Override + public List getConstraints( CatalogKey key ) { + throw new NotImplementedException(); + } + + + @Override + public CatalogConstraint getConstraint( long tableId, String constraintName ) throws UnknownConstraintException { + throw new NotImplementedException(); + } + + + @Override + public CatalogForeignKey 
getForeignKey( long tableId, String foreignKeyName ) throws UnknownForeignKeyException { + throw new NotImplementedException(); + } + + + @Override + public List getIndexes( CatalogKey key ) { + return indexes.get( key.id ); + } + + + @Override + public List getForeignKeys( CatalogKey key ) { + return null; + } + + + @Override + public List getIndexes( long tableId, boolean onlyUnique ) { + return null; + } + + + @Override + public CatalogIndex getIndex( long tableId, String indexName ) throws UnknownIndexException { + return null; + } + + + @Override + public boolean checkIfExistsIndex( long tableId, String indexName ) { + return false; + } + + + @Override + public CatalogIndex getIndex( long indexId ) { + return null; + } + + + @Override + public LogicalTable getLogicalTable( List names ) { + return null; + } + + + @Override + public LogicalTable getLogicalTable( long id ) { + return null; + } + + + @Override + public LogicalTable getLogicalTable( String name ) { + return null; + } + + + @Override + public List getLogicalTables( long namespaceId, Pattern name ) { + return null; + } + + + @Override + public LogicalColumn getLogicalColumn( long id ) { + return null; + } + + + @Override + public LogicalNamespace getNamespace( long id ) { + return null; + } + + + @Override + public boolean checkIfExistsEntity( String newName ) { + return false; + } + +} diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/PhysicalSnapshotImpl.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/PhysicalSnapshotImpl.java new file mode 100644 index 0000000000..3e7ed3fa86 --- /dev/null +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/PhysicalSnapshotImpl.java @@ -0,0 +1,75 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.db.catalog.snapshot.impl; + +import java.util.List; +import java.util.Map; +import org.polypheny.db.catalog.catalogs.PhysicalCatalog; +import org.polypheny.db.catalog.entity.physical.PhysicalCollection; +import org.polypheny.db.catalog.entity.physical.PhysicalEntity; +import org.polypheny.db.catalog.entity.physical.PhysicalGraph; +import org.polypheny.db.catalog.entity.physical.PhysicalTable; +import org.polypheny.db.catalog.snapshot.PhysicalSnapshot; + +public class PhysicalSnapshotImpl implements PhysicalSnapshot { + + public PhysicalSnapshotImpl( Map physicalCatalogs ) { + } + + + @Override + public PhysicalTable getPhysicalTable( long id ) { + return null; + } + + + @Override + public PhysicalTable getPhysicalTable( long logicalId, long adapterId ) { + return null; + } + + + @Override + public PhysicalCollection getPhysicalCollection( long id ) { + return null; + } + + + @Override + public PhysicalCollection getPhysicalCollection( long logicalId, long adapterId ) { + return null; + } + + + @Override + public PhysicalGraph getPhysicalGraph( long id ) { + return null; + } + + + @Override + public PhysicalGraph getPhysicalGraph( long logicalId, long adapterId ) { + return null; + } + + + @Override + public List> getPhysicalsOnAdapter( long adapterId ) { + return null; + } + +} diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/SnapshotBuilder.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/SnapshotBuilder.java new file mode 100644 index 0000000000..e63a8eeb71 --- /dev/null +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/SnapshotBuilder.java @@ -0,0 +1,83 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.db.catalog.snapshot.impl; + +import java.util.Map; +import java.util.Map.Entry; +import java.util.stream.Collectors; +import org.polypheny.db.catalog.catalogs.AllocationCatalog; +import org.polypheny.db.catalog.catalogs.LogicalCatalog; +import org.polypheny.db.catalog.catalogs.PhysicalCatalog; +import org.polypheny.db.catalog.logistic.NamespaceType; +import org.polypheny.db.catalog.snapshot.AllocSnapshot; +import org.polypheny.db.catalog.snapshot.LogicalDocSnapshot; +import org.polypheny.db.catalog.snapshot.LogicalGraphSnapshot; +import org.polypheny.db.catalog.snapshot.LogicalRelSnapshot; +import org.polypheny.db.catalog.snapshot.PhysicalSnapshot; +import org.polypheny.db.catalog.snapshot.Snapshot; + +public class SnapshotBuilder { + + public static Snapshot createSnapshot( long id, Map logicalCatalogs, Map allocationCatalogs, Map physicalCatalogs ) { + Map rels = buildRelSnapshots( logicalCatalogs ); + Map docs = buildDocSnapshots( logicalCatalogs ); + Map graphs = buildGraphSnapshots( logicalCatalogs ); + + AllocSnapshot alloc = buildAlloc( allocationCatalogs ); + PhysicalSnapshot physical = buildPhysical( physicalCatalogs ); + + return new SnapshotImpl( id, rels, docs, graphs, alloc, physical ); + } + + + private static PhysicalSnapshot buildPhysical( Map physicalCatalogs ) { + return new PhysicalSnapshotImpl( physicalCatalogs ); + } + + + private static AllocSnapshot buildAlloc( Map allocationCatalogs ) { + return new AllocSnapshotImpl( allocationCatalogs ); + } + + + private static Map buildGraphSnapshots( Map logicalCatalogs ) { + return logicalCatalogs + .entrySet() + .stream() + .filter( e -> e.getValue().getLogicalNamespace().namespaceType == NamespaceType.GRAPH ) + .collect( Collectors.toMap( Entry::getKey, e -> new LogicalGraphSnapshotImpl( e.getValue() ) ) ); + } + + + private static Map buildDocSnapshots( Map logicalCatalogs ) { + return logicalCatalogs + .entrySet() + .stream() + .filter( e -> e.getValue().getLogicalNamespace().namespaceType == NamespaceType.DOCUMENT ) + .collect( Collectors.toMap( Entry::getKey, e -> new LogicalDocSnapshotImpl( e.getValue() ) ) ); + } + + + private static Map buildRelSnapshots( Map logicalCatalogs ) { + return logicalCatalogs + .entrySet() + .stream() + .filter( e -> e.getValue().getLogicalNamespace().namespaceType == NamespaceType.RELATIONAL ) + .collect( Collectors.toMap( Entry::getKey, e -> new LogicalRelSnapshotImpl( e.getValue() ) ) ); + } + +} diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/SnapshotImpl.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/SnapshotImpl.java new file mode 100644 index 0000000000..702db195d9 --- /dev/null +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/SnapshotImpl.java @@ -0,0 +1,253 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
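SnapshotBuilder splits the logical catalogs by namespace type, so each per-model snapshot map is keyed by namespace id. A minimal sketch of assembling and querying a snapshot, assuming the catalog maps are keyed by namespace id and adapter id respectively; the variable names are illustrative:

// Sketch: build an immutable snapshot and query the relational part of one namespace.
Snapshot snapshot = SnapshotBuilder.createSnapshot( snapshotId, logicalCatalogs, allocationCatalogs, physicalCatalogs );
LogicalRelSnapshot rel = snapshot.getRelSnapshot( relNamespaceId ); // present only for RELATIONAL namespaces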
+ */ + +package org.polypheny.db.catalog.snapshot.impl; + +import com.google.common.collect.ImmutableMap; +import java.util.List; +import java.util.Map; +import lombok.NonNull; +import lombok.Value; +import org.polypheny.db.catalog.entity.CatalogAdapter; +import org.polypheny.db.catalog.entity.CatalogEntity; +import org.polypheny.db.catalog.entity.CatalogIndex; +import org.polypheny.db.catalog.entity.CatalogQueryInterface; +import org.polypheny.db.catalog.entity.CatalogUser; +import org.polypheny.db.catalog.entity.LogicalNamespace; +import org.polypheny.db.catalog.entity.allocation.AllocationEntity; +import org.polypheny.db.catalog.entity.logical.LogicalEntity; +import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.catalog.entity.physical.PhysicalEntity; +import org.polypheny.db.catalog.exceptions.UnknownAdapterException; +import org.polypheny.db.catalog.exceptions.UnknownQueryInterfaceException; +import org.polypheny.db.catalog.exceptions.UnknownUserException; +import org.polypheny.db.catalog.logistic.Pattern; +import org.polypheny.db.catalog.snapshot.AllocSnapshot; +import org.polypheny.db.catalog.snapshot.LogicalDocSnapshot; +import org.polypheny.db.catalog.snapshot.LogicalGraphSnapshot; +import org.polypheny.db.catalog.snapshot.LogicalRelSnapshot; +import org.polypheny.db.catalog.snapshot.PhysicalSnapshot; +import org.polypheny.db.catalog.snapshot.Snapshot; + +@Value +public class SnapshotImpl implements Snapshot { + + ImmutableMap relationals; + ImmutableMap documents; + ImmutableMap graphs; + AllocSnapshot alloc; + PhysicalSnapshot physical; + long id; + + + public SnapshotImpl( long id, Map relationals, Map documents, Map graphs, AllocSnapshot alloc, PhysicalSnapshot physical ) { + this.id = id; + this.relationals = ImmutableMap.copyOf( relationals ); + this.documents = ImmutableMap.copyOf( documents ); + this.graphs = ImmutableMap.copyOf( graphs ); + + this.alloc = alloc; + + this.physical = physical; + } + + + @Override + public long getId() { + return 0; + } + + + @Override + public @NonNull List getNamespaces( Pattern name ) { + return null; + } + + + @Override + public LogicalNamespace getNamespace( long id ) { + return null; + } + + + @Override + public LogicalNamespace getNamespace( String name ) { + return null; + } + + + @Override + public boolean checkIfExistsNamespace( String name ) { + return false; + } + + + @Override + public CatalogUser getUser( String name ) throws UnknownUserException { + return null; + } + + + @Override + public CatalogUser getUser( long id ) { + return null; + } + + + @Override + public List getAdapters() { + return null; + } + + + @Override + public CatalogAdapter getAdapter( String uniqueName ) throws UnknownAdapterException { + return null; + } + + + @Override + public CatalogAdapter getAdapter( long id ) { + return null; + } + + + @Override + public boolean checkIfExistsAdapter( long id ) { + return false; + } + + + @Override + public List getQueryInterfaces() { + return null; + } + + + @Override + public CatalogQueryInterface getQueryInterface( String uniqueName ) throws UnknownQueryInterfaceException { + return null; + } + + + @Override + public CatalogQueryInterface getQueryInterface( long id ) { + return null; + } + + + @Override + public List> getAllocationsOnAdapter( long id ) { + return null; + } + + + @Override + public List> getPhysicalsOnAdapter( long adapterId ) { + return null; + } + + + @Override + public List getIndexes() { + return null; + } + + + @Override + public List 
getTablesForPeriodicProcessing() { + return null; + } + + + @Override + public CatalogEntity getEntity( long id ) { + return null; + } + + + @Override + public CatalogEntity getEntity( long namespaceId, String name ) { + return null; + } + + + @Override + public CatalogEntity getEntity( long namespaceId, Pattern name ) { + return null; + } + + + @Override + public CatalogEntity getEntity( List names ) { + return null; + } + + + @Override + public boolean checkIfExistsEntity( String entityName ) { + return false; + } + + + @Override + public boolean checkIfExistsEntity( long tableId ) { + return false; + } + + + @Override + public LogicalNamespace getLogicalNamespace() { + return null; + } + + + @Override + public LogicalEntity getEntity( String name ) { + return null; + } + + + @Override + public LogicalDocSnapshot getDocSnapshot( long namespaceId ) { + return documents.get( namespaceId ); + } + + + @Override + public LogicalGraphSnapshot getGraphSnapshot( long namespaceId ) { + return graphs.get( namespaceId ); + } + + + @Override + public LogicalRelSnapshot getRelSnapshot( long namespaceId ) { + return relationals.get( namespaceId ); + } + + + @Override + public PhysicalSnapshot getPhysicalSnapshot() { + return physical; + } + + + @Override + public AllocSnapshot getAllocSnapshot() { + return alloc; + } + +} diff --git a/core/src/test/java/org/polypheny/db/docker/MockCatalogDocker.java b/core/src/test/java/org/polypheny/db/docker/MockCatalogDocker.java index 2fe662cfac..c5536a9bc5 100644 --- a/core/src/test/java/org/polypheny/db/docker/MockCatalogDocker.java +++ b/core/src/test/java/org/polypheny/db/docker/MockCatalogDocker.java @@ -48,15 +48,4 @@ public Snapshot getSnapshot() { } - @Override - public boolean checkIfExistsAdapter( long id ) { - return adapters.containsKey( id ); - } - - - @Override - public CatalogAdapter getAdapter( long id ) { - return adapters.get( id ); - } - } diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java index e2b01b0567..b11dd87011 100644 --- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java @@ -100,6 +100,7 @@ import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.catalog.logistic.PartitionType; import org.polypheny.db.catalog.logistic.PlacementType; +import org.polypheny.db.catalog.snapshot.LogicalRelSnapshot; import org.polypheny.db.config.RuntimeConfig; import org.polypheny.db.ddl.exception.AlterSourceException; import org.polypheny.db.ddl.exception.ColumnNotExistsException; @@ -159,7 +160,7 @@ private void checkViewDependencies( LogicalTable catalogTable ) { if ( catalogTable.connectedViews.size() > 0 ) { List views = new ArrayList<>(); for ( Long id : catalogTable.connectedViews ) { - views.add( catalog.getLogicalRel( catalogTable.namespaceId ).getTable( id ).name ); + views.add( catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getTable( id ).name ); } throw new PolyphenyDbException( "Cannot alter table because of underlying View " + views.stream().map( String::valueOf ).collect( Collectors.joining( (", ") ) ) ); } @@ -196,7 +197,7 @@ protected DataStore getDataStoreInstance( long storeId ) throws DdlOnSourceExcep private LogicalColumn getCatalogColumn( long namespaceId, long tableId, String columnName ) throws ColumnNotExistsException { try { - return catalog.getLogicalRel( namespaceId ).getColumn( tableId, columnName ); + return 
catalog.getSnapshot().getRelSnapshot( namespaceId ).getColumn( tableId, columnName ); } catch ( UnknownColumnException e ) { throw new ColumnNotExistsException( tableId, columnName ); } @@ -207,10 +208,10 @@ private LogicalColumn getCatalogColumn( long namespaceId, long tableId, String c public long createNamespace( String name, NamespaceType type, boolean ifNotExists, boolean replace ) throws NamespaceAlreadyExistsException { name = name.toLowerCase(); // Check if there is already a schema with this name - if ( catalog.checkIfExistsNamespace( name ) ) { + if ( catalog.getSnapshot().checkIfExistsNamespace( name ) ) { if ( ifNotExists ) { // It is ok that there is already a schema with this name because "IF NOT EXISTS" was specified - return catalog.getNamespace( name ).id; + return catalog.getSnapshot().getNamespace( name ).id; } else if ( replace ) { throw new RuntimeException( "Replacing namespace is not yet supported." ); } else { @@ -245,9 +246,9 @@ private void handleSource( DataSource adapter ) { for ( Map.Entry> entry : exportedColumns.entrySet() ) { // Make sure the table name is unique String tableName = entry.getKey(); - if ( catalog.getLogicalRel( defaultNamespaceId ).checkIfExistsEntity( tableName ) ) { // apparently we put them all into 1? + if ( catalog.getSnapshot().checkIfExistsEntity( tableName ) ) { // apparently we put them all into 1? int i = 0; - while ( catalog.getLogicalRel( defaultNamespaceId ).checkIfExistsEntity( tableName + i ) ) { + while ( catalog.getSnapshot().checkIfExistsEntity( tableName + i ) ) { i++; } tableName += i; @@ -290,7 +291,7 @@ private void handleSource( DataSource adapter ) { } try { catalog.getLogicalRel( defaultNamespaceId ).addPrimaryKey( tableId, primaryKeyColIds ); - LogicalTable catalogTable = catalog.getLogicalRel( defaultNamespaceId ).getTable( tableId ); + LogicalTable catalogTable = catalog.getSnapshot().getRelSnapshot( defaultNamespaceId ).getTable( tableId ); catalog.getAllocRel( defaultNamespaceId ) .addPartitionPlacement( catalogTable.namespaceId, @@ -317,7 +318,7 @@ public void dropAdapter( String name, Statement statement ) throws UnknownAdapte name = StringUtils.chop( name ); } - CatalogAdapter catalogAdapter = catalog.getAdapter( name ); + CatalogAdapter catalogAdapter = catalog.getSnapshot().getAdapter( name ); if ( catalogAdapter.type == AdapterType.SOURCE ) { // Remove collection Set collectionsToDrop = new HashSet<>(); @@ -326,7 +327,7 @@ public void dropAdapter( String name, Statement statement ) throws UnknownAdapte } for ( long id : collectionsToDrop ) { - LogicalCollection collection = catalog.getLogicalDoc( 1 ).getCollection( id ); + LogicalCollection collection = catalog.getSnapshot().getDocSnapshot( 1 ).getCollection( id ); // Make sure that there is only one adapter if ( collection.placements.size() != 1 ) { @@ -339,19 +340,19 @@ public void dropAdapter( String name, Statement statement ) throws UnknownAdapte // Remove table Set tablesToDrop = new HashSet<>(); - for ( CatalogColumnPlacement ccp : catalog.getAllocRel( defaultNamespaceId ).getColumnPlacementsOnAdapter( catalogAdapter.id ) ) { + for ( CatalogColumnPlacement ccp : catalog.getSnapshot().getAllocSnapshot().getColumnPlacementsOnAdapter( catalogAdapter.id ) ) { tablesToDrop.add( ccp.tableId ); } for ( Long id : tablesToDrop ) { - if ( catalog.getLogicalRel( defaultNamespaceId ).getTable( id ).entityType != EntityType.MATERIALIZED_VIEW ) { + if ( catalog.getSnapshot().getRelSnapshot( defaultNamespaceId ).getTable( id ).entityType != 
                 EntityType.MATERIALIZED_VIEW ) {
                 tablesToDrop.add( id );
             }
         }

         // Remove foreign keys
         for ( Long tableId : tablesToDrop ) {
-            for ( CatalogForeignKey fk : catalog.getLogicalRel( defaultNamespaceId ).getForeignKeys( tableId ) ) {
+            for ( CatalogForeignKey fk : catalog.getSnapshot().getRelSnapshot( defaultNamespaceId ).getForeignKeys( tableId ) ) {
                 try {
                     catalog.getLogicalRel( defaultNamespaceId ).deleteForeignKey( fk.id );
                 } catch ( GenericCatalogException e ) {
@@ -361,7 +362,7 @@ public void dropAdapter( String name, Statement statement ) throws UnknownAdapte
         }
         // Drop tables
         for ( Long tableId : tablesToDrop ) {
-            LogicalTable table = catalog.getLogicalRel( defaultNamespaceId ).getTable( tableId );
+            LogicalTable table = catalog.getSnapshot().getRelSnapshot( defaultNamespaceId ).getTable( tableId );

             // Make sure that there is only one adapter
             if ( table.dataPlacements.size() != 1 ) {
@@ -375,7 +376,7 @@ public void dropAdapter( String name, Statement statement ) throws UnknownAdapte

             // Delete column placement in catalog
             for ( LogicalColumn column : table.columns ) {
-                if ( catalog.getAllocRel( defaultNamespaceId ).checkIfExistsColumnPlacement( catalogAdapter.id, column.id ) ) {
+                if ( catalog.getSnapshot().getAllocSnapshot().checkIfExistsColumnPlacement( catalogAdapter.id, column.id ) ) {
                     catalog.getAllocRel( defaultNamespaceId ).deleteColumnPlacement( catalogAdapter.id, column.id, false );
                 }
             }
@@ -406,10 +407,10 @@ public void dropAdapter( String name, Statement statement ) throws UnknownAdapte
     @Override
     public void renameSchema( String newName, String oldName ) throws NamespaceAlreadyExistsException, UnknownSchemaException {
         newName = newName.toLowerCase();
-        if ( catalog.checkIfExistsNamespace( newName ) ) {
+        if ( catalog.getSnapshot().checkIfExistsNamespace( newName ) ) {
             throw new NamespaceAlreadyExistsException();
         }
-        LogicalNamespace logicalNamespace = catalog.getNamespace( oldName );
+        LogicalNamespace logicalNamespace = catalog.getSnapshot().getNamespace( oldName );
         catalog.renameNamespace( logicalNamespace.id, newName );

         // Update Name in statistics
@@ -420,7 +421,7 @@ public void renameSchema( String newName, String oldName ) throws NamespaceAlrea
     @Override
     public void addColumnToSourceTable( LogicalTable catalogTable, String columnPhysicalName, String columnLogicalName, String beforeColumnName, String afterColumnName, String defaultValue, Statement statement ) throws ColumnAlreadyExistsException, DdlOnSourceException, ColumnNotExistsException {
-        if ( catalog.getLogicalRel( catalogTable.namespaceId ).checkIfExistsColumn( catalogTable.id, columnLogicalName ) ) {
+        if ( catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).checkIfExistsColumn( catalogTable.id, columnLogicalName ) ) {
             throw new ColumnAlreadyExistsException( columnLogicalName, catalogTable.name );
         }

@@ -433,14 +434,14 @@ public void addColumnToSourceTable( LogicalTable catalogTable, String columnPhys
         }

         // Make sure there is only one adapter
-        if ( catalog.getAllocRel( catalogTable.namespaceId ).getColumnPlacements( catalogTable.columns.get( 0 ).id ).size() != 1 ) {
+        if ( catalog.getSnapshot().getAllocSnapshot().getColumnPlacements( catalogTable.columns.get( 0 ).id ).size() != 1 ) {
             throw new RuntimeException( "The table has an unexpected number of placements!" );
         }

-        long adapterId = catalog.getAllocRel( catalogTable.namespaceId ).getAllocationsFromLogical( catalogTable.id ).get( 0 ).adapterId;
+        long adapterId = catalog.getSnapshot().getAllocSnapshot().getAllocationsFromLogical( catalogTable.id ).get( 0 ).adapterId;
         DataSource dataSource = (DataSource) AdapterManager.getInstance().getAdapter( adapterId );

-        String physicalTableName = catalog.getAllocRel( catalogTable.namespaceId ).getPartitionPlacement( adapterId, catalogTable.partitionProperty.partitionIds.get( 0 ) ).physicalTableName;
+        String physicalTableName = catalog.getSnapshot().getAllocSnapshot().getPartitionPlacement( adapterId, catalogTable.partitionProperty.partitionIds.get( 0 ) ).physicalTableName;
         List<ExportedColumn> exportedColumns = dataSource.getExportedColumns().get( physicalTableName );

         // Check if physicalColumnName is valid
@@ -455,7 +456,7 @@ public void addColumnToSourceTable( LogicalTable catalogTable, String columnPhys
         }

         // Make sure this physical column has not already been added to this table
-        for ( CatalogColumnPlacement ccp : catalog.getAllocRel( catalogTable.namespaceId ).getColumnPlacementsOnAdapterPerTable( adapterId, catalogTable.id ) ) {
+        for ( CatalogColumnPlacement ccp : catalog.getSnapshot().getAllocSnapshot().getColumnPlacementsOnAdapterPerTable( adapterId, catalogTable.id ) ) {
             if ( ccp.physicalColumnName.equalsIgnoreCase( columnPhysicalName ) ) {
                 throw new RuntimeException( "The physical column '" + columnPhysicalName + "' has already been added to this table!" );
             }
@@ -479,7 +480,7 @@ public void addColumnToSourceTable( LogicalTable catalogTable, String columnPhys
         // Add default value
         addDefaultValue( catalogTable.namespaceId, defaultValue, columnId );
-        LogicalColumn addedColumn = catalog.getLogicalRel( catalogTable.namespaceId ).getColumn( columnId );
+        LogicalColumn addedColumn = catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getColumn( columnId );

         // Add column placement
         catalog.getAllocRel( catalogTable.namespaceId ).addColumnPlacement( catalogTable,
@@ -498,7 +499,7 @@ public void addColumnToSourceTable( LogicalTable catalogTable, String columnPhys

     private int updateAdjacentPositions( LogicalTable catalogTable, LogicalColumn beforeColumn, LogicalColumn afterColumn ) {
-        List<LogicalColumn> columns = catalog.getLogicalRel( catalogTable.namespaceId ).getColumns( catalogTable.id );
+        List<LogicalColumn> columns = catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getColumns( catalogTable.id );
         int position = columns.size() + 1;
         if ( beforeColumn != null || afterColumn != null ) {
             if ( beforeColumn != null ) {
@@ -523,7 +524,7 @@ public void addColumn( String columnName, LogicalTable catalogTable, String befo
             throw new NotNullAndDefaultValueException();
         }

-        if ( catalog.getLogicalRel( catalogTable.namespaceId ).checkIfExistsColumn( catalogTable.id, columnName ) ) {
+        if ( catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).checkIfExistsColumn( catalogTable.id, columnName ) ) {
             throw new ColumnAlreadyExistsException( columnName, catalogTable.name );
         }
         //
@@ -548,7 +549,7 @@ public void addColumn( String columnName, LogicalTable catalogTable, String befo
         // Add default value
         addDefaultValue( catalogTable.namespaceId, defaultValue, columnId );
-        LogicalColumn addedColumn = catalog.getLogicalRel( catalogTable.namespaceId ).getColumn( columnId );
+        LogicalColumn addedColumn = catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getColumn( columnId );

         // Ask router on which stores this column shall be placed
         List<DataStore> stores = RoutingManager.getInstance().getCreatePlacementStrategy().getDataStoresForNewColumn( addedColumn );
@@ -573,12 +574,12 @@ public void addColumn( String columnName, LogicalTable catalogTable, String befo
     public void addForeignKey( LogicalTable catalogTable, LogicalTable refTable, List<String> columnNames, List<String> refColumnNames, String constraintName, ForeignKeyOption onUpdate, ForeignKeyOption onDelete ) throws UnknownColumnException, GenericCatalogException {
         List<Long> columnIds = new LinkedList<>();
         for ( String columnName : columnNames ) {
-            LogicalColumn logicalColumn = catalog.getLogicalRel( catalogTable.namespaceId ).getColumn( catalogTable.id, columnName );
+            LogicalColumn logicalColumn = catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getColumn( catalogTable.id, columnName );
             columnIds.add( logicalColumn.id );
         }
         List<Long> referencesIds = new LinkedList<>();
         for ( String columnName : refColumnNames ) {
-            LogicalColumn logicalColumn = catalog.getLogicalRel( catalogTable.namespaceId ).getColumn( refTable.id, columnName );
+            LogicalColumn logicalColumn = catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getColumn( refTable.id, columnName );
             referencesIds.add( logicalColumn.id );
         }
         catalog.getLogicalRel( catalogTable.namespaceId ).addForeignKey( catalogTable.id, columnIds, refTable.id, referencesIds, constraintName, onUpdate, onDelete );
@@ -589,7 +590,7 @@ public void addForeignKey( LogicalTable catalogTable, LogicalTable refTable, Lis
     public void addIndex( LogicalTable catalogTable, String indexMethodName, List<String> columnNames, String indexName, boolean isUnique, DataStore location, Statement statement ) throws UnknownColumnException, UnknownIndexMethodException, GenericCatalogException, UnknownTableException, UnknownUserException, UnknownSchemaException, UnknownKeyException, TransactionException, AlterSourceException, IndexExistsException, MissingColumnPlacementException {
         List<Long> columnIds = new LinkedList<>();
         for ( String columnName : columnNames ) {
-            LogicalColumn logicalColumn = catalog.getLogicalRel( catalogTable.namespaceId ).getColumn( catalogTable.id, columnName );
+            LogicalColumn logicalColumn = catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getColumn( catalogTable.id, columnName );
             columnIds.add( logicalColumn.id );
         }
@@ -601,7 +602,7 @@ public void addIndex( LogicalTable catalogTable, String indexMethodName, List<S
                 if ( … > 0 ) {
                     for ( long columnId : columnIds ) {
-                        if ( !catalog.getAllocRel( catalogTable.namespaceId ).checkIfExistsColumnPlacement( dataPlacement.adapterId, columnId ) ) {
+                        if ( !catalog.getSnapshot().getAllocSnapshot().checkIfExistsColumnPlacement( dataPlacement.adapterId, columnId ) ) {
                             hasAllColumns = false;
                         }
                     }
@@ -636,11 +637,11 @@ public void addIndex( LogicalTable catalogTable, String indexMethodName, List<S
                 if ( … > 0 ) {
                     for ( long columnId : columnIds ) {
-                        if ( !catalog.getAllocRel( catalogTable.namespaceId ).checkIfExistsColumnPlacement( dataPlacement.adapterId, columnId ) ) {
+                        if ( !catalog.getSnapshot().getAllocSnapshot().checkIfExistsColumnPlacement( dataPlacement.adapterId, columnId ) ) {
                             hasAllColumns = false;
                         }
                     }
@@ -649,7 +650,7 @@ public void addIndex( LogicalTable catalogTable, String indexMethodName, List<S
     private void addDataStoreIndex( LogicalTable catalogTable, String indexMethodName, String indexName, boolean isUnique, DataStore location, Statement statement, List<Long> columnIds, IndexType type ) throws MissingColumnPlacementException, UnknownIndexMethodException, GenericCatalogException {
         // Check if all required columns are present on this store
         for ( long columnId : columnIds ) {
-            if ( !catalog.getAllocRel( catalogTable.namespaceId ).checkIfExistsColumnPlacement( location.getAdapterId(), columnId ) ) {
-                throw new MissingColumnPlacementException( catalog.getLogicalRel( catalogTable.namespaceId ).getColumn( columnId ).name );
+            if ( !catalog.getSnapshot().getAllocSnapshot().checkIfExistsColumnPlacement( location.getAdapterId(), columnId ) ) {
+                throw new MissingColumnPlacementException( catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getColumn( columnId ).name );
             }
         }
@@ -706,8 +707,8 @@ private void addDataStoreIndex( LogicalTable catalogTable, String indexMethodNam
         location.addIndex(
                 statement.getPrepareContext(),
-                catalog.getLogicalRel( catalogTable.namespaceId ).getIndex( indexId ),
-                catalog.getAllocRel( catalogTable.namespaceId ).getPartitionsOnDataPlacement( location.getAdapterId(), catalogTable.id ) );
+                catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getIndex( indexId ),
+                catalog.getSnapshot().getAllocSnapshot().getPartitionsOnDataPlacement( location.getAdapterId(), catalogTable.id ) );
     }
@@ -715,7 +716,7 @@ public void addPolyphenyIndex( LogicalTable catalogTable, String indexMethodName
         indexName = indexName.toLowerCase();
         List<Long> columnIds = new LinkedList<>();
         for ( String columnName : columnNames ) {
-            LogicalColumn logicalColumn = catalog.getLogicalRel( catalogTable.namespaceId ).getColumn( catalogTable.id, columnName );
+            LogicalColumn logicalColumn = catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getColumn( catalogTable.id, columnName );
             columnIds.add( logicalColumn.id );
         }
@@ -727,7 +728,7 @@ public void addPolyphenyIndex( LogicalTable catalogTable, String indexMethodName
         }

         // Check if there is already an index with this name for this table
-        if ( catalog.getLogicalRel( catalogTable.namespaceId ).checkIfExistsIndex( catalogTable.id, indexName ) ) {
+        if ( catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).checkIfExistsIndex( catalogTable.id, indexName ) ) {
             throw new IndexExistsException();
         }
@@ -760,7 +761,7 @@ public void addPolyphenyIndex( LogicalTable catalogTable, String indexMethodName
                 type,
                 indexName );

-        IndexManager.getInstance().addIndex( catalog.getLogicalRel( catalogTable.namespaceId ).getIndex( indexId ), statement );
+        IndexManager.getInstance().addIndex( catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getIndex( indexId ), statement );
     }
@@ -787,7 +788,7 @@ public void addDataPlacement( LogicalTable catalogTable, List<Long> columnIds, L
         // Needed to ensure that column placements on the same store contain all the same partitions
         // Check if this column placement is the first on the data placement
         // If this returns null this means that this is the first placement and partition list can therefore be specified
-        List<Long> currentPartList = catalog.getAllocRel( catalogTable.namespaceId ).getPartitionGroupsOnDataPlacement( dataStore.getAdapterId(), catalogTable.id );
+        List<Long> currentPartList = catalog.getSnapshot().getAllocSnapshot().getPartitionGroupsOnDataPlacement( dataStore.getAdapterId(), catalogTable.id );

         isDataPlacementPartitioned = !currentPartList.isEmpty();

@@ -817,7 +818,7 @@ public void addDataPlacement( LogicalTable catalogTable, List<Long> columnIds, L
                             + dataStore.getUniqueName() + "' already contains manually specified partitions: " + currentPartList + ". Use 'ALTER TABLE ... MODIFY PARTITIONS...' instead" );
                 }

-                List<CatalogPartitionGroup> catalogPartitionGroups = catalog.getAllocRel( catalogTable.namespaceId ).getPartitionGroups( tableId );
+                List<CatalogPartitionGroup> catalogPartitionGroups = catalog.getSnapshot().getAllocSnapshot().getPartitionGroups( tableId );
                 for ( String partitionName : partitionGroupNames ) {
                     boolean isPartOfTable = false;
                     for ( CatalogPartitionGroup catalogPartitionGroup : catalogPartitionGroups ) {
@@ -829,7 +830,7 @@ public void addDataPlacement( LogicalTable catalogTable, List<Long> columnIds, L
                     }
                     if ( !isPartOfTable ) {
                         throw new RuntimeException( "Specified Partition-Name: '" + partitionName + "' is not part of table '"
-                                + catalogTable.name + "'. Available partitions: " + String.join( ",", catalog.getAllocRel( catalogTable.namespaceId ).getPartitionGroupNames( tableId ) ) );
+                                + catalogTable.name + "'. Available partitions: " + String.join( ",", catalog.getSnapshot().getAllocSnapshot().getPartitionGroupNames( tableId ) ) );
                     }
                 }
@@ -851,7 +852,7 @@ else if ( partitionGroupIds.isEmpty() && partitionGroupNames.isEmpty() ) {
         List<Long> partitionIds = new ArrayList<>();

         // Gather all partitions relevant to add depending on the specified partitionGroup
-        tempPartitionGroupList.forEach( pg -> catalog.getAllocRel( catalogTable.namespaceId ).getPartitions( pg ).forEach( p -> partitionIds.add( p.id ) ) );
+        tempPartitionGroupList.forEach( pg -> catalog.getSnapshot().getAllocSnapshot().getPartitions( pg ).forEach( p -> partitionIds.add( p.id ) ) );

         // Create column placements
         for ( long cid : columnIds ) {
@@ -861,10 +862,10 @@ else if ( partitionGroupIds.isEmpty() && partitionGroupNames.isEmpty() ) {
                     PlacementType.MANUAL,
                     null, null, null, 0 );
-            addedColumns.add( catalog.getLogicalRel( catalogTable.namespaceId ).getColumn( cid ) );
+            addedColumns.add( catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getColumn( cid ) );
         }
         // Check if placement includes primary key columns
-        CatalogPrimaryKey primaryKey = catalog.getLogicalRel( catalogTable.namespaceId ).getPrimaryKey( catalogTable.primaryKey );
+        CatalogPrimaryKey primaryKey = catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getPrimaryKey( catalogTable.primaryKey );
         for ( long cid : primaryKey.columnIds ) {
             if ( !columnIds.contains( cid ) ) {
                 catalog.getAllocRel( catalogTable.namespaceId ).addColumnPlacement( catalogTable,
@@ -873,7 +874,7 @@ else if ( partitionGroupIds.isEmpty() && partitionGroupNames.isEmpty() ) {
                         PlacementType.AUTOMATIC,
                         null, null, null, 0 );
-                addedColumns.add( catalog.getLogicalRel( catalogTable.namespaceId ).getColumn( cid ) );
+                addedColumns.add( catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getColumn( cid ) );
             }
         }
@@ -894,7 +895,7 @@ else if ( partitionGroupIds.isEmpty() && partitionGroupNames.isEmpty() ) {
             dataStore.createPhysicalTable( statement.getPrepareContext(), catalogTable, null );

         // Copy data to the newly added placements
         DataMigrator dataMigrator = statement.getTransaction().getDataMigrator();
-        dataMigrator.copyData( statement.getTransaction(), catalog.getAdapter( dataStore.getAdapterId() ), addedColumns, partitionIds );
+        dataMigrator.copyData( statement.getTransaction(), catalog.getSnapshot().getAdapter( dataStore.getAdapterId() ), addedColumns, partitionIds );

         // Reset query plan cache, implementation cache & routing cache
         statement.getQueryProcessor().resetCaches();
@@ -909,21 +910,21 @@ public void addPrimaryKey( LogicalTable catalogTable, List<String> columnNames,
         checkModelLogic( catalogTable );

         try {
-            CatalogPrimaryKey oldPk = catalog.getLogicalRel( catalogTable.namespaceId ).getPrimaryKey( catalogTable.primaryKey );
+            CatalogPrimaryKey oldPk = catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getPrimaryKey( catalogTable.primaryKey );

             List<Long> columnIds = new LinkedList<>();
             for ( String columnName : columnNames ) {
-                LogicalColumn logicalColumn = catalog.getLogicalRel( catalogTable.namespaceId ).getColumn( catalogTable.id, columnName );
+                LogicalColumn logicalColumn = catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getColumn( catalogTable.id, columnName );
                 columnIds.add( logicalColumn.id );
             }
             catalog.getLogicalRel( catalogTable.namespaceId ).addPrimaryKey( catalogTable.id, columnIds );

             // Add new column placements
             long pkColumnId = oldPk.columnIds.get( 0 ); // It is sufficient to check for one because all get replicated on all stores
-            List<CatalogColumnPlacement> oldPkPlacements = catalog.getAllocRel( catalogTable.namespaceId ).getColumnPlacements( pkColumnId );
+            List<CatalogColumnPlacement> oldPkPlacements = catalog.getSnapshot().getAllocSnapshot().getColumnPlacements( pkColumnId );
             for ( CatalogColumnPlacement ccp : oldPkPlacements ) {
                 for ( long columnId : columnIds ) {
-                    if ( !catalog.getAllocRel( catalogTable.namespaceId ).checkIfExistsColumnPlacement( ccp.adapterId, columnId ) ) {
+                    if ( !catalog.getSnapshot().getAllocSnapshot().checkIfExistsColumnPlacement( ccp.adapterId, columnId ) ) {
                         catalog.getAllocRel( catalogTable.namespaceId ).addColumnPlacement( catalogTable,
                                 ccp.adapterId,
                                 columnId,
                                 // Will be set later
@@ -932,8 +933,8 @@ public void addPrimaryKey( LogicalTable catalogTable, List<String> columnNames,
                                 null, null, 0 );
                         AdapterManager.getInstance().getStore( ccp.adapterId ).addColumn(
                                 statement.getPrepareContext(),
-                                catalog.getLogicalRel( catalogTable.namespaceId ).getTable( ccp.tableId ),
-                                catalog.getLogicalRel( catalogTable.namespaceId ).getColumn( columnId ) );
+                                catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getTable( ccp.tableId ),
+                                catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getColumn( columnId ) );
                     }
                 }
             }
@@ -953,7 +954,7 @@ public void addUniqueConstraint( LogicalTable catalogTable, List<String> columnN
         try {
             List<Long> columnIds = new LinkedList<>();
             for ( String columnName : columnNames ) {
-                LogicalColumn logicalColumn = catalog.getLogicalRel( catalogTable.namespaceId ).getColumn( catalogTable.id, columnName );
+                LogicalColumn logicalColumn = catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getColumn( catalogTable.id, columnName );
                 columnIds.add( logicalColumn.id );
             }
             catalog.getLogicalRel( catalogTable.namespaceId ).addUniqueConstraint( catalogTable.id, constraintName, columnIds );
@@ -977,17 +978,19 @@ public void dropColumn( LogicalTable catalogTable, String columnName, Statement
         LogicalColumn column = getCatalogColumn( catalogTable.namespaceId, catalogTable.id, columnName );

+        LogicalRelSnapshot snapshot = catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId );
+
         // Check if column is part of a key
-        for ( CatalogKey key : catalog.getLogicalRel( catalogTable.namespaceId ).getTableKeys( catalogTable.id ) ) {
+        for ( CatalogKey key : snapshot.getTableKeys( catalogTable.id ) ) {
             if ( key.columnIds.contains( column.id ) ) {
-                if ( catalog.getLogicalRel( catalogTable.namespaceId ).isPrimaryKey( key.id ) ) {
+                if ( snapshot.isPrimaryKey( key.id ) ) {
                     throw new PolyphenyDbException( "Cannot drop column '" + column.name + "' because it is part of the primary key." );
-                } else if ( catalog.getLogicalRel( catalogTable.namespaceId ).isIndex( key.id ) ) {
-                    throw new PolyphenyDbException( "Cannot drop column '" + column.name + "' because it is part of the index with the name: '" + catalog.getLogicalRel( catalogTable.namespaceId ).getIndexes( key ).get( 0 ).name + "'." );
-                } else if ( catalog.getLogicalRel( catalogTable.namespaceId ).isForeignKey( key.id ) ) {
-                    throw new PolyphenyDbException( "Cannot drop column '" + column.name + "' because it is part of the foreign key with the name: '" + catalog.getLogicalRel( catalogTable.namespaceId ).getForeignKeys( key ).get( 0 ).name + "'." );
-                } else if ( catalog.getLogicalRel( catalogTable.namespaceId ).isConstraint( key.id ) ) {
-                    throw new PolyphenyDbException( "Cannot drop column '" + column.name + "' because it is part of the constraint with the name: '" + catalog.getLogicalRel( catalogTable.namespaceId ).getConstraints( key ).get( 0 ).name + "'." );
+                } else if ( snapshot.isIndex( key.id ) ) {
+                    throw new PolyphenyDbException( "Cannot drop column '" + column.name + "' because it is part of the index with the name: '" + snapshot.getIndexes( key ).get( 0 ).name + "'." );
+                } else if ( snapshot.isForeignKey( key.id ) ) {
+                    throw new PolyphenyDbException( "Cannot drop column '" + column.name + "' because it is part of the foreign key with the name: '" + snapshot.getForeignKeys( key ).get( 0 ).name + "'." );
+                } else if ( snapshot.isConstraint( key.id ) ) {
+                    throw new PolyphenyDbException( "Cannot drop column '" + column.name + "' because it is part of the constraint with the name: '" + snapshot.getConstraints( key ).get( 0 ).name + "'." );
                 }
                 throw new PolyphenyDbException( "Ok, strange... Something is going wrong here!" );
             }
@@ -1000,7 +1003,7 @@ public void dropColumn( LogicalTable catalogTable, String columnName, Statement
             }
             catalog.getAllocRel( catalogTable.namespaceId ).deleteColumnPlacement( dp.adapterId, dp.columnId, true );
         }*/
-        for ( AllocationTable table : catalog.getAllocRel( catalogTable.namespaceId ).getAllocationsFromLogical( catalogTable.id ) ) {
+        for ( AllocationTable table : catalog.getSnapshot().getAllocSnapshot().getAllocationsFromLogical( catalogTable.id ) ) {
             for ( CatalogColumnPlacement placement : table.placements ) {
                 if ( catalogTable.entityType == EntityType.ENTITY ) {
                     AdapterManager.getInstance().getStore( table.adapterId ).dropColumn( statement.getPrepareContext(), placement );
@@ -1010,7 +1013,7 @@ public void dropColumn( LogicalTable catalogTable, String columnName, Statement
         }

         // Delete from catalog
-        List<LogicalColumn> columns = catalog.getLogicalRel( catalogTable.namespaceId ).getColumns( catalogTable.id );
+        List<LogicalColumn> columns = snapshot.getColumns( catalogTable.id );
         catalog.getLogicalRel( catalogTable.namespaceId ).deleteColumn( column.id );
         if ( column.position != columns.size() ) {
             // Update position of the other columns
@@ -1048,7 +1051,7 @@ public void dropConstraint( LogicalTable catalogTable, String constraintName ) t
         checkIfDdlPossible( catalogTable.entityType );

         try {
-            CatalogConstraint constraint = catalog.getLogicalRel( catalogTable.namespaceId ).getConstraint( catalogTable.id, constraintName );
+            CatalogConstraint constraint = catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getConstraint( catalogTable.id, constraintName );
             catalog.getLogicalRel( catalogTable.namespaceId ).deleteConstraint( constraint.id );
         } catch ( GenericCatalogException | UnknownConstraintException e ) {
             throw new RuntimeException( e );
@@ -1062,7 +1065,7 @@ public void dropForeignKey( LogicalTable catalogTable, String foreignKeyName ) t
         checkIfDdlPossible( catalogTable.entityType );

         try {
-            CatalogForeignKey foreignKey = catalog.getLogicalRel( catalogTable.namespaceId ).getForeignKey( catalogTable.id, foreignKeyName );
+            CatalogForeignKey foreignKey = catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getForeignKey( catalogTable.id, foreignKeyName );
             catalog.getLogicalRel( catalogTable.namespaceId ).deleteForeignKey( foreignKey.id );
         } catch ( GenericCatalogException | UnknownForeignKeyException e ) {
             throw new RuntimeException( e );
@@ -1076,13 +1079,13 @@ public void dropIndex( LogicalTable catalogTable, String indexName, Statement st
         checkIfDdlPossible( catalogTable.entityType );

         try {
-            CatalogIndex index = catalog.getLogicalRel( catalogTable.namespaceId ).getIndex( catalogTable.id, indexName );
+            CatalogIndex index = catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getIndex( catalogTable.id, indexName );

             if ( index.location == 0 ) {
                 IndexManager.getInstance().deleteIndex( index );
             } else {
                 DataStore storeInstance = AdapterManager.getInstance().getStore( index.location );
-                storeInstance.dropIndex( statement.getPrepareContext(), index, catalog.getAllocRel( catalogTable.namespaceId ).getPartitionsOnDataPlacement( index.location, catalogTable.id ) );
+                storeInstance.dropIndex( statement.getPrepareContext(), index, catalog.getSnapshot().getAllocSnapshot().getPartitionsOnDataPlacement( index.location, catalogTable.id ) );
             }

             catalog.getLogicalRel( catalogTable.namespaceId ).deleteIndex( index.id );
@@ -1099,7 +1102,7 @@ public void dropDataPlacement( LogicalTable catalogTable, DataStore storeInstanc
             throw new PlacementNotExistsException();
         }

-        CatalogDataPlacement dataPlacement = catalog.getAllocRel( catalogTable.namespaceId ).getDataPlacement( storeInstance.getAdapterId(), catalogTable.id );
+        CatalogDataPlacement dataPlacement = catalog.getSnapshot().getAllocSnapshot().getDataPlacement( storeInstance.getAdapterId(), catalogTable.id );
         if ( !catalog.getAllocRel( catalogTable.namespaceId ).validateDataPlacementsConstraints( catalogTable.id, storeInstance.getAdapterId(),
                 dataPlacement.columnPlacementsOnAdapter, dataPlacement.getAllPartitionIds() ) ) {
@@ -1107,7 +1110,7 @@ public void dropDataPlacement( LogicalTable catalogTable, DataStore storeInstanc
         }

         // Drop all indexes on this store
-        for ( CatalogIndex index : catalog.getLogicalRel( catalogTable.namespaceId ).getIndexes( catalogTable.id, false ) ) {
+        for ( CatalogIndex index : catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getIndexes( catalogTable.id, false ) ) {
             if ( index.location == storeInstance.getAdapterId() ) {
                 if ( index.location == 0 ) {
                     // Delete polystore index
@@ -1117,14 +1120,14 @@ public void dropDataPlacement( LogicalTable catalogTable, DataStore storeInstanc
                     AdapterManager.getInstance().getStore( index.location ).dropIndex(
                             statement.getPrepareContext(),
                             index,
-                            catalog.getAllocRel( catalogTable.namespaceId ).getPartitionsOnDataPlacement( index.location, catalogTable.id ) );
+                            catalog.getSnapshot().getAllocSnapshot().getPartitionsOnDataPlacement( index.location, catalogTable.id ) );
                 }
                 // Delete index in catalog
                 catalog.getLogicalRel( catalogTable.namespaceId ).deleteIndex( index.id );
             }
         }
         // Physically delete the data from the store
-        storeInstance.dropTable( statement.getPrepareContext(), catalogTable, catalog.getAllocRel( catalogTable.namespaceId ).getPartitionsOnDataPlacement( storeInstance.getAdapterId(), catalogTable.id ) );
+        storeInstance.dropTable( statement.getPrepareContext(), catalogTable, catalog.getSnapshot().getAllocSnapshot().getPartitionsOnDataPlacement( storeInstance.getAdapterId(), catalogTable.id ) );

         // Remove physical stores afterwards
         catalog.getAllocRel( catalogTable.namespaceId ).removeDataPlacement( storeInstance.getAdapterId(), catalogTable.id );
@@ -1164,11 +1167,11 @@ public void setColumnType( LogicalTable catalogTable, String columnName, ColumnT
                 type.scale,
                 type.dimension,
                 type.cardinality );
-        for ( CatalogColumnPlacement placement : catalog.getAllocRel( catalogTable.namespaceId ).getColumnPlacements( logicalColumn.id ) ) {
+        for ( CatalogColumnPlacement placement : catalog.getSnapshot().getAllocSnapshot().getColumnPlacements( logicalColumn.id ) ) {
             AdapterManager.getInstance().getStore( placement.adapterId ).updateColumnType(
                     statement.getPrepareContext(),
                     placement,
-                    catalog.getLogicalRel( catalogTable.namespaceId ).getColumn( logicalColumn.id ),
+                    catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getColumn( logicalColumn.id ),
                     logicalColumn.type );
         }
@@ -1213,7 +1216,7 @@ public void setColumnPosition( LogicalTable catalogTable, String columnName, Str
         if ( logicalColumn.id == refColumn.id ) {
             throw new RuntimeException( "Same column!" );
         }
-        List<LogicalColumn> columns = catalog.getLogicalRel( catalogTable.namespaceId ).getColumns( catalogTable.id );
+        List<LogicalColumn> columns = catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getColumns( catalogTable.id );
         if ( targetPosition < logicalColumn.position ) {  // Walk from last column to first column
             for ( int i = columns.size(); i >= 1; i-- ) {
                 if ( i < logicalColumn.position && i >= targetPosition ) {
@@ -1304,18 +1307,20 @@ public void modifyDataPlacement( LogicalTable catalogTable, List<Long> columnIds
         List<Long> columnsToRemove = new ArrayList<>();

+        LogicalRelSnapshot snapshot = catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId );
+
         // Checks before physically removing of placement that the partition distribution is still valid and sufficient
         // Identifies which columns need to be removed
-        for ( CatalogColumnPlacement placement : catalog.getAllocRel( catalogTable.namespaceId ).getColumnPlacementsOnAdapterPerTable( storeInstance.getAdapterId(), catalogTable.id ) ) {
+        for ( CatalogColumnPlacement placement : catalog.getSnapshot().getAllocSnapshot().getColumnPlacementsOnAdapterPerTable( storeInstance.getAdapterId(), catalogTable.id ) ) {
             if ( !columnIds.contains( placement.columnId ) ) {
                 // Check whether there are any indexes located on the store requiring this column
-                for ( CatalogIndex index : catalog.getLogicalRel( catalogTable.namespaceId ).getIndexes( catalogTable.id, false ) ) {
+                for ( CatalogIndex index : snapshot.getIndexes( catalogTable.id, false ) ) {
                     if ( index.location == storeInstance.getAdapterId() && index.key.columnIds.contains( placement.columnId ) ) {
-                        throw new IndexPreventsRemovalException( index.name, catalog.getLogicalRel( catalogTable.namespaceId ).getColumn( placement.columnId ).name );
+                        throw new IndexPreventsRemovalException( index.name, snapshot.getColumn( placement.columnId ).name );
                     }
                 }
                 // Check whether the column is a primary key column
-                CatalogPrimaryKey primaryKey = catalog.getLogicalRel( catalogTable.namespaceId ).getPrimaryKey( catalogTable.primaryKey );
+                CatalogPrimaryKey primaryKey = snapshot.getPrimaryKey( catalogTable.primaryKey );
                 if ( primaryKey.columnIds.contains( placement.columnId ) ) {
                     // Check if the placement type is manual. If so, change to automatic
                     if ( placement.placementType == PlacementType.MANUAL ) {
@@ -1340,7 +1345,7 @@ public void modifyDataPlacement( LogicalTable catalogTable, List<Long> columnIds
         // Remove columns physically
         for ( long columnId : columnsToRemove ) {
             // Drop Column on store
-            storeInstance.dropColumn( statement.getPrepareContext(), catalog.getAllocRel( catalogTable.namespaceId ).getColumnPlacement( storeInstance.getAdapterId(), columnId ) );
+            storeInstance.dropColumn( statement.getPrepareContext(), catalog.getSnapshot().getAllocSnapshot().getColumnPlacement( storeInstance.getAdapterId(), columnId ) );
             // Drop column placement
             catalog.getAllocRel( catalogTable.namespaceId ).deleteColumnPlacement( storeInstance.getAdapterId(), columnId, true );
         }
@@ -1366,7 +1371,7 @@ public void modifyDataPlacement( LogicalTable catalogTable, List<Long> columnIds
         }
         // If name partitions are specified
         else if ( !partitionGroupNames.isEmpty() && partitionGroupIds.isEmpty() ) {
-            List<CatalogPartitionGroup> catalogPartitionGroups = catalog.getAllocRel( catalogTable.namespaceId ).getPartitionGroups( tableId );
+            List<CatalogPartitionGroup> catalogPartitionGroups = catalog.getSnapshot().getAllocSnapshot().getPartitionGroups( tableId );
             for ( String partitionName : partitionGroupNames ) {
                 boolean isPartOfTable = false;
                 for ( CatalogPartitionGroup catalogPartitionGroup : catalogPartitionGroups ) {
@@ -1378,7 +1383,7 @@ else if ( !partitionGroupNames.isEmpty() && partitionGroupIds.isEmpty() ) {
                 }
                 if ( !isPartOfTable ) {
                     throw new RuntimeException( "Specified partition name: '" + partitionName + "' is not part of table '"
-                            + catalogTable.name + "'. Available partitions: " + String.join( ",", catalog.getAllocRel( catalogTable.namespaceId ).getPartitionGroupNames( tableId ) ) );
+                            + catalogTable.name + "'. Available partitions: " + String.join( ",", catalog.getSnapshot().getAllocSnapshot().getPartitionGroupNames( tableId ) ) );
                 }
             }
         } else if ( partitionGroupNames.isEmpty() && partitionGroupIds.isEmpty() ) {
@@ -1395,14 +1400,14 @@ else if ( !partitionGroupNames.isEmpty() && partitionGroupIds.isEmpty() ) {
         List<Long> intendedPartitionIds = new ArrayList<>();

         // Gather all partitions relevant to add depending on the specified partitionGroup
-        tempPartitionGroupList.forEach( pg -> catalog.getAllocRel( catalogTable.namespaceId ).getPartitions( pg ).forEach( p -> intendedPartitionIds.add( p.id ) ) );
+        tempPartitionGroupList.forEach( pg -> catalog.getSnapshot().getAllocSnapshot().getPartitions( pg ).forEach( p -> intendedPartitionIds.add( p.id ) ) );

         // Which columns to add
         List<LogicalColumn> addedColumns = new LinkedList<>();

         for ( long cid : columnIds ) {
-            if ( catalog.getAllocRel( catalogTable.namespaceId ).checkIfExistsColumnPlacement( storeInstance.getAdapterId(), cid ) ) {
-                CatalogColumnPlacement placement = catalog.getAllocRel( catalogTable.namespaceId ).getColumnPlacement( storeInstance.getAdapterId(), cid );
+            if ( catalog.getSnapshot().getAllocSnapshot().checkIfExistsColumnPlacement( storeInstance.getAdapterId(), cid ) ) {
+                CatalogColumnPlacement placement = catalog.getSnapshot().getAllocSnapshot().getColumnPlacement( storeInstance.getAdapterId(), cid );
                 if ( placement.placementType == PlacementType.AUTOMATIC ) {
                     // Make placement manual
                     catalog.getAllocRel( catalogTable.namespaceId ).updateColumnPlacementType( storeInstance.getAdapterId(), cid, PlacementType.MANUAL );
@@ -1416,13 +1421,13 @@ else if ( !partitionGroupNames.isEmpty() && partitionGroupIds.isEmpty() ) {
                         null, null, null, 0 );
                 // Add column on store
-                storeInstance.addColumn( statement.getPrepareContext(), catalogTable, catalog.getLogicalRel( catalogTable.namespaceId ).getColumn( cid ) );
+                storeInstance.addColumn( statement.getPrepareContext(), catalogTable, snapshot.getColumn( cid ) );
                 // Add to list of columns for which we need to copy data
-                addedColumns.add( catalog.getLogicalRel( catalogTable.namespaceId ).getColumn( cid ) );
+                addedColumns.add( snapshot.getColumn( cid ) );
             }
         }

-        CatalogDataPlacement dataPlacement = catalog.getAllocRel( catalogTable.namespaceId ).getDataPlacement( storeInstance.getAdapterId(), catalogTable.id );
+        CatalogDataPlacement dataPlacement = catalog.getSnapshot().getAllocSnapshot().getDataPlacement( storeInstance.getAdapterId(), catalogTable.id );
         List<Long> removedPartitionIdsFromDataPlacement = new ArrayList<>();
         // Removed Partition Ids
         for ( long partitionId : dataPlacement.getAllPartitionIds() ) {
@@ -1457,7 +1462,7 @@ else if ( !partitionGroupNames.isEmpty() && partitionGroupIds.isEmpty() ) {
         // Copy the data to the newly added column placements
         DataMigrator dataMigrator = statement.getTransaction().getDataMigrator();
         if ( addedColumns.size() > 0 ) {
-            dataMigrator.copyData( statement.getTransaction(), catalog.getAdapter( storeInstance.getAdapterId() ), addedColumns, intendedPartitionIds );
+            dataMigrator.copyData( statement.getTransaction(), catalog.getSnapshot().getAdapter( storeInstance.getAdapterId() ), addedColumns, intendedPartitionIds );
         }

         // Reset query plan cache, implementation cache & routing cache
@@ -1471,12 +1476,12 @@ public void modifyPartitionPlacement( LogicalTable catalogTable, List<Long> part
         List<Long> newPartitions = new ArrayList<>();
         List<Long> removedPartitions = new ArrayList<>();

-        List<Long> currentPartitionGroupsOnStore = catalog.getAllocRel( catalogTable.namespaceId ).getPartitionGroupsOnDataPlacement( storeId, catalogTable.id );
+        List<Long> currentPartitionGroupsOnStore = catalog.getSnapshot().getAllocSnapshot().getPartitionGroupsOnDataPlacement( storeId, catalogTable.id );

         // Get PartitionGroups that have been removed
         for ( long partitionGroupId : currentPartitionGroupsOnStore ) {
             if ( !partitionGroupIds.contains( partitionGroupId ) ) {
-                catalog.getAllocRel( catalogTable.namespaceId ).getPartitions( partitionGroupId ).forEach( p -> removedPartitions.add( p.id ) );
+                catalog.getSnapshot().getAllocSnapshot().getPartitions( partitionGroupId ).forEach( p -> removedPartitions.add( p.id ) );
             }
         }
@@ -1487,7 +1492,7 @@ public void modifyPartitionPlacement( LogicalTable catalogTable, List<Long> part
         // Get PartitionGroups that have been newly added
         for ( Long partitionGroupId : partitionGroupIds ) {
             if ( !currentPartitionGroupsOnStore.contains( partitionGroupId ) ) {
-                catalog.getAllocRel( catalogTable.namespaceId ).getPartitions( partitionGroupId ).forEach( p -> newPartitions.add( p.id ) );
+                catalog.getSnapshot().getAllocSnapshot().getPartitions( partitionGroupId ).forEach( p -> newPartitions.add( p.id ) );
             }
         }
@@ -1508,11 +1513,11 @@ public void modifyPartitionPlacement( LogicalTable catalogTable, List<Long> part
             // Get only columns that are actually on that store
             List<LogicalColumn> necessaryColumns = new LinkedList<>();
-            catalog.getAllocRel( catalogTable.namespaceId ).getColumnPlacementsOnAdapterPerTable( storeInstance.getAdapterId(), catalogTable.id ).forEach( cp -> necessaryColumns.add( catalog.getLogicalRel( catalogTable.namespaceId ).getColumn( cp.columnId ) ) );
-            dataMigrator.copyData( statement.getTransaction(), catalog.getAdapter( storeId ), necessaryColumns, newPartitions );
+            catalog.getSnapshot().getAllocSnapshot().getColumnPlacementsOnAdapterPerTable( storeInstance.getAdapterId(), catalogTable.id ).forEach( cp -> necessaryColumns.add( catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getColumn( cp.columnId ) ) );
+            dataMigrator.copyData( statement.getTransaction(), catalog.getSnapshot().getAdapter( storeId ), necessaryColumns, newPartitions );

             // Add indexes on this new Partition Placement if there is already an index
-            for ( CatalogIndex currentIndex : catalog.getLogicalRel( catalogTable.namespaceId ).getIndexes( catalogTable.id, false ) ) {
+            for ( CatalogIndex currentIndex : catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getIndexes( catalogTable.id, false ) ) {
                 if ( currentIndex.location == storeId ) {
                     storeInstance.addIndex( statement.getPrepareContext(), currentIndex, newPartitions );
                 }
@@ -1521,7 +1526,7 @@ public void modifyPartitionPlacement( LogicalTable catalogTable, List<Long> part

         if ( removedPartitions.size() > 0 ) {
             // Remove indexes
-            for ( CatalogIndex currentIndex : catalog.getLogicalRel( catalogTable.namespaceId ).getIndexes( catalogTable.id, false ) ) {
+            for ( CatalogIndex currentIndex : catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getIndexes( catalogTable.id, false ) ) {
                 if ( currentIndex.location == storeId ) {
                     storeInstance.dropIndex( statement.getPrepareContext(), currentIndex, removedPartitions );
                 }
@@ -1549,8 +1554,8 @@ public void addColumnPlacement( LogicalTable catalogTable, String columnName, Da
         LogicalColumn logicalColumn = getCatalogColumn( catalogTable.namespaceId, catalogTable.id, columnName );

         // Make sure that this store does not contain a placement of this column
-        if ( catalog.getAllocRel( catalogTable.namespaceId ).checkIfExistsColumnPlacement( storeInstance.getAdapterId(), logicalColumn.id ) ) {
-            CatalogColumnPlacement placement = catalog.getAllocRel( catalogTable.namespaceId ).getColumnPlacement( storeInstance.getAdapterId(), logicalColumn.id );
+        if ( catalog.getSnapshot().getAllocSnapshot().checkIfExistsColumnPlacement( storeInstance.getAdapterId(), logicalColumn.id ) ) {
+            CatalogColumnPlacement placement = catalog.getSnapshot().getAllocSnapshot().getColumnPlacement( storeInstance.getAdapterId(), logicalColumn.id );
             if ( placement.placementType == PlacementType.AUTOMATIC ) {
                 // Make placement manual
                 catalog.getAllocRel( catalogTable.namespaceId ).updateColumnPlacementType(
@@ -1572,8 +1577,8 @@ public void addColumnPlacement( LogicalTable catalogTable, String columnName, Da
             storeInstance.addColumn( statement.getPrepareContext(), catalogTable, logicalColumn );
             // Copy the data to the newly added column placements
             DataMigrator dataMigrator = statement.getTransaction().getDataMigrator();
-            dataMigrator.copyData( statement.getTransaction(), catalog.getAdapter( storeInstance.getAdapterId() ),
-                    ImmutableList.of( logicalColumn ), catalog.getAllocRel( catalogTable.namespaceId ).getPartitionsOnDataPlacement( storeInstance.getAdapterId(), catalogTable.id ) );
+            dataMigrator.copyData( statement.getTransaction(), catalog.getSnapshot().getAdapter( storeInstance.getAdapterId() ),
+                    ImmutableList.of( logicalColumn ), catalog.getSnapshot().getAllocSnapshot().getPartitionsOnDataPlacement( storeInstance.getAdapterId(), catalogTable.id ) );
         }

         // Reset query plan cache, implementation cache & routing cache
@@ -1594,11 +1599,11 @@ public void dropColumnPlacement( LogicalTable catalogTable, String columnName, D
         LogicalColumn logicalColumn = getCatalogColumn( catalogTable.namespaceId, catalogTable.id, columnName );

         // Check whether this store actually contains a placement of this column
-        if ( !catalog.getAllocRel( catalogTable.namespaceId ).checkIfExistsColumnPlacement( storeInstance.getAdapterId(), logicalColumn.id ) ) {
+        if ( !catalog.getSnapshot().getAllocSnapshot().checkIfExistsColumnPlacement( storeInstance.getAdapterId(), logicalColumn.id ) ) {
             throw new PlacementNotExistsException();
         }
         // Check whether there are any indexes located on the store requiring this column
-        for ( CatalogIndex index : catalog.getLogicalRel( catalogTable.namespaceId ).getIndexes( catalogTable.id, false ) ) {
+        for ( CatalogIndex index : catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getIndexes( catalogTable.id, false ) ) {
             if ( index.location == storeInstance.getAdapterId() && index.key.columnIds.contains( logicalColumn.id ) ) {
                 throw new IndexPreventsRemovalException( index.name, columnName );
             }
@@ -1609,12 +1614,12 @@ public void dropColumnPlacement( LogicalTable catalogTable, String columnName, D
         }

         // Check whether the column to drop is a primary key
-        CatalogPrimaryKey primaryKey = catalog.getLogicalRel( catalogTable.namespaceId ).getPrimaryKey( catalogTable.primaryKey );
+        CatalogPrimaryKey primaryKey = catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getPrimaryKey( catalogTable.primaryKey );
         if ( primaryKey.columnIds.contains( logicalColumn.id ) ) {
             throw new PlacementIsPrimaryException();
         }
         // Drop Column on store
-        storeInstance.dropColumn( statement.getPrepareContext(), catalog.getAllocRel( catalogTable.namespaceId ).getColumnPlacement( storeInstance.getAdapterId(), logicalColumn.id ) );
+        storeInstance.dropColumn( statement.getPrepareContext(), catalog.getSnapshot().getAllocSnapshot().getColumnPlacement( storeInstance.getAdapterId(), logicalColumn.id ) );
         // Drop column placement
         catalog.getAllocRel( catalogTable.namespaceId ).deleteColumnPlacement( storeInstance.getAdapterId(), logicalColumn.id, false );
@@ -1625,20 +1630,20 @@ public void dropColumnPlacement( LogicalTable catalogTable, String columnName, D
     @Override
     public void alterTableOwner( LogicalTable catalogTable, String newOwnerName ) throws UnknownUserException {
-        CatalogUser catalogUser = catalog.getUser( newOwnerName );
+        CatalogUser catalogUser = catalog.getSnapshot().getUser( newOwnerName );
         catalog.getLogicalRel( catalogTable.namespaceId ).setTableOwner( catalogTable.id, catalogUser.id );
     }

     @Override
     public void renameTable( LogicalTable catalogTable, String newTableName, Statement statement ) throws EntityAlreadyExistsException {
-        if ( catalog.getLogicalRel( catalogTable.namespaceId ).checkIfExistsEntity( newTableName ) ) {
+        if ( catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).checkIfExistsEntity( newTableName ) ) {
             throw new EntityAlreadyExistsException();
         }
         // Check if views are dependent from this view
         checkViewDependencies( catalogTable );

-        if ( catalog.getNamespace( catalogTable.namespaceId ).caseSensitive ) {
+        if ( catalog.getSnapshot().getNamespace( catalogTable.namespaceId ).caseSensitive ) {
             newTableName = newTableName.toLowerCase();
         }
@@ -1656,7 +1661,7 @@ public void renameTable( LogicalTable catalogTable, String newTableName, Stateme
     public void renameColumn( LogicalTable catalogTable, String columnName, String newColumnName, Statement statement ) throws ColumnAlreadyExistsException, ColumnNotExistsException {
         LogicalColumn logicalColumn = getCatalogColumn( catalogTable.namespaceId, catalogTable.id, columnName );

-        if ( catalog.getLogicalRel( catalogTable.namespaceId ).checkIfExistsColumn( logicalColumn.tableId, newColumnName ) ) {
+        if ( catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).checkIfExistsColumn( logicalColumn.tableId, newColumnName ) ) {
             throw new ColumnAlreadyExistsException( newColumnName, logicalColumn.getTableName() );
         }
         // Check if views are dependent from this view
@@ -1676,10 +1681,10 @@ public void renameColumn( LogicalTable catalogTable, String columnName, String n
     public void createView( String viewName, long namespaceId, AlgNode algNode, AlgCollation algCollation, boolean replace, Statement statement, PlacementType placementType, List projectedColumns, String query, QueryLanguage language ) throws EntityAlreadyExistsException {
         viewName = adjustNameIfNeeded( viewName, namespaceId );

-        if ( catalog.getLogicalRel( namespaceId ).checkIfExistsEntity( viewName ) ) {
+        if ( catalog.getSnapshot().getRelSnapshot( namespaceId ).checkIfExistsEntity( viewName ) ) {
             if ( replace ) {
                 try {
-                    dropView( catalog.getLogicalRel( namespaceId ).getTable( viewName ), statement );
+                    dropView( catalog.getSnapshot().getRelSnapshot( namespaceId ).getTable( viewName ), statement );
                 } catch ( UnknownTableException | DdlOnSourceException e ) {
                     throw new RuntimeException( "Unable tp drop the existing View with this name." );
                 }
@@ -1730,7 +1735,7 @@ public void createView( String viewName, long namespaceId, AlgNode algNode, AlgC

     private String adjustNameIfNeeded( String name, long namespaceId ) {
-        if ( !catalog.getNamespace( namespaceId ).caseSensitive ) {
+        if ( !catalog.getSnapshot().getNamespace( namespaceId ).caseSensitive ) {
             return name.toLowerCase();
         }
         return name;
     }
@@ -1755,12 +1760,14 @@ public void createMaterializedView( String viewName, long namespaceId, AlgRoot a
         Map<Long, List<Long>> underlyingTables = new HashMap<>();

         Map<Long, List<Long>> underlying = findUnderlyingTablesOfView( algRoot.alg, underlyingTables, fieldList );

+        LogicalRelSnapshot snapshot = catalog.getSnapshot().getRelSnapshot( namespaceId );
+
         // add check if underlying table is of model document -> mql, relational -> sql
         underlying.keySet().forEach( tableId -> checkModelLangCompatibility( language, namespaceId, tableId ) );

         if ( materializedCriteria.getCriteriaType() == CriteriaType.UPDATE ) {
             List<EntityType> entityTypes = new ArrayList<>();
-            underlying.keySet().forEach( t -> entityTypes.add( catalog.getLogicalRel( namespaceId ).getTable( t ).entityType ) );
+            underlying.keySet().forEach( t -> entityTypes.add( snapshot.getTable( t ).entityType ) );
             if ( !(entityTypes.contains( EntityType.ENTITY )) ) {
                 throw new GenericCatalogException( "Not possible to use Materialized View with Update Freshness if underlying table does not include a modifiable table." );
             }
         }
@@ -1821,7 +1828,7 @@ public void createMaterializedView( String viewName, long namespaceId, AlgRoot a
             } else {
                 logicalColumns = new ArrayList<>();
             }
-            logicalColumns.add( catalog.getLogicalRel( namespaceId ).getColumn( columnId ) );
+            logicalColumns.add( snapshot.getColumn( columnId ) );
             addedColumns.put( adapterId, logicalColumns );
         }
@@ -1829,7 +1836,7 @@ public void createMaterializedView( String viewName, long namespaceId, AlgRoot a
         // Sets previously created primary key
         catalog.getLogicalRel( namespaceId ).addPrimaryKey( tableId, columnIds );

-        CatalogMaterializedView catalogMaterializedView = (CatalogMaterializedView) catalog.getLogicalRel( namespaceId ).getTable( tableId );
+        CatalogMaterializedView catalogMaterializedView = catalog.getSnapshot().getRelSnapshot( namespaceId ).getTable( tableId ).unwrap( CatalogMaterializedView.class );
         Catalog.getInstance().getSnapshot();

         for ( DataStore store : stores ) {
@@ -1851,7 +1858,7 @@ public void createMaterializedView( String viewName, long namespaceId, AlgRoot a

     private void checkModelLangCompatibility( QueryLanguage language, long namespaceId, Long tableId ) {
-        LogicalTable catalogTable = catalog.getLogicalRel( namespaceId ).getTable( tableId );
+        LogicalTable catalogTable = catalog.getSnapshot().getRelSnapshot( namespaceId ).getTable( tableId );
         if ( catalogTable.getNamespaceType() != language.getNamespaceType() ) {
             throw new RuntimeException(
                     String.format(
@@ -1898,7 +1905,7 @@ public long addGraphPlacement( long graphId, List<DataStore> stores, boolean onl
             throw new RuntimeException();
         }

-        LogicalGraph graph = catalog.getLogicalGraph( graphId ).getGraph( graphId );
+        LogicalGraph graph = catalog.getSnapshot().getGraphSnapshot( graphId ).getGraph( graphId );
         Catalog.getInstance().getSnapshot();

         List preExistingPlacements = graph.placements
@@ -1918,7 +1925,7 @@ public long addGraphPlacement( long graphId, List<DataStore> stores, boolean onl
             if ( existingAdapterId != null ) {
                 // Copy the data to the newly added column placements
                 DataMigrator dataMigrator = statement.getTransaction().getDataMigrator();
-                dataMigrator.copyGraphData( graph, statement.getTransaction(), existingAdapterId, catalog.getAdapter( store.getAdapterId() ) );
+                dataMigrator.copyGraphData( graph, statement.getTransaction(), existingAdapterId, catalog.getSnapshot().getAdapter( store.getAdapterId() ) );
             }
         }
@@ -2036,7 +2043,7 @@ public void replaceGraphAlias( long graphId, String oldAlias, String alias ) {
     @Override
     public void removeGraph( long graphId, boolean ifExists, Statement statement ) {
-        LogicalGraph graph = catalog.getLogicalGraph( graphId ).getGraph( graphId );
+        LogicalGraph graph = catalog.getSnapshot().getGraphSnapshot( graphId ).getGraph( graphId );

         if ( graph == null ) {
             if ( !ifExists ) {
@@ -2196,7 +2203,7 @@ public void createTable( long namespaceId, String name, List<FieldInformation> f
             addConstraint( namespaceId, constraint.name, constraint.type, constraint.columnNames, tableId );
         }

-        LogicalTable catalogTable = catalog.getLogicalRel( namespaceId ).getTable( tableId );
+        LogicalTable catalogTable = catalog.getSnapshot().getRelSnapshot( namespaceId ).getTable( tableId );

         // Trigger rebuild of schema; triggers schema creation on adapters
         Catalog.getInstance().getSnapshot();
@@ -2247,7 +2254,7 @@ public void createCollection( long namespaceId, String name, boolean ifNotExists
                 true );

         // Initially create DataPlacement containers on every store the table should be placed.
-        LogicalCollection catalogCollection = catalog.getLogicalDoc( namespaceId ).getCollection( collectionId );
+        LogicalCollection catalogCollection = catalog.getSnapshot().getDocSnapshot( namespaceId ).getCollection( collectionId );

         // Trigger rebuild of schema; triggers schema creation on adapters
         Catalog.getInstance().getSnapshot();
@@ -2269,7 +2276,7 @@ public void createCollection( long namespaceId, String name, boolean ifNotExists

     private boolean assertEntityExists( long namespaceId, String name, boolean ifNotExists ) throws EntityAlreadyExistsException {
         // Check if there is already an entity with this name
-        if ( catalog.getLogicalRel( namespaceId ).checkIfExistsEntity( name ) ) {
+        if ( catalog.getSnapshot().getRelSnapshot( namespaceId ).checkIfExistsEntity( name ) ) {
             if ( ifNotExists ) {
                 // It is ok that there is already a table with this name because "IF NOT EXISTS" was specified
                 return true;
@@ -2297,7 +2304,7 @@ public void dropCollection( LogicalCollection catalogCollection, Statement state
     public void removeDocumentLogistics( LogicalCollection catalogCollection, Statement statement ) {
         CatalogCollectionMapping mapping = catalog.getAllocDoc( catalogCollection.namespaceId ).getCollectionMapping( catalogCollection.id );
-        LogicalTable table = catalog.getLogicalRel( catalogCollection.namespaceId ).getTable( mapping.collectionId );
+        LogicalTable table = catalog.getSnapshot().getRelSnapshot( catalogCollection.namespaceId ).getTable( mapping.collectionId );
         catalog.getLogicalRel( catalogCollection.namespaceId ).deleteTable( table.id );
     }
@@ -2308,7 +2315,7 @@ public void addCollectionPlacement( long namespaceId, String name, List<D
     private void checkDocumentModel( long namespaceId, List<FieldInformation> columns, List<ConstraintInformation> constraints ) {
-        if ( catalog.getNamespace( namespaceId ).namespaceType == NamespaceType.DOCUMENT ) {
+        if ( catalog.getSnapshot().getNamespace( namespaceId ).namespaceType == NamespaceType.DOCUMENT ) {
             List<String> names = columns.stream().map( c -> c.name ).collect( Collectors.toList() );

             if ( names.contains( "_id" ) ) {
@@ -2417,7 +2424,7 @@ private void checkDocumentModel( long namespaceId, List<FieldInformation> column
     @Override
     public void addPartitioning( PartitionInformation partitionInfo, List<DataStore> stores, Statement statement ) throws GenericCatalogException, UnknownPartitionTypeException, UnknownColumnException, PartitionGroupNamesNotUniqueException, UnknownTableException, TransactionException, UnknownSchemaException, UnknownUserException, UnknownKeyException {
-        LogicalColumn logicalColumn = catalog.getLogicalRel( partitionInfo.table.namespaceId ).getColumn( partitionInfo.table.id, partitionInfo.columnName );
+        LogicalColumn logicalColumn = catalog.getSnapshot().getRelSnapshot( partitionInfo.table.namespaceId ).getColumn( partitionInfo.table.id, partitionInfo.columnName );

         PartitionType actualPartitionType = PartitionType.getByName( partitionInfo.typeName );
@@ -2517,7 +2524,7 @@ public void addPartitioning( PartitionInformation partitionInfo, List
         List<Long> partitionIds = new ArrayList<>();
         //get All PartitionGroups and then get all partitionIds for each PG and add them to completeList of partitionIds
         //catalog.getLogicalRel( catalogTable.namespaceId ).getPartitionGroups( partitionInfo.table.id ).forEach( pg -> partitionIds.forEach( p -> partitionIds.add( p ) ) );
-        partitionGroupIds.forEach( pg -> catalog.getAllocRel( partitionInfo.table.namespaceId ).getPartitions( pg ).forEach( p -> partitionIds.add( p.id ) ) );
+        partitionGroupIds.forEach( pg -> catalog.getSnapshot().getAllocSnapshot().getPartitions( pg ).forEach( p -> partitionIds.add( p.id ) ) );
         PartitionProperty partitionProperty;
         if ( actualPartitionType == PartitionType.TEMPERATURE ) {
@@ -2549,7 +2556,7 @@ public void addPartitioning( PartitionInformation partitionInfo, List
             // -1 because one partition is already created in COLD
             List<Long> partitionsForHot = new ArrayList<>();
-            catalog.getAllocRel( partitionInfo.table.namespaceId ).getPartitions( partitionGroupIds.get( 0 ) ).forEach( p -> partitionsForHot.add( p.id ) );
+            catalog.getSnapshot().getAllocSnapshot().getPartitions( partitionGroupIds.get( 0 ) ).forEach( p -> partitionsForHot.add( p.id ) );

             // -1 because one partition is already created in HOT
             for ( int i = 0; i < numberOfPartitionsInHot - 1; i++ ) {
@@ -2563,7 +2570,7 @@ public void addPartitioning( PartitionInformation partitionInfo, List
             // -1 because one partition is already created in COLD
             List<Long> partitionsForCold = new ArrayList<>();
-            catalog.getAllocRel( partitionInfo.table.namespaceId ).getPartitions( partitionGroupIds.get( 1 ) ).forEach( p -> partitionsForCold.add( p.id ) );
+            catalog.getSnapshot().getAllocSnapshot().getPartitions( partitionGroupIds.get( 1 ) ).forEach( p -> partitionsForCold.add( p.id ) );

             for ( int i = 0; i < numberOfPartitionsInCold - 1; i++ ) {
                 long tempId;
@@ -2607,9 +2614,10 @@ public void addPartitioning( PartitionInformation partitionInfo, List
         // Get primary key of table and use PK to find all DataPlacements of table
         long pkid = partitionInfo.table.primaryKey;
-        List<Long> pkColumnIds = catalog.getLogicalRel( partitionInfo.table.namespaceId ).getPrimaryKey( pkid ).columnIds;
+        LogicalRelSnapshot snapshot = catalog.getSnapshot().getRelSnapshot( partitionInfo.table.namespaceId );
+        List<Long> pkColumnIds = snapshot.getPrimaryKey( pkid ).columnIds;
         // Basically get first part of PK even if its compound of PK it is sufficient
-        LogicalColumn pkColumn = catalog.getLogicalRel( partitionInfo.table.namespaceId ).getColumn( pkColumnIds.get( 0 ) );
+        LogicalColumn pkColumn = snapshot.getColumn( pkColumnIds.get( 0 ) );

         // This gets us only one ccp per store (first part of PK)
         boolean fillStores = false;
@@ -2617,7 +2625,7 @@ public void addPartitioning( PartitionInformation partitionInfo, List
             stores = new ArrayList<>();
             fillStores = true;
         }
-        List<CatalogColumnPlacement> catalogColumnPlacements = catalog.getAllocRel( partitionInfo.table.namespaceId ).getColumnPlacements( pkColumn.id );
+        List<CatalogColumnPlacement> catalogColumnPlacements = catalog.getSnapshot().getAllocSnapshot().getColumnPlacements( pkColumn.id );
         for ( CatalogColumnPlacement ccp : catalogColumnPlacements ) {
             if ( fillStores ) {
                 // Ask router on which store(s) the table should be placed
@@ -2629,7 +2637,7 @@ public void addPartitioning( PartitionInformation partitionInfo, List
         }

         // Now get the partitioned table, partitionInfo still contains the basic/unpartitioned table.
- LogicalTable partitionedTable = catalog.getLogicalRel( partitionInfo.table.namespaceId ).getTable( partitionInfo.table.id ); + LogicalTable partitionedTable = snapshot.getTable( partitionInfo.table.id ); DataMigrator dataMigrator = statement.getTransaction().getDataMigrator(); for ( DataStore store : stores ) { for ( long partitionId : partitionIds ) { @@ -2649,12 +2657,12 @@ public void addPartitioning( PartitionInformation partitionInfo, List // Get only columns that are actually on that store // Every store of a newly partitioned table, initially will hold all partitions List necessaryColumns = new LinkedList<>(); - catalog.getAllocRel( partitionInfo.table.namespaceId ).getColumnPlacementsOnAdapterPerTable( store.getAdapterId(), partitionedTable.id ).forEach( cp -> necessaryColumns.add( catalog.getLogicalRel( partitionInfo.table.namespaceId ).getColumn( cp.columnId ) ) ); + catalog.getSnapshot().getAllocSnapshot().getColumnPlacementsOnAdapterPerTable( store.getAdapterId(), partitionedTable.id ).forEach( cp -> necessaryColumns.add( snapshot.getColumn( cp.columnId ) ) ); // Copy data from the old partition to new partitions dataMigrator.copyPartitionData( statement.getTransaction(), - catalog.getAdapter( store.getAdapterId() ), + catalog.getSnapshot().getAdapter( store.getAdapterId() ), unPartitionedTable, partitionedTable, necessaryColumns, @@ -2663,7 +2671,7 @@ public void addPartitioning( PartitionInformation partitionInfo, List } // Adjust indexes - List indexes = catalog.getLogicalRel( partitionInfo.table.namespaceId ).getIndexes( unPartitionedTable.id, false ); + List indexes = snapshot.getIndexes( unPartitionedTable.id, false ); for ( CatalogIndex index : indexes ) { // Remove old index DataStore ds = ((DataStore) AdapterManager.getInstance().getAdapter( index.location )); @@ -2680,12 +2688,12 @@ public void addPartitioning( PartitionInformation partitionInfo, List index.type, index.name ); if ( index.location == 0 ) { - IndexManager.getInstance().addIndex( catalog.getLogicalRel( partitionInfo.table.namespaceId ).getIndex( newIndexId ), statement ); + IndexManager.getInstance().addIndex( snapshot.getIndex( newIndexId ), statement ); } else { ds.addIndex( statement.getPrepareContext(), - catalog.getLogicalRel( partitionInfo.table.namespaceId ).getIndex( newIndexId ), - catalog.getAllocRel( partitionInfo.table.namespaceId ).getPartitionsOnDataPlacement( ds.getAdapterId(), unPartitionedTable.id ) ); + snapshot.getIndex( newIndexId ), + catalog.getSnapshot().getAllocSnapshot().getPartitionsOnDataPlacement( ds.getAdapterId(), unPartitionedTable.id ) ); } } @@ -2707,6 +2715,8 @@ public void removePartitioning( LogicalTable partitionedTable, Statement stateme partitionedTable.name, partitionedTable.id, partitionedTable.getNamespaceName() ); } + LogicalRelSnapshot snapshot = catalog.getSnapshot().getRelSnapshot( partitionedTable.namespaceId ); + // Need to gather the partitionDistribution before actually merging // We need a columnPlacement for every partition Map> placementDistribution = new HashMap<>(); @@ -2718,17 +2728,17 @@ public void removePartitioning( LogicalTable partitionedTable, Statement stateme catalog.getAllocRel( partitionedTable.namespaceId ).mergeTable( tableId ); // Now get the merged table - LogicalTable mergedTable = catalog.getLogicalRel( partitionedTable.namespaceId ).getTable( tableId ); + LogicalTable mergedTable = snapshot.getTable( tableId ); List stores = new ArrayList<>(); // Get primary key of table and use PK to find all DataPlacements of table long pkid = 
partitionedTable.primaryKey; - List pkColumnIds = catalog.getLogicalRel( partitionedTable.namespaceId ).getPrimaryKey( pkid ).columnIds; + List pkColumnIds = snapshot.getPrimaryKey( pkid ).columnIds; // Basically get first part of PK even if its compound of PK it is sufficient - LogicalColumn pkColumn = catalog.getLogicalRel( partitionedTable.namespaceId ).getColumn( pkColumnIds.get( 0 ) ); + LogicalColumn pkColumn = snapshot.getColumn( pkColumnIds.get( 0 ) ); // This gets us only one ccp per store (first part of PK) - List catalogColumnPlacements = catalog.getAllocRel( partitionedTable.namespaceId ).getColumnPlacements( pkColumn.id ); + List catalogColumnPlacements = catalog.getSnapshot().getAllocSnapshot().getColumnPlacements( pkColumn.id ); for ( CatalogColumnPlacement ccp : catalogColumnPlacements ) { // Ask router on which store(s) the table should be placed Adapter adapter = AdapterManager.getInstance().getAdapter( ccp.adapterId ); @@ -2755,19 +2765,19 @@ public void removePartitioning( LogicalTable partitionedTable, Statement stateme // Get only columns that are actually on that store List necessaryColumns = new LinkedList<>(); - catalog.getAllocRel( partitionedTable.namespaceId ).getColumnPlacementsOnAdapterPerTable( store.getAdapterId(), mergedTable.id ).forEach( cp -> necessaryColumns.add( catalog.getLogicalRel( partitionedTable.namespaceId ).getColumn( cp.columnId ) ) ); + catalog.getSnapshot().getAllocSnapshot().getColumnPlacementsOnAdapterPerTable( store.getAdapterId(), mergedTable.id ).forEach( cp -> necessaryColumns.add( snapshot.getColumn( cp.columnId ) ) ); // TODO @HENNLO Check if this can be omitted catalog.getAllocRel( partitionedTable.namespaceId ).updateDataPlacement( store.getAdapterId(), mergedTable.id, - catalog.getAllocRel( partitionedTable.namespaceId ).getDataPlacement( store.getAdapterId(), mergedTable.id ).columnPlacementsOnAdapter, + catalog.getSnapshot().getAllocSnapshot().getDataPlacement( store.getAdapterId(), mergedTable.id ).columnPlacementsOnAdapter, mergedTable.partitionProperty.partitionIds ); // dataMigrator.copySelectiveData( statement.getTransaction(), - catalog.getAdapter( store.getAdapterId() ), + catalog.getSnapshot().getAdapter( store.getAdapterId() ), partitionedTable, mergedTable, necessaryColumns, @@ -2776,7 +2786,7 @@ public void removePartitioning( LogicalTable partitionedTable, Statement stateme } // Adjust indexes - List indexes = catalog.getLogicalRel( partitionedTable.namespaceId ).getIndexes( partitionedTable.id, false ); + List indexes = snapshot.getIndexes( partitionedTable.id, false ); for ( CatalogIndex index : indexes ) { // Remove old index DataStore ds = (DataStore) AdapterManager.getInstance().getAdapter( index.location ); @@ -2793,19 +2803,19 @@ public void removePartitioning( LogicalTable partitionedTable, Statement stateme index.type, index.name ); if ( index.location == 0 ) { - IndexManager.getInstance().addIndex( catalog.getLogicalRel( partitionedTable.namespaceId ).getIndex( newIndexId ), statement ); + IndexManager.getInstance().addIndex( snapshot.getIndex( newIndexId ), statement ); } else { ds.addIndex( statement.getPrepareContext(), - catalog.getLogicalRel( partitionedTable.namespaceId ).getIndex( newIndexId ), - catalog.getAllocRel( partitionedTable.namespaceId ).getPartitionsOnDataPlacement( ds.getAdapterId(), mergedTable.id ) ); + snapshot.getIndex( newIndexId ), + catalog.getSnapshot().getAllocSnapshot().getPartitionsOnDataPlacement( ds.getAdapterId(), mergedTable.id ) ); } } // Needs to be separated from loop 
above. Otherwise we lose data for ( DataStore store : stores ) { List partitionIdsOnStore = new ArrayList<>(); - catalog.getAllocRel( partitionedTable.namespaceId ).getPartitionPlacementsByTableOnAdapter( store.getAdapterId(), partitionedTable.id ).forEach( p -> partitionIdsOnStore.add( p.partitionId ) ); + catalog.getSnapshot().getAllocSnapshot().getPartitionPlacementsByTableOnAdapter( store.getAdapterId(), partitionedTable.id ).forEach( p -> partitionIdsOnStore.add( p.partitionId ) ); // Otherwise everything will be dropped again, leaving the table inaccessible partitionIdsOnStore.remove( mergedTable.partitionProperty.partitionIds.get( 0 ) ); @@ -2824,7 +2834,7 @@ public void removePartitioning( LogicalTable partitionedTable, Statement stateme private void addColumn( long namespaceId, String columnName, ColumnTypeInformation typeInformation, Collation collation, String defaultValue, long tableId, int position, List stores, PlacementType placementType ) throws GenericCatalogException, UnknownCollationException, UnknownColumnException { - columnName = adjustNameIfNeeded( columnName, catalog.getLogicalRel( namespaceId ).getTable( tableId ).namespaceId ); + columnName = adjustNameIfNeeded( columnName, catalog.getSnapshot().getRelSnapshot( namespaceId ).getTable( tableId ).namespaceId ); long addedColumnId = catalog.getLogicalRel( namespaceId ).addColumn( columnName, tableId, @@ -2857,7 +2867,7 @@ private void addColumn( long namespaceId, String columnName, ColumnTypeInformati public void addConstraint( long namespaceId, String constraintName, ConstraintType constraintType, List columnNames, long tableId ) throws UnknownColumnException, GenericCatalogException { List columnIds = new LinkedList<>(); for ( String columnName : columnNames ) { - LogicalColumn logicalColumn = catalog.getLogicalRel( namespaceId ).getColumn( tableId, columnName ); + LogicalColumn logicalColumn = catalog.getSnapshot().getRelSnapshot( namespaceId ).getColumn( tableId, columnName ); columnIds.add( logicalColumn.id ); } if ( constraintType == ConstraintType.PRIMARY ) { @@ -2874,18 +2884,19 @@ public void addConstraint( long namespaceId, String constraintName, ConstraintTy @Override public void dropNamespace( String schemaName, boolean ifExists, Statement statement ) throws SchemaNotExistException, DdlOnSourceException { schemaName = schemaName.toLowerCase(); + // Check if there is a schema with this name - if ( catalog.checkIfExistsNamespace( schemaName ) ) { - LogicalNamespace logicalNamespace = catalog.getNamespace( schemaName ); + if ( catalog.getSnapshot().checkIfExistsNamespace( schemaName ) ) { + LogicalNamespace logicalNamespace = catalog.getSnapshot().getNamespace( schemaName ); // Drop all collections in this namespace - List collections = catalog.getLogicalDoc( logicalNamespace.id ).getCollections( null ); + List collections = catalog.getSnapshot().getDocSnapshot( logicalNamespace.id ).getCollections( null ); for ( LogicalCollection collection : collections ) { dropCollection( collection, statement ); } // Drop all tables in this schema - List catalogEntities = catalog.getLogicalRel( logicalNamespace.id ).getTables( null ); + List catalogEntities = catalog.getSnapshot().getRelSnapshot( logicalNamespace.id ).getTables( null ); for ( LogicalTable catalogTable : catalogEntities ) { dropTable( catalogTable, statement ); } @@ -2961,7 +2972,8 @@ public void dropTable( LogicalTable catalogTable, Statement statement ) throws D // Check if there are foreign keys referencing this table List selfRefsToDelete = new
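For orientation, the dropNamespace flow after this hunk reads roughly as follows; this is a condensed sketch assembled from the calls shown above (generics restored, exception handling omitted), not a verbatim copy:

    // Sketch: cascading namespace drop over both data models.
    private void dropNamespaceSketch( String name, Statement statement ) {
        Snapshot snapshot = Catalog.getInstance().getSnapshot();
        if ( !snapshot.checkIfExistsNamespace( name ) ) {
            return; // nothing to do on the ifExists path
        }
        LogicalNamespace ns = snapshot.getNamespace( name );
        // Collections first, then tables, so both models are cleared.
        for ( LogicalCollection collection : snapshot.getDocSnapshot( ns.id ).getCollections( null ) ) {
            dropCollection( collection, statement );
        }
        for ( LogicalTable table : snapshot.getRelSnapshot( ns.id ).getTables( null ) ) {
            dropTable( table, statement );
        }
    }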
LinkedList<>(); - List exportedKeys = catalog.getLogicalRel( catalogTable.namespaceId ).getExportedKeys( catalogTable.id ); + LogicalRelSnapshot snapshot = catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ); + List exportedKeys = snapshot.getExportedKeys( catalogTable.id ); if ( exportedKeys.size() > 0 ) { for ( CatalogForeignKey foreignKey : exportedKeys ) { if ( foreignKey.tableId == catalogTable.id ) { @@ -2979,7 +2991,7 @@ public void dropTable( LogicalTable catalogTable, Statement statement ) throws D } // Delete all indexes - for ( CatalogIndex index : catalog.getLogicalRel( catalogTable.namespaceId ).getIndexes( catalogTable.id, false ) ) { + for ( CatalogIndex index : snapshot.getIndexes( catalogTable.id, false ) ) { if ( index.location == 0 ) { // Delete polystore index IndexManager.getInstance().deleteIndex( index ); @@ -2988,7 +3000,7 @@ public void dropTable( LogicalTable catalogTable, Statement statement ) throws D AdapterManager.getInstance().getStore( index.location ).dropIndex( statement.getPrepareContext(), index, - catalog.getAllocRel( catalogTable.namespaceId ).getPartitionsOnDataPlacement( index.location, catalogTable.id ) ); + catalog.getSnapshot().getAllocSnapshot().getPartitionsOnDataPlacement( index.location, catalogTable.id ) ); } // Delete index in catalog catalog.getLogicalRel( catalogTable.namespaceId ).deleteIndex( index.id ); @@ -2999,12 +3011,12 @@ public void dropTable( LogicalTable catalogTable, Statement statement ) throws D for ( long storeId : catalogTable.dataPlacements ) { // Delete table on store List partitionIdsOnStore = new ArrayList<>(); - catalog.getAllocRel( catalogTable.namespaceId ).getPartitionPlacementsByTableOnAdapter( storeId, catalogTable.id ).forEach( p -> partitionIdsOnStore.add( p.partitionId ) ); + catalog.getSnapshot().getAllocSnapshot().getPartitionPlacementsByTableOnAdapter( storeId, catalogTable.id ).forEach( p -> partitionIdsOnStore.add( p.partitionId ) ); AdapterManager.getInstance().getStore( storeId ).dropTable( statement.getPrepareContext(), catalogTable, partitionIdsOnStore ); // Delete column placement in catalog for ( LogicalColumn column : catalogTable.columns ) { - if ( catalog.getAllocRel( catalogTable.namespaceId ).checkIfExistsColumnPlacement( storeId, column.id ) ) { + if ( catalog.getSnapshot().getAllocSnapshot().checkIfExistsColumnPlacement( storeId, column.id ) ) { catalog.getAllocRel( catalogTable.namespaceId ).deleteColumnPlacement( storeId, column.id, false ); } } @@ -3021,7 +3033,7 @@ public void dropTable( LogicalTable catalogTable, Statement statement ) throws D } // Delete indexes of this table - List indexes = catalog.getLogicalRel( catalogTable.namespaceId ).getIndexes( catalogTable.id, false ); + List indexes = snapshot.getIndexes( catalogTable.id, false ); for ( CatalogIndex index : indexes ) { catalog.getLogicalRel( catalogTable.namespaceId ).deleteIndex( index.id ); IndexManager.getInstance().deleteIndex( index ); @@ -3032,12 +3044,12 @@ public void dropTable( LogicalTable catalogTable, Statement statement ) throws D // Remove primary key catalog.getLogicalRel( catalogTable.namespaceId ).deletePrimaryKey( catalogTable.id ); // Delete all foreign keys of the table - List foreignKeys = catalog.getLogicalRel( catalogTable.namespaceId ).getForeignKeys( catalogTable.id ); + List foreignKeys = snapshot.getForeignKeys( catalogTable.id ); for ( CatalogForeignKey foreignKey : foreignKeys ) { catalog.getLogicalRel( catalogTable.namespaceId ).deleteForeignKey( foreignKey.id ); } // Delete all 
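The index cleanup in dropTable distinguishes indexes the polystore maintains itself (location == 0) from indexes that physically live on a store. A condensed sketch of that branch, using the calls from this hunk (generics restored):

    // Sketch: polystore-managed vs. store-managed index removal.
    private void dropIndexesSketch( LogicalRelSnapshot snapshot, LogicalTable table, Statement statement ) {
        for ( CatalogIndex index : snapshot.getIndexes( table.id, false ) ) {
            if ( index.location == 0 ) {
                // Maintained in-memory by the polystore's own IndexManager.
                IndexManager.getInstance().deleteIndex( index );
            } else {
                // Physically dropped on the owning store, per partition placement.
                AdapterManager.getInstance().getStore( index.location ).dropIndex(
                        statement.getPrepareContext(),
                        index,
                        Catalog.getInstance().getSnapshot().getAllocSnapshot().getPartitionsOnDataPlacement( index.location, table.id ) );
            }
        }
    }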
constraints of the table - for ( CatalogConstraint constraint : catalog.getLogicalRel( catalogTable.namespaceId ).getConstraints( catalogTable.id ) ) { + for ( CatalogConstraint constraint : snapshot.getConstraints( catalogTable.id ) ) { catalog.getLogicalRel( catalogTable.namespaceId ).deleteConstraint( constraint.id ); } } catch ( GenericCatalogException e ) { diff --git a/dbms/src/main/java/org/polypheny/db/partition/AbstractPartitionManager.java b/dbms/src/main/java/org/polypheny/db/partition/AbstractPartitionManager.java index 9860b90452..7aea399c2c 100644 --- a/dbms/src/main/java/org/polypheny/db/partition/AbstractPartitionManager.java +++ b/dbms/src/main/java/org/polypheny/db/partition/AbstractPartitionManager.java @@ -45,7 +45,7 @@ public abstract class AbstractPartitionManager implements PartitionManager { public boolean probePartitionGroupDistributionChange( LogicalTable catalogTable, int storeId, long columnId, int threshold ) { // Check for the specified columnId if we still have a ColumnPlacement for every partitionGroup for ( Long partitionGroupId : catalogTable.partitionProperty.partitionGroupIds ) { - List ccps = catalog.getAllocRel( catalogTable.namespaceId ).getColumnPlacementsByPartitionGroup( catalogTable.id, partitionGroupId, columnId ); + List ccps = catalog.getSnapshot().getAllocSnapshot().getColumnPlacementsByPartitionGroup( catalogTable.id, partitionGroupId, columnId ); if ( ccps.size() <= threshold ) { for ( CatalogColumnPlacement placement : ccps ) { if ( placement.adapterId == storeId ) { @@ -66,11 +66,11 @@ public Map> getRelevantPlacements( LogicalTab if ( partitionIds != null ) { for ( long partitionId : partitionIds ) { - CatalogPartition catalogPartition = catalog.getAllocRel( catalogTable.namespaceId ).getPartition( partitionId ); + CatalogPartition catalogPartition = catalog.getSnapshot().getAllocSnapshot().getPartition( partitionId ); List relevantCcps = new ArrayList<>(); for ( LogicalColumn column : catalogTable.columns ) { - List ccps = catalog.getAllocRel( catalogTable.namespaceId ).getColumnPlacementsByPartitionGroup( catalogTable.id, catalogPartition.partitionGroupId, column.id ); + List ccps = catalog.getSnapshot().getAllocSnapshot().getColumnPlacementsByPartitionGroup( catalogTable.id, catalogPartition.partitionGroupId, column.id ); ccps.removeIf( ccp -> excludedAdapters.contains( ccp.adapterId ) ); if ( !ccps.isEmpty() ) { // Get first column placement which contains partition @@ -131,13 +131,13 @@ public Map>> getAllPlacements( Logi Map>> adapterPlacements = new HashMap<>(); // adapterId -> partitionId ; placements if ( partitionIds != null ) { for ( long partitionId : partitionIds ) { - List adapters = catalog.getAllocRel( catalogTable.namespaceId ).getAdaptersByPartitionGroup( catalogTable.id, partitionId ); + List adapters = catalog.getSnapshot().getAllocSnapshot().getAdaptersByPartitionGroup( catalogTable.id, partitionId ); for ( CatalogAdapter adapter : adapters ) { if ( !adapterPlacements.containsKey( adapter.id ) ) { adapterPlacements.put( adapter.id, new HashMap<>() ); } - List placements = catalog.getAllocRel( catalogTable.namespaceId ).getColumnPlacementsOnAdapterPerTable( adapter.id, catalogTable.id ); + List placements = catalog.getSnapshot().getAllocSnapshot().getColumnPlacementsOnAdapterPerTable( adapter.id, catalogTable.id ); adapterPlacements.get( adapter.id ).put( partitionId, placements ); } } diff --git a/dbms/src/main/java/org/polypheny/db/partition/FrequencyMapImpl.java 
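getRelevantPlacements above builds, per requested partition, the list of column placements a query can read from, preferring the first placement that survives the excluded-adapter filter. A sketch with generics restored (id types simplified to long):

    // Sketch of the per-partition placement resolution.
    private Map<Long, List<CatalogColumnPlacement>> relevantPlacementsSketch( LogicalTable table, List<Long> partitionIds, List<Long> excludedAdapters ) {
        AllocSnapshot alloc = Catalog.getInstance().getSnapshot().getAllocSnapshot();
        Map<Long, List<CatalogColumnPlacement>> result = new HashMap<>();
        for ( long partitionId : partitionIds ) {
            CatalogPartition partition = alloc.getPartition( partitionId );
            List<CatalogColumnPlacement> relevant = new ArrayList<>();
            for ( LogicalColumn column : table.columns ) {
                List<CatalogColumnPlacement> ccps = alloc.getColumnPlacementsByPartitionGroup( table.id, partition.partitionGroupId, column.id );
                ccps.removeIf( ccp -> excludedAdapters.contains( ccp.adapterId ) );
                if ( !ccps.isEmpty() ) {
                    relevant.add( ccps.get( 0 ) ); // first placement that still carries the partition
                }
            }
            result.put( partitionId, relevant );
        }
        return result;
    }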
b/dbms/src/main/java/org/polypheny/db/partition/FrequencyMapImpl.java index 283da82977..dce6f24fe9 100644 --- a/dbms/src/main/java/org/polypheny/db/partition/FrequencyMapImpl.java +++ b/dbms/src/main/java/org/polypheny/db/partition/FrequencyMapImpl.java @@ -122,7 +122,7 @@ private void processAllPeriodicTables() { Catalog catalog = Catalog.getInstance(); long invocationTimestamp = System.currentTimeMillis(); - List periodicTables = catalog.getTablesForPeriodicProcessing(); + List periodicTables = catalog.getSnapshot().getTablesForPeriodicProcessing(); // Retrieve all Tables which rely on periodic processing for ( LogicalTable table : periodicTables ) { if ( table.partitionProperty.partitionType == PartitionType.TEMPERATURE ) { @@ -216,7 +216,7 @@ private void determinePartitionDistribution( LogicalTable table ) { // Which of those are currently in cold --> action needed - List currentHotPartitions = Catalog.getInstance().getAllocRel( table.namespaceId ).getPartitions( ((TemperaturePartitionProperty) table.partitionProperty).getHotPartitionGroupId() ); + List currentHotPartitions = Catalog.getInstance().getSnapshot().getAllocSnapshot().getPartitions( ((TemperaturePartitionProperty) table.partitionProperty).getHotPartitionGroupId() ); for ( CatalogPartition catalogPartition : currentHotPartitions ) { // Remove partitions from List if they are already in HOT (not necessary to send to DataMigrator) @@ -265,8 +265,8 @@ private void redistributePartitions( LogicalTable table, List partitionsFr Statement statement = transaction.createStatement(); DataMigrator dataMigrator = statement.getTransaction().getDataMigrator(); - List adaptersWithHot = Catalog.getInstance().getAllocRel( table.namespaceId ).getAdaptersByPartitionGroup( table.id, ((TemperaturePartitionProperty) table.partitionProperty).getHotPartitionGroupId() ); - List adaptersWithCold = Catalog.getInstance().getAllocRel( table.namespaceId ).getAdaptersByPartitionGroup( table.id, ((TemperaturePartitionProperty) table.partitionProperty).getColdPartitionGroupId() ); + List adaptersWithHot = Catalog.getInstance().getSnapshot().getAllocSnapshot().getAdaptersByPartitionGroup( table.id, ((TemperaturePartitionProperty) table.partitionProperty).getHotPartitionGroupId() ); + List adaptersWithCold = Catalog.getInstance().getSnapshot().getAllocSnapshot().getAdaptersByPartitionGroup( table.id, ((TemperaturePartitionProperty) table.partitionProperty).getColdPartitionGroupId() ); log.debug( "Get adapters to create physical tables" ); // Validate that partition does not already exist on store @@ -330,7 +330,7 @@ private void createHotTables( LogicalTable table, List partitionsFromColdT // If this store contains both Groups HOT {@literal &} COLD do nothing if ( hotPartitionsToCreate.size() != 0 ) { - Catalog.getInstance().getAllocRel( table.namespaceId ).getPartitionsOnDataPlacement( store.getAdapterId(), table.id ); + Catalog.getInstance().getSnapshot().getAllocSnapshot().getPartitionsOnDataPlacement( store.getAdapterId(), table.id ); for ( long partitionId : hotPartitionsToCreate ) { catalog.getAllocRel( table.namespaceId ).addPartitionPlacement( @@ -345,11 +345,11 @@ private void createHotTables( LogicalTable table, List partitionsFromColdT store.createPhysicalTable( statement.getPrepareContext(), table, null ); List logicalColumns = new ArrayList<>(); - catalog.getAllocRel( table.namespaceId ).getColumnPlacementsOnAdapterPerTable( store.getAdapterId(), table.id ).forEach( cp -> logicalColumns.add( catalog.getLogicalRel( table.namespaceId ).getColumn( 
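redistributePartitions resolves, for a TEMPERATURE-partitioned table, which adapters currently hold the hot and which hold the cold partition group before any data is moved. A short sketch of that resolution, using the cast and property accessors from this hunk:

    // Sketch: hot/cold adapter sets for a temperature-partitioned table.
    TemperaturePartitionProperty property = (TemperaturePartitionProperty) table.partitionProperty;
    AllocSnapshot alloc = Catalog.getInstance().getSnapshot().getAllocSnapshot();
    List<CatalogAdapter> adaptersWithHot = alloc.getAdaptersByPartitionGroup( table.id, property.getHotPartitionGroupId() );
    List<CatalogAdapter> adaptersWithCold = alloc.getAdaptersByPartitionGroup( table.id, property.getColdPartitionGroupId() );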
cp.columnId ) ) ); + catalog.getSnapshot().getAllocSnapshot().getColumnPlacementsOnAdapterPerTable( store.getAdapterId(), table.id ).forEach( cp -> logicalColumns.add( catalog.getSnapshot().getRelSnapshot( table.namespaceId ).getColumn( cp.columnId ) ) ); dataMigrator.copyData( statement.getTransaction(), - catalog.getAdapter( store.getAdapterId() ), + catalog.getSnapshot().getAdapter( store.getAdapterId() ), logicalColumns, hotPartitionsToCreate ); @@ -380,10 +380,8 @@ private void createHotTables( LogicalTable table, List partitionsFromColdT */ private List filterList( long namespaceId, long adapterId, long tableId, List partitionsToFilter ) { // Remove partition from list if it's already contained on the store - for ( long partitionId : Catalog.getInstance().getAllocRel( namespaceId ).getPartitionsOnDataPlacement( adapterId, tableId ) ) { - if ( partitionsToFilter.contains( partitionId ) ) { - partitionsToFilter.remove( partitionId ); - } + for ( long partitionId : Catalog.getInstance().getSnapshot().getAllocSnapshot().getPartitionsOnDataPlacement( adapterId, tableId ) ) { + partitionsToFilter.remove( partitionId ); } return partitionsToFilter; } diff --git a/dbms/src/main/java/org/polypheny/db/partition/ListPartitionManager.java b/dbms/src/main/java/org/polypheny/db/partition/ListPartitionManager.java index 01a1ab153f..d47027c26a 100644 --- a/dbms/src/main/java/org/polypheny/db/partition/ListPartitionManager.java +++ b/dbms/src/main/java/org/polypheny/db/partition/ListPartitionManager.java @@ -45,8 +45,8 @@ public long getTargetPartitionId( LogicalTable catalogTable, String columnValue long selectedPartitionId = -1; // Process all accumulated CatalogPartitions - for ( CatalogPartition catalogPartition : Catalog.getInstance().getAllocRel( catalogTable.namespaceId ).getPartitionsByTable( catalogTable.id ) ) { - if ( unboundPartitionId == -1 && catalogPartition.isUnbound ) { + for ( CatalogPartition catalogPartition : Catalog.getInstance().getSnapshot().getAllocSnapshot().getPartitionsByTable( catalogTable.id ) ) { + if ( catalogPartition.isUnbound ) { unboundPartitionId = catalogPartition.id; break; } diff --git a/dbms/src/main/java/org/polypheny/db/partition/RangePartitionManager.java b/dbms/src/main/java/org/polypheny/db/partition/RangePartitionManager.java index 63b9937576..283866c544 100644 --- a/dbms/src/main/java/org/polypheny/db/partition/RangePartitionManager.java +++ b/dbms/src/main/java/org/polypheny/db/partition/RangePartitionManager.java @@ -47,7 +47,7 @@ public long getTargetPartitionId( LogicalTable catalogTable, String columnValue long selectedPartitionId = -1; // Process all accumulated CatalogPartitions - for ( CatalogPartition catalogPartition : Catalog.getInstance().getAllocRel( catalogTable.namespaceId ).getPartitionsByTable( catalogTable.id ) ) { + for ( CatalogPartition catalogPartition : Catalog.getInstance().getSnapshot().getAllocSnapshot().getPartitionsByTable( catalogTable.id ) ) { if ( unboundPartitionId == -1 && catalogPartition.isUnbound ) { unboundPartitionId = catalogPartition.id; break; diff --git a/dbms/src/main/java/org/polypheny/db/processing/AbstractQueryProcessor.java b/dbms/src/main/java/org/polypheny/db/processing/AbstractQueryProcessor.java index cc9fcc0472..7dc0ca59f7 100644 --- a/dbms/src/main/java/org/polypheny/db/processing/AbstractQueryProcessor.java +++ b/dbms/src/main/java/org/polypheny/db/processing/AbstractQueryProcessor.java @@ -617,7 +617,7 @@ public AlgNode visit( AlgNode node ) { final Catalog catalog = Catalog.getInstance(); final 
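The filterList simplification earlier in this hunk is safe because partitionId is a long: it boxes to Long, so the call resolves to List.remove( Object ), which is a no-op when the element is absent; the old contains() guard was therefore redundant. A self-contained illustration of that overload resolution:

    import java.util.ArrayList;
    import java.util.List;

    public class RemoveBoxingDemo {
        public static void main( String[] args ) {
            List<Long> partitionsToFilter = new ArrayList<>( List.of( 10L, 20L, 30L ) );
            long partitionId = 20L;
            // long boxes to Long, so this resolves to remove( Object ), not remove( int index ).
            partitionsToFilter.remove( partitionId );
            // Removing an absent element is a harmless no-op, which is why the
            // previous "contains" check could be dropped.
            partitionsToFilter.remove( Long.valueOf( 99L ) );
            System.out.println( partitionsToFilter ); // prints [10, 30]
        }
    }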
LogicalRelModify ltm = (LogicalRelModify) node; final LogicalTable table = ltm.getEntity().unwrap( LogicalTable.class ); - final LogicalNamespace schema = catalog.getNamespace( table.namespaceId ); + final LogicalNamespace schema = catalog.getSnapshot().getNamespace( table.namespaceId ); final List indices = IndexManager.getInstance().getIndices( schema, table ); // Check if there are any indexes effected by this table modify @@ -1328,7 +1328,7 @@ private Map> getAccessedPartitionsPerScan( AlgNode alg, Map< "TableID: {} is partitioned on column: {} - {}", catalogTable.id, catalogTable.partitionProperty.partitionColumnId, - Catalog.getInstance().getLogicalRel( catalogTable.namespaceId ).getColumn( catalogTable.partitionProperty.partitionColumnId ).name ); + Catalog.getInstance().getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getColumn( catalogTable.partitionProperty.partitionColumnId ).name ); } List identifiedPartitions = new ArrayList<>(); for ( String partitionValue : partitionValues ) { diff --git a/dbms/src/main/java/org/polypheny/db/processing/AuthenticatorImpl.java b/dbms/src/main/java/org/polypheny/db/processing/AuthenticatorImpl.java index 47058a7431..697ee13473 100644 --- a/dbms/src/main/java/org/polypheny/db/processing/AuthenticatorImpl.java +++ b/dbms/src/main/java/org/polypheny/db/processing/AuthenticatorImpl.java @@ -32,7 +32,7 @@ public class AuthenticatorImpl implements Authenticator { @Override public CatalogUser authenticate( final String username, final String password ) throws AuthenticationException { try { - CatalogUser catalogUser = Catalog.getInstance().getUser( username ); + CatalogUser catalogUser = Catalog.getInstance().getSnapshot().getUser( username ); if ( catalogUser.password.equals( password ) ) { return catalogUser; } else { diff --git a/dbms/src/main/java/org/polypheny/db/processing/ConstraintEnforceAttacher.java b/dbms/src/main/java/org/polypheny/db/processing/ConstraintEnforceAttacher.java index bf76dd2343..b35a00d38e 100644 --- a/dbms/src/main/java/org/polypheny/db/processing/ConstraintEnforceAttacher.java +++ b/dbms/src/main/java/org/polypheny/db/processing/ConstraintEnforceAttacher.java @@ -64,6 +64,7 @@ import org.polypheny.db.catalog.logistic.ConstraintType; import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.logistic.NamespaceType; +import org.polypheny.db.catalog.snapshot.LogicalRelSnapshot; import org.polypheny.db.config.Config; import org.polypheny.db.config.Config.ConfigListener; import org.polypheny.db.config.RuntimeConfig; @@ -202,12 +203,13 @@ public static AlgRoot enforceConstraintBeforeQuery( AlgRoot logicalRoot, Stateme final List foreignKeys; final List exportedKeys; table = root.getEntity().unwrap( LogicalTable.class ); - primaryKey = catalog.getLogicalRel( table.namespaceId ).getPrimaryKey( table.primaryKey ); - constraints = new ArrayList<>( Catalog.getInstance().getLogicalRel( table.namespaceId ).getConstraints( table.id ) ); - foreignKeys = Catalog.getInstance().getLogicalRel( table.namespaceId ).getForeignKeys( table.id ); - exportedKeys = Catalog.getInstance().getLogicalRel( table.namespaceId ).getExportedKeys( table.id ); + LogicalRelSnapshot snapshot = statement.getTransaction().getSnapshot().getRelSnapshot( table.namespaceId ); + primaryKey = snapshot.getPrimaryKey( table.primaryKey ); + constraints = new ArrayList<>( snapshot.getConstraints( table.id ) ); + foreignKeys = snapshot.getForeignKeys( table.id ); + exportedKeys = snapshot.getExportedKeys( table.id ); // Turn primary key into 
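ConstraintEnforceAttacher now reads all key and constraint metadata from one LogicalRelSnapshot taken off the statement's transaction, instead of four separate catalog round trips. A sketch of that gathering, including the trick the next lines apply, wrapping the primary key as an artificial UNIQUE constraint so a single enforcement path covers both (constructor arguments as shown in this hunk):

    // Sketch: one snapshot, one consistent view of keys and constraints.
    private List<CatalogConstraint> gatherConstraintsSketch( Statement statement, LogicalTable table ) {
        LogicalRelSnapshot snapshot = statement.getTransaction().getSnapshot().getRelSnapshot( table.namespaceId );
        CatalogPrimaryKey pk = snapshot.getPrimaryKey( table.primaryKey );
        List<CatalogConstraint> constraints = new ArrayList<>( snapshot.getConstraints( table.id ) );
        // Enforce the PK through the same code path as UNIQUE constraints.
        constraints.add( new CatalogConstraint( 0L, pk.id, ConstraintType.UNIQUE, "PRIMARY KEY", pk ) );
        return constraints;
    }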
an artificial unique constraint - CatalogPrimaryKey pk = Catalog.getInstance().getLogicalRel( table.namespaceId ).getPrimaryKey( table.primaryKey ); + CatalogPrimaryKey pk = snapshot.getPrimaryKey( table.primaryKey ); final CatalogConstraint pkc = new CatalogConstraint( 0L, pk.id, ConstraintType.UNIQUE, "PRIMARY KEY", pk ); constraints.add( pkc ); @@ -329,7 +331,7 @@ public static AlgRoot enforceConstraintBeforeQuery( AlgRoot logicalRoot, Stateme final RexBuilder rexBuilder = root.getCluster().getRexBuilder(); for ( final CatalogForeignKey foreignKey : foreignKeys ) { - final LogicalTable entity = statement.getDataContext().getSnapshot().getLogicalTable( foreignKey.referencedKeyTableId ); + final LogicalTable entity = statement.getDataContext().getSnapshot().getRelSnapshot( foreignKey.getNamespaceId() ).getLogicalTable( foreignKey.referencedKeyTableId ); final LogicalRelScan scan = LogicalRelScan.create( root.getCluster(), entity ); RexNode joinCondition = rexBuilder.makeLiteral( true ); builder.push( input ); @@ -480,14 +482,14 @@ public static AlgRoot enforceConstraintBeforeQuery( AlgRoot logicalRoot, Stateme AlgNode input = root.getInput().accept( new DeepCopyShuttle() ); final List projects = new ArrayList<>( foreignKey.columnIds.size() ); final List foreignProjects = new ArrayList<>( foreignKey.columnIds.size() ); - final LogicalTable foreignTable = Catalog.getInstance().getLogicalRel( table.namespaceId ).getTable( foreignKey.referencedKeyTableId ); + final LogicalTable foreignTable = snapshot.getTable( foreignKey.referencedKeyTableId ); builder.push( input ); for ( int i = 0; i < foreignKey.columnIds.size(); ++i ) { final String columnName = foreignKey.getColumnNames().get( i ); final String foreignColumnName = foreignKey.getReferencedKeyColumnNames().get( i ); final LogicalColumn foreignColumn; try { - foreignColumn = Catalog.getInstance().getLogicalRel( table.namespaceId ).getColumn( foreignTable.id, foreignColumnName ); + foreignColumn = snapshot.getColumn( foreignTable.id, foreignColumnName ); } catch ( UnknownColumnException e ) { throw new RuntimeException( e ); } @@ -556,14 +558,14 @@ public RexNode visitFieldAccess( RexFieldAccess fieldAccess ) { } final List projects = new ArrayList<>( foreignKey.columnIds.size() ); final List foreignProjects = new ArrayList<>( foreignKey.columnIds.size() ); - final LogicalTable foreignTable = Catalog.getInstance().getLogicalRel( table.namespaceId ).getTable( foreignKey.tableId ); + final LogicalTable foreignTable = snapshot.getTable( foreignKey.tableId ); for ( int i = 0; i < foreignKey.columnIds.size(); ++i ) { final String columnName = foreignKey.getReferencedKeyColumnNames().get( i ); final String foreignColumnName = foreignKey.getColumnNames().get( i ); final LogicalColumn column, foreignColumn; try { - column = Catalog.getInstance().getLogicalRel( table.namespaceId ).getColumn( table.id, columnName ); - foreignColumn = Catalog.getInstance().getLogicalRel( table.namespaceId ).getColumn( foreignTable.id, foreignColumnName ); + column = snapshot.getColumn( table.id, columnName ); + foreignColumn = snapshot.getColumn( foreignTable.id, foreignColumnName ); } catch ( UnknownColumnException e ) { throw new RuntimeException( e ); } @@ -653,9 +655,10 @@ private boolean testConstraintsValid() { try { List tables = Catalog .getInstance() + .getSnapshot() .getNamespaces( null ) .stream() - .flatMap( n -> Catalog.getInstance().getLogicalRel( n.id ).getTables( null ).stream() ) + .flatMap( n -> Catalog.getInstance().getSnapshot().getRelSnapshot( 
n.id ).getTables( null ).stream() ) .filter( t -> t.entityType == EntityType.ENTITY && t.getNamespaceType() == NamespaceType.RELATIONAL ) .collect( Collectors.toList() ); Transaction transaction = this.manager.startTransaction( Catalog.defaultUserId, false, "ConstraintEnforcement" ); diff --git a/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java b/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java index 039a305182..0bbd9e7c04 100644 --- a/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java +++ b/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java @@ -56,6 +56,8 @@ import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.entity.physical.PhysicalTable; import org.polypheny.db.catalog.refactor.ModifiableEntity; +import org.polypheny.db.catalog.snapshot.AllocSnapshot; +import org.polypheny.db.catalog.snapshot.LogicalRelSnapshot; import org.polypheny.db.config.RuntimeConfig; import org.polypheny.db.partition.PartitionManager; import org.polypheny.db.partition.PartitionManagerFactory; @@ -163,20 +165,21 @@ private static LogicalLpgValues getLogicalLpgValues( AlgBuilder builder, PolyGra @Override public void copyData( Transaction transaction, CatalogAdapter store, List columns, List partitionIds ) { - LogicalTable table = Catalog.getInstance().getLogicalRel( columns.get( 0 ).namespaceId ).getTable( columns.get( 0 ).tableId ); - CatalogPrimaryKey primaryKey = Catalog.getInstance().getLogicalRel( table.namespaceId ).getPrimaryKey( table.primaryKey ); + LogicalRelSnapshot snapshot = Catalog.getInstance().getSnapshot().getRelSnapshot( columns.get( 0 ).namespaceId ); + LogicalTable table = snapshot.getTable( columns.get( 0 ).tableId ); + CatalogPrimaryKey primaryKey = snapshot.getPrimaryKey( table.primaryKey ); // Check Lists List targetColumnPlacements = new LinkedList<>(); for ( LogicalColumn logicalColumn : columns ) { - targetColumnPlacements.add( Catalog.getInstance().getAllocRel( logicalColumn.namespaceId ).getColumnPlacement( store.id, logicalColumn.id ) ); + targetColumnPlacements.add( Catalog.getInstance().getSnapshot().getAllocSnapshot().getColumnPlacement( store.id, logicalColumn.id ) ); } List selectColumnList = new LinkedList<>( columns ); // Add primary keys to select column list for ( long cid : primaryKey.columnIds ) { - LogicalColumn logicalColumn = Catalog.getInstance().getLogicalRel( table.namespaceId ).getColumn( cid ); + LogicalColumn logicalColumn = snapshot.getColumn( cid ); if ( !selectColumnList.contains( logicalColumn ) ) { selectColumnList.add( logicalColumn ); } @@ -202,7 +205,7 @@ public void copyData( Transaction transaction, CatalogAdapter store, List selectColumnList, AlgRoot sourceAl @Override public AlgRoot buildDeleteStatement( Statement statement, List to, long partitionId ) { - PhysicalTable physical = statement.getTransaction().getSnapshot().getPhysicalTable( partitionId ); + PhysicalTable physical = statement.getTransaction().getSnapshot().getPhysicalSnapshot().getPhysicalTable( partitionId ); ModifiableEntity modifiableTable = physical.unwrap( ModifiableEntity.class ); AlgOptCluster cluster = AlgOptCluster.create( @@ -325,7 +328,7 @@ public AlgRoot buildDeleteStatement( Statement statement, List columnNames = new LinkedList<>(); List values = new LinkedList<>(); for ( CatalogColumnPlacement ccp : to ) { - LogicalColumn logicalColumn = Catalog.getInstance().getLogicalRel( ccp.namespaceId ).getColumn( ccp.columnId ); + LogicalColumn logicalColumn = 
Catalog.getInstance().getSnapshot().getRelSnapshot( ccp.namespaceId ).getColumn( ccp.columnId ); columnNames.add( ccp.getLogicalColumnName() ); values.add( new RexDynamicParam( logicalColumn.getAlgDataType( typeFactory ), (int) logicalColumn.id ) ); } @@ -349,7 +352,7 @@ public AlgRoot buildDeleteStatement( Statement statement, List to, long partitionId ) { - PhysicalTable physical = statement.getTransaction().getSnapshot().getPhysicalTable( partitionId ); + PhysicalTable physical = statement.getTransaction().getSnapshot().getPhysicalSnapshot().getPhysicalTable( partitionId ); ModifiableEntity modifiableTable = physical.unwrap( ModifiableEntity.class ); AlgOptCluster cluster = AlgOptCluster.create( @@ -364,7 +367,7 @@ public AlgRoot buildInsertStatement( Statement statement, List columnNames = new LinkedList<>(); List values = new LinkedList<>(); for ( CatalogColumnPlacement ccp : placements ) { - LogicalColumn logicalColumn = Catalog.getInstance().getLogicalRel( ccp.namespaceId ).getColumn( ccp.columnId ); + LogicalColumn logicalColumn = Catalog.getInstance().getSnapshot().getRelSnapshot( ccp.namespaceId ).getColumn( ccp.columnId ); columnNames.add( ccp.getLogicalColumnName() ); values.add( new RexDynamicParam( logicalColumn.getAlgDataType( typeFactory ), (int) logicalColumn.id ) ); } @@ -386,7 +389,7 @@ public AlgRoot buildInsertStatement( Statement statement, List to, long partitionId ) { - PhysicalTable physical = statement.getTransaction().getSnapshot().getPhysicalTable( partitionId ); + PhysicalTable physical = statement.getTransaction().getSnapshot().getPhysicalSnapshot().getPhysicalTable( partitionId ); ModifiableEntity modifiableTable = physical.unwrap( ModifiableEntity.class ); AlgOptCluster cluster = AlgOptCluster.create( @@ -399,11 +402,12 @@ private AlgRoot buildUpdateStatement( Statement statement, List columnNames = new LinkedList<>(); List values = new LinkedList<>(); for ( CatalogColumnPlacement ccp : to ) { - LogicalColumn logicalColumn = Catalog.getInstance().getLogicalRel( ccp.namespaceId ).getColumn( ccp.columnId ); + LogicalColumn logicalColumn = snapshot.getColumn( ccp.columnId ); columnNames.add( ccp.getLogicalColumnName() ); values.add( new RexDynamicParam( logicalColumn.getAlgDataType( typeFactory ), (int) logicalColumn.id ) ); } @@ -463,7 +467,7 @@ public static List selectSourcePlacements( LogicalTable Catalog catalog = Catalog.getInstance(); long adapterIdWithMostPlacements = -1; int numOfPlacements = 0; - for ( Entry> entry : catalog.getAllocRel( table.namespaceId ).getColumnPlacementsByAdapter( table.id ).entrySet() ) { + for ( Entry> entry : catalog.getSnapshot().getAllocSnapshot().getColumnPlacementsByAdapter( table.id ).entrySet() ) { if ( entry.getKey() != excludingAdapterId && entry.getValue().size() > numOfPlacements ) { adapterIdWithMostPlacements = entry.getKey(); numOfPlacements = entry.getValue().size(); @@ -474,15 +478,16 @@ public static List selectSourcePlacements( LogicalTable for ( LogicalColumn logicalColumn : columns ) { columnIds.add( logicalColumn.id ); } + AllocSnapshot snapshot = catalog.getSnapshot().getAllocSnapshot(); // Take the adapter with most placements as base and add missing column placements List placementList = new LinkedList<>(); for ( LogicalColumn column : table.columns ) { if ( columnIds.contains( column.id ) ) { - if ( catalog.getAllocRel( column.namespaceId ).getDataPlacement( adapterIdWithMostPlacements, table.id ).columnPlacementsOnAdapter.contains( column.id ) ) { - placementList.add( catalog.getAllocRel( 
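The build*Statement helpers above all end the same way: for every target column placement they emit one RexDynamicParam whose parameter index is the column id, so the migrator can later bind batches of values positionally. A sketch of that loop (generics restored; Pair is Polypheny's own utility, used elsewhere in this patch):

    // Sketch: one dynamic parameter per target column, indexed by column id.
    private Pair<List<String>, List<RexNode>> parameterRowSketch( List<CatalogColumnPlacement> placements, AlgDataTypeFactory typeFactory ) {
        List<String> columnNames = new LinkedList<>();
        List<RexNode> values = new LinkedList<>();
        for ( CatalogColumnPlacement ccp : placements ) {
            LogicalColumn column = Catalog.getInstance().getSnapshot().getRelSnapshot( ccp.namespaceId ).getColumn( ccp.columnId );
            columnNames.add( ccp.getLogicalColumnName() );
            values.add( new RexDynamicParam( column.getAlgDataType( typeFactory ), (int) column.id ) );
        }
        return Pair.of( columnNames, values );
    }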
column.namespaceId ).getColumnPlacement( adapterIdWithMostPlacements, column.id ) ); + if ( snapshot.getDataPlacement( adapterIdWithMostPlacements, table.id ).columnPlacementsOnAdapter.contains( column.id ) ) { + placementList.add( snapshot.getColumnPlacement( adapterIdWithMostPlacements, column.id ) ); } else { - for ( CatalogColumnPlacement placement : catalog.getAllocRel( column.namespaceId ).getColumnPlacements( column.id ) ) { + for ( CatalogColumnPlacement placement : snapshot.getColumnPlacements( column.id ) ) { if ( placement.adapterId != excludingAdapterId ) { placementList.add( placement ); break; @@ -510,19 +515,20 @@ public static List selectSourcePlacements( LogicalTable */ @Override public void copySelectiveData( Transaction transaction, CatalogAdapter store, LogicalTable sourceTable, LogicalTable targetTable, List columns, Map> placementDistribution, List targetPartitionIds ) { - CatalogPrimaryKey sourcePrimaryKey = Catalog.getInstance().getLogicalRel( sourceTable.namespaceId ).getPrimaryKey( sourceTable.primaryKey ); + CatalogPrimaryKey sourcePrimaryKey = Catalog.getInstance().getSnapshot().getRelSnapshot( sourceTable.namespaceId ).getPrimaryKey( sourceTable.primaryKey ); + AllocSnapshot snapshot = Catalog.getInstance().getSnapshot().getAllocSnapshot(); // Check Lists List targetColumnPlacements = new LinkedList<>(); for ( LogicalColumn logicalColumn : columns ) { - targetColumnPlacements.add( Catalog.getInstance().getAllocRel( sourceTable.namespaceId ).getColumnPlacement( store.id, logicalColumn.id ) ); + targetColumnPlacements.add( snapshot.getColumnPlacement( store.id, logicalColumn.id ) ); } List selectColumnList = new LinkedList<>( columns ); // Add primary keys to select column list for ( long cid : sourcePrimaryKey.columnIds ) { - LogicalColumn logicalColumn = Catalog.getInstance().getLogicalRel( sourceTable.namespaceId ).getColumn( cid ); + LogicalColumn logicalColumn = Catalog.getInstance().getSnapshot().getRelSnapshot( sourceTable.namespaceId ).getColumn( cid ); if ( !selectColumnList.contains( logicalColumn ) ) { selectColumnList.add( logicalColumn ); } @@ -533,7 +539,7 @@ public void copySelectiveData( Transaction transaction, CatalogAdapter store, Lo AlgRoot sourceAlg = getSourceIterator( sourceStatement, placementDistribution ); AlgRoot targetAlg; - if ( Catalog.getInstance().getAllocRel( targetTable.namespaceId ).getColumnPlacementsOnAdapterPerTable( store.id, targetTable.id ).size() == columns.size() ) { + if ( snapshot.getColumnPlacementsOnAdapterPerTable( store.id, targetTable.id ).size() == columns.size() ) { // There have been no placements for this table on this store before. Build insert statement targetAlg = buildInsertStatement( targetStatement, targetColumnPlacements, targetPartitionIds.get( 0 ) ); } else { @@ -612,19 +618,19 @@ public void copyPartitionData( Transaction transaction, CatalogAdapter store, Lo throw new RuntimeException( "Unsupported migration scenario. 
Table ID mismatch" ); } - CatalogPrimaryKey primaryKey = Catalog.getInstance().getLogicalRel( sourceTable.namespaceId ).getPrimaryKey( sourceTable.primaryKey ); + CatalogPrimaryKey primaryKey = Catalog.getInstance().getSnapshot().getRelSnapshot( sourceTable.namespaceId ).getPrimaryKey( sourceTable.primaryKey ); // Check Lists List targetColumnPlacements = new LinkedList<>(); for ( LogicalColumn logicalColumn : columns ) { - targetColumnPlacements.add( Catalog.getInstance().getAllocRel( sourceTable.namespaceId ).getColumnPlacement( store.id, logicalColumn.id ) ); + targetColumnPlacements.add( Catalog.getInstance().getSnapshot().getAllocSnapshot().getColumnPlacement( store.id, logicalColumn.id ) ); } List selectColumnList = new LinkedList<>( columns ); // Add primary keys to select column list for ( long cid : primaryKey.columnIds ) { - LogicalColumn logicalColumn = Catalog.getInstance().getLogicalRel( sourceTable.namespaceId ).getColumn( cid ); + LogicalColumn logicalColumn = Catalog.getInstance().getSnapshot().getRelSnapshot( sourceTable.namespaceId ).getColumn( cid ); if ( !selectColumnList.contains( logicalColumn ) ) { selectColumnList.add( logicalColumn ); } @@ -632,7 +638,7 @@ public void copyPartitionData( Transaction transaction, CatalogAdapter store, Lo // Add partition columns to select column list long partitionColumnId = targetTable.partitionProperty.partitionColumnId; - LogicalColumn partitionColumn = Catalog.getInstance().getLogicalRel( sourceTable.namespaceId ).getColumn( partitionColumnId ); + LogicalColumn partitionColumn = Catalog.getInstance().getSnapshot().getRelSnapshot( sourceTable.namespaceId ).getColumn( partitionColumnId ); if ( !selectColumnList.contains( partitionColumn ) ) { selectColumnList.add( partitionColumn ); } @@ -656,7 +662,7 @@ public void copyPartitionData( Transaction transaction, CatalogAdapter store, Lo Map targetAlgs = new HashMap<>(); AlgRoot sourceAlg = getSourceIterator( sourceStatement, placementDistribution ); - if ( Catalog.getInstance().getAllocRel( sourceTable.namespaceId ).getColumnPlacementsOnAdapterPerTable( store.id, sourceTable.id ).size() == columns.size() ) { + if ( Catalog.getInstance().getSnapshot().getAllocSnapshot().getColumnPlacementsOnAdapterPerTable( store.id, sourceTable.id ).size() == columns.size() ) { // There have been no placements for this table on this store before. 
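copyPartitionData widens its select list beyond the requested columns: the primary-key columns are always added (they identify rows on the target), and the partition column is added so each row can be routed to its target partition. A sketch of that assembly (generics restored):

    // Sketch: requested columns + PK columns + partition column, deduplicated.
    private List<LogicalColumn> selectListSketch( List<LogicalColumn> columns, CatalogPrimaryKey primaryKey, LogicalTable targetTable, LogicalRelSnapshot rel ) {
        List<LogicalColumn> selectColumnList = new LinkedList<>( columns );
        for ( long cid : primaryKey.columnIds ) {
            LogicalColumn pkColumn = rel.getColumn( cid );
            if ( !selectColumnList.contains( pkColumn ) ) {
                selectColumnList.add( pkColumn );
            }
        }
        LogicalColumn partitionColumn = rel.getColumn( targetTable.partitionProperty.partitionColumnId );
        if ( !selectColumnList.contains( partitionColumn ) ) {
            selectColumnList.add( partitionColumn );
        }
        return selectColumnList;
    }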
Build insert statement targetPartitionIds.forEach( id -> targetAlgs.put( id, buildInsertStatement( targetStatements.get( id ), targetColumnPlacements, id ) ) ); } else { diff --git a/dbms/src/main/java/org/polypheny/db/routing/UiRoutingPageUtil.java b/dbms/src/main/java/org/polypheny/db/routing/UiRoutingPageUtil.java index d085482b0e..14beb2034b 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/UiRoutingPageUtil.java +++ b/dbms/src/main/java/org/polypheny/db/routing/UiRoutingPageUtil.java @@ -29,6 +29,7 @@ import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.information.InformationGroup; import org.polypheny.db.information.InformationManager; import org.polypheny.db.information.InformationPage; @@ -44,6 +45,10 @@ */ public class UiRoutingPageUtil { + + private static Snapshot snapshot; + + public static void outputSingleResult( ProposedRoutingPlan proposedRoutingPlan, AlgNode optimalAlgNode, InformationManager queryAnalyzer ) { addPhysicalPlanPage( optimalAlgNode, queryAnalyzer ); @@ -84,6 +89,7 @@ private static void addRoutedPlanPage( AlgNode routedNode, InformationManager qu private static void addSelectedAdapterTable( InformationManager queryAnalyzer, ProposedRoutingPlan proposedRoutingPlan, InformationPage page ) { + snapshot = Catalog.getInstance().getSnapshot(); InformationGroup group = new InformationGroup( page, "Selected Placements" ); queryAnalyzer.addGroup( group ); InformationTable table = new InformationTable( @@ -91,14 +97,15 @@ private static void addSelectedAdapterTable( InformationManager queryAnalyzer, P ImmutableList.of( "Entity", "Field", "Partition (Group --> ID)", "Adapter", "Physical Name" ) ); if ( proposedRoutingPlan.getPhysicalPlacementsOfPartitions() != null ) { proposedRoutingPlan.getPhysicalPlacementsOfPartitions().forEach( ( k, v ) -> { - CatalogPartition catalogPartition = Catalog.getInstance().getAllocRel( k ).getPartition( k ); + + CatalogPartition catalogPartition = snapshot.getAllocSnapshot().getPartition( k ); LogicalTable catalogTable = Catalog.getInstance().getLogicalEntity( catalogPartition.tableId ).unwrap( LogicalTable.class ); - CatalogPartitionGroup catalogPartitionGroup = Catalog.getInstance().getAllocRel( catalogTable.namespaceId ).getPartitionGroup( catalogPartition.partitionGroupId ); + CatalogPartitionGroup catalogPartitionGroup = snapshot.getAllocSnapshot().getPartitionGroup( catalogPartition.partitionGroupId ); v.forEach( p -> { - CatalogColumnPlacement catalogColumnPlacement = Catalog.getInstance().getAllocRel( catalogTable.namespaceId ).getColumnPlacement( p.left, p.right ); - CatalogPartitionPlacement catalogPartitionPlacement = Catalog.getInstance().getAllocRel( catalogTable.namespaceId ).getPartitionPlacement( p.left, k ); - LogicalColumn logicalColumn = Catalog.getInstance().getLogicalRel( catalogTable.namespaceId ).getColumn( catalogColumnPlacement.columnId ); + CatalogColumnPlacement catalogColumnPlacement = snapshot.getAllocSnapshot().getColumnPlacement( p.left, p.right ); + CatalogPartitionPlacement catalogPartitionPlacement = snapshot.getAllocSnapshot().getPartitionPlacement( p.left, k ); + LogicalColumn logicalColumn = snapshot.getRelSnapshot( catalogTable.namespaceId ).getColumn( catalogColumnPlacement.columnId ); table.addRow( catalogTable.getNamespaceName() + "." 
+ catalogTable.name, logicalColumn.name, diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/AbstractDqlRouter.java b/dbms/src/main/java/org/polypheny/db/routing/routers/AbstractDqlRouter.java index 3119b5da4a..545d260455 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/AbstractDqlRouter.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/AbstractDqlRouter.java @@ -41,7 +41,6 @@ import org.polypheny.db.algebra.logical.relational.LogicalRelModify; import org.polypheny.db.algebra.logical.relational.LogicalRelScan; import org.polypheny.db.algebra.logical.relational.LogicalValues; -import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.catalog.logistic.Pattern; @@ -249,7 +248,7 @@ private List handleRelationalOnGraphScan( AlgNode node, Statem AlgBuilder algBuilder = AlgBuilder.create( statement ); RexBuilder rexBuilder = algBuilder.getRexBuilder(); - algBuilder.lpgScan( catalog.getNamespaces( new Pattern( logicalTable.getLogicalSchemaName() ) ).get( 0 ).id ); + algBuilder.lpgScan( catalog.getSnapshot().getNamespaces( new Pattern( logicalTable.getLogicalSchemaName() ) ).get( 0 ).id ); algBuilder.lpgMatch( List.of( algBuilder.lpgNodeMatch( List.of( logicalTable.getLogicalTableName() ) ) ), List.of( "n" ) ); algBuilder.lpgProject( List.of( rexBuilder.makeLpgGetId(), rexBuilder.makeLpgPropertiesExtract(), rexBuilder.makeLpgLabels() ), diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java b/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java index 93fc6db1b0..8f72df8ed6 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java @@ -69,6 +69,8 @@ import org.polypheny.db.catalog.entity.physical.PhysicalTable; import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.catalog.refactor.TranslatableEntity; +import org.polypheny.db.catalog.snapshot.AllocSnapshot; +import org.polypheny.db.catalog.snapshot.LogicalRelSnapshot; import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.config.RuntimeConfig; import org.polypheny.db.languages.OperatorRegistry; @@ -99,6 +101,7 @@ public abstract class BaseRouter implements Router { .build(); final static Catalog catalog = Catalog.getInstance(); + private static AllocSnapshot allocSnapshot; static { @@ -106,6 +109,9 @@ public abstract class BaseRouter implements Router { } + private LogicalRelSnapshot snapshot; + + /** * Execute the table scan on the first placement of a table */ @@ -113,7 +119,7 @@ protected static Map> selectPlacement( Logica // Find the adapter with the most column placements long adapterIdWithMostPlacements = -1; int numOfPlacements = 0; - for ( Entry> entry : catalog.getAllocRel( table.namespaceId ).getColumnPlacementsByAdapter( table.id ).entrySet() ) { + for ( Entry> entry : Catalog.getInstance().getSnapshot().getAllocSnapshot().getColumnPlacementsByAdapter( table.id ).entrySet() ) { if ( entry.getValue().size() > numOfPlacements ) { adapterIdWithMostPlacements = entry.getKey(); numOfPlacements = entry.getValue().size(); @@ -123,10 +129,11 @@ protected static Map> selectPlacement( Logica // Take the adapter with most placements as base and add missing column placements List placementList = new LinkedList<>(); for ( LogicalColumn column : table.columns ) { - if ( catalog.getAllocRel( table.namespaceId 
).getDataPlacement( adapterIdWithMostPlacements, table.id ).columnPlacementsOnAdapter.contains( column.id ) ) { - placementList.add( Catalog.getInstance().getAllocRel( table.namespaceId ).getColumnPlacements( column.id ).get( 0 ) ); + allocSnapshot = Catalog.getInstance().getSnapshot().getAllocSnapshot(); + if ( allocSnapshot.getDataPlacement( adapterIdWithMostPlacements, table.id ).columnPlacementsOnAdapter.contains( column.id ) ) { + placementList.add( allocSnapshot.getColumnPlacements( column.id ).get( 0 ) ); } else { - placementList.add( Catalog.getInstance().getAllocRel( table.namespaceId ).getColumnPlacements( column.id ).get( 0 ) ); + placementList.add( allocSnapshot.getColumnPlacements( column.id ).get( 0 ) ); } } @@ -287,17 +294,18 @@ public AlgNode buildJoinedScan( Statement statement, AlgOptCluster cluster, Map< // We need to join placements on different adapters // Get primary key - long pkid = catalog.getLogicalRel( currentPlacements.get( 0 ).namespaceId ).getTable( currentPlacements.get( 0 ).tableId ).primaryKey; - List pkColumnIds = catalog.getLogicalRel( currentPlacements.get( 0 ).namespaceId ).getPrimaryKey( pkid ).columnIds; + snapshot = catalog.getSnapshot().getRelSnapshot( currentPlacements.get( 0 ).namespaceId ); + long pkid = snapshot.getTable( currentPlacements.get( 0 ).tableId ).primaryKey; + List pkColumnIds = snapshot.getPrimaryKey( pkid ).columnIds; List pkColumns = new LinkedList<>(); for ( long pkColumnId : pkColumnIds ) { - pkColumns.add( catalog.getLogicalRel( currentPlacements.get( 0 ).namespaceId ).getColumn( pkColumnId ) ); + pkColumns.add( snapshot.getColumn( pkColumnId ) ); } // Add primary key for ( Entry> entry : placementsByAdapter.entrySet() ) { for ( LogicalColumn pkColumn : pkColumns ) { - CatalogColumnPlacement pkPlacement = catalog.getAllocRel( currentPlacements.get( 0 ).namespaceId ).getColumnPlacements( pkColumn.id ).get( 0 ); + CatalogColumnPlacement pkPlacement = Catalog.getInstance().getSnapshot().getAllocSnapshot().getColumnPlacements( pkColumn.id ).get( 0 ); if ( !entry.getValue().contains( pkPlacement ) ) { entry.getValue().add( pkPlacement ); } @@ -308,7 +316,7 @@ public AlgNode buildJoinedScan( Statement statement, AlgOptCluster cluster, Map< boolean first = true; for ( List ccps : placementsByAdapter.values() ) { CatalogColumnPlacement ccp = ccps.get( 0 ); - CatalogPartitionPlacement cpp = catalog.getAllocRel( currentPlacements.get( 0 ).namespaceId ).getPartitionPlacement( ccp.adapterId, partitionId ); + CatalogPartitionPlacement cpp = Catalog.getInstance().getSnapshot().getAllocSnapshot().getPartitionPlacement( ccp.adapterId, partitionId ); handleScan( builder, @@ -356,7 +364,7 @@ public AlgNode buildJoinedScan( Statement statement, AlgOptCluster cluster, Map< CatalogColumnPlacement placement = new ArrayList<>( placements.values() ).get( 0 ).get( 0 ); // todo dl: remove after RowType refactor - if ( catalog.getNamespace( placement.namespaceId ).namespaceType == NamespaceType.DOCUMENT ) { + if ( catalog.getSnapshot().getNamespace( placement.namespaceId ).namespaceType == NamespaceType.DOCUMENT ) { AlgDataType rowType = new AlgRecordType( List.of( new AlgDataTypeFieldImpl( "d", 0, cluster.getTypeFactory().createPolyType( PolyType.DOCUMENT ) ) ) ); builder.push( new LogicalTransformer( node.getCluster(), @@ -377,7 +385,7 @@ public AlgNode buildJoinedScan( Statement statement, AlgOptCluster cluster, Map< private void buildFinalProject( RoutedAlgBuilder builder, List currentPlacements ) { List rexNodes = new ArrayList<>(); List 
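buildJoinedScan can only join per-adapter scans if every adapter contributes the join key, so each adapter's placement list is padded with the primary-key columns before the scans are built. A sketch of that padding step (generics restored):

    // Sketch: ensure the PK columns are present in every adapter's placement list.
    private void padWithPrimaryKeySketch( Map<Long, List<CatalogColumnPlacement>> placementsByAdapter, List<LogicalColumn> pkColumns, AllocSnapshot alloc ) {
        for ( List<CatalogColumnPlacement> placements : placementsByAdapter.values() ) {
            for ( LogicalColumn pkColumn : pkColumns ) {
                CatalogColumnPlacement pkPlacement = alloc.getColumnPlacements( pkColumn.id ).get( 0 );
                if ( !placements.contains( pkPlacement ) ) {
                    placements.add( pkPlacement );
                }
            }
        }
    }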
placementList = currentPlacements.stream() - .map( col -> catalog.getLogicalRel( currentPlacements.get( 0 ).namespaceId ).getColumn( col.columnId ) ) + .map( col -> catalog.getSnapshot().getRelSnapshot( currentPlacements.get( 0 ).namespaceId ).getColumn( col.columnId ) ) .sorted( Comparator.comparingInt( col -> col.position ) ) .collect( Collectors.toList() ); for ( LogicalColumn logicalColumn : placementList ) { @@ -409,7 +417,7 @@ public AlgNode handleGraphScan( LogicalLpgScan alg, Statement statement, @Nullab } for ( long adapterId : placements ) { - PhysicalGraph graph = snapshot.getPhysicalGraph( catalogGraph.id, adapterId ); + PhysicalGraph graph = snapshot.getPhysicalSnapshot().getPhysicalGraph( catalogGraph.id, adapterId ); if ( !(graph instanceof TranslatableEntity) ) { // needs substitution later on @@ -431,7 +439,7 @@ public AlgNode handleGraphScan( LogicalLpgScan alg, Statement statement, @Nullab private AlgNode handleGraphOnRelational( LogicalLpgScan alg, CatalogNamespace namespace, Statement statement, Long placementId ) { AlgOptCluster cluster = alg.getCluster(); - List tables = catalog.getLogicalRel( namespace.id ).getTables( null ); + List tables = catalog.getSnapshot().getRelSnapshot( namespace.id ).getTables( null ); List> scans = tables.stream() .map( t -> Pair.of( t.name, buildJoinedScan( statement, cluster, selectPlacement( t ) ) ) ) .collect( Collectors.toList() ); @@ -445,7 +453,7 @@ private AlgNode handleGraphOnRelational( LogicalLpgScan alg, CatalogNamespace na private AlgNode handleGraphOnDocument( LogicalLpgScan alg, CatalogNamespace namespace, Statement statement, Long placementId ) { AlgOptCluster cluster = alg.getCluster(); - List collections = catalog.getLogicalDoc( namespace.id ).getCollections( null ); + List collections = catalog.getSnapshot().getDocSnapshot( namespace.id ).getCollections( null ); List> scans = collections.stream() .map( t -> { RoutedAlgBuilder algBuilder = RoutedAlgBuilder.create( statement, alg.getCluster() ); @@ -534,7 +542,7 @@ protected RoutedAlgBuilder handleDocumentScan( DocumentScan alg, Statement st } for ( Long placementId : placements ) { - CatalogAdapter adapter = catalog.getAdapter( placementId ); + CatalogAdapter adapter = catalog.getSnapshot().getAdapter( placementId ); NamespaceType sourceModel = collection.namespaceType; if ( !adapter.supportedNamespaces.contains( sourceModel ) ) { @@ -545,7 +553,7 @@ protected RoutedAlgBuilder handleDocumentScan( DocumentScan alg, Statement st // CatalogCollectionPlacement placement = catalog.getAllocDoc( alg.entity ).getCollectionPlacement( collection.id, placementId ); // String namespaceName = PolySchemaBuilder.buildAdapterSchemaName( adapter.uniqueName, collection.getNamespaceName(), placement.physicalNamespaceName ); // String collectionName = collection.name + "_" + placement.id; - PhysicalTable collectionTable = snapshot.getPhysicalTable( collection.id, adapterId ); + PhysicalTable collectionTable = snapshot.getPhysicalSnapshot().getPhysicalTable( collection.id, adapterId ); // we might previously have pushed the non-native transformer builder.clear(); return builder.push( LogicalDocumentScan.create( alg.getCluster(), collectionTable ) ); diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/CachedPlanRouter.java b/dbms/src/main/java/org/polypheny/db/routing/routers/CachedPlanRouter.java index 457692a07e..b2d283fbf3 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/CachedPlanRouter.java +++ 
b/dbms/src/main/java/org/polypheny/db/routing/routers/CachedPlanRouter.java @@ -68,7 +68,7 @@ private RoutedAlgBuilder buildCachedSelect( AlgNode node, RoutedAlgBuilder build for ( long partition : partitionIds ) { if ( cachedPlan.physicalPlacementsOfPartitions.get( partition ) != null ) { List colPlacements = cachedPlan.physicalPlacementsOfPartitions.get( partition ).stream() - .map( placementInfo -> catalog.getAllocRel( catalogTable.namespaceId ).getColumnPlacement( placementInfo.left, placementInfo.right ) ) + .map( placementInfo -> catalog.getSnapshot().getAllocSnapshot().getColumnPlacement( placementInfo.left, placementInfo.right ) ) .collect( Collectors.toList() ); placement.put( partition, colPlacements ); } diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java b/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java index c38849801c..65417106f0 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java @@ -117,6 +117,10 @@ @Slf4j public class DmlRouterImpl extends BaseRouter implements DmlRouter { + + private Snapshot snapshot; + + /** * Default implementation: Execute DML on all placements */ @@ -129,6 +133,7 @@ public AlgNode routeDml( LogicalRelModify modify, Statement statement ) { } LogicalTable catalogTable = modify.getEntity().unwrap( LogicalTable.class ); + Snapshot snapshot = catalog.getSnapshot(); // Get placements of this table @@ -145,11 +150,11 @@ public AlgNode routeDml( LogicalRelModify modify, Statement statement ) { } long pkid = catalogTable.primaryKey; - List pkColumnIds = catalog.getLogicalRel( modify.entity.namespaceId ).getPrimaryKey( pkid ).columnIds; - LogicalColumn pkColumn = catalog.getLogicalRel( modify.entity.namespaceId ).getColumn( pkColumnIds.get( 0 ) ); + List pkColumnIds = snapshot.getRelSnapshot( modify.entity.namespaceId ).getPrimaryKey( pkid ).columnIds; + LogicalColumn pkColumn = snapshot.getRelSnapshot( modify.entity.namespaceId ).getColumn( pkColumnIds.get( 0 ) ); // Essentially gets a list of all stores where this table resides - List pkPlacements = catalog.getAllocRel( modify.entity.namespaceId ).getColumnPlacements( pkColumn.id ); + List pkPlacements = snapshot.getAllocSnapshot().getColumnPlacements( pkColumn.id ); if ( catalogTable.partitionProperty.isPartitioned && log.isDebugEnabled() ) { log.debug( "\nListing all relevant stores for table: '{}' and all partitions: {}", catalogTable.name, catalogTable.partitionProperty.partitionGroupIds ); @@ -157,8 +162,8 @@ public AlgNode routeDml( LogicalRelModify modify, Statement statement ) { log.debug( "\t\t -> '{}' {}\t{}", dataPlacement.adapterId, - catalog.getAllocRel( modify.entity.namespaceId ).getPartitionGroupsOnDataPlacement( dataPlacement.adapterId, dataPlacement.tableId ), - catalog.getAllocRel( modify.entity.namespaceId ).getPartitionGroupsIndexOnDataPlacement( dataPlacement.adapterId, dataPlacement.tableId ) ); + snapshot.getAllocSnapshot().getPartitionGroupsOnDataPlacement( dataPlacement.adapterId, dataPlacement.tableId ), + snapshot.getAllocSnapshot().getPartitionGroupsIndexOnDataPlacement( dataPlacement.adapterId, dataPlacement.tableId ) ); } } @@ -176,10 +181,9 @@ public AlgNode routeDml( LogicalRelModify modify, Statement statement ) { Map newParameterValues = new HashMap<>(); for ( CatalogColumnPlacement pkPlacement : pkPlacements ) { - Snapshot snapshot = statement.getTransaction().getSnapshot(); // Get placements on store - List 
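routeDml starts from the primary key: the placements of the first PK column enumerate every store the table resides on, which is exactly the set of stores a DML statement may have to reach. A sketch of that resolution (generics restored):

    // Sketch: the first PK column's placements list every store holding the table.
    private List<CatalogColumnPlacement> pkPlacementsSketch( Snapshot snapshot, LogicalTable table ) {
        LogicalRelSnapshot rel = snapshot.getRelSnapshot( table.namespaceId );
        List<Long> pkColumnIds = rel.getPrimaryKey( table.primaryKey ).columnIds;
        LogicalColumn pkColumn = rel.getColumn( pkColumnIds.get( 0 ) );
        return snapshot.getAllocSnapshot().getColumnPlacements( pkColumn.id );
    }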
placementsOnAdapter = catalog.getAllocRel( modify.entity.namespaceId ).getColumnPlacementsOnAdapterPerTable( pkPlacement.adapterId, catalogTable.id ); + List placementsOnAdapter = snapshot.getAllocSnapshot().getColumnPlacementsOnAdapterPerTable( pkPlacement.adapterId, catalogTable.id ); // If this is an update, check whether we need to execute on this store at all List updateColumnList = modify.getUpdateColumnList(); @@ -195,8 +199,8 @@ public AlgNode routeDml( LogicalRelModify modify, Statement statement ) { String columnName = updateColumnListIterator.next(); sourceExpressionListIterator.next(); try { - LogicalColumn logicalColumn = catalog.getLogicalRel( catalogTable.namespaceId ).getColumn( catalogTable.id, columnName ); - if ( !catalog.getAllocRel( catalogTable.namespaceId ).checkIfExistsColumnPlacement( pkPlacement.adapterId, logicalColumn.id ) ) { + LogicalColumn logicalColumn = snapshot.getRelSnapshot( catalogTable.namespaceId ).getColumn( catalogTable.id, columnName ); + if ( !snapshot.getAllocSnapshot().checkIfExistsColumnPlacement( pkPlacement.adapterId, logicalColumn.id ) ) { updateColumnListIterator.remove(); sourceExpressionListIterator.remove(); } @@ -262,7 +266,7 @@ public AlgNode visit( LogicalFilter filter ) { for ( String cn : updateColumnList ) { try { - if ( catalog.getLogicalRel( catalogTable.namespaceId ).getColumn( catalogTable.id, cn ).id == catalogTable.partitionProperty.partitionColumnId ) { + if ( snapshot.getRelSnapshot( catalogTable.namespaceId ).getColumn( catalogTable.id, cn ).id == catalogTable.partitionProperty.partitionColumnId ) { if ( log.isDebugEnabled() ) { log.debug( " UPDATE: Found PartitionColumnID Match: '{}' at index: {}", catalogTable.partitionProperty.partitionColumnId, index ); } @@ -375,7 +379,7 @@ else if ( identifiedPartitionForSetValue != -1 ) { for ( Map.Entry>> partitionMapping : tuplesOnPartition.entrySet() ) { Long currentPartitionId = partitionMapping.getKey(); - if ( !catalog.getAllocRel( catalogTable.namespaceId ).getPartitionsOnDataPlacement( pkPlacement.adapterId, catalogTable.id ).contains( currentPartitionId ) ) { + if ( !snapshot.getAllocSnapshot().getPartitionsOnDataPlacement( pkPlacement.adapterId, catalogTable.id ).contains( currentPartitionId ) ) { continue; } @@ -391,13 +395,13 @@ else if ( identifiedPartitionForSetValue != -1 ) { RoutedAlgBuilder.create( statement, cluster ), catalogTable, placementsOnAdapter, - catalog.getAllocRel( catalogTable.namespaceId ).getPartitionPlacement( pkPlacement.adapterId, currentPartitionId ), + snapshot.getAllocSnapshot().getPartitionPlacement( pkPlacement.adapterId, currentPartitionId ), statement, cluster, true, statement.getDataContext().getParameterValues() ).build(); - PhysicalTable physical = snapshot.getPhysicalTable( currentPartitionId ); + PhysicalTable physical = snapshot.getPhysicalSnapshot().getPhysicalTable( currentPartitionId ); ModifiableEntity modifiableTable = physical.unwrap( ModifiableEntity.class ); // Build DML @@ -419,7 +423,7 @@ else if ( identifiedPartitionForSetValue != -1 ) { } else if ( modify.getInput() instanceof LogicalProject && ((LogicalProject) modify.getInput()).getInput() instanceof LogicalValues ) { - String partitionColumnName = catalog.getLogicalRel( catalogTable.namespaceId ).getColumn( catalogTable.partitionProperty.partitionColumnId ).name; + String partitionColumnName = snapshot.getRelSnapshot( catalogTable.namespaceId ).getColumn( catalogTable.partitionProperty.partitionColumnId ).name; List fieldNames = 
modify.getInput().getRowType().getFieldNames(); LogicalRelModify ltm = modify; @@ -448,7 +452,7 @@ else if ( identifiedPartitionForSetValue != -1 ) { tempPartitionId = partitionManager.getTargetPartitionId( catalogTable, currentRow.get( partitionValueIndex ).toString() ); - if ( !catalog.getAllocRel( catalogTable.namespaceId ).getPartitionsOnDataPlacement( pkPlacement.adapterId, catalogTable.id ).contains( tempPartitionId ) ) { + if ( !snapshot.getAllocSnapshot().getPartitionsOnDataPlacement( pkPlacement.adapterId, catalogTable.id ).contains( tempPartitionId ) ) { continue; } @@ -474,13 +478,13 @@ else if ( identifiedPartitionForSetValue != -1 ) { RoutedAlgBuilder.create( statement, cluster ), catalogTable, placementsOnAdapter, - catalog.getAllocRel( catalogTable.namespaceId ).getPartitionPlacement( pkPlacement.adapterId, entry.getKey() ), + snapshot.getAllocSnapshot().getPartitionPlacement( pkPlacement.adapterId, entry.getKey() ), statement, cluster, false, entry.getValue() ).build(); - PhysicalTable physical = snapshot.getPhysicalTable( entry.getKey() ); + PhysicalTable physical = snapshot.getPhysicalSnapshot().getPhysicalTable( entry.getKey() ); ModifiableEntity modifiableTable = physical.unwrap( ModifiableEntity.class ); // Build DML @@ -520,8 +524,8 @@ else if ( identifiedPartitionForSetValue != -1 ) { } if ( log.isDebugEnabled() ) { - String partitionColumnName = catalog.getLogicalRel( catalogTable.namespaceId ).getColumn( catalogTable.partitionProperty.partitionColumnId ).name; - String partitionName = catalog.getAllocRel( catalogTable.namespaceId ).getPartitionGroup( identPart ).partitionGroupName; + String partitionColumnName = snapshot.getRelSnapshot( catalogTable.namespaceId ).getColumn( catalogTable.partitionProperty.partitionColumnId ).name; + String partitionName = snapshot.getAllocSnapshot().getPartitionGroup( identPart ).partitionGroupName; log.debug( "INSERT: partitionColumn-value: '{}' should be put on partition: {} ({}), which is partitioned with column {}", partitionValue, identPart, partitionName, partitionColumnName ); } @@ -557,11 +561,11 @@ else if ( identifiedPartitionForSetValue != -1 ) { for ( long partitionId : accessedPartitionList ) { - if ( !catalog.getAllocRel( catalogTable.namespaceId ).getPartitionsOnDataPlacement( pkPlacement.adapterId, catalogTable.id ).contains( partitionId ) ) { + if ( !snapshot.getAllocSnapshot().getPartitionsOnDataPlacement( pkPlacement.adapterId, catalogTable.id ).contains( partitionId ) ) { continue; } - PhysicalTable physical = snapshot.getPhysicalTable( partitionId ); + PhysicalTable physical = snapshot.getPhysicalSnapshot().getPhysicalTable( partitionId ); // Build DML Modify adjustedModify; @@ -570,7 +574,7 @@ else if ( identifiedPartitionForSetValue != -1 ) { RoutedAlgBuilder.create( statement, cluster ), catalogTable, placementsOnAdapter, - catalog.getAllocRel( catalogTable.namespaceId ).getPartitionPlacement( pkPlacement.adapterId, partitionId ), + snapshot.getAllocSnapshot().getPartitionPlacement( pkPlacement.adapterId, partitionId ), statement, cluster, false, @@ -697,10 +701,10 @@ public AlgNode routeDocumentDml( LogicalDocumentModify alg, Statement statement, } for ( long placementId : placements ) { - CatalogAdapter adapter = Catalog.getInstance().getAdapter( placementId ); + CatalogAdapter adapter = snapshot.getAdapter( placementId ); CatalogCollectionPlacement placement = Catalog.getInstance().getAllocDoc( alg.entity.namespaceId ).getCollectionPlacement( collection.id, placementId ); - PhysicalCollection document = 
snapshot.getPhysicalCollection( placement.id ); + PhysicalCollection document = snapshot.getPhysicalSnapshot().getPhysicalCollection( placement.id ); if ( !adapter.supportedNamespaces.contains( NamespaceType.DOCUMENT ) ) { // move "slower" updates in front modifies.add( 0, attachRelationalModify( alg, statement, placementId, queryInformation ) ); @@ -741,10 +745,10 @@ public AlgNode routeGraphDml( LogicalLpgModify alg, Statement statement, Logical boolean usedSubstitution = false; for ( long adapterId : placements ) { - CatalogAdapter adapter = Catalog.getInstance().getAdapter( adapterId ); + CatalogAdapter adapter = snapshot.getAdapter( adapterId ); CatalogGraphPlacement graphPlacement = Catalog.getInstance().getAllocGraph( alg.entity.namespaceId ).getGraphPlacement( catalogGraph.id, adapterId ); - PhysicalGraph graph = snapshot.getPhysicalGraph( catalogGraph.id, adapterId ); + PhysicalGraph graph = snapshot.getPhysicalSnapshot().getPhysicalGraph( catalogGraph.id, adapterId ); if ( graph == null ) { // move "slower" updates in front modifies.add( 0, attachRelationalModify( alg, adapterId, statement ) ); @@ -1290,21 +1294,22 @@ private AlgBuilder buildDml( private AlgBuilder handleSelectFromOtherTable( RoutedAlgBuilder builder, LogicalTable catalogTable, Statement statement ) { LogicalTable fromTable = catalogTable; // Select from other table - if ( statement.getDataContext().getSnapshot().isPartitioned( fromTable.id ) ) { + snapshot = statement.getDataContext().getSnapshot(); + if ( snapshot.getAllocSnapshot().isPartitioned( fromTable.id ) ) { throw new UnsupportedOperationException( "DMLs from other partitioned tables is not supported" ); } long pkid = fromTable.primaryKey; - List pkColumnIds = catalog.getLogicalRel( catalogTable.namespaceId ).getPrimaryKey( pkid ).columnIds; - LogicalColumn pkColumn = catalog.getLogicalRel( catalogTable.namespaceId ).getColumn( pkColumnIds.get( 0 ) ); - List pkPlacements = catalog.getAllocRel( catalogTable.namespaceId ).getColumnPlacements( pkColumn.id ); + List pkColumnIds = snapshot.getRelSnapshot( catalogTable.namespaceId ).getPrimaryKey( pkid ).columnIds; + LogicalColumn pkColumn = snapshot.getRelSnapshot( catalogTable.namespaceId ).getColumn( pkColumnIds.get( 0 ) ); + List pkPlacements = snapshot.getAllocSnapshot().getColumnPlacements( pkColumn.id ); List nodes = new ArrayList<>(); for ( CatalogColumnPlacement pkPlacement : pkPlacements ) { - catalog.getAllocRel( catalogTable.namespaceId ).getColumnPlacementsOnAdapterPerTable( pkPlacement.adapterId, fromTable.id ); + snapshot.getAllocSnapshot().getColumnPlacementsOnAdapterPerTable( pkPlacement.adapterId, fromTable.id ); - CatalogPartitionPlacement partition = catalog.getAllocRel( catalogTable.namespaceId ).getPartitionPlacement( pkPlacement.adapterId, fromTable.partitionProperty.partitionIds.get( 0 ) ); + CatalogPartitionPlacement partition = snapshot.getAllocSnapshot().getPartitionPlacement( pkPlacement.adapterId, fromTable.partitionProperty.partitionIds.get( 0 ) ); nodes.add( super.handleScan( builder, @@ -1348,11 +1353,11 @@ private void dmlConditionCheck( LogicalFilter node, LogicalTable catalogTable, L } else { throw new RuntimeException( "Invalid column name: " + field.getName() ); } - column = catalog.getLogicalRel( catalogTable.namespaceId ).getColumn( catalogTable.id, columnName ); + column = snapshot.getRelSnapshot( catalogTable.namespaceId ).getColumn( catalogTable.id, columnName ); } catch ( UnknownColumnException e ) { throw new RuntimeException( e ); } - if ( !catalog.getAllocRel( 
catalogTable.namespaceId ).checkIfExistsColumnPlacement( placements.get( 0 ).adapterId, column.id ) ) { + if ( !snapshot.getAllocSnapshot().checkIfExistsColumnPlacement( placements.get( 0 ).adapterId, column.id ) ) { throw new RuntimeException( "Current implementation of vertical partitioning does not allow conditions on partitioned columns. " ); // !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! // TODO: Use indexes diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/FullPlacementQueryRouter.java b/dbms/src/main/java/org/polypheny/db/routing/routers/FullPlacementQueryRouter.java index 8abd3617cc..7109e15fb3 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/FullPlacementQueryRouter.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/FullPlacementQueryRouter.java @@ -143,7 +143,7 @@ protected Set<List<CatalogColumnPlacement>> selectPlacement( LogicalTable catalo List<Long> usedColumns = queryInformation.getAllColumnsPerTable( catalogTable.id ); // Filter for placements by adapters - List<Long> adapters = catalog.getAllocRel( catalogTable.namespaceId ).getColumnPlacementsByAdapter( catalogTable.id ).entrySet() + List<Long> adapters = catalog.getSnapshot().getAllocSnapshot().getColumnPlacementsByAdapter( catalogTable.id ).entrySet() .stream() .filter( elem -> elem.getValue().containsAll( usedColumns ) ) .map( Entry::getKey ) @@ -152,7 +152,7 @@ protected Set<List<CatalogColumnPlacement>> selectPlacement( LogicalTable catalo final Set<List<CatalogColumnPlacement>> result = new HashSet<>(); for ( long adapterId : adapters ) { List<CatalogColumnPlacement> placements = usedColumns.stream() - .map( colId -> catalog.getAllocRel( catalogTable.namespaceId ).getColumnPlacement( adapterId, colId ) ) + .map( colId -> catalog.getSnapshot().getAllocSnapshot().getColumnPlacement( adapterId, colId ) ) .collect( Collectors.toList() ); if ( !placements.isEmpty() ) { diff --git a/dbms/src/main/java/org/polypheny/db/routing/strategies/CreateAllPlacementStrategy.java b/dbms/src/main/java/org/polypheny/db/routing/strategies/CreateAllPlacementStrategy.java index 5036dc2be4..4cc8eb6c29 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/strategies/CreateAllPlacementStrategy.java +++ b/dbms/src/main/java/org/polypheny/db/routing/strategies/CreateAllPlacementStrategy.java @@ -32,7 +32,7 @@ public class CreateAllPlacementStrategy implements CreatePlacementStrategy { @Override public List<DataStore> getDataStoresForNewColumn( LogicalColumn addedColumn ) { - LogicalTable catalogTable = Catalog.getInstance().getLogicalRel( addedColumn.namespaceId ).getTable( addedColumn.tableId ); + LogicalTable catalogTable = Catalog.getInstance().getSnapshot().getRelSnapshot( addedColumn.namespaceId ).getTable( addedColumn.tableId ); return catalogTable.dataPlacements.stream() .map( elem -> AdapterManager.getInstance().getStore( elem ) ) .collect( Collectors.toList() ); diff --git a/dbms/src/main/java/org/polypheny/db/routing/strategies/CreateSinglePlacementStrategy.java b/dbms/src/main/java/org/polypheny/db/routing/strategies/CreateSinglePlacementStrategy.java index 828b35519f..6b00ed71b2 100.644 --- a/dbms/src/main/java/org/polypheny/db/routing/strategies/CreateSinglePlacementStrategy.java +++ b/dbms/src/main/java/org/polypheny/db/routing/strategies/CreateSinglePlacementStrategy.java @@ -30,7 +30,7 @@ public class CreateSinglePlacementStrategy implements CreatePlacementStrategy { @Override public List<DataStore> getDataStoresForNewColumn( LogicalColumn addedColumn ) { - LogicalTable catalogTable = 
Catalog.getInstance().getSnapshot().getRelSnapshot( addedColumn.namespaceId ).getTable( addedColumn.tableId ); return ImmutableList.of( AdapterManager.getInstance().getStore( catalogTable.dataPlacements.get( 0 ) ) ); } diff --git a/dbms/src/main/java/org/polypheny/db/transaction/EntityAccessMap.java b/dbms/src/main/java/org/polypheny/db/transaction/EntityAccessMap.java index 112952c426..e224fce027 100644 --- a/dbms/src/main/java/org/polypheny/db/transaction/EntityAccessMap.java +++ b/dbms/src/main/java/org/polypheny/db/transaction/EntityAccessMap.java @@ -324,7 +324,7 @@ private void extractWriteConstraints( LogicalEntity logicalTable ) { for ( long constraintTable : logicalTable.getConstraintIds() ) { for ( long constraintPartitionIds - : Catalog.getInstance().getLogicalRel( logicalTable.namespaceId ).getTable( constraintTable ).partitionProperty.partitionIds ) { + : Catalog.getInstance().getSnapshot().getRelSnapshot( logicalTable.namespaceId ).getTable( constraintTable ).partitionProperty.partitionIds ) { EntityIdentifier id = new EntityIdentifier( constraintTable, constraintPartitionIds, NamespaceLevel.ENTITY_LEVEL ); if ( !accessMap.containsKey( id ) ) { diff --git a/dbms/src/main/java/org/polypheny/db/transaction/TransactionImpl.java b/dbms/src/main/java/org/polypheny/db/transaction/TransactionImpl.java index f3e8520cae..c311893f4b 100644 --- a/dbms/src/main/java/org/polypheny/db/transaction/TransactionImpl.java +++ b/dbms/src/main/java/org/polypheny/db/transaction/TransactionImpl.java @@ -43,11 +43,6 @@ import org.polypheny.db.catalog.entity.CatalogUser; import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalTable; -import org.polypheny.db.catalog.snapshot.AllocSnapshot; -import org.polypheny.db.catalog.snapshot.LogicalDocSnapshot; -import org.polypheny.db.catalog.snapshot.LogicalGraphSnapshot; -import org.polypheny.db.catalog.snapshot.LogicalRelSnapshot; -import org.polypheny.db.catalog.snapshot.PhysicalSnapshot; import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.config.RuntimeConfig; import org.polypheny.db.information.InformationManager; @@ -140,31 +135,6 @@ public Snapshot getSnapshot() { } - public LogicalRelSnapshot getRelSnapshot() { - return Catalog.getInstance().getRelSnapshot(); - } - - - public LogicalGraphSnapshot getGraphSnapshot() { - return Catalog.getInstance().getGraphSnapshot(); - } - - - public LogicalDocSnapshot getDocSnapshot() { - return Catalog.getInstance().getDocSnapshot(); - } - - - public PhysicalSnapshot getPhysicalSnapshot() { - return Catalog.getInstance().getPhysicalSnapshot(); - } - - - public AllocSnapshot getAllocSnapshot() { - return Catalog.getInstance().getAllocSnapshot(); - } - - @Override public InformationManager getQueryAnalyzer() { return InformationManager.getInstance( xid.toString() ); diff --git a/dbms/src/main/java/org/polypheny/db/view/MaterializedViewManagerImpl.java b/dbms/src/main/java/org/polypheny/db/view/MaterializedViewManagerImpl.java index 8021d1fe2e..21378e5717 100644 --- a/dbms/src/main/java/org/polypheny/db/view/MaterializedViewManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/view/MaterializedViewManagerImpl.java @@ -27,6 +27,7 @@ import java.util.List; import java.util.Map; import java.util.Map.Entry; +import java.util.Objects; import java.util.concurrent.ConcurrentHashMap; import lombok.Getter; import lombok.extern.slf4j.Slf4j; @@ -53,6 +54,7 @@ import org.polypheny.db.catalog.exceptions.UnknownTableException; import 
org.polypheny.db.catalog.exceptions.UnknownUserException; import org.polypheny.db.catalog.logistic.EntityType; +import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.config.RuntimeConfig; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgTraitSet; @@ -90,6 +92,7 @@ public class MaterializedViewManagerImpl extends MaterializedViewManager { private final List intervalToUpdate; final Map updateCandidates; + private Snapshot snapshot; public MaterializedViewManagerImpl( TransactionManager transactionManager ) { @@ -178,8 +181,9 @@ public synchronized void addMaterializedInfo( Long materializedId, MaterializedC public void addTables( Transaction transaction, List tableNames ) { if ( tableNames.size() > 1 ) { try { - LogicalNamespace namespace = Catalog.getInstance().getNamespace( tableNames.get( 0 ) ); - LogicalTable catalogTable = Catalog.getInstance().getLogicalRel( namespace.id ).getTable( tableNames.get( 1 ) ); + snapshot = Catalog.getInstance().getSnapshot(); + LogicalNamespace namespace = snapshot.getNamespace( tableNames.get( 0 ) ); + LogicalTable catalogTable = snapshot.getRelSnapshot( namespace.id ).getTable( tableNames.get( 1 ) ); long id = catalogTable.id; if ( !catalogTable.getConnectedViews().isEmpty() ) { updateCandidates.put( transaction.getXid(), id ); @@ -211,12 +215,12 @@ public void updateCommittedXid( PolyXid xid ) { * @param potentialInteresting id of underlying table that was updated */ public void materializedUpdate( Long potentialInteresting ) { - Catalog catalog = Catalog.getInstance(); - LogicalTable catalogTable = catalog.getLogicalEntity( potentialInteresting ).unwrap( LogicalTable.class ); + Snapshot snapshot = Catalog.getInstance().getSnapshot(); + LogicalTable catalogTable = snapshot.getNamespaces( null ).stream().map( n -> snapshot.getRelSnapshot( n.id ).getTable( potentialInteresting ) ).filter( Objects::nonNull ).findFirst().orElse( null ); List connectedViews = catalogTable.getConnectedViews(); for ( long id : connectedViews ) { - LogicalTable view = catalog.getLogicalRel( catalogTable.namespaceId ).getTable( id ); + LogicalTable view = snapshot.getRelSnapshot( catalogTable.namespaceId ).getTable( id ); if ( view.entityType == EntityType.MATERIALIZED_VIEW ) { MaterializedCriteria materializedCriteria = materializedInfo.get( view.id ); if ( materializedCriteria.getCriteriaType() == CriteriaType.UPDATE ) { @@ -318,9 +322,9 @@ public void addData( Transaction transaction, List stores, Map columnPlacements.add( Catalog.getInstance().getAllocRel( materializedView.namespaceId ).getColumnPlacement( id, column.id ) ) ); + columns.get( id ).forEach( column -> columnPlacements.add( snapshot.getAllocSnapshot().getColumnPlacement( id, column.id ) ) ); // If partitions should be allowed for materialized views this needs to be changed that all partitions are considered - AlgRoot targetRel = dataMigrator.buildInsertStatement( targetStatement, columnPlacements, Catalog.getInstance().getAllocRel( materializedView.namespaceId ).getPartitionsOnDataPlacement( id, materializedView.id ).get( 0 ) ); + AlgRoot targetRel = dataMigrator.buildInsertStatement( targetStatement, columnPlacements, snapshot.getAllocSnapshot().getPartitionsOnDataPlacement( id, materializedView.id ).get( 0 ) ); dataMigrator.executeQuery( columns.get( id ), algRoot, sourceStatement, targetStatement, targetRel, true, materializedView.isOrdered() ); } @@ -350,9 +354,9 @@ public void updateData( Transaction transaction, Long materializedId ) { List logicalColumns = 
new ArrayList<>(); int localAdapterIndex = catalogMaterializedView.dataPlacements.indexOf( id ); - catalog.getAllocRel( catalogMaterializedView.namespaceId ).getDataPlacement( catalogMaterializedView.dataPlacements.get( localAdapterIndex ), catalogMaterializedView.id ) + snapshot.getAllocSnapshot().getDataPlacement( catalogMaterializedView.dataPlacements.get( localAdapterIndex ), catalogMaterializedView.id ) .columnPlacementsOnAdapter.forEach( col -> - logicalColumns.add( catalog.getLogicalRel( catalogMaterializedView.namespaceId ).getColumn( col ) ) + logicalColumns.add( snapshot.getRelSnapshot( catalogMaterializedView.namespaceId ).getColumn( col ) ) ); columns.put( id, logicalColumns ); } @@ -367,7 +371,7 @@ public void updateData( Transaction transaction, Long materializedId ) { columnPlacements.clear(); - columns.get( id ).forEach( column -> columnPlacements.add( Catalog.getInstance().getAllocRel( column.namespaceId ).getColumnPlacement( id, column.id ) ) ); + columns.get( id ).forEach( column -> columnPlacements.add( snapshot.getAllocSnapshot().getColumnPlacement( id, column.id ) ) ); // Build {@link AlgNode} to build delete Statement from materialized view AlgBuilder deleteAlgBuilder = AlgBuilder.create( deleteStatement ); @@ -382,7 +386,7 @@ public void updateData( Transaction transaction, Long materializedId ) { targetRel = dataMigrator.buildDeleteStatement( targetStatementDelete, columnPlacements, - Catalog.getInstance().getAllocRel( catalogMaterializedView.namespaceId ).getPartitionsOnDataPlacement( id, catalogMaterializedView.id ).get( 0 ) ); + snapshot.getAllocSnapshot().getPartitionsOnDataPlacement( id, catalogMaterializedView.id ).get( 0 ) ); dataMigrator.executeQuery( columns.get( id ), AlgRoot.of( deleteRel, Kind.SELECT ), @@ -398,7 +402,7 @@ public void updateData( Transaction transaction, Long materializedId ) { targetRel = dataMigrator.buildInsertStatement( targetStatementInsert, columnPlacements, - Catalog.getInstance().getAllocRel( catalogMaterializedView.namespaceId ).getPartitionsOnDataPlacement( id, catalogMaterializedView.id ).get( 0 ) ); + snapshot.getAllocSnapshot().getPartitionsOnDataPlacement( id, catalogMaterializedView.id ).get( 0 ) ); dataMigrator.executeQuery( columns.get( id ), AlgRoot.of( insertRel, Kind.SELECT ), diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/QueryResult.java b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/QueryResult.java index f6ada03c13..cc4e64e259 100644 --- a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/QueryResult.java +++ b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/QueryResult.java @@ -42,7 +42,7 @@ class QueryResult { public static QueryResult fromCatalogColumn( LogicalColumn column ) { - return new QueryResult( Catalog.getInstance().getRelSnapshot( column.namespaceId ).getTable( column.tableId ), column ); + return new QueryResult( Catalog.getInstance().getSnapshot().getRelSnapshot( column.namespaceId ).getTable( column.tableId ), column ); } } diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticColumn.java b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticColumn.java index 8e2d0a2e53..df7954e5f0 100644 --- a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticColumn.java +++ b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticColumn.java @@ -86,7 +86,7 @@ public StatisticColumn( long schemaId, long tableId, long columnId, PolyType typ 
this.type = type; this.columnType = columnType; - LogicalRelSnapshot snapshot = Catalog.getInstance().getRelSnapshot( schemaId ); + LogicalRelSnapshot snapshot = Catalog.getInstance().getSnapshot().getRelSnapshot( schemaId ); if ( snapshot.getLogicalTable( tableId ) != null ) { this.schema = snapshot.getNamespace( schemaId ).name; this.table = snapshot.getTable( tableId ).name; diff --git a/plugins/avatica-interface/src/main/java/org/polypheny/db/avatica/DbmsMeta.java b/plugins/avatica-interface/src/main/java/org/polypheny/db/avatica/DbmsMeta.java index 5523d906cf..de29054fc7 100644 --- a/plugins/avatica-interface/src/main/java/org/polypheny/db/avatica/DbmsMeta.java +++ b/plugins/avatica-interface/src/main/java/org/polypheny/db/avatica/DbmsMeta.java @@ -308,7 +308,7 @@ private List getLogicalTables( Pattern schemaPattern, Pattern tabl return namespaces .stream() .flatMap( - n -> catalog.getRelSnapshot( n.id ).getTables( tablePattern ).stream() ).collect( Collectors.toList() ); + n -> catalog.getSnapshot().getRelSnapshot( n.id ).getTables( tablePattern ).stream() ).collect( Collectors.toList() ); } @@ -319,7 +319,7 @@ public MetaResultSet getColumns( final ConnectionHandle ch, final String databas if ( log.isTraceEnabled() ) { log.trace( "getColumns( ConnectionHandle {}, String {}, Pat {}, Pat {}, Pat {} )", ch, database, schemaPattern, tablePattern, columnPattern ); } - final List columns = getLogicalTables( schemaPattern, tablePattern ).stream().flatMap( t -> catalog.getRelSnapshot( t.namespaceId ).getColumns( + final List columns = getLogicalTables( schemaPattern, tablePattern ).stream().flatMap( t -> catalog.getSnapshot().getRelSnapshot( t.namespaceId ).getColumns( (tablePattern == null || tablePattern.s == null) ? null : new Pattern( tablePattern.s ), (columnPattern == null || columnPattern.s == null) ? 
null : new Pattern( columnPattern.s ) ).stream() ).collect( Collectors.toList() ); @@ -532,7 +532,7 @@ public MetaResultSet getPrimaryKeys( final ConnectionHandle ch, final String dat List primaryKeyColumns = new LinkedList<>(); for ( LogicalTable catalogTable : catalogEntities ) { if ( catalogTable.primaryKey != null ) { - final CatalogPrimaryKey primaryKey = catalog.getRelSnapshot( catalogTable.namespaceId ).getPrimaryKey( catalogTable.primaryKey ); + final CatalogPrimaryKey primaryKey = catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getPrimaryKey( catalogTable.primaryKey ); primaryKeyColumns.addAll( primaryKey.getCatalogPrimaryKeyColumns() ); } } @@ -568,7 +568,7 @@ public MetaResultSet getImportedKeys( final ConnectionHandle ch, final String da final List catalogEntities = getLogicalTables( schemaPattern, tablePattern ); List foreignKeyColumns = new LinkedList<>(); for ( LogicalTable catalogTable : catalogEntities ) { - List importedKeys = catalog.getRelSnapshot( catalogTable.namespaceId ).getForeignKeys( catalogTable.id ); + List importedKeys = catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getForeignKeys( catalogTable.id ); importedKeys.forEach( catalogForeignKey -> foreignKeyColumns.addAll( catalogForeignKey.getCatalogForeignKeyColumns() ) ); } StatementHandle statementHandle = createStatement( ch ); @@ -611,7 +611,7 @@ public MetaResultSet getExportedKeys( final ConnectionHandle ch, final String da final List catalogEntities = getLogicalTables( schemaPattern, tablePattern ); List foreignKeyColumns = new LinkedList<>(); for ( LogicalTable catalogTable : catalogEntities ) { - List exportedKeys = catalog.getRelSnapshot( catalogTable.namespaceId ).getExportedKeys( catalogTable.id ); + List exportedKeys = catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getExportedKeys( catalogTable.id ); exportedKeys.forEach( catalogForeignKey -> foreignKeyColumns.addAll( catalogForeignKey.getCatalogForeignKeyColumns() ) ); } StatementHandle statementHandle = createStatement( ch ); @@ -727,7 +727,7 @@ public MetaResultSet getIndexInfo( final ConnectionHandle ch, final String datab final List catalogEntities = getLogicalTables( schemaPattern, tablePattern ); List catalogIndexColumns = new LinkedList<>(); for ( LogicalTable catalogTable : catalogEntities ) { - List catalogIndexInfos = catalog.getRelSnapshot( catalogTable.namespaceId ).getIndexes( catalogTable.id, unique ); + List catalogIndexInfos = catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getIndexes( catalogTable.id, unique ); catalogIndexInfos.forEach( info -> catalogIndexColumns.addAll( info.getCatalogIndexColumns() ) ); } StatementHandle statementHandle = createStatement( ch ); diff --git a/plugins/cql-language/src/main/java/org/polypheny/db/cql/ColumnIndex.java b/plugins/cql-language/src/main/java/org/polypheny/db/cql/ColumnIndex.java index 9905746388..1664983e6f 100644 --- a/plugins/cql-language/src/main/java/org/polypheny/db/cql/ColumnIndex.java +++ b/plugins/cql-language/src/main/java/org/polypheny/db/cql/ColumnIndex.java @@ -56,8 +56,8 @@ public static ColumnIndex createIndex( String inDatabase, String schemaName, Str try { log.debug( "Creating ColumnIndex." 
); Catalog catalog = Catalog.getInstance(); - LogicalNamespace namespace = catalog.getNamespace( schemaName ); - LogicalColumn column = catalog.getLogicalRel( namespace.id ).getColumn( tableName, columnName ); + LogicalNamespace namespace = catalog.getSnapshot().getNamespace( schemaName ); + LogicalColumn column = catalog.getSnapshot().getRelSnapshot( namespace.id ).getColumn( tableName, columnName ); return new ColumnIndex( column, schemaName, tableName, columnName ); } catch ( UnknownTableException | UnknownSchemaException | UnknownColumnException e ) { log.error( "Cannot find a underlying column for the specified column name: {}.{}.{}.", schemaName, tableName, columnName, e ); diff --git a/plugins/cql-language/src/main/java/org/polypheny/db/cql/TableIndex.java b/plugins/cql-language/src/main/java/org/polypheny/db/cql/TableIndex.java index 8f344eb0ae..d282e9ac13 100644 --- a/plugins/cql-language/src/main/java/org/polypheny/db/cql/TableIndex.java +++ b/plugins/cql-language/src/main/java/org/polypheny/db/cql/TableIndex.java @@ -20,7 +20,6 @@ import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalTable; -import org.polypheny.db.catalog.exceptions.UnknownSchemaException; import org.polypheny.db.catalog.exceptions.UnknownTableException; import org.polypheny.db.cql.exception.UnknownIndexException; @@ -49,8 +48,8 @@ public static TableIndex createIndex( String schemaName, String tableName ) thro try { log.debug( "Creating TableIndex." ); Catalog catalog = Catalog.getInstance(); - LogicalNamespace namespace = catalog.getNamespace( schemaName ); - LogicalTable table = catalog.getLogicalRel( namespace.id ).getTable( tableName ); + LogicalNamespace namespace = catalog.getSnapshot().getNamespace( schemaName ); + LogicalTable table = catalog.getSnapshot().getRelSnapshot( namespace.id ).getTable( tableName ); return new TableIndex( table, schemaName, tableName ); } catch ( UnknownTableException e ) { throw new UnknownIndexException( "Cannot find a underlying table for the specified table name: " + schemaName + "." + tableName + "." 
); diff --git a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSchema.java b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSchema.java index b4de346abe..871a6fe230 100644 --- a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSchema.java +++ b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSchema.java @@ -89,7 +89,7 @@ public PhysicalTable createCsvTable( LogicalTable catalogTable, AllocationTable List fieldTypes = new LinkedList<>(); List fieldIds = new ArrayList<>( allocationTable.placements.size() ); for ( CatalogColumnPlacement placement : allocationTable.placements ) { - LogicalColumn logicalColumn = Catalog.getInstance().getLogicalRel( allocationTable.namespaceId ).getColumn( placement.columnId ); + LogicalColumn logicalColumn = Catalog.getInstance().getSnapshot().getRelSnapshot( allocationTable.namespaceId ).getColumn( placement.columnId ); AlgDataType sqlType = sqlType( typeFactory, logicalColumn.type, logicalColumn.length, logicalColumn.scale, null ); fieldInfo.add( logicalColumn.name, placement.physicalColumnName, sqlType ).nullable( logicalColumn.nullable ); fieldTypes.add( CsvFieldType.getCsvFieldType( logicalColumn.type ) ); @@ -98,7 +98,8 @@ public PhysicalTable createCsvTable( LogicalTable catalogTable, AllocationTable String csvFileName = Catalog .getInstance() - .getAllocRel( allocationTable.namespaceId ) + .getSnapshot() + .getAllocSnapshot() .getColumnPlacementsOnAdapterPerTable( csvSource.getAdapterId(), catalogTable.id ).iterator().next() .physicalSchemaName; Source source; diff --git a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java index 4ffb2d2fb4..e3074c117e 100644 --- a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java +++ b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java @@ -161,6 +161,8 @@ public Map> getExportedColumns() { if ( csvDir.getProtocol().equals( "jar" ) ) { List> placements = Catalog .getInstance() + .getSnapshot() + .getPhysicalSnapshot() .getPhysicalsOnAdapter( getAdapterId() ); fileNames = new HashSet<>(); for ( PhysicalEntity ccp : placements ) { diff --git a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherAlterDatabaseAlias.java b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherAlterDatabaseAlias.java index f00b8a366f..e4225ac268 100644 --- a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherAlterDatabaseAlias.java +++ b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherAlterDatabaseAlias.java @@ -18,9 +18,7 @@ import java.util.List; import lombok.Getter; -import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.LogicalNamespace; -import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.catalog.logistic.Pattern; import org.polypheny.db.cypher.CypherParameter; import org.polypheny.db.cypher.CypherSimpleEither; @@ -54,7 +52,7 @@ public CypherAlterDatabaseAlias( @Override public void execute( Context context, Statement statement, QueryParameters parameters ) { - List graphs = Catalog.getInstance().getNamespaces( new Pattern( targetName ) ); + List graphs = statement.getTransaction().getSnapshot().getNamespaces( new Pattern( targetName ) ); if ( graphs.size() != 1 ) { if ( !ifExists ) { diff --git 
a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherCreateDatabaseAlias.java b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherCreateDatabaseAlias.java index c0c4af6e5e..d4b04de4d7 100644 --- a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherCreateDatabaseAlias.java +++ b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherCreateDatabaseAlias.java @@ -18,9 +18,7 @@ import java.util.List; import lombok.Getter; -import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.LogicalNamespace; -import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.catalog.logistic.Pattern; import org.polypheny.db.cypher.CypherParameter; import org.polypheny.db.cypher.CypherSimpleEither; @@ -57,7 +55,7 @@ public CypherCreateDatabaseAlias( @Override public void execute( Context context, Statement statement, QueryParameters parameters ) { - List graphs = Catalog.getInstance().getNamespaces( new Pattern( targetName ) ); + List graphs = statement.getTransaction().getSnapshot().getNamespaces( new Pattern( targetName ) ); if ( graphs.size() != 1 ) { throw new RuntimeException( "Error while creating a new graph database alias." ); } diff --git a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherDropAlias.java b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherDropAlias.java index 3ab3e51e70..630953eb5c 100644 --- a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherDropAlias.java +++ b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherDropAlias.java @@ -18,9 +18,7 @@ import java.util.List; import lombok.Getter; -import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.LogicalNamespace; -import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.catalog.logistic.Pattern; import org.polypheny.db.cypher.CypherParameter; import org.polypheny.db.cypher.CypherSimpleEither; @@ -48,7 +46,7 @@ public CypherDropAlias( ParserPos pos, CypherSimpleEither graphs = Catalog.getInstance().getNamespaces( new Pattern( aliasName ) ); + List graphs = statement.getTransaction().getSnapshot().getNamespaces( new Pattern( aliasName ) ); if ( graphs.size() != 1 ) { throw new RuntimeException( "Error while dropping a graph database alias." 
); } diff --git a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherDropDatabase.java b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherDropDatabase.java index 6b5e84b0b2..a179a88c1f 100644 --- a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherDropDatabase.java +++ b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherDropDatabase.java @@ -18,9 +18,7 @@ import java.util.List; import java.util.concurrent.TimeUnit; -import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.LogicalNamespace; -import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.catalog.logistic.Pattern; import org.polypheny.db.cypher.CypherParameter; import org.polypheny.db.cypher.CypherSimpleEither; @@ -65,7 +63,7 @@ public void execute( Context context, Statement statement, QueryParameters param } } - List databases = Catalog.getInstance().getNamespaces( new Pattern( databaseName ) ); + List databases = statement.getTransaction().getSnapshot().getNamespaces( new Pattern( databaseName ) ); if ( databases.size() != 1 ) { if ( !ifExists ) { diff --git a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/cypher2alg/CypherToAlgConverter.java b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/cypher2alg/CypherToAlgConverter.java index 025d641500..da23e35f2e 100644 --- a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/cypher2alg/CypherToAlgConverter.java +++ b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/cypher2alg/CypherToAlgConverter.java @@ -48,9 +48,7 @@ import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.algebra.type.AlgDataTypeFieldImpl; import org.polypheny.db.algebra.type.AlgRecordType; -import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.logical.LogicalGraph; -import org.polypheny.db.catalog.exceptions.UnknownSchemaException; import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.cypher.CypherNode; import org.polypheny.db.cypher.CypherNode.CypherFamily; @@ -109,7 +107,7 @@ public AlgRoot convert( CypherNode query, ExtendedQueryParameters parameters, Al databaseId = parameters.getDatabaseId(); } - LogicalGraph graph = Catalog.getInstance().getLogicalGraph( databaseId ).getGraph( databaseId ); + LogicalGraph graph = this.snapshot.getGraphSnapshot( databaseId ).getGraph( databaseId ); if ( parameters.isFullGraph() ) { // simple full graph scan @@ -138,7 +136,7 @@ private AlgNode buildFullScan( LogicalGraph graph ) { private long getDatabaseId( ExtendedQueryParameters parameters ) { - return Catalog.getInstance().getNamespace( parameters.getDatabaseName() ).id; + return snapshot.getNamespace( parameters.getDatabaseName() ).id; } diff --git a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/ddl/CypherAddPlacement.java b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/ddl/CypherAddPlacement.java index a6236c6e09..3dbabcca6f 100644 --- a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/ddl/CypherAddPlacement.java +++ b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/ddl/CypherAddPlacement.java @@ -22,8 +22,6 @@ import org.polypheny.db.adapter.Adapter; import org.polypheny.db.adapter.AdapterManager; import org.polypheny.db.adapter.DataStore; -import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalGraph; 
import org.polypheny.db.catalog.logistic.Pattern; import org.polypheny.db.cypher.CypherParameter; @@ -62,10 +60,10 @@ public CypherAddPlacement( @Override public void execute( Context context, Statement statement, QueryParameters parameters ) { - Catalog catalog = Catalog.getInstance(); + AdapterManager adapterManager = AdapterManager.getInstance(); - List graphs = catalog.getNamespaces( new Pattern( this.database ) ).stream().map( g -> catalog.getLogicalGraph( g.id ).getGraph( g.id ) ).collect( Collectors.toList() ); + List graphs = statement.getTransaction().getSnapshot().getNamespaces( new Pattern( this.database ) ).stream().map( g -> statement.getTransaction().getSnapshot().getGraphSnapshot( g.id ).getGraph( g.id ) ).collect( Collectors.toList() ); List dataStores = Stream.of( store ) .map( store -> (DataStore) adapterManager.getAdapter( store ) ) diff --git a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/ddl/CypherDropPlacement.java b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/ddl/CypherDropPlacement.java index b1d7b64a3d..58b63b0fb6 100644 --- a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/ddl/CypherDropPlacement.java +++ b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/ddl/CypherDropPlacement.java @@ -21,9 +21,7 @@ import java.util.stream.Stream; import org.polypheny.db.adapter.AdapterManager; import org.polypheny.db.adapter.DataStore; -import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.LogicalNamespace; -import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.catalog.logistic.Pattern; import org.polypheny.db.cypher.CypherParameter; import org.polypheny.db.cypher.CypherSimpleEither; @@ -53,10 +51,9 @@ public CypherDropPlacement( @Override public void execute( Context context, Statement statement, QueryParameters parameters ) { - Catalog catalog = Catalog.getInstance(); AdapterManager adapterManager = AdapterManager.getInstance(); - List graphs = catalog.getNamespaces( new Pattern( this.databaseName ) ); + List graphs = statement.getTransaction().getSnapshot().getNamespaces( new Pattern( this.databaseName ) ); DataStore dataStore = Stream.of( storeName ) .map( store -> (DataStore) adapterManager.getAdapter( storeName ) ) diff --git a/plugins/hsqldb-adapter/src/main/java/org/polypheny/db/hsqldb/stores/HsqldbStore.java b/plugins/hsqldb-adapter/src/main/java/org/polypheny/db/hsqldb/stores/HsqldbStore.java index 673d20fa29..30d6bb0636 100644 --- a/plugins/hsqldb-adapter/src/main/java/org/polypheny/db/hsqldb/stores/HsqldbStore.java +++ b/plugins/hsqldb-adapter/src/main/java/org/polypheny/db/hsqldb/stores/HsqldbStore.java @@ -119,9 +119,9 @@ public Namespace getCurrentSchema() { @Override public void addIndex( Context context, CatalogIndex catalogIndex, List partitionIds ) { - List ccps = Catalog.getInstance().getAllocRel( catalogIndex.key.namespaceId ).getColumnPlacementsOnAdapterPerTable( getAdapterId(), catalogIndex.key.tableId ); + List ccps = context.getSnapshot().getAllocSnapshot().getColumnPlacementsOnAdapterPerTable( getAdapterId(), catalogIndex.key.tableId ); List partitionPlacements = new ArrayList<>(); - partitionIds.forEach( id -> partitionPlacements.add( catalog.getAllocRel( catalogIndex.key.namespaceId ).getPartitionPlacement( getAdapterId(), id ) ) ); + partitionIds.forEach( id -> partitionPlacements.add( context.getSnapshot().getAllocSnapshot().getPartitionPlacement( getAdapterId(), id ) ) ); String physicalIndexName = getPhysicalIndexName( 
catalogIndex.key.tableId, catalogIndex.id ); for ( CatalogPartitionPlacement partitionPlacement : partitionPlacements ) { @@ -159,7 +159,7 @@ public void addIndex( Context context, CatalogIndex catalogIndex, List par @Override public void dropIndex( Context context, CatalogIndex catalogIndex, List partitionIds ) { List partitionPlacements = new ArrayList<>(); - partitionIds.forEach( id -> partitionPlacements.add( catalog.getAllocRel( catalogIndex.key.namespaceId ).getPartitionPlacement( getAdapterId(), id ) ) ); + partitionIds.forEach( id -> partitionPlacements.add( catalog.getSnapshot().getAllocSnapshot().getPartitionPlacement( getAdapterId(), id ) ) ); for ( CatalogPartitionPlacement partitionPlacement : partitionPlacements ) { StringBuilder builder = new StringBuilder(); diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java index 50d8541a4a..92999d8346 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java @@ -36,7 +36,6 @@ import org.polypheny.db.adapter.jdbc.connection.ConnectionHandler; import org.polypheny.db.adapter.jdbc.connection.ConnectionHandlerException; import org.polypheny.db.adapter.jdbc.connection.TransactionalConnectionFactory; -import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.plugins.PolyPluginManager; @@ -123,8 +122,8 @@ public void truncate( Context context, LogicalTable catalogTable ) { // We get the physical schema / table name by checking existing column placements of the same logical table placed on this store. // This works because there is only one physical table for each logical table on JDBC stores. The reason for choosing this // approach rather than using the default physical schema / table names is that this approach allows truncating linked tables. 
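// Editor's note (illustrative sketch, not part of this patch): the hunk below swaps the
// static Catalog accessors for the transaction-scoped Snapshot. Since both physical names
// come from the same placement, the duplicated lookup could also be hoisted into a local,
// e.g., assuming the surrounding method's 'context', 'catalogTable' and 'getAdapterId()':
//
//     CatalogPartitionPlacement placement = context.getSnapshot().getAllocSnapshot()
//             .getPartitionPlacementsByTableOnAdapter( getAdapterId(), catalogTable.id ).get( 0 );
//     String physicalSchemaName = placement.physicalSchemaName;
//     String physicalTableName = placement.physicalTableName;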
- String physicalTableName = Catalog.getInstance().getAllocRel( catalogTable.namespaceId ).getPartitionPlacementsByTableOnAdapter( getAdapterId(), catalogTable.id ).get( 0 ).physicalTableName; - String physicalSchemaName = Catalog.getInstance().getAllocRel( catalogTable.namespaceId ).getPartitionPlacementsByTableOnAdapter( getAdapterId(), catalogTable.id ).get( 0 ).physicalSchemaName; + String physicalTableName = context.getSnapshot().getAllocSnapshot().getPartitionPlacementsByTableOnAdapter( getAdapterId(), catalogTable.id ).get( 0 ).physicalTableName; + String physicalSchemaName = context.getSnapshot().getAllocSnapshot().getPartitionPlacementsByTableOnAdapter( getAdapterId(), catalogTable.id ).get( 0 ).physicalSchemaName; StringBuilder builder = new StringBuilder(); builder.append( "TRUNCATE TABLE " ) .append( dialect.quoteIdentifier( physicalSchemaName ) ) diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/stores/AbstractJdbcStore.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/stores/AbstractJdbcStore.java index 4caa099c7b..7f7946a975 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/stores/AbstractJdbcStore.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/stores/AbstractJdbcStore.java @@ -167,7 +167,7 @@ protected StringBuilder buildCreateTableQuery( String schemaName, String physica @Override public void addColumn( Context context, LogicalTable catalogTable, LogicalColumn logicalColumn ) { String physicalColumnName = getPhysicalColumnName( logicalColumn.id ); - for ( CatalogPartitionPlacement partitionPlacement : catalog.getAllocRel( catalogTable.namespaceId ).getPartitionPlacementsByTableOnAdapter( this.getAdapterId(), catalogTable.id ) ) { + for ( CatalogPartitionPlacement partitionPlacement : context.getSnapshot().getAllocSnapshot().getPartitionPlacementsByTableOnAdapter( this.getAdapterId(), catalogTable.id ) ) { String physicalTableName = partitionPlacement.physicalTableName; String physicalSchemaName = partitionPlacement.physicalSchemaName; StringBuilder query = buildAddColumnQuery( physicalSchemaName, physicalTableName, physicalColumnName, catalogTable, logicalColumn ); @@ -266,7 +266,7 @@ public void updateColumnType( Context context, CatalogColumnPlacement columnPlac if ( !this.dialect.supportsNestedArrays() && logicalColumn.collectionsType != null ) { return; } - for ( CatalogPartitionPlacement partitionPlacement : catalog.getAllocRel( logicalColumn.namespaceId ).getPartitionPlacementsByTableOnAdapter( columnPlacement.adapterId, columnPlacement.tableId ) ) { + for ( CatalogPartitionPlacement partitionPlacement : context.getSnapshot().getAllocSnapshot().getPartitionPlacementsByTableOnAdapter( columnPlacement.adapterId, columnPlacement.tableId ) ) { StringBuilder builder = new StringBuilder(); builder.append( "ALTER TABLE " ) .append( dialect.quoteIdentifier( partitionPlacement.physicalSchemaName ) ) @@ -296,7 +296,7 @@ public void dropTable( Context context, LogicalTable catalogTable, List pa String physicalSchemaName; List partitionPlacements = new ArrayList<>(); - partitionIds.forEach( id -> partitionPlacements.add( catalog.getAllocRel( catalogTable.namespaceId ).getPartitionPlacement( getAdapterId(), id ) ) ); + partitionIds.forEach( id -> partitionPlacements.add( context.getSnapshot().getAllocSnapshot().getPartitionPlacement( getAdapterId(), id ) ) ); for ( CatalogPartitionPlacement partitionPlacement : partitionPlacements ) { 
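// Editor's note: this hunk migrates only the read side to the Snapshot API; the
// deletePartitionPlacement call below still goes through catalog.getAllocRel(...),
// consistent with the Snapshot being a read-only view while mutations stay on the Catalog.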
catalog.getAllocRel( catalogTable.namespaceId ).deletePartitionPlacement( getAdapterId(), partitionPlacement.partitionId ); @@ -320,7 +320,7 @@ public void dropTable( Context context, LogicalTable catalogTable, List pa @Override public void dropColumn( Context context, CatalogColumnPlacement columnPlacement ) { - for ( CatalogPartitionPlacement partitionPlacement : catalog.getAllocRel( columnPlacement.namespaceId ).getPartitionPlacementsByTableOnAdapter( columnPlacement.adapterId, columnPlacement.tableId ) ) { + for ( CatalogPartitionPlacement partitionPlacement : context.getSnapshot().getAllocSnapshot().getPartitionPlacementsByTableOnAdapter( columnPlacement.adapterId, columnPlacement.tableId ) ) { StringBuilder builder = new StringBuilder(); builder.append( "ALTER TABLE " ) .append( dialect.quoteIdentifier( partitionPlacement.physicalSchemaName ) ) @@ -337,7 +337,7 @@ public void truncate( Context context, LogicalTable catalogTable ) { // We get the physical schema / table name by checking existing column placements of the same logical table placed on this store. // This works because there is only one physical table for each logical table on JDBC stores. The reason for choosing this // approach rather than using the default physical schema / table names is that this approach allows truncating linked tables. - for ( CatalogPartitionPlacement partitionPlacement : catalog.getAllocRel( catalogTable.namespaceId ).getPartitionPlacementsByTableOnAdapter( getAdapterId(), catalogTable.id ) ) { + for ( CatalogPartitionPlacement partitionPlacement : catalog.getSnapshot().getAllocSnapshot().getPartitionPlacementsByTableOnAdapter( getAdapterId(), catalogTable.id ) ) { String physicalTableName = partitionPlacement.physicalTableName; String physicalSchemaName = partitionPlacement.physicalSchemaName; StringBuilder builder = new StringBuilder(); diff --git a/plugins/mql-language/src/main/java/org/polypheny/db/languages/MqlProcessorImpl.java b/plugins/mql-language/src/main/java/org/polypheny/db/languages/MqlProcessorImpl.java index 43624f723a..8dde6f9b03 100644 --- a/plugins/mql-language/src/main/java/org/polypheny/db/languages/MqlProcessorImpl.java +++ b/plugins/mql-language/src/main/java/org/polypheny/db/languages/MqlProcessorImpl.java @@ -104,8 +104,9 @@ public Pair validate( Transaction transaction, Node parsed, b public boolean needsDdlGeneration( Node query, QueryParameters parameters ) { if ( query instanceof MqlCollectionStatement ) { return Catalog.getInstance() + .getSnapshot() .getNamespaces( Pattern.of( ((MqlQueryParameters) parameters).getDatabase() ) ) - .stream().flatMap( n -> Catalog.getInstance().getLogicalDoc( n.id ).getCollections( null ).stream() ) + .stream().flatMap( n -> Catalog.getInstance().getSnapshot().getDocSnapshot( n.id ).getCollections( null ).stream() ) .noneMatch( t -> t.name.equals( ((MqlCollectionStatement) query).getCollection() ) ); } return false; diff --git a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlAddPlacement.java b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlAddPlacement.java index 0790c259e2..d026900ea8 100644 --- a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlAddPlacement.java +++ b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlAddPlacement.java @@ -21,7 +21,6 @@ import org.polypheny.db.adapter.Adapter; import org.polypheny.db.adapter.AdapterManager; import org.polypheny.db.adapter.DataStore; -import org.polypheny.db.catalog.Catalog; import 
org.polypheny.db.catalog.entity.logical.LogicalCollection; import org.polypheny.db.catalog.logistic.Pattern; import org.polypheny.db.ddl.DdlManager; @@ -42,13 +41,12 @@ public MqlAddPlacement( ParserPos pos, String collection, List stores ) @Override public void execute( Context context, Statement statement, QueryParameters parameters ) { - final Catalog catalog = Catalog.getInstance(); AdapterManager adapterManager = AdapterManager.getInstance(); long namespaceId; - namespaceId = catalog.getNamespace( ((MqlQueryParameters) parameters).getDatabase() ).id; + namespaceId = context.getSnapshot().getNamespace( ((MqlQueryParameters) parameters).getDatabase() ).id; - List collections = catalog.getLogicalDoc( namespaceId ).getCollections( new Pattern( getCollection() ) ); + List collections = context.getSnapshot().getDocSnapshot( namespaceId ).getCollections( new Pattern( getCollection() ) ); if ( collections.size() != 1 ) { throw new RuntimeException( "Error while adding new collection placement, collection not found." ); diff --git a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlCreateCollection.java b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlCreateCollection.java index ec37168a47..001ff15d8e 100644 --- a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlCreateCollection.java +++ b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlCreateCollection.java @@ -21,10 +21,8 @@ import org.bson.BsonDocument; import org.polypheny.db.adapter.AdapterManager; import org.polypheny.db.adapter.DataStore; -import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.logistic.PlacementType; import org.polypheny.db.catalog.exceptions.EntityAlreadyExistsException; -import org.polypheny.db.catalog.exceptions.UnknownSchemaException; +import org.polypheny.db.catalog.logistic.PlacementType; import org.polypheny.db.ddl.DdlManager; import org.polypheny.db.languages.ParserPos; import org.polypheny.db.languages.QueryParameters; @@ -63,10 +61,9 @@ public String toString() { @Override public void execute( Context context, Statement statement, QueryParameters parameters ) { - Catalog catalog = Catalog.getInstance(); AdapterManager adapterManager = AdapterManager.getInstance(); - long schemaId = catalog.getNamespace( ((MqlQueryParameters) parameters).getDatabase() ).id; + long schemaId = context.getSnapshot().getNamespace( ((MqlQueryParameters) parameters).getDatabase() ).id; PlacementType placementType = PlacementType.AUTOMATIC; diff --git a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlCreateView.java b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlCreateView.java index 7f8c3bf7c0..aa63140e92 100644 --- a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlCreateView.java +++ b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlCreateView.java @@ -21,12 +21,10 @@ import org.polypheny.db.algebra.AlgCollation; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.AlgRoot; -import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.logistic.PlacementType; import org.polypheny.db.catalog.exceptions.EntityAlreadyExistsException; import org.polypheny.db.catalog.exceptions.GenericCatalogException; import org.polypheny.db.catalog.exceptions.UnknownColumnException; -import org.polypheny.db.catalog.exceptions.UnknownSchemaException; +import org.polypheny.db.catalog.logistic.PlacementType; import 
org.polypheny.db.ddl.DdlManager; import org.polypheny.db.languages.ParserPos; import org.polypheny.db.languages.QueryLanguage; @@ -55,10 +53,9 @@ public MqlCreateView( ParserPos pos, String name, String source, BsonArray pipel @Override public void execute( Context context, Statement statement, QueryParameters parameters ) { - Catalog catalog = Catalog.getInstance(); String database = ((MqlQueryParameters) parameters).getDatabase(); - long schemaId = catalog.getNamespace( database ).id; + long schemaId = context.getSnapshot().getNamespace( database ).id; Node mqlNode = statement.getTransaction() .getProcessor( QueryLanguage.from( "mongo" ) ) diff --git a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlDeletePlacement.java b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlDeletePlacement.java index b3ee6474b7..b33c969a01 100644 --- a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlDeletePlacement.java +++ b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlDeletePlacement.java @@ -21,10 +21,8 @@ import org.polypheny.db.adapter.Adapter; import org.polypheny.db.adapter.AdapterManager; import org.polypheny.db.adapter.DataStore; -import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.logistic.Pattern; import org.polypheny.db.catalog.entity.logical.LogicalCollection; -import org.polypheny.db.catalog.exceptions.UnknownSchemaException; +import org.polypheny.db.catalog.logistic.Pattern; import org.polypheny.db.ddl.DdlManager; import org.polypheny.db.languages.ParserPos; import org.polypheny.db.languages.QueryParameters; @@ -43,12 +41,11 @@ public MqlDeletePlacement( ParserPos pos, String collection, List stores @Override public void execute( Context context, Statement statement, QueryParameters parameters ) { - final Catalog catalog = Catalog.getInstance(); AdapterManager adapterManager = AdapterManager.getInstance(); - long namespaceId = catalog.getNamespace( ((MqlQueryParameters) parameters).getDatabase() ).id; + long namespaceId = context.getSnapshot().getNamespace( ((MqlQueryParameters) parameters).getDatabase() ).id; - List collections = catalog.getLogicalDoc( namespaceId ).getCollections( new Pattern( getCollection() ) ); + List collections = context.getSnapshot().getDocSnapshot( namespaceId ).getCollections( new Pattern( getCollection() ) ); if ( collections.size() != 1 ) { throw new RuntimeException( "Error while adding new collection placement, collection not found." 
); diff --git a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlDrop.java b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlDrop.java index 2ce57c64e2..2e060dd44a 100644 --- a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlDrop.java +++ b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlDrop.java @@ -17,7 +17,6 @@ package org.polypheny.db.languages.mql; import java.util.List; -import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalCollection; import org.polypheny.db.catalog.logistic.Pattern; @@ -46,16 +45,15 @@ public Type getMqlKind() { @Override public void execute( Context context, Statement statement, QueryParameters parameters ) { DdlManager ddlManager = DdlManager.getInstance(); - Catalog catalog = Catalog.getInstance(); String database = ((MqlQueryParameters) parameters).getDatabase(); - if ( catalog.getNamespaces( new Pattern( database ) ).size() != 1 ) { + if ( context.getSnapshot().getNamespaces( new Pattern( database ) ).size() != 1 ) { // dropping a document database( Polyschema ), which does not exist, which is a no-op return; } - LogicalNamespace namespace = catalog.getNamespaces( new Pattern( database ) ).get( 0 ); - List collections = catalog.getLogicalDoc( namespace.id ).getCollections( new Pattern( getCollection() ) ); + LogicalNamespace namespace = context.getSnapshot().getNamespaces( new Pattern( database ) ).get( 0 ); + List collections = context.getSnapshot().getDocSnapshot( namespace.id ).getCollections( new Pattern( getCollection() ) ); if ( collections.size() != 1 ) { // dropping a collection, which does not exist, which is a no-op return; diff --git a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlRenameCollection.java b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlRenameCollection.java index 2e23d48534..492e4f316c 100644 --- a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlRenameCollection.java +++ b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlRenameCollection.java @@ -18,11 +18,9 @@ import java.util.List; import java.util.Optional; -import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.exceptions.EntityAlreadyExistsException; -import org.polypheny.db.catalog.exceptions.UnknownSchemaException; import org.polypheny.db.ddl.DdlManager; import org.polypheny.db.ddl.exception.DdlOnSourceException; import org.polypheny.db.languages.ParserPos; @@ -54,12 +52,11 @@ public Type getMqlKind() { @Override public void execute( Context context, Statement statement, QueryParameters parameters ) { - Catalog catalog = Catalog.getInstance(); String database = ((MqlQueryParameters) parameters).getDatabase(); try { - LogicalNamespace schema = catalog.getNamespace( database ); - List tables = catalog.getLogicalRel( schema.id ).getTables( null ); + LogicalNamespace schema = context.getSnapshot().getNamespace( database ); + List tables = context.getSnapshot().getRelSnapshot( schema.id ).getTables( null ); if ( dropTarget ) { Optional newTable = tables.stream() diff --git a/plugins/pig-language/src/main/java/org/polypheny/db/tools/PigAlgBuilder.java b/plugins/pig-language/src/main/java/org/polypheny/db/tools/PigAlgBuilder.java index 8ba49669e1..cf50c254b8 100644 --- 
a/plugins/pig-language/src/main/java/org/polypheny/db/tools/PigAlgBuilder.java +++ b/plugins/pig-language/src/main/java/org/polypheny/db/tools/PigAlgBuilder.java @@ -96,7 +96,7 @@ public PigAlgBuilder scan( String... tableNames ) { @Override - public PigAlgBuilder scan( Iterable tableNames ) { + public PigAlgBuilder scan( List tableNames ) { lastAlias = null; return (PigAlgBuilder) super.scan( tableNames ); } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java index f631f6ff58..b34a98fb5f 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java @@ -23,14 +23,11 @@ import io.activej.serializer.annotations.Serialize; import java.beans.PropertyChangeSupport; import java.util.Collections; -import java.util.List; -import java.util.Locale; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; -import java.util.stream.Collectors; import lombok.Getter; -import lombok.NonNull; import lombok.extern.slf4j.Slf4j; +import org.apache.commons.lang3.NotImplementedException; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.catalog.allocation.PolyAllocDocCatalog; import org.polypheny.db.catalog.allocation.PolyAllocGraphCatalog; @@ -46,20 +43,16 @@ import org.polypheny.db.catalog.catalogs.PhysicalCatalog; import org.polypheny.db.catalog.entity.CatalogAdapter; import org.polypheny.db.catalog.entity.CatalogAdapter.AdapterType; -import org.polypheny.db.catalog.entity.CatalogIndex; import org.polypheny.db.catalog.entity.CatalogQueryInterface; import org.polypheny.db.catalog.entity.CatalogUser; import org.polypheny.db.catalog.entity.LogicalNamespace; -import org.polypheny.db.catalog.entity.allocation.AllocationEntity; import org.polypheny.db.catalog.entity.logical.LogicalEntity; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.entity.physical.PhysicalEntity; import org.polypheny.db.catalog.exceptions.GenericCatalogException; import org.polypheny.db.catalog.exceptions.UnknownAdapterException; import org.polypheny.db.catalog.exceptions.UnknownColumnException; -import org.polypheny.db.catalog.exceptions.UnknownQueryInterfaceException; import org.polypheny.db.catalog.exceptions.UnknownTableException; -import org.polypheny.db.catalog.exceptions.UnknownUserException; import org.polypheny.db.catalog.logical.DocumentCatalog; import org.polypheny.db.catalog.logical.GraphCatalog; import org.polypheny.db.catalog.logical.RelationalCatalog; @@ -67,21 +60,16 @@ import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.logistic.ForeignKeyOption; import org.polypheny.db.catalog.logistic.NamespaceType; -import org.polypheny.db.catalog.logistic.Pattern; import org.polypheny.db.catalog.logistic.PlacementType; import org.polypheny.db.catalog.physical.PolyPhysicalCatalog; -import org.polypheny.db.catalog.snapshot.FullSnapshot; import org.polypheny.db.catalog.snapshot.Snapshot; +import org.polypheny.db.catalog.snapshot.impl.SnapshotBuilder; import org.polypheny.db.transaction.Transaction; import org.polypheny.db.type.PolyType; /** * Central catalog, which distributes the operations to the corresponding model catalogs. 
- * Object are as follows: - * Namespace -> Schema (Relational), Graph (Graph), Database (Document) - * Entity -> Table (Relational), does not exist (Graph), Collection (Document) - * Field -> Column (Relational), does not exist (Graph), Field (Document) */ @Slf4j public class PolyCatalog extends Catalog implements Serializable { @@ -108,10 +96,12 @@ public class PolyCatalog extends Catalog implements Serializable { public final Map interfaces; private final IdBuilder idBuilder = IdBuilder.getInstance(); - private FullSnapshot fullSnapshot; protected final PropertyChangeSupport listeners = new PropertyChangeSupport( this ); + @Getter + private Snapshot snapshot; + public PolyCatalog() { this( @@ -176,7 +166,7 @@ private void insertDefaultData() throws UnknownAdapterException { long adapter = addAdapter( "hr", defaultSource.getAdapterName(), AdapterType.SOURCE, defaultSource.getDefaultSettings() ); // init schema - CatalogAdapter csv = getAdapter( "hr" ); + CatalogAdapter csv = getSnapshot().getAdapter( "hr" ); long id = getLogicalRel( namespaceId ).addTable( "depts", EntityType.SOURCE, false ); @@ -203,20 +193,20 @@ private void insertDefaultData() throws UnknownAdapterException { * Initiates default columns for csv files */ private void addDefaultCsvColumns( CatalogAdapter csv ) throws UnknownTableException, GenericCatalogException, UnknownColumnException, UnknownTableException, UnknownColumnException, GenericCatalogException { - LogicalNamespace schema = getNamespace( "public" ); - LogicalTable depts = getLogicalRel( schema.id ).getTable( "depts" ); + LogicalNamespace schema = getSnapshot().getNamespace( "public" ); + LogicalTable depts = getSnapshot().getRelSnapshot( schema.id ).getTable( "depts" ); addDefaultCsvColumn( csv, depts, "deptno", PolyType.INTEGER, null, 1, null ); addDefaultCsvColumn( csv, depts, "name", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 2, 20 ); - LogicalTable emps = getLogicalRel( schema.id ).getTable( "emps" ); + LogicalTable emps = getSnapshot().getRelSnapshot( schema.id ).getTable( "emps" ); addDefaultCsvColumn( csv, emps, "empid", PolyType.INTEGER, null, 1, null ); addDefaultCsvColumn( csv, emps, "deptno", PolyType.INTEGER, null, 2, null ); addDefaultCsvColumn( csv, emps, "name", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 3, 20 ); addDefaultCsvColumn( csv, emps, "salary", PolyType.INTEGER, null, 4, null ); addDefaultCsvColumn( csv, emps, "commission", PolyType.INTEGER, null, 5, null ); - LogicalTable emp = getLogicalRel( schema.id ).getTable( "emp" ); + LogicalTable emp = getSnapshot().getRelSnapshot( schema.id ).getTable( "emp" ); addDefaultCsvColumn( csv, emp, "employeeno", PolyType.INTEGER, null, 1, null ); addDefaultCsvColumn( csv, emp, "age", PolyType.INTEGER, null, 2, null ); addDefaultCsvColumn( csv, emp, "gender", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 3, 20 ); @@ -228,7 +218,7 @@ private void addDefaultCsvColumns( CatalogAdapter csv ) throws UnknownTableExcep addDefaultCsvColumn( csv, emp, "workingyears", PolyType.INTEGER, null, 9, null ); addDefaultCsvColumn( csv, emp, "yearsatcompany", PolyType.INTEGER, null, 10, null ); - LogicalTable work = getLogicalRel( schema.id ).getTable( "work" ); + LogicalTable work = getSnapshot().getRelSnapshot( schema.id ).getTable( "work" ); addDefaultCsvColumn( csv, work, "employeeno", PolyType.INTEGER, null, 1, null ); addDefaultCsvColumn( csv, work, "educationfield", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 2, 20 ); addDefaultCsvColumn( csv, work, "jobinvolvement", PolyType.VARCHAR, 
Collation.CASE_INSENSITIVE, 3, 20 ); @@ -240,25 +230,25 @@ private void addDefaultCsvColumns( CatalogAdapter csv ) throws UnknownTableExcep addDefaultCsvColumn( csv, work, "dailyrate", PolyType.INTEGER, null, 9, null ); // set all needed primary keys - getLogicalRel( schema.id ).addPrimaryKey( depts.id, Collections.singletonList( getLogicalRel( schema.id ).getColumn( depts.id, "deptno" ).id ) ); - getLogicalRel( schema.id ).addPrimaryKey( emps.id, Collections.singletonList( getLogicalRel( schema.id ).getColumn( emps.id, "empid" ).id ) ); - getLogicalRel( schema.id ).addPrimaryKey( emp.id, Collections.singletonList( getLogicalRel( schema.id ).getColumn( emp.id, "employeeno" ).id ) ); - getLogicalRel( schema.id ).addPrimaryKey( work.id, Collections.singletonList( getLogicalRel( schema.id ).getColumn( work.id, "employeeno" ).id ) ); + getLogicalRel( schema.id ).addPrimaryKey( depts.id, Collections.singletonList( getSnapshot().getRelSnapshot( schema.id ).getColumn( depts.id, "deptno" ).id ) ); + getLogicalRel( schema.id ).addPrimaryKey( emps.id, Collections.singletonList( getSnapshot().getRelSnapshot( schema.id ).getColumn( emps.id, "empid" ).id ) ); + getLogicalRel( schema.id ).addPrimaryKey( emp.id, Collections.singletonList( getSnapshot().getRelSnapshot( schema.id ).getColumn( emp.id, "employeeno" ).id ) ); + getLogicalRel( schema.id ).addPrimaryKey( work.id, Collections.singletonList( getSnapshot().getRelSnapshot( schema.id ).getColumn( work.id, "employeeno" ).id ) ); // set foreign keys getLogicalRel( schema.id ).addForeignKey( emps.id, - ImmutableList.of( getLogicalRel( schema.id ).getColumn( emps.id, "deptno" ).id ), + ImmutableList.of( getSnapshot().getRelSnapshot( schema.id ).getColumn( emps.id, "deptno" ).id ), depts.id, - ImmutableList.of( getLogicalRel( schema.id ).getColumn( depts.id, "deptno" ).id ), + ImmutableList.of( getSnapshot().getRelSnapshot( schema.id ).getColumn( depts.id, "deptno" ).id ), "fk_emps_depts", ForeignKeyOption.NONE, ForeignKeyOption.NONE ); getLogicalRel( schema.id ).addForeignKey( work.id, - ImmutableList.of( getLogicalRel( schema.id ).getColumn( work.id, "employeeno" ).id ), + ImmutableList.of( getSnapshot().getRelSnapshot( schema.id ).getColumn( work.id, "employeeno" ).id ), emp.id, - ImmutableList.of( getLogicalRel( schema.id ).getColumn( emp.id, "employeeno" ).id ), + ImmutableList.of( getSnapshot().getRelSnapshot( schema.id ).getColumn( emp.id, "employeeno" ).id ), "fk_work_emp", ForeignKeyOption.NONE, ForeignKeyOption.NONE ); @@ -266,7 +256,7 @@ private void addDefaultCsvColumns( CatalogAdapter csv ) throws UnknownTableExcep private void addDefaultCsvColumn( CatalogAdapter csv, LogicalTable table, String name, PolyType type, Collation collation, int position, Integer length ) { - if ( !getLogicalRel( table.namespaceId ).checkIfExistsColumn( table.id, name ) ) { + if ( !getSnapshot().getRelSnapshot( table.namespaceId ).checkIfExistsColumn( table.id, name ) ) { long colId = getLogicalRel( table.namespaceId ).addColumn( name, table.id, position, type, null, length, null, null, null, false, collation ); String filename = table.name + ".csv"; if ( table.name.equals( "emp" ) || table.name.equals( "work" ) ) { @@ -283,7 +273,7 @@ private void addDefaultCsvColumn( CatalogAdapter csv, LogicalTable table, String private void addDefaultColumn( CatalogAdapter adapter, LogicalTable table, String name, PolyType type, Collation collation, int position, Integer length ) { - if ( !getLogicalRel( table.namespaceId ).checkIfExistsColumn( table.id, name ) ) { + if ( 
!getSnapshot().getRelSnapshot( table.namespaceId ).checkIfExistsColumn( table.id, name ) ) { long colId = getLogicalRel( table.namespaceId ).addColumn( name, table.id, position, type, null, length, null, null, null, false, collation ); getAllocRel( table.namespaceId ).addColumnPlacement( table, adapter.id, colId, PlacementType.AUTOMATIC, "col" + colId, table.name, name, position ); getAllocRel( table.namespaceId ).updateColumnPlacementPhysicalPosition( adapter.id, colId, position ); @@ -292,7 +282,7 @@ private void addDefaultColumn( CatalogAdapter adapter, LogicalTable table, Strin private void updateSnapshot() { - this.fullSnapshot = new FullSnapshot( idBuilder.getNewSnapshotId(), logicalCatalogs, allocationCatalogs, physicalCatalogs ); + this.snapshot = SnapshotBuilder.createSnapshot( idBuilder.getNewSnapshotId(), logicalCatalogs, allocationCatalogs, physicalCatalogs ); } @@ -313,7 +303,7 @@ public void rollback() { private void validateNamespaceType( long id, NamespaceType type ) { - if ( logicalCatalogs.get( id ).getLogicalNamespace().namespaceType != type ) { + if ( getSnapshot().getNamespace( id ).namespaceType != type ) { throw new RuntimeException( "error while retrieving catalog" ); } } @@ -363,25 +353,28 @@ public AllocationGraphCatalog getAllocGraph( long namespaceId ) { @Override public LogicalEntity getLogicalEntity( String entityName ) { - for ( LogicalCatalog catalog : logicalCatalogs.values() ) { + throw new NotImplementedException(); + + /*for ( LogicalCatalog catalog : logicalCatalogs.values() ) { LogicalEntity entity = catalog.getEntity( entityName ); if ( entity != null ) { return entity; } } - return null; + return null;*/ } @Override public LogicalEntity getLogicalEntity( long id ) { - for ( LogicalCatalog catalog : logicalCatalogs.values() ) { + throw new NotImplementedException(); + /*for ( LogicalCatalog catalog : logicalCatalogs.values() ) { LogicalEntity entity = catalog.getEntity( id ); if ( entity != null ) { return entity; } } - return null; + return null;*/ } @@ -471,45 +464,6 @@ public long addNamespace( String name, NamespaceType namespaceType, boolean case } - @Override - public @NonNull List getNamespaces( Pattern name ) { - if ( name == null ) { - return logicalCatalogs.values().stream().map( LogicalCatalog::getLogicalNamespace ).collect( Collectors.toList() ); - } - - return logicalCatalogs.values().stream().filter( c -> - c.getLogicalNamespace().caseSensitive - ? 
c.getLogicalNamespace().name.toLowerCase( Locale.ROOT ).matches( name.pattern ) - : c.getLogicalNamespace().name.matches( name.pattern ) ) - .map( LogicalCatalog::getLogicalNamespace ).collect( Collectors.toList() ); - } - - - @Override - public LogicalNamespace getNamespace( long id ) { - return logicalCatalogs.get( id ).getLogicalNamespace(); - } - - - @Override - public LogicalNamespace getNamespace( String name ) { - List namespaces = getNamespaces( Pattern.of( name ) ); - if ( namespaces.isEmpty() ) { - return null; - } else if ( namespaces.size() > 1 ) { - throw new RuntimeException( "multiple namespaces retrieved" ); - } - return namespaces.get( 0 ); - - } - - - @Override - public boolean checkIfExistsNamespace( String name ) { - return !getNamespaces( Pattern.of( name ) ).isEmpty(); - } - - @Override public void renameNamespace( long id, String name ) { if ( logicalCatalogs.get( id ) == null ) { @@ -530,42 +484,6 @@ public void deleteNamespace( long id ) { } - @Override - public CatalogUser getUser( String name ) throws UnknownUserException { - return users.values().stream().filter( u -> u.name.equals( name ) ).findFirst().orElse( null ); - } - - - @Override - public CatalogUser getUser( long id ) { - return users.get( id ); - } - - - @Override - public List getAdapters() { - return List.copyOf( adapters.values() ); - } - - - @Override - public CatalogAdapter getAdapter( String uniqueName ) throws UnknownAdapterException { - return adapters.values().stream().filter( a -> a.uniqueName.equals( uniqueName ) ).findFirst().orElse( null ); - } - - - @Override - public CatalogAdapter getAdapter( long id ) { - return adapters.get( id ); - } - - - @Override - public boolean checkIfExistsAdapter( long id ) { - return adapters.containsKey( id ); - } - - @Override public long addAdapter( String uniqueName, String clazz, AdapterType type, Map settings ) { long id = idBuilder.getNewAdapterId(); @@ -589,24 +507,6 @@ public void deleteAdapter( long id ) { } - @Override - public List getQueryInterfaces() { - return List.copyOf( interfaces.values() ); - } - - - @Override - public CatalogQueryInterface getQueryInterface( String uniqueName ) throws UnknownQueryInterfaceException { - return interfaces.values().stream().filter( i -> i.name.equals( uniqueName ) ).findFirst().orElse( null ); - } - - - @Override - public CatalogQueryInterface getQueryInterface( long id ) { - return interfaces.get( id ); - } - - @Override public long addQueryInterface( String uniqueName, String clazz, Map settings ) { long id = idBuilder.getNewInterfaceId(); @@ -635,35 +535,6 @@ public void clear() { } - @Override - public Snapshot getSnapshot() { - return new FullSnapshot( idBuilder.getNewSnapshotId(), logicalCatalogs.get( namespaceId ), allocationCatalogs, physicalCatalogs.get( namespaceId ) ); - } - - - @Override - public List> getAllocationsOnAdapter( long id ) { - return allocationCatalogs.values().stream().flatMap( c -> c.getAllocationsOnAdapter( id ).stream() ).collect( Collectors.toList() ); - } - - - @Override - public List> getPhysicalsOnAdapter( long id ) { - return physicalCatalogs.values().stream().flatMap( c -> c.getPhysicalsOnAdapter( id ).stream() ).collect( Collectors.toList() ); - } - - - @Override - public List getIndexes() { - return List.of(); - } - - - @Override - public List getTablesForPeriodicProcessing() { - return List.of(); - } - @Override public PolyCatalog copy() { diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocDocCatalog.java 
b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocDocCatalog.java index b5fe167759..db267ca77a 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocDocCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocDocCatalog.java @@ -24,7 +24,6 @@ import org.polypheny.db.catalog.catalogs.AllocationDocumentCatalog; import org.polypheny.db.catalog.entity.CatalogCollectionMapping; import org.polypheny.db.catalog.entity.CatalogCollectionPlacement; -import org.polypheny.db.catalog.entity.allocation.AllocationEntity; import org.polypheny.db.catalog.exceptions.GenericCatalogException; import org.polypheny.db.catalog.logistic.PlacementType; @@ -40,11 +39,6 @@ public PolyAllocDocCatalog copy() { } - @Override - public List> getAllocationsOnAdapter( long id ) { - return null; - } - @Override public long addCollectionLogistics( long schemaId, String name, List stores, boolean onlyPlacement ) throws GenericCatalogException { diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocGraphCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocGraphCatalog.java index ea823e045d..e51b7f576b 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocGraphCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocGraphCatalog.java @@ -22,7 +22,6 @@ import org.polypheny.db.catalog.Serializable; import org.polypheny.db.catalog.catalogs.AllocationGraphCatalog; import org.polypheny.db.catalog.entity.CatalogGraphPlacement; -import org.polypheny.db.catalog.entity.allocation.AllocationEntity; public class PolyAllocGraphCatalog implements Serializable, AllocationGraphCatalog { @@ -60,9 +59,5 @@ public PolyAllocGraphCatalog copy() { } - @Override - public List> getAllocationsOnAdapter( long id ) { - return null; - } } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocRelCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocRelCatalog.java index ab7285942a..aed015765e 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocRelCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocRelCatalog.java @@ -16,8 +16,6 @@ package org.polypheny.db.catalog.allocation; -import com.google.common.collect.ImmutableList; -import com.google.common.collect.ImmutableMap; import io.activej.serializer.BinarySerializer; import io.activej.serializer.annotations.Deserialize; import io.activej.serializer.annotations.Serialize; @@ -25,7 +23,6 @@ import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; -import java.util.stream.Collectors; import lombok.Getter; import lombok.extern.slf4j.Slf4j; import org.jetbrains.annotations.Nullable; @@ -33,18 +30,13 @@ import org.polypheny.db.catalog.PusherMap; import org.polypheny.db.catalog.Serializable; import org.polypheny.db.catalog.catalogs.AllocationRelationalCatalog; -import org.polypheny.db.catalog.entity.CatalogAdapter; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; import org.polypheny.db.catalog.entity.CatalogDataPlacement; -import org.polypheny.db.catalog.entity.CatalogPartition; -import org.polypheny.db.catalog.entity.CatalogPartitionGroup; -import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; import 
org.polypheny.db.catalog.entity.allocation.AllocationTable; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.exceptions.GenericCatalogException; import org.polypheny.db.catalog.logistic.DataPlacementRole; import org.polypheny.db.catalog.logistic.PartitionType; -import org.polypheny.db.catalog.logistic.Pattern; import org.polypheny.db.catalog.logistic.PlacementType; import org.polypheny.db.partition.properties.PartitionProperty; import org.polypheny.db.util.Pair; @@ -139,12 +131,7 @@ public PolyAllocRelCatalog copy() { return deserialize( serialize(), PolyAllocRelCatalog.class ); } - // move to Snapshot - @Override - public List getAllocationsOnAdapter( long id ) { - return null; - } @Nullable @@ -194,60 +181,6 @@ private void deleteColumnPlacementAlloc( long allocTableId, long columnId, boole } - @Override - public CatalogColumnPlacement getColumnPlacement( long adapterId, long columnId ) { - return allocations.get( adapterLogicalToAllocId.get( Pair.of( adapterId, columnId ) ) ).placements.stream().filter( p -> p.columnId == columnId ).findFirst().orElse( null ); - } - - - @Override - public boolean checkIfExistsColumnPlacement( long adapterId, long columnId ) { - return allocations.get( adapterLogicalToAllocId.get( Pair.of( adapterId, columnId ) ) ).placements.stream().anyMatch( p -> p.columnId == columnId ); - } - - - @Override - public List getColumnPlacements( long columnId ) { - return logicalColumnToPlacements.get( columnId ); - } - - - @Override - public List getColumnPlacementsOnAdapterPerTable( long adapterId, long tableId ) { - return adapterLogicalTableToAllocs.get( Pair.of( adapterId, tableId ) ).stream().flatMap( a -> a.placements.stream() ).collect( Collectors.toList() ); - } - - - @Override - public List getColumnPlacementsOnAdapter( long adapterId ) { - return adapterToAllocs.get( adapterId ).stream().flatMap( a -> a.placements.stream() ).collect( Collectors.toList() ); - } - - - @Override - public List getColumnPlacementsByColumn( long columnId ) { - return null; - } - - - @Override - public ImmutableMap> getColumnPlacementsByAdapter( long tableId ) { - return null; - } - - - @Override - public long getPartitionGroupByPartition( long partitionId ) { - return 0; - } - - - @Override - public List getColumnPlacementsOnAdapterAndSchema( long adapterId, long schemaId ) { - return null; - } - - @Override public void updateColumnPlacementType( long adapterId, long columnId, PlacementType placementType ) { @@ -284,12 +217,6 @@ public void deletePartitionGroup( long tableId, long schemaId, long partitionGro } - @Override - public CatalogPartitionGroup getPartitionGroup( long partitionGroupId ) { - return null; - } - - @Override public long addPartition( long tableId, long schemaId, long partitionGroupId, List effectivePartitionGroupQualifier, boolean isUnbound ) throws GenericCatalogException { return 0; @@ -302,18 +229,6 @@ public void deletePartition( long tableId, long schemaId, long partitionId ) { } - @Override - public CatalogPartition getPartition( long partitionId ) { - return null; - } - - - @Override - public List getPartitionsByTable( long tableId ) { - return null; - } - - @Override public void partitionTable( long tableId, PartitionType partitionType, long partitionColumnId, int numPartitionGroups, List partitionGroupIds, PartitionProperty partitionProperty ) { @@ -332,18 +247,6 @@ public void updateTablePartitionProperties( long tableId, PartitionProperty part } - @Override - public List getPartitionGroups( long tableId ) { 
- return null; - } - - - @Override - public List getPartitionGroups( Pattern schemaNamePattern, Pattern tableNamePattern ) { - return null; - } - - @Override public void updatePartitionGroup( long partitionGroupId, List partitionIds ) { @@ -368,102 +271,6 @@ public void updatePartition( long partitionId, Long partitionGroupId ) { } - @Override - public List getPartitions( long partitionGroupId ) { - return null; - } - - - @Override - public List getPartitions( Pattern schemaNamePattern, Pattern tableNamePattern ) { - return null; - } - - - @Override - public List getPartitionGroupNames( long tableId ) { - return null; - } - - - @Override - public List getColumnPlacementsByPartitionGroup( long tableId, long partitionGroupId, long columnId ) { - return null; - } - - - @Override - public List getAdaptersByPartitionGroup( long tableId, long partitionGroupId ) { - return null; - } - - - @Override - public List getPartitionGroupsOnDataPlacement( long adapterId, long tableId ) { - return null; - } - - - @Override - public List getPartitionsOnDataPlacement( long adapterId, long tableId ) { - return null; - } - - - @Override - public List getPartitionGroupsIndexOnDataPlacement( long adapterId, long tableId ) { - return null; - } - - - @Override - public CatalogDataPlacement getDataPlacement( long adapterId, long tableId ) { - return null; - } - - - @Override - public List getDataPlacements( long tableId ) { - return null; - } - - - @Override - public List getAllFullDataPlacements( long tableId ) { - return null; - } - - - @Override - public List getAllColumnFullDataPlacements( long tableId ) { - return null; - } - - - @Override - public List getAllPartitionFullDataPlacements( long tableId ) { - return null; - } - - - @Override - public List getDataPlacementsByRole( long tableId, DataPlacementRole role ) { - return null; - } - - - @Override - public List getPartitionPlacementsByRole( long tableId, DataPlacementRole role ) { - return null; - } - - - @Override - public List getPartitionPlacementsByIdAndRole( long tableId, long partitionId, DataPlacementRole role ) { - return null; - } - - @Override public boolean validateDataPlacementsConstraints( long tableId, long adapterId, List columnIdsToBeRemoved, List partitionsIdsToBeRemoved ) { return false; @@ -554,36 +361,6 @@ public void deletePartitionPlacement( long adapterId, long partitionId ) { } - @Override - public CatalogPartitionPlacement getPartitionPlacement( long adapterId, long partitionId ) { - return null; - } - - - @Override - public List getPartitionPlacementsByAdapter( long adapterId ) { - return null; - } - - - @Override - public List getPartitionPlacementsByTableOnAdapter( long adapterId, long tableId ) { - return null; - } - - - @Override - public List getAllPartitionPlacementsByTable( long tableId ) { - return null; - } - - - @Override - public List getPartitionPlacements( long partitionId ) { - return null; - } - - @Override public void addTableToPeriodicProcessing( long tableId ) { @@ -595,17 +372,4 @@ public void removeTableFromPeriodicProcessing( long tableId ) { } - - @Override - public boolean checkIfExistsPartitionPlacement( long adapterId, long partitionId ) { - return false; - } - - - @Override - public List getAllocationsFromLogical( long logicalId ) { - return logicalTableToAllocs.get( logicalId ); - } - - } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/DocumentCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/DocumentCatalog.java index 
386fb87801..1591653e39 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/DocumentCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/DocumentCatalog.java @@ -46,6 +46,7 @@ public class DocumentCatalog implements Serializable, LogicalDocumentCatalog { IdBuilder idBuilder = IdBuilder.getInstance(); @Serialize + @Getter public PusherMap collections; @Getter @Serialize diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/GraphCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/GraphCatalog.java index cd004c6c00..9372196051 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/GraphCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/GraphCatalog.java @@ -18,10 +18,11 @@ import io.activej.serializer.BinarySerializer; import java.util.List; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; import lombok.Builder; import lombok.Getter; import lombok.Value; -import lombok.With; import lombok.experimental.NonFinal; import lombok.experimental.SuperBuilder; import org.polypheny.db.adapter.DataStore; @@ -30,12 +31,10 @@ import org.polypheny.db.catalog.catalogs.LogicalCatalog; import org.polypheny.db.catalog.catalogs.LogicalGraphCatalog; import org.polypheny.db.catalog.entity.LogicalNamespace; -import org.polypheny.db.catalog.entity.logical.LogicalEntity; import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.catalog.exceptions.GenericCatalogException; import org.polypheny.db.catalog.exceptions.UnknownColumnException; import org.polypheny.db.catalog.exceptions.UnknownTableException; -import org.polypheny.db.catalog.logistic.Pattern; @Value @SuperBuilder(toBuilder = true) @@ -47,6 +46,9 @@ public class GraphCatalog implements Serializable, LogicalGraphCatalog { public LogicalNamespace logicalNamespace; public IdBuilder idBuilder = IdBuilder.getInstance(); + @Getter + ConcurrentHashMap graphs; + @NonFinal @Builder.Default @@ -54,37 +56,19 @@ public class GraphCatalog implements Serializable, LogicalGraphCatalog { public GraphCatalog( LogicalNamespace logicalNamespace ) { - this.logicalNamespace = logicalNamespace; + this( logicalNamespace, new ConcurrentHashMap<>() ); } - @Override - public GraphCatalog copy() { - return deserialize( serialize(), GraphCatalog.class ); - } - - - @Override - public boolean checkIfExistsEntity( String entityName ) { - return false; - } - - - @Override - public boolean checkIfExistsEntity( long tableId ) { - return false; - } - - - @Override - public LogicalEntity getEntity( String name ) { - return null; + public GraphCatalog( LogicalNamespace logicalNamespace, Map graphs ) { + this.logicalNamespace = logicalNamespace; + this.graphs = new ConcurrentHashMap<>( graphs ); } @Override - public LogicalEntity getEntity( long id ) { - return null; + public GraphCatalog copy() { + return deserialize( serialize(), GraphCatalog.class ); } @@ -118,18 +102,6 @@ public void deleteGraph( long id ) { } - @Override - public LogicalGraph getGraph( long id ) { - return null; - } - - - @Override - public List getGraphs( Pattern graphName ) { - return null; - } - - @Override public void addGraphLogistics( long id, List stores, boolean onlyPlacement ) throws GenericCatalogException, UnknownTableException, UnknownColumnException { diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/RelationalCatalog.java 
b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/RelationalCatalog.java index 1f1eab7a84..15568264aa 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/RelationalCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/RelationalCatalog.java @@ -24,7 +24,6 @@ import java.util.List; import java.util.Map; import java.util.Objects; -import java.util.concurrent.ConcurrentHashMap; import lombok.Builder; import lombok.Getter; import lombok.Value; @@ -34,7 +33,6 @@ import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.catalog.IdBuilder; -import org.polypheny.db.catalog.PusherMap; import org.polypheny.db.catalog.Serializable; import org.polypheny.db.catalog.catalogs.LogicalCatalog; import org.polypheny.db.catalog.catalogs.LogicalRelationalCatalog; @@ -62,27 +60,31 @@ public class RelationalCatalog implements Serializable, LogicalRelationalCatalog public BinarySerializer serializer = Serializable.builder.get().build( RelationalCatalog.class ); @Serialize - public PusherMap tables; + @Getter + public Map tables; @Serialize - public PusherMap columns; + @Getter + public Map columns; @Serialize @Getter public LogicalNamespace logicalNamespace; @Serialize + @Getter public Map indexes; @Serialize + @Getter public Map keys; @Serialize + @Getter public Map keyColumns; public IdBuilder idBuilder = IdBuilder.getInstance(); - ConcurrentHashMap names; @NonFinal @Builder.Default @@ -100,14 +102,12 @@ public RelationalCatalog( @Deserialize("keyColumns") Map keyColumns ) { this.logicalNamespace = logicalNamespace; - this.tables = new PusherMap<>( tables ); - this.columns = new PusherMap<>( columns ); + this.tables = tables; + this.columns = columns; this.indexes = indexes; this.keys = keys; this.keyColumns = keyColumns; - this.names = new ConcurrentHashMap<>(); - this.tables.addRowConnection( this.names, ( k, v ) -> logicalNamespace.caseSensitive ? v.name : v.name.toLowerCase(), ( k, v ) -> v ); } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/FullSnapshot.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/FullSnapshot.java deleted file mode 100644 index c402b9433b..0000000000 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/snapshot/FullSnapshot.java +++ /dev/null @@ -1,276 +0,0 @@ -/* - * Copyright 2019-2023 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.polypheny.db.catalog.snapshot; - -import com.google.common.collect.ImmutableMap; -import java.util.List; -import java.util.Map; -import java.util.stream.Collectors; -import lombok.Getter; -import lombok.Value; -import org.polypheny.db.catalog.catalogs.AllocationCatalog; -import org.polypheny.db.catalog.catalogs.LogicalCatalog; -import org.polypheny.db.catalog.catalogs.LogicalRelationalCatalog; -import org.polypheny.db.catalog.catalogs.PhysicalCatalog; -import org.polypheny.db.catalog.entity.CatalogEntity; -import org.polypheny.db.catalog.entity.LogicalNamespace; -import org.polypheny.db.catalog.entity.allocation.AllocationCollection; -import org.polypheny.db.catalog.entity.allocation.AllocationGraph; -import org.polypheny.db.catalog.entity.allocation.AllocationTable; -import org.polypheny.db.catalog.entity.logical.LogicalCollection; -import org.polypheny.db.catalog.entity.logical.LogicalColumn; -import org.polypheny.db.catalog.entity.logical.LogicalGraph; -import org.polypheny.db.catalog.entity.logical.LogicalTable; -import org.polypheny.db.catalog.entity.physical.PhysicalCollection; -import org.polypheny.db.catalog.entity.physical.PhysicalGraph; -import org.polypheny.db.catalog.entity.physical.PhysicalTable; -import org.polypheny.db.catalog.logistic.NamespaceType; -import org.polypheny.db.catalog.logistic.Pattern; -import org.polypheny.db.util.Pair; -import org.polypheny.db.util.Triple; - -@Value -public class FullSnapshot implements Snapshot { - - @Getter - long id; - PhysicalCatalog physicalCatalog; - LogicalCatalog logicalCatalog; - Map allocationCatalogs; - - ImmutableMap namespaceIds; - - ImmutableMap namespaceNames; - - ImmutableMap tableIds; - - ImmutableMap, LogicalTable> tableNames; - - - ImmutableMap columnIds; - - ImmutableMap, LogicalColumn> columnNames; - - ImmutableMap collectionIds; - ImmutableMap, LogicalCollection> collectionNames; - - ImmutableMap graphId; - - ImmutableMap graphName; - - - public FullSnapshot( long id, LogicalCatalog logicalCatalog, Map allocationCatalogs, PhysicalCatalog physicalCatalog ) { - this.id = id; - this.logicalCatalog = logicalCatalog; - this.allocationCatalogs = allocationCatalogs; - this.physicalCatalog = physicalCatalog; - - namespaceIds = ImmutableMap.copyOf( logicalCatalogs.values().stream().map( LogicalCatalog::getLogicalNamespace ).collect( Collectors.toMap( n -> n.id, n -> n ) ) ); - namespaceNames = ImmutableMap.copyOf( namespaceIds.values().stream().collect( Collectors.toMap( n -> n.name, n -> n ) ) ); - - tableIds = logicalCatalogs.values().stream() - .filter( c -> c.getLogicalNamespace().namespaceType == NamespaceType.RELATIONAL ) - .map( c -> (LogicalRelationalCatalog) c ).flatMap( c -> c. 
) - } - - - @Override - public LogicalNamespace getNamespace( long id ) { - return null; - } - - - @Override - public LogicalNamespace getNamespace( String name ) { - return null; - } - - - @Override - public List getNamespaces( Pattern name ) { - return null; - } - - - @Override - public CatalogEntity getEntity( long id ) { - return null; - } - - - @Override - public CatalogEntity getEntity( long namespaceId, String name ) { - return null; - } - - - @Override - public CatalogEntity getEntity( long namespaceId, Pattern name ) { - return null; - } - - - @Override - public CatalogEntity getEntity( List names ) { - return null; - } - - - @Override - public LogicalTable getLogicalTable( List names ) { - return null; - } - - - @Override - public LogicalCollection getLogicalCollection( List names ) { - return null; - } - - - @Override - public LogicalGraph getLogicalGraph( List names ) { - return null; - } - - - @Override - public LogicalTable getLogicalTable( long id ) { - return null; - } - - - @Override - public LogicalTable getLogicalTable( long namespaceId, String name ) { - return null; - } - - - @Override - public List getLogicalTables( long namespaceId, Pattern name ) { - return null; - } - - - @Override - public LogicalColumn getLogicalColumn( long id ) { - return null; - } - - - @Override - public LogicalCollection getLogicalCollection( long id ) { - return null; - } - - - @Override - public LogicalCollection getLogicalCollection( long namespaceId, String name ) { - return null; - } - - - @Override - public List getLogicalCollections( long namespaceId, Pattern name ) { - return null; - } - - - @Override - public LogicalGraph getLogicalGraph( long id ) { - return null; - } - - - @Override - public LogicalGraph getLogicalGraph( long namespaceId, String name ) { - return null; - } - - - @Override - public List getLogicalGraphs( long namespaceId, Pattern name ) { - return null; - } - - - @Override - public AllocationTable getAllocTable( long id ) { - return null; - } - - - @Override - public AllocationCollection getAllocCollection( long id ) { - return null; - } - - - @Override - public AllocationGraph getAllocGraph( long id ) { - return null; - } - - - @Override - public PhysicalTable getPhysicalTable( long id ) { - return null; - } - - - @Override - public PhysicalTable getPhysicalTable( long logicalId, long adapterId ) { - return null; - } - - - @Override - public PhysicalCollection getPhysicalCollection( long id ) { - return null; - } - - - @Override - public PhysicalCollection getPhysicalCollection( long logicalId, long adapterId ) { - return null; - } - - - @Override - public PhysicalGraph getPhysicalGraph( long id ) { - return null; - } - - - @Override - public PhysicalGraph getPhysicalGraph( long logicalId, long adapterId ) { - return null; - } - - - @Override - public boolean isPartitioned( long id ) { - return false; - } - - - @Override - public LogicalColumn getColumn( long columnId ) { - return null; - } - -} diff --git a/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/RequestParser.java b/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/RequestParser.java index 812bb4f787..6c941399db 100644 --- a/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/RequestParser.java +++ b/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/RequestParser.java @@ -30,6 +30,7 @@ import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.Set; import java.util.regex.Matcher; import 
java.util.regex.Pattern;
@@ -49,6 +50,7 @@
import org.polypheny.db.catalog.exceptions.UnknownColumnException;
import org.polypheny.db.catalog.exceptions.UnknownSchemaException;
import org.polypheny.db.catalog.exceptions.UnknownTableException;
+import org.polypheny.db.catalog.snapshot.Snapshot;
import org.polypheny.db.iface.AuthenticationException;
import org.polypheny.db.iface.Authenticator;
import org.polypheny.db.languages.OperatorRegistry;
@@ -71,7 +73,6 @@
@Slf4j
public class RequestParser {
- private final Catalog catalog;
private final TransactionManager transactionManager;
private final Authenticator authenticator;
private final String databaseName;
@@ -83,6 +84,7 @@ public class RequestParser {
private final static Pattern SORTING_ENTRY_PATTERN = Pattern.compile( "^(?[a-zA-Z]\\w*(?:\\.[a-zA-Z]\\w*\\.[a-zA-Z]\\w*)?)(?:@(?ASC|DESC))?$" );
+ private final Snapshot snapshot;
public RequestParser( final TransactionManager transactionManager, final Authenticator authenticator, final String userName, final String databaseName ) {
@@ -92,7 +94,7 @@ public RequestParser( final TransactionManager transactionManager, final Authent
@VisibleForTesting
RequestParser( Catalog catalog, TransactionManager transactionManager, Authenticator authenticator, String userName, String databaseName ) {
- this.catalog = catalog;
+ this.snapshot = catalog.getSnapshot();
this.transactionManager = transactionManager;
this.authenticator = authenticator;
this.userName = userName;
@@ -259,8 +261,8 @@ LogicalTable parseCatalogTableName( String tableName ) throws ParserException {
}
try {
- LogicalNamespace namespace = catalog.getNamespace( tableElements[0] );
- LogicalTable table = this.catalog.getLogicalRel( namespace.id ).getTable( tableElements[1] );
+ LogicalNamespace namespace = snapshot.getNamespace( tableElements[0] );
+ LogicalTable table = snapshot.getRelSnapshot( namespace.id ).getTable( tableElements[1] );
if ( log.isDebugEnabled() ) {
log.debug( "Finished parsing table \"{}\".", tableName );
}
@@ -362,7 +364,7 @@ List generateRequestColumnsWithProject( String projectionString,
Set notYetAdded = new HashSet<>( validColumns );
notYetAdded.removeAll( projectedColumns );
for ( long columnId : notYetAdded ) {
- LogicalColumn column = this.catalog.getSnapshot().getColumn( columnId );
+ LogicalColumn column = snapshot.getNamespaces( null ).stream().map( n -> this.snapshot.getRelSnapshot( n.id ).getColumn( columnId ) ).filter( Objects::nonNull ).findFirst().orElse( null );
int calculatedPosition = tableOffsets.get( column.tableId ) + column.position - 1;
RequestColumn requestColumn = new RequestColumn( column, calculatedPosition, calculatedPosition, null, null, false );
columns.add( requestColumn );
@@ -419,9 +421,9 @@ private LogicalColumn getCatalogColumnFromString( String name ) throws ParserExc
throw new ParserException( ParserErrorCode.PROJECTION_MALFORMED, name );
}
- LogicalNamespace namespace = this.catalog.getNamespace( splitString[0] );
+ LogicalNamespace namespace = snapshot.getNamespace( splitString[0] );
- return this.catalog.getLogicalRel( namespace.id ).getColumn( splitString[1], splitString[2] );
+ return snapshot.getRelSnapshot( namespace.id ).getColumn( splitString[1], splitString[2] );
}
@@ -749,7 +751,7 @@ private List> parseInsertStatementValues( Map rowVal
public Map generateNameMapping( List tables ) {
Map nameMapping = new HashMap<>();
for ( LogicalTable table : tables ) {
- for ( LogicalColumn column : this.catalog.getLogicalRel( table.namespaceId ).getColumns( table.id ) ) {
+ for ( LogicalColumn column : snapshot.getRelSnapshot( table.namespaceId ).getColumns( table.id ) ) {
nameMapping.put( column.getSchemaName() + "." + column.getTableName() + "." + column.name, column );
}
}
diff --git a/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/Rest.java b/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/Rest.java
index 74322d22a7..7e37e39367 100644
--- a/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/Rest.java
+++ b/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/Rest.java
@@ -157,10 +157,10 @@ String processPatchResource( final ResourcePatchRequest resourcePatchRequest, fi
RexBuilder rexBuilder = new RexBuilder( typeFactory );
Snapshot snapshot = statement.getTransaction().getSnapshot();
- LogicalNamespace namespace = Catalog.getInstance().getNamespace( resourcePatchRequest.tables.get( 0 ).getNamespaceName() );
+ LogicalNamespace namespace = snapshot.getNamespace( resourcePatchRequest.tables.get( 0 ).getNamespaceName() );
LogicalTable table = null;
try {
- table = Catalog.getInstance().getLogicalRel( namespace.id ).getTable( resourcePatchRequest.tables.get( 0 ).name );
+ table = snapshot.getRelSnapshot( namespace.id ).getTable( resourcePatchRequest.tables.get( 0 ).name );
} catch ( UnknownTableException e ) {
throw new RuntimeException( e );
}
@@ -220,7 +220,7 @@ String processDeleteResource( final ResourceDeleteRequest resourceDeleteRequest,
JavaTypeFactory typeFactory = transaction.getTypeFactory();
RexBuilder rexBuilder = new RexBuilder( typeFactory );
- LogicalTable table = getLogicalTable( resourceDeleteRequest.tables.get( 0 ).getNamespaceName(), resourceDeleteRequest.tables.get( 0 ).getName() );
+ LogicalTable table = getLogicalTable( transaction.getSnapshot(), resourceDeleteRequest.tables.get( 0 ).getNamespaceName(), resourceDeleteRequest.tables.get( 0 ).getName() );
// Table Scans
algBuilder = this.tableScans( algBuilder, rexBuilder, resourceDeleteRequest.tables );
@@ -264,12 +264,11 @@ String processDeleteResource( final ResourceDeleteRequest resourceDeleteRequest,
}
- private static LogicalTable getLogicalTable( String namespaceName, String tableName ) {
- Catalog catalog = Catalog.getInstance();
- LogicalNamespace namespace = catalog.getNamespace( namespaceName );
+ private static LogicalTable getLogicalTable( Snapshot snapshot, String namespaceName, String tableName ) {
+ LogicalNamespace namespace = snapshot.getNamespace( namespaceName );
LogicalTable table;
try {
- table = catalog.getLogicalRel( namespace.id ).getTable( tableName );
+ table = snapshot.getRelSnapshot( namespace.id ).getTable( tableName );
} catch ( UnknownTableException e ) {
throw new RuntimeException( e );
}
@@ -284,7 +283,7 @@ String processPostResource( final ResourcePostRequest insertValueRequest, final
JavaTypeFactory typeFactory = transaction.getTypeFactory();
RexBuilder rexBuilder = new RexBuilder( typeFactory );
- LogicalTable table = getLogicalTable( insertValueRequest.tables.get( 0 ).getNamespaceName(), insertValueRequest.tables.get( 0 ).getName() );
+ LogicalTable table = getLogicalTable( transaction.getSnapshot(), insertValueRequest.tables.get( 0 ).getNamespaceName(), insertValueRequest.tables.get( 0 ).getName() );
// Values
AlgDataType tableRowType = table.getRowType();
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/SqlProcessorImpl.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/SqlProcessorImpl.java
index 22f5a8cec6..b7a2cc8adc 100644
---
a/plugins/sql-language/src/main/java/org/polypheny/db/sql/SqlProcessorImpl.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/SqlProcessorImpl.java @@ -87,6 +87,8 @@ public class SqlProcessorImpl extends Processor { @Setter private PolyphenyDbSqlValidator validator; + private final Snapshot snapshot = Catalog.getInstance().getSnapshot(); + static { SqlParser.ConfigBuilder configConfigBuilder = Parser.configBuilder(); @@ -247,7 +249,7 @@ private void addDefaultValues( Transaction transaction, SqlInsert insert ) { if ( oldColumnList != null ) { LogicalTable catalogTable = getCatalogTable( transaction, (SqlIdentifier) insert.getTargetTable() ); - NamespaceType namespaceType = Catalog.getInstance().getNamespace( catalogTable.namespaceId ).namespaceType; + NamespaceType namespaceType = Catalog.getInstance().getSnapshot().getNamespace( catalogTable.namespaceId ).namespaceType; catalogTable = getCatalogTable( transaction, (SqlIdentifier) insert.getTargetTable() ); @@ -360,17 +362,17 @@ private LogicalTable getCatalogTable( Transaction transaction, SqlIdentifier tab long schemaId; String tableOldName; if ( tableName.names.size() == 3 ) { // DatabaseName.SchemaName.TableName - schemaId = Catalog.getInstance().getNamespace( tableName.names.get( 1 ) ).id; + schemaId = snapshot.getNamespace( tableName.names.get( 1 ) ).id; tableOldName = tableName.names.get( 2 ); } else if ( tableName.names.size() == 2 ) { // SchemaName.TableName - schemaId = Catalog.getInstance().getNamespace( tableName.names.get( 0 ) ).id; + schemaId = snapshot.getNamespace( tableName.names.get( 0 ) ).id; tableOldName = tableName.names.get( 1 ); } else { // TableName - schemaId = Catalog.getInstance().getNamespace( transaction.getDefaultSchema().name ).id; + schemaId = snapshot.getNamespace( transaction.getDefaultSchema().name ).id; tableOldName = tableName.names.get( 0 ); } try { - catalogTable = Catalog.getInstance().getLogicalRel( schemaId ).getTable( tableOldName ); + catalogTable = snapshot.getRelSnapshot( schemaId ).getTable( tableOldName ); } catch ( UnknownTableException e ) { throw new RuntimeException( e ); } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlDdl.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlDdl.java index 92daff0848..0e01535c96 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlDdl.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlDdl.java @@ -30,6 +30,7 @@ import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.exceptions.UnknownColumnException; import org.polypheny.db.catalog.exceptions.UnknownTableException; +import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.languages.ParserPos; import org.polypheny.db.nodes.Operator; import org.polypheny.db.prepare.Context; @@ -47,6 +48,7 @@ public abstract class SqlDdl extends SqlCall { protected static final SqlOperator DDL_OPERATOR = new SqlSpecialOperator( "DDL", Kind.OTHER_DDL ); private final SqlOperator operator; + private final Snapshot snapshot = Catalog.getInstance().getSnapshot(); /** @@ -68,19 +70,18 @@ protected LogicalTable getCatalogTable( Context context, SqlIdentifier tableName LogicalTable catalogTable; long schemaId; String tableOldName; - Catalog catalog = Catalog.getInstance(); if ( tableName.names.size() == 3 ) { // DatabaseName.SchemaName.TableName - schemaId = catalog.getNamespace( tableName.names.get( 1 ) ).id; + schemaId = snapshot.getNamespace( 
tableName.names.get( 1 ) ).id; tableOldName = tableName.names.get( 2 ); } else if ( tableName.names.size() == 2 ) { // SchemaName.TableName - schemaId = catalog.getNamespace( tableName.names.get( 0 ) ).id; + schemaId = snapshot.getNamespace( tableName.names.get( 0 ) ).id; tableOldName = tableName.names.get( 1 ); } else { // TableName - schemaId = catalog.getNamespace( context.getDefaultSchemaName() ).id; + schemaId = snapshot.getNamespace( context.getDefaultSchemaName() ).id; tableOldName = tableName.names.get( 0 ); } try { - catalogTable = catalog.getLogicalRel( schemaId ).getTable( tableOldName ); + catalogTable = snapshot.getRelSnapshot( schemaId ).getTable( tableOldName ); } catch ( UnknownTableException e ) { throw new RuntimeException( e ); } @@ -91,7 +92,7 @@ protected LogicalTable getCatalogTable( Context context, SqlIdentifier tableName protected LogicalColumn getCatalogColumn( long namespaceId, long tableId, SqlIdentifier columnName ) { LogicalColumn logicalColumn; try { - logicalColumn = Catalog.getInstance().getLogicalRel( namespaceId ).getColumn( tableId, columnName.getSimple() ); + logicalColumn = snapshot.getRelSnapshot( namespaceId ).getColumn( tableId, columnName.getSimple() ); } catch ( UnknownColumnException e ) { throw CoreUtil.newContextException( columnName.getPos(), RESOURCE.columnNotFoundInTable( columnName.getSimple(), tableId + "" ) ); } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlUtil.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlUtil.java index bcb5ecae9b..81d58f42db 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlUtil.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlUtil.java @@ -42,6 +42,7 @@ import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypePrecedenceList; import org.polypheny.db.catalog.entity.CatalogEntity; +import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.languages.OperatorRegistry; @@ -606,7 +607,8 @@ public static SqlLiteral symbol( Enum o, ParserPos parserPos ) { public static AlgDataType getNamedType( Identifier node, Snapshot snapshot ) { - LogicalTable table = snapshot.getLogicalTable( node.getNames() ); + LogicalNamespace namespace = snapshot.getNamespace( node.getNames().get( 0 ) ); + LogicalTable table = snapshot.getRelSnapshot( namespace.id ).getLogicalTable( node.getNames() ); if ( table != null ) { return table.getRowType(); } else { diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateMaterializedView.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateMaterializedView.java index 4c4dfe4fae..85f66c7392 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateMaterializedView.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateMaterializedView.java @@ -36,6 +36,7 @@ import org.polypheny.db.catalog.exceptions.GenericCatalogException; import org.polypheny.db.catalog.exceptions.UnknownColumnException; import org.polypheny.db.catalog.logistic.PlacementType; +import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.config.RuntimeConfig; import org.polypheny.db.ddl.DdlManager; import org.polypheny.db.ddl.exception.ColumnNotExistsException; @@ -72,6 +73,7 @@ public class 
SqlCreateMaterializedView extends SqlCreate implements ExecutableSt private final SqlIdentifier freshnessId; private static final SqlOperator OPERATOR = new SqlSpecialOperator( "CREATE MATERIALIZED VIEW", Kind.CREATE_MATERIALIZED_VIEW ); + private Snapshot snapshot = Catalog.getInstance().getSnapshot(); /** @@ -113,20 +115,19 @@ public List getSqlOperandList() { @Override public void execute( Context context, Statement statement, QueryParameters parameters ) { - Catalog catalog = Catalog.getInstance(); long schemaId; String viewName; MaterializedViewManager.getInstance().isCreatingMaterialized = true; if ( name.names.size() == 3 ) { // DatabaseName.SchemaName.TableName - schemaId = catalog.getNamespace( name.names.get( 1 ) ).id; + schemaId = snapshot.getNamespace( name.names.get( 1 ) ).id; viewName = name.names.get( 2 ); } else if ( name.names.size() == 2 ) { // SchemaName.TableName - schemaId = catalog.getNamespace( name.names.get( 0 ) ).id; + schemaId = snapshot.getNamespace( name.names.get( 0 ) ).id; viewName = name.names.get( 1 ); } else { // TableName - schemaId = catalog.getNamespace( context.getDefaultSchemaName() ).id; + schemaId = snapshot.getNamespace( context.getDefaultSchemaName() ).id; viewName = name.names.get( 0 ); } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateTable.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateTable.java index 61aab71300..93cc996c8e 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateTable.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateTable.java @@ -38,6 +38,7 @@ import org.polypheny.db.catalog.exceptions.UnknownTableException; import org.polypheny.db.catalog.exceptions.UnknownUserException; import org.polypheny.db.catalog.logistic.PlacementType; +import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.ddl.DdlManager; import org.polypheny.db.ddl.DdlManager.ColumnTypeInformation; import org.polypheny.db.ddl.DdlManager.ConstraintInformation; @@ -86,6 +87,7 @@ public class SqlCreateTable extends SqlCreate implements ExecutableStatement { private final List> partitionQualifierList; private static final SqlOperator OPERATOR = new SqlSpecialOperator( "CREATE TABLE", Kind.CREATE_TABLE ); + private final Snapshot snapshot = Catalog.getInstance().getSnapshot(); /** @@ -203,16 +205,16 @@ public void execute( Context context, Statement statement, QueryParameters param if ( query != null ) { throw new RuntimeException( "Not supported yet" ); } - Catalog catalog = Catalog.getInstance(); + String tableName; long schemaId; // Cannot use getLogicalTable() here since table does not yet exist if ( name.names.size() == 2 ) { // SchemaName.TableName - schemaId = catalog.getNamespace( name.names.get( 0 ) ).id; + schemaId = snapshot.getNamespace( name.names.get( 0 ) ).id; tableName = name.names.get( 1 ); } else { // TableName - schemaId = catalog.getNamespace( context.getDefaultSchemaName() ).id; + schemaId = snapshot.getNamespace( context.getDefaultSchemaName() ).id; tableName = name.names.get( 0 ); } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateView.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateView.java index 088639ff6e..55f2a1b5ed 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateView.java +++ 
b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateView.java @@ -33,6 +33,7 @@ import org.polypheny.db.catalog.exceptions.GenericCatalogException; import org.polypheny.db.catalog.exceptions.UnknownColumnException; import org.polypheny.db.catalog.logistic.PlacementType; +import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.config.RuntimeConfig; import org.polypheny.db.ddl.DdlManager; import org.polypheny.db.languages.ParserPos; @@ -68,6 +69,9 @@ public class SqlCreateView extends SqlCreate implements ExecutableStatement { private static final SqlOperator OPERATOR = new SqlSpecialOperator( "CREATE VIEW", Kind.CREATE_VIEW ); + private final Snapshot snapshot = Catalog.getInstance().getSnapshot(); + + /** * Creates a SqlCreateView. */ @@ -98,18 +102,17 @@ public List getSqlOperandList() { @Override public void execute( Context context, Statement statement, QueryParameters parameters ) { - Catalog catalog = Catalog.getInstance(); String viewName; long schemaId; if ( name.names.size() == 3 ) { // DatabaseName.SchemaName.TableName - schemaId = catalog.getNamespace( name.names.get( 1 ) ).id; + schemaId = snapshot.getNamespace( name.names.get( 1 ) ).id; viewName = name.names.get( 2 ); } else if ( name.names.size() == 2 ) { // SchemaName.TableName - schemaId = catalog.getNamespace( name.names.get( 0 ) ).id; + schemaId = snapshot.getNamespace( name.names.get( 0 ) ).id; viewName = name.names.get( 1 ); } else { // TableName - schemaId = catalog.getNamespace( context.getDefaultSchemaName() ).id; + schemaId = snapshot.getNamespace( context.getDefaultSchemaName() ).id; viewName = name.names.get( 0 ); } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyPartitions.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyPartitions.java index 78b556d43c..fc2ae675ea 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyPartitions.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyPartitions.java @@ -27,9 +27,9 @@ import org.polypheny.db.adapter.AdapterManager; import org.polypheny.db.adapter.DataStore; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.entity.CatalogPartitionGroup; import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.ddl.DdlManager; import org.polypheny.db.ddl.exception.LastPlacementException; import org.polypheny.db.languages.ParserPos; @@ -146,7 +146,7 @@ public void execute( Context context, Statement statement, QueryParameters param } // If name partitions are specified else if ( !partitionGroupNamesList.isEmpty() && partitionGroupList.isEmpty() ) { - List catalogPartitionGroups = catalog.getAllocRel( catalogTable.namespaceId ).getPartitionGroups( tableId ); + List catalogPartitionGroups = catalog.getSnapshot().getAllocSnapshot().getPartitionGroups( tableId ); for ( String partitionName : partitionGroupNamesList.stream().map( Object::toString ) .collect( Collectors.toList() ) ) { boolean isPartOfTable = false; @@ -159,14 +159,14 @@ else if ( !partitionGroupNamesList.isEmpty() && partitionGroupList.isEmpty() ) { } if ( !isPartOfTable ) { throw new RuntimeException( "Specified Partition-Name: '" + partitionName + "' is not part of table '" - + 
catalogTable.name + "', has only " + catalog.getAllocRel( catalogTable.namespaceId ).getPartitionGroupNames( tableId ) + " partitions" ); + + catalogTable.name + "', has only " + catalog.getSnapshot().getAllocSnapshot().getPartitionGroupNames( tableId ) + " partitions" ); } } } // Check if in-memory dataPartitionPlacement Map should even be changed and therefore start costly partitioning // Avoid unnecessary partitioning when the placement is already partitioned in the same way it has been specified - if ( tempPartitionList.equals( catalog.getAllocRel( catalogTable.namespaceId ).getPartitionGroupsOnDataPlacement( storeId, tableId ) ) ) { + if ( tempPartitionList.equals( catalog.getSnapshot().getAllocSnapshot().getPartitionGroupsOnDataPlacement( storeId, tableId ) ) ) { log.info( "The data placement for table: '{}' on store: '{}' already contains all specified partitions of statement: {}", catalogTable.name, storeName, partitionGroupList ); return; diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/EmptyScope.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/EmptyScope.java index 4a796deede..29b44e13b1 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/EmptyScope.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/EmptyScope.java @@ -25,6 +25,7 @@ import org.polypheny.db.algebra.constant.Monotonicity; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.catalog.entity.CatalogEntity; +import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.schema.PolyphenyDbSchema; import org.polypheny.db.sql.language.SqlCall; @@ -81,7 +82,8 @@ public void resolve( List names, NameMatcher nameMatcher, boolean deep, @Override public SqlValidatorNamespace getTableNamespace( List names ) { - CatalogEntity table = validator.snapshot.getLogicalTable( names ); + LogicalNamespace namespace = validator.snapshot.getNamespace( names.get( 0 ) ); + CatalogEntity table = validator.snapshot.getRelSnapshot( namespace.id ).getLogicalTable( names.get( 1 ) ); return table != null ? new EntityNamespace( validator, table ) : null; @@ -93,9 +95,10 @@ public void resolveTable( List names, NameMatcher nameMatcher, Path path final List resolves = ((ResolvedImpl) resolved).resolves; // Look in the default schema, then default catalog, then root schema. 
- LogicalTable table = validator.snapshot.getLogicalTable( names ); + LogicalNamespace namespace = validator.snapshot.getNamespace( names.get( 0 ) ); + LogicalTable table = validator.snapshot.getRelSnapshot( namespace.id ).getLogicalTable( names.get( 1 ) ); if ( table != null ) { - resolves.add( new Resolve( validator.snapshot.getLogicalTable( names ) ) ); + resolves.add( new Resolve( validator.snapshot.getRelSnapshot( namespace.id ).getLogicalTable( names.get( 1 ) ) ) ); } } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorImpl.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorImpl.java index f21f4e8985..b3fb106ba9 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorImpl.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorImpl.java @@ -3391,7 +3391,7 @@ private boolean isRolledUpColumn( SqlIdentifier identifier, SqlValidatorScope sc private @Nullable CatalogEntity findTable( String tableName, boolean caseSensitive ) { - return snapshot.getLogicalTable( List.of( tableName ) ); + return snapshot.getNamespaces( null ).stream().map( n -> snapshot.getRelSnapshot( n.id ).getLogicalTable( tableName ) ).filter( Objects::nonNull ).findFirst().orElse( null ); } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorUtil.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorUtil.java index 3089b135bd..1f0e8de3a7 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorUtil.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorUtil.java @@ -39,9 +39,8 @@ import org.polypheny.db.algebra.type.AlgDataTypeFieldImpl; import org.polypheny.db.algebra.type.AlgDataTypeSystem; import org.polypheny.db.catalog.entity.CatalogEntity; -import org.polypheny.db.catalog.entity.logical.LogicalCollection; -import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.languages.OperatorRegistry; import org.polypheny.db.languages.ParserPos; @@ -620,7 +619,8 @@ public static boolean isTableRelational( SqlValidatorImpl validator ) { return false; } SqlIdentifier id = ((SqlIdentifier) validator.getTableScope().getNode()); - LogicalGraph graph = validator.getSnapshot().getLogicalGraph( id.names ); + return validator.snapshot.getNamespace( id.names.get( 0 ) ).namespaceType == NamespaceType.RELATIONAL; + /*LogicalGraph graph = validator.snapshot.getGraphSnapshot( namespace.id ).getLogicalTable( names.get( 1 ) ); if ( graph != null ) { return false; } @@ -628,7 +628,7 @@ public static boolean isTableRelational( SqlValidatorImpl validator ) { if ( collection != null ) { return false; } - return true; + return true;*/ } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/WithScope.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/WithScope.java index 8d9127a2eb..7e0a3f93dd 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/WithScope.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/WithScope.java @@ -20,6 +20,7 @@ import java.util.List; import 
org.polypheny.db.algebra.type.StructKind; import org.polypheny.db.catalog.entity.CatalogEntity; +import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.sql.language.SqlNode; import org.polypheny.db.sql.language.SqlWithItem; import org.polypheny.db.util.NameMatcher; @@ -69,7 +70,8 @@ public void resolveTable( List names, NameMatcher nameMatcher, Path path if ( names.size() == 1 && names.equals( withItem.name.names ) ) { //final SqlValidatorNamespace ns = validator.getSqlNamespace( withItem ); //final Step path2 = path.plus( ns.getRowType(), 0, names.get( 0 ), StructKind.FULLY_QUALIFIED ); - CatalogEntity entity = validator.snapshot.getLogicalTable( names ); + LogicalNamespace namespace = validator.snapshot.getNamespace( names.get( 0 ) ); + CatalogEntity entity = validator.snapshot.getRelSnapshot( namespace.id ).getLogicalTable( names.get( 0 ) ); resolved.found( entity ); return; } @@ -82,7 +84,8 @@ public void resolve( List names, NameMatcher nameMatcher, boolean deep, if ( names.size() == 1 && names.equals( withItem.name.names ) ) { final SqlValidatorNamespace ns = validator.getSqlNamespace( withItem ); final Step path = Path.EMPTY.plus( ns.getRowType(), 0, names.get( 0 ), StructKind.FULLY_QUALIFIED ); - CatalogEntity entity = validator.snapshot.getLogicalTable( names ); + LogicalNamespace namespace = validator.snapshot.getNamespace( names.get( 0 ) ); + CatalogEntity entity = validator.snapshot.getRelSnapshot( namespace.id ).getLogicalTable( names.get( 0 ) ); resolved.found( entity ); return; } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/web/SchemaToJsonMapper.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/web/SchemaToJsonMapper.java index 0edfd3745d..3c662ba516 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/web/SchemaToJsonMapper.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/web/SchemaToJsonMapper.java @@ -41,7 +41,7 @@ public class SchemaToJsonMapper { public static String exportTableDefinitionAsJson( @NonNull LogicalTable catalogTable, boolean exportPrimaryKey, boolean exportDefaultValues ) { List columns = new LinkedList<>(); - for ( LogicalColumn logicalColumn : Catalog.getInstance().getLogicalRel( catalogTable.namespaceId ).getColumns( catalogTable.id ) ) { + for ( LogicalColumn logicalColumn : Catalog.getInstance().getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getColumns( catalogTable.id ) ) { String defaultValue = null; String defaultFunctionName = null; if ( exportDefaultValues ) { @@ -61,7 +61,7 @@ public static String exportTableDefinitionAsJson( @NonNull LogicalTable catalogT } List primaryKeyColumnNames = null; if ( exportPrimaryKey ) { - for ( CatalogKey catalogKey : Catalog.getInstance().getLogicalRel( catalogTable.namespaceId ).getTableKeys( catalogTable.id ) ) { + for ( CatalogKey catalogKey : Catalog.getInstance().getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getTableKeys( catalogTable.id ) ) { if ( catalogKey.id == catalogTable.primaryKey ) { primaryKeyColumnNames = catalogKey.getColumnNames(); break; diff --git a/webui/src/main/java/org/polypheny/db/webui/Crud.java b/webui/src/main/java/org/polypheny/db/webui/Crud.java index bd5cbca23a..6f31b75720 100644 --- a/webui/src/main/java/org/polypheny/db/webui/Crud.java +++ b/webui/src/main/java/org/polypheny/db/webui/Crud.java @@ -130,6 +130,7 @@ import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.catalog.logistic.PartitionType; import 
org.polypheny.db.catalog.logistic.PlacementType; +import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.config.RuntimeConfig; import org.polypheny.db.ddl.DdlManager; import org.polypheny.db.ddl.exception.ColumnNotExistsException; @@ -302,7 +303,7 @@ Result getTable( final UIRequest request ) { // determine if it is a view or a table LogicalTable catalogTable; try { - catalogTable = catalog.getRelSnapshot( catalog.getSnapshot().getNamespace( t[0] ).id ).getTable( t[1] ); + catalogTable = catalog.getSnapshot().getRelSnapshot( catalog.getSnapshot().getNamespace( t[0] ).id ).getTable( t[1] ); result.setNamespaceType( catalogTable.getNamespaceType() ); if ( catalogTable.modifiable ) { result.setType( ResultType.TABLE ); @@ -318,12 +319,12 @@ Result getTable( final UIRequest request ) { ArrayList cols = new ArrayList<>(); ArrayList primaryColumns; if ( catalogTable.primaryKey != null ) { - CatalogPrimaryKey primaryKey = catalog.getRelSnapshot( catalogTable.namespaceId ).getPrimaryKey( catalogTable.primaryKey ); + CatalogPrimaryKey primaryKey = catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getPrimaryKey( catalogTable.primaryKey ); primaryColumns = new ArrayList<>( primaryKey.getColumnNames() ); } else { primaryColumns = new ArrayList<>(); } - for ( LogicalColumn logicalColumn : catalog.getRelSnapshot( catalogTable.namespaceId ).getColumns( catalogTable.id ) ) { + for ( LogicalColumn logicalColumn : catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getColumns( catalogTable.id ) ) { String defaultValue = logicalColumn.defaultValue == null ? null : logicalColumn.defaultValue.value; String collectionsType = logicalColumn.collectionsType == null ? "" : logicalColumn.collectionsType.getName(); cols.add( @@ -384,7 +385,7 @@ void getSchemaTree( final Context ctx ) { ArrayList tableTree = new ArrayList<>(); ArrayList viewTree = new ArrayList<>(); ArrayList collectionTree = new ArrayList<>(); - List tables = catalog.getRelSnapshot( schema.id ).getTables( null ); + List tables = catalog.getSnapshot().getRelSnapshot( schema.id ).getTables( null ); for ( LogicalTable table : tables ) { String icon = "fa fa-table"; if ( table.entityType == EntityType.SOURCE ) { @@ -398,7 +399,7 @@ void getSchemaTree( final Context ctx ) { SidebarElement tableElement = new SidebarElement( schema.name + "." + table.name, table.name, schema.namespaceType, request.routerLinkRoot, icon ); if ( request.depth > 2 ) { - List columns = catalog.getRelSnapshot( table.namespaceId ).getColumns( table.id ); + List columns = catalog.getSnapshot().getRelSnapshot( table.namespaceId ).getColumns( table.id ); for ( LogicalColumn column : columns ) { tableElement.addChild( new SidebarElement( schema.name + "." + table.name + "." 
+ column.name, column.name, schema.namespaceType, request.routerLinkRoot, icon ).setCssClass( "sidebarColumn" ) ); } @@ -471,7 +472,7 @@ void getTables( final Context ctx ) { } } - List tables = catalog.getRelSnapshot( schemaId ).getTables( null ); + List tables = catalog.getSnapshot().getRelSnapshot( schemaId ).getTables( null ); ArrayList result = new ArrayList<>(); for ( LogicalTable t : tables ) { result.add( new DbTable( t.name, t.getNamespaceName(), t.modifiable, t.entityType ) ); @@ -657,7 +658,7 @@ void insertRow( final Context ctx ) { StringJoiner columns = new StringJoiner( ",", "(", ")" ); StringJoiner values = new StringJoiner( ",", "(", ")" ); - List logicalColumns = catalog.getRelSnapshot( catalog.getLogicalEntity( tableId ).namespaceId ).getColumns( new org.polypheny.db.catalog.logistic.Pattern( split[1] ), null ); + List logicalColumns = catalog.getSnapshot().getRelSnapshot( catalog.getLogicalEntity( tableId ).namespaceId ).getColumns( new org.polypheny.db.catalog.logistic.Pattern( split[1] ), null ); try { int i = 0; for ( LogicalColumn logicalColumn : logicalColumns ) { @@ -949,9 +950,9 @@ private String computeWherePK( final String tableName, final String columnName, Map catalogColumns = getCatalogColumns( tableName, columnName ); LogicalTable catalogTable; catalogTable = catalog.getLogicalEntity( tableName ).unwrap( LogicalTable.class ); - CatalogPrimaryKey pk = catalog.getRelSnapshot( catalogTable.namespaceId ).getPrimaryKey( catalogTable.primaryKey ); + CatalogPrimaryKey pk = catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getPrimaryKey( catalogTable.primaryKey ); for ( long colId : pk.columnIds ) { - String colName = catalog.getRelSnapshot( catalogTable.namespaceId ).getColumn( colId ).name; + String colName = catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getColumn( colId ).name; String condition; if ( filter.containsKey( colName ) ) { String val = filter.get( colName ); @@ -1023,7 +1024,7 @@ void updateRow( final Context ctx ) throws ServletException, IOException { LogicalNamespace namespace = catalog.getSnapshot().getNamespace( split[0] ); - List logicalColumns = catalog.getRelSnapshot( namespace.id ).getColumns( new org.polypheny.db.catalog.logistic.Pattern( split[1] ), null ); + List logicalColumns = catalog.getSnapshot().getRelSnapshot( namespace.id ).getColumns( new org.polypheny.db.catalog.logistic.Pattern( split[1] ), null ); int i = 0; for ( LogicalColumn logicalColumn : logicalColumns ) { @@ -1121,15 +1122,15 @@ void getColumns( final Context ctx ) { try { LogicalNamespace namespace = catalog.getSnapshot().getNamespace( t[0] ); - LogicalTable catalogTable = catalog.getRelSnapshot( namespace.id ).getTable( t[1] ); + LogicalTable catalogTable = catalog.getSnapshot().getRelSnapshot( namespace.id ).getTable( t[1] ); ArrayList primaryColumns; if ( catalogTable.primaryKey != null ) { - CatalogPrimaryKey primaryKey = catalog.getRelSnapshot( namespace.id ).getPrimaryKey( catalogTable.primaryKey ); + CatalogPrimaryKey primaryKey = catalog.getSnapshot().getRelSnapshot( namespace.id ).getPrimaryKey( catalogTable.primaryKey ); primaryColumns = new ArrayList<>( primaryKey.getColumnNames() ); } else { primaryColumns = new ArrayList<>(); } - for ( LogicalColumn logicalColumn : catalog.getRelSnapshot( namespace.id ).getColumns( catalogTable.id ) ) { + for ( LogicalColumn logicalColumn : catalog.getSnapshot().getRelSnapshot( namespace.id ).getColumns( catalogTable.id ) ) { String defaultValue = logicalColumn.defaultValue == null ? 
null : logicalColumn.defaultValue.value; String collectionsType = logicalColumn.collectionsType == null ? "" : logicalColumn.collectionsType.getName(); cols.add( @@ -1167,7 +1168,7 @@ void getDataSourceColumns( final Context ctx ) throws UnknownTableException, Unk UIRequest request = ctx.bodyAsClass( UIRequest.class ); LogicalNamespace namespace = catalog.getSnapshot().getNamespace( request.getSchemaName() ); - LogicalTable catalogTable = catalog.getRelSnapshot( namespace.id ).getTable( request.getTableName() ); + LogicalTable catalogTable = catalog.getSnapshot().getRelSnapshot( namespace.id ).getTable( request.getTableName() ); if ( catalogTable.entityType == EntityType.VIEW ) { ImmutableMap> underlyingTable = ((CatalogView) catalogTable).getUnderlyingTables(); @@ -1190,17 +1191,17 @@ void getDataSourceColumns( final Context ctx ) throws UnknownTableException, Unk } ctx.json( new Result( columns.toArray( new DbColumn[0] ), null ).setType( ResultType.VIEW ) ); } else { - List allocs = catalog.getAllocRel( catalogTable.namespaceId ).getAllocationsFromLogical( catalogTable.id ); - if ( catalog.getAllocRel( catalogTable.namespaceId ).getAllocationsFromLogical( catalogTable.id ).size() != 1 ) { + List allocs = catalog.getSnapshot().getAllocSnapshot().getAllocationsFromLogical( catalogTable.id ); + if ( catalog.getSnapshot().getAllocSnapshot().getAllocationsFromLogical( catalogTable.id ).size() != 1 ) { throw new RuntimeException( "The table has an unexpected number of placements!" ); } long adapterId = allocs.get( 0 ).adapterId; - CatalogPrimaryKey primaryKey = catalog.getRelSnapshot( catalogTable.namespaceId ).getPrimaryKey( catalogTable.primaryKey ); + CatalogPrimaryKey primaryKey = catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getPrimaryKey( catalogTable.primaryKey ); List pkColumnNames = primaryKey.getColumnNames(); List columns = new ArrayList<>(); - for ( CatalogColumnPlacement ccp : catalog.getAllocRel( catalogTable.namespaceId ).getColumnPlacementsOnAdapterPerTable( adapterId, catalogTable.id ) ) { - LogicalColumn col = catalog.getRelSnapshot( namespace.id ).getColumn( ccp.columnId ); + for ( CatalogColumnPlacement ccp : catalog.getSnapshot().getAllocSnapshot().getColumnPlacementsOnAdapterPerTable( adapterId, catalogTable.id ) ) { + LogicalColumn col = catalog.getSnapshot().getRelSnapshot( namespace.id ).getColumn( ccp.columnId ); columns.add( new DbColumn( col.name, col.type.getName(), @@ -1226,8 +1227,8 @@ void getAvailableSourceColumns( final Context ctx ) throws UnknownTableException UIRequest request = ctx.bodyAsClass( UIRequest.class ); LogicalNamespace namespace = catalog.getSnapshot().getNamespace( request.getSchemaName() ); - LogicalTable table = catalog.getRelSnapshot( namespace.id ).getTable( request.getTableName() ); - ImmutableMap> placements = catalog.getAllocRel( table.namespaceId ).getColumnPlacementsByAdapter( table.id ); + LogicalTable table = catalog.getSnapshot().getRelSnapshot( namespace.id ).getTable( request.getTableName() ); + ImmutableMap> placements = catalog.getSnapshot().getAllocSnapshot().getColumnPlacementsByAdapter( table.id ); Set adapterIds = placements.keySet(); if ( adapterIds.size() > 1 ) { log.warn( String.format( "The number of sources of an entity should not be > 1 (%s.%s)", request.getSchemaName(), request.getTableName() ) ); @@ -1300,7 +1301,7 @@ void getMaterializedInfo( final Context ctx ) throws UnknownTableException, Unkn private LogicalTable getLogicalTable( String schema, String table ) throws UnknownTableException { 
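// The helper below centralizes the schema.table resolution that the preceding hunks repeat inline:
// resolve the namespace through the snapshot, then fetch the table from the namespace-scoped
// relational snapshot; getTable( ... ) signals a missing table with UnknownTableException.
// Hedged aside, not part of the patch: placement data follows a parallel route through the
// namespace-independent allocation snapshot, e.g.
//     catalog.getSnapshot().getAllocSnapshot().getAllocationsFromLogical( table.id );
// as getDataSourceColumns() above does, so one request can combine both scopes consistently.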
LogicalNamespace namespace = catalog.getSnapshot().getNamespace( schema ); - return catalog.getRelSnapshot( namespace.id ).getTable( table ); + return catalog.getSnapshot().getRelSnapshot( namespace.id ).getTable( table ); } @@ -1604,7 +1605,7 @@ void getConstraints( final Context ctx ) { // get primary key if ( catalogTable.primaryKey != null ) { - CatalogPrimaryKey primaryKey = catalog.getRelSnapshot( catalogTable.namespaceId ).getPrimaryKey( catalogTable.primaryKey ); + CatalogPrimaryKey primaryKey = catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getPrimaryKey( catalogTable.primaryKey ); for ( String columnName : primaryKey.getColumnNames() ) { if ( !temp.containsKey( "" ) ) { temp.put( "", new ArrayList<>() ); @@ -1618,7 +1619,7 @@ void getConstraints( final Context ctx ) { // get unique constraints. temp.clear(); - List constraints = catalog.getRelSnapshot( catalogTable.namespaceId ).getConstraints( catalogTable.id ); + List constraints = catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getConstraints( catalogTable.id ); for ( CatalogConstraint catalogConstraint : constraints ) { if ( catalogConstraint.type == ConstraintType.UNIQUE ) { temp.put( catalogConstraint.name, new ArrayList<>( catalogConstraint.key.getColumnNames() ) ); @@ -1759,7 +1760,7 @@ void getIndexes( final Context ctx ) { Result result; try { LogicalTable catalogTable = getLogicalTable( request.schema, request.table ); - List catalogIndexes = catalog.getRelSnapshot( catalogTable.id ).getIndexes( catalogTable.id, false ); + List catalogIndexes = catalog.getSnapshot().getRelSnapshot( catalogTable.id ).getIndexes( catalogTable.id, false ); DbColumn[] header = { new DbColumn( "Name" ), @@ -1894,9 +1895,9 @@ void getUnderlyingTable( final Context ctx ) throws UnknownTableException { for ( Entry> entry : underlyingTableOriginal.entrySet() ) { List columns = new ArrayList<>(); for ( Long ids : entry.getValue() ) { - columns.add( catalog.getRelSnapshot( catalogTable.namespaceId ).getColumn( ids ).name ); + columns.add( catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getColumn( ids ).name ); } - underlyingTable.put( catalog.getRelSnapshot( catalogTable.namespaceId ).getTable( entry.getKey() ).name, columns ); + underlyingTable.put( catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getTable( entry.getKey() ).name, columns ); } ctx.json( new UnderlyingTables( underlyingTable ) ); } else { @@ -1917,24 +1918,25 @@ void getPlacements( final Context ctx ) { private Placement getPlacements( final Index index ) { String schemaName = index.getSchema(); String tableName = index.getTable(); + Snapshot snapshot = Catalog.getInstance().getSnapshot(); try { LogicalTable table = getLogicalTable( schemaName, tableName ); - Placement p = new Placement( table.partitionProperty.isPartitioned, catalog.getAllocRel( table.namespaceId ).getPartitionGroupNames( table.id ), table.entityType ); + Placement p = new Placement( table.partitionProperty.isPartitioned, snapshot.getAllocSnapshot().getPartitionGroupNames( table.id ), table.entityType ); if ( table.entityType == EntityType.VIEW ) { return p; } else { long pkid = table.primaryKey; - List pkColumnIds = Catalog.getInstance().getRelSnapshot( table.namespaceId ).getPrimaryKey( pkid ).columnIds; - LogicalColumn pkColumn = Catalog.getInstance().getRelSnapshot( table.namespaceId ).getColumn( pkColumnIds.get( 0 ) ); - List pkPlacements = catalog.getAllocRel( table.namespaceId ).getColumnPlacements( pkColumn.id ); + List pkColumnIds = 
snapshot.getRelSnapshot( table.namespaceId ).getPrimaryKey( pkid ).columnIds; + LogicalColumn pkColumn = snapshot.getRelSnapshot( table.namespaceId ).getColumn( pkColumnIds.get( 0 ) ); + List pkPlacements = snapshot.getAllocSnapshot().getColumnPlacements( pkColumn.id ); for ( CatalogColumnPlacement placement : pkPlacements ) { Adapter adapter = AdapterManager.getInstance().getAdapter( placement.adapterId ); p.addAdapter( new RelationalStore( adapter.getUniqueName(), adapter.getUniqueName(), - catalog.getAllocRel( table.namespaceId ).getColumnPlacementsOnAdapterPerTable( adapter.getAdapterId(), table.id ), - catalog.getAllocRel( table.namespaceId ).getPartitionGroupsIndexOnDataPlacement( placement.adapterId, placement.tableId ), + snapshot.getAllocSnapshot().getColumnPlacementsOnAdapterPerTable( adapter.getAdapterId(), table.id ), + snapshot.getAllocSnapshot().getPartitionGroupsIndexOnDataPlacement( placement.adapterId, placement.tableId ), table.partitionProperty.numPartitionGroups, table.partitionProperty.partitionType ) ); } @@ -2061,7 +2063,7 @@ void getPartitionFunctionModel( final Context ctx ) throws UnknownColumnExceptio LogicalNamespace namespace = Catalog.getInstance().getSnapshot().getNamespace( request.schemaName ); - partitionColumn = Catalog.getInstance().getRelSnapshot( namespace.id ).getColumn( request.tableName, request.column ); + partitionColumn = Catalog.getInstance().getSnapshot().getRelSnapshot( namespace.id ).getColumn( request.tableName, request.column ); if ( !partitionManager.supportsColumnOfType( partitionColumn.type ) ) { ctx.json( new PartitionFunctionModel( "The partition function " + request.method + " does not support columns of type " + partitionColumn.type ) ); @@ -2515,12 +2517,12 @@ void getUml( final Context ctx ) { List catalogEntities = Catalog.getInstance().getSnapshot().getNamespaces( new org.polypheny.db.catalog.logistic.Pattern( request.schema ) ) .stream() .filter( s -> s.namespaceType == NamespaceType.RELATIONAL ) - .flatMap( s -> catalog.getRelSnapshot( s.id ).getTables( null ).stream() ).collect( Collectors.toList() ); + .flatMap( s -> catalog.getSnapshot().getRelSnapshot( s.id ).getTables( null ).stream() ).collect( Collectors.toList() ); for ( LogicalTable catalogTable : catalogEntities ) { if ( catalogTable.entityType == EntityType.ENTITY || catalogTable.entityType == EntityType.SOURCE ) { // get foreign keys - List foreignKeys = catalog.getRelSnapshot( catalogTable.namespaceId ).getForeignKeys( catalogTable.id ); + List foreignKeys = catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getForeignKeys( catalogTable.id ); for ( CatalogForeignKey catalogForeignKey : foreignKeys ) { for ( int i = 0; i < catalogForeignKey.getReferencedKeyColumnNames().size(); i++ ) { fKeys.add( ForeignKey.builder() @@ -2545,14 +2547,14 @@ void getUml( final Context ctx ) { // get primary key with its columns if ( catalogTable.primaryKey != null ) { - CatalogPrimaryKey catalogPrimaryKey = catalog.getRelSnapshot( catalogTable.namespaceId ).getPrimaryKey( catalogTable.primaryKey ); + CatalogPrimaryKey catalogPrimaryKey = catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getPrimaryKey( catalogTable.primaryKey ); for ( String columnName : catalogPrimaryKey.getColumnNames() ) { table.addPrimaryKeyField( columnName ); } } // get unique constraints - List catalogConstraints = catalog.getRelSnapshot( catalogTable.namespaceId ).getConstraints( catalogTable.id ); + List catalogConstraints = catalog.getSnapshot().getRelSnapshot( 
catalogTable.namespaceId ).getConstraints( catalogTable.id ); for ( CatalogConstraint catalogConstraint : catalogConstraints ) { if ( catalogConstraint.type == ConstraintType.UNIQUE ) { // TODO: unique constraints can be over multiple columns. @@ -2566,7 +2568,7 @@ void getUml( final Context ctx ) { } // get unique indexes - List catalogIndexes = catalog.getRelSnapshot( catalogTable.namespaceId ).getIndexes( catalogTable.id, true ); + List catalogIndexes = catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getIndexes( catalogTable.id, true ); for ( CatalogIndex catalogIndex : catalogIndexes ) { // TODO: unique indexes can be over multiple columns. if ( catalogIndex.key.getColumnNames().size() == 1 && @@ -3198,7 +3200,7 @@ public static Result executeSqlSelect( final Statement statement, final UIReques try { LogicalNamespace namespace = crud.catalog.getSnapshot().getNamespace( t[0] ); - catalogTable = crud.catalog.getRelSnapshot( namespace.id ).getTable( t[1] ); + catalogTable = crud.catalog.getSnapshot().getRelSnapshot( namespace.id ).getTable( t[1] ); entityType = catalogTable.entityType; } catch ( UnknownTableException e ) { log.error( "Caught exception", e ); @@ -3232,7 +3234,7 @@ public static Result executeSqlSelect( final Statement statement, final UIReques // Get column default values if ( catalogTable != null ) { try { - LogicalColumn logicalColumn = crud.catalog.getRelSnapshot( catalogTable.namespaceId ).getColumn( catalogTable.id, columnName ); + LogicalColumn logicalColumn = crud.catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getColumn( catalogTable.id, columnName ); if ( logicalColumn != null ) { if ( logicalColumn.defaultValue != null ) { dbCol.defaultValue = logicalColumn.defaultValue.value; @@ -3606,7 +3608,7 @@ private Map getCatalogColumns( String schemaName, String Map dataTypes = new HashMap<>(); try { LogicalTable table = getLogicalTable( schemaName, tableName ); - List logicalColumns = catalog.getRelSnapshot( table.namespaceId ).getColumns( table.id ); + List logicalColumns = catalog.getSnapshot().getRelSnapshot( table.namespaceId ).getColumns( table.id ); for ( LogicalColumn logicalColumn : logicalColumns ) { dataTypes.put( logicalColumn.name, logicalColumn ); } diff --git a/webui/src/main/java/org/polypheny/db/webui/QueryPlanBuilder.java b/webui/src/main/java/org/polypheny/db/webui/QueryPlanBuilder.java index b9388b16ee..f4610a946d 100644 --- a/webui/src/main/java/org/polypheny/db/webui/QueryPlanBuilder.java +++ b/webui/src/main/java/org/polypheny/db/webui/QueryPlanBuilder.java @@ -27,7 +27,6 @@ import org.polypheny.db.rex.RexNode; import org.polypheny.db.tools.AlgBuilder; import org.polypheny.db.transaction.Statement; -import org.polypheny.db.util.Util; import org.polypheny.db.webui.models.SortDirection; import org.polypheny.db.webui.models.SortState; import org.polypheny.db.webui.models.UIAlgNode; @@ -85,7 +84,7 @@ private static AlgBuilder buildStep( AlgBuilder builder, final UIAlgNode node ) } switch ( node.type ) { case "Scan": - return builder.scan( Util.tokenize( node.tableName, "." 
) ).as( node.tableName ); + return builder.scan( node.tableName ).as( node.tableName ); case "Join": return builder.join( node.join, builder.call( getOperator( node.operator ), builder.field( node.inputCount, field1[0], field1[1] ), builder.field( node.inputCount, field2[0], field2[1] ) ) ); case "Filter": diff --git a/webui/src/main/java/org/polypheny/db/webui/WebSocket.java b/webui/src/main/java/org/polypheny/db/webui/WebSocket.java index 58ed27dfba..bfd71d6ba4 100644 --- a/webui/src/main/java/org/polypheny/db/webui/WebSocket.java +++ b/webui/src/main/java/org/polypheny/db/webui/WebSocket.java @@ -146,7 +146,7 @@ public void onMessage( final WsMessageContext ctx ) { } else {//TableRequest, is equal to UIRequest UIRequest uiRequest = ctx.messageAsClass( UIRequest.class ); try { - LogicalNamespace namespace = Catalog.getInstance().getNamespace( uiRequest.getSchemaName() ); + LogicalNamespace namespace = Catalog.getInstance().getSnapshot().getNamespace( uiRequest.getSchemaName() ); switch ( namespace.namespaceType ) { case RELATIONAL: result = crud.getTable( uiRequest ); diff --git a/webui/src/main/java/org/polypheny/db/webui/crud/LanguageCrud.java b/webui/src/main/java/org/polypheny/db/webui/crud/LanguageCrud.java index 49d96a51ac..ff65eef988 100644 --- a/webui/src/main/java/org/polypheny/db/webui/crud/LanguageCrud.java +++ b/webui/src/main/java/org/polypheny/db/webui/crud/LanguageCrud.java @@ -192,7 +192,7 @@ public static Result getResult( QueryLanguage language, Statement statement, Que String[] t = request.tableId.split( "\\." ); LogicalNamespace namespace = catalog.getSnapshot().getNamespace( t[0] ); try { - catalogTable = catalog.getRelSnapshot( namespace.id ).getTable( t[1] ); + catalogTable = catalog.getSnapshot().getRelSnapshot( namespace.id ).getTable( t[1] ); } catch ( UnknownTableException e ) { log.error( "Caught exception", e ); } @@ -322,7 +322,7 @@ private Placement getPlacements( final Index index ) { if ( namespaces.size() != 1 ) { throw new RuntimeException(); } - List graphs = catalog.getGraphSnapshot( namespaces.get( 0 ).id ).getGraphs( new Pattern( graphName ) ); + List graphs = catalog.getSnapshot().getGraphSnapshot( namespaces.get( 0 ).id ).getGraphs( new Pattern( graphName ) ); if ( graphs.size() != 1 ) { log.error( "The requested graph does not exist." ); return new Placement( new RuntimeException( "The requested graph does not exist." 
) ); @@ -368,7 +368,7 @@ public void getCollectionPlacements( Context context ) { Catalog catalog = Catalog.getInstance(); long namespaceId; namespaceId = catalog.getSnapshot().getNamespace( namespace ).id; - List collections = catalog.getDocSnapshot( namespaceId ).getCollections( new Pattern( collectionName ) ); + List collections = catalog.getSnapshot().getDocSnapshot( namespaceId ).getCollections( new Pattern( collectionName ) ); if ( collections.size() != 1 ) { context.json( new Placement( new UnknownCollectionException( 0 ) ) ); diff --git a/webui/src/main/java/org/polypheny/db/webui/crud/StatisticCrud.java b/webui/src/main/java/org/polypheny/db/webui/crud/StatisticCrud.java index 4357c4a0ef..21d006346d 100644 --- a/webui/src/main/java/org/polypheny/db/webui/crud/StatisticCrud.java +++ b/webui/src/main/java/org/polypheny/db/webui/crud/StatisticCrud.java @@ -29,7 +29,6 @@ import org.apache.commons.lang3.math.NumberUtils; import org.polypheny.db.StatisticsManager; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.exceptions.UnknownSchemaException; import org.polypheny.db.catalog.exceptions.UnknownTableException; import org.polypheny.db.config.Config; import org.polypheny.db.config.Config.ConfigListener; @@ -89,8 +88,8 @@ public void getTableStatistics( Context ctx ) { long tableId; long schemaId; try { - schemaId = Catalog.getInstance().getNamespace( request.tableId.split( "\\." )[0] ).id; - tableId = Catalog.getInstance().getLogicalRel( schemaId ).getTable( request.tableId.split( "\\." )[1] ).id; + schemaId = Catalog.getInstance().getSnapshot().getNamespace( request.tableId.split( "\\." )[0] ).id; + tableId = Catalog.getInstance().getSnapshot().getRelSnapshot( schemaId ).getTable( request.tableId.split( "\\." )[1] ).id; ctx.json( statisticsManager.getTableStatistic( schemaId, tableId ) ); } catch ( UnknownTableException e ) { diff --git a/webui/src/main/java/org/polypheny/db/webui/models/requests/BatchUpdateRequest.java b/webui/src/main/java/org/polypheny/db/webui/models/requests/BatchUpdateRequest.java index 56570a990a..5a5c699303 100644 --- a/webui/src/main/java/org/polypheny/db/webui/models/requests/BatchUpdateRequest.java +++ b/webui/src/main/java/org/polypheny/db/webui/models/requests/BatchUpdateRequest.java @@ -66,8 +66,8 @@ public String getQuery( String tableId, Statement statement, HttpServletRequest LogicalColumn logicalColumn; try { LogicalNamespace namespace = catalog.getSnapshot().getNamespace( split[0] ); - LogicalTable table = catalog.getRelSnapshot( namespace.id ).getTable( split[1] ); - logicalColumn = catalog.getRelSnapshot( table.namespaceId ).getColumn( table.id, entry.getKey() ); + LogicalTable table = catalog.getSnapshot().getRelSnapshot( namespace.id ).getTable( split[1] ); + logicalColumn = catalog.getSnapshot().getRelSnapshot( table.namespaceId ).getColumn( table.id, entry.getKey() ); } catch ( UnknownColumnException | UnknownTableException e ) { log.error( "Could not determine column type", e ); return null; From da34abf12e1b9caaf3bd98d2a74552aea4422cce Mon Sep 17 00:00:00 2001 From: datomo Date: Thu, 9 Mar 2023 23:25:58 +0100 Subject: [PATCH 043/436] added methods for baseSnapshot and LogicalRelSnapshot --- .../org/polypheny/db/adapter/Adapter.java | 2 +- .../org/polypheny/db/catalog/Catalog.java | 6 + .../catalog/snapshot/LogicalDocSnapshot.java | 2 + .../catalog/snapshot/LogicalRelSnapshot.java | 11 +- .../db/catalog/snapshot/Snapshot.java | 46 +----- .../snapshot/impl/LogicalDocSnapshotImpl.java | 10 +- 
.../impl/LogicalGraphSnapshotImpl.java | 4 +- .../snapshot/impl/LogicalRelSnapshotImpl.java | 67 +++++---- .../snapshot/impl/SnapshotBuilder.java | 16 +- .../catalog/snapshot/impl/SnapshotImpl.java | 138 +++++++----------- .../java/org/polypheny/db/schema/Schemas.java | 2 +- .../org/polypheny/db/ddl/DdlManagerImpl.java | 4 +- .../statistics/StatisticColumn.java | 2 +- .../languages/mql2alg/MqlToAlgConverter.java | 6 +- .../org/polypheny/db/catalog/PolyCatalog.java | 6 +- .../polypheny/db/sql/language/SqlUtil.java | 2 +- 16 files changed, 134 insertions(+), 190 deletions(-) diff --git a/core/src/main/java/org/polypheny/db/adapter/Adapter.java b/core/src/main/java/org/polypheny/db/adapter/Adapter.java index 44830a311e..e832fdfcc8 100644 --- a/core/src/main/java/org/polypheny/db/adapter/Adapter.java +++ b/core/src/main/java/org/polypheny/db/adapter/Adapter.java @@ -486,7 +486,7 @@ public void addInformationPhysicalNames() { Snapshot snapshot = Catalog.getInstance().getSnapshot(); group.setRefreshFunction( () -> { physicalColumnNames.reset(); - List> physicalsOnAdapter = snapshot.getPhysicalsOnAdapter( adapterId ); + List> physicalsOnAdapter = snapshot.getPhysicalSnapshot().getPhysicalsOnAdapter( adapterId ); for ( PhysicalEntity entity : physicalsOnAdapter ) { if ( entity.namespaceType != NamespaceType.RELATIONAL ) { diff --git a/core/src/main/java/org/polypheny/db/catalog/Catalog.java b/core/src/main/java/org/polypheny/db/catalog/Catalog.java index fd42c64263..c4ccc635c4 100644 --- a/core/src/main/java/org/polypheny/db/catalog/Catalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/Catalog.java @@ -241,4 +241,10 @@ protected final boolean isValidIdentifier( final String str ) { public abstract Snapshot getSnapshot(); + public abstract Map getUsers(); + + public abstract Map getAdapters(); + + public abstract Map getInterfaces(); + } diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalDocSnapshot.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalDocSnapshot.java index 7459c54260..96a8573fd6 100644 --- a/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalDocSnapshot.java +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalDocSnapshot.java @@ -50,4 +50,6 @@ public interface LogicalDocSnapshot { List getLogicalCollections( long namespaceId, Pattern name ); + LogicalCollection getCollection( String collection ); + } diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalRelSnapshot.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalRelSnapshot.java index 4ad92dfe84..4e9574d303 100644 --- a/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalRelSnapshot.java +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalRelSnapshot.java @@ -24,7 +24,6 @@ import org.polypheny.db.catalog.entity.CatalogIndex; import org.polypheny.db.catalog.entity.CatalogKey; import org.polypheny.db.catalog.entity.CatalogPrimaryKey; -import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.exceptions.UnknownColumnException; @@ -227,6 +226,8 @@ public interface LogicalRelSnapshot { */ public abstract CatalogForeignKey getForeignKey( long tableId, String foreignKeyName ) throws UnknownForeignKeyException; + public abstract List getIndexes(); + /** * Gets a collection of index for the given key. 
* @@ -278,21 +279,13 @@ public interface LogicalRelSnapshot { */ public abstract CatalogIndex getIndex( long indexId ); - //// LOGICAL ENTITIES - @Deprecated - LogicalTable getLogicalTable( List names ); - LogicalTable getLogicalTable( long id ); LogicalTable getLogicalTable( String name ); - List getLogicalTables( long namespaceId, Pattern name ); - LogicalColumn getLogicalColumn( long id ); - LogicalNamespace getNamespace( long id ); - boolean checkIfExistsEntity( String newName ); } diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/Snapshot.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/Snapshot.java index b18804a9c1..619986d952 100644 --- a/core/src/main/java/org/polypheny/db/catalog/snapshot/Snapshot.java +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/Snapshot.java @@ -17,6 +17,7 @@ package org.polypheny.db.catalog.snapshot; import java.util.List; +import javax.annotation.Nullable; import lombok.NonNull; import org.apache.calcite.linq4j.tree.Expression; import org.apache.calcite.linq4j.tree.Expressions; @@ -30,10 +31,7 @@ import org.polypheny.db.catalog.entity.CatalogQueryInterface; import org.polypheny.db.catalog.entity.CatalogUser; import org.polypheny.db.catalog.entity.LogicalNamespace; -import org.polypheny.db.catalog.entity.allocation.AllocationEntity; -import org.polypheny.db.catalog.entity.logical.LogicalEntity; import org.polypheny.db.catalog.entity.logical.LogicalTable; -import org.polypheny.db.catalog.entity.physical.PhysicalEntity; import org.polypheny.db.catalog.exceptions.UnknownAdapterException; import org.polypheny.db.catalog.exceptions.UnknownQueryInterfaceException; import org.polypheny.db.catalog.exceptions.UnknownUserException; @@ -62,7 +60,7 @@ default Expression getSnapshotExpression( long id ) { * @param name Pattern for the schema name. null returns all. * @return List of schemas which fit to the specified filter. If there is no schema which meets the criteria, an empty list is returned. */ - public abstract @NonNull List getNamespaces( Pattern name ); + public abstract @NonNull List getNamespaces( @Nullable Pattern name ); /** * Returns the schema with the specified id. @@ -161,14 +159,6 @@ default Expression getSnapshotExpression( long id ) { public abstract CatalogQueryInterface getQueryInterface( long id ); - public abstract List> getAllocationsOnAdapter( long id ); - - - public abstract List> getPhysicalsOnAdapter( long adapterId ); - - - public abstract List getIndexes(); - public abstract List getTablesForPeriodicProcessing(); @@ -176,14 +166,6 @@ default Expression getSnapshotExpression( long id ) { CatalogEntity getEntity( long id ); - CatalogEntity getEntity( long namespaceId, String name ); - - CatalogEntity getEntity( long namespaceId, Pattern name ); - - @Deprecated - CatalogEntity getEntity( List names ); - - //// OTHERS @Override @@ -197,28 +179,6 @@ default List getOperatorList() { } - /** - * Checks if there is a table with the specified name in the specified schema. - * - * @param entityName The name to check for - * @return true if there is a table with this name, false if not. - */ - public abstract boolean checkIfExistsEntity( String entityName ); - - /** - * Checks if there is a table with the specified id. - * - * @param tableId id of the table - * @return true if there is a table with this id, false if not. 
- */ - public abstract boolean checkIfExistsEntity( long tableId ); - - LogicalNamespace getLogicalNamespace(); - - - LogicalEntity getEntity( String name ); - - public abstract LogicalDocSnapshot getDocSnapshot( long namespaceId ); public abstract LogicalGraphSnapshot getGraphSnapshot( long namespaceId ); @@ -232,4 +192,6 @@ default List getOperatorList() { public abstract AllocSnapshot getAllocSnapshot(); + List getIndexes(); + } diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalDocSnapshotImpl.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalDocSnapshotImpl.java index 8164959d75..04a5d23430 100644 --- a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalDocSnapshotImpl.java +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalDocSnapshotImpl.java @@ -17,14 +17,14 @@ package org.polypheny.db.catalog.snapshot.impl; import java.util.List; -import org.polypheny.db.catalog.catalogs.LogicalCatalog; +import org.polypheny.db.catalog.catalogs.LogicalDocumentCatalog; import org.polypheny.db.catalog.entity.logical.LogicalCollection; import org.polypheny.db.catalog.logistic.Pattern; import org.polypheny.db.catalog.snapshot.LogicalDocSnapshot; public class LogicalDocSnapshotImpl implements LogicalDocSnapshot { - public LogicalDocSnapshotImpl( LogicalCatalog value ) { + public LogicalDocSnapshotImpl( LogicalDocumentCatalog value ) { } @@ -64,4 +64,10 @@ public List getLogicalCollections( long namespaceId, Pattern return null; } + + @Override + public LogicalCollection getCollection( String collection ) { + return null; + } + } diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalGraphSnapshotImpl.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalGraphSnapshotImpl.java index 7d6d5f767f..d14f46e765 100644 --- a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalGraphSnapshotImpl.java +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalGraphSnapshotImpl.java @@ -17,14 +17,14 @@ package org.polypheny.db.catalog.snapshot.impl; import java.util.List; -import org.polypheny.db.catalog.catalogs.LogicalCatalog; +import org.polypheny.db.catalog.catalogs.LogicalGraphCatalog; import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.catalog.logistic.Pattern; import org.polypheny.db.catalog.snapshot.LogicalGraphSnapshot; public class LogicalGraphSnapshotImpl implements LogicalGraphSnapshot { - public LogicalGraphSnapshotImpl( LogicalCatalog value ) { + public LogicalGraphSnapshotImpl( LogicalGraphCatalog value ) { } diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalRelSnapshotImpl.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalRelSnapshotImpl.java index 8d4420053d..7d7a04c4f3 100644 --- a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalRelSnapshotImpl.java +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalRelSnapshotImpl.java @@ -38,7 +38,6 @@ import org.polypheny.db.catalog.exceptions.UnknownColumnException; import org.polypheny.db.catalog.exceptions.UnknownConstraintException; import org.polypheny.db.catalog.exceptions.UnknownForeignKeyException; -import org.polypheny.db.catalog.exceptions.UnknownIndexException; import org.polypheny.db.catalog.exceptions.UnknownSchemaException; import org.polypheny.db.catalog.exceptions.UnknownTableException; import org.polypheny.db.catalog.logistic.Pattern; @@ -63,7 +62,9 @@ public class 
LogicalRelSnapshotImpl implements LogicalRelSnapshot { ImmutableMap> tableKeys; - ImmutableMap indexes; + ImmutableMap index; + + ImmutableMap> keyToIndexes; ImmutableMap, LogicalColumn> tableColumnIdColumn; @@ -75,9 +76,9 @@ public class LogicalRelSnapshotImpl implements LogicalRelSnapshot { public LogicalRelSnapshotImpl( LogicalRelationalCatalog catalog ) { namespace = catalog.getLogicalNamespace(); tables = ImmutableMap.copyOf( catalog.getTables() ); - tableNames = ImmutableMap.copyOf( tables.entrySet().stream().collect( Collectors.toMap( e -> e.getValue().name, Entry::getValue ) ) ); + tableNames = ImmutableMap.copyOf( tables.entrySet().stream().collect( Collectors.toMap( e -> namespace.caseSensitive ? e.getValue().name : e.getValue().name.toLowerCase(), Entry::getValue ) ) ); columns = ImmutableMap.copyOf( catalog.getColumns() ); - columnNames = ImmutableMap.copyOf( columns.entrySet().stream().collect( Collectors.toMap( e -> e.getValue().name, Entry::getValue ) ) ); + columnNames = ImmutableMap.copyOf( columns.entrySet().stream().collect( Collectors.toMap( e -> namespace.caseSensitive ? e.getValue().name : e.getValue().name.toLowerCase(), Entry::getValue ) ) ); Map> tableChildren = new HashMap<>(); columns.forEach( ( k, v ) -> { @@ -106,7 +107,17 @@ public LogicalRelSnapshotImpl( LogicalRelationalCatalog catalog ) { this.tableColumnNameColumn = ImmutableMap.copyOf( columns.entrySet().stream().collect( Collectors.toMap( c -> Pair.of( tables.get( c.getValue().tableId ).name, c.getValue().name ), Entry::getValue ) ) ); this.tableIdColumnNameColumn = ImmutableMap.copyOf( columns.entrySet().stream().collect( Collectors.toMap( c -> Pair.of( c.getValue().tableId, c.getValue().name ), Entry::getValue ) ) ); - this.indexes = catalog.getIndexes(); + this.index = catalog.getIndexes(); + + Map> keyToIndexes = new HashMap<>(); + this.index.forEach( ( k, v ) -> { + if ( keyToIndexes.containsKey( v.keyId ) ) { + keyToIndexes.get( v.keyId ).add( v ); + } else { + keyToIndexes.put( v.keyId, new ArrayList<>( List.of( v ) ) ); + } + } ); + this.keyToIndexes = ImmutableMap.copyOf( keyToIndexes ); } @@ -257,81 +268,69 @@ public CatalogForeignKey getForeignKey( long tableId, String foreignKeyName ) th } + @Override + public List getIndexes() { + return index.values().asList(); + } + + @Override public List getIndexes( CatalogKey key ) { - return indexes.get( key.id ); + return keyToIndexes.getOrDefault( key.id, List.of() ); } @Override public List getForeignKeys( CatalogKey key ) { - return null; + return tableKeys.get( key.tableId ).stream().filter( k -> k instanceof CatalogForeignKey ).map( k -> (CatalogForeignKey) k ).collect( Collectors.toList() ); } @Override public List getIndexes( long tableId, boolean onlyUnique ) { - return null; + return tableKeys.get( tableId ).stream().flatMap( k -> getIndexes( k ).stream() ).filter( i -> !onlyUnique || i.unique ).collect( Collectors.toList() ); } @Override - public CatalogIndex getIndex( long tableId, String indexName ) throws UnknownIndexException { - return null; + public CatalogIndex getIndex( long tableId, String indexName ) { + return getIndex().values().stream().filter( i -> i.getKey().tableId == tableId && i.name.equals( indexName ) ).findFirst().orElse( null ); } @Override public boolean checkIfExistsIndex( long tableId, String indexName ) { - return false; + return getIndex( tableId, indexName ) != null; } @Override public CatalogIndex getIndex( long indexId ) { - return null; - } - - - @Override - public LogicalTable getLogicalTable( List names ) { - return null; + return index.get( indexId ); } @Override public LogicalTable getLogicalTable( long id ) { - return null; + return tables.get( id ); } @Override public LogicalTable 
getLogicalTable( String name ) { - return null; - } - - - @Override - public List getLogicalTables( long namespaceId, Pattern name ) { - return null; + return tableNames.get( namespace.caseSensitive ? name : name.toLowerCase() ); } @Override public LogicalColumn getLogicalColumn( long id ) { - return null; - } - - - @Override - public LogicalNamespace getNamespace( long id ) { - return null; + return columns.get( id ); } @Override public boolean checkIfExistsEntity( String newName ) { - return false; + return tableNames.containsKey( namespace.caseSensitive ? newName : newName.toLowerCase() ); } } diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/SnapshotBuilder.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/SnapshotBuilder.java index e63a8eeb71..12eb1897f3 100644 --- a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/SnapshotBuilder.java +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/SnapshotBuilder.java @@ -19,9 +19,14 @@ import java.util.Map; import java.util.Map.Entry; import java.util.stream.Collectors; +import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.catalogs.AllocationCatalog; import org.polypheny.db.catalog.catalogs.LogicalCatalog; +import org.polypheny.db.catalog.catalogs.LogicalDocumentCatalog; +import org.polypheny.db.catalog.catalogs.LogicalGraphCatalog; +import org.polypheny.db.catalog.catalogs.LogicalRelationalCatalog; import org.polypheny.db.catalog.catalogs.PhysicalCatalog; +import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.catalog.snapshot.AllocSnapshot; import org.polypheny.db.catalog.snapshot.LogicalDocSnapshot; @@ -32,15 +37,16 @@ public class SnapshotBuilder { - public static Snapshot createSnapshot( long id, Map logicalCatalogs, Map allocationCatalogs, Map physicalCatalogs ) { + public static Snapshot createSnapshot( long id, Catalog catalog, Map logicalCatalogs, Map allocationCatalogs, Map physicalCatalogs ) { Map rels = buildRelSnapshots( logicalCatalogs ); Map docs = buildDocSnapshots( logicalCatalogs ); Map graphs = buildGraphSnapshots( logicalCatalogs ); AllocSnapshot alloc = buildAlloc( allocationCatalogs ); PhysicalSnapshot physical = buildPhysical( physicalCatalogs ); + Map namespaces = logicalCatalogs.entrySet().stream().collect( Collectors.toMap( Entry::getKey, e -> e.getValue().getLogicalNamespace() ) ); - return new SnapshotImpl( id, rels, docs, graphs, alloc, physical ); + return new SnapshotImpl( id, catalog, namespaces, rels, docs, graphs, alloc, physical ); } @@ -59,7 +65,7 @@ private static Map buildGraphSnapshots( Map e.getValue().getLogicalNamespace().namespaceType == NamespaceType.GRAPH ) - .collect( Collectors.toMap( Entry::getKey, e -> new LogicalGraphSnapshotImpl( e.getValue() ) ) ); + .collect( Collectors.toMap( Entry::getKey, e -> new LogicalGraphSnapshotImpl( (LogicalGraphCatalog) e.getValue() ) ) ); } @@ -68,7 +74,7 @@ private static Map buildDocSnapshots( Map e.getValue().getLogicalNamespace().namespaceType == NamespaceType.DOCUMENT ) - .collect( Collectors.toMap( Entry::getKey, e -> new LogicalDocSnapshotImpl( e.getValue() ) ) ); + .collect( Collectors.toMap( Entry::getKey, e -> new LogicalDocSnapshotImpl( (LogicalDocumentCatalog) e.getValue() ) ) ); } @@ -77,7 +83,7 @@ private static Map buildRelSnapshots( Map e.getValue().getLogicalNamespace().namespaceType == NamespaceType.RELATIONAL ) - .collect( Collectors.toMap( Entry::getKey, e -> new LogicalRelSnapshotImpl( e.getValue() ) ) ); + .collect( Collectors.toMap( Entry::getKey, e -> new LogicalRelSnapshotImpl( 
(LogicalRelationalCatalog) e.getValue() ) ) ); } } diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/SnapshotImpl.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/SnapshotImpl.java index 702db195d9..5235d48ff9 100644 --- a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/SnapshotImpl.java +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/SnapshotImpl.java @@ -19,18 +19,19 @@ import com.google.common.collect.ImmutableMap; import java.util.List; import java.util.Map; +import java.util.stream.Collectors; +import javax.annotation.Nullable; +import lombok.Getter; import lombok.NonNull; import lombok.Value; +import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogAdapter; import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.catalog.entity.CatalogIndex; import org.polypheny.db.catalog.entity.CatalogQueryInterface; import org.polypheny.db.catalog.entity.CatalogUser; import org.polypheny.db.catalog.entity.LogicalNamespace; -import org.polypheny.db.catalog.entity.allocation.AllocationEntity; -import org.polypheny.db.catalog.entity.logical.LogicalEntity; import org.polypheny.db.catalog.entity.logical.LogicalTable; -import org.polypheny.db.catalog.entity.physical.PhysicalEntity; import org.polypheny.db.catalog.exceptions.UnknownAdapterException; import org.polypheny.db.catalog.exceptions.UnknownQueryInterfaceException; import org.polypheny.db.catalog.exceptions.UnknownUserException; @@ -50,120 +51,123 @@ public class SnapshotImpl implements Snapshot { ImmutableMap graphs; AllocSnapshot alloc; PhysicalSnapshot physical; + @Getter long id; + ImmutableMap users; + ImmutableMap userNames; + ImmutableMap interfaces; - public SnapshotImpl( long id, Map relationals, Map documents, Map graphs, AllocSnapshot alloc, PhysicalSnapshot physical ) { + ImmutableMap interfaceNames; + ImmutableMap adapters; + + ImmutableMap adapterNames; + + ImmutableMap namespaces; + + ImmutableMap namespaceNames; + + + public SnapshotImpl( long id, Catalog catalog, Map namespaces, Map relationals, Map documents, Map graphs, AllocSnapshot alloc, PhysicalSnapshot physical ) { this.id = id; this.relationals = ImmutableMap.copyOf( relationals ); this.documents = ImmutableMap.copyOf( documents ); this.graphs = ImmutableMap.copyOf( graphs ); - this.alloc = alloc; + this.namespaces = ImmutableMap.copyOf( namespaces ); - this.physical = physical; - } + this.namespaceNames = ImmutableMap.copyOf( namespaces.values().stream().collect( Collectors.toMap( n -> n.caseSensitive ? 
n.name : n.name.toLowerCase(), n -> n ) ) ); + this.alloc = alloc; - @Override - public long getId() { - return 0; + this.physical = physical; + this.users = ImmutableMap.copyOf( catalog.getUsers() ); + this.userNames = ImmutableMap.copyOf( users.values().stream().collect( Collectors.toMap( u -> u.name, u -> u ) ) ); + this.interfaces = ImmutableMap.copyOf( catalog.getInterfaces() ); + this.interfaceNames = ImmutableMap.copyOf( interfaces.values().stream().collect( Collectors.toMap( i -> i.name, i -> i ) ) ); + this.adapters = ImmutableMap.copyOf( catalog.getAdapters() ); + this.adapterNames = ImmutableMap.copyOf( adapters.values().stream().collect( Collectors.toMap( a -> a.uniqueName, a -> a ) ) ); } @Override - public @NonNull List getNamespaces( Pattern name ) { - return null; + public @NonNull List getNamespaces( @Nullable Pattern name ) { + if ( name == null ) { + return namespaces.values().asList(); + } + return namespaces.values().stream().filter( n -> n.caseSensitive ? n.name.matches( name.toRegex() ) : n.name.toLowerCase().matches( name.toLowerCase().toRegex() ) ).collect( Collectors.toList() ); } @Override public LogicalNamespace getNamespace( long id ) { - return null; + return namespaces.get( id ); } @Override public LogicalNamespace getNamespace( String name ) { - return null; + return namespaceNames.get( name ); } @Override public boolean checkIfExistsNamespace( String name ) { - return false; + return namespaceNames.containsKey( name ); } @Override public CatalogUser getUser( String name ) throws UnknownUserException { - return null; + return userNames.get( name ); } @Override public CatalogUser getUser( long id ) { - return null; + return users.get( id ); } @Override public List getAdapters() { - return null; + return adapters.values().asList(); } @Override public CatalogAdapter getAdapter( String uniqueName ) throws UnknownAdapterException { - return null; + return adapterNames.get( uniqueName ); } @Override public CatalogAdapter getAdapter( long id ) { - return null; + return adapters.get( id ); } @Override public boolean checkIfExistsAdapter( long id ) { - return false; + return adapters.containsKey( id ); } @Override public List getQueryInterfaces() { - return null; + return interfaces.values().asList(); } @Override public CatalogQueryInterface getQueryInterface( String uniqueName ) throws UnknownQueryInterfaceException { - return null; + return interfaceNames.get( uniqueName ); } @Override public CatalogQueryInterface getQueryInterface( long id ) { - return null; - } - - - @Override - public List> getAllocationsOnAdapter( long id ) { - return null; - } - - - @Override - public List> getPhysicalsOnAdapter( long adapterId ) { - return null; - } - - - @Override - public List getIndexes() { - return null; + return interfaces.get( id ); } @@ -175,49 +179,7 @@ public List getTablesForPeriodicProcessing() { @Override public CatalogEntity getEntity( long id ) { - return null; - } - - - @Override - public CatalogEntity getEntity( long namespaceId, String name ) { - return null; - } - - - @Override - public CatalogEntity getEntity( long namespaceId, Pattern name ) { - return null; - } - - - @Override - public CatalogEntity getEntity( List names ) { - return null; - } - - - @Override - public boolean checkIfExistsEntity( String entityName ) { - return false; - } - - - @Override - public boolean checkIfExistsEntity( long tableId ) { - return false; - } - - - @Override - public LogicalNamespace getLogicalNamespace() { - return null; - } - - - @Override - public LogicalEntity 
getEntity( String name ) { - return null; + return relationals.values().stream().map( r -> r.getLogicalTable( id ) ).findFirst().orElse( null ); } @@ -250,4 +212,10 @@ public AllocSnapshot getAllocSnapshot() { return alloc; } + + @Override + public List getIndexes() { + return relationals.values().stream().flatMap( r -> r.getIndexes().stream() ).collect( Collectors.toList() ); + } + } diff --git a/core/src/main/java/org/polypheny/db/schema/Schemas.java b/core/src/main/java/org/polypheny/db/schema/Schemas.java index 40d2d793dc..12c1bda439 100644 --- a/core/src/main/java/org/polypheny/db/schema/Schemas.java +++ b/core/src/main/java/org/polypheny/db/schema/Schemas.java @@ -206,7 +206,7 @@ public static Queryable queryable( DataContext root, Class clazz, Iter */ public static Queryable queryable( DataContext root, Snapshot snapshot, Class clazz, String tableName ) { //QueryableEntity table = (QueryableEntity) schema.getEntity( tableName ); - LogicalTable table = snapshot.getRelSnapshot( 0 ).getLogicalTable( List.of( tableName ) ); + LogicalTable table = snapshot.getRelSnapshot( 0 ).getLogicalTable( tableName ); return table.unwrap( QueryableEntity.class ).asQueryable( root, snapshot, table.id ); } diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java index b11dd87011..22212063ef 100644 --- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java @@ -246,9 +246,9 @@ private void handleSource( DataSource adapter ) { for ( Map.Entry> entry : exportedColumns.entrySet() ) { // Make sure the table name is unique String tableName = entry.getKey(); - if ( catalog.getSnapshot().checkIfExistsEntity( tableName ) ) { // apparently we put them all into 1? + if ( catalog.getSnapshot().getRelSnapshot( defaultNamespaceId ).checkIfExistsEntity( tableName ) ) { // apparently we put them all into 1? 
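// The loop below probes tableName0, tableName1, ... against the default namespace's relational snapshot and appends the first free numeric suffix, so source tables with colliding names stay unique.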
int i = 0; - while ( catalog.getSnapshot().checkIfExistsEntity( tableName + i ) ) { + while ( catalog.getSnapshot().getRelSnapshot( defaultNamespaceId ).checkIfExistsEntity( tableName + i ) ) { i++; } tableName += i; diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticColumn.java b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticColumn.java index df7954e5f0..36a1cbf843 100644 --- a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticColumn.java +++ b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticColumn.java @@ -88,7 +88,7 @@ public StatisticColumn( long schemaId, long tableId, long columnId, PolyType typ LogicalRelSnapshot snapshot = Catalog.getInstance().getSnapshot().getRelSnapshot( schemaId ); if ( snapshot.getLogicalTable( tableId ) != null ) { - this.schema = snapshot.getNamespace( schemaId ).name; + this.schema = Catalog.getInstance().getSnapshot().getNamespace( schemaId ).name; this.table = snapshot.getTable( tableId ).name; this.column = snapshot.getColumn( columnId ).name; } diff --git a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql2alg/MqlToAlgConverter.java b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql2alg/MqlToAlgConverter.java index 14cba71ae1..db139c1e65 100644 --- a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql2alg/MqlToAlgConverter.java +++ b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql2alg/MqlToAlgConverter.java @@ -63,6 +63,7 @@ import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.catalog.entity.CatalogEntity; +import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.languages.OperatorRegistry; @@ -317,9 +318,8 @@ public AlgRoot convert( MqlCollectionStatement query ) { private CatalogEntity getEntity( MqlCollectionStatement query, String dbSchemaName ) { - List names = ImmutableList.of( dbSchemaName, query.getCollection() ); - - return snapshot.getEntity( names ); + LogicalNamespace namespace = snapshot.getNamespace( dbSchemaName ); + return snapshot.getDocSnapshot( namespace.id ).getCollection( query.getCollection() ); /* if ( table == null || table.getEntity() == null ) { diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java index b34a98fb5f..e6631feef4 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java @@ -87,12 +87,15 @@ public class PolyCatalog extends Catalog implements Serializable { public final Map physicalCatalogs; @Serialize + @Getter public final Map users; @Serialize + @Getter public final Map adapters; @Serialize + @Getter public final Map interfaces; private final IdBuilder idBuilder = IdBuilder.getInstance(); @@ -282,7 +285,7 @@ private void addDefaultColumn( CatalogAdapter adapter, LogicalTable table, Strin private void updateSnapshot() { - this.snapshot = SnapshotBuilder.createSnapshot( idBuilder.getNewSnapshotId(), logicalCatalogs, allocationCatalogs, physicalCatalogs ); + this.snapshot = SnapshotBuilder.createSnapshot( idBuilder.getNewSnapshotId(), this, logicalCatalogs, allocationCatalogs, physicalCatalogs ); } @@ -535,7 +538,6 
@@ public void clear() { } - @Override public PolyCatalog copy() { return deserialize( serialize(), PolyCatalog.class ); diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlUtil.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlUtil.java index 81d58f42db..31fb76fca0 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlUtil.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlUtil.java @@ -608,7 +608,7 @@ public static SqlLiteral symbol( Enum o, ParserPos parserPos ) { public static AlgDataType getNamedType( Identifier node, Snapshot snapshot ) { LogicalNamespace namespace = snapshot.getNamespace( node.getNames().get( 0 ) ); - LogicalTable table = snapshot.getRelSnapshot( namespace.id ).getLogicalTable( node.getNames() ); + LogicalTable table = snapshot.getRelSnapshot( namespace.id ).getLogicalTable( node.getNames().get( 1 ) ); if ( table != null ) { return table.getRowType(); } else { From 007131fc9f648d5736e6c4f78f79afda3359eaef Mon Sep 17 00:00:00 2001 From: datomo Date: Fri, 10 Mar 2023 15:18:18 +0100 Subject: [PATCH 044/436] removing redundant values from entities --- .../org/polypheny/db/StatisticsManager.java | 2 +- .../org/polypheny/db/adapter/Adapter.java | 9 +- .../org/polypheny/db/adapter/index/Index.java | 2 +- .../db/adapter/java/ReflectiveSchema.java | 3 +- .../db/algebra/core/common/Scan.java | 3 + .../db/algebra/core/relational/RelScan.java | 7 - .../common/LogicalConstraintEnforcer.java | 6 +- .../algebra/rules/LoptSemiJoinOptimizer.java | 3 +- .../org/polypheny/db/catalog/Catalog.java | 17 +- .../catalogs/LogicalRelationalCatalog.java | 19 +- .../db/catalog/entity/CatalogConstraint.java | 30 +- .../db/catalog/entity/CatalogIndex.java | 2 + .../entity/CatalogMaterializedView.java | 18 -- .../db/catalog/entity/CatalogPrimaryKey.java | 17 +- .../db/catalog/entity/CatalogView.java | 25 +- .../entity/allocation/AllocationEntity.java | 4 +- .../entity/logical/LogicalCollection.java | 28 +- .../catalog/entity/logical/LogicalColumn.java | 2 + .../catalog/entity/logical/LogicalEntity.java | 10 - .../catalog/entity/logical/LogicalGraph.java | 26 +- .../catalog/entity/logical/LogicalTable.java | 52 +--- .../entity/physical/PhysicalTable.java | 2 +- .../catalog/snapshot/LogicalRelSnapshot.java | 15 +- .../db/catalog/snapshot/Snapshot.java | 3 + .../snapshot/impl/LogicalRelSnapshotImpl.java | 119 ++++---- .../catalog/snapshot/impl/SnapshotImpl.java | 31 ++- .../polypheny/db/interpreter/ScanNode.java | 6 +- .../db/monitoring/events/StatementEvent.java | 2 +- .../db/prepare/AlgOptEntityImpl.java | 6 +- .../processing/LogicalAlgAnalyzeShuttle.java | 21 +- .../polypheny/db/rex/RexTableInputRef.java | 5 +- .../java/org/polypheny/db/schema/Schemas.java | 2 +- .../org/polypheny/db/tools/AlgBuilder.java | 2 +- .../db/view/MaterializedViewManager.java | 11 +- .../org/polypheny/db/catalog/MockCatalog.java | 13 - .../org/polypheny/db/ddl/DdlManagerImpl.java | 12 +- .../db/processing/AbstractQueryProcessor.java | 14 +- .../processing/ConstraintEnforceAttacher.java | 2 +- .../db/routing/UiRoutingPageUtil.java | 2 +- .../db/routing/routers/AbstractDqlRouter.java | 2 +- .../db/routing/routers/BaseRouter.java | 46 ++-- .../db/routing/routers/DmlRouterImpl.java | 2 +- .../routers/FullPlacementQueryRouter.java | 4 +- .../db/view/MaterializedViewManagerImpl.java | 25 +- .../events/metrics/DmlDataPoint.java | 2 +- .../statistics/StatisticColumn.java | 2 +- .../statistics/StatisticRepository.java | 
10 +- .../monitoring/statistics/StatisticTable.java | 6 +- .../statistics/StatisticsManagerImpl.java | 77 +++--- .../java/org/polypheny/db/cql/TableIndex.java | 15 +- .../org/polypheny/db/catalog/IdBuilder.java | 12 +- .../org/polypheny/db/catalog/PolyCatalog.java | 95 ++----- .../db/catalog/logical/RelationalCatalog.java | 258 ++++++++++++++++-- .../polypheny/db/restapi/RequestParser.java | 15 +- .../java/org/polypheny/db/restapi/Rest.java | 13 +- .../polypheny/db/sql/SqlProcessorImpl.java | 7 +- .../org/polypheny/db/sql/language/SqlDdl.java | 7 +- .../polypheny/db/sql/language/SqlUtil.java | 2 +- .../db/sql/language/ddl/SqlCreateTable.java | 2 +- .../db/sql/language/validate/EmptyScope.java | 6 +- .../db/sql/language/validate/ListScope.java | 4 +- .../language/validate/SqlValidatorImpl.java | 4 +- .../language/validate/SqlValidatorUtil.java | 4 +- .../db/sql/language/validate/WithScope.java | 4 +- .../db/sql/sql2alg/SqlToAlgConverter.java | 4 +- .../java/org/polypheny/db/webui/Crud.java | 108 ++++---- .../polypheny/db/webui/crud/LanguageCrud.java | 7 +- .../db/webui/crud/StatisticCrud.java | 14 +- .../models/requests/BatchUpdateRequest.java | 3 +- 69 files changed, 656 insertions(+), 657 deletions(-) diff --git a/core/src/main/java/org/polypheny/db/StatisticsManager.java b/core/src/main/java/org/polypheny/db/StatisticsManager.java index 546e374127..bf557ffb9d 100644 --- a/core/src/main/java/org/polypheny/db/StatisticsManager.java +++ b/core/src/main/java/org/polypheny/db/StatisticsManager.java @@ -50,7 +50,7 @@ public static StatisticsManager getInstance() { public abstract void tablesToUpdate( long tableId ); // Use cache if possible - public abstract void tablesToUpdate( long tableId, Map> changedValues, String type, long schemaId ); + public abstract void tablesToUpdate( long tableId, Map> changedValues, String type, long schemaId ); protected abstract void reevaluateTable( long tableId ); diff --git a/core/src/main/java/org/polypheny/db/adapter/Adapter.java b/core/src/main/java/org/polypheny/db/adapter/Adapter.java index e832fdfcc8..4297069614 100644 --- a/core/src/main/java/org/polypheny/db/adapter/Adapter.java +++ b/core/src/main/java/org/polypheny/db/adapter/Adapter.java @@ -55,6 +55,7 @@ import org.polypheny.db.catalog.entity.physical.PhysicalGraph; import org.polypheny.db.catalog.entity.physical.PhysicalTable; import org.polypheny.db.catalog.logistic.NamespaceType; +import org.polypheny.db.catalog.snapshot.LogicalRelSnapshot; import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.config.Config; import org.polypheny.db.config.Config.ConfigListener; @@ -493,13 +494,13 @@ public void addInformationPhysicalNames() { continue; } PhysicalTable physicalTable = (PhysicalTable) entity; - int i = 0; + LogicalRelSnapshot relSnapshot = snapshot.getRelSnapshot( physicalTable.namespaceId ); + for ( long columnId : physicalTable.columnIds ) { physicalColumnNames.addRow( columnId, - physicalTable.logical.getColumnNames().get( i ), - physicalTable.namespaceName + "." + physicalTable.name + "." + physicalTable.getColumnNames().get( i ) ); - i++; + relSnapshot.getColumn( columnId ), + physicalTable.namespaceName + "." + physicalTable.name + "." 
+ relSnapshot.getColumn( columnId ) ); } } } ); diff --git a/core/src/main/java/org/polypheny/db/adapter/index/Index.java b/core/src/main/java/org/polypheny/db/adapter/index/Index.java index 281eb60b29..6abd615d45 100644 --- a/core/src/main/java/org/polypheny/db/adapter/index/Index.java +++ b/core/src/main/java/org/polypheny/db/adapter/index/Index.java @@ -93,7 +93,7 @@ public void rebuild( final Transaction transaction ) { cols.addAll( targetColumns ); } final AlgNode scan = builder - .scan( ImmutableList.of( table.getNamespaceName(), table.name ) ) + .scan( table ) .project( cols.stream().map( builder::field ).collect( Collectors.toList() ) ) .build(); final QueryProcessor processor = statement.getQueryProcessor(); diff --git a/core/src/main/java/org/polypheny/db/adapter/java/ReflectiveSchema.java b/core/src/main/java/org/polypheny/db/adapter/java/ReflectiveSchema.java index 5d81c9354d..2c23838aa1 100644 --- a/core/src/main/java/org/polypheny/db/adapter/java/ReflectiveSchema.java +++ b/core/src/main/java/org/polypheny/db/adapter/java/ReflectiveSchema.java @@ -34,7 +34,6 @@ package org.polypheny.db.adapter.java; -import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableMultimap; import com.google.common.collect.Multimap; @@ -248,7 +247,7 @@ private static class ReflectiveEntity extends LogicalTable implements ScannableE ReflectiveEntity( Type elementType, Enumerable enumerable, Long id, Long partitionId, Long adapterId ) { //super( elementType, id, partitionId, adapterId ); - super( id, "test", List.of(), -1, "", EntityType.ENTITY, null, ImmutableList.of(), false, null, List.of() ); + super( id, "test", -1, EntityType.ENTITY, null, false ); this.elementType = elementType; this.enumerable = enumerable; throw new NotImplementedException(); diff --git a/core/src/main/java/org/polypheny/db/algebra/core/common/Scan.java b/core/src/main/java/org/polypheny/db/algebra/core/common/Scan.java index 00f3326ce6..fdd66d892c 100644 --- a/core/src/main/java/org/polypheny/db/algebra/core/common/Scan.java +++ b/core/src/main/java/org/polypheny/db/algebra/core/common/Scan.java @@ -16,6 +16,7 @@ package org.polypheny.db.algebra.core.common; +import lombok.Getter; import org.polypheny.db.algebra.AbstractAlgNode; import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.plan.AlgOptCluster; @@ -23,6 +24,7 @@ public abstract class Scan extends AbstractAlgNode { + @Getter public final E entity; @@ -37,4 +39,5 @@ public Scan( AlgOptCluster cluster, AlgTraitSet traitSet, E entity ) { this.entity = entity; } + } diff --git a/core/src/main/java/org/polypheny/db/algebra/core/relational/RelScan.java b/core/src/main/java/org/polypheny/db/algebra/core/relational/RelScan.java index 8acc558d7d..fff15fe66f 100644 --- a/core/src/main/java/org/polypheny/db/algebra/core/relational/RelScan.java +++ b/core/src/main/java/org/polypheny/db/algebra/core/relational/RelScan.java @@ -47,15 +47,8 @@ */ public abstract class RelScan extends Scan { - /** - * The entity definition. 
- */ - public final E entity; - - protected RelScan( AlgOptCluster cluster, AlgTraitSet traitSet, @NonNull E entity ) { super( cluster, traitSet, entity ); - this.entity = entity; } diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalConstraintEnforcer.java b/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalConstraintEnforcer.java index 7a862d81fd..a37e661994 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalConstraintEnforcer.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalConstraintEnforcer.java @@ -163,8 +163,8 @@ private static EnforcementInformation getControl( AlgNode node, Statement statem if ( RuntimeConfig.FOREIGN_KEY_ENFORCEMENT.getBoolean() ) { for ( final CatalogForeignKey foreignKey : Stream.concat( foreignKeys.stream(), exportedKeys.stream() ).collect( Collectors.toList() ) ) { builder.clear(); - final LogicalTable scanOptTable = snapshot.getLogicalTable( foreignKey.tableId ); - final LogicalTable refOptTable = snapshot.getLogicalTable( foreignKey.referencedKeyTableId ); + final LogicalTable scanOptTable = snapshot.getTable( foreignKey.tableId ); + final LogicalTable refOptTable = snapshot.getTable( foreignKey.referencedKeyTableId ); final AlgNode scan = LogicalRelScan.create( modify.getCluster(), scanOptTable ); final LogicalRelScan ref = LogicalRelScan.create( modify.getCluster(), refOptTable ); @@ -259,7 +259,7 @@ public static EnforcementInformation getControl( LogicalTable table, Statement s //builder.scan( table.getSchemaName(), table.name ); for ( CatalogConstraint constraint : constraints ) { builder.clear(); - builder.scan( table.getNamespaceName(), table.name );//LogicalTableScan.create( modify.getCluster(), modify.getLogicalTable() ); + builder.scan( table );//LogicalTableScan.create( modify.getCluster(), modify.getTable() ); // Enforce uniqueness between the already existing values and the new values List keys = constraint.key .getColumnNames() diff --git a/core/src/main/java/org/polypheny/db/algebra/rules/LoptSemiJoinOptimizer.java b/core/src/main/java/org/polypheny/db/algebra/rules/LoptSemiJoinOptimizer.java index e428051e31..132d8ad723 100644 --- a/core/src/main/java/org/polypheny/db/algebra/rules/LoptSemiJoinOptimizer.java +++ b/core/src/main/java/org/polypheny/db/algebra/rules/LoptSemiJoinOptimizer.java @@ -55,7 +55,6 @@ import org.polypheny.db.catalog.refactor.TranslatableEntity; import org.polypheny.db.languages.OperatorRegistry; import org.polypheny.db.plan.AlgOptCost; -import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptUtil; import org.polypheny.db.rex.RexBuilder; import org.polypheny.db.rex.RexCall; @@ -677,7 +676,7 @@ protected LcsEntity( CatalogEntityBuilder b ) { /** * Dummy class to allow code to compile. 
*/
-    private static class LcsScan {
+    static class LcsScan {
 
     }
 
diff --git a/core/src/main/java/org/polypheny/db/catalog/Catalog.java b/core/src/main/java/org/polypheny/db/catalog/Catalog.java
index c4ccc635c4..b7012f999c 100644
--- a/core/src/main/java/org/polypheny/db/catalog/Catalog.java
+++ b/core/src/main/java/org/polypheny/db/catalog/Catalog.java
@@ -31,9 +31,10 @@
 import org.polypheny.db.catalog.catalogs.LogicalGraphCatalog;
 import org.polypheny.db.catalog.catalogs.LogicalRelationalCatalog;
 import org.polypheny.db.catalog.catalogs.PhysicalCatalog;
+import org.polypheny.db.catalog.entity.CatalogAdapter;
 import org.polypheny.db.catalog.entity.CatalogAdapter.AdapterType;
-import org.polypheny.db.catalog.entity.logical.LogicalEntity;
-import org.polypheny.db.catalog.entity.physical.PhysicalEntity;
+import org.polypheny.db.catalog.entity.CatalogQueryInterface;
+import org.polypheny.db.catalog.entity.CatalogUser;
 import org.polypheny.db.catalog.exceptions.NoTablePrimaryKeyException;
 import org.polypheny.db.catalog.logistic.NamespaceType;
 import org.polypheny.db.catalog.snapshot.Snapshot;
@@ -90,15 +91,9 @@ public static Catalog getInstance() {
 
     public abstract AllocationGraphCatalog getAllocGraph( long namespaceId );
 
-    // move to Snapshot
-    public abstract LogicalEntity getLogicalEntity( String entityName );
-
-    public abstract LogicalEntity getLogicalEntity( long id );
 
     public abstract PhysicalCatalog getPhysical( long namespaceId );
 
-    public abstract PhysicalEntity getPhysicalEntity( long id );
 
     public abstract Map getNodeInfo();
 
@@ -241,10 +236,10 @@ protected final boolean isValidIdentifier( final String str ) {
 
     public abstract Snapshot getSnapshot();
 
-    public abstract Map getUsers();
+    public abstract Map<Long, CatalogUser> getUsers();
 
-    public abstract Map getAdapters();
+    public abstract Map<Long, CatalogAdapter> getAdapters();
 
-    public abstract Map getInterfaces();
+    public abstract Map<Long, CatalogQueryInterface> getInterfaces();
 
 }
diff --git a/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalRelationalCatalog.java b/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalRelationalCatalog.java
index cfa5e6fbb2..4f65bbc313 100644
--- a/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalRelationalCatalog.java
+++ b/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalRelationalCatalog.java
@@ -16,12 +16,12 @@
 package org.polypheny.db.catalog.catalogs;
 
-import com.google.common.collect.ImmutableMap;
 import java.util.List;
 import java.util.Map;
 import org.polypheny.db.algebra.AlgCollation;
 import org.polypheny.db.algebra.AlgNode;
 import org.polypheny.db.algebra.type.AlgDataType;
+import org.polypheny.db.catalog.entity.CatalogConstraint;
 import org.polypheny.db.catalog.entity.CatalogIndex;
 import org.polypheny.db.catalog.entity.CatalogKey;
 import org.polypheny.db.catalog.entity.CatalogView;
@@ -98,14 +98,6 @@ public interface LogicalRelationalCatalog extends LogicalCatalog {
      */
     public abstract void deleteTable( long tableId );
 
-    /**
-     * Change owner of a table
-     *
-     * @param tableId The id of the table
-     * @param ownerId ID of the new owner
-     */
-    public abstract void setTableOwner( long tableId, long ownerId );
 
     /**
      * Set the primary key of a table
     *
@@ -315,15 +307,16 @@ public interface LogicalRelationalCatalog extends LogicalCatalog {
      */
     public abstract void deleteIndex( long indexId );
 
-    ImmutableMap<Long, LogicalTable> getTables();
+    Map<Long, LogicalTable> getTables();
 
-    ImmutableMap<Long, LogicalColumn> getColumns();
+    Map<Long, LogicalColumn> getColumns();
 
     LogicalNamespace getLogicalNamespace();
 
-    ImmutableMap<Long, CatalogIndex> getIndexes();
+    Map<Long, CatalogIndex> getIndexes();
 
-    ImmutableMap<Long, CatalogKey> getKeys();
+    Map<Long, CatalogKey> getKeys();
 
+    Map<Long, CatalogConstraint> 
getConstraints(); } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogConstraint.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogConstraint.java index 52a2b75f4b..4387a4cb66 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogConstraint.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogConstraint.java @@ -17,29 +17,37 @@ package org.polypheny.db.catalog.entity; +import io.activej.serializer.annotations.Deserialize; +import io.activej.serializer.annotations.Serialize; import java.io.Serializable; import lombok.EqualsAndHashCode; import lombok.NonNull; +import lombok.Value; import org.polypheny.db.catalog.logistic.ConstraintType; @EqualsAndHashCode +@Value public class CatalogConstraint implements Serializable { - public final long id; - public final long keyId; - public final ConstraintType type; - public final String name; - - public final CatalogKey key; + @Serialize + public long id; + @Serialize + public long keyId; + @Serialize + public ConstraintType type; + @Serialize + public String name; + @Serialize + public CatalogKey key; public CatalogConstraint( - final long id, - final long keyId, - @NonNull final ConstraintType constraintType, - final String name, - final CatalogKey key ) { + @Deserialize("id") final long id, + @Deserialize("keyId") final long keyId, + @Deserialize("type") @NonNull final ConstraintType constraintType, + @Deserialize("name") final String name, + @Deserialize("key") final CatalogKey key ) { this.id = id; this.keyId = keyId; this.type = constraintType; diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogIndex.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogIndex.java index 398b413750..081a668f05 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogIndex.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogIndex.java @@ -26,11 +26,13 @@ import lombok.NonNull; import lombok.RequiredArgsConstructor; import lombok.Value; +import lombok.experimental.SuperBuilder; import org.polypheny.db.catalog.logistic.IndexType; @EqualsAndHashCode(callSuper = false) @Value +@SuperBuilder(toBuilder = true) public class CatalogIndex implements Serializable { private static final long serialVersionUID = -318228681682792406L; diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogMaterializedView.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogMaterializedView.java index 1843fbb39c..7caf971e0a 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogMaterializedView.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogMaterializedView.java @@ -16,20 +16,14 @@ package org.polypheny.db.catalog.entity; -import java.util.List; -import java.util.Map; import lombok.EqualsAndHashCode; -import lombok.NonNull; import lombok.Value; -import lombok.With; import lombok.experimental.SuperBuilder; import org.polypheny.db.algebra.AlgCollation; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.languages.QueryLanguage; -import org.polypheny.db.partition.properties.PartitionProperty; @EqualsAndHashCode(callSuper = true) @SuperBuilder(toBuilder = true) @@ -52,18 +46,12 @@ public class CatalogMaterializedView extends CatalogView { public CatalogMaterializedView( long id, String name, - List columns, long namespaceId, - String 
namespaceName, EntityType entityType, String query, Long primaryKey, - @NonNull List dataPlacements, boolean modifiable, - PartitionProperty partitionProperty, AlgCollation algCollation, - List connectedViews, - Map> underlyingTables, String language, MaterializedCriteria materializedCriteria, boolean ordered @@ -71,18 +59,12 @@ public CatalogMaterializedView( super( id, name, - namespaceName, - columns, namespaceId, entityType, query, primaryKey, - dataPlacements, modifiable, - partitionProperty, algCollation, - connectedViews, - underlyingTables, language ); this.query = query; this.algCollation = algCollation; diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogPrimaryKey.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogPrimaryKey.java index 44f146bf3e..31424c4d7b 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogPrimaryKey.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogPrimaryKey.java @@ -17,6 +17,8 @@ package org.polypheny.db.catalog.entity; +import io.activej.serializer.annotations.Deserialize; +import io.activej.serializer.annotations.Serialize; import java.io.Serializable; import java.util.LinkedList; import java.util.List; @@ -29,14 +31,19 @@ @EqualsAndHashCode(callSuper = true) public final class CatalogPrimaryKey extends CatalogKey { + @Serialize + private final CatalogKey key; - public CatalogPrimaryKey( @NonNull final CatalogKey catalogKey ) { + + public CatalogPrimaryKey( @Deserialize("key") @NonNull final CatalogKey key ) { super( - catalogKey.id, - catalogKey.tableId, - catalogKey.namespaceId, - catalogKey.columnIds, + key.id, + key.tableId, + key.namespaceId, + key.columnIds, EnforcementTime.ON_QUERY ); + + this.key = key; } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogView.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogView.java index 50a8602c25..d2c29068c4 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogView.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogView.java @@ -17,15 +17,8 @@ package org.polypheny.db.catalog.entity; -import com.google.common.collect.ImmutableList; -import com.google.common.collect.ImmutableMap; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; -import java.util.stream.Collectors; import lombok.EqualsAndHashCode; import lombok.Value; -import lombok.With; import lombok.experimental.NonFinal; import lombok.experimental.SuperBuilder; import org.polypheny.db.algebra.AbstractAlgNode; @@ -35,12 +28,9 @@ import org.polypheny.db.algebra.SingleAlg; import org.polypheny.db.algebra.logical.relational.LogicalRelViewScan; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.entity.logical.LogicalCollection; -import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.languages.QueryLanguage; -import org.polypheny.db.partition.properties.PartitionProperty; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.view.ViewManager.ViewVisitor; @@ -52,7 +42,6 @@ public class CatalogView extends LogicalTable { private static final long serialVersionUID = -4771308114962700515L; - public ImmutableMap> underlyingTables; public String language; public AlgCollation algCollation; public String query; @@ -61,34 +50,22 @@ public class CatalogView extends LogicalTable { public CatalogView( long id, String name, - 
String namespaceName, - List columns, long namespaceId, EntityType entityType, String query, Long primaryKey, - List dataPlacements, boolean modifiable, - PartitionProperty partitionProperty, AlgCollation algCollation, - List connectedViews, - Map> underlyingTables, String language ) { super( id, name, - columns, namespaceId, - namespaceName, entityType, primaryKey, - dataPlacements, - modifiable, - partitionProperty, - connectedViews ); + modifiable ); this.query = query; this.algCollation = algCollation; - this.underlyingTables = ImmutableMap.copyOf( underlyingTables.entrySet().stream().collect( Collectors.toMap( Entry::getKey, t -> ImmutableList.copyOf( t.getValue() ) ) ) ); // mapdb cannot handle the class QueryLanguage, therefore we use the String here this.language = language; } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationEntity.java b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationEntity.java index cd78db8ef1..455e5d71c7 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationEntity.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationEntity.java @@ -16,10 +16,8 @@ package org.polypheny.db.catalog.entity.allocation; -import lombok.AllArgsConstructor; import lombok.EqualsAndHashCode; import lombok.Value; -import lombok.With; import lombok.experimental.NonFinal; import lombok.experimental.SuperBuilder; import org.polypheny.db.catalog.entity.logical.LogicalEntity; @@ -37,7 +35,7 @@ public abstract class AllocationEntity extends LogicalE protected AllocationEntity( L logical, long id, String name, EntityType type, NamespaceType namespaceType, long adapterId ) { - super( id, name, logical.namespaceId, logical.namespaceName, type, namespaceType ); + super( id, name, logical.namespaceId, type, namespaceType ); this.adapterId = adapterId; this.logical = logical; } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalCollection.java b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalCollection.java index e2bacf4dc8..8ee5c89476 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalCollection.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalCollection.java @@ -16,17 +16,11 @@ package org.polypheny.db.catalog.entity.logical; -import com.google.common.collect.ImmutableList; import java.io.Serializable; -import java.util.ArrayList; -import java.util.Collection; -import java.util.List; -import java.util.stream.Collectors; import lombok.EqualsAndHashCode; import lombok.Getter; -import lombok.NonNull; import lombok.Value; -import lombok.With; +import lombok.experimental.SuperBuilder; import org.apache.calcite.linq4j.tree.Expression; import org.apache.calcite.linq4j.tree.Expressions; import org.polypheny.db.catalog.Catalog; @@ -36,14 +30,13 @@ @EqualsAndHashCode(callSuper = true) @Value -//@With +@SuperBuilder(toBuilder = true) public class LogicalCollection extends LogicalEntity implements CatalogObject { private static final long serialVersionUID = -6490762948368178584L; @Getter public long id; - public ImmutableList placements; public String name; public long namespaceId; public EntityType entityType; @@ -52,17 +45,14 @@ public class LogicalCollection extends LogicalEntity implements CatalogObject { public LogicalCollection( long namespaceId, - String namespaceName, long id, String name, - @NonNull Collection placements, EntityType type, String physicalName ) { - super( id, name, 
namespaceId, namespaceName, EntityType.ENTITY, NamespaceType.DOCUMENT ); + super( id, name, namespaceId, EntityType.ENTITY, NamespaceType.DOCUMENT ); this.id = id; this.namespaceId = namespaceId; this.name = name; - this.placements = ImmutableList.copyOf( placements ); this.entityType = type; this.physicalName = physicalName; } @@ -74,18 +64,6 @@ public Serializable[] getParameterArray() { } - public LogicalCollection addPlacement( Long adapterId ) { - List placements = new ArrayList<>( this.placements ); - placements.add( adapterId ); - return new LogicalCollection( id, name, namespaceId, namespaceName, placements, EntityType.ENTITY, physicalName ); - } - - - public LogicalCollection removePlacement( long adapterId ) { - List placements = this.placements.stream().filter( id -> id != adapterId ).collect( Collectors.toList() ); - return new LogicalCollection( id, name, namespaceId, namespaceName, placements, EntityType.ENTITY, physicalName ); - } - @Override public Expression asExpression() { diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalColumn.java b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalColumn.java index d31487738d..bc715757ca 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalColumn.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalColumn.java @@ -25,6 +25,7 @@ import lombok.SneakyThrows; import lombok.Value; import lombok.experimental.NonFinal; +import lombok.experimental.SuperBuilder; import org.apache.commons.lang3.NotImplementedException; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; @@ -37,6 +38,7 @@ @EqualsAndHashCode() @Value +@SuperBuilder(toBuilder = true) @NonFinal public class LogicalColumn implements CatalogObject, Comparable { diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalEntity.java b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalEntity.java index 953034a73f..17ea29b78d 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalEntity.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalEntity.java @@ -16,12 +16,7 @@ package org.polypheny.db.catalog.entity.logical; -import io.activej.serializer.annotations.Deserialize; -import io.activej.serializer.annotations.Serialize; -import io.activej.serializer.annotations.SerializeClass; -import lombok.AllArgsConstructor; import lombok.EqualsAndHashCode; -import lombok.NoArgsConstructor; import lombok.Value; import lombok.experimental.NonFinal; import lombok.experimental.SuperBuilder; @@ -35,19 +30,14 @@ @NonFinal public abstract class LogicalEntity extends CatalogEntity { - @Serialize - public String namespaceName; - public LogicalEntity( long id, String name, long namespaceId, - String namespaceName, EntityType type, NamespaceType namespaceType ) { super( id, name, namespaceId, type, namespaceType ); - this.namespaceName = namespaceName; } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalGraph.java b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalGraph.java index 9196c2c0d6..3686820354 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalGraph.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalGraph.java @@ -17,14 +17,8 @@ package org.polypheny.db.catalog.entity.logical; import com.drew.lang.annotations.NotNull; -import com.google.common.collect.ImmutableList; 
import java.io.Serializable; -import java.util.ArrayList; -import java.util.Collection; -import java.util.List; -import java.util.stream.Collectors; import lombok.EqualsAndHashCode; -import lombok.NonNull; import lombok.Value; import lombok.experimental.SuperBuilder; import org.apache.calcite.linq4j.tree.Expression; @@ -39,25 +33,22 @@ public class LogicalGraph extends LogicalEntity implements Comparable { private static final long serialVersionUID = 7343856827901459672L; - - public ImmutableList placements; public int ownerId; public boolean modifiable; public boolean caseSensitive; - public LogicalGraph( long id, String name, long namespaceId, String namespaceName, int ownerId, boolean modifiable, @NonNull Collection placements, boolean caseSensitive ) { - super( id, name, namespaceId, namespaceName, EntityType.ENTITY, NamespaceType.GRAPH ); + public LogicalGraph( long id, String name, long namespaceId, int ownerId, boolean modifiable, boolean caseSensitive ) { + super( id, name, namespaceId, EntityType.ENTITY, NamespaceType.GRAPH ); this.ownerId = ownerId; this.modifiable = modifiable; - this.placements = ImmutableList.copyOf( placements ); this.caseSensitive = caseSensitive; } public LogicalGraph( LogicalGraph graph ) { - this( graph.id, graph.name, graph.namespaceId, graph.namespaceName, graph.ownerId, graph.modifiable, graph.placements, graph.caseSensitive ); + this( graph.id, graph.name, graph.namespaceId, graph.ownerId, graph.modifiable, graph.caseSensitive ); } @@ -76,17 +67,6 @@ public int compareTo( @NotNull LogicalGraph o ) { } - public LogicalGraph addPlacement( long adapterId ) { - List placements = new ArrayList<>( this.placements ); - placements.add( adapterId ); - return toBuilder().placements( ImmutableList.copyOf( placements ) ).build(); - } - - - public LogicalGraph removePlacement( int adapterId ) { - return toBuilder().placements( ImmutableList.copyOf( placements.stream().filter( i -> i != adapterId ).collect( Collectors.toList() ) ) ).build(); - } - @Override public Expression asExpression() { diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalTable.java b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalTable.java index 7e48d88666..9e19c3be32 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalTable.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalTable.java @@ -17,13 +17,10 @@ package org.polypheny.db.catalog.entity.logical; -import com.google.common.collect.ImmutableList; import io.activej.serializer.annotations.Deserialize; import io.activej.serializer.annotations.Serialize; import java.io.Serializable; -import java.util.ArrayList; import java.util.List; -import java.util.stream.Collectors; import lombok.EqualsAndHashCode; import lombok.NonNull; import lombok.RequiredArgsConstructor; @@ -37,7 +34,6 @@ import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.logistic.NamespaceType; -import org.polypheny.db.partition.properties.PartitionProperty; import org.polypheny.db.schema.ColumnStrategy; @EqualsAndHashCode(callSuper = false) @@ -47,9 +43,6 @@ public class LogicalTable extends LogicalEntity implements Comparable columns; - @Serialize public Long primaryKey; @@ -57,61 +50,30 @@ public class LogicalTable extends LogicalEntity implements Comparable dataPlacements; - - @Serialize - public ImmutableList connectedViews; - public LogicalTable( @Deserialize("id") final long id, @Deserialize("name") 
@NonNull final String name, - @Deserialize("columns") final List columns, @Deserialize("namespaceId") final long namespaceId, - @Deserialize("namespaceName") final String namespaceName, @Deserialize("entityType") @NonNull final EntityType type, @Deserialize("primaryKey") final Long primaryKey, - @Deserialize("dataPlacements") @NonNull final List dataPlacements, - @Deserialize("modifiable") boolean modifiable, - @Deserialize("partitionProperty") PartitionProperty partitionProperty, - @Deserialize("connectedViews") List connectedViews ) { - super( id, name, namespaceId, namespaceName, type, NamespaceType.RELATIONAL ); - this.columns = ImmutableList.copyOf( columns ); + @Deserialize("modifiable") boolean modifiable ) { + super( id, name, namespaceId, type, NamespaceType.RELATIONAL ); this.primaryKey = primaryKey; this.modifiable = modifiable; - this.partitionProperty = partitionProperty; - - this.connectedViews = ImmutableList.copyOf( connectedViews ); - - this.dataPlacements = ImmutableList.copyOf( dataPlacements ); - if ( type == EntityType.ENTITY && !modifiable ) { throw new RuntimeException( "Tables of table type TABLE must be modifiable!" ); } } - public List getColumnNames() { - return columns.stream().map( c -> c.name ).collect( Collectors.toList() ); - } - - - public List getColumnIds() { - return columns.stream().map( c -> c.id ).collect( Collectors.toList() ); - } - // Used for creating ResultSets @Override public Serializable[] getParameterArray() { return new Serializable[]{ - getNamespaceName(), name, entityType.name(), "", @@ -143,7 +105,7 @@ public int compareTo( LogicalTable o ) { public AlgDataType getRowType() { final AlgDataTypeFactory.Builder fieldInfo = AlgDataTypeFactory.DEFAULT.builder(); - for ( LogicalColumn column : columns ) { + for ( LogicalColumn column : Catalog.getInstance().getSnapshot().getRelSnapshot( namespaceId ).getColumns( id ) ) { AlgDataType sqlType = column.getAlgDataType( AlgDataTypeFactory.DEFAULT ); fieldInfo.add( column.name, null, sqlType ).nullable( column.nullable ); } @@ -154,7 +116,7 @@ public AlgDataType getRowType() { @Override public Expression asExpression() { - return Expressions.call( Expressions.call( Catalog.class, "getInstance" ), "getLogicalTable", Expressions.constant( id ) ); + return Expressions.call( Expressions.call( Catalog.class, "getInstance" ), "getTable", Expressions.constant( id ) ); } @@ -168,12 +130,6 @@ public List getConnectedViews() { } - public LogicalTable withAddedColumn( LogicalColumn column ) { - List columns = new ArrayList<>( this.columns ); - columns.add( column ); - return toBuilder().columns( ImmutableList.copyOf( columns ) ).build(); - } - @RequiredArgsConstructor public static class PrimitiveCatalogTable { diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalTable.java b/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalTable.java index 83574a65f8..934677ce3e 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalTable.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalTable.java @@ -78,7 +78,7 @@ public AlgProtoDataType buildProto() { final AlgDataTypeFactory.Builder fieldInfo = typeFactory.builder(); for ( CatalogColumnPlacement placement : placements ) { - LogicalColumn logicalColumn = Catalog.getInstance().getSnapshot().getRelSnapshot( namespaceId ).getLogicalColumn( placement.columnId ); + LogicalColumn logicalColumn = Catalog.getInstance().getSnapshot().getRelSnapshot( namespaceId ).getColumn( 
placement.columnId ); AlgDataType sqlType = logicalColumn.getAlgDataType( typeFactory ); fieldInfo.add( logicalColumn.name, placement.physicalColumnName, sqlType ).nullable( logicalColumn.nullable ); } diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalRelSnapshot.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalRelSnapshot.java index 4e9574d303..2fb41fce50 100644 --- a/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalRelSnapshot.java +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalRelSnapshot.java @@ -46,14 +46,6 @@ public interface LogicalRelSnapshot { */ public abstract List getTables( @Nullable Pattern name ); - /** - * Returns the table with the given id - * - * @param tableId The id of the table - * @return The table - */ - public abstract LogicalTable getTable( long tableId ); - /** * Returns the table with the given name in the specified schema. * @@ -61,7 +53,7 @@ public interface LogicalRelSnapshot { * @return The table * @throws UnknownTableException If there is no table with this name in the specified database and schema. */ - public abstract LogicalTable getTable( String tableName ) throws UnknownTableException; + public abstract LogicalTable getTable( String tableName ); /** @@ -280,11 +272,8 @@ public interface LogicalRelSnapshot { public abstract CatalogIndex getIndex( long indexId ); - LogicalTable getLogicalTable( long id ); - - LogicalTable getLogicalTable( String name ); + LogicalTable getTable( long id ); - LogicalColumn getLogicalColumn( long id ); boolean checkIfExistsEntity( String newName ); diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/Snapshot.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/Snapshot.java index 619986d952..7d9924eb4d 100644 --- a/core/src/main/java/org/polypheny/db/catalog/snapshot/Snapshot.java +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/Snapshot.java @@ -31,6 +31,7 @@ import org.polypheny.db.catalog.entity.CatalogQueryInterface; import org.polypheny.db.catalog.entity.CatalogUser; import org.polypheny.db.catalog.entity.LogicalNamespace; +import org.polypheny.db.catalog.entity.logical.LogicalEntity; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.exceptions.UnknownAdapterException; import org.polypheny.db.catalog.exceptions.UnknownQueryInterfaceException; @@ -194,4 +195,6 @@ default List getOperatorList() { List getIndexes(); + LogicalEntity getLogicalEntity( long id ); + } diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalRelSnapshotImpl.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalRelSnapshotImpl.java index 7d7a04c4f3..72f09d49b9 100644 --- a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalRelSnapshotImpl.java +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalRelSnapshotImpl.java @@ -24,7 +24,6 @@ import java.util.Map.Entry; import java.util.stream.Collectors; import lombok.Value; -import org.apache.commons.lang3.NotImplementedException; import org.jetbrains.annotations.Nullable; import org.polypheny.db.catalog.catalogs.LogicalRelationalCatalog; import org.polypheny.db.catalog.entity.CatalogConstraint; @@ -56,7 +55,7 @@ public class LogicalRelSnapshotImpl implements LogicalRelSnapshot { ImmutableMap> tableColumns; ImmutableMap columns; - ImmutableMap columnNames; + ImmutableMap, LogicalColumn> columnNames; ImmutableMap keys; @@ -64,6 +63,11 @@ public class LogicalRelSnapshotImpl implements 
LogicalRelSnapshot {
 
     ImmutableMap<Long, CatalogIndex> index;
 
+    ImmutableMap<Long, CatalogConstraint> constraints;
+
+    ImmutableMap<Long, CatalogForeignKey> foreignKeys;
+    ImmutableMap<Long, CatalogPrimaryKey> primaryKeys;
+
     ImmutableMap<Long, List<CatalogIndex>> keyToIndexes;
 
     ImmutableMap<Pair<Long, Long>, LogicalColumn> tableColumnIdColumn;
@@ -71,54 +75,84 @@ public class LogicalRelSnapshotImpl implements LogicalRelSnapshot {
     ImmutableMap<Pair<String, String>, LogicalColumn> tableColumnNameColumn;
     ImmutableMap<Pair<Long, String>, LogicalColumn> tableIdColumnNameColumn;
 
+    ImmutableMap<Long, List<CatalogConstraint>> tableConstraints;
+    ImmutableMap<Long, List<CatalogForeignKey>> tableForeignKeys;
 
 
     public LogicalRelSnapshotImpl( LogicalRelationalCatalog catalog ) {
         namespace = catalog.getLogicalNamespace();
+
         tables = ImmutableMap.copyOf( catalog.getTables() );
         tableNames = ImmutableMap.copyOf( tables.entrySet().stream().collect( Collectors.toMap( e -> namespace.caseSensitive ? e.getValue().name : e.getValue().name.toLowerCase(), Entry::getValue ) ) );
+
         columns = ImmutableMap.copyOf( catalog.getColumns() );
-        columnNames = ImmutableMap.copyOf( columns.entrySet().stream().collect( Collectors.toMap( e -> namespace.caseSensitive ? e.getValue().name : e.getValue().name.toLowerCase(), Entry::getValue ) ) );
+        columnNames = ImmutableMap.copyOf( columns.entrySet().stream().collect( Collectors.toMap( e -> namespace.caseSensitive ? Pair.of( e.getValue().tableId, e.getValue().name ) : Pair.of( e.getValue().tableId, e.getValue().name.toLowerCase() ), Entry::getValue ) ) );
+
+        //// tables
 
         Map<Long, List<LogicalColumn>> tableChildren = new HashMap<>();
         columns.forEach( ( k, v ) -> {
-            if ( tableChildren.containsKey( v.tableId ) ) {
-                tableChildren.get( v.tableId ).add( v );
-            } else {
-                tableChildren.put( v.tableId, new ArrayList<>( List.of( v ) ) );
+            if ( !tableChildren.containsKey( v.tableId ) ) {
+                tableChildren.put( v.tableId, new ArrayList<>() );
             }
+            tableChildren.get( v.tableId ).add( v );
         } );
         this.tableColumns = ImmutableMap.copyOf( tableChildren );
 
-        keys = catalog.getKeys();
+        this.tableColumnIdColumn = ImmutableMap.copyOf( columns.entrySet().stream().collect( Collectors.toMap( c -> Pair.of( c.getValue().tableId, c.getValue().id ), Entry::getValue ) ) );
+        this.tableColumnNameColumn = ImmutableMap.copyOf( columns.entrySet().stream().collect( Collectors.toMap( c -> Pair.of( tables.get( c.getValue().tableId ).name, c.getValue().name ), Entry::getValue ) ) );
+        this.tableIdColumnNameColumn = ImmutableMap.copyOf( columns.entrySet().stream().collect( Collectors.toMap( c -> Pair.of( c.getValue().tableId, c.getValue().name ), Entry::getValue ) ) );
+
+        //// KEYS
+
+        keys = ImmutableMap.copyOf( catalog.getKeys() );
 
         Map<Long, List<CatalogKey>> tableKeys = new HashMap<>();
         keys.forEach( ( k, v ) -> {
-            if ( tableKeys.containsKey( v.tableId ) ) {
-                tableKeys.get( v.tableId ).add( v );
-            } else {
-                tableKeys.put( v.tableId, new ArrayList<>( List.of( v ) ) );
+            if ( !tableKeys.containsKey( v.tableId ) ) {
+                tableKeys.put( v.tableId, new ArrayList<>() );
             }
+            tableKeys.get( v.tableId ).add( v );
         } );
         this.tableKeys = ImmutableMap.copyOf( tableKeys );
 
-        this.tableColumnIdColumn = ImmutableMap.copyOf( columns.entrySet().stream().collect( Collectors.toMap( c -> Pair.of( c.getValue().tableId, c.getValue().id ), Entry::getValue ) ) );
-        this.tableColumnNameColumn = ImmutableMap.copyOf( columns.entrySet().stream().collect( Collectors.toMap( c -> Pair.of( tables.get( c.getValue().tableId ).name, c.getValue().name ), Entry::getValue ) ) );
-        this.tableIdColumnNameColumn = ImmutableMap.copyOf( columns.entrySet().stream().collect( Collectors.toMap( c -> Pair.of( c.getValue().tableId, c.getValue().name ), Entry::getValue ) ) );
-
-        this.index = catalog.getIndexes();
+        this.index = 
ImmutableMap.copyOf( catalog.getIndexes() ); Map> keyToIndexes = new HashMap<>(); this.index.forEach( ( k, v ) -> { - if ( keyToIndexes.containsKey( v.keyId ) ) { - keyToIndexes.get( v.keyId ).add( v ); - } else { - keyToIndexes.put( v.keyId, new ArrayList<>( List.of( v ) ) ); + if ( !keyToIndexes.containsKey( v.keyId ) ) { + keyToIndexes.put( v.keyId, new ArrayList<>() ); } + keyToIndexes.get( v.keyId ).add( v ); } ); this.keyToIndexes = ImmutableMap.copyOf( keyToIndexes ); + this.foreignKeys = ImmutableMap.copyOf( keys.entrySet().stream().filter( f -> f.getValue() instanceof CatalogForeignKey ).collect( Collectors.toMap( Entry::getKey, e -> (CatalogForeignKey) e.getValue() ) ) ); + + HashMap> tableForeignKeys = new HashMap<>(); + foreignKeys.forEach( ( k, v ) -> { + if ( !tableForeignKeys.containsKey( v.tableId ) ) { + tableForeignKeys.put( v.tableId, new ArrayList<>() ); + } + tableForeignKeys.get( v.tableId ).add( v ); + } ); + this.tableForeignKeys = ImmutableMap.copyOf( tableForeignKeys ); + + this.primaryKeys = ImmutableMap.copyOf( keys.entrySet().stream().filter( f -> f.getValue() instanceof CatalogPrimaryKey ).collect( Collectors.toMap( Entry::getKey, e -> (CatalogPrimaryKey) e.getValue() ) ) ); + + //// CONSTRAINTS + + this.constraints = ImmutableMap.copyOf( catalog.getConstraints() ); + + HashMap> tableConstraints = new HashMap<>(); + constraints.forEach( ( k, v ) -> { + if ( !tableConstraints.containsKey( v.key.tableId ) ) { + tableConstraints.put( v.key.tableId, new ArrayList<>() ); + } + tableConstraints.get( v.key.tableId ).add( v ); + } ); + this.tableConstraints = ImmutableMap.copyOf( tableConstraints ); } @@ -137,11 +171,6 @@ public LogicalTable getTable( long tableId ) { } - @Override - public LogicalTable getTable( String tableName ) throws UnknownTableException { - return tableNames.get( tableName ); - } - @Override public List getKeys() { @@ -178,11 +207,6 @@ public List getColumns( @Nullable Pattern tableName, @Nullable Pa } - @Override - public LogicalColumn getColumn( long columnId ) { - return columns.get( columnId ); - } - @Override public LogicalColumn getColumn( long tableId, String columnName ) throws UnknownColumnException { @@ -204,67 +228,68 @@ public boolean checkIfExistsColumn( long tableId, String columnName ) { @Override public CatalogPrimaryKey getPrimaryKey( long key ) { - return (CatalogPrimaryKey) keys.get( key ); + return primaryKeys.get( key ); } @Override public boolean isPrimaryKey( long keyId ) { - throw new NotImplementedException(); + return primaryKeys.containsKey( keyId ); } @Override public boolean isForeignKey( long keyId ) { - throw new NotImplementedException(); + return foreignKeys.containsKey( keyId ); } @Override public boolean isIndex( long keyId ) { - throw new NotImplementedException(); + return index.containsKey( keyId ); } @Override public boolean isConstraint( long keyId ) { - throw new NotImplementedException(); + return constraints.containsKey( keyId ); } @Override public List getForeignKeys( long tableId ) { - throw new NotImplementedException(); + return tableKeys.get( tableId ).stream().filter( k -> isForeignKey( k.id ) ).map( f -> (CatalogForeignKey) f ).collect( Collectors.toList() ); } @Override public List getExportedKeys( long tableId ) { - throw new NotImplementedException(); + return foreignKeys.values().stream().filter( k -> k.referencedKeyTableId == tableId ).collect( Collectors.toList() ); } @Override public List getConstraints( long tableId ) { - throw new NotImplementedException(); + List keysOfTable = 
getTableKeys( tableId ).stream().map( t -> t.id ).collect( Collectors.toList() ); + return constraints.values().stream().filter( c -> keysOfTable.contains( c.keyId ) ).collect( Collectors.toList() ); } @Override public List getConstraints( CatalogKey key ) { - throw new NotImplementedException(); + return constraints.values().stream().filter( c -> c.keyId == key.id ).collect( Collectors.toList() ); } @Override public CatalogConstraint getConstraint( long tableId, String constraintName ) throws UnknownConstraintException { - throw new NotImplementedException(); + return tableConstraints.get( tableId ).stream().filter( c -> c.name.equals( constraintName ) ).findFirst().orElse( null ); } @Override public CatalogForeignKey getForeignKey( long tableId, String foreignKeyName ) throws UnknownForeignKeyException { - throw new NotImplementedException(); + return tableForeignKeys.get( tableId ).stream().filter( e -> e.name.equals( foreignKeyName ) ).findFirst().orElse( null ); } @@ -311,19 +336,13 @@ public CatalogIndex getIndex( long indexId ) { @Override - public LogicalTable getLogicalTable( long id ) { - return tables.get( id ); - } - - - @Override - public LogicalTable getLogicalTable( String name ) { + public LogicalTable getTable( String name ) { return tableNames.get( name ); } @Override - public LogicalColumn getLogicalColumn( long id ) { + public LogicalColumn getColumn( long id ) { return columns.get( id ); } diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/SnapshotImpl.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/SnapshotImpl.java index 5235d48ff9..94eb9330cc 100644 --- a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/SnapshotImpl.java +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/SnapshotImpl.java @@ -31,6 +31,7 @@ import org.polypheny.db.catalog.entity.CatalogQueryInterface; import org.polypheny.db.catalog.entity.CatalogUser; import org.polypheny.db.catalog.entity.LogicalNamespace; +import org.polypheny.db.catalog.entity.logical.LogicalEntity; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.exceptions.UnknownAdapterException; import org.polypheny.db.catalog.exceptions.UnknownQueryInterfaceException; @@ -179,7 +180,7 @@ public List getTablesForPeriodicProcessing() { @Override public CatalogEntity getEntity( long id ) { - return relationals.values().stream().map( r -> r.getLogicalTable( id ) ).findFirst().orElse( null ); + return relationals.values().stream().map( r -> r.getTable( id ) ).findFirst().orElse( null ); } @@ -218,4 +219,32 @@ public List getIndexes() { return relationals.values().stream().flatMap( r -> r.getIndexes().stream() ).collect( Collectors.toList() ); } + + @Override + public LogicalEntity getLogicalEntity( long id ) { + LogicalEntity entity = null; + for ( LogicalRelSnapshot value : relationals.values() ) { + entity = value.getTable( id ); + if ( entity != null ) { + return entity; + } + } + + for ( LogicalDocSnapshot value : documents.values() ) { + entity = value.getCollection( id ); + if ( entity != null ) { + return entity; + } + } + + for ( LogicalGraphSnapshot value : graphs.values() ) { + entity = value.getGraph( id ); + if ( entity != null ) { + return entity; + } + } + + return null; + } + } diff --git a/core/src/main/java/org/polypheny/db/interpreter/ScanNode.java b/core/src/main/java/org/polypheny/db/interpreter/ScanNode.java index f37ebd9f5a..920d257dcb 100644 --- a/core/src/main/java/org/polypheny/db/interpreter/ScanNode.java +++ 
b/core/src/main/java/org/polypheny/db/interpreter/ScanNode.java @@ -52,7 +52,7 @@ import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; import org.polypheny.db.algebra.type.AlgDataTypeField; -import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.catalog.Catalog; import org.polypheny.db.plan.AlgOptUtil; import org.polypheny.db.rex.RexNode; import org.polypheny.db.rex.RexUtil; @@ -128,7 +128,7 @@ private static ScanNode createQueryable( Compiler compiler, RelScan alg, Immu final Enumerable rowEnumerable; if ( elementType instanceof Class ) { //noinspection unchecked - final Queryable queryable = (Queryable) Schemas.queryable( root, (Class) elementType, List.of( alg.entity.unwrap( LogicalTable.class ).getNamespaceName(), alg.entity.name ) ); + final Queryable queryable = (Queryable) Schemas.queryable( root, (Class) elementType, List.of( Catalog.getInstance().getSnapshot().getNamespace( alg.entity.namespaceId ).name, alg.entity.name ) ); ImmutableList.Builder fieldBuilder = ImmutableList.builder(); Class type = (Class) elementType; for ( Field field : type.getFields() ) { @@ -150,7 +150,7 @@ private static ScanNode createQueryable( Compiler compiler, RelScan alg, Immu return new Row( values ); } ); } else { - rowEnumerable = Schemas.queryable( root, Row.class, List.of( alg.entity.unwrap( LogicalTable.class ).getNamespaceName(), alg.getEntity().name ) ); + rowEnumerable = Schemas.queryable( root, Row.class, List.of( Catalog.getInstance().getSnapshot().getNamespace( alg.entity.namespaceId ).name, alg.getEntity().name ) ); } return createEnumerable( compiler, alg, rowEnumerable, null, filters, projects ); } diff --git a/core/src/main/java/org/polypheny/db/monitoring/events/StatementEvent.java b/core/src/main/java/org/polypheny/db/monitoring/events/StatementEvent.java index fa43738392..142b3babdd 100644 --- a/core/src/main/java/org/polypheny/db/monitoring/events/StatementEvent.java +++ b/core/src/main/java/org/polypheny/db/monitoring/events/StatementEvent.java @@ -58,7 +58,7 @@ public abstract class StatementEvent extends BaseEvent { protected LogicalQueryInformation logicalQueryInformation; protected String algCompareString; protected String physicalQueryClass; - protected final HashMap> changedValues = new HashMap<>(); + protected final HashMap> changedValues = new HashMap<>(); protected Integer indexSize = null; // Only used for ddl events protected long tableId; diff --git a/core/src/main/java/org/polypheny/db/prepare/AlgOptEntityImpl.java b/core/src/main/java/org/polypheny/db/prepare/AlgOptEntityImpl.java index b45467d50e..8fa9c7de72 100644 --- a/core/src/main/java/org/polypheny/db/prepare/AlgOptEntityImpl.java +++ b/core/src/main/java/org/polypheny/db/prepare/AlgOptEntityImpl.java @@ -172,7 +172,7 @@ public T unwrap( Class clazz ) { } } if ( clazz == PolyphenyDbSchema.class ) { - return clazz.cast( Schemas.subSchema( ((PolyphenyDbCatalogReader) schema).snapshot, List.of( catalogEntity.unwrap( LogicalTable.class ).getNamespaceName(), catalogEntity.name ) ) ); + return clazz.cast( Schemas.subSchema( ((PolyphenyDbCatalogReader) schema).snapshot, List.of( Catalog.getInstance().getSnapshot().getNamespace( catalogEntity.namespaceId ).name, catalogEntity.name ) ) ); } return null; } @@ -189,7 +189,7 @@ public Expression getExpression( Class clazz ) { } else if ( catalogEntity != null ) { return Expressions.call( Expressions.call( Catalog.class, "getInstance" ), - "getLogicalTable", + "getTable", 
Expressions.constant( catalogEntity.id ) ); } @@ -343,7 +343,7 @@ public AlgDataType getRowType() { @Override public List getQualifiedName() { - return List.of( catalogEntity.unwrap( LogicalTable.class ).getNamespaceName(), catalogEntity.name ); + return List.of( Catalog.getInstance().getSnapshot().getNamespace( catalogEntity.namespaceId ).name, catalogEntity.name ); } diff --git a/core/src/main/java/org/polypheny/db/processing/LogicalAlgAnalyzeShuttle.java b/core/src/main/java/org/polypheny/db/processing/LogicalAlgAnalyzeShuttle.java index dabb956157..9100dc7de1 100644 --- a/core/src/main/java/org/polypheny/db/processing/LogicalAlgAnalyzeShuttle.java +++ b/core/src/main/java/org/polypheny/db/processing/LogicalAlgAnalyzeShuttle.java @@ -26,6 +26,7 @@ import java.util.Set; import java.util.stream.Collectors; import lombok.Getter; +import org.apache.commons.lang3.NotImplementedException; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.AlgShuttleImpl; import org.polypheny.db.algebra.core.relational.RelScan; @@ -54,15 +55,15 @@ import org.polypheny.db.algebra.logical.relational.LogicalJoin; import org.polypheny.db.algebra.logical.relational.LogicalMatch; import org.polypheny.db.algebra.logical.relational.LogicalMinus; -import org.polypheny.db.algebra.logical.relational.LogicalRelModify; import org.polypheny.db.algebra.logical.relational.LogicalProject; +import org.polypheny.db.algebra.logical.relational.LogicalRelModify; import org.polypheny.db.algebra.logical.relational.LogicalRelScan; import org.polypheny.db.algebra.logical.relational.LogicalSort; import org.polypheny.db.algebra.logical.relational.LogicalUnion; +import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalTable; -import org.polypheny.db.prepare.AlgOptEntityImpl; import org.polypheny.db.transaction.Statement; @@ -287,6 +288,9 @@ public AlgNode visit( LogicalMatch match ) { @Override public AlgNode visit( RelScan scan ) { + if ( scan.getEntity() == null ) { + throw new RuntimeException(); + } hashBasis.add( "Scan#" + scan.getEntity().id ); // get available columns for every table scan this.getAvailableColumns( scan ); @@ -394,9 +398,9 @@ private void getAvailableColumns( AlgNode scan ) { this.entityId.add( scan.getEntity().id ); final LogicalTable table = scan.getEntity().unwrap( LogicalTable.class ); if ( table != null ) { - final List columns = table.columns; - final List names = table.getColumnNames(); - final String baseName = table.getNamespaceName() + "." + table.name + "."; + final List columns = Catalog.getInstance().getSnapshot().getRelSnapshot( table.namespaceId ).getColumns( table.id ); + final List names = columns.stream().map( c -> c.name ).collect( Collectors.toList() ); + final String baseName = Catalog.getInstance().getSnapshot().getNamespace( table.namespaceId ).name + "." 
+ table.name + "."; for ( int i = 0; i < columns.size(); i++ ) { this.availableColumns.putIfAbsent( columns.get( i ).id, baseName + names.get( i ) ); @@ -418,10 +422,11 @@ private void getPartitioningInfo( LogicalFilter filter ) { private void handleIfPartitioned( AlgNode node, LogicalTable catalogTable ) { // Only if table is partitioned - if ( catalogTable.partitionProperty.isPartitioned ) { + throw new NotImplementedException(); + /*if ( Catalog.getInstance().getSnapshot().getAllocSnapshot().isPartitioned( catalogTable.id ) ) { WhereClauseVisitor whereClauseVisitor = new WhereClauseVisitor( statement, - catalogTable.columns.stream().map( c -> c.id ).collect( Collectors.toList()).indexOf( catalogTable.partitionProperty.partitionColumnId ) ); + catalogTable.columns.stream().map( c -> c.id ).collect( Collectors.toList() ).indexOf( catalogTable.partitionProperty.partitionColumnId ) ); node.accept( whereClauseVisitor ); int scanId = node.getInput( 0 ).getId(); @@ -437,7 +442,7 @@ private void handleIfPartitioned( AlgNode node, LogicalTable catalogTable ) { .collect( Collectors.toSet() ) ); } } - } + }*/ } diff --git a/core/src/main/java/org/polypheny/db/rex/RexTableInputRef.java b/core/src/main/java/org/polypheny/db/rex/RexTableInputRef.java index e3a9e87bcb..66640f400c 100644 --- a/core/src/main/java/org/polypheny/db/rex/RexTableInputRef.java +++ b/core/src/main/java/org/polypheny/db/rex/RexTableInputRef.java @@ -39,9 +39,8 @@ import org.polypheny.db.algebra.metadata.BuiltInMetadata.AllPredicates; import org.polypheny.db.algebra.metadata.BuiltInMetadata.ExpressionLineage; import org.polypheny.db.algebra.type.AlgDataType; +import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogEntity; -import org.polypheny.db.catalog.entity.logical.LogicalTable; -import org.polypheny.db.plan.AlgOptEntity; /** @@ -164,7 +163,7 @@ public CatalogEntity getTable() { public List getQualifiedName() { - return List.of( table.unwrap( LogicalTable.class ).getNamespaceName(), table.name ); + return List.of( Catalog.getInstance().getSnapshot().getNamespace( table.namespaceId ).name, table.name ); } diff --git a/core/src/main/java/org/polypheny/db/schema/Schemas.java b/core/src/main/java/org/polypheny/db/schema/Schemas.java index 12c1bda439..bf1ba6a129 100644 --- a/core/src/main/java/org/polypheny/db/schema/Schemas.java +++ b/core/src/main/java/org/polypheny/db/schema/Schemas.java @@ -206,7 +206,7 @@ public static Queryable queryable( DataContext root, Class clazz, Iter */ public static Queryable queryable( DataContext root, Snapshot snapshot, Class clazz, String tableName ) { //QueryableEntity table = (QueryableEntity) schema.getEntity( tableName ); - LogicalTable table = snapshot.getRelSnapshot( 0 ).getLogicalTable( tableName ); + LogicalTable table = snapshot.getRelSnapshot( 0 ).getTable( tableName ); return table.unwrap( QueryableEntity.class ).asQueryable( root, snapshot, table.id ); } diff --git a/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java b/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java index 3f35fc213b..fc55c0c00c 100644 --- a/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java +++ b/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java @@ -1330,7 +1330,7 @@ public RexNode patternExclude( RexNode node ) { public AlgBuilder scan( List tableNames ) { final List names = ImmutableList.copyOf( tableNames ); LogicalNamespace namespace = snapshot.getNamespace( tableNames.get( 0 ) ); - final LogicalTable entity = snapshot.getRelSnapshot( namespace.id 
).getLogicalTable( names.get( 1 ) ); + final LogicalTable entity = snapshot.getRelSnapshot( namespace.id ).getTable( names.get( 1 ) ); if ( entity == null ) { throw RESOURCE.tableNotFound( String.join( ".", names ) ).ex(); } diff --git a/core/src/main/java/org/polypheny/db/view/MaterializedViewManager.java b/core/src/main/java/org/polypheny/db/view/MaterializedViewManager.java index 28971b1831..1cfb46c918 100644 --- a/core/src/main/java/org/polypheny/db/view/MaterializedViewManager.java +++ b/core/src/main/java/org/polypheny/db/view/MaterializedViewManager.java @@ -29,7 +29,6 @@ import org.polypheny.db.catalog.entity.CatalogMaterializedView; import org.polypheny.db.catalog.entity.MaterializedCriteria; import org.polypheny.db.catalog.entity.logical.LogicalColumn; -import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.transaction.PolyXid; import org.polypheny.db.transaction.Transaction; @@ -68,7 +67,7 @@ public abstract void addData( AlgRoot algRoot, CatalogMaterializedView materializedView ); - public abstract void addTables( Transaction transaction, List names ); + public abstract void addTables( Transaction transaction, List ids ); public abstract void updateData( Transaction transaction, Long viewId ); @@ -80,21 +79,19 @@ public abstract void addData( /** - * to trek updates on tables for materialized views with update freshness + * to track updates on tables for materialized views with update freshness */ public static class TableUpdateVisitor extends AlgShuttleImpl { @Getter - private final List names = new ArrayList<>(); + private final List ids = new ArrayList<>(); @Override public AlgNode visit( LogicalRelModify modify ) { if ( modify.getOperation() != Modify.Operation.MERGE ) { if ( (modify.getEntity() != null) ) { - LogicalTable table = modify.getEntity().unwrap( LogicalTable.class ); - names.add( table.getNamespaceName() ); - names.add( table.name ); + ids.add( modify.getEntity().id ); } } return super.visit( modify ); diff --git a/core/src/test/java/org/polypheny/db/catalog/MockCatalog.java b/core/src/test/java/org/polypheny/db/catalog/MockCatalog.java index af9f96e0b5..a4c295f04c 100644 --- a/core/src/test/java/org/polypheny/db/catalog/MockCatalog.java +++ b/core/src/test/java/org/polypheny/db/catalog/MockCatalog.java @@ -32,7 +32,6 @@ import org.polypheny.db.catalog.entity.CatalogDatabase; import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalEntity; -import org.polypheny.db.catalog.entity.physical.PhysicalEntity; import org.polypheny.db.catalog.exceptions.NoTablePrimaryKeyException; import org.polypheny.db.catalog.exceptions.UnknownSchemaException; import org.polypheny.db.catalog.logistic.NamespaceType; @@ -90,12 +89,6 @@ public AllocationGraphCatalog getAllocGraph( long namespaceId ) { } - @Override - public LogicalEntity getLogicalEntity( String entityName ) { - throw new NotImplementedException(); - } - - @Override public LogicalEntity getLogicalEntity( long id ) { throw new NotImplementedException(); @@ -108,12 +101,6 @@ public PhysicalCatalog getPhysical( long namespaceId ) { } - @Override - public PhysicalEntity getPhysicalEntity( long id ) { - throw new NotImplementedException(); - } - - @Override public void addObserver( PropertyChangeListener listener ) { super.addObserver( listener ); diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java index 22212063ef..ab46788884 100644 --- 
a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java @@ -64,7 +64,6 @@ import org.polypheny.db.catalog.entity.CatalogMaterializedView; import org.polypheny.db.catalog.entity.CatalogPartitionGroup; import org.polypheny.db.catalog.entity.CatalogPrimaryKey; -import org.polypheny.db.catalog.entity.CatalogUser; import org.polypheny.db.catalog.entity.CatalogView; import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.MaterializedCriteria; @@ -272,7 +271,7 @@ private void handleSource( DataSource adapter ) { exportedColumn.cardinality, exportedColumn.nullable, Collation.getDefaultCollation() ); - catalog.getAllocRel( defaultNamespaceId ).addColumnPlacement( catalog.getLogicalEntity( tableId ).unwrap( LogicalTable.class ), + catalog.getAllocRel( defaultNamespaceId ).addColumnPlacement( catalog.getSnapshot().getRelSnapshot( defaultNamespaceId ).getTable( tableId ), adapter.getAdapterId(), columnId, PlacementType.STATIC, @@ -1630,8 +1629,7 @@ public void dropColumnPlacement( LogicalTable catalogTable, String columnName, D @Override public void alterTableOwner( LogicalTable catalogTable, String newOwnerName ) throws UnknownUserException { - CatalogUser catalogUser = catalog.getSnapshot().getUser( newOwnerName ); - catalog.getLogicalRel( catalogTable.namespaceId ).setTableOwner( catalogTable.id, catalogUser.id ); + throw new UnsupportedOperationException(); } @@ -1685,7 +1683,7 @@ public void createView( String viewName, long namespaceId, AlgNode algNode, AlgC if ( replace ) { try { dropView( catalog.getSnapshot().getRelSnapshot( namespaceId ).getTable( viewName ), statement ); - } catch ( UnknownTableException | DdlOnSourceException e ) { + } catch ( DdlOnSourceException e ) { throw new RuntimeException( "Unable to drop the existing View with this name." 
); } } else { @@ -1815,7 +1813,7 @@ public void createMaterializedView( String viewName, long namespaceId, AlgRoot a for ( DataStore s : stores ) { long adapterId = s.getAdapterId(); - catalog.getAllocRel( namespaceId ).addColumnPlacement( catalog.getLogicalEntity( tableId ).unwrap( LogicalTable.class ), + catalog.getAllocRel( namespaceId ).addColumnPlacement( catalog.getSnapshot().getRelSnapshot( namespaceId ).getTable( tableId ), s.getAdapterId(), columnId, placementType, @@ -2853,7 +2851,7 @@ private void addColumn( long namespaceId, String columnName, ColumnTypeInformati addDefaultValue( namespaceId, defaultValue, addedColumnId ); for ( DataStore s : stores ) { - catalog.getAllocRel( namespaceId ).addColumnPlacement( catalog.getLogicalEntity( tableId ).unwrap( LogicalTable.class ), + catalog.getAllocRel( namespaceId ).addColumnPlacement( catalog.getSnapshot().getRelSnapshot( namespaceId ).getTable( tableId ), s.getAdapterId(), addedColumnId, placementType, diff --git a/dbms/src/main/java/org/polypheny/db/processing/AbstractQueryProcessor.java b/dbms/src/main/java/org/polypheny/db/processing/AbstractQueryProcessor.java index 7dc0ca59f7..acea08eddf 100644 --- a/dbms/src/main/java/org/polypheny/db/processing/AbstractQueryProcessor.java +++ b/dbms/src/main/java/org/polypheny/db/processing/AbstractQueryProcessor.java @@ -93,7 +93,6 @@ import org.polypheny.db.monitoring.events.DmlEvent; import org.polypheny.db.monitoring.events.QueryEvent; import org.polypheny.db.monitoring.events.StatementEvent; -import org.polypheny.db.partition.PartitionManagerFactory; import org.polypheny.db.plan.AlgOptCost; import org.polypheny.db.plan.AlgOptUtil; import org.polypheny.db.plan.AlgTraitSet; @@ -291,7 +290,7 @@ private ProposedImplementations prepareQueryList( AlgRoot logicalRoot, AlgDataTy // Update which tables where changed used for Materialized Views TableUpdateVisitor visitor = new TableUpdateVisitor(); logicalRoot.alg.accept( visitor ); - MaterializedViewManager.getInstance().addTables( statement.getTransaction(), visitor.getNames() ); + MaterializedViewManager.getInstance().addTables( statement.getTransaction(), visitor.getIds() ); if ( isAnalyze ) { statement.getProcessingDuration().stop( "Expand Views" ); @@ -768,7 +767,7 @@ public AlgNode visit( AlgNode node ) { // .collect( Collectors.toList() ); // } // final {@link AlgNode} replacement = LogicalModify.create( -// ltm.getLogicalTable(), +// ltm.getTable(), // transaction.getCatalogReader(), // newProject, // ltm.getOperation(), @@ -1317,18 +1316,19 @@ private Map> getAccessedPartitionsPerScan( AlgNode alg, Map< // Get placements of this table LogicalTable catalogTable = table.unwrap( LogicalTable.class ); - - if ( aggregatedPartitionValues.containsKey( scanId ) ) { + // todo dl + /*if ( aggregatedPartitionValues.containsKey( scanId ) ) { if ( aggregatedPartitionValues.get( scanId ) != null ) { if ( !aggregatedPartitionValues.get( scanId ).isEmpty() ) { List partitionValues = new ArrayList<>( aggregatedPartitionValues.get( scanId ) ); if ( log.isDebugEnabled() ) { - log.debug( + /*log.debug( "TableID: {} is partitioned on column: {} - {}", catalogTable.id, catalogTable.partitionProperty.partitionColumnId, Catalog.getInstance().getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getColumn( catalogTable.partitionProperty.partitionColumnId ).name ); + } List identifiedPartitions = new ArrayList<>(); for ( String partitionValue : partitionValues ) { @@ -1367,7 +1367,7 @@ private Map> getAccessedPartitionsPerScan( AlgNode alg, Map< 
catalogTable.partitionProperty.partitionIds, ( l1, l2 ) -> Stream.concat( l1.stream(), l2.stream() ).collect( Collectors.toList() ) ); scanPerTable.putIfAbsent( scanId, catalogTable.id ); - } + }*/ } } diff --git a/dbms/src/main/java/org/polypheny/db/processing/ConstraintEnforceAttacher.java b/dbms/src/main/java/org/polypheny/db/processing/ConstraintEnforceAttacher.java index b35a00d38e..f4d11da11d 100644 --- a/dbms/src/main/java/org/polypheny/db/processing/ConstraintEnforceAttacher.java +++ b/dbms/src/main/java/org/polypheny/db/processing/ConstraintEnforceAttacher.java @@ -331,7 +331,7 @@ public static AlgRoot enforceConstraintBeforeQuery( AlgRoot logicalRoot, Stateme final RexBuilder rexBuilder = root.getCluster().getRexBuilder(); for ( final CatalogForeignKey foreignKey : foreignKeys ) { - final LogicalTable entity = statement.getDataContext().getSnapshot().getRelSnapshot( foreignKey.getNamespaceId() ).getLogicalTable( foreignKey.referencedKeyTableId ); + final LogicalTable entity = statement.getDataContext().getSnapshot().getRelSnapshot( foreignKey.getNamespaceId() ).getTable( foreignKey.referencedKeyTableId ); final LogicalRelScan scan = LogicalRelScan.create( root.getCluster(), entity ); RexNode joinCondition = rexBuilder.makeLiteral( true ); builder.push( input ); diff --git a/dbms/src/main/java/org/polypheny/db/routing/UiRoutingPageUtil.java b/dbms/src/main/java/org/polypheny/db/routing/UiRoutingPageUtil.java index 14beb2034b..971d6d313d 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/UiRoutingPageUtil.java +++ b/dbms/src/main/java/org/polypheny/db/routing/UiRoutingPageUtil.java @@ -99,7 +99,7 @@ private static void addSelectedAdapterTable( InformationManager queryAnalyzer, P proposedRoutingPlan.getPhysicalPlacementsOfPartitions().forEach( ( k, v ) -> { CatalogPartition catalogPartition = snapshot.getAllocSnapshot().getPartition( k ); - LogicalTable catalogTable = Catalog.getInstance().getLogicalEntity( catalogPartition.tableId ).unwrap( LogicalTable.class ); + LogicalTable catalogTable = Catalog.getInstance().getSnapshot().getLogicalEntity( catalogPartition.tableId ).unwrap( LogicalTable.class ); CatalogPartitionGroup catalogPartitionGroup = snapshot.getAllocSnapshot().getPartitionGroup( catalogPartition.partitionGroupId ); v.forEach( p -> { diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/AbstractDqlRouter.java b/dbms/src/main/java/org/polypheny/db/routing/routers/AbstractDqlRouter.java index 545d260455..14e885de42 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/AbstractDqlRouter.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/AbstractDqlRouter.java @@ -248,7 +248,7 @@ private List handleRelationalOnGraphScan( AlgNode node, Statem AlgBuilder algBuilder = AlgBuilder.create( statement ); RexBuilder rexBuilder = algBuilder.getRexBuilder(); - algBuilder.lpgScan( catalog.getSnapshot().getNamespaces( new Pattern( logicalTable.getLogicalSchemaName() ) ).get( 0 ).id ); + algBuilder.lpgScan( statement.getTransaction().getSnapshot().getNamespaces( new Pattern( logicalTable.getLogicalSchemaName() ) ).get( 0 ).id ); algBuilder.lpgMatch( List.of( algBuilder.lpgNodeMatch( List.of( logicalTable.getLogicalTableName() ) ) ), List.of( "n" ) ); algBuilder.lpgProject( List.of( rexBuilder.makeLpgGetId(), rexBuilder.makeLpgPropertiesExtract(), rexBuilder.makeLpgLabels() ), diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java b/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java index 
8f72df8ed6..779f87b97c 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java @@ -69,7 +69,6 @@ import org.polypheny.db.catalog.entity.physical.PhysicalTable; import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.catalog.refactor.TranslatableEntity; -import org.polypheny.db.catalog.snapshot.AllocSnapshot; import org.polypheny.db.catalog.snapshot.LogicalRelSnapshot; import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.config.RuntimeConfig; @@ -100,8 +99,7 @@ public abstract class BaseRouter implements Router { .maximumSize( RuntimeConfig.JOINED_TABLE_SCAN_CACHE_SIZE.getInteger() ) .build(); - final static Catalog catalog = Catalog.getInstance(); - private static AllocSnapshot allocSnapshot; + final static Snapshot snapshot = Catalog.getInstance().getSnapshot(); static { @@ -109,9 +107,6 @@ public abstract class BaseRouter implements Router { } - private LogicalRelSnapshot snapshot; - - /** * Execute the table scan on the first placement of a table */ @@ -129,11 +124,10 @@ protected static Map> selectPlacement( Logica // Take the adapter with most placements as base and add missing column placements List placementList = new LinkedList<>(); for ( LogicalColumn column : table.columns ) { - allocSnapshot = Catalog.getInstance().getSnapshot().getAllocSnapshot(); - if ( allocSnapshot.getDataPlacement( adapterIdWithMostPlacements, table.id ).columnPlacementsOnAdapter.contains( column.id ) ) { - placementList.add( allocSnapshot.getColumnPlacements( column.id ).get( 0 ) ); + if ( snapshot.getAllocSnapshot().getDataPlacement( adapterIdWithMostPlacements, table.id ).columnPlacementsOnAdapter.contains( column.id ) ) { + placementList.add( snapshot.getAllocSnapshot().getColumnPlacements( column.id ).get( 0 ) ); } else { - placementList.add( allocSnapshot.getColumnPlacements( column.id ).get( 0 ) ); + placementList.add( snapshot.getAllocSnapshot().getColumnPlacements( column.id ).get( 0 ) ); } } @@ -182,8 +176,8 @@ public RoutedAlgBuilder handleScan( Statement statement, long partitionId ) { - PhysicalEntity physical = catalog.getPhysicalEntity( partitionId ); - AlgNode node = builder.scan( catalog.getPhysicalEntity( partitionId ) ).build(); + PhysicalEntity physical = snapshot.getPhysicalSnapshot().getPhysicalTable( partitionId ); + AlgNode node = builder.scan( physical ).build(); builder.push( node ); @@ -294,12 +288,12 @@ public AlgNode buildJoinedScan( Statement statement, AlgOptCluster cluster, Map< // We need to join placements on different adapters // Get primary key - snapshot = catalog.getSnapshot().getRelSnapshot( currentPlacements.get( 0 ).namespaceId ); - long pkid = snapshot.getTable( currentPlacements.get( 0 ).tableId ).primaryKey; - List pkColumnIds = snapshot.getPrimaryKey( pkid ).columnIds; + LogicalRelSnapshot relSnapshot = snapshot.getRelSnapshot( currentPlacements.get( 0 ).namespaceId ); + long pkid = relSnapshot.getTable( currentPlacements.get( 0 ).tableId ).primaryKey; + List pkColumnIds = relSnapshot.getPrimaryKey( pkid ).columnIds; List pkColumns = new LinkedList<>(); for ( long pkColumnId : pkColumnIds ) { - pkColumns.add( snapshot.getColumn( pkColumnId ) ); + pkColumns.add( relSnapshot.getColumn( pkColumnId ) ); } // Add primary key @@ -364,7 +358,7 @@ public AlgNode buildJoinedScan( Statement statement, AlgOptCluster cluster, Map< CatalogColumnPlacement placement = new ArrayList<>( placements.values() ).get( 0 ).get( 0 ); // todo 
dl: remove after RowType refactor - if ( catalog.getSnapshot().getNamespace( placement.namespaceId ).namespaceType == NamespaceType.DOCUMENT ) { + if ( snapshot.getNamespace( placement.namespaceId ).namespaceType == NamespaceType.DOCUMENT ) { AlgDataType rowType = new AlgRecordType( List.of( new AlgDataTypeFieldImpl( "d", 0, cluster.getTypeFactory().createPolyType( PolyType.DOCUMENT ) ) ) ); builder.push( new LogicalTransformer( node.getCluster(), @@ -385,7 +379,7 @@ public AlgNode buildJoinedScan( Statement statement, AlgOptCluster cluster, Map< private void buildFinalProject( RoutedAlgBuilder builder, List currentPlacements ) { List rexNodes = new ArrayList<>(); List placementList = currentPlacements.stream() - .map( col -> catalog.getSnapshot().getRelSnapshot( currentPlacements.get( 0 ).namespaceId ).getColumn( col.columnId ) ) + .map( col -> snapshot.getRelSnapshot( currentPlacements.get( 0 ).namespaceId ).getColumn( col.columnId ) ) .sorted( Comparator.comparingInt( col -> col.position ) ) .collect( Collectors.toList() ); for ( LogicalColumn logicalColumn : placementList ) { @@ -439,7 +433,7 @@ public AlgNode handleGraphScan( LogicalLpgScan alg, Statement statement, @Nullab private AlgNode handleGraphOnRelational( LogicalLpgScan alg, CatalogNamespace namespace, Statement statement, Long placementId ) { AlgOptCluster cluster = alg.getCluster(); - List tables = catalog.getSnapshot().getRelSnapshot( namespace.id ).getTables( null ); + List tables = snapshot.getRelSnapshot( namespace.id ).getTables( null ); List> scans = tables.stream() .map( t -> Pair.of( t.name, buildJoinedScan( statement, cluster, selectPlacement( t ) ) ) ) .collect( Collectors.toList() ); @@ -453,7 +447,7 @@ private AlgNode handleGraphOnRelational( LogicalLpgScan alg, CatalogNamespace na private AlgNode handleGraphOnDocument( LogicalLpgScan alg, CatalogNamespace namespace, Statement statement, Long placementId ) { AlgOptCluster cluster = alg.getCluster(); - List collections = catalog.getSnapshot().getDocSnapshot( namespace.id ).getCollections( null ); + List collections = snapshot.getDocSnapshot( namespace.id ).getCollections( null ); List> scans = collections.stream() .map( t -> { RoutedAlgBuilder algBuilder = RoutedAlgBuilder.create( statement, alg.getCluster() ); @@ -473,10 +467,10 @@ private AlgNode handleGraphOnDocument( LogicalLpgScan alg, CatalogNamespace name public AlgNode getRelationalScan( LogicalLpgScan alg, long adapterId, Statement statement ) { /*CatalogGraphMapping mapping = Catalog.getInstance().getLogicalGraph( alg.entity.namespaceId ).getGraphMapping( alg.entity.id ); - PhysicalTable nodesTable = statement.getDataContext().getSnapshot().getLogicalTable( mapping.nodesId ).unwrap( PhysicalTable.class ); - PhysicalTable nodePropertiesTable = statement.getDataContext().getSnapshot().getLogicalTable( mapping.nodesPropertyId ).unwrap( PhysicalTable.class ); - PhysicalTable edgesTable = statement.getDataContext().getSnapshot().getLogicalTable( mapping.edgesId ).unwrap( PhysicalTable.class ); - PhysicalTable edgePropertiesTable = statement.getDataContext().getSnapshot().getLogicalTable( mapping.edgesPropertyId ).unwrap( PhysicalTable.class ); + PhysicalTable nodesTable = statement.getDataContext().getSnapshot().getTable( mapping.nodesId ).unwrap( PhysicalTable.class ); + PhysicalTable nodePropertiesTable = statement.getDataContext().getSnapshot().getTable( mapping.nodesPropertyId ).unwrap( PhysicalTable.class ); + PhysicalTable edgesTable = statement.getDataContext().getSnapshot().getTable( mapping.edgesId 
).unwrap( PhysicalTable.class ); + PhysicalTable edgePropertiesTable = statement.getDataContext().getSnapshot().getTable( mapping.edgesPropertyId ).unwrap( PhysicalTable.class ); AlgNode node = buildSubstitutionJoin( alg, nodesTable, nodePropertiesTable ); @@ -499,7 +493,7 @@ protected CatalogEntity getSubstitutionTable( Statement statement, long tableId, ), nodes.name + "_" + nodes.partitionProperty.partitionIds.get( 0 ) ); - return statement.getDataContext().getSnapshot().getLogicalTable( qualifiedTableName ); + return statement.getDataContext().getSnapshot().getTable( qualifiedTableName ); */ // todo dl return null; } @@ -542,7 +536,7 @@ protected RoutedAlgBuilder handleDocumentScan( DocumentScan alg, Statement st } for ( Long placementId : placements ) { - CatalogAdapter adapter = catalog.getSnapshot().getAdapter( placementId ); + CatalogAdapter adapter = snapshot.getAdapter( placementId ); NamespaceType sourceModel = collection.namespaceType; if ( !adapter.supportedNamespaces.contains( sourceModel ) ) { diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java b/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java index 65417106f0..e076a318e0 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java @@ -133,7 +133,7 @@ public AlgNode routeDml( LogicalRelModify modify, Statement statement ) { } LogicalTable catalogTable = modify.getEntity().unwrap( LogicalTable.class ); - Snapshot snapshot = catalog.getSnapshot(); + Snapshot snapshot = statement.getTransaction().getSnapshot(); // Get placements of this table diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/FullPlacementQueryRouter.java b/dbms/src/main/java/org/polypheny/db/routing/routers/FullPlacementQueryRouter.java index 7109e15fb3..f72e049e19 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/FullPlacementQueryRouter.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/FullPlacementQueryRouter.java @@ -143,7 +143,7 @@ protected Set> selectPlacement( LogicalTable catalo List usedColumns = queryInformation.getAllColumnsPerTable( catalogTable.id ); // Filter for placements by adapters - List adapters = catalog.getSnapshot().getAllocSnapshot().getColumnPlacementsByAdapter( catalogTable.id ).entrySet() + List adapters = snapshot.getAllocSnapshot().getColumnPlacementsByAdapter( catalogTable.id ).entrySet() .stream() .filter( elem -> elem.getValue().containsAll( usedColumns ) ) .map( Entry::getKey ) @@ -152,7 +152,7 @@ protected Set> selectPlacement( LogicalTable catalo final Set> result = new HashSet<>(); for ( long adapterId : adapters ) { List placements = usedColumns.stream() - .map( colId -> catalog.getSnapshot().getAllocSnapshot().getColumnPlacement( adapterId, colId ) ) + .map( colId -> snapshot.getAllocSnapshot().getColumnPlacement( adapterId, colId ) ) .collect( Collectors.toList() ); if ( !placements.isEmpty() ) { diff --git a/dbms/src/main/java/org/polypheny/db/view/MaterializedViewManagerImpl.java b/dbms/src/main/java/org/polypheny/db/view/MaterializedViewManagerImpl.java index 21378e5717..80ac556273 100644 --- a/dbms/src/main/java/org/polypheny/db/view/MaterializedViewManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/view/MaterializedViewManagerImpl.java @@ -51,7 +51,6 @@ import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.exceptions.GenericCatalogException; import 
org.polypheny.db.catalog.exceptions.UnknownSchemaException; -import org.polypheny.db.catalog.exceptions.UnknownTableException; import org.polypheny.db.catalog.exceptions.UnknownUserException; import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.snapshot.Snapshot; @@ -112,7 +111,7 @@ public MaterializedViewManagerImpl( TransactionManager transactionManager ) { public synchronized Map updateMaterializedViewInfo() { List toRemove = new ArrayList<>(); for ( Long id : materializedInfo.keySet() ) { - if ( Catalog.getInstance().getLogicalEntity( id ) == null ) { + if ( Catalog.getInstance().getSnapshot().getLogicalEntity( id ) == null ) { toRemove.add( id ); } } @@ -180,16 +179,12 @@ public synchronized void addMaterializedInfo( Long materializedId, MaterializedC @Override public void addTables( Transaction transaction, List tableNames ) { if ( tableNames.size() > 1 ) { - try { - snapshot = Catalog.getInstance().getSnapshot(); - LogicalNamespace namespace = snapshot.getNamespace( tableNames.get( 0 ) ); - LogicalTable catalogTable = snapshot.getRelSnapshot( namespace.id ).getTable( tableNames.get( 1 ) ); - long id = catalogTable.id; - if ( !catalogTable.getConnectedViews().isEmpty() ) { - updateCandidates.put( transaction.getXid(), id ); - } - } catch ( UnknownTableException e ) { - throw new RuntimeException( "Not possible to getLogicalTable to update which Tables were changed.", e ); + snapshot = Catalog.getInstance().getSnapshot(); + LogicalNamespace namespace = snapshot.getNamespace( tableNames.get( 0 ) ); + LogicalTable catalogTable = snapshot.getRelSnapshot( namespace.id ).getTable( tableNames.get( 1 ) ); + long id = catalogTable.id; + if ( !catalogTable.getConnectedViews().isEmpty() ) { + updateCandidates.put( transaction.getXid(), id ); } } } @@ -277,7 +272,7 @@ private void updatingIntervalMaterialized() { */ public void prepareToUpdate( Long materializedId ) { Catalog catalog = Catalog.getInstance(); - LogicalTable catalogTable = catalog.getLogicalEntity( materializedId ).unwrap( LogicalTable.class ); + LogicalTable catalogTable = catalog.getSnapshot().getLogicalEntity( materializedId ).unwrap( LogicalTable.class ); try { Transaction transaction = getTransactionManager().startTransaction( @@ -347,8 +342,8 @@ public void updateData( Transaction transaction, Long materializedId ) { Map> columns = new HashMap<>(); List ids = new ArrayList<>(); - if ( catalog.getLogicalEntity( materializedId ) != null && materializedInfo.containsKey( materializedId ) ) { - CatalogMaterializedView catalogMaterializedView = catalog.getLogicalEntity( materializedId ).unwrap( CatalogMaterializedView.class ); + if ( catalog.getSnapshot().getLogicalEntity( materializedId ) != null && materializedInfo.containsKey( materializedId ) ) { + CatalogMaterializedView catalogMaterializedView = catalog.getSnapshot().getLogicalEntity( materializedId ).unwrap( CatalogMaterializedView.class ); for ( long id : catalogMaterializedView.dataPlacements ) { ids.add( id ); List logicalColumns = new ArrayList<>(); diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/events/metrics/DmlDataPoint.java b/monitoring/src/main/java/org/polypheny/db/monitoring/events/metrics/DmlDataPoint.java index f99080a156..ec9ed34272 100644 --- a/monitoring/src/main/java/org/polypheny/db/monitoring/events/metrics/DmlDataPoint.java +++ b/monitoring/src/main/java/org/polypheny/db/monitoring/events/metrics/DmlDataPoint.java @@ -58,7 +58,7 @@ public class DmlDataPoint implements MonitoringDataPoint, Serializable { 
private String queryClass; private String physicalQueryClass; @Builder.Default - private final HashMap> changedValues = new HashMap<>(); + private final HashMap> changedValues = new HashMap<>(); @Builder.Default private final Map availableColumnsWithTable = new HashMap<>(); diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticColumn.java b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticColumn.java index 36a1cbf843..a5d69a10b9 100644 --- a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticColumn.java +++ b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticColumn.java @@ -87,7 +87,7 @@ public StatisticColumn( long schemaId, long tableId, long columnId, PolyType typ this.columnType = columnType; LogicalRelSnapshot snapshot = Catalog.getInstance().getSnapshot().getRelSnapshot( schemaId ); - if ( snapshot.getLogicalTable( tableId ) != null ) { + if ( snapshot.getTable( tableId ) != null ) { this.schema = Catalog.getInstance().getSnapshot().getNamespace( schemaId ).name; this.table = snapshot.getTable( tableId ).name; this.column = snapshot.getColumn( columnId ).name; diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticRepository.java b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticRepository.java index 5b610ce9d6..dc048eb3b1 100644 --- a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticRepository.java +++ b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticRepository.java @@ -86,7 +86,7 @@ private void updateQueryStatistics( QueryDataPointImpl dataPoint, StatisticsMana if ( isOneTable ) { long tableId = values.stream().findFirst().get(); - if ( catalog.getLogicalEntity( tableId ) != null ) { + if ( catalog.getSnapshot().getLogicalEntity( tableId ) != null ) { statisticsManager.setTableCalls( tableId, dataPoint.getMonitoringType() ); // RowCount from UI is only used if there is no other possibility @@ -100,7 +100,7 @@ private void updateQueryStatistics( QueryDataPointImpl dataPoint, StatisticsMana } } else { for ( long id : values ) { - if ( catalog.getLogicalEntity( id ) != null ) { + if ( catalog.getSnapshot().getLogicalEntity( id ) != null ) { statisticsManager.setTableCalls( id, dataPoint.getMonitoringType() ); } } @@ -119,14 +119,14 @@ private void updateDmlStatistics( DmlDataPoint dataPoint, StatisticsManager stat long tableId = values.stream().findFirst().get(); statisticsManager.setTableCalls( tableId, dataPoint.getMonitoringType() ); - if ( catalog.getLogicalEntity( tableId ) != null ) { + if ( catalog.getSnapshot().getLogicalEntity( tableId ) != null ) { if ( dataPoint.getMonitoringType().equals( "INSERT" ) ) { int added = dataPoint.getRowCount(); statisticsManager.tablesToUpdate( tableId, dataPoint.getChangedValues(), dataPoint.getMonitoringType(), - catalog.getLogicalEntity( tableId ).namespaceId ); + catalog.getSnapshot().getLogicalEntity( tableId ).namespaceId ); statisticsManager.updateRowCountPerTable( tableId, added, dataPoint.getMonitoringType() ); } else if ( dataPoint.getMonitoringType().equals( "DELETE" ) ) { int deleted = dataPoint.getRowCount(); @@ -137,7 +137,7 @@ private void updateDmlStatistics( DmlDataPoint dataPoint, StatisticsManager stat } } else { for ( long id : values ) { - if ( catalog.getLogicalEntity( id ) != null ) { + if ( catalog.getSnapshot().getLogicalEntity( id ) != null ) { statisticsManager.setTableCalls( id, dataPoint.getMonitoringType() ); } 
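
The statistics and monitoring hunks above all converge on one defensive pattern: resolve an entity id through the current catalog snapshot and skip the update when the id no longer resolves, since the table may have been dropped between recording a data point and processing it. A minimal stand-alone sketch of that guard follows; it uses a plain Map as a stand-in for the snapshot, and SnapshotGuardSketch and its setTableCalls helper are hypothetical names, not part of the patch:

import java.util.HashMap;
import java.util.Map;

public class SnapshotGuardSketch {

    // Stand-in for Catalog.getInstance().getSnapshot(): entity id -> entity name
    private static final Map<Long, String> SNAPSHOT = new HashMap<>( Map.of( 7L, "emps" ) );

    // Mirrors the setTableCalls-style updates above: only touch statistics for ids
    // that still resolve in the snapshot; dropped tables are skipped silently.
    static void setTableCalls( long tableId, String monitoringType ) {
        if ( SNAPSHOT.get( tableId ) == null ) {
            return; // entity was dropped since the data point was recorded
        }
        System.out.println( "update " + SNAPSHOT.get( tableId ) + " for " + monitoringType );
    }

    public static void main( String[] args ) {
        setTableCalls( 7L, "INSERT" );  // prints: update emps for INSERT
        setTableCalls( 99L, "DELETE" ); // no-op: id does not resolve
    }
}

Checking for null instead of catching a lookup exception is what lets the callers above (updateQueryStatistics, updateDmlStatistics, the StatisticColumn constructor) drop their old try/catch blocks.
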
diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticTable.java b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticTable.java index 783cbe18a1..4a36e200ba 100644 --- a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticTable.java +++ b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticTable.java @@ -23,9 +23,9 @@ import lombok.Getter; import lombok.Setter; import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.logistic.NamespaceType; -import org.polypheny.db.catalog.entity.logical.LogicalTable; /** @@ -75,8 +75,8 @@ public StatisticTable( Long tableId ) { this.tableId = tableId; Catalog catalog = Catalog.getInstance(); - if ( catalog.getLogicalEntity( tableId ) != null ) { - LogicalTable catalogTable = catalog.getLogicalEntity( tableId ).unwrap( LogicalTable.class ); + if ( catalog.getSnapshot().getLogicalEntity( tableId ) != null ) { + LogicalTable catalogTable = catalog.getSnapshot().getLogicalEntity( tableId ).unwrap( LogicalTable.class ); this.table = catalogTable.name; this.namespaceType = catalogTable.getNamespaceType(); this.dataPlacements = catalogTable.dataPlacements; diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticsManagerImpl.java b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticsManagerImpl.java index ce1deed7e5..9239925eef 100644 --- a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticsManagerImpl.java +++ b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticsManagerImpl.java @@ -17,8 +17,6 @@ package org.polypheny.db.monitoring.statistics; -import com.google.common.collect.ImmutableList; -import com.google.common.collect.Lists; import java.beans.PropertyChangeEvent; import java.beans.PropertyChangeSupport; import java.math.BigDecimal; @@ -276,7 +274,7 @@ private void reevaluateRowCount() { log.debug( "Reevaluate Row Count." ); statisticQueryInterface.getAllTable().forEach( table -> { - int rowCount = getNumberColumnCount( this.prepareNode( new QueryResult( Catalog.getInstance().getLogicalEntity( table.id ), null ), NodeType.ROW_COUNT_TABLE ) ); + int rowCount = getNumberColumnCount( this.prepareNode( new QueryResult( Catalog.getInstance().getSnapshot().getLogicalEntity( table.id ), null ), NodeType.ROW_COUNT_TABLE ) ); updateRowCountPerTable( table.id, rowCount, "SET-ROW-COUNT" ); } ); } @@ -296,7 +294,7 @@ public void reevaluateTable( long tableId ) { if ( statisticQueryInterface == null ) { return; } - LogicalEntity entity = Catalog.getInstance().getLogicalEntity( tableId ); + LogicalEntity entity = Catalog.getInstance().getSnapshot().getLogicalEntity( tableId ); if ( entity != null ) { deleteTable( entity.namespaceId, tableId ); @@ -347,7 +345,7 @@ private StatisticColumn reevaluateColumn( QueryResult column ) { /** * Reevaluates a numerical column, with the configured statistics. 
*/ - private StatisticColumn reevaluateNumericalColumn( QueryResult column ) { + private > StatisticColumn reevaluateNumericalColumn( QueryResult column ) { StatisticQueryResult min = this.prepareNode( column, NodeType.MIN ); StatisticQueryResult max = this.prepareNode( column, NodeType.MAX ); Integer count = getNumberColumnCount( this.prepareNode( column, NodeType.ROW_COUNT_COLUMN ) ); @@ -417,12 +415,12 @@ private > StatisticColumn reevaluateTemporalColumn( Q * * @param column the column in which the values should be inserted */ - private void assignUnique( StatisticColumn column, StatisticQueryResult unique ) { + private void assignUnique( StatisticColumn column, StatisticQueryResult unique ) { if ( unique == null || unique.getData() == null ) { return; } if ( unique.getData().length <= this.buffer ) { - column.setUniqueValues( Lists.newArrayList( (T[]) unique.getData() ) ); + column.setUniqueValues( (List) Arrays.asList( unique.getData() ) ); } else { column.setFull( true ); } @@ -432,7 +430,7 @@ private void assignUnique( StatisticColumn column, StatisticQueryResult u /** * Reevaluates an alphabetical column, with the configured statistics */ - private StatisticColumn reevaluateAlphabeticalColumn( QueryResult column ) { + private > StatisticColumn reevaluateAlphabeticalColumn( QueryResult column ) { StatisticQueryResult unique = this.prepareNode( column, NodeType.UNIQUE_VALUE ); Integer count = getNumberColumnCount( this.prepareNode( column, NodeType.ROW_COUNT_COLUMN ) ); @@ -477,7 +475,9 @@ private void put( StatisticColumn statisticColumn ) { put( statisticSchemaMapCopy, - queryResult, + queryResult.getColumn().namespaceId, + queryResult.getColumn().tableId, + queryResult.getColumn().id, statisticColumn ); } @@ -508,9 +508,8 @@ private void put( private StatisticQueryResult prepareNode( QueryResult queryResult, NodeType nodeType ) { StatisticQueryResult statisticQueryColumn = null; - if ( Catalog.getInstance().getLogicalEntity( queryResult.getEntity().id ) != null ) { + if ( Catalog.getInstance().getSnapshot().getLogicalEntity( queryResult.getEntity().id ) != null ) { AlgNode queryNode = getQueryNode( queryResult, nodeType ); - //queryNode = getQueryNode( queryResult, nodeType ); statisticQueryColumn = statisticQueryInterface.selectOneColumnStat( queryNode, transaction, statement, queryResult ); } return statisticQueryColumn; @@ -608,9 +607,9 @@ private AlgNode getAggregateColumn( QueryResult queryResult, NodeType nodeType, } - private AlgNode getUniqueValues( QueryResult queryResult, RelScan tableScan, RexBuilder rexBuilder ) { + private AlgNode getUniqueValues( QueryResult queryResult, RelScan tableScan, RexBuilder rexBuilder ) { for ( int i = 0; i < tableScan.getRowType().getFieldNames().size(); i++ ) { - if ( tableScan.getRowType().getFieldNames().get( i ).equals( queryResult.getColumn() ) ) { + if ( tableScan.getRowType().getFieldNames().get( i ).equals( queryResult.getColumn().name ) ) { LogicalProject logicalProject = LogicalProject.create( tableScan, Collections.singletonList( rexBuilder.makeInputRef( tableScan, i ) ), @@ -639,7 +638,7 @@ private AlgNode getUniqueValues( QueryResult queryResult, RelScan tableScan, Rex */ private AlgNode getColumnCount( QueryResult queryResult, RelScan tableScan, RexBuilder rexBuilder, AlgOptCluster cluster ) { for ( int i = 0; i < tableScan.getRowType().getFieldNames().size(); i++ ) { - if ( queryResult.getColumn() != null && tableScan.getRowType().getFieldNames().get( i ).equals( queryResult.getColumn() ) ) { + if ( queryResult.getColumn() 
!= null && tableScan.getRowType().getFieldNames().get( i ).equals( queryResult.getColumn().name ) ) { LogicalProject logicalProject = LogicalProject.create( tableScan, Collections.singletonList( rexBuilder.makeInputRef( tableScan, i ) ), @@ -661,7 +660,7 @@ private AlgNode getColumnCount( QueryResult queryResult, RelScan tableScan, RexB /** * Gets the amount of entries for a table. */ - private AlgNode getTableCount( RelScan tableScan, AlgOptCluster cluster ) { + private AlgNode getTableCount( RelScan tableScan, AlgOptCluster cluster ) { AggregateCall aggregateCall = getRowCountAggregateCall( cluster ); return LogicalAggregate.create( tableScan, @@ -830,7 +829,7 @@ public void propertyChange( PropertyChangeEvent evt ) { private void workQueue() { while ( !this.tablesToUpdate.isEmpty() ) { long tableId = this.tablesToUpdate.poll(); - if ( Catalog.getInstance().getLogicalEntity( tableId ) != null ) { + if ( Catalog.getInstance().getSnapshot().getLogicalEntity( tableId ) != null ) { reevaluateTable( tableId ); } tableStatistic.remove( tableId ); @@ -864,9 +863,9 @@ public void tablesToUpdate( long tableId ) { * @param schemaId of the table */ @Override - public void tablesToUpdate( long tableId, Map> changedValues, String type, long schemaId ) { + public void tablesToUpdate( long tableId, Map> changedValues, String type, long schemaId ) { Catalog catalog = Catalog.getInstance(); - if ( catalog.getLogicalEntity( tableId ) != null ) { + if ( catalog.getSnapshot().getLogicalEntity( tableId ) != null ) { switch ( type ) { case "INSERT": handleInsert( tableId, changedValues, schemaId, catalog ); @@ -882,7 +881,7 @@ public void tablesToUpdate( long tableId, Map> changedValues, } - private void handleDrop( long tableId, Map> changedValues, long schemaId ) { + private void handleDrop( long tableId, Map> changedValues, long schemaId ) { Map>> schema = this.statisticSchemaMap.get( schemaId ); if ( schema != null ) { Map> table = this.statisticSchemaMap.get( schemaId ).get( tableId ); @@ -894,8 +893,8 @@ private void handleDrop( long tableId, Map> changedValues, lo private void handleTruncate( long tableId, long schemaId, Catalog catalog ) { - LogicalTable catalogTable = catalog.getLogicalEntity( tableId ).unwrap( LogicalTable.class ); - for ( LogicalColumn column : catalogTable.columns ) { + LogicalTable catalogTable = catalog.getSnapshot().getLogicalEntity( tableId ).unwrap( LogicalTable.class ); + for ( LogicalColumn column : catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getColumns( catalogTable.id ) ) { PolyType polyType = column.type; QueryResult queryResult = new QueryResult( catalogTable, column ); if ( this.statisticSchemaMap.get( schemaId ).get( tableId ).get( column.id ) != null ) { @@ -908,37 +907,37 @@ private void handleTruncate( long tableId, long schemaId, Catalog catalog ) { } - private > StatisticColumn createNewStatisticColumns( PolyType polyType, QueryResult queryResult ) { + private > StatisticColumn createNewStatisticColumns( PolyType polyType, QueryResult queryResult ) { StatisticColumn statisticColumn = null; if ( polyType.getFamily() == PolyTypeFamily.NUMERIC ) { statisticColumn = new NumericalStatisticColumn( queryResult ); } else if ( polyType.getFamily() == PolyTypeFamily.CHARACTER ) { - statisticColumn = new AlphabeticStatisticColumn( queryResult ); + statisticColumn = new AlphabeticStatisticColumn<>( queryResult ); } else if ( PolyType.DATETIME_TYPES.contains( polyType ) ) { - statisticColumn = new TemporalStatisticColumn( queryResult ); + statisticColumn 
= new TemporalStatisticColumn<>( queryResult ); } return statisticColumn; } - private void handleInsert( long tableId, Map> changedValues, long schemaId, Catalog catalog ) { - LogicalTable catalogTable = catalog.getLogicalEntity( tableId ).unwrap( LogicalTable.class ); + private void handleInsert( long tableId, Map> changedValues, long schemaId, Catalog catalog ) { + LogicalTable catalogTable = catalog.getSnapshot().getLogicalEntity( tableId ).unwrap( LogicalTable.class ); if ( this.statisticSchemaMap.get( schemaId ) != null ) { if ( this.statisticSchemaMap.get( schemaId ).get( tableId ) != null ) { - for ( LogicalColumn column : catalogTable.columns ) { + for ( LogicalColumn column : catalog.getSnapshot().getRelSnapshot( schemaId ).getColumns( tableId ) ) { PolyType polyType = column.type; QueryResult queryResult = new QueryResult( catalogTable, column ); if ( this.statisticSchemaMap.get( schemaId ).get( tableId ).get( column.id ) != null && changedValues.get( (long) column.position ) != null ) { - handleInsertColumn( tableId, changedValues, schemaId, catalogTable.columns.stream().map( c -> c.id ).collect( Collectors.toList() ), column.position, queryResult ); + handleInsertColumn( tableId, changedValues, schemaId, catalog.getSnapshot().getRelSnapshot( schemaId ).getColumns( tableId ).stream().map( c -> c.id ).collect( Collectors.toList() ), column.position, queryResult ); } else { addNewColumnStatistics( changedValues, column.position, polyType, queryResult ); } } } else { - addInserts( changedValues, catalogTable, catalogTable.columns ); + addInserts( changedValues, catalogTable, catalog.getSnapshot().getRelSnapshot( schemaId ).getColumns( tableId ) ); } } else { - addInserts( changedValues, catalogTable, catalogTable.columns ); + addInserts( changedValues, catalogTable, catalog.getSnapshot().getRelSnapshot( schemaId ).getColumns( tableId ) ); } } @@ -946,7 +945,7 @@ private void handleInsert( long tableId, Map> changedValues, /** * Creates new StatisticColumns and inserts the values. 
*/ - private void addInserts( Map> changedValues, LogicalTable catalogTable, ImmutableList columns ) { + private void addInserts( Map> changedValues, LogicalTable catalogTable, List columns ) { for ( LogicalColumn column : columns ) { QueryResult queryResult = new QueryResult( catalogTable, column ); addNewColumnStatistics( changedValues, column.position, column.type, queryResult ); @@ -954,7 +953,7 @@ private void addInserts( Map> changedValues, LogicalTable cat } - private void addNewColumnStatistics( Map> changedValues, long i, PolyType polyType, QueryResult queryResult ) { + private void addNewColumnStatistics( Map> changedValues, long i, PolyType polyType, QueryResult queryResult ) { StatisticColumn statisticColumn = createNewStatisticColumns( polyType, queryResult ); if ( statisticColumn != null ) { statisticColumn.insert( (List) changedValues.get( i ) ); @@ -963,7 +962,7 @@ private void addNewColumnStatistics( Map> changedValues, long } - private void handleInsertColumn( long tableId, Map> changedValues, long schemaId, List columns, int i, QueryResult queryResult ) { + private void handleInsertColumn( long tableId, Map> changedValues, long schemaId, List columns, int i, QueryResult queryResult ) { StatisticColumn statisticColumn = this.statisticSchemaMap.get( schemaId ).get( tableId ).get( columns.get( i ) ); statisticColumn.insert( (List) changedValues.get( (long) i ) ); put( queryResult, statisticColumn ); @@ -975,7 +974,7 @@ private void handleInsertColumn( long tableId, Map> changedVa */ @Override public void deleteTableToUpdate( long tableId, long schemaId ) { - if ( statisticSchemaMap.containsKey( schemaId ) && statisticSchemaMap.get( schemaId ).containsKey( tableId ) ) { + if ( statisticSchemaMap.containsKey( schemaId ) ) { statisticSchemaMap.get( schemaId ).remove( tableId ); } tableStatistic.remove( tableId ); @@ -1093,7 +1092,7 @@ private synchronized void updateCalls( long tableId, String kind, TableCalls cal if ( tableStatistic.containsKey( tableId ) ) { statisticTable = tableStatistic.remove( tableId ); } else { - statisticTable = new StatisticTable( tableId ); + statisticTable = new StatisticTable<>( tableId ); } switch ( kind ) { @@ -1184,11 +1183,11 @@ public > Object getTableStatistic( long schemaId, long t numericInfo.add( (NumericalStatisticColumn) v ); statisticTable.setNumericalColumn( numericInfo ); } else if ( v.getType().getFamily() == PolyTypeFamily.CHARACTER ) { - alphabeticInfo.add( (AlphabeticStatisticColumn) v ); - statisticTable.setAlphabeticColumn( alphabeticInfo ); + alphabeticInfo.add( (AlphabeticStatisticColumn) v ); + statisticTable.setAlphabeticColumn( (List) alphabeticInfo ); } else if ( PolyType.DATETIME_TYPES.contains( Catalog.getInstance().getSnapshot().getRelSnapshot( schemaId ).getColumn( k ).type ) ) { - temporalInfo.add( (TemporalStatisticColumn) v ); - statisticTable.setTemporalColumn( temporalInfo ); + temporalInfo.add( (TemporalStatisticColumn) v ); + statisticTable.setTemporalColumn( (List) temporalInfo ); } } ); return statisticTable; diff --git a/plugins/cql-language/src/main/java/org/polypheny/db/cql/TableIndex.java b/plugins/cql-language/src/main/java/org/polypheny/db/cql/TableIndex.java index d282e9ac13..f5b0f39111 100644 --- a/plugins/cql-language/src/main/java/org/polypheny/db/cql/TableIndex.java +++ b/plugins/cql-language/src/main/java/org/polypheny/db/cql/TableIndex.java @@ -20,7 +20,6 @@ import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.LogicalNamespace; import 
org.polypheny.db.catalog.entity.logical.LogicalTable; -import org.polypheny.db.catalog.exceptions.UnknownTableException; import org.polypheny.db.cql.exception.UnknownIndexException; @@ -45,15 +44,11 @@ public TableIndex( final LogicalTable catalogTable, final String schemaName, fin public static TableIndex createIndex( String schemaName, String tableName ) throws UnknownIndexException { - try { - log.debug( "Creating TableIndex." ); - Catalog catalog = Catalog.getInstance(); - LogicalNamespace namespace = catalog.getSnapshot().getNamespace( schemaName ); - LogicalTable table = catalog.getSnapshot().getRelSnapshot( namespace.id ).getTable( tableName ); - return new TableIndex( table, schemaName, tableName ); - } catch ( UnknownTableException e ) { - throw new UnknownIndexException( "Cannot find a underlying table for the specified table name: " + schemaName + "." + tableName + "." ); - } + log.debug( "Creating TableIndex." ); + Catalog catalog = Catalog.getInstance(); + LogicalNamespace namespace = catalog.getSnapshot().getNamespace( schemaName ); + LogicalTable table = catalog.getSnapshot().getRelSnapshot( namespace.id ).getTable( tableName ); + return new TableIndex( table, schemaName, tableName ); } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/IdBuilder.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/IdBuilder.java index 89838c4521..ba68864266 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/IdBuilder.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/IdBuilder.java @@ -40,6 +40,8 @@ public class IdBuilder { AtomicLong interfaceId; + AtomicLong constraintId; + private static IdBuilder INSTANCE; @@ -63,6 +65,7 @@ private IdBuilder() { new AtomicLong( 0 ), new AtomicLong( 0 ), new AtomicLong( 0 ), + new AtomicLong( 0 ), new AtomicLong( 0 ) ); } @@ -78,7 +81,8 @@ public IdBuilder( AtomicLong indexId, AtomicLong keyId, AtomicLong adapterId, - AtomicLong interfaceId ) { + AtomicLong interfaceId, + AtomicLong constraintId ) { this.snapshotId = snapshotId; this.databaseId = databaseId; @@ -90,6 +94,7 @@ public IdBuilder( this.keyId = keyId; this.userId = userId; this.allocId = allocId; + this.constraintId = constraintId; this.adapterId = adapterId; this.interfaceId = interfaceId; @@ -150,4 +155,9 @@ public long getNewInterfaceId() { return interfaceId.getAndIncrement(); } + + public long getNewConstraintId() { + return constraintId.getAndIncrement(); + } + } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java index e6631feef4..34972e1cb0 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java @@ -27,7 +27,6 @@ import java.util.concurrent.ConcurrentHashMap; import lombok.Getter; import lombok.extern.slf4j.Slf4j; -import org.apache.commons.lang3.NotImplementedException; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.catalog.allocation.PolyAllocDocCatalog; import org.polypheny.db.catalog.allocation.PolyAllocGraphCatalog; @@ -46,9 +45,7 @@ import org.polypheny.db.catalog.entity.CatalogQueryInterface; import org.polypheny.db.catalog.entity.CatalogUser; import org.polypheny.db.catalog.entity.LogicalNamespace; -import org.polypheny.db.catalog.entity.logical.LogicalEntity; import org.polypheny.db.catalog.entity.logical.LogicalTable; -import 
org.polypheny.db.catalog.entity.physical.PhysicalEntity; import org.polypheny.db.catalog.exceptions.GenericCatalogException; import org.polypheny.db.catalog.exceptions.UnknownAdapterException; import org.polypheny.db.catalog.exceptions.UnknownColumnException; @@ -166,20 +163,23 @@ private void insertDefaultData() throws UnknownAdapterException { addAdapter( "hsqldb", defaultStore.getAdapterName(), AdapterType.STORE, defaultStore.getDefaultSettings() ); // Deploy default CSV view - long adapter = addAdapter( "hr", defaultSource.getAdapterName(), AdapterType.SOURCE, defaultSource.getDefaultSettings() ); + addAdapter( "hr", defaultSource.getAdapterName(), AdapterType.SOURCE, defaultSource.getDefaultSettings() ); // init schema - CatalogAdapter csv = getSnapshot().getAdapter( "hr" ); - long id = getLogicalRel( namespaceId ).addTable( "depts", EntityType.SOURCE, false ); + getLogicalRel( namespaceId ).addTable( "depts", EntityType.SOURCE, false ); - id = getLogicalRel( namespaceId ).addTable( "emps", EntityType.SOURCE, false ); + getLogicalRel( namespaceId ).addTable( "emps", EntityType.SOURCE, false ); - id = getLogicalRel( namespaceId ).addTable( "emp", EntityType.SOURCE, false ); + getLogicalRel( namespaceId ).addTable( "emp", EntityType.SOURCE, false ); + + getLogicalRel( namespaceId ).addTable( "work", EntityType.SOURCE, false ); + + updateSnapshot(); - id = getLogicalRel( namespaceId ).addTable( "work", EntityType.SOURCE, false ); try { - addDefaultCsvColumns( csv ); + CatalogAdapter csv = getSnapshot().getAdapter( "hr" ); + addDefaultCsvColumns( csv, namespaceId ); } catch ( UnknownTableException | GenericCatalogException | UnknownColumnException e ) { throw new RuntimeException( e ); } @@ -195,21 +195,20 @@ private void insertDefaultData() throws UnknownAdapterException { /** * Initiates default columns for csv files */ - private void addDefaultCsvColumns( CatalogAdapter csv ) throws UnknownTableException, GenericCatalogException, UnknownColumnException, UnknownTableException, UnknownColumnException, GenericCatalogException { - LogicalNamespace schema = getSnapshot().getNamespace( "public" ); - LogicalTable depts = getSnapshot().getRelSnapshot( schema.id ).getTable( "depts" ); + private void addDefaultCsvColumns( CatalogAdapter csv, long namespaceId ) throws UnknownTableException, UnknownColumnException, GenericCatalogException { + LogicalTable depts = getSnapshot().getRelSnapshot( namespaceId ).getTable( "depts" ); addDefaultCsvColumn( csv, depts, "deptno", PolyType.INTEGER, null, 1, null ); addDefaultCsvColumn( csv, depts, "name", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 2, 20 ); - LogicalTable emps = getSnapshot().getRelSnapshot( schema.id ).getTable( "emps" ); + LogicalTable emps = getSnapshot().getRelSnapshot( namespaceId ).getTable( "emps" ); addDefaultCsvColumn( csv, emps, "empid", PolyType.INTEGER, null, 1, null ); addDefaultCsvColumn( csv, emps, "deptno", PolyType.INTEGER, null, 2, null ); addDefaultCsvColumn( csv, emps, "name", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 3, 20 ); addDefaultCsvColumn( csv, emps, "salary", PolyType.INTEGER, null, 4, null ); addDefaultCsvColumn( csv, emps, "commission", PolyType.INTEGER, null, 5, null ); - LogicalTable emp = getSnapshot().getRelSnapshot( schema.id ).getTable( "emp" ); + LogicalTable emp = getSnapshot().getRelSnapshot( namespaceId ).getTable( "emp" ); addDefaultCsvColumn( csv, emp, "employeeno", PolyType.INTEGER, null, 1, null ); addDefaultCsvColumn( csv, emp, "age", PolyType.INTEGER, null, 2, null ); addDefaultCsvColumn( 
csv, emp, "gender", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 3, 20 ); @@ -221,7 +220,7 @@ private void addDefaultCsvColumns( CatalogAdapter csv ) throws UnknownTableExcep addDefaultCsvColumn( csv, emp, "workingyears", PolyType.INTEGER, null, 9, null ); addDefaultCsvColumn( csv, emp, "yearsatcompany", PolyType.INTEGER, null, 10, null ); - LogicalTable work = getSnapshot().getRelSnapshot( schema.id ).getTable( "work" ); + LogicalTable work = getSnapshot().getRelSnapshot( namespaceId ).getTable( "work" ); addDefaultCsvColumn( csv, work, "employeeno", PolyType.INTEGER, null, 1, null ); addDefaultCsvColumn( csv, work, "educationfield", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 2, 20 ); addDefaultCsvColumn( csv, work, "jobinvolvement", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 3, 20 ); @@ -232,26 +231,28 @@ private void addDefaultCsvColumns( CatalogAdapter csv ) throws UnknownTableExcep addDefaultCsvColumn( csv, work, "attrition", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 8, 20 ); addDefaultCsvColumn( csv, work, "dailyrate", PolyType.INTEGER, null, 9, null ); + updateSnapshot(); + // set all needed primary keys - getLogicalRel( schema.id ).addPrimaryKey( depts.id, Collections.singletonList( getSnapshot().getRelSnapshot( schema.id ).getColumn( depts.id, "deptno" ).id ) ); - getLogicalRel( schema.id ).addPrimaryKey( emps.id, Collections.singletonList( getSnapshot().getRelSnapshot( schema.id ).getColumn( emps.id, "empid" ).id ) ); - getLogicalRel( schema.id ).addPrimaryKey( emp.id, Collections.singletonList( getSnapshot().getRelSnapshot( schema.id ).getColumn( emp.id, "employeeno" ).id ) ); - getLogicalRel( schema.id ).addPrimaryKey( work.id, Collections.singletonList( getSnapshot().getRelSnapshot( schema.id ).getColumn( work.id, "employeeno" ).id ) ); + getLogicalRel( namespaceId ).addPrimaryKey( depts.id, Collections.singletonList( getSnapshot().getRelSnapshot( namespaceId ).getColumn( depts.id, "deptno" ).id ) ); + getLogicalRel( namespaceId ).addPrimaryKey( emps.id, Collections.singletonList( getSnapshot().getRelSnapshot( namespaceId ).getColumn( emps.id, "empid" ).id ) ); + getLogicalRel( namespaceId ).addPrimaryKey( emp.id, Collections.singletonList( getSnapshot().getRelSnapshot( namespaceId ).getColumn( emp.id, "employeeno" ).id ) ); + getLogicalRel( namespaceId ).addPrimaryKey( work.id, Collections.singletonList( getSnapshot().getRelSnapshot( namespaceId ).getColumn( work.id, "employeeno" ).id ) ); // set foreign keys - getLogicalRel( schema.id ).addForeignKey( + getLogicalRel( namespaceId ).addForeignKey( emps.id, - ImmutableList.of( getSnapshot().getRelSnapshot( schema.id ).getColumn( emps.id, "deptno" ).id ), + ImmutableList.of( getSnapshot().getRelSnapshot( namespaceId ).getColumn( emps.id, "deptno" ).id ), depts.id, - ImmutableList.of( getSnapshot().getRelSnapshot( schema.id ).getColumn( depts.id, "deptno" ).id ), + ImmutableList.of( getSnapshot().getRelSnapshot( namespaceId ).getColumn( depts.id, "deptno" ).id ), "fk_emps_depts", ForeignKeyOption.NONE, ForeignKeyOption.NONE ); - getLogicalRel( schema.id ).addForeignKey( + getLogicalRel( namespaceId ).addForeignKey( work.id, - ImmutableList.of( getSnapshot().getRelSnapshot( schema.id ).getColumn( work.id, "employeeno" ).id ), + ImmutableList.of( getSnapshot().getRelSnapshot( namespaceId ).getColumn( work.id, "employeeno" ).id ), emp.id, - ImmutableList.of( getSnapshot().getRelSnapshot( schema.id ).getColumn( emp.id, "employeeno" ).id ), + ImmutableList.of( getSnapshot().getRelSnapshot( namespaceId ).getColumn( emp.id, 
"employeeno" ).id ), "fk_work_emp", ForeignKeyOption.NONE, ForeignKeyOption.NONE ); @@ -306,7 +307,7 @@ public void rollback() { private void validateNamespaceType( long id, NamespaceType type ) { - if ( getSnapshot().getNamespace( id ).namespaceType != type ) { + if ( logicalCatalogs.get( id ).getLogicalNamespace().namespaceType != type ) { throw new RuntimeException( "error while retrieving catalog" ); } } @@ -354,52 +355,12 @@ public AllocationGraphCatalog getAllocGraph( long namespaceId ) { } - @Override - public LogicalEntity getLogicalEntity( String entityName ) { - throw new NotImplementedException(); - - /*for ( LogicalCatalog catalog : logicalCatalogs.values() ) { - LogicalEntity entity = catalog.getEntity( entityName ); - if ( entity != null ) { - return entity; - } - } - return null;*/ - } - - - @Override - public LogicalEntity getLogicalEntity( long id ) { - throw new NotImplementedException(); - /*for ( LogicalCatalog catalog : logicalCatalogs.values() ) { - LogicalEntity entity = catalog.getEntity( id ); - if ( entity != null ) { - return entity; - } - } - return null;*/ - } - - @Override public PhysicalCatalog getPhysical( long namespaceId ) { return physicalCatalogs.get( namespaceId ); } - // move to Snapshot - @Override - public PhysicalEntity getPhysicalEntity( long id ) { - for ( PhysicalCatalog catalog : physicalCatalogs.values() ) { - PhysicalEntity entity = catalog.getPhysicalEntity( id ); - if ( entity != null ) { - return entity; - } - } - return null; - } - - @Override @Deprecated public Map getNodeInfo() { diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/RelationalCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/RelationalCatalog.java index 15568264aa..10aab5c6ae 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/RelationalCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/RelationalCatalog.java @@ -16,19 +16,25 @@ package org.polypheny.db.catalog.logical; +import com.google.common.collect.ImmutableList; import io.activej.serializer.BinarySerializer; import io.activej.serializer.annotations.Deserialize; import io.activej.serializer.annotations.Serialize; import java.beans.PropertyChangeSupport; +import java.sql.Timestamp; +import java.util.ArrayList; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.stream.Collectors; import lombok.Builder; import lombok.Getter; import lombok.Value; import lombok.experimental.NonFinal; import lombok.experimental.SuperBuilder; +import org.apache.commons.lang3.NotImplementedException; import org.polypheny.db.algebra.AlgCollation; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.type.AlgDataType; @@ -36,9 +42,14 @@ import org.polypheny.db.catalog.Serializable; import org.polypheny.db.catalog.catalogs.LogicalCatalog; import org.polypheny.db.catalog.catalogs.LogicalRelationalCatalog; +import org.polypheny.db.catalog.entity.CatalogConstraint; +import org.polypheny.db.catalog.entity.CatalogDefaultValue; +import org.polypheny.db.catalog.entity.CatalogForeignKey; import org.polypheny.db.catalog.entity.CatalogIndex; import org.polypheny.db.catalog.entity.CatalogKey; import org.polypheny.db.catalog.entity.CatalogKey.EnforcementTime; +import org.polypheny.db.catalog.entity.CatalogMaterializedView; +import org.polypheny.db.catalog.entity.CatalogPrimaryKey; import 
org.polypheny.db.catalog.entity.CatalogView; import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.MaterializedCriteria; @@ -46,12 +57,14 @@ import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.exceptions.GenericCatalogException; import org.polypheny.db.catalog.logistic.Collation; +import org.polypheny.db.catalog.logistic.ConstraintType; import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.logistic.ForeignKeyOption; import org.polypheny.db.catalog.logistic.IndexType; import org.polypheny.db.languages.QueryLanguage; import org.polypheny.db.type.PolyType; +// todo dl add object not null, synchronize @Value @SuperBuilder(toBuilder = true) public class RelationalCatalog implements Serializable, LogicalRelationalCatalog { @@ -79,10 +92,15 @@ public class RelationalCatalog implements Serializable, LogicalRelationalCatalog @Getter public Map<Long, CatalogKey> keys; + @Serialize @Getter public Map<long[], Long> keyColumns; + @Serialize + @Getter + public Map<Long, CatalogConstraint> constraints; + public IdBuilder idBuilder = IdBuilder.getInstance(); @@ -91,6 +109,7 @@ public class RelationalCatalog implements Serializable, LogicalRelationalCatalog boolean openChanges = false; PropertyChangeSupport listeners = new PropertyChangeSupport( this ); + List<Long> tablesFlaggedForDeletion = new ArrayList<>(); public RelationalCatalog( @@ -99,7 +118,8 @@ public RelationalCatalog( @Deserialize("columns") Map<Long, LogicalColumn> columns, @Deserialize("indexes") Map<Long, CatalogIndex> indexes, @Deserialize("keys") Map<Long, CatalogKey> keys, - @Deserialize("keyColumns") Map<long[], Long> keyColumns ) { + @Deserialize("keyColumns") Map<long[], Long> keyColumns, + @Deserialize("constraints") Map<Long, CatalogConstraint> constraints ) { this.logicalNamespace = logicalNamespace; this.tables = tables; @@ -107,13 +127,12 @@ public RelationalCatalog( this.indexes = indexes; this.keys = keys; this.keyColumns = keyColumns; - - + this.constraints = constraints; } public RelationalCatalog( LogicalNamespace namespace ) { - this( namespace, new HashMap<>(), new HashMap<>(), new HashMap<>(), new HashMap<>(), new HashMap<>() ); + this( namespace, new HashMap<>(), new HashMap<>(), new HashMap<>(), new HashMap<>(), new HashMap<>(), new HashMap<>() ); } @@ -145,36 +164,33 @@ public long addTable( String name, EntityType entityType, boolean modifiable ) { @Override public long addView( String name, long namespaceId, EntityType entityType, boolean modifiable, AlgNode definition, AlgCollation algCollation, Map<Long, List<Long>> underlyingTables, AlgDataType fieldList, String query, QueryLanguage language ) { - return 0; + throw new NotImplementedException(); } @Override public long addMaterializedView( String name, long namespaceId, EntityType entityType, boolean modifiable, AlgNode definition, AlgCollation algCollation, Map<Long, List<Long>> underlyingTables, AlgDataType fieldList, MaterializedCriteria materializedCriteria, String query, QueryLanguage language, boolean ordered ) throws GenericCatalogException { - return 0; + throw new NotImplementedException(); } @Override public void renameTable( long tableId, String name ) { - + tables.put( tableId, tables.get( tableId ).toBuilder().name( name ).build() ); } @Override public void deleteTable( long tableId ) { - - } - - - @Override - public void setTableOwner( long tableId, long ownerId ) { - + tables.remove( tableId ); } @Override public void setPrimaryKey( long tableId, Long keyId ) { + tables.put( tableId, tables.get( tableId ).toBuilder().primaryKey( keyId ).build() ); + + // Guard against NPE: callers pass null to clear the primary key; only promote an existing key. + if ( keyId != null ) { + keys.put( keyId, new CatalogPrimaryKey( keys.get( keyId ) ) ); + } } @@ -236,13 +252,13 @@ 
private long addKey( long tableId, List<Long> columnIds, EnforcementTime enforce @Override public void setIndexPhysicalName( long indexId, String physicalName ) { - + indexes.put( indexId, indexes.get( indexId ).toBuilder().physicalName( physicalName ).build() ); } @Override public void deleteIndex( long indexId ) { - + indexes.remove( indexId ); } @@ -258,109 +274,295 @@ public long addColumn( String name, long tableId, int position, PolyType type, P @Override public void renameColumn( long columnId, String name ) { - + columns.put( columnId, columns.get( columnId ).toBuilder().name( name ).build() ); } @Override public void setColumnPosition( long columnId, int position ) { - + columns.put( columnId, columns.get( columnId ).toBuilder().position( position ).build() ); } @Override - public void setColumnType( long columnId, PolyType type, PolyType collectionsType, Integer length, Integer precision, Integer dimension, Integer cardinality ) throws GenericCatalogException { + public void setColumnType( long columnId, PolyType type, PolyType collectionsType, Integer length, Integer scale, Integer dimension, Integer cardinality ) throws GenericCatalogException { + if ( scale != null && length != null && scale > length ) { + throw new RuntimeException( "Invalid scale! Scale cannot be larger than length." ); + } + columns.put( columnId, columns.get( columnId ).toBuilder().type( type ).length( length ).scale( scale ).dimension( dimension ).cardinality( cardinality ).build() ); } @Override public void setNullable( long columnId, boolean nullable ) throws GenericCatalogException { - + columns.put( columnId, columns.get( columnId ).toBuilder().nullable( nullable ).build() ); } @Override public void setCollation( long columnId, Collation collation ) { - + columns.put( columnId, columns.get( columnId ).toBuilder().collation( collation ).build() ); } @Override public void deleteColumn( long columnId ) { - + columns.remove( columnId ); } @Override public void setDefaultValue( long columnId, PolyType type, String defaultValue ) { - + columns.put( columnId, columns.get( columnId ).toBuilder().type( type ).defaultValue( new CatalogDefaultValue( columnId, type, defaultValue, "defaultValue" ) ).build() ); } @Override public void deleteDefaultValue( long columnId ) { - + columns.put( columnId, columns.get( columnId ).toBuilder().defaultValue( null ).build() ); } @Override public void addPrimaryKey( long tableId, List<Long> columnIds ) throws GenericCatalogException { + if ( columnIds.stream().anyMatch( id -> columns.get( id ).nullable ) ) { + throw new GenericCatalogException( "Primary key is not allowed to use nullable columns." ); + } + + // TODO: Check if the current values are unique + + // Check if there is already a primary key defined for this table and if so, delete it. + LogicalTable table = tables.get( tableId ); + + if ( table.primaryKey != null ) { + // CatalogCombinedKey combinedKey = getCombinedKey( table.primaryKey ); + if ( getKeyUniqueCount( table.primaryKey ) == 1 && isForeignKey( table.primaryKey ) ) { + // This primary key is the only constraint for the uniqueness of this key. + throw new GenericCatalogException( "This key is referenced by at least one foreign key which requires this key to be unique. To drop this primary key, first drop the foreign keys or create a unique constraint." 
); + } + synchronized ( this ) { + setPrimaryKey( tableId, null ); + deleteKeyIfNoLongerUsed( table.primaryKey ); + } + } + long keyId = getOrAddKey( tableId, columnIds, EnforcementTime.ON_QUERY ); + setPrimaryKey( tableId, keyId ); + } + + + private boolean isForeignKey( long key ) { + return keys.values().stream().filter( k -> k instanceof CatalogForeignKey ).map( k -> (CatalogForeignKey) k ).anyMatch( k -> k.referencedKeyId == key ); + } + + + private boolean isPrimaryKey( long key ) { + return keys.values().stream().filter( k -> k instanceof CatalogPrimaryKey ).map( k -> (CatalogPrimaryKey) k ).anyMatch( k -> k.id == key ); + } + + + /** + * Check if the specified key is used as primary key, index or constraint. If so, this is a NoOp. If it is not used, the key is deleted. + */ + private void deleteKeyIfNoLongerUsed( Long keyId ) { + if ( keyId == null ) { + return; + } + CatalogKey key = keys.get( keyId ); + // The key may already have been removed (e.g. by deleteForeignKey); nothing left to do then. + if ( key == null ) { + return; + } + LogicalTable table = tables.get( key.tableId ); + if ( table.primaryKey != null && table.primaryKey.equals( keyId ) ) { + return; + } + if ( constraints.values().stream().anyMatch( c -> c.keyId == keyId ) ) { + return; + } + if ( keys.values().stream().filter( k -> k instanceof CatalogForeignKey ).anyMatch( f -> f.id == keyId ) ) { + return; + } + if ( indexes.values().stream().anyMatch( i -> i.keyId == keyId ) ) { + return; + } + synchronized ( this ) { + keys.remove( keyId ); + keyColumns.remove( key.columnIds.stream().mapToLong( Long::longValue ).toArray() ); + } + listeners.firePropertyChange( "key", key, null ); + } + + + private int getKeyUniqueCount( long keyId ) { + CatalogKey key = keys.get( keyId ); + int count = 0; + if ( isPrimaryKey( keyId ) ) { + count++; + } + + for ( CatalogConstraint constraint : constraints.values().stream().filter( c -> c.keyId == keyId ).collect( Collectors.toList() ) ) { + if ( constraint.type == ConstraintType.UNIQUE ) { + count++; + } + } + + for ( CatalogIndex index : indexes.values().stream().filter( i -> i.keyId == keyId ).collect( Collectors.toList() ) ) { + if ( index.unique ) { + count++; + } + } + return count; } @Override public void addForeignKey( long tableId, List<Long> columnIds, long referencesTableId, List<Long> referencesIds, String constraintName, ForeignKeyOption onUpdate, ForeignKeyOption onDelete ) throws GenericCatalogException { + LogicalTable table = tables.get( tableId ); + List<CatalogKey> childKeys = keys.values().stream().filter( k -> k.tableId == referencesTableId ).collect( Collectors.toList() ); + + for ( CatalogKey refKey : childKeys ) { + if ( refKey.columnIds.size() == referencesIds.size() && refKey.columnIds.containsAll( referencesIds ) && new HashSet<>( referencesIds ).containsAll( refKey.columnIds ) ) { + + int i = 0; + for ( long referencedColumnId : refKey.columnIds ) { + LogicalColumn referencingColumn = columns.get( columnIds.get( i++ ) ); + LogicalColumn referencedColumn = columns.get( referencedColumnId ); + if ( referencedColumn.type != referencingColumn.type ) { + throw new GenericCatalogException( "The data type of the referenced columns does not match the data type of the referencing column: " + referencingColumn.type.name() + " != " + referencedColumn.type ); + } + } + // TODO same keys for key and foreign key + if ( getKeyUniqueCount( refKey.id ) > 0 ) { + long keyId = getOrAddKey( tableId, columnIds, EnforcementTime.ON_COMMIT ); + CatalogForeignKey key = new CatalogForeignKey( + keyId, + constraintName, + tableId, + table.namespaceId, + refKey.id, + refKey.tableId, + refKey.namespaceId, + columnIds, + referencesIds, 
onUpdate, + onDelete ); + synchronized ( this ) { + keys.put( keyId, key ); + } + return; + } + } + } } @Override public void addUniqueConstraint( long tableId, String constraintName, List<Long> columnIds ) throws GenericCatalogException { - + long keyId = getOrAddKey( tableId, columnIds, EnforcementTime.ON_QUERY ); + // Check if there is already a unique constraint + List<CatalogConstraint> catalogConstraints = constraints.values().stream() + .filter( c -> c.keyId == keyId && c.type == ConstraintType.UNIQUE ) + .collect( Collectors.toList() ); + if ( catalogConstraints.size() > 0 ) { + throw new GenericCatalogException( "There is already a unique constraint!" ); + } + long id = idBuilder.getNewConstraintId(); + synchronized ( this ) { + constraints.put( id, new CatalogConstraint( id, keyId, ConstraintType.UNIQUE, constraintName, Objects.requireNonNull( keys.get( keyId ) ) ) ); + } } @Override public void deletePrimaryKey( long tableId ) throws GenericCatalogException { + LogicalTable table = tables.get( tableId ); + + // TODO: Check if the currently stored values are unique + if ( table.primaryKey != null ) { + // Check if this primary key is required to maintain uniqueness + // CatalogCombinedKey key = getCombinedKey( table.primaryKey ); + if ( isForeignKey( table.primaryKey ) ) { + if ( getKeyUniqueCount( table.primaryKey ) < 2 ) { + throw new GenericCatalogException( "This key is referenced by at least one foreign key which requires this key to be unique. To drop this primary key, either drop the foreign key or create a unique constraint." ); + } + } + setPrimaryKey( tableId, null ); + deleteKeyIfNoLongerUsed( table.primaryKey ); + } } @Override public void deleteForeignKey( long foreignKeyId ) throws GenericCatalogException { - + CatalogForeignKey catalogForeignKey = (CatalogForeignKey) keys.get( foreignKeyId ); + synchronized ( this ) { + keys.remove( catalogForeignKey.id ); + deleteKeyIfNoLongerUsed( catalogForeignKey.id ); + } } @Override public void deleteConstraint( long constraintId ) throws GenericCatalogException { + CatalogConstraint catalogConstraint = Objects.requireNonNull( constraints.get( constraintId ) ); + //CatalogCombinedKey key = getCombinedKey( catalogConstraint.keyId ); + if ( catalogConstraint.type == ConstraintType.UNIQUE && isForeignKey( catalogConstraint.keyId ) ) { + if ( getKeyUniqueCount( catalogConstraint.keyId ) < 2 ) { + throw new GenericCatalogException( "This key is referenced by at least one foreign key which requires this key to be unique. Unable to drop unique constraint." 
); + } + } + synchronized ( this ) { + constraints.remove( catalogConstraint.id ); + } + deleteKeyIfNoLongerUsed( catalogConstraint.keyId ); } @Override public void deleteViewDependencies( CatalogView catalogView ) { + for ( long id : catalogView.getUnderlyingTables().keySet() ) { + LogicalTable old = tables.get( id ); + List<Long> connectedViews = old.connectedViews.stream().filter( e -> e != catalogView.id ).collect( Collectors.toList() ); + + LogicalTable table = old.toBuilder().connectedViews( ImmutableList.copyOf( connectedViews ) ).build(); + synchronized ( this ) { + tables.put( id, table ); + } + listeners.firePropertyChange( "table", old, table ); + } } @Override public void updateMaterializedViewRefreshTime( long materializedViewId ) { + CatalogMaterializedView old = (CatalogMaterializedView) tables.get( materializedViewId ); + + MaterializedCriteria materializedCriteria = old.getMaterializedCriteria(); + materializedCriteria.setLastUpdate( new Timestamp( System.currentTimeMillis() ) ); + + synchronized ( this ) { + tables.put( materializedViewId, old.toBuilder().materializedCriteria( materializedCriteria ).build() ); + } + } @Override public void flagTableForDeletion( long tableId, boolean flag ) { - + if ( flag && !tablesFlaggedForDeletion.contains( tableId ) ) { + tablesFlaggedForDeletion.add( tableId ); + } else if ( !flag && tablesFlaggedForDeletion.contains( tableId ) ) { + tablesFlaggedForDeletion.remove( tableId ); + } } @Override public boolean isTableFlaggedForDeletion( long tableId ) { - return false; + return tablesFlaggedForDeletion.contains( tableId ); } } diff --git a/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/RequestParser.java b/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/RequestParser.java index 6c941399db..139c9ec202 100644 --- a/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/RequestParser.java +++ b/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/RequestParser.java @@ -260,17 +260,12 @@ LogicalTable parseCatalogTableName( String tableName ) throws ParserException { throw new ParserException( ParserErrorCode.TABLE_LIST_MALFORMED_TABLE, tableName ); } - try { - LogicalNamespace namespace = snapshop.getNamespace( tableElements[0] ); - LogicalTable table = snapshop.getRelSnapshot( namespace.id ).getTable( tableElements[1] ); - if ( log.isDebugEnabled() ) { - log.debug( "Finished parsing table \"{}\".", tableName ); - } - return table; - } catch ( UnknownTableException e ) { - log.error( "Unable to fetch table: {}.", tableName, e ); - throw new ParserException( ParserErrorCode.TABLE_LIST_UNKNOWN_TABLE, tableName ); + LogicalNamespace namespace = snapshop.getNamespace( tableElements[0] ); + LogicalTable table = snapshop.getRelSnapshot( namespace.id ).getTable( tableElements[1] ); + if ( log.isDebugEnabled() ) { + log.debug( "Finished parsing table \"{}\".", tableName ); } + return table; } diff --git a/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/Rest.java b/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/Rest.java index 7e37e39367..221921c1ef 100644 --- a/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/Rest.java +++ b/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/Rest.java @@ -50,7 +50,6 @@ import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.exceptions.UnknownSchemaException; -import org.polypheny.db.catalog.exceptions.UnknownTableException; import 
org.polypheny.db.catalog.exceptions.UnknownUserException; import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.languages.OperatorRegistry; @@ -159,11 +158,7 @@ String processPatchResource( final ResourcePatchRequest resourcePatchRequest, fi Snapshot snapshot = statement.getTransaction().getSnapshot(); LogicalNamespace namespace = snapshot.getNamespace( resourcePatchRequest.tables.get( 0 ).getNamespaceName() ); LogicalTable table = null; - try { - table = snapshot.getRelSnapshot( namespace.id ).getTable( resourcePatchRequest.tables.get( 0 ).name ); - } catch ( UnknownTableException e ) { - throw new RuntimeException( e ); - } + table = snapshot.getRelSnapshot( namespace.id ).getTable( resourcePatchRequest.tables.get( 0 ).name ); // Table Scans algBuilder = this.tableScans( algBuilder, rexBuilder, resourcePatchRequest.tables ); @@ -267,11 +262,7 @@ String processDeleteResource( final ResourceDeleteRequest resourceDeleteRequest, private static LogicalTable getLogicalTable( Snapshot snapshot, String namespaceName, String tableName ) { LogicalNamespace namespace = snapshot.getNamespace( namespaceName ); LogicalTable table; - try { - table = snapshot.getRelSnapshot( namespace.id ).getTable( tableName ); - } catch ( UnknownTableException e ) { - throw new RuntimeException( e ); - } + table = snapshot.getRelSnapshot( namespace.id ).getTable( tableName ); return table; } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/SqlProcessorImpl.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/SqlProcessorImpl.java index b7a2cc8adc..5e796d02ca 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/SqlProcessorImpl.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/SqlProcessorImpl.java @@ -38,7 +38,6 @@ import org.polypheny.db.catalog.entity.CatalogDefaultValue; import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalTable; -import org.polypheny.db.catalog.exceptions.UnknownTableException; import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.config.RuntimeConfig; @@ -371,11 +370,7 @@ private LogicalTable getCatalogTable( Transaction transaction, SqlIdentifier tab schemaId = snapshot.getNamespace( transaction.getDefaultSchema().name ).id; tableOldName = tableName.names.get( 0 ); } - try { - catalogTable = snapshot.getRelSnapshot( schemaId ).getTable( tableOldName ); - } catch ( UnknownTableException e ) { - throw new RuntimeException( e ); - } + catalogTable = snapshot.getRelSnapshot( schemaId ).getTable( tableOldName ); return catalogTable; } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlDdl.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlDdl.java index 0e01535c96..05187acc89 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlDdl.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlDdl.java @@ -29,7 +29,6 @@ import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.exceptions.UnknownColumnException; -import org.polypheny.db.catalog.exceptions.UnknownTableException; import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.languages.ParserPos; import org.polypheny.db.nodes.Operator; @@ -80,11 +79,7 @@ protected LogicalTable getCatalogTable( Context context, 
SqlIdentifier tableName schemaId = snapshot.getNamespace( context.getDefaultSchemaName() ).id; tableOldName = tableName.names.get( 0 ); } - try { - catalogTable = snapshot.getRelSnapshot( schemaId ).getTable( tableOldName ); - } catch ( UnknownTableException e ) { - throw new RuntimeException( e ); - } + catalogTable = snapshot.getRelSnapshot( schemaId ).getTable( tableOldName ); return catalogTable; } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlUtil.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlUtil.java index 31fb76fca0..adf7f2a570 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlUtil.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlUtil.java @@ -608,7 +608,7 @@ public static SqlLiteral symbol( Enum o, ParserPos parserPos ) { public static AlgDataType getNamedType( Identifier node, Snapshot snapshot ) { LogicalNamespace namespace = snapshot.getNamespace( node.getNames().get( 0 ) ); - LogicalTable table = snapshot.getRelSnapshot( namespace.id ).getLogicalTable( node.getNames().get( 1 ) ); + LogicalTable table = snapshot.getRelSnapshot( namespace.id ).getTable( node.getNames().get( 1 ) ); if ( table != null ) { return table.getRowType(); } else { diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateTable.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateTable.java index 93cc996c8e..8d154f4213 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateTable.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateTable.java @@ -209,7 +209,7 @@ public void execute( Context context, Statement statement, QueryParameters param String tableName; long schemaId; - // Cannot use getLogicalTable() here since table does not yet exist + // Cannot use getTable() here since table does not yet exist if ( name.names.size() == 2 ) { // SchemaName.TableName schemaId = snapshot.getNamespace( name.names.get( 0 ) ).id; tableName = name.names.get( 1 ); diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/EmptyScope.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/EmptyScope.java index 29b44e13b1..a7826fdd04 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/EmptyScope.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/EmptyScope.java @@ -83,7 +83,7 @@ public void resolve( List names, NameMatcher nameMatcher, boolean deep, @Override public SqlValidatorNamespace getTableNamespace( List names ) { LogicalNamespace namespace = validator.snapshot.getNamespace( names.get( 0 ) ); - CatalogEntity table = validator.snapshot.getRelSnapshot( namespace.id ).getLogicalTable( names.get( 1 ) ); + CatalogEntity table = validator.snapshot.getRelSnapshot( namespace.id ).getTable( names.get( 1 ) ); return table != null ? new EntityNamespace( validator, table ) : null; @@ -96,9 +96,9 @@ public void resolveTable( List names, NameMatcher nameMatcher, Path path // Look in the default schema, then default catalog, then root schema. 
LogicalNamespace namespace = validator.snapshot.getNamespace( names.get( 0 ) ); - LogicalTable table = validator.snapshot.getRelSnapshot( namespace.id ).getLogicalTable( names.get( 1 ) ); + LogicalTable table = validator.snapshot.getRelSnapshot( namespace.id ).getTable( names.get( 1 ) ); if ( table != null ) { - resolves.add( new Resolve( validator.snapshot.getRelSnapshot( namespace.id ).getLogicalTable( names.get( 1 ) ) ) ); + resolves.add( new Resolve( validator.snapshot.getRelSnapshot( namespace.id ).getTable( names.get( 1 ) ) ) ); } } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/ListScope.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/ListScope.java index bfd4f48c82..93651379e2 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/ListScope.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/ListScope.java @@ -95,14 +95,14 @@ private ScopeChild findChild( List names, NameMatcher nameMatcher ) { } // Look up the 2 tables independently, in case one is qualified with catalog & schema and the other is not. - /*final ValidatorTable table = child.namespace.getLogicalTable(); + /*final ValidatorTable table = child.namespace.getTable(); if ( table != null ) { final ResolvedImpl resolved = new ResolvedImpl(); resolveTable( names, nameMatcher, Path.EMPTY, resolved ); if ( resolved.count() == 1 && resolved.only().remainingNames.isEmpty() && resolved.only().namespace instanceof TableNamespace - && resolved.only().namespace.getLogicalTable().getQualifiedName().equals( table.getQualifiedName() ) ) { + && resolved.only().namespace.getTable().getQualifiedName().equals( table.getQualifiedName() ) ) { return child; } }*/ diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorImpl.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorImpl.java index b3fb106ba9..311a319a84 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorImpl.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorImpl.java @@ -3391,7 +3391,7 @@ private boolean isRolledUpColumn( SqlIdentifier identifier, SqlValidatorScope sc private @Nullable CatalogEntity findTable( String tableName, boolean caseSensitive ) { - return snapshot.getNamespaces( null ).stream().map( n -> snapshot.getRelSnapshot( n.id ).getLogicalTable( tableName ) ).filter( Objects::isNull ).findFirst().orElse( null ); + return snapshot.getNamespaces( null ).stream().map( n -> snapshot.getRelSnapshot( n.id ).getTable( tableName ) ).filter( Objects::isNull ).findFirst().orElse( null ); } @@ -3656,7 +3656,7 @@ public void validateSequenceValue( SqlValidatorScope scope, SqlIdentifier id ) { // We've found a table. But is it a sequence? 
//final SqlValidatorNamespace ns = resolved.only().namespace; /*if ( ns instanceof TableNamespace ) { - final Entity entity = ns.getLogicalTable().unwrap( Entity.class ); + final Entity entity = ns.getTable().unwrap( Entity.class ); switch ( entity.getJdbcTableType() ) { case SEQUENCE: case TEMPORARY_SEQUENCE: diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorUtil.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorUtil.java index 1f0e8de3a7..0902eec998 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorUtil.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorUtil.java @@ -96,7 +96,7 @@ public static CatalogEntity getLogicalEntity( SqlValidatorNamespace namespace, S final SqlValidatorNamespace resolvedNamespace = dmlNamespace.resolve(); if ( resolvedNamespace.isWrapperFor( TableNamespace.class ) ) { final TableNamespace tableNamespace = resolvedNamespace.unwrap( TableNamespace.class ); - final ValidatorTable validatorTable = tableNamespace.getLogicalTable(); + final ValidatorTable validatorTable = tableNamespace.getTable(); final AlgDataTypeFactory typeFactory = AlgDataTypeFactory.DEFAULT; final List extendedFields = dmlNamespace.extendList == null @@ -620,7 +620,7 @@ public static boolean isTableRelational( SqlValidatorImpl validator ) { } SqlIdentifier id = ((SqlIdentifier) validator.getTableScope().getNode()); return validator.snapshot.getNamespace( id.names.get( 0 ) ).namespaceType == NamespaceType.RELATIONAL; - /*LogicalGraph graph = validator.snapshot.getGraphSnapshot( namespace.id ).getLogicalTable( names.get( 1 ) ); + /*LogicalGraph graph = validator.snapshot.getGraphSnapshot( namespace.id ).getTable( names.get( 1 ) ); if ( graph != null ) { return false; } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/WithScope.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/WithScope.java index 7e0a3f93dd..c712b74d53 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/WithScope.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/WithScope.java @@ -71,7 +71,7 @@ public void resolveTable( List names, NameMatcher nameMatcher, Path path //final SqlValidatorNamespace ns = validator.getSqlNamespace( withItem ); //final Step path2 = path.plus( ns.getRowType(), 0, names.get( 0 ), StructKind.FULLY_QUALIFIED ); LogicalNamespace namespace = validator.snapshot.getNamespace( names.get( 0 ) ); - CatalogEntity entity = validator.snapshot.getRelSnapshot( namespace.id ).getLogicalTable( names.get( 0 ) ); + CatalogEntity entity = validator.snapshot.getRelSnapshot( namespace.id ).getTable( names.get( 0 ) ); resolved.found( entity ); return; } @@ -85,7 +85,7 @@ public void resolve( List names, NameMatcher nameMatcher, boolean deep, final SqlValidatorNamespace ns = validator.getSqlNamespace( withItem ); final Step path = Path.EMPTY.plus( ns.getRowType(), 0, names.get( 0 ), StructKind.FULLY_QUALIFIED ); LogicalNamespace namespace = validator.snapshot.getNamespace( names.get( 0 ) ); - CatalogEntity entity = validator.snapshot.getRelSnapshot( namespace.id ).getLogicalTable( names.get( 0 ) ); + CatalogEntity entity = validator.snapshot.getRelSnapshot( namespace.id ).getTable( names.get( 0 ) ); resolved.found( entity ); return; } diff --git 
a/plugins/sql-language/src/main/java/org/polypheny/db/sql/sql2alg/SqlToAlgConverter.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/sql2alg/SqlToAlgConverter.java index 575f0ea611..a930067e01 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/sql2alg/SqlToAlgConverter.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/sql2alg/SqlToAlgConverter.java @@ -2160,8 +2160,8 @@ protected void convertCollectionTable( Blackboard bb, SqlCall call ) { final SqlCallBinding callBinding = new SqlCallBinding( bb.scope.getValidator(), bb.scope, call ); if ( operator instanceof SqlUserDefinedTableMacro ) { final SqlUserDefinedTableMacro udf = (SqlUserDefinedTableMacro) operator; - //final TranslatableEntity table = udf.getLogicalTable( typeFactory, callBinding.sqlOperands() ); - //final LogicalTable catalogTable = Catalog.getInstance().getLogicalTable( table.getId() ); + //final TranslatableEntity table = udf.getTable( typeFactory, callBinding.sqlOperands() ); + //final LogicalTable catalogTable = Catalog.getInstance().getTable( table.getId() ); //final AlgDataType rowType = table.getRowType( typeFactory ); //AlgOptEntity algOptEntity = AlgOptEntityImpl.create( null, rowType, table, catalogTable, null ); //AlgNode converted = toAlg( algOptEntity ); diff --git a/webui/src/main/java/org/polypheny/db/webui/Crud.java b/webui/src/main/java/org/polypheny/db/webui/Crud.java index 6f31b75720..079d7f42c2 100644 --- a/webui/src/main/java/org/polypheny/db/webui/Crud.java +++ b/webui/src/main/java/org/polypheny/db/webui/Crud.java @@ -302,17 +302,12 @@ Result getTable( final UIRequest request ) { // determine if it is a view or a table LogicalTable catalogTable; - try { - catalogTable = catalog.getSnapshot().getRelSnapshot( catalog.getSnapshot().getNamespace( t[0] ).id ).getTable( t[1] ); - result.setNamespaceType( catalogTable.getNamespaceType() ); - if ( catalogTable.modifiable ) { - result.setType( ResultType.TABLE ); - } else { - result.setType( ResultType.VIEW ); - } - } catch ( UnknownTableException e ) { - log.error( "Caught exception", e ); - return result.setError( "Could not retrieve type of Result (table/view)." 
); + catalogTable = catalog.getSnapshot().getRelSnapshot( catalog.getSnapshot().getNamespace( t[0] ).id ).getTable( t[1] ); + result.setNamespaceType( catalogTable.getNamespaceType() ); + if ( catalogTable.modifiable ) { + result.setType( ResultType.TABLE ); + } else { + result.setType( ResultType.VIEW ); } //get headers with default values @@ -658,7 +653,9 @@ void insertRow( final Context ctx ) { StringJoiner columns = new StringJoiner( ",", "(", ")" ); StringJoiner values = new StringJoiner( ",", "(", ")" ); - List logicalColumns = catalog.getSnapshot().getRelSnapshot( catalog.getLogicalEntity( tableId ).namespaceId ).getColumns( new org.polypheny.db.catalog.logistic.Pattern( split[1] ), null ); + String finalTableId = tableId; + LogicalTable table = catalog.getSnapshot().getNamespaces( null ).stream().map( n -> catalog.getSnapshot().getRelSnapshot( n.id ).getTable( finalTableId ) ).findFirst().orElse( null ); + List logicalColumns = catalog.getSnapshot().getRelSnapshot( table.namespaceId ).getColumns( new org.polypheny.db.catalog.logistic.Pattern( split[1] ), null ); try { int i = 0; for ( LogicalColumn logicalColumn : logicalColumns ) { @@ -948,8 +945,11 @@ public static String uiValueToSql( final String value, final PolyType type, fina private String computeWherePK( final String tableName, final String columnName, final Map filter ) { StringJoiner joiner = new StringJoiner( " AND ", "", "" ); Map catalogColumns = getCatalogColumns( tableName, columnName ); - LogicalTable catalogTable; - catalogTable = catalog.getLogicalEntity( tableName ).unwrap( LogicalTable.class ); + if ( catalogColumns.isEmpty() ) { + throw new RuntimeException(); + } + + LogicalTable catalogTable = catalog.getSnapshot().getRelSnapshot( catalogColumns.values().iterator().next().namespaceId ).getTable( tableName ); CatalogPrimaryKey pk = catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getPrimaryKey( catalogTable.primaryKey ); for ( long colId : pk.columnIds ) { String colName = catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getColumn( colId ).name; @@ -1120,44 +1120,38 @@ void getColumns( final Context ctx ) { String[] t = request.tableId.split( "\\." ); ArrayList cols = new ArrayList<>(); - try { - LogicalNamespace namespace = catalog.getSnapshot().getNamespace( t[0] ); - LogicalTable catalogTable = catalog.getSnapshot().getRelSnapshot( namespace.id ).getTable( t[1] ); - ArrayList primaryColumns; - if ( catalogTable.primaryKey != null ) { - CatalogPrimaryKey primaryKey = catalog.getSnapshot().getRelSnapshot( namespace.id ).getPrimaryKey( catalogTable.primaryKey ); - primaryColumns = new ArrayList<>( primaryKey.getColumnNames() ); - } else { - primaryColumns = new ArrayList<>(); - } - for ( LogicalColumn logicalColumn : catalog.getSnapshot().getRelSnapshot( namespace.id ).getColumns( catalogTable.id ) ) { - String defaultValue = logicalColumn.defaultValue == null ? null : logicalColumn.defaultValue.value; - String collectionsType = logicalColumn.collectionsType == null ? 
"" : logicalColumn.collectionsType.getName(); - cols.add( - new DbColumn( - logicalColumn.name, - logicalColumn.type.getName(), - collectionsType, - logicalColumn.nullable, - logicalColumn.length, - logicalColumn.scale, - logicalColumn.dimension, - logicalColumn.cardinality, - primaryColumns.contains( logicalColumn.name ), - defaultValue ) ); - } - result = new Result( cols.toArray( new DbColumn[0] ), null ); - if ( catalogTable.entityType == EntityType.ENTITY ) { - result.setType( ResultType.TABLE ); - } else if ( catalogTable.entityType == EntityType.MATERIALIZED_VIEW ) { - result.setType( ResultType.MATERIALIZED ); - } else { - result.setType( ResultType.VIEW ); - } - } catch ( UnknownTableException e ) { - log.error( "Caught exception while getting a column", e ); - ctx.status( 400 ).json( new Result( e ) ); - return; + LogicalNamespace namespace = catalog.getSnapshot().getNamespace( t[0] ); + LogicalTable catalogTable = catalog.getSnapshot().getRelSnapshot( namespace.id ).getTable( t[1] ); + ArrayList primaryColumns; + if ( catalogTable.primaryKey != null ) { + CatalogPrimaryKey primaryKey = catalog.getSnapshot().getRelSnapshot( namespace.id ).getPrimaryKey( catalogTable.primaryKey ); + primaryColumns = new ArrayList<>( primaryKey.getColumnNames() ); + } else { + primaryColumns = new ArrayList<>(); + } + for ( LogicalColumn logicalColumn : catalog.getSnapshot().getRelSnapshot( namespace.id ).getColumns( catalogTable.id ) ) { + String defaultValue = logicalColumn.defaultValue == null ? null : logicalColumn.defaultValue.value; + String collectionsType = logicalColumn.collectionsType == null ? "" : logicalColumn.collectionsType.getName(); + cols.add( + new DbColumn( + logicalColumn.name, + logicalColumn.type.getName(), + collectionsType, + logicalColumn.nullable, + logicalColumn.length, + logicalColumn.scale, + logicalColumn.dimension, + logicalColumn.cardinality, + primaryColumns.contains( logicalColumn.name ), + defaultValue ) ); + } + result = new Result( cols.toArray( new DbColumn[0] ), null ); + if ( catalogTable.entityType == EntityType.ENTITY ) { + result.setType( ResultType.TABLE ); + } else if ( catalogTable.entityType == EntityType.MATERIALIZED_VIEW ) { + result.setType( ResultType.MATERIALIZED ); + } else { + result.setType( ResultType.VIEW ); } ctx.json( result ); @@ -3197,14 +3191,10 @@ public static Result executeSqlSelect( final Statement statement, final UIReques LogicalTable catalogTable = null; if ( request.tableId != null ) { String[] t = request.tableId.split( "\\." 
); - try { - LogicalNamespace namespace = crud.catalog.getSnapshot().getNamespace( t[0] ); + LogicalNamespace namespace = crud.catalog.getSnapshot().getNamespace( t[0] ); - catalogTable = crud.catalog.getSnapshot().getRelSnapshot( namespace.id ).getTable( t[1] ); - entityType = catalogTable.entityType; - } catch ( UnknownTableException e ) { - log.error( "Caught exception", e ); - } + catalogTable = crud.catalog.getSnapshot().getRelSnapshot( namespace.id ).getTable( t[1] ); + entityType = catalogTable.entityType; } ArrayList header = new ArrayList<>(); diff --git a/webui/src/main/java/org/polypheny/db/webui/crud/LanguageCrud.java b/webui/src/main/java/org/polypheny/db/webui/crud/LanguageCrud.java index ff65eef988..c5e3982d46 100644 --- a/webui/src/main/java/org/polypheny/db/webui/crud/LanguageCrud.java +++ b/webui/src/main/java/org/polypheny/db/webui/crud/LanguageCrud.java @@ -42,7 +42,6 @@ import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.exceptions.UnknownCollectionException; -import org.polypheny.db.catalog.exceptions.UnknownTableException; import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.catalog.logistic.Pattern; @@ -191,11 +190,7 @@ public static Result getResult( QueryLanguage language, Statement statement, Que if ( request.tableId != null ) { String[] t = request.tableId.split( "\\." ); LogicalNamespace namespace = catalog.getSnapshot().getNamespace( t[0] ); - try { - catalogTable = catalog.getSnapshot().getRelSnapshot( namespace.id ).getTable( t[1] ); - } catch ( UnknownTableException e ) { - log.error( "Caught exception", e ); - } + catalogTable = catalog.getSnapshot().getRelSnapshot( namespace.id ).getTable( t[1] ); } ArrayList header = new ArrayList<>(); diff --git a/webui/src/main/java/org/polypheny/db/webui/crud/StatisticCrud.java b/webui/src/main/java/org/polypheny/db/webui/crud/StatisticCrud.java index 21d006346d..06289f3658 100644 --- a/webui/src/main/java/org/polypheny/db/webui/crud/StatisticCrud.java +++ b/webui/src/main/java/org/polypheny/db/webui/crud/StatisticCrud.java @@ -29,7 +29,6 @@ import org.apache.commons.lang3.math.NumberUtils; import org.polypheny.db.StatisticsManager; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.exceptions.UnknownTableException; import org.polypheny.db.config.Config; import org.polypheny.db.config.Config.ConfigListener; import org.polypheny.db.config.RuntimeConfig; @@ -87,15 +86,10 @@ public void getTableStatistics( Context ctx ) { UIRequest request = ctx.bodyAsClass( UIRequest.class ); long tableId; long schemaId; - try { - schemaId = Catalog.getInstance().getSnapshot().getNamespace( request.tableId.split( "\\." )[0] ).id; - tableId = Catalog.getInstance().getSnapshot().getRelSnapshot( schemaId ).getTable( request.tableId.split( "\\." )[1] ).id; - - ctx.json( statisticsManager.getTableStatistic( schemaId, tableId ) ); - } catch ( UnknownTableException e ) { - throw new RuntimeException( "Schema: " + request.tableId.split( "\\." )[0] + " or Table: " - + request.tableId.split( "\\." )[1] + "is unknown." ); - } + schemaId = Catalog.getInstance().getSnapshot().getNamespace( request.tableId.split( "\\." )[0] ).id; + tableId = Catalog.getInstance().getSnapshot().getRelSnapshot( schemaId ).getTable( request.tableId.split( "\\." 
)[1] ).id; + + ctx.json( statisticsManager.getTableStatistic( schemaId, tableId ) ); } diff --git a/webui/src/main/java/org/polypheny/db/webui/models/requests/BatchUpdateRequest.java b/webui/src/main/java/org/polypheny/db/webui/models/requests/BatchUpdateRequest.java index 5a5c699303..aa73302a63 100644 --- a/webui/src/main/java/org/polypheny/db/webui/models/requests/BatchUpdateRequest.java +++ b/webui/src/main/java/org/polypheny/db/webui/models/requests/BatchUpdateRequest.java @@ -34,7 +34,6 @@ import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.exceptions.UnknownColumnException; -import org.polypheny.db.catalog.exceptions.UnknownTableException; import org.polypheny.db.transaction.Statement; import org.polypheny.db.type.PolyTypeFamily; import org.polypheny.db.webui.Crud; @@ -68,7 +67,7 @@ public String getQuery( String tableId, Statement statement, HttpServletRequest LogicalNamespace namespace = catalog.getSnapshot().getNamespace( split[0] ); LogicalTable table = catalog.getSnapshot().getRelSnapshot( namespace.id ).getTable( split[1] ); logicalColumn = catalog.getSnapshot().getRelSnapshot( table.namespaceId ).getColumn( table.id, entry.getKey() ); - } catch ( UnknownColumnException | UnknownTableException e ) { + } catch ( UnknownColumnException e ) { log.error( "Could not determine column type", e ); return null; } From a533063f1ef26ec731ed4cb4a1972778cc5d7487 Mon Sep 17 00:00:00 2001 From: datomo Date: Sat, 11 Mar 2023 00:49:12 +0100 Subject: [PATCH 045/436] adjusting ddlManager --- .../db/adapter/java/ReflectiveSchema.java | 4 +- .../catalogs/AllocationDocumentCatalog.java | 7 - .../catalogs/AllocationGraphCatalog.java | 19 -- .../catalog/entity/CatalogDataPlacement.java | 2 +- .../catalog/entity/CatalogGraphPlacement.java | 34 +- .../entity/CatalogMaterializedView.java | 8 +- .../db/catalog/entity/CatalogView.java | 11 +- .../catalog/entity/logical/LogicalTable.java | 13 +- .../catalog/exceptions/CatalogException.java | 2 +- .../db/catalog/snapshot/AllocSnapshot.java | 36 +++ .../catalog/snapshot/LogicalRelSnapshot.java | 2 +- .../snapshot/impl/AllocSnapshotImpl.java | 46 +++ .../snapshot/impl/LogicalRelSnapshotImpl.java | 2 +- .../org/polypheny/db/ddl/DdlManagerImpl.java | 297 ++++++++++-------- .../processing/ConstraintEnforceAttacher.java | 15 +- .../db/routing/routers/DmlRouterImpl.java | 91 +++--- .../statistics/StatisticQueryProcessor.java | 7 +- .../monitoring/statistics/StatisticTable.java | 3 +- .../org/polypheny/db/sql/language/SqlDdl.java | 7 +- .../java/org/polypheny/db/webui/Crud.java | 52 +-- .../polypheny/db/webui/crud/LanguageCrud.java | 27 +- .../models/requests/BatchUpdateRequest.java | 12 +- 22 files changed, 388 insertions(+), 309 deletions(-) diff --git a/core/src/main/java/org/polypheny/db/adapter/java/ReflectiveSchema.java b/core/src/main/java/org/polypheny/db/adapter/java/ReflectiveSchema.java index 2c23838aa1..0d91620ebb 100644 --- a/core/src/main/java/org/polypheny/db/adapter/java/ReflectiveSchema.java +++ b/core/src/main/java/org/polypheny/db/adapter/java/ReflectiveSchema.java @@ -34,6 +34,7 @@ package org.polypheny.db.adapter.java; +import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableMultimap; import com.google.common.collect.Multimap; @@ -246,8 +247,7 @@ private static class ReflectiveEntity extends LogicalTable implements ScannableE ReflectiveEntity( Type elementType, 
Enumerable enumerable, Long id, Long partitionId, Long adapterId ) { - //super( elementType, id, partitionId, adapterId ); - super( id, "test", -1, EntityType.ENTITY, null, false ); + super( id, "test", -1, EntityType.ENTITY, null, false, ImmutableList.of() ); this.elementType = elementType; this.enumerable = enumerable; throw new NotImplementedException(); diff --git a/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationDocumentCatalog.java b/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationDocumentCatalog.java index d0b78c2f54..82bc74d80d 100644 --- a/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationDocumentCatalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationDocumentCatalog.java @@ -18,8 +18,6 @@ import java.util.List; import org.polypheny.db.adapter.DataStore; -import org.polypheny.db.catalog.entity.CatalogCollectionMapping; -import org.polypheny.db.catalog.entity.CatalogCollectionPlacement; import org.polypheny.db.catalog.exceptions.GenericCatalogException; import org.polypheny.db.catalog.logistic.PlacementType; @@ -36,14 +34,9 @@ public interface AllocationDocumentCatalog extends AllocationCatalog { */ public abstract long addCollectionLogistics( long schemaId, String name, List stores, boolean onlyPlacement ) throws GenericCatalogException; - List getCollectionPlacementsByAdapter( long id ); - void addCollectionPlacement( long namespaceId, long adapterId, long id, PlacementType placementType ); - CatalogCollectionMapping getCollectionMapping( long id ); - void dropCollectionPlacement( long id, long adapterId ); - CatalogCollectionPlacement getCollectionPlacement( long id, long placementId ); } diff --git a/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationGraphCatalog.java b/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationGraphCatalog.java index 8379f33ac3..81d25b9bbc 100644 --- a/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationGraphCatalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationGraphCatalog.java @@ -16,9 +16,6 @@ package org.polypheny.db.catalog.catalogs; -import java.util.List; -import org.polypheny.db.catalog.entity.CatalogGraphPlacement; - public interface AllocationGraphCatalog extends AllocationCatalog { @@ -31,14 +28,6 @@ public interface AllocationGraphCatalog extends AllocationCatalog { */ public abstract long addGraphPlacement( long adapterId, long graphId ); - /** - * Gets a collection of graph placements for a given adapter. - * - * @param adapterId The id of the adapter on which the placements are placed - * @return The collection of graph placements - */ - public abstract List getGraphPlacements( long adapterId ); - /** * Deletes a specific graph placement for a given graph and adapter. * @@ -47,13 +36,5 @@ public interface AllocationGraphCatalog extends AllocationCatalog { */ public abstract void deleteGraphPlacement( long adapterId, long graphId ); - /** - * Gets a specific placement for a graph on a given adapter. 
- * - * @param graphId The id of the graph - * @param adapterId The id of the adapter on which the placement is placed - * @return The placement matching the conditions - */ - public abstract CatalogGraphPlacement getGraphPlacement( long graphId, long adapterId ); } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogDataPlacement.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogDataPlacement.java index 86254bd655..e13d23dd3f 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogDataPlacement.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogDataPlacement.java @@ -42,7 +42,7 @@ public class CatalogDataPlacement implements CatalogObject { private static final long serialVersionUID = 5192378654968316873L; public long tableId; - public int adapterId; + public long adapterId; public PlacementType placementType; diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogGraphPlacement.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogGraphPlacement.java index 49620e3dc8..cbbc92b3da 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogGraphPlacement.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogGraphPlacement.java @@ -16,22 +16,36 @@ package org.polypheny.db.catalog.entity; +import io.activej.serializer.annotations.Deserialize; +import io.activej.serializer.annotations.Serialize; import java.io.Serializable; import javax.annotation.Nullable; +import lombok.Value; +import lombok.experimental.SuperBuilder; +@Value +@SuperBuilder(toBuilder = true) public class CatalogGraphPlacement extends CatalogEntityPlacement { private static final long serialVersionUID = 5889825050034392549L; - public final int adapterId; - public final long graphId; - public final String physicalName; - public final long partitionId; - - - public CatalogGraphPlacement( int adapterId, long graphId, @Nullable String physicalName, long partitionId ) { - super( graphId, (long) adapterId, graphId ); + @Serialize + public long adapterId; + @Serialize + public long graphId; + @Serialize + public String physicalName; + @Serialize + public long partitionId; + + + public CatalogGraphPlacement( + @Deserialize("adapterId") long adapterId, + @Deserialize("graphId") long graphId, + @Deserialize("physicalName") @Nullable String physicalName, + @Deserialize("partitionId") long partitionId ) { + super( graphId, adapterId, graphId ); this.adapterId = adapterId; this.graphId = graphId; this.physicalName = physicalName; @@ -45,8 +59,4 @@ public Serializable[] getParameterArray() { } - public CatalogGraphPlacement replacePhysicalName( String physicalName ) { - return new CatalogGraphPlacement( adapterId, graphId, physicalName, partitionId ); - } - } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogMaterializedView.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogMaterializedView.java index 7caf971e0a..b9bf30f458 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogMaterializedView.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogMaterializedView.java @@ -16,6 +16,8 @@ package org.polypheny.db.catalog.entity; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; import lombok.EqualsAndHashCode; import lombok.Value; import lombok.experimental.SuperBuilder; @@ -52,6 +54,8 @@ public CatalogMaterializedView( Long primaryKey, boolean modifiable, AlgCollation algCollation, + ImmutableList 
connectedViews, + ImmutableMap> underlyingTables, String language, MaterializedCriteria materializedCriteria, boolean ordered @@ -65,6 +69,8 @@ public CatalogMaterializedView( primaryKey, modifiable, algCollation, + underlyingTables, + connectedViews, language ); this.query = query; this.algCollation = algCollation; @@ -74,8 +80,6 @@ public CatalogMaterializedView( } - - @Override public AlgNode getDefinition() { return Catalog.getInstance().getNodeInfo().get( id ); diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogView.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogView.java index d2c29068c4..99ca24aa42 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogView.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogView.java @@ -17,7 +17,10 @@ package org.polypheny.db.catalog.entity; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; import lombok.EqualsAndHashCode; +import lombok.Getter; import lombok.Value; import lombok.experimental.NonFinal; import lombok.experimental.SuperBuilder; @@ -42,6 +45,8 @@ public class CatalogView extends LogicalTable { private static final long serialVersionUID = -4771308114962700515L; + @Getter + protected ImmutableMap> underlyingTables; public String language; public AlgCollation algCollation; public String query; @@ -56,6 +61,8 @@ public CatalogView( Long primaryKey, boolean modifiable, AlgCollation algCollation, + ImmutableMap> underlyingTables, + ImmutableList connectedViews, String language ) { super( id, @@ -63,9 +70,11 @@ public CatalogView( namespaceId, entityType, primaryKey, - modifiable ); + modifiable, + connectedViews ); this.query = query; this.algCollation = algCollation; + this.underlyingTables = underlyingTables; // mapdb cannot handle the class QueryLanguage, therefore we use the String here this.language = language; } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalTable.java b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalTable.java index 9e19c3be32..c83988eec6 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalTable.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalTable.java @@ -17,11 +17,13 @@ package org.polypheny.db.catalog.entity.logical; +import com.google.common.collect.ImmutableList; import io.activej.serializer.annotations.Deserialize; import io.activej.serializer.annotations.Serialize; import java.io.Serializable; import java.util.List; import lombok.EqualsAndHashCode; +import lombok.Getter; import lombok.NonNull; import lombok.RequiredArgsConstructor; import lombok.experimental.NonFinal; @@ -50,6 +52,9 @@ public class LogicalTable extends LogicalEntity implements Comparable connectedViews; public LogicalTable( @@ -58,11 +63,13 @@ public LogicalTable( @Deserialize("namespaceId") final long namespaceId, @Deserialize("entityType") @NonNull final EntityType type, @Deserialize("primaryKey") final Long primaryKey, - @Deserialize("modifiable") boolean modifiable ) { + @Deserialize("modifiable") boolean modifiable, + @Deserialize("connectedViews") ImmutableList connectedViews ) { super( id, name, namespaceId, type, NamespaceType.RELATIONAL ); this.primaryKey = primaryKey; this.modifiable = modifiable; + this.connectedViews = connectedViews; if ( type == EntityType.ENTITY && !modifiable ) { throw new RuntimeException( "Tables of table type TABLE must be modifiable!" 
); } @@ -125,10 +132,6 @@ public List getColumnStrategies() { } - public List getConnectedViews() { - return null; - } - @RequiredArgsConstructor diff --git a/core/src/main/java/org/polypheny/db/catalog/exceptions/CatalogException.java b/core/src/main/java/org/polypheny/db/catalog/exceptions/CatalogException.java index e09ee0e12b..f4655d7f06 100644 --- a/core/src/main/java/org/polypheny/db/catalog/exceptions/CatalogException.java +++ b/core/src/main/java/org/polypheny/db/catalog/exceptions/CatalogException.java @@ -17,7 +17,7 @@ package org.polypheny.db.catalog.exceptions; -public class CatalogException extends Exception { +public class CatalogException extends RuntimeException { /** * Constructs a new exception with {@code null} as its detail message. The cause is not initialized, and may subsequently be initialized by a call to {@link #initCause}. diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/AllocSnapshot.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/AllocSnapshot.java index 5727c7e778..c1f25c23b5 100644 --- a/core/src/main/java/org/polypheny/db/catalog/snapshot/AllocSnapshot.java +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/AllocSnapshot.java @@ -20,8 +20,11 @@ import com.google.common.collect.ImmutableMap; import java.util.List; import org.polypheny.db.catalog.entity.CatalogAdapter; +import org.polypheny.db.catalog.entity.CatalogCollectionMapping; +import org.polypheny.db.catalog.entity.CatalogCollectionPlacement; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; import org.polypheny.db.catalog.entity.CatalogDataPlacement; +import org.polypheny.db.catalog.entity.CatalogGraphPlacement; import org.polypheny.db.catalog.entity.CatalogPartition; import org.polypheny.db.catalog.entity.CatalogPartitionGroup; import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; @@ -29,6 +32,7 @@ import org.polypheny.db.catalog.entity.allocation.AllocationTable; import org.polypheny.db.catalog.logistic.DataPlacementRole; import org.polypheny.db.catalog.logistic.Pattern; +import org.polypheny.db.partition.properties.PartitionProperty; public interface AllocSnapshot { @@ -373,4 +377,36 @@ public interface AllocSnapshot { boolean isPartitioned( long id ); + + /** + * Gets a specific placement for a graph on a given adapter. + * + * @param graphId The id of the graph + * @param adapterId The id of the adapter on which the placement is placed + * @return The placement matching the conditions + */ + public abstract CatalogGraphPlacement getGraphPlacement( long graphId, long adapterId ); + + + /** + * Gets a collection of graph placements for a given adapter. 
+ * + * @param adapterId The id of the adapter on which the placements are placed + * @return The collection of graph placements + */ + public abstract List getGraphPlacements( long adapterId ); + + + CatalogCollectionPlacement getCollectionPlacement( long id, long placementId ); + + CatalogCollectionMapping getCollectionMapping( long id ); + + List getCollectionPlacementsByAdapter( long id ); + + List getCollectionPlacements( long collectionId ); + + PartitionProperty getPartitionProperty( long id ); + + boolean adapterHasPlacement( long adapterId, long id ); + } diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalRelSnapshot.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalRelSnapshot.java index 2fb41fce50..120f975b5f 100644 --- a/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalRelSnapshot.java +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalRelSnapshot.java @@ -107,7 +107,7 @@ public interface LogicalRelSnapshot { * @return A CatalogColumn * @throws UnknownColumnException If there is no column with this name in the specified table of the database and schema. */ - public abstract LogicalColumn getColumn( long tableId, String columnName ) throws UnknownColumnException; + public abstract LogicalColumn getColumn( long tableId, String columnName ); /** * Returns the column with the specified name in the specified table of the specified database and schema. diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/AllocSnapshotImpl.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/AllocSnapshotImpl.java index 76504b0024..b3864bdf77 100644 --- a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/AllocSnapshotImpl.java +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/AllocSnapshotImpl.java @@ -22,8 +22,11 @@ import java.util.Map; import org.polypheny.db.catalog.catalogs.AllocationCatalog; import org.polypheny.db.catalog.entity.CatalogAdapter; +import org.polypheny.db.catalog.entity.CatalogCollectionMapping; +import org.polypheny.db.catalog.entity.CatalogCollectionPlacement; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; import org.polypheny.db.catalog.entity.CatalogDataPlacement; +import org.polypheny.db.catalog.entity.CatalogGraphPlacement; import org.polypheny.db.catalog.entity.CatalogPartition; import org.polypheny.db.catalog.entity.CatalogPartitionGroup; import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; @@ -32,6 +35,7 @@ import org.polypheny.db.catalog.logistic.DataPlacementRole; import org.polypheny.db.catalog.logistic.Pattern; import org.polypheny.db.catalog.snapshot.AllocSnapshot; +import org.polypheny.db.partition.properties.PartitionProperty; public class AllocSnapshotImpl implements AllocSnapshot { @@ -290,4 +294,46 @@ public boolean isPartitioned( long id ) { return false; } + + @Override + public CatalogGraphPlacement getGraphPlacement( long graphId, long adapterId ) { + return null; + } + + + @Override + public List getGraphPlacements( long adapterId ) { + return null; + } + + + @Override + public CatalogCollectionPlacement getCollectionPlacement( long id, long placementId ) { + return null; + } + + + @Override + public CatalogCollectionMapping getCollectionMapping( long id ) { + return null; + } + + + @Override + public List getCollectionPlacementsByAdapter( long id ) { + return null; + } + + + @Override + public List getCollectionPlacements( long collectionId ) { + return null; + } + + + @Override + public PartitionProperty 
getPartitionProperty( long id ) {
+ return null;
+ }
+
+
+ @Override
+ public boolean adapterHasPlacement( long adapterId, long id ) {
+ return false;
+ }
+
}
diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalRelSnapshotImpl.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalRelSnapshotImpl.java
index 72f09d49b9..c2af262bf6 100644
--- a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalRelSnapshotImpl.java
+++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalRelSnapshotImpl.java
@@ -209,7 +209,7 @@ public List getColumns( @Nullable Pattern tableName, @Nullable Pa

 @Override
- public LogicalColumn getColumn( long tableId, String columnName ) throws UnknownColumnException {
+ public LogicalColumn getColumn( long tableId, String columnName ) {
 return tableIdColumnNameColumn.get( Pair.of( tableId, columnName ) );
 }

diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java
index ab46788884..fb2b69c08d 100644
--- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java
+++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java
@@ -52,7 +52,6 @@
 import org.polypheny.db.catalog.Catalog;
 import org.polypheny.db.catalog.entity.CatalogAdapter;
 import org.polypheny.db.catalog.entity.CatalogAdapter.AdapterType;
-import org.polypheny.db.catalog.entity.CatalogCollectionMapping;
 import org.polypheny.db.catalog.entity.CatalogCollectionPlacement;
 import org.polypheny.db.catalog.entity.CatalogColumnPlacement;
 import org.polypheny.db.catalog.entity.CatalogConstraint;
@@ -99,7 +98,9 @@
 import org.polypheny.db.catalog.logistic.NamespaceType;
 import org.polypheny.db.catalog.logistic.PartitionType;
 import org.polypheny.db.catalog.logistic.PlacementType;
+import org.polypheny.db.catalog.snapshot.AllocSnapshot;
 import org.polypheny.db.catalog.snapshot.LogicalRelSnapshot;
+import org.polypheny.db.catalog.snapshot.Snapshot;
 import org.polypheny.db.config.RuntimeConfig;
 import org.polypheny.db.ddl.exception.AlterSourceException;
 import org.polypheny.db.ddl.exception.ColumnNotExistsException;
@@ -195,11 +196,7 @@ protected DataStore getDataStoreInstance( long storeId ) throws DdlOnSourceExcep

 private LogicalColumn getCatalogColumn( long namespaceId, long tableId, String columnName ) throws ColumnNotExistsException {
- try {
- return catalog.getSnapshot().getRelSnapshot( namespaceId ).getColumn( tableId, columnName );
- } catch ( UnknownColumnException e ) {
- throw new ColumnNotExistsException( tableId, columnName );
- }
+ return catalog.getSnapshot().getRelSnapshot( namespaceId ).getColumn( tableId, columnName );
 }

@@ -291,12 +288,14 @@ private void handleSource( DataSource adapter ) {
 try {
 catalog.getLogicalRel( defaultNamespaceId ).addPrimaryKey( tableId, primaryKeyColIds );
 LogicalTable catalogTable = catalog.getSnapshot().getRelSnapshot( defaultNamespaceId ).getTable( tableId );
+
+ PartitionProperty property = catalog.getSnapshot().getAllocSnapshot().getPartitionProperty( catalogTable.id );
 catalog.getAllocRel( defaultNamespaceId )
 .addPartitionPlacement(
 catalogTable.namespaceId,
 adapter.getAdapterId(),
 catalogTable.id,
- catalogTable.partitionProperty.partitionIds.get( 0 ),
+ property.partitionIds.get( 0 ),
 PlacementType.AUTOMATIC,
 DataPlacementRole.UPTODATE );
 } catch ( GenericCatalogException e ) {
@@ -321,7 +320,7 @@ public void dropAdapter( String name, Statement statement ) throws UnknownAdapte
 if ( catalogAdapter.type == AdapterType.SOURCE ) {
 // Remove collection
 Set collectionsToDrop = new HashSet<>();
- for ( 
CatalogCollectionPlacement collectionPlacement : catalog.getAllocDoc( defaultNamespaceId ).getCollectionPlacementsByAdapter( catalogAdapter.id ) ) { + for ( CatalogCollectionPlacement collectionPlacement : catalog.getSnapshot().getAllocSnapshot().getCollectionPlacementsByAdapter( catalogAdapter.id ) ) { collectionsToDrop.add( collectionPlacement.collectionId ); } @@ -329,7 +328,7 @@ public void dropAdapter( String name, Statement statement ) throws UnknownAdapte LogicalCollection collection = catalog.getSnapshot().getDocSnapshot( 1 ).getCollection( id ); // Make sure that there is only one adapter - if ( collection.placements.size() != 1 ) { + if ( catalog.getSnapshot().getAllocSnapshot().getDataPlacements( collection.id ).size() != 1 ) { throw new RuntimeException( "The data source contains collections with more than one placement. This should not happen!" ); } @@ -364,7 +363,7 @@ public void dropAdapter( String name, Statement statement ) throws UnknownAdapte LogicalTable table = catalog.getSnapshot().getRelSnapshot( defaultNamespaceId ).getTable( tableId ); // Make sure that there is only one adapter - if ( table.dataPlacements.size() != 1 ) { + if ( catalog.getSnapshot().getAllocSnapshot().getDataPlacements( tableId ).size() != 1 ) { throw new RuntimeException( "The data source contains tables with more than one placement. This should not happen!" ); } @@ -374,7 +373,7 @@ public void dropAdapter( String name, Statement statement ) throws UnknownAdapte } // Delete column placement in catalog - for ( LogicalColumn column : table.columns ) { + for ( LogicalColumn column : catalog.getSnapshot().getRelSnapshot( defaultNamespaceId ).getColumns( tableId ) ) { if ( catalog.getSnapshot().getAllocSnapshot().checkIfExistsColumnPlacement( catalogAdapter.id, column.id ) ) { catalog.getAllocRel( defaultNamespaceId ).deleteColumnPlacement( catalogAdapter.id, column.id, false ); } @@ -388,7 +387,7 @@ public void dropAdapter( String name, Statement statement ) throws UnknownAdapte } // Delete columns - for ( LogicalColumn column : table.columns ) { + for ( LogicalColumn column : catalog.getSnapshot().getRelSnapshot( defaultNamespaceId ).getColumns( tableId ) ) { catalog.getLogicalRel( defaultNamespaceId ).deleteColumn( column.id ); } @@ -433,14 +432,14 @@ public void addColumnToSourceTable( LogicalTable catalogTable, String columnPhys } // Make sure there is only one adapter - if ( catalog.getSnapshot().getAllocSnapshot().getColumnPlacements( catalogTable.columns.get( 0 ).id ).size() != 1 ) { + if ( catalog.getSnapshot().getAllocSnapshot().getDataPlacements( catalogTable.id ).size() != 1 ) { throw new RuntimeException( "The table has an unexpected number of placements!" 
);
 }

 long adapterId = catalog.getSnapshot().getAllocSnapshot().getAllocationsFromLogical( catalogTable.id ).get( 0 ).adapterId;
 DataSource dataSource = (DataSource) AdapterManager.getInstance().getAdapter( adapterId );

- String physicalTableName = catalog.getSnapshot().getAllocSnapshot().getPartitionPlacement( adapterId, catalogTable.partitionProperty.partitionIds.get( 0 ) ).physicalTableName;
+ String physicalTableName = catalog.getSnapshot().getPhysicalSnapshot().getPhysicalTable( catalogTable.id, adapterId ).name;
 List exportedColumns = dataSource.getExportedColumns().get( physicalTableName );

 // Check if physicalColumnName is valid
@@ -770,7 +769,7 @@ public void addDataPlacement( LogicalTable catalogTable, List columnIds, L

 List tempPartitionGroupList = new ArrayList<>();

- if ( catalogTable.dataPlacements.contains( dataStore.getAdapterId() ) ) {
+ if ( catalog.getSnapshot().getAllocSnapshot().getDataPlacement( dataStore.getAdapterId(), catalogTable.id ) != null ) {
 throw new PlacementAlreadyExistsException();
 } else {
 catalog.getAllocRel( catalogTable.namespaceId ).addDataPlacement( dataStore.getAdapterId(), catalogTable.id );
@@ -778,7 +777,7 @@ public void addDataPlacement( LogicalTable catalogTable, List columnIds, L

 // Check whether the list is empty (this is a shorthand for a full placement)
 if ( columnIds.size() == 0 ) {
- columnIds = ImmutableList.copyOf( catalogTable.getColumnIds() );
+ columnIds = ImmutableList.copyOf( catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getColumns( catalogTable.id ) ).stream().map( c -> c.id ).collect( Collectors.toList() );
 }

 // Select partitions to create on this placement
@@ -791,6 +790,8 @@ public void addDataPlacement( LogicalTable catalogTable, List columnIds, L

 isDataPlacementPartitioned = !currentPartList.isEmpty();

+ PartitionProperty property = catalog.getSnapshot().getAllocSnapshot().getPartitionProperty( catalogTable.id );
+
 if ( !partitionGroupIds.isEmpty() && partitionGroupNames.isEmpty() ) {

 // Abort if a manual partitionList has been specified even though the data placement has already been partitioned
@@ -804,10 +805,10 @@ public void addDataPlacement( LogicalTable catalogTable, List columnIds, L
 for ( int partitionGroupId : partitionGroupIds ) {
 // Check if specified partition index is even part of table and if so get corresponding uniquePartId
 try {
- tempPartitionGroupList.add( catalogTable.partitionProperty.partitionGroupIds.get( partitionGroupId ) );
+ tempPartitionGroupList.add( property.partitionGroupIds.get( partitionGroupId ) );
 } catch ( IndexOutOfBoundsException e ) {
 throw new RuntimeException( "Specified Partition-Index: '" + partitionGroupId + "' is not part of table '"
- + catalogTable.name + "', has only " + catalogTable.partitionProperty.numPartitionGroups + " partitions" );
+ + catalogTable.name + "', has only " + property.numPartitionGroups + " partitions" );
 }
 }
 } else if ( !partitionGroupNames.isEmpty() && partitionGroupIds.isEmpty() ) {
@@ -842,7 +843,7 @@ else if ( partitionGroupIds.isEmpty() && partitionGroupNames.isEmpty() ) {
 // If DataPlacement already contains partitions then create new placement with same set of partitions. 
tempPartitionGroupList = currentPartList; } else { - tempPartitionGroupList = catalogTable.partitionProperty.partitionGroupIds; + tempPartitionGroupList = property.partitionGroupIds; } } //} @@ -908,38 +909,35 @@ public void addPrimaryKey( LogicalTable catalogTable, List columnNames, checkModelLogic( catalogTable ); - try { - CatalogPrimaryKey oldPk = catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getPrimaryKey( catalogTable.primaryKey ); + CatalogPrimaryKey oldPk = catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getPrimaryKey( catalogTable.primaryKey ); - List columnIds = new LinkedList<>(); - for ( String columnName : columnNames ) { - LogicalColumn logicalColumn = catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getColumn( catalogTable.id, columnName ); - columnIds.add( logicalColumn.id ); - } - catalog.getLogicalRel( catalogTable.namespaceId ).addPrimaryKey( catalogTable.id, columnIds ); - - // Add new column placements - long pkColumnId = oldPk.columnIds.get( 0 ); // It is sufficient to check for one because all get replicated on all stores - List oldPkPlacements = catalog.getSnapshot().getAllocSnapshot().getColumnPlacements( pkColumnId ); - for ( CatalogColumnPlacement ccp : oldPkPlacements ) { - for ( long columnId : columnIds ) { - if ( !catalog.getSnapshot().getAllocSnapshot().checkIfExistsColumnPlacement( ccp.adapterId, columnId ) ) { - catalog.getAllocRel( catalogTable.namespaceId ).addColumnPlacement( catalogTable, - ccp.adapterId, - columnId, // Will be set later - PlacementType.AUTOMATIC, // Will be set later - null, // Will be set later - null, null, 0 ); - AdapterManager.getInstance().getStore( ccp.adapterId ).addColumn( - statement.getPrepareContext(), - catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getTable( ccp.tableId ), - catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getColumn( columnId ) ); - } + List columnIds = new LinkedList<>(); + for ( String columnName : columnNames ) { + LogicalColumn logicalColumn = catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getColumn( catalogTable.id, columnName ); + columnIds.add( logicalColumn.id ); + } + catalog.getLogicalRel( catalogTable.namespaceId ).addPrimaryKey( catalogTable.id, columnIds ); + + // Add new column placements + long pkColumnId = oldPk.columnIds.get( 0 ); // It is sufficient to check for one because all get replicated on all stores + List oldPkPlacements = catalog.getSnapshot().getAllocSnapshot().getColumnPlacements( pkColumnId ); + for ( CatalogColumnPlacement ccp : oldPkPlacements ) { + for ( long columnId : columnIds ) { + if ( !catalog.getSnapshot().getAllocSnapshot().checkIfExistsColumnPlacement( ccp.adapterId, columnId ) ) { + catalog.getAllocRel( catalogTable.namespaceId ).addColumnPlacement( catalogTable, + ccp.adapterId, + columnId, // Will be set later + PlacementType.AUTOMATIC, // Will be set later + null, // Will be set later + null, null, 0 ); + AdapterManager.getInstance().getStore( ccp.adapterId ).addColumn( + statement.getPrepareContext(), + catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getTable( ccp.tableId ), + catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getColumn( columnId ) ); } } - } catch ( GenericCatalogException | UnknownColumnException e ) { - throw new RuntimeException( e ); } + } @@ -965,7 +963,8 @@ public void addUniqueConstraint( LogicalTable catalogTable, List columnN @Override public void dropColumn( LogicalTable catalogTable, String 
columnName, Statement statement ) throws ColumnNotExistsException { - if ( catalogTable.columns.size() < 2 ) { + List columns = catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getColumns( catalogTable.id ); + if ( columns.size() < 2 ) { throw new RuntimeException( "Cannot drop sole column of table " + catalogTable.name ); } @@ -1012,7 +1011,6 @@ public void dropColumn( LogicalTable catalogTable, String columnName, Statement } // Delete from catalog - List columns = snapshot.getColumns( catalogTable.id ); catalog.getLogicalRel( catalogTable.namespaceId ).deleteColumn( column.id ); if ( column.position != columns.size() ) { // Update position of the other columns @@ -1097,7 +1095,7 @@ public void dropIndex( LogicalTable catalogTable, String indexName, Statement st @Override public void dropDataPlacement( LogicalTable catalogTable, DataStore storeInstance, Statement statement ) throws PlacementNotExistsException, LastPlacementException { // Check whether this placement exists - if ( !catalogTable.dataPlacements.contains( storeInstance.getAdapterId() ) ) { + if ( catalog.getSnapshot().getAllocSnapshot().getDataPlacement( storeInstance.getAdapterId(), catalogTable.id ) == null ) { throw new PlacementNotExistsException(); } @@ -1295,9 +1293,9 @@ public void dropDefaultValue( LogicalTable catalogTable, String columnName, Stat @Override public void modifyDataPlacement( LogicalTable catalogTable, List columnIds, List partitionGroupIds, List partitionGroupNames, DataStore storeInstance, Statement statement ) throws PlacementNotExistsException, IndexPreventsRemovalException, LastPlacementException { - + CatalogDataPlacement placements = statement.getDataContext().getSnapshot().getAllocSnapshot().getDataPlacement( storeInstance.getAdapterId(), catalogTable.id ); // Check whether this placement already exists - if ( !catalogTable.dataPlacements.contains( storeInstance.getAdapterId() ) ) { + if ( placements == null ) { throw new PlacementNotExistsException(); } @@ -1306,7 +1304,7 @@ public void modifyDataPlacement( LogicalTable catalogTable, List columnIds List columnsToRemove = new ArrayList<>(); - LogicalRelSnapshot snapshot = catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ); + LogicalRelSnapshot snapshot = statement.getTransaction().getSnapshot().getRelSnapshot( catalogTable.namespaceId ); // Checks before physically removing of placement that the partition distribution is still valid and sufficient // Identifies which columns need to be removed @@ -1351,8 +1349,10 @@ public void modifyDataPlacement( LogicalTable catalogTable, List columnIds List tempPartitionGroupList = new ArrayList<>(); + PartitionProperty partition = statement.getTransaction().getSnapshot().getAllocSnapshot().getPartitionProperty( catalogTable.id ); + // Select partitions to create on this placement - if ( catalogTable.partitionProperty.isPartitioned ) { + if ( partition.isPartitioned ) { long tableId = catalogTable.id; // If index partitions are specified if ( !partitionGroupIds.isEmpty() && partitionGroupNames.isEmpty() ) { @@ -1360,11 +1360,11 @@ public void modifyDataPlacement( LogicalTable catalogTable, List columnIds for ( long partitionGroupId : partitionGroupIds ) { // Check if specified partition index is even part of table and if so get corresponding uniquePartId try { - int index = catalogTable.partitionProperty.partitionGroupIds.indexOf( partitionGroupId ); - tempPartitionGroupList.add( catalogTable.partitionProperty.partitionGroupIds.get( index ) ); + int index = 
partition.partitionGroupIds.indexOf( partitionGroupId ); + tempPartitionGroupList.add( partition.partitionGroupIds.get( index ) ); } catch ( IndexOutOfBoundsException e ) { throw new RuntimeException( "Specified Partition-Index: '" + partitionGroupId + "' is not part of table '" - + catalogTable.name + "', has only " + catalogTable.partitionProperty.partitionGroupIds.size() + " partitions" ); + + catalogTable.name + "', has only " + partition.partitionGroupIds.size() + " partitions" ); } } } @@ -1389,10 +1389,10 @@ else if ( !partitionGroupNames.isEmpty() && partitionGroupIds.isEmpty() ) { // If nothing has been explicitly specified keep current placement of partitions. // Since it's impossible to have a placement without any partitions anyway log.debug( "Table is partitioned and concrete partitionList has NOT been specified " ); - tempPartitionGroupList = catalogTable.partitionProperty.partitionGroupIds; + tempPartitionGroupList = partition.partitionGroupIds; } } else { - tempPartitionGroupList.add( catalogTable.partitionProperty.partitionGroupIds.get( 0 ) ); + tempPartitionGroupList.add( partition.partitionGroupIds.get( 0 ) ); } // All internal partitions placed on this store @@ -1541,12 +1541,12 @@ public void modifyPartitionPlacement( LogicalTable catalogTable, List part @Override public void addColumnPlacement( LogicalTable catalogTable, String columnName, DataStore storeInstance, Statement statement ) throws UnknownAdapterException, PlacementNotExistsException, PlacementAlreadyExistsException, ColumnNotExistsException { columnName = adjustNameIfNeeded( columnName, catalogTable.namespaceId ); - + Snapshot snapshot = statement.getTransaction().getSnapshot(); if ( storeInstance == null ) { throw new UnknownAdapterException( "" ); } // Check whether this placement already exists - if ( !catalogTable.dataPlacements.contains( storeInstance.getAdapterId() ) ) { + if ( !snapshot.getAllocSnapshot().adapterHasPlacement( storeInstance.getAdapterId(), catalogTable.id ) ) { throw new PlacementNotExistsException(); } @@ -1590,8 +1590,10 @@ public void dropColumnPlacement( LogicalTable catalogTable, String columnName, D if ( storeInstance == null ) { throw new UnknownAdapterException( "" ); } + Snapshot snapshot = statement.getTransaction().getSnapshot(); + // Check whether this placement already exists - if ( !catalogTable.dataPlacements.contains( storeInstance.getAdapterId() ) ) { + if ( !snapshot.getAllocSnapshot().adapterHasPlacement( storeInstance.getAdapterId(), catalogTable.id ) ) { throw new PlacementNotExistsException(); } @@ -1758,14 +1760,15 @@ public void createMaterializedView( String viewName, long namespaceId, AlgRoot a Map> underlyingTables = new HashMap<>(); Map> underlying = findUnderlyingTablesOfView( algRoot.alg, underlyingTables, fieldList ); - LogicalRelSnapshot snapshot = catalog.getSnapshot().getRelSnapshot( namespaceId ); + Snapshot snapshot = statement.getTransaction().getSnapshot(); + LogicalRelSnapshot relSnapshot = snapshot.getRelSnapshot( namespaceId ); // add check if underlying table is of model document -> mql, relational -> sql underlying.keySet().forEach( tableId -> checkModelLangCompatibility( language, namespaceId, tableId ) ); if ( materializedCriteria.getCriteriaType() == CriteriaType.UPDATE ) { List entityTypes = new ArrayList<>(); - underlying.keySet().forEach( t -> entityTypes.add( snapshot.getTable( t ).entityType ) ); + underlying.keySet().forEach( t -> entityTypes.add( relSnapshot.getTable( t ).entityType ) ); if ( !(entityTypes.contains( 
EntityType.ENTITY )) ) { throw new GenericCatalogException( "Not possible to use Materialized View with Update Freshness if underlying table does not include a modifiable table." ); } @@ -1826,7 +1829,7 @@ public void createMaterializedView( String viewName, long namespaceId, AlgRoot a } else { logicalColumns = new ArrayList<>(); } - logicalColumns.add( snapshot.getColumn( columnId ) ); + logicalColumns.add( relSnapshot.getColumn( columnId ) ); addedColumns.put( adapterId, logicalColumns ); } @@ -1842,7 +1845,7 @@ public void createMaterializedView( String viewName, long namespaceId, AlgRoot a catalogMaterializedView.namespaceId, store.getAdapterId(), tableId, - catalogMaterializedView.partitionProperty.partitionIds.get( 0 ), + snapshot.getAllocSnapshot().getPartitionProperty( catalogMaterializedView.id ).partitionIds.get( 0 ), PlacementType.AUTOMATIC, DataPlacementRole.UPTODATE ); @@ -1904,11 +1907,12 @@ public long addGraphPlacement( long graphId, List stores, boolean onl } LogicalGraph graph = catalog.getSnapshot().getGraphSnapshot( graphId ).getGraph( graphId ); - Catalog.getInstance().getSnapshot(); + Snapshot snapshot = statement.getTransaction().getSnapshot(); - List preExistingPlacements = graph.placements + List preExistingPlacements = snapshot.getAllocSnapshot().getGraphPlacements( graphId ) .stream() - .filter( p -> !stores.stream().map( Adapter::getAdapterId ).collect( Collectors.toList() ).contains( p ) ) + .filter( p -> !stores.stream().map( Adapter::getAdapterId ).collect( Collectors.toList() ).contains( p.adapterId ) ) + .map( p -> p.adapterId ) .collect( Collectors.toList() ); Long existingAdapterId = preExistingPlacements.isEmpty() ? null : preExistingPlacements.get( 0 ); @@ -1934,7 +1938,7 @@ public long addGraphPlacement( long graphId, List stores, boolean onl @Override public void removeGraphDatabasePlacement( long graphId, DataStore store, Statement statement ) { - CatalogGraphPlacement placement = catalog.getAllocGraph( graphId ).getGraphPlacement( graphId, store.getAdapterId() ); + CatalogGraphPlacement placement = statement.getTransaction().getSnapshot().getAllocSnapshot().getGraphPlacement( graphId, store.getAdapterId() ); store.dropGraph( statement.getPrepareContext(), placement ); @@ -2049,10 +2053,9 @@ public void removeGraph( long graphId, boolean ifExists, Statement statement ) { } return; } - - for ( long adapterId : graph.placements ) { - CatalogGraphPlacement placement = catalog.getAllocGraph( graphId ).getGraphPlacement( graphId, adapterId ); - AdapterManager.getInstance().getStore( adapterId ).dropGraph( statement.getPrepareContext(), placement ); + AllocSnapshot allocSnapshot = catalog.getSnapshot().getAllocSnapshot(); + for ( CatalogGraphPlacement placement : allocSnapshot.getGraphPlacements( graphId ) ) { + AdapterManager.getInstance().getStore( placement.adapterId ).dropGraph( statement.getPrepareContext(), placement ); } catalog.getLogicalGraph( graphId ).deleteGraph( graphId ); @@ -2136,8 +2139,8 @@ private Map> findUnderlyingTablesOfView( AlgNode algNode, Map getUnderlyingColumns( AlgNode algNode, AlgDataType fieldList ) { LogicalTable table = algNode.getEntity().unwrap( LogicalTable.class ); - List columns = table.columns; - List logicalColumnNames = table.getColumnNames(); + List columns = Catalog.getInstance().getSnapshot().getRelSnapshot( table.namespaceId ).getColumns( table.id ); + List logicalColumnNames = columns.stream().map( c -> c.name ).collect( Collectors.toList() ); List underlyingColumns = new ArrayList<>(); for ( int i = 0; i < 
columns.size(); i++ ) { for ( AlgDataTypeField algDataTypeField : fieldList.getFieldList() ) { @@ -2200,18 +2203,20 @@ public void createTable( long namespaceId, String name, List f for ( ConstraintInformation constraint : constraints ) { addConstraint( namespaceId, constraint.name, constraint.type, constraint.columnNames, tableId ); } - - LogicalTable catalogTable = catalog.getSnapshot().getRelSnapshot( namespaceId ).getTable( tableId ); + Snapshot snapshot = statement.getTransaction().getSnapshot(); + LogicalTable catalogTable = snapshot.getRelSnapshot( namespaceId ).getTable( tableId ); // Trigger rebuild of schema; triggers schema creation on adapters Catalog.getInstance().getSnapshot(); + PartitionProperty property = snapshot.getAllocSnapshot().getPartitionProperty( catalogTable.id ); + for ( DataStore store : stores ) { catalog.getAllocRel( catalogTable.namespaceId ).addPartitionPlacement( catalogTable.namespaceId, store.getAdapterId(), catalogTable.id, - catalogTable.partitionProperty.partitionIds.get( 0 ), + property.partitionIds.get( 0 ), PlacementType.AUTOMATIC, DataPlacementRole.UPTODATE ); @@ -2290,8 +2295,8 @@ private boolean assertEntityExists( long namespaceId, String name, boolean ifNot public void dropCollection( LogicalCollection catalogCollection, Statement statement ) { AdapterManager manager = AdapterManager.getInstance(); - for ( long adapterId : catalogCollection.placements ) { - DataStore store = (DataStore) manager.getAdapter( adapterId ); + for ( CatalogCollectionPlacement placement : statement.getTransaction().getSnapshot().getAllocSnapshot().getCollectionPlacements( catalogCollection.id ) ) { + DataStore store = (DataStore) manager.getAdapter( placement.adapterId ); store.dropCollection( statement.getPrepareContext(), catalogCollection ); } @@ -2301,9 +2306,9 @@ public void dropCollection( LogicalCollection catalogCollection, Statement state public void removeDocumentLogistics( LogicalCollection catalogCollection, Statement statement ) { - CatalogCollectionMapping mapping = catalog.getAllocDoc( catalogCollection.namespaceId ).getCollectionMapping( catalogCollection.id ); - LogicalTable table = catalog.getSnapshot().getRelSnapshot( catalogCollection.namespaceId ).getTable( mapping.collectionId ); - catalog.getLogicalRel( catalogCollection.namespaceId ).deleteTable( table.id ); + // CatalogCollectionMapping mapping = catalog.getAllocDoc( catalogCollection.namespaceId ).getCollectionMapping( catalogCollection.id ); + // LogicalTable table = catalog.getSnapshot().getRelSnapshot( catalogCollection.namespaceId ).getTable( mapping.collectionId ); + // catalog.getLogicalRel( catalogCollection.namespaceId ).deleteTable( table.id ); } @@ -2422,7 +2427,8 @@ private void checkDocumentModel( long namespaceId, List column @Override public void addPartitioning( PartitionInformation partitionInfo, List stores, Statement statement ) throws GenericCatalogException, UnknownPartitionTypeException, UnknownColumnException, PartitionGroupNamesNotUniqueException, UnknownTableException, TransactionException, UnknownSchemaException, UnknownUserException, UnknownKeyException { - LogicalColumn logicalColumn = catalog.getSnapshot().getRelSnapshot( partitionInfo.table.namespaceId ).getColumn( partitionInfo.table.id, partitionInfo.columnName ); + Snapshot snapshot = statement.getTransaction().getSnapshot(); + LogicalColumn logicalColumn = snapshot.getRelSnapshot( partitionInfo.table.namespaceId ).getColumn( partitionInfo.table.id, partitionInfo.columnName ); PartitionType actualPartitionType 
= PartitionType.getByName( partitionInfo.typeName ); @@ -2437,7 +2443,7 @@ public void addPartitioning( PartitionInformation partitionInfo, List // Check if specified partitionColumn is even part of the table if ( log.isDebugEnabled() ) { - log.debug( "Creating partition group for table: {} with id {} on schema: {} on column: {}", partitionInfo.table.name, partitionInfo.table.id, partitionInfo.table.getNamespaceName(), logicalColumn.id ); + log.debug( "Creating partition group for table: {} with id {} on column: {}", partitionInfo.table.name, partitionInfo.table.id, logicalColumn.id ); } LogicalTable unPartitionedTable = partitionInfo.table; @@ -2612,10 +2618,10 @@ public void addPartitioning( PartitionInformation partitionInfo, List // Get primary key of table and use PK to find all DataPlacements of table long pkid = partitionInfo.table.primaryKey; - LogicalRelSnapshot snapshot = catalog.getSnapshot().getRelSnapshot( partitionInfo.table.namespaceId ); - List pkColumnIds = snapshot.getPrimaryKey( pkid ).columnIds; + LogicalRelSnapshot relSnapshot = catalog.getSnapshot().getRelSnapshot( partitionInfo.table.namespaceId ); + List pkColumnIds = relSnapshot.getPrimaryKey( pkid ).columnIds; // Basically get first part of PK even if its compound of PK it is sufficient - LogicalColumn pkColumn = snapshot.getColumn( pkColumnIds.get( 0 ) ); + LogicalColumn pkColumn = relSnapshot.getColumn( pkColumnIds.get( 0 ) ); // This gets us only one ccp per store (first part of PK) boolean fillStores = false; @@ -2623,7 +2629,7 @@ public void addPartitioning( PartitionInformation partitionInfo, List stores = new ArrayList<>(); fillStores = true; } - List catalogColumnPlacements = catalog.getSnapshot().getAllocSnapshot().getColumnPlacements( pkColumn.id ); + List catalogColumnPlacements = snapshot.getAllocSnapshot().getColumnPlacements( pkColumn.id ); for ( CatalogColumnPlacement ccp : catalogColumnPlacements ) { if ( fillStores ) { // Ask router on which store(s) the table should be placed @@ -2635,7 +2641,7 @@ public void addPartitioning( PartitionInformation partitionInfo, List } // Now get the partitioned table, partitionInfo still contains the basic/unpartitioned table. 
- LogicalTable partitionedTable = snapshot.getTable( partitionInfo.table.id ); + LogicalTable partitionedTable = relSnapshot.getTable( partitionInfo.table.id ); DataMigrator dataMigrator = statement.getTransaction().getDataMigrator(); for ( DataStore store : stores ) { for ( long partitionId : partitionIds ) { @@ -2655,7 +2661,7 @@ public void addPartitioning( PartitionInformation partitionInfo, List // Get only columns that are actually on that store // Every store of a newly partitioned table, initially will hold all partitions List necessaryColumns = new LinkedList<>(); - catalog.getSnapshot().getAllocSnapshot().getColumnPlacementsOnAdapterPerTable( store.getAdapterId(), partitionedTable.id ).forEach( cp -> necessaryColumns.add( snapshot.getColumn( cp.columnId ) ) ); + catalog.getSnapshot().getAllocSnapshot().getColumnPlacementsOnAdapterPerTable( store.getAdapterId(), partitionedTable.id ).forEach( cp -> necessaryColumns.add( relSnapshot.getColumn( cp.columnId ) ) ); // Copy data from the old partition to new partitions dataMigrator.copyPartitionData( @@ -2664,16 +2670,16 @@ public void addPartitioning( PartitionInformation partitionInfo, List unPartitionedTable, partitionedTable, necessaryColumns, - unPartitionedTable.partitionProperty.partitionIds, - partitionedTable.partitionProperty.partitionIds ); + snapshot.getAllocSnapshot().getPartitionProperty( unPartitionedTable.id ).partitionIds, + snapshot.getAllocSnapshot().getPartitionProperty( partitionedTable.id ).partitionIds ); } // Adjust indexes - List indexes = snapshot.getIndexes( unPartitionedTable.id, false ); + List indexes = relSnapshot.getIndexes( unPartitionedTable.id, false ); for ( CatalogIndex index : indexes ) { // Remove old index DataStore ds = ((DataStore) AdapterManager.getInstance().getAdapter( index.location )); - ds.dropIndex( statement.getPrepareContext(), index, unPartitionedTable.partitionProperty.partitionIds ); + ds.dropIndex( statement.getPrepareContext(), index, snapshot.getAllocSnapshot().getPartitionProperty( unPartitionedTable.id ).partitionIds ); catalog.getLogicalRel( partitionInfo.table.namespaceId ).deleteIndex( index.id ); // Add new index long newIndexId = catalog.getLogicalRel( partitionInfo.table.namespaceId ).addIndex( @@ -2686,18 +2692,18 @@ public void addPartitioning( PartitionInformation partitionInfo, List index.type, index.name ); if ( index.location == 0 ) { - IndexManager.getInstance().addIndex( snapshot.getIndex( newIndexId ), statement ); + IndexManager.getInstance().addIndex( relSnapshot.getIndex( newIndexId ), statement ); } else { ds.addIndex( statement.getPrepareContext(), - snapshot.getIndex( newIndexId ), + relSnapshot.getIndex( newIndexId ), catalog.getSnapshot().getAllocSnapshot().getPartitionsOnDataPlacement( ds.getAdapterId(), unPartitionedTable.id ) ); } } // Remove old tables - stores.forEach( store -> store.dropTable( statement.getPrepareContext(), unPartitionedTable, unPartitionedTable.partitionProperty.partitionIds ) ); - catalog.getAllocRel( partitionInfo.table.namespaceId ).deletePartitionGroup( unPartitionedTable.id, unPartitionedTable.namespaceId, unPartitionedTable.partitionProperty.partitionGroupIds.get( 0 ) ); + stores.forEach( store -> store.dropTable( statement.getPrepareContext(), unPartitionedTable, snapshot.getAllocSnapshot().getPartitionProperty( unPartitionedTable.id ).partitionIds ) ); + catalog.getAllocRel( partitionInfo.table.namespaceId ).deletePartitionGroup( unPartitionedTable.id, unPartitionedTable.namespaceId, 
snapshot.getAllocSnapshot().getPartitionProperty( unPartitionedTable.id ).partitionIds.get( 0 ) ); // Reset plan cache implementation cache & routing cache statement.getQueryProcessor().resetCaches(); @@ -2707,33 +2713,36 @@ public void addPartitioning( PartitionInformation partitionInfo, List @Override public void removePartitioning( LogicalTable partitionedTable, Statement statement ) throws GenericCatalogException, UnknownTableException, TransactionException, UnknownSchemaException, UnknownUserException, UnknownKeyException { long tableId = partitionedTable.id; + Snapshot snapshot = statement.getTransaction().getSnapshot(); if ( log.isDebugEnabled() ) { log.debug( "Merging partitions for table: {} with id {} on schema: {}", - partitionedTable.name, partitionedTable.id, partitionedTable.getNamespaceName() ); + partitionedTable.name, partitionedTable.id, snapshot.getNamespace( partitionedTable.namespaceId ) ); } - LogicalRelSnapshot snapshot = catalog.getSnapshot().getRelSnapshot( partitionedTable.namespaceId ); + LogicalRelSnapshot relSnapshot = catalog.getSnapshot().getRelSnapshot( partitionedTable.namespaceId ); + + PartitionProperty partition = snapshot.getAllocSnapshot().getPartitionProperty( partitionedTable.id ); // Need to gather the partitionDistribution before actually merging // We need a columnPlacement for every partition Map> placementDistribution = new HashMap<>(); PartitionManagerFactory partitionManagerFactory = PartitionManagerFactory.getInstance(); - PartitionManager partitionManager = partitionManagerFactory.getPartitionManager( partitionedTable.partitionProperty.partitionType ); - placementDistribution = partitionManager.getRelevantPlacements( partitionedTable, partitionedTable.partitionProperty.partitionIds, new ArrayList<>( List.of( -1L ) ) ); + PartitionManager partitionManager = partitionManagerFactory.getPartitionManager( partition.partitionType ); + placementDistribution = partitionManager.getRelevantPlacements( partitionedTable, partition.partitionIds, new ArrayList<>( List.of( -1L ) ) ); // Update catalog table catalog.getAllocRel( partitionedTable.namespaceId ).mergeTable( tableId ); // Now get the merged table - LogicalTable mergedTable = snapshot.getTable( tableId ); + LogicalTable mergedTable = relSnapshot.getTable( tableId ); List stores = new ArrayList<>(); // Get primary key of table and use PK to find all DataPlacements of table long pkid = partitionedTable.primaryKey; - List pkColumnIds = snapshot.getPrimaryKey( pkid ).columnIds; + List pkColumnIds = relSnapshot.getPrimaryKey( pkid ).columnIds; // Basically get first part of PK even if its compound of PK it is sufficient - LogicalColumn pkColumn = snapshot.getColumn( pkColumnIds.get( 0 ) ); + LogicalColumn pkColumn = relSnapshot.getColumn( pkColumnIds.get( 0 ) ); // This gets us only one ccp per store (first part of PK) List catalogColumnPlacements = catalog.getSnapshot().getAllocSnapshot().getColumnPlacements( pkColumn.id ); @@ -2749,12 +2758,13 @@ public void removePartitioning( LogicalTable partitionedTable, Statement stateme // For merge create only full placements on the used stores. 
Otherwise partition constraints might not hold for ( DataStore store : stores ) { + PartitionProperty property = snapshot.getAllocSnapshot().getPartitionProperty( mergedTable.id ); // Need to create partitionPlacements first in order to trigger schema creation on PolySchemaBuilder catalog.getAllocRel( partitionedTable.namespaceId ).addPartitionPlacement( mergedTable.namespaceId, store.getAdapterId(), mergedTable.id, - mergedTable.partitionProperty.partitionIds.get( 0 ), + property.partitionIds.get( 0 ), PlacementType.AUTOMATIC, DataPlacementRole.UPTODATE ); @@ -2763,14 +2773,14 @@ public void removePartitioning( LogicalTable partitionedTable, Statement stateme // Get only columns that are actually on that store List necessaryColumns = new LinkedList<>(); - catalog.getSnapshot().getAllocSnapshot().getColumnPlacementsOnAdapterPerTable( store.getAdapterId(), mergedTable.id ).forEach( cp -> necessaryColumns.add( snapshot.getColumn( cp.columnId ) ) ); + catalog.getSnapshot().getAllocSnapshot().getColumnPlacementsOnAdapterPerTable( store.getAdapterId(), mergedTable.id ).forEach( cp -> necessaryColumns.add( relSnapshot.getColumn( cp.columnId ) ) ); // TODO @HENNLO Check if this can be omitted catalog.getAllocRel( partitionedTable.namespaceId ).updateDataPlacement( store.getAdapterId(), mergedTable.id, catalog.getSnapshot().getAllocSnapshot().getDataPlacement( store.getAdapterId(), mergedTable.id ).columnPlacementsOnAdapter, - mergedTable.partitionProperty.partitionIds ); + property.partitionIds ); // dataMigrator.copySelectiveData( @@ -2780,15 +2790,16 @@ public void removePartitioning( LogicalTable partitionedTable, Statement stateme mergedTable, necessaryColumns, placementDistribution, - mergedTable.partitionProperty.partitionIds ); + property.partitionIds ); } // Adjust indexes - List indexes = snapshot.getIndexes( partitionedTable.id, false ); + List indexes = relSnapshot.getIndexes( partitionedTable.id, false ); for ( CatalogIndex index : indexes ) { // Remove old index DataStore ds = (DataStore) AdapterManager.getInstance().getAdapter( index.location ); - ds.dropIndex( statement.getPrepareContext(), index, partitionedTable.partitionProperty.partitionIds ); + PartitionProperty property = snapshot.getAllocSnapshot().getPartitionProperty( partitionedTable.id ); + ds.dropIndex( statement.getPrepareContext(), index, property.partitionIds ); catalog.getLogicalRel( partitionedTable.namespaceId ).deleteIndex( index.id ); // Add new index long newIndexId = catalog.getLogicalRel( partitionedTable.namespaceId ).addIndex( @@ -2801,11 +2812,11 @@ public void removePartitioning( LogicalTable partitionedTable, Statement stateme index.type, index.name ); if ( index.location == 0 ) { - IndexManager.getInstance().addIndex( snapshot.getIndex( newIndexId ), statement ); + IndexManager.getInstance().addIndex( relSnapshot.getIndex( newIndexId ), statement ); } else { ds.addIndex( statement.getPrepareContext(), - snapshot.getIndex( newIndexId ), + relSnapshot.getIndex( newIndexId ), catalog.getSnapshot().getAllocSnapshot().getPartitionsOnDataPlacement( ds.getAdapterId(), mergedTable.id ) ); } } @@ -2813,16 +2824,17 @@ public void removePartitioning( LogicalTable partitionedTable, Statement stateme // Needs to be separated from loop above. 
Otherwise we lose data
 for ( DataStore store : stores ) {
 List partitionIdsOnStore = new ArrayList<>();
 catalog.getSnapshot().getAllocSnapshot().getPartitionPlacementsByTableOnAdapter( store.getAdapterId(), partitionedTable.id ).forEach( p -> partitionIdsOnStore.add( p.partitionId ) );
+ PartitionProperty property = snapshot.getAllocSnapshot().getPartitionProperty( mergedTable.id );
 // Otherwise everything will be dropped again, leaving the table inaccessible
- partitionIdsOnStore.remove( mergedTable.partitionProperty.partitionIds.get( 0 ) );
+ partitionIdsOnStore.remove( property.partitionIds.get( 0 ) );
 // Drop all partitionedTables (table contains old partitionIds)
 store.dropTable( statement.getPrepareContext(), partitionedTable, partitionIdsOnStore );
 }

 // Loop over **old.partitionIds** to delete all partitions which are part of table
 // Needs to be done separately because partitionPlacements will be recursively dropped in `deletePartitionGroup` but are needed in dropTable
- for ( long partitionGroupId : partitionedTable.partitionProperty.partitionGroupIds ) {
+ PartitionProperty property = snapshot.getAllocSnapshot().getPartitionProperty( partitionedTable.id );
+ for ( long partitionGroupId : property.partitionGroupIds ) {
 catalog.getAllocRel( partitionedTable.namespaceId ).deletePartitionGroup( tableId, partitionedTable.namespaceId, partitionGroupId );
 }

@@ -2914,6 +2926,7 @@ public void dropNamespace( String schemaName, boolean ifExists, Statement statem

 @Override
 public void dropView( LogicalTable catalogView, Statement statement ) throws DdlOnSourceException {
+ Snapshot snapshot = statement.getTransaction().getSnapshot();
 // Make sure that this is a table of type VIEW
 if ( catalogView.entityType != EntityType.VIEW ) {
 throw new NotViewException();
@@ -2926,7 +2939,8 @@

 catalog.getLogicalRel( catalogView.namespaceId ).deleteViewDependencies( (CatalogView) catalogView );

 // Delete columns
- for ( LogicalColumn column : catalogView.columns ) {
+
+ for ( LogicalColumn column : snapshot.getRelSnapshot( catalogView.namespaceId ).getColumns( catalogView.id ) ) {
 catalog.getLogicalRel( catalogView.namespaceId ).deleteColumn( column.id );
 }

@@ -2962,6 +2976,7 @@ public void dropMaterializedView( LogicalTable materializedView, Statement state

@@ public void dropTable( LogicalTable catalogTable, Statement statement ) throws DdlOnSourceException {
+ Snapshot snapshot = catalog.getSnapshot();
 // Make sure that this is a table of type TABLE (and not SOURCE)
 //checkIfDdlPossible( catalogEntity.tableType );

 // Check if there are foreign keys referencing this table
 List selfRefsToDelete = new LinkedList<>();
- LogicalRelSnapshot snapshot = catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId );
- List exportedKeys = snapshot.getExportedKeys( catalogTable.id );
+ LogicalRelSnapshot relSnapshot = snapshot.getRelSnapshot( catalogTable.namespaceId );
+ List exportedKeys = relSnapshot.getExportedKeys( catalogTable.id );
 if ( exportedKeys.size() > 0 ) {
 for ( CatalogForeignKey foreignKey : exportedKeys ) {
 if ( foreignKey.tableId == catalogTable.id ) {
 // If this is a self-reference, drop it later.
 selfRefsToDelete.add( foreignKey );
 } else {
- throw new PolyphenyDbException( "Cannot drop table '" + catalogTable.getNamespaceName() + "." 
+ catalogTable.name + "' because it is being referenced by '" + exportedKeys.get( 0 ).getSchemaName() + "." + exportedKeys.get( 0 ).getTableName() + "'." ); + throw new PolyphenyDbException( "Cannot drop table '" + snapshot.getNamespace( catalogTable.namespaceId ) + "." + catalogTable.name + "' because it is being referenced by '" + exportedKeys.get( 0 ).getSchemaName() + "." + exportedKeys.get( 0 ).getTableName() + "'." ); } } } // Make sure that all adapters are of type store (and not source) - for ( long storeId : catalogTable.dataPlacements ) { - getDataStoreInstance( storeId ); + List placements = snapshot.getAllocSnapshot().getDataPlacements( catalogTable.id ); + for ( CatalogDataPlacement placement : placements ) { + getDataStoreInstance( placement.adapterId ); } // Delete all indexes - for ( CatalogIndex index : snapshot.getIndexes( catalogTable.id, false ) ) { + for ( CatalogIndex index : relSnapshot.getIndexes( catalogTable.id, false ) ) { if ( index.location == 0 ) { // Delete polystore index IndexManager.getInstance().deleteIndex( index ); @@ -2998,7 +3014,7 @@ public void dropTable( LogicalTable catalogTable, Statement statement ) throws D AdapterManager.getInstance().getStore( index.location ).dropIndex( statement.getPrepareContext(), index, - catalog.getSnapshot().getAllocSnapshot().getPartitionsOnDataPlacement( index.location, catalogTable.id ) ); + snapshot.getAllocSnapshot().getPartitionsOnDataPlacement( index.location, catalogTable.id ) ); } // Delete index in catalog catalog.getLogicalRel( catalogTable.namespaceId ).deleteIndex( index.id ); @@ -3006,16 +3022,19 @@ public void dropTable( LogicalTable catalogTable, Statement statement ) throws D // Delete data from the stores and remove the column placement catalog.getLogicalRel( catalogTable.namespaceId ).flagTableForDeletion( catalogTable.id, true ); - for ( long storeId : catalogTable.dataPlacements ) { + List p = snapshot.getAllocSnapshot().getDataPlacements( catalogTable.id ); + List columns; + for ( CatalogDataPlacement placement : p ) { // Delete table on store List partitionIdsOnStore = new ArrayList<>(); - catalog.getSnapshot().getAllocSnapshot().getPartitionPlacementsByTableOnAdapter( storeId, catalogTable.id ).forEach( p -> partitionIdsOnStore.add( p.partitionId ) ); + snapshot.getAllocSnapshot().getPartitionPlacementsByTableOnAdapter( placement.adapterId, catalogTable.id ).forEach( pl -> partitionIdsOnStore.add( pl.partitionId ) ); - AdapterManager.getInstance().getStore( storeId ).dropTable( statement.getPrepareContext(), catalogTable, partitionIdsOnStore ); + AdapterManager.getInstance().getStore( placement.adapterId ).dropTable( statement.getPrepareContext(), catalogTable, partitionIdsOnStore ); // Delete column placement in catalog - for ( LogicalColumn column : catalogTable.columns ) { - if ( catalog.getSnapshot().getAllocSnapshot().checkIfExistsColumnPlacement( storeId, column.id ) ) { - catalog.getAllocRel( catalogTable.namespaceId ).deleteColumnPlacement( storeId, column.id, false ); + columns = snapshot.getRelSnapshot( catalogTable.namespaceId ).getColumns( catalogTable.id ); + for ( LogicalColumn column : columns ) { + if ( catalog.getSnapshot().getAllocSnapshot().checkIfExistsColumnPlacement( placement.adapterId, column.id ) ) { + catalog.getAllocRel( catalogTable.namespaceId ).deleteColumnPlacement( placement.adapterId, column.id, false ); } } } @@ -3031,7 +3050,7 @@ public void dropTable( LogicalTable catalogTable, Statement statement ) throws D } // Delete indexes of this table - List indexes = 
snapshot.getIndexes( catalogTable.id, false ); + List indexes = relSnapshot.getIndexes( catalogTable.id, false ); for ( CatalogIndex index : indexes ) { catalog.getLogicalRel( catalogTable.namespaceId ).deleteIndex( index.id ); IndexManager.getInstance().deleteIndex( index ); @@ -3042,12 +3061,12 @@ public void dropTable( LogicalTable catalogTable, Statement statement ) throws D // Remove primary key catalog.getLogicalRel( catalogTable.namespaceId ).deletePrimaryKey( catalogTable.id ); // Delete all foreign keys of the table - List foreignKeys = snapshot.getForeignKeys( catalogTable.id ); + List foreignKeys = relSnapshot.getForeignKeys( catalogTable.id ); for ( CatalogForeignKey foreignKey : foreignKeys ) { catalog.getLogicalRel( catalogTable.namespaceId ).deleteForeignKey( foreignKey.id ); } // Delete all constraints of the table - for ( CatalogConstraint constraint : snapshot.getConstraints( catalogTable.id ) ) { + for ( CatalogConstraint constraint : relSnapshot.getConstraints( catalogTable.id ) ) { catalog.getLogicalRel( catalogTable.namespaceId ).deleteConstraint( constraint.id ); } } catch ( GenericCatalogException e ) { @@ -3056,7 +3075,8 @@ public void dropTable( LogicalTable catalogTable, Statement statement ) throws D } // Delete columns - for ( LogicalColumn column : catalogTable.columns ) { + columns = snapshot.getRelSnapshot( catalogTable.namespaceId ).getColumns( catalogTable.id ); + for ( LogicalColumn column : columns ) { catalog.getLogicalRel( catalogTable.namespaceId ).deleteColumn( column.id ); } @@ -3085,8 +3105,9 @@ public void truncate( LogicalTable catalogTable, Statement statement ) { prepareMonitoring( statement, Kind.TRUNCATE, catalogTable ); // Execute truncate on all placements - catalogTable.dataPlacements.forEach( adapterId -> { - AdapterManager.getInstance().getAdapter( adapterId ).truncate( statement.getPrepareContext(), catalogTable ); + List placements = statement.getTransaction().getSnapshot().getAllocSnapshot().getDataPlacements( catalogTable.id ); + placements.forEach( placement -> { + AdapterManager.getInstance().getAdapter( placement.adapterId ).truncate( statement.getPrepareContext(), catalogTable ); } ); } diff --git a/dbms/src/main/java/org/polypheny/db/processing/ConstraintEnforceAttacher.java b/dbms/src/main/java/org/polypheny/db/processing/ConstraintEnforceAttacher.java index f4d11da11d..c6afc837ae 100644 --- a/dbms/src/main/java/org/polypheny/db/processing/ConstraintEnforceAttacher.java +++ b/dbms/src/main/java/org/polypheny/db/processing/ConstraintEnforceAttacher.java @@ -58,7 +58,6 @@ import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.exceptions.GenericCatalogException; -import org.polypheny.db.catalog.exceptions.UnknownColumnException; import org.polypheny.db.catalog.exceptions.UnknownSchemaException; import org.polypheny.db.catalog.exceptions.UnknownUserException; import org.polypheny.db.catalog.logistic.ConstraintType; @@ -488,11 +487,7 @@ public static AlgRoot enforceConstraintBeforeQuery( AlgRoot logicalRoot, Stateme final String columnName = foreignKey.getColumnNames().get( i ); final String foreignColumnName = foreignKey.getReferencedKeyColumnNames().get( i ); final LogicalColumn foreignColumn; - try { - foreignColumn = snapshot.getColumn( foreignTable.id, foreignColumnName ); - } catch ( UnknownColumnException e ) { - throw new RuntimeException( e ); - } + foreignColumn = snapshot.getColumn( foreignTable.id, foreignColumnName ); 
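// Note on the pattern used throughout this patch: getColumn( tableId, name ) now resolves
// directly against the relational snapshot and no longer declares the checked
// UnknownColumnException, which is why the surrounding try/catch blocks are removed.
// A minimal sketch of the assumed contract (names as in the code above; the exact
// failure mode for an unknown column is an assumption, not confirmed by this patch):
//   LogicalColumn c = snapshot.getColumn( foreignTable.id, foreignColumnName );
//   // c is the resolved column for a valid name; a bad name is assumed to fail unchecked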
RexNode newValue; int targetIndex; if ( root.isUpdate() ) { @@ -563,12 +558,8 @@ public RexNode visitFieldAccess( RexFieldAccess fieldAccess ) { final String columnName = foreignKey.getReferencedKeyColumnNames().get( i ); final String foreignColumnName = foreignKey.getColumnNames().get( i ); final LogicalColumn column, foreignColumn; - try { - column = snapshot.getColumn( table.id, columnName ); - foreignColumn = snapshot.getColumn( foreignTable.id, foreignColumnName ); - } catch ( UnknownColumnException e ) { - throw new RuntimeException( e ); - } + column = snapshot.getColumn( table.id, columnName ); + foreignColumn = snapshot.getColumn( foreignTable.id, foreignColumnName ); final RexNode inputRef = new RexInputRef( column.position - 1, rexBuilder.getTypeFactory().createPolyType( column.type ) ); final RexNode foreignInputRef = new RexInputRef( foreignColumn.position - 1, rexBuilder.getTypeFactory().createPolyType( foreignColumn.type ) ); projects.add( inputRef ); diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java b/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java index e076a318e0..88794875b4 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java @@ -88,7 +88,6 @@ import org.polypheny.db.catalog.entity.physical.PhysicalCollection; import org.polypheny.db.catalog.entity.physical.PhysicalGraph; import org.polypheny.db.catalog.entity.physical.PhysicalTable; -import org.polypheny.db.catalog.exceptions.UnknownColumnException; import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.catalog.refactor.ModifiableEntity; @@ -198,14 +197,10 @@ public AlgNode routeDml( LogicalRelModify modify, Statement statement ) { while ( updateColumnListIterator.hasNext() ) { String columnName = updateColumnListIterator.next(); sourceExpressionListIterator.next(); - try { - LogicalColumn logicalColumn = snapshot.getRelSnapshot( catalogTable.namespaceId ).getColumn( catalogTable.id, columnName ); - if ( !snapshot.getAllocSnapshot().checkIfExistsColumnPlacement( pkPlacement.adapterId, logicalColumn.id ) ) { - updateColumnListIterator.remove(); - sourceExpressionListIterator.remove(); - } - } catch ( UnknownColumnException e ) { - throw new RuntimeException( e ); + LogicalColumn logicalColumn = snapshot.getRelSnapshot( catalogTable.namespaceId ).getColumn( catalogTable.id, columnName ); + if ( !snapshot.getAllocSnapshot().checkIfExistsColumnPlacement( pkPlacement.adapterId, logicalColumn.id ) ) { + updateColumnListIterator.remove(); + sourceExpressionListIterator.remove(); } } if ( updateColumnList.size() == 0 ) { @@ -265,27 +260,23 @@ public AlgNode visit( LogicalFilter filter ) { int index = 0; for ( String cn : updateColumnList ) { - try { - if ( snapshot.getRelSnapshot( catalogTable.namespaceId ).getColumn( catalogTable.id, cn ).id == catalogTable.partitionProperty.partitionColumnId ) { - if ( log.isDebugEnabled() ) { - log.debug( " UPDATE: Found PartitionColumnID Match: '{}' at index: {}", catalogTable.partitionProperty.partitionColumnId, index ); - } - // Routing/Locking can now be executed on certain partitions - partitionValue = sourceExpressionList.get( index ).toString().replace( "'", "" ); - if ( log.isDebugEnabled() ) { - log.debug( - "UPDATE: partitionColumn-value: '{}' should be put on partition: {}", - partitionValue, - partitionManager.getTargetPartitionId( 
catalogTable, partitionValue ) ); - } - identPart = (int) partitionManager.getTargetPartitionId( catalogTable, partitionValue ); - // Needed to verify if UPDATE shall be executed on two partitions or not - identifiedPartitionForSetValue = identPart; - accessedPartitionList.add( identPart ); - break; + if ( snapshot.getRelSnapshot( catalogTable.namespaceId ).getColumn( catalogTable.id, cn ).id == catalogTable.partitionProperty.partitionColumnId ) { + if ( log.isDebugEnabled() ) { + log.debug( " UPDATE: Found PartitionColumnID Match: '{}' at index: {}", catalogTable.partitionProperty.partitionColumnId, index ); } - } catch ( UnknownColumnException e ) { - throw new RuntimeException( e ); + // Routing/Locking can now be executed on certain partitions + partitionValue = sourceExpressionList.get( index ).toString().replace( "'", "" ); + if ( log.isDebugEnabled() ) { + log.debug( + "UPDATE: partitionColumn-value: '{}' should be put on partition: {}", + partitionValue, + partitionManager.getTargetPartitionId( catalogTable, partitionValue ) ); + } + identPart = (int) partitionManager.getTargetPartitionId( catalogTable, partitionValue ); + // Needed to verify if UPDATE shall be executed on two partitions or not + identifiedPartitionForSetValue = identPart; + accessedPartitionList.add( identPart ); + break; } index++; } @@ -1332,31 +1323,27 @@ private void dmlConditionCheck( LogicalFilter node, LogicalTable catalogTable, L int index = ((RexInputRef) operand).getIndex(); AlgDataTypeField field = node.getInput().getRowType().getFieldList().get( index ); LogicalColumn column; - try { - String columnName; - String[] columnNames = field.getName().split( "\\." ); - if ( columnNames.length == 1 ) { // columnName - columnName = columnNames[0]; - } else if ( columnNames.length == 2 ) { // tableName.columnName - if ( !catalogTable.name.equalsIgnoreCase( columnNames[0] ) ) { - throw new RuntimeException( "Table name does not match expected table name: " + field.getName() ); - } - columnName = columnNames[1]; - } else if ( columnNames.length == 3 ) { // schemaName.tableName.columnName - if ( !catalogTable.getNamespaceName().equalsIgnoreCase( columnNames[0] ) ) { - throw new RuntimeException( "Schema name does not match expected schema name: " + field.getName() ); - } - if ( !catalogTable.name.equalsIgnoreCase( columnNames[1] ) ) { - throw new RuntimeException( "Table name does not match expected table name: " + field.getName() ); - } - columnName = columnNames[2]; - } else { - throw new RuntimeException( "Invalid column name: " + field.getName() ); + String columnName; + String[] columnNames = field.getName().split( "\\." 
); + if ( columnNames.length == 1 ) { // columnName + columnName = columnNames[0]; + } else if ( columnNames.length == 2 ) { // tableName.columnName + if ( !catalogTable.name.equalsIgnoreCase( columnNames[0] ) ) { + throw new RuntimeException( "Table name does not match expected table name: " + field.getName() ); + } + columnName = columnNames[1]; + } else if ( columnNames.length == 3 ) { // schemaName.tableName.columnName + if ( !catalogTable.getNamespaceName().equalsIgnoreCase( columnNames[0] ) ) { + throw new RuntimeException( "Schema name does not match expected schema name: " + field.getName() ); } - column = snapshot.getRelSnapshot( catalogTable.namespaceId ).getColumn( catalogTable.id, columnName ); - } catch ( UnknownColumnException e ) { - throw new RuntimeException( e ); + if ( !catalogTable.name.equalsIgnoreCase( columnNames[1] ) ) { + throw new RuntimeException( "Table name does not match expected table name: " + field.getName() ); + } + columnName = columnNames[2]; + } else { + throw new RuntimeException( "Invalid column name: " + field.getName() ); } + column = snapshot.getRelSnapshot( catalogTable.namespaceId ).getColumn( catalogTable.id, columnName ); if ( !snapshot.getAllocSnapshot().checkIfExistsColumnPlacement( placements.get( 0 ).adapterId, column.id ) ) { throw new RuntimeException( "Current implementation of vertical partitioning does not allow conditions on partitioned columns. " ); // !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticQueryProcessor.java b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticQueryProcessor.java index ddfff66a06..233eba07c6 100644 --- a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticQueryProcessor.java +++ b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticQueryProcessor.java @@ -96,7 +96,8 @@ public List> getSchemaTree() { List childTables = snapshot.getRelSnapshot( schema.id ).getTables( null ); for ( LogicalTable childTable : childTables ) { List table = new ArrayList<>(); - for ( LogicalColumn logicalColumn : childTable.columns ) { + List columns = snapshot.getRelSnapshot( schema.id ).getColumns( childTable.id ); + for ( LogicalColumn logicalColumn : columns ) { table.add( schema.name + "." + childTable.name + "." 
+ logicalColumn.name ); } if ( childTable.entityType == EntityType.ENTITY ) { @@ -120,7 +121,7 @@ public List getAllColumns() { return snapshot.getNamespaces( null ) .stream() .filter( n -> n.namespaceType == NamespaceType.RELATIONAL ) - .flatMap( n -> snapshot.getRelSnapshot( n.id ).getTables( null ).stream().filter( t -> t.entityType != EntityType.VIEW ).flatMap( t -> t.columns.stream() ) ) + .flatMap( n -> snapshot.getRelSnapshot( n.id ).getTables( null ).stream().filter( t -> t.entityType != EntityType.VIEW ).flatMap( t -> snapshot.getRelSnapshot( n.id ).getColumns( t.id ).stream() ) ) .map( QueryResult::fromCatalogColumn ) .collect( Collectors.toList() ); } @@ -145,7 +146,7 @@ public List getAllTable() { */ public List getAllColumns( Long tableId ) { Snapshot snapshot = Catalog.getInstance().getSnapshot(); - return snapshot.getNamespaces( null ).stream().flatMap( n -> snapshot.getRelSnapshot( n.id ).getTable( tableId ).columns.stream() ).map( QueryResult::fromCatalogColumn ).collect( Collectors.toList() ); + return snapshot.getNamespaces( null ).stream().flatMap( n -> snapshot.getRelSnapshot( n.id ).getColumns( tableId ).stream() ).map( QueryResult::fromCatalogColumn ).collect( Collectors.toList() ); } diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticTable.java b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticTable.java index 4a36e200ba..854ce7c773 100644 --- a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticTable.java +++ b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticTable.java @@ -20,6 +20,7 @@ import com.google.common.collect.ImmutableList; import java.util.ArrayList; import java.util.List; +import java.util.stream.Collectors; import lombok.Getter; import lombok.Setter; import org.polypheny.db.catalog.Catalog; @@ -79,7 +80,7 @@ public StatisticTable( Long tableId ) { LogicalTable catalogTable = catalog.getSnapshot().getLogicalEntity( tableId ).unwrap( LogicalTable.class ); this.table = catalogTable.name; this.namespaceType = catalogTable.getNamespaceType(); - this.dataPlacements = catalogTable.dataPlacements; + this.dataPlacements = ImmutableList.copyOf( catalog.getSnapshot().getAllocSnapshot().getDataPlacements( catalogTable.id ).stream().map( c -> c.adapterId ).collect( Collectors.toList() ) ); this.entityType = catalogTable.entityType; } calls = new TableCalls( tableId, 0, 0, 0, 0 ); diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlDdl.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlDdl.java index 05187acc89..0ceab050bc 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlDdl.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlDdl.java @@ -28,7 +28,6 @@ import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalTable; -import org.polypheny.db.catalog.exceptions.UnknownColumnException; import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.languages.ParserPos; import org.polypheny.db.nodes.Operator; @@ -86,11 +85,7 @@ protected LogicalTable getCatalogTable( Context context, SqlIdentifier tableName protected LogicalColumn getCatalogColumn( long namespaceId, long tableId, SqlIdentifier columnName ) { LogicalColumn logicalColumn; - try { - logicalColumn = snapshot.getRelSnapshot( namespaceId ).getColumn( tableId, columnName.getSimple() ); - 
} catch ( UnknownColumnException e ) { - throw CoreUtil.newContextException( columnName.getPos(), RESOURCE.columnNotFoundInTable( columnName.getSimple(), tableId + "" ) ); - } + logicalColumn = snapshot.getRelSnapshot( namespaceId ).getColumn( tableId, columnName.getSimple() ); return logicalColumn; } diff --git a/webui/src/main/java/org/polypheny/db/webui/Crud.java b/webui/src/main/java/org/polypheny/db/webui/Crud.java index 079d7f42c2..1febae3299 100644 --- a/webui/src/main/java/org/polypheny/db/webui/Crud.java +++ b/webui/src/main/java/org/polypheny/db/webui/Crud.java @@ -103,6 +103,7 @@ import org.polypheny.db.catalog.entity.CatalogAdapter.AdapterType; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; import org.polypheny.db.catalog.entity.CatalogConstraint; +import org.polypheny.db.catalog.entity.CatalogDataPlacement; import org.polypheny.db.catalog.entity.CatalogForeignKey; import org.polypheny.db.catalog.entity.CatalogIndex; import org.polypheny.db.catalog.entity.CatalogMaterializedView; @@ -130,6 +131,7 @@ import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.catalog.logistic.PartitionType; import org.polypheny.db.catalog.logistic.PlacementType; +import org.polypheny.db.catalog.snapshot.LogicalRelSnapshot; import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.config.RuntimeConfig; import org.polypheny.db.ddl.DdlManager; @@ -153,6 +155,7 @@ import org.polypheny.db.partition.PartitionFunctionInfo.PartitionFunctionInfoColumn; import org.polypheny.db.partition.PartitionManager; import org.polypheny.db.partition.PartitionManagerFactory; +import org.polypheny.db.partition.properties.PartitionProperty; import org.polypheny.db.plugins.PolyPluginManager; import org.polypheny.db.plugins.PolyPluginManager.PluginStatus; import org.polypheny.db.processing.ExtendedQueryParameters; @@ -449,12 +452,12 @@ private String getIconName( NamespaceType namespaceType ) { void getTables( final Context ctx ) { Transaction transaction = getTransaction(); EditTableRequest request = ctx.bodyAsClass( EditTableRequest.class ); - long schemaId = transaction.getDefaultSchema().id; - String requestedSchema; + long namespaceId = transaction.getDefaultSchema().id; + String namespaceName; if ( request.schema != null ) { - requestedSchema = request.schema; + namespaceName = request.schema; } else { - requestedSchema = catalog.getSnapshot().getNamespace( schemaId ).name; + namespaceName = catalog.getSnapshot().getNamespace( namespaceId ).name; } try { @@ -467,10 +470,10 @@ void getTables( final Context ctx ) { } } - List tables = catalog.getSnapshot().getRelSnapshot( schemaId ).getTables( null ); + List tables = catalog.getSnapshot().getRelSnapshot( namespaceId ).getTables( null ); ArrayList result = new ArrayList<>(); for ( LogicalTable t : tables ) { - result.add( new DbTable( t.name, t.getNamespaceName(), t.modifiable, t.entityType ) ); + result.add( new DbTable( t.name, namespaceName, t.modifiable, t.entityType ) ); } ctx.json( result ); } @@ -1165,10 +1168,10 @@ void getDataSourceColumns( final Context ctx ) throws UnknownTableException, Unk LogicalTable catalogTable = catalog.getSnapshot().getRelSnapshot( namespace.id ).getTable( request.getTableName() ); if ( catalogTable.entityType == EntityType.VIEW ) { - ImmutableMap> underlyingTable = ((CatalogView) catalogTable).getUnderlyingTables(); List columns = new ArrayList<>(); - for ( LogicalColumn col : catalogTable.columns ) { + List cols = catalog.getSnapshot().getRelSnapshot( namespace.id ).getColumns( 
catalogTable.id ); + for ( LogicalColumn col : cols ) { columns.add( new DbColumn( col.name, col.type.getName(), @@ -1784,8 +1787,9 @@ void getIndexes( final Context ctx ) { } // Get functional indexes - for ( long storeId : catalogTable.dataPlacements ) { - Adapter adapter = AdapterManager.getInstance().getAdapter( storeId ); + List placements = catalog.getSnapshot().getAllocSnapshot().getDataPlacements( catalogTable.id ); + for ( CatalogDataPlacement placement : placements ) { + Adapter adapter = AdapterManager.getInstance().getAdapter( placement.adapterId ); DataStore store; if ( adapter instanceof DataStore ) { store = (DataStore) adapter; @@ -1915,7 +1919,7 @@ private Placement getPlacements( final Index index ) { Snapshot snapshot = Catalog.getInstance().getSnapshot(); try { LogicalTable table = getLogicalTable( schemaName, tableName ); - Placement p = new Placement( table.partitionProperty.isPartitioned, snapshot.getAllocSnapshot().getPartitionGroupNames( table.id ), table.entityType ); + Placement p = new Placement( snapshot.getAllocSnapshot().isPartitioned( table.id ), snapshot.getAllocSnapshot().getPartitionGroupNames( table.id ), table.entityType ); if ( table.entityType == EntityType.VIEW ) { return p; @@ -1926,13 +1930,14 @@ private Placement getPlacements( final Index index ) { List pkPlacements = snapshot.getAllocSnapshot().getColumnPlacements( pkColumn.id ); for ( CatalogColumnPlacement placement : pkPlacements ) { Adapter adapter = AdapterManager.getInstance().getAdapter( placement.adapterId ); + PartitionProperty property = snapshot.getAllocSnapshot().getPartitionProperty( table.id ); p.addAdapter( new RelationalStore( adapter.getUniqueName(), adapter.getUniqueName(), snapshot.getAllocSnapshot().getColumnPlacementsOnAdapterPerTable( adapter.getAdapterId(), table.id ), snapshot.getAllocSnapshot().getPartitionGroupsIndexOnDataPlacement( placement.adapterId, placement.tableId ), - table.partitionProperty.numPartitionGroups, - table.partitionProperty.partitionType ) ); + property.numPartitionGroups, + property.partitionType ) ); } return p; } @@ -2532,11 +2537,12 @@ void getUml( final Context ctx ) { .build() ); } } - + LogicalRelSnapshot relSnapshot = catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ); // get tables with its columns - DbTable table = new DbTable( catalogTable.name, catalogTable.getNamespaceName(), catalogTable.modifiable, catalogTable.entityType ); - for ( String columnName : catalogTable.getColumnNames() ) { - table.addColumn( new DbColumn( columnName ) ); + DbTable table = new DbTable( catalogTable.name, catalog.getSnapshot().getNamespace( catalogTable.namespaceId ).getName(), catalogTable.modifiable, catalogTable.entityType ); + + for ( LogicalColumn column : relSnapshot.getColumns( catalogTable.id ) ) { + table.addColumn( new DbColumn( column.name ) ); } // get primary key with its columns @@ -3223,15 +3229,11 @@ public static Result executeSqlSelect( final Statement statement, final UIReques // Get column default values if ( catalogTable != null ) { - try { - LogicalColumn logicalColumn = crud.catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getColumn( catalogTable.id, columnName ); - if ( logicalColumn != null ) { - if ( logicalColumn.defaultValue != null ) { - dbCol.defaultValue = logicalColumn.defaultValue.value; - } + LogicalColumn logicalColumn = crud.catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getColumn( catalogTable.id, columnName ); + if ( logicalColumn != null ) { + if ( 
logicalColumn.defaultValue != null ) { + dbCol.defaultValue = logicalColumn.defaultValue.value; } - } catch ( UnknownColumnException e ) { - log.error( "Caught exception", e ); } } header.add( dbCol ); diff --git a/webui/src/main/java/org/polypheny/db/webui/crud/LanguageCrud.java b/webui/src/main/java/org/polypheny/db/webui/crud/LanguageCrud.java index c5e3982d46..e9be035f1a 100644 --- a/webui/src/main/java/org/polypheny/db/webui/crud/LanguageCrud.java +++ b/webui/src/main/java/org/polypheny/db/webui/crud/LanguageCrud.java @@ -35,7 +35,7 @@ import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogCollectionPlacement; -import org.polypheny.db.catalog.entity.CatalogGraphPlacement; +import org.polypheny.db.catalog.entity.CatalogDataPlacement; import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalCollection; import org.polypheny.db.catalog.entity.logical.LogicalColumn; @@ -219,7 +219,7 @@ public static Result getResult( QueryLanguage language, Statement statement, Que // Get column default values if ( catalogTable != null ) { - LogicalColumn logicalColumn = catalogTable.columns.stream().filter( c -> c.name.equals( columnName ) ).findFirst().orElse( null ); + LogicalColumn logicalColumn = catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getColumn( catalogTable.id, columnName ); if ( logicalColumn != null ) { if ( logicalColumn.defaultValue != null ) { dbCol.defaultValue = logicalColumn.defaultValue.value; @@ -329,13 +329,13 @@ private Placement getPlacements( final Index index ) { return p; } else { - for ( long adapterId : graph.placements ) { - CatalogGraphPlacement placement = catalog.getAllocGraph( graph.id ).getGraphPlacement( graph.id, adapterId ); + List placements = catalog.getSnapshot().getAllocSnapshot().getDataPlacements( graph.id ); + for ( CatalogDataPlacement placement : placements ) { Adapter adapter = AdapterManager.getInstance().getAdapter( placement.adapterId ); p.addAdapter( new Placement.GraphStore( adapter.getUniqueName(), adapter.getUniqueName(), - catalog.getAllocGraph( graph.id ).getGraphPlacements( adapterId ), + catalog.getSnapshot().getAllocSnapshot().getGraphPlacements( placement.adapterId ), adapter.getSupportedNamespaceTypes().contains( NamespaceType.GRAPH ) ) ); } return p; @@ -372,15 +372,20 @@ public void getCollectionPlacements( Context context ) { LogicalCollection collection = collections.get( 0 ); - Placement placement = new Placement( false, List.of(), EntityType.ENTITY ); + Placement p = new Placement( false, List.of(), EntityType.ENTITY ); + + List placements = catalog.getSnapshot().getAllocSnapshot().getCollectionPlacements( collection.id ); - for ( long adapterId : collection.placements ) { - Adapter adapter = AdapterManager.getInstance().getAdapter( adapterId ); - List placements = catalog.getAllocDoc( collection.namespaceId ).getCollectionPlacementsByAdapter( adapterId ); - placement.addAdapter( new DocumentStore( adapter.getUniqueName(), adapter.getUniqueName(), placements, adapter.getSupportedNamespaceTypes().contains( NamespaceType.DOCUMENT ) ) ); + for ( CatalogCollectionPlacement placement : placements ) { + Adapter adapter = AdapterManager.getInstance().getAdapter( placement.adapterId ); + p.addAdapter( new DocumentStore( + adapter.getUniqueName(), + adapter.getUniqueName(), + catalog.getSnapshot().getAllocSnapshot().getCollectionPlacementsByAdapter( placement.adapterId ), + 
adapter.getSupportedNamespaceTypes().contains( NamespaceType.DOCUMENT ) ) ); } - context.json( placement ); + context.json( p ); } diff --git a/webui/src/main/java/org/polypheny/db/webui/models/requests/BatchUpdateRequest.java b/webui/src/main/java/org/polypheny/db/webui/models/requests/BatchUpdateRequest.java index aa73302a63..dca7651b00 100644 --- a/webui/src/main/java/org/polypheny/db/webui/models/requests/BatchUpdateRequest.java +++ b/webui/src/main/java/org/polypheny/db/webui/models/requests/BatchUpdateRequest.java @@ -33,7 +33,6 @@ import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalTable; -import org.polypheny.db.catalog.exceptions.UnknownColumnException; import org.polypheny.db.transaction.Statement; import org.polypheny.db.type.PolyTypeFamily; import org.polypheny.db.webui.Crud; @@ -63,14 +62,9 @@ public String getQuery( String tableId, Statement statement, HttpServletRequest Catalog catalog = Catalog.getInstance(); String[] split = tableId.split( "\\." ); LogicalColumn logicalColumn; - try { - LogicalNamespace namespace = catalog.getSnapshot().getNamespace( split[0] ); - LogicalTable table = catalog.getSnapshot().getRelSnapshot( namespace.id ).getTable( split[1] ); - logicalColumn = catalog.getSnapshot().getRelSnapshot( table.namespaceId ).getColumn( table.id, entry.getKey() ); - } catch ( UnknownColumnException e ) { - log.error( "Could not determine column type", e ); - return null; - } + LogicalNamespace namespace = catalog.getSnapshot().getNamespace( split[0] ); + LogicalTable table = catalog.getSnapshot().getRelSnapshot( namespace.id ).getTable( split[1] ); + logicalColumn = catalog.getSnapshot().getRelSnapshot( table.namespaceId ).getColumn( table.id, entry.getKey() ); if ( fileName == null && value == null ) { setClauses.add( String.format( "\"%s\"=NULL", entry.getKey() ) ); } else if ( value != null && fileName == null ) { From 45fd67b810aaeac10ac50882a5a185dd8b4e0168 Mon Sep 17 00:00:00 2001 From: datomo Date: Sun, 12 Mar 2023 01:19:41 +0100 Subject: [PATCH 046/436] changed physical, allocation to not include callee --- .../org/polypheny/db/adapter/Adapter.java | 12 +- .../polypheny/db/adapter/AdapterManager.java | 2 +- .../algebra/rules/LoptSemiJoinOptimizer.java | 4 +- .../catalog/catalogs/AllocationCatalog.java | 3 + .../catalogs/AllocationDocumentCatalog.java | 4 + .../catalogs/AllocationGraphCatalog.java | 5 + .../catalogs/AllocationRelationalCatalog.java | 4 + .../db/catalog/catalogs/PhysicalCatalog.java | 8 +- .../entity/CatalogCollectionPlacement.java | 19 ++- .../entity/CatalogEntityPlacement.java | 2 + .../catalog/entity/CatalogGraphPlacement.java | 2 + .../allocation/AllocationCollection.java | 7 +- .../entity/allocation/AllocationEntity.java | 12 +- .../entity/allocation/AllocationGraph.java | 5 +- .../entity/allocation/AllocationTable.java | 21 +-- .../entity/physical/PhysicalCollection.java | 7 +- .../entity/physical/PhysicalEntity.java | 21 ++- .../entity/physical/PhysicalGraph.java | 7 +- .../entity/physical/PhysicalTable.java | 39 ++--- .../db/catalog/snapshot/AllocSnapshot.java | 4 +- .../db/catalog/snapshot/PhysicalSnapshot.java | 2 +- .../snapshot/impl/AllocSnapshotImpl.java | 152 +++++++++++++++++- .../snapshot/impl/PhysicalSnapshotImpl.java | 2 +- .../partition/AbstractPartitionManager.java | 4 +- .../db/partition/FrequencyMapImpl.java | 32 ++-- .../db/partition/HashPartitionManager.java | 8 +- 
.../TemperatureAwarePartitionManager.java | 11 +- .../processing/ConstraintEnforceAttacher.java | 13 +- .../db/processing/DataMigratorImpl.java | 55 ++++--- .../db/routing/UiRoutingPageUtil.java | 2 +- .../db/routing/routers/AbstractDqlRouter.java | 14 +- .../db/routing/routers/BaseRouter.java | 14 +- .../db/routing/routers/CachedPlanRouter.java | 7 +- .../db/routing/routers/DmlRouterImpl.java | 74 +++++---- .../routers/FullPlacementQueryRouter.java | 9 +- .../db/routing/routers/IcarusRouter.java | 7 +- .../db/routing/routers/SimpleRouter.java | 8 +- .../CreateAllPlacementStrategy.java | 10 +- .../CreateSinglePlacementStrategy.java | 8 +- .../db/transaction/EntityAccessMap.java | 9 +- .../db/view/MaterializedViewManagerImpl.java | 41 ++--- .../allocation/PolyAllocDocCatalog.java | 30 ++-- .../allocation/PolyAllocGraphCatalog.java | 21 +-- .../java/org/polypheny/db/webui/Crud.java | 3 +- .../polypheny/db/webui/models/Placement.java | 23 +-- 45 files changed, 467 insertions(+), 280 deletions(-) diff --git a/core/src/main/java/org/polypheny/db/adapter/Adapter.java b/core/src/main/java/org/polypheny/db/adapter/Adapter.java index 4297069614..4ad2650c8a 100644 --- a/core/src/main/java/org/polypheny/db/adapter/Adapter.java +++ b/core/src/main/java/org/polypheny/db/adapter/Adapter.java @@ -487,20 +487,20 @@ public void addInformationPhysicalNames() { Snapshot snapshot = Catalog.getInstance().getSnapshot(); group.setRefreshFunction( () -> { physicalColumnNames.reset(); - List> physicalsOnAdapter = snapshot.getPhysicalSnapshot().getPhysicalsOnAdapter( adapterId ); + List physicalsOnAdapter = snapshot.getPhysicalSnapshot().getPhysicalsOnAdapter( adapterId ); - for ( PhysicalEntity entity : physicalsOnAdapter ) { + for ( PhysicalEntity entity : physicalsOnAdapter ) { if ( entity.namespaceType != NamespaceType.RELATIONAL ) { continue; } PhysicalTable physicalTable = (PhysicalTable) entity; LogicalRelSnapshot relSnapshot = snapshot.getRelSnapshot( physicalTable.namespaceId ); - for ( long columnId : physicalTable.columnIds ) { + for ( Entry entry : physicalTable.columns.entrySet() ) { physicalColumnNames.addRow( - columnId, - relSnapshot.getColumn( columnId ), - physicalTable.namespaceName + "." + physicalTable.name + "." + relSnapshot.getColumn( columnId ) ); + entry.getKey(), + relSnapshot.getColumn( entry.getKey() ), + physicalTable.namespaceName + "." + physicalTable.name + "." 
+ entry.getValue() ); } } } ); diff --git a/core/src/main/java/org/polypheny/db/adapter/AdapterManager.java b/core/src/main/java/org/polypheny/db/adapter/AdapterManager.java index 5d394a447a..22358ea8df 100644 --- a/core/src/main/java/org/polypheny/db/adapter/AdapterManager.java +++ b/core/src/main/java/org/polypheny/db/adapter/AdapterManager.java @@ -244,7 +244,7 @@ public void removeAdapter( long adapterId ) { CatalogAdapter catalogAdapter = Catalog.getInstance().getSnapshot().getAdapter( adapterId ); // Check if the store has any placements - List> placements = Catalog.getInstance().getSnapshot().getAllocSnapshot().getAllocationsOnAdapter( catalogAdapter.id ); + List placements = Catalog.getInstance().getSnapshot().getAllocSnapshot().getAllocationsOnAdapter( catalogAdapter.id ); if ( placements.size() != 0 ) { throw new RuntimeException( "There is still data placed on this data store" ); } diff --git a/core/src/main/java/org/polypheny/db/algebra/rules/LoptSemiJoinOptimizer.java b/core/src/main/java/org/polypheny/db/algebra/rules/LoptSemiJoinOptimizer.java index 132d8ad723..c76b24774a 100644 --- a/core/src/main/java/org/polypheny/db/algebra/rules/LoptSemiJoinOptimizer.java +++ b/core/src/main/java/org/polypheny/db/algebra/rules/LoptSemiJoinOptimizer.java @@ -666,8 +666,8 @@ public int compare( Integer alg1Idx, Integer alg2Idx ) { */ private abstract static class LcsEntity extends CatalogEntity { - protected LcsEntity( CatalogEntityBuilder b ) { - super( b ); + protected LcsEntity() { + super( null ); } } diff --git a/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationCatalog.java b/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationCatalog.java index 364256badc..9518d9a08d 100644 --- a/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationCatalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationCatalog.java @@ -16,7 +16,10 @@ package org.polypheny.db.catalog.catalogs; +import org.polypheny.db.catalog.entity.LogicalNamespace; + public interface AllocationCatalog { + LogicalNamespace getNamespace(); } diff --git a/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationDocumentCatalog.java b/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationDocumentCatalog.java index 82bc74d80d..4031d17a7e 100644 --- a/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationDocumentCatalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationDocumentCatalog.java @@ -17,7 +17,9 @@ package org.polypheny.db.catalog.catalogs; import java.util.List; +import java.util.Map; import org.polypheny.db.adapter.DataStore; +import org.polypheny.db.catalog.entity.allocation.AllocationCollection; import org.polypheny.db.catalog.exceptions.GenericCatalogException; import org.polypheny.db.catalog.logistic.PlacementType; @@ -39,4 +41,6 @@ public interface AllocationDocumentCatalog extends AllocationCatalog { void dropCollectionPlacement( long id, long adapterId ); + Map getCollections(); + } diff --git a/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationGraphCatalog.java b/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationGraphCatalog.java index 81d25b9bbc..67808b1d03 100644 --- a/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationGraphCatalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationGraphCatalog.java @@ -16,6 +16,9 @@ package org.polypheny.db.catalog.catalogs; +import java.util.Map; +import org.polypheny.db.catalog.entity.allocation.AllocationGraph; 
+ public interface AllocationGraphCatalog extends AllocationCatalog { @@ -37,4 +40,6 @@ public interface AllocationGraphCatalog extends AllocationCatalog { public abstract void deleteGraphPlacement( long adapterId, long graphId ); + Map getGraphs(); + } diff --git a/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationRelationalCatalog.java b/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationRelationalCatalog.java index 70e9b6e25b..43de986298 100644 --- a/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationRelationalCatalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationRelationalCatalog.java @@ -17,7 +17,9 @@ package org.polypheny.db.catalog.catalogs; import java.util.List; +import java.util.Map; import org.polypheny.db.catalog.entity.CatalogDataPlacement; +import org.polypheny.db.catalog.entity.allocation.AllocationTable; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.exceptions.GenericCatalogException; import org.polypheny.db.catalog.logistic.DataPlacementRole; @@ -348,4 +350,6 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { void removeTableFromPeriodicProcessing( long tableId ); + Map getTables(); + } diff --git a/core/src/main/java/org/polypheny/db/catalog/catalogs/PhysicalCatalog.java b/core/src/main/java/org/polypheny/db/catalog/catalogs/PhysicalCatalog.java index c1fadc1536..276052521b 100644 --- a/core/src/main/java/org/polypheny/db/catalog/catalogs/PhysicalCatalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/catalogs/PhysicalCatalog.java @@ -22,12 +22,12 @@ public interface PhysicalCatalog { - List> getPhysicalsOnAdapter( long id ); + List getPhysicalsOnAdapter( long id ); - PhysicalEntity getPhysicalEntity( long id ); + PhysicalEntity getPhysicalEntity( long id ); - void addPhysicalEntity( PhysicalEntity physicalEntity ); + void addPhysicalEntity( PhysicalEntity physicalEntity ); -

PhysicalEntity getFromLogical( long id ); +
      PhysicalEntity getFromLogical( long id ); } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogCollectionPlacement.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogCollectionPlacement.java index d410aa16ee..b623ad82d2 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogCollectionPlacement.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogCollectionPlacement.java @@ -18,25 +18,24 @@ import java.io.Serializable; import javax.annotation.Nullable; +import lombok.EqualsAndHashCode; +import lombok.Value; - +@EqualsAndHashCode(callSuper = true) +@Value public class CatalogCollectionPlacement extends CatalogEntityPlacement { private static final long serialVersionUID = 4227137255905904785L; - public final int adapter; - public final long collectionId; - public final String physicalName; - public final long id; - public final String physicalNamespaceName; + public long collectionId; + public String physicalName; + public long id; - public CatalogCollectionPlacement( long namespaceId, int adapterId, long collectionId, @Nullable String physicalName, String physicalNamespaceName, long id ) { - super( namespaceId, (long) adapterId, collectionId ); - this.adapter = adapterId; + public CatalogCollectionPlacement( long id, long namespaceId, long adapterId, long collectionId, @Nullable String physicalName ) { + super( namespaceId, adapterId, collectionId ); this.collectionId = collectionId; this.physicalName = physicalName; - this.physicalNamespaceName = physicalNamespaceName; this.id = id; } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogEntityPlacement.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogEntityPlacement.java index f0102fd8aa..1b7d564a2e 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogEntityPlacement.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogEntityPlacement.java @@ -18,9 +18,11 @@ import java.io.Serializable; import lombok.AllArgsConstructor; +import lombok.experimental.SuperBuilder; import org.polypheny.db.schema.Wrapper; @AllArgsConstructor +@SuperBuilder(toBuilder = true) public abstract class CatalogEntityPlacement implements CatalogObject, Serializable, Wrapper { public final Long namespaceId; diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogGraphPlacement.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogGraphPlacement.java index cbbc92b3da..50412b3a6c 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogGraphPlacement.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogGraphPlacement.java @@ -20,10 +20,12 @@ import io.activej.serializer.annotations.Serialize; import java.io.Serializable; import javax.annotation.Nullable; +import lombok.EqualsAndHashCode; import lombok.Value; import lombok.experimental.SuperBuilder; +@EqualsAndHashCode(callSuper = true) @Value @SuperBuilder(toBuilder = true) public class CatalogGraphPlacement extends CatalogEntityPlacement { diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationCollection.java b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationCollection.java index ea6fbb4a8f..530b6441d2 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationCollection.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationCollection.java @@ -24,16 +24,15 @@ import org.apache.calcite.linq4j.tree.Expressions; import 
org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.logical.LogicalCollection; -import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.logistic.NamespaceType; @EqualsAndHashCode(callSuper = true) @Value @NonFinal -public class AllocationCollection extends AllocationEntity { +public class AllocationCollection extends AllocationEntity { - public AllocationCollection( LogicalCollection collection, long id, String name, EntityType type, NamespaceType namespaceType, long adapterId ) { - super( collection, id, name, type, namespaceType, adapterId ); + public AllocationCollection( LogicalCollection collection, long id, String name, long adapterId ) { + super( id, name, collection.id, collection.namespaceId, adapterId, NamespaceType.DOCUMENT ); } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationEntity.java b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationEntity.java index 455e5d71c7..c8d8f4ceb2 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationEntity.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationEntity.java @@ -20,7 +20,7 @@ import lombok.Value; import lombok.experimental.NonFinal; import lombok.experimental.SuperBuilder; -import org.polypheny.db.catalog.entity.logical.LogicalEntity; +import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.logistic.NamespaceType; @@ -28,16 +28,16 @@ @Value @NonFinal @SuperBuilder(toBuilder = true) -public abstract class AllocationEntity extends LogicalEntity { +public abstract class AllocationEntity extends CatalogEntity { public long adapterId; - public L logical; + public long logicalId; - protected AllocationEntity( L logical, long id, String name, EntityType type, NamespaceType namespaceType, long adapterId ) { - super( id, name, logical.namespaceId, type, namespaceType ); + protected AllocationEntity( long id, String name, long logicalId, long namespaceId, long adapterId, NamespaceType type ) { + super( id, name, namespaceId, EntityType.ENTITY, type ); this.adapterId = adapterId; - this.logical = logical; + this.logicalId = logicalId; } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationGraph.java b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationGraph.java index 742f636171..580bea3ef7 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationGraph.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationGraph.java @@ -24,11 +24,12 @@ import org.apache.calcite.linq4j.tree.Expressions; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.logical.LogicalGraph; +import org.polypheny.db.catalog.logistic.NamespaceType; @EqualsAndHashCode(callSuper = true) @Value @NonFinal -public class AllocationGraph extends AllocationEntity { +public class AllocationGraph extends AllocationEntity { public LogicalGraph logical; @@ -36,7 +37,7 @@ public class AllocationGraph extends AllocationEntity { public AllocationGraph( long id, LogicalGraph graph, long adapterId ) { - super( graph, id, graph.name, graph.entityType, graph.namespaceType, adapterId ); + super( id, graph.name, graph.id, graph.namespaceId, adapterId, NamespaceType.GRAPH ); this.id = id; this.logical = graph; } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationTable.java 
b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationTable.java index 2857586ef1..c11b69d7fa 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationTable.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationTable.java @@ -22,7 +22,6 @@ import java.util.ArrayList; import java.util.List; import java.util.Map; -import java.util.Map.Entry; import java.util.stream.Collectors; import lombok.EqualsAndHashCode; import lombok.Value; @@ -32,32 +31,28 @@ import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; import org.polypheny.db.catalog.entity.logical.LogicalColumn; -import org.polypheny.db.catalog.entity.logical.LogicalTable; -import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.catalog.logistic.PlacementType; @EqualsAndHashCode(callSuper = true) @Value @SuperBuilder(toBuilder = true) -public class AllocationTable extends AllocationEntity { +public class AllocationTable extends AllocationEntity { @Serialize public List placements; @Serialize public long adapterId; - @Serialize - public LogicalTable logicalTable; public AllocationTable( - @Deserialize("logicalTable") LogicalTable logicalTable, @Deserialize("id") long id, @Deserialize("name") String name, + @Deserialize("logicalId") long logicalId, + @Deserialize("namespaceId") long namespaceId, @Deserialize("adapterId") long adapterId, @Deserialize("placements") List placements ) { - super( logicalTable, id, name, EntityType.ENTITY, NamespaceType.RELATIONAL, adapterId ); - this.logicalTable = logicalTable; + super( id, name, logicalId, namespaceId, adapterId, NamespaceType.RELATIONAL ); this.adapterId = adapterId; this.placements = placements; } @@ -80,16 +75,12 @@ public Map getColumnNames() { } + @Deprecated public Map getColumns() { return null; } - public Map getColumnNamesIds() { - return getColumnNames().entrySet().stream().collect( Collectors.toMap( Entry::getValue, Entry::getKey ) ); - } - - public String getNamespaceName() { return null; } @@ -97,7 +88,7 @@ public String getNamespaceName() { public AllocationTable withAddedColumn( long columnId, PlacementType placementType, String physicalSchemaName, String physicalTableName, String physicalColumnName ) { List placements = new ArrayList<>( this.placements ); - placements.add( new CatalogColumnPlacement( logical.namespaceId, id, columnId, adapterId, placementType, physicalSchemaName, physicalColumnName, 0 ) ); + placements.add( new CatalogColumnPlacement( namespaceId, id, columnId, adapterId, placementType, physicalSchemaName, physicalColumnName, 0 ) ); return toBuilder().placements( placements ).build(); } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalCollection.java b/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalCollection.java index 63c91d1602..bfd2812dde 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalCollection.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalCollection.java @@ -23,17 +23,16 @@ import org.apache.calcite.linq4j.tree.Expression; import org.apache.calcite.linq4j.tree.Expressions; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.entity.logical.LogicalCollection; import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.logistic.NamespaceType; @EqualsAndHashCode(callSuper = true) @Value @NonFinal 
-public class PhysicalCollection extends PhysicalEntity { +public class PhysicalCollection extends PhysicalEntity { - public PhysicalCollection( LogicalCollection logical, long id, long namespaceId, String name, String namespaceName, EntityType type, NamespaceType namespaceType, long adapterId ) { - super( logical, id, name, namespaceId, namespaceName, type, namespaceType, adapterId ); + public PhysicalCollection( long id, long namespaceId, String name, String namespaceName, EntityType type, long adapterId ) { + super( id, name, namespaceId, namespaceName, type, NamespaceType.DOCUMENT, adapterId ); } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalEntity.java b/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalEntity.java index 46b0e9260e..c09fa9f490 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalEntity.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalEntity.java @@ -16,15 +16,26 @@ package org.polypheny.db.catalog.entity.physical; -import org.polypheny.db.catalog.entity.allocation.AllocationEntity; -import org.polypheny.db.catalog.entity.logical.LogicalEntity; +import lombok.EqualsAndHashCode; +import lombok.Value; +import lombok.experimental.NonFinal; +import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.logistic.NamespaceType; -public abstract class PhysicalEntity extends AllocationEntity { +@EqualsAndHashCode(callSuper = true) +@Value +@NonFinal +public abstract class PhysicalEntity extends CatalogEntity { - protected PhysicalEntity( L logical, long id, String name, long namespaceId, String namespaceName, EntityType type, NamespaceType namespaceType, long adapterId ) { - super( logical, id, name, type, namespaceType, adapterId ); + public String namespaceName; + public long adapterId; + + + protected PhysicalEntity( long id, String name, long namespaceId, String namespaceName, EntityType type, NamespaceType namespaceType, long adapterId ) { + super( id, name, namespaceId, type, namespaceType ); + this.namespaceName = namespaceName; + this.adapterId = adapterId; } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalGraph.java b/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalGraph.java index 4c5eb254cf..7caa9de796 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalGraph.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalGraph.java @@ -23,17 +23,16 @@ import org.apache.calcite.linq4j.tree.Expression; import org.apache.calcite.linq4j.tree.Expressions; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.logistic.NamespaceType; @EqualsAndHashCode(callSuper = true) @Value @NonFinal -public class PhysicalGraph extends PhysicalEntity { +public class PhysicalGraph extends PhysicalEntity { - public PhysicalGraph( LogicalGraph logical, long id, String name, EntityType type, NamespaceType namespaceType, long adapterId ) { - super( logical, id, name, id, name, type, namespaceType, adapterId ); // for graph both name and namespaceName are the same + public PhysicalGraph( long id, String name, EntityType type, long adapterId ) { + super( id, name, id, name, type, NamespaceType.GRAPH, adapterId ); // for graph both name and namespaceName are the same } diff --git 
a/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalTable.java b/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalTable.java index 934677ce3e..35a55003aa 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalTable.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalTable.java @@ -16,10 +16,9 @@ package org.polypheny.db.catalog.entity.physical; -import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; import java.io.Serializable; -import java.util.List; -import java.util.stream.Collectors; +import java.util.Map; import lombok.EqualsAndHashCode; import lombok.Value; import lombok.experimental.NonFinal; @@ -31,10 +30,7 @@ import org.polypheny.db.algebra.type.AlgDataTypeSystem; import org.polypheny.db.algebra.type.AlgProtoDataType; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.entity.CatalogColumnPlacement; import org.polypheny.db.catalog.entity.allocation.AllocationTable; -import org.polypheny.db.catalog.entity.logical.LogicalColumn; -import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.type.PolyTypeFactoryImpl; @@ -42,28 +38,25 @@ @EqualsAndHashCode(callSuper = true) @Value @NonFinal -public class PhysicalTable extends PhysicalEntity { +public class PhysicalTable extends PhysicalEntity { - public ImmutableList placements; - public ImmutableList columnIds; - public ImmutableList columnNames; - public String namespaceName; - public AllocationTable allocation; + public ImmutableMap columns; + + public String namespaceName; + public ImmutableMap types; - public PhysicalTable( AllocationTable allocation, long id, String name, long namespaceId, String namespaceName, EntityType type, NamespaceType namespaceType, List placements, List columnNames ) { - super( allocation.logical, id, name, namespaceId, namespaceName, type, namespaceType, allocation.adapterId ); - this.allocation = allocation; + public PhysicalTable( long id, String name, long namespaceId, String namespaceName, long adapterId, Map columns, Map types ) { + super( id, name, namespaceId, namespaceName, EntityType.ENTITY, NamespaceType.RELATIONAL, adapterId ); this.namespaceName = namespaceName; - this.placements = ImmutableList.copyOf( placements ); - this.columnIds = ImmutableList.copyOf( placements.stream().map( p -> p.columnId ).collect( Collectors.toList() ) ); - this.columnNames = ImmutableList.copyOf( columnNames ); + this.columns = ImmutableMap.copyOf( columns ); + this.types = ImmutableMap.copyOf( types ); } - public PhysicalTable( AllocationTable table, String name, String namespaceName, List columnNames ) { - this( table, table.id, name, table.namespaceId, namespaceName, table.entityType, table.namespaceType, table.placements, columnNames ); + public PhysicalTable( AllocationTable table, String name, String namespaceName, Map columns, Map types ) { + this( table.id, name, table.namespaceId, namespaceName, table.adapterId, columns, types ); } @@ -77,10 +70,8 @@ public AlgProtoDataType buildProto() { final AlgDataTypeFactory typeFactory = new PolyTypeFactoryImpl( AlgDataTypeSystem.DEFAULT ); final AlgDataTypeFactory.Builder fieldInfo = typeFactory.builder(); - for ( CatalogColumnPlacement placement : placements ) { - LogicalColumn logicalColumn = Catalog.getInstance().getSnapshot().getRelSnapshot( namespaceId ).getColumn( 
placement.columnId ); - AlgDataType sqlType = logicalColumn.getAlgDataType( typeFactory ); - fieldInfo.add( logicalColumn.name, placement.physicalColumnName, sqlType ).nullable( logicalColumn.nullable ); + for ( long id : columns.keySet() ) { + fieldInfo.add( columns.get( id ), columns.get( id ), types.get( id ) ).nullable( types.get( id ).isNullable() ); } return AlgDataTypeImpl.proto( fieldInfo.build() ); diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/AllocSnapshot.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/AllocSnapshot.java index c1f25c23b5..ab2c979a38 100644 --- a/core/src/main/java/org/polypheny/db/catalog/snapshot/AllocSnapshot.java +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/AllocSnapshot.java @@ -44,9 +44,9 @@ public interface AllocSnapshot { // AllocationGraph getAllocGraph( long id ); - List> getAllocationsOnAdapter( long id ); + List getAllocationsOnAdapter( long id ); - AllocationEntity getAllocEntity( long id ); + AllocationEntity getAllocEntity( long id ); /** * Gets a collective list of column placements per column on an adapter. diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/PhysicalSnapshot.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/PhysicalSnapshot.java index 7cc2a09919..fed2589603 100644 --- a/core/src/main/java/org/polypheny/db/catalog/snapshot/PhysicalSnapshot.java +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/PhysicalSnapshot.java @@ -39,6 +39,6 @@ public interface PhysicalSnapshot { PhysicalGraph getPhysicalGraph( long logicalId, long adapterId ); - List> getPhysicalsOnAdapter( long adapterId ); + List getPhysicalsOnAdapter( long adapterId ); } diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/AllocSnapshotImpl.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/AllocSnapshotImpl.java index b3864bdf77..e97a017969 100644 --- a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/AllocSnapshotImpl.java +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/AllocSnapshotImpl.java @@ -18,9 +18,15 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; +import java.util.ArrayList; +import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.stream.Collectors; import org.polypheny.db.catalog.catalogs.AllocationCatalog; +import org.polypheny.db.catalog.catalogs.AllocationDocumentCatalog; +import org.polypheny.db.catalog.catalogs.AllocationGraphCatalog; +import org.polypheny.db.catalog.catalogs.AllocationRelationalCatalog; import org.polypheny.db.catalog.entity.CatalogAdapter; import org.polypheny.db.catalog.entity.CatalogCollectionMapping; import org.polypheny.db.catalog.entity.CatalogCollectionPlacement; @@ -30,52 +36,176 @@ import org.polypheny.db.catalog.entity.CatalogPartition; import org.polypheny.db.catalog.entity.CatalogPartitionGroup; import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; +import org.polypheny.db.catalog.entity.allocation.AllocationCollection; import org.polypheny.db.catalog.entity.allocation.AllocationEntity; +import org.polypheny.db.catalog.entity.allocation.AllocationGraph; import org.polypheny.db.catalog.entity.allocation.AllocationTable; import org.polypheny.db.catalog.logistic.DataPlacementRole; +import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.catalog.logistic.Pattern; import org.polypheny.db.catalog.snapshot.AllocSnapshot; import 
org.polypheny.db.partition.properties.PartitionProperty; +import org.polypheny.db.util.Pair; public class AllocSnapshotImpl implements AllocSnapshot { + private final ImmutableMap tables; + private final ImmutableMap collections; + private final ImmutableMap graphs; + + private final ImmutableMap, CatalogColumnPlacement> adapterColumnPlacement; + + private final ImmutableMap allocs; + private final ImmutableMap> allocsOnAdapters; + private final ImmutableMap> columPlacements; + private final ImmutableMap, List> adapterLogicalTablePlacements; + + public AllocSnapshotImpl( Map allocationCatalogs ) { + this.tables = buildTables( allocationCatalogs + .values() + .stream() + .filter( a -> a.getNamespace().namespaceType == NamespaceType.RELATIONAL ) + .map( c -> (AllocationRelationalCatalog) c ) + .collect( Collectors.toList() ) ); + this.collections = buildCollections( allocationCatalogs + .values() + .stream() + .filter( a -> a.getNamespace().namespaceType == NamespaceType.DOCUMENT ) + .map( c -> (AllocationDocumentCatalog) c ) + .collect( Collectors.toList() ) ); + this.graphs = buildGraphs( allocationCatalogs + .values() + .stream() + .filter( a -> a.getNamespace().namespaceType == NamespaceType.GRAPH ) + .map( c -> (AllocationGraphCatalog) c ) + .collect( Collectors.toList() ) ); + + this.allocs = mergeAllocs(); + this.allocsOnAdapters = buildAllocsOnAdapters(); + this.adapterColumnPlacement = buildAdapterColumnPlacement(); + this.columPlacements = buildColumnPlacements(); + this.adapterLogicalTablePlacements = buildAdapterLogicalTablePlacements(); + } + + + private ImmutableMap, List> buildAdapterLogicalTablePlacements() { + Map, List> map = new HashMap<>(); + this.tables.forEach( ( k, v ) -> { + v.placements.forEach( p -> { + if ( !map.containsKey( Pair.of( p.adapterId, p.tableId ) ) ) { + map.put( Pair.of( p.adapterId, p.tableId ), new ArrayList<>() ); + } + map.get( Pair.of( p.adapterId, p.tableId ) ).add( p ); + } ); + } ); + + return ImmutableMap.copyOf( map ); + } + + + private ImmutableMap> buildColumnPlacements() { + Map> map = new HashMap<>(); + this.tables.forEach( ( k, v ) -> { + v.placements.forEach( p -> { + if ( !map.containsKey( p.columnId ) ) { + map.put( p.columnId, new ArrayList<>() ); + } + map.get( p.columnId ).add( p ); + } ); + } ); + + return ImmutableMap.copyOf( map ); + } + + + private ImmutableMap, CatalogColumnPlacement> buildAdapterColumnPlacement() { + Map, CatalogColumnPlacement> map = new HashMap<>(); + this.tables.forEach( ( k, v ) -> v.placements.forEach( p -> map.put( Pair.of( v.adapterId, p.columnId ), p ) ) ); + return ImmutableMap.copyOf( map ); + } + + + private ImmutableMap mergeAllocs() { + Map allocs = new HashMap<>(); + allocs.putAll( this.tables ); + allocs.putAll( this.collections ); + allocs.putAll( this.graphs ); + + return ImmutableMap.copyOf( allocs ); + } + + + private ImmutableMap> buildAllocsOnAdapters() { + Map> allocs = new HashMap<>(); + this.allocs.forEach( ( k, v ) -> { + if ( !allocs.containsKey( v.adapterId ) ) { + allocs.put( v.adapterId, new ArrayList<>() ); + } + allocs.get( v.adapterId ).add( v ); + } ); + return ImmutableMap.copyOf( allocs ); + + } + + + private ImmutableMap buildGraphs( List catalogs ) { + Map graphs = new HashMap<>(); + catalogs.forEach( c -> graphs.putAll( c.getGraphs() ) ); + + return ImmutableMap.copyOf( graphs ); + } + + + private ImmutableMap buildCollections( List catalogs ) { + Map collections = new HashMap<>(); + catalogs.forEach( c -> collections.putAll( c.getCollections() ) ); + + return 
ImmutableMap.copyOf( collections ); + } + + + private ImmutableMap buildTables( List catalogs ) { + Map tables = new HashMap<>(); + catalogs.forEach( c -> tables.putAll( c.getTables() ) ); + + return ImmutableMap.copyOf( tables ); } @Override - public List> getAllocationsOnAdapter( long id ) { - return null; + public List getAllocationsOnAdapter( long id ) { + return allocsOnAdapters.get( id ); } @Override - public AllocationEntity getAllocEntity( long id ) { - return null; + public AllocationEntity getAllocEntity( long id ) { + return allocs.get( id ); } @Override public CatalogColumnPlacement getColumnPlacement( long adapterId, long columnId ) { - return null; + return adapterColumnPlacement.get( Pair.of( adapterId, columnId ) ); } @Override public boolean checkIfExistsColumnPlacement( long adapterId, long columnId ) { - return false; + return adapterColumnPlacement.containsKey( Pair.of( adapterId, columnId ) ); } @Override public List getColumnPlacements( long columnId ) { - return null; + return columPlacements.get( columnId ); } @Override public List getColumnPlacementsOnAdapterPerTable( long adapterId, long tableId ) { - return null; + return adapterLogicalTablePlacements.get( Pair.of( adapterId, tableId ) ); } @@ -336,4 +466,10 @@ public PartitionProperty getPartitionProperty( long id ) { return null; } + + @Override + public boolean adapterHasPlacement( long adapterId, long id ) { + return false; + } + } diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/PhysicalSnapshotImpl.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/PhysicalSnapshotImpl.java index 3e7ed3fa86..4fc24c7278 100644 --- a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/PhysicalSnapshotImpl.java +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/PhysicalSnapshotImpl.java @@ -68,7 +68,7 @@ public PhysicalGraph getPhysicalGraph( long logicalId, long adapterId ) { @Override - public List> getPhysicalsOnAdapter( long adapterId ) { + public List getPhysicalsOnAdapter( long adapterId ) { return null; } diff --git a/dbms/src/main/java/org/polypheny/db/partition/AbstractPartitionManager.java b/dbms/src/main/java/org/polypheny/db/partition/AbstractPartitionManager.java index 7aea399c2c..1df636ba97 100644 --- a/dbms/src/main/java/org/polypheny/db/partition/AbstractPartitionManager.java +++ b/dbms/src/main/java/org/polypheny/db/partition/AbstractPartitionManager.java @@ -44,7 +44,7 @@ public abstract class AbstractPartitionManager implements PartitionManager { @Override public boolean probePartitionGroupDistributionChange( LogicalTable catalogTable, int storeId, long columnId, int threshold ) { // Check for the specified columnId if we still have a ColumnPlacement for every partitionGroup - for ( Long partitionGroupId : catalogTable.partitionProperty.partitionGroupIds ) { + for ( Long partitionGroupId : Catalog.getInstance().getSnapshot().getAllocSnapshot().getPartitionProperty( catalogTable.id ).partitionGroupIds ) { List ccps = catalog.getSnapshot().getAllocSnapshot().getColumnPlacementsByPartitionGroup( catalogTable.id, partitionGroupId, columnId ); if ( ccps.size() <= threshold ) { for ( CatalogColumnPlacement placement : ccps ) { @@ -69,7 +69,7 @@ public Map> getRelevantPlacements( LogicalTab CatalogPartition catalogPartition = catalog.getSnapshot().getAllocSnapshot().getPartition( partitionId ); List relevantCcps = new ArrayList<>(); - for ( LogicalColumn column : catalogTable.columns ) { + for ( LogicalColumn column : catalog.getSnapshot().getRelSnapshot( 
catalogTable.namespaceId ).getColumns( catalogTable.id ) ) { List ccps = catalog.getSnapshot().getAllocSnapshot().getColumnPlacementsByPartitionGroup( catalogTable.id, catalogPartition.partitionGroupId, column.id ); ccps.removeIf( ccp -> excludedAdapters.contains( ccp.adapterId ) ); if ( !ccps.isEmpty() ) { diff --git a/dbms/src/main/java/org/polypheny/db/partition/FrequencyMapImpl.java b/dbms/src/main/java/org/polypheny/db/partition/FrequencyMapImpl.java index dce6f24fe9..e42ece4ca8 100644 --- a/dbms/src/main/java/org/polypheny/db/partition/FrequencyMapImpl.java +++ b/dbms/src/main/java/org/polypheny/db/partition/FrequencyMapImpl.java @@ -41,10 +41,12 @@ import org.polypheny.db.catalog.logistic.DataPlacementRole; import org.polypheny.db.catalog.logistic.PartitionType; import org.polypheny.db.catalog.logistic.PlacementType; +import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.config.RuntimeConfig; import org.polypheny.db.monitoring.core.MonitoringServiceProvider; import org.polypheny.db.monitoring.events.metrics.DmlDataPoint; import org.polypheny.db.monitoring.events.metrics.QueryDataPointImpl; +import org.polypheny.db.partition.properties.PartitionProperty; import org.polypheny.db.partition.properties.TemperaturePartitionProperty; import org.polypheny.db.processing.DataMigrator; import org.polypheny.db.transaction.Statement; @@ -125,7 +127,7 @@ private void processAllPeriodicTables() { List periodicTables = catalog.getSnapshot().getTablesForPeriodicProcessing(); // Retrieve all Tables which rely on periodic processing for ( LogicalTable table : periodicTables ) { - if ( table.partitionProperty.partitionType == PartitionType.TEMPERATURE ) { + if ( catalog.getSnapshot().getAllocSnapshot().getPartitionProperty( table.id ).partitionType == PartitionType.TEMPERATURE ) { determinePartitionFrequency( table, invocationTimestamp ); } } @@ -157,11 +159,13 @@ private void determinePartitionDistribution( LogicalTable table ) { log.debug( "Determine access frequency of partitions of table: {}", table.name ); } + PartitionProperty property = catalog.getSnapshot().getAllocSnapshot().getPartitionProperty( table.id ); + // Get percentage of tables which can remain in HOT - long numberOfPartitionsInHot = (table.partitionProperty.partitionIds.size() * ((TemperaturePartitionProperty) table.partitionProperty).getHotAccessPercentageIn()) / 100; + long numberOfPartitionsInHot = (property.partitionIds.size() * ((TemperaturePartitionProperty) property).getHotAccessPercentageIn()) / 100; // These are the tables that can remain in HOT - long allowedTablesInHot = (table.partitionProperty.partitionIds.size() * ((TemperaturePartitionProperty) table.partitionProperty).getHotAccessPercentageOut()) / 100; + long allowedTablesInHot = (property.partitionIds.size() * ((TemperaturePartitionProperty) property).getHotAccessPercentageOut()) / 100; if ( numberOfPartitionsInHot == 0 ) { numberOfPartitionsInHot = 1; @@ -216,7 +220,7 @@ private void determinePartitionDistribution( LogicalTable table ) { // Which of those are currently in cold --> action needed - List currentHotPartitions = Catalog.getInstance().getSnapshot().getAllocSnapshot().getPartitions( ((TemperaturePartitionProperty) table.partitionProperty).getHotPartitionGroupId() ); + List currentHotPartitions = Catalog.getInstance().getSnapshot().getAllocSnapshot().getPartitions( ((TemperaturePartitionProperty) property).getHotPartitionGroupId() ); for ( CatalogPartition catalogPartition : currentHotPartitions ) { // Remove partitions from List if 
they are already in HOT (not necessary to send to DataMigrator) @@ -264,9 +268,12 @@ private void redistributePartitions( LogicalTable table, List partitionsFr Statement statement = transaction.createStatement(); DataMigrator dataMigrator = statement.getTransaction().getDataMigrator(); + Snapshot snapshot = transaction.getSnapshot(); + + PartitionProperty property = snapshot.getAllocSnapshot().getPartitionProperty( table.id ); - List adaptersWithHot = Catalog.getInstance().getSnapshot().getAllocSnapshot().getAdaptersByPartitionGroup( table.id, ((TemperaturePartitionProperty) table.partitionProperty).getHotPartitionGroupId() ); - List adaptersWithCold = Catalog.getInstance().getSnapshot().getAllocSnapshot().getAdaptersByPartitionGroup( table.id, ((TemperaturePartitionProperty) table.partitionProperty).getColdPartitionGroupId() ); + List adaptersWithHot = snapshot.getAllocSnapshot().getAdaptersByPartitionGroup( table.id, ((TemperaturePartitionProperty) property).getHotPartitionGroupId() ); + List adaptersWithCold = snapshot.getAllocSnapshot().getAdaptersByPartitionGroup( table.id, ((TemperaturePartitionProperty) property).getColdPartitionGroupId() ); log.debug( "Get adapters to create physical tables" ); // Validate that partition does not already exist on store @@ -293,9 +300,8 @@ private void redistributePartitions( LogicalTable table, List partitionsFr } // DROP all partitions on each store - - long hotPartitionGroupId = ((TemperaturePartitionProperty) table.partitionProperty).getHotPartitionGroupId(); - long coldPartitionGroupId = ((TemperaturePartitionProperty) table.partitionProperty).getColdPartitionGroupId(); + long hotPartitionGroupId = ((TemperaturePartitionProperty) property).getHotPartitionGroupId(); + long coldPartitionGroupId = ((TemperaturePartitionProperty) property).getColdPartitionGroupId(); // Update catalogInformation partitionsFromColdToHot.forEach( p -> Catalog.getInstance().getAllocRel( table.namespaceId ).updatePartition( p, hotPartitionGroupId ) ); @@ -396,14 +402,16 @@ private List filterList( long namespaceId, long adapterId, long tableId, L */ @Override public void determinePartitionFrequency( LogicalTable table, long invocationTimestamp ) { - Timestamp queryStart = new Timestamp( invocationTimestamp - ((TemperaturePartitionProperty) table.partitionProperty).getFrequencyInterval() * 1000 ); + Snapshot snapshot = catalog.getSnapshot(); + PartitionProperty property = snapshot.getAllocSnapshot().getPartitionProperty( table.id ); + Timestamp queryStart = new Timestamp( invocationTimestamp - ((TemperaturePartitionProperty) property).getFrequencyInterval() * 1000 ); accessCounter = new HashMap<>(); - List tempPartitionIds = new ArrayList<>( table.partitionProperty.partitionIds ); + List tempPartitionIds = new ArrayList<>( property.partitionIds ); tempPartitionIds.forEach( p -> accessCounter.put( p, (long) 0 ) ); - switch ( ((TemperaturePartitionProperty) table.partitionProperty).getPartitionCostIndication() ) { + switch ( ((TemperaturePartitionProperty) property).getPartitionCostIndication() ) { case ALL: for ( QueryDataPointImpl queryDataPoint : MonitoringServiceProvider.getInstance().getDataPointsAfter( QueryDataPointImpl.class, queryStart ) ) { queryDataPoint.getAccessedPartitions().forEach( p -> incrementPartitionAccess( p, tempPartitionIds ) );
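The pattern above recurs through the rest of this patch: partition metadata is resolved once from the allocation snapshot instead of being read off LogicalTable. With the angle brackets that were lost in extraction restored, the idiom presumably reads as follows (sketch; catalog and table are the handles already in scope above):

    // Resolve once, reuse; only temperature-partitioned tables carry
    // hot/cold partition group ids, hence the guarded cast.
    PartitionProperty property = catalog.getSnapshot()
            .getAllocSnapshot()
            .getPartitionProperty( table.id );

    if ( property instanceof TemperaturePartitionProperty ) {
        // These group ids then drive the hot/cold redistribution above.
        long hotGroupId = ((TemperaturePartitionProperty) property).getHotPartitionGroupId();
        long coldGroupId = ((TemperaturePartitionProperty) property).getColdPartitionGroupId();
    }

diff --git a/dbms/src/main/java/org/polypheny/db/partition/HashPartitionManager.java b/dbms/src/main/java/org/polypheny/db/partition/HashPartitionManager.java index d52eb99379..4bbefc407c 100644 --- 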
a/dbms/src/main/java/org/polypheny/db/partition/HashPartitionManager.java +++ b/dbms/src/main/java/org/polypheny/db/partition/HashPartitionManager.java @@ -20,10 +20,12 @@ import java.util.Arrays; import java.util.List; import lombok.extern.slf4j.Slf4j; +import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.partition.PartitionFunctionInfo.PartitionFunctionInfoColumn; import org.polypheny.db.partition.PartitionFunctionInfo.PartitionFunctionInfoColumnType; +import org.polypheny.db.partition.properties.PartitionProperty; import org.polypheny.db.type.PolyType; @@ -43,11 +45,13 @@ public long getTargetPartitionId( LogicalTable catalogTable, String columnValue hashValue *= -1; } + PartitionProperty property = Catalog.getInstance().getSnapshot().getAllocSnapshot().getPartitionProperty( catalogTable.id ); + // Get designated HASH partition based on number of internal partitions - int partitionIndex = (int) (hashValue % catalogTable.partitionProperty.partitionIds.size()); + int partitionIndex = (int) (hashValue % property.partitionIds.size()); // Finally decide on which partition to put it - return catalogTable.partitionProperty.partitionIds.get( partitionIndex ); + return property.partitionIds.get( partitionIndex ); } diff --git a/dbms/src/main/java/org/polypheny/db/partition/TemperatureAwarePartitionManager.java b/dbms/src/main/java/org/polypheny/db/partition/TemperatureAwarePartitionManager.java index 0d51715c8b..73465a4e9a 100644 --- a/dbms/src/main/java/org/polypheny/db/partition/TemperatureAwarePartitionManager.java +++ b/dbms/src/main/java/org/polypheny/db/partition/TemperatureAwarePartitionManager.java @@ -21,11 +21,13 @@ import java.util.Arrays; import java.util.List; import java.util.Map; +import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.partition.PartitionFunctionInfo.PartitionFunctionInfoColumn; import org.polypheny.db.partition.PartitionFunctionInfo.PartitionFunctionInfoColumnType; +import org.polypheny.db.partition.properties.PartitionProperty; import org.polypheny.db.partition.properties.TemperaturePartitionProperty; import org.polypheny.db.type.PolyType; @@ -41,8 +43,9 @@ public class TemperatureAwarePartitionManager extends AbstractPartitionManager { public long getTargetPartitionId( LogicalTable catalogTable, String columnValue ) { // Get partition manager PartitionManagerFactory partitionManagerFactory = PartitionManagerFactory.getInstance(); + PartitionProperty property = Catalog.getInstance().getSnapshot().getAllocSnapshot().getPartitionProperty( catalogTable.id ); PartitionManager partitionManager = partitionManagerFactory.getPartitionManager( - ((TemperaturePartitionProperty) catalogTable.partitionProperty).getInternalPartitionFunction() + ((TemperaturePartitionProperty) property).getInternalPartitionFunction() ); return partitionManager.getTargetPartitionId( catalogTable, columnValue ); @@ -53,8 +56,9 @@ public long getTargetPartitionId( LogicalTable catalogTable, String columnValue public Map> getRelevantPlacements( LogicalTable catalogTable, List partitionIds, List excludedAdapters ) { // Get partition manager PartitionManagerFactory partitionManagerFactory = PartitionManagerFactory.getInstance(); + PartitionProperty property = 
Catalog.getInstance().getSnapshot().getAllocSnapshot().getPartitionProperty( catalogTable.id ); PartitionManager partitionManager = partitionManagerFactory.getPartitionManager( - ((TemperaturePartitionProperty) catalogTable.partitionProperty).getInternalPartitionFunction() + ((TemperaturePartitionProperty) property).getInternalPartitionFunction() ); return partitionManager.getRelevantPlacements( catalogTable, partitionIds, excludedAdapters ); @@ -65,8 +69,9 @@ public Map> getRelevantPlacements( LogicalTab public Map>> getAllPlacements( LogicalTable catalogTable, List partitionIds ) { // Get partition manager PartitionManagerFactory partitionManagerFactory = PartitionManagerFactory.getInstance(); + PartitionProperty property = Catalog.getInstance().getSnapshot().getAllocSnapshot().getPartitionProperty( catalogTable.id ); PartitionManager partitionManager = partitionManagerFactory.getPartitionManager( - ((TemperaturePartitionProperty) catalogTable.partitionProperty).getInternalPartitionFunction() + ((TemperaturePartitionProperty) property).getInternalPartitionFunction() ); return partitionManager.getAllPlacements( catalogTable, partitionIds ); diff --git a/dbms/src/main/java/org/polypheny/db/processing/ConstraintEnforceAttacher.java b/dbms/src/main/java/org/polypheny/db/processing/ConstraintEnforceAttacher.java index c6afc837ae..b09e6a8231 100644 --- a/dbms/src/main/java/org/polypheny/db/processing/ConstraintEnforceAttacher.java +++ b/dbms/src/main/java/org/polypheny/db/processing/ConstraintEnforceAttacher.java @@ -407,21 +407,22 @@ public static AlgRoot enforceConstraintBeforeQuery( AlgRoot logicalRoot, Stateme builder.scan( table.name ); builder.join( JoinAlgType.INNER, builder.literal( true ) ); + List columns = snapshot.getColumns( table.id ); + List columNames = columns.stream().map( c -> c.name ).collect( Collectors.toList() ); + List conditionList1 = primaryKey.getColumnNames().stream().map( c -> builder.call( OperatorRegistry.get( OperatorName.EQUALS ), builder.field( names.indexOf( c ) ), - builder.field( names.size() + table.getColumnNames().indexOf( c ) ) - ) - ).collect( Collectors.toList() ); + builder.field( names.size() + columNames.indexOf( c ) ) + ) ).collect( Collectors.toList() ); List conditionList2 = constraint.key.getColumnNames().stream().map( c -> builder.call( OperatorRegistry.get( OperatorName.EQUALS ), builder.field( names.indexOf( "$projected$." 
+ c ) ), - builder.field( names.size() + table.getColumnNames().indexOf( c ) ) - ) - ).collect( Collectors.toList() ); + builder.field( names.size() + columNames.indexOf( c ) ) + ) ).collect( Collectors.toList() ); RexNode condition = rexBuilder.makeCall( diff --git a/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java b/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java index 0bbd9e7c04..fe9a2b5415 100644 --- a/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java +++ b/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java @@ -58,9 +58,11 @@ import org.polypheny.db.catalog.refactor.ModifiableEntity; import org.polypheny.db.catalog.snapshot.AllocSnapshot; import org.polypheny.db.catalog.snapshot.LogicalRelSnapshot; +import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.config.RuntimeConfig; import org.polypheny.db.partition.PartitionManager; import org.polypheny.db.partition.PartitionManagerFactory; +import org.polypheny.db.partition.properties.PartitionProperty; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.rex.RexBuilder; import org.polypheny.db.rex.RexDynamicParam; @@ -165,9 +167,10 @@ private static LogicalLpgValues getLogicalLpgValues( AlgBuilder builder, PolyGra @Override public void copyData( Transaction transaction, CatalogAdapter store, List columns, List partitionIds ) { - LogicalRelSnapshot snapshot = Catalog.getInstance().getSnapshot().getRelSnapshot( columns.get( 0 ).namespaceId ); - LogicalTable table = snapshot.getTable( columns.get( 0 ).tableId ); - CatalogPrimaryKey primaryKey = snapshot.getPrimaryKey( table.primaryKey ); + Snapshot snapshot = Catalog.getInstance().getSnapshot(); + LogicalRelSnapshot relSnapshot = snapshot.getRelSnapshot( columns.get( 0 ).namespaceId ); + LogicalTable table = relSnapshot.getTable( columns.get( 0 ).tableId ); + CatalogPrimaryKey primaryKey = relSnapshot.getPrimaryKey( table.primaryKey ); // Check Lists List targetColumnPlacements = new LinkedList<>(); @@ -179,7 +182,7 @@ public void copyData( Transaction transaction, CatalogAdapter store, List> placementDistribution = new HashMap<>(); - if ( table.partitionProperty.isPartitioned ) { + PartitionProperty property = snapshot.getAllocSnapshot().getPartitionProperty( table.id ); + if ( property.isPartitioned ) { PartitionManagerFactory partitionManagerFactory = PartitionManagerFactory.getInstance(); - PartitionManager partitionManager = partitionManagerFactory.getPartitionManager( table.partitionProperty.partitionType ); + PartitionManager partitionManager = partitionManagerFactory.getPartitionManager( property.partitionType ); placementDistribution = partitionManager.getRelevantPlacements( table, partitionIds, Collections.singletonList( store.id ) ); } else { placementDistribution.put( - table.partitionProperty.partitionIds.get( 0 ), + property.partitionIds.get( 0 ), selectSourcePlacements( table, selectColumnList, targetColumnPlacements.get( 0 ).adapterId ) ); } @@ -465,9 +469,10 @@ public AlgRoot getSourceIterator( Statement statement, Map selectSourcePlacements( LogicalTable table, List columns, long excludingAdapterId ) { // Find the adapter with the most column placements Catalog catalog = Catalog.getInstance(); + Snapshot snapshot = catalog.getSnapshot(); long adapterIdWithMostPlacements = -1; int numOfPlacements = 0; - for ( Entry> entry : catalog.getSnapshot().getAllocSnapshot().getColumnPlacementsByAdapter( table.id ).entrySet() ) { + for ( Entry> entry : 
snapshot.getAllocSnapshot().getColumnPlacementsByAdapter( table.id ).entrySet() ) { if ( entry.getKey() != excludingAdapterId && entry.getValue().size() > numOfPlacements ) { adapterIdWithMostPlacements = entry.getKey(); numOfPlacements = entry.getValue().size(); @@ -478,16 +483,15 @@ public static List selectSourcePlacements( LogicalTable for ( LogicalColumn logicalColumn : columns ) { columnIds.add( logicalColumn.id ); } - AllocSnapshot snapshot = catalog.getSnapshot().getAllocSnapshot(); // Take the adapter with most placements as base and add missing column placements List placementList = new LinkedList<>(); - for ( LogicalColumn column : table.columns ) { + for ( LogicalColumn column : snapshot.getRelSnapshot( table.namespaceId ).getColumns( table.id ) ) { if ( columnIds.contains( column.id ) ) { - if ( snapshot.getDataPlacement( adapterIdWithMostPlacements, table.id ).columnPlacementsOnAdapter.contains( column.id ) ) { - placementList.add( snapshot.getColumnPlacement( adapterIdWithMostPlacements, column.id ) ); + if ( snapshot.getAllocSnapshot().getDataPlacement( adapterIdWithMostPlacements, table.id ).columnPlacementsOnAdapter.contains( column.id ) ) { + placementList.add( snapshot.getAllocSnapshot().getColumnPlacement( adapterIdWithMostPlacements, column.id ) ); } else { - for ( CatalogColumnPlacement placement : snapshot.getColumnPlacements( column.id ) ) { + for ( CatalogColumnPlacement placement : snapshot.getAllocSnapshot().getColumnPlacements( column.id ) ) { if ( placement.adapterId != excludingAdapterId ) { placementList.add( placement ); break; @@ -617,39 +621,40 @@ public void copyPartitionData( Transaction transaction, CatalogAdapter store, Lo if ( sourceTable.id != targetTable.id ) { throw new RuntimeException( "Unsupported migration scenario. 
Table ID mismatch" ); } - - CatalogPrimaryKey primaryKey = Catalog.getInstance().getSnapshot().getRelSnapshot( sourceTable.namespaceId ).getPrimaryKey( sourceTable.primaryKey ); + Snapshot snapshot = Catalog.getInstance().getSnapshot(); + CatalogPrimaryKey primaryKey = snapshot.getRelSnapshot( sourceTable.namespaceId ).getPrimaryKey( sourceTable.primaryKey ); // Check Lists List targetColumnPlacements = new LinkedList<>(); for ( LogicalColumn logicalColumn : columns ) { - targetColumnPlacements.add( Catalog.getInstance().getSnapshot().getAllocSnapshot().getColumnPlacement( store.id, logicalColumn.id ) ); + targetColumnPlacements.add( snapshot.getAllocSnapshot().getColumnPlacement( store.id, logicalColumn.id ) ); } List selectColumnList = new LinkedList<>( columns ); // Add primary keys to select column list for ( long cid : primaryKey.columnIds ) { - LogicalColumn logicalColumn = Catalog.getInstance().getSnapshot().getRelSnapshot( sourceTable.namespaceId ).getColumn( cid ); + LogicalColumn logicalColumn = snapshot.getRelSnapshot( sourceTable.namespaceId ).getColumn( cid ); if ( !selectColumnList.contains( logicalColumn ) ) { selectColumnList.add( logicalColumn ); } } + PartitionProperty targetProperty = snapshot.getAllocSnapshot().getPartitionProperty( targetTable.id ); // Add partition columns to select column list - long partitionColumnId = targetTable.partitionProperty.partitionColumnId; - LogicalColumn partitionColumn = Catalog.getInstance().getSnapshot().getRelSnapshot( sourceTable.namespaceId ).getColumn( partitionColumnId ); + long partitionColumnId = targetProperty.partitionColumnId; + LogicalColumn partitionColumn = snapshot.getRelSnapshot( sourceTable.namespaceId ).getColumn( partitionColumnId ); if ( !selectColumnList.contains( partitionColumn ) ) { selectColumnList.add( partitionColumn ); } PartitionManagerFactory partitionManagerFactory = PartitionManagerFactory.getInstance(); - PartitionManager partitionManager = partitionManagerFactory.getPartitionManager( targetTable.partitionProperty.partitionType ); + PartitionManager partitionManager = partitionManagerFactory.getPartitionManager( targetProperty.partitionType ); //We need a columnPlacement for every partition Map> placementDistribution = new HashMap<>(); - - placementDistribution.put( sourceTable.partitionProperty.partitionIds.get( 0 ), selectSourcePlacements( sourceTable, selectColumnList, -1 ) ); + PartitionProperty sourceProperty = snapshot.getAllocSnapshot().getPartitionProperty( sourceTable.id ); + placementDistribution.put( sourceProperty.partitionIds.get( 0 ), selectSourcePlacements( sourceTable, selectColumnList, -1 ) ); Statement sourceStatement = transaction.createStatement(); @@ -691,9 +696,9 @@ public void copyPartitionData( Transaction transaction, CatalogAdapter store, Lo int partitionColumnIndex = -1; String parsedValue = null; String nullifiedPartitionValue = partitionManager.getUnifiedNullValue(); - if ( targetTable.partitionProperty.isPartitioned ) { - if ( resultColMapping.containsKey( targetTable.partitionProperty.partitionColumnId ) ) { - partitionColumnIndex = resultColMapping.get( targetTable.partitionProperty.partitionColumnId ); + if ( targetProperty.isPartitioned ) { + if ( resultColMapping.containsKey( targetProperty.partitionColumnId ) ) { + partitionColumnIndex = resultColMapping.get( targetProperty.partitionColumnId ); } else { parsedValue = nullifiedPartitionValue; } diff --git a/dbms/src/main/java/org/polypheny/db/routing/UiRoutingPageUtil.java 
b/dbms/src/main/java/org/polypheny/db/routing/UiRoutingPageUtil.java index 971d6d313d..e35514f608 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/UiRoutingPageUtil.java +++ b/dbms/src/main/java/org/polypheny/db/routing/UiRoutingPageUtil.java @@ -107,7 +107,7 @@ private static void addSelectedAdapterTable( InformationManager queryAnalyzer, P CatalogPartitionPlacement catalogPartitionPlacement = snapshot.getAllocSnapshot().getPartitionPlacement( p.left, k ); LogicalColumn logicalColumn = snapshot.getRelSnapshot( catalogTable.namespaceId ).getColumn( catalogColumnPlacement.columnId ); table.addRow( - catalogTable.getNamespaceName() + "." + catalogTable.name, + snapshot.getNamespace( catalogTable.namespaceId ) + "." + catalogTable.name, logicalColumn.name, catalogPartitionGroup.partitionGroupName + " --> " + catalogPartition.id, catalogPartitionPlacement.adapterUniqueName, diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/AbstractDqlRouter.java b/dbms/src/main/java/org/polypheny/db/routing/routers/AbstractDqlRouter.java index 14e885de42..601c5b2d7c 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/AbstractDqlRouter.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/AbstractDqlRouter.java @@ -41,14 +41,13 @@ import org.polypheny.db.algebra.logical.relational.LogicalRelModify; import org.polypheny.db.algebra.logical.relational.LogicalRelScan; import org.polypheny.db.algebra.logical.relational.LogicalValues; +import org.polypheny.db.catalog.entity.logical.LogicalEntity; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.logistic.NamespaceType; -import org.polypheny.db.catalog.logistic.Pattern; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.rex.RexBuilder; import org.polypheny.db.routing.LogicalQueryInformation; import org.polypheny.db.routing.Router; -import org.polypheny.db.schema.LogicalEntity; import org.polypheny.db.schema.ModelTrait; import org.polypheny.db.tools.AlgBuilder; import org.polypheny.db.tools.RoutedAlgBuilder; @@ -207,7 +206,7 @@ protected List buildSelect( AlgNode node, List) node, statement, builders.get( 0 ), null ) ); } if ( node instanceof LogicalRelScan && node.getEntity() != null ) { @@ -224,12 +223,13 @@ protected List buildSelect( AlgNode node, List 1 ) { + if ( snapshot.getAllocSnapshot().getDataPlacements( catalogTable.id ).size() > 1 ) { return handleVerticalPartitioningOrReplication( node, catalogTable, statement, logicalTable, builders, cluster, queryInformation ); } return handleNonePartitioning( node, catalogTable, statement, builders, cluster, queryInformation ); @@ -248,8 +248,8 @@ private List handleRelationalOnGraphScan( AlgNode node, Statem AlgBuilder algBuilder = AlgBuilder.create( statement ); RexBuilder rexBuilder = algBuilder.getRexBuilder(); - algBuilder.lpgScan( statement.getTransaction().getSnapshot().getNamespaces( new Pattern( logicalTable.getLogicalSchemaName() ) ).get( 0 ).id ); - algBuilder.lpgMatch( List.of( algBuilder.lpgNodeMatch( List.of( logicalTable.getLogicalTableName() ) ) ), List.of( "n" ) ); + algBuilder.lpgScan( logicalTable.id ); + algBuilder.lpgMatch( List.of( algBuilder.lpgNodeMatch( List.of( logicalTable.name ) ) ), List.of( "n" ) ); algBuilder.lpgProject( List.of( rexBuilder.makeLpgGetId(), rexBuilder.makeLpgPropertiesExtract(), rexBuilder.makeLpgLabels() ), List.of( "id", "properties", "labels" ) ); diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java 
b/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java index 779f87b97c..5dc24478a4 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java @@ -74,6 +74,7 @@ import org.polypheny.db.config.RuntimeConfig; import org.polypheny.db.languages.OperatorRegistry; import org.polypheny.db.languages.QueryLanguage; +import org.polypheny.db.partition.properties.PartitionProperty; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.rex.RexBuilder; @@ -123,7 +124,7 @@ protected static Map> selectPlacement( Logica // Take the adapter with most placements as base and add missing column placements List placementList = new LinkedList<>(); - for ( LogicalColumn column : table.columns ) { + for ( LogicalColumn column : snapshot.getRelSnapshot( table.namespaceId ).getColumns( table.id ) ) { if ( snapshot.getAllocSnapshot().getDataPlacement( adapterIdWithMostPlacements, table.id ).columnPlacementsOnAdapter.contains( column.id ) ) { placementList.add( snapshot.getAllocSnapshot().getColumnPlacements( column.id ).get( 0 ) ); } else { @@ -132,7 +133,8 @@ protected static Map> selectPlacement( Logica } return new HashMap<>() {{ - put( table.partitionProperty.partitionIds.get( 0 ), placementList ); + PartitionProperty property = snapshot.getAllocSnapshot().getPartitionProperty( table.id ); + put( property.partitionIds.get( 0 ), placementList ); }}; } @@ -176,7 +178,7 @@ public RoutedAlgBuilder handleScan( Statement statement, long partitionId ) { - PhysicalEntity physical = snapshot.getPhysicalSnapshot().getPhysicalTable( partitionId ); + PhysicalEntity physical = snapshot.getPhysicalSnapshot().getPhysicalTable( partitionId ); AlgNode node = builder.scan( physical ).build(); builder.push( node ); @@ -405,7 +407,7 @@ public AlgNode handleGraphScan( LogicalLpgScan alg, Statement statement, @Nullab List scans = new ArrayList<>(); - List placements = catalogGraph.placements; + List placements = snapshot.getAllocSnapshot().getGraphPlacements( catalogGraph.id ).stream().map( p -> p.adapterId ).collect( Collectors.toList() ); if ( placementId != null ) { placements = List.of( placementId ); } @@ -530,7 +532,7 @@ protected RoutedAlgBuilder handleDocumentScan( DocumentScan alg, Statement st List scans = new ArrayList<>(); - List placements = collection.placements; + List placements = snapshot.getAllocSnapshot().getCollectionPlacements( collection.id ).stream().map( p -> p.adapterId ).collect( Collectors.toList() ); if ( adapterId != null ) { placements = List.of( adapterId ); } @@ -577,7 +579,7 @@ private RoutedAlgBuilder handleTransformerDocScan( DocumentScan alg, Statemen @NotNull private RoutedAlgBuilder handleDocumentOnRelational( DocumentScan node, Long adapterId, Statement statement, RoutedAlgBuilder builder ) { - List columns = node.entity.columns; + List columns = statement.getTransaction().getSnapshot().getRelSnapshot( node.entity.namespaceId ).getColumns( node.entity.id ); AlgTraitSet out = node.getTraitSet().replace( ModelTrait.RELATIONAL ); CatalogEntity subTable = getSubstitutionTable( statement, node.entity.id, columns.get( 0 ).id, adapterId ); builder.scan( subTable );
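Throughout BaseRouter the adapter ids of a graph or collection are now derived from the snapshot's placement list rather than from a field on the entity. With the stripped generics restored, the collection branch presumably reads (sketch; snapshot, collection and adapterId are the surrounding locals):

    List<Long> placements = snapshot.getAllocSnapshot()
            .getCollectionPlacements( collection.id )
            .stream()
            .map( p -> p.adapterId )
            .collect( Collectors.toList() );
    if ( adapterId != null ) {
        placements = List.of( adapterId ); // pin routing to a single adapter
    }

diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/CachedPlanRouter.java b/dbms/src/main/java/org/polypheny/db/routing/routers/CachedPlanRouter.java index b2d283fbf3..0b61461488 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/CachedPlanRouter.java 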
+++ b/dbms/src/main/java/org/polypheny/db/routing/routers/CachedPlanRouter.java @@ -29,6 +29,7 @@ import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.partition.properties.PartitionProperty; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.routing.LogicalQueryInformation; import org.polypheny.db.routing.dto.CachedProposedRoutingPlan; @@ -57,13 +58,13 @@ private RoutedAlgBuilder buildCachedSelect( AlgNode node, RoutedAlgBuilder build } if ( node instanceof DocumentScan ) { - return super.handleDocumentScan( (DocumentScan) node, statement, builder, null ); + return super.handleDocumentScan( (DocumentScan) node, statement, builder, null ); } if ( node instanceof LogicalRelScan && node.getEntity() != null ) { LogicalTable catalogTable = node.getEntity().unwrap( LogicalTable.class ); - - List partitionIds = catalogTable.partitionProperty.partitionIds; + PartitionProperty property = snapshot.getAllocSnapshot().getPartitionProperty( catalogTable.id ); + List partitionIds = property.partitionIds; Map> placement = new HashMap<>(); for ( long partition : partitionIds ) { if ( cachedPlan.physicalPlacementsOfPartitions.get( partition ) != null ) { diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java b/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java index 88794875b4..c91ff143a8 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java @@ -74,7 +74,6 @@ import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.algebra.type.AlgDataTypeFieldImpl; import org.polypheny.db.algebra.type.AlgRecordType; -import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogAdapter; import org.polypheny.db.catalog.entity.CatalogCollectionPlacement; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; @@ -94,6 +93,7 @@ import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.partition.PartitionManager; import org.polypheny.db.partition.PartitionManagerFactory; +import org.polypheny.db.partition.properties.PartitionProperty; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.processing.WhereClauseVisitor; import org.polypheny.db.rex.RexBuilder; @@ -154,9 +154,9 @@ public AlgNode routeDml( LogicalRelModify modify, Statement statement ) { // Essentially gets a list of all stores where this table resides List pkPlacements = snapshot.getAllocSnapshot().getColumnPlacements( pkColumn.id ); - - if ( catalogTable.partitionProperty.isPartitioned && log.isDebugEnabled() ) { - log.debug( "\nListing all relevant stores for table: '{}' and all partitions: {}", catalogTable.name, catalogTable.partitionProperty.partitionGroupIds ); + PartitionProperty property = snapshot.getAllocSnapshot().getPartitionProperty( catalogTable.id ); + if ( property.isPartitioned && log.isDebugEnabled() ) { + log.debug( "\nListing all relevant stores for table: '{}' and all partitions: {}", catalogTable.name, property.partitionGroupIds ); for ( CatalogColumnPlacement dataPlacement : pkPlacements ) { log.debug( "\t\t -> '{}' {}\t{}", @@ -180,14 +180,14 @@ public AlgNode routeDml( LogicalRelModify modify, Statement statement ) { Map newParameterValues = new HashMap<>(); for ( CatalogColumnPlacement pkPlacement : pkPlacements ) { - // Get placements on store List 
placementsOnAdapter = snapshot.getAllocSnapshot().getColumnPlacementsOnAdapterPerTable( pkPlacement.adapterId, catalogTable.id ); // If this is an update, check whether we need to execute on this store at all List updateColumnList = modify.getUpdateColumnList(); List sourceExpressionList = modify.getSourceExpressionList(); - if ( placementsOnAdapter.size() != catalogTable.columns.size() ) { + List columns = snapshot.getRelSnapshot( catalogTable.namespaceId ).getColumns( catalogTable.id ); + if ( placementsOnAdapter.size() != columns.size() ) { if ( modify.getOperation() == Modify.Operation.UPDATE ) { updateColumnList = new LinkedList<>( modify.getUpdateColumnList() ); @@ -213,14 +213,14 @@ public AlgNode routeDml( LogicalRelModify modify, Statement statement ) { long identifiedPartitionForSetValue = -1; Set accessedPartitionList = new HashSet<>(); // Identify where clause of UPDATE - if ( catalogTable.partitionProperty.isPartitioned ) { + if ( property.isPartitioned ) { boolean worstCaseRouting = false; Set identifiedPartitionsInFilter = new HashSet<>(); PartitionManagerFactory partitionManagerFactory = PartitionManagerFactory.getInstance(); - PartitionManager partitionManager = partitionManagerFactory.getPartitionManager( catalogTable.partitionProperty.partitionType ); + PartitionManager partitionManager = partitionManagerFactory.getPartitionManager( property.partitionType ); - WhereClauseVisitor whereClauseVisitor = new WhereClauseVisitor( statement, catalogTable.getColumnIds().indexOf( catalogTable.partitionProperty.partitionColumnId ) ); + WhereClauseVisitor whereClauseVisitor = new WhereClauseVisitor( statement, columns.stream().map( c -> c.id ).collect( Collectors.toList() ).indexOf( property.partitionColumnId ) ); modify.accept( new AlgShuttleImpl() { @Override public AlgNode visit( LogicalFilter filter ) { @@ -260,9 +260,9 @@ public AlgNode visit( LogicalFilter filter ) { int index = 0; for ( String cn : updateColumnList ) { - if ( snapshot.getRelSnapshot( catalogTable.namespaceId ).getColumn( catalogTable.id, cn ).id == catalogTable.partitionProperty.partitionColumnId ) { + if ( snapshot.getRelSnapshot( catalogTable.namespaceId ).getColumn( catalogTable.id, cn ).id == property.partitionColumnId ) { if ( log.isDebugEnabled() ) { - log.debug( " UPDATE: Found PartitionColumnID Match: '{}' at index: {}", catalogTable.partitionProperty.partitionColumnId, index ); + log.debug( " UPDATE: Found PartitionColumnID Match: '{}' at index: {}", property.partitionColumnId, index ); } // Routing/Locking can now be executed on certain partitions partitionValue = sourceExpressionList.get( index ).toString().replace( "'", "" ); @@ -318,7 +318,7 @@ else if ( identifiedPartitionForSetValue != -1 ) { //Partition functionality cannot be used --> worstCase --> send query to every partition else { worstCaseRouting = true; - accessedPartitionList = new HashSet<>( catalogTable.partitionProperty.partitionIds ); + accessedPartitionList = new HashSet<>( property.partitionIds ); } } else if ( modify.getOperation() == Modify.Operation.INSERT ) { @@ -333,14 +333,14 @@ else if ( identifiedPartitionForSetValue != -1 ) { String columnFieldName = (modify.getInput()).getRowType().getFieldList().get( j ).getKey(); // Retrieve columnId of fieldName and map it to its fieldList location of INSERT Stmt - int columnIndex = catalogTable.getColumnNames().indexOf( columnFieldName ); - resultColMapping.put( catalogTable.getColumnIds().get( columnIndex ), j ); + int columnIndex = columns.stream().map( c -> c.name ).collect( Collectors.toList() ).indexOf( columnFieldName ); + 
resultColMapping.put( columns.stream().map( c -> c.id ).collect( Collectors.toList() ).get( columnIndex ), j ); // Determine location of partitionColumn in fieldList - if ( catalogTable.getColumnIds().get( columnIndex ) == catalogTable.partitionProperty.partitionColumnId ) { + if ( columns.stream().map( c -> c.id ).collect( Collectors.toList() ).get( columnIndex ) == property.partitionColumnId ) { partitionColumnIndex = columnIndex; if ( log.isDebugEnabled() ) { - log.debug( "INSERT: Found PartitionColumnID: '{}' at column index: {}", catalogTable.partitionProperty.partitionColumnId, j ); + log.debug( "INSERT: Found PartitionColumnID: '{}' at column index: {}", property.partitionColumnId, j ); worstCaseRouting = false; } @@ -414,7 +414,7 @@ else if ( identifiedPartitionForSetValue != -1 ) { } else if ( modify.getInput() instanceof LogicalProject && ((LogicalProject) modify.getInput()).getInput() instanceof LogicalValues ) { - String partitionColumnName = snapshot.getRelSnapshot( catalogTable.namespaceId ).getColumn( catalogTable.partitionProperty.partitionColumnId ).name; + String partitionColumnName = snapshot.getRelSnapshot( catalogTable.namespaceId ).getColumn( property.partitionColumnId ).name; List fieldNames = modify.getInput().getRowType().getFieldNames(); LogicalRelModify ltm = modify; @@ -515,7 +515,7 @@ else if ( identifiedPartitionForSetValue != -1 ) { } if ( log.isDebugEnabled() ) { - String partitionColumnName = snapshot.getRelSnapshot( catalogTable.namespaceId ).getColumn( catalogTable.partitionProperty.partitionColumnId ).name; + String partitionColumnName = snapshot.getRelSnapshot( catalogTable.namespaceId ).getColumn( property.partitionColumnId ).name; String partitionName = snapshot.getAllocSnapshot().getPartitionGroup( identPart ).partitionGroupName; log.debug( "INSERT: partitionColumn-value: '{}' should be put on partition: {} ({}), which is partitioned with column {}", partitionValue, identPart, partitionName, partitionColumnName ); @@ -533,11 +533,11 @@ else if ( identifiedPartitionForSetValue != -1 ) { if ( worstCaseRouting ) { log.debug( "PartitionColumnID was not an explicit part of statement, partition routing will therefore assume worst-case: Routing to ALL PARTITIONS" ); - accessedPartitionList = new HashSet<>( catalogTable.partitionProperty.partitionIds ); + accessedPartitionList = new HashSet<>( property.partitionIds ); } } else { // un-partitioned tables only have one partition anyway - identPart = catalogTable.partitionProperty.partitionIds.get( 0 ); + identPart = property.partitionIds.get( 0 ); accessedPartitionList.add( identPart ); } @@ -686,14 +686,14 @@ public AlgNode routeDocumentDml( LogicalDocumentModify alg, Statement statement, List modifies = new ArrayList<>(); - List placements = collection.placements; + List placements = snapshot.getAllocSnapshot().getCollectionPlacements( collection.id ).stream().map( p -> p.adapterId ).collect( Collectors.toList() ); if ( adapterId != null ) { placements = List.of( adapterId ); } for ( long placementId : placements ) { CatalogAdapter adapter = snapshot.getAdapter( placementId ); - CatalogCollectionPlacement placement = Catalog.getInstance().getAllocDoc( alg.entity.namespaceId ).getCollectionPlacement( collection.id, placementId ); + CatalogCollectionPlacement placement = snapshot.getAllocSnapshot().getCollectionPlacement( collection.id, placementId ); PhysicalCollection document = snapshot.getPhysicalSnapshot().getPhysicalCollection( placement.id ); if ( !adapter.supportedNamespaces.contains( 
NamespaceType.DOCUMENT ) ) { @@ -723,7 +723,13 @@ public AlgNode routeDocumentDml( LogicalDocumentModify alg, Statement statement, @Override public AlgNode routeGraphDml( LogicalLpgModify alg, Statement statement ) { LogicalGraph catalogGraph = alg.entity.unwrap( LogicalGraph.class ); - return routeGraphDml( alg, statement, catalogGraph, catalogGraph.placements ); + List placements = statement + .getTransaction() + .getSnapshot() + .getAllocSnapshot() + .getCollectionPlacements( catalogGraph.id ).stream().map( c -> c.adapterId ) + .collect( Collectors.toList() ); + return routeGraphDml( alg, statement, catalogGraph, placements ); } @@ -737,7 +743,7 @@ public AlgNode routeGraphDml( LogicalLpgModify alg, Statement statement, Logical for ( long adapterId : placements ) { CatalogAdapter adapter = snapshot.getAdapter( adapterId ); - CatalogGraphPlacement graphPlacement = Catalog.getInstance().getAllocGraph( alg.entity.namespaceId ).getGraphPlacement( catalogGraph.id, adapterId ); + CatalogGraphPlacement graphPlacement = snapshot.getAllocSnapshot().getGraphPlacement( catalogGraph.id, adapterId ); PhysicalGraph graph = snapshot.getPhysicalSnapshot().getPhysicalGraph( catalogGraph.id, adapterId ); if ( graph == null ) { @@ -1224,7 +1230,8 @@ private AlgBuilder buildDml( builder = super.handleValues( values, builder ); - if ( catalogTable.columns.size() == placements.size() ) { // full placement, no additional checks required + List columns = snapshot.getRelSnapshot( catalogTable.namespaceId ).getColumns( catalogTable.id ); + if ( columns.size() == placements.size() ) { // full placement, no additional checks required return builder; } else if ( node.getRowType().toString().equals( "RecordType(INTEGER ZERO)" ) ) { // This is a prepared statement. Actual values are in the project. 
Do nothing @@ -1237,15 +1244,17 @@ private AlgBuilder buildDml( return builder.project( rexNodes ); } } else if ( node instanceof LogicalProject ) { - if ( catalogTable.columns.size() == placements.size() ) { // full placement, generic handling is sufficient - if ( catalogTable.partitionProperty.isPartitioned && remapParameterValues ) { // && ((LogicalProject) node).getInput().getRowType().toString().equals( "RecordType(INTEGER ZERO)" ) + List columns = snapshot.getRelSnapshot( catalogTable.namespaceId ).getColumns( catalogTable.id ); + PartitionProperty property = snapshot.getAllocSnapshot().getPartitionProperty( catalogTable.id ); + if ( columns.size() == placements.size() ) { // full placement, generic handling is sufficient + if ( property.isPartitioned && remapParameterValues ) { // && ((LogicalProject) node).getInput().getRowType().toString().equals( "RecordType(INTEGER ZERO)" ) return remapParameterizedDml( node, builder, statement, parameterValues ); } else { return super.handleGeneric( node, builder ); } } else { // vertically partitioned, adjust project if ( ((LogicalProject) node).getInput().getRowType().toString().equals( "RecordType(INTEGER ZERO)" ) ) { - if ( catalogTable.partitionProperty.isPartitioned && remapParameterValues ) { + if ( property.isPartitioned && remapParameterValues ) { builder = remapParameterizedDml( node, builder, statement, parameterValues ); } builder.push( node.copy( node.getTraitSet(), ImmutableList.of( builder.peek( 0 ) ) ) ); @@ -1268,7 +1277,8 @@ private AlgBuilder buildDml( } } } else if ( node instanceof LogicalFilter ) { - if ( catalogTable.columns.size() != placements.size() ) { // partitioned, check if there is a illegal condition + List columns = snapshot.getRelSnapshot( catalogTable.namespaceId ).getColumns( catalogTable.id ); + if ( columns.size() != placements.size() ) { // partitioned, check if there is an illegal condition RexCall call = ((RexCall) ((LogicalFilter) node).getCondition()); for ( RexNode operand : call.operands ) { @@ -1300,7 +1310,9 @@ private AlgBuilder handleSelectFromOtherTable( RoutedAlgBuilder builder, Logical snapshot.getAllocSnapshot().getColumnPlacementsOnAdapterPerTable( pkPlacement.adapterId, fromTable.id ); - CatalogPartitionPlacement partition = snapshot.getAllocSnapshot().getPartitionPlacement( pkPlacement.adapterId, fromTable.partitionProperty.partitionIds.get( 0 ) ); + PartitionProperty property = snapshot.getAllocSnapshot().getPartitionProperty( fromTable.id ); + + CatalogPartitionPlacement partition = snapshot.getAllocSnapshot().getPartitionPlacement( pkPlacement.adapterId, property.partitionIds.get( 0 ) ); nodes.add( super.handleScan( builder, @@ -1333,7 +1345,7 @@ private void dmlConditionCheck( LogicalFilter node, LogicalTable catalogTable, L } columnName = columnNames[1]; } else if ( columnNames.length == 3 ) { // schemaName.tableName.columnName - if ( !catalogTable.getNamespaceName().equalsIgnoreCase( columnNames[0] ) ) { + if ( !snapshot.getNamespace( catalogTable.namespaceId ).name.equalsIgnoreCase( columnNames[0] ) ) { throw new RuntimeException( "Schema name does not match expected schema name: " + field.getName() ); } if ( !catalogTable.name.equalsIgnoreCase( columnNames[1] ) ) {
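The INSERT branch of routeDml above rebuilds the column name and id lists through columns.stream() once per field; hoisting both lists out of the loop keeps the same semantics while avoiding the repeated traversals, roughly as follows (sketch; columns, property, columnFieldName, resultColMapping and j are the locals from the hunks above):

    List<String> columnNames = columns.stream().map( c -> c.name ).collect( Collectors.toList() );
    List<Long> columnIds = columns.stream().map( c -> c.id ).collect( Collectors.toList() );

    // Map the field position in the INSERT statement to its column id.
    int columnIndex = columnNames.indexOf( columnFieldName );
    resultColMapping.put( columnIds.get( columnIndex ), j );
    if ( columnIds.get( columnIndex ) == property.partitionColumnId ) {
        partitionColumnIndex = columnIndex; // partition column found in the field list
    }

diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/FullPlacementQueryRouter.java b/dbms/src/main/java/org/polypheny/db/routing/routers/FullPlacementQueryRouter.java index f72e049e19..74b43e8223 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/FullPlacementQueryRouter.java +++ 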
b/dbms/src/main/java/org/polypheny/db/routing/routers/FullPlacementQueryRouter.java @@ -28,14 +28,15 @@ import lombok.extern.slf4j.Slf4j; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; +import org.polypheny.db.catalog.entity.logical.LogicalEntity; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.partition.PartitionManager; import org.polypheny.db.partition.PartitionManagerFactory; +import org.polypheny.db.partition.properties.PartitionProperty; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.routing.LogicalQueryInformation; import org.polypheny.db.routing.Router; import org.polypheny.db.routing.factories.RouterFactory; -import org.polypheny.db.schema.LogicalEntity; import org.polypheny.db.tools.RoutedAlgBuilder; import org.polypheny.db.transaction.Statement; @@ -108,7 +109,8 @@ protected List handleNonePartitioning( List newBuilders = new ArrayList<>(); for ( List placementCombination : placements ) { Map> currentPlacementDistribution = new HashMap<>(); - currentPlacementDistribution.put( catalogTable.partitionProperty.partitionIds.get( 0 ), placementCombination ); + PartitionProperty property = snapshot.getAllocSnapshot().getPartitionProperty( catalogTable.id ); + currentPlacementDistribution.put( property.partitionIds.get( 0 ), placementCombination ); for ( RoutedAlgBuilder builder : builders ) { RoutedAlgBuilder newBuilder = RoutedAlgBuilder.createCopy( statement, cluster, builder ); @@ -127,7 +129,8 @@ protected List handleNonePartitioning( protected Collection>> selectPlacementHorizontalPartitioning( AlgNode node, LogicalTable catalogTable, LogicalQueryInformation queryInformation ) { PartitionManagerFactory partitionManagerFactory = PartitionManagerFactory.getInstance(); - PartitionManager partitionManager = partitionManagerFactory.getPartitionManager( catalogTable.partitionProperty.partitionType ); + PartitionProperty property = snapshot.getAllocSnapshot().getPartitionProperty( catalogTable.id ); + PartitionManager partitionManager = partitionManagerFactory.getPartitionManager( property.partitionType ); // Utilize scanId to retrieve Partitions being accessed List partitionIds = queryInformation.getAccessedPartitions().get( node.getId() ); diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/IcarusRouter.java b/dbms/src/main/java/org/polypheny/db/routing/routers/IcarusRouter.java index 281141d3d0..6a583de798 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/IcarusRouter.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/IcarusRouter.java @@ -29,6 +29,7 @@ import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.partition.properties.PartitionProperty; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.routing.LogicalQueryInformation; import org.polypheny.db.schema.LogicalEntity; @@ -71,7 +72,8 @@ protected List handleNonePartitioning( AlgNode node, LogicalTa if ( builders.size() == 1 && builders.get( 0 ).getPhysicalPlacementsOfPartitions().isEmpty() ) { for ( List currentPlacement : placements ) { final Map> currentPlacementDistribution = new HashMap<>(); - currentPlacementDistribution.put( catalogTable.partitionProperty.partitionIds.get( 0 ), currentPlacement ); + PartitionProperty property = snapshot.getAllocSnapshot().getPartitionProperty( catalogTable.id ); + 
currentPlacementDistribution.put( property.partitionIds.get( 0 ), currentPlacement ); final RoutedAlgBuilder newBuilder = RoutedAlgBuilder.createCopy( statement, cluster, builders.get( 0 ) ); newBuilder.addPhysicalInfo( currentPlacementDistribution ); @@ -88,7 +90,8 @@ protected List handleNonePartitioning( AlgNode node, LogicalTa for ( List currentPlacement : placements ) { final Map> currentPlacementDistribution = new HashMap<>(); - currentPlacementDistribution.put( catalogTable.partitionProperty.partitionIds.get( 0 ), currentPlacement ); + PartitionProperty property = snapshot.getAllocSnapshot().getPartitionProperty( catalogTable.id ); + currentPlacementDistribution.put( property.partitionIds.get( 0 ), currentPlacement ); // AdapterId for all col placements same final long adapterId = currentPlacement.get( 0 ).adapterId; diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/SimpleRouter.java b/dbms/src/main/java/org/polypheny/db/routing/routers/SimpleRouter.java index 17b4d5377a..44133865d6 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/SimpleRouter.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/SimpleRouter.java @@ -23,14 +23,15 @@ import lombok.extern.slf4j.Slf4j; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; +import org.polypheny.db.catalog.entity.logical.LogicalEntity; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.partition.PartitionManager; import org.polypheny.db.partition.PartitionManagerFactory; +import org.polypheny.db.partition.properties.PartitionProperty; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.routing.LogicalQueryInformation; import org.polypheny.db.routing.Router; import org.polypheny.db.routing.factories.RouterFactory; -import org.polypheny.db.schema.LogicalEntity; import org.polypheny.db.tools.RoutedAlgBuilder; import org.polypheny.db.transaction.Statement; @@ -66,14 +67,15 @@ protected List handleNonePartitioning( AlgNode node, LogicalTa @Override protected List handleHorizontalPartitioning( AlgNode node, LogicalTable catalogTable, Statement statement, LogicalEntity logicalTable, List builders, AlgOptCluster cluster, LogicalQueryInformation queryInformation ) { PartitionManagerFactory partitionManagerFactory = PartitionManagerFactory.getInstance(); - PartitionManager partitionManager = partitionManagerFactory.getPartitionManager( catalogTable.partitionProperty.partitionType ); + PartitionProperty property = snapshot.getAllocSnapshot().getPartitionProperty( catalogTable.id ); + PartitionManager partitionManager = partitionManagerFactory.getPartitionManager( property.partitionType ); // Utilize scanId to retrieve Partitions being accessed List partitionIds = queryInformation.getAccessedPartitions().get( node.getId() ); Map> placementDistribution = partitionIds != null ? 
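                // Editorial annotation, not part of the original patch: this is the
                // recurring pattern of the routing changes above. If the query monitor
                // resolved which partitions the scan touches, only placements of those
                // partitions are considered; otherwise the router falls back to every
                // partition id in the PartitionProperty, which is now looked up via
                // snapshot.getAllocSnapshot().getPartitionProperty( catalogTable.id )
                // instead of being read off the LogicalTable itself.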
partitionManager.getRelevantPlacements( catalogTable, partitionIds, Collections.emptyList() ) - : partitionManager.getRelevantPlacements( catalogTable, catalogTable.partitionProperty.partitionIds, Collections.emptyList() ); + : partitionManager.getRelevantPlacements( catalogTable, property.partitionIds, Collections.emptyList() ); // Only one builder available builders.get( 0 ).addPhysicalInfo( placementDistribution ); diff --git a/dbms/src/main/java/org/polypheny/db/routing/strategies/CreateAllPlacementStrategy.java b/dbms/src/main/java/org/polypheny/db/routing/strategies/CreateAllPlacementStrategy.java index 4cc8eb6c29..2cf9d65135 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/strategies/CreateAllPlacementStrategy.java +++ b/dbms/src/main/java/org/polypheny/db/routing/strategies/CreateAllPlacementStrategy.java @@ -21,8 +21,10 @@ import org.polypheny.db.adapter.AdapterManager; import org.polypheny.db.adapter.DataStore; import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.entity.CatalogDataPlacement; import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.catalog.snapshot.Snapshot; /** @@ -32,9 +34,11 @@ public class CreateAllPlacementStrategy implements CreatePlacementStrategy { @Override public List getDataStoresForNewColumn( LogicalColumn addedColumn ) { - LogicalTable catalogTable = Catalog.getInstance().getSnapshot().getRelSnapshot( addedColumn.namespaceId ).getTable( addedColumn.tableId ); - return catalogTable.dataPlacements.stream() - .map( elem -> AdapterManager.getInstance().getStore( elem ) ) + Snapshot snapshot = Catalog.getInstance().getSnapshot(); + LogicalTable catalogTable = snapshot.getRelSnapshot( addedColumn.namespaceId ).getTable( addedColumn.tableId ); + List dataPlacements = snapshot.getAllocSnapshot().getDataPlacements( catalogTable.id ); + return dataPlacements.stream() + .map( elem -> AdapterManager.getInstance().getStore( elem.adapterId ) ) .collect( Collectors.toList() ); } diff --git a/dbms/src/main/java/org/polypheny/db/routing/strategies/CreateSinglePlacementStrategy.java b/dbms/src/main/java/org/polypheny/db/routing/strategies/CreateSinglePlacementStrategy.java index 6b00ed71b2..74120c7da9 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/strategies/CreateSinglePlacementStrategy.java +++ b/dbms/src/main/java/org/polypheny/db/routing/strategies/CreateSinglePlacementStrategy.java @@ -22,16 +22,20 @@ import org.polypheny.db.adapter.AdapterManager; import org.polypheny.db.adapter.DataStore; import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.entity.CatalogDataPlacement; import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.catalog.snapshot.Snapshot; public class CreateSinglePlacementStrategy implements CreatePlacementStrategy { @Override public List getDataStoresForNewColumn( LogicalColumn addedColumn ) { - LogicalTable catalogTable = Catalog.getInstance().getSnapshot().getRelSnapshot( addedColumn.namespaceId ).getTable( addedColumn.tableId ); - return ImmutableList.of( AdapterManager.getInstance().getStore( catalogTable.dataPlacements.get( 0 ) ) ); + Snapshot snapshot = Catalog.getInstance().getSnapshot(); + LogicalTable catalogTable = snapshot.getRelSnapshot( addedColumn.namespaceId ).getTable( addedColumn.tableId ); + List dataPlacement = snapshot.getAllocSnapshot().getDataPlacements( catalogTable.id ); + return 
ImmutableList.of( AdapterManager.getInstance().getStore( dataPlacement.get( 0 ).adapterId ) ); } diff --git a/dbms/src/main/java/org/polypheny/db/transaction/EntityAccessMap.java b/dbms/src/main/java/org/polypheny/db/transaction/EntityAccessMap.java index e224fce027..6ea0a16ddf 100644 --- a/dbms/src/main/java/org/polypheny/db/transaction/EntityAccessMap.java +++ b/dbms/src/main/java/org/polypheny/db/transaction/EntityAccessMap.java @@ -40,9 +40,9 @@ import org.polypheny.db.algebra.core.relational.RelModify; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogEntity; -import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.config.RuntimeConfig; +import org.polypheny.db.partition.properties.PartitionProperty; import org.polypheny.db.plan.AlgOptUtil; import org.polypheny.db.schema.LogicalEntity; import org.polypheny.db.transaction.EntityAccessMap.EntityIdentifier.NamespaceLevel; @@ -268,7 +268,8 @@ public void visit( AlgNode p, int ordinal, AlgNode parent ) { relevantPartitions = accessedPartitions.get( p.getId() ); } else if ( table != null ) { if ( table.namespaceType == NamespaceType.RELATIONAL ) { - relevantPartitions = table.unwrap( LogicalTable.class ).partitionProperty.partitionIds; + PartitionProperty property = Catalog.getInstance().getSnapshot().getAllocSnapshot().getPartitionProperty( table.id ); + relevantPartitions = property.partitionIds; } else { relevantPartitions = List.of(); } @@ -323,8 +324,8 @@ private void attachGraph( AlgNode p ) { private void extractWriteConstraints( LogicalEntity logicalTable ) { for ( long constraintTable : logicalTable.getConstraintIds() ) { - for ( long constraintPartitionIds - : Catalog.getInstance().getSnapshot().getRelSnapshot( logicalTable.namespaceId ).getTable( constraintTable ).partitionProperty.partitionIds ) { + PartitionProperty property = Catalog.getInstance().getSnapshot().getAllocSnapshot().getPartitionProperty( logicalTable.id ); + for ( long constraintPartitionIds : property.partitionIds ) { EntityIdentifier id = new EntityIdentifier( constraintTable, constraintPartitionIds, NamespaceLevel.ENTITY_LEVEL ); if ( !accessMap.containsKey( id ) ) { diff --git a/dbms/src/main/java/org/polypheny/db/view/MaterializedViewManagerImpl.java b/dbms/src/main/java/org/polypheny/db/view/MaterializedViewManagerImpl.java index 80ac556273..2831424cc7 100644 --- a/dbms/src/main/java/org/polypheny/db/view/MaterializedViewManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/view/MaterializedViewManagerImpl.java @@ -29,6 +29,7 @@ import java.util.Map.Entry; import java.util.Objects; import java.util.concurrent.ConcurrentHashMap; +import java.util.stream.Collectors; import lombok.Getter; import lombok.extern.slf4j.Slf4j; import org.polypheny.db.adapter.DataStore; @@ -43,6 +44,7 @@ import org.polypheny.db.algebra.logical.relational.LogicalRelViewScan; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; +import org.polypheny.db.catalog.entity.CatalogDataPlacement; import org.polypheny.db.catalog.entity.CatalogMaterializedView; import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.MaterializedCriteria; @@ -174,14 +176,14 @@ public synchronized void addMaterializedInfo( Long materializedId, MaterializedC * update candidates for materialized view with freshness updates * * @param transaction transaction of the commit - * @param tableNames table 
that was changed + * @param tableIds table that was changed */ @Override - public void addTables( Transaction transaction, List tableNames ) { - if ( tableNames.size() > 1 ) { + public void addTables( Transaction transaction, List tableIds ) { + if ( tableIds.size() > 1 ) { snapshot = Catalog.getInstance().getSnapshot(); - LogicalNamespace namespace = snapshot.getNamespace( tableNames.get( 0 ) ); - LogicalTable catalogTable = snapshot.getRelSnapshot( namespace.id ).getTable( tableNames.get( 1 ) ); + LogicalNamespace namespace = snapshot.getNamespace( tableIds.get( 0 ) ); + LogicalTable catalogTable = snapshot.getRelSnapshot( namespace.id ).getTable( tableIds.get( 1 ) ); long id = catalogTable.id; if ( !catalogTable.getConnectedViews().isEmpty() ) { updateCandidates.put( transaction.getXid(), id ); @@ -310,18 +312,18 @@ public void addData( Transaction transaction, List stores, Map columnPlacements = new LinkedList<>(); DataMigrator dataMigrator = transaction.getDataMigrator(); - - for ( long id : materializedView.dataPlacements ) { + List dataPlacements = transaction.getSnapshot().getAllocSnapshot().getDataPlacements( materializedView.id ); + for ( CatalogDataPlacement placement : dataPlacements ) { Statement sourceStatement = transaction.createStatement(); prepareSourceRel( sourceStatement, materializedView.getAlgCollation(), algRoot.alg ); Statement targetStatement = transaction.createStatement(); columnPlacements.clear(); - columns.get( id ).forEach( column -> columnPlacements.add( snapshot.getAllocSnapshot().getColumnPlacement( id, column.id ) ) ); + columns.get( placement.adapterId ).forEach( column -> columnPlacements.add( snapshot.getAllocSnapshot().getColumnPlacement( placement.adapterId, column.id ) ) ); // If partitions should be allowed for materialized views this needs to be changed that all partitions are considered - AlgRoot targetRel = dataMigrator.buildInsertStatement( targetStatement, columnPlacements, snapshot.getAllocSnapshot().getPartitionsOnDataPlacement( id, materializedView.id ).get( 0 ) ); + AlgRoot targetRel = dataMigrator.buildInsertStatement( targetStatement, columnPlacements, snapshot.getAllocSnapshot().getPartitionsOnDataPlacement( placement.adapterId, materializedView.id ).get( 0 ) ); - dataMigrator.executeQuery( columns.get( id ), algRoot, sourceStatement, targetStatement, targetRel, true, materializedView.isOrdered() ); + dataMigrator.executeQuery( columns.get( placement.adapterId ), algRoot, sourceStatement, targetStatement, targetRel, true, materializedView.isOrdered() ); } } @@ -334,7 +336,6 @@ public void addData( Transaction transaction, List stores, Map> columns = new HashMap<>(); List ids = new ArrayList<>(); - if ( catalog.getSnapshot().getLogicalEntity( materializedId ) != null && materializedInfo.containsKey( materializedId ) ) { - CatalogMaterializedView catalogMaterializedView = catalog.getSnapshot().getLogicalEntity( materializedId ).unwrap( CatalogMaterializedView.class ); - for ( long id : catalogMaterializedView.dataPlacements ) { - ids.add( id ); + if ( snapshot.getLogicalEntity( materializedId ) != null && materializedInfo.containsKey( materializedId ) ) { + CatalogMaterializedView catalogMaterializedView = snapshot.getLogicalEntity( materializedId ).unwrap( CatalogMaterializedView.class ); + List dataPlacements = snapshot.getAllocSnapshot().getDataPlacements( catalogMaterializedView.id ); + for ( CatalogDataPlacement placement : dataPlacements ) { + ids.add( placement.adapterId ); List logicalColumns = new ArrayList<>(); - int 
localAdapterIndex = catalogMaterializedView.dataPlacements.indexOf( id ); - snapshot.getAllocSnapshot().getDataPlacement( catalogMaterializedView.dataPlacements.get( localAdapterIndex ), catalogMaterializedView.id ) + int localAdapterIndex = dataPlacements.indexOf( placement ); + snapshot.getAllocSnapshot().getDataPlacement( dataPlacements.stream().map( p -> p.adapterId ).collect( Collectors.toList() ).get( localAdapterIndex ), catalogMaterializedView.id ) .columnPlacementsOnAdapter.forEach( col -> - logicalColumns.add( snapshot.getRelSnapshot( catalogMaterializedView.namespaceId ).getColumn( col ) ) - ); - columns.put( id, logicalColumns ); + logicalColumns.add( snapshot.getRelSnapshot( catalogMaterializedView.namespaceId ).getColumn( col ) ) ); + columns.put( placement.adapterId, logicalColumns ); } AlgRoot targetRel; diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocDocCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocDocCatalog.java index db267ca77a..5aca473b4c 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocDocCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocDocCatalog.java @@ -22,13 +22,21 @@ import org.polypheny.db.adapter.DataStore; import org.polypheny.db.catalog.Serializable; import org.polypheny.db.catalog.catalogs.AllocationDocumentCatalog; -import org.polypheny.db.catalog.entity.CatalogCollectionMapping; -import org.polypheny.db.catalog.entity.CatalogCollectionPlacement; +import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.exceptions.GenericCatalogException; import org.polypheny.db.catalog.logistic.PlacementType; public class PolyAllocDocCatalog implements Serializable, AllocationDocumentCatalog { + @Getter + public final LogicalNamespace namespace; + + + public PolyAllocDocCatalog( LogicalNamespace namespace ) { + this.namespace = namespace; + } + + @Getter public BinarySerializer serializer = Serializable.builder.get().build( PolyAllocDocCatalog.class ); @@ -39,40 +47,22 @@ public PolyAllocDocCatalog copy() { } - @Override public long addCollectionLogistics( long schemaId, String name, List stores, boolean onlyPlacement ) throws GenericCatalogException { return 0; } - @Override - public List getCollectionPlacementsByAdapter( long id ) { - return null; - } - - @Override public void addCollectionPlacement( long namespaceId, long adapterId, long id, PlacementType placementType ) { } - @Override - public CatalogCollectionMapping getCollectionMapping( long id ) { - return null; - } - - @Override public void dropCollectionPlacement( long id, long adapterId ) { } - @Override - public CatalogCollectionPlacement getCollectionPlacement( long id, long placementId ) { - return null; - } - } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocGraphCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocGraphCatalog.java index e51b7f576b..6fa2273b61 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocGraphCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocGraphCatalog.java @@ -17,27 +17,27 @@ package org.polypheny.db.catalog.allocation; import io.activej.serializer.BinarySerializer; -import java.util.List; import lombok.Getter; import org.polypheny.db.catalog.Serializable; import 
org.polypheny.db.catalog.catalogs.AllocationGraphCatalog; -import org.polypheny.db.catalog.entity.CatalogGraphPlacement; +import org.polypheny.db.catalog.entity.LogicalNamespace; public class PolyAllocGraphCatalog implements Serializable, AllocationGraphCatalog { + @Getter + private final LogicalNamespace namespace; @Getter public BinarySerializer serializer = Serializable.builder.get().build( PolyAllocGraphCatalog.class ); - @Override - public long addGraphPlacement( long adapterId, long graphId ) { - return 0; + public PolyAllocGraphCatalog( LogicalNamespace namespace ) { + this.namespace = namespace; } @Override - public List getGraphPlacements( long adapterId ) { - return null; + public long addGraphPlacement( long adapterId, long graphId ) { + return 0; } @@ -47,17 +47,10 @@ public void deleteGraphPlacement( long adapterId, long graphId ) { } - @Override - public CatalogGraphPlacement getGraphPlacement( long graphId, long adapterId ) { - return null; - } - - @Override public PolyAllocGraphCatalog copy() { return deserialize( serialize(), PolyAllocGraphCatalog.class ); } - } diff --git a/webui/src/main/java/org/polypheny/db/webui/Crud.java b/webui/src/main/java/org/polypheny/db/webui/Crud.java index 1febae3299..9199651b92 100644 --- a/webui/src/main/java/org/polypheny/db/webui/Crud.java +++ b/webui/src/main/java/org/polypheny/db/webui/Crud.java @@ -120,7 +120,6 @@ import org.polypheny.db.catalog.exceptions.GenericCatalogException; import org.polypheny.db.catalog.exceptions.UnknownColumnException; import org.polypheny.db.catalog.exceptions.UnknownPartitionTypeException; -import org.polypheny.db.catalog.exceptions.UnknownQueryInterfaceException; import org.polypheny.db.catalog.exceptions.UnknownSchemaException; import org.polypheny.db.catalog.exceptions.UnknownTableException; import org.polypheny.db.catalog.exceptions.UnknownUserException; @@ -2498,7 +2497,7 @@ void removeQueryInterface( final Context ctx ) { try { qim.removeQueryInterface( catalog, uniqueName ); ctx.json( new Result( 1 ).setGeneratedQuery( generatedQuery ) ); - } catch ( RuntimeException | UnknownQueryInterfaceException e ) { + } catch ( RuntimeException e ) { log.error( "Could not remove query interface {}", ctx.body(), e ); ctx.json( new Result( e ).setGeneratedQuery( generatedQuery ) ); } diff --git a/webui/src/main/java/org/polypheny/db/webui/models/Placement.java b/webui/src/main/java/org/polypheny/db/webui/models/Placement.java index c8721d9340..e9247fb452 100644 --- a/webui/src/main/java/org/polypheny/db/webui/models/Placement.java +++ b/webui/src/main/java/org/polypheny/db/webui/models/Placement.java @@ -20,12 +20,13 @@ import java.util.ArrayList; import java.util.List; import java.util.stream.Collectors; -import org.polypheny.db.catalog.logistic.EntityType; -import org.polypheny.db.catalog.logistic.PartitionType; -import org.polypheny.db.catalog.logistic.PlacementType; +import lombok.Value; import org.polypheny.db.catalog.entity.CatalogCollectionPlacement; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; import org.polypheny.db.catalog.entity.CatalogGraphPlacement; +import org.polypheny.db.catalog.logistic.EntityType; +import org.polypheny.db.catalog.logistic.PartitionType; +import org.polypheny.db.catalog.logistic.PlacementType; /** @@ -141,20 +142,21 @@ public static class DocumentStore extends Store { public DocumentStore( String uniqueName, String adapterName, List collectionPlacements, boolean isNative ) { super( uniqueName, adapterName ); - this.placements = 
collectionPlacements.stream().map( p -> new CollectionPlacement( p.collectionId, p.adapter ) ).collect( Collectors.toList() );
+        this.placements = collectionPlacements.stream().map( p -> new CollectionPlacement( p.collectionId, p.adapterId ) ).collect( Collectors.toList() );
         this.isNative = isNative;
     }
 
 }
 
 
+    @Value
     private static class CollectionPlacement {
 
-        private final long collectionId;
-        private final int adapterId;
+        long collectionId;
+        long adapterId;
 
 
-        public CollectionPlacement( long collectionId, int adapterId ) {
+        public CollectionPlacement( long collectionId, long adapterId ) {
            this.collectionId = collectionId;
             this.adapterId = adapterId;
         }
@@ -162,13 +164,14 @@ public CollectionPlacement( long collectionId, int adapterId ) {
     }
 
 
+    @Value
     private static class GraphPlacement {
 
-        private final long graphId;
-        private final int adapterId;
+        long graphId;
+        long adapterId;
 
 
-        public GraphPlacement( long graphId, int adapterId ) {
+        public GraphPlacement( long graphId, long adapterId ) {
             this.graphId = graphId;
             this.adapterId = adapterId;
         }

From 70d4b7080cd823f9fc7763ee63b0c7bff2ab8d21 Mon Sep 17 00:00:00 2001
From: datomo
Date: Sun, 12 Mar 2023 23:37:34 +0100
Subject: [PATCH 047/436] removed unnecessary parameters in catalog entities,
 fixed abstract class parsing, started fixing running

---
 .../algebra/rules/LoptSemiJoinOptimizer.java  |   4 +-
 .../catalogs/AllocationRelationalCatalog.java |  13 +-
 .../db/catalog/catalogs/PhysicalCatalog.java  |   9 +-
 .../db/catalog/entity/CatalogEntity.java      |  14 +-
 .../entity/CatalogMaterializedView.java       |   2 +-
 .../allocation/AllocationCollection.java      |   4 +-
 .../entity/allocation/AllocationEntity.java   |  14 +-
 .../entity/allocation/AllocationGraph.java    |   2 +-
 .../entity/allocation/AllocationTable.java    |  31 +++--
 .../entity/logical/LogicalCollection.java     |   2 -
 .../catalog/entity/logical/LogicalEntity.java |   2 +-
 .../catalog/entity/logical/LogicalGraph.java  |  10 +-
 .../catalog/entity/logical/LogicalTable.java  |  25 +++-
 .../entity/physical/PhysicalCollection.java   |   4 +-
 .../entity/physical/PhysicalEntity.java       |   4 +-
 .../entity/physical/PhysicalGraph.java        |   4 +-
 .../entity/physical/PhysicalTable.java        |   6 +-
 .../db/catalog/snapshot/AllocSnapshot.java    |  10 +-
 .../db/catalog/snapshot/PhysicalSnapshot.java |   5 +
 .../snapshot/impl/AllocSnapshotImpl.java      | 118 +++++++++++++----
 .../snapshot/impl/PhysicalSnapshotImpl.java   |  75 ++++++++++-
 .../org/polypheny/db/ddl/DdlManagerImpl.java  |  74 ++++++-----
 .../db/processing/DataMigratorImpl.java       |   2 +-
 .../db/routing/routers/AbstractDqlRouter.java |   2 +-
 .../db/routing/routers/BaseRouter.java        |  14 +-
 .../db/routing/routers/IcarusRouter.java      |   2 +-
 .../db/transaction/EntityAccessMap.java       |   5 +-
 lombok.config                                 |   1 +
 .../monitoring/statistics/StatisticTable.java |   2 +-
 .../java/org/polypheny/db/cql/Combiner.java   |  14 +-
 .../polypheny/db/cql/Cql2RelConverter.java    |   8 +-
 .../polypheny/db/adapter/csv/CsvSource.java   |   4 +-
 .../polypheny/db/adapter/csv/CsvTable.java    |  10 +-
 .../db/cypher/ddl/CypherAddPlacement.java     |   2 +-
 .../polypheny/db/adapter/jdbc/JdbcEntity.java |  14 +-
 .../db/languages/mql/MqlAddPlacement.java     |   2 +-
 .../db/languages/mql/MqlDeletePlacement.java  |   2 +-
 .../org/polypheny/db/catalog/PolyCatalog.java |  14 +-
 .../allocation/PolyAllocDocCatalog.java       |   5 +
 .../allocation/PolyAllocGraphCatalog.java     |   5 +
 .../allocation/PolyAllocRelCatalog.java       | 125 +++--------------
 .../db/catalog/logical/RelationalCatalog.java |   3 +-
 .../catalog/physical/PolyPhysicalCatalog.java |  43 +-----
.../polypheny/db/restapi/RequestParser.java | 4 +- .../polypheny/db/sql/SqlProcessorImpl.java | 4 +- .../altertable/SqlAlterTableAddColumn.java | 7 +- .../SqlAlterTableAddPartitions.java | 6 +- .../altertable/SqlAlterTableAddPlacement.java | 2 +- .../SqlAlterTableMergePartitions.java | 8 +- .../SqlAlterTableModifyPartitions.java | 8 +- .../SqlAlterTableModifyPlacement.java | 4 +- .../java/org/polypheny/db/webui/Crud.java | 8 +- 52 files changed, 427 insertions(+), 335 deletions(-) diff --git a/core/src/main/java/org/polypheny/db/algebra/rules/LoptSemiJoinOptimizer.java b/core/src/main/java/org/polypheny/db/algebra/rules/LoptSemiJoinOptimizer.java index c76b24774a..a9dd174313 100644 --- a/core/src/main/java/org/polypheny/db/algebra/rules/LoptSemiJoinOptimizer.java +++ b/core/src/main/java/org/polypheny/db/algebra/rules/LoptSemiJoinOptimizer.java @@ -52,6 +52,8 @@ import org.polypheny.db.algebra.metadata.AlgMetadataQuery; import org.polypheny.db.algebra.operators.OperatorName; import org.polypheny.db.catalog.entity.CatalogEntity; +import org.polypheny.db.catalog.logistic.EntityType; +import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.catalog.refactor.TranslatableEntity; import org.polypheny.db.languages.OperatorRegistry; import org.polypheny.db.plan.AlgOptCost; @@ -667,7 +669,7 @@ public int compare( Integer alg1Idx, Integer alg2Idx ) { private abstract static class LcsEntity extends CatalogEntity { protected LcsEntity() { - super( null ); + super( -1, "lcs", -1, EntityType.ENTITY, NamespaceType.RELATIONAL ); } } diff --git a/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationRelationalCatalog.java b/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationRelationalCatalog.java index 43de986298..22eb3e54f6 100644 --- a/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationRelationalCatalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationRelationalCatalog.java @@ -20,7 +20,6 @@ import java.util.Map; import org.polypheny.db.catalog.entity.CatalogDataPlacement; import org.polypheny.db.catalog.entity.allocation.AllocationTable; -import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.exceptions.GenericCatalogException; import org.polypheny.db.catalog.logistic.DataPlacementRole; import org.polypheny.db.catalog.logistic.PartitionType; @@ -33,8 +32,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { /** * Adds a placement for a column. 
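     * (Editorial annotation, not part of the original patch: per the updated
     * parameter list below, the placement is now keyed by the id of the
     * allocation entity on the target adapter instead of the logical table
     * plus an adapter id; callers resolve that allocation first, e.g. via
     * snapshot.getAllocSnapshot().getAllocation( adapterId, tableId ).)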
* - * @param table - * @param adapterId The adapter on which the table should be placed on + * @param allocationId * @param columnId The id of the column to be placed * @param placementType The type of placement * @param physicalSchemaName The schema name on the adapter @@ -42,16 +40,16 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param physicalColumnName The column name on the adapter * @param position */ - void addColumnPlacement( LogicalTable table, long adapterId, long columnId, PlacementType placementType, String physicalSchemaName, String physicalTableName, String physicalColumnName, int position ); + void addColumnPlacement( long allocationId, long columnId, PlacementType placementType, String physicalSchemaName, String physicalTableName, String physicalColumnName, int position ); /** * Deletes all dependent column placements * - * @param adapterId The id of the adapter + * @param allocationId The id of the adapter * @param columnId The id of the column * @param columnOnly columnOnly If delete originates from a dropColumn */ - void deleteColumnPlacement( long adapterId, long columnId, boolean columnOnly ); + void deleteColumnPlacement( long allocationId, long columnId, boolean columnOnly ); @@ -223,8 +221,9 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * * @param adapterId adapter where placement should be located * @param tableId table to retrieve the placement from + * @return */ - void addDataPlacement( long adapterId, long tableId ); + long addDataPlacement( long adapterId, long tableId ); /** * Adds a new DataPlacement for a given table on a specific store. diff --git a/core/src/main/java/org/polypheny/db/catalog/catalogs/PhysicalCatalog.java b/core/src/main/java/org/polypheny/db/catalog/catalogs/PhysicalCatalog.java index 276052521b..7754d07767 100644 --- a/core/src/main/java/org/polypheny/db/catalog/catalogs/PhysicalCatalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/catalogs/PhysicalCatalog.java @@ -16,18 +16,13 @@ package org.polypheny.db.catalog.catalogs; -import java.util.List; -import org.polypheny.db.catalog.entity.logical.LogicalEntity; import org.polypheny.db.catalog.entity.physical.PhysicalEntity; public interface PhysicalCatalog { - List getPhysicalsOnAdapter( long id ); - - PhysicalEntity getPhysicalEntity( long id ); - void addPhysicalEntity( PhysicalEntity physicalEntity ); -

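    // Editorial annotation, not part of the original patch: the id-based and
    // adapter-based lookups are removed from this interface; callers now fetch
    // the whole backing map via getPhysicals() and derive their own indexes,
    // as PhysicalSnapshotImpl does further down in this patch. A minimal
    // consumer sketch, assuming the map is keyed by physical entity id:
    //
    //   Map<Long, PhysicalEntity> all = physicalCatalog.getPhysicals();
    //   List<PhysicalEntity> onAdapter = all.values().stream()
    //           .filter( e -> e.adapterId == adapterId )
    //           .collect( Collectors.toList() );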
      PhysicalEntity getFromLogical( long id ); + + java.util.concurrent.ConcurrentHashMap getPhysicals(); } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogEntity.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogEntity.java index f4f3f17cfb..1193222e45 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogEntity.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogEntity.java @@ -19,9 +19,7 @@ import io.activej.serializer.annotations.Serialize; import java.io.Serializable; import java.util.List; -import lombok.AllArgsConstructor; -import lombok.NoArgsConstructor; -import lombok.Value; +import lombok.Getter; import lombok.experimental.NonFinal; import lombok.experimental.SuperBuilder; import org.polypheny.db.StatisticsManager; @@ -38,10 +36,11 @@ import org.polypheny.db.schema.Wrapper; import org.polypheny.db.util.ImmutableBitSet; +@Getter @SuperBuilder(toBuilder = true) -@Value @NonFinal public abstract class CatalogEntity implements CatalogObject, Wrapper, Serializable, CatalogType, Expressible { + @Serialize public long id; @Serialize @@ -54,7 +53,12 @@ public abstract class CatalogEntity implements CatalogObject, Wrapper, Serializa public long namespaceId; - public CatalogEntity( long id, String name, long namespaceId, EntityType type, NamespaceType namespaceType ) { + public CatalogEntity( + long id, + String name, + long namespaceId, + EntityType type, + NamespaceType namespaceType ) { this.id = id; this.namespaceId = namespaceId; this.name = name; diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogMaterializedView.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogMaterializedView.java index b9bf30f458..62b2245c82 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogMaterializedView.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogMaterializedView.java @@ -27,8 +27,8 @@ import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.languages.QueryLanguage; -@EqualsAndHashCode(callSuper = true) @SuperBuilder(toBuilder = true) +@EqualsAndHashCode(callSuper = true) @Value public class CatalogMaterializedView extends CatalogView { diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationCollection.java b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationCollection.java index 530b6441d2..1eca455527 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationCollection.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationCollection.java @@ -31,8 +31,8 @@ @NonFinal public class AllocationCollection extends AllocationEntity { - public AllocationCollection( LogicalCollection collection, long id, String name, long adapterId ) { - super( id, name, collection.id, collection.namespaceId, adapterId, NamespaceType.DOCUMENT ); + public AllocationCollection( LogicalCollection collection, long id, long adapterId ) { + super( id, collection.id, collection.namespaceId, adapterId, NamespaceType.DOCUMENT ); } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationEntity.java b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationEntity.java index c8d8f4ceb2..9dbc5d64bd 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationEntity.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationEntity.java @@ -16,10 +16,10 @@ package 
org.polypheny.db.catalog.entity.allocation; +import io.activej.serializer.annotations.Serialize; import lombok.EqualsAndHashCode; import lombok.Value; import lombok.experimental.NonFinal; -import lombok.experimental.SuperBuilder; import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.logistic.NamespaceType; @@ -27,15 +27,21 @@ @EqualsAndHashCode(callSuper = true) @Value @NonFinal -@SuperBuilder(toBuilder = true) public abstract class AllocationEntity extends CatalogEntity { + @Serialize public long adapterId; + @Serialize public long logicalId; - protected AllocationEntity( long id, String name, long logicalId, long namespaceId, long adapterId, NamespaceType type ) { - super( id, name, namespaceId, EntityType.ENTITY, type ); + protected AllocationEntity( + long id, + long logicalId, + long namespaceId, + long adapterId, + NamespaceType type ) { + super( id, null, namespaceId, EntityType.ENTITY, type ); this.adapterId = adapterId; this.logicalId = logicalId; } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationGraph.java b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationGraph.java index 580bea3ef7..2556e996d0 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationGraph.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationGraph.java @@ -37,7 +37,7 @@ public class AllocationGraph extends AllocationEntity { public AllocationGraph( long id, LogicalGraph graph, long adapterId ) { - super( id, graph.name, graph.id, graph.namespaceId, adapterId, NamespaceType.GRAPH ); + super( id, graph.id, graph.namespaceId, adapterId, NamespaceType.GRAPH ); this.id = id; this.logical = graph; } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationTable.java b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationTable.java index c11b69d7fa..2fcb319881 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationTable.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationTable.java @@ -22,12 +22,13 @@ import java.util.ArrayList; import java.util.List; import java.util.Map; +import java.util.Map.Entry; import java.util.stream.Collectors; import lombok.EqualsAndHashCode; import lombok.Value; -import lombok.experimental.SuperBuilder; import org.apache.calcite.linq4j.tree.Expression; import org.apache.calcite.linq4j.tree.Expressions; +import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; import org.polypheny.db.catalog.entity.logical.LogicalColumn; @@ -36,24 +37,19 @@ @EqualsAndHashCode(callSuper = true) @Value -@SuperBuilder(toBuilder = true) public class AllocationTable extends AllocationEntity { @Serialize public List placements; - @Serialize - public long adapterId; public AllocationTable( @Deserialize("id") long id, - @Deserialize("name") String name, @Deserialize("logicalId") long logicalId, @Deserialize("namespaceId") long namespaceId, @Deserialize("adapterId") long adapterId, @Deserialize("placements") List placements ) { - super( id, name, logicalId, namespaceId, adapterId, NamespaceType.RELATIONAL ); - this.adapterId = adapterId; + super( id, logicalId, namespaceId, adapterId, NamespaceType.RELATIONAL ); this.placements = placements; } @@ -71,18 +67,17 @@ public Expression asExpression() { public Map 
getColumnNames() { - return null; + return getColumns().values().stream().collect( Collectors.toMap( c -> c.id, c -> c.name ) ); } - @Deprecated public Map getColumns() { - return null; + return Catalog.getInstance().getSnapshot().getRelSnapshot( namespaceId ).getColumns( id ).stream().collect( Collectors.toMap( c -> c.id, c -> c ) ); } public String getNamespaceName() { - return null; + return Catalog.getInstance().getSnapshot().getNamespace( id ).name; } @@ -90,13 +85,23 @@ public AllocationTable withAddedColumn( long columnId, PlacementType placementTy List placements = new ArrayList<>( this.placements ); placements.add( new CatalogColumnPlacement( namespaceId, id, columnId, adapterId, placementType, physicalSchemaName, physicalColumnName, 0 ) ); - return toBuilder().placements( placements ).build(); + return new AllocationTable( id, logicalId, namespaceId, adapterId, placements ); } public AllocationTable withRemovedColumn( long columnId ) { List placements = new ArrayList<>( this.placements ); - return toBuilder().placements( placements.stream().filter( p -> p.columnId != columnId ).collect( Collectors.toList() ) ).build(); + return new AllocationTable( id, logicalId, namespaceId, adapterId, placements.stream().filter( p -> p.columnId != columnId ).collect( Collectors.toList() ) ); + } + + + public Map getColumnTypes() { + return null; + } + + + public Map getColumnNamesId() { + return getColumnNames().entrySet().stream().collect( Collectors.toMap( Entry::getValue, Entry::getKey ) ); } } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalCollection.java b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalCollection.java index 8ee5c89476..91c80025bd 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalCollection.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalCollection.java @@ -20,7 +20,6 @@ import lombok.EqualsAndHashCode; import lombok.Getter; import lombok.Value; -import lombok.experimental.SuperBuilder; import org.apache.calcite.linq4j.tree.Expression; import org.apache.calcite.linq4j.tree.Expressions; import org.polypheny.db.catalog.Catalog; @@ -30,7 +29,6 @@ @EqualsAndHashCode(callSuper = true) @Value -@SuperBuilder(toBuilder = true) public class LogicalCollection extends LogicalEntity implements CatalogObject { private static final long serialVersionUID = -6490762948368178584L; diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalEntity.java b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalEntity.java index 17ea29b78d..b572b2e440 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalEntity.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalEntity.java @@ -24,8 +24,8 @@ import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.logistic.NamespaceType; -@SuperBuilder(toBuilder = true) @EqualsAndHashCode(callSuper = true) +@SuperBuilder(toBuilder = true) @Value @NonFinal public abstract class LogicalEntity extends CatalogEntity { diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalGraph.java b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalGraph.java index 3686820354..13df4ee8e8 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalGraph.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalGraph.java @@ -18,16 +18,16 @@ import com.drew.lang.annotations.NotNull; import 
java.io.Serializable; +import java.util.List; import lombok.EqualsAndHashCode; import lombok.Value; -import lombok.experimental.SuperBuilder; import org.apache.calcite.linq4j.tree.Expression; import org.apache.calcite.linq4j.tree.Expressions; import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.entity.CatalogGraphPlacement; import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.logistic.NamespaceType; -@SuperBuilder(toBuilder = true) @EqualsAndHashCode(callSuper = true) @Value public class LogicalGraph extends LogicalEntity implements Comparable { @@ -67,10 +67,14 @@ public int compareTo( @NotNull LogicalGraph o ) { } - @Override public Expression asExpression() { return Expressions.call( Catalog.CATALOG_EXPRESSION, "getCollection", Expressions.constant( id ) ); } + + public List getPlacements() { + return Catalog.getInstance().getSnapshot().getAllocSnapshot().getGraphPlacements( id ); + } + } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalTable.java b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalTable.java index c83988eec6..1eca2295ec 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalTable.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalTable.java @@ -22,6 +22,7 @@ import io.activej.serializer.annotations.Serialize; import java.io.Serializable; import java.util.List; +import java.util.stream.Collectors; import lombok.EqualsAndHashCode; import lombok.Getter; import lombok.NonNull; @@ -39,8 +40,9 @@ import org.polypheny.db.schema.ColumnStrategy; @EqualsAndHashCode(callSuper = false) -@NonFinal @SuperBuilder(toBuilder = true) +@NonFinal +//@Value public class LogicalTable extends LogicalEntity implements Comparable { private static final long serialVersionUID = 4653390333258552102L; @@ -54,7 +56,7 @@ public class LogicalTable extends LogicalEntity implements Comparable connectedViews; + public ImmutableList connectedViews; public LogicalTable( @@ -76,7 +78,6 @@ public LogicalTable( } - // Used for creating ResultSets @Override public Serializable[] getParameterArray() { @@ -132,6 +133,24 @@ public List getColumnStrategies() { } + public List getColumns() { + return Catalog.getInstance().getSnapshot().getRelSnapshot( namespaceId ).getColumns( id ); + } + + + public List getColumnIds() { + return getColumns().stream().map( c -> c.id ).collect( Collectors.toList() ); + } + + + public List getColumnNames() { + return getColumns().stream().map( c -> c.name ).collect( Collectors.toList() ); + } + + + public String getNamespaceName() { + return Catalog.getInstance().getSnapshot().getNamespace( namespaceId ).name; + } @RequiredArgsConstructor diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalCollection.java b/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalCollection.java index bfd2812dde..2334e8df2d 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalCollection.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalCollection.java @@ -31,8 +31,8 @@ @NonFinal public class PhysicalCollection extends PhysicalEntity { - public PhysicalCollection( long id, long namespaceId, String name, String namespaceName, EntityType type, long adapterId ) { - super( id, name, namespaceId, namespaceName, type, NamespaceType.DOCUMENT, adapterId ); + public PhysicalCollection( long id, long logicalId, long namespaceId, String name, String namespaceName, 
EntityType type, long adapterId ) { + super( id, logicalId, name, namespaceId, namespaceName, type, NamespaceType.DOCUMENT, adapterId ); } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalEntity.java b/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalEntity.java index c09fa9f490..2c4201ae85 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalEntity.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalEntity.java @@ -30,12 +30,14 @@ public abstract class PhysicalEntity extends CatalogEntity { public String namespaceName; public long adapterId; + public long logicalId; - protected PhysicalEntity( long id, String name, long namespaceId, String namespaceName, EntityType type, NamespaceType namespaceType, long adapterId ) { + protected PhysicalEntity( long id, long logicalId, String name, long namespaceId, String namespaceName, EntityType type, NamespaceType namespaceType, long adapterId ) { super( id, name, namespaceId, type, namespaceType ); this.namespaceName = namespaceName; this.adapterId = adapterId; + this.logicalId = logicalId; } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalGraph.java b/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalGraph.java index 7caa9de796..6d9daee1ed 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalGraph.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalGraph.java @@ -31,8 +31,8 @@ @NonFinal public class PhysicalGraph extends PhysicalEntity { - public PhysicalGraph( long id, String name, EntityType type, long adapterId ) { - super( id, name, id, name, type, NamespaceType.GRAPH, adapterId ); // for graph both name and namespaceName are the same + public PhysicalGraph( long id, long logicalId, String name, EntityType type, long adapterId ) { + super( id, logicalId, name, id, name, type, NamespaceType.GRAPH, adapterId ); // for graph both name and namespaceName are the same } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalTable.java b/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalTable.java index 35a55003aa..621d7aaccb 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalTable.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalTable.java @@ -47,8 +47,8 @@ public class PhysicalTable extends PhysicalEntity { public ImmutableMap types; - public PhysicalTable( long id, String name, long namespaceId, String namespaceName, long adapterId, Map columns, Map types ) { - super( id, name, namespaceId, namespaceName, EntityType.ENTITY, NamespaceType.RELATIONAL, adapterId ); + public PhysicalTable( long id, long logicalId, String name, long namespaceId, String namespaceName, long adapterId, Map columns, Map types ) { + super( id, logicalId, name, namespaceId, namespaceName, EntityType.ENTITY, NamespaceType.RELATIONAL, adapterId ); this.namespaceName = namespaceName; this.columns = ImmutableMap.copyOf( columns ); this.types = ImmutableMap.copyOf( types ); @@ -56,7 +56,7 @@ public PhysicalTable( long id, String name, long namespaceId, String namespaceNa public PhysicalTable( AllocationTable table, String name, String namespaceName, Map columns, Map types ) { - this( table.id, name, table.namespaceId, namespaceName, table.adapterId, columns, types ); + this( table.id, table.logicalId, name, table.namespaceId, namespaceName, table.adapterId, columns, 
types ); } diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/AllocSnapshot.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/AllocSnapshot.java index ab2c979a38..2c8396bdeb 100644 --- a/core/src/main/java/org/polypheny/db/catalog/snapshot/AllocSnapshot.java +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/AllocSnapshot.java @@ -16,9 +16,8 @@ package org.polypheny.db.catalog.snapshot; -import com.google.common.collect.ImmutableList; -import com.google.common.collect.ImmutableMap; import java.util.List; +import java.util.Map; import org.polypheny.db.catalog.entity.CatalogAdapter; import org.polypheny.db.catalog.entity.CatalogCollectionMapping; import org.polypheny.db.catalog.entity.CatalogCollectionPlacement; @@ -29,7 +28,6 @@ import org.polypheny.db.catalog.entity.CatalogPartitionGroup; import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; import org.polypheny.db.catalog.entity.allocation.AllocationEntity; -import org.polypheny.db.catalog.entity.allocation.AllocationTable; import org.polypheny.db.catalog.logistic.DataPlacementRole; import org.polypheny.db.catalog.logistic.Pattern; import org.polypheny.db.partition.properties.PartitionProperty; @@ -107,7 +105,7 @@ public interface AllocSnapshot { * @param tableId The id of the table for the requested column placements * @return The requested collection */ - ImmutableMap> getColumnPlacementsByAdapter( long tableId ); + Map> getColumnPlacementsByAdapter( long tableId ); /** * Gets the partition group sorted by partition. @@ -373,7 +371,7 @@ public interface AllocSnapshot { boolean checkIfExistsPartitionPlacement( long adapterId, long partitionId ); - List getAllocationsFromLogical( long logicalId ); + List getAllocationsFromLogical( long logicalId ); boolean isPartitioned( long id ); @@ -409,4 +407,6 @@ public interface AllocSnapshot { boolean adapterHasPlacement( long adapterId, long id ); + AllocationEntity getAllocation( long adapterId, long entityId ); + } diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/PhysicalSnapshot.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/PhysicalSnapshot.java index fed2589603..3beac4d673 100644 --- a/core/src/main/java/org/polypheny/db/catalog/snapshot/PhysicalSnapshot.java +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/PhysicalSnapshot.java @@ -41,4 +41,9 @@ public interface PhysicalSnapshot { List getPhysicalsOnAdapter( long adapterId ); + + PhysicalEntity getPhysicalEntity( long id ); + + List fromLogical( long id ); + } diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/AllocSnapshotImpl.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/AllocSnapshotImpl.java index e97a017969..2d44a627a4 100644 --- a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/AllocSnapshotImpl.java +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/AllocSnapshotImpl.java @@ -16,13 +16,13 @@ package org.polypheny.db.catalog.snapshot.impl; -import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.stream.Collectors; +import lombok.Value; import org.polypheny.db.catalog.catalogs.AllocationCatalog; import org.polypheny.db.catalog.catalogs.AllocationDocumentCatalog; import org.polypheny.db.catalog.catalogs.AllocationGraphCatalog; @@ -47,18 +47,24 @@ import org.polypheny.db.partition.properties.PartitionProperty; import 
org.polypheny.db.util.Pair; +@Value public class AllocSnapshotImpl implements AllocSnapshot { - private final ImmutableMap tables; - private final ImmutableMap collections; - private final ImmutableMap graphs; + ImmutableMap tables; + ImmutableMap collections; + ImmutableMap graphs; - private final ImmutableMap, CatalogColumnPlacement> adapterColumnPlacement; + ImmutableMap, CatalogColumnPlacement> adapterColumnPlacement; - private final ImmutableMap allocs; - private final ImmutableMap> allocsOnAdapters; - private final ImmutableMap> columPlacements; - private final ImmutableMap, List> adapterLogicalTablePlacements; + ImmutableMap allocs; + ImmutableMap> allocsOnAdapters; + ImmutableMap> columPlacements; + + ImmutableMap> tablePlacements; + ImmutableMap, List> adapterLogicalTablePlacements; + ImmutableMap, AllocationEntity> adapterLogicalTableAlloc; + ImmutableMap> logicalAllocs; + ImmutableMap>> tableAdapterColumns; public AllocSnapshotImpl( Map allocationCatalogs ) { @@ -86,19 +92,69 @@ public AllocSnapshotImpl( Map allocationCatalogs ) { this.adapterColumnPlacement = buildAdapterColumnPlacement(); this.columPlacements = buildColumnPlacements(); this.adapterLogicalTablePlacements = buildAdapterLogicalTablePlacements(); + this.adapterLogicalTableAlloc = buildAdapterLogicalTableAlloc(); + this.tablePlacements = buildTablePlacements(); + this.logicalAllocs = buildLogicalAllocs(); + this.tableAdapterColumns = buildTableAdapterColumns(); + } + + + private ImmutableMap>> buildTableAdapterColumns() { + Map>> map = new HashMap<>(); + this.tables.forEach( ( k, v ) -> v.placements.forEach( p -> { + if ( !map.containsKey( v.logicalId ) ) { + map.put( v.logicalId, new HashMap<>() ); + } + if ( !map.get( v.logicalId ).containsKey( v.adapterId ) ) { + map.get( v.logicalId ).put( v.logicalId, new ArrayList<>() ); + } + map.get( v.logicalId ).get( v.logicalId ).add( p.columnId ); + } ) ); + + return ImmutableMap.copyOf( map ); + } + + + private ImmutableMap> buildLogicalAllocs() { + Map> map = new HashMap<>(); + allocs.forEach( ( k, v ) -> { + if ( !map.containsKey( v.logicalId ) ) { + map.put( v.logicalId, new ArrayList<>() ); + } + map.get( v.logicalId ).add( v ); + } ); + return ImmutableMap.copyOf( map ); + } + + + private ImmutableMap> buildTablePlacements() { + Map> map = new HashMap<>(); + this.tables.forEach( ( k, v ) -> v.placements.forEach( p -> { + if ( !map.containsKey( v.id ) ) { + map.put( v.id, new ArrayList<>() ); + } + map.get( v.id ).add( p ); + } ) ); + + return ImmutableMap.copyOf( map ); + } + + + private ImmutableMap, AllocationEntity> buildAdapterLogicalTableAlloc() { + Map, AllocationEntity> map = new HashMap<>(); + this.allocs.forEach( ( k, v ) -> map.put( Pair.of( v.adapterId, v.logicalId ), v ) ); + return ImmutableMap.copyOf( map ); } private ImmutableMap, List> buildAdapterLogicalTablePlacements() { Map, List> map = new HashMap<>(); - this.tables.forEach( ( k, v ) -> { - v.placements.forEach( p -> { - if ( !map.containsKey( Pair.of( p.adapterId, p.tableId ) ) ) { - map.put( Pair.of( p.adapterId, p.tableId ), new ArrayList<>() ); - } - map.get( Pair.of( p.adapterId, p.tableId ) ).add( p ); - } ); - } ); + this.tables.forEach( ( k, v ) -> v.placements.forEach( p -> { + if ( !map.containsKey( Pair.of( p.adapterId, p.tableId ) ) ) { + map.put( Pair.of( p.adapterId, p.tableId ), new ArrayList<>() ); + } + map.get( Pair.of( p.adapterId, p.tableId ) ).add( p ); + } ) ); return ImmutableMap.copyOf( map ); } @@ -106,14 +162,12 @@ private ImmutableMap, List> buildAdapte private 
ImmutableMap> buildColumnPlacements() { Map> map = new HashMap<>(); - this.tables.forEach( ( k, v ) -> { - v.placements.forEach( p -> { - if ( !map.containsKey( p.columnId ) ) { - map.put( p.columnId, new ArrayList<>() ); - } - map.get( p.columnId ).add( p ); - } ); - } ); + this.tables.forEach( ( k, v ) -> v.placements.forEach( p -> { + if ( !map.containsKey( p.columnId ) ) { + map.put( p.columnId, new ArrayList<>() ); + } + map.get( p.columnId ).add( p ); + } ) ); return ImmutableMap.copyOf( map ); } @@ -222,8 +276,8 @@ public List getColumnPlacementsByColumn( long columnId ) @Override - public ImmutableMap> getColumnPlacementsByAdapter( long tableId ) { - return null; + public Map> getColumnPlacementsByAdapter( long tableId ) { + return tableAdapterColumns.get( tableId ); } @@ -414,8 +468,8 @@ public boolean checkIfExistsPartitionPlacement( long adapterId, long partitionId @Override - public List getAllocationsFromLogical( long logicalId ) { - return null; + public List getAllocationsFromLogical( long logicalId ) { + return logicalAllocs.get( logicalId ); } @@ -472,4 +526,10 @@ public boolean adapterHasPlacement( long adapterId, long id ) { return false; } + + @Override + public AllocationEntity getAllocation( long adapterId, long entityId ) { + return adapterLogicalTableAlloc.get( Pair.of( adapterId, entityId ) ); + } + } diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/PhysicalSnapshotImpl.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/PhysicalSnapshotImpl.java index 4fc24c7278..a84da1c47d 100644 --- a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/PhysicalSnapshotImpl.java +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/PhysicalSnapshotImpl.java @@ -16,59 +16,122 @@ package org.polypheny.db.catalog.snapshot.impl; +import com.google.common.collect.ImmutableMap; +import java.util.ArrayList; +import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Map.Entry; +import java.util.stream.Collectors; +import lombok.Value; import org.polypheny.db.catalog.catalogs.PhysicalCatalog; import org.polypheny.db.catalog.entity.physical.PhysicalCollection; import org.polypheny.db.catalog.entity.physical.PhysicalEntity; import org.polypheny.db.catalog.entity.physical.PhysicalGraph; import org.polypheny.db.catalog.entity.physical.PhysicalTable; import org.polypheny.db.catalog.snapshot.PhysicalSnapshot; +import org.polypheny.db.util.Pair; +@Value public class PhysicalSnapshotImpl implements PhysicalSnapshot { + ImmutableMap entities; + + ImmutableMap, PhysicalEntity> adapterLogicalEntity; + ImmutableMap> adapterPhysicals; + + ImmutableMap> logicalToPhysicals; + + public PhysicalSnapshotImpl( Map physicalCatalogs ) { + this.entities = ImmutableMap.copyOf( physicalCatalogs.values().stream().flatMap( c -> c.getPhysicals().entrySet().stream() ).collect( Collectors.toMap( Entry::getKey, Entry::getValue ) ) ); + this.adapterLogicalEntity = buildAdapterLogicalEntity(); + this.adapterPhysicals = buildAdapterPhysicals(); + this.logicalToPhysicals = buildLogicalToPhysicals(); + } + + + private ImmutableMap> buildLogicalToPhysicals() { + Map> map = new HashMap<>(); + this.entities.forEach( ( k, v ) -> { + if ( !map.containsKey( v.logicalId ) ) { + map.put( v.logicalId, new ArrayList<>() ); + } + map.get( v.logicalId ).add( v ); + } ); + + return ImmutableMap.copyOf( map ); + } + + + private ImmutableMap> buildAdapterPhysicals() { + Map> map = new HashMap<>(); + this.entities.forEach( ( k, v ) -> { + if ( 
!map.containsKey( v.adapterId ) ) { + map.put( v.adapterId, new ArrayList<>() ); + } + map.get( v.adapterId ).add( v ); + } ); + return ImmutableMap.copyOf( map ); + } + + + private ImmutableMap, PhysicalEntity> buildAdapterLogicalEntity() { + Map, PhysicalEntity> map = new HashMap<>(); + this.entities.forEach( ( k, v ) -> map.put( Pair.of( v.adapterId, v.logicalId ), v ) ); + return ImmutableMap.copyOf( map ); } @Override public PhysicalTable getPhysicalTable( long id ) { - return null; + return entities.get( id ).unwrap( PhysicalTable.class ); } @Override public PhysicalTable getPhysicalTable( long logicalId, long adapterId ) { - return null; + return adapterLogicalEntity.get( Pair.of( adapterId, logicalId ) ).unwrap( PhysicalTable.class ); } @Override public PhysicalCollection getPhysicalCollection( long id ) { - return null; + return entities.get( id ).unwrap( PhysicalCollection.class ); } @Override public PhysicalCollection getPhysicalCollection( long logicalId, long adapterId ) { - return null; + return adapterLogicalEntity.get( Pair.of( adapterId, logicalId ) ).unwrap( PhysicalCollection.class ); } @Override public PhysicalGraph getPhysicalGraph( long id ) { - return null; + return entities.get( id ).unwrap( PhysicalGraph.class ); } @Override public PhysicalGraph getPhysicalGraph( long logicalId, long adapterId ) { - return null; + return adapterLogicalEntity.get( Pair.of( adapterId, logicalId ) ).unwrap( PhysicalGraph.class ); } @Override public List getPhysicalsOnAdapter( long adapterId ) { + return adapterPhysicals.get( adapterId ); + } + + + @Override + public PhysicalEntity getPhysicalEntity( long id ) { + return entities.get( id ); + } + + + public List fromLogical( long id ) { return null; } diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java index fb2b69c08d..c178e49365 100644 --- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java @@ -67,6 +67,7 @@ import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.MaterializedCriteria; import org.polypheny.db.catalog.entity.MaterializedCriteria.CriteriaType; +import org.polypheny.db.catalog.entity.allocation.AllocationEntity; import org.polypheny.db.catalog.entity.allocation.AllocationTable; import org.polypheny.db.catalog.entity.logical.LogicalCollection; import org.polypheny.db.catalog.entity.logical.LogicalColumn; @@ -268,8 +269,9 @@ private void handleSource( DataSource adapter ) { exportedColumn.cardinality, exportedColumn.nullable, Collation.getDefaultCollation() ); - catalog.getAllocRel( defaultNamespaceId ).addColumnPlacement( catalog.getSnapshot().getRelSnapshot( defaultNamespaceId ).getTable( tableId ), - adapter.getAdapterId(), + AllocationEntity allocation = catalog.getSnapshot().getAllocSnapshot().getAllocation( adapter.getAdapterId(), tableId ); + catalog.getAllocRel( defaultNamespaceId ).addColumnPlacement( + allocation.id, columnId, PlacementType.STATIC, exportedColumn.physicalSchemaName, @@ -371,11 +373,11 @@ public void dropAdapter( String name, Statement statement ) throws UnknownAdapte if ( table.entityType != EntityType.SOURCE ) { throw new RuntimeException( "Trying to drop a table located on a data source which is not of table type SOURCE. This should not happen!" 
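// Review note: fromLogical( long id ) in PhysicalSnapshotImpl above still returns null even
// though the logicalToPhysicals index is now built in the constructor; presumably it is meant
// to read (sketch, assuming the index exists to back this accessor):
//
//     public List<PhysicalEntity> fromLogical( long id ) {
//         return logicalToPhysicals.get( id );
//     }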
); } - + AllocationEntity entity = catalog.getSnapshot().getAllocSnapshot().getAllocation( catalogAdapter.id, tableId ); // Delete column placement in catalog for ( LogicalColumn column : catalog.getSnapshot().getRelSnapshot( defaultNamespaceId ).getColumns( tableId ) ) { if ( catalog.getSnapshot().getAllocSnapshot().checkIfExistsColumnPlacement( catalogAdapter.id, column.id ) ) { - catalog.getAllocRel( defaultNamespaceId ).deleteColumnPlacement( catalogAdapter.id, column.id, false ); + catalog.getAllocRel( defaultNamespaceId ).deleteColumnPlacement( entity.id, column.id, false ); } } @@ -480,9 +482,10 @@ public void addColumnToSourceTable( LogicalTable catalogTable, String columnPhys addDefaultValue( catalogTable.namespaceId, defaultValue, columnId ); LogicalColumn addedColumn = catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getColumn( columnId ); + AllocationEntity allocation = catalog.getSnapshot().getAllocSnapshot().getAllocation( adapterId, catalogTable.id ); // Add column placement - catalog.getAllocRel( catalogTable.namespaceId ).addColumnPlacement( catalogTable, - adapterId, + catalog.getAllocRel( catalogTable.namespaceId ).addColumnPlacement( + allocation.id, addedColumn.id, PlacementType.STATIC, exportedColumn.physicalSchemaName, @@ -554,8 +557,9 @@ public void addColumn( String columnName, LogicalTable catalogTable, String befo // Add column on underlying data stores and insert default value for ( DataStore store : stores ) { - catalog.getAllocRel( catalogTable.namespaceId ).addColumnPlacement( catalogTable, - store.getAdapterId(), + AllocationEntity allocation = catalog.getSnapshot().getAllocSnapshot().getAllocation( store.getAdapterId(), catalogTable.id ); + catalog.getAllocRel( catalogTable.namespaceId ).addColumnPlacement( + allocation.id, addedColumn.id, // Will be set later PlacementType.AUTOMATIC, // Will be set later null, // Will be set later @@ -854,10 +858,11 @@ else if ( partitionGroupIds.isEmpty() && partitionGroupNames.isEmpty() ) { // Gather all partitions relevant to add depending on the specified partitionGroup tempPartitionGroupList.forEach( pg -> catalog.getSnapshot().getAllocSnapshot().getPartitions( pg ).forEach( p -> partitionIds.add( p.id ) ) ); + AllocationEntity allocation = catalog.getSnapshot().getAllocSnapshot().getAllocation( dataStore.getAdapterId(), catalogTable.id ); // Create column placements for ( long cid : columnIds ) { - catalog.getAllocRel( catalogTable.namespaceId ).addColumnPlacement( catalogTable, - dataStore.getAdapterId(), + catalog.getAllocRel( catalogTable.namespaceId ).addColumnPlacement( + allocation.id, cid, PlacementType.MANUAL, null, @@ -868,8 +873,8 @@ else if ( partitionGroupIds.isEmpty() && partitionGroupNames.isEmpty() ) { CatalogPrimaryKey primaryKey = catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getPrimaryKey( catalogTable.primaryKey ); for ( long cid : primaryKey.columnIds ) { if ( !columnIds.contains( cid ) ) { - catalog.getAllocRel( catalogTable.namespaceId ).addColumnPlacement( catalogTable, - dataStore.getAdapterId(), + catalog.getAllocRel( catalogTable.namespaceId ).addColumnPlacement( + allocation.id, cid, PlacementType.AUTOMATIC, null, @@ -924,8 +929,9 @@ public void addPrimaryKey( LogicalTable catalogTable, List columnNames, for ( CatalogColumnPlacement ccp : oldPkPlacements ) { for ( long columnId : columnIds ) { if ( !catalog.getSnapshot().getAllocSnapshot().checkIfExistsColumnPlacement( ccp.adapterId, columnId ) ) { - catalog.getAllocRel( catalogTable.namespaceId 
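// Review note: the "resolve the AllocationEntity for (adapterId, tableId), then pass
// allocation.id" sequence now repeats in nearly every placement-related method of
// DdlManagerImpl. A small private helper would keep the lookups uniform (sketch; the helper
// name is illustrative, not part of the patch):
//
//     private AllocationEntity alloc( long adapterId, long tableId ) {
//         return catalog.getSnapshot().getAllocSnapshot().getAllocation( adapterId, tableId );
//     }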
).addColumnPlacement( catalogTable, - ccp.adapterId, + AllocationEntity allocation = catalog.getSnapshot().getAllocSnapshot().getAllocation( ccp.adapterId, catalogTable.id ); + catalog.getAllocRel( catalogTable.namespaceId ).addColumnPlacement( + allocation.id, columnId, // Will be set later PlacementType.AUTOMATIC, // Will be set later null, // Will be set later @@ -1001,12 +1007,13 @@ public void dropColumn( LogicalTable catalogTable, String columnName, Statement } catalog.getAllocRel( catalogTable.namespaceId ).deleteColumnPlacement( dp.adapterId, dp.columnId, true ); }*/ - for ( AllocationTable table : catalog.getSnapshot().getAllocSnapshot().getAllocationsFromLogical( catalogTable.id ) ) { - for ( CatalogColumnPlacement placement : table.placements ) { + for ( AllocationEntity table : catalog.getSnapshot().getAllocSnapshot().getAllocationsFromLogical( catalogTable.id ) ) { + for ( CatalogColumnPlacement placement : table.unwrap( AllocationTable.class ).placements ) { if ( catalogTable.entityType == EntityType.ENTITY ) { AdapterManager.getInstance().getStore( table.adapterId ).dropColumn( statement.getPrepareContext(), placement ); } - catalog.getAllocRel( catalogTable.namespaceId ).deleteColumnPlacement( placement.adapterId, placement.columnId, true ); + AllocationEntity allocation = catalog.getSnapshot().getAllocSnapshot().getAllocation( placement.adapterId, catalogTable.id ); + catalog.getAllocRel( catalogTable.namespaceId ).deleteColumnPlacement( allocation.id, placement.columnId, true ); } } @@ -1028,14 +1035,14 @@ public void dropColumn( LogicalTable catalogTable, String columnName, Statement private void checkModelLogic( LogicalTable catalogTable ) { - if ( catalogTable.getNamespaceType() == NamespaceType.DOCUMENT ) { + if ( catalogTable.namespaceType == NamespaceType.DOCUMENT ) { throw new RuntimeException( "Modification operation is not allowed by schema type DOCUMENT" ); } } private void checkModelLogic( LogicalTable catalogTable, String columnName ) { - if ( catalogTable.getNamespaceType() == NamespaceType.DOCUMENT + if ( catalogTable.namespaceType == NamespaceType.DOCUMENT && (columnName.equals( "_data" ) || columnName.equals( "_id" )) ) { throw new RuntimeException( "Modification operation is not allowed by schema type DOCUMENT" ); } @@ -1344,7 +1351,8 @@ public void modifyDataPlacement( LogicalTable catalogTable, List columnIds // Drop Column on store storeInstance.dropColumn( statement.getPrepareContext(), catalog.getSnapshot().getAllocSnapshot().getColumnPlacement( storeInstance.getAdapterId(), columnId ) ); // Drop column placement - catalog.getAllocRel( catalogTable.namespaceId ).deleteColumnPlacement( storeInstance.getAdapterId(), columnId, true ); + AllocationEntity allocation = catalog.getSnapshot().getAllocSnapshot().getAllocation( storeInstance.getAdapterId(), catalogTable.id ); + catalog.getAllocRel( catalogTable.namespaceId ).deleteColumnPlacement( allocation.id, columnId, true ); } List tempPartitionGroupList = new ArrayList<>(); @@ -1412,9 +1420,10 @@ else if ( !partitionGroupNames.isEmpty() && partitionGroupIds.isEmpty() ) { catalog.getAllocRel( catalogTable.namespaceId ).updateColumnPlacementType( storeInstance.getAdapterId(), cid, PlacementType.MANUAL ); } } else { + AllocationEntity allocation = catalog.getSnapshot().getAllocSnapshot().getAllocation( storeInstance.getAdapterId(), catalogTable.id ); // Create column placement - catalog.getAllocRel( catalogTable.namespaceId ).addColumnPlacement( catalogTable, - storeInstance.getAdapterId(), + 
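// Review note: in the dropColumn loop above, the allocation resolved via
// getAllocation( placement.adapterId, catalogTable.id ) is presumably the same entity as the
// `table` already being iterated, so the extra snapshot lookup could be dropped (sketch):
//
//     catalog.getAllocRel( catalogTable.namespaceId ).deleteColumnPlacement( table.id, placement.columnId, true );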
catalog.getAllocRel( catalogTable.namespaceId ).addColumnPlacement( + allocation.id, cid, PlacementType.MANUAL, null, @@ -1565,9 +1574,10 @@ public void addColumnPlacement( LogicalTable catalogTable, String columnName, Da throw new PlacementAlreadyExistsException(); } } else { + AllocationEntity allocation = catalog.getSnapshot().getAllocSnapshot().getAllocation( storeInstance.getAdapterId(), catalogTable.id ); // Create column placement - catalog.getAllocRel( catalogTable.namespaceId ).addColumnPlacement( catalogTable, - storeInstance.getAdapterId(), + catalog.getAllocRel( catalogTable.namespaceId ).addColumnPlacement( + allocation.id, logicalColumn.id, PlacementType.MANUAL, null, @@ -1621,8 +1631,9 @@ public void dropColumnPlacement( LogicalTable catalogTable, String columnName, D } // Drop Column on store storeInstance.dropColumn( statement.getPrepareContext(), catalog.getSnapshot().getAllocSnapshot().getColumnPlacement( storeInstance.getAdapterId(), logicalColumn.id ) ); + AllocationEntity allocation = catalog.getSnapshot().getAllocSnapshot().getAllocation( storeInstance.getAdapterId(), catalogTable.id ); // Drop column placement - catalog.getAllocRel( catalogTable.namespaceId ).deleteColumnPlacement( storeInstance.getAdapterId(), logicalColumn.id, false ); + catalog.getAllocRel( catalogTable.namespaceId ).deleteColumnPlacement( allocation.id, logicalColumn.id, false ); // Reset query plan cache, implementation cache & routing cache statement.getQueryProcessor().resetCaches(); @@ -1816,8 +1827,9 @@ public void createMaterializedView( String viewName, long namespaceId, AlgRoot a for ( DataStore s : stores ) { long adapterId = s.getAdapterId(); - catalog.getAllocRel( namespaceId ).addColumnPlacement( catalog.getSnapshot().getRelSnapshot( namespaceId ).getTable( tableId ), - s.getAdapterId(), + AllocationEntity allocation = catalog.getSnapshot().getAllocSnapshot().getAllocation( adapterId, tableId ); + catalog.getAllocRel( namespaceId ).addColumnPlacement( + allocation.id, columnId, placementType, null, @@ -2863,8 +2875,9 @@ private void addColumn( long namespaceId, String columnName, ColumnTypeInformati addDefaultValue( namespaceId, defaultValue, addedColumnId ); for ( DataStore s : stores ) { - catalog.getAllocRel( namespaceId ).addColumnPlacement( catalog.getSnapshot().getRelSnapshot( namespaceId ).getTable( tableId ), - s.getAdapterId(), + AllocationEntity allocation = catalog.getSnapshot().getAllocSnapshot().getAllocation( s.getAdapterId(), tableId ); + catalog.getAllocRel( namespaceId ).addColumnPlacement( + allocation.id, addedColumnId, placementType, null, @@ -3034,7 +3047,8 @@ public void dropTable( LogicalTable catalogTable, Statement statement ) throws D columns = snapshot.getRelSnapshot( catalogTable.namespaceId ).getColumns( catalogTable.id ); for ( LogicalColumn column : columns ) { if ( catalog.getSnapshot().getAllocSnapshot().checkIfExistsColumnPlacement( placement.adapterId, column.id ) ) { - catalog.getAllocRel( catalogTable.namespaceId ).deleteColumnPlacement( placement.adapterId, column.id, false ); + AllocationEntity allocation = catalog.getSnapshot().getAllocSnapshot().getAllocation( placement.getAdapterId(), catalogTable.id ); + catalog.getAllocRel( catalogTable.namespaceId ).deleteColumnPlacement( allocation.id, column.id, false ); } } } diff --git a/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java b/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java index fe9a2b5415..f31f806d9c 100644 --- 
a/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java +++ b/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java @@ -472,7 +472,7 @@ public static List selectSourcePlacements( LogicalTable Snapshot snapshot = catalog.getSnapshot(); long adapterIdWithMostPlacements = -1; int numOfPlacements = 0; - for ( Entry> entry : snapshot.getAllocSnapshot().getColumnPlacementsByAdapter( table.id ).entrySet() ) { + for ( Entry> entry : snapshot.getAllocSnapshot().getColumnPlacementsByAdapter( table.id ).entrySet() ) { if ( entry.getKey() != excludingAdapterId && entry.getValue().size() > numOfPlacements ) { adapterIdWithMostPlacements = entry.getKey(); numOfPlacements = entry.getValue().size(); diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/AbstractDqlRouter.java b/dbms/src/main/java/org/polypheny/db/routing/routers/AbstractDqlRouter.java index 601c5b2d7c..9b8f8e1c03 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/AbstractDqlRouter.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/AbstractDqlRouter.java @@ -229,7 +229,7 @@ protected List buildSelect( AlgNode node, List 1 ) { + if ( snapshot.getAllocSnapshot().getAllocationsFromLogical( catalogTable.id ).size() > 1 ) { return handleVerticalPartitioningOrReplication( node, catalogTable, statement, logicalTable, builders, cluster, queryInformation ); } return handleNonePartitioning( node, catalogTable, statement, builders, cluster, queryInformation ); diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java b/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java index 5dc24478a4..f24f366023 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java @@ -60,6 +60,7 @@ import org.polypheny.db.catalog.entity.CatalogNamespace; import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; import org.polypheny.db.catalog.entity.LogicalNamespace; +import org.polypheny.db.catalog.entity.allocation.AllocationEntity; import org.polypheny.db.catalog.entity.logical.LogicalCollection; import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalGraph; @@ -74,7 +75,6 @@ import org.polypheny.db.config.RuntimeConfig; import org.polypheny.db.languages.OperatorRegistry; import org.polypheny.db.languages.QueryLanguage; -import org.polypheny.db.partition.properties.PartitionProperty; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.rex.RexBuilder; @@ -115,7 +115,7 @@ protected static Map> selectPlacement( Logica // Find the adapter with the most column placements long adapterIdWithMostPlacements = -1; int numOfPlacements = 0; - for ( Entry> entry : Catalog.getInstance().getSnapshot().getAllocSnapshot().getColumnPlacementsByAdapter( table.id ).entrySet() ) { + for ( Entry> entry : snapshot.getAllocSnapshot().getColumnPlacementsByAdapter( table.id ).entrySet() ) { if ( entry.getValue().size() > numOfPlacements ) { adapterIdWithMostPlacements = entry.getKey(); numOfPlacements = entry.getValue().size(); @@ -125,16 +125,12 @@ protected static Map> selectPlacement( Logica // Take the adapter with most placements as base and add missing column placements List placementList = new LinkedList<>(); for ( LogicalColumn column : snapshot.getRelSnapshot( table.namespaceId ).getColumns( table.id ) ) { - if ( snapshot.getAllocSnapshot().getDataPlacement( 
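// Review note: getAllocationsFromLogical( id ) is backed by logicalAllocs.get( id ) and so
// returns null for a table without allocations; the .size() call in AbstractDqlRouter above
// would then throw. Returning an empty list from the snapshot keeps such call sites total
// (sketch):
//
//     public List<AllocationEntity> getAllocationsFromLogical( long logicalId ) {
//         return logicalAllocs.getOrDefault( logicalId, List.of() );
//     }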
adapterIdWithMostPlacements, table.id ).columnPlacementsOnAdapter.contains( column.id ) ) { - placementList.add( snapshot.getAllocSnapshot().getColumnPlacements( column.id ).get( 0 ) ); - } else { - placementList.add( snapshot.getAllocSnapshot().getColumnPlacements( column.id ).get( 0 ) ); - } + placementList.add( snapshot.getAllocSnapshot().getColumnPlacements( column.id ).get( 0 ) ); } return new HashMap<>() {{ - PartitionProperty property = snapshot.getAllocSnapshot().getPartitionProperty( table.id ); - put( property.partitionIds.get( 0 ), placementList ); + List allocs = snapshot.getAllocSnapshot().getAllocationsFromLogical( table.id ); + put( allocs.get( 0 ).id, placementList ); }}; } diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/IcarusRouter.java b/dbms/src/main/java/org/polypheny/db/routing/routers/IcarusRouter.java index 6a583de798..18609f4004 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/IcarusRouter.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/IcarusRouter.java @@ -28,11 +28,11 @@ import lombok.extern.slf4j.Slf4j; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; +import org.polypheny.db.catalog.entity.logical.LogicalEntity; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.partition.properties.PartitionProperty; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.routing.LogicalQueryInformation; -import org.polypheny.db.schema.LogicalEntity; import org.polypheny.db.tools.RoutedAlgBuilder; import org.polypheny.db.transaction.Statement; import org.polypheny.db.util.Pair; diff --git a/dbms/src/main/java/org/polypheny/db/transaction/EntityAccessMap.java b/dbms/src/main/java/org/polypheny/db/transaction/EntityAccessMap.java index 6ea0a16ddf..566c0b8ef5 100644 --- a/dbms/src/main/java/org/polypheny/db/transaction/EntityAccessMap.java +++ b/dbms/src/main/java/org/polypheny/db/transaction/EntityAccessMap.java @@ -40,6 +40,7 @@ import org.polypheny.db.algebra.core.relational.RelModify; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogEntity; +import org.polypheny.db.catalog.entity.allocation.AllocationEntity; import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.config.RuntimeConfig; import org.polypheny.db.partition.properties.PartitionProperty; @@ -268,8 +269,8 @@ public void visit( AlgNode p, int ordinal, AlgNode parent ) { relevantPartitions = accessedPartitions.get( p.getId() ); } else if ( table != null ) { if ( table.namespaceType == NamespaceType.RELATIONAL ) { - PartitionProperty property = Catalog.getInstance().getSnapshot().getAllocSnapshot().getPartitionProperty( table.id ); - relevantPartitions = property.partitionIds; + List allocations = Catalog.getInstance().getSnapshot().getAllocSnapshot().getAllocationsFromLogical( table.id ); + relevantPartitions = allocations.stream().map( a -> a.id ).collect( Collectors.toList() ); } else { relevantPartitions = List.of(); } diff --git a/lombok.config b/lombok.config index 0bab2f7ae1..a06a0e95fa 100644 --- a/lombok.config +++ b/lombok.config @@ -1,3 +1,4 @@ # This file is used for all submodule and can be overwritten by placing a lombok.config file in the submodule itself lombok.val.flagUsage = error lombok.var.flagUsage = error +lombok.anyConstructor.addConstructorProperties=true diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticTable.java 
b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticTable.java index 854ce7c773..29d3f615f1 100644 --- a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticTable.java +++ b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticTable.java @@ -79,7 +79,7 @@ public StatisticTable( Long tableId ) { if ( catalog.getSnapshot().getLogicalEntity( tableId ) != null ) { LogicalTable catalogTable = catalog.getSnapshot().getLogicalEntity( tableId ).unwrap( LogicalTable.class ); this.table = catalogTable.name; - this.namespaceType = catalogTable.getNamespaceType(); + this.namespaceType = catalogTable.namespaceType; this.dataPlacements = ImmutableList.copyOf( catalog.getSnapshot().getAllocSnapshot().getDataPlacements( catalogTable.id ).stream().map( c -> c.adapterId ).collect( Collectors.toList() ) ); this.entityType = catalogTable.entityType; } diff --git a/plugins/cql-language/src/main/java/org/polypheny/db/cql/Combiner.java b/plugins/cql-language/src/main/java/org/polypheny/db/cql/Combiner.java index d2bc9ba571..02f2060f47 100644 --- a/plugins/cql-language/src/main/java/org/polypheny/db/cql/Combiner.java +++ b/plugins/cql-language/src/main/java/org/polypheny/db/cql/Combiner.java @@ -18,11 +18,15 @@ import java.util.Arrays; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; import org.polypheny.db.algebra.core.JoinAlgType; +import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.catalog.snapshot.LogicalRelSnapshot; import org.polypheny.db.cql.BooleanGroup.TableOpsBooleanOperator; import org.polypheny.db.cql.exception.InvalidMethodInvocation; import org.polypheny.db.cql.exception.InvalidModifierException; @@ -146,7 +150,10 @@ private static String[] getColumnsToJoinOn( TableIndex left, TableIndex right, S LogicalTable rightCatalogTable = right.catalogTable; List columnList = Arrays.asList( columnStrs ); - if ( !leftCatalogTable.getColumnNames().containsAll( columnList ) || !rightCatalogTable.getColumnNames().containsAll( columnList ) ) { + LogicalRelSnapshot relSnapshot = Catalog.getInstance().getSnapshot().getRelSnapshot( leftCatalogTable.namespaceId ); + List lColumnNames = relSnapshot.getColumns( leftCatalogTable.id ).stream().map( c -> c.name ).collect( Collectors.toList() ); + List rColumnNames = relSnapshot.getColumns( rightCatalogTable.id ).stream().map( c -> c.name ).collect( Collectors.toList() ); + if ( !new HashSet<>( lColumnNames ).containsAll( columnList ) || !new HashSet<>( rColumnNames ).containsAll( columnList ) ) { log.error( "Invalid Modifier Values. Cannot join tables '{}' and '{}' on columns {}", leftCatalogTable.name, rightCatalogTable.name, columnList ); throw new InvalidModifierException( "Invalid Modifier Values. 
Cannot join tables '" + @@ -163,8 +170,9 @@ private static String[] getCommonColumns( TableIndex table1, TableIndex table2 ) if ( log.isDebugEnabled() ) { log.debug( "Getting Common Columns between '{}' and '{}'.", table1.fullyQualifiedName, table2.fullyQualifiedName ); } - List table1Columns = table1.catalogTable.getColumnNames(); - List table2Columns = table2.catalogTable.getColumnNames(); + LogicalRelSnapshot relSnapshot = Catalog.getInstance().getSnapshot().getRelSnapshot( table1.catalogTable.namespaceId ); + List table1Columns = relSnapshot.getColumns( table1.catalogTable.id ).stream().map( c -> c.name ).collect( Collectors.toList() ); + List table2Columns = relSnapshot.getColumns( table2.catalogTable.id ).stream().map( c -> c.name ).collect( Collectors.toList() ); return table1Columns.stream().filter( table2Columns::contains ).toArray( String[]::new ); } diff --git a/plugins/cql-language/src/main/java/org/polypheny/db/cql/Cql2RelConverter.java b/plugins/cql-language/src/main/java/org/polypheny/db/cql/Cql2RelConverter.java index fefbd74a52..f898846511 100644 --- a/plugins/cql-language/src/main/java/org/polypheny/db/cql/Cql2RelConverter.java +++ b/plugins/cql-language/src/main/java/org/polypheny/db/cql/Cql2RelConverter.java @@ -119,8 +119,8 @@ private void setScanColumnOrdinalities() { cqlQuery.queryRelation.traverse( TraversalType.INORDER, ( treeNode, nodeType, direction, frame ) -> { if ( nodeType == NodeType.DESTINATION_NODE && treeNode.isLeaf() ) { TableIndex tableIndex = treeNode.getExternalNode(); - for ( LogicalColumn column : tableIndex.catalogTable.columns ) { - tableScanColumnOrdinalities.put( column.id, tableScanColumnOrdinalities.size() ); + for ( Long id : tableIndex.catalogTable.getColumnIds() ) { + tableScanColumnOrdinalities.put( id, tableScanColumnOrdinalities.size() ); } } return true; @@ -147,7 +147,7 @@ private AlgBuilder generateScan( AlgBuilder algBuilder, RexBuilder rexBuilder ) if ( treeNode.isLeaf() ) { LogicalTable catalogTable = treeNode.getExternalNode().catalogTable; algBuilderAtomicReference.set( - algBuilderAtomicReference.get().scan( catalogTable.getNamespaceName(), catalogTable.name ) + algBuilderAtomicReference.get().scan( Catalog.getInstance().getSnapshot().getNamespace( catalogTable.namespaceId ).name, catalogTable.name ) ); } else { Combiner combiner = treeNode.getInternalNode(); @@ -195,7 +195,7 @@ private AlgBuilder generateProjections( AlgBuilder algBuilder, RexBuilder rexBui TableIndex tableIndex = treeNode.getExternalNode(); String columnNamePrefix = tableIndex.fullyQualifiedName + "."; LogicalTable catalogTable = tableIndex.catalogTable; - for ( LogicalColumn column : catalogTable.columns ) { + for ( LogicalColumn column : catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getColumns( catalogTable.id ) ) { int ordinal = tableScanColumnOrdinalities.size(); RexNode inputRef = rexBuilder.makeInputRef( baseNode, ordinal ); inputRefs.add( inputRef ); diff --git a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java index e3074c117e..f5f2df7a08 100644 --- a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java +++ b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java @@ -159,13 +159,13 @@ public Map> getExportedColumns() { Map> exportedColumnCache = new HashMap<>(); Set fileNames; if ( csvDir.getProtocol().equals( "jar" ) ) { - List> placements = Catalog + List placements = Catalog 
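// Review note: getCommonColumns above intersects the two name lists with
// table1Columns.stream().filter( table2Columns::contains ), which is O(n*m) on lists.
// Wrapping the right-hand list in a HashSet mirrors what getColumnsToJoinOn already does
// (sketch):
//
//     Set<String> rightNames = new HashSet<>( table2Columns );
//     return table1Columns.stream().filter( rightNames::contains ).toArray( String[]::new );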
.getInstance() .getSnapshot() .getPhysicalSnapshot() .getPhysicalsOnAdapter( getAdapterId() ); fileNames = new HashSet<>(); - for ( PhysicalEntity ccp : placements ) { + for ( PhysicalEntity ccp : placements ) { fileNames.add( ccp.namespaceName ); } } else if ( Sources.of( csvDir ).file().isFile() ) { diff --git a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvTable.java b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvTable.java index e5c57bed6c..3fbd55e9dc 100644 --- a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvTable.java +++ b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvTable.java @@ -33,8 +33,9 @@ package org.polypheny.db.adapter.csv; -import java.util.ArrayList; import java.util.List; +import java.util.stream.Collectors; +import org.polypheny.db.algebra.type.AlgDataTypeFactory; import org.polypheny.db.catalog.entity.allocation.AllocationTable; import org.polypheny.db.catalog.entity.physical.PhysicalTable; import org.polypheny.db.util.Source; @@ -55,7 +56,12 @@ public abstract class CsvTable extends PhysicalTable { * Creates a CsvTable. */ CsvTable( Source source, AllocationTable allocationTable, List fieldTypes, int[] fields, CsvSource csvSource ) { - super( allocationTable, allocationTable.name, allocationTable.getNamespaceName(), new ArrayList<>( allocationTable.getColumnNames().values() ) ); + super( + allocationTable, + allocationTable.name, + allocationTable.getNamespaceName(), + allocationTable.getColumnNames(), + allocationTable.getColumns().values().stream().collect( Collectors.toMap( c -> c.id, c -> AlgDataTypeFactory.DEFAULT.createPolyType( c.type ) ) ) ); this.source = source; this.fieldTypes = fieldTypes; this.fields = fields; diff --git a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/ddl/CypherAddPlacement.java b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/ddl/CypherAddPlacement.java index 3dbabcca6f..e3dcf660e8 100644 --- a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/ddl/CypherAddPlacement.java +++ b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/ddl/CypherAddPlacement.java @@ -77,7 +77,7 @@ public void execute( Context context, Statement statement, QueryParameters param throw new RuntimeException( "Error while adding graph placement." ); } - if ( graphs.get( 0 ).placements.stream().anyMatch( p -> dataStores.stream().map( Adapter::getAdapterId ).collect( Collectors.toList() ).contains( p ) ) ) { + if ( graphs.get( 0 ).getPlacements().stream().anyMatch( p -> dataStores.stream().map( Adapter::getAdapterId ).collect( Collectors.toList() ).contains( p.adapterId ) ) ) { throw new RuntimeException( "Could not create placement of graph as it already exists." 
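// Review note: the anyMatch in CypherAddPlacement above rebuilds the adapter-id list once per
// placement. Collecting the ids into a Set first is clearer and cheaper (sketch, assuming
// adapter ids are longs in this branch):
//
//     Set<Long> storeIds = dataStores.stream().map( Adapter::getAdapterId ).collect( Collectors.toSet() );
//     boolean alreadyPlaced = graphs.get( 0 ).getPlacements().stream().anyMatch( p -> storeIds.contains( p.adapterId ) );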
); } diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcEntity.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcEntity.java index f446cbffc4..2adb4eb60d 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcEntity.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcEntity.java @@ -37,6 +37,7 @@ import java.util.Arrays; import java.util.Collections; import java.util.List; +import java.util.Map; import java.util.stream.Collectors; import lombok.NonNull; import org.apache.calcite.avatica.ColumnMetaData; @@ -110,7 +111,8 @@ public JdbcEntity( allocationTable, getPhysicalTableName( jdbcSchema.adapter, logicalTable, allocationTable ), getPhysicalSchemaName( jdbcSchema.adapter ), - getPhysicalColumnNames( jdbcSchema.adapter, allocationTable ) ); + getPhysicalColumnNames( jdbcSchema.adapter, allocationTable ), + allocationTable.getColumnTypes() ); this.logical = logicalTable; this.allocation = allocationTable; this.jdbcSchema = jdbcSchema; @@ -118,9 +120,9 @@ public JdbcEntity( } - private static List getPhysicalColumnNames( Adapter adapter, AllocationTable allocationTable ) { + private static Map getPhysicalColumnNames( Adapter adapter, AllocationTable allocationTable ) { AbstractJdbcStore store = (AbstractJdbcStore) adapter; - return allocationTable.getColumns().values().stream().map( c -> store.getPhysicalColumnName( c.id ) ).collect( Collectors.toList() ); + return allocationTable.getColumns().values().stream().collect( Collectors.toMap( c -> c.id, c -> store.getPhysicalColumnName( c.id ) ) ); } @@ -154,7 +156,7 @@ private List> fieldClasses( final JavaTypeFact SqlString generateSql() { List pcnl = Expressions.list(); - for ( String str : columnNames ) { + for ( String str : allocation.getColumnNames().values() ) { pcnl.add( new SqlIdentifier( Arrays.asList( name, str ), ParserPos.ZERO ) ); } @@ -184,7 +186,7 @@ public SqlIdentifier physicalTableName() { public SqlIdentifier physicalColumnName( String logicalColumnName ) { - String physicalName = columnNames.get( List.copyOf( allocation.getColumnNames().values() ).indexOf( logicalColumnName ) ); + String physicalName = columns.get( List.of( allocation.getColumnNamesId().values() ).indexOf( logicalColumnName ) ); return new SqlIdentifier( Collections.singletonList( physicalName ), ParserPos.ZERO ); } @@ -197,7 +199,7 @@ public boolean hasPhysicalColumnName( String logicalColumnName ) { public SqlNodeList getNodeList() { List pcnl = Expressions.list(); int i = 0; - for ( String str : columnNames ) { + for ( String str : columns.values() ) { SqlNode[] operands = new SqlNode[]{ new SqlIdentifier( Arrays.asList( namespaceName, name, str ), ParserPos.ZERO ), new SqlIdentifier( Collections.singletonList( List.copyOf( allocation.getColumnNames().values() ).get( i++ ) ), ParserPos.ZERO ) diff --git a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlAddPlacement.java b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlAddPlacement.java index d026900ea8..5b8b55ae01 100644 --- a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlAddPlacement.java +++ b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlAddPlacement.java @@ -57,7 +57,7 @@ public void execute( Context context, Statement statement, QueryParameters param .map( store -> (DataStore) adapterManager.getAdapter( store ) ) .collect( Collectors.toList() ); - if ( 
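// Review note: in JdbcEntity.physicalColumnName above, List.of( allocation.getColumnNamesId().values() )
// wraps the whole collection as a single list element, so the indexOf() can never match;
// List.copyOf(...) (used elsewhere in the class) looks intended. Since `columns` is now keyed
// by column id rather than by position, resolving the id directly may be cleaner (sketch,
// assuming getColumnNames() maps column id to logical name):
//
//     long id = allocation.getColumnNames().entrySet().stream()
//             .filter( e -> e.getValue().equals( logicalColumnName ) )
//             .map( Entry::getKey ).findFirst().orElseThrow();
//     String physicalName = columns.get( id );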
collections.get( 0 ).placements.stream().anyMatch( p -> dataStores.stream().map( Adapter::getAdapterId ).collect( Collectors.toList() ).contains( p ) ) ) { + if ( statement.getTransaction().getSnapshot().getAllocSnapshot().getCollectionPlacements( collections.get( 0 ).id ).stream().anyMatch( p -> dataStores.stream().map( Adapter::getAdapterId ).collect( Collectors.toList() ).contains( p ) ) ) { throw new RuntimeException( "Error while adding a new collection placement, placement already present." ); } diff --git a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlDeletePlacement.java b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlDeletePlacement.java index b33c969a01..766cd9af28 100644 --- a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlDeletePlacement.java +++ b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlDeletePlacement.java @@ -56,7 +56,7 @@ public void execute( Context context, Statement statement, QueryParameters param .map( store -> (DataStore) adapterManager.getAdapter( store ) ) .collect( Collectors.toList() ); - if ( collections.get( 0 ).placements.stream().noneMatch( p -> dataStores.stream().map( Adapter::getAdapterId ).collect( Collectors.toList() ).contains( p ) ) ) { + if ( statement.getTransaction().getSnapshot().getAllocSnapshot().getCollectionPlacements( collections.get( 0 ).id ).stream().noneMatch( p -> dataStores.stream().map( Adapter::getAdapterId ).collect( Collectors.toList() ).contains( p ) ) ) { throw new RuntimeException( "Error while adding a new collection placement, placement already present." ); } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java index 34972e1cb0..dfcbd93514 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java @@ -45,6 +45,7 @@ import org.polypheny.db.catalog.entity.CatalogQueryInterface; import org.polypheny.db.catalog.entity.CatalogUser; import org.polypheny.db.catalog.entity.LogicalNamespace; +import org.polypheny.db.catalog.entity.allocation.AllocationEntity; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.exceptions.GenericCatalogException; import org.polypheny.db.catalog.exceptions.UnknownAdapterException; @@ -266,8 +267,8 @@ private void addDefaultCsvColumn( CatalogAdapter csv, LogicalTable table, String if ( table.name.equals( "emp" ) || table.name.equals( "work" ) ) { filename += ".gz"; } - - getAllocRel( table.namespaceId ).addColumnPlacement( table, csv.id, colId, PlacementType.AUTOMATIC, filename, table.name, name, position ); + long allocId = getAllocRel( table.namespaceId ).addDataPlacement( csv.id, table.id ); + getAllocRel( table.namespaceId ).addColumnPlacement( allocId, colId, PlacementType.AUTOMATIC, filename, table.name, name, position ); getAllocRel( table.namespaceId ).updateColumnPlacementPhysicalPosition( csv.id, colId, position ); // long partitionId = table.partitionProperty.partitionIds.get( 0 ); @@ -279,7 +280,8 @@ private void addDefaultCsvColumn( CatalogAdapter csv, LogicalTable table, String private void addDefaultColumn( CatalogAdapter adapter, LogicalTable table, String name, PolyType type, Collation collation, int position, Integer length ) { if ( !getSnapshot().getRelSnapshot( table.namespaceId ).checkIfExistsColumn( table.id, name ) ) { long 
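// Review note: addDefaultCsvColumn above now routes addColumnPlacement through the freshly
// created allocation id, but updateColumnPlacementPhysicalPosition on the following line is
// still keyed by the adapter id (csv.id). If that method is re-keyed the same way, the call
// would presumably become (sketch):
//
//     getAllocRel( table.namespaceId ).updateColumnPlacementPhysicalPosition( allocId, colId, position );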
colId = getLogicalRel( table.namespaceId ).addColumn( name, table.id, position, type, null, length, null, null, null, false, collation ); - getAllocRel( table.namespaceId ).addColumnPlacement( table, adapter.id, colId, PlacementType.AUTOMATIC, "col" + colId, table.name, name, position ); + AllocationEntity entity = getSnapshot().getAllocSnapshot().getAllocation( adapter.id, table.id ); + getAllocRel( table.namespaceId ).addColumnPlacement( entity.id, colId, PlacementType.AUTOMATIC, "col" + colId, table.name, name, position ); getAllocRel( table.namespaceId ).updateColumnPlacementPhysicalPosition( adapter.id, colId, position ); } } @@ -411,15 +413,15 @@ public long addNamespace( String name, NamespaceType namespaceType, boolean case switch ( namespaceType ) { case RELATIONAL: logicalCatalogs.put( id, new RelationalCatalog( namespace ) ); - allocationCatalogs.put( id, new PolyAllocRelCatalog() ); + allocationCatalogs.put( id, new PolyAllocRelCatalog( namespace ) ); break; case DOCUMENT: logicalCatalogs.put( id, new DocumentCatalog( namespace ) ); - allocationCatalogs.put( id, new PolyAllocDocCatalog() ); + allocationCatalogs.put( id, new PolyAllocDocCatalog( namespace ) ); break; case GRAPH: logicalCatalogs.put( id, new GraphCatalog( namespace ) ); - allocationCatalogs.put( id, new PolyAllocGraphCatalog() ); + allocationCatalogs.put( id, new PolyAllocGraphCatalog( namespace ) ); break; } physicalCatalogs.put( id, new PolyPhysicalCatalog() ); diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocDocCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocDocCatalog.java index 5aca473b4c..5088985718 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocDocCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocDocCatalog.java @@ -18,11 +18,13 @@ import io.activej.serializer.BinarySerializer; import java.util.List; +import java.util.concurrent.ConcurrentHashMap; import lombok.Getter; import org.polypheny.db.adapter.DataStore; import org.polypheny.db.catalog.Serializable; import org.polypheny.db.catalog.catalogs.AllocationDocumentCatalog; import org.polypheny.db.catalog.entity.LogicalNamespace; +import org.polypheny.db.catalog.entity.allocation.AllocationCollection; import org.polypheny.db.catalog.exceptions.GenericCatalogException; import org.polypheny.db.catalog.logistic.PlacementType; @@ -31,6 +33,9 @@ public class PolyAllocDocCatalog implements Serializable, AllocationDocumentCata @Getter public final LogicalNamespace namespace; + @Getter + public ConcurrentHashMap collections; + public PolyAllocDocCatalog( LogicalNamespace namespace ) { this.namespace = namespace; diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocGraphCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocGraphCatalog.java index 6fa2273b61..83c3a5c23a 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocGraphCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocGraphCatalog.java @@ -17,10 +17,12 @@ package org.polypheny.db.catalog.allocation; import io.activej.serializer.BinarySerializer; +import java.util.concurrent.ConcurrentHashMap; import lombok.Getter; import org.polypheny.db.catalog.Serializable; import org.polypheny.db.catalog.catalogs.AllocationGraphCatalog; import 
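// Review note: the new `collections` map above (and `graphs` in PolyAllocGraphCatalog below)
// is declared but, at least in these hunks, never initialized, so the @Getter would hand out
// null. Unless a constructor initializes it elsewhere, an inline initializer is the safe
// default (sketch, element type assumed from the imports):
//
//     @Getter
//     public ConcurrentHashMap<Long, AllocationCollection> collections = new ConcurrentHashMap<>();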
org.polypheny.db.catalog.entity.LogicalNamespace; +import org.polypheny.db.catalog.entity.allocation.AllocationGraph; public class PolyAllocGraphCatalog implements Serializable, AllocationGraphCatalog { @@ -29,6 +31,9 @@ public class PolyAllocGraphCatalog implements Serializable, AllocationGraphCatal @Getter public BinarySerializer serializer = Serializable.builder.get().build( PolyAllocGraphCatalog.class ); + @Getter + public ConcurrentHashMap graphs; + public PolyAllocGraphCatalog( LogicalNamespace namespace ) { this.namespace = namespace; diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocRelCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocRelCatalog.java index aed015765e..84943e3a9a 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocRelCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocRelCatalog.java @@ -19,110 +19,50 @@ import io.activej.serializer.BinarySerializer; import io.activej.serializer.annotations.Deserialize; import io.activej.serializer.annotations.Serialize; -import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import lombok.Getter; import lombok.extern.slf4j.Slf4j; -import org.jetbrains.annotations.Nullable; import org.polypheny.db.catalog.IdBuilder; -import org.polypheny.db.catalog.PusherMap; import org.polypheny.db.catalog.Serializable; import org.polypheny.db.catalog.catalogs.AllocationRelationalCatalog; -import org.polypheny.db.catalog.entity.CatalogColumnPlacement; import org.polypheny.db.catalog.entity.CatalogDataPlacement; +import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.allocation.AllocationTable; -import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.exceptions.GenericCatalogException; import org.polypheny.db.catalog.logistic.DataPlacementRole; import org.polypheny.db.catalog.logistic.PartitionType; import org.polypheny.db.catalog.logistic.PlacementType; import org.polypheny.db.partition.properties.PartitionProperty; -import org.polypheny.db.util.Pair; @Slf4j public class PolyAllocRelCatalog implements AllocationRelationalCatalog, Serializable { private final IdBuilder idBuilder = IdBuilder.getInstance(); + @Getter + @Serialize + public final LogicalNamespace namespace; @Getter public BinarySerializer serializer = Serializable.builder.get().build( PolyAllocRelCatalog.class ); @Serialize - public final PusherMap allocations; - - private final ConcurrentHashMap, Long> adapterLogicalToAllocId; - private final ConcurrentHashMap, AllocationTable> adapterLogicalColumnToAlloc; - private final ConcurrentHashMap> logicalColumnToPlacements; - private final ConcurrentHashMap, List> adapterLogicalTableToAllocs; - - private final ConcurrentHashMap> adapterToAllocs; - - private final ConcurrentHashMap> logicalTableToAllocs; + @Getter + public final ConcurrentHashMap tables; - public PolyAllocRelCatalog() { - this( new ConcurrentHashMap<>() ); + public PolyAllocRelCatalog( LogicalNamespace namespace ) { + this( namespace, new ConcurrentHashMap<>() ); } public PolyAllocRelCatalog( - @Deserialize("allocations") Map allocations ) { - this.allocations = new PusherMap<>( allocations ); - this.adapterLogicalToAllocId = new ConcurrentHashMap<>(); - this.allocations.addRowConnection( this.adapterLogicalToAllocId, ( k, v ) -> Pair.of( v.adapterId, v.logical.id 
), ( k, v ) -> k ); - this.adapterLogicalColumnToAlloc = new ConcurrentHashMap<>(); - this.allocations.addRowConnection( this.adapterLogicalColumnToAlloc, ( k, v ) -> Pair.of( v.adapterId, v.logical.id ), ( k, v ) -> v ); - //// - this.logicalColumnToPlacements = new ConcurrentHashMap<>(); - this.allocations.addConnection( a -> { - logicalColumnToPlacements.clear(); - a.forEach( ( k, v ) -> v.placements.forEach( p -> { - if ( logicalColumnToPlacements.containsKey( p.columnId ) ) { - logicalColumnToPlacements.get( p.columnId ).add( p ); - } else { - logicalColumnToPlacements.put( p.columnId, new ArrayList<>( List.of( p ) ) ); - } - } ) ); - } ); - - //// - this.adapterLogicalTableToAllocs = new ConcurrentHashMap<>(); - this.allocations.addConnection( a -> a.forEach( ( k, v ) -> { - if ( adapterLogicalTableToAllocs.containsKey( Pair.of( v.adapterId, v.logical.id ) ) ) { - adapterLogicalTableToAllocs.get( Pair.of( v.adapterId, v.logical.id ) ).add( v ); - } else { - adapterLogicalTableToAllocs.put( Pair.of( v.adapterId, v.logical.id ), new ArrayList<>( List.of( v ) ) ); - } - } ) ); - - //// - this.adapterToAllocs = new ConcurrentHashMap<>(); - this.allocations.addConnection( a -> { - adapterToAllocs.clear(); - for ( AllocationTable value : a.values() ) { - if ( adapterToAllocs.containsKey( value.adapterId ) ) { - adapterToAllocs.get( value.adapterId ).add( value ); - } else { - adapterToAllocs.put( value.adapterId, new ArrayList<>( List.of( value ) ) ); - } - } - } ); - - //// - this.logicalTableToAllocs = new ConcurrentHashMap<>(); - this.allocations.addConnection( a -> { - logicalTableToAllocs.clear(); - for ( AllocationTable table : a.values() ) { - if ( logicalTableToAllocs.containsKey( table.logical.id ) ) { - logicalTableToAllocs.get( table.logical.id ).add( table ); - } else { - logicalTableToAllocs.put( table.logical.id, new ArrayList<>( List.of( table ) ) ); - } - } - } ); + @Deserialize("namespace") LogicalNamespace namespace, + @Deserialize("tables") Map tables ) { + this.tables = new ConcurrentHashMap<>( tables ); + this.namespace = namespace; } @@ -134,50 +74,27 @@ public PolyAllocRelCatalog copy() { // move to Snapshot - @Nullable - private Long getAllocId( long adapterId, long tableId ) { - Long allocId = adapterLogicalToAllocId.get( Pair.of( adapterId, tableId ) ); - if ( allocId == null ) { - log.warn( "AllocationEntity does not yet exist." 
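// Review note: the removed code above created an AllocationTable lazily when a placement
// arrived for an unknown (adapter, table) pair. The new addColumnPlacement below instead does
// tables.get( allocationId ).withAddedColumn( ... ) and throws a NullPointerException if the
// allocation was never registered, so callers must now call addDataPlacement first. A
// defensive variant (sketch):
//
//     AllocationTable table = tables.get( allocationId );
//     if ( table == null ) {
//         throw new IllegalStateException( "Unknown allocation: " + allocationId );
//     }
//     tables.put( allocationId, table.withAddedColumn( columnId, placementType, physicalSchemaName, physicalTableName, physicalColumnName ) );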
); - return null; - } - return allocId; - } - - @Override - public void addColumnPlacement( LogicalTable table, long adapterId, long columnId, PlacementType placementType, String physicalSchemaName, String physicalTableName, String physicalColumnName, int position ) { - Long allocationId = adapterLogicalToAllocId.get( Pair.of( adapterId, columnId ) ); - - AllocationTable allocationTable; - - if ( allocationId == null ) { - allocationId = idBuilder.getNewAllocId(); - allocationTable = new AllocationTable( table, allocationId, physicalTableName, adapterId, List.of( - new CatalogColumnPlacement( table.namespaceId, table.id, columnId, adapterId, placementType, physicalSchemaName, physicalColumnName, position ) ) ); - } else { - allocationTable = adapterLogicalColumnToAlloc.get( Pair.of( adapterId, columnId ) ).withAddedColumn( columnId, placementType, physicalSchemaName, physicalTableName, physicalColumnName ); - } - - allocations.put( allocationId, allocationTable ); + public void addColumnPlacement( long allocationId, long columnId, PlacementType placementType, String physicalSchemaName, String physicalTableName, String physicalColumnName, int position ) { + tables.put( allocationId, tables.get( allocationId ).withAddedColumn( columnId, placementType, physicalSchemaName, physicalTableName, physicalColumnName ) ); } // might replace above one with this private void addColumnPlacementAlloc( long allocTableId, long columnId, PlacementType placementType, String physicalSchemaName, String physicalTableName, String physicalColumnName ) { - allocations.put( allocTableId, allocations.get( allocTableId ).withAddedColumn( columnId, placementType, physicalSchemaName, physicalTableName, physicalColumnName ) ); + tables.put( allocTableId, tables.get( allocTableId ).withAddedColumn( columnId, placementType, physicalSchemaName, physicalTableName, physicalColumnName ) ); } @Override - public void deleteColumnPlacement( long adapterId, long columnId, boolean columnOnly ) { - allocations.put( adapterLogicalToAllocId.get( Pair.of( adapterId, columnId ) ), adapterLogicalColumnToAlloc.get( Pair.of( adapterId, columnId ) ).withRemovedColumn( columnId ) ); + public void deleteColumnPlacement( long allocationId, long columnId, boolean columnOnly ) { + tables.put( allocationId, tables.get( allocationId ).withRemovedColumn( columnId ) ); } // might replace above one with this private void deleteColumnPlacementAlloc( long allocTableId, long columnId, boolean columnOnly ) { - allocations.put( allocTableId, allocations.get( allocTableId ).withRemovedColumn( columnId ) ); + tables.put( allocTableId, tables.get( allocTableId ).withRemovedColumn( columnId ) ); } @@ -284,8 +201,10 @@ public void addPartitionPlacement( long namespaceId, long adapterId, long tableI @Override - public void addDataPlacement( long adapterId, long tableId ) { - + public long addDataPlacement( long adapterId, long tableId ) { + long id = idBuilder.getNewAllocId(); + tables.put( id, new AllocationTable( id, tableId, namespace.id, adapterId, List.of() ) ); + return id; } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/RelationalCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/RelationalCatalog.java index 10aab5c6ae..15b2f75f6c 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/RelationalCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/RelationalCatalog.java @@ -156,7 +156,7 @@ public LogicalCatalog 
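// Review note: addDataPlacement returning the newly minted allocation id (above) is what lets
// the DDL layer thread allocation ids into addColumnPlacement; the typical call sequence now
// reads (sketch):
//
//     long allocId = catalog.getAllocRel( table.namespaceId ).addDataPlacement( adapterId, table.id );
//     catalog.getAllocRel( table.namespaceId ).addColumnPlacement( allocId, columnId, PlacementType.AUTOMATIC, null, null, null, position );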
withLogicalNamespace( LogicalNamespace namespace ) { @Override public long addTable( String name, EntityType entityType, boolean modifiable ) { long id = idBuilder.getNewEntityId(); - LogicalTable table = new LogicalTable( id, name, List.of(), logicalNamespace.id, logicalNamespace.name, entityType, null, List.of(), modifiable, null, List.of() ); + LogicalTable table = new LogicalTable( id, name, logicalNamespace.id, entityType, null, modifiable, null ); tables.put( id, table ); return id; } @@ -267,7 +267,6 @@ public long addColumn( String name, long tableId, int position, PolyType type, P long id = idBuilder.getNewFieldId(); LogicalColumn column = new LogicalColumn( id, name, tableId, logicalNamespace.id, position, type, collectionsType, length, scale, dimension, cardinality, nullable, collation, null ); columns.put( id, column ); - tables.put( tableId, tables.get( tableId ).withAddedColumn( column ) ); return id; } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/physical/PolyPhysicalCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/physical/PolyPhysicalCatalog.java index 44900b4e2a..65a6019d5d 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/physical/PolyPhysicalCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/physical/PolyPhysicalCatalog.java @@ -16,20 +16,16 @@ package org.polypheny.db.catalog.physical; -import java.util.ArrayList; -import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; -import org.polypheny.db.catalog.PusherMap; +import lombok.Getter; import org.polypheny.db.catalog.catalogs.PhysicalCatalog; import org.polypheny.db.catalog.entity.physical.PhysicalEntity; public class PolyPhysicalCatalog implements PhysicalCatalog { - private final PusherMap> physicals; - - private final ConcurrentHashMap> logicalPhysical; - private final ConcurrentHashMap>> physicalsPerAdapter; + @Getter + private final ConcurrentHashMap physicals; public PolyPhysicalCatalog() { @@ -37,46 +33,21 @@ public PolyPhysicalCatalog() { } - public PolyPhysicalCatalog( Map> physicals ) { - this.physicals = new PusherMap<>( physicals ); - - this.logicalPhysical = new ConcurrentHashMap<>(); - this.physicals.addRowConnection( this.logicalPhysical, ( k, v ) -> v.logical.id, ( k, v ) -> v ); - this.physicalsPerAdapter = new ConcurrentHashMap<>(); - this.physicals.addConnection( m -> { - physicalsPerAdapter.clear(); - m.forEach( ( k, v ) -> { - if ( physicalsPerAdapter.containsKey( v.adapterId ) ) { - physicalsPerAdapter.get( v.adapterId ).add( v ); - } else { - physicalsPerAdapter.put( v.adapterId, new ArrayList<>( List.of( v ) ) ); - } - } ); - } ); - } + public PolyPhysicalCatalog( Map physicals ) { + this.physicals = new ConcurrentHashMap<>( physicals ); - - @Override - public List> getPhysicalsOnAdapter( long id ) { - return physicalsPerAdapter.get( id ); } - @Override - public PhysicalEntity getPhysicalEntity( long id ) { + PhysicalEntity getPhysicalEntity( long id ) { return physicals.get( id ); } @Override - public void addPhysicalEntity( PhysicalEntity physicalEntity ) { + public void addPhysicalEntity( PhysicalEntity physicalEntity ) { physicals.put( physicalEntity.id, physicalEntity ); } - @Override - public PhysicalEntity getFromLogical( long id ) { - return logicalPhysical.get( id ); - } - } diff --git a/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/RequestParser.java b/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/RequestParser.java 
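// Review note: with PolyPhysicalCatalog above reduced to a single @Getter map, the
// per-adapter and per-logical indices formerly maintained through PusherMap connections are
// rebuilt on every PhysicalSnapshotImpl construction. That trades incremental upkeep for
// snapshot-build cost; flattening all catalogs stays a one-liner (sketch, mirroring the
// snapshot constructor):
//
//     Map<Long, PhysicalEntity> all = physicalCatalogs.values().stream()
//             .flatMap( c -> c.getPhysicals().entrySet().stream() )
//             .collect( Collectors.toMap( Entry::getKey, Entry::getValue ) );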
index 139c9ec202..4acfed046d 100644 --- a/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/RequestParser.java +++ b/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/RequestParser.java @@ -278,7 +278,7 @@ List newParseProjectionsAndAggregations( String[] possibleProject for ( LogicalTable table : tables ) { tableOffsets.put( table.id, columnOffset ); validColumns.addAll( table.getColumnIds() ); - columnOffset += table.columns.size(); + columnOffset += table.getColumns().size(); } List columns; @@ -306,7 +306,7 @@ List generateRequestColumnsWithoutProject( List tab List columns = new ArrayList<>(); long internalPosition = 0L; for ( LogicalTable table : tables ) { - for ( LogicalColumn column : table.columns ) { + for ( LogicalColumn column : table.getColumns() ) { int calculatedPosition = tableOffsets.get( table.id ) + column.position - 1; RequestColumn requestColumn = new RequestColumn( column, calculatedPosition, calculatedPosition, null, null, true ); columns.add( requestColumn ); diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/SqlProcessorImpl.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/SqlProcessorImpl.java index 5e796d02ca..12c24707e0 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/SqlProcessorImpl.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/SqlProcessorImpl.java @@ -253,7 +253,7 @@ private void addDefaultValues( Transaction transaction, SqlInsert insert ) { catalogTable = getCatalogTable( transaction, (SqlIdentifier) insert.getTargetTable() ); SqlNodeList newColumnList = new SqlNodeList( ParserPos.ZERO ); - int size = (int) catalogTable.columns.size(); + int size = (int) catalogTable.getColumns().size(); if ( namespaceType == NamespaceType.DOCUMENT ) { List columnNames = catalogTable.getColumnNames(); size += oldColumnList.getSqlList().stream().filter( column -> !columnNames.contains( ((SqlIdentifier) column).names.get( 0 ) ) ).count(); @@ -261,7 +261,7 @@ private void addDefaultValues( Transaction transaction, SqlInsert insert ) { SqlNode[][] newValues = new SqlNode[((SqlBasicCall) insert.getSource()).getOperands().length][size]; int pos = 0; - for ( LogicalColumn column : catalogTable.columns ) { + for ( LogicalColumn column : catalogTable.getColumns() ) { // Add column newColumnList.add( new SqlIdentifier( column.name, ParserPos.ZERO ) ); diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddColumn.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddColumn.java index 5a5715c499..5ba7c4b4e9 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddColumn.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddColumn.java @@ -22,9 +22,10 @@ import java.util.List; import java.util.Objects; import lombok.extern.slf4j.Slf4j; -import org.polypheny.db.catalog.logistic.EntityType; +import org.polypheny.db.catalog.entity.CatalogDataPlacement; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.exceptions.ColumnAlreadyExistsException; +import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.ddl.DdlManager; import org.polypheny.db.ddl.DdlManager.ColumnTypeInformation; import org.polypheny.db.ddl.exception.ColumnNotExistsException; @@ -131,8 +132,8 @@ public void execute( Context context, Statement statement, 
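// Review note: in SqlProcessorImpl.addDefaultValues above, size() already returns an int, so
// the (int) cast in `int size = (int) catalogTable.getColumns().size();` is a leftover and
// can go (sketch, assuming getColumns() returns a List):
//
//     int size = catalogTable.getColumns().size();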
QueryParameters param } // Make sure that all adapters are of type store (and not source) - for ( long storeId : catalogTable.dataPlacements ) { - getDataStoreInstance( storeId ); + for ( CatalogDataPlacement placement : statement.getTransaction().getSnapshot().getAllocSnapshot().getDataPlacements( catalogTable.id ) ) { + getDataStoreInstance( placement.adapterId ); } String defaultValue = this.defaultValue == null ? null : this.defaultValue.toString(); diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddPartitions.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddPartitions.java index 88f44ad4eb..0e60623d9b 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddPartitions.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddPartitions.java @@ -23,8 +23,6 @@ import java.util.Objects; import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; -import org.polypheny.db.catalog.logistic.EntityType; -import org.polypheny.db.catalog.logistic.PartitionType; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.exceptions.GenericCatalogException; import org.polypheny.db.catalog.exceptions.UnknownColumnException; @@ -33,6 +31,8 @@ import org.polypheny.db.catalog.exceptions.UnknownSchemaException; import org.polypheny.db.catalog.exceptions.UnknownTableException; import org.polypheny.db.catalog.exceptions.UnknownUserException; +import org.polypheny.db.catalog.logistic.EntityType; +import org.polypheny.db.catalog.logistic.PartitionType; import org.polypheny.db.ddl.DdlManager; import org.polypheny.db.ddl.DdlManager.PartitionInformation; import org.polypheny.db.ddl.exception.PartitionGroupNamesNotUniqueException; @@ -153,7 +153,7 @@ public void execute( Context context, Statement statement, QueryParameters param try { // Check if table is already partitioned - if ( catalogTable.partitionProperty.partitionType == PartitionType.NONE ) { + if ( statement.getTransaction().getSnapshot().getAllocSnapshot().getPartitionProperty( catalogTable.id ).partitionType == PartitionType.NONE ) { DdlManager.getInstance().addPartitioning( PartitionInformation.fromNodeLists( catalogTable, diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddPlacement.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddPlacement.java index 8ed1397b1a..06682fe098 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddPlacement.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddPlacement.java @@ -125,7 +125,7 @@ public void execute( Context context, Statement statement, QueryParameters param } // You can't partition placements if the table is not partitioned - if ( !catalogTable.partitionProperty.isPartitioned && (!partitionGroupsList.isEmpty() || !partitionGroupNamesList.isEmpty()) ) { + if ( !statement.getTransaction().getSnapshot().getAllocSnapshot().getPartitionProperty( catalogTable.id ).isPartitioned && (!partitionGroupsList.isEmpty() || !partitionGroupNamesList.isEmpty()) ) { throw new RuntimeException( "Partition Placement is not allowed for unpartitioned table '" + catalogTable.name + "'" ); } diff --git 
a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableMergePartitions.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableMergePartitions.java index f24bac590a..3d68e52650 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableMergePartitions.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableMergePartitions.java @@ -20,14 +20,14 @@ import java.util.List; import java.util.Objects; import lombok.extern.slf4j.Slf4j; -import org.polypheny.db.catalog.logistic.EntityType; -import org.polypheny.db.catalog.logistic.PartitionType; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.exceptions.GenericCatalogException; import org.polypheny.db.catalog.exceptions.UnknownKeyException; import org.polypheny.db.catalog.exceptions.UnknownSchemaException; import org.polypheny.db.catalog.exceptions.UnknownTableException; import org.polypheny.db.catalog.exceptions.UnknownUserException; +import org.polypheny.db.catalog.logistic.EntityType; +import org.polypheny.db.catalog.logistic.PartitionType; import org.polypheny.db.ddl.DdlManager; import org.polypheny.db.languages.ParserPos; import org.polypheny.db.languages.QueryParameters; @@ -88,10 +88,10 @@ public void execute( Context context, Statement statement, QueryParameters param } // Check if table is even partitioned - if ( catalogTable.partitionProperty.partitionType != PartitionType.NONE ) { + if ( statement.getTransaction().getSnapshot().getAllocSnapshot().getPartitionProperty( catalogTable.id ).partitionType != PartitionType.NONE ) { if ( log.isDebugEnabled() ) { - log.debug( "Merging partitions for table: {} with id {} on schema: {}", catalogTable.name, catalogTable.id, catalogTable.getNamespaceName() ); + log.debug( "Merging partitions for table: {} with id {} on schema: {}", catalogTable.name, catalogTable.id, statement.getTransaction().getSnapshot().getNamespace( catalogTable.namespaceId ).name ); } try { diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyPartitions.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyPartitions.java index fc2ae675ea..7f41439c9d 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyPartitions.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyPartitions.java @@ -105,7 +105,7 @@ public void execute( Context context, Statement statement, QueryParameters param throw new RuntimeException( "Not possible to use ALTER TABLE because " + catalogTable.name + " is not a table." 
); } - if ( !catalogTable.partitionProperty.isPartitioned ) { + if ( !statement.getTransaction().getSnapshot().getAllocSnapshot().getPartitionProperty( catalogTable.id ).isPartitioned ) { throw new RuntimeException( "Table '" + catalogTable.name + "' is not partitioned" ); } @@ -123,7 +123,7 @@ public void execute( Context context, Statement statement, QueryParameters param } long storeId = storeInstance.getAdapterId(); // Check whether this placement already exists - if ( !catalogTable.dataPlacements.contains( storeId ) ) { + if ( !statement.getTransaction().getSnapshot().getAllocSnapshot().getDataPlacements( catalogTable.id ).stream().map( p -> p.adapterId ).collect( Collectors.toList() ).contains( storeId ) ) { throw CoreUtil.newContextException( storeName.getPos(), RESOURCE.placementDoesNotExist( storeName.getSimple(), catalogTable.name ) ); @@ -137,10 +137,10 @@ public void execute( Context context, Statement statement, QueryParameters param for ( int partitionId : partitionGroupList ) { // Check if specified partition index is even part of table and if so get corresponding uniquePartId try { - tempPartitionList.add( catalogTable.partitionProperty.partitionGroupIds.get( partitionId ) ); + tempPartitionList.add( statement.getTransaction().getSnapshot().getAllocSnapshot().getPartitionProperty( catalogTable.id ).partitionGroupIds.get( partitionId ) ); } catch ( IndexOutOfBoundsException e ) { throw new RuntimeException( "Specified Partition-Index: '" + partitionId + "' is not part of table '" - + catalogTable.name + "', has only " + catalogTable.partitionProperty.numPartitionGroups + " partitions" ); + + catalogTable.name + "', has only " + statement.getTransaction().getSnapshot().getAllocSnapshot().getPartitionProperty( catalogTable.id ).numPartitionGroups + " partitions" ); } } } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyPlacement.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyPlacement.java index a980dc3561..2a7666467a 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyPlacement.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyPlacement.java @@ -24,8 +24,8 @@ import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; import org.polypheny.db.adapter.DataStore; -import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.ddl.DdlManager; import org.polypheny.db.ddl.exception.IndexPreventsRemovalException; import org.polypheny.db.ddl.exception.LastPlacementException; @@ -123,7 +123,7 @@ public void execute( Context context, Statement statement, QueryParameters param } // You can't partition placements if the table is not partitioned - if ( !catalogTable.partitionProperty.isPartitioned && (!partitionGroupList.isEmpty() || !partitionGroupNamesList.isEmpty()) ) { + if ( !statement.getTransaction().getSnapshot().getAllocSnapshot().getPartitionProperty( catalogTable.id ).isPartitioned && (!partitionGroupList.isEmpty() || !partitionGroupNamesList.isEmpty()) ) { throw new RuntimeException( "Partition Placement is not allowed for unpartitioned table '" + catalogTable.name + "'" ); } diff --git a/webui/src/main/java/org/polypheny/db/webui/Crud.java b/webui/src/main/java/org/polypheny/db/webui/Crud.java index 
9199651b92..8bbd2f72b5 100644 --- a/webui/src/main/java/org/polypheny/db/webui/Crud.java +++ b/webui/src/main/java/org/polypheny/db/webui/Crud.java @@ -112,7 +112,7 @@ import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.MaterializedCriteria; import org.polypheny.db.catalog.entity.MaterializedCriteria.CriteriaType; -import org.polypheny.db.catalog.entity.allocation.AllocationTable; +import org.polypheny.db.catalog.entity.allocation.AllocationEntity; import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.exceptions.ColumnAlreadyExistsException; @@ -305,7 +305,7 @@ Result getTable( final UIRequest request ) { // determine if it is a view or a table LogicalTable catalogTable; catalogTable = catalog.getSnapshot().getRelSnapshot( catalog.getSnapshot().getNamespace( t[0] ).id ).getTable( t[1] ); - result.setNamespaceType( catalogTable.getNamespaceType() ); + result.setNamespaceType( catalogTable.namespaceType ); if ( catalogTable.modifiable ) { result.setType( ResultType.TABLE ); } else { @@ -1187,7 +1187,7 @@ void getDataSourceColumns( final Context ctx ) throws UnknownTableException, Unk } ctx.json( new Result( columns.toArray( new DbColumn[0] ), null ).setType( ResultType.VIEW ) ); } else { - List allocs = catalog.getSnapshot().getAllocSnapshot().getAllocationsFromLogical( catalogTable.id ); + List allocs = catalog.getSnapshot().getAllocSnapshot().getAllocationsFromLogical( catalogTable.id ); if ( catalog.getSnapshot().getAllocSnapshot().getAllocationsFromLogical( catalogTable.id ).size() != 1 ) { throw new RuntimeException( "The table has an unexpected number of placements!" ); } @@ -1224,7 +1224,7 @@ void getAvailableSourceColumns( final Context ctx ) throws UnknownTableException LogicalNamespace namespace = catalog.getSnapshot().getNamespace( request.getSchemaName() ); LogicalTable table = catalog.getSnapshot().getRelSnapshot( namespace.id ).getTable( request.getTableName() ); - ImmutableMap> placements = catalog.getSnapshot().getAllocSnapshot().getColumnPlacementsByAdapter( table.id ); + Map> placements = catalog.getSnapshot().getAllocSnapshot().getColumnPlacementsByAdapter( table.id ); Set adapterIds = placements.keySet(); if ( adapterIds.size() > 1 ) { log.warn( String.format( "The number of sources of an entity should not be > 1 (%s.%s)", request.getSchemaName(), request.getTableName() ) ); From 301b4170442aeb94a06ac6426238605ac890eb56 Mon Sep 17 00:00:00 2001 From: datomo Date: Tue, 14 Mar 2023 17:08:02 +0100 Subject: [PATCH 048/436] startup queries fixed, adjusted some of the routing --- .../org/polypheny/db/adapter/Adapter.java | 5 +- .../algebra/AlgStructuredTypeFlattener.java | 5 ++ .../polypheny/db/algebra/rules/ScanRule.java | 5 +- .../org/polypheny/db/catalog/Catalog.java | 2 + .../org/polypheny/db/catalog/IdBuilder.java | 10 ++++ .../catalog/catalogs/AllocationCatalog.java | 1 + .../catalogs/AllocationRelationalCatalog.java | 4 +- .../db/catalog/catalogs/PhysicalCatalog.java | 6 ++- .../db/catalog/entity/CatalogEntity.java | 3 +- .../entity/allocation/AllocationTable.java | 16 +++++-- .../entity/physical/PhysicalCollection.java | 4 +- .../entity/physical/PhysicalEntity.java | 4 +- .../entity/physical/PhysicalGraph.java | 4 +- .../entity/physical/PhysicalTable.java | 14 ++++-- .../refactor/ProjectableFilterableEntity.java | 7 +++ .../db/catalog/snapshot/AllocSnapshot.java | 2 +- .../db/catalog/snapshot/PhysicalSnapshot.java | 2 + 
.../snapshot/impl/AllocSnapshotImpl.java | 4 +- .../snapshot/impl/PhysicalSnapshotImpl.java | 29 ++++++++++- .../polypheny/db/interpreter/Bindables.java | 4 +- .../polypheny/db/interpreter/ScanNode.java | 16 +++---- .../org/polypheny/db/plan/AlgOptCluster.java | 2 +- .../org/polypheny/db/plan/AlgTraitSet.java | 2 +- .../org/polypheny/db/plan/Convention.java | 2 +- .../java/org/polypheny/db/PolyphenyDb.java | 1 + .../db/processing/DataMigratorImpl.java | 2 +- .../db/routing/routers/BaseRouter.java | 48 ++++++++++++------- .../db/routing/routers/CachedPlanRouter.java | 2 +- .../routers/FullPlacementQueryRouter.java | 24 +++++----- .../db/routing/routers/IcarusRouter.java | 4 +- .../db/routing/routers/SimpleRouter.java | 10 ++-- .../monitoring/statistics/StatisticTable.java | 8 ++-- .../db/adapter/csv/CsvFilterableTable.java | 4 +- .../db/adapter/csv/CsvScannableTable.java | 7 ++- .../polypheny/db/adapter/csv/CsvSchema.java | 19 +++----- .../polypheny/db/adapter/csv/CsvSource.java | 6 +-- .../polypheny/db/adapter/csv/CsvTable.java | 6 ++- .../db/adapter/csv/CsvTranslatableTable.java | 4 +- .../db/hsqldb/stores/HsqldbStore.java | 7 +-- .../polypheny/db/adapter/jdbc/JdbcEntity.java | 5 +- .../polypheny/db/adapter/jdbc/JdbcSchema.java | 2 + .../jdbc/stores/AbstractJdbcStore.java | 3 +- .../org/polypheny/db/catalog/PolyCatalog.java | 48 +++++++++++++++---- .../allocation/PolyAllocRelCatalog.java | 14 +----- .../catalog/physical/PolyPhysicalCatalog.java | 7 +++ 45 files changed, 254 insertions(+), 130 deletions(-) rename {plugins/poly-catalog => core}/src/main/java/org/polypheny/db/catalog/IdBuilder.java (94%) diff --git a/core/src/main/java/org/polypheny/db/adapter/Adapter.java b/core/src/main/java/org/polypheny/db/adapter/Adapter.java index 4ad2650c8a..539d05ae84 100644 --- a/core/src/main/java/org/polypheny/db/adapter/Adapter.java +++ b/core/src/main/java/org/polypheny/db/adapter/Adapter.java @@ -47,6 +47,7 @@ import lombok.experimental.Accessors; import org.polypheny.db.adapter.DeployMode.DeploySetting; import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.IdBuilder; import org.polypheny.db.catalog.entity.CatalogCollectionPlacement; import org.polypheny.db.catalog.entity.allocation.AllocationTable; import org.polypheny.db.catalog.entity.logical.LogicalCollection; @@ -325,8 +326,8 @@ public Adapter( long adapterId, String uniqueName, Map settings public abstract void createNewSchema( Snapshot snapshot, String name, long id ); - @Deprecated - public abstract PhysicalTable createAdapterTable( LogicalTable logical, AllocationTable allocationTable ); + + public abstract List createAdapterTable( IdBuilder idBuilder, LogicalTable logical, AllocationTable allocationTable ); public abstract Namespace getCurrentSchema(); diff --git a/core/src/main/java/org/polypheny/db/algebra/AlgStructuredTypeFlattener.java b/core/src/main/java/org/polypheny/db/algebra/AlgStructuredTypeFlattener.java index 499d3eaa4b..2b951c34c0 100644 --- a/core/src/main/java/org/polypheny/db/algebra/AlgStructuredTypeFlattener.java +++ b/core/src/main/java/org/polypheny/db/algebra/AlgStructuredTypeFlattener.java @@ -863,6 +863,11 @@ private boolean isConstructor( RexNode rexNode ) { @SuppressWarnings("unused") public void rewriteAlg( LogicalRelScan alg ) { + if ( alg.entity.unwrap( TranslatableEntity.class ) == null ) { + rewriteGeneric( alg ); + return; + } + AlgNode newAlg = alg.entity.unwrap( TranslatableEntity.class ).toAlg( toAlgContext, alg.traitSet ); if ( !PolyTypeUtil.isFlat( alg.getRowType() ) ) { final 
List> flattenedExpList = new ArrayList<>(); diff --git a/core/src/main/java/org/polypheny/db/algebra/rules/ScanRule.java b/core/src/main/java/org/polypheny/db/algebra/rules/ScanRule.java index f4758177cd..275a9aa0e7 100644 --- a/core/src/main/java/org/polypheny/db/algebra/rules/ScanRule.java +++ b/core/src/main/java/org/polypheny/db/algebra/rules/ScanRule.java @@ -21,9 +21,7 @@ import org.polypheny.db.algebra.core.AlgFactories; import org.polypheny.db.algebra.logical.relational.LogicalRelScan; import org.polypheny.db.catalog.refactor.TranslatableEntity; -import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgOptEntity; -import org.polypheny.db.plan.AlgOptEntity.ToAlgContext; import org.polypheny.db.plan.AlgOptRule; import org.polypheny.db.plan.AlgOptRuleCall; import org.polypheny.db.tools.AlgBuilderFactory; @@ -50,6 +48,9 @@ public ScanRule( AlgBuilderFactory algBuilderFactory ) { @Override public void onMatch( AlgOptRuleCall call ) { final LogicalRelScan oldAlg = call.alg( 0 ); + if ( oldAlg.getEntity().unwrap( TranslatableEntity.class ) == null ) { + return; + } AlgNode newAlg = oldAlg.getEntity().unwrap( TranslatableEntity.class ).toAlg( oldAlg::getCluster, oldAlg.getTraitSet() ); call.transformTo( newAlg ); } diff --git a/core/src/main/java/org/polypheny/db/catalog/Catalog.java b/core/src/main/java/org/polypheny/db/catalog/Catalog.java index b7012f999c..c772b430ff 100644 --- a/core/src/main/java/org/polypheny/db/catalog/Catalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/Catalog.java @@ -74,6 +74,8 @@ public static Catalog getInstance() { } + public abstract void init(); + public abstract void commit() throws NoTablePrimaryKeyException; public abstract void rollback(); diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/IdBuilder.java b/core/src/main/java/org/polypheny/db/catalog/IdBuilder.java similarity index 94% rename from plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/IdBuilder.java rename to core/src/main/java/org/polypheny/db/catalog/IdBuilder.java index ba68864266..a7e4de94ec 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/IdBuilder.java +++ b/core/src/main/java/org/polypheny/db/catalog/IdBuilder.java @@ -27,6 +27,8 @@ public class IdBuilder { AtomicLong namespaceId; AtomicLong entityId; + AtomicLong physicalId; + AtomicLong allocId; AtomicLong fieldId; @@ -66,6 +68,7 @@ private IdBuilder() { new AtomicLong( 0 ), new AtomicLong( 0 ), new AtomicLong( 0 ), + new AtomicLong( 0 ), new AtomicLong( 0 ) ); } @@ -78,6 +81,7 @@ public IdBuilder( AtomicLong fieldId, AtomicLong userId, AtomicLong allocId, + AtomicLong physicalId, AtomicLong indexId, AtomicLong keyId, AtomicLong adapterId, @@ -94,6 +98,7 @@ public IdBuilder( this.keyId = keyId; this.userId = userId; this.allocId = allocId; + this.physicalId = physicalId; this.constraintId = constraintId; this.adapterId = adapterId; @@ -160,4 +165,9 @@ public long getNewConstraintId() { return constraintId.getAndIncrement(); } + + public long getNewPhysicalId() { + return physicalId.getAndIncrement(); + } + } diff --git a/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationCatalog.java b/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationCatalog.java index 9518d9a08d..3c78906003 100644 --- a/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationCatalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationCatalog.java @@ -22,4 +22,5 @@ public interface AllocationCatalog { LogicalNamespace 
getNamespace(); + } diff --git a/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationRelationalCatalog.java b/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationRelationalCatalog.java index 22eb3e54f6..feb3fe51ab 100644 --- a/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationRelationalCatalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationRelationalCatalog.java @@ -65,11 +65,11 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { /** * Update physical position of a column placement on a specified adapter. * - * @param adapterId The id of the adapter + * @param allocId The id of the adapter * @param columnId The id of the column * @param position The physical position to set */ - void updateColumnPlacementPhysicalPosition( long adapterId, long columnId, long position ); + void updateColumnPlacementPhysicalPosition( long allocId, long columnId, long position ); /** * Update physical position of a column placement on a specified adapter. Uses auto-increment to get the globally increasing number. diff --git a/core/src/main/java/org/polypheny/db/catalog/catalogs/PhysicalCatalog.java b/core/src/main/java/org/polypheny/db/catalog/catalogs/PhysicalCatalog.java index 7754d07767..fd568aed8d 100644 --- a/core/src/main/java/org/polypheny/db/catalog/catalogs/PhysicalCatalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/catalogs/PhysicalCatalog.java @@ -16,6 +16,8 @@ package org.polypheny.db.catalog.catalogs; +import java.util.List; +import java.util.concurrent.ConcurrentHashMap; import org.polypheny.db.catalog.entity.physical.PhysicalEntity; public interface PhysicalCatalog { @@ -23,6 +25,8 @@ public interface PhysicalCatalog { void addPhysicalEntity( PhysicalEntity physicalEntity ); - java.util.concurrent.ConcurrentHashMap getPhysicals(); + ConcurrentHashMap getPhysicals(); + + void addEntities( List physicals ); } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogEntity.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogEntity.java index 1193222e45..ab1cd9b356 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogEntity.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogEntity.java @@ -33,6 +33,7 @@ import org.polypheny.db.catalog.refactor.Expressible; import org.polypheny.db.plan.AlgMultipleTrait; import org.polypheny.db.schema.Statistic; +import org.polypheny.db.schema.Statistics; import org.polypheny.db.schema.Wrapper; import org.polypheny.db.util.ImmutableBitSet; @@ -102,7 +103,7 @@ public double getRowCount() { public List getCollations() { - return null; + return (List) Statistics.UNKNOWN.getCollations(); } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationTable.java b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationTable.java index 2fcb319881..99a656b1ac 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationTable.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationTable.java @@ -72,18 +72,18 @@ public Map getColumnNames() { public Map getColumns() { - return Catalog.getInstance().getSnapshot().getRelSnapshot( namespaceId ).getColumns( id ).stream().collect( Collectors.toMap( c -> c.id, c -> c ) ); + return Catalog.getInstance().getSnapshot().getRelSnapshot( namespaceId ).getColumns( logicalId ).stream().collect( Collectors.toMap( c -> c.id, c -> c ) ); } public String getNamespaceName() { - return 
Catalog.getInstance().getSnapshot().getNamespace( id ).name; + return Catalog.getInstance().getSnapshot().getNamespace( namespaceId ).name; } - public AllocationTable withAddedColumn( long columnId, PlacementType placementType, String physicalSchemaName, String physicalTableName, String physicalColumnName ) { + public AllocationTable withAddedColumn( long columnId, PlacementType placementType, String physicalSchemaName, String physicalTableName, String physicalColumnName, int position ) { List placements = new ArrayList<>( this.placements ); - placements.add( new CatalogColumnPlacement( namespaceId, id, columnId, adapterId, placementType, physicalSchemaName, physicalColumnName, 0 ) ); + placements.add( new CatalogColumnPlacement( namespaceId, id, columnId, adapterId, placementType, physicalSchemaName, physicalColumnName, position ) ); return new AllocationTable( id, logicalId, namespaceId, adapterId, placements ); } @@ -104,4 +104,12 @@ public Map getColumnNamesId() { return getColumnNames().entrySet().stream().collect( Collectors.toMap( Entry::getValue, Entry::getKey ) ); } + + public List getColumnOrder() { + List columns = new ArrayList<>( placements ); + columns.sort( ( a, b ) -> Math.toIntExact( a.physicalPosition - b.physicalPosition ) ); + + return columns.stream().map( c -> c.columnId ).collect( Collectors.toList() ); + } + } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalCollection.java b/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalCollection.java index 2334e8df2d..0a19af328b 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalCollection.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalCollection.java @@ -31,8 +31,8 @@ @NonFinal public class PhysicalCollection extends PhysicalEntity { - public PhysicalCollection( long id, long logicalId, long namespaceId, String name, String namespaceName, EntityType type, long adapterId ) { - super( id, logicalId, name, namespaceId, namespaceName, type, NamespaceType.DOCUMENT, adapterId ); + public PhysicalCollection( long id, long logicalId, long allocationId, long namespaceId, String name, String namespaceName, EntityType type, long adapterId ) { + super( id, logicalId, allocationId, name, namespaceId, namespaceName, type, NamespaceType.DOCUMENT, adapterId ); } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalEntity.java b/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalEntity.java index 2c4201ae85..898fbde9da 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalEntity.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalEntity.java @@ -31,10 +31,12 @@ public abstract class PhysicalEntity extends CatalogEntity { public String namespaceName; public long adapterId; public long logicalId; + public long allocationId; - protected PhysicalEntity( long id, long logicalId, String name, long namespaceId, String namespaceName, EntityType type, NamespaceType namespaceType, long adapterId ) { + protected PhysicalEntity( long id, long logicalId, long allocationId, String name, long namespaceId, String namespaceName, EntityType type, NamespaceType namespaceType, long adapterId ) { super( id, name, namespaceId, type, namespaceType ); + this.allocationId = allocationId; this.namespaceName = namespaceName; this.adapterId = adapterId; this.logicalId = logicalId; diff --git 
a/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalGraph.java b/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalGraph.java index 6d9daee1ed..1084aee201 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalGraph.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalGraph.java @@ -31,8 +31,8 @@ @NonFinal public class PhysicalGraph extends PhysicalEntity { - public PhysicalGraph( long id, long logicalId, String name, EntityType type, long adapterId ) { - super( id, logicalId, name, id, name, type, NamespaceType.GRAPH, adapterId ); // for graph both name and namespaceName are the same + public PhysicalGraph( long id, long logicalId, long allocationId, String name, EntityType type, long adapterId ) { + super( id, logicalId, allocationId, name, id, name, type, NamespaceType.GRAPH, adapterId ); // for graph both name and namespaceName are the same } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalTable.java b/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalTable.java index 621d7aaccb..c651aa6690 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalTable.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalTable.java @@ -16,8 +16,10 @@ package org.polypheny.db.catalog.entity.physical; +import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import java.io.Serializable; +import java.util.List; import java.util.Map; import lombok.EqualsAndHashCode; import lombok.Value; @@ -45,18 +47,20 @@ public class PhysicalTable extends PhysicalEntity { public String namespaceName; public ImmutableMap types; + public ImmutableList order; - public PhysicalTable( long id, long logicalId, String name, long namespaceId, String namespaceName, long adapterId, Map columns, Map types ) { - super( id, logicalId, name, namespaceId, namespaceName, EntityType.ENTITY, NamespaceType.RELATIONAL, adapterId ); + public PhysicalTable( long id, long logicalId, long allocationId, String name, long namespaceId, String namespaceName, long adapterId, Map columns, Map types, List order ) { + super( id, logicalId, allocationId, name, namespaceId, namespaceName, EntityType.ENTITY, NamespaceType.RELATIONAL, adapterId ); this.namespaceName = namespaceName; this.columns = ImmutableMap.copyOf( columns ); this.types = ImmutableMap.copyOf( types ); + this.order = ImmutableList.copyOf( order ); } - public PhysicalTable( AllocationTable table, String name, String namespaceName, Map columns, Map types ) { - this( table.id, table.logicalId, name, table.namespaceId, namespaceName, table.adapterId, columns, types ); + public PhysicalTable( long id, AllocationTable table, String name, String namespaceName, Map columns, Map types, List order ) { + this( id, table.logicalId, table.id, name, table.namespaceId, namespaceName, table.adapterId, columns, types, order ); } @@ -70,7 +74,7 @@ public AlgProtoDataType buildProto() { final AlgDataTypeFactory typeFactory = new PolyTypeFactoryImpl( AlgDataTypeSystem.DEFAULT ); final AlgDataTypeFactory.Builder fieldInfo = typeFactory.builder(); - for ( long id : columns.keySet() ) { + for ( long id : order ) { fieldInfo.add( columns.get( id ), columns.get( id ), types.get( id ) ).nullable( types.get( id ).isNullable() ); } diff --git a/core/src/main/java/org/polypheny/db/catalog/refactor/ProjectableFilterableEntity.java 
b/core/src/main/java/org/polypheny/db/catalog/refactor/ProjectableFilterableEntity.java index f5343c85ef..13d3afc900 100644 --- a/core/src/main/java/org/polypheny/db/catalog/refactor/ProjectableFilterableEntity.java +++ b/core/src/main/java/org/polypheny/db/catalog/refactor/ProjectableFilterableEntity.java @@ -16,6 +16,13 @@ package org.polypheny.db.catalog.refactor; +import java.util.List; +import org.apache.calcite.linq4j.Enumerable; +import org.polypheny.db.adapter.DataContext; +import org.polypheny.db.rex.RexNode; + public interface ProjectableFilterableEntity { + Enumerable scan( DataContext root, List mutableFilters, int[] projectInts ); + } diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/AllocSnapshot.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/AllocSnapshot.java index 2c8396bdeb..6b7d914ead 100644 --- a/core/src/main/java/org/polypheny/db/catalog/snapshot/AllocSnapshot.java +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/AllocSnapshot.java @@ -405,7 +405,7 @@ public interface AllocSnapshot { PartitionProperty getPartitionProperty( long id ); - boolean adapterHasPlacement( long adapterId, long id ); + boolean adapterHasPlacement( long adapterId, long logicalId ); AllocationEntity getAllocation( long adapterId, long entityId ); diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/PhysicalSnapshot.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/PhysicalSnapshot.java index 3beac4d673..8877c5eb59 100644 --- a/core/src/main/java/org/polypheny/db/catalog/snapshot/PhysicalSnapshot.java +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/PhysicalSnapshot.java @@ -46,4 +46,6 @@ public interface PhysicalSnapshot { List fromLogical( long id ); + List fromAlloc( long id ); + } diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/AllocSnapshotImpl.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/AllocSnapshotImpl.java index 2d44a627a4..021dbaa36a 100644 --- a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/AllocSnapshotImpl.java +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/AllocSnapshotImpl.java @@ -522,8 +522,8 @@ public PartitionProperty getPartitionProperty( long id ) { @Override - public boolean adapterHasPlacement( long adapterId, long id ) { - return false; + public boolean adapterHasPlacement( long adapterId, long logicalId ) { + return adapterLogicalTableAlloc.containsKey( Pair.of( adapterId, logicalId ) ); } diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/PhysicalSnapshotImpl.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/PhysicalSnapshotImpl.java index a84da1c47d..09e812aadb 100644 --- a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/PhysicalSnapshotImpl.java +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/PhysicalSnapshotImpl.java @@ -42,12 +42,28 @@ public class PhysicalSnapshotImpl implements PhysicalSnapshot { ImmutableMap> logicalToPhysicals; + ImmutableMap> allocToPhysicals; + public PhysicalSnapshotImpl( Map physicalCatalogs ) { this.entities = ImmutableMap.copyOf( physicalCatalogs.values().stream().flatMap( c -> c.getPhysicals().entrySet().stream() ).collect( Collectors.toMap( Entry::getKey, Entry::getValue ) ) ); this.adapterLogicalEntity = buildAdapterLogicalEntity(); this.adapterPhysicals = buildAdapterPhysicals(); this.logicalToPhysicals = buildLogicalToPhysicals(); + this.allocToPhysicals = buildAllocToPhysicals(); + } + + + private ImmutableMap> 
buildAllocToPhysicals() { + Map> map = new HashMap<>(); + this.entities.forEach( ( k, v ) -> { + if ( !map.containsKey( v.allocationId ) ) { + map.put( v.allocationId, new ArrayList<>() ); + } + map.get( v.allocationId ).add( v ); + } ); + + return ImmutableMap.copyOf( map ); } @@ -85,7 +101,10 @@ private ImmutableMap, PhysicalEntity> buildAdapterLogicalEntity @Override public PhysicalTable getPhysicalTable( long id ) { - return entities.get( id ).unwrap( PhysicalTable.class ); + if ( entities.get( id ) != null ) { + return entities.get( id ).unwrap( PhysicalTable.class ); + } + return null; } @@ -131,8 +150,16 @@ public PhysicalEntity getPhysicalEntity( long id ) { } + @Override public List fromLogical( long id ) { return null; } + + @Override + public List fromAlloc( long id ) { + return allocToPhysicals.get( id ); + } + + } diff --git a/core/src/main/java/org/polypheny/db/interpreter/Bindables.java b/core/src/main/java/org/polypheny/db/interpreter/Bindables.java index d52eb27058..e31c8bf825 100644 --- a/core/src/main/java/org/polypheny/db/interpreter/Bindables.java +++ b/core/src/main/java/org/polypheny/db/interpreter/Bindables.java @@ -78,6 +78,8 @@ import org.polypheny.db.algebra.type.AlgDataTypeFactory; import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.catalog.entity.CatalogEntity; +import org.polypheny.db.catalog.refactor.FilterableEntity; +import org.polypheny.db.catalog.refactor.ScannableEntity; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgOptCost; import org.polypheny.db.plan.AlgOptPlanner; @@ -88,9 +90,7 @@ import org.polypheny.db.plan.Convention; import org.polypheny.db.rex.RexLiteral; import org.polypheny.db.rex.RexNode; -import org.polypheny.db.schema.FilterableEntity; import org.polypheny.db.schema.ProjectableFilterableEntity; -import org.polypheny.db.schema.ScannableEntity; import org.polypheny.db.tools.AlgBuilderFactory; import org.polypheny.db.util.ImmutableBitSet; import org.polypheny.db.util.ImmutableIntList; diff --git a/core/src/main/java/org/polypheny/db/interpreter/ScanNode.java b/core/src/main/java/org/polypheny/db/interpreter/ScanNode.java index 920d257dcb..2423c57936 100644 --- a/core/src/main/java/org/polypheny/db/interpreter/ScanNode.java +++ b/core/src/main/java/org/polypheny/db/interpreter/ScanNode.java @@ -53,14 +53,14 @@ import org.polypheny.db.algebra.type.AlgDataTypeFactory; import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.refactor.FilterableEntity; +import org.polypheny.db.catalog.refactor.ProjectableFilterableEntity; +import org.polypheny.db.catalog.refactor.QueryableEntity; +import org.polypheny.db.catalog.refactor.ScannableEntity; import org.polypheny.db.plan.AlgOptUtil; import org.polypheny.db.rex.RexNode; import org.polypheny.db.rex.RexUtil; import org.polypheny.db.runtime.Enumerables; -import org.polypheny.db.schema.FilterableEntity; -import org.polypheny.db.schema.ProjectableFilterableEntity; -import org.polypheny.db.schema.QueryableEntity; -import org.polypheny.db.schema.ScannableEntity; import org.polypheny.db.schema.Schemas; import org.polypheny.db.util.ImmutableBitSet; import org.polypheny.db.util.ImmutableIntList; @@ -73,7 +73,7 @@ */ public class ScanNode implements Node { - private ScanNode( Compiler compiler, RelScan alg, Enumerable enumerable ) { + private ScanNode( Compiler compiler, RelScan alg, Enumerable enumerable ) { compiler.enumerable( alg, enumerable ); } @@ -170,7 +170,7 @@ private static 
ScanNode createFilterable( Compiler compiler, RelScan alg, Imm } - private static ScanNode createProjectableFilterable( Compiler compiler, RelScan alg, ImmutableList filters, ImmutableIntList projects, ProjectableFilterableEntity pfTable ) { + private static ScanNode createProjectableFilterable( Compiler compiler, RelScan alg, ImmutableList filters, ImmutableIntList projects, ProjectableFilterableEntity pfTable ) { final DataContext root = compiler.getDataContext(); final ImmutableIntList originalProjects = projects; for ( ; ; ) { @@ -215,7 +215,7 @@ private static ScanNode createProjectableFilterable( Compiler compiler, RelScan } - private static ScanNode createEnumerable( Compiler compiler, RelScan alg, Enumerable enumerable, final ImmutableIntList acceptedProjects, List rejectedFilters, final ImmutableIntList rejectedProjects ) { + private static ScanNode createEnumerable( Compiler compiler, RelScan alg, Enumerable enumerable, final ImmutableIntList acceptedProjects, List rejectedFilters, final ImmutableIntList rejectedProjects ) { if ( !rejectedFilters.isEmpty() ) { final RexNode filter = RexUtil.composeConjunction( alg.getCluster().getRexBuilder(), rejectedFilters ); // Re-map filter for the projects that have been applied already @@ -244,7 +244,7 @@ private static ScanNode createEnumerable( Compiler compiler, RelScan alg, Enumer } if ( rejectedProjects != null ) { enumerable = enumerable.select( - new Function1() { + new Function1<>() { final Object[] values = new Object[rejectedProjects.size()]; diff --git a/core/src/main/java/org/polypheny/db/plan/AlgOptCluster.java b/core/src/main/java/org/polypheny/db/plan/AlgOptCluster.java index 1266bebaf7..ee816d5ed9 100644 --- a/core/src/main/java/org/polypheny/db/plan/AlgOptCluster.java +++ b/core/src/main/java/org/polypheny/db/plan/AlgOptCluster.java @@ -89,7 +89,7 @@ private AlgOptCluster( AlgOptPlanner planner, AlgDataTypeFactory typeFactory, Re // set up a default alg metadata provider, giving the planner first crack at everything setMetadataProvider( DefaultAlgMetadataProvider.INSTANCE ); - this.emptyTraitSet = traitSet == null ? AlgTraitSet.createEmpty() : traitSet; + this.emptyTraitSet = traitSet == null ? planner.emptyTraitSet() : traitSet; assert emptyTraitSet.size() == planner.getAlgTraitDefs().size(); this.snapshot = snapshot; } diff --git a/core/src/main/java/org/polypheny/db/plan/AlgTraitSet.java b/core/src/main/java/org/polypheny/db/plan/AlgTraitSet.java index 28b3db9aab..21e8f50eb2 100644 --- a/core/src/main/java/org/polypheny/db/plan/AlgTraitSet.java +++ b/core/src/main/java/org/polypheny/db/plan/AlgTraitSet.java @@ -196,7 +196,7 @@ public AlgTraitSet replace( AlgTrait trait ) { if ( containsShallow( traits, trait ) ) { return this; } - final AlgTraitDef traitDef = trait.getTraitDef(); + final AlgTraitDef traitDef = trait.getTraitDef(); int index = findIndex( traitDef ); if ( index < 0 ) { // Trait is not present. Ignore it. 
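Two side notes on hunks from this commit, given as sketches rather than as changes to the patch itself.

AllocationTable.getColumnOrder() above sorts placements with Math.toIntExact( a.physicalPosition - b.physicalPosition ); the long difference can exceed the int range, in which case toIntExact throws. An overflow-safe variant with the same result order:

    public List<Long> getColumnOrder() {
        // Comparator.comparingLong sidesteps the narrowing subtraction entirely
        return placements.stream()
                .sorted( Comparator.comparingLong( c -> c.physicalPosition ) )
                .map( c -> c.columnId )
                .collect( Collectors.toList() );
    }

Likewise, buildAllocToPhysicals() in PhysicalSnapshotImpl groups entities through a containsKey/put pair; computeIfAbsent expresses the same grouping in one step, assuming the entities map and the public allocationId field introduced above:

    private ImmutableMap<Long, List<PhysicalEntity>> buildAllocToPhysicals() {
        Map<Long, List<PhysicalEntity>> map = new HashMap<>();
        // computeIfAbsent creates the bucket on first use, then appends
        this.entities.forEach( ( k, v ) -> map.computeIfAbsent( v.allocationId, id -> new ArrayList<>() ).add( v ) );
        return ImmutableMap.copyOf( map );
    }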
diff --git a/core/src/main/java/org/polypheny/db/plan/Convention.java b/core/src/main/java/org/polypheny/db/plan/Convention.java index 42ed67d1f7..09b828770a 100644 --- a/core/src/main/java/org/polypheny/db/plan/Convention.java +++ b/core/src/main/java/org/polypheny/db/plan/Convention.java @@ -121,7 +121,7 @@ public String getName() { @Override - public AlgTraitDef getTraitDef() { + public AlgTraitDef getTraitDef() { return ConventionTraitDef.INSTANCE; } diff --git a/dbms/src/main/java/org/polypheny/db/PolyphenyDb.java b/dbms/src/main/java/org/polypheny/db/PolyphenyDb.java index 975aa95427..5b64190e1b 100644 --- a/dbms/src/main/java/org/polypheny/db/PolyphenyDb.java +++ b/dbms/src/main/java/org/polypheny/db/PolyphenyDb.java @@ -341,6 +341,7 @@ public void join( final long millis ) throws InterruptedException { Catalog.defaultStore = Adapter.fromString( defaultStoreName, AdapterType.STORE ); Catalog.defaultSource = Adapter.fromString( defaultSourceName, AdapterType.SOURCE ); catalog = PolyPluginManager.getCATALOG_SUPPLIER().get(); + catalog.init(); if ( catalog == null ) { throw new RuntimeException( "There was no catalog submitted, aborting." ); } diff --git a/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java b/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java index f31f806d9c..35ec13aa33 100644 --- a/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java +++ b/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java @@ -461,7 +461,7 @@ public AlgRoot getSourceIterator( Statement statement, Map physicals = snapshot.getPhysicalSnapshot().fromAlloc( allocId ); + PhysicalEntity physical = physicals.get( 0 ); - PhysicalEntity physical = snapshot.getPhysicalSnapshot().getPhysicalTable( partitionId ); AlgNode node = builder.scan( physical ).build(); builder.push( node ); @@ -248,21 +247,31 @@ protected List handleGeneric( AlgNode node, List> placements ) { + public AlgNode buildJoinedScan( Statement statement, AlgOptCluster cluster, List allocationEntities ) { RoutedAlgBuilder builder = RoutedAlgBuilder.create( statement, cluster ); if ( RuntimeConfig.JOINED_TABLE_SCAN_CACHE.getBoolean() ) { - AlgNode cachedNode = joinedScanCache.getIfPresent( placements.hashCode() ); + AlgNode cachedNode = joinedScanCache.getIfPresent( allocationEntities.hashCode() ); if ( cachedNode != null ) { return cachedNode; } } + if ( allocationEntities.size() == 1 ) { + builder = handleScan( + builder, + statement, + allocationEntities.get( 0 ).id ); + // Final project + buildFinalProject( builder, allocationEntities.get( 0 ).unwrap( AllocationTable.class ) ); + + } - for ( Map.Entry> partitionToPlacement : placements.entrySet() ) { + + /*for ( Map.Entry> partitionToPlacement : allocationEntities.entrySet() ) { long partitionId = partitionToPlacement.getKey(); List currentPlacements = partitionToPlacement.getValue(); // Sort by adapter - Map> placementsByAdapter = new HashMap<>(); + /*Map> placementsByAdapter = new HashMap<>(); for ( CatalogColumnPlacement placement : currentPlacements ) { if ( !placementsByAdapter.containsKey( placement.adapterId ) ) { placementsByAdapter.put( placement.adapterId, new LinkedList<>() ); @@ -274,6 +283,7 @@ public AlgNode buildJoinedScan( Statement statement, AlgOptCluster cluster, Map< // List ccps = placementsByAdapter.values().iterator().next(); // CatalogColumnPlacement ccp = ccps.get( 0 ); // CatalogPartitionPlacement cpp = catalog.getPartitionPlacement( ccp.adapterId, partitionId ); + partitionId = 
snapshot.getAllocSnapshot().getAllocation( partitionId, currentPlacements.get( 0 ).tableId ).id; builder = handleScan( builder, @@ -345,16 +355,16 @@ public AlgNode buildJoinedScan( Statement statement, AlgOptCluster cluster, Map< } else { throw new RuntimeException( "The table '" + currentPlacements.get( 0 ).getLogicalTableName() + "' seems to have no placement. This should not happen!" ); } - } + }*/ - builder.union( true, placements.size() ); + builder.union( true, allocationEntities.size() ); AlgNode node = builder.build(); if ( RuntimeConfig.JOINED_TABLE_SCAN_CACHE.getBoolean() ) { - joinedScanCache.put( placements.hashCode(), node ); + joinedScanCache.put( allocationEntities.hashCode(), node ); } - CatalogColumnPlacement placement = new ArrayList<>( placements.values() ).get( 0 ).get( 0 ); + CatalogColumnPlacement placement = allocationEntities.get( 0 ).unwrap( AllocationTable.class ).placements.get( 0 ); // todo dl: remove after RowType refactor if ( snapshot.getNamespace( placement.namespaceId ).namespaceType == NamespaceType.DOCUMENT ) { AlgDataType rowType = new AlgRecordType( List.of( new AlgDataTypeFieldImpl( "d", 0, cluster.getTypeFactory().createPolyType( PolyType.DOCUMENT ) ) ) ); @@ -374,15 +384,19 @@ public AlgNode buildJoinedScan( Statement statement, AlgOptCluster cluster, Map< } - private void buildFinalProject( RoutedAlgBuilder builder, List currentPlacements ) { + private void buildFinalProject( RoutedAlgBuilder builder, AllocationTable entity ) { List rexNodes = new ArrayList<>(); - List placementList = currentPlacements.stream() + /*List placementList = currentPlacements.stream() .map( col -> snapshot.getRelSnapshot( currentPlacements.get( 0 ).namespaceId ).getColumn( col.columnId ) ) .sorted( Comparator.comparingInt( col -> col.position ) ) .collect( Collectors.toList() ); for ( LogicalColumn logicalColumn : placementList ) { rexNodes.add( builder.field( logicalColumn.name ) ); + }*/ + for ( String name : entity.getColumnNames().values() ) { + rexNodes.add( builder.field( name ) ); } + builder.project( rexNodes ); } @@ -433,7 +447,7 @@ private AlgNode handleGraphOnRelational( LogicalLpgScan alg, CatalogNamespace na AlgOptCluster cluster = alg.getCluster(); List tables = snapshot.getRelSnapshot( namespace.id ).getTables( null ); List> scans = tables.stream() - .map( t -> Pair.of( t.name, buildJoinedScan( statement, cluster, selectPlacement( t ) ) ) ) + .map( t -> Pair.of( t.name, buildJoinedScan( statement, cluster, null ) ) ) .collect( Collectors.toList() ); Builder infoBuilder = cluster.getTypeFactory().builder(); diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/CachedPlanRouter.java b/dbms/src/main/java/org/polypheny/db/routing/routers/CachedPlanRouter.java index 0b61461488..ca3579f6c1 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/CachedPlanRouter.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/CachedPlanRouter.java @@ -75,7 +75,7 @@ private RoutedAlgBuilder buildCachedSelect( AlgNode node, RoutedAlgBuilder build } } - return builder.push( super.buildJoinedScan( statement, cluster, placement ) ); + return builder.push( super.buildJoinedScan( statement, cluster, null ) ); } else if ( node instanceof LogicalValues ) { return super.handleValues( (LogicalValues) node, builder ); } else { diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/FullPlacementQueryRouter.java b/dbms/src/main/java/org/polypheny/db/routing/routers/FullPlacementQueryRouter.java index 74b43e8223..c113a636c3 100644 --- 
a/dbms/src/main/java/org/polypheny/db/routing/routers/FullPlacementQueryRouter.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/FullPlacementQueryRouter.java @@ -18,7 +18,6 @@ import java.util.ArrayList; import java.util.Collection; -import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; @@ -28,6 +27,7 @@ import lombok.extern.slf4j.Slf4j; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; +import org.polypheny.db.catalog.entity.allocation.AllocationEntity; import org.polypheny.db.catalog.entity.logical.LogicalEntity; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.partition.PartitionManager; @@ -65,7 +65,7 @@ protected List handleHorizontalPartitioning( for ( RoutedAlgBuilder builder : builders ) { RoutedAlgBuilder newBuilder = RoutedAlgBuilder.createCopy( statement, cluster, builder ); newBuilder.addPhysicalInfo( placementCombination ); - newBuilder.push( super.buildJoinedScan( statement, cluster, placementCombination ) ); + newBuilder.push( super.buildJoinedScan( statement, cluster, null ) ); newBuilders.add( newBuilder ); } } @@ -107,18 +107,20 @@ protected List handleNonePartitioning( final Set> placements = selectPlacement( catalogTable, queryInformation ); List newBuilders = new ArrayList<>(); - for ( List placementCombination : placements ) { + /*for ( List placementCombination : placements ) { Map> currentPlacementDistribution = new HashMap<>(); - PartitionProperty property = snapshot.getAllocSnapshot().getPartitionProperty( catalogTable.id ); - currentPlacementDistribution.put( property.partitionIds.get( 0 ), placementCombination ); + PartitionProperty property = snapshot.getAllocSnapshot().getPartitionProperty( catalogTable.id );*/ + //currentPlacementDistribution.put( property.partitionIds.get( 0 ), placementCombination ); - for ( RoutedAlgBuilder builder : builders ) { - RoutedAlgBuilder newBuilder = RoutedAlgBuilder.createCopy( statement, cluster, builder ); - newBuilder.addPhysicalInfo( currentPlacementDistribution ); - newBuilder.push( super.buildJoinedScan( statement, cluster, currentPlacementDistribution ) ); - newBuilders.add( newBuilder ); - } + List allocationEntities = snapshot.getAllocSnapshot().getAllocationsFromLogical( catalogTable.id ); + + for ( RoutedAlgBuilder builder : builders ) { + RoutedAlgBuilder newBuilder = RoutedAlgBuilder.createCopy( statement, cluster, builder ); + //newBuilder.addPhysicalInfo( currentPlacementDistribution ); + newBuilder.push( super.buildJoinedScan( statement, cluster, allocationEntities ) ); + newBuilders.add( newBuilder ); } + //} builders.clear(); builders.addAll( newBuilders ); diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/IcarusRouter.java b/dbms/src/main/java/org/polypheny/db/routing/routers/IcarusRouter.java index 18609f4004..d4b6bc7741 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/IcarusRouter.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/IcarusRouter.java @@ -77,7 +77,7 @@ protected List handleNonePartitioning( AlgNode node, LogicalTa final RoutedAlgBuilder newBuilder = RoutedAlgBuilder.createCopy( statement, cluster, builders.get( 0 ) ); newBuilder.addPhysicalInfo( currentPlacementDistribution ); - newBuilder.push( super.buildJoinedScan( statement, cluster, currentPlacementDistribution ) ); + newBuilder.push( super.buildJoinedScan( statement, cluster, null ) ); newBuilders.add( newBuilder ); } } else { @@ -116,7 +116,7 @@ 
protected List handleNonePartitioning( AlgNode node, LogicalTa final RoutedAlgBuilder newBuilder = RoutedAlgBuilder.createCopy( statement, cluster, builder ); newBuilder.addPhysicalInfo( currentPlacementDistribution ); - newBuilder.push( super.buildJoinedScan( statement, cluster, currentPlacementDistribution ) ); + newBuilder.push( super.buildJoinedScan( statement, cluster, null ) ); newBuilders.add( newBuilder ); } if ( newBuilders.isEmpty() ) { diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/SimpleRouter.java b/dbms/src/main/java/org/polypheny/db/routing/routers/SimpleRouter.java index 44133865d6..342b860c21 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/SimpleRouter.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/SimpleRouter.java @@ -23,6 +23,7 @@ import lombok.extern.slf4j.Slf4j; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; +import org.polypheny.db.catalog.entity.allocation.AllocationEntity; import org.polypheny.db.catalog.entity.logical.LogicalEntity; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.partition.PartitionManager; @@ -54,11 +55,12 @@ protected List handleVerticalPartitioningOrReplication( AlgNod @Override protected List handleNonePartitioning( AlgNode node, LogicalTable catalogTable, Statement statement, List builders, AlgOptCluster cluster, LogicalQueryInformation queryInformation ) { // Get placements and convert into placement distribution - final Map> placements = selectPlacement( catalogTable ); + // final Map> placements = selectPlacement( catalogTable ); + List entities = snapshot.getAllocSnapshot().getAllocationsFromLogical( catalogTable.id ); // Only one builder available - builders.get( 0 ).addPhysicalInfo( placements ); - builders.get( 0 ).push( super.buildJoinedScan( statement, cluster, placements ) ); + // builders.get( 0 ).addPhysicalInfo( placements ); + builders.get( 0 ).push( super.buildJoinedScan( statement, cluster, entities ) ); return builders; } @@ -79,7 +81,7 @@ protected List handleHorizontalPartitioning( AlgNode node, Log // Only one builder available builders.get( 0 ).addPhysicalInfo( placementDistribution ); - builders.get( 0 ).push( super.buildJoinedScan( statement, cluster, placementDistribution ) ); + builders.get( 0 ).push( super.buildJoinedScan( statement, cluster, null ) ); return builders; } diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticTable.java b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticTable.java index 29d3f615f1..0e8df34f8c 100644 --- a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticTable.java +++ b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticTable.java @@ -17,10 +17,8 @@ package org.polypheny.db.monitoring.statistics; -import com.google.common.collect.ImmutableList; import java.util.ArrayList; import java.util.List; -import java.util.stream.Collectors; import lombok.Getter; import lombok.Setter; import org.polypheny.db.catalog.Catalog; @@ -47,8 +45,8 @@ public class StatisticTable> { @Getter private NamespaceType namespaceType; - @Getter - private ImmutableList dataPlacements; + //@Getter + //private ImmutableList dataPlacements; @Getter private final List availableAdapters = new ArrayList<>(); @@ -80,7 +78,7 @@ public StatisticTable( Long tableId ) { LogicalTable catalogTable = catalog.getSnapshot().getLogicalEntity( tableId ).unwrap( LogicalTable.class ); 
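// Note: unwrap( LogicalTable.class ) returns null when the looked-up entity is not a
// relational table; the assignments below assume a non-null result. A guard like the
// TranslatableEntity null checks added elsewhere in this patch would make that explicit.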
this.table = catalogTable.name; this.namespaceType = catalogTable.namespaceType; - this.dataPlacements = ImmutableList.copyOf( catalog.getSnapshot().getAllocSnapshot().getDataPlacements( catalogTable.id ).stream().map( c -> c.adapterId ).collect( Collectors.toList() ) ); + //this.dataPlacements = ImmutableList.copyOf( catalog.getSnapshot().getAllocSnapshot().getDataPlacements( catalogTable.id ).stream().map( c -> c.adapterId ).collect( Collectors.toList() ) ); this.entityType = catalogTable.entityType; } calls = new TableCalls( tableId, 0, 0, 0, 0 ); diff --git a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvFilterableTable.java b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvFilterableTable.java index fbc08a3fa6..68dcb50d7b 100644 --- a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvFilterableTable.java +++ b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvFilterableTable.java @@ -59,8 +59,8 @@ public class CsvFilterableTable extends CsvTable implements FilterableEntity { /** * Creates a CsvFilterableTable. */ - public CsvFilterableTable( Source source, AllocationTable table, List fieldTypes, int[] fields, CsvSource csvSource ) { - super( source, table, fieldTypes, fields, csvSource ); + public CsvFilterableTable( long id, Source source, AllocationTable table, List fieldTypes, int[] fields, CsvSource csvSource ) { + super( id, source, table, fieldTypes, fields, csvSource ); } diff --git a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvScannableTable.java b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvScannableTable.java index 2cc31e1fdb..477406dee6 100644 --- a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvScannableTable.java +++ b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvScannableTable.java @@ -39,7 +39,6 @@ import org.apache.calcite.linq4j.Enumerable; import org.apache.calcite.linq4j.Enumerator; import org.polypheny.db.adapter.DataContext; -import org.polypheny.db.algebra.type.AlgProtoDataType; import org.polypheny.db.catalog.entity.allocation.AllocationTable; import org.polypheny.db.catalog.refactor.ScannableEntity; import org.polypheny.db.util.Source; @@ -56,8 +55,8 @@ public class CsvScannableTable extends CsvTable implements ScannableEntity { /** * Creates a CsvScannableTable. 
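* The added {@code id} parameter is the physical id drawn from {@code IdBuilder.getNewPhysicalId()}, passed down from {@code CsvSource.createAdapterTable} in this patch.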
*/ - protected CsvScannableTable( Source source, AllocationTable table, List<CsvFieldType> fieldTypes, int[] fields, CsvSource csvSource ) { - super( source, table, fieldTypes, fields, csvSource ); + protected CsvScannableTable( long id, Source source, AllocationTable table, List<CsvFieldType> fieldTypes, int[] fields, CsvSource csvSource ) { + super( id, source, table, fieldTypes, fields, csvSource ); } @@ -70,7 +69,7 @@ public String toString() { public Enumerable<Object[]> scan( DataContext dataContext ) { dataContext.getStatement().getTransaction().registerInvolvedAdapter( csvSource ); final AtomicBoolean cancelFlag = DataContext.Variable.CANCEL_FLAG.get( dataContext ); - return new AbstractEnumerable<Object[]>() { + return new AbstractEnumerable<>() { @Override public Enumerator<Object[]> enumerator() { return new CsvEnumerator<>( source, cancelFlag, false, null, new CsvEnumerator.ArrayRowConverter( fieldTypes, fields ) ); diff --git a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSchema.java b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSchema.java index 871a6fe230..25e9bfe52c 100644 --- a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSchema.java +++ b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSchema.java @@ -83,7 +83,7 @@ public CsvSchema( long id, URL directoryUrl, CsvTable.Flavor flavor ) { } - public PhysicalTable createCsvTable( LogicalTable catalogTable, AllocationTable allocationTable, CsvSource csvSource ) { + public PhysicalTable createCsvTable( long id, LogicalTable catalogTable, AllocationTable allocationTable, CsvSource csvSource ) { final AlgDataTypeFactory typeFactory = new PolyTypeFactoryImpl( AlgDataTypeSystem.DEFAULT ); final AlgDataTypeFactory.Builder fieldInfo = typeFactory.builder(); List<CsvFieldType> fieldTypes = new LinkedList<>(); @@ -96,12 +96,7 @@ public PhysicalTable createCsvTable( LogicalTable catalogTable, AllocationTable fieldIds.add( (int) placement.physicalPosition ); } - String csvFileName = Catalog .getInstance() .getSnapshot() .getAllocSnapshot() .getColumnPlacementsOnAdapterPerTable( csvSource.getAdapterId(), catalogTable.id ).iterator().next() .physicalSchemaName; + String csvFileName = allocationTable.placements.get( 0 ).physicalSchemaName; Source source; try { source = Sources.of( new URL( directoryUrl, csvFileName ) ); @@ -109,7 +104,7 @@ public PhysicalTable createCsvTable( LogicalTable catalogTable, AllocationTable throw new RuntimeException( e ); } int[] fields = fieldIds.stream().mapToInt( i -> i ).toArray(); - CsvTable table = createTable( source, allocationTable, fieldTypes, fields, csvSource ); + CsvTable table = createTable( id, source, allocationTable, fieldTypes, fields, csvSource ); tableMap.put( catalogTable.name + "_" + allocationTable.id, table ); return table; } @@ -118,14 +113,14 @@ public PhysicalTable createCsvTable( LogicalTable catalogTable, AllocationTable /** * Creates different subtype of table based on the "flavor" attribute.
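*
* Editorial usage sketch (hedged, not part of the commit): the new {@code id}
* parameter is expected to be a fresh physical id handed down from the catalog's
* {@code IdBuilder}, mirroring {@code CsvSource#createAdapterTable} below:
* {@code createTable( idBuilder.getNewPhysicalId(), source, allocationTable, fieldTypes, fields, csvSource )}.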
*/ - private CsvTable createTable( Source source, AllocationTable table, List<CsvFieldType> fieldTypes, int[] fields, CsvSource csvSource ) { + private CsvTable createTable( long id, Source source, AllocationTable table, List<CsvFieldType> fieldTypes, int[] fields, CsvSource csvSource ) { switch ( flavor ) { case TRANSLATABLE: - return new CsvTranslatableTable( source, table, fieldTypes, fields, csvSource ); + return new CsvTranslatableTable( id, source, table, fieldTypes, fields, csvSource ); case SCANNABLE: - return new CsvScannableTable( source, table, fieldTypes, fields, csvSource ); + return new CsvScannableTable( id, source, table, fieldTypes, fields, csvSource ); case FILTERABLE: - return new CsvFilterableTable( source, table, fieldTypes, fields, csvSource ); + return new CsvFilterableTable( id, source, table, fieldTypes, fields, csvSource ); default: throw new AssertionError( "Unknown flavor " + this.flavor ); } diff --git a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java index f5f2df7a08..7a64673906 100644 --- a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java +++ b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java @@ -41,10 +41,10 @@ import org.polypheny.db.adapter.DeployMode; import org.polypheny.db.adapter.csv.CsvTable.Flavor; import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.IdBuilder; import org.polypheny.db.catalog.entity.allocation.AllocationTable; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.entity.physical.PhysicalEntity; -import org.polypheny.db.catalog.entity.physical.PhysicalTable; import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.information.InformationGroup; import org.polypheny.db.information.InformationTable; @@ -133,8 +133,8 @@ public void createNewSchema( Snapshot snapshot, String name, long id ) { @Override - public PhysicalTable createAdapterTable( LogicalTable logical, AllocationTable allocationTable ) { - return currentSchema.createCsvTable( logical, allocationTable, this ); + public List<PhysicalEntity> createAdapterTable( IdBuilder idBuilder, LogicalTable logical, AllocationTable allocationTable ) { + return List.of( currentSchema.createCsvTable( idBuilder.getNewPhysicalId(), logical, allocationTable, this ) ); } diff --git a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvTable.java b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvTable.java index 3fbd55e9dc..1cb1dd91b6 100644 --- a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvTable.java +++ b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvTable.java @@ -55,13 +55,15 @@ public abstract class CsvTable extends PhysicalTable { /** * Creates a CsvTable.
*/ - CsvTable( Source source, AllocationTable allocationTable, List<CsvFieldType> fieldTypes, int[] fields, CsvSource csvSource ) { + CsvTable( long id, Source source, AllocationTable allocationTable, List<CsvFieldType> fieldTypes, int[] fields, CsvSource csvSource ) { super( + id, allocationTable, allocationTable.name, allocationTable.getNamespaceName(), allocationTable.getColumnNames(), - allocationTable.getColumns().values().stream().collect( Collectors.toMap( c -> c.id, c -> AlgDataTypeFactory.DEFAULT.createPolyType( c.type ) ) ) ); + allocationTable.getColumns().values().stream().collect( Collectors.toMap( c -> c.id, c -> AlgDataTypeFactory.DEFAULT.createPolyType( c.type ) ) ), + allocationTable.getColumnOrder() ); this.source = source; this.fieldTypes = fieldTypes; this.fields = fields; diff --git a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvTranslatableTable.java b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvTranslatableTable.java index dab7c48178..21741cab94 100644 --- a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvTranslatableTable.java +++ b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvTranslatableTable.java @@ -55,8 +55,8 @@ public class CsvTranslatableTable extends CsvTable implements TranslatableEntity /** * Creates a CsvTable. */ - CsvTranslatableTable( Source source, AllocationTable table, List<CsvFieldType> fieldTypes, int[] fields, CsvSource csvSource ) { - super( source, table, fieldTypes, fields, csvSource ); + CsvTranslatableTable( long id, Source source, AllocationTable table, List<CsvFieldType> fieldTypes, int[] fields, CsvSource csvSource ) { + super( id, source, table, fieldTypes, fields, csvSource ); } diff --git a/plugins/hsqldb-adapter/src/main/java/org/polypheny/db/hsqldb/stores/HsqldbStore.java b/plugins/hsqldb-adapter/src/main/java/org/polypheny/db/hsqldb/stores/HsqldbStore.java index 30d6bb0636..150125418c 100644 --- a/plugins/hsqldb-adapter/src/main/java/org/polypheny/db/hsqldb/stores/HsqldbStore.java +++ b/plugins/hsqldb-adapter/src/main/java/org/polypheny/db/hsqldb/stores/HsqldbStore.java @@ -30,17 +30,18 @@ import org.polypheny.db.adapter.Adapter.AdapterSettingInteger; import org.polypheny.db.adapter.Adapter.AdapterSettingList; import org.polypheny.db.adapter.DeployMode; -import org.polypheny.db.adapter.jdbc.JdbcEntity; import org.polypheny.db.adapter.jdbc.connection.ConnectionFactory; import org.polypheny.db.adapter.jdbc.connection.ConnectionHandlerException; import org.polypheny.db.adapter.jdbc.connection.TransactionalConnectionFactory; import org.polypheny.db.adapter.jdbc.stores.AbstractJdbcStore; import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.IdBuilder; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; import org.polypheny.db.catalog.entity.CatalogIndex; import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; import org.polypheny.db.catalog.entity.allocation.AllocationTable; import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.catalog.entity.physical.PhysicalEntity; import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.config.RuntimeConfig; import org.polypheny.db.plugins.PolyPluginManager; @@ -106,8 +107,8 @@ protected ConnectionFactory deployEmbedded() { @Override - public JdbcEntity createAdapterTable( LogicalTable logicalTable, AllocationTable allocationTable ) { - return currentJdbcSchema.createJdbcTable( logicalTable, allocationTable ); + public List<PhysicalEntity> createAdapterTable( IdBuilder idBuilder, LogicalTable
logicalTable, AllocationTable allocationTable ) { + return List.of( currentJdbcSchema.createJdbcTable( idBuilder.getNewPhysicalId(), logicalTable, allocationTable ) ); } diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcEntity.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcEntity.java index 2adb4eb60d..47fe65d1e3 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcEntity.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcEntity.java @@ -104,15 +104,18 @@ public class JdbcEntity extends PhysicalTable implements TranslatableEntity, Sca public JdbcEntity( JdbcSchema jdbcSchema, + long id, LogicalTable logicalTable, AllocationTable allocationTable, @NonNull TableType jdbcTableType ) { super( + id, allocationTable, getPhysicalTableName( jdbcSchema.adapter, logicalTable, allocationTable ), getPhysicalSchemaName( jdbcSchema.adapter ), getPhysicalColumnNames( jdbcSchema.adapter, allocationTable ), - allocationTable.getColumnTypes() ); + allocationTable.getColumnTypes(), + allocationTable.getColumnOrder() ); this.logical = logicalTable; this.allocation = allocationTable; this.jdbcSchema = jdbcSchema; diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcSchema.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcSchema.java index ca48112c8c..c82e7018c0 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcSchema.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcSchema.java @@ -135,10 +135,12 @@ public JdbcSchema( public JdbcEntity createJdbcTable( + long id, LogicalTable logicalTable, AllocationTable allocationTable ) { return new JdbcEntity( this, + id, logicalTable, allocationTable, TableType.TABLE ); diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/stores/AbstractJdbcStore.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/stores/AbstractJdbcStore.java index 7f7946a975..31db524945 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/stores/AbstractJdbcStore.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/stores/AbstractJdbcStore.java @@ -30,6 +30,7 @@ import org.polypheny.db.adapter.jdbc.JdbcUtils; import org.polypheny.db.adapter.jdbc.connection.ConnectionFactory; import org.polypheny.db.adapter.jdbc.connection.ConnectionHandlerException; +import org.polypheny.db.catalog.IdBuilder; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; import org.polypheny.db.catalog.entity.allocation.AllocationTable; @@ -136,7 +137,7 @@ public PhysicalTable createPhysicalTable( Context context, LogicalTable logicalT } executeUpdate( query, context ); - return this.currentJdbcSchema.createJdbcTable( logicalTable, allocationTable ); + return this.currentJdbcSchema.createJdbcTable( IdBuilder.getInstance().getNewPhysicalId(), logicalTable, allocationTable ); //return new PhysicalTable( allocationTable, getDefaultPhysicalSchemaName(), physicalTableName, allocationTable.getColumns().values().stream().map( c -> getPhysicalColumnName( c.id ) ).collect( Collectors.toList() ) ); } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java 
b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java index dfcbd93514..54f6d06b80 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java @@ -23,10 +23,14 @@ import io.activej.serializer.annotations.Serialize; import java.beans.PropertyChangeSupport; import java.util.Collections; +import java.util.List; import java.util.Map; +import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import lombok.Getter; import lombok.extern.slf4j.Slf4j; +import org.polypheny.db.adapter.Adapter; +import org.polypheny.db.adapter.AdapterManager; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.catalog.allocation.PolyAllocDocCatalog; import org.polypheny.db.catalog.allocation.PolyAllocGraphCatalog; @@ -47,6 +51,7 @@ import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.allocation.AllocationEntity; import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.catalog.entity.physical.PhysicalEntity; import org.polypheny.db.catalog.exceptions.GenericCatalogException; import org.polypheny.db.catalog.exceptions.UnknownAdapterException; import org.polypheny.db.catalog.exceptions.UnknownColumnException; @@ -113,6 +118,11 @@ public PolyCatalog() { new ConcurrentHashMap<>(), new ConcurrentHashMap<>() ); + } + + + @Override + public void init() { try { insertDefaultData(); } catch ( UnknownAdapterException e ) { @@ -161,19 +171,17 @@ private void insertDefaultData() throws UnknownAdapterException { // init adapters if ( adapters.size() == 0 ) { // Deploy default store - addAdapter( "hsqldb", defaultStore.getAdapterName(), AdapterType.STORE, defaultStore.getDefaultSettings() ); + AdapterManager.getInstance().addAdapter( defaultStore.getAdapterName(), "hsqldb", AdapterType.STORE, defaultStore.getDefaultSettings() ); // Deploy default CSV view - addAdapter( "hr", defaultSource.getAdapterName(), AdapterType.SOURCE, defaultSource.getDefaultSettings() ); + Adapter adapter = AdapterManager.getInstance().addAdapter( defaultSource.getAdapterName(), "hr", AdapterType.SOURCE, defaultSource.getDefaultSettings() ); + adapter.createNewSchema( getSnapshot(), "public", namespaceId ); // init schema getLogicalRel( namespaceId ).addTable( "depts", EntityType.SOURCE, false ); - getLogicalRel( namespaceId ).addTable( "emps", EntityType.SOURCE, false ); - getLogicalRel( namespaceId ).addTable( "emp", EntityType.SOURCE, false ); - getLogicalRel( namespaceId ).addTable( "work", EntityType.SOURCE, false ); updateSnapshot(); @@ -198,7 +206,6 @@ private void insertDefaultData() throws UnknownAdapterException { */ private void addDefaultCsvColumns( CatalogAdapter csv, long namespaceId ) throws UnknownTableException, UnknownColumnException, GenericCatalogException { LogicalTable depts = getSnapshot().getRelSnapshot( namespaceId ).getTable( "depts" ); - addDefaultCsvColumn( csv, depts, "deptno", PolyType.INTEGER, null, 1, null ); addDefaultCsvColumn( csv, depts, "name", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 2, 20 ); @@ -267,9 +274,19 @@ private void addDefaultCsvColumn( CatalogAdapter csv, LogicalTable table, String if ( table.name.equals( "emp" ) || table.name.equals( "work" ) ) { filename += ".gz"; } - long allocId = getAllocRel( table.namespaceId ).addDataPlacement( csv.id, table.id ); + + updateSnapshot(); + long allocId = 0; + if ( !getSnapshot().getAllocSnapshot().adapterHasPlacement( csv.id, table.id 
) ) { + allocId = getAllocRel( table.namespaceId ).addDataPlacement( csv.id, table.id ); + } else { + allocId = getSnapshot().getAllocSnapshot().getAllocation( csv.id, table.id ).id; + } + getAllocRel( table.namespaceId ).addColumnPlacement( allocId, colId, PlacementType.AUTOMATIC, filename, table.name, name, position ); - getAllocRel( table.namespaceId ).updateColumnPlacementPhysicalPosition( csv.id, colId, position ); + //getAllocRel( table.namespaceId ).updateColumnPlacementPhysicalPosition( allocId, colId, position ); + + updateSnapshot(); // long partitionId = table.partitionProperty.partitionIds.get( 0 ); // getAllocRel( table.namespaceId ).addPartitionPlacement( table.namespaceId, csv.id, table.id, partitionId, PlacementType.AUTOMATIC, DataPlacementRole.UPTODATE ); @@ -288,6 +305,21 @@ private void addDefaultColumn( CatalogAdapter adapter, LogicalTable table, Strin private void updateSnapshot() { + // reset physical catalogs + Set<Long> keys = this.physicalCatalogs.keySet(); + keys.forEach( k -> this.physicalCatalogs.replace( k, new PolyPhysicalCatalog() ) ); + + // generate new physical entities, atm only relational + this.allocationCatalogs.forEach( ( k, v ) -> { + if ( v.getNamespace().namespaceType == NamespaceType.RELATIONAL ) { + ((AllocationRelationalCatalog) v).getTables().forEach( ( k2, v2 ) -> { + LogicalTable table = getSnapshot().getLogicalEntity( v2.logicalId ).unwrap( LogicalTable.class ); + List<PhysicalEntity> physicals = AdapterManager.getInstance().getAdapter( v2.adapterId ).createAdapterTable( idBuilder, table, v2 ); + getPhysical( table.namespaceId ).addEntities( physicals ); + } ); + } + } ); + this.snapshot = SnapshotBuilder.createSnapshot( idBuilder.getNewSnapshotId(), this, logicalCatalogs, allocationCatalogs, physicalCatalogs ); } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocRelCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocRelCatalog.java index 84943e3a9a..e804f2fe1c 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocRelCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocRelCatalog.java @@ -76,15 +76,10 @@ public PolyAllocRelCatalog copy() { @Override public void addColumnPlacement( long allocationId, long columnId, PlacementType placementType, String physicalSchemaName, String physicalTableName, String physicalColumnName, int position ) { - tables.put( allocationId, tables.get( allocationId ).withAddedColumn( columnId, placementType, physicalSchemaName, physicalTableName, physicalColumnName ) ); + tables.put( allocationId, tables.get( allocationId ).withAddedColumn( columnId, placementType, physicalSchemaName, physicalTableName, physicalColumnName, position ) ); } - // might replace above one with this - private void addColumnPlacementAlloc( long allocTableId, long columnId, PlacementType placementType, String physicalSchemaName, String physicalTableName, String physicalColumnName ) { - tables.put( allocTableId, tables.get( allocTableId ).withAddedColumn( columnId, placementType, physicalSchemaName, physicalTableName, physicalColumnName ) ); - } - @Override public void deleteColumnPlacement( long allocationId, long columnId, boolean columnOnly ) { @@ -92,11 +87,6 @@ public void deleteColumnPlacement( long allocationId, long columnId, boolean col } - // might replace above one with this - private void deleteColumnPlacementAlloc( long allocTableId, long columnId, boolean columnOnly ) {
tables.put( allocTableId, tables.get( allocTableId ).withRemovedColumn( columnId ) ); - } - @Override public void updateColumnPlacementType( long adapterId, long columnId, PlacementType placementType ) { @@ -105,7 +95,7 @@ public void updateColumnPlacementType( long adapterId, long columnId, PlacementT @Override - public void updateColumnPlacementPhysicalPosition( long adapterId, long columnId, long position ) { + public void updateColumnPlacementPhysicalPosition( long allocId, long columnId, long position ) { } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/physical/PolyPhysicalCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/physical/PolyPhysicalCatalog.java index 65a6019d5d..1413f8a0eb 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/physical/PolyPhysicalCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/physical/PolyPhysicalCatalog.java @@ -16,6 +16,7 @@ package org.polypheny.db.catalog.physical; +import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import lombok.Getter; @@ -50,4 +51,10 @@ public void addPhysicalEntity( PhysicalEntity physicalEntity ) { } + @Override + public void addEntities( List physicals ) { + physicals.forEach( p -> this.physicals.put( p.id, p ) ); + } + + } From 6c18875f8ea2917f4e9313a0c4680a33b714d142 Mon Sep 17 00:00:00 2001 From: datomo Date: Tue, 14 Mar 2023 23:44:44 +0100 Subject: [PATCH 049/436] started fixing validation --- .../org/polypheny/db/catalog/entity/CatalogEntity.java | 6 +++--- .../polypheny/db/catalog/snapshot/PhysicalSnapshot.java | 3 ++- .../db/catalog/snapshot/impl/PhysicalSnapshotImpl.java | 5 +++-- core/src/main/java/org/polypheny/db/util/Util.java | 2 +- .../db/sql/language/validate/DelegatingScope.java | 9 ++++----- .../db/sql/language/validate/IdentifierNamespace.java | 6 +++++- webui/src/main/java/org/polypheny/db/webui/Crud.java | 3 ++- 7 files changed, 20 insertions(+), 14 deletions(-) diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogEntity.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogEntity.java index ab1cd9b356..6a194534ba 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogEntity.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogEntity.java @@ -23,6 +23,7 @@ import lombok.experimental.NonFinal; import lombok.experimental.SuperBuilder; import org.polypheny.db.StatisticsManager; +import org.polypheny.db.algebra.AlgCollation; import org.polypheny.db.algebra.AlgDistribution; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.DocumentType; @@ -31,7 +32,6 @@ import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.catalog.refactor.CatalogType; import org.polypheny.db.catalog.refactor.Expressible; -import org.polypheny.db.plan.AlgMultipleTrait; import org.polypheny.db.schema.Statistic; import org.polypheny.db.schema.Statistics; import org.polypheny.db.schema.Wrapper; @@ -102,8 +102,8 @@ public double getRowCount() { } - public List getCollations() { - return (List) Statistics.UNKNOWN.getCollations(); + public List getCollations() { + return Statistics.UNKNOWN.getCollations(); } diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/PhysicalSnapshot.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/PhysicalSnapshot.java index 8877c5eb59..2aeda84671 100644 --- a/core/src/main/java/org/polypheny/db/catalog/snapshot/PhysicalSnapshot.java +++ 
b/core/src/main/java/org/polypheny/db/catalog/snapshot/PhysicalSnapshot.java @@ -17,6 +17,7 @@ package org.polypheny.db.catalog.snapshot; import java.util.List; +import lombok.NonNull; import org.polypheny.db.catalog.entity.physical.PhysicalCollection; import org.polypheny.db.catalog.entity.physical.PhysicalEntity; import org.polypheny.db.catalog.entity.physical.PhysicalGraph; @@ -39,7 +40,7 @@ public interface PhysicalSnapshot { PhysicalGraph getPhysicalGraph( long logicalId, long adapterId ); - List getPhysicalsOnAdapter( long adapterId ); + @NonNull List getPhysicalsOnAdapter( long adapterId ); PhysicalEntity getPhysicalEntity( long id ); diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/PhysicalSnapshotImpl.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/PhysicalSnapshotImpl.java index 09e812aadb..b554047f7c 100644 --- a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/PhysicalSnapshotImpl.java +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/PhysicalSnapshotImpl.java @@ -24,6 +24,7 @@ import java.util.Map.Entry; import java.util.stream.Collectors; import lombok.Value; +import org.jetbrains.annotations.NotNull; import org.polypheny.db.catalog.catalogs.PhysicalCatalog; import org.polypheny.db.catalog.entity.physical.PhysicalCollection; import org.polypheny.db.catalog.entity.physical.PhysicalEntity; @@ -139,8 +140,8 @@ public PhysicalGraph getPhysicalGraph( long logicalId, long adapterId ) { @Override - public List getPhysicalsOnAdapter( long adapterId ) { - return adapterPhysicals.get( adapterId ); + public @NotNull List getPhysicalsOnAdapter( long adapterId ) { + return adapterPhysicals.get( adapterId ) == null ? List.of() : adapterPhysicals.get( adapterId ); } diff --git a/core/src/main/java/org/polypheny/db/util/Util.java b/core/src/main/java/org/polypheny/db/util/Util.java index f82aba4ac9..a18e1e40af 100644 --- a/core/src/main/java/org/polypheny/db/util/Util.java +++ b/core/src/main/java/org/polypheny/db/util/Util.java @@ -2025,7 +2025,7 @@ public static Monotonicity getMonotonicity( CatalogEntity entity, String columnN return Monotonicity.NOT_MONOTONIC; } - for ( AlgCollation collation : entity.getStatistic().getCollations() ) { + for ( AlgCollation collation : entity.getCollations() ) { final AlgFieldCollation fieldCollation = collation.getFieldCollations().get( 0 ); final int fieldIndex = fieldCollation.getFieldIndex(); if ( fieldIndex < entity.getRowType().getFieldCount() && entity.getRowType().getFieldNames().get( fieldIndex ).equals( columnName ) ) { diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/DelegatingScope.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/DelegatingScope.java index 95430dc8fd..121f9a0bd9 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/DelegatingScope.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/DelegatingScope.java @@ -27,7 +27,6 @@ import org.polypheny.db.algebra.type.DynamicRecordType; import org.polypheny.db.algebra.type.StructKind; import org.polypheny.db.catalog.entity.CatalogEntity; -import org.polypheny.db.nodes.validate.ValidatorTable; import org.polypheny.db.prepare.Prepare.PreparingEntity; import org.polypheny.db.schema.CustomColumnResolvingEntity; import org.polypheny.db.schema.Entity; @@ -219,12 +218,12 @@ public SqlValidator getValidator() { */ @Override public SqlQualified fullyQualify( SqlIdentifier identifier ) { - if ( 
identifier.isStar() ) { + /*if ( identifier.isStar() ) { return SqlQualified.create( this, 1, null, identifier ); } - /* + final SqlIdentifier previous = identifier; - final NameMatcher nameMatcher = validator.catalogReader.nameMatcher; + final NameMatcher nameMatcher = Snapshot.nameMatcher; String columnName; final String tableName; final SqlValidatorNamespace namespace; @@ -292,7 +291,7 @@ public SqlQualified fullyQualify( SqlIdentifier identifier ) { resolve( prefix.names, nameMatcher, false, resolved ); if ( resolved.count() == 1 ) { final Resolve resolve = resolved.only(); - fromNs = resolve.getEntity().unwrap( CatalogTable.class ); + fromNs = resolve.getEntity().unwrap( LogicalTable.class ); fromRowType = resolve.rowType(); break; } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/IdentifierNamespace.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/IdentifierNamespace.java index c034f1d809..37d4bf341f 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/IdentifierNamespace.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/IdentifierNamespace.java @@ -27,7 +27,9 @@ import org.polypheny.db.algebra.constant.Monotonicity; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeField; +import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogEntity; +import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.languages.ParserPos; import org.polypheny.db.sql.language.SqlCall; import org.polypheny.db.sql.language.SqlIdentifier; @@ -148,7 +150,9 @@ private SqlValidatorNamespace resolveImpl( SqlIdentifier id ) { }*/ } } - throw validator.newValidationError( id, Static.RESOURCE.objectNotFound( id.getComponent( 0 ).toString() ) ); + List ns = id.names; + LogicalNamespace namespace = Catalog.getInstance().getSnapshot().getNamespace( ns.get( 0 ) ); + return new EntityNamespace( validator, Catalog.getInstance().getSnapshot().getRelSnapshot( namespace.id ).getTable( ns.get( 1 ) ) ); } diff --git a/webui/src/main/java/org/polypheny/db/webui/Crud.java b/webui/src/main/java/org/polypheny/db/webui/Crud.java index 8bbd2f72b5..293e6ba63e 100644 --- a/webui/src/main/java/org/polypheny/db/webui/Crud.java +++ b/webui/src/main/java/org/polypheny/db/webui/Crud.java @@ -3574,7 +3574,8 @@ public static Transaction getTransaction( boolean analyze, boolean useCache, Tra public static Transaction getTransaction( boolean analyze, boolean useCache, TransactionManager transactionManager, long userId, long databaseId, String origin ) { try { - Transaction transaction = transactionManager.startTransaction( userId, analyze, origin, MultimediaFlavor.FILE ); + Snapshot snapshot = Catalog.getInstance().getSnapshot(); + Transaction transaction = transactionManager.startTransaction( snapshot.getUser( Catalog.defaultUserId ), snapshot.getNamespace( Catalog.defaultDatabaseId ), analyze, origin, MultimediaFlavor.FILE ); transaction.setUseCache( useCache ); return transaction; } catch ( UnknownUserException | UnknownSchemaException e ) { From 343414d03c955f72d7dd9ad6722fd6a0b7aecee5 Mon Sep 17 00:00:00 2001 From: datomo Date: Tue, 21 Mar 2023 18:04:44 +0100 Subject: [PATCH 050/436] adjusted some of the tests to new format --- .../snapshot/LogicalGraphSnapshot.java | 2 + .../org/polypheny/db/catalog/MockCatalog.java | 50 ++++++--------- .../db/catalog/MockCatalogReader.java | 6 +- 
.../db/docker/DockerInstanceTest.java | 2 +- .../db/docker/DockerManagerTest.java | 2 +- .../db/docker/MockCatalogDocker.java | 7 ++ .../org/polypheny/db/plan/RelOptUtilTest.java | 1 - .../db/schemas/HrClusteredSchema.java | 5 +- .../java/org/polypheny/db/TestHelper.java | 3 +- .../java/org/polypheny/db/cypher/DdlTest.java | 46 +++++++------ .../org/polypheny/db/misc/AlgBuilderTest.java | 1 - .../db/sql/clause/SimpleSqlTest.java | 64 +++++++++++++++++++ .../db/statistics/StatisticsTest.java | 19 ++++-- .../language/validate/DelegatingScope.java | 12 +++- 14 files changed, 153 insertions(+), 67 deletions(-) create mode 100644 dbms/src/test/java/org/polypheny/db/sql/clause/SimpleSqlTest.java diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalGraphSnapshot.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalGraphSnapshot.java index e5ebb4488f..a993e8d637 100644 --- a/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalGraphSnapshot.java +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalGraphSnapshot.java @@ -32,6 +32,8 @@ public interface LogicalGraphSnapshot { */ public abstract LogicalGraph getGraph( long id ); + + /** * Get a collection of all graphs, which match the given conditions. * diff --git a/core/src/test/java/org/polypheny/db/catalog/MockCatalog.java b/core/src/test/java/org/polypheny/db/catalog/MockCatalog.java index a4c295f04c..ae5f4ba7d4 100644 --- a/core/src/test/java/org/polypheny/db/catalog/MockCatalog.java +++ b/core/src/test/java/org/polypheny/db/catalog/MockCatalog.java @@ -28,8 +28,11 @@ import org.polypheny.db.catalog.catalogs.LogicalGraphCatalog; import org.polypheny.db.catalog.catalogs.LogicalRelationalCatalog; import org.polypheny.db.catalog.catalogs.PhysicalCatalog; +import org.polypheny.db.catalog.entity.CatalogAdapter; import org.polypheny.db.catalog.entity.CatalogAdapter.AdapterType; import org.polypheny.db.catalog.entity.CatalogDatabase; +import org.polypheny.db.catalog.entity.CatalogQueryInterface; +import org.polypheny.db.catalog.entity.CatalogUser; import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalEntity; import org.polypheny.db.catalog.exceptions.NoTablePrimaryKeyException; @@ -54,101 +57,90 @@ public abstract class MockCatalog extends Catalog { @Override - public LogicalRelationalCatalog getLogicalRel( long namespaceId ) { + public void init() { throw new NotImplementedException(); } @Override - public LogicalDocumentCatalog getLogicalDoc( long namespaceId ) { + public Map getUsers() { throw new NotImplementedException(); } @Override - public LogicalGraphCatalog getLogicalGraph( long namespaceId ) { + public Map getAdapters() { throw new NotImplementedException(); } @Override - public AllocationRelationalCatalog getAllocRel( long namespaceId ) { + public Map getInterfaces() { throw new NotImplementedException(); } - @Override - public AllocationDocumentCatalog getAllocDoc( long namespaceId ) { + public LogicalRelationalCatalog getLogicalRel( long namespaceId ) { throw new NotImplementedException(); } @Override - public AllocationGraphCatalog getAllocGraph( long namespaceId ) { + public LogicalDocumentCatalog getLogicalDoc( long namespaceId ) { throw new NotImplementedException(); } @Override - public LogicalEntity getLogicalEntity( long id ) { + public LogicalGraphCatalog getLogicalGraph( long namespaceId ) { throw new NotImplementedException(); } @Override - public PhysicalCatalog getPhysical( long namespaceId ) { + public 
AllocationRelationalCatalog getAllocRel( long namespaceId ) { throw new NotImplementedException(); } @Override - public void addObserver( PropertyChangeListener listener ) { - super.addObserver( listener ); - } - - - @Override - public void removeObserver( PropertyChangeListener listener ) { - super.removeObserver( listener ); - } - - - @Override - public Snapshot getSnapshot() { + public AllocationDocumentCatalog getAllocDoc( long namespaceId ) { throw new NotImplementedException(); } @Override - public LogicalDocSnapshot getDocSnapshot( long namespaceId ) { + public AllocationGraphCatalog getAllocGraph( long namespaceId ) { throw new NotImplementedException(); } + @Override - public LogicalGraphSnapshot getGraphSnapshot( long namespaceId ) { + public PhysicalCatalog getPhysical( long namespaceId ) { throw new NotImplementedException(); } @Override - public LogicalRelSnapshot getRelSnapshot( long namespaceId ) { - throw new NotImplementedException(); + public void addObserver( PropertyChangeListener listener ) { + super.addObserver( listener ); } @Override - public PhysicalSnapshot getPhysicalSnapshot() { - throw new NotImplementedException(); + public void removeObserver( PropertyChangeListener listener ) { + super.removeObserver( listener ); } @Override - public AllocSnapshot getAllocSnapshot() { + public Snapshot getSnapshot() { throw new NotImplementedException(); } + @Override public void commit() throws NoTablePrimaryKeyException { throw new NotImplementedException(); diff --git a/core/src/test/java/org/polypheny/db/catalog/MockCatalogReader.java b/core/src/test/java/org/polypheny/db/catalog/MockCatalogReader.java index 2e1a9a7ac8..01796caeca 100644 --- a/core/src/test/java/org/polypheny/db/catalog/MockCatalogReader.java +++ b/core/src/test/java/org/polypheny/db/catalog/MockCatalogReader.java @@ -280,7 +280,7 @@ public void registerRolledUpColumn( String columnName ) { private MockEntity( MockCatalogReader catalogReader, List names, boolean stream, double rowCount, ColumnResolver resolver, InitializerExpressionFactory initializerFactory ) { - super( -1, Util.last( names ), null, -1, -1, -1, EntityType.ENTITY, null, ImmutableList.of(), true, null ); + super( -1, Util.last( names ), -1, EntityType.ENTITY, null, true, ImmutableList.of() ); this.catalogReader = catalogReader; this.stream = stream; this.rowCount = rowCount; @@ -296,7 +296,7 @@ private MockEntity( MockCatalogReader catalogReader, List names, boolean protected MockEntity( MockCatalogReader catalogReader, boolean stream, double rowCount, List> columnList, List keyList, AlgDataType rowType, List collationList, List names, Set monotonicColumnSet, StructKind kind, ColumnResolver resolver, InitializerExpressionFactory initializerFactory ) { - super( -1, Util.last( names ), null, -1, -1, -1, EntityType.ENTITY, null, ImmutableList.of(), true, null ); + super( -1, Util.last( names ), -1, EntityType.ENTITY, null, true, ImmutableList.of() ); this.catalogReader = catalogReader; this.stream = stream; this.rowCount = rowCount; @@ -318,7 +318,7 @@ protected MockEntity( private class ModifiableEntity extends LogicalTable implements Wrapper { protected ModifiableEntity( String tableName ) { - super( -1, tableName, null, -1, -1, -1, EntityType.ENTITY, null, ImmutableList.of(), false, null ); + super( -1, Util.last( names ), -1, EntityType.ENTITY, null, true, ImmutableList.of() ); } diff --git a/core/src/test/java/org/polypheny/db/docker/DockerInstanceTest.java b/core/src/test/java/org/polypheny/db/docker/DockerInstanceTest.java index 
e2b4cb8504..e13726a5a8 100644 --- a/core/src/test/java/org/polypheny/db/docker/DockerInstanceTest.java +++ b/core/src/test/java/org/polypheny/db/docker/DockerInstanceTest.java @@ -39,7 +39,7 @@ public class DockerInstanceTest { @BeforeClass public static void initClass() { - if ( Catalog.INSTANCE == null ) { + if ( Catalog.getInstance() == null ) { // some functionality needs to use the catalog, so we use a mock Catalog.setAndGetInstance( new MockCatalogDocker() ); } diff --git a/core/src/test/java/org/polypheny/db/docker/DockerManagerTest.java b/core/src/test/java/org/polypheny/db/docker/DockerManagerTest.java index 949996fde9..3597c57002 100644 --- a/core/src/test/java/org/polypheny/db/docker/DockerManagerTest.java +++ b/core/src/test/java/org/polypheny/db/docker/DockerManagerTest.java @@ -44,7 +44,7 @@ public class DockerManagerTest { @BeforeClass public static void initClass() { - if ( Catalog.INSTANCE == null ) { + if ( Catalog.getInstance() == null ) { // some functionality needs to use the catalog, so we use a mock Catalog.setAndGetInstance( new MockCatalogDocker() ); } diff --git a/core/src/test/java/org/polypheny/db/docker/MockCatalogDocker.java b/core/src/test/java/org/polypheny/db/docker/MockCatalogDocker.java index c5536a9bc5..c540579f50 100644 --- a/core/src/test/java/org/polypheny/db/docker/MockCatalogDocker.java +++ b/core/src/test/java/org/polypheny/db/docker/MockCatalogDocker.java @@ -22,6 +22,8 @@ import org.polypheny.db.catalog.MockCatalog; import org.polypheny.db.catalog.entity.CatalogAdapter; import org.polypheny.db.catalog.entity.CatalogAdapter.AdapterType; +import org.polypheny.db.catalog.entity.CatalogQueryInterface; +import org.polypheny.db.catalog.entity.CatalogUser; import org.polypheny.db.catalog.snapshot.Snapshot; /** @@ -34,6 +36,9 @@ public class MockCatalogDocker extends MockCatalog { HashMap adapters = new HashMap<>(); + + + @Override public long addAdapter( String uniqueName, String clazz, AdapterType type, Map<String, String> settings ) { i++; @@ -48,4 +53,6 @@ public Snapshot getSnapshot() { } + + } diff --git a/core/src/test/java/org/polypheny/db/plan/RelOptUtilTest.java b/core/src/test/java/org/polypheny/db/plan/RelOptUtilTest.java index e4ff273ce7..2cda22c086 100644 --- a/core/src/test/java/org/polypheny/db/plan/RelOptUtilTest.java +++ b/core/src/test/java/org/polypheny/db/plan/RelOptUtilTest.java @@ -77,7 +77,6 @@ public JavaTypeFactory getTypeFactory() { }, "", 0, - 0, null ) ); } diff --git a/core/src/test/java/org/polypheny/db/schemas/HrClusteredSchema.java b/core/src/test/java/org/polypheny/db/schemas/HrClusteredSchema.java index bf85b1156b..6317ddbcbc 100644 --- a/core/src/test/java/org/polypheny/db/schemas/HrClusteredSchema.java +++ b/core/src/test/java/org/polypheny/db/schemas/HrClusteredSchema.java @@ -52,7 +52,6 @@ import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.refactor.ScannableEntity; -import org.polypheny.db.schema.Entity; import org.polypheny.db.schema.Namespace.Schema; import org.polypheny.db.schema.Statistic; import org.polypheny.db.schema.Statistics; @@ -123,7 +122,7 @@ private static class PkClusteredEntity extends LogicalTable implements Scannable PkClusteredEntity( Function<AlgDataTypeFactory, AlgDataType> dataTypeBuilder, ImmutableBitSet pkColumns, List<Object[]> data ) { - super( -1, "", null, -1, -1, -1, EntityType.ENTITY, null, ImmutableList.of(), false, null ); + super( -1, "", -1, EntityType.ENTITY, null, false, ImmutableList.of() ); this.data = data; this.typeBuilder = dataTypeBuilder;
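// Editorial note (hedged): the slimmed-down super(...) call above follows the new
// LogicalTable constructor shape used consistently in this commit's test fixtures
// (compare MockCatalogReader in this same patch); the parameter meanings
// ( id, name, namespaceId, entityType, primaryKey, modifiable, connectedViews )
// are an assumption inferred from the argument order, not taken from the source.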
this.pkColumns = pkColumns; @@ -139,10 +138,12 @@ public Statistic getStatistic() { return Statistics.of( (double) data.size(), ImmutableList.of( pkColumns ), ImmutableList.of( AlgCollations.of( collationFields ) ) ); } + @Override public Enumerable scan( final DataContext root ) { return Linq4j.asEnumerable( data ); } + } } diff --git a/dbms/src/test/java/org/polypheny/db/TestHelper.java b/dbms/src/test/java/org/polypheny/db/TestHelper.java index 816b4bde37..c06368426b 100644 --- a/dbms/src/test/java/org/polypheny/db/TestHelper.java +++ b/dbms/src/test/java/org/polypheny/db/TestHelper.java @@ -49,7 +49,6 @@ import org.junit.Assert; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.exceptions.GenericCatalogException; -import org.polypheny.db.catalog.exceptions.UnknownDatabaseException; import org.polypheny.db.catalog.exceptions.UnknownSchemaException; import org.polypheny.db.catalog.exceptions.UnknownUserException; import org.polypheny.db.runtime.functions.Functions; @@ -128,7 +127,7 @@ private TestHelper() { public Transaction getTransaction() { try { return transactionManager.startTransaction( Catalog.defaultUserId, true, "Test Helper" ); - } catch ( GenericCatalogException | UnknownUserException | UnknownDatabaseException | UnknownSchemaException e ) { + } catch ( GenericCatalogException | UnknownUserException | UnknownSchemaException e ) { throw new RuntimeException( "Error while starting transaction", e ); } } diff --git a/dbms/src/test/java/org/polypheny/db/cypher/DdlTest.java b/dbms/src/test/java/org/polypheny/db/cypher/DdlTest.java index 2a020d15a0..4949b05996 100644 --- a/dbms/src/test/java/org/polypheny/db/cypher/DdlTest.java +++ b/dbms/src/test/java/org/polypheny/db/cypher/DdlTest.java @@ -17,6 +17,8 @@ package org.polypheny.db.cypher; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; import java.sql.Connection; import java.sql.SQLException; @@ -26,8 +28,10 @@ import org.polypheny.db.AdapterTestSuite; import org.polypheny.db.TestHelper.JdbcConnection; import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.catalog.logistic.Pattern; +import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.excluded.CassandraExcluded; import org.polypheny.db.webui.models.Result; @@ -39,21 +43,19 @@ public class DdlTest extends CypherTestTemplate { @Test public void addCollectionTest() { - Catalog catalog = Catalog.getInstance(); + Snapshot snapshot = Catalog.getInstance().getSnapshot(); execute( "CREATE DATABASE " + graphName ); - LogicalGraph graph = catalog.getGraphs( new Pattern( graphName ) ).get( 0 ); - - assertEquals( 1, catalog.getGraphs( new Pattern( graphName ) ).size() ); + assertNotNull( snapshot.getNamespace( graphName ) ); execute( "DROP DATABASE " + graphName ); - assertEquals( 0, catalog.getGraphs( new Pattern( graphName ) ).size() ); + assertNull( snapshot.getNamespace( graphName ) ); execute( "CREATE DATABASE " + graphName ); - assertEquals( 1, catalog.getGraphs( new Pattern( graphName ) ).size() ); + assertNotNull( snapshot.getNamespace( graphName ) ); execute( "DROP DATABASE " + graphName ); } @@ -65,17 +67,19 @@ public void addPlacementTest() throws SQLException { try { execute( "CREATE DATABASE " + graphName ); - LogicalGraph graph = catalog.getGraphs( new Pattern( graphName ) ).get( 0 ); + LogicalNamespace namespace = 
catalog.getSnapshot().getNamespace( graphName ); + LogicalGraph graph = catalog.getSnapshot().getGraphSnapshot( namespace.id ).getGraph( namespace.id ); - assertEquals( 1, graph.placements.size() ); + assertEquals( 1, graph.getPlacements().size() ); addStore( "store1" ); execute( String.format( "CREATE PLACEMENT OF %s ON STORE %s", graphName, "store1" ), graphName ); - graph = catalog.getGraphs( new Pattern( graphName ) ).get( 0 ); + namespace = catalog.getSnapshot().getNamespace( graphName ); + graph = catalog.getSnapshot().getGraphSnapshot( namespace.id ).getGraph( namespace.id ); - assertEquals( 2, graph.placements.size() ); + assertEquals( 2, graph.getPlacements().size() ); execute( "DROP DATABASE " + graphName ); @@ -94,16 +98,17 @@ public void initialPlacementTest() throws SQLException { addStore( "store1" ); execute( String.format( "CREATE DATABASE %s ON STORE %s", graphName, "store1" ) ); + LogicalNamespace namespace = catalog.getSnapshot().getNamespace( graphName ); + LogicalGraph graph = catalog.getSnapshot().getGraphSnapshot( namespace.id ).getGraph( namespace.id ); - LogicalGraph graph = catalog.getGraphs( new Pattern( graphName ) ).get( 0 ); - - assertEquals( 1, graph.placements.size() ); + assertEquals( 1, graph.getPlacements().size() ); execute( String.format( "CREATE PLACEMENT OF %s ON STORE %s", graphName, "hsqldb" ), graphName ); - graph = catalog.getGraphs( new Pattern( graphName ) ).get( 0 ); + namespace = catalog.getSnapshot().getNamespace( graphName ); + graph = catalog.getSnapshot().getGraphSnapshot( namespace.id ).getGraph( namespace.id ); - assertEquals( 2, graph.placements.size() ); + assertEquals( 2, graph.getPlacements().size() ); execute( "DROP DATABASE " + graphName ); @@ -122,17 +128,19 @@ public void deletePlacementTest() throws SQLException { execute( "CREATE DATABASE " + graphName ); - LogicalGraph graph = catalog.getGraphs( new Pattern( graphName ) ).get( 0 ); + LogicalNamespace namespace = catalog.getSnapshot().getNamespace( graphName ); + LogicalGraph graph = catalog.getSnapshot().getGraphSnapshot( namespace.id ).getGraph( namespace.id ); - assertEquals( 1, graph.placements.size() ); + assertEquals( 1, graph.getPlacements().size() ); addStore( "store1" ); execute( String.format( "CREATE PLACEMENT OF %s ON STORE %s", graphName, "store1" ), graphName ); - graph = catalog.getGraphs( new Pattern( graphName ) ).get( 0 ); + namespace = catalog.getSnapshot().getNamespace( graphName ); + graph = catalog.getSnapshot().getGraphSnapshot( namespace.id ).getGraph( namespace.id ); - assertEquals( 2, graph.placements.size() ); + assertEquals( 2, graph.getPlacements().size() ); execute( String.format( "DROP PLACEMENT OF %s ON STORE %s", graphName, "store1" ), graphName ); diff --git a/dbms/src/test/java/org/polypheny/db/misc/AlgBuilderTest.java b/dbms/src/test/java/org/polypheny/db/misc/AlgBuilderTest.java index a5a759f982..aa2ca47cff 100644 --- a/dbms/src/test/java/org/polypheny/db/misc/AlgBuilderTest.java +++ b/dbms/src/test/java/org/polypheny/db/misc/AlgBuilderTest.java @@ -165,7 +165,6 @@ public JavaTypeFactory getTypeFactory() { }, "", 0, - 0, null ) ).build(); return AlgBuilder.create( config ); } diff --git a/dbms/src/test/java/org/polypheny/db/sql/clause/SimpleSqlTest.java b/dbms/src/test/java/org/polypheny/db/sql/clause/SimpleSqlTest.java new file mode 100644 index 0000000000..149e118b19 --- /dev/null +++ b/dbms/src/test/java/org/polypheny/db/sql/clause/SimpleSqlTest.java @@ -0,0 +1,64 @@ +/* + * Copyright 2019-2023
The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.sql.clause; + +import com.google.common.collect.ImmutableList; +import java.sql.Connection; +import java.sql.SQLException; +import java.sql.Statement; +import java.util.List; +import org.junit.BeforeClass; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.polypheny.db.TestHelper; +import org.polypheny.db.TestHelper.JdbcConnection; +import org.polypheny.db.excluded.CottontailExcluded; +import org.polypheny.db.excluded.FileExcluded; + +public class SimpleSqlTest { + + @BeforeClass + public static void start() throws SQLException { + // Ensures that Polypheny-DB is running + //noinspection ResultOfMethodCallIgnored + TestHelper.getInstance(); + insertData(); + } + + + private static void insertData() { + + } + + + @Test + public void select() throws SQLException { + try ( JdbcConnection jdbcConnection = new JdbcConnection( false ) ) { + Connection connection = jdbcConnection.getConnection(); + try ( Statement statement = connection.createStatement() ) { + statement.executeUpdate( "CREATE TABLE TableA(ID INTEGER NOT NULL, NAME VARCHAR(20), AGE INTEGER, PRIMARY KEY (ID))" ); + statement.executeUpdate( "INSERT INTO TableA VALUES (12, 'Name1', 60)" ); + statement.executeUpdate( "INSERT INTO TableA VALUES (15, 'Name2', 24)" ); + statement.executeUpdate( "INSERT INTO TableA VALUES (99, 'Name3', 11)" ); + + connection.commit(); + } + } + + } + +} diff --git a/dbms/src/test/java/org/polypheny/db/statistics/StatisticsTest.java b/dbms/src/test/java/org/polypheny/db/statistics/StatisticsTest.java index ca4b188e48..632f977241 100644 --- a/dbms/src/test/java/org/polypheny/db/statistics/StatisticsTest.java +++ b/dbms/src/test/java/org/polypheny/db/statistics/StatisticsTest.java @@ -31,11 +31,12 @@ import org.polypheny.db.TestHelper; import org.polypheny.db.TestHelper.JdbcConnection; import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalTable; -import org.polypheny.db.catalog.exceptions.UnknownDatabaseException; import org.polypheny.db.catalog.exceptions.UnknownSchemaException; import org.polypheny.db.catalog.exceptions.UnknownTableException; import org.polypheny.db.catalog.logistic.Pattern; +import org.polypheny.db.catalog.snapshot.Snapshot; @SuppressWarnings({ "SqlDialectInspection", "SqlNoDataSourceInspection" }) @@ -254,15 +255,17 @@ public void testSimpleRowCount() throws SQLException { ); waiter.await( 20, TimeUnit.SECONDS ); try { - LogicalTable catalogTableNation = Catalog.getInstance().getTable( "statisticschema", "nation" ); - LogicalTable catalogTableRegion = Catalog.getInstance().getTable( "statisticschema", "region" ); + Snapshot snapshot = Catalog.getInstance().getSnapshot(); + LogicalNamespace namespace = snapshot.getNamespace( "statisticschema" ); + LogicalTable catalogTableNation = snapshot.getRelSnapshot( namespace.id ).getTable( "nation" ); + 
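// Editorial note: getRelSnapshot( namespaceId ).getTable( name ) is the
// snapshot-based replacement for the former Catalog#getTable( schema, table )
// lookup removed above.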
LogicalTable catalogTableRegion = snapshot.getRelSnapshot( namespace.id ).getTable( "region" ); Integer rowCountNation = StatisticsManager.getInstance().rowCountPerTable( catalogTableNation.id ); Integer rowCountRegion = StatisticsManager.getInstance().rowCountPerTable( catalogTableRegion.id ); Assert.assertEquals( Integer.valueOf( 3 ), rowCountNation ); Assert.assertEquals( Integer.valueOf( 2 ), rowCountRegion ); - } catch ( UnknownTableException | UnknownDatabaseException | UnknownSchemaException e ) { + } catch ( UnknownTableException | UnknownSchemaException e ) { log.error( "Caught exception test", e ); } connection.commit(); @@ -308,13 +311,15 @@ private void assertStatisticsConvertTo( int maxSeconds, int target ) { boolean inCatalog = true; while ( !successfull && count < maxSeconds ) { waiter.await( 1, TimeUnit.SECONDS ); - if ( Catalog.getInstance().getTables( new Pattern( "statisticschema" ), new Pattern( "nationdelete" ) ).size() != 1 ) { + Snapshot snapshot = Catalog.getInstance().getSnapshot(); + LogicalNamespace namespace = snapshot.getNamespace( "statisticschema" ); + if ( snapshot.getRelSnapshot( namespace.id ).getTable( "nationdelete" ) == null ) { count++; inCatalog = false; continue; } inCatalog = true; - LogicalTable catalogTableNation = Catalog.getInstance().getTable( "statisticschema", "nationdelete" ); + LogicalTable catalogTableNation = snapshot.getRelSnapshot(namespace.id).getTable( "nationdelete" ); Integer rowCount = StatisticsManager.getInstance().rowCountPerTable( catalogTableNation.id ); // potentially table exists not yet in statistics but in catalog if ( rowCount != null && rowCount == target ) { @@ -333,7 +338,7 @@ private void assertStatisticsConvertTo( int maxSeconds, int target ) { log.warn( "Collection was already removed from the catalog, therefore the count will be null, which is correct" ); } - } catch ( UnknownTableException | UnknownDatabaseException | UnknownSchemaException | InterruptedException e ) { + } catch ( UnknownTableException | UnknownSchemaException | InterruptedException e ) { log.error( "Caught exception test", e ); } } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/DelegatingScope.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/DelegatingScope.java index 121f9a0bd9..8ca7cfaad8 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/DelegatingScope.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/DelegatingScope.java @@ -17,7 +17,11 @@ package org.polypheny.db.sql.language.validate; +import com.google.common.collect.ImmutableList; +import java.util.ArrayList; import java.util.Collection; +import java.util.Collections; +import java.util.Comparator; import java.util.List; import java.util.Map; import org.polypheny.db.algebra.constant.MonikerType; @@ -27,6 +31,9 @@ import org.polypheny.db.algebra.type.DynamicRecordType; import org.polypheny.db.algebra.type.StructKind; import org.polypheny.db.catalog.entity.CatalogEntity; +import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.catalog.snapshot.Snapshot; +import org.polypheny.db.languages.ParserPos; import org.polypheny.db.prepare.Prepare.PreparingEntity; import org.polypheny.db.schema.CustomColumnResolvingEntity; import org.polypheny.db.schema.Entity; @@ -39,7 +46,10 @@ import org.polypheny.db.util.Moniker; import org.polypheny.db.util.MonikerImpl; import org.polypheny.db.util.NameMatcher; +import 
org.polypheny.db.util.NameMatchers; import org.polypheny.db.util.Pair; +import org.polypheny.db.util.Static; +import org.polypheny.db.util.Util; /** @@ -218,7 +228,7 @@ public SqlValidator getValidator() { */ @Override public SqlQualified fullyQualify( SqlIdentifier identifier ) { - /*if ( identifier.isStar() ) { + if ( identifier.isStar() ) { return SqlQualified.create( this, 1, null, identifier ); } From 1b92f16b577433a09a6a6a63cb744478191f0e1a Mon Sep 17 00:00:00 2001 From: datomo Date: Mon, 27 Mar 2023 22:22:03 +0200 Subject: [PATCH 051/436] adjusted the interface to interact with snapshot --- .../org/polypheny/db/catalog/Catalog.java | 6 +++ .../db/catalog/snapshot/Snapshot.java | 40 +++++++++---------- .../catalog/snapshot/impl/SnapshotImpl.java | 3 +- .../db/misc/HorizontalPartitioningTest.java | 4 +- .../language/validate/DelegatingScope.java | 14 +++---- .../validate/SqlValidatorNamespace.java | 1 - .../language/validate/SqlValidatorScope.java | 6 ++- 7 files changed, 40 insertions(+), 34 deletions(-) diff --git a/core/src/main/java/org/polypheny/db/catalog/Catalog.java b/core/src/main/java/org/polypheny/db/catalog/Catalog.java index c772b430ff..79d47c5043 100644 --- a/core/src/main/java/org/polypheny/db/catalog/Catalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/Catalog.java @@ -74,6 +74,7 @@ public static Catalog getInstance() { } + public abstract void init(); public abstract void commit() throws NoTablePrimaryKeyException; @@ -238,6 +239,11 @@ protected final boolean isValidIdentifier( final String str ) { public abstract Snapshot getSnapshot(); + public static Snapshot snapshot() { + return INSTANCE.getSnapshot(); + } + + public abstract Map getUsers(); public abstract Map getAdapters(); diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/Snapshot.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/Snapshot.java index 7d9924eb4d..899566c918 100644 --- a/core/src/main/java/org/polypheny/db/catalog/snapshot/Snapshot.java +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/Snapshot.java @@ -61,7 +61,7 @@ default Expression getSnapshotExpression( long id ) { * @param name Pattern for the schema name. null returns all. * @return List of schemas which fit to the specified filter. If there is no schema which meets the criteria, an empty list is returned. */ - public abstract @NonNull List getNamespaces( @Nullable Pattern name ); + @NonNull List getNamespaces( @Nullable Pattern name ); /** * Returns the schema with the specified id. @@ -69,7 +69,7 @@ default Expression getSnapshotExpression( long id ) { * @param id The id of the schema * @return The schema */ - public abstract LogicalNamespace getNamespace( long id ); + LogicalNamespace getNamespace( long id ); /** * Returns the schema with the given name in the specified database. @@ -77,7 +77,7 @@ default Expression getSnapshotExpression( long id ) { * @param name The name of the schema * @return The schema */ - public abstract LogicalNamespace getNamespace( String name ); + LogicalNamespace getNamespace( String name ); /** @@ -86,7 +86,7 @@ default Expression getSnapshotExpression( long id ) { * @param name The name of the schema to check * @return True if there is a schema with this name. False if not. 
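*
* Editorial usage sketch (hedged): together with the static {@code Catalog#snapshot()}
* helper introduced in this commit, a lookup is expected to read as
* {@code boolean exists = Catalog.snapshot().checkIfExistsNamespace( "public" );}.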
*/ - public abstract boolean checkIfExistsNamespace( String name ); + boolean checkIfExistsNamespace( String name ); /** @@ -96,7 +96,7 @@ default Expression getSnapshotExpression( long id ) { * @return The user * @throws UnknownUserException If there is no user with the specified name */ - public abstract CatalogUser getUser( String name ) throws UnknownUserException; + CatalogUser getUser( String name ) throws UnknownUserException; /** * Get the user with the specified id. @@ -104,28 +104,28 @@ default Expression getSnapshotExpression( long id ) { * @param id The id of the user * @return The user */ - public abstract CatalogUser getUser( long id ); + CatalogUser getUser( long id ); /** * Get list of all adapters * * @return List of adapters */ - public abstract List getAdapters(); + List getAdapters(); /** * Get an adapter by its unique name * * @return The adapter */ - public abstract CatalogAdapter getAdapter( String uniqueName ) throws UnknownAdapterException; + CatalogAdapter getAdapter( String uniqueName ) throws UnknownAdapterException; /** * Get an adapter by its id * * @return The adapter */ - public abstract CatalogAdapter getAdapter( long id ); + CatalogAdapter getAdapter( long id ); /** * Check if an adapter with the given id exists @@ -133,7 +133,7 @@ default Expression getSnapshotExpression( long id ) { * @param id the id of the adapter * @return if the adapter exists */ - public abstract boolean checkIfExistsAdapter( long id ); + boolean checkIfExistsAdapter( long id ); /* @@ -141,7 +141,7 @@ default Expression getSnapshotExpression( long id ) { * * @return List of query interfaces */ - public abstract List getQueryInterfaces(); + List getQueryInterfaces(); /** * Get a query interface by its unique name @@ -149,7 +149,7 @@ default Expression getSnapshotExpression( long id ) { * @param uniqueName The unique name of the query interface * @return The CatalogQueryInterface */ - public abstract CatalogQueryInterface getQueryInterface( String uniqueName ) throws UnknownQueryInterfaceException; + CatalogQueryInterface getQueryInterface( String uniqueName ) throws UnknownQueryInterfaceException; /** * Get a query interface by its id @@ -157,11 +157,10 @@ default Expression getSnapshotExpression( long id ) { * @param id The id of the query interface * @return The CatalogQueryInterface */ - public abstract CatalogQueryInterface getQueryInterface( long id ); + CatalogQueryInterface getQueryInterface( long id ); - - public abstract List getTablesForPeriodicProcessing(); + List getTablesForPeriodicProcessing(); //// ENTITIES @@ -180,21 +179,22 @@ default List getOperatorList() { } - public abstract LogicalDocSnapshot getDocSnapshot( long namespaceId ); + LogicalDocSnapshot getDocSnapshot( long namespaceId ); - public abstract LogicalGraphSnapshot getGraphSnapshot( long namespaceId ); + LogicalGraphSnapshot getGraphSnapshot( long namespaceId ); - public abstract LogicalRelSnapshot getRelSnapshot( long namespaceId ); + LogicalRelSnapshot getRelSnapshot( long namespaceId ); - public abstract PhysicalSnapshot getPhysicalSnapshot(); + PhysicalSnapshot getPhysicalSnapshot(); - public abstract AllocSnapshot getAllocSnapshot(); + AllocSnapshot getAllocSnapshot(); List getIndexes(); LogicalEntity getLogicalEntity( long id ); + } diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/SnapshotImpl.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/SnapshotImpl.java index 94eb9330cc..bf21eef1c6 100644 --- 
a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/SnapshotImpl.java +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/SnapshotImpl.java @@ -222,7 +222,7 @@ public List getIndexes() { @Override public LogicalEntity getLogicalEntity( long id ) { - LogicalEntity entity = null; + LogicalEntity entity; for ( LogicalRelSnapshot value : relationals.values() ) { entity = value.getTable( id ); if ( entity != null ) { @@ -247,4 +247,5 @@ public LogicalEntity getLogicalEntity( long id ) { return null; } + } diff --git a/dbms/src/test/java/org/polypheny/db/misc/HorizontalPartitioningTest.java b/dbms/src/test/java/org/polypheny/db/misc/HorizontalPartitioningTest.java index d8fc88ca51..0a795c700e 100644 --- a/dbms/src/test/java/org/polypheny/db/misc/HorizontalPartitioningTest.java +++ b/dbms/src/test/java/org/polypheny/db/misc/HorizontalPartitioningTest.java @@ -532,9 +532,9 @@ public void rangePartitioningTest() throws SQLException { + "( PARTITION parta VALUES(5,4), " + "PARTITION partb VALUES(10,6))" ); - LogicalTable table = Catalog.getInstance().getTables( null, new Pattern( "rangepartitioning3" ) ).get( 0 ); + LogicalTable table = Catalog.snapshot().getTables( null, new Pattern( "rangepartitioning3" ) ).get( 0 ); - List catalogPartitions = Catalog.getInstance().getPartitionsByTable( table.id ); + List catalogPartitions = Catalog.snapshot().getPartitionsByTable( table.id ); Assert.assertEquals( new ArrayList<>( Arrays.asList( "4", "5" ) ) , catalogPartitions.get( 0 ).partitionQualifiers ); diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/DelegatingScope.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/DelegatingScope.java index 8ca7cfaad8..57c6cf3e81 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/DelegatingScope.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/DelegatingScope.java @@ -31,7 +31,6 @@ import org.polypheny.db.algebra.type.DynamicRecordType; import org.polypheny.db.algebra.type.StructKind; import org.polypheny.db.catalog.entity.CatalogEntity; -import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.languages.ParserPos; import org.polypheny.db.prepare.Prepare.PreparingEntity; @@ -289,7 +288,7 @@ public SqlQualified fullyQualify( SqlIdentifier identifier ) { } // fall through default: { - CatalogEntity fromNs = null; + SqlValidatorNamespace fromNs = null; Path fromPath = null; AlgDataType fromRowType = null; final ResolvedImpl resolved = new ResolvedImpl(); @@ -301,7 +300,8 @@ public SqlQualified fullyQualify( SqlIdentifier identifier ) { resolve( prefix.names, nameMatcher, false, resolved ); if ( resolved.count() == 1 ) { final Resolve resolve = resolved.only(); - fromNs = resolve.getEntity().unwrap( LogicalTable.class ); + fromNs = resolve.namespace; + fromPath = resolve.path; fromRowType = resolve.rowType(); break; } @@ -311,10 +311,9 @@ public SqlQualified fullyQualify( SqlIdentifier identifier ) { resolved.clear(); resolve( prefix.names, liberalMatcher, false, resolved ); if ( resolved.count() == 1 ) { - final Step lastStep = Util.last( resolved.only().path.steps() ); throw validator.newValidationError( prefix, - Static.RESOURCE.tableNameNotFoundDidYouMean( prefix.toString(), lastStep.name ) ); + Static.RESOURCE.tableNameNotFoundDidYouMean( prefix.toString(), resolved.only().getEntity().name ) ); } } } @@ -369,7 +368,7 @@ public 
SqlQualified fullyQualify( SqlIdentifier identifier ) { // // change "e.empno" to "E.empno". if ( fromNs.getEnclosingNode() != null && !(this instanceof MatchRecognizeScope) ) { - + String alias = SqlValidatorUtil.getAlias( fromNs.getEnclosingNode(), -1 ); if ( alias != null && i > 0 && !alias.equals( identifier.names.get( i - 1 ) ) ) { identifier = identifier.setName( i - 1, alias ); } @@ -491,8 +490,7 @@ private int worstKind( Path path ) { } return SqlQualified.create( this, i, fromNs, identifier ); } - }*/ - throw new RuntimeException(); + } } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorNamespace.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorNamespace.java index 20446b69c4..f0600e1782 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorNamespace.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorNamespace.java @@ -23,7 +23,6 @@ import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.nodes.validate.ValidatorNamespace; -import org.polypheny.db.nodes.validate.ValidatorTable; import org.polypheny.db.sql.language.SqlIdentifier; import org.polypheny.db.sql.language.SqlNode; import org.polypheny.db.util.Pair; diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorScope.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorScope.java index fe61255f3e..289c066221 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorScope.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorScope.java @@ -19,12 +19,12 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterables; -import com.google.common.collect.Lists; import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.stream.Collectors; import lombok.Getter; import org.polypheny.db.algebra.constant.Monotonicity; import org.polypheny.db.algebra.type.AlgDataType; @@ -242,7 +242,7 @@ public List steps() { * Returns a list ["step1", "step2"]. 
*/ List stepNames() { - return Lists.transform( steps(), input -> input.name ); + return steps().stream().map( input -> input.name ).collect( Collectors.toList() ); } @@ -344,6 +344,8 @@ class Resolve { @Getter private final CatalogEntity entity; + public Path path; + public SqlValidatorNamespace namespace; Resolve( CatalogEntity entity ) { From b4df1cb3c999eebbefc9be237bbbcd0086ba7f39 Mon Sep 17 00:00:00 2001 From: datomo Date: Tue, 28 Mar 2023 14:28:56 +0200 Subject: [PATCH 052/436] adjusted the interface to interact with snapshot --- .../catalog/snapshot/LogicalRelSnapshot.java | 60 ++++++++++--------- .../db/catalog/snapshot/Snapshot.java | 10 ++++ .../snapshot/impl/LogicalRelSnapshotImpl.java | 6 +- .../catalog/snapshot/impl/SnapshotImpl.java | 38 +++++++++--- .../org/polypheny/db/ddl/DdlManagerImpl.java | 2 +- .../processing/ConstraintEnforceAttacher.java | 2 +- .../db/routing/routers/BaseRouter.java | 2 +- .../db/misc/HorizontalPartitioningTest.java | 26 ++++---- .../statistics/StatisticQueryProcessor.java | 6 +- .../org/polypheny/db/avatica/DbmsMeta.java | 2 +- .../db/languages/mql/MqlRenameCollection.java | 2 +- .../java/org/polypheny/db/webui/Crud.java | 6 +- 12 files changed, 99 insertions(+), 63 deletions(-) diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalRelSnapshot.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalRelSnapshot.java index 120f975b5f..a294759e08 100644 --- a/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalRelSnapshot.java +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalRelSnapshot.java @@ -41,10 +41,11 @@ public interface LogicalRelSnapshot { * Get all tables of the specified schema which fit to the specified filters. * getTables(xid, databaseName, null, null, null) returns all tables of the database. * + * @param namespace Pattern for the namespace name. null returns all. * @param name Pattern for the table name. null returns all. * @return List of tables which fit to the specified filters. If there is no table which meets the criteria, an empty list is returned. */ - public abstract List getTables( @Nullable Pattern name ); + List getTables( @Nullable Pattern namespace, @Nullable Pattern name ); /** * Returns the table with the given name in the specified schema. * * @param tableName The name of the table * @return The table * @throws UnknownTableException If there is no table with this name in the specified database and schema. */ - public abstract LogicalTable getTable( String tableName ); + LogicalTable getTable( String tableName ); /** * Returns all keys. * * @return The keys */ - public abstract List getKeys(); + List getKeys(); /** * Get all keys for a given table. * * @param tableId The id of the table for which the keys are returned * @return The collection of keys */ - public abstract List getTableKeys( long tableId ); + List getTableKeys( long tableId ); /** * Get all columns of the specified table. * * @param tableId The id of the table * @return List of columns which fit to the specified filters. If there is no column which meets the criteria, an empty list is returned. */ - public abstract List getColumns( long tableId ); + List getColumns( long tableId ); /** * Get all columns of the specified database which fit to the specified filter patterns. * getColumns(xid, databaseName, null, null, null) returns all columns of the database. * * @param tableName Pattern for the table name. null returns all. * @param columnName Pattern for the column name. null returns all. * @return List of columns which fit to the specified filters.
If there is no column which meets the criteria, an empty list is returned. */ - public abstract List getColumns( @Nullable Pattern tableName, @Nullable Pattern columnName ); + List getColumns( @Nullable Pattern tableName, @Nullable Pattern columnName ); /** * Returns the column with the specified id. @@ -97,7 +98,7 @@ public interface LogicalRelSnapshot { * @param columnId The id of the column * @return A CatalogColumn */ - public abstract LogicalColumn getColumn( long columnId ); + LogicalColumn getColumn( long columnId ); /** * Returns the column with the specified name in the specified table of the specified database and schema. @@ -107,7 +108,7 @@ public interface LogicalRelSnapshot { * @return A CatalogColumn * @throws UnknownColumnException If there is no column with this name in the specified table of the database and schema. */ - public abstract LogicalColumn getColumn( long tableId, String columnName ); + LogicalColumn getColumn( long tableId, String columnName ); /** * Returns the column with the specified name in the specified table of the specified database and schema. @@ -116,7 +117,7 @@ public interface LogicalRelSnapshot { * @param columnName The name of the column * @return A CatalogColumn */ - public abstract LogicalColumn getColumn( String tableName, String columnName ) throws UnknownColumnException, UnknownSchemaException, UnknownTableException; + LogicalColumn getColumn( String tableName, String columnName ) throws UnknownColumnException, UnknownSchemaException, UnknownTableException; /** * Checks if there is a column with the specified name in the specified table. @@ -125,7 +126,7 @@ public interface LogicalRelSnapshot { * @param columnName The name to check for * @return true if there is a column with this name, false if not. */ - public abstract boolean checkIfExistsColumn( long tableId, String columnName ); + boolean checkIfExistsColumn( long tableId, String columnName ); /** * Returns a specified primary key @@ -133,7 +134,7 @@ public interface LogicalRelSnapshot { * @param key The id of the primary key * @return The primary key */ - public abstract CatalogPrimaryKey getPrimaryKey( long key ); + CatalogPrimaryKey getPrimaryKey( long key ); /** * Check whether a key is a primary key @@ -141,7 +142,7 @@ public interface LogicalRelSnapshot { * @param keyId The id of the key * @return Whether the key is a primary key */ - public abstract boolean isPrimaryKey( long keyId ); + boolean isPrimaryKey( long keyId ); /** * Check whether a key is a foreign key @@ -149,7 +150,7 @@ public interface LogicalRelSnapshot { * @param keyId The id of the key * @return Whether the key is a foreign key */ - public abstract boolean isForeignKey( long keyId ); + boolean isForeignKey( long keyId ); /** * Check whether a key is an index @@ -157,7 +158,7 @@ public interface LogicalRelSnapshot { * @param keyId The id of the key * @return Whether the key is an index */ - public abstract boolean isIndex( long keyId ); + boolean isIndex( long keyId ); /** * Check whether a key is a constraint @@ -165,7 +166,7 @@ public interface LogicalRelSnapshot { * @param keyId The id of the key * @return Whether the key is a constraint */ - public abstract boolean isConstraint( long keyId ); + boolean isConstraint( long keyId ); /** * Returns all (imported) foreign keys of a specified table @@ -173,7 +174,7 @@ public interface LogicalRelSnapshot { * @param tableId The id of the table * @return List of foreign keys */ - public abstract List getForeignKeys( long tableId ); + List getForeignKeys( long tableId ); 
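As an aside for readers following the refactoring: with the redundant "public abstract" modifiers gone, LogicalRelSnapshot reads as the read-only lookup surface of the relational catalog. A minimal sketch of the intended call pattern, assuming a namespace "public" that contains a table "emps"; the local variable names are illustrative and not part of the patch:

    Snapshot snapshot = Catalog.getInstance().getSnapshot();
    LogicalNamespace namespace = snapshot.getNamespace( "public" );
    LogicalRelSnapshot rel = snapshot.getRelSnapshot( namespace.id );

    LogicalTable emps = rel.getTable( "emps" );                   // name-based lookup
    List<LogicalColumn> columns = rel.getColumns( emps.id );      // all columns of the table
    CatalogPrimaryKey pk = rel.getPrimaryKey( emps.primaryKey );  // assumes the table actually has a primary key

Since a snapshot is immutable, all three lookups observe the same catalog version; that consistency is the point of routing reads through the snapshot instead of the live catalog.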
/** * Returns all foreign keys that reference the specified table (exported keys). @@ -181,7 +182,7 @@ public interface LogicalRelSnapshot { * @param tableId The id of the table * @return List of foreign keys */ - public abstract List getExportedKeys( long tableId ); + List getExportedKeys( long tableId ); /** * Get all constraints of the specified table @@ -189,7 +190,7 @@ public interface LogicalRelSnapshot { * @param tableId The id of the table * @return List of constraints */ - public abstract List getConstraints( long tableId ); + List getConstraints( long tableId ); /** @@ -198,7 +199,7 @@ public interface LogicalRelSnapshot { * @param key The key for which the collection is returned * @return The collection of constraints */ - public abstract List getConstraints( CatalogKey key ); + List getConstraints( CatalogKey key ); /** * Returns the constraint with the specified name in the specified table. @@ -207,7 +208,7 @@ public interface LogicalRelSnapshot { * @param constraintName The name of the constraint * @return The constraint */ - public abstract CatalogConstraint getConstraint( long tableId, String constraintName ) throws UnknownConstraintException; + CatalogConstraint getConstraint( long tableId, String constraintName ) throws UnknownConstraintException; /** * Return the foreign key with the specified name from the specified table @@ -216,9 +217,9 @@ public interface LogicalRelSnapshot { * @param foreignKeyName The name of the foreign key * @return The foreign key */ - public abstract CatalogForeignKey getForeignKey( long tableId, String foreignKeyName ) throws UnknownForeignKeyException; + CatalogForeignKey getForeignKey( long tableId, String foreignKeyName ) throws UnknownForeignKeyException; - public abstract List getIndexes(); + List getIndexes(); /** * Gets a collection of index for the given key. @@ -226,7 +227,7 @@ public interface LogicalRelSnapshot { * @param key The key for which the collection is returned * @return The collection of indexes */ - public abstract List getIndexes( CatalogKey key ); + List getIndexes( CatalogKey key ); /** * Gets a collection of foreign keys for a given {@link Catalog Key}. @@ -234,7 +235,7 @@ public interface LogicalRelSnapshot { * @param key The key for which the collection is returned * @return The collection foreign keys */ - public abstract List getForeignKeys( CatalogKey key ); + List getForeignKeys( CatalogKey key ); /** * Returns all indexes of a table @@ -243,7 +244,7 @@ public interface LogicalRelSnapshot { * @param onlyUnique true if only indexes for unique values are returned. false if all indexes are returned. * @return List of indexes */ - public abstract List getIndexes( long tableId, boolean onlyUnique ); + List getIndexes( long tableId, boolean onlyUnique ); /** * Returns the index with the specified name in the specified table @@ -252,7 +253,7 @@ public interface LogicalRelSnapshot { * @param indexName The name of the index * @return The Index */ - public abstract CatalogIndex getIndex( long tableId, String indexName ) throws UnknownIndexException; + CatalogIndex getIndex( long tableId, String indexName ) throws UnknownIndexException; /** * Checks if there is an index with the specified name in the specified table. @@ -261,7 +262,7 @@ public interface LogicalRelSnapshot { * @param indexName The name to check for * @return true if there is an index with this name, false if not. 
*/ - public abstract boolean checkIfExistsIndex( long tableId, String indexName ); + boolean checkIfExistsIndex( long tableId, String indexName ); /** * Returns the index with the specified id @@ -269,12 +270,13 @@ public interface LogicalRelSnapshot { * @param indexId The id of the index * @return The Index */ - public abstract CatalogIndex getIndex( long indexId ); + CatalogIndex getIndex( long indexId ); LogicalTable getTable( long id ); - boolean checkIfExistsEntity( String newName ); + boolean checkIfExistsEntity( String name ); + } diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/Snapshot.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/Snapshot.java index 899566c918..77549189a9 100644 --- a/core/src/main/java/org/polypheny/db/catalog/snapshot/Snapshot.java +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/Snapshot.java @@ -197,4 +197,14 @@ default List getOperatorList() { LogicalEntity getLogicalEntity( long id ); + LogicalRelSnapshot rel(); + + LogicalGraphSnapshot graph(); + + + LogicalDocSnapshot doc(); + + + AllocSnapshot alloc(); + } diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalRelSnapshotImpl.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalRelSnapshotImpl.java index c2af262bf6..13832a76cb 100644 --- a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalRelSnapshotImpl.java +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalRelSnapshotImpl.java @@ -157,11 +157,11 @@ public LogicalRelSnapshotImpl( LogicalRelationalCatalog catalog ) { @Override - public List getTables( @Nullable Pattern name ) { + public List getTables( @javax.annotation.Nullable Pattern namespace, @Nullable Pattern name ) { if ( name == null ) { return tables.values().asList(); } - return tables.values().stream().filter( t -> namespace.caseSensitive ? t.name.matches( name.toRegex() ) : t.name.toLowerCase().matches( (name.toRegex().toLowerCase()) ) ).collect( Collectors.toList() ); + return tables.values().stream().filter( t -> this.namespace.caseSensitive ? 
t.name.matches( name.toRegex() ) : t.name.toLowerCase().matches( (name.toRegex().toLowerCase()) ) ).collect( Collectors.toList() ); } @@ -192,7 +192,7 @@ public List getColumns( long tableId ) { @Override public List getColumns( @Nullable Pattern tableName, @Nullable Pattern columnName ) { - List tables = getTables( tableName ); + List tables = getTables( null, tableName ); if ( columnName == null ) { return tables.stream().flatMap( t -> tableColumns.get( t.id ).stream() ).collect( Collectors.toList() ); } diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/SnapshotImpl.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/SnapshotImpl.java index bf21eef1c6..8747a7603e 100644 --- a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/SnapshotImpl.java +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/SnapshotImpl.java @@ -47,9 +47,9 @@ @Value public class SnapshotImpl implements Snapshot { - ImmutableMap relationals; - ImmutableMap documents; - ImmutableMap graphs; + LogicalRelSnapshot rel; + LogicalDocSnapshot doc; + LogicalGraphSnapshot graph; AllocSnapshot alloc; PhysicalSnapshot physical; @Getter @@ -69,11 +69,11 @@ public class SnapshotImpl implements Snapshot { ImmutableMap namespaceNames; - public SnapshotImpl( long id, Catalog catalog, Map namespaces, Map relationals, Map documents, Map graphs, AllocSnapshot alloc, PhysicalSnapshot physical ) { + public SnapshotImpl( long id, Catalog catalog, Map namespaces, LogicalRelSnapshot rel, LogicalDocSnapshot doc, LogicalGraphSnapshot graph, AllocSnapshot alloc, PhysicalSnapshot physical ) { this.id = id; - this.relationals = ImmutableMap.copyOf( relationals ); - this.documents = ImmutableMap.copyOf( documents ); - this.graphs = ImmutableMap.copyOf( graphs ); + this.rel = rel; + this.doc = doc; + this.graph = graph; this.namespaces = ImmutableMap.copyOf( namespaces ); @@ -248,4 +248,28 @@ public LogicalEntity getLogicalEntity( long id ) { } + @Override + public LogicalRelSnapshot rel() { + return rel; + } + + + @Override + public LogicalGraphSnapshot graph() { + return graph; + } + + + @Override + public LogicalDocSnapshot doc() { + return doc; + } + + + @Override + public AllocSnapshot alloc() { + return alloc; + } + + } diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java index c178e49365..2f327b544a 100644 --- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java @@ -2919,7 +2919,7 @@ public void dropNamespace( String schemaName, boolean ifExists, Statement statem } // Drop all tables in this schema - List catalogEntities = catalog.getSnapshot().getRelSnapshot( logicalNamespace.id ).getTables( null ); + List catalogEntities = catalog.getSnapshot().getRelSnapshot( logicalNamespace.id ).getTables( null, null ); for ( LogicalTable catalogTable : catalogEntities ) { dropTable( catalogTable, statement ); } diff --git a/dbms/src/main/java/org/polypheny/db/processing/ConstraintEnforceAttacher.java b/dbms/src/main/java/org/polypheny/db/processing/ConstraintEnforceAttacher.java index b09e6a8231..ce1d0d41fd 100644 --- a/dbms/src/main/java/org/polypheny/db/processing/ConstraintEnforceAttacher.java +++ b/dbms/src/main/java/org/polypheny/db/processing/ConstraintEnforceAttacher.java @@ -650,7 +650,7 @@ private boolean testConstraintsValid() { .getSnapshot() .getNamespaces( null ) .stream() - .flatMap( n ->
Catalog.getInstance().getSnapshot().getRelSnapshot( n.id ).getTables( null ).stream() ) + .flatMap( n -> Catalog.getInstance().getSnapshot().getRelSnapshot( n.id ).getTables( null, null ).stream() ) .filter( t -> t.entityType == EntityType.ENTITY && t.getNamespaceType() == NamespaceType.RELATIONAL ) .collect( Collectors.toList() ); Transaction transaction = this.manager.startTransaction( Catalog.defaultUserId, false, "ConstraintEnforcement" ); diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java b/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java index 5c7ad4b81e..7fb833996d 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java @@ -445,7 +445,7 @@ public AlgNode handleGraphScan( LogicalLpgScan alg, Statement statement, @Nullab private AlgNode handleGraphOnRelational( LogicalLpgScan alg, CatalogNamespace namespace, Statement statement, Long placementId ) { AlgOptCluster cluster = alg.getCluster(); - List tables = snapshot.getRelSnapshot( namespace.id ).getTables( null ); + List tables = snapshot.getRelSnapshot( namespace.id ).getTables( null, null ); List> scans = tables.stream() .map( t -> Pair.of( t.name, buildJoinedScan( statement, cluster, null ) ) ) .collect( Collectors.toList() ); diff --git a/dbms/src/test/java/org/polypheny/db/misc/HorizontalPartitioningTest.java b/dbms/src/test/java/org/polypheny/db/misc/HorizontalPartitioningTest.java index 0a795c700e..6275230a91 100644 --- a/dbms/src/test/java/org/polypheny/db/misc/HorizontalPartitioningTest.java +++ b/dbms/src/test/java/org/polypheny/db/misc/HorizontalPartitioningTest.java @@ -532,9 +532,9 @@ public void rangePartitioningTest() throws SQLException { + "( PARTITION parta VALUES(5,4), " + "PARTITION partb VALUES(10,6))" ); - LogicalTable table = Catalog.snapshot().getTables( null, new Pattern( "rangepartitioning3" ) ).get( 0 ); + LogicalTable table = Catalog.snapshot().rel().getTables( null, new Pattern( "rangepartitioning3" ) ).get( 0 ); - List catalogPartitions = Catalog.snapshot().getPartitionsByTable( table.id ); + List catalogPartitions = Catalog.snapshot().alloc().getPartitionsByTable( table.id ); Assert.assertEquals( new ArrayList<>( Arrays.asList( "4", "5" ) ) , catalogPartitions.get( 0 ).partitionQualifiers ); @@ -642,31 +642,31 @@ public void partitionPlacementTest() throws SQLException { + "WITH (foo, bar, foobar, barfoo) " ); try { - LogicalTable table = Catalog.getInstance().getTables( null, new Pattern( "physicalpartitiontest" ) ).get( 0 ); + LogicalTable table = Catalog.snapshot().rel().getTables( null, new Pattern( "physicalpartitiontest" ) ).get( 0 ); // Check if sufficient PartitionPlacements have been created // Check if initially as many partitionPlacements are created as requested - Assert.assertEquals( partitionsToCreate, Catalog.getInstance().getAllPartitionPlacementsByTable( table.id ).size() ); + Assert.assertEquals( partitionsToCreate, Catalog.snapshot().alloc().getAllPartitionPlacementsByTable( table.id ).size() ); // ADD adapter statement.executeUpdate( "ALTER ADAPTERS ADD \"anotherstore\" USING 'Hsqldb' AS 'Store'" + " WITH '{maxConnections:\"25\",path:., trxControlMode:locks,trxIsolationLevel:read_committed,type:Memory,tableType:Memory,mode:embedded}'" ); - List debugPlacements = Catalog.getInstance().getAllPartitionPlacementsByTable( table.id ); + List debugPlacements = Catalog.snapshot().alloc().getAllPartitionPlacementsByTable( table.id );
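A short note on the access pattern these tests converge on: Catalog.snapshot() (the static shorthand introduced in the previous commit) pins one immutable snapshot, while the fluent sub-snapshots rel() and alloc() scope lookups to the logical relational level and the allocation level, respectively. A condensed sketch under the same assumptions as the test above (the placement list is kept raw here to match the surrounding test code):

    Snapshot snapshot = Catalog.snapshot(); // one catalog version for all reads below
    LogicalTable table = snapshot.rel().getTables( null, new Pattern( "physicalpartitiontest" ) ).get( 0 );
    List placements = snapshot.alloc().getAllPartitionPlacementsByTable( table.id );
    Assert.assertEquals( partitionsToCreate, placements.size() );

Calling Catalog.snapshot() again after each DDL statement, as the test does, is deliberate: each call is expected to return a snapshot that reflects the catalog state after the preceding update.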
// ADD FullPlacement statement.executeUpdate( "ALTER TABLE \"physicalPartitionTest\" ADD PLACEMENT ON STORE \"anotherstore\"" ); - Assert.assertEquals( partitionsToCreate * 2, Catalog.getInstance().getAllPartitionPlacementsByTable( table.id ).size() ); - debugPlacements = Catalog.getInstance().getAllPartitionPlacementsByTable( table.id ); + Assert.assertEquals( partitionsToCreate * 2, Catalog.snapshot().alloc().getAllPartitionPlacementsByTable( table.id ).size() ); + debugPlacements = Catalog.snapshot().alloc().getAllPartitionPlacementsByTable( table.id ); // Modify partitions on second store statement.executeUpdate( "ALTER TABLE \"physicalPartitionTest\" MODIFY PARTITIONS (\"foo\") ON STORE anotherstore" ); - Assert.assertEquals( partitionsToCreate + 1, Catalog.getInstance().getAllPartitionPlacementsByTable( table.id ).size() ); - debugPlacements = Catalog.getInstance().getAllPartitionPlacementsByTable( table.id ); + Assert.assertEquals( partitionsToCreate + 1, Catalog.snapshot().alloc().getAllPartitionPlacementsByTable( table.id ).size() ); + debugPlacements = Catalog.snapshot().alloc().getAllPartitionPlacementsByTable( table.id ); // After MERGE should only hold one partition statement.executeUpdate( "ALTER TABLE \"physicalPartitionTest\" MERGE PARTITIONS" ); - Assert.assertEquals( 2, Catalog.getInstance().getAllPartitionPlacementsByTable( table.id ).size() ); - debugPlacements = Catalog.getInstance().getAllPartitionPlacementsByTable( table.id ); + Assert.assertEquals( 2, Catalog.snapshot().alloc().getAllPartitionPlacementsByTable( table.id ).size() ); + debugPlacements = Catalog.snapshot().alloc().getAllPartitionPlacementsByTable( table.id ); // DROP STORE and verify number of partition Placements statement.executeUpdate( "ALTER TABLE \"physicalPartitionTest\" DROP PLACEMENT ON STORE \"anotherstore\"" ); - Assert.assertEquals( 1, Catalog.getInstance().getAllPartitionPlacementsByTable( table.id ).size() ); + Assert.assertEquals( 1, Catalog.snapshot().alloc().getAllPartitionPlacementsByTable( table.id ).size() ); } finally { // Drop tables and stores @@ -703,7 +703,7 @@ public void temperaturePartitionTest() throws SQLException { + " USING FREQUENCY write INTERVAL 10 minutes WITH 20 HASH PARTITIONS" ); try { - LogicalTable table = Catalog.getInstance().getTables( null, new Pattern( "temperaturetest" ) ).get( 0 ); + LogicalTable table = Catalog.snapshot().rel().getTables( null, new Pattern( "temperaturetest" ) ).get( 0 ); // Check if partition properties are correctly set and parsed Assert.assertEquals( 600, ((TemperaturePartitionProperty) table.partitionProperty).getFrequencyInterval() ); diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticQueryProcessor.java b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticQueryProcessor.java index 233eba07c6..2717de3a48 100644 --- a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticQueryProcessor.java +++ b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticQueryProcessor.java @@ -93,7 +93,7 @@ public List> getSchemaTree() { List schemas = snapshot.getNamespaces( null ); for ( LogicalNamespace schema : schemas ) { List tables = new ArrayList<>(); - List childTables = snapshot.getRelSnapshot( schema.id ).getTables( null ); + List childTables = snapshot.getRelSnapshot( schema.id ).getTables( null, null ); for ( LogicalTable childTable : childTables ) { List table = new ArrayList<>(); List columns = snapshot.getRelSnapshot( schema.id ).getColumns( childTable.id ); @@ -121,7
+121,7 @@ public List getAllColumns() { return snapshot.getNamespaces( null ) .stream() .filter( n -> n.namespaceType == NamespaceType.RELATIONAL ) - .flatMap( n -> snapshot.getRelSnapshot( n.id ).getTables( null ).stream().filter( t -> t.entityType != EntityType.VIEW ).flatMap( t -> snapshot.getRelSnapshot( n.id ).getColumns( t.id ).stream() ) ) + .flatMap( n -> snapshot.getRelSnapshot( n.id ).getTables( null, null ).stream().filter( t -> t.entityType != EntityType.VIEW ).flatMap( t -> snapshot.getRelSnapshot( n.id ).getColumns( t.id ).stream() ) ) .map( QueryResult::fromCatalogColumn ) .collect( Collectors.toList() ); } @@ -135,7 +135,7 @@ public List getAllColumns() { public List getAllTable() { Snapshot snapshot = Catalog.getInstance().getSnapshot(); return snapshot.getNamespaces( null ).stream().filter( n -> n.namespaceType == NamespaceType.RELATIONAL ) - .flatMap( n -> snapshot.getRelSnapshot( n.id ).getTables( null ).stream().filter( t -> t.entityType != EntityType.VIEW ) ).collect( Collectors.toList() ); + .flatMap( n -> snapshot.getRelSnapshot( n.id ).getTables( null, null ).stream().filter( t -> t.entityType != EntityType.VIEW ) ).collect( Collectors.toList() ); } diff --git a/plugins/avatica-interface/src/main/java/org/polypheny/db/avatica/DbmsMeta.java b/plugins/avatica-interface/src/main/java/org/polypheny/db/avatica/DbmsMeta.java index de29054fc7..27e6becc0c 100644 --- a/plugins/avatica-interface/src/main/java/org/polypheny/db/avatica/DbmsMeta.java +++ b/plugins/avatica-interface/src/main/java/org/polypheny/db/avatica/DbmsMeta.java @@ -308,7 +308,7 @@ private List getLogicalTables( Pattern schemaPattern, Pattern tabl return namespaces .stream() .flatMap( - n -> catalog.getSnapshot().getRelSnapshot( n.id ).getTables( tablePattern ).stream() ).collect( Collectors.toList() ); + n -> catalog.getSnapshot().getRelSnapshot( n.id ).getTables( null, tablePattern ).stream() ).collect( Collectors.toList() ); } diff --git a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlRenameCollection.java b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlRenameCollection.java index 492e4f316c..fbbe7a495b 100644 --- a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlRenameCollection.java +++ b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlRenameCollection.java @@ -56,7 +56,7 @@ public void execute( Context context, Statement statement, QueryParameters param try { LogicalNamespace schema = context.getSnapshot().getNamespace( database ); - List tables = context.getSnapshot().getRelSnapshot( schema.id ).getTables( null ); + List tables = context.getSnapshot().getRelSnapshot( schema.id ).getTables( null, null ); if ( dropTarget ) { Optional newTable = tables.stream() diff --git a/webui/src/main/java/org/polypheny/db/webui/Crud.java b/webui/src/main/java/org/polypheny/db/webui/Crud.java index 293e6ba63e..77704bffa2 100644 --- a/webui/src/main/java/org/polypheny/db/webui/Crud.java +++ b/webui/src/main/java/org/polypheny/db/webui/Crud.java @@ -382,7 +382,7 @@ void getSchemaTree( final Context ctx ) { ArrayList tableTree = new ArrayList<>(); ArrayList viewTree = new ArrayList<>(); ArrayList collectionTree = new ArrayList<>(); - List tables = catalog.getSnapshot().getRelSnapshot( schema.id ).getTables( null ); + List tables = catalog.getSnapshot().getRelSnapshot( schema.id ).getTables( null, null ); for ( LogicalTable table : tables ) { String icon = "fa fa-table"; if ( table.entityType == EntityType.SOURCE ) { @@ -469,7 +469,7 @@ void getTables(
final Context ctx ) { } } - List tables = catalog.getSnapshot().getRelSnapshot( namespaceId ).getTables( null ); + List tables = catalog.getSnapshot().getRelSnapshot( namespaceId ).getTables( null, null ); ArrayList result = new ArrayList<>(); for ( LogicalTable t : tables ) { result.add( new DbTable( t.name, namespaceName, t.modifiable, t.entityType ) ); } @@ -2515,7 +2515,7 @@ void getUml( final Context ctx ) { List catalogEntities = Catalog.getInstance().getSnapshot().getNamespaces( new org.polypheny.db.catalog.logistic.Pattern( request.schema ) ) .stream() .filter( s -> s.namespaceType == NamespaceType.RELATIONAL ) - .flatMap( s -> catalog.getSnapshot().getRelSnapshot( s.id ).getTables( null ).stream() ).collect( Collectors.toList() ); + .flatMap( s -> catalog.getSnapshot().getRelSnapshot( s.id ).getTables( null, null ).stream() ).collect( Collectors.toList() ); for ( LogicalTable catalogTable : catalogEntities ) { if ( catalogTable.entityType == EntityType.ENTITY || catalogTable.entityType == EntityType.SOURCE ) { From dddd225bf22d1d763c1b4c676ffa54d56e5d3565 Mon Sep 17 00:00:00 2001 From: datomo Date: Wed, 5 Apr 2023 23:48:19 +0200 Subject: [PATCH 053/436] adjusted interface of snapshot --- .../org/polypheny/db/adapter/Adapter.java | 4 +- .../polypheny/db/adapter/AdapterManager.java | 2 +- .../db/adapter/index/IndexManager.java | 6 +- .../common/LogicalConstraintEnforcer.java | 4 +- .../entity/allocation/AllocationTable.java | 2 +- .../catalog/entity/logical/LogicalGraph.java | 2 +- .../catalog/entity/logical/LogicalTable.java | 4 +- .../db/catalog/snapshot/Snapshot.java | 19 +--- .../snapshot/impl/LogicalDocSnapshotImpl.java | 3 +- .../impl/LogicalGraphSnapshotImpl.java | 3 +- .../snapshot/impl/LogicalRelSnapshotImpl.java | 31 +++--- .../snapshot/impl/SnapshotBuilder.java | 24 ++--- .../catalog/snapshot/impl/SnapshotImpl.java | 98 ++++--------------- .../processing/LogicalAlgAnalyzeShuttle.java | 2 +- .../java/org/polypheny/db/schema/Schemas.java | 4 +- .../org/polypheny/db/tools/AlgBuilder.java | 4 +- 16 files changed, 68 insertions(+), 144 deletions(-) diff --git a/core/src/main/java/org/polypheny/db/adapter/Adapter.java b/core/src/main/java/org/polypheny/db/adapter/Adapter.java index 539d05ae84..d452ab27ad 100644 --- a/core/src/main/java/org/polypheny/db/adapter/Adapter.java +++ b/core/src/main/java/org/polypheny/db/adapter/Adapter.java @@ -488,14 +488,14 @@ public void addInformationPhysicalNames() { Snapshot snapshot = Catalog.getInstance().getSnapshot(); group.setRefreshFunction( () -> { physicalColumnNames.reset(); - List physicalsOnAdapter = snapshot.getPhysicalSnapshot().getPhysicalsOnAdapter( adapterId ); + List physicalsOnAdapter = snapshot.physical().getPhysicalsOnAdapter( adapterId ); for ( PhysicalEntity entity : physicalsOnAdapter ) { if ( entity.namespaceType != NamespaceType.RELATIONAL ) { continue; } PhysicalTable physicalTable = (PhysicalTable) entity; - LogicalRelSnapshot relSnapshot = snapshot.getRelSnapshot( physicalTable.namespaceId ); + LogicalRelSnapshot relSnapshot = snapshot.rel(); for ( Entry entry : physicalTable.columns.entrySet() ) { physicalColumnNames.addRow( diff --git a/core/src/main/java/org/polypheny/db/adapter/AdapterManager.java b/core/src/main/java/org/polypheny/db/adapter/AdapterManager.java index 22358ea8df..eb49520408 100644 --- a/core/src/main/java/org/polypheny/db/adapter/AdapterManager.java +++ b/core/src/main/java/org/polypheny/db/adapter/AdapterManager.java @@ -244,7 +244,7 @@ public void removeAdapter( long adapterId ) { CatalogAdapter
catalogAdapter = Catalog.getInstance().getSnapshot().getAdapter( adapterId ); // Check if the store has any placements - List placements = Catalog.getInstance().getSnapshot().getAllocSnapshot().getAllocationsOnAdapter( catalogAdapter.id ); + List placements = Catalog.getInstance().getSnapshot().alloc().getAllocationsOnAdapter( catalogAdapter.id ); if ( placements.size() != 0 ) { throw new RuntimeException( "There is still data placed on this data store" ); } diff --git a/core/src/main/java/org/polypheny/db/adapter/index/IndexManager.java b/core/src/main/java/org/polypheny/db/adapter/index/IndexManager.java index 7ed0ffc1b9..98224a7f68 100644 --- a/core/src/main/java/org/polypheny/db/adapter/index/IndexManager.java +++ b/core/src/main/java/org/polypheny/db/adapter/index/IndexManager.java @@ -149,7 +149,7 @@ public void initialize( final TransactionManager transactionManager ) { public void restoreIndexes() throws UnknownSchemaException, GenericCatalogException, UnknownTableException, UnknownKeyException, UnknownUserException, TransactionException { - for ( final CatalogIndex index : Catalog.getInstance().getSnapshot().getIndexes() ) { + for ( final CatalogIndex index : Catalog.getInstance().getSnapshot().rel().getIndexes() ) { if ( index.location == 0 ) { addIndex( index ); } @@ -173,8 +173,8 @@ protected void addIndex( final long id, final String name, final CatalogKey key, .filter( it -> it.canProvide( method, unique, persistent ) ) .findFirst() .orElseThrow( IllegalArgumentException::new ); - final LogicalTable table = statement.getTransaction().getSnapshot().getRelSnapshot( key.namespaceId ).getTable( key.tableId ); - final CatalogPrimaryKey pk = statement.getTransaction().getSnapshot().getRelSnapshot( key.namespaceId ).getPrimaryKey( table.primaryKey ); + final LogicalTable table = statement.getTransaction().getSnapshot().rel().getTable( key.tableId ); + final CatalogPrimaryKey pk = statement.getTransaction().getSnapshot().rel().getPrimaryKey( table.primaryKey ); final Index index = factory.create( id, name, diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalConstraintEnforcer.java b/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalConstraintEnforcer.java index a37e661994..aa28c6cdad 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalConstraintEnforcer.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalConstraintEnforcer.java @@ -94,7 +94,7 @@ private static EnforcementInformation getControl( AlgNode node, Statement statem AlgBuilder builder = AlgBuilder.create( statement ); final RexBuilder rexBuilder = modify.getCluster().getRexBuilder(); - LogicalRelSnapshot snapshot = Catalog.getInstance().getSnapshot().getRelSnapshot( table.namespaceId ); + LogicalRelSnapshot snapshot = Catalog.getInstance().getSnapshot().rel(); EnforcementTime enforcementTime = EnforcementTime.ON_QUERY; final List constraints = new ArrayList<>( snapshot.getConstraints( table.id ) ) @@ -223,7 +223,7 @@ public static EnforcementInformation getControl( LogicalTable table, Statement s AlgBuilder builder = AlgBuilder.create( statement ); final RexBuilder rexBuilder = builder.getRexBuilder(); - LogicalRelSnapshot snapshot = Catalog.getInstance().getSnapshot().getRelSnapshot( table.namespaceId ); + LogicalRelSnapshot snapshot = Catalog.getInstance().getSnapshot().rel(); final List constraints = snapshot .getConstraints( table.id ) diff --git 
a/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationTable.java b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationTable.java index 99a656b1ac..fec9ade6ea 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationTable.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationTable.java @@ -72,7 +72,7 @@ public Map getColumnNames() { public Map getColumns() { - return Catalog.getInstance().getSnapshot().getRelSnapshot( namespaceId ).getColumns( logicalId ).stream().collect( Collectors.toMap( c -> c.id, c -> c ) ); + return Catalog.getInstance().getSnapshot().rel().getColumns( logicalId ).stream().collect( Collectors.toMap( c -> c.id, c -> c ) ); } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalGraph.java b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalGraph.java index 13df4ee8e8..8a38802bff 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalGraph.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalGraph.java @@ -74,7 +74,7 @@ public Expression asExpression() { public List getPlacements() { - return Catalog.getInstance().getSnapshot().getAllocSnapshot().getGraphPlacements( id ); + return Catalog.getInstance().getSnapshot().alloc().getGraphPlacements( id ); } } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalTable.java b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalTable.java index 1eca2295ec..ba22b7256a 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalTable.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalTable.java @@ -113,7 +113,7 @@ public int compareTo( LogicalTable o ) { public AlgDataType getRowType() { final AlgDataTypeFactory.Builder fieldInfo = AlgDataTypeFactory.DEFAULT.builder(); - for ( LogicalColumn column : Catalog.getInstance().getSnapshot().getRelSnapshot( namespaceId ).getColumns( id ) ) { + for ( LogicalColumn column : Catalog.getInstance().getSnapshot().rel().getColumns( id ) ) { AlgDataType sqlType = column.getAlgDataType( AlgDataTypeFactory.DEFAULT ); fieldInfo.add( column.name, null, sqlType ).nullable( column.nullable ); } @@ -134,7 +134,7 @@ public List getColumnStrategies() { public List getColumns() { - return Catalog.getInstance().getSnapshot().getRelSnapshot( namespaceId ).getColumns( id ); + return Catalog.getInstance().getSnapshot().rel().getColumns( id ); } diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/Snapshot.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/Snapshot.java index 77549189a9..decd597d16 100644 --- a/core/src/main/java/org/polypheny/db/catalog/snapshot/Snapshot.java +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/Snapshot.java @@ -27,7 +27,6 @@ import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogAdapter; import org.polypheny.db.catalog.entity.CatalogEntity; -import org.polypheny.db.catalog.entity.CatalogIndex; import org.polypheny.db.catalog.entity.CatalogQueryInterface; import org.polypheny.db.catalog.entity.CatalogUser; import org.polypheny.db.catalog.entity.LogicalNamespace; @@ -47,7 +46,7 @@ public interface Snapshot extends OperatorTable { NameMatcher nameMatcher = NameMatchers.withCaseSensitive( RuntimeConfig.RELATIONAL_CASE_SENSITIVE.getBoolean() ); - long getId(); + long id(); default Expression getSnapshotExpression( long id ) { return Expressions.call( 
Catalog.CATALOG_EXPRESSION, "getSnapshot", Expressions.constant( id ) ); @@ -179,21 +178,6 @@ default List getOperatorList() { } - LogicalDocSnapshot getDocSnapshot( long namespaceId ); - - LogicalGraphSnapshot getGraphSnapshot( long namespaceId ); - - - LogicalRelSnapshot getRelSnapshot( long namespaceId ); - - - PhysicalSnapshot getPhysicalSnapshot(); - - AllocSnapshot getAllocSnapshot(); - - - List getIndexes(); - LogicalEntity getLogicalEntity( long id ); @@ -207,4 +191,5 @@ default List getOperatorList() { AllocSnapshot alloc(); + PhysicalSnapshot physical(); } diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalDocSnapshotImpl.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalDocSnapshotImpl.java index 04a5d23430..80e74be7ac 100644 --- a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalDocSnapshotImpl.java +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalDocSnapshotImpl.java @@ -17,6 +17,7 @@ package org.polypheny.db.catalog.snapshot.impl; import java.util.List; +import java.util.Map; import org.polypheny.db.catalog.catalogs.LogicalDocumentCatalog; import org.polypheny.db.catalog.entity.logical.LogicalCollection; import org.polypheny.db.catalog.logistic.Pattern; @@ -24,7 +25,7 @@ public class LogicalDocSnapshotImpl implements LogicalDocSnapshot { - public LogicalDocSnapshotImpl( LogicalDocumentCatalog value ) { + public LogicalDocSnapshotImpl( Map value ) { } diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalGraphSnapshotImpl.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalGraphSnapshotImpl.java index d14f46e765..d3f4ecd147 100644 --- a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalGraphSnapshotImpl.java +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalGraphSnapshotImpl.java @@ -17,6 +17,7 @@ package org.polypheny.db.catalog.snapshot.impl; import java.util.List; +import java.util.Map; import org.polypheny.db.catalog.catalogs.LogicalGraphCatalog; import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.catalog.logistic.Pattern; @@ -24,7 +25,7 @@ public class LogicalGraphSnapshotImpl implements LogicalGraphSnapshot { - public LogicalGraphSnapshotImpl( LogicalGraphCatalog value ) { + public LogicalGraphSnapshotImpl( Map value ) { } diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalRelSnapshotImpl.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalRelSnapshotImpl.java index 13832a76cb..f0e7c8fb48 100644 --- a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalRelSnapshotImpl.java +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalRelSnapshotImpl.java @@ -24,7 +24,6 @@ import java.util.Map.Entry; import java.util.stream.Collectors; import lombok.Value; -import org.jetbrains.annotations.Nullable; import org.polypheny.db.catalog.catalogs.LogicalRelationalCatalog; import org.polypheny.db.catalog.entity.CatalogConstraint; import org.polypheny.db.catalog.entity.CatalogForeignKey; @@ -46,7 +45,7 @@ @Value public class LogicalRelSnapshotImpl implements LogicalRelSnapshot { - LogicalNamespace namespace; + ImmutableMap namespaces; ImmutableMap tables; @@ -79,14 +78,14 @@ public class LogicalRelSnapshotImpl implements LogicalRelSnapshot { ImmutableMap> tableForeignKeys; - public LogicalRelSnapshotImpl( LogicalRelationalCatalog catalog ) { - namespace = catalog.getLogicalNamespace(); + public 
LogicalRelSnapshotImpl( Map catalogs ) { + namespaces = ImmutableMap.copyOf( catalogs.values().stream().map( LogicalRelationalCatalog::getLogicalNamespace ).collect( Collectors.toMap( n -> n.id, n -> n ) ) ); - tables = ImmutableMap.copyOf( catalog.getTables() ); - tableNames = ImmutableMap.copyOf( tables.entrySet().stream().collect( Collectors.toMap( e -> namespace.caseSensitive ? e.getValue().name : e.getValue().name.toLowerCase(), Entry::getValue ) ) ); + tables = ImmutableMap.copyOf( catalogs.values().stream().flatMap( c -> c.getTables().entrySet().stream() ).collect( Collectors.toMap( Entry::getKey, Entry::getValue ) ) ); + tableNames = ImmutableMap.copyOf( tables.entrySet().stream().collect( Collectors.toMap( e -> namespaces.get( e.getValue().namespaceId ).caseSensitive ? e.getValue().name : e.getValue().name.toLowerCase(), Entry::getValue ) ) ); - columns = ImmutableMap.copyOf( catalog.getColumns() ); - columnNames = ImmutableMap.copyOf( columns.entrySet().stream().collect( Collectors.toMap( e -> namespace.caseSensitive ? Pair.of( e.getValue().tableId, e.getValue().name ) : Pair.of( e.getValue().tableId, e.getValue().name.toLowerCase() ), Entry::getValue ) ) ); + columns = ImmutableMap.copyOf( catalogs.values().stream().flatMap( c -> c.getColumns().entrySet().stream() ).collect( Collectors.toMap( Entry::getKey, Entry::getValue ) ) ); + columnNames = ImmutableMap.copyOf( columns.entrySet().stream().collect( Collectors.toMap( e -> namespaces.get( e.getValue().namespaceId ).caseSensitive ? Pair.of( e.getValue().tableId, e.getValue().name ) : Pair.of( e.getValue().tableId, e.getValue().name.toLowerCase() ), Entry::getValue ) ) ); //// tables @@ -105,7 +104,7 @@ public LogicalRelSnapshotImpl( LogicalRelationalCatalog catalog ) { //// KEYS - keys = ImmutableMap.copyOf( catalog.getKeys() ); + keys = ImmutableMap.copyOf( catalogs.values().stream().flatMap( c -> c.getKeys().entrySet().stream() ).collect( Collectors.toMap( Entry::getKey, Entry::getValue ) ) ); Map> tableKeys = new HashMap<>(); keys.forEach( ( k, v ) -> { @@ -117,7 +116,7 @@ public LogicalRelSnapshotImpl( LogicalRelationalCatalog catalog ) { this.tableKeys = ImmutableMap.copyOf( tableKeys ); - this.index = ImmutableMap.copyOf( catalog.getIndexes() ); + this.index = ImmutableMap.copyOf( catalogs.values().stream().flatMap( c -> c.getIndexes().entrySet().stream() ).collect( Collectors.toMap( Entry::getKey, Entry::getValue ) ) ); Map> keyToIndexes = new HashMap<>(); this.index.forEach( ( k, v ) -> { @@ -143,7 +142,7 @@ public LogicalRelSnapshotImpl( LogicalRelationalCatalog catalog ) { //// CONSTRAINTS - this.constraints = ImmutableMap.copyOf( catalog.getConstraints() ); + this.constraints = ImmutableMap.copyOf( catalogs.values().stream().flatMap( c -> c.getConstraints().entrySet().stream() ).collect( Collectors.toMap( Entry::getKey, Entry::getValue ) ) ); HashMap> tableConstraints = new HashMap<>(); constraints.forEach( ( k, v ) -> { @@ -157,11 +156,11 @@ public LogicalRelSnapshotImpl( LogicalRelationalCatalog catalog ) { @Override - public List getTables( @javax.annotation.Nullable Pattern namespace, @Nullable Pattern name ) { + public List getTables( @javax.annotation.Nullable Pattern namespace, Pattern name ) { if ( name == null ) { return tables.values().asList(); } - return tables.values().stream().filter( t -> this.namespace.caseSensitive ? 
t.name.matches( name.toRegex() ) : t.name.toLowerCase().matches( (name.toRegex().toLowerCase()) ) ).collect( Collectors.toList() ); + return tables.values().stream().filter( t -> namespaces.get( t.namespaceId ).caseSensitive ? t.name.matches( name.toRegex() ) : t.name.toLowerCase().matches( (name.toRegex().toLowerCase()) ) ).collect( Collectors.toList() ); } @@ -171,7 +170,6 @@ public LogicalTable getTable( long tableId ) { } - @Override public List getKeys() { return keys.values().asList(); @@ -191,7 +189,7 @@ public List getColumns( long tableId ) { @Override - public List getColumns( @Nullable Pattern tableName, @Nullable Pattern columnName ) { + public List getColumns( Pattern tableName, Pattern columnName ) { List tables = getTables( null, tableName ); if ( columnName == null ) { return tables.stream().flatMap( t -> tableColumns.get( t.id ).stream() ).collect( Collectors.toList() ); @@ -200,14 +198,13 @@ public List getColumns( @Nullable Pattern tableName, @Nullable Pa return tables .stream() .flatMap( t -> tableColumns.get( t.id ).stream().filter( - c -> namespace.caseSensitive + c -> namespaces.get( t.namespaceId ).caseSensitive ? c.name.matches( columnName.toRegex() ) : c.name.toLowerCase().matches( columnName.toLowerCase().toRegex() ) ) ).collect( Collectors.toList() ); } - @Override public LogicalColumn getColumn( long tableId, String columnName ) { return tableIdColumnNameColumn.get( Pair.of( tableId, columnName ) ); diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/SnapshotBuilder.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/SnapshotBuilder.java index 12eb1897f3..2e9bb66d03 100644 --- a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/SnapshotBuilder.java +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/SnapshotBuilder.java @@ -38,9 +38,9 @@ public class SnapshotBuilder { public static Snapshot createSnapshot( long id, Catalog catalog, Map logicalCatalogs, Map allocationCatalogs, Map physicalCatalogs ) { - Map rels = buildRelSnapshots( logicalCatalogs ); - Map docs = buildDocSnapshots( logicalCatalogs ); - Map graphs = buildGraphSnapshots( logicalCatalogs ); + LogicalRelSnapshot rels = buildRelSnapshots( logicalCatalogs ); + LogicalDocSnapshot docs = buildDocSnapshots( logicalCatalogs ); + LogicalGraphSnapshot graphs = buildGraphSnapshots( logicalCatalogs ); AllocSnapshot alloc = buildAlloc( allocationCatalogs ); PhysicalSnapshot physical = buildPhysical( physicalCatalogs ); @@ -60,30 +60,30 @@ private static AllocSnapshot buildAlloc( Map allocation } - private static Map buildGraphSnapshots( Map logicalCatalogs ) { - return logicalCatalogs + private static LogicalGraphSnapshot buildGraphSnapshots( Map logicalCatalogs ) { + return new LogicalGraphSnapshotImpl( logicalCatalogs .entrySet() .stream() .filter( e -> e.getValue().getLogicalNamespace().namespaceType == NamespaceType.GRAPH ) - .collect( Collectors.toMap( Entry::getKey, e -> new LogicalGraphSnapshotImpl( (LogicalGraphCatalog) e.getValue() ) ) ); + .collect( Collectors.toMap( Entry::getKey, e -> (LogicalGraphCatalog) e.getValue() ) ) ); } - private static Map buildDocSnapshots( Map logicalCatalogs ) { - return logicalCatalogs + private static LogicalDocSnapshot buildDocSnapshots( Map logicalCatalogs ) { + return new LogicalDocSnapshotImpl( logicalCatalogs .entrySet() .stream() .filter( e -> e.getValue().getLogicalNamespace().namespaceType == NamespaceType.DOCUMENT ) - .collect( Collectors.toMap( Entry::getKey, e -> new LogicalDocSnapshotImpl( 
(LogicalDocumentCatalog) e.getValue() ) ) ); + .collect( Collectors.toMap( Entry::getKey, e -> (LogicalDocumentCatalog) e.getValue() ) ) ); } - private static Map buildRelSnapshots( Map logicalCatalogs ) { - return logicalCatalogs + private static LogicalRelSnapshot buildRelSnapshots( Map logicalCatalogs ) { + return new LogicalRelSnapshotImpl( logicalCatalogs .entrySet() .stream() .filter( e -> e.getValue().getLogicalNamespace().namespaceType == NamespaceType.RELATIONAL ) - .collect( Collectors.toMap( Entry::getKey, e -> new LogicalRelSnapshotImpl( (LogicalRelationalCatalog) e.getValue() ) ) ); + .collect( Collectors.toMap( Entry::getKey, e -> (LogicalRelationalCatalog) e.getValue() ) ) ); } } diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/SnapshotImpl.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/SnapshotImpl.java index 8747a7603e..f90187ef27 100644 --- a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/SnapshotImpl.java +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/SnapshotImpl.java @@ -24,10 +24,10 @@ import lombok.Getter; import lombok.NonNull; import lombok.Value; +import lombok.experimental.Accessors; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogAdapter; import org.polypheny.db.catalog.entity.CatalogEntity; -import org.polypheny.db.catalog.entity.CatalogIndex; import org.polypheny.db.catalog.entity.CatalogQueryInterface; import org.polypheny.db.catalog.entity.CatalogUser; import org.polypheny.db.catalog.entity.LogicalNamespace; @@ -45,8 +45,10 @@ import org.polypheny.db.catalog.snapshot.Snapshot; @Value +@Accessors(fluent = true) public class SnapshotImpl implements Snapshot { + LogicalRelSnapshot rel; LogicalDocSnapshot doc; LogicalGraphSnapshot graph; @@ -180,95 +182,33 @@ public List getTablesForPeriodicProcessing() { @Override public CatalogEntity getEntity( long id ) { - return relationals.values().stream().map( r -> r.getTable( id ) ).findFirst().orElse( null ); - } - - - @Override - public LogicalDocSnapshot getDocSnapshot( long namespaceId ) { - return documents.get( namespaceId ); - } - - - @Override - public LogicalGraphSnapshot getGraphSnapshot( long namespaceId ) { - return graphs.get( namespaceId ); - } - - - @Override - public LogicalRelSnapshot getRelSnapshot( long namespaceId ) { - return relationals.get( namespaceId ); - } - - - @Override - public PhysicalSnapshot getPhysicalSnapshot() { - return physical; - } - - - @Override - public AllocSnapshot getAllocSnapshot() { - return alloc; - } + CatalogEntity entity = rel.getTable( id ); + if ( entity != null ) { + return entity; + } + entity = doc.getCollection( id ); + if ( entity != null ) { + return entity; + } - @Override - public List getIndexes() { - return relationals.values().stream().flatMap( r -> r.getIndexes().stream() ).collect( Collectors.toList() ); + return graph.getGraph( id ); } @Override public LogicalEntity getLogicalEntity( long id ) { - LogicalEntity entity; - for ( LogicalRelSnapshot value : relationals.values() ) { - entity = value.getTable( id ); - if ( entity != null ) { - return entity; - } + LogicalEntity entity = rel.getTable( id ); + if ( entity != null ) { + return entity; } - for ( LogicalDocSnapshot value : documents.values() ) { - entity = value.getCollection( id ); - if ( entity != null ) { - return entity; - } + entity = doc.getCollection( id ); + if ( entity != null ) { + return entity; } - for ( LogicalGraphSnapshot value : graphs.values() ) { - entity = value.getGraph( id 
); - if ( entity != null ) { - return entity; - } - } - - return null; - } - - - @Override - public LogicalRelSnapshot rel() { - return rel; - } - - - @Override - public LogicalGraphSnapshot graph() { - return graph; - } - - - @Override - public LogicalDocSnapshot doc() { - return doc; - } - - - @Override - public AllocSnapshot alloc() { - return alloc; + return graph.getLogicalGraph( id ); } diff --git a/core/src/main/java/org/polypheny/db/processing/LogicalAlgAnalyzeShuttle.java b/core/src/main/java/org/polypheny/db/processing/LogicalAlgAnalyzeShuttle.java index 9100dc7de1..37f03341bf 100644 --- a/core/src/main/java/org/polypheny/db/processing/LogicalAlgAnalyzeShuttle.java +++ b/core/src/main/java/org/polypheny/db/processing/LogicalAlgAnalyzeShuttle.java @@ -398,7 +398,7 @@ private void getAvailableColumns( AlgNode scan ) { this.entityId.add( scan.getEntity().id ); final LogicalTable table = scan.getEntity().unwrap( LogicalTable.class ); if ( table != null ) { - final List columns = Catalog.getInstance().getSnapshot().getRelSnapshot( table.namespaceId ).getColumns( table.id ); + final List columns = Catalog.getInstance().getSnapshot().rel().getColumns( table.id ); final List names = columns.stream().map( c -> c.name ).collect( Collectors.toList() ); final String baseName = Catalog.getInstance().getSnapshot().getNamespace( table.namespaceId ) + "." + table.name + "."; diff --git a/core/src/main/java/org/polypheny/db/schema/Schemas.java b/core/src/main/java/org/polypheny/db/schema/Schemas.java index bf1ba6a129..5aaed086e9 100644 --- a/core/src/main/java/org/polypheny/db/schema/Schemas.java +++ b/core/src/main/java/org/polypheny/db/schema/Schemas.java @@ -105,7 +105,7 @@ private static boolean canConvert( AlgDataType fromType, AlgDataType toType ) { * Returns the expression for a schema. 
*/
 public static Expression expression( Snapshot snapshot ) {
-        return snapshot.getSnapshotExpression( snapshot.getId() );
+        return snapshot.getSnapshotExpression( snapshot.id() );
 }

@@ -206,7 +206,7 @@ public static Queryable queryable( DataContext root, Class clazz, Iter
  */
 public static Queryable queryable( DataContext root, Snapshot snapshot, Class clazz, String tableName ) {
     //QueryableEntity table = (QueryableEntity) schema.getEntity( tableName );
-        LogicalTable table = snapshot.getRelSnapshot( 0 ).getTable( tableName );
+        LogicalTable table = snapshot.rel().getTable( tableName );
     return table.unwrap( QueryableEntity.class ).asQueryable( root, snapshot, table.id );
 }

diff --git a/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java b/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java
index fc55c0c00c..84fd52e769 100644
--- a/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java
+++ b/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java
@@ -1330,7 +1330,7 @@ public RexNode patternExclude( RexNode node ) {
 public AlgBuilder scan( List tableNames ) {
     final List names = ImmutableList.copyOf( tableNames );
     LogicalNamespace namespace = snapshot.getNamespace( tableNames.get( 0 ) );
-        final LogicalTable entity = snapshot.getRelSnapshot( namespace.id ).getTable( names.get( 1 ) );
+        final LogicalTable entity = snapshot.rel().getTable( names.get( 1 ) );
     if ( entity == null ) {
         throw RESOURCE.tableNotFound( String.join( ".", names ) ).ex();
     }
@@ -1376,7 +1376,7 @@ public AlgBuilder documentProject( List projects, List
[...]

From: datomo
Date: Sun, 9 Apr 2023 14:27:02 +0200
Subject: [PATCH 054/436] simplifying create and drop methods for table

---
 .../org/polypheny/db/adapter/DataStore.java | 3 +-
 .../org/polypheny/db/catalog/Catalog.java | 11 +-
 .../catalogs/AllocationRelationalCatalog.java | 12 +-
 .../catalogs/LogicalRelationalCatalog.java | 2 +-
 .../db/catalog/catalogs/PhysicalCatalog.java | 4 +-
 .../entity/CatalogColumnPlacement.java | 18 +-
 .../entity/allocation/AllocationTable.java | 9 +-
 .../catalog/snapshot/LogicalDocSnapshot.java | 6 +-
 .../catalog/snapshot/LogicalRelSnapshot.java | 11 +-
 .../db/catalog/snapshot/Snapshot.java | 3 +-
 .../snapshot/impl/LogicalDocSnapshotImpl.java | 8 +-
 .../snapshot/impl/LogicalRelSnapshotImpl.java | 27 +-
 .../catalog/snapshot/impl/SnapshotImpl.java | 3 +-
 .../polypheny/db/iface/QueryInterface.java | 4 +-
 .../db/iface/QueryInterfaceManager.java | 6 +-
 .../db/plugins/PolyPluginManager.java | 2 +
 .../processing/LogicalAlgAnalyzeShuttle.java | 2 +-
 .../java/org/polypheny/db/schema/Schemas.java | 2 +-
 .../org/polypheny/db/tools/AlgBuilder.java | 3 +-
 .../org/polypheny/db/catalog/MockCatalog.java | 6 +
 .../db/docker/MockCatalogDocker.java | 2 -
 .../org/polypheny/db/ddl/DdlManagerImpl.java | 508 ++++++++++--------
 .../partition/AbstractPartitionManager.java | 14 +-
 .../db/partition/FrequencyMapImpl.java | 20 +-
 .../db/partition/HashPartitionManager.java | 2 +-
 .../db/partition/ListPartitionManager.java | 2 +-
 .../db/partition/RangePartitionManager.java | 2 +-
 .../TemperatureAwarePartitionManager.java | 6 +-
 .../db/processing/AbstractQueryProcessor.java | 2 +-
 .../processing/ConstraintEnforceAttacher.java | 6 +-
 .../db/processing/DataMigratorImpl.java | 52 +-
 .../db/routing/UiRoutingPageUtil.java | 12 +-
 .../db/routing/routers/AbstractDqlRouter.java | 4 +-
 .../db/routing/routers/BaseRouter.java | 34 +-
 .../db/routing/routers/CachedPlanRouter.java | 4 +-
 .../db/routing/routers/DmlRouterImpl.java | 84 +--
 .../routers/FullPlacementQueryRouter.java | 10 +-
.../db/routing/routers/IcarusRouter.java | 4 +- .../db/routing/routers/SimpleRouter.java | 4 +- .../CreateAllPlacementStrategy.java | 4 +- .../CreateSinglePlacementStrategy.java | 4 +- .../db/transaction/EntityAccessMap.java | 4 +- .../db/view/MaterializedViewManagerImpl.java | 24 +- .../java/org/polypheny/db/TestHelper.java | 37 ++ .../java/org/polypheny/db/cypher/DdlTest.java | 20 +- .../db/misc/HorizontalPartitioningTest.java | 70 +-- .../db/misc/VerticalPartitioningTest.java | 67 ++- .../java/org/polypheny/db/mql/DdlTest.java | 43 +- .../db/sql/clause/SimpleSqlTest.java | 41 +- .../db/statistics/StatisticsTest.java | 11 +- .../db/monitoring/statistics/QueryResult.java | 2 +- .../statistics/StatisticColumn.java | 2 +- .../statistics/StatisticQueryProcessor.java | 19 +- .../monitoring/statistics/StatisticTable.java | 2 +- .../statistics/StatisticsManagerImpl.java | 16 +- .../db/avatica/AvaticaInterfacePlugin.java | 2 +- .../org/polypheny/db/avatica/DbmsMeta.java | 12 +- .../org/polypheny/db/cql/ColumnIndex.java | 2 +- .../java/org/polypheny/db/cql/Combiner.java | 4 +- .../polypheny/db/cql/Cql2RelConverter.java | 2 +- .../java/org/polypheny/db/cql/TableIndex.java | 3 +- .../polypheny/db/adapter/csv/CsvSchema.java | 12 +- .../polypheny/db/adapter/csv/CsvSource.java | 2 +- .../cypher2alg/CypherToAlgConverter.java | 2 +- .../db/cypher/ddl/CypherAddPlacement.java | 2 +- .../ExploreQueryProcessor.java | 2 +- .../db/hsqldb/stores/HsqldbStore.java | 6 +- .../db/http/HttpInterfacePlugin.java | 4 +- .../jdbc/sources/AbstractJdbcSource.java | 4 +- .../jdbc/stores/AbstractJdbcStore.java | 21 +- .../db/languages/MqlProcessorImpl.java | 2 +- .../db/languages/mql/MqlAddPlacement.java | 4 +- .../db/languages/mql/MqlDeletePlacement.java | 4 +- .../polypheny/db/languages/mql/MqlDrop.java | 2 +- .../db/languages/mql/MqlRenameCollection.java | 3 +- .../languages/mql2alg/MqlToAlgConverter.java | 2 +- .../org/polypheny/db/catalog/PolyCatalog.java | 84 ++- .../allocation/PolyAllocDocCatalog.java | 1 + .../allocation/PolyAllocGraphCatalog.java | 1 + .../allocation/PolyAllocRelCatalog.java | 21 +- .../db/catalog/logical/DocumentCatalog.java | 1 + .../db/catalog/logical/GraphCatalog.java | 1 + .../db/catalog/logical/RelationalCatalog.java | 7 +- .../catalog/physical/PolyPhysicalCatalog.java | 8 +- .../polypheny/db/restapi/RequestParser.java | 9 +- .../java/org/polypheny/db/restapi/Rest.java | 10 +- .../db/restapi/RestInterfacePlugin.java | 4 +- .../polypheny/db/sql/SqlProcessorImpl.java | 10 +- .../org/polypheny/db/sql/language/SqlDdl.java | 8 +- .../polypheny/db/sql/language/SqlUtil.java | 4 +- .../altertable/SqlAlterTableAddColumn.java | 2 +- .../SqlAlterTableAddPartitions.java | 2 +- .../altertable/SqlAlterTableAddPlacement.java | 2 +- .../SqlAlterTableMergePartitions.java | 2 +- .../SqlAlterTableModifyPartitions.java | 14 +- .../SqlAlterTableModifyPlacement.java | 2 +- .../db/sql/language/validate/EmptyScope.java | 18 +- .../validate/IdentifierNamespace.java | 3 +- .../language/validate/SqlValidatorImpl.java | 2 +- .../db/sql/language/validate/WithScope.java | 5 +- .../db/sql/web/SchemaToJsonMapper.java | 4 +- .../java/org/polypheny/db/webui/Crud.java | 107 ++-- .../org/polypheny/db/webui/HttpServer.java | 2 +- .../org/polypheny/db/webui/WebSocket.java | 4 +- .../polypheny/db/webui/crud/LanguageCrud.java | 19 +- .../db/webui/crud/StatisticCrud.java | 8 +- .../polypheny/db/webui/models/Placement.java | 4 - .../models/requests/BatchUpdateRequest.java | 5 +- 108 files changed, 958 insertions(+), 793 deletions(-) 
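Most hunks in this patch are mechanical rewrites of call sites from the old per-namespace lookups (snapshot.getRelSnapshot( namespaceId ), getAllocSnapshot(), getPhysicalSnapshot()) to the fluent accessors (rel(), doc(), graph(), alloc(), physical()) that SnapshotImpl gained above through Lombok's @Value together with @Accessors(fluent = true). Hand-expanded, that annotation pair boils down to roughly the following sketch; FluentSnapshot and RelSnapshotSketch are hypothetical stand-in names for illustration only, not the real Polypheny types:

// Sketch only: approximately what @Value + @Accessors(fluent = true) generate
// on SnapshotImpl, reduced to a single model and written without Lombok.
final class FluentSnapshot {

    // Hypothetical stand-in for LogicalRelSnapshot, reduced to one lookup.
    interface RelSnapshotSketch {

        String getTable( long id );

    }


    private final RelSnapshotSketch rel;


    FluentSnapshot( RelSnapshotSketch rel ) {
        this.rel = rel;
    }


    // Fluent accessor as Lombok would emit it: named after the field, no "get" prefix.
    RelSnapshotSketch rel() {
        return rel;
    }


    public static void main( String[] args ) {
        FluentSnapshot snapshot = new FluentSnapshot( id -> "table-" + id );
        // Call sites shrink from snapshot.getRelSnapshot( namespaceId ).getTable( id )
        // to snapshot.rel().getTable( id ): one relational snapshot now spans all namespaces.
        System.out.println( snapshot.rel().getTable( 42 ) );
    }

}

This is why the diffs below are dominated by one-line call-site rewrites rather than behavioral changes.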
diff --git a/core/src/main/java/org/polypheny/db/adapter/DataStore.java b/core/src/main/java/org/polypheny/db/adapter/DataStore.java index f7791fe442..9b65df1f5d 100644 --- a/core/src/main/java/org/polypheny/db/adapter/DataStore.java +++ b/core/src/main/java/org/polypheny/db/adapter/DataStore.java @@ -37,6 +37,7 @@ import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.catalog.entity.physical.PhysicalEntity; import org.polypheny.db.catalog.entity.physical.PhysicalTable; import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.prepare.Context; @@ -65,7 +66,7 @@ public List getSupportedSchemaType() { } - public abstract PhysicalTable createPhysicalTable( Context context, LogicalTable combinedTable, AllocationTable allocationTable ); + public abstract List createPhysicalTable( Context context, LogicalTable combinedTable, AllocationTable allocationTable ); public abstract void dropTable( Context context, LogicalTable combinedTable, List partitionIds ); diff --git a/core/src/main/java/org/polypheny/db/catalog/Catalog.java b/core/src/main/java/org/polypheny/db/catalog/Catalog.java index 79d47c5043..b2649571a6 100644 --- a/core/src/main/java/org/polypheny/db/catalog/Catalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/Catalog.java @@ -45,7 +45,8 @@ public abstract class Catalog implements ExtensionPoint { public static Adapter defaultStore; public static Adapter defaultSource; public static long defaultUserId = 0; - public static long defaultDatabaseId = 0; + public static String defaultNamespaceName = "public"; + public static long defaultNamespaceId = 0; public static boolean resetDocker; protected final PropertyChangeSupport listeners = new PropertyChangeSupport( this ); public boolean isPersistent = false; @@ -74,9 +75,10 @@ public static Catalog getInstance() { } - public abstract void init(); + public abstract void updateSnapshot(); + public abstract void commit() throws NoTablePrimaryKeyException; public abstract void rollback(); @@ -239,6 +241,11 @@ protected final boolean isValidIdentifier( final String str ) { public abstract Snapshot getSnapshot(); + public Snapshot getSnapshot( long id ) { + return snapshot(); + } + + public static Snapshot snapshot() { return INSTANCE.getSnapshot(); } diff --git a/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationRelationalCatalog.java b/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationRelationalCatalog.java index feb3fe51ab..c100af5159 100644 --- a/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationRelationalCatalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationRelationalCatalog.java @@ -35,12 +35,10 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param allocationId * @param columnId The id of the column to be placed * @param placementType The type of placement - * @param physicalSchemaName The schema name on the adapter - * @param physicalTableName The table name on the adapter - * @param physicalColumnName The column name on the adapter * @param position + * @return */ - void addColumnPlacement( long allocationId, long columnId, PlacementType placementType, String physicalSchemaName, String physicalTableName, String physicalColumnName, int position ); + AllocationTable addColumnPlacement( long allocationId, long columnId, PlacementType placementType, int position ); 
/** * Deletes all dependent column placements @@ -223,7 +221,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param tableId table to retrieve the placement from * @return */ - long addDataPlacement( long adapterId, long tableId ); + AllocationTable createAlloctionTable( long adapterId, long tableId ); /** * Adds a new DataPlacement for a given table on a specific store. @@ -251,7 +249,9 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param adapterId adapter where placement should be removed from * @param tableId table to retrieve the placement from */ - void removeDataPlacement( long adapterId, long tableId ); + void deleteAllocation( long adapterId, long tableId ); + + void deleteAllocation( long allocId ); /** * Adds a single dataPlacement on a store for a specific table diff --git a/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalRelationalCatalog.java b/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalRelationalCatalog.java index 4f65bbc313..9191c471fa 100644 --- a/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalRelationalCatalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalRelationalCatalog.java @@ -47,7 +47,7 @@ public interface LogicalRelationalCatalog extends LogicalCatalog { * @param modifiable Whether the content of the table can be modified * @return The id of the inserted table */ - public abstract long addTable( String name, EntityType entityType, boolean modifiable ); + public abstract LogicalTable addTable( String name, EntityType entityType, boolean modifiable ); /** diff --git a/core/src/main/java/org/polypheny/db/catalog/catalogs/PhysicalCatalog.java b/core/src/main/java/org/polypheny/db/catalog/catalogs/PhysicalCatalog.java index fd568aed8d..70ebf9e97d 100644 --- a/core/src/main/java/org/polypheny/db/catalog/catalogs/PhysicalCatalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/catalogs/PhysicalCatalog.java @@ -27,6 +27,8 @@ public interface PhysicalCatalog { ConcurrentHashMap getPhysicals(); - void addEntities( List physicals ); + void addEntities( List physicals ); + + void deleteEntity( long id ); } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogColumnPlacement.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogColumnPlacement.java index 11fc0e8566..bb5099e712 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogColumnPlacement.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogColumnPlacement.java @@ -43,11 +43,7 @@ public class CatalogColumnPlacement implements CatalogObject { @Serialize public PlacementType placementType; @Serialize - public long physicalPosition; - @Serialize - public String physicalSchemaName; - @Serialize - public String physicalColumnName; + public long position; public CatalogColumnPlacement( @@ -56,17 +52,13 @@ public CatalogColumnPlacement( @Deserialize("columnId") final long columnId, @Deserialize("adapterId") final long adapterId, @Deserialize("placementType") @NonNull final PlacementType placementType, - @Deserialize("physicalSchemaName") final String physicalSchemaName, - @Deserialize("physicalColumnName") final String physicalColumnName, - @Deserialize("physicalPosition") final long physicalPosition ) { + @Deserialize("position") final long position ) { this.namespaceId = namespaceId; this.tableId = tableId; this.columnId = columnId; this.adapterId = adapterId; this.placementType = placementType; - this.physicalSchemaName = 
physicalSchemaName; - this.physicalColumnName = physicalColumnName; - this.physicalPosition = physicalPosition; + this.position = position; } @@ -96,9 +88,7 @@ public String getAdapterUniqueName() { public Serializable[] getParameterArray() { return new Serializable[]{ getLogicalTableName(), - placementType.name(), - physicalSchemaName, - physicalColumnName }; + placementType.name() }; } } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationTable.java b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationTable.java index fec9ade6ea..55c92b0089 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationTable.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationTable.java @@ -29,6 +29,7 @@ import org.apache.calcite.linq4j.tree.Expression; import org.apache.calcite.linq4j.tree.Expressions; import org.polypheny.db.algebra.type.AlgDataType; +import org.polypheny.db.algebra.type.AlgDataTypeFactory; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; import org.polypheny.db.catalog.entity.logical.LogicalColumn; @@ -81,9 +82,9 @@ public String getNamespaceName() { } - public AllocationTable withAddedColumn( long columnId, PlacementType placementType, String physicalSchemaName, String physicalTableName, String physicalColumnName, int position ) { + public AllocationTable withAddedColumn( long columnId, PlacementType placementType, int position ) { List placements = new ArrayList<>( this.placements ); - placements.add( new CatalogColumnPlacement( namespaceId, id, columnId, adapterId, placementType, physicalSchemaName, physicalColumnName, position ) ); + placements.add( new CatalogColumnPlacement( namespaceId, id, columnId, adapterId, placementType, position ) ); return new AllocationTable( id, logicalId, namespaceId, adapterId, placements ); } @@ -96,7 +97,7 @@ public AllocationTable withRemovedColumn( long columnId ) { public Map getColumnTypes() { - return null; + return placements.stream().collect( Collectors.toMap( p -> p.columnId, p -> Catalog.snapshot().rel().getColumn( p.columnId ).getAlgDataType( AlgDataTypeFactory.DEFAULT ) ) ); } @@ -107,7 +108,7 @@ public Map getColumnNamesId() { public List getColumnOrder() { List columns = new ArrayList<>( placements ); - columns.sort( ( a, b ) -> Math.toIntExact( a.physicalPosition - b.physicalPosition ) ); + columns.sort( ( a, b ) -> Math.toIntExact( a.position - b.position ) ); return columns.stream().map( c -> c.columnId ).collect( Collectors.toList() ); } diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalDocSnapshot.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalDocSnapshot.java index 96a8573fd6..224b56c1a3 100644 --- a/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalDocSnapshot.java +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalDocSnapshot.java @@ -17,6 +17,7 @@ package org.polypheny.db.catalog.snapshot; import java.util.List; +import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.catalog.entity.logical.LogicalCollection; import org.polypheny.db.catalog.logistic.Pattern; @@ -34,10 +35,11 @@ public interface LogicalDocSnapshot { /** * Get a collection of collections which match the given naming pattern. 
 *
+ * @param namespaceId The id of the namespace
 * @param namePattern The naming pattern of the collection itself, null if all are matched
 * @return collection of collections matching conditions
 */
- public abstract List getCollections( Pattern namePattern );
+ public abstract List getCollections( long namespaceId, Pattern namePattern );


 @Deprecated
@@ -52,4 +54,6 @@ public interface LogicalDocSnapshot {

 LogicalCollection getCollection( String collection );

+ LogicalCollection getCollection( long id, String collection );
+
 }

diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalRelSnapshot.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalRelSnapshot.java
index a294759e08..75d2cd0e29 100644
--- a/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalRelSnapshot.java
+++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalRelSnapshot.java
@@ -24,6 +24,7 @@ import org.polypheny.db.catalog.entity.CatalogIndex;
 import org.polypheny.db.catalog.entity.CatalogKey;
 import org.polypheny.db.catalog.entity.CatalogPrimaryKey;
+import org.polypheny.db.catalog.entity.LogicalNamespace;
 import org.polypheny.db.catalog.entity.logical.LogicalColumn;
 import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.catalog.exceptions.UnknownColumnException;
@@ -47,14 +48,22 @@ public interface LogicalRelSnapshot {
  */
 List getTables( @Nullable Pattern namespace, @Nullable Pattern name );

+ List getTables( long namespaceId, @Nullable Pattern name );
+
+ List getTables( @Nullable String namespace, @Nullable String name );
+
+
 /**
  * Returns the table with the given name in the specified schema.
  *
+ * @param namespaceId The id of the namespace
  * @param tableName The name of the table
  * @return The table
  * @throws UnknownTableException If there is no table with this name in the specified database and schema.
 */
- LogicalTable getTable( String tableName );
+ LogicalTable getTable( long namespaceId, String tableName );
+
+ LogicalTable getTable( String namespaceName, String tableName );


 /**

diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/Snapshot.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/Snapshot.java
index decd597d16..dd9f83347f 100644
--- a/core/src/main/java/org/polypheny/db/catalog/snapshot/Snapshot.java
+++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/Snapshot.java
@@ -32,7 +32,6 @@ import org.polypheny.db.catalog.entity.LogicalNamespace;
 import org.polypheny.db.catalog.entity.logical.LogicalEntity;
 import org.polypheny.db.catalog.entity.logical.LogicalTable;
-import org.polypheny.db.catalog.exceptions.UnknownAdapterException;
 import org.polypheny.db.catalog.exceptions.UnknownQueryInterfaceException;
 import org.polypheny.db.catalog.exceptions.UnknownUserException;
 import org.polypheny.db.catalog.logistic.Pattern;
@@ -117,7 +116,7 @@ default Expression getSnapshotExpression( long id ) {
  *
  * @return The adapter
  */
- CatalogAdapter getAdapter( String uniqueName ) throws UnknownAdapterException;
+ CatalogAdapter getAdapter( String uniqueName );

 /**
  * Get an adapter by its id

diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalDocSnapshotImpl.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalDocSnapshotImpl.java
index 80e74be7ac..de9f89134d 100644
--- a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalDocSnapshotImpl.java
+++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalDocSnapshotImpl.java
@@ -37,7 +37,7 @@ public LogicalCollection getCollection( long collectionId ) {

 @Override
- public List getCollections( Pattern namePattern ) {
+ public List getCollections( long namespaceId, Pattern namePattern ) {
     return null;
 }

@@ -71,4 +71,10 @@ public LogicalCollection getCollection( String collection ) {
     return null;
 }

+
+ @Override
+ public LogicalCollection getCollection( long id, String collection ) {
+     return null;
+ }
+
 }

diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalRelSnapshotImpl.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalRelSnapshotImpl.java
index f0e7c8fb48..788b16ead1 100644
--- a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalRelSnapshotImpl.java
+++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalRelSnapshotImpl.java
@@ -24,6 +24,7 @@ import java.util.Map.Entry;
 import java.util.stream.Collectors;
 import lombok.Value;
+import org.jetbrains.annotations.Nullable;
 import org.polypheny.db.catalog.catalogs.LogicalRelationalCatalog;
 import org.polypheny.db.catalog.entity.CatalogConstraint;
 import org.polypheny.db.catalog.entity.CatalogForeignKey;
@@ -164,6 +165,18 @@ public List getTables( @javax.annotation.Nullable Pattern namespac
 }

+ @Override
+ public List getTables( long namespaceId, @Nullable Pattern name ) {
+     return tables.values().stream().filter( t -> t.namespaceId == namespaceId && ( name == null || t.name.matches( name.toRegex() ) ) ).collect( Collectors.toList() );
+ }
+
+
+ @Override
+ public List getTables( @Nullable String namespace, @Nullable String name ) {
+     return null;
+ }
+
+
 @Override
 public LogicalTable getTable( long tableId ) {
     return tables.get( tableId );
 }

@@ -333,8 +346,18 @@ public CatalogIndex getIndex( long indexId ) {

 @Override
- public LogicalTable getTable( String name ) {
-     return tableNames.get( name );
+ public LogicalTable getTable( long 
namespaceId, String name ) { + String adjustedName = name; + if ( !namespaces.get( namespaceId ).caseSensitive ) { + adjustedName = name.toLowerCase(); + } + return tableNames.get( adjustedName ); + } + + + @Override + public LogicalTable getTable( String namespaceName, String tableName ) { + return null; } diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/SnapshotImpl.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/SnapshotImpl.java index f90187ef27..b98af6a6c1 100644 --- a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/SnapshotImpl.java +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/SnapshotImpl.java @@ -33,7 +33,6 @@ import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalEntity; import org.polypheny.db.catalog.entity.logical.LogicalTable; -import org.polypheny.db.catalog.exceptions.UnknownAdapterException; import org.polypheny.db.catalog.exceptions.UnknownQueryInterfaceException; import org.polypheny.db.catalog.exceptions.UnknownUserException; import org.polypheny.db.catalog.logistic.Pattern; @@ -139,7 +138,7 @@ public List getAdapters() { @Override - public CatalogAdapter getAdapter( String uniqueName ) throws UnknownAdapterException { + public CatalogAdapter getAdapter( String uniqueName ) { return adapterNames.get( uniqueName ); } diff --git a/core/src/main/java/org/polypheny/db/iface/QueryInterface.java b/core/src/main/java/org/polypheny/db/iface/QueryInterface.java index afa8a50e5c..d1131371c1 100644 --- a/core/src/main/java/org/polypheny/db/iface/QueryInterface.java +++ b/core/src/main/java/org/polypheny/db/iface/QueryInterface.java @@ -37,7 +37,7 @@ public abstract class QueryInterface implements Runnable, PropertyChangeListener protected final transient Authenticator authenticator; @Getter - private final int queryInterfaceId; + private final long queryInterfaceId; @Getter private final String uniqueName; @@ -52,7 +52,7 @@ public abstract class QueryInterface implements Runnable, PropertyChangeListener public QueryInterface( final TransactionManager transactionManager, final Authenticator authenticator, - final int queryInterfaceId, + final long queryInterfaceId, final String uniqueName, final Map settings, final boolean supportsDml, diff --git a/core/src/main/java/org/polypheny/db/iface/QueryInterfaceManager.java b/core/src/main/java/org/polypheny/db/iface/QueryInterfaceManager.java index 02ed05cbc3..09aa0bf1aa 100644 --- a/core/src/main/java/org/polypheny/db/iface/QueryInterfaceManager.java +++ b/core/src/main/java/org/polypheny/db/iface/QueryInterfaceManager.java @@ -50,9 +50,9 @@ public class QueryInterfaceManager { @Getter private static final Map REGISTER = new ConcurrentHashMap<>(); - private final Map interfaceById = new HashMap<>(); + private final Map interfaceById = new HashMap<>(); private final Map interfaceByName = new HashMap<>(); - private final Map interfaceThreadById = new HashMap<>(); + private final Map interfaceThreadById = new HashMap<>(); private final TransactionManager transactionManager; private final Authenticator authenticator; @@ -137,7 +137,7 @@ public void restoreInterfaces( Snapshot snapshot ) { String[] split = iface.clazz.split( "\\$" ); split = split[split.length - 1].split( "\\." 
); Class clazz = REGISTER.get( split[split.length - 1] ).clazz; - Constructor ctor = clazz.getConstructor( TransactionManager.class, Authenticator.class, int.class, String.class, Map.class ); + Constructor ctor = clazz.getConstructor( TransactionManager.class, Authenticator.class, long.class, String.class, Map.class ); QueryInterface instance = (QueryInterface) ctor.newInstance( transactionManager, authenticator, iface.id, iface.name, iface.settings ); Thread thread = new Thread( instance ); diff --git a/core/src/main/java/org/polypheny/db/plugins/PolyPluginManager.java b/core/src/main/java/org/polypheny/db/plugins/PolyPluginManager.java index 2ee1bbbd9f..2a0dc52dc7 100644 --- a/core/src/main/java/org/polypheny/db/plugins/PolyPluginManager.java +++ b/core/src/main/java/org/polypheny/db/plugins/PolyPluginManager.java @@ -46,11 +46,13 @@ import org.pf4j.DefaultPluginDescriptor; import org.pf4j.DefaultPluginLoader; import org.pf4j.DefaultPluginManager; +import org.pf4j.DevelopmentPluginRepository; import org.pf4j.JarPluginLoader; import org.pf4j.ManifestPluginDescriptorFinder; import org.pf4j.PluginClassLoader; import org.pf4j.PluginDescriptor; import org.pf4j.PluginLoader; +import org.pf4j.PluginRepository; import org.pf4j.PluginState; import org.pf4j.PluginWrapper; import org.polypheny.db.catalog.Catalog; diff --git a/core/src/main/java/org/polypheny/db/processing/LogicalAlgAnalyzeShuttle.java b/core/src/main/java/org/polypheny/db/processing/LogicalAlgAnalyzeShuttle.java index 37f03341bf..639103fdd4 100644 --- a/core/src/main/java/org/polypheny/db/processing/LogicalAlgAnalyzeShuttle.java +++ b/core/src/main/java/org/polypheny/db/processing/LogicalAlgAnalyzeShuttle.java @@ -423,7 +423,7 @@ private void getPartitioningInfo( LogicalFilter filter ) { private void handleIfPartitioned( AlgNode node, LogicalTable catalogTable ) { // Only if table is partitioned throw new NotImplementedException(); - /*if ( Catalog.getInstance().getSnapshot().getAllocSnapshot().isPartitioned( catalogTable.id ) ) { + /*if ( Catalog.getInstance().getSnapshot().alloc().isPartitioned( catalogTable.id ) ) { WhereClauseVisitor whereClauseVisitor = new WhereClauseVisitor( statement, catalogTable.columns.stream().map( c -> c.id ).collect( Collectors.toList() ).indexOf( catalogTable.partitionProperty.partitionColumnId ) ); diff --git a/core/src/main/java/org/polypheny/db/schema/Schemas.java b/core/src/main/java/org/polypheny/db/schema/Schemas.java index 5aaed086e9..604e5b6a47 100644 --- a/core/src/main/java/org/polypheny/db/schema/Schemas.java +++ b/core/src/main/java/org/polypheny/db/schema/Schemas.java @@ -206,7 +206,7 @@ public static Queryable queryable( DataContext root, Class clazz, Iter */ public static Queryable queryable( DataContext root, Snapshot snapshot, Class clazz, String tableName ) { //QueryableEntity table = (QueryableEntity) schema.getEntity( tableName ); - LogicalTable table = snapshot.rel().getTable( tableName ); + LogicalTable table = snapshot.rel().getTable( null, tableName ); return table.unwrap( QueryableEntity.class ).asQueryable( root, snapshot, table.id ); } diff --git a/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java b/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java index 84fd52e769..1611c3b09d 100644 --- a/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java +++ b/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java @@ -1329,8 +1329,7 @@ public RexNode patternExclude( RexNode node ) { */ public AlgBuilder scan( List tableNames ) { final List names = 
ImmutableList.copyOf( tableNames ); - LogicalNamespace namespace = snapshot.getNamespace( tableNames.get( 0 ) ); - final LogicalTable entity = snapshot.rel().getTable( names.get( 1 ) ); + final LogicalTable entity = snapshot.rel().getTable( tableNames.get( 0 ), names.get( 1 ) ); if ( entity == null ) { throw RESOURCE.tableNotFound( String.join( ".", names ) ).ex(); } diff --git a/core/src/test/java/org/polypheny/db/catalog/MockCatalog.java b/core/src/test/java/org/polypheny/db/catalog/MockCatalog.java index ae5f4ba7d4..43a2e4da82 100644 --- a/core/src/test/java/org/polypheny/db/catalog/MockCatalog.java +++ b/core/src/test/java/org/polypheny/db/catalog/MockCatalog.java @@ -62,6 +62,12 @@ public void init() { } + @Override + public void updateSnapshot() { + throw new NotImplementedException(); + } + + @Override public Map getUsers() { throw new NotImplementedException(); diff --git a/core/src/test/java/org/polypheny/db/docker/MockCatalogDocker.java b/core/src/test/java/org/polypheny/db/docker/MockCatalogDocker.java index c540579f50..446a062919 100644 --- a/core/src/test/java/org/polypheny/db/docker/MockCatalogDocker.java +++ b/core/src/test/java/org/polypheny/db/docker/MockCatalogDocker.java @@ -37,8 +37,6 @@ public class MockCatalogDocker extends MockCatalog { - - @Override public long addAdapter( String uniqueName, String clazz, AdapterType type, Map settings ) { i++; diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java index 2f327b544a..f4194d0cb0 100644 --- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java @@ -73,6 +73,7 @@ import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.catalog.entity.physical.PhysicalEntity; import org.polypheny.db.catalog.exceptions.ColumnAlreadyExistsException; import org.polypheny.db.catalog.exceptions.EntityAlreadyExistsException; import org.polypheny.db.catalog.exceptions.GenericCatalogException; @@ -98,6 +99,7 @@ import org.polypheny.db.catalog.logistic.NameGenerator; import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.catalog.logistic.PartitionType; +import org.polypheny.db.catalog.logistic.Pattern; import org.polypheny.db.catalog.logistic.PlacementType; import org.polypheny.db.catalog.snapshot.AllocSnapshot; import org.polypheny.db.catalog.snapshot.LogicalRelSnapshot; @@ -161,7 +163,7 @@ private void checkViewDependencies( LogicalTable catalogTable ) { if ( catalogTable.connectedViews.size() > 0 ) { List views = new ArrayList<>(); for ( Long id : catalogTable.connectedViews ) { - views.add( catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getTable( id ).name ); + views.add( catalog.getSnapshot().rel().getTable( id ).name ); } throw new PolyphenyDbException( "Cannot alter table because of underlying View " + views.stream().map( String::valueOf ).collect( Collectors.joining( (", ") ) ) ); } @@ -197,7 +199,7 @@ protected DataStore getDataStoreInstance( long storeId ) throws DdlOnSourceExcep private LogicalColumn getCatalogColumn( long namespaceId, long tableId, String columnName ) throws ColumnNotExistsException { - return catalog.getSnapshot().getRelSnapshot( namespaceId ).getColumn( tableId, columnName ); + return catalog.getSnapshot().rel().getColumn( tableId, columnName ); } @@ -243,15 +245,15 @@ 
private void handleSource( DataSource adapter ) { for ( Map.Entry> entry : exportedColumns.entrySet() ) { // Make sure the table name is unique String tableName = entry.getKey(); - if ( catalog.getSnapshot().getRelSnapshot( defaultNamespaceId ).checkIfExistsEntity( tableName ) ) { // apparently we put them all into 1? + if ( catalog.getSnapshot().rel().checkIfExistsEntity( tableName ) ) { // apparently we put them all into 1? int i = 0; - while ( catalog.getSnapshot().getRelSnapshot( defaultNamespaceId ).checkIfExistsEntity( tableName + i ) ) { + while ( catalog.getSnapshot().rel().checkIfExistsEntity( tableName + i ) ) { i++; } tableName += i; } - long tableId = catalog.getLogicalRel( defaultNamespaceId ).addTable( tableName, EntityType.SOURCE, !(adapter).isDataReadOnly() ); + LogicalTable table = catalog.getLogicalRel( defaultNamespaceId ).addTable( tableName, EntityType.SOURCE, !(adapter).isDataReadOnly() ); List primaryKeyColIds = new ArrayList<>(); int colPos = 1; String physicalSchemaName = null; @@ -259,7 +261,7 @@ private void handleSource( DataSource adapter ) { for ( ExportedColumn exportedColumn : entry.getValue() ) { long columnId = catalog.getLogicalRel( defaultNamespaceId ).addColumn( exportedColumn.name, - tableId, + table.id, colPos++, exportedColumn.type, exportedColumn.collectionsType, @@ -269,13 +271,12 @@ private void handleSource( DataSource adapter ) { exportedColumn.cardinality, exportedColumn.nullable, Collation.getDefaultCollation() ); - AllocationEntity allocation = catalog.getSnapshot().getAllocSnapshot().getAllocation( adapter.getAdapterId(), tableId ); + AllocationEntity allocation = catalog.getSnapshot().alloc().getAllocation( adapter.getAdapterId(), table.id ); catalog.getAllocRel( defaultNamespaceId ).addColumnPlacement( allocation.id, columnId, PlacementType.STATIC, - exportedColumn.physicalSchemaName, - exportedColumn.physicalTableName, exportedColumn.physicalColumnName, exportedColumn.physicalPosition ); // Not a valid partitionGroupID --> placeholder + exportedColumn.physicalPosition ); // Not a valid partitionGroupID --> placeholder catalog.getAllocRel( defaultNamespaceId ).updateColumnPlacementPhysicalPosition( adapter.getAdapterId(), columnId, exportedColumn.physicalPosition ); if ( exportedColumn.primary ) { primaryKeyColIds.add( columnId ); @@ -288,10 +289,10 @@ private void handleSource( DataSource adapter ) { } } try { - catalog.getLogicalRel( defaultNamespaceId ).addPrimaryKey( tableId, primaryKeyColIds ); - LogicalTable catalogTable = catalog.getSnapshot().getRelSnapshot( defaultNamespaceId ).getTable( tableId ); + catalog.getLogicalRel( defaultNamespaceId ).addPrimaryKey( table.id, primaryKeyColIds ); + LogicalTable catalogTable = catalog.getSnapshot().rel().getTable( table.id ); - CatalogDataPlacement placement = catalog.getSnapshot().getAllocSnapshot().getDataPlacements( catalogTable.id ).get( 0 ); + CatalogDataPlacement placement = catalog.getSnapshot().alloc().getDataPlacements( catalogTable.id ).get( 0 ); catalog.getAllocRel( defaultNamespaceId ) .addPartitionPlacement( catalogTable.namespaceId, @@ -322,15 +323,15 @@ public void dropAdapter( String name, Statement statement ) throws UnknownAdapte if ( catalogAdapter.type == AdapterType.SOURCE ) { // Remove collection Set collectionsToDrop = new HashSet<>(); - for ( CatalogCollectionPlacement collectionPlacement : catalog.getSnapshot().getAllocSnapshot().getCollectionPlacementsByAdapter( catalogAdapter.id ) ) { + for ( CatalogCollectionPlacement collectionPlacement : 
catalog.getSnapshot().alloc().getCollectionPlacementsByAdapter( catalogAdapter.id ) ) { collectionsToDrop.add( collectionPlacement.collectionId ); } for ( long id : collectionsToDrop ) { - LogicalCollection collection = catalog.getSnapshot().getDocSnapshot( 1 ).getCollection( id ); + LogicalCollection collection = catalog.getSnapshot().doc().getCollection( id ); // Make sure that there is only one adapter - if ( catalog.getSnapshot().getAllocSnapshot().getDataPlacements( collection.id ).size() != 1 ) { + if ( catalog.getSnapshot().alloc().getDataPlacements( collection.id ).size() != 1 ) { throw new RuntimeException( "The data source contains collections with more than one placement. This should not happen!" ); } @@ -340,19 +341,19 @@ public void dropAdapter( String name, Statement statement ) throws UnknownAdapte // Remove table Set tablesToDrop = new HashSet<>(); - for ( CatalogColumnPlacement ccp : catalog.getSnapshot().getAllocSnapshot().getColumnPlacementsOnAdapter( catalogAdapter.id ) ) { + for ( CatalogColumnPlacement ccp : catalog.getSnapshot().alloc().getColumnPlacementsOnAdapter( catalogAdapter.id ) ) { tablesToDrop.add( ccp.tableId ); } for ( Long id : tablesToDrop ) { - if ( catalog.getSnapshot().getRelSnapshot( defaultNamespaceId ).getTable( id ).entityType != EntityType.MATERIALIZED_VIEW ) { + if ( catalog.getSnapshot().rel().getTable( id ).entityType != EntityType.MATERIALIZED_VIEW ) { tablesToDrop.add( id ); } } // Remove foreign keys for ( Long tableId : tablesToDrop ) { - for ( CatalogForeignKey fk : catalog.getSnapshot().getRelSnapshot( defaultNamespaceId ).getForeignKeys( tableId ) ) { + for ( CatalogForeignKey fk : catalog.getSnapshot().rel().getForeignKeys( tableId ) ) { try { catalog.getLogicalRel( defaultNamespaceId ).deleteForeignKey( fk.id ); } catch ( GenericCatalogException e ) { @@ -362,10 +363,10 @@ public void dropAdapter( String name, Statement statement ) throws UnknownAdapte } // Drop tables for ( Long tableId : tablesToDrop ) { - LogicalTable table = catalog.getSnapshot().getRelSnapshot( defaultNamespaceId ).getTable( tableId ); + LogicalTable table = catalog.getSnapshot().rel().getTable( tableId ); // Make sure that there is only one adapter - if ( catalog.getSnapshot().getAllocSnapshot().getDataPlacements( tableId ).size() != 1 ) { + if ( catalog.getSnapshot().alloc().getDataPlacements( tableId ).size() != 1 ) { throw new RuntimeException( "The data source contains tables with more than one placement. This should not happen!" ); } @@ -373,10 +374,10 @@ public void dropAdapter( String name, Statement statement ) throws UnknownAdapte if ( table.entityType != EntityType.SOURCE ) { throw new RuntimeException( "Trying to drop a table located on a data source which is not of table type SOURCE. This should not happen!" 
); } - AllocationEntity entity = catalog.getSnapshot().getAllocSnapshot().getAllocation( catalogAdapter.id, tableId ); + AllocationEntity entity = catalog.getSnapshot().alloc().getAllocation( catalogAdapter.id, tableId ); // Delete column placement in catalog - for ( LogicalColumn column : catalog.getSnapshot().getRelSnapshot( defaultNamespaceId ).getColumns( tableId ) ) { - if ( catalog.getSnapshot().getAllocSnapshot().checkIfExistsColumnPlacement( catalogAdapter.id, column.id ) ) { + for ( LogicalColumn column : catalog.getSnapshot().rel().getColumns( tableId ) ) { + if ( catalog.getSnapshot().alloc().checkIfExistsColumnPlacement( catalogAdapter.id, column.id ) ) { catalog.getAllocRel( defaultNamespaceId ).deleteColumnPlacement( entity.id, column.id, false ); } } @@ -389,7 +390,7 @@ public void dropAdapter( String name, Statement statement ) throws UnknownAdapte } // Delete columns - for ( LogicalColumn column : catalog.getSnapshot().getRelSnapshot( defaultNamespaceId ).getColumns( tableId ) ) { + for ( LogicalColumn column : catalog.getSnapshot().rel().getColumns( tableId ) ) { catalog.getLogicalRel( defaultNamespaceId ).deleteColumn( column.id ); } @@ -421,7 +422,7 @@ public void renameSchema( String newName, String oldName ) throws NamespaceAlrea @Override public void addColumnToSourceTable( LogicalTable catalogTable, String columnPhysicalName, String columnLogicalName, String beforeColumnName, String afterColumnName, String defaultValue, Statement statement ) throws ColumnAlreadyExistsException, DdlOnSourceException, ColumnNotExistsException { - if ( catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).checkIfExistsColumn( catalogTable.id, columnLogicalName ) ) { + if ( catalog.getSnapshot().rel().checkIfExistsColumn( catalogTable.id, columnLogicalName ) ) { throw new ColumnAlreadyExistsException( columnLogicalName, catalogTable.name ); } @@ -434,14 +435,14 @@ public void addColumnToSourceTable( LogicalTable catalogTable, String columnPhys } // Make sure there is only one adapter - if ( catalog.getSnapshot().getAllocSnapshot().getDataPlacements( catalogTable.id ).size() != 1 ) { + if ( catalog.getSnapshot().alloc().getDataPlacements( catalogTable.id ).size() != 1 ) { throw new RuntimeException( "The table has an unexpected number of placements!" 
); } - long adapterId = catalog.getSnapshot().getAllocSnapshot().getAllocationsFromLogical( catalogTable.id ).get( 0 ).adapterId; + long adapterId = catalog.getSnapshot().alloc().getAllocationsFromLogical( catalogTable.id ).get( 0 ).adapterId; DataSource dataSource = (DataSource) AdapterManager.getInstance().getAdapter( adapterId ); - String physicalTableName = catalog.getSnapshot().getPhysicalSnapshot().getPhysicalTable( catalogTable.id, adapterId ).name; + String physicalTableName = catalog.getSnapshot().physical().getPhysicalTable( catalogTable.id, adapterId ).name; List exportedColumns = dataSource.getExportedColumns().get( physicalTableName ); // Check if physicalColumnName is valid @@ -456,10 +457,10 @@ public void addColumnToSourceTable( LogicalTable catalogTable, String columnPhys } // Make sure this physical column has not already been added to this table - for ( CatalogColumnPlacement ccp : catalog.getSnapshot().getAllocSnapshot().getColumnPlacementsOnAdapterPerTable( adapterId, catalogTable.id ) ) { - if ( ccp.physicalColumnName.equalsIgnoreCase( columnPhysicalName ) ) { + for ( CatalogColumnPlacement ccp : catalog.getSnapshot().alloc().getColumnPlacementsOnAdapterPerTable( adapterId, catalogTable.id ) ) { + /*if ( ccp.physicalColumnName.equalsIgnoreCase( columnPhysicalName ) ) { throw new RuntimeException( "The physical column '" + columnPhysicalName + "' has already been added to this table!" ); - } + }*/ } int position = updateAdjacentPositions( catalogTable, beforeColumn, afterColumn ); @@ -480,16 +481,15 @@ public void addColumnToSourceTable( LogicalTable catalogTable, String columnPhys // Add default value addDefaultValue( catalogTable.namespaceId, defaultValue, columnId ); - LogicalColumn addedColumn = catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getColumn( columnId ); + LogicalColumn addedColumn = catalog.getSnapshot().rel().getColumn( columnId ); - AllocationEntity allocation = catalog.getSnapshot().getAllocSnapshot().getAllocation( adapterId, catalogTable.id ); + AllocationEntity allocation = catalog.getSnapshot().alloc().getAllocation( adapterId, catalogTable.id ); // Add column placement catalog.getAllocRel( catalogTable.namespaceId ).addColumnPlacement( allocation.id, addedColumn.id, PlacementType.STATIC, - exportedColumn.physicalSchemaName, - exportedColumn.physicalTableName, exportedColumn.physicalColumnName, position );//Not a valid partitionID --> placeholder + position );//Not a valid partitionID --> placeholder // Set column position catalog.getAllocRel( catalogTable.namespaceId ).updateColumnPlacementPhysicalPosition( adapterId, columnId, exportedColumn.physicalPosition ); @@ -500,7 +500,7 @@ public void addColumnToSourceTable( LogicalTable catalogTable, String columnPhys private int updateAdjacentPositions( LogicalTable catalogTable, LogicalColumn beforeColumn, LogicalColumn afterColumn ) { - List columns = catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getColumns( catalogTable.id ); + List columns = catalog.getSnapshot().rel().getColumns( catalogTable.id ); int position = columns.size() + 1; if ( beforeColumn != null || afterColumn != null ) { if ( beforeColumn != null ) { @@ -525,7 +525,7 @@ public void addColumn( String columnName, LogicalTable catalogTable, String befo throw new NotNullAndDefaultValueException(); } - if ( catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).checkIfExistsColumn( catalogTable.id, columnName ) ) { + if ( catalog.getSnapshot().rel().checkIfExistsColumn( catalogTable.id, 
columnName ) ) { throw new ColumnAlreadyExistsException( columnName, catalogTable.name ); } // @@ -550,20 +550,19 @@ public void addColumn( String columnName, LogicalTable catalogTable, String befo // Add default value addDefaultValue( catalogTable.namespaceId, defaultValue, columnId ); - LogicalColumn addedColumn = catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getColumn( columnId ); + LogicalColumn addedColumn = catalog.getSnapshot().rel().getColumn( columnId ); // Ask router on which stores this column shall be placed List stores = RoutingManager.getInstance().getCreatePlacementStrategy().getDataStoresForNewColumn( addedColumn ); // Add column on underlying data stores and insert default value for ( DataStore store : stores ) { - AllocationEntity allocation = catalog.getSnapshot().getAllocSnapshot().getAllocation( store.getAdapterId(), catalogTable.id ); + AllocationEntity allocation = catalog.getSnapshot().alloc().getAllocation( store.getAdapterId(), catalogTable.id ); catalog.getAllocRel( catalogTable.namespaceId ).addColumnPlacement( allocation.id, addedColumn.id, // Will be set later PlacementType.AUTOMATIC, // Will be set later - null, // Will be set later - null, null, position );//Not a valid partitionID --> placeholder + position );//Not a valid partitionID --> placeholder AdapterManager.getInstance().getStore( store.getAdapterId() ).addColumn( statement.getPrepareContext(), catalogTable, addedColumn ); } @@ -576,12 +575,12 @@ public void addColumn( String columnName, LogicalTable catalogTable, String befo public void addForeignKey( LogicalTable catalogTable, LogicalTable refTable, List columnNames, List refColumnNames, String constraintName, ForeignKeyOption onUpdate, ForeignKeyOption onDelete ) throws UnknownColumnException, GenericCatalogException { List columnIds = new LinkedList<>(); for ( String columnName : columnNames ) { - LogicalColumn logicalColumn = catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getColumn( catalogTable.id, columnName ); + LogicalColumn logicalColumn = catalog.getSnapshot().rel().getColumn( catalogTable.id, columnName ); columnIds.add( logicalColumn.id ); } List referencesIds = new LinkedList<>(); for ( String columnName : refColumnNames ) { - LogicalColumn logicalColumn = catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getColumn( refTable.id, columnName ); + LogicalColumn logicalColumn = catalog.getSnapshot().rel().getColumn( refTable.id, columnName ); referencesIds.add( logicalColumn.id ); } catalog.getLogicalRel( catalogTable.namespaceId ).addForeignKey( catalogTable.id, columnIds, refTable.id, referencesIds, constraintName, onUpdate, onDelete ); @@ -592,7 +591,7 @@ public void addForeignKey( LogicalTable catalogTable, LogicalTable refTable, Lis public void addIndex( LogicalTable catalogTable, String indexMethodName, List columnNames, String indexName, boolean isUnique, DataStore location, Statement statement ) throws UnknownColumnException, UnknownIndexMethodException, GenericCatalogException, UnknownTableException, UnknownUserException, UnknownSchemaException, UnknownKeyException, TransactionException, AlterSourceException, IndexExistsException, MissingColumnPlacementException { List columnIds = new LinkedList<>(); for ( String columnName : columnNames ) { - LogicalColumn logicalColumn = catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getColumn( catalogTable.id, columnName ); + LogicalColumn logicalColumn = catalog.getSnapshot().rel().getColumn( catalogTable.id, columnName ); 
columnIds.add( logicalColumn.id ); } @@ -604,7 +603,7 @@ public void addIndex( LogicalTable catalogTable, String indexMethodName, List 0 ) { for ( long columnId : columnIds ) { - if ( !catalog.getSnapshot().getAllocSnapshot().checkIfExistsColumnPlacement( dataPlacement.adapterId, columnId ) ) { + if ( !catalog.getSnapshot().alloc().checkIfExistsColumnPlacement( dataPlacement.adapterId, columnId ) ) { hasAllColumns = false; } } @@ -639,11 +638,11 @@ public void addIndex( LogicalTable catalogTable, String indexMethodName, List 0 ) { for ( long columnId : columnIds ) { - if ( !catalog.getSnapshot().getAllocSnapshot().checkIfExistsColumnPlacement( dataPlacement.adapterId, columnId ) ) { + if ( !catalog.getSnapshot().alloc().checkIfExistsColumnPlacement( dataPlacement.adapterId, columnId ) ) { hasAllColumns = false; } } @@ -652,7 +651,7 @@ public void addIndex( LogicalTable catalogTable, String indexMethodName, List columnIds, IndexType type ) throws MissingColumnPlacementException, UnknownIndexMethodException, GenericCatalogException { // Check if all required columns are present on this store for ( long columnId : columnIds ) { - if ( !catalog.getSnapshot().getAllocSnapshot().checkIfExistsColumnPlacement( location.getAdapterId(), columnId ) ) { - throw new MissingColumnPlacementException( catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getColumn( columnId ).name ); + if ( !catalog.getSnapshot().alloc().checkIfExistsColumnPlacement( location.getAdapterId(), columnId ) ) { + throw new MissingColumnPlacementException( catalog.getSnapshot().rel().getColumn( columnId ).name ); } } @@ -709,8 +708,8 @@ private void addDataStoreIndex( LogicalTable catalogTable, String indexMethodNam location.addIndex( statement.getPrepareContext(), - catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getIndex( indexId ), - catalog.getSnapshot().getAllocSnapshot().getPartitionsOnDataPlacement( location.getAdapterId(), catalogTable.id ) ); + catalog.getSnapshot().rel().getIndex( indexId ), + catalog.getSnapshot().alloc().getPartitionsOnDataPlacement( location.getAdapterId(), catalogTable.id ) ); } @@ -718,7 +717,7 @@ public void addPolyphenyIndex( LogicalTable catalogTable, String indexMethodName indexName = indexName.toLowerCase(); List columnIds = new LinkedList<>(); for ( String columnName : columnNames ) { - LogicalColumn logicalColumn = catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getColumn( catalogTable.id, columnName ); + LogicalColumn logicalColumn = catalog.getSnapshot().rel().getColumn( catalogTable.id, columnName ); columnIds.add( logicalColumn.id ); } @@ -730,7 +729,7 @@ public void addPolyphenyIndex( LogicalTable catalogTable, String indexMethodName } // Check if there is already an index with this name for this table - if ( catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).checkIfExistsIndex( catalogTable.id, indexName ) ) { + if ( catalog.getSnapshot().rel().checkIfExistsIndex( catalogTable.id, indexName ) ) { throw new IndexExistsException(); } @@ -763,7 +762,7 @@ public void addPolyphenyIndex( LogicalTable catalogTable, String indexMethodName type, indexName ); - IndexManager.getInstance().addIndex( catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getIndex( indexId ), statement ); + IndexManager.getInstance().addIndex( catalog.getSnapshot().rel().getIndex( indexId ), statement ); } @@ -773,15 +772,15 @@ public void addDataPlacement( LogicalTable catalogTable, List columnIds, L List tempPartitionGroupList = new 
ArrayList<>(); - if ( catalog.getSnapshot().getAllocSnapshot().getDataPlacement( catalogTable.id, dataStore.getAdapterId() ) == null ) { + if ( catalog.getSnapshot().alloc().getDataPlacement( catalogTable.id, dataStore.getAdapterId() ) == null ) { throw new PlacementAlreadyExistsException(); } else { - catalog.getAllocRel( catalogTable.namespaceId ).addDataPlacement( dataStore.getAdapterId(), catalogTable.id ); + catalog.getAllocRel( catalogTable.namespaceId ).createAlloctionTable( dataStore.getAdapterId(), catalogTable.id ); } // Check whether the list is empty (this is a shorthand for a full placement) if ( columnIds.size() == 0 ) { - columnIds = ImmutableList.copyOf( catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getColumns( catalogTable.id ) ).stream().map( c -> c.id ).collect( Collectors.toList() ); + columnIds = ImmutableList.copyOf( catalog.getSnapshot().rel().getColumns( catalogTable.id ) ).stream().map( c -> c.id ).collect( Collectors.toList() ); } // Select partitions to create on this placement @@ -790,11 +789,11 @@ public void addDataPlacement( LogicalTable catalogTable, List columnIds, L // Needed to ensure that column placements on the same store contain all the same partitions // Check if this column placement is the first on the data placement // If this returns null this means that this is the first placement and partition list can therefore be specified - List currentPartList = catalog.getSnapshot().getAllocSnapshot().getPartitionGroupsOnDataPlacement( dataStore.getAdapterId(), catalogTable.id ); + List currentPartList = catalog.getSnapshot().alloc().getPartitionGroupsOnDataPlacement( dataStore.getAdapterId(), catalogTable.id ); isDataPlacementPartitioned = !currentPartList.isEmpty(); - PartitionProperty property = catalog.getSnapshot().getAllocSnapshot().getPartitionProperty( catalogTable.id ); + PartitionProperty property = catalog.getSnapshot().alloc().getPartitionProperty( catalogTable.id ); if ( !partitionGroupIds.isEmpty() && partitionGroupNames.isEmpty() ) { @@ -822,7 +821,7 @@ public void addDataPlacement( LogicalTable catalogTable, List columnIds, L + dataStore.getUniqueName() + "' already contains manually specified partitions: " + currentPartList + ". Use 'ALTER TABLE ... MODIFY PARTITIONS...' instead" ); } - List catalogPartitionGroups = catalog.getSnapshot().getAllocSnapshot().getPartitionGroups( tableId ); + List catalogPartitionGroups = catalog.getSnapshot().alloc().getPartitionGroups( tableId ); for ( String partitionName : partitionGroupNames ) { boolean isPartOfTable = false; for ( CatalogPartitionGroup catalogPartitionGroup : catalogPartitionGroups ) { @@ -834,7 +833,7 @@ public void addDataPlacement( LogicalTable catalogTable, List columnIds, L } if ( !isPartOfTable ) { throw new RuntimeException( "Specified Partition-Name: '" + partitionName + "' is not part of table '" - + catalogTable.name + "'. Available partitions: " + String.join( ",", catalog.getSnapshot().getAllocSnapshot().getPartitionGroupNames( tableId ) ) ); + + catalogTable.name + "'. 
Available partitions: " + String.join( ",", catalog.getSnapshot().alloc().getPartitionGroupNames( tableId ) ) ); } } @@ -856,30 +855,28 @@ else if ( partitionGroupIds.isEmpty() && partitionGroupNames.isEmpty() ) { List partitionIds = new ArrayList<>(); // Gather all partitions relevant to add depending on the specified partitionGroup - tempPartitionGroupList.forEach( pg -> catalog.getSnapshot().getAllocSnapshot().getPartitions( pg ).forEach( p -> partitionIds.add( p.id ) ) ); + tempPartitionGroupList.forEach( pg -> catalog.getSnapshot().alloc().getPartitions( pg ).forEach( p -> partitionIds.add( p.id ) ) ); - AllocationEntity allocation = catalog.getSnapshot().getAllocSnapshot().getAllocation( dataStore.getAdapterId(), catalogTable.id ); + AllocationEntity allocation = catalog.getSnapshot().alloc().getAllocation( dataStore.getAdapterId(), catalogTable.id ); // Create column placements for ( long cid : columnIds ) { catalog.getAllocRel( catalogTable.namespaceId ).addColumnPlacement( allocation.id, cid, PlacementType.MANUAL, - null, - null, null, 0 ); - addedColumns.add( catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getColumn( cid ) ); + 0 ); + addedColumns.add( catalog.getSnapshot().rel().getColumn( cid ) ); } // Check if placement includes primary key columns - CatalogPrimaryKey primaryKey = catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getPrimaryKey( catalogTable.primaryKey ); + CatalogPrimaryKey primaryKey = catalog.getSnapshot().rel().getPrimaryKey( catalogTable.primaryKey ); for ( long cid : primaryKey.columnIds ) { if ( !columnIds.contains( cid ) ) { catalog.getAllocRel( catalogTable.namespaceId ).addColumnPlacement( allocation.id, cid, PlacementType.AUTOMATIC, - null, - null, null, 0 ); - addedColumns.add( catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getColumn( cid ) ); + 0 ); + addedColumns.add( catalog.getSnapshot().rel().getColumn( cid ) ); } } @@ -914,32 +911,31 @@ public void addPrimaryKey( LogicalTable catalogTable, List columnNames, checkModelLogic( catalogTable ); - CatalogPrimaryKey oldPk = catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getPrimaryKey( catalogTable.primaryKey ); + CatalogPrimaryKey oldPk = catalog.getSnapshot().rel().getPrimaryKey( catalogTable.primaryKey ); List columnIds = new LinkedList<>(); for ( String columnName : columnNames ) { - LogicalColumn logicalColumn = catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getColumn( catalogTable.id, columnName ); + LogicalColumn logicalColumn = catalog.getSnapshot().rel().getColumn( catalogTable.id, columnName ); columnIds.add( logicalColumn.id ); } catalog.getLogicalRel( catalogTable.namespaceId ).addPrimaryKey( catalogTable.id, columnIds ); // Add new column placements long pkColumnId = oldPk.columnIds.get( 0 ); // It is sufficient to check for one because all get replicated on all stores - List oldPkPlacements = catalog.getSnapshot().getAllocSnapshot().getColumnPlacements( pkColumnId ); + List oldPkPlacements = catalog.getSnapshot().alloc().getColumnPlacements( pkColumnId ); for ( CatalogColumnPlacement ccp : oldPkPlacements ) { for ( long columnId : columnIds ) { - if ( !catalog.getSnapshot().getAllocSnapshot().checkIfExistsColumnPlacement( ccp.adapterId, columnId ) ) { - AllocationEntity allocation = catalog.getSnapshot().getAllocSnapshot().getAllocation( ccp.adapterId, catalogTable.id ); + if ( !catalog.getSnapshot().alloc().checkIfExistsColumnPlacement( ccp.adapterId, columnId ) ) { + AllocationEntity allocation = 
catalog.getSnapshot().alloc().getAllocation( ccp.adapterId, catalogTable.id ); catalog.getAllocRel( catalogTable.namespaceId ).addColumnPlacement( allocation.id, columnId, // Will be set later - PlacementType.AUTOMATIC, // Will be set later - null, // Will be set later - null, null, 0 ); + PlacementType.AUTOMATIC, + 0 ); AdapterManager.getInstance().getStore( ccp.adapterId ).addColumn( statement.getPrepareContext(), - catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getTable( ccp.tableId ), - catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getColumn( columnId ) ); + catalog.getSnapshot().rel().getTable( ccp.tableId ), + catalog.getSnapshot().rel().getColumn( columnId ) ); } } } @@ -957,7 +953,7 @@ public void addUniqueConstraint( LogicalTable catalogTable, List columnN try { List columnIds = new LinkedList<>(); for ( String columnName : columnNames ) { - LogicalColumn logicalColumn = catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getColumn( catalogTable.id, columnName ); + LogicalColumn logicalColumn = catalog.getSnapshot().rel().getColumn( catalogTable.id, columnName ); columnIds.add( logicalColumn.id ); } catalog.getLogicalRel( catalogTable.namespaceId ).addUniqueConstraint( catalogTable.id, constraintName, columnIds ); @@ -969,7 +965,7 @@ public void addUniqueConstraint( LogicalTable catalogTable, List columnN @Override public void dropColumn( LogicalTable catalogTable, String columnName, Statement statement ) throws ColumnNotExistsException { - List columns = catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getColumns( catalogTable.id ); + List columns = catalog.getSnapshot().rel().getColumns( catalogTable.id ); if ( columns.size() < 2 ) { throw new RuntimeException( "Cannot drop sole column of table " + catalogTable.name ); } @@ -982,7 +978,7 @@ public void dropColumn( LogicalTable catalogTable, String columnName, Statement LogicalColumn column = getCatalogColumn( catalogTable.namespaceId, catalogTable.id, columnName ); - LogicalRelSnapshot snapshot = catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ); + LogicalRelSnapshot snapshot = catalog.getSnapshot().rel(); // Check if column is part of a key for ( CatalogKey key : snapshot.getTableKeys( catalogTable.id ) ) { @@ -1007,12 +1003,12 @@ public void dropColumn( LogicalTable catalogTable, String columnName, Statement } catalog.getAllocRel( catalogTable.namespaceId ).deleteColumnPlacement( dp.adapterId, dp.columnId, true ); }*/ - for ( AllocationEntity table : catalog.getSnapshot().getAllocSnapshot().getAllocationsFromLogical( catalogTable.id ) ) { + for ( AllocationEntity table : catalog.getSnapshot().alloc().getAllocationsFromLogical( catalogTable.id ) ) { for ( CatalogColumnPlacement placement : table.unwrap( AllocationTable.class ).placements ) { if ( catalogTable.entityType == EntityType.ENTITY ) { AdapterManager.getInstance().getStore( table.adapterId ).dropColumn( statement.getPrepareContext(), placement ); } - AllocationEntity allocation = catalog.getSnapshot().getAllocSnapshot().getAllocation( placement.adapterId, catalogTable.id ); + AllocationEntity allocation = catalog.getSnapshot().alloc().getAllocation( placement.adapterId, catalogTable.id ); catalog.getAllocRel( catalogTable.namespaceId ).deleteColumnPlacement( allocation.id, placement.columnId, true ); } } @@ -1055,7 +1051,7 @@ public void dropConstraint( LogicalTable catalogTable, String constraintName ) t checkIfDdlPossible( catalogTable.entityType ); try { - CatalogConstraint constraint = 
catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getConstraint( catalogTable.id, constraintName ); + CatalogConstraint constraint = catalog.getSnapshot().rel().getConstraint( catalogTable.id, constraintName ); catalog.getLogicalRel( catalogTable.namespaceId ).deleteConstraint( constraint.id ); } catch ( GenericCatalogException | UnknownConstraintException e ) { throw new RuntimeException( e ); @@ -1069,7 +1065,7 @@ public void dropForeignKey( LogicalTable catalogTable, String foreignKeyName ) t checkIfDdlPossible( catalogTable.entityType ); try { - CatalogForeignKey foreignKey = catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getForeignKey( catalogTable.id, foreignKeyName ); + CatalogForeignKey foreignKey = catalog.getSnapshot().rel().getForeignKey( catalogTable.id, foreignKeyName ); catalog.getLogicalRel( catalogTable.namespaceId ).deleteForeignKey( foreignKey.id ); } catch ( GenericCatalogException | UnknownForeignKeyException e ) { throw new RuntimeException( e ); @@ -1083,13 +1079,13 @@ public void dropIndex( LogicalTable catalogTable, String indexName, Statement st checkIfDdlPossible( catalogTable.entityType ); try { - CatalogIndex index = catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getIndex( catalogTable.id, indexName ); + CatalogIndex index = catalog.getSnapshot().rel().getIndex( catalogTable.id, indexName ); if ( index.location == 0 ) { IndexManager.getInstance().deleteIndex( index ); } else { DataStore storeInstance = AdapterManager.getInstance().getStore( index.location ); - storeInstance.dropIndex( statement.getPrepareContext(), index, catalog.getSnapshot().getAllocSnapshot().getPartitionsOnDataPlacement( index.location, catalogTable.id ) ); + storeInstance.dropIndex( statement.getPrepareContext(), index, catalog.getSnapshot().alloc().getPartitionsOnDataPlacement( index.location, catalogTable.id ) ); } catalog.getLogicalRel( catalogTable.namespaceId ).deleteIndex( index.id ); @@ -1102,11 +1098,11 @@ public void dropIndex( LogicalTable catalogTable, String indexName, Statement st @Override public void dropDataPlacement( LogicalTable catalogTable, DataStore storeInstance, Statement statement ) throws PlacementNotExistsException, LastPlacementException { // Check whether this placement exists - if ( catalog.getSnapshot().getAllocSnapshot().getDataPlacement( storeInstance.getAdapterId(), catalogTable.id ) == null ) { + if ( catalog.getSnapshot().alloc().getDataPlacement( storeInstance.getAdapterId(), catalogTable.id ) == null ) { throw new PlacementNotExistsException(); } - CatalogDataPlacement dataPlacement = catalog.getSnapshot().getAllocSnapshot().getDataPlacement( storeInstance.getAdapterId(), catalogTable.id ); + CatalogDataPlacement dataPlacement = catalog.getSnapshot().alloc().getDataPlacement( storeInstance.getAdapterId(), catalogTable.id ); if ( !catalog.getAllocRel( catalogTable.namespaceId ).validateDataPlacementsConstraints( catalogTable.id, storeInstance.getAdapterId(), dataPlacement.columnPlacementsOnAdapter, dataPlacement.getAllPartitionIds() ) ) { @@ -1114,7 +1110,7 @@ public void dropDataPlacement( LogicalTable catalogTable, DataStore storeInstanc } // Drop all indexes on this store - for ( CatalogIndex index : catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getIndexes( catalogTable.id, false ) ) { + for ( CatalogIndex index : catalog.getSnapshot().rel().getIndexes( catalogTable.id, false ) ) { if ( index.location == storeInstance.getAdapterId() ) { if ( index.location == 0 ) { // Delete 
polystore index @@ -1124,17 +1120,17 @@ public void dropDataPlacement( LogicalTable catalogTable, DataStore storeInstanc AdapterManager.getInstance().getStore( index.location ).dropIndex( statement.getPrepareContext(), index, - catalog.getSnapshot().getAllocSnapshot().getPartitionsOnDataPlacement( index.location, catalogTable.id ) ); + catalog.getSnapshot().alloc().getPartitionsOnDataPlacement( index.location, catalogTable.id ) ); } // Delete index in catalog catalog.getLogicalRel( catalogTable.namespaceId ).deleteIndex( index.id ); } } // Physically delete the data from the store - storeInstance.dropTable( statement.getPrepareContext(), catalogTable, catalog.getSnapshot().getAllocSnapshot().getPartitionsOnDataPlacement( storeInstance.getAdapterId(), catalogTable.id ) ); + storeInstance.dropTable( statement.getPrepareContext(), catalogTable, catalog.getSnapshot().alloc().getPartitionsOnDataPlacement( storeInstance.getAdapterId(), catalogTable.id ) ); // Remove physical stores afterwards - catalog.getAllocRel( catalogTable.namespaceId ).removeDataPlacement( storeInstance.getAdapterId(), catalogTable.id ); + catalog.getAllocRel( catalogTable.namespaceId ).deleteAllocation( storeInstance.getAdapterId(), catalogTable.id ); // Reset query plan cache, implementation cache & routing cache statement.getQueryProcessor().resetCaches(); @@ -1171,11 +1167,11 @@ public void setColumnType( LogicalTable catalogTable, String columnName, ColumnT type.scale, type.dimension, type.cardinality ); - for ( CatalogColumnPlacement placement : catalog.getSnapshot().getAllocSnapshot().getColumnPlacements( logicalColumn.id ) ) { + for ( CatalogColumnPlacement placement : catalog.getSnapshot().alloc().getColumnPlacements( logicalColumn.id ) ) { AdapterManager.getInstance().getStore( placement.adapterId ).updateColumnType( statement.getPrepareContext(), placement, - catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getColumn( logicalColumn.id ), + catalog.getSnapshot().rel().getColumn( logicalColumn.id ), logicalColumn.type ); } @@ -1220,7 +1216,7 @@ public void setColumnPosition( LogicalTable catalogTable, String columnName, Str if ( logicalColumn.id == refColumn.id ) { throw new RuntimeException( "Same column!" 
); } - List columns = catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getColumns( catalogTable.id ); + List columns = catalog.getSnapshot().rel().getColumns( catalogTable.id ); if ( targetPosition < logicalColumn.position ) { // Walk from last column to first column for ( int i = columns.size(); i >= 1; i-- ) { if ( i < logicalColumn.position && i >= targetPosition ) { @@ -1300,7 +1296,7 @@ public void dropDefaultValue( LogicalTable catalogTable, String columnName, Stat @Override public void modifyDataPlacement( LogicalTable catalogTable, List columnIds, List partitionGroupIds, List partitionGroupNames, DataStore storeInstance, Statement statement ) throws PlacementNotExistsException, IndexPreventsRemovalException, LastPlacementException { - CatalogDataPlacement placements = statement.getDataContext().getSnapshot().getAllocSnapshot().getDataPlacement( storeInstance.getAdapterId(), catalogTable.id ); + CatalogDataPlacement placements = statement.getDataContext().getSnapshot().alloc().getDataPlacement( storeInstance.getAdapterId(), catalogTable.id ); // Check whether this placement already exists if ( placements == null ) { throw new PlacementNotExistsException(); @@ -1311,11 +1307,11 @@ public void modifyDataPlacement( LogicalTable catalogTable, List columnIds List columnsToRemove = new ArrayList<>(); - LogicalRelSnapshot snapshot = statement.getTransaction().getSnapshot().getRelSnapshot( catalogTable.namespaceId ); + LogicalRelSnapshot snapshot = statement.getTransaction().getSnapshot().rel(); // Checks, before physically removing the placement, that the partition distribution is still valid and sufficient // Identifies which columns need to be removed - for ( CatalogColumnPlacement placement : catalog.getSnapshot().getAllocSnapshot().getColumnPlacementsOnAdapterPerTable( storeInstance.getAdapterId(), catalogTable.id ) ) { + for ( CatalogColumnPlacement placement : catalog.getSnapshot().alloc().getColumnPlacementsOnAdapterPerTable( storeInstance.getAdapterId(), catalogTable.id ) ) { if ( !columnIds.contains( placement.columnId ) ) { // Check whether there are any indexes located on the store requiring this column for ( CatalogIndex index : snapshot.getIndexes( catalogTable.id, false ) ) { @@ -1349,15 +1345,15 @@ public void modifyDataPlacement( LogicalTable catalogTable, List columnIds // Remove columns physically for ( long columnId : columnsToRemove ) { // Drop Column on store - storeInstance.dropColumn( statement.getPrepareContext(), catalog.getSnapshot().getAllocSnapshot().getColumnPlacement( storeInstance.getAdapterId(), columnId ) ); + storeInstance.dropColumn( statement.getPrepareContext(), catalog.getSnapshot().alloc().getColumnPlacement( storeInstance.getAdapterId(), columnId ) ); // Drop column placement - AllocationEntity allocation = catalog.getSnapshot().getAllocSnapshot().getAllocation( storeInstance.getAdapterId(), catalogTable.id ); + AllocationEntity allocation = catalog.getSnapshot().alloc().getAllocation( storeInstance.getAdapterId(), catalogTable.id ); catalog.getAllocRel( catalogTable.namespaceId ).deleteColumnPlacement( allocation.id, columnId, true ); } List tempPartitionGroupList = new ArrayList<>(); - PartitionProperty partition = statement.getTransaction().getSnapshot().getAllocSnapshot().getPartitionProperty( catalogTable.id ); + PartitionProperty partition = statement.getTransaction().getSnapshot().alloc().getPartitionProperty( catalogTable.id ); // Select partitions to create on this placement if ( partition.isPartitioned ) { @@ -1378,7 +1374,7 @@
public void modifyDataPlacement( LogicalTable catalogTable, List columnIds } // If name partitions are specified else if ( !partitionGroupNames.isEmpty() && partitionGroupIds.isEmpty() ) { - List catalogPartitionGroups = catalog.getSnapshot().getAllocSnapshot().getPartitionGroups( tableId ); + List catalogPartitionGroups = catalog.getSnapshot().alloc().getPartitionGroups( tableId ); for ( String partitionName : partitionGroupNames ) { boolean isPartOfTable = false; for ( CatalogPartitionGroup catalogPartitionGroup : catalogPartitionGroups ) { @@ -1390,7 +1386,7 @@ else if ( !partitionGroupNames.isEmpty() && partitionGroupIds.isEmpty() ) { } if ( !isPartOfTable ) { throw new RuntimeException( "Specified partition name: '" + partitionName + "' is not part of table '" - + catalogTable.name + "'. Available partitions: " + String.join( ",", catalog.getSnapshot().getAllocSnapshot().getPartitionGroupNames( tableId ) ) ); + + catalogTable.name + "'. Available partitions: " + String.join( ",", catalog.getSnapshot().alloc().getPartitionGroupNames( tableId ) ) ); } } } else if ( partitionGroupNames.isEmpty() && partitionGroupIds.isEmpty() ) { @@ -1407,27 +1403,26 @@ else if ( !partitionGroupNames.isEmpty() && partitionGroupIds.isEmpty() ) { List intendedPartitionIds = new ArrayList<>(); // Gather all partitions relevant to add depending on the specified partitionGroup - tempPartitionGroupList.forEach( pg -> catalog.getSnapshot().getAllocSnapshot().getPartitions( pg ).forEach( p -> intendedPartitionIds.add( p.id ) ) ); + tempPartitionGroupList.forEach( pg -> catalog.getSnapshot().alloc().getPartitions( pg ).forEach( p -> intendedPartitionIds.add( p.id ) ) ); // Which columns to add List addedColumns = new LinkedList<>(); for ( long cid : columnIds ) { - if ( catalog.getSnapshot().getAllocSnapshot().checkIfExistsColumnPlacement( storeInstance.getAdapterId(), cid ) ) { - CatalogColumnPlacement placement = catalog.getSnapshot().getAllocSnapshot().getColumnPlacement( storeInstance.getAdapterId(), cid ); + if ( catalog.getSnapshot().alloc().checkIfExistsColumnPlacement( storeInstance.getAdapterId(), cid ) ) { + CatalogColumnPlacement placement = catalog.getSnapshot().alloc().getColumnPlacement( storeInstance.getAdapterId(), cid ); if ( placement.placementType == PlacementType.AUTOMATIC ) { // Make placement manual catalog.getAllocRel( catalogTable.namespaceId ).updateColumnPlacementType( storeInstance.getAdapterId(), cid, PlacementType.MANUAL ); } } else { - AllocationEntity allocation = catalog.getSnapshot().getAllocSnapshot().getAllocation( storeInstance.getAdapterId(), catalogTable.id ); + AllocationEntity allocation = catalog.getSnapshot().alloc().getAllocation( storeInstance.getAdapterId(), catalogTable.id ); // Create column placement catalog.getAllocRel( catalogTable.namespaceId ).addColumnPlacement( allocation.id, cid, PlacementType.MANUAL, - null, - null, null, 0 ); + 0 ); // Add column on store storeInstance.addColumn( statement.getPrepareContext(), catalogTable, snapshot.getColumn( cid ) ); // Add to list of columns for which we need to copy data @@ -1435,7 +1430,7 @@ else if ( !partitionGroupNames.isEmpty() && partitionGroupIds.isEmpty() ) { } } - CatalogDataPlacement dataPlacement = catalog.getSnapshot().getAllocSnapshot().getDataPlacement( storeInstance.getAdapterId(), catalogTable.id ); + CatalogDataPlacement dataPlacement = catalog.getSnapshot().alloc().getDataPlacement( storeInstance.getAdapterId(), catalogTable.id ); List removedPartitionIdsFromDataPlacement = new ArrayList<>(); // 
Removed Partition Ids for ( long partitionId : dataPlacement.getAllPartitionIds() ) { @@ -1484,12 +1479,12 @@ public void modifyPartitionPlacement( LogicalTable catalogTable, List part List newPartitions = new ArrayList<>(); List removedPartitions = new ArrayList<>(); - List currentPartitionGroupsOnStore = catalog.getSnapshot().getAllocSnapshot().getPartitionGroupsOnDataPlacement( storeId, catalogTable.id ); + List currentPartitionGroupsOnStore = catalog.getSnapshot().alloc().getPartitionGroupsOnDataPlacement( storeId, catalogTable.id ); // Get PartitionGroups that have been removed for ( long partitionGroupId : currentPartitionGroupsOnStore ) { if ( !partitionGroupIds.contains( partitionGroupId ) ) { - catalog.getSnapshot().getAllocSnapshot().getPartitions( partitionGroupId ).forEach( p -> removedPartitions.add( p.id ) ); + catalog.getSnapshot().alloc().getPartitions( partitionGroupId ).forEach( p -> removedPartitions.add( p.id ) ); } } @@ -1500,7 +1495,7 @@ public void modifyPartitionPlacement( LogicalTable catalogTable, List part // Get PartitionGroups that have been newly added for ( Long partitionGroupId : partitionGroupIds ) { if ( !currentPartitionGroupsOnStore.contains( partitionGroupId ) ) { - catalog.getSnapshot().getAllocSnapshot().getPartitions( partitionGroupId ).forEach( p -> newPartitions.add( p.id ) ); + catalog.getSnapshot().alloc().getPartitions( partitionGroupId ).forEach( p -> newPartitions.add( p.id ) ); } } @@ -1521,11 +1516,11 @@ public void modifyPartitionPlacement( LogicalTable catalogTable, List part // Get only columns that are actually on that store List necessaryColumns = new LinkedList<>(); - catalog.getSnapshot().getAllocSnapshot().getColumnPlacementsOnAdapterPerTable( storeInstance.getAdapterId(), catalogTable.id ).forEach( cp -> necessaryColumns.add( catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getColumn( cp.columnId ) ) ); + catalog.getSnapshot().alloc().getColumnPlacementsOnAdapterPerTable( storeInstance.getAdapterId(), catalogTable.id ).forEach( cp -> necessaryColumns.add( catalog.getSnapshot().rel().getColumn( cp.columnId ) ) ); dataMigrator.copyData( statement.getTransaction(), catalog.getSnapshot().getAdapter( storeId ), necessaryColumns, newPartitions ); // Add indexes on this new Partition Placement if there is already an index - for ( CatalogIndex currentIndex : catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getIndexes( catalogTable.id, false ) ) { + for ( CatalogIndex currentIndex : catalog.getSnapshot().rel().getIndexes( catalogTable.id, false ) ) { if ( currentIndex.location == storeId ) { storeInstance.addIndex( statement.getPrepareContext(), currentIndex, newPartitions ); } @@ -1534,7 +1529,7 @@ public void modifyPartitionPlacement( LogicalTable catalogTable, List part if ( removedPartitions.size() > 0 ) { // Remove indexes - for ( CatalogIndex currentIndex : catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getIndexes( catalogTable.id, false ) ) { + for ( CatalogIndex currentIndex : catalog.getSnapshot().rel().getIndexes( catalogTable.id, false ) ) { if ( currentIndex.location == storeId ) { storeInstance.dropIndex( statement.getPrepareContext(), currentIndex, removedPartitions ); } @@ -1555,15 +1550,15 @@ public void addColumnPlacement( LogicalTable catalogTable, String columnName, Da throw new UnknownAdapterException( "" ); } // Check whether this placement already exists - if ( !snapshot.getAllocSnapshot().adapterHasPlacement( storeInstance.getAdapterId(), catalogTable.id ) ) { + if ( 
!snapshot.alloc().adapterHasPlacement( storeInstance.getAdapterId(), catalogTable.id ) ) { throw new PlacementNotExistsException(); } LogicalColumn logicalColumn = getCatalogColumn( catalogTable.namespaceId, catalogTable.id, columnName ); // Make sure that this store does not contain a placement of this column - if ( catalog.getSnapshot().getAllocSnapshot().checkIfExistsColumnPlacement( storeInstance.getAdapterId(), logicalColumn.id ) ) { - CatalogColumnPlacement placement = catalog.getSnapshot().getAllocSnapshot().getColumnPlacement( storeInstance.getAdapterId(), logicalColumn.id ); + if ( catalog.getSnapshot().alloc().checkIfExistsColumnPlacement( storeInstance.getAdapterId(), logicalColumn.id ) ) { + CatalogColumnPlacement placement = catalog.getSnapshot().alloc().getColumnPlacement( storeInstance.getAdapterId(), logicalColumn.id ); if ( placement.placementType == PlacementType.AUTOMATIC ) { // Make placement manual catalog.getAllocRel( catalogTable.namespaceId ).updateColumnPlacementType( @@ -1574,20 +1569,19 @@ public void addColumnPlacement( LogicalTable catalogTable, String columnName, Da throw new PlacementAlreadyExistsException(); } } else { - AllocationEntity allocation = catalog.getSnapshot().getAllocSnapshot().getAllocation( storeInstance.getAdapterId(), catalogTable.id ); + AllocationEntity allocation = catalog.getSnapshot().alloc().getAllocation( storeInstance.getAdapterId(), catalogTable.id ); // Create column placement catalog.getAllocRel( catalogTable.namespaceId ).addColumnPlacement( allocation.id, logicalColumn.id, PlacementType.MANUAL, - null, - null, null, 0 ); + 0 ); // Add column on store storeInstance.addColumn( statement.getPrepareContext(), catalogTable, logicalColumn ); // Copy the data to the newly added column placements DataMigrator dataMigrator = statement.getTransaction().getDataMigrator(); dataMigrator.copyData( statement.getTransaction(), catalog.getSnapshot().getAdapter( storeInstance.getAdapterId() ), - ImmutableList.of( logicalColumn ), catalog.getSnapshot().getAllocSnapshot().getPartitionsOnDataPlacement( storeInstance.getAdapterId(), catalogTable.id ) ); + ImmutableList.of( logicalColumn ), catalog.getSnapshot().alloc().getPartitionsOnDataPlacement( storeInstance.getAdapterId(), catalogTable.id ) ); } // Reset query plan cache, implementation cache & routing cache @@ -1603,18 +1597,18 @@ public void dropColumnPlacement( LogicalTable catalogTable, String columnName, D Snapshot snapshot = statement.getTransaction().getSnapshot(); // Check whether this placement already exists - if ( !snapshot.getAllocSnapshot().adapterHasPlacement( storeInstance.getAdapterId(), catalogTable.id ) ) { + if ( !snapshot.alloc().adapterHasPlacement( storeInstance.getAdapterId(), catalogTable.id ) ) { throw new PlacementNotExistsException(); } LogicalColumn logicalColumn = getCatalogColumn( catalogTable.namespaceId, catalogTable.id, columnName ); // Check whether this store actually contains a placement of this column - if ( !catalog.getSnapshot().getAllocSnapshot().checkIfExistsColumnPlacement( storeInstance.getAdapterId(), logicalColumn.id ) ) { + if ( !catalog.getSnapshot().alloc().checkIfExistsColumnPlacement( storeInstance.getAdapterId(), logicalColumn.id ) ) { throw new PlacementNotExistsException(); } // Check whether there are any indexes located on the store requiring this column - for ( CatalogIndex index : catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getIndexes( catalogTable.id, false ) ) { + for ( CatalogIndex index : 
catalog.getSnapshot().rel().getIndexes( catalogTable.id, false ) ) { if ( index.location == storeInstance.getAdapterId() && index.key.columnIds.contains( logicalColumn.id ) ) { throw new IndexPreventsRemovalException( index.name, columnName ); } @@ -1625,13 +1619,13 @@ public void dropColumnPlacement( LogicalTable catalogTable, String columnName, D } // Check whether the column to drop is a primary key - CatalogPrimaryKey primaryKey = catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getPrimaryKey( catalogTable.primaryKey ); + CatalogPrimaryKey primaryKey = catalog.getSnapshot().rel().getPrimaryKey( catalogTable.primaryKey ); if ( primaryKey.columnIds.contains( logicalColumn.id ) ) { throw new PlacementIsPrimaryException(); } // Drop Column on store - storeInstance.dropColumn( statement.getPrepareContext(), catalog.getSnapshot().getAllocSnapshot().getColumnPlacement( storeInstance.getAdapterId(), logicalColumn.id ) ); - AllocationEntity allocation = catalog.getSnapshot().getAllocSnapshot().getAllocation( storeInstance.getAdapterId(), catalogTable.id ); + storeInstance.dropColumn( statement.getPrepareContext(), catalog.getSnapshot().alloc().getColumnPlacement( storeInstance.getAdapterId(), logicalColumn.id ) ); + AllocationEntity allocation = catalog.getSnapshot().alloc().getAllocation( storeInstance.getAdapterId(), catalogTable.id ); // Drop column placement catalog.getAllocRel( catalogTable.namespaceId ).deleteColumnPlacement( allocation.id, logicalColumn.id, false ); @@ -1648,7 +1642,7 @@ public void alterTableOwner( LogicalTable catalogTable, String newOwnerName ) th @Override public void renameTable( LogicalTable catalogTable, String newTableName, Statement statement ) throws EntityAlreadyExistsException { - if ( catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).checkIfExistsEntity( newTableName ) ) { + if ( catalog.getSnapshot().rel().checkIfExistsEntity( newTableName ) ) { throw new EntityAlreadyExistsException(); } // Check if views are dependent from this view @@ -1672,7 +1666,7 @@ public void renameTable( LogicalTable catalogTable, String newTableName, Stateme public void renameColumn( LogicalTable catalogTable, String columnName, String newColumnName, Statement statement ) throws ColumnAlreadyExistsException, ColumnNotExistsException { LogicalColumn logicalColumn = getCatalogColumn( catalogTable.namespaceId, catalogTable.id, columnName ); - if ( catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).checkIfExistsColumn( logicalColumn.tableId, newColumnName ) ) { + if ( catalog.getSnapshot().rel().checkIfExistsColumn( logicalColumn.tableId, newColumnName ) ) { throw new ColumnAlreadyExistsException( newColumnName, logicalColumn.getTableName() ); } // Check if views are dependent from this view @@ -1692,10 +1686,10 @@ public void renameColumn( LogicalTable catalogTable, String columnName, String n public void createView( String viewName, long namespaceId, AlgNode algNode, AlgCollation algCollation, boolean replace, Statement statement, PlacementType placementType, List projectedColumns, String query, QueryLanguage language ) throws EntityAlreadyExistsException { viewName = adjustNameIfNeeded( viewName, namespaceId ); - if ( catalog.getSnapshot().getRelSnapshot( namespaceId ).checkIfExistsEntity( viewName ) ) { + if ( catalog.getSnapshot().rel().checkIfExistsEntity( viewName ) ) { if ( replace ) { try { - dropView( catalog.getSnapshot().getRelSnapshot( namespaceId ).getTable( viewName ), statement ); + dropView( 
catalog.getSnapshot().rel().getTable( namespaceId, viewName ), statement ); } catch ( DdlOnSourceException e ) { throw new RuntimeException( "Unable to drop the existing View with this name." ); } @@ -1772,7 +1766,7 @@ public void createMaterializedView( String viewName, long namespaceId, AlgRoot a Map> underlying = findUnderlyingTablesOfView( algRoot.alg, underlyingTables, fieldList ); Snapshot snapshot = statement.getTransaction().getSnapshot(); - LogicalRelSnapshot relSnapshot = snapshot.getRelSnapshot( namespaceId ); + LogicalRelSnapshot relSnapshot = snapshot.rel(); // add check if underlying table is of model document -> mql, relational -> sql underlying.keySet().forEach( tableId -> checkModelLangCompatibility( language, namespaceId, tableId ) ); @@ -1827,13 +1821,12 @@ public void createMaterializedView( String viewName, long namespaceId, AlgRoot a for ( DataStore s : stores ) { long adapterId = s.getAdapterId(); - AllocationEntity allocation = catalog.getSnapshot().getAllocSnapshot().getAllocation( adapterId, tableId ); + AllocationEntity allocation = catalog.getSnapshot().alloc().getAllocation( adapterId, tableId ); catalog.getAllocRel( namespaceId ).addColumnPlacement( allocation.id, columnId, placementType, - null, - null, null, 0 ); + 0 ); List logicalColumns; if ( addedColumns.containsKey( adapterId ) ) { @@ -1849,7 +1842,7 @@ public void createMaterializedView( String viewName, long namespaceId, AlgRoot a // Sets previously created primary key catalog.getLogicalRel( namespaceId ).addPrimaryKey( tableId, columnIds ); - CatalogMaterializedView catalogMaterializedView = catalog.getSnapshot().getRelSnapshot( namespaceId ).getTable( tableId ).unwrap( CatalogMaterializedView.class ); + CatalogMaterializedView catalogMaterializedView = catalog.getSnapshot().rel().getTable( tableId ).unwrap( CatalogMaterializedView.class ); Catalog.getInstance().getSnapshot(); for ( DataStore store : stores ) { @@ -1857,7 +1850,7 @@ public void createMaterializedView( String viewName, long namespaceId, AlgRoot a catalogMaterializedView.namespaceId, store.getAdapterId(), tableId, - snapshot.getAllocSnapshot().getPartitionProperty( catalogMaterializedView.id ).partitionIds.get( 0 ), + snapshot.alloc().getPartitionProperty( catalogMaterializedView.id ).partitionIds.get( 0 ), PlacementType.AUTOMATIC, DataPlacementRole.UPTODATE ); @@ -1871,7 +1864,7 @@ public void createMaterializedView( String viewName, long namespaceId, AlgRoot a private void checkModelLangCompatibility( QueryLanguage language, long namespaceId, Long tableId ) { - LogicalTable catalogTable = catalog.getSnapshot().getRelSnapshot( namespaceId ).getTable( tableId ); + LogicalTable catalogTable = catalog.getSnapshot().rel().getTable( tableId ); if ( catalogTable.getNamespaceType() != language.getNamespaceType() ) { throw new RuntimeException( String.format( @@ -1918,10 +1911,10 @@ public long addGraphPlacement( long graphId, List stores, boolean onl throw new RuntimeException(); } - LogicalGraph graph = catalog.getSnapshot().getGraphSnapshot( graphId ).getGraph( graphId ); + LogicalGraph graph = catalog.getSnapshot().graph().getGraph( graphId ); Snapshot snapshot = statement.getTransaction().getSnapshot(); - List preExistingPlacements = snapshot.getAllocSnapshot().getGraphPlacements( graphId ) + List preExistingPlacements = snapshot.alloc().getGraphPlacements( graphId ) .stream() .filter( p -> !stores.stream().map( Adapter::getAdapterId ).collect( Collectors.toList() ).contains( p.adapterId ) ) .map( p -> p.adapterId ) .collect( Collectors.toList() ); @@ -1950,7 +1943,7 @@
public long addGraphPlacement( long graphId, List stores, boolean onl @Override public void removeGraphDatabasePlacement( long graphId, DataStore store, Statement statement ) { - CatalogGraphPlacement placement = statement.getTransaction().getSnapshot().getAllocSnapshot().getGraphPlacement( graphId, store.getAdapterId() ); + CatalogGraphPlacement placement = statement.getTransaction().getSnapshot().alloc().getGraphPlacement( graphId, store.getAdapterId() ); store.dropGraph( statement.getPrepareContext(), placement ); @@ -1966,10 +1959,10 @@ public void removeGraphDatabasePlacement( long graphId, DataStore store, Stateme private void afterGraphDropLogistics( DataStore store, long graphId ) { /*CatalogGraphMapping mapping = catalog.getLogicalRel( graphId ).getGraphMapping( graphId ); - catalog.getAllocGraph( graphId ).removeDataPlacement( store.getAdapterId(), mapping.nodesId ); - catalog.getAllocGraph( graphId ).removeDataPlacement( store.getAdapterId(), mapping.nodesPropertyId ); - catalog.getAllocGraph( catalogTable.namespaceId ).removeDataPlacement( store.getAdapterId(), mapping.edgesId ); - catalog.getAllocGraph( catalogTable.namespaceId ).removeDataPlacement( store.getAdapterId(), mapping.edgesPropertyId );*/ // replace + catalog.getAllocGraph( graphId ).deleteAllocation( store.getAdapterId(), mapping.nodesId ); + catalog.getAllocGraph( graphId ).deleteAllocation( store.getAdapterId(), mapping.nodesPropertyId ); + catalog.getAllocGraph( catalogTable.namespaceId ).deleteAllocation( store.getAdapterId(), mapping.edgesId ); + catalog.getAllocGraph( catalogTable.namespaceId ).deleteAllocation( store.getAdapterId(), mapping.edgesPropertyId );*/ // replace } @@ -1980,10 +1973,10 @@ private void afterGraphPlacementAddLogistics( DataStore store, long graphId ) { LogicalTable edges = catalog.getLogicalRel( catalogTable.namespaceId ).getTable( mapping.edgesId ); LogicalTable edgeProperty = catalog.getLogicalRel( catalogTable.namespaceId ).getTable( mapping.edgesPropertyId ); - catalog.getLogicalRel( catalogTable.namespaceId ).addDataPlacement( store.getAdapterId(), mapping.nodesId ); - catalog.getLogicalRel( catalogTable.namespaceId ).addDataPlacement( store.getAdapterId(), mapping.nodesPropertyId ); - catalog.getLogicalRel( catalogTable.namespaceId ).addDataPlacement( store.getAdapterId(), mapping.edgesId ); - catalog.getLogicalRel( catalogTable.namespaceId ).addDataPlacement( store.getAdapterId(), mapping.edgesPropertyId ); + catalog.getLogicalRel( catalogTable.namespaceId ).createAlloctionTable( store.getAdapterId(), mapping.nodesId ); + catalog.getLogicalRel( catalogTable.namespaceId ).createAlloctionTable( store.getAdapterId(), mapping.nodesPropertyId ); + catalog.getLogicalRel( catalogTable.namespaceId ).createAlloctionTable( store.getAdapterId(), mapping.edgesId ); + catalog.getLogicalRel( catalogTable.namespaceId ).createAlloctionTable( store.getAdapterId(), mapping.edgesPropertyId ); catalog.getLogicalRel( catalogTable.namespaceId ).addPartitionPlacement( nodes.namespaceId, @@ -2057,7 +2050,7 @@ public void replaceGraphAlias( long graphId, String oldAlias, String alias ) { @Override public void removeGraph( long graphId, boolean ifExists, Statement statement ) { - LogicalGraph graph = catalog.getSnapshot().getGraphSnapshot( graphId ).getGraph( graphId ); + LogicalGraph graph = catalog.getSnapshot().graph().getGraph( graphId ); if ( graph == null ) { if ( !ifExists ) { @@ -2065,7 +2058,7 @@ public void removeGraph( long graphId, boolean ifExists, Statement statement ) { } return; } - 
AllocSnapshot allocSnapshot = catalog.getSnapshot().getAllocSnapshot(); + AllocSnapshot allocSnapshot = catalog.getSnapshot().alloc(); for ( CatalogGraphPlacement placement : allocSnapshot.getGraphPlacements( graphId ) ) { AdapterManager.getInstance().getStore( placement.adapterId ).dropGraph( statement.getPrepareContext(), placement ); } @@ -2151,7 +2144,7 @@ private Map> findUnderlyingTablesOfView( AlgNode algNode, Map getUnderlyingColumns( AlgNode algNode, AlgDataType fieldList ) { LogicalTable table = algNode.getEntity().unwrap( LogicalTable.class ); - List columns = Catalog.getInstance().getSnapshot().getRelSnapshot( table.namespaceId ).getColumns( table.id ); + List columns = Catalog.getInstance().getSnapshot().rel().getColumns( table.id ); List logicalColumnNames = columns.stream().map( c -> c.name ).collect( Collectors.toList() ); List underlyingColumns = new ArrayList<>(); for ( int i = 0; i < columns.size(); i++ ) { @@ -2166,8 +2159,7 @@ private List getUnderlyingColumns( AlgNode algNode, AlgDataType fieldList } - @Override - public void createTable( long namespaceId, String name, List fields, List constraints, boolean ifNotExists, List stores, PlacementType placementType, Statement statement ) throws EntityAlreadyExistsException { + public void createTableOld( long namespaceId, String name, List fields, List constraints, boolean ifNotExists, List stores, PlacementType placementType, Statement statement ) throws EntityAlreadyExistsException { name = adjustNameIfNeeded( name, namespaceId ); try { @@ -2200,28 +2192,30 @@ public void createTable( long namespaceId, String name, List f stores = RoutingManager.getInstance().getCreatePlacementStrategy().getDataStoresForNewEntity(); } - long tableId = catalog.getLogicalRel( namespaceId ).addTable( + LogicalTable table = catalog.getLogicalRel( namespaceId ).addTable( name, EntityType.ENTITY, true ); // Initially create DataPlacement containers on every store the table should be placed. 
- stores.forEach( store -> catalog.getAllocRel( namespaceId ).addDataPlacement( store.getAdapterId(), tableId ) ); + // stores.forEach( store -> catalog.getAllocRel( namespaceId ).createAlloctionTable( store.getAdapterId(), tableId ) ); + + // catalog.updateSnapshot(); for ( FieldInformation information : fields ) { - addColumn( namespaceId, information.name, information.typeInformation, information.collation, information.defaultValue, tableId, information.position, stores, placementType ); + addColumn( namespaceId, information.name, information.typeInformation, information.collation, information.defaultValue, table.id, information.position, stores, placementType ); } for ( ConstraintInformation constraint : constraints ) { - addConstraint( namespaceId, constraint.name, constraint.type, constraint.columnNames, tableId ); + addConstraint( namespaceId, constraint.name, constraint.type, constraint.columnNames, table.id ); } Snapshot snapshot = statement.getTransaction().getSnapshot(); - LogicalTable catalogTable = snapshot.getRelSnapshot( namespaceId ).getTable( tableId ); + LogicalTable catalogTable = snapshot.rel().getTable( table.id ); // Trigger rebuild of schema; triggers schema creation on adapters - Catalog.getInstance().getSnapshot(); + catalog.updateSnapshot(); - PartitionProperty property = snapshot.getAllocSnapshot().getPartitionProperty( catalogTable.id ); + PartitionProperty property = snapshot.alloc().getPartitionProperty( catalogTable.id ); for ( DataStore store : stores ) { catalog.getAllocRel( catalogTable.namespaceId ).addPartitionPlacement( @@ -2232,8 +2226,6 @@ public void createTable( long namespaceId, String name, List f PlacementType.AUTOMATIC, DataPlacementRole.UPTODATE ); - catalog.getPhysical( catalogTable.namespaceId ).addPhysicalEntity( - store.createPhysicalTable( statement.getPrepareContext(), catalogTable, null ) ); } } catch ( GenericCatalogException | UnknownColumnException | UnknownCollationException e ) { @@ -2242,6 +2234,49 @@ public void createTable( long namespaceId, String name, List f } + @Override + public void createTable( long namespaceId, String name, List fields, List constraints, boolean ifNotExists, List stores, PlacementType placementType, Statement statement ) { + String adjustedName = adjustNameIfNeeded( name, namespaceId ); + + if ( stores == null ) { + // Ask router on which store(s) the table should be placed + stores = RoutingManager.getInstance().getCreatePlacementStrategy().getDataStoresForNewEntity(); + } + + // addLTable + LogicalTable logical = catalog.getLogicalRel( namespaceId ).addTable( + adjustedName, + EntityType.ENTITY, + true ); + + // addLColumns + + List ids = new ArrayList<>(); + for ( FieldInformation information : fields ) { + ids.add( addColumn( namespaceId, information.name, information.typeInformation, information.collation, information.defaultValue, logical.id, information.position, stores, placementType ) ); + } + catalog.updateSnapshot(); + + // addATable + for ( DataStore store : stores ) { + AllocationTable alloc = catalog.getAllocRel( namespaceId ).createAlloctionTable( store.getAdapterId(), logical.id ); + int i = 0; + for ( Long id : ids ) { + alloc = catalog.getAllocRel( namespaceId ).addColumnPlacement( alloc.id, id, PlacementType.AUTOMATIC, i ); + i++; + } + + catalog.getPhysical( namespaceId ).addEntities( store.createPhysicalTable( statement.getPrepareContext(), logical, alloc ) ); + } + + catalog.updateSnapshot(); + + // addPTable + // via update? 
+ + } + + @Override public void createCollection( long namespaceId, String name, boolean ifNotExists, List stores, PlacementType placementType, Statement statement ) throws EntityAlreadyExistsException { name = adjustNameIfNeeded( name, namespaceId ); @@ -2269,7 +2304,7 @@ public void createCollection( long namespaceId, String name, boolean ifNotExists true ); // Initially create DataPlacement containers on every store the table should be placed. - LogicalCollection catalogCollection = catalog.getSnapshot().getDocSnapshot( namespaceId ).getCollection( collectionId ); + LogicalCollection catalogCollection = catalog.getSnapshot().doc().getCollection( collectionId ); // Trigger rebuild of schema; triggers schema creation on adapters Catalog.getInstance().getSnapshot(); @@ -2291,7 +2326,7 @@ public void createCollection( long namespaceId, String name, boolean ifNotExists private boolean assertEntityExists( long namespaceId, String name, boolean ifNotExists ) throws EntityAlreadyExistsException { // Check if there is already an entity with this name - if ( catalog.getSnapshot().getRelSnapshot( namespaceId ).checkIfExistsEntity( name ) ) { + if ( catalog.getSnapshot().rel().checkIfExistsEntity( name ) ) { if ( ifNotExists ) { // It is ok that there is already a table with this name because "IF NOT EXISTS" was specified return true; @@ -2307,7 +2342,7 @@ private boolean assertEntityExists( long namespaceId, String name, boolean ifNot public void dropCollection( LogicalCollection catalogCollection, Statement statement ) { AdapterManager manager = AdapterManager.getInstance(); - for ( CatalogCollectionPlacement placement : statement.getTransaction().getSnapshot().getAllocSnapshot().getCollectionPlacements( catalogCollection.id ) ) { + for ( CatalogCollectionPlacement placement : statement.getTransaction().getSnapshot().alloc().getCollectionPlacements( catalogCollection.id ) ) { DataStore store = (DataStore) manager.getAdapter( placement.adapterId ); store.dropCollection( statement.getPrepareContext(), catalogCollection ); @@ -2319,7 +2354,7 @@ public void dropCollection( LogicalCollection catalogCollection, Statement state public void removeDocumentLogistics( LogicalCollection catalogCollection, Statement statement ) { // CatalogCollectionMapping mapping = catalog.getAllocDoc( catalogCollection.namespaceId ).getCollectionMapping( catalogCollection.id ); - // LogicalTable table = catalog.getSnapshot().getRelSnapshot( catalogCollection.namespaceId ).getTable( mapping.collectionId ); + // LogicalTable table = catalog.getSnapshot().rel().getTable( mapping.collectionId ); // catalog.getLogicalRel( catalogCollection.namespaceId ).deleteTable( table.id ); } @@ -2330,7 +2365,7 @@ public void addCollectionPlacement( long namespaceId, String name, List column @Override public void addPartitioning( PartitionInformation partitionInfo, List stores, Statement statement ) throws GenericCatalogException, UnknownPartitionTypeException, UnknownColumnException, PartitionGroupNamesNotUniqueException, UnknownTableException, TransactionException, UnknownSchemaException, UnknownUserException, UnknownKeyException { Snapshot snapshot = statement.getTransaction().getSnapshot(); - LogicalColumn logicalColumn = snapshot.getRelSnapshot( partitionInfo.table.namespaceId ).getColumn( partitionInfo.table.id, partitionInfo.columnName ); + LogicalColumn logicalColumn = snapshot.rel().getColumn( partitionInfo.table.id, partitionInfo.columnName ); PartitionType actualPartitionType = PartitionType.getByName( partitionInfo.typeName ); 
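
The rewritten createTable above splits table creation into three explicit phases: the logical table and its columns are created first, then one allocation table per store with automatic column placements, and finally each store builds the physical entity, with catalog.updateSnapshot() refreshing the snapshot between phases. Below is a minimal sketch of that flow, for orientation only. It uses only the calls visible in the added lines of this patch (addTable, addColumn, createAlloctionTable, addColumnPlacement, createPhysicalTable, addEntities, updateSnapshot); the method name createTableSketch, the trimmed parameter list, and the fixed PlacementType.AUTOMATIC are illustrative assumptions, not part of the patch.

    // Sketch only: mirrors the phases of the new createTable; signatures assumed as they appear in this patch.
    private void createTableSketch( long namespaceId, String name, List<FieldInformation> fields, List<DataStore> stores, Statement statement ) throws GenericCatalogException, UnknownCollationException, UnknownColumnException {
        // Phase 1: logical table and logical columns (no placements yet); addColumn now returns the new column id
        LogicalTable logical = catalog.getLogicalRel( namespaceId ).addTable( name, EntityType.ENTITY, true );
        List<Long> columnIds = new ArrayList<>();
        for ( FieldInformation field : fields ) {
            columnIds.add( addColumn( namespaceId, field.name, field.typeInformation, field.collation, field.defaultValue, logical.id, field.position, stores, PlacementType.AUTOMATIC ) );
        }
        catalog.updateSnapshot(); // make the logical entity visible before allocating

        // Phase 2: one allocation table per store, with a column placement per column
        for ( DataStore store : stores ) {
            AllocationTable alloc = catalog.getAllocRel( namespaceId ).createAlloctionTable( store.getAdapterId(), logical.id );
            int position = 0;
            for ( Long columnId : columnIds ) {
                alloc = catalog.getAllocRel( namespaceId ).addColumnPlacement( alloc.id, columnId, PlacementType.AUTOMATIC, position++ );
            }
            // Phase 3: the store creates the physical entity for this allocation
            catalog.getPhysical( namespaceId ).addEntities( store.createPhysicalTable( statement.getPrepareContext(), logical, alloc ) );
        }
        catalog.updateSnapshot(); // later reads go through catalog.getSnapshot().rel() / .alloc()
    }

Note that, unlike createTableOld, no placements are created inside addColumn anymore (its placement loop is commented out further down); the allocation phase owns all column placements, keeping logical and allocation concerns separated.
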
@@ -2540,7 +2575,7 @@ public void addPartitioning( PartitionInformation partitionInfo, List List partitionIds = new ArrayList<>(); // Get all PartitionGroups and then get all partitionIds for each PG and add them to the complete list of partitionIds //catalog.getLogicalRel( catalogTable.namespaceId ).getPartitionGroups( partitionInfo.table.id ).forEach( pg -> partitionIds.forEach( p -> partitionIds.add( p ) ) ); - partitionGroupIds.forEach( pg -> catalog.getSnapshot().getAllocSnapshot().getPartitions( pg ).forEach( p -> partitionIds.add( p.id ) ) ); + partitionGroupIds.forEach( pg -> catalog.getSnapshot().alloc().getPartitions( pg ).forEach( p -> partitionIds.add( p.id ) ) ); PartitionProperty partitionProperty; if ( actualPartitionType == PartitionType.TEMPERATURE ) { @@ -2572,7 +2607,7 @@ public void addPartitioning( PartitionInformation partitionInfo, List // -1 because one partition is already created in COLD List partitionsForHot = new ArrayList<>(); - catalog.getSnapshot().getAllocSnapshot().getPartitions( partitionGroupIds.get( 0 ) ).forEach( p -> partitionsForHot.add( p.id ) ); + catalog.getSnapshot().alloc().getPartitions( partitionGroupIds.get( 0 ) ).forEach( p -> partitionsForHot.add( p.id ) ); // -1 because one partition is already created in HOT for ( int i = 0; i < numberOfPartitionsInHot - 1; i++ ) { @@ -2586,7 +2621,7 @@ public void addPartitioning( PartitionInformation partitionInfo, List // -1 because one partition is already created in COLD List partitionsForCold = new ArrayList<>(); - catalog.getSnapshot().getAllocSnapshot().getPartitions( partitionGroupIds.get( 1 ) ).forEach( p -> partitionsForCold.add( p.id ) ); + catalog.getSnapshot().alloc().getPartitions( partitionGroupIds.get( 1 ) ).forEach( p -> partitionsForCold.add( p.id ) ); for ( int i = 0; i < numberOfPartitionsInCold - 1; i++ ) { long tempId; @@ -2630,7 +2665,7 @@ public void addPartitioning( PartitionInformation partitionInfo, List // Get primary key of table and use PK to find all DataPlacements of table long pkid = partitionInfo.table.primaryKey; - LogicalRelSnapshot relSnapshot = catalog.getSnapshot().getRelSnapshot( partitionInfo.table.namespaceId ); + LogicalRelSnapshot relSnapshot = catalog.getSnapshot().rel(); List pkColumnIds = relSnapshot.getPrimaryKey( pkid ).columnIds; // Basically get the first part of the PK; even if it is a compound PK, this is sufficient LogicalColumn pkColumn = relSnapshot.getColumn( pkColumnIds.get( 0 ) ); @@ -2641,7 +2676,7 @@ public void addPartitioning( PartitionInformation partitionInfo, List stores = new ArrayList<>(); fillStores = true; } - List catalogColumnPlacements = snapshot.getAllocSnapshot().getColumnPlacements( pkColumn.id ); + List catalogColumnPlacements = snapshot.alloc().getColumnPlacements( pkColumn.id ); for ( CatalogColumnPlacement ccp : catalogColumnPlacements ) { if ( fillStores ) { // Ask router on which store(s) the table should be placed @@ -2673,7 +2708,7 @@ public void addPartitioning( PartitionInformation partitionInfo, List // Get only columns that are actually on that store // Every store of a newly partitioned table will initially hold all partitions List necessaryColumns = new LinkedList<>(); - catalog.getSnapshot().getAllocSnapshot().getColumnPlacementsOnAdapterPerTable( store.getAdapterId(), partitionedTable.id ).forEach( cp -> necessaryColumns.add( 
relSnapshot.getColumn( cp.columnId ) ) ); // Copy data from the old partition to new partitions dataMigrator.copyPartitionData( @@ -2682,8 +2717,8 @@ public void addPartitioning( PartitionInformation partitionInfo, List unPartitionedTable, partitionedTable, necessaryColumns, - snapshot.getAllocSnapshot().getPartitionProperty( unPartitionedTable.id ).partitionIds, - snapshot.getAllocSnapshot().getPartitionProperty( partitionedTable.id ).partitionIds ); + snapshot.alloc().getPartitionProperty( unPartitionedTable.id ).partitionIds, + snapshot.alloc().getPartitionProperty( partitionedTable.id ).partitionIds ); } // Adjust indexes @@ -2691,7 +2726,7 @@ public void addPartitioning( PartitionInformation partitionInfo, List for ( CatalogIndex index : indexes ) { // Remove old index DataStore ds = ((DataStore) AdapterManager.getInstance().getAdapter( index.location )); - ds.dropIndex( statement.getPrepareContext(), index, snapshot.getAllocSnapshot().getPartitionProperty( unPartitionedTable.id ).partitionIds ); + ds.dropIndex( statement.getPrepareContext(), index, snapshot.alloc().getPartitionProperty( unPartitionedTable.id ).partitionIds ); catalog.getLogicalRel( partitionInfo.table.namespaceId ).deleteIndex( index.id ); // Add new index long newIndexId = catalog.getLogicalRel( partitionInfo.table.namespaceId ).addIndex( @@ -2709,13 +2744,13 @@ public void addPartitioning( PartitionInformation partitionInfo, List ds.addIndex( statement.getPrepareContext(), relSnapshot.getIndex( newIndexId ), - catalog.getSnapshot().getAllocSnapshot().getPartitionsOnDataPlacement( ds.getAdapterId(), unPartitionedTable.id ) ); + catalog.getSnapshot().alloc().getPartitionsOnDataPlacement( ds.getAdapterId(), unPartitionedTable.id ) ); } } // Remove old tables - stores.forEach( store -> store.dropTable( statement.getPrepareContext(), unPartitionedTable, snapshot.getAllocSnapshot().getPartitionProperty( unPartitionedTable.id ).partitionIds ) ); - catalog.getAllocRel( partitionInfo.table.namespaceId ).deletePartitionGroup( unPartitionedTable.id, unPartitionedTable.namespaceId, snapshot.getAllocSnapshot().getPartitionProperty( unPartitionedTable.id ).partitionIds.get( 0 ) ); + stores.forEach( store -> store.dropTable( statement.getPrepareContext(), unPartitionedTable, snapshot.alloc().getPartitionProperty( unPartitionedTable.id ).partitionIds ) ); + catalog.getAllocRel( partitionInfo.table.namespaceId ).deletePartitionGroup( unPartitionedTable.id, unPartitionedTable.namespaceId, snapshot.alloc().getPartitionProperty( unPartitionedTable.id ).partitionIds.get( 0 ) ); // Reset plan cache, implementation cache & routing cache statement.getQueryProcessor().resetCaches(); @@ -2732,9 +2767,9 @@ public void removePartitioning( LogicalTable partitionedTable, Statement stateme partitionedTable.name, partitionedTable.id, snapshot.getNamespace( partitionedTable.namespaceId ) ); } - LogicalRelSnapshot relSnapshot = catalog.getSnapshot().getRelSnapshot( partitionedTable.namespaceId ); + LogicalRelSnapshot relSnapshot = catalog.getSnapshot().rel(); - PartitionProperty partition = snapshot.getAllocSnapshot().getPartitionProperty( partitionedTable.id ); + PartitionProperty partition = snapshot.alloc().getPartitionProperty( partitionedTable.id ); // Need to gather the partitionDistribution before actually merging // We need a columnPlacement for every partition @@ -2757,7 +2792,7 @@ public void removePartitioning( LogicalTable partitionedTable, Statement stateme LogicalColumn pkColumn = relSnapshot.getColumn( pkColumnIds.get( 0 ) ); // This
gets us only one ccp per store (first part of PK) - List catalogColumnPlacements = catalog.getSnapshot().getAllocSnapshot().getColumnPlacements( pkColumn.id ); + List catalogColumnPlacements = catalog.getSnapshot().alloc().getColumnPlacements( pkColumn.id ); for ( CatalogColumnPlacement ccp : catalogColumnPlacements ) { // Ask router on which store(s) the table should be placed Adapter adapter = AdapterManager.getInstance().getAdapter( ccp.adapterId ); @@ -2770,7 +2805,7 @@ public void removePartitioning( LogicalTable partitionedTable, Statement stateme // For merge create only full placements on the used stores. Otherwise partition constraints might not hold for ( DataStore store : stores ) { - PartitionProperty property = snapshot.getAllocSnapshot().getPartitionProperty( mergedTable.id ); + PartitionProperty property = snapshot.alloc().getPartitionProperty( mergedTable.id ); // Need to create partitionPlacements first in order to trigger schema creation on PolySchemaBuilder catalog.getAllocRel( partitionedTable.namespaceId ).addPartitionPlacement( mergedTable.namespaceId, @@ -2785,13 +2820,13 @@ public void removePartitioning( LogicalTable partitionedTable, Statement stateme // Get only columns that are actually on that store List necessaryColumns = new LinkedList<>(); - catalog.getSnapshot().getAllocSnapshot().getColumnPlacementsOnAdapterPerTable( store.getAdapterId(), mergedTable.id ).forEach( cp -> necessaryColumns.add( relSnapshot.getColumn( cp.columnId ) ) ); + catalog.getSnapshot().alloc().getColumnPlacementsOnAdapterPerTable( store.getAdapterId(), mergedTable.id ).forEach( cp -> necessaryColumns.add( relSnapshot.getColumn( cp.columnId ) ) ); // TODO @HENNLO Check if this can be omitted catalog.getAllocRel( partitionedTable.namespaceId ).updateDataPlacement( store.getAdapterId(), mergedTable.id, - catalog.getSnapshot().getAllocSnapshot().getDataPlacement( store.getAdapterId(), mergedTable.id ).columnPlacementsOnAdapter, + catalog.getSnapshot().alloc().getDataPlacement( store.getAdapterId(), mergedTable.id ).columnPlacementsOnAdapter, property.partitionIds ); // @@ -2810,7 +2845,7 @@ public void removePartitioning( LogicalTable partitionedTable, Statement stateme for ( CatalogIndex index : indexes ) { // Remove old index DataStore ds = (DataStore) AdapterManager.getInstance().getAdapter( index.location ); - PartitionProperty property = snapshot.getAllocSnapshot().getPartitionProperty( partitionedTable.id ); + PartitionProperty property = snapshot.alloc().getPartitionProperty( partitionedTable.id ); ds.dropIndex( statement.getPrepareContext(), index, property.partitionIds ); catalog.getLogicalRel( partitionedTable.namespaceId ).deleteIndex( index.id ); // Add new index @@ -2829,14 +2864,14 @@ public void removePartitioning( LogicalTable partitionedTable, Statement stateme ds.addIndex( statement.getPrepareContext(), relSnapshot.getIndex( newIndexId ), - catalog.getSnapshot().getAllocSnapshot().getPartitionsOnDataPlacement( ds.getAdapterId(), mergedTable.id ) ); + catalog.getSnapshot().alloc().getPartitionsOnDataPlacement( ds.getAdapterId(), mergedTable.id ) ); } } // Needs to be separated from loop above. 
Otherwise we lose data for ( DataStore store : stores ) { List partitionIdsOnStore = new ArrayList<>(); - PartitionProperty property = snapshot.getAllocSnapshot().getPartitionProperty( mergedTable.id ); + PartitionProperty property = snapshot.alloc().getPartitionProperty( mergedTable.id ); // Otherwise everything will be dropped again, leaving the table inaccessible partitionIdsOnStore.remove( property.partitionIds.get( 0 ) ); @@ -2845,7 +2880,7 @@ } // Loop over **old.partitionIds** to delete all partitions which are part of table // Needs to be done separately because partitionPlacements will be recursively dropped in `deletePartitionGroup` but are needed in dropTable - PartitionProperty property = snapshot.getAllocSnapshot().getPartitionProperty( partitionedTable.id ); + PartitionProperty property = snapshot.alloc().getPartitionProperty( partitionedTable.id ); for ( long partitionGroupId : property.partitionGroupIds ) { catalog.getAllocRel( partitionedTable.namespaceId ).deletePartitionGroup( tableId, partitionedTable.namespaceId, partitionGroupId ); } @@ -2855,8 +2890,8 @@ } - private void addColumn( long namespaceId, String columnName, ColumnTypeInformation typeInformation, Collation collation, String defaultValue, long tableId, int position, List stores, PlacementType placementType ) throws GenericCatalogException, UnknownCollationException, UnknownColumnException { - columnName = adjustNameIfNeeded( columnName, catalog.getSnapshot().getRelSnapshot( namespaceId ).getTable( tableId ).namespaceId ); + private long addColumn( long namespaceId, String columnName, ColumnTypeInformation typeInformation, Collation collation, String defaultValue, long tableId, int position, List stores, PlacementType placementType ) throws GenericCatalogException, UnknownCollationException, UnknownColumnException { + columnName = adjustNameIfNeeded( columnName, namespaceId ); long addedColumnId = catalog.getLogicalRel( namespaceId ).addColumn( columnName, tableId, @@ -2874,15 +2909,16 @@ private void addColumn( long namespaceId, String columnName, ColumnTypeInformati // Add default value addDefaultValue( namespaceId, defaultValue, addedColumnId ); - for ( DataStore s : stores ) { - AllocationEntity allocation = catalog.getSnapshot().getAllocSnapshot().getAllocation( s.getAdapterId(), tableId ); + /*for ( DataStore s : stores ) { + AllocationEntity allocation = catalog.getSnapshot().alloc().getAllocation( s.getAdapterId(), tableId ); catalog.getAllocRel( namespaceId ).addColumnPlacement( allocation.id, addedColumnId, placementType, null, null, null, position ); - } + }*/ + return addedColumnId; } @@ -2890,7 +2926,7 @@ private void addColumn( long namespaceId, String columnName, ColumnTypeInformati public void addConstraint( long namespaceId, String constraintName, ConstraintType constraintType, List columnNames, long tableId ) throws UnknownColumnException, GenericCatalogException { List columnIds = new LinkedList<>(); for ( String columnName : columnNames ) { - LogicalColumn logicalColumn = catalog.getSnapshot().getRelSnapshot( namespaceId ).getColumn( tableId, columnName ); + LogicalColumn logicalColumn = catalog.getSnapshot().rel().getColumn( tableId, columnName ); columnIds.add( logicalColumn.id ); } if ( constraintType == ConstraintType.PRIMARY ) { @@ -2905,21 +2941,21 @@ public void addConstraint( long namespaceId, String constraintName, 
ConstraintTy @Override - public void dropNamespace( String schemaName, boolean ifExists, Statement statement ) throws SchemaNotExistException, DdlOnSourceException { - schemaName = schemaName.toLowerCase(); + public void dropNamespace( String namespaceName, boolean ifExists, Statement statement ) throws SchemaNotExistException, DdlOnSourceException { + namespaceName = namespaceName.toLowerCase(); // Check if there is a schema with this name - if ( catalog.getSnapshot().checkIfExistsNamespace( schemaName ) ) { - LogicalNamespace logicalNamespace = catalog.getSnapshot().getNamespace( schemaName ); + if ( catalog.getSnapshot().checkIfExistsNamespace( namespaceName ) ) { + LogicalNamespace logicalNamespace = catalog.getSnapshot().getNamespace( namespaceName ); // Drop all collections in this namespace - List collections = catalog.getSnapshot().getDocSnapshot( logicalNamespace.id ).getCollections( null ); + List collections = catalog.getSnapshot().doc().getLogicalCollections( logicalNamespace.id, null ); for ( LogicalCollection collection : collections ) { dropCollection( collection, statement ); } // Drop all tables in this schema - List catalogEntities = catalog.getSnapshot().getRelSnapshot( logicalNamespace.id ).getTables( , null ); + List catalogEntities = catalog.getSnapshot().rel().getTables( Pattern.of( namespaceName ), null ); for ( LogicalTable catalogTable : catalogEntities ) { dropTable( catalogTable, statement ); } @@ -2953,7 +2989,7 @@ public void dropView( LogicalTable catalogView, Statement statement ) throws Ddl // Delete columns - for ( LogicalColumn column : snapshot.getRelSnapshot( catalogView.namespaceId ).getColumns( catalogView.id ) ) { + for ( LogicalColumn column : snapshot.rel().getColumns( catalogView.id ) ) { catalog.getLogicalRel( catalogView.namespaceId ).deleteColumn( column.id ); } @@ -2987,8 +3023,7 @@ public void dropMaterializedView( LogicalTable materializedView, Statement state } - @Override - public void dropTable( LogicalTable catalogTable, Statement statement ) throws DdlOnSourceException { + public void dropTableOld( LogicalTable catalogTable, Statement statement ) throws DdlOnSourceException { Snapshot snapshot = catalog.getSnapshot(); // Make sure that this is a table of type TABLE (and not SOURCE) //checkIfDdlPossible( catalogEntity.tableType ); @@ -2998,7 +3033,7 @@ public void dropTable( LogicalTable catalogTable, Statement statement ) throws D // Check if there are foreign keys referencing this table List selfRefsToDelete = new LinkedList<>(); - LogicalRelSnapshot relSnapshot = snapshot.getRelSnapshot( catalogTable.namespaceId ); + LogicalRelSnapshot relSnapshot = snapshot.rel(); List exportedKeys = relSnapshot.getExportedKeys( catalogTable.id ); if ( exportedKeys.size() > 0 ) { for ( CatalogForeignKey foreignKey : exportedKeys ) { @@ -3012,7 +3047,7 @@ public void dropTable( LogicalTable catalogTable, Statement statement ) throws D } // Make sure that all adapters are of type store (and not source) - List placements = snapshot.getAllocSnapshot().getDataPlacements( catalogTable.id ); + List placements = snapshot.alloc().getDataPlacements( catalogTable.id ); for ( CatalogDataPlacement placement : placements ) { getDataStoreInstance( placement.adapterId ); } @@ -3027,7 +3062,7 @@ public void dropTable( LogicalTable catalogTable, Statement statement ) throws D AdapterManager.getInstance().getStore( index.location ).dropIndex( statement.getPrepareContext(), index, - snapshot.getAllocSnapshot().getPartitionsOnDataPlacement( index.location, 
catalogTable.id ) ); + snapshot.alloc().getPartitionsOnDataPlacement( index.location, catalogTable.id ) ); } // Delete index in catalog catalog.getLogicalRel( catalogTable.namespaceId ).deleteIndex( index.id ); @@ -3035,19 +3070,19 @@ public void dropTable( LogicalTable catalogTable, Statement statement ) throws D // Delete data from the stores and remove the column placement catalog.getLogicalRel( catalogTable.namespaceId ).flagTableForDeletion( catalogTable.id, true ); - List p = snapshot.getAllocSnapshot().getDataPlacements( catalogTable.id ); + List p = snapshot.alloc().getDataPlacements( catalogTable.id ); List columns; for ( CatalogDataPlacement placement : p ) { // Delete table on store List partitionIdsOnStore = new ArrayList<>(); - snapshot.getAllocSnapshot().getPartitionPlacementsByTableOnAdapter( placement.adapterId, catalogTable.id ).forEach( pl -> partitionIdsOnStore.add( pl.partitionId ) ); + snapshot.alloc().getPartitionPlacementsByTableOnAdapter( placement.adapterId, catalogTable.id ).forEach( pl -> partitionIdsOnStore.add( pl.partitionId ) ); AdapterManager.getInstance().getStore( placement.adapterId ).dropTable( statement.getPrepareContext(), catalogTable, partitionIdsOnStore ); // Delete column placement in catalog - columns = snapshot.getRelSnapshot( catalogTable.namespaceId ).getColumns( catalogTable.id ); + columns = snapshot.rel().getColumns( catalogTable.id ); for ( LogicalColumn column : columns ) { - if ( catalog.getSnapshot().getAllocSnapshot().checkIfExistsColumnPlacement( placement.adapterId, column.id ) ) { - AllocationEntity allocation = catalog.getSnapshot().getAllocSnapshot().getAllocation( placement.getAdapterId(), catalogTable.id ); + if ( catalog.getSnapshot().alloc().checkIfExistsColumnPlacement( placement.adapterId, column.id ) ) { + AllocationEntity allocation = catalog.getSnapshot().alloc().getAllocation( placement.getAdapterId(), catalogTable.id ); catalog.getAllocRel( catalogTable.namespaceId ).deleteColumnPlacement( allocation.id, column.id, false ); } } @@ -3089,7 +3124,7 @@ public void dropTable( LogicalTable catalogTable, Statement statement ) throws D } // Delete columns - columns = snapshot.getRelSnapshot( catalogTable.namespaceId ).getColumns( catalogTable.id ); + columns = snapshot.rel().getColumns( catalogTable.id ); for ( LogicalColumn column : columns ) { catalog.getLogicalRel( catalogTable.namespaceId ).deleteColumn( column.id ); } @@ -3108,6 +3143,29 @@ public void dropTable( LogicalTable catalogTable, Statement statement ) throws D } + @Override + public void dropTable( LogicalTable catalogTable, Statement statement ) throws DdlOnSourceException { + // Make sure that all adapters are of type store (and not source) + Snapshot snapshot = catalog.getSnapshot(); + + // delete all allocs and physicals + List allocations = snapshot.alloc().getAllocationsFromLogical( catalogTable.id ); + for ( AllocationEntity allocation : allocations ) { + for ( PhysicalEntity physical : snapshot.physical().fromAlloc( allocation.id ) ) { + catalog.getPhysical( catalogTable.namespaceId ).deleteEntity( physical.id ); + } + catalog.getAllocRel( allocation.namespaceId ).deleteAllocation( allocation.id ); + } + + // delete logical + + catalog.getLogicalRel( catalogTable.namespaceId ).deleteTable( catalogTable.id ); + + catalog.updateSnapshot(); + + } + + @Override public void truncate( LogicalTable catalogTable, Statement statement ) { // Make sure that the table can be modified @@ -3119,7 +3177,7 @@ public void truncate( LogicalTable catalogTable, Statement 
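
// A condensed sketch of the delete order the new dropTable above follows:
// physical entities depend on their allocation and allocations depend on the
// logical table, so deletion runs physical -> allocation -> logical, then
// refreshes the snapshot. The facade and mutator names are taken from this
// patch; the surrounding types and exact signatures are assumed.
void dropLogicalTableSketch( Catalog catalog, LogicalTable table ) {
    Snapshot snapshot = catalog.getSnapshot();
    for ( AllocationEntity allocation : snapshot.alloc().getAllocationsFromLogical( table.id ) ) {
        for ( PhysicalEntity physical : snapshot.physical().fromAlloc( allocation.id ) ) {
            catalog.getPhysical( table.namespaceId ).deleteEntity( physical.id ); // physicals first
        }
        catalog.getAllocRel( allocation.namespaceId ).deleteAllocation( allocation.id );
    }
    catalog.getLogicalRel( table.namespaceId ).deleteTable( table.id ); // logical entity last
    catalog.updateSnapshot(); // make the change visible to readers
}
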
statement ) { prepareMonitoring( statement, Kind.TRUNCATE, catalogTable ); // Execute truncate on all placements - List placements = statement.getTransaction().getSnapshot().getAllocSnapshot().getDataPlacements( catalogTable.id ); + List placements = statement.getTransaction().getSnapshot().alloc().getDataPlacements( catalogTable.id ); placements.forEach( placement -> { AdapterManager.getInstance().getAdapter( placement.adapterId ).truncate( statement.getPrepareContext(), catalogTable ); } ); diff --git a/dbms/src/main/java/org/polypheny/db/partition/AbstractPartitionManager.java b/dbms/src/main/java/org/polypheny/db/partition/AbstractPartitionManager.java index 1df636ba97..6fc8aa4176 100644 --- a/dbms/src/main/java/org/polypheny/db/partition/AbstractPartitionManager.java +++ b/dbms/src/main/java/org/polypheny/db/partition/AbstractPartitionManager.java @@ -44,8 +44,8 @@ public abstract class AbstractPartitionManager implements PartitionManager { @Override public boolean probePartitionGroupDistributionChange( LogicalTable catalogTable, int storeId, long columnId, int threshold ) { // Check for the specified columnId if we still have a ColumnPlacement for every partitionGroup - for ( Long partitionGroupId : Catalog.getInstance().getSnapshot().getAllocSnapshot().getPartitionProperty( catalogTable.id ).partitionGroupIds ) { - List ccps = catalog.getSnapshot().getAllocSnapshot().getColumnPlacementsByPartitionGroup( catalogTable.id, partitionGroupId, columnId ); + for ( Long partitionGroupId : Catalog.getInstance().getSnapshot().alloc().getPartitionProperty( catalogTable.id ).partitionGroupIds ) { + List ccps = catalog.getSnapshot().alloc().getColumnPlacementsByPartitionGroup( catalogTable.id, partitionGroupId, columnId ); if ( ccps.size() <= threshold ) { for ( CatalogColumnPlacement placement : ccps ) { if ( placement.adapterId == storeId ) { @@ -66,11 +66,11 @@ public Map> getRelevantPlacements( LogicalTab if ( partitionIds != null ) { for ( long partitionId : partitionIds ) { - CatalogPartition catalogPartition = catalog.getSnapshot().getAllocSnapshot().getPartition( partitionId ); + CatalogPartition catalogPartition = catalog.getSnapshot().alloc().getPartition( partitionId ); List relevantCcps = new ArrayList<>(); - for ( LogicalColumn column : catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getColumns( catalogTable.id ) ) { - List ccps = catalog.getSnapshot().getAllocSnapshot().getColumnPlacementsByPartitionGroup( catalogTable.id, catalogPartition.partitionGroupId, column.id ); + for ( LogicalColumn column : catalog.getSnapshot().rel().getColumns( catalogTable.id ) ) { + List ccps = catalog.getSnapshot().alloc().getColumnPlacementsByPartitionGroup( catalogTable.id, catalogPartition.partitionGroupId, column.id ); ccps.removeIf( ccp -> excludedAdapters.contains( ccp.adapterId ) ); if ( !ccps.isEmpty() ) { // Get first column placement which contains partition @@ -131,13 +131,13 @@ public Map>> getAllPlacements( Logi Map>> adapterPlacements = new HashMap<>(); // adapterId -> partitionId ; placements if ( partitionIds != null ) { for ( long partitionId : partitionIds ) { - List adapters = catalog.getSnapshot().getAllocSnapshot().getAdaptersByPartitionGroup( catalogTable.id, partitionId ); + List adapters = catalog.getSnapshot().alloc().getAdaptersByPartitionGroup( catalogTable.id, partitionId ); for ( CatalogAdapter adapter : adapters ) { if ( !adapterPlacements.containsKey( adapter.id ) ) { adapterPlacements.put( adapter.id, new HashMap<>() ); } - List placements = 
catalog.getSnapshot().getAllocSnapshot().getColumnPlacementsOnAdapterPerTable( adapter.id, catalogTable.id ); + List placements = catalog.getSnapshot().alloc().getColumnPlacementsOnAdapterPerTable( adapter.id, catalogTable.id ); adapterPlacements.get( adapter.id ).put( partitionId, placements ); } } diff --git a/dbms/src/main/java/org/polypheny/db/partition/FrequencyMapImpl.java b/dbms/src/main/java/org/polypheny/db/partition/FrequencyMapImpl.java index e42ece4ca8..f5d3d5a6d3 100644 --- a/dbms/src/main/java/org/polypheny/db/partition/FrequencyMapImpl.java +++ b/dbms/src/main/java/org/polypheny/db/partition/FrequencyMapImpl.java @@ -127,7 +127,7 @@ private void processAllPeriodicTables() { List periodicTables = catalog.getSnapshot().getTablesForPeriodicProcessing(); // Retrieve all Tables which rely on periodic processing for ( LogicalTable table : periodicTables ) { - if ( catalog.getSnapshot().getAllocSnapshot().getPartitionProperty( table.id ).partitionType == PartitionType.TEMPERATURE ) { + if ( catalog.getSnapshot().alloc().getPartitionProperty( table.id ).partitionType == PartitionType.TEMPERATURE ) { determinePartitionFrequency( table, invocationTimestamp ); } } @@ -159,7 +159,7 @@ private void determinePartitionDistribution( LogicalTable table ) { log.debug( "Determine access frequency of partitions of table: {}", table.name ); } - PartitionProperty property = catalog.getSnapshot().getAllocSnapshot().getPartitionProperty( table.id ); + PartitionProperty property = catalog.getSnapshot().alloc().getPartitionProperty( table.id ); // Get percentage of tables which can remain in HOT long numberOfPartitionsInHot = (property.partitionIds.size() * ((TemperaturePartitionProperty) property).getHotAccessPercentageIn()) / 100; @@ -220,7 +220,7 @@ private void determinePartitionDistribution( LogicalTable table ) { // Which of those are currently in cold --> action needed - List currentHotPartitions = Catalog.getInstance().getSnapshot().getAllocSnapshot().getPartitions( ((TemperaturePartitionProperty) property).getHotPartitionGroupId() ); + List currentHotPartitions = Catalog.getInstance().getSnapshot().alloc().getPartitions( ((TemperaturePartitionProperty) property).getHotPartitionGroupId() ); for ( CatalogPartition catalogPartition : currentHotPartitions ) { // Remove partitions from List if they are already in HOT (not necessary to send to DataMigrator) @@ -270,10 +270,10 @@ private void redistributePartitions( LogicalTable table, List partitionsFr DataMigrator dataMigrator = statement.getTransaction().getDataMigrator(); Snapshot snapshot = transaction.getSnapshot(); - PartitionProperty property = snapshot.getAllocSnapshot().getPartitionProperty( table.id ); + PartitionProperty property = snapshot.alloc().getPartitionProperty( table.id ); - List adaptersWithHot = snapshot.getAllocSnapshot().getAdaptersByPartitionGroup( table.id, ((TemperaturePartitionProperty) property).getHotPartitionGroupId() ); - List adaptersWithCold = snapshot.getAllocSnapshot().getAdaptersByPartitionGroup( table.id, ((TemperaturePartitionProperty) property).getColdPartitionGroupId() ); + List adaptersWithHot = snapshot.alloc().getAdaptersByPartitionGroup( table.id, ((TemperaturePartitionProperty) property).getHotPartitionGroupId() ); + List adaptersWithCold = snapshot.alloc().getAdaptersByPartitionGroup( table.id, ((TemperaturePartitionProperty) property).getColdPartitionGroupId() ); log.debug( "Get adapters to create physical tables" ); // Validate that partition does not already exist on store @@ -336,7 +336,7 @@ 
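
// The HOT/COLD split in determinePartitionDistribution above is plain integer
// arithmetic on the partition count; a self-contained sketch of the same
// formula, with the percentage accessor name taken from this hunk and the
// example inputs invented for illustration.
class HotGroupSizing {
    // Mirrors: (property.partitionIds.size() * getHotAccessPercentageIn()) / 100
    static long numberOfHotPartitions( int partitionCount, int hotAccessPercentageIn ) {
        // Integer division floors the quota: 10 partitions at 25% -> 2 stay HOT.
        return (long) partitionCount * hotAccessPercentageIn / 100;
    }
}
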
private void createHotTables( LogicalTable table, List partitionsFromColdT // If this store contains both Groups HOT {@literal &} COLD do nothing if ( hotPartitionsToCreate.size() != 0 ) { - Catalog.getInstance().getSnapshot().getAllocSnapshot().getPartitionsOnDataPlacement( store.getAdapterId(), table.id ); + Catalog.getInstance().getSnapshot().alloc().getPartitionsOnDataPlacement( store.getAdapterId(), table.id ); for ( long partitionId : hotPartitionsToCreate ) { catalog.getAllocRel( table.namespaceId ).addPartitionPlacement( @@ -351,7 +351,7 @@ private void createHotTables( LogicalTable table, List partitionsFromColdT store.createPhysicalTable( statement.getPrepareContext(), table, null ); List logicalColumns = new ArrayList<>(); - catalog.getSnapshot().getAllocSnapshot().getColumnPlacementsOnAdapterPerTable( store.getAdapterId(), table.id ).forEach( cp -> logicalColumns.add( catalog.getSnapshot().getRelSnapshot( table.namespaceId ).getColumn( cp.columnId ) ) ); + catalog.getSnapshot().alloc().getColumnPlacementsOnAdapterPerTable( store.getAdapterId(), table.id ).forEach( cp -> logicalColumns.add( catalog.getSnapshot().rel().getColumn( cp.columnId ) ) ); dataMigrator.copyData( statement.getTransaction(), @@ -386,7 +386,7 @@ private void createHotTables( LogicalTable table, List partitionsFromColdT */ private List filterList( long namespaceId, long adapterId, long tableId, List partitionsToFilter ) { // Remove partition from list if it's already contained on the store - for ( long partitionId : Catalog.getInstance().getSnapshot().getAllocSnapshot().getPartitionsOnDataPlacement( adapterId, tableId ) ) { + for ( long partitionId : Catalog.getInstance().getSnapshot().alloc().getPartitionsOnDataPlacement( adapterId, tableId ) ) { partitionsToFilter.remove( partitionId ); } return partitionsToFilter; @@ -403,7 +403,7 @@ private List filterList( long namespaceId, long adapterId, long tableId, L @Override public void determinePartitionFrequency( LogicalTable table, long invocationTimestamp ) { Snapshot snapshot = catalog.getSnapshot(); - PartitionProperty property = snapshot.getAllocSnapshot().getPartitionProperty( table.id ); + PartitionProperty property = snapshot.alloc().getPartitionProperty( table.id ); Timestamp queryStart = new Timestamp( invocationTimestamp - ((TemperaturePartitionProperty) property).getFrequencyInterval() * 1000 ); accessCounter = new HashMap<>(); diff --git a/dbms/src/main/java/org/polypheny/db/partition/HashPartitionManager.java b/dbms/src/main/java/org/polypheny/db/partition/HashPartitionManager.java index 4bbefc407c..449399e4f1 100644 --- a/dbms/src/main/java/org/polypheny/db/partition/HashPartitionManager.java +++ b/dbms/src/main/java/org/polypheny/db/partition/HashPartitionManager.java @@ -45,7 +45,7 @@ public long getTargetPartitionId( LogicalTable catalogTable, String columnValue hashValue *= -1; } - PartitionProperty property = Catalog.getInstance().getSnapshot().getAllocSnapshot().getPartitionProperty( catalogTable.id ); + PartitionProperty property = Catalog.getInstance().getSnapshot().alloc().getPartitionProperty( catalogTable.id ); // Get designated HASH partition based on number of internal partitions int partitionIndex = (int) (hashValue % property.partitionIds.size()); diff --git a/dbms/src/main/java/org/polypheny/db/partition/ListPartitionManager.java b/dbms/src/main/java/org/polypheny/db/partition/ListPartitionManager.java index d47027c26a..61bcdecd51 100644 --- a/dbms/src/main/java/org/polypheny/db/partition/ListPartitionManager.java +++ 
b/dbms/src/main/java/org/polypheny/db/partition/ListPartitionManager.java @@ -45,7 +45,7 @@ public long getTargetPartitionId( LogicalTable catalogTable, String columnValue long selectedPartitionId = -1; // Process all accumulated CatalogPartitions - for ( CatalogPartition catalogPartition : Catalog.getInstance().getSnapshot().getAllocSnapshot().getPartitionsByTable( catalogTable.id ) ) { + for ( CatalogPartition catalogPartition : Catalog.getInstance().getSnapshot().alloc().getPartitionsByTable( catalogTable.id ) ) { if ( catalogPartition.isUnbound ) { unboundPartitionId = catalogPartition.id; break; diff --git a/dbms/src/main/java/org/polypheny/db/partition/RangePartitionManager.java b/dbms/src/main/java/org/polypheny/db/partition/RangePartitionManager.java index 283866c544..3acc5c2c6c 100644 --- a/dbms/src/main/java/org/polypheny/db/partition/RangePartitionManager.java +++ b/dbms/src/main/java/org/polypheny/db/partition/RangePartitionManager.java @@ -47,7 +47,7 @@ public long getTargetPartitionId( LogicalTable catalogTable, String columnValue long selectedPartitionId = -1; // Process all accumulated CatalogPartitions - for ( CatalogPartition catalogPartition : Catalog.getInstance().getSnapshot().getAllocSnapshot().getPartitionsByTable( catalogTable.id ) ) { + for ( CatalogPartition catalogPartition : Catalog.getInstance().getSnapshot().alloc().getPartitionsByTable( catalogTable.id ) ) { if ( unboundPartitionId == -1 && catalogPartition.isUnbound ) { unboundPartitionId = catalogPartition.id; break; diff --git a/dbms/src/main/java/org/polypheny/db/partition/TemperatureAwarePartitionManager.java b/dbms/src/main/java/org/polypheny/db/partition/TemperatureAwarePartitionManager.java index 73465a4e9a..704d67315d 100644 --- a/dbms/src/main/java/org/polypheny/db/partition/TemperatureAwarePartitionManager.java +++ b/dbms/src/main/java/org/polypheny/db/partition/TemperatureAwarePartitionManager.java @@ -43,7 +43,7 @@ public class TemperatureAwarePartitionManager extends AbstractPartitionManager { public long getTargetPartitionId( LogicalTable catalogTable, String columnValue ) { // Get partition manager PartitionManagerFactory partitionManagerFactory = PartitionManagerFactory.getInstance(); - PartitionProperty property = Catalog.getInstance().getSnapshot().getAllocSnapshot().getPartitionProperty( catalogTable.id ); + PartitionProperty property = Catalog.getInstance().getSnapshot().alloc().getPartitionProperty( catalogTable.id ); PartitionManager partitionManager = partitionManagerFactory.getPartitionManager( ((TemperaturePartitionProperty) property).getInternalPartitionFunction() ); @@ -56,7 +56,7 @@ public long getTargetPartitionId( LogicalTable catalogTable, String columnValue public Map> getRelevantPlacements( LogicalTable catalogTable, List partitionIds, List excludedAdapters ) { // Get partition manager PartitionManagerFactory partitionManagerFactory = PartitionManagerFactory.getInstance(); - PartitionProperty property = Catalog.getInstance().getSnapshot().getAllocSnapshot().getPartitionProperty( catalogTable.id ); + PartitionProperty property = Catalog.getInstance().getSnapshot().alloc().getPartitionProperty( catalogTable.id ); PartitionManager partitionManager = partitionManagerFactory.getPartitionManager( ((TemperaturePartitionProperty) property).getInternalPartitionFunction() ); @@ -69,7 +69,7 @@ public Map> getRelevantPlacements( LogicalTab public Map>> getAllPlacements( LogicalTable catalogTable, List partitionIds ) { // Get partition manager PartitionManagerFactory 
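
// getTargetPartitionId in the HashPartitionManager hunk above hashes the column
// value and maps it onto the partition list by modulo; a sketch of that
// selection, including the negative-hash guard from the patch. The helper name
// and inputs are invented for illustration.
static int targetPartitionIndex( long hashValue, int partitionCount ) {
    if ( hashValue < 0 ) {
        hashValue *= -1; // hash may be negative; flip it as the patch does
    }
    return (int) (hashValue % partitionCount); // designated HASH partition
}
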
partitionManagerFactory = PartitionManagerFactory.getInstance(); - PartitionProperty property = Catalog.getInstance().getSnapshot().getAllocSnapshot().getPartitionProperty( catalogTable.id ); + PartitionProperty property = Catalog.getInstance().getSnapshot().alloc().getPartitionProperty( catalogTable.id ); PartitionManager partitionManager = partitionManagerFactory.getPartitionManager( ((TemperaturePartitionProperty) property).getInternalPartitionFunction() ); diff --git a/dbms/src/main/java/org/polypheny/db/processing/AbstractQueryProcessor.java b/dbms/src/main/java/org/polypheny/db/processing/AbstractQueryProcessor.java index acea08eddf..4eeb00b650 100644 --- a/dbms/src/main/java/org/polypheny/db/processing/AbstractQueryProcessor.java +++ b/dbms/src/main/java/org/polypheny/db/processing/AbstractQueryProcessor.java @@ -1327,7 +1327,7 @@ private Map> getAccessedPartitionsPerScan( AlgNode alg, Map< "TableID: {} is partitioned on column: {} - {}", catalogTable.id, catalogTable.partitionProperty.partitionColumnId, - Catalog.getInstance().getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getColumn( catalogTable.partitionProperty.partitionColumnId ).name ); + Catalog.getInstance().getSnapshot().rel().getColumn( catalogTable.partitionProperty.partitionColumnId ).name ); } List identifiedPartitions = new ArrayList<>(); diff --git a/dbms/src/main/java/org/polypheny/db/processing/ConstraintEnforceAttacher.java b/dbms/src/main/java/org/polypheny/db/processing/ConstraintEnforceAttacher.java index ce1d0d41fd..382a2255f9 100644 --- a/dbms/src/main/java/org/polypheny/db/processing/ConstraintEnforceAttacher.java +++ b/dbms/src/main/java/org/polypheny/db/processing/ConstraintEnforceAttacher.java @@ -202,7 +202,7 @@ public static AlgRoot enforceConstraintBeforeQuery( AlgRoot logicalRoot, Stateme final List foreignKeys; final List exportedKeys; table = root.getEntity().unwrap( LogicalTable.class ); - LogicalRelSnapshot snapshot = statement.getTransaction().getSnapshot().getRelSnapshot( table.namespaceId ); + LogicalRelSnapshot snapshot = statement.getTransaction().getSnapshot().rel(); primaryKey = snapshot.getPrimaryKey( table.primaryKey ); constraints = new ArrayList<>( snapshot.getConstraints( table.id ) ); foreignKeys = snapshot.getForeignKeys( table.id ); @@ -330,7 +330,7 @@ public static AlgRoot enforceConstraintBeforeQuery( AlgRoot logicalRoot, Stateme final RexBuilder rexBuilder = root.getCluster().getRexBuilder(); for ( final CatalogForeignKey foreignKey : foreignKeys ) { - final LogicalTable entity = statement.getDataContext().getSnapshot().getRelSnapshot( foreignKey.getNamespaceId() ).getTable( foreignKey.referencedKeyTableId ); + final LogicalTable entity = statement.getDataContext().getSnapshot().rel().getTable( foreignKey.referencedKeyTableId ); final LogicalRelScan scan = LogicalRelScan.create( root.getCluster(), entity ); RexNode joinCondition = rexBuilder.makeLiteral( true ); builder.push( input ); @@ -650,7 +650,7 @@ private boolean testConstraintsValid() { .getSnapshot() .getNamespaces( null ) .stream() - .flatMap( n -> Catalog.getInstance().getSnapshot().getRelSnapshot( n.id ).getTables( , null ).stream() ) + .flatMap( n -> Catalog.getInstance().getSnapshot().rel().getTables( n.id, null ).stream() ) .filter( t -> t.entityType == EntityType.ENTITY && t.getNamespaceType() == NamespaceType.RELATIONAL ) .collect( Collectors.toList() ); Transaction transaction = this.manager.startTransaction( Catalog.defaultUserId, false, "ConstraintEnforcement" ); diff --git 
a/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java b/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java index 35ec13aa33..338c02003d 100644 --- a/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java +++ b/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java @@ -168,14 +168,14 @@ private static LogicalLpgValues getLogicalLpgValues( AlgBuilder builder, PolyGra @Override public void copyData( Transaction transaction, CatalogAdapter store, List columns, List partitionIds ) { Snapshot snapshot = Catalog.getInstance().getSnapshot(); - LogicalRelSnapshot relSnapshot = snapshot.getRelSnapshot( columns.get( 0 ).namespaceId ); + LogicalRelSnapshot relSnapshot = snapshot.rel(); LogicalTable table = relSnapshot.getTable( columns.get( 0 ).tableId ); CatalogPrimaryKey primaryKey = relSnapshot.getPrimaryKey( table.primaryKey ); // Check Lists List targetColumnPlacements = new LinkedList<>(); for ( LogicalColumn logicalColumn : columns ) { - targetColumnPlacements.add( Catalog.getInstance().getSnapshot().getAllocSnapshot().getColumnPlacement( store.id, logicalColumn.id ) ); + targetColumnPlacements.add( Catalog.getInstance().getSnapshot().alloc().getColumnPlacement( store.id, logicalColumn.id ) ); } List selectColumnList = new LinkedList<>( columns ); @@ -190,7 +190,7 @@ public void copyData( Transaction transaction, CatalogAdapter store, List> placementDistribution = new HashMap<>(); - PartitionProperty property = snapshot.getAllocSnapshot().getPartitionProperty( table.id ); + PartitionProperty property = snapshot.alloc().getPartitionProperty( table.id ); if ( property.isPartitioned ) { PartitionManagerFactory partitionManagerFactory = PartitionManagerFactory.getInstance(); PartitionManager partitionManager = partitionManagerFactory.getPartitionManager( property.partitionType ); @@ -209,7 +209,7 @@ public void copyData( Transaction transaction, CatalogAdapter store, List selectColumnList, AlgRoot sourceAl @Override public AlgRoot buildDeleteStatement( Statement statement, List to, long partitionId ) { - PhysicalTable physical = statement.getTransaction().getSnapshot().getPhysicalSnapshot().getPhysicalTable( partitionId ); + PhysicalTable physical = statement.getTransaction().getSnapshot().physical().getPhysicalTable( partitionId ); ModifiableEntity modifiableTable = physical.unwrap( ModifiableEntity.class ); AlgOptCluster cluster = AlgOptCluster.create( @@ -332,7 +332,7 @@ public AlgRoot buildDeleteStatement( Statement statement, List columnNames = new LinkedList<>(); List values = new LinkedList<>(); for ( CatalogColumnPlacement ccp : to ) { - LogicalColumn logicalColumn = Catalog.getInstance().getSnapshot().getRelSnapshot( ccp.namespaceId ).getColumn( ccp.columnId ); + LogicalColumn logicalColumn = Catalog.getInstance().getSnapshot().rel().getColumn( ccp.columnId ); columnNames.add( ccp.getLogicalColumnName() ); values.add( new RexDynamicParam( logicalColumn.getAlgDataType( typeFactory ), (int) logicalColumn.id ) ); } @@ -356,7 +356,7 @@ public AlgRoot buildDeleteStatement( Statement statement, List to, long partitionId ) { - PhysicalTable physical = statement.getTransaction().getSnapshot().getPhysicalSnapshot().getPhysicalTable( partitionId ); + PhysicalTable physical = statement.getTransaction().getSnapshot().physical().getPhysicalTable( partitionId ); ModifiableEntity modifiableTable = physical.unwrap( ModifiableEntity.class ); AlgOptCluster cluster = AlgOptCluster.create( @@ -371,7 +371,7 @@ public AlgRoot buildInsertStatement( 
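
// buildInsertStatement and buildDeleteStatement below both assemble the target
// row as one dynamic parameter per column placement, indexed by the logical
// column id so source and target rows line up. A sketch of that shared step,
// assuming the catalog and rex types referenced in this patch.
void collectDynamicParams( List<CatalogColumnPlacement> placements, AlgDataTypeFactory typeFactory,
        List<String> columnNames, List<RexNode> values ) {
    for ( CatalogColumnPlacement ccp : placements ) {
        LogicalColumn column = Catalog.getInstance().getSnapshot().rel().getColumn( ccp.columnId );
        columnNames.add( ccp.getLogicalColumnName() );
        // Parameter index reuses the logical column id, matching the patch's convention
        values.add( new RexDynamicParam( column.getAlgDataType( typeFactory ), (int) column.id ) );
    }
}
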
Statement statement, List columnNames = new LinkedList<>(); List values = new LinkedList<>(); for ( CatalogColumnPlacement ccp : placements ) { - LogicalColumn logicalColumn = Catalog.getInstance().getSnapshot().getRelSnapshot( ccp.namespaceId ).getColumn( ccp.columnId ); + LogicalColumn logicalColumn = Catalog.getInstance().getSnapshot().rel().getColumn( ccp.columnId ); columnNames.add( ccp.getLogicalColumnName() ); values.add( new RexDynamicParam( logicalColumn.getAlgDataType( typeFactory ), (int) logicalColumn.id ) ); } @@ -393,7 +393,7 @@ public AlgRoot buildInsertStatement( Statement statement, List to, long partitionId ) { - PhysicalTable physical = statement.getTransaction().getSnapshot().getPhysicalSnapshot().getPhysicalTable( partitionId ); + PhysicalTable physical = statement.getTransaction().getSnapshot().physical().getPhysicalTable( partitionId ); ModifiableEntity modifiableTable = physical.unwrap( ModifiableEntity.class ); AlgOptCluster cluster = AlgOptCluster.create( @@ -406,11 +406,11 @@ private AlgRoot buildUpdateStatement( Statement statement, List selectSourcePlacements( LogicalTable Snapshot snapshot = catalog.getSnapshot(); long adapterIdWithMostPlacements = -1; int numOfPlacements = 0; - for ( Entry> entry : snapshot.getAllocSnapshot().getColumnPlacementsByAdapter( table.id ).entrySet() ) { + for ( Entry> entry : snapshot.alloc().getColumnPlacementsByAdapter( table.id ).entrySet() ) { if ( entry.getKey() != excludingAdapterId && entry.getValue().size() > numOfPlacements ) { adapterIdWithMostPlacements = entry.getKey(); numOfPlacements = entry.getValue().size(); @@ -486,12 +486,12 @@ public static List selectSourcePlacements( LogicalTable // Take the adapter with most placements as base and add missing column placements List placementList = new LinkedList<>(); - for ( LogicalColumn column : snapshot.getRelSnapshot( table.namespaceId ).getColumns( table.id ) ) { + for ( LogicalColumn column : snapshot.rel().getColumns( table.id ) ) { if ( columnIds.contains( column.id ) ) { - if ( snapshot.getAllocSnapshot().getDataPlacement( adapterIdWithMostPlacements, table.id ).columnPlacementsOnAdapter.contains( column.id ) ) { - placementList.add( snapshot.getAllocSnapshot().getColumnPlacement( adapterIdWithMostPlacements, column.id ) ); + if ( snapshot.alloc().getDataPlacement( adapterIdWithMostPlacements, table.id ).columnPlacementsOnAdapter.contains( column.id ) ) { + placementList.add( snapshot.alloc().getColumnPlacement( adapterIdWithMostPlacements, column.id ) ); } else { - for ( CatalogColumnPlacement placement : snapshot.getAllocSnapshot().getColumnPlacements( column.id ) ) { + for ( CatalogColumnPlacement placement : snapshot.alloc().getColumnPlacements( column.id ) ) { if ( placement.adapterId != excludingAdapterId ) { placementList.add( placement ); break; @@ -519,8 +519,8 @@ public static List selectSourcePlacements( LogicalTable */ @Override public void copySelectiveData( Transaction transaction, CatalogAdapter store, LogicalTable sourceTable, LogicalTable targetTable, List columns, Map> placementDistribution, List targetPartitionIds ) { - CatalogPrimaryKey sourcePrimaryKey = Catalog.getInstance().getSnapshot().getRelSnapshot( sourceTable.namespaceId ).getPrimaryKey( sourceTable.primaryKey ); - AllocSnapshot snapshot = Catalog.getInstance().getSnapshot().getAllocSnapshot(); + CatalogPrimaryKey sourcePrimaryKey = Catalog.getInstance().getSnapshot().rel().getPrimaryKey( sourceTable.primaryKey ); + AllocSnapshot snapshot = Catalog.getInstance().getSnapshot().alloc(); // 
Check Lists List targetColumnPlacements = new LinkedList<>(); @@ -532,7 +532,7 @@ public void copySelectiveData( Transaction transaction, CatalogAdapter store, Lo // Add primary keys to select column list for ( long cid : sourcePrimaryKey.columnIds ) { - LogicalColumn logicalColumn = Catalog.getInstance().getSnapshot().getRelSnapshot( sourceTable.namespaceId ).getColumn( cid ); + LogicalColumn logicalColumn = Catalog.getInstance().getSnapshot().rel().getColumn( cid ); if ( !selectColumnList.contains( logicalColumn ) ) { selectColumnList.add( logicalColumn ); } @@ -622,28 +622,28 @@ public void copyPartitionData( Transaction transaction, CatalogAdapter store, Lo throw new RuntimeException( "Unsupported migration scenario. Table ID mismatch" ); } Snapshot snapshot = Catalog.getInstance().getSnapshot(); - CatalogPrimaryKey primaryKey = snapshot.getRelSnapshot( sourceTable.namespaceId ).getPrimaryKey( sourceTable.primaryKey ); + CatalogPrimaryKey primaryKey = snapshot.rel().getPrimaryKey( sourceTable.primaryKey ); // Check Lists List targetColumnPlacements = new LinkedList<>(); for ( LogicalColumn logicalColumn : columns ) { - targetColumnPlacements.add( snapshot.getAllocSnapshot().getColumnPlacement( store.id, logicalColumn.id ) ); + targetColumnPlacements.add( snapshot.alloc().getColumnPlacement( store.id, logicalColumn.id ) ); } List selectColumnList = new LinkedList<>( columns ); // Add primary keys to select column list for ( long cid : primaryKey.columnIds ) { - LogicalColumn logicalColumn = snapshot.getRelSnapshot( sourceTable.namespaceId ).getColumn( cid ); + LogicalColumn logicalColumn = snapshot.rel().getColumn( cid ); if ( !selectColumnList.contains( logicalColumn ) ) { selectColumnList.add( logicalColumn ); } } - PartitionProperty targetProperty = snapshot.getAllocSnapshot().getPartitionProperty( targetTable.id ); + PartitionProperty targetProperty = snapshot.alloc().getPartitionProperty( targetTable.id ); // Add partition columns to select column list long partitionColumnId = targetProperty.partitionColumnId; - LogicalColumn partitionColumn = snapshot.getRelSnapshot( sourceTable.namespaceId ).getColumn( partitionColumnId ); + LogicalColumn partitionColumn = snapshot.rel().getColumn( partitionColumnId ); if ( !selectColumnList.contains( partitionColumn ) ) { selectColumnList.add( partitionColumn ); } @@ -653,7 +653,7 @@ public void copyPartitionData( Transaction transaction, CatalogAdapter store, Lo //We need a columnPlacement for every partition Map> placementDistribution = new HashMap<>(); - PartitionProperty sourceProperty = snapshot.getAllocSnapshot().getPartitionProperty( sourceTable.id ); + PartitionProperty sourceProperty = snapshot.alloc().getPartitionProperty( sourceTable.id ); placementDistribution.put( sourceProperty.partitionIds.get( 0 ), selectSourcePlacements( sourceTable, selectColumnList, -1 ) ); Statement sourceStatement = transaction.createStatement(); @@ -667,7 +667,7 @@ public void copyPartitionData( Transaction transaction, CatalogAdapter store, Lo Map targetAlgs = new HashMap<>(); AlgRoot sourceAlg = getSourceIterator( sourceStatement, placementDistribution ); - if ( Catalog.getInstance().getSnapshot().getAllocSnapshot().getColumnPlacementsOnAdapterPerTable( store.id, sourceTable.id ).size() == columns.size() ) { + if ( Catalog.getInstance().getSnapshot().alloc().getColumnPlacementsOnAdapterPerTable( store.id, sourceTable.id ).size() == columns.size() ) { // There have been no placements for this table on this store before. 
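
// copyPartitionData picks its DML flavor from the placement count: if every
// column placement on the target store belongs to this copy, the store held no
// prior rows and plain inserts suffice; otherwise existing rows are updated in
// place. A sketch of that branch, with the accessors taken from this patch and
// the build helpers bound as in the surrounding code.
AlgRoot chooseTargetDml( Snapshot snapshot, CatalogAdapter store, LogicalTable sourceTable,
        List<LogicalColumn> columns, Statement targetStatement,
        List<CatalogColumnPlacement> targetColumnPlacements, long partitionId ) {
    boolean freshPlacement = snapshot.alloc()
            .getColumnPlacementsOnAdapterPerTable( store.id, sourceTable.id ).size() == columns.size();
    return freshPlacement
            ? buildInsertStatement( targetStatement, targetColumnPlacements, partitionId )
            : buildUpdateStatement( targetStatement, targetColumnPlacements, partitionId );
}
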
Build insert statement targetPartitionIds.forEach( id -> targetAlgs.put( id, buildInsertStatement( targetStatements.get( id ), targetColumnPlacements, id ) ) ); } else { diff --git a/dbms/src/main/java/org/polypheny/db/routing/UiRoutingPageUtil.java b/dbms/src/main/java/org/polypheny/db/routing/UiRoutingPageUtil.java index e35514f608..c25961fd0e 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/UiRoutingPageUtil.java +++ b/dbms/src/main/java/org/polypheny/db/routing/UiRoutingPageUtil.java @@ -98,20 +98,20 @@ private static void addSelectedAdapterTable( InformationManager queryAnalyzer, P if ( proposedRoutingPlan.getPhysicalPlacementsOfPartitions() != null ) { proposedRoutingPlan.getPhysicalPlacementsOfPartitions().forEach( ( k, v ) -> { - CatalogPartition catalogPartition = snapshot.getAllocSnapshot().getPartition( k ); + CatalogPartition catalogPartition = snapshot.alloc().getPartition( k ); LogicalTable catalogTable = Catalog.getInstance().getSnapshot().getLogicalEntity( catalogPartition.tableId ).unwrap( LogicalTable.class ); - CatalogPartitionGroup catalogPartitionGroup = snapshot.getAllocSnapshot().getPartitionGroup( catalogPartition.partitionGroupId ); + CatalogPartitionGroup catalogPartitionGroup = snapshot.alloc().getPartitionGroup( catalogPartition.partitionGroupId ); v.forEach( p -> { - CatalogColumnPlacement catalogColumnPlacement = snapshot.getAllocSnapshot().getColumnPlacement( p.left, p.right ); - CatalogPartitionPlacement catalogPartitionPlacement = snapshot.getAllocSnapshot().getPartitionPlacement( p.left, k ); - LogicalColumn logicalColumn = snapshot.getRelSnapshot( catalogTable.namespaceId ).getColumn( catalogColumnPlacement.columnId ); + CatalogColumnPlacement catalogColumnPlacement = snapshot.alloc().getColumnPlacement( p.left, p.right ); + CatalogPartitionPlacement catalogPartitionPlacement = snapshot.alloc().getPartitionPlacement( p.left, k ); + LogicalColumn logicalColumn = snapshot.rel().getColumn( catalogColumnPlacement.columnId ); table.addRow( snapshot.getNamespace( catalogTable.namespaceId ) + "." + catalogTable.name, logicalColumn.name, catalogPartitionGroup.partitionGroupName + " --> " + catalogPartition.id, catalogPartitionPlacement.adapterUniqueName, - catalogColumnPlacement.physicalSchemaName + "." + catalogPartitionPlacement.physicalTableName + "." + catalogColumnPlacement.physicalColumnName ); + /*catalogColumnPlacement.physicalSchemaName + "." +*/ catalogPartitionPlacement.physicalTableName /*+ "." 
+ catalogColumnPlacement.physicalColumnName */ ); } ); } ); } diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/AbstractDqlRouter.java b/dbms/src/main/java/org/polypheny/db/routing/routers/AbstractDqlRouter.java index 9b8f8e1c03..611dbe7e5f 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/AbstractDqlRouter.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/AbstractDqlRouter.java @@ -224,12 +224,12 @@ protected List buildSelect( AlgNode node, List 1 ) { + if ( snapshot.alloc().getAllocationsFromLogical( catalogTable.id ).size() > 1 ) { return handleVerticalPartitioningOrReplication( node, catalogTable, statement, logicalTable, builders, cluster, queryInformation ); } return handleNonePartitioning( node, catalogTable, statement, builders, cluster, queryInformation ); diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java b/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java index 7fb833996d..2dfabb93eb 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java @@ -112,7 +112,7 @@ protected static Map> selectPlacement( Logica // Find the adapter with the most column placements long adapterIdWithMostPlacements = -1; int numOfPlacements = 0; - for ( Entry> entry : snapshot.getAllocSnapshot().getColumnPlacementsByAdapter( table.id ).entrySet() ) { + for ( Entry> entry : snapshot.alloc().getColumnPlacementsByAdapter( table.id ).entrySet() ) { if ( entry.getValue().size() > numOfPlacements ) { adapterIdWithMostPlacements = entry.getKey(); numOfPlacements = entry.getValue().size(); @@ -121,12 +121,12 @@ protected static Map> selectPlacement( Logica // Take the adapter with most placements as base and add missing column placements List placementList = new LinkedList<>(); - for ( LogicalColumn column : snapshot.getRelSnapshot( table.namespaceId ).getColumns( table.id ) ) { - placementList.add( snapshot.getAllocSnapshot().getColumnPlacements( column.id ).get( 0 ) ); + for ( LogicalColumn column : snapshot.rel().getColumns( table.id ) ) { + placementList.add( snapshot.alloc().getColumnPlacements( column.id ).get( 0 ) ); } return new HashMap<>() {{ - List allocs = snapshot.getAllocSnapshot().getAllocationsFromLogical( table.id ); + List allocs = snapshot.alloc().getAllocationsFromLogical( table.id ); put( allocs.get( 0 ).id, placementList ); }}; } @@ -171,7 +171,7 @@ public RoutedAlgBuilder handleScan( Statement statement, long allocId ) { - List physicals = snapshot.getPhysicalSnapshot().fromAlloc( allocId ); + List physicals = snapshot.physical().fromAlloc( allocId ); PhysicalEntity physical = physicals.get( 0 ); AlgNode node = builder.scan( physical ).build(); @@ -283,7 +283,7 @@ public AlgNode buildJoinedScan( Statement statement, AlgOptCluster cluster, List // List ccps = placementsByAdapter.values().iterator().next(); // CatalogColumnPlacement ccp = ccps.get( 0 ); // CatalogPartitionPlacement cpp = catalog.getPartitionPlacement( ccp.adapterId, partitionId ); - partitionId = snapshot.getAllocSnapshot().getAllocation( partitionId, currentPlacements.get( 0 ).tableId ).id; + partitionId = snapshot.alloc().getAllocation( partitionId, currentPlacements.get( 0 ).tableId ).id; builder = handleScan( builder, @@ -296,7 +296,7 @@ public AlgNode buildJoinedScan( Statement statement, AlgOptCluster cluster, List // We need to join placements on different adapters // Get primary key - LogicalRelSnapshot relSnapshot = 
snapshot.getRelSnapshot( currentPlacements.get( 0 ).namespaceId ); + LogicalRelSnapshot relSnapshot = snapshot.rel(); long pkid = relSnapshot.getTable( currentPlacements.get( 0 ).tableId ).primaryKey; List pkColumnIds = relSnapshot.getPrimaryKey( pkid ).columnIds; List pkColumns = new LinkedList<>(); @@ -307,7 +307,7 @@ public AlgNode buildJoinedScan( Statement statement, AlgOptCluster cluster, List // Add primary key for ( Entry> entry : placementsByAdapter.entrySet() ) { for ( LogicalColumn pkColumn : pkColumns ) { - CatalogColumnPlacement pkPlacement = Catalog.getInstance().getSnapshot().getAllocSnapshot().getColumnPlacements( pkColumn.id ).get( 0 ); + CatalogColumnPlacement pkPlacement = Catalog.getInstance().getSnapshot().alloc().getColumnPlacements( pkColumn.id ).get( 0 ); if ( !entry.getValue().contains( pkPlacement ) ) { entry.getValue().add( pkPlacement ); } @@ -318,7 +318,7 @@ public AlgNode buildJoinedScan( Statement statement, AlgOptCluster cluster, List boolean first = true; for ( List ccps : placementsByAdapter.values() ) { CatalogColumnPlacement ccp = ccps.get( 0 ); - CatalogPartitionPlacement cpp = Catalog.getInstance().getSnapshot().getAllocSnapshot().getPartitionPlacement( ccp.adapterId, partitionId ); + CatalogPartitionPlacement cpp = Catalog.getInstance().getSnapshot().alloc().getPartitionPlacement( ccp.adapterId, partitionId ); handleScan( builder, @@ -387,7 +387,7 @@ public AlgNode buildJoinedScan( Statement statement, AlgOptCluster cluster, List private void buildFinalProject( RoutedAlgBuilder builder, AllocationTable entity ) { List rexNodes = new ArrayList<>(); /*List placementList = currentPlacements.stream() - .map( col -> snapshot.getRelSnapshot( currentPlacements.get( 0 ).namespaceId ).getColumn( col.columnId ) ) + .map( col -> snapshot.rel().getColumn( col.columnId ) ) .sorted( Comparator.comparingInt( col -> col.position ) ) .collect( Collectors.toList() ); for ( LogicalColumn logicalColumn : placementList ) { @@ -417,13 +417,13 @@ public AlgNode handleGraphScan( LogicalLpgScan alg, Statement statement, @Nullab List scans = new ArrayList<>(); - List placements = snapshot.getAllocSnapshot().getGraphPlacements( catalogGraph.id ).stream().map( p -> p.adapterId ).collect( Collectors.toList() ); + List placements = snapshot.alloc().getGraphPlacements( catalogGraph.id ).stream().map( p -> p.adapterId ).collect( Collectors.toList() ); if ( placementId != null ) { placements = List.of( placementId ); } for ( long adapterId : placements ) { - PhysicalGraph graph = snapshot.getPhysicalSnapshot().getPhysicalGraph( catalogGraph.id, adapterId ); + PhysicalGraph graph = snapshot.physical().getPhysicalGraph( catalogGraph.id, adapterId ); if ( !(graph instanceof TranslatableEntity) ) { // needs substitution later on @@ -445,7 +445,7 @@ public AlgNode handleGraphScan( LogicalLpgScan alg, Statement statement, @Nullab private AlgNode handleGraphOnRelational( LogicalLpgScan alg, CatalogNamespace namespace, Statement statement, Long placementId ) { AlgOptCluster cluster = alg.getCluster(); - List tables = snapshot.getRelSnapshot( namespace.id ).getTables( , null ); + List tables = snapshot.rel().getTables( namespace.id, null ); List> scans = tables.stream() .map( t -> Pair.of( t.name, buildJoinedScan( statement, cluster, null ) ) ) .collect( Collectors.toList() ); @@ -459,7 +459,7 @@ private AlgNode handleGraphOnDocument( LogicalLpgScan alg, CatalogNamespace na
Statement statement, Long placementId ) { AlgOptCluster cluster = alg.getCluster(); - List collections = snapshot.getDocSnapshot( namespace.id ).getCollections( null ); + List collections = snapshot.doc().getCollections( namespace.id, null ); List> scans = collections.stream() .map( t -> { RoutedAlgBuilder algBuilder = RoutedAlgBuilder.create( statement, alg.getCluster() ); @@ -542,7 +542,7 @@ protected RoutedAlgBuilder handleDocumentScan( DocumentScan alg, Statement st List scans = new ArrayList<>(); - List placements = snapshot.getAllocSnapshot().getCollectionPlacements( collection.id ).stream().map( p -> p.adapterId ).collect( Collectors.toList() ); + List placements = snapshot.alloc().getCollectionPlacements( collection.id ).stream().map( p -> p.adapterId ).collect( Collectors.toList() ); if ( adapterId != null ) { placements = List.of( adapterId ); } @@ -559,7 +559,7 @@ protected RoutedAlgBuilder handleDocumentScan( DocumentScan alg, Statement st // CatalogCollectionPlacement placement = catalog.getAllocDoc( alg.entity ).getCollectionPlacement( collection.id, placementId ); // String namespaceName = PolySchemaBuilder.buildAdapterSchemaName( adapter.uniqueName, collection.getNamespaceName(), placement.physicalNamespaceName ); // String collectionName = collection.name + "_" + placement.id; - PhysicalTable collectionTable = snapshot.getPhysicalSnapshot().getPhysicalTable( collection.id, adapterId ); + PhysicalTable collectionTable = snapshot.physical().getPhysicalTable( collection.id, adapterId ); // we might previously have pushed the non-native transformer builder.clear(); return builder.push( LogicalDocumentScan.create( alg.getCluster(), collectionTable ) ); @@ -589,7 +589,7 @@ private RoutedAlgBuilder handleTransformerDocScan( DocumentScan alg, Statemen @NotNull private RoutedAlgBuilder handleDocumentOnRelational( DocumentScan node, Long adapterId, Statement statement, RoutedAlgBuilder builder ) { - List columns = statement.getTransaction().getSnapshot().getRelSnapshot( node.entity.namespaceId ).getColumns( node.entity.id ); + List columns = statement.getTransaction().getSnapshot().rel().getColumns( node.entity.id ); AlgTraitSet out = node.getTraitSet().replace( ModelTrait.RELATIONAL ); CatalogEntity subTable = getSubstitutionTable( statement, node.entity.id, columns.get( 0 ).id, adapterId ); builder.scan( subTable ); diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/CachedPlanRouter.java b/dbms/src/main/java/org/polypheny/db/routing/routers/CachedPlanRouter.java index ca3579f6c1..fc3c970afa 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/CachedPlanRouter.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/CachedPlanRouter.java @@ -63,13 +63,13 @@ private RoutedAlgBuilder buildCachedSelect( AlgNode node, RoutedAlgBuilder build if ( node instanceof LogicalRelScan && node.getEntity() != null ) { LogicalTable catalogTable = node.getEntity().unwrap( LogicalTable.class ); - PartitionProperty property = snapshot.getAllocSnapshot().getPartitionProperty( catalogTable.id ); + PartitionProperty property = snapshot.alloc().getPartitionProperty( catalogTable.id ); List partitionIds = property.partitionIds; Map> placement = new HashMap<>(); for ( long partition : partitionIds ) { if ( cachedPlan.physicalPlacementsOfPartitions.get( partition ) != null ) { List colPlacements = cachedPlan.physicalPlacementsOfPartitions.get( partition ).stream() - .map( placementInfo -> catalog.getSnapshot().getAllocSnapshot().getColumnPlacement( placementInfo.left, 
placementInfo.right ) ) + .map( placementInfo -> catalog.getSnapshot().alloc().getColumnPlacement( placementInfo.left, placementInfo.right ) ) .collect( Collectors.toList() ); placement.put( partition, colPlacements ); } diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java b/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java index c91ff143a8..5925f6ddf5 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java @@ -149,20 +149,20 @@ public AlgNode routeDml( LogicalRelModify modify, Statement statement ) { } long pkid = catalogTable.primaryKey; - List pkColumnIds = snapshot.getRelSnapshot( modify.entity.namespaceId ).getPrimaryKey( pkid ).columnIds; - LogicalColumn pkColumn = snapshot.getRelSnapshot( modify.entity.namespaceId ).getColumn( pkColumnIds.get( 0 ) ); + List pkColumnIds = snapshot.rel().getPrimaryKey( pkid ).columnIds; + LogicalColumn pkColumn = snapshot.rel().getColumn( pkColumnIds.get( 0 ) ); // Essentially gets a list of all stores where this table resides - List pkPlacements = snapshot.getAllocSnapshot().getColumnPlacements( pkColumn.id ); - PartitionProperty property = snapshot.getAllocSnapshot().getPartitionProperty( catalogTable.id ); + List pkPlacements = snapshot.alloc().getColumnPlacements( pkColumn.id ); + PartitionProperty property = snapshot.alloc().getPartitionProperty( catalogTable.id ); if ( property.isPartitioned && log.isDebugEnabled() ) { log.debug( "\nListing all relevant stores for table: '{}' and all partitions: {}", catalogTable.name, property.partitionGroupIds ); for ( CatalogColumnPlacement dataPlacement : pkPlacements ) { log.debug( "\t\t -> '{}' {}\t{}", dataPlacement.adapterId, - snapshot.getAllocSnapshot().getPartitionGroupsOnDataPlacement( dataPlacement.adapterId, dataPlacement.tableId ), - snapshot.getAllocSnapshot().getPartitionGroupsIndexOnDataPlacement( dataPlacement.adapterId, dataPlacement.tableId ) ); + snapshot.alloc().getPartitionGroupsOnDataPlacement( dataPlacement.adapterId, dataPlacement.tableId ), + snapshot.alloc().getPartitionGroupsIndexOnDataPlacement( dataPlacement.adapterId, dataPlacement.tableId ) ); } } @@ -181,12 +181,12 @@ public AlgNode routeDml( LogicalRelModify modify, Statement statement ) { for ( CatalogColumnPlacement pkPlacement : pkPlacements ) { // Get placements on store - List placementsOnAdapter = snapshot.getAllocSnapshot().getColumnPlacementsOnAdapterPerTable( pkPlacement.adapterId, catalogTable.id ); + List placementsOnAdapter = snapshot.alloc().getColumnPlacementsOnAdapterPerTable( pkPlacement.adapterId, catalogTable.id ); // If this is an update, check whether we need to execute on this store at all List updateColumnList = modify.getUpdateColumnList(); List sourceExpressionList = modify.getSourceExpressionList(); - List columns = snapshot.getRelSnapshot( catalogTable.namespaceId ).getColumns( catalogTable.id ); + List columns = snapshot.rel().getColumns( catalogTable.id ); if ( placementsOnAdapter.size() != columns.size() ) { if ( modify.getOperation() == Modify.Operation.UPDATE ) { @@ -197,8 +197,8 @@ public AlgNode routeDml( LogicalRelModify modify, Statement statement ) { while ( updateColumnListIterator.hasNext() ) { String columnName = updateColumnListIterator.next(); sourceExpressionListIterator.next(); - LogicalColumn logicalColumn = snapshot.getRelSnapshot( catalogTable.namespaceId ).getColumn( catalogTable.id, columnName ); - if ( 
!snapshot.getAllocSnapshot().checkIfExistsColumnPlacement( pkPlacement.adapterId, logicalColumn.id ) ) { + LogicalColumn logicalColumn = snapshot.rel().getColumn( catalogTable.id, columnName ); + if ( !snapshot.alloc().checkIfExistsColumnPlacement( pkPlacement.adapterId, logicalColumn.id ) ) { updateColumnListIterator.remove(); sourceExpressionListIterator.remove(); } @@ -260,7 +260,7 @@ public AlgNode visit( LogicalFilter filter ) { int index = 0; for ( String cn : updateColumnList ) { - if ( snapshot.getRelSnapshot( catalogTable.namespaceId ).getColumn( catalogTable.id, cn ).id == property.partitionColumnId ) { + if ( snapshot.rel().getColumn( catalogTable.id, cn ).id == property.partitionColumnId ) { if ( log.isDebugEnabled() ) { log.debug( " UPDATE: Found PartitionColumnID Match: '{}' at index: {}", property.partitionColumnId, index ); } @@ -370,7 +370,7 @@ else if ( identifiedPartitionForSetValue != -1 ) { for ( Map.Entry>> partitionMapping : tuplesOnPartition.entrySet() ) { Long currentPartitionId = partitionMapping.getKey(); - if ( !snapshot.getAllocSnapshot().getPartitionsOnDataPlacement( pkPlacement.adapterId, catalogTable.id ).contains( currentPartitionId ) ) { + if ( !snapshot.alloc().getPartitionsOnDataPlacement( pkPlacement.adapterId, catalogTable.id ).contains( currentPartitionId ) ) { continue; } @@ -386,13 +386,13 @@ else if ( identifiedPartitionForSetValue != -1 ) { RoutedAlgBuilder.create( statement, cluster ), catalogTable, placementsOnAdapter, - snapshot.getAllocSnapshot().getPartitionPlacement( pkPlacement.adapterId, currentPartitionId ), + snapshot.alloc().getPartitionPlacement( pkPlacement.adapterId, currentPartitionId ), statement, cluster, true, statement.getDataContext().getParameterValues() ).build(); - PhysicalTable physical = snapshot.getPhysicalSnapshot().getPhysicalTable( currentPartitionId ); + PhysicalTable physical = snapshot.physical().getPhysicalTable( currentPartitionId ); ModifiableEntity modifiableTable = physical.unwrap( ModifiableEntity.class ); // Build DML @@ -414,7 +414,7 @@ else if ( identifiedPartitionForSetValue != -1 ) { } else if ( modify.getInput() instanceof LogicalProject && ((LogicalProject) modify.getInput()).getInput() instanceof LogicalValues ) { - String partitionColumnName = snapshot.getRelSnapshot( catalogTable.namespaceId ).getColumn( property.partitionColumnId ).name; + String partitionColumnName = snapshot.rel().getColumn( property.partitionColumnId ).name; List fieldNames = modify.getInput().getRowType().getFieldNames(); LogicalRelModify ltm = modify; @@ -443,7 +443,7 @@ else if ( identifiedPartitionForSetValue != -1 ) { tempPartitionId = partitionManager.getTargetPartitionId( catalogTable, currentRow.get( partitionValueIndex ).toString() ); - if ( !snapshot.getAllocSnapshot().getPartitionsOnDataPlacement( pkPlacement.adapterId, catalogTable.id ).contains( tempPartitionId ) ) { + if ( !snapshot.alloc().getPartitionsOnDataPlacement( pkPlacement.adapterId, catalogTable.id ).contains( tempPartitionId ) ) { continue; } @@ -469,13 +469,13 @@ else if ( identifiedPartitionForSetValue != -1 ) { RoutedAlgBuilder.create( statement, cluster ), catalogTable, placementsOnAdapter, - snapshot.getAllocSnapshot().getPartitionPlacement( pkPlacement.adapterId, entry.getKey() ), + snapshot.alloc().getPartitionPlacement( pkPlacement.adapterId, entry.getKey() ), statement, cluster, false, entry.getValue() ).build(); - PhysicalTable physical = snapshot.getPhysicalSnapshot().getPhysicalTable( entry.getKey() ); + PhysicalTable physical = 
snapshot.physical().getPhysicalTable( entry.getKey() ); ModifiableEntity modifiableTable = physical.unwrap( ModifiableEntity.class ); // Build DML @@ -515,8 +515,8 @@ else if ( identifiedPartitionForSetValue != -1 ) { } if ( log.isDebugEnabled() ) { - String partitionColumnName = snapshot.getRelSnapshot( catalogTable.namespaceId ).getColumn( property.partitionColumnId ).name; - String partitionName = snapshot.getAllocSnapshot().getPartitionGroup( identPart ).partitionGroupName; + String partitionColumnName = snapshot.rel().getColumn( property.partitionColumnId ).name; + String partitionName = snapshot.alloc().getPartitionGroup( identPart ).partitionGroupName; log.debug( "INSERT: partitionColumn-value: '{}' should be put on partition: {} ({}), which is partitioned with column {}", partitionValue, identPart, partitionName, partitionColumnName ); } @@ -552,11 +552,11 @@ else if ( identifiedPartitionForSetValue != -1 ) { for ( long partitionId : accessedPartitionList ) { - if ( !snapshot.getAllocSnapshot().getPartitionsOnDataPlacement( pkPlacement.adapterId, catalogTable.id ).contains( partitionId ) ) { + if ( !snapshot.alloc().getPartitionsOnDataPlacement( pkPlacement.adapterId, catalogTable.id ).contains( partitionId ) ) { continue; } - PhysicalTable physical = snapshot.getPhysicalSnapshot().getPhysicalTable( partitionId ); + PhysicalTable physical = snapshot.physical().getPhysicalTable( partitionId ); // Build DML Modify adjustedModify; @@ -565,7 +565,7 @@ else if ( identifiedPartitionForSetValue != -1 ) { RoutedAlgBuilder.create( statement, cluster ), catalogTable, placementsOnAdapter, - snapshot.getAllocSnapshot().getPartitionPlacement( pkPlacement.adapterId, partitionId ), + snapshot.alloc().getPartitionPlacement( pkPlacement.adapterId, partitionId ), statement, cluster, false, @@ -686,16 +686,16 @@ public AlgNode routeDocumentDml( LogicalDocumentModify alg, Statement statement, List modifies = new ArrayList<>(); - List placements = snapshot.getAllocSnapshot().getCollectionPlacements( collection.id ).stream().map( p -> p.adapterId ).collect( Collectors.toList() ); + List placements = snapshot.alloc().getCollectionPlacements( collection.id ).stream().map( p -> p.adapterId ).collect( Collectors.toList() ); if ( adapterId != null ) { placements = List.of( adapterId ); } for ( long placementId : placements ) { CatalogAdapter adapter = snapshot.getAdapter( placementId ); - CatalogCollectionPlacement placement = snapshot.getAllocSnapshot().getCollectionPlacement( collection.id, placementId ); + CatalogCollectionPlacement placement = snapshot.alloc().getCollectionPlacement( collection.id, placementId ); - PhysicalCollection document = snapshot.getPhysicalSnapshot().getPhysicalCollection( placement.id ); + PhysicalCollection document = snapshot.physical().getPhysicalCollection( placement.id ); if ( !adapter.supportedNamespaces.contains( NamespaceType.DOCUMENT ) ) { // move "slower" updates in front modifies.add( 0, attachRelationalModify( alg, statement, placementId, queryInformation ) ); @@ -726,7 +726,7 @@ public AlgNode routeGraphDml( LogicalLpgModify alg, Statement statement ) { List placements = statement .getTransaction() .getSnapshot() - .getAllocSnapshot() + .alloc() .getCollectionPlacements( catalogGraph.id ).stream().map( c -> c.adapterId ) .collect( Collectors.toList() ); return routeGraphDml( alg, statement, catalogGraph, placements ); @@ -743,9 +743,9 @@ public AlgNode routeGraphDml( LogicalLpgModify alg, Statement statement, Logical for ( long adapterId : placements ) { 
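// For each adapter that holds a placement of this graph, the loop below resolves the
// graph placement and its backing physical graph through the shortened snapshot
// accessors (snapshot.alloc() and snapshot.physical()). A store that cannot hold the
// graph natively yields no physical graph; its modify is rewritten onto the relational
// fallback and queued first, as the slower of the two paths.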
CatalogAdapter adapter = snapshot.getAdapter( adapterId ); - CatalogGraphPlacement graphPlacement = snapshot.getAllocSnapshot().getGraphPlacement( catalogGraph.id, adapterId ); + CatalogGraphPlacement graphPlacement = snapshot.alloc().getGraphPlacement( catalogGraph.id, adapterId ); - PhysicalGraph graph = snapshot.getPhysicalSnapshot().getPhysicalGraph( catalogGraph.id, adapterId ); + PhysicalGraph graph = snapshot.physical().getPhysicalGraph( catalogGraph.id, adapterId ); if ( graph == null ) { // move "slower" updates in front modifies.add( 0, attachRelationalModify( alg, adapterId, statement ) ); @@ -1230,7 +1230,7 @@ private AlgBuilder buildDml( builder = super.handleValues( values, builder ); - List columns = snapshot.getRelSnapshot( catalogTable.namespaceId ).getColumns( catalogTable.id ); + List columns = snapshot.rel().getColumns( catalogTable.id ); if ( columns.size() == placements.size() ) { // full placement, no additional checks required return builder; } else if ( node.getRowType().toString().equals( "RecordType(INTEGER ZERO)" ) ) { @@ -1244,8 +1244,8 @@ private AlgBuilder buildDml( return builder.project( rexNodes ); } } else if ( node instanceof LogicalProject ) { - List columns = snapshot.getRelSnapshot( catalogTable.namespaceId ).getColumns( catalogTable.id ); - PartitionProperty property = snapshot.getAllocSnapshot().getPartitionProperty( catalogTable.id ); + List columns = snapshot.rel().getColumns( catalogTable.id ); + PartitionProperty property = snapshot.alloc().getPartitionProperty( catalogTable.id ); if ( columns.size() == placements.size() ) { // full placement, generic handling is sufficient if ( property.isPartitioned && remapParameterValues ) { // && ((LogicalProject) node).getInput().getRowType().toString().equals( "RecordType(INTEGER ZERO)" ) return remapParameterizedDml( node, builder, statement, parameterValues ); @@ -1277,7 +1277,7 @@ private AlgBuilder buildDml( } } } else if ( node instanceof LogicalFilter ) { - List columns = snapshot.getRelSnapshot( catalogTable.namespaceId ).getColumns( catalogTable.id ); + List columns = snapshot.rel().getColumns( catalogTable.id ); if ( columns.size() != placements.size() ) { // partitioned, check if there is an illegal condition RexCall call = ((RexCall) ((LogicalFilter) node).getCondition()); @@ -1296,23 +1296,23 @@ private AlgBuilder handleSelectFromOtherTable( RoutedAlgBuilder builder, Logical LogicalTable fromTable = catalogTable; // Select from other table snapshot = statement.getDataContext().getSnapshot(); - if ( snapshot.getAllocSnapshot().isPartitioned( fromTable.id ) ) { + if ( snapshot.alloc().isPartitioned( fromTable.id ) ) { throw new UnsupportedOperationException( "DMLs from other partitioned tables are not supported" ); } long pkid = fromTable.primaryKey; - List pkColumnIds = snapshot.getRelSnapshot( catalogTable.namespaceId ).getPrimaryKey( pkid ).columnIds; - LogicalColumn pkColumn = snapshot.getRelSnapshot( catalogTable.namespaceId ).getColumn( pkColumnIds.get( 0 ) ); - List pkPlacements = snapshot.getAllocSnapshot().getColumnPlacements( pkColumn.id ); + List pkColumnIds = snapshot.rel().getPrimaryKey( pkid ).columnIds; + LogicalColumn pkColumn = snapshot.rel().getColumn( pkColumnIds.get( 0 ) ); + List pkPlacements = snapshot.alloc().getColumnPlacements( pkColumn.id ); List nodes = new ArrayList<>(); for ( CatalogColumnPlacement pkPlacement : pkPlacements ) { - snapshot.getAllocSnapshot().getColumnPlacementsOnAdapterPerTable( pkPlacement.adapterId, fromTable.id ); +
snapshot.alloc().getColumnPlacementsOnAdapterPerTable( pkPlacement.adapterId, fromTable.id ); - PartitionProperty property = snapshot.getAllocSnapshot().getPartitionProperty( fromTable.id ); + PartitionProperty property = snapshot.alloc().getPartitionProperty( fromTable.id ); - CatalogPartitionPlacement partition = snapshot.getAllocSnapshot().getPartitionPlacement( pkPlacement.adapterId, property.partitionIds.get( 0 ) ); + CatalogPartitionPlacement partition = snapshot.alloc().getPartitionPlacement( pkPlacement.adapterId, property.partitionIds.get( 0 ) ); nodes.add( super.handleScan( builder, @@ -1355,8 +1355,8 @@ private void dmlConditionCheck( LogicalFilter node, LogicalTable catalogTable, L } else { throw new RuntimeException( "Invalid column name: " + field.getName() ); } - column = snapshot.getRelSnapshot( catalogTable.namespaceId ).getColumn( catalogTable.id, columnName ); - if ( !snapshot.getAllocSnapshot().checkIfExistsColumnPlacement( placements.get( 0 ).adapterId, column.id ) ) { + column = snapshot.rel().getColumn( catalogTable.id, columnName ); + if ( !snapshot.alloc().checkIfExistsColumnPlacement( placements.get( 0 ).adapterId, column.id ) ) { throw new RuntimeException( "Current implementation of vertical partitioning does not allow conditions on partitioned columns. " ); // !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! // TODO: Use indexes diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/FullPlacementQueryRouter.java b/dbms/src/main/java/org/polypheny/db/routing/routers/FullPlacementQueryRouter.java index c113a636c3..f498a91304 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/FullPlacementQueryRouter.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/FullPlacementQueryRouter.java @@ -109,10 +109,10 @@ protected List handleNonePartitioning( List newBuilders = new ArrayList<>(); /*for ( List placementCombination : placements ) { Map> currentPlacementDistribution = new HashMap<>(); - PartitionProperty property = snapshot.getAllocSnapshot().getPartitionProperty( catalogTable.id );*/ + PartitionProperty property = snapshot.alloc().getPartitionProperty( catalogTable.id );*/ //currentPlacementDistribution.put( property.partitionIds.get( 0 ), placementCombination ); - List allocationEntities = snapshot.getAllocSnapshot().getAllocationsFromLogical( catalogTable.id ); + List allocationEntities = snapshot.alloc().getAllocationsFromLogical( catalogTable.id ); for ( RoutedAlgBuilder builder : builders ) { RoutedAlgBuilder newBuilder = RoutedAlgBuilder.createCopy( statement, cluster, builder ); @@ -131,7 +131,7 @@ protected List handleNonePartitioning( protected Collection>> selectPlacementHorizontalPartitioning( AlgNode node, LogicalTable catalogTable, LogicalQueryInformation queryInformation ) { PartitionManagerFactory partitionManagerFactory = PartitionManagerFactory.getInstance(); - PartitionProperty property = snapshot.getAllocSnapshot().getPartitionProperty( catalogTable.id ); + PartitionProperty property = snapshot.alloc().getPartitionProperty( catalogTable.id ); PartitionManager partitionManager = partitionManagerFactory.getPartitionManager( property.partitionType ); // Utilize scanId to retrieve Partitions being accessed @@ -148,7 +148,7 @@ protected Set> selectPlacement( LogicalTable catalo List usedColumns = queryInformation.getAllColumnsPerTable( catalogTable.id ); // Filter for placements by adapters - List adapters = snapshot.getAllocSnapshot().getColumnPlacementsByAdapter( catalogTable.id ).entrySet() + List adapters = 
snapshot.alloc().getColumnPlacementsByAdapter( catalogTable.id ).entrySet() .stream() .filter( elem -> elem.getValue().containsAll( usedColumns ) ) .map( Entry::getKey ) @@ -157,7 +157,7 @@ protected Set> selectPlacement( LogicalTable catalo final Set> result = new HashSet<>(); for ( long adapterId : adapters ) { List placements = usedColumns.stream() - .map( colId -> snapshot.getAllocSnapshot().getColumnPlacement( adapterId, colId ) ) + .map( colId -> snapshot.alloc().getColumnPlacement( adapterId, colId ) ) .collect( Collectors.toList() ); if ( !placements.isEmpty() ) { diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/IcarusRouter.java b/dbms/src/main/java/org/polypheny/db/routing/routers/IcarusRouter.java index d4b6bc7741..93cdb680d6 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/IcarusRouter.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/IcarusRouter.java @@ -72,7 +72,7 @@ protected List handleNonePartitioning( AlgNode node, LogicalTa if ( builders.size() == 1 && builders.get( 0 ).getPhysicalPlacementsOfPartitions().isEmpty() ) { for ( List currentPlacement : placements ) { final Map> currentPlacementDistribution = new HashMap<>(); - PartitionProperty property = snapshot.getAllocSnapshot().getPartitionProperty( catalogTable.id ); + PartitionProperty property = snapshot.alloc().getPartitionProperty( catalogTable.id ); currentPlacementDistribution.put( property.partitionIds.get( 0 ), currentPlacement ); final RoutedAlgBuilder newBuilder = RoutedAlgBuilder.createCopy( statement, cluster, builders.get( 0 ) ); @@ -90,7 +90,7 @@ protected List handleNonePartitioning( AlgNode node, LogicalTa for ( List currentPlacement : placements ) { final Map> currentPlacementDistribution = new HashMap<>(); - PartitionProperty property = snapshot.getAllocSnapshot().getPartitionProperty( catalogTable.id ); + PartitionProperty property = snapshot.alloc().getPartitionProperty( catalogTable.id ); currentPlacementDistribution.put( property.partitionIds.get( 0 ), currentPlacement ); // AdapterId for all col placements same diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/SimpleRouter.java b/dbms/src/main/java/org/polypheny/db/routing/routers/SimpleRouter.java index 342b860c21..4374e02ea1 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/SimpleRouter.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/SimpleRouter.java @@ -56,7 +56,7 @@ protected List handleVerticalPartitioningOrReplication( AlgNod protected List handleNonePartitioning( AlgNode node, LogicalTable catalogTable, Statement statement, List builders, AlgOptCluster cluster, LogicalQueryInformation queryInformation ) { // Get placements and convert into placement distribution // final Map> placements = selectPlacement( catalogTable ); - List entities = snapshot.getAllocSnapshot().getAllocationsFromLogical( catalogTable.id ); + List entities = snapshot.alloc().getAllocationsFromLogical( catalogTable.id ); // Only one builder available // builders.get( 0 ).addPhysicalInfo( placements ); @@ -69,7 +69,7 @@ protected List handleNonePartitioning( AlgNode node, LogicalTa @Override protected List handleHorizontalPartitioning( AlgNode node, LogicalTable catalogTable, Statement statement, LogicalEntity logicalTable, List builders, AlgOptCluster cluster, LogicalQueryInformation queryInformation ) { PartitionManagerFactory partitionManagerFactory = PartitionManagerFactory.getInstance(); - PartitionProperty property = snapshot.getAllocSnapshot().getPartitionProperty( 
catalogTable.id ); + PartitionProperty property = snapshot.alloc().getPartitionProperty( catalogTable.id ); PartitionManager partitionManager = partitionManagerFactory.getPartitionManager( property.partitionType ); // Utilize scanId to retrieve Partitions being accessed diff --git a/dbms/src/main/java/org/polypheny/db/routing/strategies/CreateAllPlacementStrategy.java b/dbms/src/main/java/org/polypheny/db/routing/strategies/CreateAllPlacementStrategy.java index 2cf9d65135..9f8d150cc4 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/strategies/CreateAllPlacementStrategy.java +++ b/dbms/src/main/java/org/polypheny/db/routing/strategies/CreateAllPlacementStrategy.java @@ -35,8 +35,8 @@ public class CreateAllPlacementStrategy implements CreatePlacementStrategy { @Override public List getDataStoresForNewColumn( LogicalColumn addedColumn ) { Snapshot snapshot = Catalog.getInstance().getSnapshot(); - LogicalTable catalogTable = snapshot.getRelSnapshot( addedColumn.namespaceId ).getTable( addedColumn.tableId ); - List dataPlacements = snapshot.getAllocSnapshot().getDataPlacements( catalogTable.id ); + LogicalTable catalogTable = snapshot.rel().getTable( addedColumn.tableId ); + List dataPlacements = snapshot.alloc().getDataPlacements( catalogTable.id ); return dataPlacements.stream() .map( elem -> AdapterManager.getInstance().getStore( elem.adapterId ) ) .collect( Collectors.toList() ); diff --git a/dbms/src/main/java/org/polypheny/db/routing/strategies/CreateSinglePlacementStrategy.java b/dbms/src/main/java/org/polypheny/db/routing/strategies/CreateSinglePlacementStrategy.java index 74120c7da9..bdc256f949 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/strategies/CreateSinglePlacementStrategy.java +++ b/dbms/src/main/java/org/polypheny/db/routing/strategies/CreateSinglePlacementStrategy.java @@ -33,8 +33,8 @@ public class CreateSinglePlacementStrategy implements CreatePlacementStrategy { @Override public List getDataStoresForNewColumn( LogicalColumn addedColumn ) { Snapshot snapshot = Catalog.getInstance().getSnapshot(); - LogicalTable catalogTable = snapshot.getRelSnapshot( addedColumn.namespaceId ).getTable( addedColumn.tableId ); - List dataPlacement = snapshot.getAllocSnapshot().getDataPlacements( catalogTable.id ); + LogicalTable catalogTable = snapshot.rel().getTable( addedColumn.tableId ); + List dataPlacement = snapshot.alloc().getDataPlacements( catalogTable.id ); return ImmutableList.of( AdapterManager.getInstance().getStore( dataPlacement.get( 0 ).adapterId ) ); } diff --git a/dbms/src/main/java/org/polypheny/db/transaction/EntityAccessMap.java b/dbms/src/main/java/org/polypheny/db/transaction/EntityAccessMap.java index 566c0b8ef5..64167164d3 100644 --- a/dbms/src/main/java/org/polypheny/db/transaction/EntityAccessMap.java +++ b/dbms/src/main/java/org/polypheny/db/transaction/EntityAccessMap.java @@ -269,7 +269,7 @@ public void visit( AlgNode p, int ordinal, AlgNode parent ) { relevantPartitions = accessedPartitions.get( p.getId() ); } else if ( table != null ) { if ( table.namespaceType == NamespaceType.RELATIONAL ) { - List allocations = Catalog.getInstance().getSnapshot().getAllocSnapshot().getAllocationsFromLogical( table.id ); + List allocations = Catalog.getInstance().getSnapshot().alloc().getAllocationsFromLogical( table.id ); relevantPartitions = allocations.stream().map( a -> a.id ).collect( Collectors.toList() ); } else { relevantPartitions = List.of(); @@ -325,7 +325,7 @@ private void attachGraph( AlgNode p ) { private void extractWriteConstraints( 
LogicalEntity logicalTable ) { for ( long constraintTable : logicalTable.getConstraintIds() ) { - PartitionProperty property = Catalog.getInstance().getSnapshot().getAllocSnapshot().getPartitionProperty( logicalTable.id ); + PartitionProperty property = Catalog.getInstance().getSnapshot().alloc().getPartitionProperty( logicalTable.id ); for ( long constraintPartitionIds : property.partitionIds ) { EntityIdentifier id = new EntityIdentifier( constraintTable, constraintPartitionIds, NamespaceLevel.ENTITY_LEVEL ); diff --git a/dbms/src/main/java/org/polypheny/db/view/MaterializedViewManagerImpl.java b/dbms/src/main/java/org/polypheny/db/view/MaterializedViewManagerImpl.java index 2831424cc7..c308c21118 100644 --- a/dbms/src/main/java/org/polypheny/db/view/MaterializedViewManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/view/MaterializedViewManagerImpl.java @@ -183,7 +183,7 @@ public void addTables( Transaction transaction, List tableIds ) { if ( tableIds.size() > 1 ) { snapshot = Catalog.getInstance().getSnapshot(); LogicalNamespace namespace = snapshot.getNamespace( tableIds.get( 0 ) ); - LogicalTable catalogTable = snapshot.getRelSnapshot( namespace.id ).getTable( tableIds.get( 1 ) ); + LogicalTable catalogTable = snapshot.rel().getTable( tableIds.get( 1 ) ); long id = catalogTable.id; if ( !catalogTable.getConnectedViews().isEmpty() ) { updateCandidates.put( transaction.getXid(), id ); @@ -213,11 +213,11 @@ public void updateCommittedXid( PolyXid xid ) { */ public void materializedUpdate( Long potentialInteresting ) { Snapshot snapshot = Catalog.getInstance().getSnapshot(); - LogicalTable catalogTable = snapshot.getNamespaces( null ).stream().map( n -> snapshot.getRelSnapshot( n.id ).getTable( potentialInteresting ) ).filter( Objects::nonNull ).findFirst().orElse( null ); + LogicalTable catalogTable = snapshot.getNamespaces( null ).stream().map( n -> snapshot.rel().getTable( potentialInteresting ) ).filter( Objects::nonNull ).findFirst().orElse( null ); List connectedViews = catalogTable.getConnectedViews(); for ( long id : connectedViews ) { - LogicalTable view = snapshot.getRelSnapshot( catalogTable.namespaceId ).getTable( id ); + LogicalTable view = snapshot.rel().getTable( id ); if ( view.entityType == EntityType.MATERIALIZED_VIEW ) { MaterializedCriteria materializedCriteria = materializedInfo.get( view.id ); if ( materializedCriteria.getCriteriaType() == CriteriaType.UPDATE ) { @@ -312,16 +312,16 @@ public void addData( Transaction transaction, List stores, Map columnPlacements = new LinkedList<>(); DataMigrator dataMigrator = transaction.getDataMigrator(); - List dataPlacements = transaction.getSnapshot().getAllocSnapshot().getDataPlacements( materializedView.id ); + List dataPlacements = transaction.getSnapshot().alloc().getDataPlacements( materializedView.id ); for ( CatalogDataPlacement placement : dataPlacements ) { Statement sourceStatement = transaction.createStatement(); prepareSourceRel( sourceStatement, materializedView.getAlgCollation(), algRoot.alg ); Statement targetStatement = transaction.createStatement(); columnPlacements.clear(); - columns.get( placement.adapterId ).forEach( column -> columnPlacements.add( snapshot.getAllocSnapshot().getColumnPlacement( placement.adapterId, column.id ) ) ); + columns.get( placement.adapterId ).forEach( column -> columnPlacements.add( snapshot.alloc().getColumnPlacement( placement.adapterId, column.id ) ) ); // If partitions should be allowed for materialized views this needs to be changed that all partitions are considered 
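// The essence of this patch, shown once as a compact before/after sketch (a sketch
// only -- the identifiers are taken verbatim from this hunk, surrounding declarations
// are elided): every long-form snapshot getter collapses into a short accessor, and
// rel() additionally drops the per-namespace indirection.
//
//   // before this patch
//   snapshot.getAllocSnapshot().getPartitionsOnDataPlacement( placement.adapterId, materializedView.id ).get( 0 );
//   snapshot.getRelSnapshot( catalogMaterializedView.namespaceId ).getColumn( col );
//
//   // after this patch
//   snapshot.alloc().getPartitionsOnDataPlacement( placement.adapterId, materializedView.id ).get( 0 );
//   snapshot.rel().getColumn( col );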
- AlgRoot targetRel = dataMigrator.buildInsertStatement( targetStatement, columnPlacements, snapshot.getAllocSnapshot().getPartitionsOnDataPlacement( placement.adapterId, materializedView.id ).get( 0 ) ); + AlgRoot targetRel = dataMigrator.buildInsertStatement( targetStatement, columnPlacements, snapshot.alloc().getPartitionsOnDataPlacement( placement.adapterId, materializedView.id ).get( 0 ) ); dataMigrator.executeQuery( columns.get( placement.adapterId ), algRoot, sourceStatement, targetStatement, targetRel, true, materializedView.isOrdered() ); } @@ -345,15 +345,15 @@ public void updateData( Transaction transaction, Long materializedId ) { List ids = new ArrayList<>(); if ( snapshot.getLogicalEntity( materializedId ) != null && materializedInfo.containsKey( materializedId ) ) { CatalogMaterializedView catalogMaterializedView = snapshot.getLogicalEntity( materializedId ).unwrap( CatalogMaterializedView.class ); - List dataPlacements = snapshot.getAllocSnapshot().getDataPlacements( catalogMaterializedView.id ); + List dataPlacements = snapshot.alloc().getDataPlacements( catalogMaterializedView.id ); for ( CatalogDataPlacement placement : dataPlacements ) { ids.add( placement.adapterId ); List logicalColumns = new ArrayList<>(); int localAdapterIndex = dataPlacements.indexOf( placement ); - snapshot.getAllocSnapshot().getDataPlacement( dataPlacements.stream().map( p -> p.adapterId ).collect( Collectors.toList() ).get( localAdapterIndex ), catalogMaterializedView.id ) + snapshot.alloc().getDataPlacement( dataPlacements.stream().map( p -> p.adapterId ).collect( Collectors.toList() ).get( localAdapterIndex ), catalogMaterializedView.id ) .columnPlacementsOnAdapter.forEach( col -> - logicalColumns.add( snapshot.getRelSnapshot( catalogMaterializedView.namespaceId ).getColumn( col ) ) ); + logicalColumns.add( snapshot.rel().getColumn( col ) ) ); columns.put( placement.adapterId, logicalColumns ); } @@ -367,7 +367,7 @@ public void updateData( Transaction transaction, Long materializedId ) { columnPlacements.clear(); - columns.get( id ).forEach( column -> columnPlacements.add( snapshot.getAllocSnapshot().getColumnPlacement( id, column.id ) ) ); + columns.get( id ).forEach( column -> columnPlacements.add( snapshot.alloc().getColumnPlacement( id, column.id ) ) ); // Build {@link AlgNode} to build delete Statement from materialized view AlgBuilder deleteAlgBuilder = AlgBuilder.create( deleteStatement ); @@ -382,7 +382,7 @@ public void updateData( Transaction transaction, Long materializedId ) { targetRel = dataMigrator.buildDeleteStatement( targetStatementDelete, columnPlacements, - snapshot.getAllocSnapshot().getPartitionsOnDataPlacement( id, catalogMaterializedView.id ).get( 0 ) ); + snapshot.alloc().getPartitionsOnDataPlacement( id, catalogMaterializedView.id ).get( 0 ) ); dataMigrator.executeQuery( columns.get( id ), AlgRoot.of( deleteRel, Kind.SELECT ), @@ -398,7 +398,7 @@ public void updateData( Transaction transaction, Long materializedId ) { targetRel = dataMigrator.buildInsertStatement( targetStatementInsert, columnPlacements, - snapshot.getAllocSnapshot().getPartitionsOnDataPlacement( id, catalogMaterializedView.id ).get( 0 ) ); + snapshot.alloc().getPartitionsOnDataPlacement( id, catalogMaterializedView.id ).get( 0 ) ); dataMigrator.executeQuery( columns.get( id ), AlgRoot.of( insertRel, Kind.SELECT ), diff --git a/dbms/src/test/java/org/polypheny/db/TestHelper.java b/dbms/src/test/java/org/polypheny/db/TestHelper.java index c06368426b..17c9a727b7 100644 --- 
a/dbms/src/test/java/org/polypheny/db/TestHelper.java +++ b/dbms/src/test/java/org/polypheny/db/TestHelper.java @@ -32,12 +32,15 @@ import java.sql.ResultSet; import java.sql.ResultSetMetaData; import java.sql.SQLException; +import java.sql.Statement; import java.sql.Types; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Properties; import java.util.concurrent.TimeUnit; +import java.util.function.BiConsumer; +import java.util.function.Consumer; import java.util.stream.Collectors; import kong.unirest.HttpRequest; import kong.unirest.HttpResponse; @@ -536,4 +539,38 @@ public void close() throws SQLException { } + + @SafeVarargs + public static void executeSql( SqlBiConsumer... queries ) { + try ( JdbcConnection jdbcConnection = new JdbcConnection( false ) ) { + Connection connection = jdbcConnection.getConnection(); + try ( Statement statement = connection.createStatement() ) { + for ( BiConsumer query : queries ) { + query.accept( connection, statement ); + } + } + } catch ( SQLException e ) { + fail( e.getMessage() ); + throw new RuntimeException( e ); + } + } + + + @FunctionalInterface + public interface SqlBiConsumer extends BiConsumer { + + @Override + default void accept( final C elemC, final T elemT ) { + try { + acceptThrows( elemC, elemT ); + } catch ( final SQLException e ) { + throw new RuntimeException( e ); + } + } + + void acceptThrows( C elemC, T elem ) throws SQLException; + + } + + } diff --git a/dbms/src/test/java/org/polypheny/db/cypher/DdlTest.java b/dbms/src/test/java/org/polypheny/db/cypher/DdlTest.java index 4949b05996..9e3e40ab8f 100644 --- a/dbms/src/test/java/org/polypheny/db/cypher/DdlTest.java +++ b/dbms/src/test/java/org/polypheny/db/cypher/DdlTest.java @@ -68,7 +68,7 @@ public void addPlacementTest() throws SQLException { execute( "CREATE DATABASE " + graphName ); LogicalNamespace namespace = catalog.getSnapshot().getNamespace( graphName ); - LogicalGraph graph = catalog.getSnapshot().getGraphSnapshot( namespace.id ).getGraph( namespace.id ); + LogicalGraph graph = catalog.getSnapshot().graph().getGraph( namespace.id ); assertEquals( 1, graph.getPlacements().size() ); @@ -77,7 +77,7 @@ public void addPlacementTest() throws SQLException { execute( String.format( "CREATE PLACEMENT OF %s ON STORE %s", graphName, "store1" ), graphName ); namespace = catalog.getSnapshot().getNamespace( graphName ); - graph = catalog.getSnapshot().getGraphSnapshot( namespace.id ).getGraph( namespace.id ); + graph = catalog.getSnapshot().graph().getGraph( namespace.id ); assertEquals( 2, graph.getPlacements().size() ); @@ -98,15 +98,15 @@ public void initialPlacementTest() throws SQLException { addStore( "store1" ); execute( String.format( "CREATE DATABASE %s ON STORE %s", graphName, "store1" ) ); - LogicalNamespace namespace = catalog.getSnapshot().getNamespace(graphName); - LogicalGraph graph = catalog.getSnapshot().getGraphSnapshot( namespace.id ).getGraph( namespace.id ); + LogicalNamespace namespace = catalog.getSnapshot().getNamespace( graphName ); + LogicalGraph graph = catalog.getSnapshot().graph().getGraph( namespace.id ); assertEquals( 1, graph.getPlacements().size() ); execute( String.format( "CREATE PLACEMENT OF %s ON STORE %s", graphName, "hsqldb" ), graphName ); - namespace = catalog.getSnapshot().getNamespace(graphName); - graph = catalog.getSnapshot().getGraphSnapshot( namespace.id ).getGraph( namespace.id ); + namespace = catalog.getSnapshot().getNamespace( graphName ); + graph = catalog.getSnapshot().graph().getGraph( 
namespace.id ); assertEquals( 2, graph.getPlacements().size() ); @@ -128,8 +128,8 @@ public void deletePlacementTest() throws SQLException { execute( "CREATE DATABASE " + graphName ); - LogicalNamespace namespace = catalog.getSnapshot().getNamespace(graphName); - LogicalGraph graph = catalog.getSnapshot().getGraphSnapshot( namespace.id ).getGraph( namespace.id ); + LogicalNamespace namespace = catalog.getSnapshot().getNamespace( graphName ); + LogicalGraph graph = catalog.getSnapshot().graph().getGraph( namespace.id ); assertEquals( 1, graph.getPlacements().size() ); @@ -137,8 +137,8 @@ public void deletePlacementTest() throws SQLException { execute( String.format( "CREATE PLACEMENT OF %s ON STORE %s", graphName, "store1" ), graphName ); - namespace = catalog.getSnapshot().getNamespace(graphName); - graph = catalog.getSnapshot().getGraphSnapshot( namespace.id ).getGraph( namespace.id ); + namespace = catalog.getSnapshot().getNamespace( graphName ); + graph = catalog.getSnapshot().graph().getGraph( namespace.id ); assertEquals( 2, graph.getPlacements().size() ); diff --git a/dbms/src/test/java/org/polypheny/db/misc/HorizontalPartitioningTest.java b/dbms/src/test/java/org/polypheny/db/misc/HorizontalPartitioningTest.java index 6275230a91..77d16706a5 100644 --- a/dbms/src/test/java/org/polypheny/db/misc/HorizontalPartitioningTest.java +++ b/dbms/src/test/java/org/polypheny/db/misc/HorizontalPartitioningTest.java @@ -47,6 +47,7 @@ import org.polypheny.db.monitoring.core.MonitoringServiceProvider; import org.polypheny.db.partition.PartitionManager; import org.polypheny.db.partition.PartitionManagerFactory; +import org.polypheny.db.partition.properties.PartitionProperty; import org.polypheny.db.partition.properties.TemperaturePartitionProperty; import org.polypheny.db.util.background.BackgroundTask.TaskSchedulingType; @@ -705,33 +706,35 @@ public void temperaturePartitionTest() throws SQLException { try { LogicalTable table = Catalog.snapshot().rel().getTables( null, new Pattern( "temperaturetest" ) ).get( 0 ); + PartitionProperty partitionProperty = Catalog.snapshot().alloc().getPartitionProperty( table.id ); + // Check if partition properties are correctly set and parsed - Assert.assertEquals( 600, ((TemperaturePartitionProperty) table.partitionProperty).getFrequencyInterval() ); - Assert.assertEquals( 12, ((TemperaturePartitionProperty) table.partitionProperty).getHotAccessPercentageIn() ); - Assert.assertEquals( 14, ((TemperaturePartitionProperty) table.partitionProperty).getHotAccessPercentageOut() ); - Assert.assertEquals( PartitionType.HASH, ((TemperaturePartitionProperty) table.partitionProperty).getInternalPartitionFunction() ); + Assert.assertEquals( 600, ((TemperaturePartitionProperty) partitionProperty).getFrequencyInterval() ); + Assert.assertEquals( 12, ((TemperaturePartitionProperty) partitionProperty).getHotAccessPercentageIn() ); + Assert.assertEquals( 14, ((TemperaturePartitionProperty) partitionProperty).getHotAccessPercentageOut() ); + Assert.assertEquals( PartitionType.HASH, ((TemperaturePartitionProperty) partitionProperty).getInternalPartitionFunction() ); - Assert.assertEquals( 2, table.partitionProperty.getPartitionGroupIds().size() ); - Assert.assertEquals( 20, table.partitionProperty.getPartitionIds().size() ); + Assert.assertEquals( 2, partitionProperty.getPartitionGroupIds().size() ); + Assert.assertEquals( 20, partitionProperty.getPartitionIds().size() ); // Check if initially as many partitionPlacements are created as requested and stored in the partition property
- Assert.assertEquals( table.partitionProperty.getPartitionIds().size(), Catalog.getInstance().getAllPartitionPlacementsByTable( table.id ).size() ); + Assert.assertEquals( partitionProperty.getPartitionIds().size(), Catalog.snapshot().alloc().getAllPartitionPlacementsByTable( table.id ).size() ); // Retrieve partition distribution // Get percentage of tables which can remain in HOT - long numberOfPartitionsInHot = (table.partitionProperty.partitionIds.size() * ((TemperaturePartitionProperty) table.partitionProperty).getHotAccessPercentageIn()) / 100; + long numberOfPartitionsInHot = (partitionProperty.partitionIds.size() * ((TemperaturePartitionProperty) partitionProperty).getHotAccessPercentageIn()) / 100; // These are the tables that can remain in HOT - long allowedTablesInHot = (table.partitionProperty.partitionIds.size() * ((TemperaturePartitionProperty) table.partitionProperty).getHotAccessPercentageOut()) / 100; + long allowedTablesInHot = (partitionProperty.partitionIds.size() * ((TemperaturePartitionProperty) partitionProperty).getHotAccessPercentageOut()) / 100; if ( numberOfPartitionsInHot == 0 ) { numberOfPartitionsInHot = 1; } if ( allowedTablesInHot == 0 ) { allowedTablesInHot = 1; } - long numberOfPartitionsInCold = table.partitionProperty.partitionIds.size() - numberOfPartitionsInHot; + long numberOfPartitionsInCold = partitionProperty.partitionIds.size() - numberOfPartitionsInHot; - List hotPartitions = Catalog.getInstance().getPartitions( ((TemperaturePartitionProperty) table.partitionProperty).getHotPartitionGroupId() ); - List coldPartitions = Catalog.getInstance().getPartitions( ((TemperaturePartitionProperty) table.partitionProperty).getColdPartitionGroupId() ); + List hotPartitions = Catalog.snapshot().alloc().getPartitions( ((TemperaturePartitionProperty) partitionProperty).getHotPartitionGroupId() ); + List coldPartitions = Catalog.snapshot().alloc().getPartitions( ((TemperaturePartitionProperty) partitionProperty).getColdPartitionGroupId() ); Assert.assertTrue( (numberOfPartitionsInHot == hotPartitions.size()) || (numberOfPartitionsInHot == allowedTablesInHot) ); @@ -761,15 +764,17 @@ public void temperaturePartitionTest() throws SQLException { // This should execute two DML INSERTS on the target PartitionId and therefore redistribute the data // Verify that the partition is now in HOT and was not before - LogicalTable updatedTable = Catalog.getInstance().getTables( null, new Pattern( "temperaturetest" ) ).get( 0 ); + LogicalTable updatedTable = Catalog.snapshot().rel().getTables( null, new Pattern( "temperaturetest" ) ).get( 0 ); + + PartitionProperty updatedProperty = Catalog.snapshot().alloc().getPartitionProperty( updatedTable.id ); // Manually get the target partitionID of query PartitionManagerFactory partitionManagerFactory = PartitionManagerFactory.getInstance(); - PartitionManager partitionManager = partitionManagerFactory.getPartitionManager( table.partitionProperty.partitionType ); + PartitionManager partitionManager = partitionManagerFactory.getPartitionManager( partitionProperty.partitionType ); long targetId = partitionManager.getTargetPartitionId( table, partitionValue ); - List hotPartitionsAfterChange = Catalog.getInstance().getPartitions( ((TemperaturePartitionProperty) updatedTable.partitionProperty).getHotPartitionGroupId() ); - Assert.assertTrue( hotPartitionsAfterChange.contains( Catalog.getInstance().getPartition( targetId ) ) ); + List hotPartitionsAfterChange =
Catalog.snapshot().alloc().getPartitions( ((TemperaturePartitionProperty) updatedProperty).getHotPartitionGroupId() ); + Assert.assertTrue( hotPartitionsAfterChange.contains( Catalog.snapshot().alloc().getPartition( targetId ) ) ); //Todo @Hennlo check number of access } finally { @@ -1176,17 +1181,18 @@ public void dataPlacementTest() throws SQLException { + "WITH (foo, bar, foobar, barfoo) " ); try { - LogicalTable table = Catalog.getInstance().getTables( null, new Pattern( "horizontaldataplacementtest" ) ).get( 0 ); + LogicalTable table = Catalog.snapshot().rel().getTables( null, new Pattern( "horizontaldataplacementtest" ) ).get( 0 ); // Check if sufficient PartitionPlacements have been created // Check if initially as many DataPlacements are created as requested // One for each store - Assert.assertEquals( 1, table.dataPlacements.size() ); - CatalogDataPlacement dataPlacement = Catalog.getInstance().getDataPlacement( table.dataPlacements.get( 0 ), table.id ); + Assert.assertEquals( 1, Catalog.snapshot().alloc().getDataPlacements( table.id ).size() ); + + CatalogDataPlacement dataPlacement = Catalog.snapshot().alloc().getDataPlacements( table.id ).get( 0 ); // Check how many columnPlacements are added to the one DataPlacement - Assert.assertEquals( table.fieldIds.size(), dataPlacement.columnPlacementsOnAdapter.size() ); + Assert.assertEquals( table.getColumnIds().size(), dataPlacement.columnPlacementsOnAdapter.size() ); // Check how many partitionPlacements are added to the one DataPlacement Assert.assertEquals( partitionsToCreate, dataPlacement.getAllPartitionIds().size() ); @@ -1199,15 +1205,15 @@ public void dataPlacementTest() throws SQLException { statement.executeUpdate( "ALTER TABLE \"horizontalDataPlacementTest\" ADD PLACEMENT ON STORE \"anotherstore\"" ); // Check if we now have two dataPlacements in table - table = Catalog.getInstance().getTable( table.id ); - Assert.assertEquals( 2, Catalog.getInstance().getDataPlacements( table.id ).size() ); + table = Catalog.snapshot().rel().getTable( table.id ); + Assert.assertEquals( 2, Catalog.snapshot().alloc().getDataPlacements( table.id ).size() ); // Modify partitions on second store statement.executeUpdate( "ALTER TABLE \"horizontalDataPlacementTest\" MODIFY PARTITIONS (\"foo\") ON STORE anotherstore" ); - List dataPlacements = Catalog.getInstance().getDataPlacements( table.id ); + List dataPlacements = Catalog.snapshot().alloc().getDataPlacements( table.id ); - int adapterId = -1; - int initialAdapterId = -1; + long adapterId = -1; + long initialAdapterId = -1; for ( CatalogDataPlacement dp : dataPlacements ) { if ( dp.getAdapterName().equals( "anotherstore" ) ) { adapterId = dp.adapterId; @@ -1222,24 +1228,24 @@ public void dataPlacementTest() throws SQLException { statement.executeUpdate( "ALTER TABLE \"horizontalDataPlacementTest\" MODIFY PLACEMENT (tinteger) " + "ON STORE anotherstore WITH partitions (\"bar\", \"barfoo\", \"foo\") " ); - dataPlacements = Catalog.getInstance().getDataPlacements( table.id ); + dataPlacements = Catalog.snapshot().alloc().getDataPlacements( table.id ); for ( CatalogDataPlacement dp : dataPlacements ) { if ( dp.adapterId == adapterId ) { Assert.assertEquals( 2, dp.columnPlacementsOnAdapter.size() ); Assert.assertEquals( 3, dp.getAllPartitionIds().size() ); - Assert.assertEquals( 2, Catalog.getInstance().getColumnPlacementsOnAdapterPerTable( adapterId, table.id ).size() ); - Assert.assertEquals( 3, Catalog.getInstance().getPartitionsOnDataPlacement( adapterId, table.id ).size() ); + 
Assert.assertEquals( 2, Catalog.snapshot().alloc().getColumnPlacementsOnAdapterPerTable( adapterId, table.id ).size() ); + Assert.assertEquals( 3, Catalog.snapshot().alloc().getPartitionsOnDataPlacement( adapterId, table.id ).size() ); } else if ( dp.adapterId == initialAdapterId ) { Assert.assertEquals( 3, dp.columnPlacementsOnAdapter.size() ); Assert.assertEquals( 4, dp.getAllPartitionIds().size() ); - Assert.assertEquals( 3, Catalog.getInstance().getColumnPlacementsOnAdapterPerTable( initialAdapterId, table.id ).size() ); - Assert.assertEquals( 4, Catalog.getInstance().getPartitionsOnDataPlacement( initialAdapterId, table.id ).size() ); + Assert.assertEquals( 3, Catalog.snapshot().alloc().getColumnPlacementsOnAdapterPerTable( initialAdapterId, table.id ).size() ); + Assert.assertEquals( 4, Catalog.snapshot().alloc().getPartitionsOnDataPlacement( initialAdapterId, table.id ).size() ); } } // After MERGE should only hold one partition statement.executeUpdate( "ALTER TABLE \"horizontalDataPlacementTest\" MERGE PARTITIONS" ); - dataPlacements = Catalog.getInstance().getDataPlacements( table.id ); + dataPlacements = Catalog.snapshot().alloc().getDataPlacements( table.id ); for ( CatalogDataPlacement dp : dataPlacements ) { Assert.assertEquals( 1, dp.getAllPartitionIds().size() ); @@ -1250,7 +1256,7 @@ public void dataPlacementTest() throws SQLException { // DROP STORE and verify number of dataPlacements statement.executeUpdate( "ALTER TABLE \"horizontalDataPlacementTest\" DROP PLACEMENT ON STORE \"anotherstore\"" ); - Assert.assertEquals( 1, Catalog.getInstance().getDataPlacements( table.id ).size() ); + Assert.assertEquals( 1, Catalog.snapshot().alloc().getDataPlacements( table.id ).size() ); } finally { // Drop tables and stores diff --git a/dbms/src/test/java/org/polypheny/db/misc/VerticalPartitioningTest.java b/dbms/src/test/java/org/polypheny/db/misc/VerticalPartitioningTest.java index f6209f60cc..8340bb6a41 100644 --- a/dbms/src/test/java/org/polypheny/db/misc/VerticalPartitioningTest.java +++ b/dbms/src/test/java/org/polypheny/db/misc/VerticalPartitioningTest.java @@ -167,15 +167,15 @@ public void dataPlacementTest() throws SQLException { + "PRIMARY KEY (tprimary) )" ); try { - LogicalTable table = Catalog.getInstance().getTables( null, new Pattern( "verticaldataplacementtest" ) ).get( 0 ); + LogicalTable table = Catalog.snapshot().rel().getTables( null, new Pattern( "verticaldataplacementtest" ) ).get( 0 ); // Check if initially as many DataPlacements are created as requested (one for each store) - Assert.assertEquals( 1, table.dataPlacements.size() ); + Assert.assertEquals( 1, Catalog.snapshot().alloc().getDataPlacements( table.id ).size() ); - CatalogDataPlacement dataPlacement = Catalog.getInstance().getDataPlacement( table.dataPlacements.get( 0 ), table.id ); + CatalogDataPlacement dataPlacement = Catalog.snapshot().alloc().getDataPlacements( table.id ).get( 0 ); // Check how many columnPlacements are added to the one DataPlacement - Assert.assertEquals( table.fieldIds.size(), dataPlacement.columnPlacementsOnAdapter.size() ); + Assert.assertEquals( table.getColumnIds().size(), dataPlacement.columnPlacementsOnAdapter.size() ); // Check how many partitionPlacements are added to the one DataPlacement Assert.assertEquals( 1, dataPlacement.getAllPartitionIds().size() ); @@ -188,20 +188,20 @@ public void dataPlacementTest() throws SQLException { statement.executeUpdate( "ALTER TABLE \"verticalDataPlacementTest\" ADD PLACEMENT ON STORE \"anotherstore\"" ); // Check if we now have two 
dataPlacements in table - table = Catalog.getInstance().getTable( table.id ); - Assert.assertEquals( 2, Catalog.getInstance().getDataPlacements( table.id ).size() ); + table = Catalog.snapshot().rel().getTable( table.id ); + Assert.assertEquals( 2, Catalog.snapshot().alloc().getDataPlacements( table.id ).size() ); // Modify columns on second store statement.executeUpdate( "ALTER TABLE \"verticalDataPlacementTest\" MODIFY PLACEMENT (tprimary) ON STORE anotherstore" ); - List dataPlacements = Catalog.getInstance().getDataPlacements( table.id ); + List dataPlacements = Catalog.snapshot().alloc().getDataPlacements( table.id ); - int adapterId = -1; - int initialAdapterId = -1; + long adapterId = -1; + long initialAdapterId = -1; for ( CatalogDataPlacement dp : dataPlacements ) { if ( dp.getAdapterName().equals( "anotherstore" ) ) { Assert.assertEquals( 1, dp.columnPlacementsOnAdapter.size() ); adapterId = dp.adapterId; - Assert.assertEquals( 1, Catalog.getInstance().getColumnPlacementsOnAdapterPerTable( adapterId, table.id ).size() ); + Assert.assertEquals( 1, Catalog.snapshot().alloc().getColumnPlacementsOnAdapterPerTable( adapterId, table.id ).size() ); } else { initialAdapterId = dp.adapterId; } @@ -209,31 +209,31 @@ public void dataPlacementTest() throws SQLException { // MODIFY by adding single column on second store statement.executeUpdate( "ALTER TABLE \"verticalDataPlacementTest\" MODIFY PLACEMENT (tprimary, tvarchar) ON STORE anotherstore" ); - dataPlacements = Catalog.getInstance().getDataPlacements( table.id ); + dataPlacements = Catalog.snapshot().alloc().getDataPlacements( table.id ); for ( CatalogDataPlacement dp : dataPlacements ) { if ( dp.adapterId == adapterId ) { Assert.assertEquals( 2, dp.columnPlacementsOnAdapter.size() ); - Assert.assertEquals( 2, Catalog.getInstance().getColumnPlacementsOnAdapterPerTable( adapterId, table.id ).size() ); + Assert.assertEquals( 2, Catalog.snapshot().alloc().getColumnPlacementsOnAdapterPerTable( adapterId, table.id ).size() ); } else if ( dp.adapterId == initialAdapterId ) { Assert.assertEquals( 3, dp.columnPlacementsOnAdapter.size() ); - Assert.assertEquals( 3, Catalog.getInstance().getColumnPlacementsOnAdapterPerTable( initialAdapterId, table.id ).size() ); + Assert.assertEquals( 3, Catalog.snapshot().alloc().getColumnPlacementsOnAdapterPerTable( initialAdapterId, table.id ).size() ); } } // MODIFY by adding single column on first store statement.executeUpdate( "ALTER TABLE \"verticalDataPlacementTest\" MODIFY PLACEMENT (tinteger) ON STORE hsqldb" ); - dataPlacements = Catalog.getInstance().getDataPlacements( table.id ); + dataPlacements = Catalog.snapshot().alloc().getDataPlacements( table.id ); for ( CatalogDataPlacement dp : dataPlacements ) { if ( dp.adapterId == adapterId ) { Assert.assertEquals( 2, dp.columnPlacementsOnAdapter.size() ); - Assert.assertEquals( 2, Catalog.getInstance().getColumnPlacementsOnAdapterPerTable( adapterId, table.id ).size() ); - Assert.assertEquals( 2, Catalog.getInstance().getColumnPlacementsByAdapter( table.id ).get( adapterId ).size() ); - Assert.assertEquals( 1, Catalog.getInstance().getPhysicalsOnAdapter( table.id ).get( adapterId ).size() ); + Assert.assertEquals( 2, Catalog.snapshot().alloc().getColumnPlacementsOnAdapterPerTable( adapterId, table.id ).size() ); + Assert.assertEquals( 2, Catalog.snapshot().alloc().getColumnPlacementsByAdapter( table.id ).get( adapterId ).size() ); + Assert.assertEquals( 1, Catalog.snapshot().physical().getPhysicalsOnAdapter( adapterId ).size() ); } else if ( 
dp.adapterId == initialAdapterId ) { Assert.assertEquals( 2, dp.columnPlacementsOnAdapter.size() ); - Assert.assertEquals( 2, Catalog.getInstance().getColumnPlacementsOnAdapterPerTable( initialAdapterId, table.id ).size() ); - Assert.assertEquals( 2, Catalog.getInstance().getColumnPlacementsByAdapter( table.id ).get( initialAdapterId ).size() ); - Assert.assertEquals( 1, Catalog.getInstance().getPhysicalsOnAdapter( table.id ).get( initialAdapterId ).size() ); + Assert.assertEquals( 2, Catalog.snapshot().alloc().getColumnPlacementsOnAdapterPerTable( initialAdapterId, table.id ).size() ); + Assert.assertEquals( 2, Catalog.snapshot().alloc().getColumnPlacementsByAdapter( table.id ).get( initialAdapterId ).size() ); + Assert.assertEquals( 1, Catalog.snapshot().physical().getPhysicalsOnAdapter( initialAdapterId ).size() ); } } @@ -249,16 +249,16 @@ public void dataPlacementTest() throws SQLException { // ADD single column on second store statement.executeUpdate( "ALTER TABLE \"verticalDataPlacementTest\" MODIFY PLACEMENT ADD COLUMN tinteger ON STORE anotherstore" ); - dataPlacements = Catalog.getInstance().getDataPlacements( table.id ); + dataPlacements = Catalog.snapshot().alloc().getDataPlacements( table.id ); for ( CatalogDataPlacement dp : dataPlacements ) { if ( dp.adapterId == adapterId ) { Assert.assertEquals( 3, dp.columnPlacementsOnAdapter.size() ); - Assert.assertEquals( 3, Catalog.getInstance().getColumnPlacementsOnAdapterPerTable( adapterId, table.id ).size() ); - Assert.assertEquals( 3, Catalog.getInstance().getColumnPlacementsByAdapter( table.id ).get( adapterId ).size() ); + Assert.assertEquals( 3, Catalog.snapshot().alloc().getColumnPlacementsOnAdapterPerTable( adapterId, table.id ).size() ); + Assert.assertEquals( 3, Catalog.snapshot().alloc().getColumnPlacementsByAdapter( table.id ).get( adapterId ).size() ); } else if ( dp.adapterId == initialAdapterId ) { Assert.assertEquals( 2, dp.columnPlacementsOnAdapter.size() ); - Assert.assertEquals( 2, Catalog.getInstance().getColumnPlacementsOnAdapterPerTable( initialAdapterId, table.id ).size() ); - Assert.assertEquals( 2, Catalog.getInstance().getColumnPlacementsByAdapter( table.id ).get( initialAdapterId ).size() ); + Assert.assertEquals( 2, Catalog.snapshot().alloc().getColumnPlacementsOnAdapterPerTable( initialAdapterId, table.id ).size() ); + Assert.assertEquals( 2, Catalog.snapshot().alloc().getColumnPlacementsByAdapter( table.id ).get( initialAdapterId ).size() ); } } @@ -267,26 +267,26 @@ public void dataPlacementTest() throws SQLException { // REMOVE single column on second store statement.executeUpdate( "ALTER TABLE \"verticalDataPlacementTest\" MODIFY PLACEMENT DROP COLUMN tvarchar ON STORE anotherstore" ); - dataPlacements = Catalog.getInstance().getDataPlacements( table.id ); + dataPlacements = Catalog.snapshot().alloc().getDataPlacements( table.id ); for ( CatalogDataPlacement dp : dataPlacements ) { if ( dp.adapterId == adapterId ) { Assert.assertEquals( 2, dp.columnPlacementsOnAdapter.size() ); - Assert.assertEquals( 2, Catalog.getInstance().getColumnPlacementsOnAdapterPerTable( adapterId, table.id ).size() ); - Assert.assertEquals( 2, Catalog.getInstance().getColumnPlacementsByAdapter( table.id ).get( adapterId ).size() ); + Assert.assertEquals( 2, Catalog.snapshot().alloc().getColumnPlacementsOnAdapterPerTable( adapterId, table.id ).size() ); + Assert.assertEquals( 2, Catalog.snapshot().alloc().getColumnPlacementsByAdapter( table.id ).get( adapterId ).size() ); } else if ( dp.adapterId == initialAdapterId ) { 
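// After tvarchar is dropped from 'anotherstore', the initial store is expected to
// hold all three columns again: a column apparently may not lose its last remaining
// placement, so the drop re-establishes it on the initial store.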
Assert.assertEquals( 3, dp.columnPlacementsOnAdapter.size() ); - Assert.assertEquals( 3, Catalog.getInstance().getColumnPlacementsOnAdapterPerTable( initialAdapterId, table.id ).size() ); - Assert.assertEquals( 3, Catalog.getInstance().getColumnPlacementsByAdapter( table.id ).get( initialAdapterId ).size() ); + Assert.assertEquals( 3, Catalog.snapshot().alloc().getColumnPlacementsOnAdapterPerTable( initialAdapterId, table.id ).size() ); + Assert.assertEquals( 3, Catalog.snapshot().alloc().getColumnPlacementsByAdapter( table.id ).get( initialAdapterId ).size() ); } } Assert.assertEquals( 2, dataPlacements.size() ); // DROP STORE and verify number of dataPlacements statement.executeUpdate( "ALTER TABLE \"verticalDataPlacementTest\" DROP PLACEMENT ON STORE \"anotherstore\"" ); - Assert.assertEquals( 1, Catalog.getInstance().getDataPlacements( table.id ).size() ); + Assert.assertEquals( 1, Catalog.snapshot().alloc().getDataPlacements( table.id ).size() ); //Check also if ColumnPlacements have been correctly removed - Assert.assertEquals( 0, Catalog.getInstance().getColumnPlacementsOnAdapterPerTable( adapterId, table.id ).size() ); + Assert.assertEquals( 0, Catalog.snapshot().alloc().getColumnPlacementsOnAdapterPerTable( adapterId, table.id ).size() ); } finally { // Drop tables and stores statement.executeUpdate( "DROP TABLE IF EXISTS verticalDataPlacementTest" ); @@ -310,9 +310,6 @@ public void dataDistributionTest() throws SQLException { + "PRIMARY KEY (tprimary) )" ); try { - LogicalTable table = Catalog.getInstance().getTables( null, new Pattern( "verticaldataplacementtest" ) ).get( 0 ); - - CatalogDataPlacement dataPlacement = Catalog.getInstance().getDataPlacement( table.dataPlacements.get( 0 ), table.id ); // ADD adapter statement.executeUpdate( "ALTER ADAPTERS ADD \"anotherstore\" USING 'Hsqldb' AS 'Store'" diff --git a/dbms/src/test/java/org/polypheny/db/mql/DdlTest.java b/dbms/src/test/java/org/polypheny/db/mql/DdlTest.java index bba1ebd76a..ab87956d3f 100644 --- a/dbms/src/test/java/org/polypheny/db/mql/DdlTest.java +++ b/dbms/src/test/java/org/polypheny/db/mql/DdlTest.java @@ -36,6 +36,7 @@ import org.polypheny.db.catalog.entity.logical.LogicalCollection; import org.polypheny.db.catalog.exceptions.UnknownSchemaException; import org.polypheny.db.catalog.logistic.Pattern; +import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.excluded.CassandraExcluded; import org.polypheny.db.webui.models.Result; @@ -54,24 +55,24 @@ public void removeCollection() { @Test public void addCollectionTest() throws UnknownSchemaException { - Catalog catalog = Catalog.getInstance(); + Snapshot snapshot = Catalog.snapshot(); String name = "testCollection"; - LogicalNamespace namespace = catalog.getSchema( Catalog.defaultDatabaseId, database ); + LogicalNamespace namespace = snapshot.getNamespace( database ); - int size = catalog.getCollections( namespace.id, null ).size(); + int size = snapshot.doc().getCollections( namespace.id, null ).size(); execute( "db.createCollection(\"" + name + "\")" ); - assertEquals( size + 1, catalog.getCollections( namespace.id, null ).size() ); + assertEquals( size + 1, snapshot.doc().getCollections( namespace.id, null ).size() ); execute( String.format( "db.%s.drop()", name ) ); - assertEquals( size, catalog.getCollections( namespace.id, null ).size() ); + assertEquals( size, snapshot.doc().getCollections( namespace.id, null ).size() ); execute( "db.createCollection(\"" + name + "\")" ); - assertEquals( size + 1, catalog.getCollections( namespace.id, null 
).size() ); + assertEquals( size + 1, snapshot.doc().getCollections( namespace.id, null ).size() ); execute( String.format( "db.%s.drop()", name ) ); } @@ -79,28 +80,28 @@ public void addCollectionTest() throws UnknownSchemaException { @Test public void addPlacementTest() throws UnknownSchemaException, SQLException { - Catalog catalog = Catalog.getInstance(); + Snapshot snapshot = Catalog.snapshot(); String placement = "store1"; try { - LogicalNamespace namespace = catalog.getSchema( Catalog.defaultDatabaseId, database ); + LogicalNamespace namespace = snapshot.getNamespace( database ); - List collectionNames = catalog.getCollections( namespace.id, null ).stream().map( c -> c.name ).collect( Collectors.toList() ); + List collectionNames = snapshot.doc().getCollections( namespace.id, null ).stream().map( c -> c.name ).collect( Collectors.toList() ); collectionNames.forEach( n -> execute( String.format( "db.%s.drop()", n ) ) ); execute( "db.createCollection(\"" + collectionName + "\")" ); - LogicalCollection collection = catalog.getCollections( namespace.id, new Pattern( collectionName ) ).get( 0 ); + LogicalCollection collection = snapshot.doc().getCollections( namespace.id, new Pattern( collectionName ) ).get( 0 ); - assertEquals( collection.placements.size(), 1 ); + assertEquals( snapshot.alloc().getDataPlacements( collection.id ).size(), 1 ); addStore( placement ); execute( String.format( "db.%s.addPlacement(\"%s\")", collectionName, placement ) ); - collection = catalog.getCollections( namespace.id, new Pattern( collectionName ) ).get( 0 ); + collection = snapshot.doc().getCollections( namespace.id, new Pattern( collectionName ) ).get( 0 ); - assertEquals( collection.placements.size(), 2 ); + assertEquals( Catalog.snapshot().alloc().getDataPlacements( collection.id ).size(), 2 ); } finally { execute( String.format( "db.%s.drop()", collectionName ) ); @@ -112,32 +113,32 @@ public void addPlacementTest() throws UnknownSchemaException, SQLException { @Test public void deletePlacementTest() throws UnknownSchemaException, SQLException { - Catalog catalog = Catalog.getInstance(); + Snapshot snapshot = Catalog.snapshot(); String placement = "store1"; try { execute( "db.createCollection(\"" + collectionName + "\")" ); - LogicalNamespace namespace = catalog.getSchema( Catalog.defaultDatabaseId, database ); + LogicalNamespace namespace = snapshot.getNamespace( database ); - LogicalCollection collection = catalog.getCollections( namespace.id, new Pattern( collectionName ) ).get( 0 ); + LogicalCollection collection = snapshot.doc().getCollections( namespace.id, new Pattern( collectionName ) ).get( 0 ); - assertEquals( collection.placements.size(), 1 ); + assertEquals( Catalog.snapshot().alloc().getDataPlacements( collection.id ).size(), 1 ); addStore( placement ); execute( String.format( "db.%s.addPlacement(\"%s\")", collectionName, placement ) ); - collection = catalog.getCollections( namespace.id, new Pattern( collectionName ) ).get( 0 ); + collection = snapshot.doc().getCollections( namespace.id, new Pattern( collectionName ) ).get( 0 ); - assertEquals( collection.placements.size(), 2 ); + assertEquals( Catalog.snapshot().alloc().getDataPlacements( collection.id ).size(), 2 ); execute( String.format( "db.%s.deletePlacement(\"%s\")", collectionName, placement ) ); - collection = catalog.getCollections( namespace.id, new Pattern( collectionName ) ).get( 0 ); + collection = snapshot.doc().getCollections( namespace.id, new Pattern( collectionName ) ).get( 0 ); - assertEquals( 
collection.placements.size(), 1 ); + assertEquals( Catalog.snapshot().alloc().getDataPlacements( collection.id ).size(), 1 ); execute( String.format( "db.%s.drop()", collectionName ) ); diff --git a/dbms/src/test/java/org/polypheny/db/sql/clause/SimpleSqlTest.java b/dbms/src/test/java/org/polypheny/db/sql/clause/SimpleSqlTest.java index 149e118b19..38659cb949 100644 --- a/dbms/src/test/java/org/polypheny/db/sql/clause/SimpleSqlTest.java +++ b/dbms/src/test/java/org/polypheny/db/sql/clause/SimpleSqlTest.java @@ -22,13 +22,17 @@ import java.sql.Statement; import java.util.List; import org.junit.BeforeClass; +import org.junit.Ignore; import org.junit.Test; import org.junit.experimental.categories.Category; import org.polypheny.db.TestHelper; import org.polypheny.db.TestHelper.JdbcConnection; import org.polypheny.db.excluded.CottontailExcluded; import org.polypheny.db.excluded.FileExcluded; +import org.polypheny.db.util.Template; +import org.polypheny.db.util.TestUtil; +@SuppressWarnings({ "SqlDialectInspection", "SqlNoDataSourceInspection" }) public class SimpleSqlTest { @BeforeClass @@ -46,18 +50,33 @@ private static void insertData() { @Test - public void select() throws SQLException { - try ( JdbcConnection jdbcConnection = new JdbcConnection( false ) ) { - Connection connection = jdbcConnection.getConnection(); - try ( Statement statement = connection.createStatement() ) { - statement.executeUpdate( "CREATE TABLE TableA(ID INTEGER NOT NULL, NAME VARCHAR(20), AGE INTEGER, PRIMARY KEY (ID))" ); - statement.executeUpdate( "INSERT INTO TableA VALUES (12, 'Name1', 60)" ); - statement.executeUpdate( "INSERT INTO TableA VALUES (15, 'Name2', 24)" ); - statement.executeUpdate( "INSERT INTO TableA VALUES (99, 'Name3', 11)" ); + @Ignore + public void createTable() { + TestHelper.executeSql( + ( c, s ) -> s.executeUpdate( "CREATE TABLE TableA(ID INTEGER NOT NULL, NAME VARCHAR(20), AGE INTEGER, PRIMARY KEY (ID))" ) + ); + } + - connection.commit(); - } - } + @Test + public void dropTable() { + TestHelper.executeSql( + ( c, s ) -> s.executeUpdate( "CREATE TABLE TableA(ID INTEGER NOT NULL, NAME VARCHAR(20), AGE INTEGER, PRIMARY KEY (ID))" ), + ( c, s ) -> s.executeUpdate( "DROP TABLE TableA" ) + ); + } + + + @Test + public void insert() throws SQLException { + TestHelper.executeSql( + ( c, s ) -> s.executeUpdate( "CREATE TABLE TableA(ID INTEGER NOT NULL, NAME VARCHAR(20), AGE INTEGER, PRIMARY KEY (ID))" ), + ( c, s ) -> s.executeUpdate( "INSERT INTO TableA VALUES (12, 'Name1', 60)" ), + ( c, s ) -> s.executeUpdate( "INSERT INTO TableA VALUES (15, 'Name2', 24)" ), + ( c, s ) -> s.executeUpdate( "INSERT INTO TableA VALUES (99, 'Name3', 11)" ), + ( c, s ) -> s.executeUpdate( "DROP TABLE TableA" ), + ( c, s ) -> c.commit() + ); } diff --git a/dbms/src/test/java/org/polypheny/db/statistics/StatisticsTest.java b/dbms/src/test/java/org/polypheny/db/statistics/StatisticsTest.java index 632f977241..74ca539d17 100644 --- a/dbms/src/test/java/org/polypheny/db/statistics/StatisticsTest.java +++ b/dbms/src/test/java/org/polypheny/db/statistics/StatisticsTest.java @@ -35,7 +35,6 @@ import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.exceptions.UnknownSchemaException; import org.polypheny.db.catalog.exceptions.UnknownTableException; -import org.polypheny.db.catalog.logistic.Pattern; import org.polypheny.db.catalog.snapshot.Snapshot; @@ -256,9 +255,8 @@ public void testSimpleRowCount() throws SQLException { waiter.await( 20, TimeUnit.SECONDS ); try { Snapshot snapshot = 
Catalog.getInstance().getSnapshot(); - LogicalNamespace namespace = snapshot.getNamespace( "statisticschema" ); - LogicalTable catalogTableNation = snapshot.getRelSnapshot( namespace.id ).getTable( "nation" ); - LogicalTable catalogTableRegion = snapshot.getRelSnapshot( namespace.id ).getTable( "region" ); + LogicalTable catalogTableNation = snapshot.rel().getTable( "statisticschema", "nation" ); + LogicalTable catalogTableRegion = snapshot.rel().getTable( "statisticschema", "region" ); Integer rowCountNation = StatisticsManager.getInstance().rowCountPerTable( catalogTableNation.id ); Integer rowCountRegion = StatisticsManager.getInstance().rowCountPerTable( catalogTableRegion.id ); @@ -312,14 +310,13 @@ private void assertStatisticsConvertTo( int maxSeconds, int target ) { while ( !successfull && count < maxSeconds ) { waiter.await( 1, TimeUnit.SECONDS ); Snapshot snapshot = Catalog.getInstance().getSnapshot(); - LogicalNamespace namespace = snapshot.getNamespace( "statisticschema" ); - if ( snapshot.getRelSnapshot( namespace.id ).getTable( "nationdelete" ) == null ) { + if ( snapshot.rel().getTable( "statisticschema", "nationdelete" ) == null ) { count++; inCatalog = false; continue; } inCatalog = true; - LogicalTable catalogTableNation = snapshot.getRelSnapshot(namespace.id).getTable( "nationdelete" ); + LogicalTable catalogTableNation = snapshot.rel().getTable( "statisticschema", "nationdelete" ); Integer rowCount = StatisticsManager.getInstance().rowCountPerTable( catalogTableNation.id ); // the table may already exist in the catalog but not yet in the statistics if ( rowCount != null && rowCount == target ) { diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/QueryResult.java b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/QueryResult.java index cc4e64e259..8e3d1bd1eb 100644 --- a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/QueryResult.java +++ b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/QueryResult.java @@ -42,7 +42,7 @@ class QueryResult { public static QueryResult fromCatalogColumn( LogicalColumn column ) { - return new QueryResult( Catalog.getInstance().getSnapshot().getRelSnapshot( column.namespaceId ).getTable( column.tableId ), column ); + return new QueryResult( Catalog.getInstance().getSnapshot().rel().getTable( column.tableId ), column ); } } diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticColumn.java b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticColumn.java index a5d69a10b9..28fe5b6eb0 100644 --- a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticColumn.java +++ b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticColumn.java @@ -86,7 +86,7 @@ public StatisticColumn( long schemaId, long tableId, long columnId, PolyType typ this.type = type; this.columnType = columnType; - LogicalRelSnapshot snapshot = Catalog.getInstance().getSnapshot().getRelSnapshot( schemaId ); + LogicalRelSnapshot snapshot = Catalog.getInstance().getSnapshot().rel(); if ( snapshot.getTable( tableId ) != null ) { this.schema = Catalog.getInstance().getSnapshot().getNamespace( schemaId ).name; this.table = snapshot.getTable( tableId ).name; diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticQueryProcessor.java b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticQueryProcessor.java index 2717de3a48..8798a80567 ---
a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticQueryProcessor.java +++ b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticQueryProcessor.java @@ -22,6 +22,7 @@ import java.util.stream.Collectors; import lombok.Getter; import lombok.extern.slf4j.Slf4j; +import org.apache.calcite.avatica.Meta.Pat; import org.polypheny.db.PolyImplementation; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.AlgRoot; @@ -34,6 +35,7 @@ import org.polypheny.db.catalog.exceptions.UnknownUserException; import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.logistic.NamespaceType; +import org.polypheny.db.catalog.logistic.Pattern; import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.config.RuntimeConfig; import org.polypheny.db.iface.Authenticator; @@ -42,6 +44,7 @@ import org.polypheny.db.transaction.Transaction.MultimediaFlavor; import org.polypheny.db.transaction.TransactionException; import org.polypheny.db.transaction.TransactionManager; +import org.polypheny.db.util.Pair; @Slf4j @@ -49,7 +52,6 @@ public class StatisticQueryProcessor { @Getter private final TransactionManager transactionManager; - private final long databaseId; private final long userId; @@ -57,15 +59,14 @@ public class StatisticQueryProcessor { * LowCostQueries can be used to retrieve short answered queries * Idea is to expose a selected list of sql operations with a small list of results and not impact performance */ - public StatisticQueryProcessor( final TransactionManager transactionManager, long userId, long databaseId ) { + public StatisticQueryProcessor( final TransactionManager transactionManager, long userId ) { this.transactionManager = transactionManager; - this.databaseId = databaseId; this.userId = userId; } public StatisticQueryProcessor( TransactionManager transactionManager, Authenticator authenticator ) { - this( transactionManager, Catalog.defaultUserId, Catalog.defaultDatabaseId ); + this( transactionManager, Catalog.defaultUserId ); } @@ -93,10 +94,10 @@ public List> getSchemaTree() { List schemas = snapshot.getNamespaces( null ); for ( LogicalNamespace schema : schemas ) { List tables = new ArrayList<>(); - List childTables = snapshot.getRelSnapshot( schema.id ).getTables( , null ); + List childTables = snapshot.rel().getTables( new Pattern( schema.name ), null ); for ( LogicalTable childTable : childTables ) { List table = new ArrayList<>(); - List columns = snapshot.getRelSnapshot( schema.id ).getColumns( childTable.id ); + List columns = snapshot.rel().getColumns( childTable.id ); for ( LogicalColumn logicalColumn : columns ) { table.add( schema.name + "." + childTable.name + "." 
+ logicalColumn.name ); } @@ -121,7 +122,7 @@ public List getAllColumns() { return snapshot.getNamespaces( null ) .stream() .filter( n -> n.namespaceType == NamespaceType.RELATIONAL ) - .flatMap( n -> snapshot.getRelSnapshot( n.id ).getTables( , null ).stream().filter( t -> t.entityType != EntityType.VIEW ).flatMap( t -> snapshot.getRelSnapshot( n.id ).getColumns( t.id ).stream() ) ) + .flatMap( n -> snapshot.rel().getTables( Pattern.of( n.name ), null ).stream().filter( t -> t.entityType != EntityType.VIEW ).flatMap( t -> snapshot.rel().getColumns( t.id ).stream() ) ) .map( QueryResult::fromCatalogColumn ) .collect( Collectors.toList() ); } @@ -135,7 +136,7 @@ public List getAllColumns() { public List getAllTable() { Snapshot snapshot = Catalog.getInstance().getSnapshot(); return snapshot.getNamespaces( null ).stream().filter( n -> n.namespaceType == NamespaceType.RELATIONAL ) - .flatMap( n -> snapshot.getRelSnapshot( n.id ).getTables( , null ).stream().filter( t -> t.entityType != EntityType.VIEW ) ).collect( Collectors.toList() ); + .flatMap( n -> snapshot.rel().getTables( Pattern.of( n.name ), null ).stream().filter( t -> t.entityType != EntityType.VIEW ) ).collect( Collectors.toList() ); } @@ -146,7 +147,7 @@ public List getAllTable() { */ public List getAllColumns( Long tableId ) { Snapshot snapshot = Catalog.getInstance().getSnapshot(); - return snapshot.getNamespaces( null ).stream().flatMap( n -> snapshot.getRelSnapshot( n.id ).getColumns( tableId ).stream() ).map( QueryResult::fromCatalogColumn ).collect( Collectors.toList() ); + return snapshot.getNamespaces( null ).stream().flatMap( n -> snapshot.rel().getColumns( tableId ).stream() ).map( QueryResult::fromCatalogColumn ).collect( Collectors.toList() ); } diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticTable.java b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticTable.java index 0e8df34f8c..dd8c413744 100644 --- a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticTable.java +++ b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticTable.java @@ -78,7 +78,7 @@ public StatisticTable( Long tableId ) { LogicalTable catalogTable = catalog.getSnapshot().getLogicalEntity( tableId ).unwrap( LogicalTable.class ); this.table = catalogTable.name; this.namespaceType = catalogTable.namespaceType; - //this.dataPlacements = ImmutableList.copyOf( catalog.getSnapshot().getAllocSnapshot().getDataPlacements( catalogTable.id ).stream().map( c -> c.adapterId ).collect( Collectors.toList() ) ); + //this.dataPlacements = ImmutableList.copyOf( catalog.getSnapshot().alloc().getDataPlacements( catalogTable.id ).stream().map( c -> c.adapterId ).collect( Collectors.toList() ) ); this.entityType = catalogTable.entityType; } calls = new TableCalls( tableId, 0, 0, 0, 0 ); diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticsManagerImpl.java b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticsManagerImpl.java index 9239925eef..e03aa540fa 100644 --- a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticsManagerImpl.java +++ b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticsManagerImpl.java @@ -233,7 +233,7 @@ public void restart( Config c ) { private void resetAllIsFull() { this.statisticSchemaMap.values().forEach( s -> s.values().forEach( t -> t.values().forEach( c -> { - assignUnique( c, this.prepareNode( QueryResult.fromCatalogColumn( 
Catalog.getInstance().getSnapshot().getRelSnapshot( c.getSchemaId() ).getColumn( c.getColumnId() ) ), NodeType.UNIQUE_VALUE ) ); + assignUnique( c, this.prepareNode( QueryResult.fromCatalogColumn( Catalog.getInstance().getSnapshot().rel().getColumn( c.getColumnId() ) ), NodeType.UNIQUE_VALUE ) ); } ) ) ); } @@ -242,7 +242,7 @@ private void resetAllIsFull() { * Reset all statistics and reevaluate them. */ private void reevaluateAllStatistics() { - if ( statisticQueryInterface == null ) { + if ( true || statisticQueryInterface == null ) { return; } log.debug( "Resetting StatisticManager." ); @@ -894,7 +894,7 @@ private void handleDrop( long tableId, Map> changedValues, long sc private void handleTruncate( long tableId, long schemaId, Catalog catalog ) { LogicalTable catalogTable = catalog.getSnapshot().getLogicalEntity( tableId ).unwrap( LogicalTable.class ); - for ( LogicalColumn column : catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getColumns( catalogTable.id ) ) { + for ( LogicalColumn column : catalog.getSnapshot().rel().getColumns( catalogTable.id ) ) { PolyType polyType = column.type; QueryResult queryResult = new QueryResult( catalogTable, column ); if ( this.statisticSchemaMap.get( schemaId ).get( tableId ).get( column.id ) != null ) { @@ -924,20 +924,20 @@ private void handleInsert( long tableId, Map> changedValues, long LogicalTable catalogTable = catalog.getSnapshot().getLogicalEntity( tableId ).unwrap( LogicalTable.class ); if ( this.statisticSchemaMap.get( schemaId ) != null ) { if ( this.statisticSchemaMap.get( schemaId ).get( tableId ) != null ) { - for ( LogicalColumn column : catalog.getSnapshot().getRelSnapshot( schemaId ).getColumns( tableId ) ) { + for ( LogicalColumn column : catalog.getSnapshot().rel().getColumns( tableId ) ) { PolyType polyType = column.type; QueryResult queryResult = new QueryResult( catalogTable, column ); if ( this.statisticSchemaMap.get( schemaId ).get( tableId ).get( column.id ) != null && changedValues.get( (long) column.position ) != null ) { - handleInsertColumn( tableId, changedValues, schemaId, catalog.getSnapshot().getRelSnapshot( schemaId ).getColumns( tableId ).stream().map( c -> c.id ).collect( Collectors.toList() ), column.position, queryResult ); + handleInsertColumn( tableId, changedValues, schemaId, catalog.getSnapshot().rel().getColumns( tableId ).stream().map( c -> c.id ).collect( Collectors.toList() ), column.position, queryResult ); } else { addNewColumnStatistics( changedValues, column.position, polyType, queryResult ); } } } else { - addInserts( changedValues, catalogTable, catalog.getSnapshot().getRelSnapshot( schemaId ).getColumns( tableId ) ); + addInserts( changedValues, catalogTable, catalog.getSnapshot().rel().getColumns( tableId ) ); } } else { - addInserts( changedValues, catalogTable, catalog.getSnapshot().getRelSnapshot( schemaId ).getColumns( tableId ) ); + addInserts( changedValues, catalogTable, catalog.getSnapshot().rel().getColumns( tableId ) ); } } @@ -1185,7 +1185,7 @@ public > Object getTableStatistic( long schemaId, long t } else if ( v.getType().getFamily() == PolyTypeFamily.CHARACTER ) { alphabeticInfo.add( (AlphabeticStatisticColumn) v ); statisticTable.setAlphabeticColumn( (List) alphabeticInfo ); - } else if ( PolyType.DATETIME_TYPES.contains( Catalog.getInstance().getSnapshot().getRelSnapshot( schemaId ).getColumn( k ).type ) ) { + } else if ( PolyType.DATETIME_TYPES.contains( Catalog.getInstance().getSnapshot().rel().getColumn( k ).type ) ) { temporalInfo.add( 
(TemporalStatisticColumn) v ); statisticTable.setTemporalColumn( (List) temporalInfo ); } diff --git a/plugins/avatica-interface/src/main/java/org/polypheny/db/avatica/AvaticaInterfacePlugin.java b/plugins/avatica-interface/src/main/java/org/polypheny/db/avatica/AvaticaInterfacePlugin.java index b69eff26ad..c5ea9ce242 100644 --- a/plugins/avatica-interface/src/main/java/org/polypheny/db/avatica/AvaticaInterfacePlugin.java +++ b/plugins/avatica-interface/src/main/java/org/polypheny/db/avatica/AvaticaInterfacePlugin.java @@ -93,7 +93,7 @@ public static class AvaticaInterface extends QueryInterface implements PropertyC private final HttpServerDispatcher httpServerDispatcher; - public AvaticaInterface( TransactionManager transactionManager, Authenticator authenticator, int ifaceId, String uniqueName, Map settings ) { + public AvaticaInterface( TransactionManager transactionManager, Authenticator authenticator, long ifaceId, String uniqueName, Map settings ) { super( transactionManager, authenticator, ifaceId, uniqueName, settings, true, true ); metricsSystemConfiguration = NoopMetricsSystemConfiguration.getInstance(); metricsSystem = NoopMetricsSystem.getInstance(); diff --git a/plugins/avatica-interface/src/main/java/org/polypheny/db/avatica/DbmsMeta.java b/plugins/avatica-interface/src/main/java/org/polypheny/db/avatica/DbmsMeta.java index 27e6becc0c..63fc9f7217 100644 --- a/plugins/avatica-interface/src/main/java/org/polypheny/db/avatica/DbmsMeta.java +++ b/plugins/avatica-interface/src/main/java/org/polypheny/db/avatica/DbmsMeta.java @@ -308,7 +308,7 @@ private List getLogicalTables( Pattern schemaPattern, Pattern tabl return namespaces .stream() .flatMap( - n -> catalog.getSnapshot().getRelSnapshot( n.id ).getTables( , tablePattern ).stream() ).collect( Collectors.toList() ); + n -> catalog.getSnapshot().rel().getTables( Pattern.of( n.name ), tablePattern ).stream() ).collect( Collectors.toList() ); } @@ -319,7 +319,7 @@ public MetaResultSet getColumns( final ConnectionHandle ch, final String databas if ( log.isTraceEnabled() ) { log.trace( "getColumns( ConnectionHandle {}, String {}, Pat {}, Pat {}, Pat {} )", ch, database, schemaPattern, tablePattern, columnPattern ); } - final List columns = getLogicalTables( schemaPattern, tablePattern ).stream().flatMap( t -> catalog.getSnapshot().getRelSnapshot( t.namespaceId ).getColumns( + final List columns = getLogicalTables( schemaPattern, tablePattern ).stream().flatMap( t -> catalog.getSnapshot().rel().getColumns( (tablePattern == null || tablePattern.s == null) ? null : new Pattern( tablePattern.s ), (columnPattern == null || columnPattern.s == null) ? 
null : new Pattern( columnPattern.s ) ).stream() ).collect( Collectors.toList() ); @@ -532,7 +532,7 @@ public MetaResultSet getPrimaryKeys( final ConnectionHandle ch, final String dat List primaryKeyColumns = new LinkedList<>(); for ( LogicalTable catalogTable : catalogEntities ) { if ( catalogTable.primaryKey != null ) { - final CatalogPrimaryKey primaryKey = catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getPrimaryKey( catalogTable.primaryKey ); + final CatalogPrimaryKey primaryKey = catalog.getSnapshot().rel().getPrimaryKey( catalogTable.primaryKey ); primaryKeyColumns.addAll( primaryKey.getCatalogPrimaryKeyColumns() ); } } @@ -568,7 +568,7 @@ public MetaResultSet getImportedKeys( final ConnectionHandle ch, final String da final List catalogEntities = getLogicalTables( schemaPattern, tablePattern ); List foreignKeyColumns = new LinkedList<>(); for ( LogicalTable catalogTable : catalogEntities ) { - List importedKeys = catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getForeignKeys( catalogTable.id ); + List importedKeys = catalog.getSnapshot().rel().getForeignKeys( catalogTable.id ); importedKeys.forEach( catalogForeignKey -> foreignKeyColumns.addAll( catalogForeignKey.getCatalogForeignKeyColumns() ) ); } StatementHandle statementHandle = createStatement( ch ); @@ -611,7 +611,7 @@ public MetaResultSet getExportedKeys( final ConnectionHandle ch, final String da final List catalogEntities = getLogicalTables( schemaPattern, tablePattern ); List foreignKeyColumns = new LinkedList<>(); for ( LogicalTable catalogTable : catalogEntities ) { - List exportedKeys = catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getExportedKeys( catalogTable.id ); + List exportedKeys = catalog.getSnapshot().rel().getExportedKeys( catalogTable.id ); exportedKeys.forEach( catalogForeignKey -> foreignKeyColumns.addAll( catalogForeignKey.getCatalogForeignKeyColumns() ) ); } StatementHandle statementHandle = createStatement( ch ); @@ -727,7 +727,7 @@ public MetaResultSet getIndexInfo( final ConnectionHandle ch, final String datab final List catalogEntities = getLogicalTables( schemaPattern, tablePattern ); List catalogIndexColumns = new LinkedList<>(); for ( LogicalTable catalogTable : catalogEntities ) { - List catalogIndexInfos = catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getIndexes( catalogTable.id, unique ); + List catalogIndexInfos = catalog.getSnapshot().rel().getIndexes( catalogTable.id, unique ); catalogIndexInfos.forEach( info -> catalogIndexColumns.addAll( info.getCatalogIndexColumns() ) ); } StatementHandle statementHandle = createStatement( ch ); diff --git a/plugins/cql-language/src/main/java/org/polypheny/db/cql/ColumnIndex.java b/plugins/cql-language/src/main/java/org/polypheny/db/cql/ColumnIndex.java index 1664983e6f..39efd63ce2 100644 --- a/plugins/cql-language/src/main/java/org/polypheny/db/cql/ColumnIndex.java +++ b/plugins/cql-language/src/main/java/org/polypheny/db/cql/ColumnIndex.java @@ -57,7 +57,7 @@ public static ColumnIndex createIndex( String inDatabase, String schemaName, Str log.debug( "Creating ColumnIndex." 
); Catalog catalog = Catalog.getInstance(); LogicalNamespace namespace = catalog.getSnapshot().getNamespace( schemaName ); - LogicalColumn column = catalog.getSnapshot().getRelSnapshot( namespace.id ).getColumn( tableName, columnName ); + LogicalColumn column = catalog.getSnapshot().rel().getColumn( tableName, columnName ); return new ColumnIndex( column, schemaName, tableName, columnName ); } catch ( UnknownTableException | UnknownSchemaException | UnknownColumnException e ) { log.error( "Cannot find an underlying column for the specified column name: {}.{}.{}.", schemaName, tableName, columnName, e ); diff --git a/plugins/cql-language/src/main/java/org/polypheny/db/cql/Combiner.java b/plugins/cql-language/src/main/java/org/polypheny/db/cql/Combiner.java index 02f2060f47..ecd362a921 100644 --- a/plugins/cql-language/src/main/java/org/polypheny/db/cql/Combiner.java +++ b/plugins/cql-language/src/main/java/org/polypheny/db/cql/Combiner.java @@ -150,7 +150,7 @@ private static String[] getColumnsToJoinOn( TableIndex left, TableIndex right, S LogicalTable rightCatalogTable = right.catalogTable; List columnList = Arrays.asList( columnStrs ); - LogicalRelSnapshot relSnapshot = Catalog.getInstance().getSnapshot().getRelSnapshot( leftCatalogTable.namespaceId ); + LogicalRelSnapshot relSnapshot = Catalog.getInstance().getSnapshot().rel(); List lColumnNames = relSnapshot.getColumns( leftCatalogTable.id ).stream().map( c -> c.name ).collect( Collectors.toList() ); List rColumnNames = relSnapshot.getColumns( rightCatalogTable.id ).stream().map( c -> c.name ).collect( Collectors.toList() ); if ( !new HashSet<>( lColumnNames ).containsAll( columnList ) || !new HashSet<>( rColumnNames ).containsAll( columnList ) ) { @@ -170,7 +170,7 @@ private static String[] getCommonColumns( TableIndex table1, TableIndex table2 ) if ( log.isDebugEnabled() ) { log.debug( "Getting Common Columns between '{}' and '{}'.", table1.fullyQualifiedName, table2.fullyQualifiedName ); } - LogicalRelSnapshot relSnapshot = Catalog.getInstance().getSnapshot().getRelSnapshot( table1.catalogTable.namespaceId ); + LogicalRelSnapshot relSnapshot = Catalog.getInstance().getSnapshot().rel(); List table1Columns = relSnapshot.getColumns( table1.catalogTable.id ).stream().map( c -> c.name ).collect( Collectors.toList() ); List table2Columns = relSnapshot.getColumns( table2.catalogTable.id ).stream().map( c -> c.name ).collect( Collectors.toList() ); diff --git a/plugins/cql-language/src/main/java/org/polypheny/db/cql/Cql2RelConverter.java b/plugins/cql-language/src/main/java/org/polypheny/db/cql/Cql2RelConverter.java index f898846511..bc18689dfe 100644 --- a/plugins/cql-language/src/main/java/org/polypheny/db/cql/Cql2RelConverter.java +++ b/plugins/cql-language/src/main/java/org/polypheny/db/cql/Cql2RelConverter.java @@ -195,7 +195,7 @@ private AlgBuilder generateProjections( AlgBuilder algBuilder, RexBuilder rexBui TableIndex tableIndex = treeNode.getExternalNode(); String columnNamePrefix = tableIndex.fullyQualifiedName + "."; LogicalTable catalogTable = tableIndex.catalogTable; - for ( LogicalColumn column : catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getColumns( catalogTable.id ) ) { + for ( LogicalColumn column : catalog.getSnapshot().rel().getColumns( catalogTable.id ) ) { int ordinal = tableScanColumnOrdinalities.size(); RexNode inputRef = rexBuilder.makeInputRef( baseNode, ordinal ); inputRefs.add( inputRef ); diff --git a/plugins/cql-language/src/main/java/org/polypheny/db/cql/TableIndex.java 
b/plugins/cql-language/src/main/java/org/polypheny/db/cql/TableIndex.java index f5b0f39111..8f3b211952 100644 --- a/plugins/cql-language/src/main/java/org/polypheny/db/cql/TableIndex.java +++ b/plugins/cql-language/src/main/java/org/polypheny/db/cql/TableIndex.java @@ -46,8 +46,7 @@ public TableIndex( final LogicalTable catalogTable, final String schemaName, fin public static TableIndex createIndex( String schemaName, String tableName ) throws UnknownIndexException { log.debug( "Creating TableIndex." ); Catalog catalog = Catalog.getInstance(); - LogicalNamespace namespace = catalog.getSnapshot().getNamespace( schemaName ); - LogicalTable table = catalog.getSnapshot().getRelSnapshot( namespace.id ).getTable( tableName ); + LogicalTable table = catalog.getSnapshot().rel().getTable( schemaName, tableName ); return new TableIndex( table, schemaName, tableName ); } diff --git a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSchema.java b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSchema.java index 25e9bfe52c..a7b46842f8 100644 --- a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSchema.java +++ b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSchema.java @@ -41,6 +41,7 @@ import java.util.List; import java.util.Map; import lombok.Getter; +import org.polypheny.db.adapter.DataSource.ExportedColumn; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; import org.polypheny.db.algebra.type.AlgDataTypeSystem; @@ -88,15 +89,18 @@ public PhysicalTable createCsvTable( long id, LogicalTable catalogTable, Allocat final AlgDataTypeFactory.Builder fieldInfo = typeFactory.builder(); List fieldTypes = new LinkedList<>(); List fieldIds = new ArrayList<>( allocationTable.placements.size() ); + + List columns = csvSource.getExportedColumns().get( catalogTable.name ); + for ( CatalogColumnPlacement placement : allocationTable.placements ) { - LogicalColumn logicalColumn = Catalog.getInstance().getSnapshot().getRelSnapshot( allocationTable.namespaceId ).getColumn( placement.columnId ); + LogicalColumn logicalColumn = Catalog.getInstance().getSnapshot().rel().getColumn( placement.columnId ); AlgDataType sqlType = sqlType( typeFactory, logicalColumn.type, logicalColumn.length, logicalColumn.scale, null ); - fieldInfo.add( logicalColumn.name, placement.physicalColumnName, sqlType ).nullable( logicalColumn.nullable ); + fieldInfo.add( logicalColumn.name, columns.get( (int) placement.position ).physicalColumnName, sqlType ).nullable( logicalColumn.nullable ); fieldTypes.add( CsvFieldType.getCsvFieldType( logicalColumn.type ) ); - fieldIds.add( (int) placement.physicalPosition ); + fieldIds.add( (int) placement.position ); } - String csvFileName = allocationTable.placements.get( 0 ).physicalSchemaName; + String csvFileName = columns.get( 0 ).physicalSchemaName; Source source; try { source = Sources.of( new URL( directoryUrl, csvFileName ) ); diff --git a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java index 7a64673906..b8c58c4c27 100644 --- a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java +++ b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java @@ -162,7 +162,7 @@ public Map> getExportedColumns() { List placements = Catalog .getInstance() .getSnapshot() - .getPhysicalSnapshot() + .physical() .getPhysicalsOnAdapter( 
getAdapterId() ); fileNames = new HashSet<>(); for ( PhysicalEntity ccp : placements ) { diff --git a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/cypher2alg/CypherToAlgConverter.java b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/cypher2alg/CypherToAlgConverter.java index da23e35f2e..d6e882e44d 100644 --- a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/cypher2alg/CypherToAlgConverter.java +++ b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/cypher2alg/CypherToAlgConverter.java @@ -107,7 +107,7 @@ public AlgRoot convert( CypherNode query, ExtendedQueryParameters parameters, Al databaseId = parameters.getDatabaseId(); } - LogicalGraph graph = this.snapshot.getGraphSnapshot( databaseId ).getGraph( databaseId ); + LogicalGraph graph = this.snapshot.graph().getGraph( databaseId ); if ( parameters.isFullGraph() ) { // simple full graph scan diff --git a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/ddl/CypherAddPlacement.java b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/ddl/CypherAddPlacement.java index e3dcf660e8..4f0171ada7 100644 --- a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/ddl/CypherAddPlacement.java +++ b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/ddl/CypherAddPlacement.java @@ -63,7 +63,7 @@ public void execute( Context context, Statement statement, QueryParameters param AdapterManager adapterManager = AdapterManager.getInstance(); - List graphs = statement.getTransaction().getSnapshot().getNamespaces( new Pattern( this.database ) ).stream().map( g -> statement.getTransaction().getSnapshot().getGraphSnapshot( g.id ).getGraph( g.id ) ).collect( Collectors.toList() ); + List graphs = statement.getTransaction().getSnapshot().getNamespaces( new Pattern( this.database ) ).stream().map( g -> statement.getTransaction().getSnapshot().graph().getGraph( g.id ) ).collect( Collectors.toList() ); List dataStores = Stream.of( store ) .map( store -> (DataStore) adapterManager.getAdapter( store ) ) diff --git a/plugins/explore-by-example/src/main/java/org/polypheny/db/exploreByExample/ExploreQueryProcessor.java b/plugins/explore-by-example/src/main/java/org/polypheny/db/exploreByExample/ExploreQueryProcessor.java index 33dc5360db..786e9a96db 100644 --- a/plugins/explore-by-example/src/main/java/org/polypheny/db/exploreByExample/ExploreQueryProcessor.java +++ b/plugins/explore-by-example/src/main/java/org/polypheny/db/exploreByExample/ExploreQueryProcessor.java @@ -59,7 +59,7 @@ public ExploreQueryProcessor( final TransactionManager transactionManager, long public ExploreQueryProcessor( final TransactionManager transactionManager, Authenticator authenticator ) { - this( transactionManager, Catalog.defaultUserId, Catalog.defaultDatabaseId ); + this( transactionManager, Catalog.defaultUserId, Catalog.defaultNamespaceId ); } diff --git a/plugins/hsqldb-adapter/src/main/java/org/polypheny/db/hsqldb/stores/HsqldbStore.java b/plugins/hsqldb-adapter/src/main/java/org/polypheny/db/hsqldb/stores/HsqldbStore.java index 150125418c..51cce5bfcd 100644 --- a/plugins/hsqldb-adapter/src/main/java/org/polypheny/db/hsqldb/stores/HsqldbStore.java +++ b/plugins/hsqldb-adapter/src/main/java/org/polypheny/db/hsqldb/stores/HsqldbStore.java @@ -120,9 +120,9 @@ public Namespace getCurrentSchema() { @Override public void addIndex( Context context, CatalogIndex catalogIndex, List partitionIds ) { - List ccps = context.getSnapshot().getAllocSnapshot().getColumnPlacementsOnAdapterPerTable( 
getAdapterId(), catalogIndex.key.tableId ); + List ccps = context.getSnapshot().alloc().getColumnPlacementsOnAdapterPerTable( getAdapterId(), catalogIndex.key.tableId ); List partitionPlacements = new ArrayList<>(); - partitionIds.forEach( id -> partitionPlacements.add( context.getSnapshot().getAllocSnapshot().getPartitionPlacement( getAdapterId(), id ) ) ); + partitionIds.forEach( id -> partitionPlacements.add( context.getSnapshot().alloc().getPartitionPlacement( getAdapterId(), id ) ) ); String physicalIndexName = getPhysicalIndexName( catalogIndex.key.tableId, catalogIndex.id ); for ( CatalogPartitionPlacement partitionPlacement : partitionPlacements ) { @@ -160,7 +160,7 @@ public void addIndex( Context context, CatalogIndex catalogIndex, List par @Override public void dropIndex( Context context, CatalogIndex catalogIndex, List partitionIds ) { List partitionPlacements = new ArrayList<>(); - partitionIds.forEach( id -> partitionPlacements.add( catalog.getSnapshot().getAllocSnapshot().getPartitionPlacement( getAdapterId(), id ) ) ); + partitionIds.forEach( id -> partitionPlacements.add( catalog.getSnapshot().alloc().getPartitionPlacement( getAdapterId(), id ) ) ); for ( CatalogPartitionPlacement partitionPlacement : partitionPlacements ) { StringBuilder builder = new StringBuilder(); diff --git a/plugins/http-interface/src/main/java/org/polypheny/db/http/HttpInterfacePlugin.java b/plugins/http-interface/src/main/java/org/polypheny/db/http/HttpInterfacePlugin.java index 1ac9f13c8c..81434c7846 100644 --- a/plugins/http-interface/src/main/java/org/polypheny/db/http/HttpInterfacePlugin.java +++ b/plugins/http-interface/src/main/java/org/polypheny/db/http/HttpInterfacePlugin.java @@ -111,7 +111,7 @@ public static class HttpInterface extends QueryInterface { private static Javalin server; - public HttpInterface( TransactionManager transactionManager, Authenticator authenticator, int ifaceId, String uniqueName, Map settings ) { + public HttpInterface( TransactionManager transactionManager, Authenticator authenticator, long ifaceId, String uniqueName, Map settings ) { super( transactionManager, authenticator, ifaceId, uniqueName, settings, true, false ); this.uniqueName = uniqueName; this.port = Integer.parseInt( settings.get( "port" ) ); @@ -181,7 +181,7 @@ public void anyQuery( QueryLanguage language, final Context ctx ) { query, transactionManager, Catalog.defaultUserId, - Catalog.defaultDatabaseId, + Catalog.defaultNamespaceId, null ); ctx.json( results.toArray( new Result[0] ) ); diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java index 92999d8346..0f4fad98fd 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java @@ -122,8 +122,8 @@ public void truncate( Context context, LogicalTable catalogTable ) { // We get the physical schema / table name by checking existing column placements of the same logical table placed on this store. // This works because there is only one physical table for each logical table on JDBC stores. The reason for choosing this // approach rather than using the default physical schema / table names is that this approach allows truncating linked tables. 
- String physicalTableName = context.getSnapshot().getAllocSnapshot().getPartitionPlacementsByTableOnAdapter( getAdapterId(), catalogTable.id ).get( 0 ).physicalTableName; - String physicalSchemaName = context.getSnapshot().getAllocSnapshot().getPartitionPlacementsByTableOnAdapter( getAdapterId(), catalogTable.id ).get( 0 ).physicalSchemaName; + String physicalTableName = context.getSnapshot().alloc().getPartitionPlacementsByTableOnAdapter( getAdapterId(), catalogTable.id ).get( 0 ).physicalTableName; + String physicalSchemaName = context.getSnapshot().alloc().getPartitionPlacementsByTableOnAdapter( getAdapterId(), catalogTable.id ).get( 0 ).physicalSchemaName; StringBuilder builder = new StringBuilder(); builder.append( "TRUNCATE TABLE " ) .append( dialect.quoteIdentifier( physicalSchemaName ) ) diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/stores/AbstractJdbcStore.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/stores/AbstractJdbcStore.java index 31db524945..d86d7fb691 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/stores/AbstractJdbcStore.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/stores/AbstractJdbcStore.java @@ -19,6 +19,7 @@ import java.sql.SQLException; import java.util.ArrayList; +import java.util.Collections; import java.util.List; import java.util.Map; import lombok.SneakyThrows; @@ -125,7 +126,7 @@ public void createUdfs() { @Override - public PhysicalTable createPhysicalTable( Context context, LogicalTable logicalTable, AllocationTable allocationTable ) { + public List createPhysicalTable( Context context, LogicalTable logicalTable, AllocationTable allocationTable ) { String physicalTableName = getPhysicalTableName( logicalTable.id, allocationTable.id ); if ( log.isDebugEnabled() ) { @@ -137,7 +138,7 @@ public PhysicalTable createPhysicalTable( Context context, LogicalTable logicalT } executeUpdate( query, context ); - return this.currentJdbcSchema.createJdbcTable( IdBuilder.getInstance().getNewPhysicalId(), logicalTable, allocationTable ); + return Collections.singletonList( JdbcSchema.create( logicalTable.id, catalog.getSnapshot(), logicalTable.getNamespaceName(), connectionFactory, dialect, this ).createJdbcTable( IdBuilder.getInstance().getNewPhysicalId(), logicalTable, allocationTable ) ); //return new PhysicalTable( allocationTable, getDefaultPhysicalSchemaName(), physicalTableName, allocationTable.getColumns().values().stream().map( c -> getPhysicalColumnName( c.id ) ).collect( Collectors.toList() ) ); } @@ -151,7 +152,7 @@ protected StringBuilder buildCreateTableQuery( String schemaName, String physica .append( " ( " ); boolean first = true; for ( CatalogColumnPlacement placement : allocationTable.placements ) { - LogicalColumn logicalColumn = allocationTable.getColumns().get( placement.columnId ); + LogicalColumn logicalColumn = catalog.getSnapshot().rel().getColumn( placement.columnId ); if ( !first ) { builder.append( ", " ); } @@ -168,7 +169,7 @@ protected StringBuilder buildCreateTableQuery( String schemaName, String physica @Override public void addColumn( Context context, LogicalTable catalogTable, LogicalColumn logicalColumn ) { String physicalColumnName = getPhysicalColumnName( logicalColumn.id ); - for ( CatalogPartitionPlacement partitionPlacement : context.getSnapshot().getAllocSnapshot().getPartitionPlacementsByTableOnAdapter( this.getAdapterId(), catalogTable.id ) ) { + for ( 
CatalogPartitionPlacement partitionPlacement : context.getSnapshot().alloc().getPartitionPlacementsByTableOnAdapter( this.getAdapterId(), catalogTable.id ) ) { String physicalTableName = partitionPlacement.physicalTableName; String physicalSchemaName = partitionPlacement.physicalSchemaName; StringBuilder query = buildAddColumnQuery( physicalSchemaName, physicalTableName, physicalColumnName, catalogTable, logicalColumn ); @@ -267,13 +268,13 @@ public void updateColumnType( Context context, CatalogColumnPlacement columnPlac if ( !this.dialect.supportsNestedArrays() && logicalColumn.collectionsType != null ) { return; } - for ( CatalogPartitionPlacement partitionPlacement : context.getSnapshot().getAllocSnapshot().getPartitionPlacementsByTableOnAdapter( columnPlacement.adapterId, columnPlacement.tableId ) ) { + for ( CatalogPartitionPlacement partitionPlacement : context.getSnapshot().alloc().getPartitionPlacementsByTableOnAdapter( columnPlacement.adapterId, columnPlacement.tableId ) ) { StringBuilder builder = new StringBuilder(); builder.append( "ALTER TABLE " ) .append( dialect.quoteIdentifier( partitionPlacement.physicalSchemaName ) ) .append( "." ) .append( dialect.quoteIdentifier( partitionPlacement.physicalTableName ) ); - builder.append( " ALTER COLUMN " ).append( dialect.quoteIdentifier( columnPlacement.physicalColumnName ) ); + //builder.append( " ALTER COLUMN " ).append( dialect.quoteIdentifier( columnPlacement.physicalColumnName ) ); builder.append( " " ).append( getTypeString( logicalColumn.type ) ); if ( logicalColumn.length != null ) { builder.append( "(" ); @@ -297,7 +298,7 @@ public void dropTable( Context context, LogicalTable catalogTable, List pa String physicalSchemaName; List partitionPlacements = new ArrayList<>(); - partitionIds.forEach( id -> partitionPlacements.add( context.getSnapshot().getAllocSnapshot().getPartitionPlacement( getAdapterId(), id ) ) ); + partitionIds.forEach( id -> partitionPlacements.add( context.getSnapshot().alloc().getPartitionPlacement( getAdapterId(), id ) ) ); for ( CatalogPartitionPlacement partitionPlacement : partitionPlacements ) { catalog.getAllocRel( catalogTable.namespaceId ).deletePartitionPlacement( getAdapterId(), partitionPlacement.partitionId ); @@ -321,13 +322,13 @@ public void dropTable( Context context, LogicalTable catalogTable, List pa @Override public void dropColumn( Context context, CatalogColumnPlacement columnPlacement ) { - for ( CatalogPartitionPlacement partitionPlacement : context.getSnapshot().getAllocSnapshot().getPartitionPlacementsByTableOnAdapter( columnPlacement.adapterId, columnPlacement.tableId ) ) { + for ( CatalogPartitionPlacement partitionPlacement : context.getSnapshot().alloc().getPartitionPlacementsByTableOnAdapter( columnPlacement.adapterId, columnPlacement.tableId ) ) { StringBuilder builder = new StringBuilder(); builder.append( "ALTER TABLE " ) .append( dialect.quoteIdentifier( partitionPlacement.physicalSchemaName ) ) .append( "." ) .append( dialect.quoteIdentifier( partitionPlacement.physicalTableName ) ); - builder.append( " DROP " ).append( dialect.quoteIdentifier( columnPlacement.physicalColumnName ) ); + //builder.append( " DROP " ).append( dialect.quoteIdentifier( columnPlacement.physicalColumnName ) ); executeUpdate( builder, context ); } } @@ -338,7 +339,7 @@ public void truncate( Context context, LogicalTable catalogTable ) { // We get the physical schema / table name by checking existing column placements of the same logical table placed on this store. 
// This works because there is only one physical table for each logical table on JDBC stores. The reason for choosing this // approach rather than using the default physical schema / table names is that this approach allows truncating linked tables. - for ( CatalogPartitionPlacement partitionPlacement : catalog.getSnapshot().getAllocSnapshot().getPartitionPlacementsByTableOnAdapter( getAdapterId(), catalogTable.id ) ) { + for ( CatalogPartitionPlacement partitionPlacement : catalog.getSnapshot().alloc().getPartitionPlacementsByTableOnAdapter( getAdapterId(), catalogTable.id ) ) { String physicalTableName = partitionPlacement.physicalTableName; String physicalSchemaName = partitionPlacement.physicalSchemaName; StringBuilder builder = new StringBuilder(); diff --git a/plugins/mql-language/src/main/java/org/polypheny/db/languages/MqlProcessorImpl.java b/plugins/mql-language/src/main/java/org/polypheny/db/languages/MqlProcessorImpl.java index 8dde6f9b03..f86479e2ce 100644 --- a/plugins/mql-language/src/main/java/org/polypheny/db/languages/MqlProcessorImpl.java +++ b/plugins/mql-language/src/main/java/org/polypheny/db/languages/MqlProcessorImpl.java @@ -106,7 +106,7 @@ public boolean needsDdlGeneration( Node query, QueryParameters parameters ) { return Catalog.getInstance() .getSnapshot() .getNamespaces( Pattern.of( ((MqlQueryParameters) parameters).getDatabase() ) ) - .stream().flatMap( n -> Catalog.getInstance().getSnapshot().getDocSnapshot( n.id ).getCollections( null ).stream() ) + .stream().flatMap( n -> Catalog.getInstance().getSnapshot().doc().getCollections( n.id, null ).stream() ) .noneMatch( t -> t.name.equals( ((MqlCollectionStatement) query).getCollection() ) ); } return false; diff --git a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlAddPlacement.java b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlAddPlacement.java index 5b8b55ae01..67d7b84c6a 100644 --- a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlAddPlacement.java +++ b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlAddPlacement.java @@ -46,7 +46,7 @@ public void execute( Context context, Statement statement, QueryParameters param long namespaceId; namespaceId = context.getSnapshot().getNamespace( ((MqlQueryParameters) parameters).getDatabase() ).id; - List collections = context.getSnapshot().getDocSnapshot( namespaceId ).getCollections( new Pattern( getCollection() ) ); + List collections = context.getSnapshot().doc().getCollections( namespaceId, new Pattern( getCollection() ) ); if ( collections.size() != 1 ) { throw new RuntimeException( "Error while adding new collection placement, collection not found." ); @@ -57,7 +57,7 @@ public void execute( Context context, Statement statement, QueryParameters param .map( store -> (DataStore) adapterManager.getAdapter( store ) ) .collect( Collectors.toList() ); - if ( statement.getTransaction().getSnapshot().getAllocSnapshot().getCollectionPlacements( collections.get( 0 ).id ).stream().anyMatch( p -> dataStores.stream().map( Adapter::getAdapterId ).collect( Collectors.toList() ).contains( p ) ) ) { + if ( statement.getTransaction().getSnapshot().alloc().getCollectionPlacements( collections.get( 0 ).id ).stream().anyMatch( p -> dataStores.stream().map( Adapter::getAdapterId ).collect( Collectors.toList() ).contains( p ) ) ) { throw new RuntimeException( "Error while adding a new collection placement, placement already present." 
); } diff --git a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlDeletePlacement.java b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlDeletePlacement.java index 766cd9af28..09e2f8f34a 100644 --- a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlDeletePlacement.java +++ b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlDeletePlacement.java @@ -45,7 +45,7 @@ public void execute( Context context, Statement statement, QueryParameters param long namespaceId = context.getSnapshot().getNamespace( ((MqlQueryParameters) parameters).getDatabase() ).id; - List collections = context.getSnapshot().getDocSnapshot( namespaceId ).getCollections( new Pattern( getCollection() ) ); + List collections = context.getSnapshot().doc().getCollections( namespaceId, new Pattern( getCollection() ) ); if ( collections.size() != 1 ) { throw new RuntimeException( "Error while deleting collection placement, collection not found." ); @@ -56,7 +56,7 @@ public void execute( Context context, Statement statement, QueryParameters param .map( store -> (DataStore) adapterManager.getAdapter( store ) ) .collect( Collectors.toList() ); - if ( statement.getTransaction().getSnapshot().getAllocSnapshot().getCollectionPlacements( collections.get( 0 ).id ).stream().noneMatch( p -> dataStores.stream().map( Adapter::getAdapterId ).collect( Collectors.toList() ).contains( p ) ) ) { + if ( statement.getTransaction().getSnapshot().alloc().getCollectionPlacements( collections.get( 0 ).id ).stream().noneMatch( p -> dataStores.stream().map( Adapter::getAdapterId ).collect( Collectors.toList() ).contains( p ) ) ) { throw new RuntimeException( "Error while deleting collection placement, placement not present." ); } diff --git a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlDrop.java b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlDrop.java index 2e060dd44a..6d0b05d500 100644 --- a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlDrop.java +++ b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlDrop.java @@ -53,7 +53,7 @@ public void execute( Context context, Statement statement, QueryParameters param } LogicalNamespace namespace = context.getSnapshot().getNamespaces( new Pattern( database ) ).get( 0 ); - List collections = context.getSnapshot().getDocSnapshot( namespace.id ).getCollections( new Pattern( getCollection() ) ); + List collections = context.getSnapshot().doc().getCollections( namespace.id, new Pattern( getCollection() ) ); if ( collections.size() != 1 ) { // dropping a collection that does not exist is a no-op return; diff --git a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlRenameCollection.java b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlRenameCollection.java index fbbe7a495b..18108eba22 100644 --- a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlRenameCollection.java +++ b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlRenameCollection.java @@ -55,8 +55,7 @@ public void execute( Context context, Statement statement, QueryParameters param String database = ((MqlQueryParameters) parameters).getDatabase(); try { - LogicalNamespace schema = context.getSnapshot().getNamespace( database ); - List tables = context.getSnapshot().getRelSnapshot( schema.id ).getTables( , null ); + List tables = context.getSnapshot().rel().getTables( database, null ); if ( 
dropTarget ) { Optional newTable = tables.stream() diff --git a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql2alg/MqlToAlgConverter.java b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql2alg/MqlToAlgConverter.java index db139c1e65..af98bc0639 100644 --- a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql2alg/MqlToAlgConverter.java +++ b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql2alg/MqlToAlgConverter.java @@ -319,7 +319,7 @@ public AlgRoot convert( MqlCollectionStatement query ) { private CatalogEntity getEntity( MqlCollectionStatement query, String dbSchemaName ) { LogicalNamespace namespace = snapshot.getNamespace( dbSchemaName ); - return snapshot.getDocSnapshot( namespace.id ).getCollection( query.getCollection() ); + return snapshot.doc().getCollection( namespace.id, query.getCollection() ); /* if ( table == null || table.getEntity() == null ) { diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java index 54f6d06b80..73920840b5 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java @@ -52,21 +52,19 @@ import org.polypheny.db.catalog.entity.allocation.AllocationEntity; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.entity.physical.PhysicalEntity; -import org.polypheny.db.catalog.exceptions.GenericCatalogException; import org.polypheny.db.catalog.exceptions.UnknownAdapterException; -import org.polypheny.db.catalog.exceptions.UnknownColumnException; -import org.polypheny.db.catalog.exceptions.UnknownTableException; import org.polypheny.db.catalog.logical.DocumentCatalog; import org.polypheny.db.catalog.logical.GraphCatalog; import org.polypheny.db.catalog.logical.RelationalCatalog; import org.polypheny.db.catalog.logistic.Collation; -import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.logistic.ForeignKeyOption; import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.catalog.logistic.PlacementType; import org.polypheny.db.catalog.physical.PolyPhysicalCatalog; import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.catalog.snapshot.impl.SnapshotBuilder; +import org.polypheny.db.iface.QueryInterfaceManager; +import org.polypheny.db.iface.QueryInterfaceManager.QueryInterfaceType; import org.polypheny.db.transaction.Transaction; import org.polypheny.db.type.PolyType; @@ -123,10 +121,10 @@ public PolyCatalog() { @Override public void init() { - try { - insertDefaultData(); - } catch ( UnknownAdapterException e ) { - throw new RuntimeException( e ); + insertDefaultData(); + if ( snapshot.getQueryInterface( "avatica" ) == null ) { + QueryInterfaceType avatica = QueryInterfaceManager.getREGISTER().get( "AvaticaInterface" ); + addQueryInterface( "avatica", avatica.clazz.getName(), avatica.defaultSettings ); } } @@ -179,20 +177,15 @@ private void insertDefaultData() throws UnknownAdapterException { adapter.createNewSchema( getSnapshot(), "public", namespaceId ); // init schema - getLogicalRel( namespaceId ).addTable( "depts", EntityType.SOURCE, false ); - getLogicalRel( namespaceId ).addTable( "emps", EntityType.SOURCE, false ); - getLogicalRel( namespaceId ).addTable( "emp", EntityType.SOURCE, false ); - getLogicalRel( namespaceId ).addTable( "work", EntityType.SOURCE, false 
); + // getLogicalRel( namespaceId ).addTable( "depts", EntityType.SOURCE, false ); + // getLogicalRel( namespaceId ).addTable( "emps", EntityType.SOURCE, false ); + // getLogicalRel( namespaceId ).addTable( "emp", EntityType.SOURCE, false ); + // getLogicalRel( namespaceId ).addTable( "work", EntityType.SOURCE, false ); - updateSnapshot(); - - try { - CatalogAdapter csv = getSnapshot().getAdapter( "hr" ); - addDefaultCsvColumns( csv, namespaceId ); - } catch ( UnknownTableException | GenericCatalogException | UnknownColumnException e ) { - throw new RuntimeException( e ); - } + // updateSnapshot(); + // CatalogAdapter csv = getSnapshot().getAdapter( "hr" ); + // addDefaultCsvColumns( csv, namespaceId ); } @@ -204,19 +197,19 @@ private void insertDefaultData() throws UnknownAdapterException { /** * Initiates default columns for csv files */ - private void addDefaultCsvColumns( CatalogAdapter csv, long namespaceId ) throws UnknownTableException, UnknownColumnException, GenericCatalogException { - LogicalTable depts = getSnapshot().getRelSnapshot( namespaceId ).getTable( "depts" ); + private void addDefaultCsvColumns( CatalogAdapter csv, long namespaceId ) { + LogicalTable depts = getSnapshot().rel().getTable( namespaceId, "depts" ); addDefaultCsvColumn( csv, depts, "deptno", PolyType.INTEGER, null, 1, null ); addDefaultCsvColumn( csv, depts, "name", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 2, 20 ); - LogicalTable emps = getSnapshot().getRelSnapshot( namespaceId ).getTable( "emps" ); + LogicalTable emps = getSnapshot().rel().getTable( namespaceId, "emps" ); addDefaultCsvColumn( csv, emps, "empid", PolyType.INTEGER, null, 1, null ); addDefaultCsvColumn( csv, emps, "deptno", PolyType.INTEGER, null, 2, null ); addDefaultCsvColumn( csv, emps, "name", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 3, 20 ); addDefaultCsvColumn( csv, emps, "salary", PolyType.INTEGER, null, 4, null ); addDefaultCsvColumn( csv, emps, "commission", PolyType.INTEGER, null, 5, null ); - LogicalTable emp = getSnapshot().getRelSnapshot( namespaceId ).getTable( "emp" ); + LogicalTable emp = getSnapshot().rel().getTable( namespaceId, "emp" ); addDefaultCsvColumn( csv, emp, "employeeno", PolyType.INTEGER, null, 1, null ); addDefaultCsvColumn( csv, emp, "age", PolyType.INTEGER, null, 2, null ); addDefaultCsvColumn( csv, emp, "gender", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 3, 20 ); @@ -228,7 +221,7 @@ private void addDefaultCsvColumns( CatalogAdapter csv, long namespaceId ) throws addDefaultCsvColumn( csv, emp, "workingyears", PolyType.INTEGER, null, 9, null ); addDefaultCsvColumn( csv, emp, "yearsatcompany", PolyType.INTEGER, null, 10, null ); - LogicalTable work = getSnapshot().getRelSnapshot( namespaceId ).getTable( "work" ); + LogicalTable work = getSnapshot().rel().getTable( namespaceId, "work" ); addDefaultCsvColumn( csv, work, "employeeno", PolyType.INTEGER, null, 1, null ); addDefaultCsvColumn( csv, work, "educationfield", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 2, 20 ); addDefaultCsvColumn( csv, work, "jobinvolvement", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 3, 20 ); @@ -242,25 +235,25 @@ private void addDefaultCsvColumns( CatalogAdapter csv, long namespaceId ) throws updateSnapshot(); // set all needed primary keys - getLogicalRel( namespaceId ).addPrimaryKey( depts.id, Collections.singletonList( getSnapshot().getRelSnapshot( namespaceId ).getColumn( depts.id, "deptno" ).id ) ); - getLogicalRel( namespaceId ).addPrimaryKey( emps.id, Collections.singletonList( getSnapshot().getRelSnapshot( 
namespaceId ).getColumn( emps.id, "empid" ).id ) ); - getLogicalRel( namespaceId ).addPrimaryKey( emp.id, Collections.singletonList( getSnapshot().getRelSnapshot( namespaceId ).getColumn( emp.id, "employeeno" ).id ) ); - getLogicalRel( namespaceId ).addPrimaryKey( work.id, Collections.singletonList( getSnapshot().getRelSnapshot( namespaceId ).getColumn( work.id, "employeeno" ).id ) ); + getLogicalRel( namespaceId ).addPrimaryKey( depts.id, Collections.singletonList( getSnapshot().rel().getColumn( depts.id, "deptno" ).id ) ); + getLogicalRel( namespaceId ).addPrimaryKey( emps.id, Collections.singletonList( getSnapshot().rel().getColumn( emps.id, "empid" ).id ) ); + getLogicalRel( namespaceId ).addPrimaryKey( emp.id, Collections.singletonList( getSnapshot().rel().getColumn( emp.id, "employeeno" ).id ) ); + getLogicalRel( namespaceId ).addPrimaryKey( work.id, Collections.singletonList( getSnapshot().rel().getColumn( work.id, "employeeno" ).id ) ); // set foreign keys getLogicalRel( namespaceId ).addForeignKey( emps.id, - ImmutableList.of( getSnapshot().getRelSnapshot( namespaceId ).getColumn( emps.id, "deptno" ).id ), + ImmutableList.of( getSnapshot().rel().getColumn( emps.id, "deptno" ).id ), depts.id, - ImmutableList.of( getSnapshot().getRelSnapshot( namespaceId ).getColumn( depts.id, "deptno" ).id ), + ImmutableList.of( getSnapshot().rel().getColumn( depts.id, "deptno" ).id ), "fk_emps_depts", ForeignKeyOption.NONE, ForeignKeyOption.NONE ); getLogicalRel( namespaceId ).addForeignKey( work.id, - ImmutableList.of( getSnapshot().getRelSnapshot( namespaceId ).getColumn( work.id, "employeeno" ).id ), + ImmutableList.of( getSnapshot().rel().getColumn( work.id, "employeeno" ).id ), emp.id, - ImmutableList.of( getSnapshot().getRelSnapshot( namespaceId ).getColumn( emp.id, "employeeno" ).id ), + ImmutableList.of( getSnapshot().rel().getColumn( emp.id, "employeeno" ).id ), "fk_work_emp", ForeignKeyOption.NONE, ForeignKeyOption.NONE ); @@ -268,7 +261,7 @@ private void addDefaultCsvColumns( CatalogAdapter csv, long namespaceId ) throws private void addDefaultCsvColumn( CatalogAdapter csv, LogicalTable table, String name, PolyType type, Collation collation, int position, Integer length ) { - if ( !getSnapshot().getRelSnapshot( table.namespaceId ).checkIfExistsColumn( table.id, name ) ) { + if ( !getSnapshot().rel().checkIfExistsColumn( table.id, name ) ) { long colId = getLogicalRel( table.namespaceId ).addColumn( name, table.id, position, type, null, length, null, null, null, false, collation ); String filename = table.name + ".csv"; if ( table.name.equals( "emp" ) || table.name.equals( "work" ) ) { @@ -276,14 +269,15 @@ private void addDefaultCsvColumn( CatalogAdapter csv, LogicalTable table, String } updateSnapshot(); - long allocId = 0; - if ( !getSnapshot().getAllocSnapshot().adapterHasPlacement( csv.id, table.id ) ) { - allocId = getAllocRel( table.namespaceId ).addDataPlacement( csv.id, table.id ); + AllocationEntity alloc; + if ( !getSnapshot().alloc().adapterHasPlacement( csv.id, table.id ) ) { + alloc = getAllocRel( table.namespaceId ).createAlloctionTable( csv.id, table.id ); } else { - allocId = getSnapshot().getAllocSnapshot().getAllocation( csv.id, table.id ).id; + alloc = getSnapshot().alloc().getAllocation( csv.id, table.id ); } - getAllocRel( table.namespaceId ).addColumnPlacement( allocId, colId, PlacementType.AUTOMATIC, filename, table.name, name, position ); + getAllocRel( table.namespaceId ).addColumnPlacement( alloc.id, colId, PlacementType.AUTOMATIC, position ); + //getAllocRel( 
@@ -276,14 +269,15 @@ private void addDefaultCsvColumn( CatalogAdapter csv, LogicalTable table, String
            }
            updateSnapshot();
 
-            long allocId = 0;
-            if ( !getSnapshot().getAllocSnapshot().adapterHasPlacement( csv.id, table.id ) ) {
-                allocId = getAllocRel( table.namespaceId ).addDataPlacement( csv.id, table.id );
+            AllocationEntity alloc;
+            if ( !getSnapshot().alloc().adapterHasPlacement( csv.id, table.id ) ) {
+                alloc = getAllocRel( table.namespaceId ).createAlloctionTable( csv.id, table.id );
            } else {
-                allocId = getSnapshot().getAllocSnapshot().getAllocation( csv.id, table.id ).id;
+                alloc = getSnapshot().alloc().getAllocation( csv.id, table.id );
            }
 
-            getAllocRel( table.namespaceId ).addColumnPlacement( allocId, colId, PlacementType.AUTOMATIC, filename, table.name, name, position );
+            getAllocRel( table.namespaceId ).addColumnPlacement( alloc.id, colId, PlacementType.AUTOMATIC, position );
+            //getAllocRel( table.namespaceId ).addColumnPlacement( alloc.id, colId, PlacementType.AUTOMATIC, filename, table.name, name, position );
            //getAllocRel( table.namespaceId ).updateColumnPlacementPhysicalPosition( allocId, colId, position );
 
            updateSnapshot();
@@ -295,16 +289,18 @@ private void addDefaultCsvColumn( CatalogAdapter csv, LogicalTable table, String
 
    private void addDefaultColumn( CatalogAdapter adapter, LogicalTable table, String name, PolyType type, Collation collation, int position, Integer length ) {
-        if ( !getSnapshot().getRelSnapshot( table.namespaceId ).checkIfExistsColumn( table.id, name ) ) {
+        if ( !getSnapshot().rel().checkIfExistsColumn( table.id, name ) ) {
            long colId = getLogicalRel( table.namespaceId ).addColumn( name, table.id, position, type, null, length, null, null, null, false, collation );
-            AllocationEntity entity = getSnapshot().getAllocSnapshot().getAllocation( adapter.id, table.id );
-            getAllocRel( table.namespaceId ).addColumnPlacement( entity.id, colId, PlacementType.AUTOMATIC, "col" + colId, table.name, name, position );
+            AllocationEntity entity = getSnapshot().alloc().getAllocation( adapter.id, table.id );
+            getAllocRel( table.namespaceId ).addColumnPlacement( entity.id, colId, PlacementType.AUTOMATIC, position );
+            //getAllocRel( table.namespaceId ).addColumnPlacement( entity.id, colId, PlacementType.AUTOMATIC, "col" + colId, table.name, name, position );
            getAllocRel( table.namespaceId ).updateColumnPlacementPhysicalPosition( adapter.id, colId, position );
        }
    }
 
-    private void updateSnapshot() {
+    @Override
+    public void updateSnapshot() {
        // reset physical catalogs
        Set keys = this.physicalCatalogs.keySet();
        keys.forEach( k -> this.physicalCatalogs.replace( k, new PolyPhysicalCatalog() ) );
@@ -313,6 +309,7 @@ private void updateSnapshot() {
        this.allocationCatalogs.forEach( ( k, v ) -> {
            if ( v.getNamespace().namespaceType == NamespaceType.RELATIONAL ) {
                ((AllocationRelationalCatalog) v).getTables().forEach( ( k2, v2 ) -> {
+                    AdapterManager.getInstance().getAdapter( v2.adapterId ).createNewSchema( getSnapshot(), v2.name, v2.namespaceId );
                    LogicalTable table = getSnapshot().getLogicalEntity( v2.logicalId ).unwrap( LogicalTable.class );
                    List physicals = AdapterManager.getInstance().getAdapter( v2.adapterId ).createAdapterTable( idBuilder, table, v2 );
                    getPhysical( table.namespaceId ).addEntities( physicals );
@@ -511,6 +508,7 @@ public long addQueryInterface( String uniqueName, String clazz, Map physicals ) {
    public void addEntities( List physicals ) {
        physicals.forEach( p -> this.physicals.put( p.id, p ) );
    }
 
+    @Override
+    public void deleteEntity( long id ) {
+        physicals.remove( id );
+    }
+
 
 }
diff --git a/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/RequestParser.java b/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/RequestParser.java
index 4acfed046d..3280f24da2 100644
--- a/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/RequestParser.java
+++ b/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/RequestParser.java
@@ -260,8 +260,7 @@ LogicalTable parseCatalogTableName( String tableName ) throws ParserException {
            throw new ParserException( ParserErrorCode.TABLE_LIST_MALFORMED_TABLE, tableName );
        }
-        LogicalNamespace namespace = snapshop.getNamespace( tableElements[0] );
-        LogicalTable table = snapshop.getRelSnapshot( namespace.id ).getTable( tableElements[1] );
+        LogicalTable table = snapshop.rel().getTable( tableElements[0], tableElements[1] );
        if ( log.isDebugEnabled() ) {
            log.debug( "Finished parsing
table \"{}\".", tableName ); } @@ -359,7 +358,7 @@ List generateRequestColumnsWithProject( String projectionString, Set notYetAdded = new HashSet<>( validColumns ); notYetAdded.removeAll( projectedColumns ); for ( long columnId : notYetAdded ) { - LogicalColumn column = snapshop.getNamespaces( null ).stream().map( n -> this.snapshop.getRelSnapshot( n.id ).getColumn( columnId ) ).filter( Objects::nonNull ).findFirst().orElse( null ); + LogicalColumn column = snapshop.getNamespaces( null ).stream().map( n -> this.snapshop.rel().getColumn( columnId ) ).filter( Objects::nonNull ).findFirst().orElse( null ); int calculatedPosition = tableOffsets.get( column.tableId ) + column.position - 1; RequestColumn requestColumn = new RequestColumn( column, calculatedPosition, calculatedPosition, null, null, false ); columns.add( requestColumn ); @@ -418,7 +417,7 @@ private LogicalColumn getCatalogColumnFromString( String name ) throws ParserExc LogicalNamespace namespace = snapshop.getNamespace( splitString[0] ); - return snapshop.getRelSnapshot( namespace.id ).getColumn( splitString[1], splitString[2] ); + return snapshop.rel().getColumn( splitString[1], splitString[2] ); } @@ -746,7 +745,7 @@ private List> parseInsertStatementValues( Map rowVal public Map generateNameMapping( List tables ) { Map nameMapping = new HashMap<>(); for ( LogicalTable table : tables ) { - for ( LogicalColumn column : snapshop.getRelSnapshot( table.namespaceId ).getColumns( table.id ) ) { + for ( LogicalColumn column : snapshop.rel().getColumns( table.id ) ) { nameMapping.put( column.getSchemaName() + "." + column.getTableName() + "." + column.name, column ); } } diff --git a/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/Rest.java b/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/Rest.java index 221921c1ef..927150bdf2 100644 --- a/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/Rest.java +++ b/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/Rest.java @@ -156,9 +156,8 @@ String processPatchResource( final ResourcePatchRequest resourcePatchRequest, fi RexBuilder rexBuilder = new RexBuilder( typeFactory ); Snapshot snapshot = statement.getTransaction().getSnapshot(); - LogicalNamespace namespace = snapshot.getNamespace( resourcePatchRequest.tables.get( 0 ).getNamespaceName() ); - LogicalTable table = null; - table = snapshot.getRelSnapshot( namespace.id ).getTable( resourcePatchRequest.tables.get( 0 ).name ); + + LogicalTable table = snapshot.rel().getTable( resourcePatchRequest.tables.get( 0 ).id ); // Table Scans algBuilder = this.tableScans( algBuilder, rexBuilder, resourcePatchRequest.tables ); @@ -260,10 +259,7 @@ String processDeleteResource( final ResourceDeleteRequest resourceDeleteRequest, private static LogicalTable getLogicalTable( Snapshot snapshot, String namespaceName, String tableName ) { - LogicalNamespace namespace = snapshot.getNamespace( namespaceName ); - LogicalTable table; - table = snapshot.getRelSnapshot( namespace.id ).getTable( tableName ); - return table; + return snapshot.rel().getTable( namespaceName, tableName ); } diff --git a/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/RestInterfacePlugin.java b/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/RestInterfacePlugin.java index 5a0ea25962..5ad2ab7942 100644 --- a/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/RestInterfacePlugin.java +++ b/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/RestInterfacePlugin.java @@ -130,7 +130,7 @@ public 
static class HttpRestServer extends QueryInterface { private Javalin restServer; - public HttpRestServer( TransactionManager transactionManager, Authenticator authenticator, int ifaceId, String uniqueName, Map settings ) { + public HttpRestServer( TransactionManager transactionManager, Authenticator authenticator, long ifaceId, String uniqueName, Map settings ) { super( transactionManager, authenticator, ifaceId, uniqueName, settings, true, false ); this.requestParser = new RequestParser( transactionManager, authenticator, "pa", "APP" ); this.uniqueName = uniqueName; @@ -167,7 +167,7 @@ public String toJsonString( @NotNull Object obj ) { config.enableCorsForAllOrigins(); } ).start( port ); - Rest rest = new Rest( transactionManager, Catalog.defaultUserId, Catalog.defaultDatabaseId ); + Rest rest = new Rest( transactionManager, Catalog.defaultUserId, Catalog.defaultNamespaceId ); restRoutes( restServer, rest ); log.info( "{} started and is listening on port {}.", INTERFACE_NAME, port ); diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/SqlProcessorImpl.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/SqlProcessorImpl.java index 12c24707e0..d5ccb81f54 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/SqlProcessorImpl.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/SqlProcessorImpl.java @@ -358,19 +358,19 @@ private void addDefaultValues( Transaction transaction, SqlInsert insert ) { private LogicalTable getCatalogTable( Transaction transaction, SqlIdentifier tableName ) { LogicalTable catalogTable; - long schemaId; + long namespaceId; String tableOldName; if ( tableName.names.size() == 3 ) { // DatabaseName.SchemaName.TableName - schemaId = snapshot.getNamespace( tableName.names.get( 1 ) ).id; + namespaceId = snapshot.getNamespace( tableName.names.get( 1 ) ).id; tableOldName = tableName.names.get( 2 ); } else if ( tableName.names.size() == 2 ) { // SchemaName.TableName - schemaId = snapshot.getNamespace( tableName.names.get( 0 ) ).id; + namespaceId = snapshot.getNamespace( tableName.names.get( 0 ) ).id; tableOldName = tableName.names.get( 1 ); } else { // TableName - schemaId = snapshot.getNamespace( transaction.getDefaultSchema().name ).id; + namespaceId = snapshot.getNamespace( transaction.getDefaultSchema().name ).id; tableOldName = tableName.names.get( 0 ); } - catalogTable = snapshot.getRelSnapshot( schemaId ).getTable( tableOldName ); + catalogTable = snapshot.rel().getTable( namespaceId, tableOldName ); return catalogTable; } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlDdl.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlDdl.java index 0ceab050bc..36458188ac 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlDdl.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlDdl.java @@ -65,7 +65,6 @@ public Operator getOperator() { protected LogicalTable getCatalogTable( Context context, SqlIdentifier tableName ) { - LogicalTable catalogTable; long schemaId; String tableOldName; if ( tableName.names.size() == 3 ) { // DatabaseName.SchemaName.TableName @@ -78,15 +77,12 @@ protected LogicalTable getCatalogTable( Context context, SqlIdentifier tableName schemaId = snapshot.getNamespace( context.getDefaultSchemaName() ).id; tableOldName = tableName.names.get( 0 ); } - catalogTable = snapshot.getRelSnapshot( schemaId ).getTable( tableOldName ); - return catalogTable; + return snapshot.rel().getTable( schemaId, 
tableOldName );
    }
 
 
    protected LogicalColumn getCatalogColumn( long namespaceId, long tableId, SqlIdentifier columnName ) {
-        LogicalColumn logicalColumn;
-        logicalColumn = snapshot.getRelSnapshot( namespaceId ).getColumn( tableId, columnName.getSimple() );
-        return logicalColumn;
+        return snapshot.rel().getColumn( tableId, columnName.getSimple() );
    }
 
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlUtil.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlUtil.java
index adf7f2a570..75751d9ebf 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlUtil.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlUtil.java
@@ -607,8 +607,7 @@ public static SqlLiteral symbol( Enum o, ParserPos parserPos ) {
 
    public static AlgDataType getNamedType( Identifier node, Snapshot snapshot ) {
-        LogicalNamespace namespace = snapshot.getNamespace( node.getNames().get( 0 ) );
-        LogicalTable table = snapshot.getRelSnapshot( namespace.id ).getTable( node.getNames().get( 1 ) );
+        LogicalTable table = snapshot.rel().getTable( node.getNames().get( 0 ), node.getNames().get( 1 ) );
        if ( table != null ) {
            return table.getRowType();
        } else {
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddColumn.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddColumn.java
index 5ba7c4b4e9..bac88d965e 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddColumn.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddColumn.java
@@ -132,7 +132,7 @@ public void execute( Context context, Statement statement, QueryParameters param
        }
 
        // Make sure that all adapters are of type store (and not source)
-        for ( CatalogDataPlacement placement : statement.getTransaction().getSnapshot().getAllocSnapshot().getDataPlacements( catalogTable.id ) ) {
+        for ( CatalogDataPlacement placement : statement.getTransaction().getSnapshot().alloc().getDataPlacements( catalogTable.id ) ) {
            getDataStoreInstance( placement.adapterId );
        }
 
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddPartitions.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddPartitions.java
index 0e60623d9b..342513afa8 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddPartitions.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddPartitions.java
@@ -153,7 +153,7 @@ public void execute( Context context, Statement statement, QueryParameters param
        try {
            // Check if table is already partitioned
-            if ( statement.getTransaction().getSnapshot().getAllocSnapshot().getPartitionProperty( catalogTable.id ).partitionType == PartitionType.NONE ) {
+            if ( statement.getTransaction().getSnapshot().alloc().getPartitionProperty( catalogTable.id ).partitionType == PartitionType.NONE ) {
                DdlManager.getInstance().addPartitioning(
                        PartitionInformation.fromNodeLists(
                                catalogTable,
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddPlacement.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddPlacement.java
index 06682fe098..e6f11ecc91 100644
---
a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddPlacement.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddPlacement.java @@ -125,7 +125,7 @@ public void execute( Context context, Statement statement, QueryParameters param } // You can't partition placements if the table is not partitioned - if ( !statement.getTransaction().getSnapshot().getAllocSnapshot().getPartitionProperty( catalogTable.id ).isPartitioned && (!partitionGroupsList.isEmpty() || !partitionGroupNamesList.isEmpty()) ) { + if ( !statement.getTransaction().getSnapshot().alloc().getPartitionProperty( catalogTable.id ).isPartitioned && (!partitionGroupsList.isEmpty() || !partitionGroupNamesList.isEmpty()) ) { throw new RuntimeException( "Partition Placement is not allowed for unpartitioned table '" + catalogTable.name + "'" ); } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableMergePartitions.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableMergePartitions.java index 3d68e52650..286556fbf7 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableMergePartitions.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableMergePartitions.java @@ -88,7 +88,7 @@ public void execute( Context context, Statement statement, QueryParameters param } // Check if table is even partitioned - if ( statement.getTransaction().getSnapshot().getAllocSnapshot().getPartitionProperty( catalogTable.id ).partitionType != PartitionType.NONE ) { + if ( statement.getTransaction().getSnapshot().alloc().getPartitionProperty( catalogTable.id ).partitionType != PartitionType.NONE ) { if ( log.isDebugEnabled() ) { log.debug( "Merging partitions for table: {} with id {} on schema: {}", catalogTable.name, catalogTable.id, statement.getTransaction().getSnapshot().getNamespace( catalogTable.namespaceId ).name ); diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyPartitions.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyPartitions.java index 7f41439c9d..16bdb3ec3a 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyPartitions.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyPartitions.java @@ -105,7 +105,7 @@ public void execute( Context context, Statement statement, QueryParameters param throw new RuntimeException( "Not possible to use ALTER TABLE because " + catalogTable.name + " is not a table." 
); } - if ( !statement.getTransaction().getSnapshot().getAllocSnapshot().getPartitionProperty( catalogTable.id ).isPartitioned ) { + if ( !statement.getTransaction().getSnapshot().alloc().getPartitionProperty( catalogTable.id ).isPartitioned ) { throw new RuntimeException( "Table '" + catalogTable.name + "' is not partitioned" ); } @@ -123,7 +123,7 @@ public void execute( Context context, Statement statement, QueryParameters param } long storeId = storeInstance.getAdapterId(); // Check whether this placement already exists - if ( !statement.getTransaction().getSnapshot().getAllocSnapshot().getDataPlacements( catalogTable.id ).stream().map( p -> p.adapterId ).collect( Collectors.toList() ).contains( storeId ) ) { + if ( !statement.getTransaction().getSnapshot().alloc().getDataPlacements( catalogTable.id ).stream().map( p -> p.adapterId ).collect( Collectors.toList() ).contains( storeId ) ) { throw CoreUtil.newContextException( storeName.getPos(), RESOURCE.placementDoesNotExist( storeName.getSimple(), catalogTable.name ) ); @@ -137,16 +137,16 @@ public void execute( Context context, Statement statement, QueryParameters param for ( int partitionId : partitionGroupList ) { // Check if specified partition index is even part of table and if so get corresponding uniquePartId try { - tempPartitionList.add( statement.getTransaction().getSnapshot().getAllocSnapshot().getPartitionProperty( catalogTable.id ).partitionGroupIds.get( partitionId ) ); + tempPartitionList.add( statement.getTransaction().getSnapshot().alloc().getPartitionProperty( catalogTable.id ).partitionGroupIds.get( partitionId ) ); } catch ( IndexOutOfBoundsException e ) { throw new RuntimeException( "Specified Partition-Index: '" + partitionId + "' is not part of table '" - + catalogTable.name + "', has only " + statement.getTransaction().getSnapshot().getAllocSnapshot().getPartitionProperty( catalogTable.id ).numPartitionGroups + " partitions" ); + + catalogTable.name + "', has only " + statement.getTransaction().getSnapshot().alloc().getPartitionProperty( catalogTable.id ).numPartitionGroups + " partitions" ); } } } // If name partitions are specified else if ( !partitionGroupNamesList.isEmpty() && partitionGroupList.isEmpty() ) { - List catalogPartitionGroups = catalog.getSnapshot().getAllocSnapshot().getPartitionGroups( tableId ); + List catalogPartitionGroups = catalog.getSnapshot().alloc().getPartitionGroups( tableId ); for ( String partitionName : partitionGroupNamesList.stream().map( Object::toString ) .collect( Collectors.toList() ) ) { boolean isPartOfTable = false; @@ -159,14 +159,14 @@ else if ( !partitionGroupNamesList.isEmpty() && partitionGroupList.isEmpty() ) { } if ( !isPartOfTable ) { throw new RuntimeException( "Specified Partition-Name: '" + partitionName + "' is not part of table '" - + catalogTable.name + "', has only " + catalog.getSnapshot().getAllocSnapshot().getPartitionGroupNames( tableId ) + " partitions" ); + + catalogTable.name + "', has only " + catalog.getSnapshot().alloc().getPartitionGroupNames( tableId ) + " partitions" ); } } } // Check if in-memory dataPartitionPlacement Map should even be changed and therefore start costly partitioning // Avoid unnecessary partitioning when the placement is already partitioned in the same way it has been specified - if ( tempPartitionList.equals( catalog.getSnapshot().getAllocSnapshot().getPartitionGroupsOnDataPlacement( storeId, tableId ) ) ) { + if ( tempPartitionList.equals( catalog.getSnapshot().alloc().getPartitionGroupsOnDataPlacement( storeId, tableId ) ) 
) { log.info( "The data placement for table: '{}' on store: '{}' already contains all specified partitions of statement: {}", catalogTable.name, storeName, partitionGroupList ); return; diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyPlacement.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyPlacement.java index 2a7666467a..9e66f558ab 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyPlacement.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyPlacement.java @@ -123,7 +123,7 @@ public void execute( Context context, Statement statement, QueryParameters param } // You can't partition placements if the table is not partitioned - if ( !statement.getTransaction().getSnapshot().getAllocSnapshot().getPartitionProperty( catalogTable.id ).isPartitioned && (!partitionGroupList.isEmpty() || !partitionGroupNamesList.isEmpty()) ) { + if ( !statement.getTransaction().getSnapshot().alloc().getPartitionProperty( catalogTable.id ).isPartitioned && (!partitionGroupList.isEmpty() || !partitionGroupNamesList.isEmpty()) ) { throw new RuntimeException( "Partition Placement is not allowed for unpartitioned table '" + catalogTable.name + "'" ); } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/EmptyScope.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/EmptyScope.java index a7826fdd04..90efe3de80 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/EmptyScope.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/EmptyScope.java @@ -22,8 +22,10 @@ import java.util.Collection; import java.util.List; import java.util.Map; +import org.apache.commons.lang3.NotImplementedException; import org.polypheny.db.algebra.constant.Monotonicity; import org.polypheny.db.algebra.type.AlgDataType; +import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalTable; @@ -82,8 +84,7 @@ public void resolve( List names, NameMatcher nameMatcher, boolean deep, @Override public SqlValidatorNamespace getTableNamespace( List names ) { - LogicalNamespace namespace = validator.snapshot.getNamespace( names.get( 0 ) ); - CatalogEntity table = validator.snapshot.getRelSnapshot( namespace.id ).getTable( names.get( 1 ) ); + CatalogEntity table = validator.snapshot.rel().getTable( names.get( 0 ), names.get( 1 ) ); return table != null ? new EntityNamespace( validator, table ) : null; @@ -95,10 +96,17 @@ public void resolveTable( List names, NameMatcher nameMatcher, Path path final List resolves = ((ResolvedImpl) resolved).resolves; // Look in the default schema, then default catalog, then root schema. 
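The EmptyScope change that follows resolves both qualified and unqualified table names, falling back to the default namespace when only one name part is given and giving up on longer paths. A compact sketch of that fallback, with a plain map standing in for the catalog (assumed types, not the validator's real ones):

    import java.util.List;
    import java.util.Map;

    // Stand-in resolution: the real code returns a LogicalTable from a Snapshot.
    public class NameResolutionSketch {

        static String resolveTable( Map<String, String> catalog, List<String> names, String defaultNamespace ) {
            if ( names.size() == 2 ) {
                return catalog.get( names.get( 0 ) + "." + names.get( 1 ) ); // namespace.table
            } else if ( names.size() == 1 ) {
                return catalog.get( defaultNamespace + "." + names.get( 0 ) ); // unqualified: use the default namespace
            }
            throw new UnsupportedOperationException( "longer paths are not resolved here" );
        }

        public static void main( String[] args ) {
            Map<String, String> catalog = Map.of( "public.emps", "emps@public" );
            System.out.println( resolveTable( catalog, List.of( "public", "emps" ), "public" ) );
            System.out.println( resolveTable( catalog, List.of( "emps" ), "public" ) );
        }
    }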
- LogicalNamespace namespace = validator.snapshot.getNamespace( names.get( 0 ) ); - LogicalTable table = validator.snapshot.getRelSnapshot( namespace.id ).getTable( names.get( 1 ) ); + LogicalTable table; + if ( names.size() == 2 ) { + table = validator.snapshot.rel().getTable( names.get( 0 ), names.get( 1 ) ); + } else if ( names.size() == 1 ) { + table = validator.snapshot.rel().getTable( Catalog.defaultNamespaceName, names.get( 0 ) ); + } else { + throw new NotImplementedException(); + } + if ( table != null ) { - resolves.add( new Resolve( validator.snapshot.getRelSnapshot( namespace.id ).getTable( names.get( 1 ) ) ) ); + resolves.add( new Resolve( table ) ); } } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/IdentifierNamespace.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/IdentifierNamespace.java index 37d4bf341f..bfc58e0d91 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/IdentifierNamespace.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/IdentifierNamespace.java @@ -151,8 +151,7 @@ private SqlValidatorNamespace resolveImpl( SqlIdentifier id ) { } } List ns = id.names; - LogicalNamespace namespace = Catalog.getInstance().getSnapshot().getNamespace( ns.get( 0 ) ); - return new EntityNamespace( validator, Catalog.getInstance().getSnapshot().getRelSnapshot( namespace.id ).getTable( ns.get( 1 ) ) ); + return new EntityNamespace( validator, Catalog.getInstance().getSnapshot().rel().getTable( ns.get( 0 ), ns.get( 1 ) ) ); } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorImpl.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorImpl.java index 311a319a84..aeb0cceda0 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorImpl.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorImpl.java @@ -3391,7 +3391,7 @@ private boolean isRolledUpColumn( SqlIdentifier identifier, SqlValidatorScope sc private @Nullable CatalogEntity findTable( String tableName, boolean caseSensitive ) { - return snapshot.getNamespaces( null ).stream().map( n -> snapshot.getRelSnapshot( n.id ).getTable( tableName ) ).filter( Objects::isNull ).findFirst().orElse( null ); + return snapshot.rel().getTable( null, tableName ); } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/WithScope.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/WithScope.java index c712b74d53..b6df116ba0 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/WithScope.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/WithScope.java @@ -71,7 +71,7 @@ public void resolveTable( List names, NameMatcher nameMatcher, Path path //final SqlValidatorNamespace ns = validator.getSqlNamespace( withItem ); //final Step path2 = path.plus( ns.getRowType(), 0, names.get( 0 ), StructKind.FULLY_QUALIFIED ); LogicalNamespace namespace = validator.snapshot.getNamespace( names.get( 0 ) ); - CatalogEntity entity = validator.snapshot.getRelSnapshot( namespace.id ).getTable( names.get( 0 ) ); + CatalogEntity entity = validator.snapshot.rel().getTable( names.get( 0 ), names.get( 1 ) ); resolved.found( entity ); return; } @@ -84,8 +84,7 @@ public void resolve( List names, NameMatcher nameMatcher, boolean deep, if ( 
names.size() == 1 && names.equals( withItem.name.names ) ) { final SqlValidatorNamespace ns = validator.getSqlNamespace( withItem ); final Step path = Path.EMPTY.plus( ns.getRowType(), 0, names.get( 0 ), StructKind.FULLY_QUALIFIED ); - LogicalNamespace namespace = validator.snapshot.getNamespace( names.get( 0 ) ); - CatalogEntity entity = validator.snapshot.getRelSnapshot( namespace.id ).getTable( names.get( 0 ) ); + CatalogEntity entity = validator.snapshot.rel().getTable( names.get( 0 ), names.get( 1 ) ); resolved.found( entity ); return; } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/web/SchemaToJsonMapper.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/web/SchemaToJsonMapper.java index 3c662ba516..21154faed8 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/web/SchemaToJsonMapper.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/web/SchemaToJsonMapper.java @@ -41,7 +41,7 @@ public class SchemaToJsonMapper { public static String exportTableDefinitionAsJson( @NonNull LogicalTable catalogTable, boolean exportPrimaryKey, boolean exportDefaultValues ) { List columns = new LinkedList<>(); - for ( LogicalColumn logicalColumn : Catalog.getInstance().getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getColumns( catalogTable.id ) ) { + for ( LogicalColumn logicalColumn : Catalog.getInstance().getSnapshot().rel().getColumns( catalogTable.id ) ) { String defaultValue = null; String defaultFunctionName = null; if ( exportDefaultValues ) { @@ -61,7 +61,7 @@ public static String exportTableDefinitionAsJson( @NonNull LogicalTable catalogT } List primaryKeyColumnNames = null; if ( exportPrimaryKey ) { - for ( CatalogKey catalogKey : Catalog.getInstance().getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getTableKeys( catalogTable.id ) ) { + for ( CatalogKey catalogKey : Catalog.getInstance().getSnapshot().rel().getTableKeys( catalogTable.id ) ) { if ( catalogKey.id == catalogTable.primaryKey ) { primaryKeyColumnNames = catalogKey.getColumnNames(); break; diff --git a/webui/src/main/java/org/polypheny/db/webui/Crud.java b/webui/src/main/java/org/polypheny/db/webui/Crud.java index 77704bffa2..1069d3a341 100644 --- a/webui/src/main/java/org/polypheny/db/webui/Crud.java +++ b/webui/src/main/java/org/polypheny/db/webui/Crud.java @@ -304,7 +304,7 @@ Result getTable( final UIRequest request ) { // determine if it is a view or a table LogicalTable catalogTable; - catalogTable = catalog.getSnapshot().getRelSnapshot( catalog.getSnapshot().getNamespace( t[0] ).id ).getTable( t[1] ); + catalogTable = catalog.getSnapshot().rel().getTables( t[0], t[1] ).get( 0 ); result.setNamespaceType( catalogTable.namespaceType ); if ( catalogTable.modifiable ) { result.setType( ResultType.TABLE ); @@ -316,12 +316,12 @@ Result getTable( final UIRequest request ) { ArrayList cols = new ArrayList<>(); ArrayList primaryColumns; if ( catalogTable.primaryKey != null ) { - CatalogPrimaryKey primaryKey = catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getPrimaryKey( catalogTable.primaryKey ); + CatalogPrimaryKey primaryKey = catalog.getSnapshot().rel().getPrimaryKey( catalogTable.primaryKey ); primaryColumns = new ArrayList<>( primaryKey.getColumnNames() ); } else { primaryColumns = new ArrayList<>(); } - for ( LogicalColumn logicalColumn : catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getColumns( catalogTable.id ) ) { + for ( LogicalColumn logicalColumn : catalog.getSnapshot().rel().getColumns( catalogTable.id ) 
) { String defaultValue = logicalColumn.defaultValue == null ? null : logicalColumn.defaultValue.value; String collectionsType = logicalColumn.collectionsType == null ? "" : logicalColumn.collectionsType.getName(); cols.add( @@ -382,7 +382,7 @@ void getSchemaTree( final Context ctx ) { ArrayList tableTree = new ArrayList<>(); ArrayList viewTree = new ArrayList<>(); ArrayList collectionTree = new ArrayList<>(); - List tables = catalog.getSnapshot().getRelSnapshot( schema.id ).getTables( , null ); + List tables = catalog.getSnapshot().rel().getTables( schema.name, null ); for ( LogicalTable table : tables ) { String icon = "fa fa-table"; if ( table.entityType == EntityType.SOURCE ) { @@ -396,7 +396,7 @@ void getSchemaTree( final Context ctx ) { SidebarElement tableElement = new SidebarElement( schema.name + "." + table.name, table.name, schema.namespaceType, request.routerLinkRoot, icon ); if ( request.depth > 2 ) { - List columns = catalog.getSnapshot().getRelSnapshot( table.namespaceId ).getColumns( table.id ); + List columns = catalog.getSnapshot().rel().getColumns( table.id ); for ( LogicalColumn column : columns ) { tableElement.addChild( new SidebarElement( schema.name + "." + table.name + "." + column.name, column.name, schema.namespaceType, request.routerLinkRoot, icon ).setCssClass( "sidebarColumn" ) ); } @@ -469,7 +469,7 @@ void getTables( final Context ctx ) { } } - List tables = catalog.getSnapshot().getRelSnapshot( namespaceId ).getTables( , null ); + List tables = catalog.getSnapshot().rel().getTables( namespaceId, null ); ArrayList result = new ArrayList<>(); for ( LogicalTable t : tables ) { result.add( new DbTable( t.name, namespaceName, t.modifiable, t.entityType ) ); @@ -656,8 +656,7 @@ void insertRow( final Context ctx ) { StringJoiner values = new StringJoiner( ",", "(", ")" ); String finalTableId = tableId; - LogicalTable table = catalog.getSnapshot().getNamespaces( null ).stream().map( n -> catalog.getSnapshot().getRelSnapshot( n.id ).getTable( finalTableId ) ).findFirst().orElse( null ); - List logicalColumns = catalog.getSnapshot().getRelSnapshot( table.namespaceId ).getColumns( new org.polypheny.db.catalog.logistic.Pattern( split[1] ), null ); + List logicalColumns = catalog.getSnapshot().rel().getColumns( org.polypheny.db.catalog.logistic.Pattern.of( finalTableId ), null ); try { int i = 0; for ( LogicalColumn logicalColumn : logicalColumns ) { @@ -951,10 +950,10 @@ private String computeWherePK( final String tableName, final String columnName, throw new RuntimeException(); } - LogicalTable catalogTable = catalog.getSnapshot().getRelSnapshot( catalogColumns.values().iterator().next().namespaceId ).getTable( tableName ); - CatalogPrimaryKey pk = catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getPrimaryKey( catalogTable.primaryKey ); + LogicalTable catalogTable = catalog.getSnapshot().rel().getTable( catalogColumns.values().iterator().next().namespaceId, tableName ); + CatalogPrimaryKey pk = catalog.getSnapshot().rel().getPrimaryKey( catalogTable.primaryKey ); for ( long colId : pk.columnIds ) { - String colName = catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getColumn( colId ).name; + String colName = catalog.getSnapshot().rel().getColumn( colId ).name; String condition; if ( filter.containsKey( colName ) ) { String val = filter.get( colName ); @@ -1026,7 +1025,7 @@ void updateRow( final Context ctx ) throws ServletException, IOException { LogicalNamespace namespace = catalog.getSnapshot().getNamespace( split[0] ); - List 
logicalColumns = catalog.getSnapshot().getRelSnapshot( namespace.id ).getColumns( new org.polypheny.db.catalog.logistic.Pattern( split[1] ), null ); + List logicalColumns = catalog.getSnapshot().rel().getColumns( new org.polypheny.db.catalog.logistic.Pattern( split[1] ), null ); int i = 0; for ( LogicalColumn logicalColumn : logicalColumns ) { @@ -1123,15 +1122,15 @@ void getColumns( final Context ctx ) { ArrayList cols = new ArrayList<>(); LogicalNamespace namespace = catalog.getSnapshot().getNamespace( t[0] ); - LogicalTable catalogTable = catalog.getSnapshot().getRelSnapshot( namespace.id ).getTable( t[1] ); + LogicalTable catalogTable = catalog.getSnapshot().rel().getTable( t[0], t[1] ); ArrayList primaryColumns; if ( catalogTable.primaryKey != null ) { - CatalogPrimaryKey primaryKey = catalog.getSnapshot().getRelSnapshot( namespace.id ).getPrimaryKey( catalogTable.primaryKey ); + CatalogPrimaryKey primaryKey = catalog.getSnapshot().rel().getPrimaryKey( catalogTable.primaryKey ); primaryColumns = new ArrayList<>( primaryKey.getColumnNames() ); } else { primaryColumns = new ArrayList<>(); } - for ( LogicalColumn logicalColumn : catalog.getSnapshot().getRelSnapshot( namespace.id ).getColumns( catalogTable.id ) ) { + for ( LogicalColumn logicalColumn : catalog.getSnapshot().rel().getColumns( catalogTable.id ) ) { String defaultValue = logicalColumn.defaultValue == null ? null : logicalColumn.defaultValue.value; String collectionsType = logicalColumn.collectionsType == null ? "" : logicalColumn.collectionsType.getName(); cols.add( @@ -1163,13 +1162,12 @@ void getColumns( final Context ctx ) { void getDataSourceColumns( final Context ctx ) throws UnknownTableException, UnknownSchemaException { UIRequest request = ctx.bodyAsClass( UIRequest.class ); - LogicalNamespace namespace = catalog.getSnapshot().getNamespace( request.getSchemaName() ); - LogicalTable catalogTable = catalog.getSnapshot().getRelSnapshot( namespace.id ).getTable( request.getTableName() ); + LogicalTable catalogTable = catalog.getSnapshot().rel().getTable( request.getSchemaName(), request.getTableName() ); if ( catalogTable.entityType == EntityType.VIEW ) { List columns = new ArrayList<>(); - List cols = catalog.getSnapshot().getRelSnapshot( namespace.id ).getColumns( catalogTable.id ); + List cols = catalog.getSnapshot().rel().getColumns( catalogTable.id ); for ( LogicalColumn col : cols ) { columns.add( new DbColumn( col.name, @@ -1187,17 +1185,17 @@ void getDataSourceColumns( final Context ctx ) throws UnknownTableException, Unk } ctx.json( new Result( columns.toArray( new DbColumn[0] ), null ).setType( ResultType.VIEW ) ); } else { - List allocs = catalog.getSnapshot().getAllocSnapshot().getAllocationsFromLogical( catalogTable.id ); - if ( catalog.getSnapshot().getAllocSnapshot().getAllocationsFromLogical( catalogTable.id ).size() != 1 ) { + List allocs = catalog.getSnapshot().alloc().getAllocationsFromLogical( catalogTable.id ); + if ( catalog.getSnapshot().alloc().getAllocationsFromLogical( catalogTable.id ).size() != 1 ) { throw new RuntimeException( "The table has an unexpected number of placements!" 
); } long adapterId = allocs.get( 0 ).adapterId; - CatalogPrimaryKey primaryKey = catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getPrimaryKey( catalogTable.primaryKey ); + CatalogPrimaryKey primaryKey = catalog.getSnapshot().rel().getPrimaryKey( catalogTable.primaryKey ); List pkColumnNames = primaryKey.getColumnNames(); List columns = new ArrayList<>(); - for ( CatalogColumnPlacement ccp : catalog.getSnapshot().getAllocSnapshot().getColumnPlacementsOnAdapterPerTable( adapterId, catalogTable.id ) ) { - LogicalColumn col = catalog.getSnapshot().getRelSnapshot( namespace.id ).getColumn( ccp.columnId ); + for ( CatalogColumnPlacement ccp : catalog.getSnapshot().alloc().getColumnPlacementsOnAdapterPerTable( adapterId, catalogTable.id ) ) { + LogicalColumn col = catalog.getSnapshot().rel().getColumn( ccp.columnId ); columns.add( new DbColumn( col.name, col.type.getName(), @@ -1209,7 +1207,7 @@ void getDataSourceColumns( final Context ctx ) throws UnknownTableException, Unk col.cardinality, pkColumnNames.contains( col.name ), col.defaultValue == null ? null : col.defaultValue.value - ).setPhysicalName( ccp.physicalColumnName ) ); + ) );//.setPhysicalName( ccp.physicalColumnName ) ); } ctx.json( new Result( columns.toArray( new DbColumn[0] ), null ).setType( ResultType.TABLE ) ); } @@ -1222,9 +1220,8 @@ void getDataSourceColumns( final Context ctx ) throws UnknownTableException, Unk void getAvailableSourceColumns( final Context ctx ) throws UnknownTableException { UIRequest request = ctx.bodyAsClass( UIRequest.class ); - LogicalNamespace namespace = catalog.getSnapshot().getNamespace( request.getSchemaName() ); - LogicalTable table = catalog.getSnapshot().getRelSnapshot( namespace.id ).getTable( request.getTableName() ); - Map> placements = catalog.getSnapshot().getAllocSnapshot().getColumnPlacementsByAdapter( table.id ); + LogicalTable table = catalog.getSnapshot().rel().getTable( request.getSchemaName(), request.getTableName() ); + Map> placements = catalog.getSnapshot().alloc().getColumnPlacementsByAdapter( table.id ); Set adapterIds = placements.keySet(); if ( adapterIds.size() > 1 ) { log.warn( String.format( "The number of sources of an entity should not be > 1 (%s.%s)", request.getSchemaName(), request.getTableName() ) ); @@ -1295,9 +1292,7 @@ void getMaterializedInfo( final Context ctx ) throws UnknownTableException, Unkn private LogicalTable getLogicalTable( String schema, String table ) throws UnknownTableException { - LogicalNamespace namespace = catalog.getSnapshot().getNamespace( schema ); - - return catalog.getSnapshot().getRelSnapshot( namespace.id ).getTable( table ); + return catalog.getSnapshot().rel().getTable( schema, table ); } @@ -1601,7 +1596,7 @@ void getConstraints( final Context ctx ) { // get primary key if ( catalogTable.primaryKey != null ) { - CatalogPrimaryKey primaryKey = catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getPrimaryKey( catalogTable.primaryKey ); + CatalogPrimaryKey primaryKey = catalog.getSnapshot().rel().getPrimaryKey( catalogTable.primaryKey ); for ( String columnName : primaryKey.getColumnNames() ) { if ( !temp.containsKey( "" ) ) { temp.put( "", new ArrayList<>() ); @@ -1615,7 +1610,7 @@ void getConstraints( final Context ctx ) { // get unique constraints. 
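The getConstraints hunks around this point gather key metadata into a name-to-columns map: the primary key handled just above goes in under the empty name, and the unique constraints handled just below go in under their own names. A sketch of that grouping with simplified inputs (the real code reads CatalogPrimaryKey and CatalogConstraint objects):

    import java.util.ArrayList;
    import java.util.LinkedHashMap;
    import java.util.List;
    import java.util.Map;

    // Simplified: column names come in as plain lists instead of catalog objects.
    public class ConstraintGroupingSketch {

        static Map<String, List<String>> group( List<String> pkColumns, Map<String, List<String>> uniqueConstraints ) {
            Map<String, List<String>> temp = new LinkedHashMap<>();
            temp.put( "", new ArrayList<>( pkColumns ) ); // the primary key is keyed by the empty name
            uniqueConstraints.forEach( ( name, cols ) -> temp.put( name, new ArrayList<>( cols ) ) );
            return temp;
        }

        public static void main( String[] args ) {
            System.out.println( group( List.of( "empid" ), Map.of( "u_empname", List.of( "name" ) ) ) );
        }
    }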
temp.clear(); - List constraints = catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getConstraints( catalogTable.id ); + List constraints = catalog.getSnapshot().rel().getConstraints( catalogTable.id ); for ( CatalogConstraint catalogConstraint : constraints ) { if ( catalogConstraint.type == ConstraintType.UNIQUE ) { temp.put( catalogConstraint.name, new ArrayList<>( catalogConstraint.key.getColumnNames() ) ); @@ -1756,7 +1751,7 @@ void getIndexes( final Context ctx ) { Result result; try { LogicalTable catalogTable = getLogicalTable( request.schema, request.table ); - List catalogIndexes = catalog.getSnapshot().getRelSnapshot( catalogTable.id ).getIndexes( catalogTable.id, false ); + List catalogIndexes = catalog.getSnapshot().rel().getIndexes( catalogTable.id, false ); DbColumn[] header = { new DbColumn( "Name" ), @@ -1786,7 +1781,7 @@ void getIndexes( final Context ctx ) { } // Get functional indexes - List placements = catalog.getSnapshot().getAllocSnapshot().getDataPlacements( catalogTable.id ); + List placements = catalog.getSnapshot().alloc().getDataPlacements( catalogTable.id ); for ( CatalogDataPlacement placement : placements ) { Adapter adapter = AdapterManager.getInstance().getAdapter( placement.adapterId ); DataStore store; @@ -1892,9 +1887,9 @@ void getUnderlyingTable( final Context ctx ) throws UnknownTableException { for ( Entry> entry : underlyingTableOriginal.entrySet() ) { List columns = new ArrayList<>(); for ( Long ids : entry.getValue() ) { - columns.add( catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getColumn( ids ).name ); + columns.add( catalog.getSnapshot().rel().getColumn( ids ).name ); } - underlyingTable.put( catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getTable( entry.getKey() ).name, columns ); + underlyingTable.put( catalog.getSnapshot().rel().getTable( entry.getKey() ).name, columns ); } ctx.json( new UnderlyingTables( underlyingTable ) ); } else { @@ -1918,23 +1913,23 @@ private Placement getPlacements( final Index index ) { Snapshot snapshot = Catalog.getInstance().getSnapshot(); try { LogicalTable table = getLogicalTable( schemaName, tableName ); - Placement p = new Placement( snapshot.getAllocSnapshot().isPartitioned( table.id ), snapshot.getAllocSnapshot().getPartitionGroupNames( table.id ), table.entityType ); + Placement p = new Placement( snapshot.alloc().isPartitioned( table.id ), snapshot.alloc().getPartitionGroupNames( table.id ), table.entityType ); if ( table.entityType == EntityType.VIEW ) { return p; } else { long pkid = table.primaryKey; - List pkColumnIds = snapshot.getRelSnapshot( table.namespaceId ).getPrimaryKey( pkid ).columnIds; - LogicalColumn pkColumn = snapshot.getRelSnapshot( table.namespaceId ).getColumn( pkColumnIds.get( 0 ) ); - List pkPlacements = snapshot.getAllocSnapshot().getColumnPlacements( pkColumn.id ); + List pkColumnIds = snapshot.rel().getPrimaryKey( pkid ).columnIds; + LogicalColumn pkColumn = snapshot.rel().getColumn( pkColumnIds.get( 0 ) ); + List pkPlacements = snapshot.alloc().getColumnPlacements( pkColumn.id ); for ( CatalogColumnPlacement placement : pkPlacements ) { Adapter adapter = AdapterManager.getInstance().getAdapter( placement.adapterId ); - PartitionProperty property = snapshot.getAllocSnapshot().getPartitionProperty( table.id ); + PartitionProperty property = snapshot.alloc().getPartitionProperty( table.id ); p.addAdapter( new RelationalStore( adapter.getUniqueName(), adapter.getUniqueName(), - 
snapshot.getAllocSnapshot().getColumnPlacementsOnAdapterPerTable( adapter.getAdapterId(), table.id ), - snapshot.getAllocSnapshot().getPartitionGroupsIndexOnDataPlacement( placement.adapterId, placement.tableId ), + snapshot.alloc().getColumnPlacementsOnAdapterPerTable( adapter.getAdapterId(), table.id ), + snapshot.alloc().getPartitionGroupsIndexOnDataPlacement( placement.adapterId, placement.tableId ), property.numPartitionGroups, property.partitionType ) ); } @@ -2061,7 +2056,7 @@ void getPartitionFunctionModel( final Context ctx ) throws UnknownColumnExceptio LogicalNamespace namespace = Catalog.getInstance().getSnapshot().getNamespace( request.schemaName ); - partitionColumn = Catalog.getInstance().getSnapshot().getRelSnapshot( namespace.id ).getColumn( request.tableName, request.column ); + partitionColumn = Catalog.getInstance().getSnapshot().rel().getColumn( request.tableName, request.column ); if ( !partitionManager.supportsColumnOfType( partitionColumn.type ) ) { ctx.json( new PartitionFunctionModel( "The partition function " + request.method + " does not support columns of type " + partitionColumn.type ) ); @@ -2512,15 +2507,12 @@ void getUml( final Context ctx ) { ArrayList fKeys = new ArrayList<>(); ArrayList tables = new ArrayList<>(); - List catalogEntities = Catalog.getInstance().getSnapshot().getNamespaces( new org.polypheny.db.catalog.logistic.Pattern( request.schema ) ) - .stream() - .filter( s -> s.namespaceType == NamespaceType.RELATIONAL ) - .flatMap( s -> catalog.getSnapshot().getRelSnapshot( s.id ).getTables( , null ).stream() ).collect( Collectors.toList() ); + List catalogEntities = catalog.getSnapshot().rel().getTables( request.schema, null ); for ( LogicalTable catalogTable : catalogEntities ) { if ( catalogTable.entityType == EntityType.ENTITY || catalogTable.entityType == EntityType.SOURCE ) { // get foreign keys - List foreignKeys = catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getForeignKeys( catalogTable.id ); + List foreignKeys = catalog.getSnapshot().rel().getForeignKeys( catalogTable.id ); for ( CatalogForeignKey catalogForeignKey : foreignKeys ) { for ( int i = 0; i < catalogForeignKey.getReferencedKeyColumnNames().size(); i++ ) { fKeys.add( ForeignKey.builder() @@ -2536,7 +2528,7 @@ void getUml( final Context ctx ) { .build() ); } } - LogicalRelSnapshot relSnapshot = catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ); + LogicalRelSnapshot relSnapshot = catalog.getSnapshot().rel(); // get tables with its columns DbTable table = new DbTable( catalogTable.name, catalog.getSnapshot().getNamespace( catalogTable.namespaceId ).getName(), catalogTable.modifiable, catalogTable.entityType ); @@ -2546,14 +2538,14 @@ void getUml( final Context ctx ) { // get primary key with its columns if ( catalogTable.primaryKey != null ) { - CatalogPrimaryKey catalogPrimaryKey = catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getPrimaryKey( catalogTable.primaryKey ); + CatalogPrimaryKey catalogPrimaryKey = catalog.getSnapshot().rel().getPrimaryKey( catalogTable.primaryKey ); for ( String columnName : catalogPrimaryKey.getColumnNames() ) { table.addPrimaryKeyField( columnName ); } } // get unique constraints - List catalogConstraints = catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getConstraints( catalogTable.id ); + List catalogConstraints = catalog.getSnapshot().rel().getConstraints( catalogTable.id ); for ( CatalogConstraint catalogConstraint : catalogConstraints ) { if ( catalogConstraint.type == 
ConstraintType.UNIQUE ) { // TODO: unique constraints can be over multiple columns. @@ -2567,7 +2559,7 @@ void getUml( final Context ctx ) { } // get unique indexes - List catalogIndexes = catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getIndexes( catalogTable.id, true ); + List catalogIndexes = catalog.getSnapshot().rel().getIndexes( catalogTable.id, true ); for ( CatalogIndex catalogIndex : catalogIndexes ) { // TODO: unique indexes can be over multiple columns. if ( catalogIndex.key.getColumnNames().size() == 1 && @@ -3196,9 +3188,8 @@ public static Result executeSqlSelect( final Statement statement, final UIReques LogicalTable catalogTable = null; if ( request.tableId != null ) { String[] t = request.tableId.split( "\\." ); - LogicalNamespace namespace = crud.catalog.getSnapshot().getNamespace( t[0] ); - catalogTable = crud.catalog.getSnapshot().getRelSnapshot( namespace.id ).getTable( t[1] ); + catalogTable = crud.catalog.getSnapshot().rel().getTable( t[0], t[1] ); entityType = catalogTable.entityType; } @@ -3228,7 +3219,7 @@ public static Result executeSqlSelect( final Statement statement, final UIReques // Get column default values if ( catalogTable != null ) { - LogicalColumn logicalColumn = crud.catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getColumn( catalogTable.id, columnName ); + LogicalColumn logicalColumn = crud.catalog.getSnapshot().rel().getColumn( catalogTable.id, columnName ); if ( logicalColumn != null ) { if ( logicalColumn.defaultValue != null ) { dbCol.defaultValue = logicalColumn.defaultValue.value; @@ -3575,7 +3566,7 @@ public static Transaction getTransaction( boolean analyze, boolean useCache, Tra public static Transaction getTransaction( boolean analyze, boolean useCache, TransactionManager transactionManager, long userId, long databaseId, String origin ) { try { Snapshot snapshot = Catalog.getInstance().getSnapshot(); - Transaction transaction = transactionManager.startTransaction( snapshot.getUser( Catalog.defaultUserId ), snapshot.getNamespace( Catalog.defaultDatabaseId ), analyze, origin, MultimediaFlavor.FILE ); + Transaction transaction = transactionManager.startTransaction( snapshot.getUser( Catalog.defaultUserId ), snapshot.getNamespace( Catalog.defaultNamespaceId ), analyze, origin, MultimediaFlavor.FILE ); transaction.setUseCache( useCache ); return transaction; } catch ( UnknownUserException | UnknownSchemaException e ) { @@ -3600,7 +3591,7 @@ private Map getCatalogColumns( String schemaName, String Map dataTypes = new HashMap<>(); try { LogicalTable table = getLogicalTable( schemaName, tableName ); - List logicalColumns = catalog.getSnapshot().getRelSnapshot( table.namespaceId ).getColumns( table.id ); + List logicalColumns = catalog.getSnapshot().rel().getColumns( table.id ); for ( LogicalColumn logicalColumn : logicalColumns ) { dataTypes.put( logicalColumn.name, logicalColumn ); } diff --git a/webui/src/main/java/org/polypheny/db/webui/HttpServer.java b/webui/src/main/java/org/polypheny/db/webui/HttpServer.java index f88292ead1..44841274af 100644 --- a/webui/src/main/java/org/polypheny/db/webui/HttpServer.java +++ b/webui/src/main/java/org/polypheny/db/webui/HttpServer.java @@ -167,7 +167,7 @@ public String toJsonString( @NotNull Object obj ) { this.crud = new Crud( transactionManager, Catalog.defaultUserId, - Catalog.defaultDatabaseId ); + Catalog.defaultNamespaceId ); WebSocket webSocketHandler = new WebSocket( crud, gson ); webSockets( server, webSocketHandler ); diff --git 
a/webui/src/main/java/org/polypheny/db/webui/WebSocket.java b/webui/src/main/java/org/polypheny/db/webui/WebSocket.java
index bfd71d6ba4..221e079a83 100644
--- a/webui/src/main/java/org/polypheny/db/webui/WebSocket.java
+++ b/webui/src/main/java/org/polypheny/db/webui/WebSocket.java
@@ -152,10 +152,10 @@ public void onMessage( final WsMessageContext ctx ) {
                     result = crud.getTable( uiRequest );
                     break;
                 case DOCUMENT:
-                    result = LanguageCrud.anyQuery( QueryLanguage.from( "mongo" ), ctx.session, new QueryRequest( String.format( "db.%s.find({})", uiRequest.getTableName() ), false, false, "mql", uiRequest.getSchemaName() ), crud.getTransactionManager(), Catalog.defaultUserId, Catalog.defaultDatabaseId, this.crud ).get( 0 );
+                    result = LanguageCrud.anyQuery( QueryLanguage.from( "mongo" ), ctx.session, new QueryRequest( String.format( "db.%s.find({})", uiRequest.getTableName() ), false, false, "mql", uiRequest.getSchemaName() ), crud.getTransactionManager(), Catalog.defaultUserId, Catalog.defaultNamespaceId, this.crud ).get( 0 );
                     break;
                 case GRAPH:
-                    result = LanguageCrud.anyQuery( QueryLanguage.from( "cypher" ), ctx.session, new QueryRequest( "MATCH (n) RETURN n", false, false, "mql", uiRequest.getSchemaName() ), crud.getTransactionManager(), Catalog.defaultUserId, Catalog.defaultDatabaseId, this.crud ).get( 0 );
+                    result = LanguageCrud.anyQuery( QueryLanguage.from( "cypher" ), ctx.session, new QueryRequest( "MATCH (n) RETURN n", false, false, "mql", uiRequest.getSchemaName() ), crud.getTransactionManager(), Catalog.defaultUserId, Catalog.defaultNamespaceId, this.crud ).get( 0 );
                     break;
             }
             if ( result == null ) {
diff --git a/webui/src/main/java/org/polypheny/db/webui/crud/LanguageCrud.java b/webui/src/main/java/org/polypheny/db/webui/crud/LanguageCrud.java
index e9be035f1a..c44259c3c2 100644
--- a/webui/src/main/java/org/polypheny/db/webui/crud/LanguageCrud.java
+++ b/webui/src/main/java/org/polypheny/db/webui/crud/LanguageCrud.java
@@ -133,7 +133,7 @@ public static InformationManager attachAnalyzerIfSpecified( QueryRequest request

     public static PolyGraph getGraph( String databaseName, TransactionManager manager ) {
-        Transaction transaction = Crud.getTransaction( false, false, manager, Catalog.defaultUserId, Catalog.defaultDatabaseId, "getGraph" );
+        Transaction transaction = Crud.getTransaction( false, false, manager, Catalog.defaultUserId, Catalog.defaultNamespaceId, "getGraph" );
         Processor processor = transaction.getProcessor( QueryLanguage.from( "cypher" ) );
         Statement statement = transaction.createStatement();

@@ -189,8 +189,7 @@ public static Result getResult( QueryLanguage language, Statement statement, Que
         LogicalTable catalogTable = null;
         if ( request.tableId != null ) {
             String[] t = request.tableId.split( "\\." );
-            LogicalNamespace namespace = catalog.getSnapshot().getNamespace( t[0] );
-            catalogTable = catalog.getSnapshot().getRelSnapshot( namespace.id ).getTable( t[1] );
+            catalogTable = catalog.getSnapshot().rel().getTable( t[0], t[1] );
         }

         ArrayList header = new ArrayList<>();
@@ -219,7 +218,7 @@ public static Result getResult( QueryLanguage language, Statement statement, Que

             // Get column default values
             if ( catalogTable != null ) {
-                LogicalColumn logicalColumn = catalog.getSnapshot().getRelSnapshot( catalogTable.namespaceId ).getColumn( catalogTable.id, columnName );
+                LogicalColumn logicalColumn = catalog.getSnapshot().rel().getColumn( catalogTable.id, columnName );
                 if ( logicalColumn != null ) {
                     if ( logicalColumn.defaultValue != null ) {
                         dbCol.defaultValue = logicalColumn.defaultValue.value;
@@ -317,7 +316,7 @@ private Placement getPlacements( final Index index ) {
             if ( namespaces.size() != 1 ) {
                 throw new RuntimeException();
             }
-            List graphs = catalog.getSnapshot().getGraphSnapshot( namespaces.get( 0 ).id ).getGraphs( new Pattern( graphName ) );
+            List graphs = catalog.getSnapshot().graph().getGraphs( new Pattern( graphName ) );
             if ( graphs.size() != 1 ) {
                 log.error( "The requested graph does not exist." );
                 return new Placement( new RuntimeException( "The requested graph does not exist." ) );
@@ -329,13 +328,13 @@ private Placement getPlacements( final Index index ) {
             return p;
         } else {
-            List placements = catalog.getSnapshot().getAllocSnapshot().getDataPlacements( graph.id );
+            List placements = catalog.getSnapshot().alloc().getDataPlacements( graph.id );
             for ( CatalogDataPlacement placement : placements ) {
                 Adapter adapter = AdapterManager.getInstance().getAdapter( placement.adapterId );
                 p.addAdapter( new Placement.GraphStore(
                         adapter.getUniqueName(),
                         adapter.getUniqueName(),
-                        catalog.getSnapshot().getAllocSnapshot().getGraphPlacements( placement.adapterId ),
+                        catalog.getSnapshot().alloc().getGraphPlacements( placement.adapterId ),
                         adapter.getSupportedNamespaceTypes().contains( NamespaceType.GRAPH ) ) );
             }
             return p;
@@ -363,7 +362,7 @@ public void getCollectionPlacements( Context context ) {
         Catalog catalog = Catalog.getInstance();
         long namespaceId;
         namespaceId = catalog.getSnapshot().getNamespace( namespace ).id;
-        List collections = catalog.getSnapshot().getDocSnapshot( namespaceId ).getCollections( new Pattern( collectionName ) );
+        List collections = catalog.getSnapshot().doc().getCollections( namespaceId, new Pattern( collectionName ) );

         if ( collections.size() != 1 ) {
             context.json( new Placement( new UnknownCollectionException( 0 ) ) );
@@ -374,14 +373,14 @@ public void getCollectionPlacements( Context context ) {

         Placement p = new Placement( false, List.of(), EntityType.ENTITY );
-        List placements = catalog.getSnapshot().getAllocSnapshot().getCollectionPlacements( collection.id );
+        List placements = catalog.getSnapshot().alloc().getCollectionPlacements( collection.id );
         for ( CatalogCollectionPlacement placement : placements ) {
             Adapter adapter = AdapterManager.getInstance().getAdapter( placement.adapterId );
             p.addAdapter( new DocumentStore(
                     adapter.getUniqueName(),
                     adapter.getUniqueName(),
-                    catalog.getSnapshot().getAllocSnapshot().getCollectionPlacementsByAdapter( placement.adapterId ),
+                    catalog.getSnapshot().alloc().getCollectionPlacementsByAdapter( placement.adapterId ),
                     adapter.getSupportedNamespaceTypes().contains( NamespaceType.DOCUMENT ) ) );
         }

diff --git a/webui/src/main/java/org/polypheny/db/webui/crud/StatisticCrud.java b/webui/src/main/java/org/polypheny/db/webui/crud/StatisticCrud.java
index 06289f3658..aba927587d 100644
--- a/webui/src/main/java/org/polypheny/db/webui/crud/StatisticCrud.java
+++ b/webui/src/main/java/org/polypheny/db/webui/crud/StatisticCrud.java
@@ -29,6 +29,7 @@
 import org.apache.commons.lang3.math.NumberUtils;
 import org.polypheny.db.StatisticsManager;
 import org.polypheny.db.catalog.Catalog;
+import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.config.Config;
 import org.polypheny.db.config.Config.ConfigListener;
 import org.polypheny.db.config.RuntimeConfig;
@@ -84,12 +85,9 @@ private void setConfig( Config c ) {
     public void getTableStatistics( Context ctx ) {
         UIRequest request = ctx.bodyAsClass( UIRequest.class );
-        long tableId;
-        long schemaId;
-        schemaId = Catalog.getInstance().getSnapshot().getNamespace( request.tableId.split( "\\." )[0] ).id;
-        tableId = Catalog.getInstance().getSnapshot().getRelSnapshot( schemaId ).getTable( request.tableId.split( "\\." )[1] ).id;
+        LogicalTable table = Catalog.getInstance().getSnapshot().rel().getTable( request.tableId.split( "\\." )[0], request.tableId.split( "\\." )[1] );

-        ctx.json( statisticsManager.getTableStatistic( schemaId, tableId ) );
+        ctx.json( statisticsManager.getTableStatistic( table.namespaceId, table.id ) );
     }

diff --git a/webui/src/main/java/org/polypheny/db/webui/models/Placement.java b/webui/src/main/java/org/polypheny/db/webui/models/Placement.java
index e9247fb452..5afc5883ba 100644
--- a/webui/src/main/java/org/polypheny/db/webui/models/Placement.java
+++ b/webui/src/main/java/org/polypheny/db/webui/models/Placement.java
@@ -188,8 +188,6 @@ private static class ColumnPlacement {
         private final String columnName;
         private final int storeId;
         private final PlacementType placementType;
-        private final String physicalSchemaName;
-        private final String physicalColumnName;

         public ColumnPlacement( CatalogColumnPlacement catalogColumnPlacement ) {
@@ -199,8 +197,6 @@ public ColumnPlacement( CatalogColumnPlacement catalogColumnPlacement ) {
             this.columnName = catalogColumnPlacement.getLogicalColumnName();
             this.storeId = (int) catalogColumnPlacement.adapterId;
             this.placementType = catalogColumnPlacement.placementType;
-            this.physicalSchemaName = catalogColumnPlacement.physicalSchemaName;
-            this.physicalColumnName = catalogColumnPlacement.physicalColumnName;
         }

     }
diff --git a/webui/src/main/java/org/polypheny/db/webui/models/requests/BatchUpdateRequest.java b/webui/src/main/java/org/polypheny/db/webui/models/requests/BatchUpdateRequest.java
index dca7651b00..aeeb29c7e5 100644
--- a/webui/src/main/java/org/polypheny/db/webui/models/requests/BatchUpdateRequest.java
+++ b/webui/src/main/java/org/polypheny/db/webui/models/requests/BatchUpdateRequest.java
@@ -62,9 +62,8 @@ public String getQuery( String tableId, Statement statement, HttpServletRequest
             Catalog catalog = Catalog.getInstance();
             String[] split = tableId.split( "\\." );
             LogicalColumn logicalColumn;
-            LogicalNamespace namespace = catalog.getSnapshot().getNamespace( split[0] );
-            LogicalTable table = catalog.getSnapshot().getRelSnapshot( namespace.id ).getTable( split[1] );
-            logicalColumn = catalog.getSnapshot().getRelSnapshot( table.namespaceId ).getColumn( table.id, entry.getKey() );
+            LogicalTable table = catalog.getSnapshot().rel().getTable( split[0], split[1] );
+            logicalColumn = catalog.getSnapshot().rel().getColumn( table.id, entry.getKey() );
             if ( fileName == null && value == null ) {
                 setClauses.add( String.format( "\"%s\"=NULL", entry.getKey() ) );
             } else if ( value != null && fileName == null ) {
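
A minimal sketch of the lookup pattern this commit converges on (the namespace and table names below are hypothetical; only the accessor chain is taken from the hunks above): the earlier two-step resolution through a LogicalNamespace id becomes a single namespace-qualified call on the relational snapshot.

    // Old shape, as removed above:
    //   LogicalNamespace ns = catalog.getSnapshot().getNamespace( "public" );
    //   LogicalTable table = catalog.getSnapshot().getRelSnapshot( ns.id ).getTable( "emps" );
    // New shape, as added above:
    LogicalTable table = catalog.getSnapshot().rel().getTable( "public", "emps" );
    LogicalColumn column = catalog.getSnapshot().rel().getColumn( table.id, "name" );
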
From 2f562e560626a2e352b3224355809805fa40635a Mon Sep 17 00:00:00 2001
From: datomo
Date: Tue, 11 Apr 2023 07:45:17 +0200
Subject: [PATCH 055/436] adjustment of validator entity logic

---
 .../polypheny/db/algebra/core/common/Modify.java   |  1 +
 .../db/catalog/entity/logical/LogicalTable.java    |  2 +-
 .../snapshot/impl/LogicalRelSnapshotImpl.java      | 10 +++++++---
 .../sql/language/validate/DelegatingScope.java     |  2 +-
 .../db/sql/language/validate/EmptyScope.java       |  5 ++---
 .../language/validate/IdentifierNamespace.java     |  2 +-
 .../sql/language/validate/SqlValidatorImpl.java    |  4 ++--
 .../sql/language/validate/SqlValidatorScope.java   |  9 +++++----
 .../sql/language/validate/SqlValidatorUtil.java    | 16 +++++++---------
 .../db/sql/language/validate/WithScope.java        |  4 ++--
 10 files changed, 29 insertions(+), 26 deletions(-)

diff --git a/core/src/main/java/org/polypheny/db/algebra/core/common/Modify.java b/core/src/main/java/org/polypheny/db/algebra/core/common/Modify.java
index 63563e16ec..997703b7f8 100644
--- a/core/src/main/java/org/polypheny/db/algebra/core/common/Modify.java
+++ b/core/src/main/java/org/polypheny/db/algebra/core/common/Modify.java
@@ -25,6 +25,7 @@
 public abstract class Modify extends SingleAlg {

+    @Getter
     public final E entity;

diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalTable.java b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalTable.java
index ba22b7256a..af2ae87a1f 100644
--- a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalTable.java
+++ b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalTable.java
@@ -129,7 +129,7 @@ public Expression asExpression() {

     public List getColumnStrategies() {
-        return null;
+        return getColumns().stream().map( c -> c.nullable ? ColumnStrategy.NULLABLE : ColumnStrategy.NOT_NULLABLE ).collect( Collectors.toList() );
     }

diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalRelSnapshotImpl.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalRelSnapshotImpl.java
index 788b16ead1..8d8fd5804e 100644
--- a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalRelSnapshotImpl.java
+++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalRelSnapshotImpl.java
@@ -48,9 +48,11 @@ public class LogicalRelSnapshotImpl implements LogicalRelSnapshot {

     ImmutableMap namespaces;

+    ImmutableMap namespaceNames;
+
     ImmutableMap tables;

-    ImmutableMap tableNames;
+    ImmutableMap, LogicalTable> tableNames;

     ImmutableMap> tableColumns;
     ImmutableMap columns;
@@ -81,9 +83,10 @@ public class LogicalRelSnapshotImpl implements LogicalRelSnapshot {
     public LogicalRelSnapshotImpl( Map catalogs ) {
         namespaces = ImmutableMap.copyOf( catalogs.values().stream().map( LogicalRelationalCatalog::getLogicalNamespace ).collect( Collectors.toMap( n -> n.id, n -> n ) ) );
+        namespaceNames = ImmutableMap.copyOf( namespaces.values().stream().collect( Collectors.toMap( n -> n.name, n -> n ) ) );

         tables = ImmutableMap.copyOf( catalogs.values().stream().flatMap( c -> c.getTables().entrySet().stream() ).collect( Collectors.toMap( Entry::getKey, Entry::getValue ) ) );
-        tableNames = ImmutableMap.copyOf( tables.entrySet().stream().collect( Collectors.toMap( e -> namespaces.get( e.getValue().namespaceId ).caseSensitive ? e.getValue().name : e.getValue().name.toLowerCase(), Entry::getValue ) ) );
+        tableNames = ImmutableMap.copyOf( tables.entrySet().stream().collect( Collectors.toMap( e -> Pair.of( e.getValue().namespaceId, namespaces.get( e.getValue().namespaceId ).caseSensitive ? e.getValue().name : e.getValue().name.toLowerCase() ), Entry::getValue ) ) );

         columns = ImmutableMap.copyOf( catalogs.values().stream().flatMap( c -> c.getColumns().entrySet().stream() ).collect( Collectors.toMap( Entry::getKey, Entry::getValue ) ) );
         columnNames = ImmutableMap.copyOf( columns.entrySet().stream().collect( Collectors.toMap( e -> namespaces.get( e.getValue().namespaceId ).caseSensitive ? Pair.of( e.getValue().tableId, e.getValue().name ) : Pair.of( e.getValue().tableId, e.getValue().name.toLowerCase() ), Entry::getValue ) ) );
@@ -357,7 +360,8 @@ public LogicalTable getTable( long namespaceId, String name ) {

     @Override
     public LogicalTable getTable( String namespaceName, String tableName ) {
-        return null;
+        LogicalNamespace namespace = namespaceNames.get( namespaceName );
+        return tableNames.get( Pair.of( namespace.id, namespace.caseSensitive ? tableName : tableName.toLowerCase() ) );
     }

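A short illustration of why tableNames is now keyed by Pair.of( namespaceId, name ) instead of the bare table name (the ids and names below are hypothetical; the keying rule itself is the one built in the constructor above): a plain string key would let equally named tables from different namespaces overwrite each other, and a case-insensitive namespace could miss entries stored in their original casing.

    // Keys as built above: the name is lower-cased when the namespace is case-insensitive.
    //   ( 1, "emps" ) -> table "Emps" in case-insensitive namespace id 1
    //   ( 2, "Emps" ) -> table "Emps" in case-sensitive namespace id 2
    // A lookup therefore normalizes the same way before probing the map:
    LogicalTable t = snapshot.rel().getTable( "public", "EMPS" ); // probes key ( 1, "emps" )
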
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/DelegatingScope.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/DelegatingScope.java
index 57c6cf3e81..a22c8cd78e 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/DelegatingScope.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/DelegatingScope.java
@@ -96,7 +96,7 @@ public void resolve( List names, NameMatcher nameMatcher, boolean deep,
      */
     void resolveInNamespace( SqlValidatorNamespace ns, boolean nullable, List names, NameMatcher nameMatcher, Path path, Resolved resolved ) {
         if ( names.isEmpty() ) {
-            resolved.found( null );
+            resolved.found( validator, null );
             return;
         }
         final AlgDataType rowType = ns.getRowType();
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/EmptyScope.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/EmptyScope.java
index 90efe3de80..181837921b 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/EmptyScope.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/EmptyScope.java
@@ -27,7 +27,6 @@
 import org.polypheny.db.algebra.type.AlgDataType;
 import org.polypheny.db.catalog.Catalog;
 import org.polypheny.db.catalog.entity.CatalogEntity;
-import org.polypheny.db.catalog.entity.LogicalNamespace;
 import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.schema.PolyphenyDbSchema;
 import org.polypheny.db.sql.language.SqlCall;
@@ -106,7 +105,7 @@ public void resolveTable( List names, NameMatcher nameMatcher, Path path
         }

         if ( table != null ) {
-            resolves.add( new Resolve( table ) );
+            resolves.add( new Resolve( validator, table ) );
         }
     }
@@ -117,7 +116,7 @@ private void resolve_( final PolyphenyDbSchema rootSchema, List names, L

         LogicalTable table = rootSchema.getTable( concat );
         if ( table != null ) {
-            resolved.found( table );
+            resolved.found( validator, table );
             return;
         }
     }
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/IdentifierNamespace.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/IdentifierNamespace.java
index bfc58e0d91..60b14c2ca5 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/IdentifierNamespace.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/IdentifierNamespace.java
@@ -29,7 +29,6 @@
 import org.polypheny.db.algebra.type.AlgDataTypeField;
 import org.polypheny.db.catalog.Catalog;
 import org.polypheny.db.catalog.entity.CatalogEntity;
-import org.polypheny.db.catalog.entity.LogicalNamespace;
 import org.polypheny.db.languages.ParserPos;
 import org.polypheny.db.sql.language.SqlCall;
 import org.polypheny.db.sql.language.SqlIdentifier;
@@ -113,6 +112,7 @@ private SqlValidatorNamespace resolveImpl( SqlIdentifier id ) {
         SqlValidatorScope.Resolve previousResolve = null;
         if ( resolved.count() == 1 ) {
             final SqlValidatorScope.Resolve resolve = previousResolve = resolved.only();
+            return resolve.namespace;
             /*if ( resolve.remainingNames.isEmpty() ) {
                 return resolve.namespace;
             }*/
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorImpl.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorImpl.java
index aeb0cceda0..fa175702f3 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorImpl.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorImpl.java
@@ -70,6 +70,7 @@
 import org.polypheny.db.algebra.type.AlgRecordType;
 import org.polypheny.db.algebra.type.DynamicRecordType;
 import org.polypheny.db.catalog.entity.CatalogEntity;
+import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.catalog.logistic.NamespaceType;
 import org.polypheny.db.catalog.snapshot.Snapshot;
 import org.polypheny.db.config.RuntimeConfig;
@@ -90,7 +91,6 @@
 import org.polypheny.db.nodes.validate.ValidatorException;
 import org.polypheny.db.nodes.validate.ValidatorNamespace;
 import org.polypheny.db.nodes.validate.ValidatorScope;
-import org.polypheny.db.plan.AlgOptEntity;
 import org.polypheny.db.rex.RexBuilder;
 import org.polypheny.db.rex.RexNode;
 import org.polypheny.db.rex.RexPatternFieldRef;
@@ -4122,7 +4122,7 @@ public RexNode convertExpression( Node e ) {
                 throw new UnsupportedOperationException();
             }
         };
-        final List strategies = table.unwrap( AlgOptEntity.class ).getColumnStrategies();
+        final List strategies = table.unwrap( LogicalTable.class ).getColumnStrategies();
         for ( final AlgDataTypeField field : table.getRowType().getFieldList() ) {
             final AlgDataTypeField targetField = logicalTargetRowType.getField( field.getName(), true, false );
             switch ( strategies.get( field.getIndex() ) ) {
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorScope.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorScope.java
index 289c066221..2461d3abb0 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorScope.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorScope.java
@@ -193,7 +193,7 @@ default boolean isWithin( SqlValidatorScope scope2 ) {
      */
     interface Resolved {

-        void found( CatalogEntity entity );
+        void found( SqlValidatorImpl validator, CatalogEntity entity );

         int count();

@@ -311,8 +311,8 @@ class ResolvedImpl implements Resolved {

         @Override
-        public void found( CatalogEntity entity ) {
-            resolves.add( new Resolve( entity ) );
+        public void found( SqlValidatorImpl validator, CatalogEntity entity ) {
+            resolves.add( new Resolve( validator, entity ) );
         }

@@ -348,8 +348,9 @@ class Resolve {

         public SqlValidatorNamespace namespace;

-        Resolve( CatalogEntity entity ) {
+        Resolve( SqlValidatorImpl validator, CatalogEntity entity ) {
             this.entity = entity;
+            this.namespace = new EntityNamespace( validator, entity );
         }

diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorUtil.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorUtil.java
index 0902eec998..6dee63fd40 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorUtil.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorUtil.java
@@ -86,15 +86,12 @@ private SqlValidatorUtil() {
      * @param usedDataset Output parameter which is set to true if a sample dataset is found; may be null
      */
     public static CatalogEntity getLogicalEntity( SqlValidatorNamespace namespace, Snapshot snapshot, String datasetName, boolean[] usedDataset ) {
-        final EntityNamespace entityNamespace = namespace.unwrap( EntityNamespace.class );
-        return entityNamespace.getTable();
-        /*if ( namespace.isWrapperFor( TableNamespace.class ) ) {
-            final TableNamespace tableNamespace = namespace.unwrap( TableNamespace.class );
-            return getLogicalEntity( tableNamespace, catalogReader, datasetName, usedDataset, tableNamespace.extendedFields );
-        } else if ( namespace.isWrapperFor( SqlValidatorImpl.DmlNamespace.class ) ) {
+
+        if ( namespace.isWrapperFor( SqlValidatorImpl.DmlNamespace.class ) ) {
             final SqlValidatorImpl.DmlNamespace dmlNamespace = namespace.unwrap( SqlValidatorImpl.DmlNamespace.class );
             final SqlValidatorNamespace resolvedNamespace = dmlNamespace.resolve();
-            if ( resolvedNamespace.isWrapperFor( TableNamespace.class ) ) {
+            return resolvedNamespace.getTable();
+            /*if ( resolvedNamespace.isWrapperFor( TableNamespace.class ) ) {
                 final TableNamespace tableNamespace = resolvedNamespace.unwrap( TableNamespace.class );
                 final ValidatorTable validatorTable = tableNamespace.getTable();
                 final AlgDataTypeFactory typeFactory = AlgDataTypeFactory.DEFAULT;
@@ -103,9 +100,10 @@ public static CatalogEntity getLogicalEntity( SqlValidatorNamespace namespace, S
                     ? ImmutableList.of()
                     : getExtendedColumns( typeFactory, validatorTable, dmlNamespace.extendList );
                 return getLogicalEntity( tableNamespace, catalogReader, datasetName, usedDataset, extendedFields );
-            }
+            }*/
         }
-        return null;*/
+        final EntityNamespace entityNamespace = namespace.unwrap( EntityNamespace.class );
+        return entityNamespace.getTable();
     }

diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/WithScope.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/WithScope.java
index b6df116ba0..9d9c3d9a15 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/WithScope.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/WithScope.java
@@ -72,7 +72,7 @@ public void resolveTable( List names, NameMatcher nameMatcher, Path path
             //final Step path2 = path.plus( ns.getRowType(), 0, names.get( 0 ), StructKind.FULLY_QUALIFIED );
             LogicalNamespace namespace = validator.snapshot.getNamespace( names.get( 0 ) );
             CatalogEntity entity = validator.snapshot.rel().getTable( names.get( 0 ), names.get( 1 ) );
-            resolved.found( entity );
+            resolved.found( validator, entity );
             return;
         }
         super.resolveTable( names, nameMatcher, path, resolved );
@@ -85,7 +85,7 @@ public void resolve( List names, NameMatcher nameMatcher, boolean deep,
             final SqlValidatorNamespace ns = validator.getSqlNamespace( withItem );
             final Step path = Path.EMPTY.plus( ns.getRowType(), 0, names.get( 0 ), StructKind.FULLY_QUALIFIED );
             CatalogEntity entity = validator.snapshot.rel().getTable( names.get( 0 ), names.get( 1 ) );
-            resolved.found( entity );
+            resolved.found( validator, entity );
             return;
         }
         super.resolve( names, nameMatcher, deep, resolved );
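
A condensed view of the resolution change in this commit (nothing beyond the hunks above is assumed): Resolve now receives the validator and eagerly wraps the resolved catalog entity in an EntityNamespace. That is what allows the IdentifierNamespace change to return resolve.namespace directly instead of walking the remaining names.

    // From the constructor change in SqlValidatorScope.Resolve:
    Resolve( SqlValidatorImpl validator, CatalogEntity entity ) {
        this.entity = entity;
        // The namespace is materialized up front rather than left unset.
        this.namespace = new EntityNamespace( validator, entity );
    }
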
From 273d40b470e8c4ecf474363b3377f6e1461edd58 Mon Sep 17 00:00:00 2001
From: datomo
Date: Tue, 11 Apr 2023 23:38:41 +0200
Subject: [PATCH 056/436] restoring some validation, working basic insert

---
 .../db/adapter/java/ReflectiveSchema.java          |  4 +-
 .../db/algebra/logical/lpg/LogicalGraph.java       |  4 +-
 .../org/polypheny/db/catalog/Catalog.java          |  3 +
 .../db/catalog/catalogs/PhysicalCatalog.java       |  5 +
 .../db/catalog/entity/CatalogEntity.java           |  5 +
 .../entity/allocation/AllocationEntity.java        |  8 ++
 .../catalog/entity/logical/LogicalTable.java       |  1 +
 .../entity/physical/PhysicalTable.java             |  2 +-
 .../db/catalog/refactor/Expressible.java           |  2 +-
 .../catalog/snapshot/LogicalRelSnapshot.java       |  2 -
 .../db/catalog/snapshot/PhysicalSnapshot.java      |  3 +
 .../snapshot/impl/AllocSnapshotImpl.java           |  6 +-
 .../snapshot/impl/LogicalRelSnapshotImpl.java      |  2 +-
 .../snapshot/impl/PhysicalSnapshotImpl.java        | 10 ++
 .../java/org/polypheny/db/ddl/DdlManager.java      |  4 +-
 .../polypheny/db/schema/LogicalSchema.java         |  4 +-
 .../org/polypheny/db/schema/Namespace.java         |  8 +-
 .../java/org/polypheny/db/schema/Schemas.java      |  9 +-
 .../db/schema/impl/AbstractNamespace.java          |  4 +-
 .../db/schema/impl/DelegatingNamespace.java        |  4 +-
 .../org/polypheny/db/util/BuiltInMethod.java       |  5 +
 .../org/polypheny/db/ddl/DdlManagerImpl.java       | 20 ++--
 .../db/routing/routers/AbstractDqlRouter.java      |  8 +-
 .../db/routing/routers/BaseRouter.java             | 18 ++--
 .../db/routing/routers/CachedPlanRouter.java       |  2 +-
 .../db/routing/routers/DmlRouterImpl.java          | 96 +++++++++++++++----
 .../routers/FullPlacementQueryRouter.java          |  9 +-
 .../db/routing/routers/IcarusRouter.java           |  5 +-
 .../db/routing/routers/SimpleRouter.java           |  5 +-
 .../db/sql/clause/SimpleSqlTest.java               | 29 ++++--
 .../polypheny/db/adapter/jdbc/JdbcEntity.java      |  1 +
 .../polypheny/db/adapter/jdbc/JdbcRules.java       |  6 +-
 .../polypheny/db/adapter/jdbc/JdbcSchema.java      | 12 ++-
 .../jdbc/JdbcToEnumerableConverter.java            | 45 ++++++---
 .../jdbc/rel2sql/AlgToSqlConverter.java            |  3 +-
 .../org/polypheny/db/catalog/PolyCatalog.java      | 13 ++-
 .../db/catalog/logical/RelationalCatalog.java      |  1 -
 .../catalog/physical/PolyPhysicalCatalog.java      | 17 +++-
 .../language/validate/DelegatingScope.java         | 76 +++++++--------
 .../db/sql/language/validate/EmptyScope.java       | 32 ++++---
 .../language/validate/EntityNamespace.java         |  3 +-
 .../validate/IdentifierNamespace.java              |  9 +-
 .../db/sql/language/validate/ListScope.java        | 27 +++---
 .../language/validate/SqlValidatorImpl.java        |  4 +-
 .../language/validate/SqlValidatorScope.java       | 53 ++++++----
 .../db/sql/language/validate/WithScope.java        |  8 +-
 .../db/sql/sql2alg/SqlToAlgConverter.java          |  7 +-
 47 files changed, 397 insertions(+), 207 deletions(-)

diff --git a/core/src/main/java/org/polypheny/db/adapter/java/ReflectiveSchema.java b/core/src/main/java/org/polypheny/db/adapter/java/ReflectiveSchema.java
index 0d91620ebb..ab74d566c8 100644
--- a/core/src/main/java/org/polypheny/db/adapter/java/ReflectiveSchema.java
+++ b/core/src/main/java/org/polypheny/db/adapter/java/ReflectiveSchema.java
@@ -179,11 +179,11 @@ private Multimap createFunctionMap() {
     /**
      * Returns an expression for the object wrapped by this schema (not the schema itself).
      */
-    Expression getTargetExpression( Snapshot snapshot, String name ) {
+    Expression getTargetExpression( Snapshot snapshot, long id ) {
         return Types.castIfNecessary(
                 target.getClass(),
                 Expressions.call(
-                        Schemas.unwrap( getExpression( snapshot, name ), ReflectiveSchema.class ),
+                        Schemas.unwrap( getExpression( snapshot, id ), ReflectiveSchema.class ),
                         BuiltInMethod.REFLECTIVE_SCHEMA_GET_TARGET.method ) );
     }
diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalGraph.java b/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalGraph.java
index 2a8d16cb31..4511ddd007 100644
--- a/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalGraph.java
+++ b/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalGraph.java
@@ -108,8 +108,8 @@ public Set getSubNamespaceNames() {

     @Override
-    public Expression getExpression( Snapshot snapshot, String name ) {
-        return Schemas.subSchemaExpression( snapshot, name, LogicalGraph.class );
+    public Expression getExpression( Snapshot snapshot, long id ) {
+        return Schemas.subSchemaExpression( snapshot, id, null, LogicalGraph.class );
     }

diff --git a/core/src/main/java/org/polypheny/db/catalog/Catalog.java b/core/src/main/java/org/polypheny/db/catalog/Catalog.java
index b2649571a6..ecc9187066 100644
--- a/core/src/main/java/org/polypheny/db/catalog/Catalog.java
+++ b/core/src/main/java/org/polypheny/db/catalog/Catalog.java
@@ -57,6 +57,9 @@ public abstract class Catalog implements ExtensionPoint {

     public static final Expression CATALOG_EXPRESSION = Expressions.call( Catalog.class, "getInstance" );
+    public static final Expression SNAPSHOT_EXPRESSION = Expressions.call( Catalog.class, "snapshot" );
+    public static final Expression PHYSICAL_EXPRESSION = Expressions.call( SNAPSHOT_EXPRESSION, "physical" );
+

     public static Catalog setAndGetInstance( Catalog catalog ) {
         if ( INSTANCE != null ) {
diff --git a/core/src/main/java/org/polypheny/db/catalog/catalogs/PhysicalCatalog.java b/core/src/main/java/org/polypheny/db/catalog/catalogs/PhysicalCatalog.java
index 70ebf9e97d..ebe28cd60d 100644
--- a/core/src/main/java/org/polypheny/db/catalog/catalogs/PhysicalCatalog.java
+++ b/core/src/main/java/org/polypheny/db/catalog/catalogs/PhysicalCatalog.java
@@ -19,6 +19,7 @@
 import java.util.List;
 import java.util.concurrent.ConcurrentHashMap;
 import org.polypheny.db.catalog.entity.physical.PhysicalEntity;
+import org.polypheny.db.schema.Namespace;

 public interface PhysicalCatalog {
@@ -31,4 +32,8 @@ public interface PhysicalCatalog {

     void deleteEntity( long id );

+    void addNamespace( long adapterId, Namespace currentSchema );
+
+    ConcurrentHashMap getNamespaces();
+
 }
diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogEntity.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogEntity.java
index 6a194534ba..c62b5711fd 100644
--- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogEntity.java
+++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogEntity.java
@@ -121,4 +121,9 @@ public Statistic getStatistic() {
         return null;
     }

+
+    public String getNamespaceName() {
+        return null;
+    }
+
 }
diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationEntity.java b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationEntity.java
index 9dbc5d64bd..f0b65886d7 100644
--- a/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationEntity.java
+++ b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationEntity.java
@@ -20,13 +20,16 @@
 import lombok.EqualsAndHashCode;
 import lombok.Value;
 import lombok.experimental.NonFinal;
+import lombok.extern.slf4j.Slf4j;
 import org.polypheny.db.catalog.entity.CatalogEntity;
 import org.polypheny.db.catalog.logistic.EntityType;
 import org.polypheny.db.catalog.logistic.NamespaceType;
+import org.polypheny.db.catalog.logistic.PartitionType;

 @EqualsAndHashCode(callSuper = true)
 @Value
 @NonFinal
+@Slf4j
 public abstract class AllocationEntity extends CatalogEntity {

     @Serialize
@@ -52,4 +55,9 @@ public State getCatalogType() {
     }

+    public PartitionType getPartitionType() {
+        log.warn( "change me" );
+        return PartitionType.NONE;
+    }
+
 }
diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalTable.java b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalTable.java
index af2ae87a1f..48d2b5c434 100644
--- a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalTable.java
+++ b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalTable.java
@@ -148,6 +148,7 @@ public List getColumnNames() {
     }

+    @Override
     public String getNamespaceName() {
         return Catalog.getInstance().getSnapshot().getNamespace( namespaceId ).name;
     }
diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalTable.java b/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalTable.java
index c651aa6690..e3d2a014ba 100644
--- a/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalTable.java
+++ b/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalTable.java
@@ -90,7 +90,7 @@ public Serializable[] getParameterArray() {

     @Override
     public Expression asExpression() {
-        return Expressions.call( Catalog.CATALOG_EXPRESSION, "getPhysicalTable", Expressions.constant( id ) );
+        return Expressions.call( Catalog.PHYSICAL_EXPRESSION, "getPhysicalTable", Expressions.constant( id ) );
     }

 }
diff --git a/core/src/main/java/org/polypheny/db/catalog/refactor/Expressible.java b/core/src/main/java/org/polypheny/db/catalog/refactor/Expressible.java
index e036044501..786d4ad266 100644
--- a/core/src/main/java/org/polypheny/db/catalog/refactor/Expressible.java
+++ b/core/src/main/java/org/polypheny/db/catalog/refactor/Expressible.java
@@ -24,7 +24,7 @@ public interface Expressible {
     Expression asExpression();

     default Expression asExpression( Class clazz ) {
-        return Expressions.typeAs( asExpression(), clazz );
+        return Expressions.convert_( asExpression(), clazz );
     }

 }
diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalRelSnapshot.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalRelSnapshot.java
index 75d2cd0e29..89b9678242 100644
--- a/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalRelSnapshot.java
+++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalRelSnapshot.java
@@ -24,7 +24,6 @@
 import org.polypheny.db.catalog.entity.CatalogIndex;
 import org.polypheny.db.catalog.entity.CatalogKey;
 import org.polypheny.db.catalog.entity.CatalogPrimaryKey;
-import org.polypheny.db.catalog.entity.LogicalNamespace;
 import org.polypheny.db.catalog.entity.logical.LogicalColumn;
 import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.catalog.exceptions.UnknownColumnException;
@@ -56,7 +55,6 @@
     /**
      * Returns the table with the given name in the specified schema.
      *
-     * @param namespaceName
      * @param tableName The name of the table
     * @return The table
      * @throws UnknownTableException If there is no table with this name in the specified database and schema.
diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/PhysicalSnapshot.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/PhysicalSnapshot.java
index 2aeda84671..2f0bd29b21 100644
--- a/core/src/main/java/org/polypheny/db/catalog/snapshot/PhysicalSnapshot.java
+++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/PhysicalSnapshot.java
@@ -22,6 +22,7 @@
 import org.polypheny.db.catalog.entity.physical.PhysicalEntity;
 import org.polypheny.db.catalog.entity.physical.PhysicalGraph;
 import org.polypheny.db.catalog.entity.physical.PhysicalTable;
+import org.polypheny.db.schema.Namespace;

 public interface PhysicalSnapshot {
@@ -49,4 +50,6 @@ public interface PhysicalSnapshot {

     List fromAlloc( long id );

+    Namespace getNamespace( long id, long adapterId );
+
 }
diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/AllocSnapshotImpl.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/AllocSnapshotImpl.java
index 021dbaa36a..248e60b4cc 100644
--- a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/AllocSnapshotImpl.java
+++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/AllocSnapshotImpl.java
@@ -23,6 +23,7 @@
 import java.util.Map;
 import java.util.stream.Collectors;
 import lombok.Value;
+import lombok.extern.slf4j.Slf4j;
 import org.polypheny.db.catalog.catalogs.AllocationCatalog;
 import org.polypheny.db.catalog.catalogs.AllocationDocumentCatalog;
 import org.polypheny.db.catalog.catalogs.AllocationGraphCatalog;
@@ -42,12 +43,14 @@
 import org.polypheny.db.catalog.entity.allocation.AllocationTable;
 import org.polypheny.db.catalog.logistic.DataPlacementRole;
 import org.polypheny.db.catalog.logistic.NamespaceType;
+import org.polypheny.db.catalog.logistic.PartitionType;
 import org.polypheny.db.catalog.logistic.Pattern;
 import org.polypheny.db.catalog.snapshot.AllocSnapshot;
 import org.polypheny.db.partition.properties.PartitionProperty;
 import org.polypheny.db.util.Pair;

 @Value
+@Slf4j
 public class AllocSnapshotImpl implements AllocSnapshot {

     ImmutableMap tables;
@@ -517,7 +520,8 @@ public List getCollectionPlacements( long collection

     @Override
     public PartitionProperty getPartitionProperty( long id ) {
-        return null;
+        log.warn( "replace me" );
+        return new PartitionProperty( PartitionType.NONE, false, List.of(), List.of(), -1, -1, -1, false );
     }

diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalRelSnapshotImpl.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalRelSnapshotImpl.java
index 8d8fd5804e..b8eb0df56b 100644
--- a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalRelSnapshotImpl.java
+++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalRelSnapshotImpl.java
@@ -354,7 +354,7 @@ public LogicalTable getTable( long namespaceId, String name ) {
         if ( !namespaces.get( namespaceId ).caseSensitive ) {
             adjustedName = name.toLowerCase();
         }
-        return tableNames.get( adjustedName );
+        return tableNames.get( Pair.of( namespaceId, adjustedName ) );
     }

diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/PhysicalSnapshotImpl.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/PhysicalSnapshotImpl.java
index b554047f7c..24ce67202f 100644
--- a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/PhysicalSnapshotImpl.java
+++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/PhysicalSnapshotImpl.java
@@ -31,6 +31,7 @@
 import org.polypheny.db.catalog.entity.physical.PhysicalGraph;
 import org.polypheny.db.catalog.entity.physical.PhysicalTable;
 import org.polypheny.db.catalog.snapshot.PhysicalSnapshot;
+import org.polypheny.db.schema.Namespace;
 import org.polypheny.db.util.Pair;

 @Value
@@ -38,6 +39,8 @@ public class PhysicalSnapshotImpl implements PhysicalSnapshot {

     ImmutableMap entities;

+    ImmutableMap, Namespace> namespaces;
+
     ImmutableMap, PhysicalEntity> adapterLogicalEntity;

     ImmutableMap> adapterPhysicals;
@@ -48,6 +51,7 @@ public class PhysicalSnapshotImpl implements PhysicalSnapshot {
     public PhysicalSnapshotImpl( Map physicalCatalogs ) {
         this.entities = ImmutableMap.copyOf( physicalCatalogs.values().stream().flatMap( c -> c.getPhysicals().entrySet().stream() ).collect( Collectors.toMap( Entry::getKey, Entry::getValue ) ) );
+        this.namespaces = ImmutableMap.copyOf( physicalCatalogs.values().stream().flatMap( n -> n.getNamespaces().values().stream() ).collect( Collectors.toMap( n -> Pair.of( n.getId(), n.getAdapterId() ), n -> n ) ) );
         this.adapterLogicalEntity = buildAdapterLogicalEntity();
         this.adapterPhysicals = buildAdapterPhysicals();
         this.logicalToPhysicals = buildLogicalToPhysicals();
@@ -163,4 +167,10 @@ public List fromAlloc( long id ) {
     }

+    @Override
+    public Namespace getNamespace( long id, long adapterId ) {
+        return namespaces.get( Pair.of( id, adapterId ) );
+    }
+
+
 }
diff --git a/core/src/main/java/org/polypheny/db/ddl/DdlManager.java b/core/src/main/java/org/polypheny/db/ddl/DdlManager.java
index 22faf1b019..b2e13fee53 100644
--- a/core/src/main/java/org/polypheny/db/ddl/DdlManager.java
+++ b/core/src/main/java/org/polypheny/db/ddl/DdlManager.java
@@ -486,10 +486,10 @@ public static DdlManager getInstance() {
      * @param namespaceId
      * @param constraintName the name of the constraint
      * @param constraintType the type of the constraint
-     * @param columnNames the names of the columns for which to create the constraint
+     * @param columnIds the ids of the columns for which to create the constraint
      * @param tableId the id of the table
      */
-    public abstract void addConstraint( long namespaceId, String constraintName, ConstraintType constraintType, List columnNames, long tableId ) throws UnknownColumnException, GenericCatalogException;
+    public abstract void addConstraint( long namespaceId, String constraintName, ConstraintType constraintType, List columnIds, long tableId ) throws UnknownColumnException, GenericCatalogException;

     /**
      * Drop a schema
diff --git a/core/src/main/java/org/polypheny/db/schema/LogicalSchema.java b/core/src/main/java/org/polypheny/db/schema/LogicalSchema.java
index e1bc6f0026..3585cab33a 100644
--- a/core/src/main/java/org/polypheny/db/schema/LogicalSchema.java
+++ b/core/src/main/java/org/polypheny/db/schema/LogicalSchema.java
@@ -97,8 +97,8 @@ public Set getSubNamespaceNames() {

     @Override
-    public Expression getExpression( Snapshot snapshot, String name ) {
-        return Schemas.subSchemaExpression( snapshot, name, LogicalSchema.class );
+    public Expression getExpression( Snapshot snapshot, long id ) {
+        return Schemas.subSchemaExpression( snapshot, id, null, LogicalSchema.class );
     }

diff --git a/core/src/main/java/org/polypheny/db/schema/Namespace.java b/core/src/main/java/org/polypheny/db/schema/Namespace.java
index 9bad3993f8..f406bf7b81 100644
--- a/core/src/main/java/org/polypheny/db/schema/Namespace.java
+++ b/core/src/main/java/org/polypheny/db/schema/Namespace.java
@@ -64,10 +64,14 @@
  *
  * A schema may be nested within another schema; see {@link Namespace#getSubNamespace(String)}.
  */
-public interface Namespace {
+public interface Namespace extends Wrapper {

     long getId();

+    default Long getAdapterId() {
+        return null;
+    }
+
     /**
      * Returns a sub-schema with a given name, or null.
      *
@@ -135,7 +139,7 @@ public interface Namespace {
-     * @param name Name of this schema
+     * @param id Id of this schema
      * @return Expression by which this schema can be referenced in generated code
      */
-    Expression getExpression( Snapshot snapshot, String name );
+    Expression getExpression( Snapshot snapshot, long id );

     /**
      * Returns whether the user is allowed to create new tables, functions and sub-schemas in this schema, in addition to
diff --git a/core/src/main/java/org/polypheny/db/schema/Schemas.java b/core/src/main/java/org/polypheny/db/schema/Schemas.java
index 604e5b6a47..cd117f01e6 100644
--- a/core/src/main/java/org/polypheny/db/schema/Schemas.java
+++ b/core/src/main/java/org/polypheny/db/schema/Schemas.java
@@ -112,14 +112,15 @@ public static Expression expression( Snapshot snapshot ) {
     /**
      * Returns the expression for a sub-schema.
      */
-    public static Expression subSchemaExpression( Snapshot snapshot, String name, Class type ) {
-        // (Type) schemaExpression.getSubSchema("name")
-        final Expression schemaExpression = expression( snapshot );
+    public static Expression subSchemaExpression( Snapshot snapshot, long id, Long adapterId, Class type ) {
+        // (Type) snapshotExpression.physical().getNamespace( id, adapterId )
+        final Expression schemaExpression = Expressions.call( expression( snapshot ), BuiltInMethod.SNAPSHOT_GET_PHYSICAL.method );
         Expression call = Expressions.call(
                 schemaExpression,
-                BuiltInMethod.SCHEMA_GET_SUB_SCHEMA.method,
-                Expressions.constant( name ) );
+                BuiltInMethod.SNAPSHOT_GET_NAMESPACE.method,
+                Expressions.constant( id ),
+                Expressions.constant( adapterId ) );
         //CHECKSTYLE: IGNORE 2
         //noinspection unchecked
         if ( false && type != null && !type.isAssignableFrom( Namespace.class ) ) {
diff --git a/core/src/main/java/org/polypheny/db/schema/impl/AbstractNamespace.java b/core/src/main/java/org/polypheny/db/schema/impl/AbstractNamespace.java
index 9ee24edfa3..876bfa7bae 100644
--- a/core/src/main/java/org/polypheny/db/schema/impl/AbstractNamespace.java
+++ b/core/src/main/java/org/polypheny/db/schema/impl/AbstractNamespace.java
@@ -87,8 +87,8 @@ public Namespace snapshot( SchemaVersion version ) {

     @Override
-    public Expression getExpression( Snapshot snapshot, String name ) {
-        return Schemas.subSchemaExpression( snapshot, name, getClass() );
+    public Expression getExpression( Snapshot snapshot, long id ) {
+        return Schemas.subSchemaExpression( snapshot, id, null, getClass() );
     }

diff --git a/core/src/main/java/org/polypheny/db/schema/impl/DelegatingNamespace.java b/core/src/main/java/org/polypheny/db/schema/impl/DelegatingNamespace.java
index 59e72295c4..d9346beaf3 100644
--- a/core/src/main/java/org/polypheny/db/schema/impl/DelegatingNamespace.java
+++ b/core/src/main/java/org/polypheny/db/schema/impl/DelegatingNamespace.java
@@ -82,8 +82,8 @@ public Namespace snapshot( SchemaVersion version ) {

     @Override
-    public Expression getExpression( Snapshot snapshot, String name ) {
-        return namespace.getExpression( snapshot, name );
+    public Expression getExpression( Snapshot snapshot, long id ) {
+        return namespace.getExpression( snapshot, id );
     }

diff --git a/core/src/main/java/org/polypheny/db/util/BuiltInMethod.java b/core/src/main/java/org/polypheny/db/util/BuiltInMethod.java
index 14a3d30e9c..54128c32db 100644
--- a/core/src/main/java/org/polypheny/db/util/BuiltInMethod.java
+++ b/core/src/main/java/org/polypheny/db/util/BuiltInMethod.java
@@ -109,6 +109,7 @@
 import org.polypheny.db.algebra.metadata.BuiltInMetadata.UniqueKeys;
 import org.polypheny.db.algebra.metadata.Metadata;
 import org.polypheny.db.catalog.refactor.QueryableEntity;
+import org.polypheny.db.catalog.snapshot.PhysicalSnapshot;
 import org.polypheny.db.catalog.snapshot.Snapshot;
 import org.polypheny.db.interpreter.Context;
 import org.polypheny.db.interpreter.Row;
@@ -163,6 +164,10 @@ public enum BuiltInMethod {
     INTO( ExtendedEnumerable.class, "into", Collection.class ),
     REMOVE_ALL( ExtendedEnumerable.class, "removeAll", Collection.class ),
     SCHEMA_GET_SUB_SCHEMA( Namespace.class, "getSubNamespace", String.class ),
+
+    SNAPSHOT_GET_NAMESPACE( PhysicalSnapshot.class, "getNamespace", long.class, long.class ),
+
+    SNAPSHOT_GET_PHYSICAL( Snapshot.class, "physical" ),
     SCHEMA_GET_TABLE( Namespace.class, "getEntity", String.class ),
     SCHEMA_PLUS_UNWRAP( SchemaPlus.class, "unwrap", Class.class ),
     SCHEMAS_ENUMERABLE_SCANNABLE( Schemas.class, "enumerable", ScannableEntity.class, DataContext.class ),
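A rough sketch of the expression chain the reworked Schemas.subSchemaExpression now emits (reconstructed only from the method handles registered above; the literal ids are hypothetical): instead of resolving a sub-schema by name, generated code fetches a physical namespace from the snapshot by id and adapter id.

    // Roughly the generated call tree, under the assumptions above:
    //   <snapshot expression>.physical().getNamespace( 4L, 1L )
    // replacing the old name-based form:
    //   <schema expression>.getSubNamespace( "name" )
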
diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java
index f4194d0cb0..a689675253 100644
--- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java
+++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java
@@ -23,6 +23,7 @@
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
+import java.util.LinkedHashMap;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
@@ -2207,7 +2208,7 @@ public void createTableOld( long namespaceId, String name, List f

         // addLColumns
-        List ids = new ArrayList<>();
+        Map ids = new LinkedHashMap<>();
         for ( FieldInformation information : fields ) {
-            ids.add( addColumn( namespaceId, information.name, information.typeInformation, information.collation, information.defaultValue, logical.id, information.position, stores, placementType ) );
+            ids.put( information.name, addColumn( namespaceId, information.name, information.typeInformation, information.collation, information.defaultValue, logical.id, information.position, stores, placementType ) );
         }

+        for ( ConstraintInformation constraint : constraints ) {
+            addConstraint( namespaceId, constraint.name, constraint.type, constraint.columnNames.stream().map( ids::get ).collect( Collectors.toList() ), logical.id );
+        }
+        catalog.updateSnapshot();

         // addATable
         for ( DataStore store : stores ) {
             AllocationTable alloc = catalog.getAllocRel( namespaceId ).createAlloctionTable( store.getAdapterId(), logical.id );

             int i = 0;
-            for ( Long id : ids ) {
+            for ( Long id : ids.values() ) {
                 alloc = catalog.getAllocRel( namespaceId ).addColumnPlacement( alloc.id, id, PlacementType.AUTOMATIC, i );
                 i++;
             }
@@ -2923,12 +2928,7 @@ private long addColumn( long namespaceId, String columnName, ColumnTypeInformati

     @Override
-    public void addConstraint( long namespaceId, String constraintName, ConstraintType constraintType, List columnNames, long tableId ) throws UnknownColumnException, GenericCatalogException {
-        List columnIds = new LinkedList<>();
-        for ( String columnName : columnNames ) {
-            LogicalColumn logicalColumn = catalog.getSnapshot().rel().getColumn( tableId, columnName );
-            columnIds.add( logicalColumn.id );
-        }
+    public void addConstraint( long namespaceId, String constraintName, ConstraintType constraintType, List columnIds, long tableId ) throws UnknownColumnException, GenericCatalogException {
         if ( constraintType == ConstraintType.PRIMARY ) {
             catalog.getLogicalRel( namespaceId ).addPrimaryKey( tableId, columnIds );
         } else if ( constraintType == ConstraintType.UNIQUE ) {
diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/AbstractDqlRouter.java b/dbms/src/main/java/org/polypheny/db/routing/routers/AbstractDqlRouter.java
index 611dbe7e5f..bab3ff0f54 100644
--- a/dbms/src/main/java/org/polypheny/db/routing/routers/AbstractDqlRouter.java
+++ b/dbms/src/main/java/org/polypheny/db/routing/routers/AbstractDqlRouter.java
@@ -41,6 +41,7 @@
 import org.polypheny.db.algebra.logical.relational.LogicalRelModify;
 import org.polypheny.db.algebra.logical.relational.LogicalRelScan;
 import org.polypheny.db.algebra.logical.relational.LogicalValues;
+import org.polypheny.db.catalog.Catalog;
 import org.polypheny.db.catalog.entity.logical.LogicalEntity;
 import org.polypheny.db.catalog.entity.logical.LogicalTable;
 import org.polypheny.db.catalog.logistic.NamespaceType;
@@ -128,13 +129,12 @@ public List route( AlgRoot logicalRoot, Statement statement, L
             throw new IllegalStateException( "Should never happen for Iterator" );
         } else {
             RoutedAlgBuilder builder = RoutedAlgBuilder.create( statement, logicalRoot.alg.getCluster() );
-            List routedAlgBuilders = buildDql(
+            return buildDql(
                     logicalRoot.alg,
                     Lists.newArrayList( builder ),
                     statement,
                     logicalRoot.alg.getCluster(),
                     queryInformation );
-            return routedAlgBuilders;
         }
     }
@@ -224,12 +224,12 @@ protected List buildSelect( AlgNode node, List 1 ) {
+            if ( Catalog.snapshot().alloc().getAllocationsFromLogical( catalogTable.id ).size() > 1 ) {
                 return handleVerticalPartitioningOrReplication( node, catalogTable, statement, logicalTable, builders, cluster, queryInformation );
             }
             return handleNonePartitioning( node, catalogTable, statement, builders, cluster, queryInformation );
diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java b/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java
index 2dfabb93eb..50f48ae0c2 100644
--- a/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java
+++ b/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java
@@ -97,7 +97,7 @@ public abstract class BaseRouter implements Router {
             .maximumSize( RuntimeConfig.JOINED_TABLE_SCAN_CACHE_SIZE.getInteger() )
             .build();

-    final static Snapshot snapshot = Catalog.getInstance().getSnapshot();
+    final static Catalog catalog = Catalog.getInstance();

     static {
@@ -112,7 +112,7 @@ protected static Map> selectPlacement( Logica
         // Find the adapter with the most column placements
         long adapterIdWithMostPlacements = -1;
         int numOfPlacements = 0;
-        for ( Entry> entry : snapshot.alloc().getColumnPlacementsByAdapter( table.id ).entrySet() ) {
+        for ( Entry> entry : Catalog.snapshot().alloc().getColumnPlacementsByAdapter( table.id ).entrySet() ) {
             if ( entry.getValue().size() > numOfPlacements ) {
                 adapterIdWithMostPlacements = entry.getKey();
                 numOfPlacements = entry.getValue().size();
@@ -121,12 +121,12 @@ protected static Map> selectPlacement( Logica

         // Take the adapter with most placements as base and add missing column placements
         List placementList = new LinkedList<>();
-        for ( LogicalColumn column : snapshot.rel().getColumns( table.id ) ) {
-            placementList.add( snapshot.alloc().getColumnPlacements( column.id ).get( 0 ) );
+        for ( LogicalColumn column : Catalog.snapshot().rel().getColumns( table.id ) ) {
+            placementList.add( Catalog.snapshot().alloc().getColumnPlacements( column.id ).get( 0 ) );
         }

         return new HashMap<>() {{
-            List allocs = snapshot.alloc().getAllocationsFromLogical( table.id );
+            List allocs = Catalog.snapshot().alloc().getAllocationsFromLogical( table.id );
             put( allocs.get( 0 ).id, placementList );
         }};
     }
@@ -171,7 +171,7 @@ public RoutedAlgBuilder handleScan( Statement statement, long allocId ) {

-        List physicals = snapshot.physical().fromAlloc( allocId );
+        List physicals = Catalog.snapshot().physical().fromAlloc( allocId );
         PhysicalEntity physical = physicals.get( 0 );
         AlgNode node = builder.scan( physical ).build();

@@ -366,7 +366,7 @@ public AlgNode buildJoinedScan( Statement statement, AlgOptCluster cluster, List
             CatalogColumnPlacement placement = allocationEntities.get( 0 ).unwrap( AllocationTable.class ).placements.get( 0 );

             // todo dl: remove after RowType refactor
-            if ( snapshot.getNamespace( placement.namespaceId ).namespaceType == NamespaceType.DOCUMENT ) {
+            if ( Catalog.snapshot().getNamespace( placement.namespaceId ).namespaceType == NamespaceType.DOCUMENT ) {
                 AlgDataType rowType = new AlgRecordType( List.of( new AlgDataTypeFieldImpl( "d", 0, cluster.getTypeFactory().createPolyType( PolyType.DOCUMENT ) ) ) );
                 builder.push( new LogicalTransformer(
                         node.getCluster(),
@@ -445,7 +445,7 @@ public AlgNode handleGraphScan( LogicalLpgScan alg, Statement statement, @Nullab

     private AlgNode handleGraphOnRelational( LogicalLpgScan alg, CatalogNamespace namespace, Statement statement, Long placementId ) {
         AlgOptCluster cluster = alg.getCluster();
-        List tables = snapshot.rel().getTables( namespace.id, null );
+        List tables = Catalog.snapshot().rel().getTables( namespace.id, null );
         List> scans = tables.stream()
                 .map( t -> Pair.of( t.name, buildJoinedScan( statement, cluster, null ) ) )
                 .collect( Collectors.toList() );
@@ -459,7 +459,7 @@ private AlgNode handleGraphOnDocument( LogicalLpgScan alg, CatalogNamespace na
         AlgOptCluster cluster = alg.getCluster();
-        List collections = snapshot.doc().getCollections( namespace.id, null );
+        List collections = Catalog.snapshot().doc().getCollections( namespace.id, null );
         List> scans = collections.stream()
                 .map( t -> {
                     RoutedAlgBuilder algBuilder = RoutedAlgBuilder.create( statement, alg.getCluster() );
diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/CachedPlanRouter.java b/dbms/src/main/java/org/polypheny/db/routing/routers/CachedPlanRouter.java
index fc3c970afa..5eb3b26f32 100644
--- a/dbms/src/main/java/org/polypheny/db/routing/routers/CachedPlanRouter.java
+++ b/dbms/src/main/java/org/polypheny/db/routing/routers/CachedPlanRouter.java
@@ -63,7 +63,7 @@ private RoutedAlgBuilder buildCachedSelect( AlgNode node, RoutedAlgBuilder build
         if ( node instanceof LogicalRelScan && node.getEntity() != null ) {
             LogicalTable catalogTable = node.getEntity().unwrap( LogicalTable.class );

-            PartitionProperty property = snapshot.alloc().getPartitionProperty( catalogTable.id );
+            PartitionProperty property = Catalog.snapshot().alloc().getPartitionProperty( catalogTable.id );
             List partitionIds = property.partitionIds;
             Map> placement = new HashMap<>();
             for ( long partition : partitionIds ) {
diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java b/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java
index 5925f6ddf5..d675736614 100644
--- a/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java
+++ b/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java
@@ -80,6 +80,7 @@
 import org.polypheny.db.catalog.entity.CatalogEntity;
 import org.polypheny.db.catalog.entity.CatalogGraphPlacement;
 import org.polypheny.db.catalog.entity.CatalogPartitionPlacement;
+import org.polypheny.db.catalog.entity.allocation.AllocationEntity;
 import org.polypheny.db.catalog.entity.logical.LogicalCollection;
 import org.polypheny.db.catalog.entity.logical.LogicalColumn;
 import org.polypheny.db.catalog.entity.logical.LogicalGraph;
@@ -120,11 +121,66 @@ public class DmlRouterImpl extends BaseRouter implements DmlRouter {

     private Snapshot snapshot;

+    @Override
+    public AlgNode routeDml( LogicalRelModify modify, Statement statement ) {
+        AlgOptCluster cluster = modify.getCluster();
+
+        if ( modify.getEntity() == null ) {
+            throw new RuntimeException( "Unexpected operator!" );
+        }
+
+        LogicalTable catalogTable = modify.getEntity().unwrap( LogicalTable.class );
+        Snapshot snapshot = statement.getTransaction().getSnapshot();
+
+        // Get placements of this table
+
+        // Make sure that this table can be modified
+        if ( !catalogTable.modifiable ) {
+            if ( catalogTable.entityType == EntityType.ENTITY ) {
+                throw new RuntimeException( "Unable to modify a table marked as read-only!" );
+            } else if ( catalogTable.entityType == EntityType.SOURCE ) {
+                throw new RuntimeException( "The table '" + catalogTable.name + "' is provided by a data source which does not support data modification." );
+            } else if ( catalogTable.entityType == EntityType.VIEW ) {
+                throw new RuntimeException( "Polypheny-DB does not support modifying views." );
+            }
+            throw new RuntimeException( "Unknown table type: " + catalogTable.entityType.name() );
+        }
+
+        long pkid = catalogTable.primaryKey;
+        List pkColumnIds = snapshot.rel().getPrimaryKey( pkid ).columnIds;
+        LogicalColumn pkColumn = snapshot.rel().getColumn( pkColumnIds.get( 0 ) );
+
+        List allocs = snapshot.alloc().getAllocationsFromLogical( catalogTable.id );
+
+        PhysicalTable physical = snapshot.physical().fromAlloc( allocs.get( 0 ).id ).get( 0 ).unwrap( PhysicalTable.class );
+        ModifiableEntity modifiableTable = physical.unwrap( ModifiableEntity.class );
+
+        AlgNode input = buildDmlNew(
+                super.recursiveCopy( modify.getInput( 0 ) ),
+                statement
+        ).build();
+
+        // Build DML
+
+        List updateColumnList = modify.getUpdateColumnList();
+        List sourceExpressionList = modify.getSourceExpressionList();
+
+        return modifiableTable.toModificationAlg(
+                cluster,
+                cluster.traitSet(),
+                physical,
+                input,
+                modify.getOperation(),
+                updateColumnList,
+                sourceExpressionList );
+
+    }
+
+
     /**
      * Default implementation: Execute DML on all placements
      */
-    @Override
-    public AlgNode routeDml( LogicalRelModify modify, Statement statement ) {
+    public AlgNode routeDmlOld( LogicalRelModify modify, Statement statement ) {
         AlgOptCluster cluster = modify.getCluster();

         if ( modify.getEntity() == null ) {
@@ -154,9 +210,9 @@ public AlgNode routeDml( LogicalRelModify modify, Statement statement ) {
         // Essentially gets a list of all stores where this table resides
         List pkPlacements = snapshot.alloc().getColumnPlacements( pkColumn.id );

-        PartitionProperty property = snapshot.alloc().getPartitionProperty( catalogTable.id );
-        if ( property.isPartitioned && log.isDebugEnabled() ) {
-            log.debug( "\nListing all relevant stores for table: '{}' and all partitions: {}", catalogTable.name, property.partitionGroupIds );
+        List allocs = snapshot.alloc().getAllocationsFromLogical( catalogTable.id );//.getPartitionProperty( catalogTable.id );
+        if ( !allocs.isEmpty() && log.isDebugEnabled() ) {
+            log.debug( "\nListing all relevant stores for table: '{}' and all partitions: {}", catalogTable.name, -1 );//property.partitionGroupIds );
             for ( CatalogColumnPlacement dataPlacement : pkPlacements ) {
                 log.debug(
                         "\t\t -> '{}' {}\t{}",
@@ -213,14 +269,14 @@ public AlgNode routeDml( LogicalRelModify modify, Statement statement ) {
                 long identifiedPartitionForSetValue = -1;
                 Set accessedPartitionList = new HashSet<>();
                 // Identify where clause of UPDATE
-                if ( property.isPartitioned ) {
+                if ( allocs.size() > 1 ) {
                     boolean worstCaseRouting = false;
                     Set identifiedPartitionsInFilter = new HashSet<>();

                     PartitionManagerFactory partitionManagerFactory = PartitionManagerFactory.getInstance();
-                    PartitionManager partitionManager = partitionManagerFactory.getPartitionManager( property.partitionType );
+                    PartitionManager partitionManager = partitionManagerFactory.getPartitionManager( allocs.get( 0 ).getPartitionType() );

-                    WhereClauseVisitor whereClauseVisitor = new WhereClauseVisitor( statement, columns.indexOf( property.partitionColumnId ) );
+                    WhereClauseVisitor whereClauseVisitor = new WhereClauseVisitor( statement, columns.indexOf( -1 ) );//property.partitionColumnId ) );
                     modify.accept( new AlgShuttleImpl() {
                         @Override
                         public AlgNode visit( LogicalFilter filter ) {
@@ -260,9 +316,9 @@ public AlgNode visit( LogicalFilter filter ) {

                         int index = 0;
                         for ( String cn : updateColumnList ) {
-                            if ( snapshot.rel().getColumn( catalogTable.id, cn ).id == property.partitionColumnId ) {
+                            if ( snapshot.rel().getColumn( catalogTable.id, cn ).id == -1 ) {//property.partitionColumnId ) {
                                 if ( log.isDebugEnabled() ) {
-                                    log.debug( " UPDATE: Found PartitionColumnID Match: '{}' at index: {}", property.partitionColumnId, index );
+                                    log.debug( " UPDATE: Found PartitionColumnID Match: '{}' at index: {}", -1, index );//property.partitionColumnId, index );
                                 }
                                 // Routing/Locking can now be executed on certain partitions
                                 partitionValue = sourceExpressionList.get( index ).toString().replace( "'", "" );
@@ -318,7 +374,7 @@ else if ( identifiedPartitionForSetValue != -1 ) {
                     //Partition functionality cannot be used --> worstCase --> send query to every partition
                     else {
                         worstCaseRouting = true;
-                        accessedPartitionList = new HashSet<>( property.partitionIds );
+                        accessedPartitionList = allocs.stream().map( a -> a.id ).collect( Collectors.toSet() );
                    }
                 } else if ( modify.getOperation() == Modify.Operation.INSERT ) {
@@ -337,10 +393,10 @@ else if ( identifiedPartitionForSetValue != -1 ) {
                                 resultColMapping.put( columns.stream().map( c -> c.id ).collect( Collectors.toList() ).get( columnIndex ), j );

                                 // Determine location of partitionColumn in fieldList
-                                if ( columns.stream().map( c -> c.id ).collect( Collectors.toList() ).get( columnIndex ) == property.partitionColumnId ) {
+                                if ( columns.stream().map( c -> c.id ).collect( Collectors.toList() ).get( columnIndex ) == -1 ) {//property.partitionColumnId ) {
                                     partitionColumnIndex = columnIndex;
                                     if ( log.isDebugEnabled() ) {
-                                        log.debug( "INSERT: Found PartitionColumnID: '{}' at column index: {}", property.partitionColumnId, j );
+                                        log.debug( "INSERT: Found PartitionColumnID: '{}' at column index: {}", -1, j );//property.partitionColumnId, j );
                                         worstCaseRouting = false;
                                     }
@@ -414,7 +470,7 @@ else if ( identifiedPartitionForSetValue != -1 ) {
                         }
                     } else if ( modify.getInput() instanceof LogicalProject
                             && ((LogicalProject) modify.getInput()).getInput() instanceof LogicalValues ) {
-                        String partitionColumnName = snapshot.rel().getColumn( property.partitionColumnId ).name;
+                        String partitionColumnName = "empty";//snapshot.rel().getColumn( property.partitionColumnId ).name;
                         List fieldNames = modify.getInput().getRowType().getFieldNames();

                         LogicalRelModify ltm = modify;
@@ -515,7 +571,7 @@ else if ( identifiedPartitionForSetValue != -1 ) {
                         }

                         if ( log.isDebugEnabled() ) {
-                            String partitionColumnName = snapshot.rel().getColumn( property.partitionColumnId ).name;
+                            String partitionColumnName = "empty";//snapshot.rel().getColumn( property.partitionColumnId ).name;
                             String partitionName = snapshot.alloc().getPartitionGroup( identPart ).partitionGroupName;
                             log.debug( "INSERT: partitionColumn-value: '{}' should be put on partition: {} ({}), which is partitioned with column {}",
                                     partitionValue, identPart, partitionName, partitionColumnName );
@@ -533,11 +589,11 @@ else if ( identifiedPartitionForSetValue != -1 ) {

                    if ( worstCaseRouting ) {
                         log.debug( "PartitionColumnID was not an explicit part of statement, partition routing will therefore assume worst-case: Routing to ALL PARTITIONS" );
-                        accessedPartitionList = new HashSet<>( property.partitionIds );
+                        accessedPartitionList = allocs.stream().map( a -> a.id ).collect( Collectors.toSet() );//property.partitionIds );
                     }
                 } else {
                     // un-partitioned tables only have one partition anyway
-                    identPart = property.partitionIds.get( 0 );
+                    identPart = allocs.get( 0 ).id;//property.partitionIds.get( 0 );
                     accessedPartitionList.add( identPart );
                 }
@@ -1175,6 +1231,12 @@ private Modify getModify( CatalogEntity table, AlgNode input, Statement state
     }

+    private AlgBuilder buildDmlNew( AlgNode algNode, Statement statement ) {
+
+        return RoutedAlgBuilder.create( statement ).push( algNode );
+    }
+
+
     private AlgBuilder buildDml(
             AlgNode node,
             RoutedAlgBuilder builder,
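A condensed outline of the new routeDml happy path (assembled from the hunk above, with nothing else assumed): the router resolves logical table -> first allocation -> first physical entity, rebuilds the input, and delegates to the store's ModifiableEntity. It deliberately takes the first element of each list, so multi-placement and partition routing remain with the retained routeDmlOld path for now.

    // Shape of the new flow, per the diff above:
    AllocationEntity alloc = snapshot.alloc().getAllocationsFromLogical( catalogTable.id ).get( 0 );
    PhysicalTable physical = snapshot.physical().fromAlloc( alloc.id ).get( 0 ).unwrap( PhysicalTable.class );
    AlgNode input = buildDmlNew( recursiveCopy( modify.getInput( 0 ) ), statement ).build();
    return physical.unwrap( ModifiableEntity.class )
            .toModificationAlg( cluster, cluster.traitSet(), physical, input, modify.getOperation(), updateColumnList, sourceExpressionList );
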
PartitionProperty property = Catalog.snapshot().alloc().getPartitionProperty( catalogTable.id ); PartitionManager partitionManager = partitionManagerFactory.getPartitionManager( property.partitionType ); // Utilize scanId to retrieve Partitions being accessed @@ -148,7 +149,7 @@ protected Set> selectPlacement( LogicalTable catalo List usedColumns = queryInformation.getAllColumnsPerTable( catalogTable.id ); // Filter for placements by adapters - List adapters = snapshot.alloc().getColumnPlacementsByAdapter( catalogTable.id ).entrySet() + List adapters = Catalog.snapshot().alloc().getColumnPlacementsByAdapter( catalogTable.id ).entrySet() .stream() .filter( elem -> elem.getValue().containsAll( usedColumns ) ) .map( Entry::getKey ) @@ -157,7 +158,7 @@ protected Set> selectPlacement( LogicalTable catalo final Set> result = new HashSet<>(); for ( long adapterId : adapters ) { List placements = usedColumns.stream() - .map( colId -> snapshot.alloc().getColumnPlacement( adapterId, colId ) ) + .map( colId -> Catalog.snapshot().alloc().getColumnPlacement( adapterId, colId ) ) .collect( Collectors.toList() ); if ( !placements.isEmpty() ) { diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/IcarusRouter.java b/dbms/src/main/java/org/polypheny/db/routing/routers/IcarusRouter.java index 93cdb680d6..981832dfd8 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/IcarusRouter.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/IcarusRouter.java @@ -27,6 +27,7 @@ import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; import org.polypheny.db.algebra.AlgNode; +import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; import org.polypheny.db.catalog.entity.logical.LogicalEntity; import org.polypheny.db.catalog.entity.logical.LogicalTable; @@ -72,7 +73,7 @@ protected List handleNonePartitioning( AlgNode node, LogicalTa if ( builders.size() == 1 && builders.get( 0 ).getPhysicalPlacementsOfPartitions().isEmpty() ) { for ( List currentPlacement : placements ) { final Map> currentPlacementDistribution = new HashMap<>(); - PartitionProperty property = snapshot.alloc().getPartitionProperty( catalogTable.id ); + PartitionProperty property = Catalog.snapshot().alloc().getPartitionProperty( catalogTable.id ); currentPlacementDistribution.put( property.partitionIds.get( 0 ), currentPlacement ); final RoutedAlgBuilder newBuilder = RoutedAlgBuilder.createCopy( statement, cluster, builders.get( 0 ) ); @@ -90,7 +91,7 @@ protected List handleNonePartitioning( AlgNode node, LogicalTa for ( List currentPlacement : placements ) { final Map> currentPlacementDistribution = new HashMap<>(); - PartitionProperty property = snapshot.alloc().getPartitionProperty( catalogTable.id ); + PartitionProperty property = Catalog.snapshot().alloc().getPartitionProperty( catalogTable.id ); currentPlacementDistribution.put( property.partitionIds.get( 0 ), currentPlacement ); // AdapterId for all col placements same diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/SimpleRouter.java b/dbms/src/main/java/org/polypheny/db/routing/routers/SimpleRouter.java index 4374e02ea1..3a4814e9cc 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/SimpleRouter.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/SimpleRouter.java @@ -22,6 +22,7 @@ import java.util.Map; import lombok.extern.slf4j.Slf4j; import org.polypheny.db.algebra.AlgNode; +import org.polypheny.db.catalog.Catalog; import 
org.polypheny.db.catalog.entity.CatalogColumnPlacement; import org.polypheny.db.catalog.entity.allocation.AllocationEntity; import org.polypheny.db.catalog.entity.logical.LogicalEntity; @@ -56,7 +57,7 @@ protected List handleVerticalPartitioningOrReplication( AlgNod protected List handleNonePartitioning( AlgNode node, LogicalTable catalogTable, Statement statement, List builders, AlgOptCluster cluster, LogicalQueryInformation queryInformation ) { // Get placements and convert into placement distribution // final Map> placements = selectPlacement( catalogTable ); - List entities = snapshot.alloc().getAllocationsFromLogical( catalogTable.id ); + List entities = Catalog.snapshot().alloc().getAllocationsFromLogical( catalogTable.id ); // Only one builder available // builders.get( 0 ).addPhysicalInfo( placements ); @@ -69,7 +70,7 @@ protected List handleNonePartitioning( AlgNode node, LogicalTa @Override protected List handleHorizontalPartitioning( AlgNode node, LogicalTable catalogTable, Statement statement, LogicalEntity logicalTable, List builders, AlgOptCluster cluster, LogicalQueryInformation queryInformation ) { PartitionManagerFactory partitionManagerFactory = PartitionManagerFactory.getInstance(); - PartitionProperty property = snapshot.alloc().getPartitionProperty( catalogTable.id ); + PartitionProperty property = Catalog.snapshot().alloc().getPartitionProperty( catalogTable.id ); PartitionManager partitionManager = partitionManagerFactory.getPartitionManager( property.partitionType ); // Utilize scanId to retrieve Partitions being accessed diff --git a/dbms/src/test/java/org/polypheny/db/sql/clause/SimpleSqlTest.java b/dbms/src/test/java/org/polypheny/db/sql/clause/SimpleSqlTest.java index 38659cb949..a1b3a2ee46 100644 --- a/dbms/src/test/java/org/polypheny/db/sql/clause/SimpleSqlTest.java +++ b/dbms/src/test/java/org/polypheny/db/sql/clause/SimpleSqlTest.java @@ -16,21 +16,12 @@ package org.polypheny.db.sql.clause; -import com.google.common.collect.ImmutableList; -import java.sql.Connection; import java.sql.SQLException; -import java.sql.Statement; import java.util.List; import org.junit.BeforeClass; import org.junit.Ignore; import org.junit.Test; -import org.junit.experimental.categories.Category; import org.polypheny.db.TestHelper; -import org.polypheny.db.TestHelper.JdbcConnection; -import org.polypheny.db.excluded.CottontailExcluded; -import org.polypheny.db.excluded.FileExcluded; -import org.polypheny.db.util.Template; -import org.polypheny.db.util.TestUtil; @SuppressWarnings({ "SqlDialectInspection", "SqlNoDataSourceInspection" }) public class SimpleSqlTest { @@ -80,4 +71,24 @@ public void insert() throws SQLException { } + + @Test + public void select() throws SQLException { + List data = List.of( + new Object[]{ 12, "Name1", 60 }, + new Object[]{ 15, "Name2", 24 }, + new Object[]{ 99, "Name3", 11 } + ); + TestHelper.executeSql( + ( c, s ) -> s.executeUpdate( "CREATE TABLE TableA(ID INTEGER NOT NULL, NAME VARCHAR(20), AGE INTEGER, PRIMARY KEY (ID))" ), + ( c, s ) -> s.executeUpdate( "INSERT INTO TableA VALUES (12, 'Name1', 60)" ), + ( c, s ) -> s.executeUpdate( "INSERT INTO TableA VALUES (15, 'Name2', 24)" ), + ( c, s ) -> s.executeUpdate( "INSERT INTO TableA VALUES (99, 'Name3', 11)" ), + ( c, s ) -> TestHelper.checkResultSet( s.executeQuery( "SELECT * FROM TableA" ), data, true ), + ( c, s ) -> s.executeUpdate( "DROP TABLE TableA" ), + ( c, s ) -> c.commit() + ); + + } + } diff --git 
a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcEntity.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcEntity.java index 47fe65d1e3..84e56ec413 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcEntity.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcEntity.java @@ -189,6 +189,7 @@ public SqlIdentifier physicalTableName() { public SqlIdentifier physicalColumnName( String logicalColumnName ) { + String physicalName = columns.get( List.copyOf( allocation.getColumnNamesId().values() ).indexOf( logicalColumnName ) ); return new SqlIdentifier( Collections.singletonList( physicalName ), ParserPos.ZERO ); } diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcRules.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcRules.java index 7ad7427674..031694eb0f 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcRules.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcRules.java @@ -69,6 +69,7 @@ import org.polypheny.db.algebra.metadata.AlgMetadataQuery; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeField; +import org.polypheny.db.catalog.refactor.ModifiableEntity; import org.polypheny.db.nodes.Function; import org.polypheny.db.nodes.Operator; import org.polypheny.db.plan.AlgOptCluster; @@ -88,7 +89,6 @@ import org.polypheny.db.rex.RexProgram; import org.polypheny.db.rex.RexVisitorImpl; import org.polypheny.db.schema.ModelTrait; -import org.polypheny.db.schema.ModifiableEntity; import org.polypheny.db.schema.document.DocumentRules; import org.polypheny.db.sql.language.SqlAggFunction; import org.polypheny.db.sql.language.SqlDialect; @@ -1004,7 +1004,7 @@ private JdbcTableModificationRule( JdbcConvention out, AlgBuilderFactory algBuil @Override public boolean matches( AlgOptRuleCall call ) { - final RelModify modify = call.alg( 0 ); + final RelModify modify = call.alg( 0 ); if ( modify.getEntity().unwrap( JdbcEntity.class ) != null ) { JdbcEntity table = modify.getEntity().unwrap( JdbcEntity.class ); if ( out.getJdbcSchema() == table.getSchema() ) { @@ -1018,7 +1018,7 @@ public boolean matches( AlgOptRuleCall call ) { @Override public AlgNode convert( AlgNode alg ) { final RelModify modify = (RelModify) alg; - final ModifiableEntity modifiableTable = modify.getEntity().unwrap( ModifiableEntity.class ); + final org.polypheny.db.catalog.refactor.ModifiableEntity modifiableTable = modify.getEntity().unwrap( org.polypheny.db.catalog.refactor.ModifiableEntity.class ); if ( modifiableTable == null ) { return null; } diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcSchema.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcSchema.java index c82e7018c0..e3a5664d5e 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcSchema.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcSchema.java @@ -134,6 +134,12 @@ public JdbcSchema( } + @Override + public Long getAdapterId() { + return adapter.getAdapterId(); + } + + public JdbcEntity createJdbcTable( long id, LogicalTable logicalTable, @@ -154,7 +160,7 @@ public static JdbcSchema create( ConnectionFactory connectionFactory, SqlDialect dialect, Adapter adapter ) { - final
Expression expression = Schemas.subSchemaExpression( snapshot, name, JdbcSchema.class ); + final Expression expression = Schemas.subSchemaExpression( snapshot, id, adapter.getAdapterId(), JdbcSchema.class ); final JdbcConvention convention = JdbcConvention.of( dialect, expression, name ); return new JdbcSchema( id, connectionFactory, dialect, convention, adapter ); } @@ -199,8 +205,8 @@ public ConnectionHandler getConnectionHandler( DataContext dataContext ) { @Override - public Expression getExpression( Snapshot snapshot, String name ) { - return Schemas.subSchemaExpression( snapshot, name, JdbcSchema.class ); + public Expression getExpression( Snapshot snapshot, long id ) { + return Schemas.subSchemaExpression( snapshot, id, getAdapterId(), JdbcSchema.class ); } diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcToEnumerableConverter.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcToEnumerableConverter.java index f9f6d042e7..5b1b04d5fb 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcToEnumerableConverter.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcToEnumerableConverter.java @@ -34,10 +34,31 @@ package org.polypheny.db.adapter.jdbc; +import java.io.PushbackInputStream; +import java.lang.reflect.Method; +import java.lang.reflect.Modifier; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.Calendar; +import java.util.List; +import java.util.TimeZone; +import java.util.stream.Collectors; import org.apache.calcite.linq4j.function.Function1; -import org.apache.calcite.linq4j.tree.*; +import org.apache.calcite.linq4j.tree.BlockBuilder; +import org.apache.calcite.linq4j.tree.ConstantExpression; +import org.apache.calcite.linq4j.tree.Expression; +import org.apache.calcite.linq4j.tree.Expressions; +import org.apache.calcite.linq4j.tree.ParameterExpression; +import org.apache.calcite.linq4j.tree.Primitive; +import org.apache.calcite.linq4j.tree.Types; +import org.apache.calcite.linq4j.tree.UnaryExpression; import org.polypheny.db.adapter.DataContext; -import org.polypheny.db.adapter.enumerable.*; +import org.polypheny.db.adapter.enumerable.EnumerableAlg; +import org.polypheny.db.adapter.enumerable.EnumerableAlgImplementor; +import org.polypheny.db.adapter.enumerable.JavaRowFormat; +import org.polypheny.db.adapter.enumerable.PhysType; +import org.polypheny.db.adapter.enumerable.PhysTypeImpl; import org.polypheny.db.adapter.java.JavaTypeFactory; import org.polypheny.db.adapter.jdbc.connection.ConnectionHandler; import org.polypheny.db.algebra.AbstractAlgNode; @@ -46,7 +67,12 @@ import org.polypheny.db.algebra.metadata.AlgMetadataQuery; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.config.RuntimeConfig; -import org.polypheny.db.plan.*; +import org.polypheny.db.plan.AlgOptCluster; +import org.polypheny.db.plan.AlgOptCost; +import org.polypheny.db.plan.AlgOptPlanner; +import org.polypheny.db.plan.AlgTraitSet; +import org.polypheny.db.plan.ConventionTraitDef; +import org.polypheny.db.prepare.JavaTypeFactoryImpl; import org.polypheny.db.runtime.Hook; import org.polypheny.db.runtime.functions.Functions; import org.polypheny.db.schema.Schemas; @@ -61,17 +87,6 @@ import org.polypheny.db.type.PolyTypeFamily; import org.polypheny.db.util.BuiltInMethod; -import java.io.PushbackInputStream; -import java.lang.reflect.Method; -import java.lang.reflect.Modifier; 
-import java.sql.ResultSet; -import java.sql.SQLException; -import java.util.ArrayList; -import java.util.Calendar; -import java.util.List; -import java.util.TimeZone; -import java.util.stream.Collectors; - /** * Relational expression representing a scan of a table in a JDBC data source. @@ -418,7 +433,7 @@ private String jdbcGetMethod( Primitive primitive ) { private SqlString generateSql( SqlDialect dialect, JdbcSchema jdbcSchema ) { - final JdbcImplementor jdbcImplementor = new JdbcImplementor( dialect, (JavaTypeFactory) getCluster().getTypeFactory(), jdbcSchema ); + final JdbcImplementor jdbcImplementor = new JdbcImplementor( dialect, (JavaTypeFactory) new JavaTypeFactoryImpl(), jdbcSchema ); final JdbcImplementor.Result result = jdbcImplementor.visitChild( 0, getInput() ); return result.asStatement().toSqlString( dialect ); } diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/rel2sql/AlgToSqlConverter.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/rel2sql/AlgToSqlConverter.java index 69f1232340..8ca010a80d 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/rel2sql/AlgToSqlConverter.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/rel2sql/AlgToSqlConverter.java @@ -519,7 +519,8 @@ private SqlNodeList identifierList( List names ) { * Converts a list of names expressions to a list of single-part {@link SqlIdentifier}s. */ private SqlNodeList physicalIdentifierList( JdbcEntity partitionPlacement, List columnNames ) { - return new SqlNodeList( columnNames.stream().map( columnName -> getPhysicalColumnName( partitionPlacement, columnName ) ).collect( Collectors.toList() ), POS ); + return new SqlNodeList( partitionPlacement.columns.values().stream().map( c -> new SqlIdentifier( c, ParserPos.ZERO ) ).collect( Collectors.toList() ), POS ); + //return new SqlNodeList( columnNames.stream().map( columnName -> getPhysicalColumnName( partitionPlacement, columnName ) ).collect( Collectors.toList() ), POS ); } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java index 73920840b5..aecaf15041 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java @@ -25,7 +25,6 @@ import java.util.Collections; import java.util.List; import java.util.Map; -import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import lombok.Getter; import lombok.extern.slf4j.Slf4j; @@ -302,14 +301,20 @@ private void addDefaultColumn( CatalogAdapter adapter, LogicalTable table, Strin @Override public void updateSnapshot() { // reset physical catalogs - Set keys = this.physicalCatalogs.keySet(); - keys.forEach( k -> this.physicalCatalogs.replace( k, new PolyPhysicalCatalog() ) ); + // Set keys = this.physicalCatalogs.keySet(); + // keys.forEach( k -> this.physicalCatalogs.replace( k, new PolyPhysicalCatalog() ) ); // generate new physical entities, atm only relational this.allocationCatalogs.forEach( ( k, v ) -> { if ( v.getNamespace().namespaceType == NamespaceType.RELATIONAL ) { ((AllocationRelationalCatalog) v).getTables().forEach( ( k2, v2 ) -> { - AdapterManager.getInstance().getAdapter( v2.adapterId ).createNewSchema( getSnapshot(), v2.name, v2.namespaceId ); + Adapter adapter = AdapterManager.getInstance().getAdapter( v2.adapterId ); + + if ( 
adapter.getCurrentSchema() == null || adapter.getCurrentSchema().getId() != v2.namespaceId ) { + adapter.createNewSchema( getSnapshot(), v2.name, v2.namespaceId ); + getPhysical( v2.namespaceId ).addNamespace( adapter.getAdapterId(), adapter.getCurrentSchema() ); + } + LogicalTable table = getSnapshot().getLogicalEntity( v2.logicalId ).unwrap( LogicalTable.class ); List physicals = AdapterManager.getInstance().getAdapter( v2.adapterId ).createAdapterTable( idBuilder, table, v2 ); getPhysical( table.namespaceId ).addEntities( physicals ); diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/RelationalCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/RelationalCatalog.java index 2898cd121a..842a8ac190 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/RelationalCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/RelationalCatalog.java @@ -194,7 +194,6 @@ public void setPrimaryKey( long tableId, Long keyId ) { tables.put( tableId, tables.get( tableId ).toBuilder().primaryKey( keyId ).build() ); keys.put( keyId, new CatalogPrimaryKey( keys.get( keyId ) ) ); - } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/physical/PolyPhysicalCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/physical/PolyPhysicalCatalog.java index 6c2d13d137..86dded6330 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/physical/PolyPhysicalCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/physical/PolyPhysicalCatalog.java @@ -16,27 +16,32 @@ package org.polypheny.db.catalog.physical; +import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import lombok.Getter; import org.polypheny.db.catalog.catalogs.PhysicalCatalog; import org.polypheny.db.catalog.entity.physical.PhysicalEntity; +import org.polypheny.db.schema.Namespace; public class PolyPhysicalCatalog implements PhysicalCatalog { @Getter private final ConcurrentHashMap physicals; + @Getter + private final ConcurrentHashMap namespaces; + public PolyPhysicalCatalog() { - this( new ConcurrentHashMap<>() ); + this( new ConcurrentHashMap<>(), new HashMap<>() ); } - public PolyPhysicalCatalog( Map physicals ) { + public PolyPhysicalCatalog( Map physicals, Map namespaces ) { this.physicals = new ConcurrentHashMap<>( physicals ); - + this.namespaces = new ConcurrentHashMap<>( namespaces ); } @@ -63,4 +68,10 @@ public void deleteEntity( long id ) { } + @Override + public void addNamespace( long adapterId, Namespace currentSchema ) { + namespaces.put( adapterId, currentSchema ); + } + + } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/DelegatingScope.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/DelegatingScope.java index a22c8cd78e..69f979b060 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/DelegatingScope.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/DelegatingScope.java @@ -24,6 +24,7 @@ import java.util.Comparator; import java.util.List; import java.util.Map; +import java.util.Map.Entry; import org.polypheny.db.algebra.constant.MonikerType; import org.polypheny.db.algebra.constant.Monotonicity; import org.polypheny.db.algebra.type.AlgDataType; @@ -96,7 +97,7 @@ public void resolve( List names, NameMatcher nameMatcher, boolean deep, */ void 
resolveInNamespace( SqlValidatorNamespace ns, boolean nullable, List names, NameMatcher nameMatcher, Path path, Resolved resolved ) { if ( names.isEmpty() ) { - resolved.found( validator, null ); + resolved.found( ns, nullable, this, path, null ); return; } final AlgDataType rowType = ns.getRowType(); @@ -311,9 +312,10 @@ public SqlQualified fullyQualify( SqlIdentifier identifier ) { resolved.clear(); resolve( prefix.names, liberalMatcher, false, resolved ); if ( resolved.count() == 1 ) { + final Step lastStep = Util.last( resolved.only().path.steps() ); throw validator.newValidationError( prefix, - Static.RESOURCE.tableNameNotFoundDidYouMean( prefix.toString(), resolved.only().getEntity().name ) ); + Static.RESOURCE.tableNameNotFoundDidYouMean( prefix.toString(), lastStep.name ) ); } } } @@ -321,44 +323,42 @@ public SqlQualified fullyQualify( SqlIdentifier identifier ) { // Look for a column not qualified by a table alias. columnName = identifier.names.get( 0 ); final Map map = findQualifyingTableNames( columnName, identifier, nameMatcher ); - switch ( map.size() ) { - default: - final SqlIdentifier prefix1 = identifier.skipLast( 1 ); - throw validator.newValidationError( prefix1, Static.RESOURCE.tableNameNotFound( prefix1.toString() ) ); - case 1: { - final Map.Entry entry = map.entrySet().iterator().next(); - final String tableName2 = map.keySet().iterator().next(); - - fromPath = Path.EMPTY; - - // Adding table name is for RecordType column with StructKind.PEEK_FIELDS or StructKind.PEEK_FIELDS only. - // Access to a field in a RecordType column of other StructKind should always be qualified with table name. - final AlgDataTypeField field = nameMatcher.field( fromNs.getRowType(), columnName ); - if ( field != null ) { - switch ( field.getType().getStructKind() ) { - case PEEK_FIELDS: - case PEEK_FIELDS_DEFAULT: - case PEEK_FIELDS_NO_EXPAND: - columnName = field.getName(); // use resolved field name - resolve( ImmutableList.of( tableName2 ), nameMatcher, false, resolved ); - if ( resolved.count() == 1 ) { - final Resolve resolve = resolved.only(); - fromRowType = resolve.rowType(); - identifier = identifier - .setName( 0, columnName ) - .add( 0, tableName2, ParserPos.ZERO ); - ++i; - ++size; - } - break; - default: - // Throw an error if the table was not found. - // If one or more of the child namespaces allows peeking (e.g. if they are Phoenix column families) then we relax the SQL standard requirement that record fields are qualified by table alias. - final SqlIdentifier prefix = identifier.skipLast( 1 ); - throw validator.newValidationError( prefix, Static.RESOURCE.tableNameNotFound( prefix.toString() ) ); - } + if ( map.size() == 1 ) { + final Entry entry = map.entrySet().iterator().next(); + final String tableName2 = map.keySet().iterator().next(); + + fromPath = Path.EMPTY; + + // Adding table name is for RecordType column with StructKind.PEEK_FIELDS or StructKind.PEEK_FIELDS only. + // Access to a field in a RecordType column of other StructKind should always be qualified with table name. 
+ final AlgDataTypeField field = nameMatcher.field( fromNs.getRowType(), columnName ); + if ( field != null ) { + switch ( field.getType().getStructKind() ) { + case PEEK_FIELDS: + case PEEK_FIELDS_DEFAULT: + case PEEK_FIELDS_NO_EXPAND: + columnName = field.getName(); // use resolved field name + resolve( ImmutableList.of( tableName2 ), nameMatcher, false, resolved ); + if ( resolved.count() == 1 ) { + final Resolve resolve = resolved.only(); + fromRowType = resolve.rowType(); + identifier = identifier + .setName( 0, columnName ) + .add( 0, tableName2, ParserPos.ZERO ); + ++i; + ++size; + } + break; + default: + // Throw an error if the table was not found. + // If one or more of the child namespaces allows peeking (e.g. if they are Phoenix column families) then we relax the SQL standard requirement that record fields are qualified by table alias. + final SqlIdentifier prefix = identifier.skipLast( 1 ); + throw validator.newValidationError( prefix, Static.RESOURCE.tableNameNotFound( prefix.toString() ) ); } } + } else { + final SqlIdentifier prefix1 = identifier.skipLast( 1 ); + throw validator.newValidationError( prefix1, Static.RESOURCE.tableNameNotFound( prefix1.toString() ) ); } } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/EmptyScope.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/EmptyScope.java index 181837921b..939144d3b7 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/EmptyScope.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/EmptyScope.java @@ -19,13 +19,12 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; +import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.Map; -import org.apache.commons.lang3.NotImplementedException; import org.polypheny.db.algebra.constant.Monotonicity; import org.polypheny.db.algebra.type.AlgDataType; -import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.schema.PolyphenyDbSchema; @@ -92,20 +91,25 @@ public SqlValidatorNamespace getTableNamespace( List names ) { @Override public void resolveTable( List names, NameMatcher nameMatcher, Path path, Resolved resolved ) { + final List imperfectResolves = new ArrayList<>(); final List resolves = ((ResolvedImpl) resolved).resolves; // Look in the default schema, then default catalog, then root schema. - LogicalTable table; - if ( names.size() == 2 ) { - table = validator.snapshot.rel().getTable( names.get( 0 ), names.get( 1 ) ); - } else if ( names.size() == 1 ) { - table = validator.snapshot.rel().getTable( Catalog.defaultNamespaceName, names.get( 0 ) ); - } else { - throw new NotImplementedException(); - } - - if ( table != null ) { - resolves.add( new Resolve( validator, table ) ); + /*for ( List schemaPath : validator.catalogReader.getSchemaPaths() ) { + //resolve_( validator.catalogReader.getRootSchema(), names, schemaPath, nameMatcher, path, resolved ); + for ( Resolve resolve : resolves ) { + if ( resolve.remainingNames.isEmpty() ) { + // There is a full match. Return it as the only match. 
+ ((ResolvedImpl) resolved).clear(); + resolves.add( resolve ); + return; + } + } + imperfectResolves.addAll( resolves ); + }*/ + // If there were no matches in the last round, restore those found in previous rounds + if ( resolves.isEmpty() ) { + resolves.addAll( imperfectResolves ); } } @@ -116,7 +120,7 @@ private void resolve_( final PolyphenyDbSchema rootSchema, List names, L LogicalTable table = rootSchema.getTable( concat ); if ( table != null ) { - resolved.found( validator, table ); + //resolved.found( validator, table ); return; } } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/EntityNamespace.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/EntityNamespace.java index 57a6783983..a218790e6b 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/EntityNamespace.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/EntityNamespace.java @@ -21,7 +21,6 @@ import java.util.List; import java.util.Map; import lombok.Getter; -import lombok.NonNull; import org.polypheny.db.algebra.constant.Monotonicity; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory.Builder; @@ -51,7 +50,7 @@ class EntityNamespace extends AbstractNamespace { /** * Creates a TableNamespace. */ - private EntityNamespace( SqlValidatorImpl validator, @NonNull CatalogEntity entity, List fields ) { + private EntityNamespace( SqlValidatorImpl validator, CatalogEntity entity, List fields ) { super( validator, null ); this.table = entity; this.extendedFields = ImmutableList.copyOf( fields ); diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/IdentifierNamespace.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/IdentifierNamespace.java index 60b14c2ca5..8cf36c35c4 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/IdentifierNamespace.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/IdentifierNamespace.java @@ -151,7 +151,14 @@ private SqlValidatorNamespace resolveImpl( SqlIdentifier id ) { } } List ns = id.names; - return new EntityNamespace( validator, Catalog.getInstance().getSnapshot().rel().getTable( ns.get( 0 ), ns.get( 1 ) ) ); + if ( ns.size() == 1 ) { + return new EntityNamespace( validator, Catalog.getInstance().getSnapshot().rel().getTable( Catalog.defaultNamespaceId, ns.get( 0 ) ) ); + } else if ( ns.size() == 2 ) { + return new EntityNamespace( validator, Catalog.getInstance().getSnapshot().rel().getTable( ns.get( 0 ), ns.get( 1 ) ) ); + } else { + throw new RuntimeException(); + } + } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/ListScope.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/ListScope.java index 93651379e2..77b45b776b 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/ListScope.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/ListScope.java @@ -18,16 +18,17 @@ import com.google.common.collect.ImmutableList; -import com.google.common.collect.Lists; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.stream.Collectors; import org.polypheny.db.algebra.constant.MonikerType; import org.polypheny.db.algebra.type.AlgDataType; 
import org.polypheny.db.algebra.type.AlgDataTypeField; +import org.polypheny.db.algebra.type.StructKind; import org.polypheny.db.sql.language.SqlNode; import org.polypheny.db.util.Moniker; import org.polypheny.db.util.MonikerImpl; @@ -66,7 +67,7 @@ public void addChild( SqlValidatorNamespace ns, String alias, boolean nullable ) * @return list of child namespaces */ public List getChildren() { - return Lists.transform( children, scopeChild -> scopeChild.namespace ); + return children.stream().map( scopeChild -> scopeChild.namespace ).collect( Collectors.toList() ); } @@ -76,7 +77,7 @@ public List getChildren() { * @return list of child namespaces */ List getChildNames() { - return Lists.transform( children, scopeChild -> scopeChild.name ); + return children.stream().map( scopeChild -> scopeChild.name ).collect( Collectors.toList() ); } @@ -130,8 +131,7 @@ public void findAliases( Collection result ) { @Override - public Pair - findQualifyingTableName( final String columnName, SqlNode ctx ) { + public Pair findQualifyingTableName( final String columnName, SqlNode ctx ) { final NameMatcher nameMatcher = validator.snapshot.nameMatcher; final Map map = findQualifyingTableNames( columnName, ctx, nameMatcher ); switch ( map.size() ) { @@ -147,8 +147,7 @@ public void findAliases( Collection result ) { @Override - public Map - findQualifyingTableNames( String columnName, SqlNode ctx, NameMatcher nameMatcher ) { + public Map findQualifyingTableNames( String columnName, SqlNode ctx, NameMatcher nameMatcher ) { final Map map = new HashMap<>(); for ( ScopeChild child : children ) { final ResolvedImpl resolved = new ResolvedImpl(); @@ -157,12 +156,10 @@ public void findAliases( Collection result ) { map.put( child.name, child ); } } - switch ( map.size() ) { - case 0: - return parent.findQualifyingTableNames( columnName, ctx, nameMatcher ); - default: - return map; + if ( map.size() == 0 ) { + return parent.findQualifyingTableNames( columnName, ctx, nameMatcher ); } + return map; } @@ -172,19 +169,19 @@ public void resolve( List names, NameMatcher nameMatcher, boolean deep, final ScopeChild child0 = findChild( names, nameMatcher ); if ( child0 != null ) { - /*final Step path = + final Step path = Path.EMPTY.plus( child0.namespace.getRowType(), child0.ordinal, child0.name, StructKind.FULLY_QUALIFIED ); + resolved.found( child0.namespace, child0.nullable, this, path, - null );*/ - return; + null ); } // Recursively look deeper into the record-valued fields of the namespace, if it allows skipping fields. 
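For orientation before the SqlValidatorImpl hunks that follow: the validator changes above reinstate Calcite-style name resolution, where every candidate match records the namespace it landed in plus a path of steps, and a resolve counts as a full match only when no names remain unmatched. The following is a minimal self-contained sketch of that bookkeeping; Step, Resolve, and resolve below are simplified stand-ins invented for illustration, not the actual Polypheny classes.

import java.util.ArrayList;
import java.util.List;

public final class ResolutionSketch {

    // Stand-in for SqlValidatorScope.Path: the chain of names a lookup walked through.
    record Step( String name, int ordinal ) { }

    // Stand-in for SqlValidatorScope.Resolve: what was matched, and what is still unmatched.
    record Resolve( List<Step> path, List<String> remainingNames ) {

        boolean isFullMatch() {
            // Same convention as the patched Resolve class: empty remainingNames means a full match.
            return remainingNames.isEmpty();
        }
    }

    // Match a compound identifier prefix against the names a scope knows about.
    static Resolve resolve( List<String> names, List<String> knownNames ) {
        List<Step> path = new ArrayList<>();
        int i = 0;
        while ( i < names.size() && knownNames.contains( names.get( i ) ) ) {
            path.add( new Step( names.get( i ), i ) );
            i++;
        }
        // Unmatched names are carried along instead of being dropped, so callers
        // can distinguish a full match from a partial one.
        return new Resolve( List.copyOf( path ), List.copyOf( names.subList( i, names.size() ) ) );
    }

    public static void main( String[] args ) {
        System.out.println( resolve( List.of( "emp" ), List.of( "emp", "dept" ) ).isFullMatch() ); // true
        System.out.println( resolve( List.of( "emp", "address", "zip" ), List.of( "emp" ) ).remainingNames() ); // [address, zip]
    }
}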
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorImpl.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorImpl.java index fa175702f3..b24c273f0a 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorImpl.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorImpl.java @@ -69,6 +69,7 @@ import org.polypheny.db.algebra.type.AlgDataTypeSystem; import org.polypheny.db.algebra.type.AlgRecordType; import org.polypheny.db.algebra.type.DynamicRecordType; +import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.logistic.NamespaceType; @@ -3343,6 +3344,7 @@ private Pair findTableColumnPair( SqlIdentifier identifier, SqlV return null; } SqlQualified qualified = scope.fullyQualify( identifier ); + List names = qualified.identifier.names; if ( names.size() < 2 ) { @@ -3391,7 +3393,7 @@ private boolean isRolledUpColumn( SqlIdentifier identifier, SqlValidatorScope sc private @Nullable CatalogEntity findTable( String tableName, boolean caseSensitive ) { - return snapshot.rel().getTable( null, tableName ); + return snapshot.rel().getTable( Catalog.defaultNamespaceId, tableName ); } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorScope.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorScope.java index 2461d3abb0..f4356c3ff0 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorScope.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorScope.java @@ -25,17 +25,16 @@ import java.util.Map; import java.util.Objects; import java.util.stream.Collectors; -import lombok.Getter; import org.polypheny.db.algebra.constant.Monotonicity; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.StructKind; -import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.nodes.validate.ValidatorCatalogReader; import org.polypheny.db.nodes.validate.ValidatorScope; import org.polypheny.db.sql.language.SqlCall; import org.polypheny.db.sql.language.SqlIdentifier; import org.polypheny.db.sql.language.SqlNode; import org.polypheny.db.sql.language.SqlNodeList; +import org.polypheny.db.sql.language.SqlSelect; import org.polypheny.db.sql.language.SqlWindow; import org.polypheny.db.util.Moniker; import org.polypheny.db.util.NameMatcher; @@ -193,7 +192,7 @@ default boolean isWithin( SqlValidatorScope scope2 ) { */ interface Resolved { - void found( SqlValidatorImpl validator, CatalogEntity entity ); + void found( SqlValidatorNamespace namespace, boolean nullable, SqlValidatorScope scope, Path path, List remainingNames ); int count(); @@ -311,8 +310,15 @@ class ResolvedImpl implements Resolved { @Override - public void found( SqlValidatorImpl validator, CatalogEntity entity ) { - resolves.add( new Resolve( validator, entity ) ); + public void found( SqlValidatorNamespace namespace, boolean nullable, SqlValidatorScope scope, Path path, List remainingNames ) { + if ( scope instanceof TableScope ) { + scope = scope.getValidator().getSelectScope( (SqlSelect) scope.getNode() ); + } + if ( scope instanceof AggregatingSelectScope ) { + scope = ((AggregatingSelectScope) scope).parent; + assert scope instanceof 
SelectScope; + } + resolves.add( new Resolve( namespace, nullable, scope, path, remainingNames ) ); } @@ -336,21 +342,34 @@ public void clear() { } - /** * A match found when looking up a name. */ class Resolve { - @Getter - private final CatalogEntity entity; - public Path path; - public SqlValidatorNamespace namespace; - - - Resolve( SqlValidatorImpl validator, CatalogEntity entity ) { - this.entity = entity; - this.namespace = new EntityNamespace( validator, entity ); + public final SqlValidatorNamespace namespace; + private final boolean nullable; + public final SqlValidatorScope scope; // may be null + public final Path path; + /** + * Names not matched; empty if it was a full match. + */ + final List remainingNames; + + + Resolve( SqlValidatorNamespace namespace, boolean nullable, SqlValidatorScope scope, Path path, List remainingNames ) { + this.namespace = Objects.requireNonNull( namespace ); + this.nullable = nullable; + this.scope = scope; + assert !(scope instanceof TableScope); + this.path = Objects.requireNonNull( path ); + this.remainingNames = + remainingNames == null + ? ImmutableList.of() + : ImmutableList.copyOf( remainingNames ); } @@ -358,7 +377,9 @@ class Resolve { * The row type of the found namespace, nullable if the lookup has looked into outer joins. */ public AlgDataType rowType() { - return entity.getRowType(); + return namespace.getValidator() + .getTypeFactory() + .createTypeWithNullability( namespace.getRowType(), nullable ); } } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/WithScope.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/WithScope.java index 9d9c3d9a15..f3688eac90 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/WithScope.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/WithScope.java @@ -68,11 +68,11 @@ public SqlValidatorNamespace getTableNamespace( List names ) { @Override public void resolveTable( List names, NameMatcher nameMatcher, Path path, Resolved resolved ) { if ( names.size() == 1 && names.equals( withItem.name.names ) ) { - //final SqlValidatorNamespace ns = validator.getSqlNamespace( withItem ); - //final Step path2 = path.plus( ns.getRowType(), 0, names.get( 0 ), StructKind.FULLY_QUALIFIED ); + final SqlValidatorNamespace ns = validator.getSqlNamespace( withItem ); + final Step path2 = path.plus( ns.getRowType(), 0, names.get( 0 ), StructKind.FULLY_QUALIFIED ); LogicalNamespace namespace = validator.snapshot.getNamespace( names.get( 0 ) ); CatalogEntity entity = validator.snapshot.rel().getTable( names.get( 0 ), names.get( 1 ) ); - resolved.found( validator, entity ); + resolved.found( ns, false, null, path2, null ); return; } super.resolveTable( names, nameMatcher, path, resolved ); @@ -85,7 +85,7 @@ public void resolve( List names, NameMatcher nameMatcher, boolean deep, final SqlValidatorNamespace ns = validator.getSqlNamespace( withItem ); final Step path = Path.EMPTY.plus( ns.getRowType(), 0, names.get( 0 ), StructKind.FULLY_QUALIFIED ); CatalogEntity entity = validator.snapshot.rel().getTable( names.get( 0 ), names.get( 1 ) ); - resolved.found( validator, entity ); + resolved.found( ns, false, null, path, null ); return; } super.resolve( names, nameMatcher, deep, resolved ); diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/sql2alg/SqlToAlgConverter.java
b/plugins/sql-language/src/main/java/org/polypheny/db/sql/sql2alg/SqlToAlgConverter.java index a930067e01..59a020016a 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/sql2alg/SqlToAlgConverter.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/sql2alg/SqlToAlgConverter.java @@ -3865,7 +3865,7 @@ Pair> lookupExp( SqlQualified qualified ) { // Found in current query's from list. Find which from item. // We assume that the order of the from clause items has been preserved. - /*final SqlValidatorScope ancestorScope = resolve.scope; + final SqlValidatorScope ancestorScope = resolve.scope; boolean isParent = ancestorScope != scope; if ( (inputs != null) && !isParent ) { final LookupContext algs = new LookupContext( this, inputs, systemFieldList.size() ); @@ -3881,7 +3881,7 @@ Pair> lookupExp( SqlQualified qualified ) { } final Map map = ImmutableMap.copyOf( fieldOffsets ); return Pair.of( node, map ); - + } } else { // We're referencing a relational expression which has not been converted yet. This occurs when from items are correlated, e.g. "select from emp as emp join emp.getDepts() as dept". // Create a temporary expression. @@ -3910,8 +3910,7 @@ Pair> lookupExp( SqlQualified qualified ) { final RexNode c = rexBuilder.makeCorrel( builder.uniquify().build(), correlId ); return Pair.of( c, fields.build() ); } - }}*/ - return null; + } } From a4b526df927103324bcf38ee84f8c8c1bd31c0f7 Mon Sep 17 00:00:00 2001 From: datomo Date: Wed, 12 Apr 2023 17:19:29 +0200 Subject: [PATCH 057/436] adjusted exception to use generic exception --- .../db/adapter/index/IndexManager.java | 13 +- .../org/polypheny/db/catalog/Catalog.java | 6 +- .../catalogs/AllocationDocumentCatalog.java | 14 - .../catalogs/AllocationRelationalCatalog.java | 130 +---- .../catalogs/LogicalDocumentCatalog.java | 7 +- .../catalog/catalogs/LogicalGraphCatalog.java | 21 +- .../catalogs/LogicalRelationalCatalog.java | 58 +- .../db/catalog/catalogs/PhysicalCatalog.java | 2 - .../entity/physical/PhysicalCollection.java | 7 + .../entity/physical/PhysicalEntity.java | 4 + .../entity/physical/PhysicalGraph.java | 7 + .../entity/physical/PhysicalTable.java | 29 +- .../ColumnAlreadyExistsException.java | 25 - .../EntityAlreadyExistsException.java | 23 - .../exceptions/GenericCatalogException.java | 40 -- ...tion.java => GenericRuntimeException.java} | 8 +- .../GraphAlreadyExistsException.java | 26 - .../NamespaceAlreadyExistsException.java | 21 - .../NoTablePrimaryKeyException.java | 28 - .../exceptions/UnknownAdapterException.java | 33 -- .../UnknownAdapterIdRuntimeException.java | 26 - .../exceptions/UnknownCollationException.java | 26 - .../UnknownCollationIdRuntimeException.java | 26 - .../UnknownCollectionException.java | 25 - .../UnknownCollectionPlacementException.java | 25 - .../exceptions/UnknownColumnException.java | 39 -- .../UnknownColumnIdRuntimeException.java | 26 - ...nknownColumnPlacementRuntimeException.java | 27 - .../UnknownConstraintException.java | 35 -- .../UnknownConstraintTypeException.java | 26 - ...UnknownConstraintTypeRuntimeException.java | 26 - .../UnknownDatabaseIdRuntimeException.java | 26 - .../UnknownForeignKeyException.java | 34 -- .../UnknownForeignKeyOptionException.java | 26 - ...knownForeignKeyOptionRuntimeException.java | 26 - .../UnknownGraphPlacementsException.java | 25 - .../exceptions/UnknownIndexException.java | 32 - .../UnknownIndexIdRuntimeException.java | 25 - .../exceptions/UnknownIndexTypeException.java | 26 - .../UnknownIndexTypeRuntimeException.java | 26 - 
.../exceptions/UnknownKeyException.java | 26 - .../UnknownKeyIdRuntimeException.java | 26 - ...knownPartitionGroupIdRuntimeException.java | 26 - .../UnknownPartitionPlacementException.java | 26 - .../UnknownPartitionTypeException.java | 27 - .../UnknownPartitionTypeRuntimeException.java | 27 - .../UnknownPlacementRoleException.java | 26 - .../UnknownPlacementRoleRuntimeException.java | 26 - .../UnknownPlacementTypeException.java | 26 - .../UnknownPlacementTypeRuntimeException.java | 26 - .../UnknownQueryInterfaceException.java | 33 -- ...UnknownQueryInterfaceRuntimeException.java | 26 - .../exceptions/UnknownSchemaException.java | 34 -- .../UnknownSchemaIdRuntimeException.java | 26 - .../UnknownSchemaTypeException.java | 26 - .../UnknownSchemaTypeRuntimeException.java | 26 - .../exceptions/UnknownTableException.java | 39 -- .../UnknownTableIdRuntimeException.java | 26 - .../exceptions/UnknownTableTypeException.java | 26 - .../UnknownTableTypeRuntimeException.java | 26 - .../exceptions/UnknownUserException.java | 33 -- .../UnknownUserIdRuntimeException.java | 26 - .../db/catalog/logistic/Collation.java | 8 +- .../db/catalog/logistic/ConstraintType.java | 8 +- .../catalog/logistic/DataPlacementRole.java | 9 +- .../db/catalog/logistic/EntityType.java | 8 +- .../db/catalog/logistic/ForeignKeyOption.java | 8 +- .../db/catalog/logistic/IndexType.java | 8 +- .../db/catalog/logistic/NamespaceType.java | 10 +- .../db/catalog/logistic/PartitionType.java | 9 +- .../db/catalog/logistic/PlacementType.java | 8 +- .../catalog/snapshot/LogicalRelSnapshot.java | 16 +- .../db/catalog/snapshot/Snapshot.java | 8 +- .../snapshot/impl/LogicalRelSnapshotImpl.java | 11 +- .../catalog/snapshot/impl/SnapshotImpl.java | 6 +- .../java/org/polypheny/db/ddl/DdlManager.java | 107 ++-- .../ddl/exception/AlterSourceException.java | 21 - .../exception/ColumnNotExistsException.java | 40 -- .../ddl/exception/DdlOnSourceException.java | 21 - .../ddl/exception/IndexExistsException.java | 21 - .../IndexPreventsRemovalException.java | 34 -- .../ddl/exception/LastPlacementException.java | 21 - .../MissingColumnPlacementException.java | 31 - .../NotMaterializedViewException.java | 26 - .../NotNullAndDefaultValueException.java | 21 - .../db/ddl/exception/NotViewException.java | 26 - ...PartitionGroupNamesNotUniqueException.java | 21 - .../PlacementAlreadyExistsException.java | 21 - .../PlacementIsPrimaryException.java | 21 - .../PlacementNotExistsException.java | 21 - .../exception/SchemaNotExistException.java | 21 - .../UnknownIndexMethodException.java | 21 - .../db/iface/QueryInterfaceManager.java | 3 +- .../polypheny/db/processing/Processor.java | 5 +- .../org/polypheny/db/tools/AlgBuilder.java | 10 +- .../db/transaction/TransactionManager.java | 7 +- .../org/polypheny/db/catalog/MockCatalog.java | 17 +- .../java/org/polypheny/db/PolyphenyDb.java | 9 +- .../org/polypheny/db/ddl/DdlManagerImpl.java | 550 ++++++++---------- .../db/partition/FrequencyMapImpl.java | 5 +- .../db/processing/AuthenticatorImpl.java | 16 +- .../processing/ConstraintEnforceAttacher.java | 5 +- .../db/routing/routers/AbstractDqlRouter.java | 4 +- .../db/routing/routers/BaseRouter.java | 5 +- .../transaction/TransactionManagerImpl.java | 7 +- .../db/view/MaterializedViewManagerImpl.java | 42 +- .../java/org/polypheny/db/TestHelper.java | 26 +- .../java/org/polypheny/db/mql/DdlTest.java | 7 +- .../db/statistics/StatisticsTest.java | 28 +- .../statistics/StatisticQueryProcessor.java | 10 +- .../org/polypheny/db/cql/ColumnIndex.java | 21 +- 
.../polypheny/db/adapter/csv/CsvTable.java | 1 + .../ExploreQueryProcessor.java | 8 +- .../polypheny/db/adapter/jdbc/JdbcEntity.java | 1 + .../db/languages/mql/MqlCreateCollection.java | 27 +- .../db/languages/mql/MqlCreateView.java | 30 +- .../db/languages/mql/MqlRenameCollection.java | 4 +- .../db/languages/mql/MqlUseDatabase.java | 7 +- .../org/polypheny/db/catalog/PolyCatalog.java | 14 +- .../allocation/PolyAllocDocCatalog.java | 10 - .../allocation/PolyAllocRelCatalog.java | 94 +-- .../db/catalog/logical/DocumentCatalog.java | 8 - .../db/catalog/logical/GraphCatalog.java | 10 - .../db/catalog/logical/RelationalCatalog.java | 62 +- .../catalog/physical/PolyPhysicalCatalog.java | 6 - .../polypheny/db/restapi/RequestParser.java | 16 +- .../java/org/polypheny/db/restapi/Rest.java | 9 +- .../language/ddl/SqlAlterAdaptersDrop.java | 6 - .../language/ddl/SqlAlterInterfacesDrop.java | 6 - .../language/ddl/SqlColumnDeclaration.java | 20 +- .../ddl/SqlCreateMaterializedView.java | 11 +- .../db/sql/language/ddl/SqlCreateSchema.java | 10 +- .../db/sql/language/ddl/SqlCreateTable.java | 26 +- .../db/sql/language/ddl/SqlCreateView.java | 36 +- .../SqlAlterMaterializedViewAddIndex.java | 32 +- .../SqlAlterMaterializedViewDropIndex.java | 12 +- .../SqlAlterMaterializedViewRename.java | 15 +- .../SqlAlterMaterializedViewRenameColumn.java | 15 +- .../ddl/alterschema/SqlAlterSchemaRename.java | 13 +- .../SqlAlterSourceTableAddColumn.java | 32 +- .../altertable/SqlAlterTableAddColumn.java | 32 +- .../SqlAlterTableAddForeignKey.java | 38 +- .../ddl/altertable/SqlAlterTableAddIndex.java | 32 +- .../SqlAlterTableAddPartitions.java | 11 +- .../altertable/SqlAlterTableAddPlacement.java | 24 +- .../SqlAlterTableAddPrimaryKey.java | 18 +- .../SqlAlterTableAddUniqueConstraint.java | 18 +- .../altertable/SqlAlterTableDropColumn.java | 12 +- .../SqlAlterTableDropConstraint.java | 12 +- .../SqlAlterTableDropForeignKey.java | 12 +- .../altertable/SqlAlterTableDropIndex.java | 12 +- .../SqlAlterTableDropPlacement.java | 19 +- .../SqlAlterTableDropPrimaryKey.java | 12 +- .../SqlAlterTableMergePartitions.java | 12 +- .../altertable/SqlAlterTableModifyColumn.java | 51 +- .../SqlAlterTableModifyPartitions.java | 17 +- .../SqlAlterTableModifyPlacement.java | 42 +- ...SqlAlterTableModifyPlacementAddColumn.java | 37 +- ...qlAlterTableModifyPlacementDropColumn.java | 48 +- .../ddl/altertable/SqlAlterTableOwner.java | 13 +- .../ddl/altertable/SqlAlterTableRename.java | 12 +- .../altertable/SqlAlterTableRenameColumn.java | 13 +- .../ddl/alterview/SqlAlterViewRename.java | 18 +- .../alterview/SqlAlterViewRenameColumn.java | 15 +- .../java/org/polypheny/db/webui/Crud.java | 332 +++++------ .../org/polypheny/db/webui/HttpServer.java | 6 - .../polypheny/db/webui/crud/LanguageCrud.java | 3 +- 167 files changed, 861 insertions(+), 3604 deletions(-) delete mode 100644 core/src/main/java/org/polypheny/db/catalog/exceptions/ColumnAlreadyExistsException.java delete mode 100644 core/src/main/java/org/polypheny/db/catalog/exceptions/EntityAlreadyExistsException.java delete mode 100644 core/src/main/java/org/polypheny/db/catalog/exceptions/GenericCatalogException.java rename core/src/main/java/org/polypheny/db/catalog/exceptions/{UnknownGraphException.java => GenericRuntimeException.java} (72%) delete mode 100644 core/src/main/java/org/polypheny/db/catalog/exceptions/GraphAlreadyExistsException.java delete mode 100644 core/src/main/java/org/polypheny/db/catalog/exceptions/NamespaceAlreadyExistsException.java delete mode 100644 
core/src/main/java/org/polypheny/db/catalog/exceptions/NoTablePrimaryKeyException.java delete mode 100644 core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownAdapterException.java delete mode 100644 core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownAdapterIdRuntimeException.java delete mode 100644 core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownCollationException.java delete mode 100644 core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownCollationIdRuntimeException.java delete mode 100644 core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownCollectionException.java delete mode 100644 core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownCollectionPlacementException.java delete mode 100644 core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownColumnException.java delete mode 100644 core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownColumnIdRuntimeException.java delete mode 100644 core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownColumnPlacementRuntimeException.java delete mode 100644 core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownConstraintException.java delete mode 100644 core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownConstraintTypeException.java delete mode 100644 core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownConstraintTypeRuntimeException.java delete mode 100644 core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownDatabaseIdRuntimeException.java delete mode 100644 core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownForeignKeyException.java delete mode 100644 core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownForeignKeyOptionException.java delete mode 100644 core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownForeignKeyOptionRuntimeException.java delete mode 100644 core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownGraphPlacementsException.java delete mode 100644 core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownIndexException.java delete mode 100644 core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownIndexIdRuntimeException.java delete mode 100644 core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownIndexTypeException.java delete mode 100644 core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownIndexTypeRuntimeException.java delete mode 100644 core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownKeyException.java delete mode 100644 core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownKeyIdRuntimeException.java delete mode 100644 core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownPartitionGroupIdRuntimeException.java delete mode 100644 core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownPartitionPlacementException.java delete mode 100644 core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownPartitionTypeException.java delete mode 100644 core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownPartitionTypeRuntimeException.java delete mode 100644 core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownPlacementRoleException.java delete mode 100644 core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownPlacementRoleRuntimeException.java delete mode 100644 core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownPlacementTypeException.java delete mode 100644 core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownPlacementTypeRuntimeException.java delete mode 100644 
core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownQueryInterfaceException.java delete mode 100644 core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownQueryInterfaceRuntimeException.java delete mode 100644 core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownSchemaException.java delete mode 100644 core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownSchemaIdRuntimeException.java delete mode 100644 core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownSchemaTypeException.java delete mode 100644 core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownSchemaTypeRuntimeException.java delete mode 100644 core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownTableException.java delete mode 100644 core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownTableIdRuntimeException.java delete mode 100644 core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownTableTypeException.java delete mode 100644 core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownTableTypeRuntimeException.java delete mode 100644 core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownUserException.java delete mode 100644 core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownUserIdRuntimeException.java delete mode 100644 core/src/main/java/org/polypheny/db/ddl/exception/AlterSourceException.java delete mode 100644 core/src/main/java/org/polypheny/db/ddl/exception/ColumnNotExistsException.java delete mode 100644 core/src/main/java/org/polypheny/db/ddl/exception/DdlOnSourceException.java delete mode 100644 core/src/main/java/org/polypheny/db/ddl/exception/IndexExistsException.java delete mode 100644 core/src/main/java/org/polypheny/db/ddl/exception/IndexPreventsRemovalException.java delete mode 100644 core/src/main/java/org/polypheny/db/ddl/exception/LastPlacementException.java delete mode 100644 core/src/main/java/org/polypheny/db/ddl/exception/MissingColumnPlacementException.java delete mode 100644 core/src/main/java/org/polypheny/db/ddl/exception/NotMaterializedViewException.java delete mode 100644 core/src/main/java/org/polypheny/db/ddl/exception/NotNullAndDefaultValueException.java delete mode 100644 core/src/main/java/org/polypheny/db/ddl/exception/NotViewException.java delete mode 100644 core/src/main/java/org/polypheny/db/ddl/exception/PartitionGroupNamesNotUniqueException.java delete mode 100644 core/src/main/java/org/polypheny/db/ddl/exception/PlacementAlreadyExistsException.java delete mode 100644 core/src/main/java/org/polypheny/db/ddl/exception/PlacementIsPrimaryException.java delete mode 100644 core/src/main/java/org/polypheny/db/ddl/exception/PlacementNotExistsException.java delete mode 100644 core/src/main/java/org/polypheny/db/ddl/exception/SchemaNotExistException.java delete mode 100644 core/src/main/java/org/polypheny/db/ddl/exception/UnknownIndexMethodException.java diff --git a/core/src/main/java/org/polypheny/db/adapter/index/IndexManager.java b/core/src/main/java/org/polypheny/db/adapter/index/IndexManager.java index 98224a7f68..bc465254ee 100644 --- a/core/src/main/java/org/polypheny/db/adapter/index/IndexManager.java +++ b/core/src/main/java/org/polypheny/db/adapter/index/IndexManager.java @@ -35,11 +35,6 @@ import org.polypheny.db.catalog.entity.CatalogPrimaryKey; import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalTable; -import org.polypheny.db.catalog.exceptions.GenericCatalogException; -import org.polypheny.db.catalog.exceptions.UnknownKeyException; 
-import org.polypheny.db.catalog.exceptions.UnknownSchemaException; -import org.polypheny.db.catalog.exceptions.UnknownTableException; -import org.polypheny.db.catalog.exceptions.UnknownUserException; import org.polypheny.db.config.RuntimeConfig; import org.polypheny.db.information.InformationAction; import org.polypheny.db.information.InformationGraph; @@ -148,7 +143,7 @@ public void initialize( final TransactionManager transactionManager ) { } - public void restoreIndexes() throws UnknownSchemaException, GenericCatalogException, UnknownTableException, UnknownKeyException, UnknownUserException, TransactionException { + public void restoreIndexes() throws TransactionException { for ( final CatalogIndex index : Catalog.getInstance().getSnapshot().rel().getIndexes() ) { if ( index.location == 0 ) { addIndex( index ); @@ -157,18 +152,18 @@ public void restoreIndexes() throws UnknownSchemaException, GenericCatalogExcept } - public void addIndex( final CatalogIndex index ) throws UnknownSchemaException, GenericCatalogException, UnknownTableException, UnknownKeyException, UnknownUserException, TransactionException { + public void addIndex( final CatalogIndex index ) throws TransactionException { addIndex( index, null ); } - public void addIndex( final CatalogIndex index, final Statement statement ) throws UnknownSchemaException, GenericCatalogException, UnknownTableException, UnknownKeyException, UnknownUserException, TransactionException { + public void addIndex( final CatalogIndex index, final Statement statement ) throws TransactionException { // TODO(s3lph): persistent addIndex( index.id, index.name, index.key, index.method, index.unique, null, statement ); } - protected void addIndex( final long id, final String name, final CatalogKey key, final String method, final Boolean unique, final Boolean persistent, final Statement statement ) throws UnknownSchemaException, GenericCatalogException, UnknownUserException, TransactionException { + protected void addIndex( final long id, final String name, final CatalogKey key, final String method, final Boolean unique, final Boolean persistent, final Statement statement ) throws TransactionException { final IndexFactory factory = INDEX_FACTORIES.stream() .filter( it -> it.canProvide( method, unique, persistent ) ) .findFirst() diff --git a/core/src/main/java/org/polypheny/db/catalog/Catalog.java b/core/src/main/java/org/polypheny/db/catalog/Catalog.java index ecc9187066..ed10ee1474 100644 --- a/core/src/main/java/org/polypheny/db/catalog/Catalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/Catalog.java @@ -35,7 +35,6 @@ import org.polypheny.db.catalog.entity.CatalogAdapter.AdapterType; import org.polypheny.db.catalog.entity.CatalogQueryInterface; import org.polypheny.db.catalog.entity.CatalogUser; -import org.polypheny.db.catalog.exceptions.NoTablePrimaryKeyException; import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.transaction.Transaction; @@ -82,7 +81,7 @@ public static Catalog getInstance() { public abstract void updateSnapshot(); - public abstract void commit() throws NoTablePrimaryKeyException; + public abstract void commit(); public abstract void rollback(); @@ -154,8 +153,7 @@ protected final boolean isValidIdentifier( final String str ) { /** - * Inserts a new user, - * if a user with the same name already exists, it throws an error + * Inserts a new user * * @param name of the user * @param password of the user diff --git 
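A minimal calling sketch for the slimmed-down index restore path above; the indexManager handle is hypothetical and stands for however the initialized IndexManager instance is obtained:

    try {
        // Only the transactional failure mode is left to handle; the
        // Unknown*/GenericCatalog checked exceptions are gone from the signature.
        indexManager.restoreIndexes();
    } catch ( TransactionException e ) {
        throw new RuntimeException( e );
    }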
a/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationDocumentCatalog.java b/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationDocumentCatalog.java index 4031d17a7e..b304f22dc5 100644 --- a/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationDocumentCatalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationDocumentCatalog.java @@ -16,26 +16,12 @@ package org.polypheny.db.catalog.catalogs; -import java.util.List; import java.util.Map; -import org.polypheny.db.adapter.DataStore; import org.polypheny.db.catalog.entity.allocation.AllocationCollection; -import org.polypheny.db.catalog.exceptions.GenericCatalogException; import org.polypheny.db.catalog.logistic.PlacementType; public interface AllocationDocumentCatalog extends AllocationCatalog { - /** - * Added the required additional entities for the substitutions entities on different data models. - * - * @param schemaId The id of the namespace to which the collection belongs - * @param name The name of the collection - * @param stores The stores on which the collection was added - * @param onlyPlacement If the substitution entities should be created fully or only the placements - * @return The id of the mapping - */ - public abstract long addCollectionLogistics( long schemaId, String name, List stores, boolean onlyPlacement ) throws GenericCatalogException; - void addCollectionPlacement( long namespaceId, long adapterId, long id, PlacementType placementType ); void dropCollectionPlacement( long id, long adapterId ); diff --git a/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationRelationalCatalog.java b/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationRelationalCatalog.java index c100af5159..46fd204b68 100644 --- a/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationRelationalCatalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationRelationalCatalog.java @@ -18,9 +18,7 @@ import java.util.List; import java.util.Map; -import org.polypheny.db.catalog.entity.CatalogDataPlacement; import org.polypheny.db.catalog.entity.allocation.AllocationTable; -import org.polypheny.db.catalog.exceptions.GenericCatalogException; import org.polypheny.db.catalog.logistic.DataPlacementRole; import org.polypheny.db.catalog.logistic.PartitionType; import org.polypheny.db.catalog.logistic.PlacementType; @@ -50,7 +48,6 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { void deleteColumnPlacement( long allocationId, long columnId, boolean columnOnly ); - /** * Update the type of a placement. * @@ -97,7 +94,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param partitionType partition Type of the added partition * @return The id of the created partitionGroup */ - long addPartitionGroup( long tableId, String partitionGroupName, long schemaId, PartitionType partitionType, long numberOfInternalPartitions, List effectivePartitionGroupQualifier, boolean isUnbound ) throws GenericCatalogException; + long addPartitionGroup( long tableId, String partitionGroupName, long schemaId, PartitionType partitionType, long numberOfInternalPartitions, List effectivePartitionGroupQualifier, boolean isUnbound ); /** * Should only be called from mergePartitions(). Deletes a single partition and all references. 
@@ -117,7 +114,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param partitionGroupId partitionGroupId where the partition should be initially added to * @return The id of the created partition */ - long addPartition( long tableId, long schemaId, long partitionGroupId, List effectivePartitionGroupQualifier, boolean isUnbound ) throws GenericCatalogException; + long addPartition( long tableId, long schemaId, long partitionGroupId, List effectivePartitionGroupQualifier, boolean isUnbound ); /** * Deletes a single partition and all references. @@ -148,15 +145,6 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { */ void mergeTable( long tableId ); - /** - * Updates partitionProperties on table - * - * @param tableId Table to be partitioned - * @param partitionProperty Partition properties - */ - void updateTablePartitionProperties( long tableId, PartitionProperty partitionProperty ); - - /** * Updates the specified partition group with the attached partitionIds * @@ -165,22 +153,6 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { */ void updatePartitionGroup( long partitionGroupId, List partitionIds ); - /** - * Adds a partition to an already existing partition Group - * - * @param partitionGroupId Group to add to - * @param partitionId Partition to add - */ - void addPartitionToGroup( long partitionGroupId, Long partitionId ); - - /** - * Removes a partition from an already existing partition Group - * - * @param partitionGroupId Group to remove the partition from - * @param partitionId Partition to remove - */ - void removePartitionFromGroup( long partitionGroupId, Long partitionId ); - /** * Assign the partition to a new partitionGroup * @@ -221,27 +193,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param tableId table to retrieve the placement from * @return */ - AllocationTable createAlloctionTable( long adapterId, long tableId ); - - /** - * Adds a new DataPlacement for a given table on a specific store. - * If it already exists it simply returns the existing placement. - * - * @param adapterId adapter where placement is located - * @param tableId table to retrieve the placement from - * @return DataPlacement of a table placed on a specific store - */ - CatalogDataPlacement addDataPlacementIfNotExists( long adapterId, long tableId ); - - /** - * Modifies a specific DataPlacement of a given table. 
- * - * @param adapterId adapter where placement is located - * @param tableId table to retrieve the placement from - * @param catalogDataPlacement new dataPlacement to be written - */ - void modifyDataPlacement( long adapterId, long tableId, CatalogDataPlacement catalogDataPlacement ); - + AllocationTable createAllocationTable( long adapterId, long tableId ); /** * Removes a DataPlacement for a given table on a specific store @@ -253,66 +205,6 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { void deleteAllocation( long allocId ); - /** - * Adds a single dataPlacement on a store for a specific table - * - * @param adapterId adapter id corresponding to a new DataPlacements - * @param tableId table to be updated - */ - void addSingleDataPlacementToTable( long adapterId, long tableId ); - - /** - * Removes a single dataPlacement from a store for a specific table - * - * @param adapterId adapter id corresponding to a new DataPlacements - * @param tableId table to be updated - */ - void removeSingleDataPlacementFromTable( long adapterId, long tableId ); - - /** - * Updates the list of data placements on a table - * - * @param tableId table to be updated - * @param newDataPlacements list of new DataPlacements that shall replace the old ones - */ - void updateDataPlacementsOnTable( long tableId, List newDataPlacements ); - - /** - * Adds columns to dataPlacement on a store for a specific table - * - * @param adapterId adapter id corresponding to a new DataPlacements - * @param tableId table to be updated - * @param columnIds List of columnIds to add to a specific store for the table - */ - void addColumnsToDataPlacement( long adapterId, long tableId, List columnIds ); - - /** - * Remove columns to dataPlacement on a store for a specific table - * - * @param adapterId adapter id corresponding to a new DataPlacements - * @param tableId table to be updated - * @param columnIds List of columnIds to remove from a specific store for the table - */ - void removeColumnsFromDataPlacement( long adapterId, long tableId, List columnIds ); - - /** - * Adds partitions to dataPlacement on a store for a specific table - * - * @param adapterId adapter id corresponding to a new DataPlacements - * @param tableId table to be updated - * @param partitionIds List of partitionIds to add to a specific store for the table - */ - void addPartitionsToDataPlacement( long adapterId, long tableId, List partitionIds ); - - /** - * Remove partitions to dataPlacement on a store for a specific table - * - * @param adapterId adapter id corresponding to a new DataPlacements - * @param tableId table to be updated - * @param partitionIds List of partitionIds to remove from a specific store for the table - */ - void removePartitionsFromDataPlacement( long adapterId, long tableId, List partitionIds ); - /** * Updates and overrides list of associated columnPlacements {@code &} partitionPlacements for a given data placement * @@ -333,22 +225,6 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { void deletePartitionPlacement( long adapterId, long partitionId ); - - /** - * Registers a table to be considered for periodic processing - * - * @param tableId ID of table to be considered for periodic processing - */ - void addTableToPeriodicProcessing( long tableId ); - - /** - * Remove a table from periodic background processing - * - * @param tableId ID of table to be removed for periodic processing - */ - void removeTableFromPeriodicProcessing( long tableId ); - - Map getTables(); } diff 
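The allocation interface above comes out considerably leaner: the fine-grained DataPlacement and periodic-processing mutators are gone and the misspelled createAlloctionTable is renamed. A rough usage sketch, assuming an AllocationRelationalCatalog handle named allocCatalog and adapter/table ids obtained elsewhere:

    // Create the allocation of a table on a given adapter.
    AllocationTable alloc = allocCatalog.createAllocationTable( adapterId, tableId );
    // Tear-down likewise goes through a single entry point instead of the
    // removed add/remove placement helpers.
    allocCatalog.deleteAllocation( alloc.id );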
--git a/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalDocumentCatalog.java b/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalDocumentCatalog.java index bbe2a38bd5..4885c7e7ea 100644 --- a/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalDocumentCatalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalDocumentCatalog.java @@ -16,9 +16,7 @@ package org.polypheny.db.catalog.catalogs; -import java.util.List; import java.util.Map; -import org.polypheny.db.adapter.DataStore; import org.polypheny.db.catalog.entity.logical.LogicalCollection; import org.polypheny.db.catalog.logistic.EntityType; @@ -33,7 +31,7 @@ public interface LogicalDocumentCatalog extends LogicalCatalog { * @param modifiable If the collection is modifiable * @return The id of the added collection */ - public abstract long addCollection( Long id, String name, EntityType entity, boolean modifiable ); + long addCollection( Long id, String name, EntityType entity, boolean modifiable ); /** @@ -41,9 +39,8 @@ public interface LogicalDocumentCatalog extends LogicalCatalog { * * @param id The id of the collection to delete */ - public abstract void deleteCollection( long id ); + void deleteCollection( long id ); - long addCollectionLogistics( String name, List stores, boolean placementOnly ); Map getCollections(); diff --git a/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalGraphCatalog.java b/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalGraphCatalog.java index 3831b68247..25675da970 100644 --- a/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalGraphCatalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalGraphCatalog.java @@ -20,9 +20,6 @@ import java.util.Map; import org.polypheny.db.adapter.DataStore; import org.polypheny.db.catalog.entity.logical.LogicalGraph; -import org.polypheny.db.catalog.exceptions.GenericCatalogException; -import org.polypheny.db.catalog.exceptions.UnknownColumnException; -import org.polypheny.db.catalog.exceptions.UnknownTableException; public interface LogicalGraphCatalog extends LogicalCatalog { @@ -33,7 +30,7 @@ public interface LogicalGraphCatalog extends LogicalCatalog { * @param alias The alias to add * @param ifNotExists If the alias should only be added if it does not already exist */ - public abstract void addGraphAlias( long graphId, String alias, boolean ifNotExists ); + void addGraphAlias( long graphId, String alias, boolean ifNotExists ); /** * Removes a given alias for a specific graph. @@ -42,7 +39,7 @@ public interface LogicalGraphCatalog extends LogicalCatalog { * @param alias The alias to remove * @param ifExists If the alias should only be removed if it exists */ - public abstract void removeGraphAlias( long graphId, String alias, boolean ifExists ); + void removeGraphAlias( long graphId, String alias, boolean ifExists ); /** * Adds a new graph to the catalog, on the same layer as schema in relational. @@ -54,24 +51,14 @@ public interface LogicalGraphCatalog extends LogicalCatalog { * @param replace If the graph should replace an existing one * @return The id of the newly added graph */ - public abstract long addGraph( String name, List stores, boolean modifiable, boolean ifNotExists, boolean replace ); + long addGraph( String name, List stores, boolean modifiable, boolean ifNotExists, boolean replace ); /** * Deletes an existing graph.
* * @param id The id of the graph to delete */ - public abstract void deleteGraph( long id ); - - - /** - * Additional operations for the creation of a graph entity. - * - * @param id The predefined id of the already added graph - * @param stores The stores on which the graph was placed - * @param onlyPlacement If the substitution only creates the placements and not the entites - */ - public abstract void addGraphLogistics( long id, List stores, boolean onlyPlacement ) throws GenericCatalogException, UnknownTableException, UnknownColumnException; + void deleteGraph( long id ); Map getGraphs(); diff --git a/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalRelationalCatalog.java b/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalRelationalCatalog.java index 9191c471fa..7208c33320 100644 --- a/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalRelationalCatalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalRelationalCatalog.java @@ -29,7 +29,6 @@ import org.polypheny.db.catalog.entity.MaterializedCriteria; import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalTable; -import org.polypheny.db.catalog.exceptions.GenericCatalogException; import org.polypheny.db.catalog.logistic.Collation; import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.logistic.ForeignKeyOption; @@ -47,7 +46,7 @@ public interface LogicalRelationalCatalog extends LogicalCatalog { * @param modifiable Whether the content of the table can be modified * @return The id of the inserted table */ - public abstract LogicalTable addTable( String name, EntityType entityType, boolean modifiable ); + LogicalTable addTable( String name, EntityType entityType, boolean modifiable ); /** @@ -62,7 +61,7 @@ public interface LogicalRelationalCatalog extends LogicalCatalog { * @param fieldList all columns used within the View * @return The id of the inserted table */ - public abstract long addView( String name, long namespaceId, EntityType entityType, boolean modifiable, AlgNode definition, AlgCollation algCollation, Map> underlyingTables, AlgDataType fieldList, String query, QueryLanguage language ); + long addView( String name, long namespaceId, EntityType entityType, boolean modifiable, AlgNode definition, AlgCollation algCollation, Map> underlyingTables, AlgDataType fieldList, String query, QueryLanguage language ); /** * Adds a materialized view to a specified schema. 
@@ -81,7 +80,7 @@ public interface LogicalRelationalCatalog extends LogicalCatalog { * @param ordered if materialized view is ordered or not * @return id of the inserted materialized view */ - public abstract long addMaterializedView( String name, long namespaceId, EntityType entityType, boolean modifiable, AlgNode definition, AlgCollation algCollation, Map> underlyingTables, AlgDataType fieldList, MaterializedCriteria materializedCriteria, String query, QueryLanguage language, boolean ordered ) throws GenericCatalogException; + long addMaterializedView( String name, long namespaceId, EntityType entityType, boolean modifiable, AlgNode definition, AlgCollation algCollation, Map> underlyingTables, AlgDataType fieldList, MaterializedCriteria materializedCriteria, String query, QueryLanguage language, boolean ordered ); /** * Renames a table @@ -89,14 +88,14 @@ public interface LogicalRelationalCatalog extends LogicalCatalog { * @param tableId The id of the table to rename * @param name New name of the table */ - public abstract void renameTable( long tableId, String name ); + void renameTable( long tableId, String name ); /** * Delete the specified table. Columns need to be deleted before. * * @param tableId The id of the table to delete */ - public abstract void deleteTable( long tableId ); + void deleteTable( long tableId ); /** * Set the primary key of a table @@ -104,7 +103,7 @@ public interface LogicalRelationalCatalog extends LogicalCatalog { * @param tableId The id of the table * @param keyId The id of the key to set as primary key. Set null to set no primary key. */ - public abstract void setPrimaryKey( long tableId, Long keyId ); + void setPrimaryKey( long tableId, Long keyId ); /** * Adds a column. @@ -119,7 +118,7 @@ public interface LogicalRelationalCatalog extends LogicalCatalog { * @param collation The collation of the field (if applicable, else null) * @return The id of the inserted column */ - public abstract long addColumn( String name, long tableId, int position, PolyType type, PolyType collectionsType, Integer length, Integer scale, Integer dimension, Integer cardinality, boolean nullable, Collation collation ); + long addColumn( String name, long tableId, int position, PolyType type, PolyType collectionsType, Integer length, Integer scale, Integer dimension, Integer cardinality, boolean nullable, Collation collation ); /** @@ -128,7 +127,7 @@ public interface LogicalRelationalCatalog extends LogicalCatalog { * @param columnId The id of the column to rename * @param name New name of the column */ - public abstract void renameColumn( long columnId, String name ); + void renameColumn( long columnId, String name ); /** * Change the position of the column. @@ -136,7 +135,7 @@ public interface LogicalRelationalCatalog extends LogicalCatalog { * @param columnId The id of the column for which to change the position * @param position The new position of the column */ - public abstract void setColumnPosition( long columnId, int position ); + void setColumnPosition( long columnId, int position ); /** * Change the data type of a column.
@@ -144,7 +143,7 @@ public interface LogicalRelationalCatalog extends LogicalCatalog { * @param columnId The id of the column * @param type The new type of the column */ - public abstract void setColumnType( long columnId, PolyType type, PolyType collectionsType, Integer length, Integer precision, Integer dimension, Integer cardinality ) throws GenericCatalogException; + void setColumnType( long columnId, PolyType type, PolyType collectionsType, Integer length, Integer precision, Integer dimension, Integer cardinality ); /** * Change nullability of the column (whether the column allows null values). @@ -152,7 +151,7 @@ public interface LogicalRelationalCatalog extends LogicalCatalog { * @param columnId The id of the column * @param nullable True if the column should allow null values, false if not. */ - public abstract void setNullable( long columnId, boolean nullable ) throws GenericCatalogException; + void setNullable( long columnId, boolean nullable ); /** * Set the collation of a column. @@ -161,7 +160,7 @@ public interface LogicalRelationalCatalog extends LogicalCatalog { * @param columnId The id of the column * @param collation The collation to set */ - public abstract void setCollation( long columnId, Collation collation ); + void setCollation( long columnId, Collation collation ); /** @@ -169,7 +168,7 @@ public interface LogicalRelationalCatalog extends LogicalCatalog { * * @param columnId The id of the column to delete */ - public abstract void deleteColumn( long columnId ); + void deleteColumn( long columnId ); /** * Adds a default value for a column. If there already is a default value, it is replaced. @@ -178,15 +177,14 @@ public interface LogicalRelationalCatalog extends LogicalCatalog { * @param type The type of the default value * @param defaultValue The default value to set. */ - public abstract void setDefaultValue( long columnId, PolyType type, String defaultValue ); + void setDefaultValue( long columnId, PolyType type, String defaultValue ); /** * Deletes an existing default value of a column. NoOp if there is no default value defined. * * @param columnId The id of the column */ - public abstract void deleteDefaultValue( long columnId ); - + void deleteDefaultValue( long columnId ); /** @@ -195,7 +193,7 @@ public interface LogicalRelationalCatalog extends LogicalCatalog { * @param tableId The id of the table * @param columnIds The ids of the columns which will be part of the primary key */ - public abstract void addPrimaryKey( long tableId, List columnIds ) throws GenericCatalogException; + void addPrimaryKey( long tableId, List columnIds ); /** @@ -209,7 +207,7 @@ public interface LogicalRelationalCatalog extends LogicalCatalog { * @param onUpdate The option for updates * @param onDelete The option for deletes */ - public abstract void addForeignKey( long tableId, List columnIds, long referencesTableId, List referencesIds, String constraintName, ForeignKeyOption onUpdate, ForeignKeyOption onDelete ) throws GenericCatalogException; + void addForeignKey( long tableId, List columnIds, long referencesTableId, List referencesIds, String constraintName, ForeignKeyOption onUpdate, ForeignKeyOption onDelete ); /** * Adds a unique constraint.
@@ -218,21 +216,21 @@ public interface LogicalRelationalCatalog extends LogicalCatalog { * @param constraintName The name of the constraint * @param columnIds A list of column ids */ - public abstract void addUniqueConstraint( long tableId, String constraintName, List columnIds ) throws GenericCatalogException; + void addUniqueConstraint( long tableId, String constraintName, List columnIds ); /** * Deletes the specified primary key (including the entry in the key table). If there is an index on this key, make sure to delete it first. * * @param tableId The id of the table whose primary key is to be dropped */ - public abstract void deletePrimaryKey( long tableId ) throws GenericCatalogException; + void deletePrimaryKey( long tableId ); /** * Delete the specified foreign key (does not delete the referenced key). * * @param foreignKeyId The id of the foreign key to delete */ - public abstract void deleteForeignKey( long foreignKeyId ) throws GenericCatalogException; + void deleteForeignKey( long foreignKeyId ); /** * Delete the specified constraint. @@ -240,7 +238,7 @@ public interface LogicalRelationalCatalog extends LogicalCatalog { * * @param constraintId The id of the constraint to delete */ - public abstract void deleteConstraint( long constraintId ) throws GenericCatalogException; + void deleteConstraint( long constraintId ); /** @@ -248,14 +246,14 @@ public interface LogicalRelationalCatalog extends LogicalCatalog { * * @param catalogView view for which to delete its dependencies */ - public abstract void deleteViewDependencies( CatalogView catalogView ); + void deleteViewDependencies( CatalogView catalogView ); /** * Updates the last time a materialized view has been refreshed. * * @param materializedViewId id of the materialized view */ - public abstract void updateMaterializedViewRefreshTime( long materializedViewId ); + void updateMaterializedViewRefreshTime( long materializedViewId ); /** @@ -265,7 +263,7 @@ public interface LogicalRelationalCatalog extends LogicalCatalog { * @param tableId table to be flagged for deletion * @param flag true if it should be flagged, false if flag should be removed */ - public abstract void flagTableForDeletion( long tableId, boolean flag ); + void flagTableForDeletion( long tableId, boolean flag ); /** * Is used to detect if a table is flagged for deletion. @@ -275,7 +273,7 @@ public interface LogicalRelationalCatalog extends LogicalCatalog { * @param tableId table to be checked * @return If table is flagged for deletion or not */ - public abstract boolean isTableFlaggedForDeletion( long tableId ); + boolean isTableFlaggedForDeletion( long tableId ); /** * Adds an index over the specified columns @@ -290,7 +288,7 @@ public interface LogicalRelationalCatalog extends LogicalCatalog { * @param indexName The name of the index * @return The id of the created index */ - public abstract long addIndex( long tableId, List columnIds, boolean unique, String method, String methodDisplayName, long adapterId, IndexType type, String indexName ) throws GenericCatalogException; + long addIndex( long tableId, List columnIds, boolean unique, String method, String methodDisplayName, long adapterId, IndexType type, String indexName ); /** * Set physical index name.
@@ -298,14 +296,14 @@ public interface LogicalRelationalCatalog extends LogicalCatalog { * @param indexId The id of the index * @param physicalName The physical name to be set */ - public abstract void setIndexPhysicalName( long indexId, String physicalName ); + void setIndexPhysicalName( long indexId, String physicalName ); /** * Delete the specified index * * @param indexId The id of the index to drop */ - public abstract void deleteIndex( long indexId ); + void deleteIndex( long indexId ); Map getTables(); diff --git a/core/src/main/java/org/polypheny/db/catalog/catalogs/PhysicalCatalog.java b/core/src/main/java/org/polypheny/db/catalog/catalogs/PhysicalCatalog.java index ebe28cd60d..d7170bc42c 100644 --- a/core/src/main/java/org/polypheny/db/catalog/catalogs/PhysicalCatalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/catalogs/PhysicalCatalog.java @@ -23,8 +23,6 @@ public interface PhysicalCatalog { - void addPhysicalEntity( PhysicalEntity physicalEntity ); - ConcurrentHashMap getPhysicals(); diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalCollection.java b/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalCollection.java index 0a19af328b..bdec8514c5 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalCollection.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalCollection.java @@ -22,6 +22,7 @@ import lombok.experimental.NonFinal; import org.apache.calcite.linq4j.tree.Expression; import org.apache.calcite.linq4j.tree.Expressions; +import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.logistic.NamespaceType; @@ -47,4 +48,10 @@ public Expression asExpression() { return Expressions.call( Catalog.CATALOG_EXPRESSION, "getPhysicalCollection", Expressions.constant( id ) ); } + + @Override + public AlgDataType getLogicalRowType() { + return getRowType(); + } + } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalEntity.java b/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalEntity.java index 898fbde9da..32478a2939 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalEntity.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalEntity.java @@ -19,6 +19,7 @@ import lombok.EqualsAndHashCode; import lombok.Value; import lombok.experimental.NonFinal; +import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.logistic.NamespaceType; @@ -48,4 +49,7 @@ public State getCatalogType() { return State.PHYSICAL; } + + public abstract AlgDataType getLogicalRowType(); + } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalGraph.java b/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalGraph.java index 1084aee201..f0465b19ba 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalGraph.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalGraph.java @@ -22,6 +22,7 @@ import lombok.experimental.NonFinal; import org.apache.calcite.linq4j.tree.Expression; import org.apache.calcite.linq4j.tree.Expressions; +import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.logistic.EntityType; import 
org.polypheny.db.catalog.logistic.NamespaceType; @@ -47,4 +48,10 @@ public Expression asExpression() { return Expressions.call( Catalog.CATALOG_EXPRESSION, "getPhysicalGraph", Expressions.constant( id ) ); } + + @Override + public AlgDataType getLogicalRowType() { + return getRowType(); + } + } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalTable.java b/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalTable.java index e3d2a014ba..ea4d77eb07 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalTable.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalTable.java @@ -44,33 +44,52 @@ public class PhysicalTable extends PhysicalEntity { public ImmutableMap columns; + public ImmutableMap logicalColumns; public String namespaceName; public ImmutableMap types; public ImmutableList order; - public PhysicalTable( long id, long logicalId, long allocationId, String name, long namespaceId, String namespaceName, long adapterId, Map columns, Map types, List order ) { + public PhysicalTable( + long id, + long logicalId, + long allocationId, + String name, + long namespaceId, + String namespaceName, + long adapterId, + Map columns, + Map logicalColumns, + Map types, + List order ) { super( id, logicalId, allocationId, name, namespaceId, namespaceName, EntityType.ENTITY, NamespaceType.RELATIONAL, adapterId ); this.namespaceName = namespaceName; this.columns = ImmutableMap.copyOf( columns ); + this.logicalColumns = ImmutableMap.copyOf( logicalColumns ); this.types = ImmutableMap.copyOf( types ); this.order = ImmutableList.copyOf( order ); } - public PhysicalTable( long id, AllocationTable table, String name, String namespaceName, Map columns, Map types, List order ) { - this( id, table.logicalId, table.id, name, table.namespaceId, namespaceName, table.adapterId, columns, types, order ); + public PhysicalTable( long id, AllocationTable table, String name, String namespaceName, Map columns, Map logicalColumns, Map types, List order ) { + this( id, table.logicalId, table.id, name, table.namespaceId, namespaceName, table.adapterId, columns, logicalColumns, types, order ); } @Override public AlgDataType getRowType() { - return buildProto().apply( AlgDataTypeFactory.DEFAULT ); + return buildProto( columns ).apply( AlgDataTypeFactory.DEFAULT ); } - public AlgProtoDataType buildProto() { + @Override + public AlgDataType getLogicalRowType() { + return buildProto( logicalColumns ).apply( AlgDataTypeFactory.DEFAULT ); + } + + + public AlgProtoDataType buildProto( Map columns ) { final AlgDataTypeFactory typeFactory = new PolyTypeFactoryImpl( AlgDataTypeSystem.DEFAULT ); final AlgDataTypeFactory.Builder fieldInfo = typeFactory.builder(); diff --git a/core/src/main/java/org/polypheny/db/catalog/exceptions/ColumnAlreadyExistsException.java b/core/src/main/java/org/polypheny/db/catalog/exceptions/ColumnAlreadyExistsException.java deleted file mode 100644 index aed853a703..0000000000 --- a/core/src/main/java/org/polypheny/db/catalog/exceptions/ColumnAlreadyExistsException.java +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Copyright 2019-2021 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
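With the new logicalColumns map, PhysicalTable can serve both naming worlds from one code path; a small sketch, where table is assumed to be an existing PhysicalTable instance:

    // Store-side row type, built from the physical column names.
    AlgDataType physicalRowType = table.getRowType();
    // Logical row type, produced by the same proto builder from logicalColumns.
    AlgDataType logicalRowType = table.getLogicalRowType();
    // Both boil down to the column-map-parameterized builder:
    AlgDataType viaProto = table.buildProto( table.columns ).apply( AlgDataTypeFactory.DEFAULT );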
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.catalog.exceptions; - -public class ColumnAlreadyExistsException extends CatalogException { - - public ColumnAlreadyExistsException( String columnName, String tableName ) { - super( "There is already a column with the name '" + columnName + "' in table '" + tableName + "'" ); - } - -} diff --git a/core/src/main/java/org/polypheny/db/catalog/exceptions/EntityAlreadyExistsException.java b/core/src/main/java/org/polypheny/db/catalog/exceptions/EntityAlreadyExistsException.java deleted file mode 100644 index 8684d4e3ed..0000000000 --- a/core/src/main/java/org/polypheny/db/catalog/exceptions/EntityAlreadyExistsException.java +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Copyright 2019-2022 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.catalog.exceptions; - - -public class EntityAlreadyExistsException extends CatalogException { - -} - diff --git a/core/src/main/java/org/polypheny/db/catalog/exceptions/GenericCatalogException.java b/core/src/main/java/org/polypheny/db/catalog/exceptions/GenericCatalogException.java deleted file mode 100644 index 5aaf8cc643..0000000000 --- a/core/src/main/java/org/polypheny/db/catalog/exceptions/GenericCatalogException.java +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Copyright 2019-2020 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.polypheny.db.catalog.exceptions; - - -public class GenericCatalogException extends CatalogException { - - private Exception exception; - - - public GenericCatalogException( String message ) { - super( message ); - } - - - public GenericCatalogException( String message, Exception e ) { - super( message, e ); - exception = e; - } - - - public GenericCatalogException( Exception e ) { - super( e ); - } - -} diff --git a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownGraphException.java b/core/src/main/java/org/polypheny/db/catalog/exceptions/GenericRuntimeException.java similarity index 72% rename from core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownGraphException.java rename to core/src/main/java/org/polypheny/db/catalog/exceptions/GenericRuntimeException.java index 846b31e66f..6a92d4723f 100644 --- a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownGraphException.java +++ b/core/src/main/java/org/polypheny/db/catalog/exceptions/GenericRuntimeException.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2022 The Polypheny Project + * Copyright 2019-2023 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -16,10 +16,10 @@ package org.polypheny.db.catalog.exceptions; -public class UnknownGraphException extends CatalogRuntimeException { +public class GenericRuntimeException extends RuntimeException { - public UnknownGraphException( long id ) { - super( "Graph with id: " + id + " does not exist." ); + public GenericRuntimeException( String message, Object... params ) { + super( String.format( message, params ) ); } } diff --git a/core/src/main/java/org/polypheny/db/catalog/exceptions/GraphAlreadyExistsException.java b/core/src/main/java/org/polypheny/db/catalog/exceptions/GraphAlreadyExistsException.java deleted file mode 100644 index f9b75a69d5..0000000000 --- a/core/src/main/java/org/polypheny/db/catalog/exceptions/GraphAlreadyExistsException.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Copyright 2019-2022 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.catalog.exceptions; - - -public class GraphAlreadyExistsException extends CatalogRuntimeException { - - public GraphAlreadyExistsException( String name ) { - super( String.format( "A graph with the name: %s does already exist.", name ) ); - } - -} diff --git a/core/src/main/java/org/polypheny/db/catalog/exceptions/NamespaceAlreadyExistsException.java b/core/src/main/java/org/polypheny/db/catalog/exceptions/NamespaceAlreadyExistsException.java deleted file mode 100644 index 804e413578..0000000000 --- a/core/src/main/java/org/polypheny/db/catalog/exceptions/NamespaceAlreadyExistsException.java +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright 2019-2022 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
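GenericRuntimeException, introduced above, is the unchecked replacement for the checked catalog exceptions this patch deletes; its varargs constructor routes the message through String.format. A call-site sketch with a placeholder id:

    long columnId = 42; // placeholder value for illustration
    // Replaces e.g. the deleted UnknownColumnIdRuntimeException.
    throw new GenericRuntimeException( "There is no column with id %d", columnId );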
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.catalog.exceptions; - -public class NamespaceAlreadyExistsException extends CatalogException { - -} diff --git a/core/src/main/java/org/polypheny/db/catalog/exceptions/NoTablePrimaryKeyException.java b/core/src/main/java/org/polypheny/db/catalog/exceptions/NoTablePrimaryKeyException.java deleted file mode 100644 index f6f6e66ba0..0000000000 --- a/core/src/main/java/org/polypheny/db/catalog/exceptions/NoTablePrimaryKeyException.java +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Copyright 2019-2020 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.catalog.exceptions; - - -/** - * Is thrown when a new table is created and no primary key provided. - */ -public class NoTablePrimaryKeyException extends CatalogException { - - public NoTablePrimaryKeyException() { - super( "No primary key was provided on table creation." ); - } -} diff --git a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownAdapterException.java b/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownAdapterException.java deleted file mode 100644 index 76c97702f8..0000000000 --- a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownAdapterException.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Copyright 2019-2021 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.polypheny.db.catalog.exceptions; - - -import lombok.Getter; - -public class UnknownAdapterException extends CatalogException { - - @Getter - private final String adapterName; - - - public UnknownAdapterException( String adapterName ) { - super( "There is no adapter with name " + adapterName ); - this.adapterName = adapterName; - } - -} diff --git a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownAdapterIdRuntimeException.java b/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownAdapterIdRuntimeException.java deleted file mode 100644 index e417468a5f..0000000000 --- a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownAdapterIdRuntimeException.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Copyright 2019-2021 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.catalog.exceptions; - - -public class UnknownAdapterIdRuntimeException extends CatalogRuntimeException { - - public UnknownAdapterIdRuntimeException( int adapterId ) { - super( "There is no adapter with the id " + adapterId ); - } - -} diff --git a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownCollationException.java b/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownCollationException.java deleted file mode 100644 index 22dda54f8f..0000000000 --- a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownCollationException.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Copyright 2019-2021 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.catalog.exceptions; - - -public class UnknownCollationException extends CatalogException { - - public UnknownCollationException( String name ) { - super( "There is no Collation with name: " + name ); - } - -} diff --git a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownCollationIdRuntimeException.java b/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownCollationIdRuntimeException.java deleted file mode 100644 index 5c5264a2ae..0000000000 --- a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownCollationIdRuntimeException.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Copyright 2019-2021 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.catalog.exceptions; - - -public class UnknownCollationIdRuntimeException extends CatalogRuntimeException { - - public UnknownCollationIdRuntimeException( int id ) { - super( "There is no Collation with id: " + id ); - } - -} diff --git a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownCollectionException.java b/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownCollectionException.java deleted file mode 100644 index 123d8dc052..0000000000 --- a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownCollectionException.java +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Copyright 2019-2022 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.catalog.exceptions; - -public class UnknownCollectionException extends CatalogRuntimeException { - - public UnknownCollectionException( long collectionId ) { - super( "Unknown Collection with id: " + collectionId ); - } - -} diff --git a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownCollectionPlacementException.java b/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownCollectionPlacementException.java deleted file mode 100644 index 64c10f2129..0000000000 --- a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownCollectionPlacementException.java +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Copyright 2019-2022 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.polypheny.db.catalog.exceptions; - -public class UnknownCollectionPlacementException extends CatalogRuntimeException { - - public UnknownCollectionPlacementException( long collectionId, int adapterId ) { - super( String.format( "Placement of document with id %s does not exist on adapter with id %s", collectionId, adapterId ) ); - } - -} diff --git a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownColumnException.java b/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownColumnException.java deleted file mode 100644 index 84dd7dc904..0000000000 --- a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownColumnException.java +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Copyright 2019-2021 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.catalog.exceptions; - - -import lombok.Getter; - -public class UnknownColumnException extends CatalogException { - - @Getter - private final String columnName; - - - public UnknownColumnException( String schemaName, String tableName, String columnName ) { - super( "There is no column with name '" + columnName + "' in table '" + tableName + "' of schema '" + schemaName + "'" ); - this.columnName = columnName; - } - - - public UnknownColumnException( long tableId, String columnName ) { - super( "There is no column with name '" + columnName + "' in the table with the id '" + tableId + "'" ); - this.columnName = columnName; - } - -} diff --git a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownColumnIdRuntimeException.java b/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownColumnIdRuntimeException.java deleted file mode 100644 index 6d9ceb884c..0000000000 --- a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownColumnIdRuntimeException.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Copyright 2019-2021 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.polypheny.db.catalog.exceptions; - - -public class UnknownColumnIdRuntimeException extends CatalogRuntimeException { - - public UnknownColumnIdRuntimeException( long columnId ) { - super( "There is no column with id '" + columnId + "';" ); - } - -} diff --git a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownColumnPlacementRuntimeException.java b/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownColumnPlacementRuntimeException.java deleted file mode 100644 index 6e44721d42..0000000000 --- a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownColumnPlacementRuntimeException.java +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Copyright 2019-2021 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.catalog.exceptions; - - -public class UnknownColumnPlacementRuntimeException extends CatalogRuntimeException { - - - public UnknownColumnPlacementRuntimeException( long adapterId, long columnId ) { - super( "There is no column placement for column id '" + columnId + "' on adapter with id '" + adapterId + "'" ); - } - -} diff --git a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownConstraintException.java b/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownConstraintException.java deleted file mode 100644 index 250f9bd291..0000000000 --- a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownConstraintException.java +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Copyright 2019-2020 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.polypheny.db.catalog.exceptions; - - -public class UnknownConstraintException extends CatalogException { - - public UnknownConstraintException( String name ) { - super( "There is no constraint with name: " + name ); - } - - - public UnknownConstraintException( long id ) { - super( "There is no constraint with id: " + id ); - } - - - public UnknownConstraintException( long tableId, String name ) { - super( "There is no constraint in table: " + tableId + " with name: " + name ); - } -} diff --git a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownConstraintTypeException.java b/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownConstraintTypeException.java deleted file mode 100644 index f8fe182d64..0000000000 --- a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownConstraintTypeException.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Copyright 2019-2021 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.catalog.exceptions; - - -public class UnknownConstraintTypeException extends CatalogException { - - public UnknownConstraintTypeException( String name ) { - super( "There is no constraint type with name: " + name ); - } - -} diff --git a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownConstraintTypeRuntimeException.java b/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownConstraintTypeRuntimeException.java deleted file mode 100644 index 9cf7479444..0000000000 --- a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownConstraintTypeRuntimeException.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Copyright 2019-2021 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.polypheny.db.catalog.exceptions; - - -public class UnknownConstraintTypeRuntimeException extends CatalogRuntimeException { - - public UnknownConstraintTypeRuntimeException( int id ) { - super( "There is no constraint type with id: " + id ); - } - -} diff --git a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownDatabaseIdRuntimeException.java b/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownDatabaseIdRuntimeException.java deleted file mode 100644 index 4f61022c42..0000000000 --- a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownDatabaseIdRuntimeException.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Copyright 2019-2021 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.catalog.exceptions; - - -public class UnknownDatabaseIdRuntimeException extends CatalogRuntimeException { - - public UnknownDatabaseIdRuntimeException( long databaseId ) { - super( "There is no database with the id " + databaseId ); - } - -} diff --git a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownForeignKeyException.java b/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownForeignKeyException.java deleted file mode 100644 index 8ab899c114..0000000000 --- a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownForeignKeyException.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Copyright 2019-2020 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.polypheny.db.catalog.exceptions; - - -public class UnknownForeignKeyException extends CatalogException { - - public UnknownForeignKeyException( String name ) { - super( "There is no foreign key with name: " + name ); - } - - - public UnknownForeignKeyException( long id ) { - super( "There is no foreign key with id: " + id ); - } - - public UnknownForeignKeyException( long tableId, String name ) { - super( "There is no foreign key on table: " + tableId + " with name: " + name ); - } -} diff --git a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownForeignKeyOptionException.java b/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownForeignKeyOptionException.java deleted file mode 100644 index f58c9b38cb..0000000000 --- a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownForeignKeyOptionException.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Copyright 2019-2021 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.catalog.exceptions; - - -public class UnknownForeignKeyOptionException extends CatalogException { - - public UnknownForeignKeyOptionException( String name ) { - super( "There is no Foreign Key Option with name: " + name ); - } - -} diff --git a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownForeignKeyOptionRuntimeException.java b/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownForeignKeyOptionRuntimeException.java deleted file mode 100644 index 2768deeb3e..0000000000 --- a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownForeignKeyOptionRuntimeException.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Copyright 2019-2021 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.polypheny.db.catalog.exceptions; - - -public class UnknownForeignKeyOptionRuntimeException extends CatalogRuntimeException { - - public UnknownForeignKeyOptionRuntimeException( int id ) { - super( "There is no Foreign Key Option with id: " + id ); - } - -} diff --git a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownGraphPlacementsException.java b/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownGraphPlacementsException.java deleted file mode 100644 index 2ce7570c83..0000000000 --- a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownGraphPlacementsException.java +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Copyright 2019-2022 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.catalog.exceptions; - -public class UnknownGraphPlacementsException extends CatalogRuntimeException { - - public UnknownGraphPlacementsException( long graphId, int adapterId ) { - super( String.format( "Placement of graph with id %s does not exist on adapter with id %s", graphId, adapterId ) ); - } - -} diff --git a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownIndexException.java b/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownIndexException.java deleted file mode 100644 index 8f67c8378e..0000000000 --- a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownIndexException.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Copyright 2019-2020 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.catalog.exceptions; - - -public class UnknownIndexException extends CatalogException { - - - public UnknownIndexException( String indexName ) { - super( "There is no index with this name: '" + indexName + "'." 
); - } - - - public UnknownIndexException( long tableId, String indexName ) { - super( "Unknown index on table: " + tableId + " with name: " + indexName ); - } - -} diff --git a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownIndexIdRuntimeException.java b/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownIndexIdRuntimeException.java deleted file mode 100644 index cad3ed5610..0000000000 --- a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownIndexIdRuntimeException.java +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Copyright 2019-2021 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.catalog.exceptions; - -public class UnknownIndexIdRuntimeException extends CatalogRuntimeException { - - public UnknownIndexIdRuntimeException( long indexId ) { - super( "Unknown index id: " + indexId + ". There is no index with this id." ); - } - -} diff --git a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownIndexTypeException.java b/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownIndexTypeException.java deleted file mode 100644 index 2e15f3551b..0000000000 --- a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownIndexTypeException.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Copyright 2019-2021 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.catalog.exceptions; - - -public class UnknownIndexTypeException extends CatalogException { - - public UnknownIndexTypeException( String name ) { - super( "There is no index type with this name: '" + name + "'." ); - } - -} diff --git a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownIndexTypeRuntimeException.java b/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownIndexTypeRuntimeException.java deleted file mode 100644 index e9e5054fb3..0000000000 --- a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownIndexTypeRuntimeException.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Copyright 2019-2021 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.catalog.exceptions; - - -public class UnknownIndexTypeRuntimeException extends CatalogRuntimeException { - - public UnknownIndexTypeRuntimeException( long id ) { - super( "Unknown index type id: " + id + ". There is no index type with this id." ); - } - -} diff --git a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownKeyException.java b/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownKeyException.java deleted file mode 100644 index 241df2751d..0000000000 --- a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownKeyException.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Copyright 2019-2021 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.catalog.exceptions; - - -public class UnknownKeyException extends CatalogException { - - public UnknownKeyException( String keyName ) { - super( "There is no key with this name: '" + keyName + "'." ); - } - -} diff --git a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownKeyIdRuntimeException.java b/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownKeyIdRuntimeException.java deleted file mode 100644 index 0ac66d2e6d..0000000000 --- a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownKeyIdRuntimeException.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Copyright 2019-2021 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.catalog.exceptions; - - -public class UnknownKeyIdRuntimeException extends CatalogRuntimeException { - - public UnknownKeyIdRuntimeException( long keyId ) { - super( "Unknown key id: " + keyId + ". There is no key with this id." 
); - } - -} diff --git a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownPartitionGroupIdRuntimeException.java b/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownPartitionGroupIdRuntimeException.java deleted file mode 100644 index fd2d42dd36..0000000000 --- a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownPartitionGroupIdRuntimeException.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Copyright 2019-2021 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.catalog.exceptions; - - -public class UnknownPartitionGroupIdRuntimeException extends CatalogRuntimeException { - - public UnknownPartitionGroupIdRuntimeException( long partitionId ) { - super( "There is no partition with id '" + partitionId + "'." ); - } - -} diff --git a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownPartitionPlacementException.java b/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownPartitionPlacementException.java deleted file mode 100644 index 561ef8673b..0000000000 --- a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownPartitionPlacementException.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Copyright 2019-2021 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.catalog.exceptions; - - -public class UnknownPartitionPlacementException extends CatalogRuntimeException { - - public UnknownPartitionPlacementException( long adapterId, long partitionId ) { - super( "There is no partition placement for partition id '" + partitionId + "' on adapter with id '" + adapterId + "'" ); - } - -} diff --git a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownPartitionTypeException.java b/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownPartitionTypeException.java deleted file mode 100644 index 79c775b3e1..0000000000 --- a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownPartitionTypeException.java +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Copyright 2019-2021 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.catalog.exceptions; - - -public class UnknownPartitionTypeException extends CatalogException { - - public UnknownPartitionTypeException( final String name ) { - super( "There is no PartitionType with name: '" + name + "'" ); - } - -} - diff --git a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownPartitionTypeRuntimeException.java b/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownPartitionTypeRuntimeException.java deleted file mode 100644 index c17d0b6311..0000000000 --- a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownPartitionTypeRuntimeException.java +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Copyright 2019-2021 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.catalog.exceptions; - - -public class UnknownPartitionTypeRuntimeException extends CatalogRuntimeException { - - public UnknownPartitionTypeRuntimeException( final int id ) { - super( "There is no PartitionType with id: " + id ); - } - -} - diff --git a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownPlacementRoleException.java b/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownPlacementRoleException.java deleted file mode 100644 index f2a3c628cb..0000000000 --- a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownPlacementRoleException.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Copyright 2019-2022 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.polypheny.db.catalog.exceptions; - - -public class UnknownPlacementRoleException extends CatalogException { - - public UnknownPlacementRoleException( final String name ) { - super( "There is no PlacementRole with name: '" + name + "'" ); - } - -} diff --git a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownPlacementRoleRuntimeException.java b/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownPlacementRoleRuntimeException.java deleted file mode 100644 index 964ba0a7d6..0000000000 --- a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownPlacementRoleRuntimeException.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Copyright 2019-2022 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.catalog.exceptions; - - -public class UnknownPlacementRoleRuntimeException extends CatalogRuntimeException { - - public UnknownPlacementRoleRuntimeException( final int id ) { - super( "There is no PlacementRole with id: " + id ); - } - -} diff --git a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownPlacementTypeException.java b/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownPlacementTypeException.java deleted file mode 100644 index ecd7bb6ca6..0000000000 --- a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownPlacementTypeException.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Copyright 2019-2021 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.catalog.exceptions; - - -public class UnknownPlacementTypeException extends CatalogException { - - public UnknownPlacementTypeException( String name ) { - super( "There is no Foreign Key Option with name: " + name ); - } - -} diff --git a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownPlacementTypeRuntimeException.java b/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownPlacementTypeRuntimeException.java deleted file mode 100644 index b34e43f290..0000000000 --- a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownPlacementTypeRuntimeException.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Copyright 2019-2021 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.catalog.exceptions; - - -public class UnknownPlacementTypeRuntimeException extends CatalogRuntimeException { - - public UnknownPlacementTypeRuntimeException( int id ) { - super( "There is no Foreign Key Option with id: " + id ); - } - -} diff --git a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownQueryInterfaceException.java b/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownQueryInterfaceException.java deleted file mode 100644 index df1ee6008a..0000000000 --- a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownQueryInterfaceException.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Copyright 2019-2021 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.catalog.exceptions; - - -import lombok.Getter; - -public class UnknownQueryInterfaceException extends CatalogException { - - @Getter - private final String ifaceName; - - - public UnknownQueryInterfaceException( String ifaceName ) { - super( "There is no query interface with name " + ifaceName ); - this.ifaceName = ifaceName; - } - -} diff --git a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownQueryInterfaceRuntimeException.java b/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownQueryInterfaceRuntimeException.java deleted file mode 100644 index 8bd99b4bb7..0000000000 --- a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownQueryInterfaceRuntimeException.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Copyright 2019-2021 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.polypheny.db.catalog.exceptions; - - -public class UnknownQueryInterfaceRuntimeException extends CatalogRuntimeException { - - public UnknownQueryInterfaceRuntimeException( int ifaceId ) { - super( "There is no query interface with the id " + ifaceId ); - } - -} diff --git a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownSchemaException.java b/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownSchemaException.java deleted file mode 100644 index 77469c7cd9..0000000000 --- a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownSchemaException.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Copyright 2019-2022 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.catalog.exceptions; - -import lombok.Getter; - - -public class UnknownSchemaException extends CatalogException { - - @Getter - private final String schemaName; - - - public UnknownSchemaException( String schemaName ) { - super( "There is no schema with name '" + schemaName + "'" ); - this.schemaName = schemaName; - } - - -} diff --git a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownSchemaIdRuntimeException.java b/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownSchemaIdRuntimeException.java deleted file mode 100644 index 5aa2b34133..0000000000 --- a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownSchemaIdRuntimeException.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Copyright 2019-2021 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.catalog.exceptions; - - -public class UnknownSchemaIdRuntimeException extends CatalogRuntimeException { - - public UnknownSchemaIdRuntimeException( long schemaId ) { - super( "There is no schema with the id '" + schemaId + "'" ); - } - -} diff --git a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownSchemaTypeException.java b/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownSchemaTypeException.java deleted file mode 100644 index 8775ebda10..0000000000 --- a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownSchemaTypeException.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Copyright 2019-2022 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.catalog.exceptions; - - -public class UnknownSchemaTypeException extends CatalogException { - - public UnknownSchemaTypeException( final String name ) { - super( "There is no NamespaceType with name: " + name ); - } - -} diff --git a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownSchemaTypeRuntimeException.java b/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownSchemaTypeRuntimeException.java deleted file mode 100644 index b0ff09c4b0..0000000000 --- a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownSchemaTypeRuntimeException.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Copyright 2019-2022 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.catalog.exceptions; - - -public class UnknownSchemaTypeRuntimeException extends CatalogRuntimeException { - - public UnknownSchemaTypeRuntimeException( final int id ) { - super( "There is no NamespaceType with id: " + id ); - } - -} diff --git a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownTableException.java b/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownTableException.java deleted file mode 100644 index 146ec5fef0..0000000000 --- a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownTableException.java +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Copyright 2019-2021 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.catalog.exceptions; - - -import lombok.Getter; - -public class UnknownTableException extends CatalogException { - - @Getter - private final String tableName; - - - public UnknownTableException( String schemaName, String tableName ) { - super( "There is no table with name '" + tableName + "' in schema '" + schemaName + "'." ); - this.tableName = tableName; - } - - - public UnknownTableException( long schemaId, String tableName ) { - super( "There is no table with name '" + tableName + "' in the schema with the id '" + schemaId + "'." 
); - this.tableName = tableName; - } - -} diff --git a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownTableIdRuntimeException.java b/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownTableIdRuntimeException.java deleted file mode 100644 index 7e9aeb6aa6..0000000000 --- a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownTableIdRuntimeException.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Copyright 2019-2021 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.catalog.exceptions; - - -public class UnknownTableIdRuntimeException extends CatalogRuntimeException { - - public UnknownTableIdRuntimeException( long tableId ) { - super( "There is no table with id '" + tableId + "'." ); - } - -} diff --git a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownTableTypeException.java b/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownTableTypeException.java deleted file mode 100644 index d9fff70475..0000000000 --- a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownTableTypeException.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Copyright 2019-2022 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.catalog.exceptions; - - -public class UnknownTableTypeException extends CatalogException { - - public UnknownTableTypeException( final String name ) { - super( "There is no EntityType with name: " + name ); - } - -} diff --git a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownTableTypeRuntimeException.java b/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownTableTypeRuntimeException.java deleted file mode 100644 index 393fd92bde..0000000000 --- a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownTableTypeRuntimeException.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Copyright 2019-2022 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.polypheny.db.catalog.exceptions; - - -public class UnknownTableTypeRuntimeException extends CatalogRuntimeException { - - public UnknownTableTypeRuntimeException( final int id ) { - super( "There is no EntityType with id: " + id ); - } - -} diff --git a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownUserException.java b/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownUserException.java deleted file mode 100644 index fa5358661a..0000000000 --- a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownUserException.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Copyright 2019-2021 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.catalog.exceptions; - - -import lombok.Getter; - -public class UnknownUserException extends CatalogException { - - @Getter - private final String userName; - - - public UnknownUserException( String userName ) { - super( "There is no user with the username '" + userName + "'." ); - this.userName = userName; - } - -} diff --git a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownUserIdRuntimeException.java b/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownUserIdRuntimeException.java deleted file mode 100644 index 420dd2750e..0000000000 --- a/core/src/main/java/org/polypheny/db/catalog/exceptions/UnknownUserIdRuntimeException.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Copyright 2019-2021 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.catalog.exceptions; - - -public class UnknownUserIdRuntimeException extends CatalogRuntimeException { - - public UnknownUserIdRuntimeException( int userId ) { - super( "There is no user with the id '" + userId + "'." 
); - } - -} diff --git a/core/src/main/java/org/polypheny/db/catalog/logistic/Collation.java b/core/src/main/java/org/polypheny/db/catalog/logistic/Collation.java index 50cafcf52d..425df0b205 100644 --- a/core/src/main/java/org/polypheny/db/catalog/logistic/Collation.java +++ b/core/src/main/java/org/polypheny/db/catalog/logistic/Collation.java @@ -17,8 +17,6 @@ package org.polypheny.db.catalog.logistic; import lombok.NonNull; -import org.polypheny.db.catalog.exceptions.UnknownCollationException; -import org.polypheny.db.catalog.exceptions.UnknownCollationIdRuntimeException; import org.polypheny.db.config.RuntimeConfig; public enum Collation { @@ -44,17 +42,17 @@ public static Collation getById( int id ) { return c; } } - throw new UnknownCollationIdRuntimeException( id ); + throw new RuntimeException( "Unknown Collation with id: " + id ); } - public static Collation parse( @NonNull String str ) throws UnknownCollationException { + public static Collation parse( @NonNull String str ) { if ( str.equalsIgnoreCase( "CASE SENSITIVE" ) ) { return Collation.CASE_SENSITIVE; } else if ( str.equalsIgnoreCase( "CASE INSENSITIVE" ) ) { return Collation.CASE_INSENSITIVE; } - throw new UnknownCollationException( str ); + throw new RuntimeException( "Unknown Collation with name: " + str ); } diff --git a/core/src/main/java/org/polypheny/db/catalog/logistic/ConstraintType.java b/core/src/main/java/org/polypheny/db/catalog/logistic/ConstraintType.java index fab48f8147..aa66293bb7 100644 --- a/core/src/main/java/org/polypheny/db/catalog/logistic/ConstraintType.java +++ b/core/src/main/java/org/polypheny/db/catalog/logistic/ConstraintType.java @@ -17,8 +17,6 @@ package org.polypheny.db.catalog.logistic; import lombok.NonNull; -import org.polypheny.db.catalog.exceptions.UnknownConstraintTypeException; -import org.polypheny.db.catalog.exceptions.UnknownConstraintTypeRuntimeException; public enum ConstraintType { UNIQUE( 1 ), @@ -43,14 +41,14 @@ public static ConstraintType getById( int id ) { return e; } } - throw new UnknownConstraintTypeRuntimeException( id ); + throw new RuntimeException( "Unknown ConstraintType with id: " + id ); } - public static ConstraintType parse( @NonNull String str ) throws UnknownConstraintTypeException { + public static ConstraintType parse( @NonNull String str ) { if ( str.equalsIgnoreCase( "UNIQUE" ) ) { return ConstraintType.UNIQUE; } - throw new UnknownConstraintTypeException( str ); + throw new RuntimeException( "Unknown ConstraintType with name: " + str ); } } diff --git a/core/src/main/java/org/polypheny/db/catalog/logistic/DataPlacementRole.java b/core/src/main/java/org/polypheny/db/catalog/logistic/DataPlacementRole.java index 5a3e1ce675..ed0e6e5698 100644 --- a/core/src/main/java/org/polypheny/db/catalog/logistic/DataPlacementRole.java +++ b/core/src/main/java/org/polypheny/db/catalog/logistic/DataPlacementRole.java @@ -16,9 +16,6 @@ package org.polypheny.db.catalog.logistic; -import org.polypheny.db.catalog.exceptions.UnknownPlacementRoleException; -import org.polypheny.db.catalog.exceptions.UnknownPlacementRoleRuntimeException; - public enum DataPlacementRole { UPTODATE( 0 ), REFRESHABLE( 1 ); @@ -42,17 +39,17 @@ public static DataPlacementRole getById( final int id ) { return t; } } - throw new UnknownPlacementRoleRuntimeException( id ); + throw new RuntimeException( "Unknown DataPlacementRole with id: " + id ); } - public static DataPlacementRole getByName( final String name ) throws UnknownPlacementRoleException { + public static DataPlacementRole getByName( final 
String name ) { for ( DataPlacementRole t : values() ) { if ( t.name().equalsIgnoreCase( name ) ) { return t; } } - throw new UnknownPlacementRoleException( name ); + throw new RuntimeException( "Unknown DataPlacementRole with name: " + name ); } } diff --git a/core/src/main/java/org/polypheny/db/catalog/logistic/EntityType.java b/core/src/main/java/org/polypheny/db/catalog/logistic/EntityType.java index 4273d95793..55cf8b9cce 100644 --- a/core/src/main/java/org/polypheny/db/catalog/logistic/EntityType.java +++ b/core/src/main/java/org/polypheny/db/catalog/logistic/EntityType.java @@ -17,8 +17,6 @@ package org.polypheny.db.catalog.logistic; import lombok.RequiredArgsConstructor; -import org.polypheny.db.catalog.exceptions.UnknownTableTypeException; -import org.polypheny.db.catalog.exceptions.UnknownTableTypeRuntimeException; public enum EntityType { ENTITY( 1 ), @@ -46,17 +44,17 @@ public static EntityType getById( final int id ) { return t; } } - throw new UnknownTableTypeRuntimeException( id ); + throw new RuntimeException( "Unknown EntityType with id: " + id ); } - public static EntityType getByName( final String name ) throws UnknownTableTypeException { + public static EntityType getByName( final String name ) { for ( EntityType t : values() ) { if ( t.name().equalsIgnoreCase( name ) ) { return t; } } - throw new UnknownTableTypeException( name ); + throw new RuntimeException( "Unknown EntityType with name: " + name ); } diff --git a/core/src/main/java/org/polypheny/db/catalog/logistic/ForeignKeyOption.java b/core/src/main/java/org/polypheny/db/catalog/logistic/ForeignKeyOption.java index eab7c4be4a..598273733f 100644 --- a/core/src/main/java/org/polypheny/db/catalog/logistic/ForeignKeyOption.java +++ b/core/src/main/java/org/polypheny/db/catalog/logistic/ForeignKeyOption.java @@ -17,8 +17,6 @@ package org.polypheny.db.catalog.logistic; import lombok.NonNull; -import org.polypheny.db.catalog.exceptions.UnknownForeignKeyOptionException; -import org.polypheny.db.catalog.exceptions.UnknownForeignKeyOptionRuntimeException; public enum ForeignKeyOption { NONE( -1 ), @@ -47,11 +45,11 @@ public static ForeignKeyOption getById( int id ) { return e; } } - throw new UnknownForeignKeyOptionRuntimeException( id ); + throw new RuntimeException( "Unknown ForeignKeyOption with id: " + id ); } - public static ForeignKeyOption parse( @NonNull String str ) throws UnknownForeignKeyOptionException { + public static ForeignKeyOption parse( @NonNull String str ) { if ( str.equalsIgnoreCase( "NONE" ) ) { return ForeignKeyOption.NONE; } else if ( str.equalsIgnoreCase( "RESTRICT" ) ) { @@ -63,6 +61,6 @@ public static ForeignKeyOption parse( @NonNull String str ) throws UnknownForeig } else if ( str.equalsIgnoreCase( "SET DEFAULT" ) ) { return ForeignKeyOption.SET_DEFAULT; }*/ - throw new UnknownForeignKeyOptionException( str ); + throw new RuntimeException( "Unknown ForeignKeyOption with name: " + str ); } } diff --git a/core/src/main/java/org/polypheny/db/catalog/logistic/IndexType.java b/core/src/main/java/org/polypheny/db/catalog/logistic/IndexType.java index d54f952467..e9973ca912 100644 --- a/core/src/main/java/org/polypheny/db/catalog/logistic/IndexType.java +++ b/core/src/main/java/org/polypheny/db/catalog/logistic/IndexType.java @@ -17,8 +17,6 @@ package org.polypheny.db.catalog.logistic; import lombok.NonNull; -import org.polypheny.db.catalog.exceptions.UnknownIndexTypeException; -import org.polypheny.db.catalog.exceptions.UnknownIndexTypeRuntimeException; public enum IndexType { MANUAL( 1 ), @@ -43,16 
+41,16 @@ public static IndexType getById( int id ) { return e; } } - throw new UnknownIndexTypeRuntimeException( id ); + throw new RuntimeException( "Unknown IndexType with id: " + id ); } - public static IndexType parse( @NonNull String str ) throws UnknownIndexTypeException { + public static IndexType parse( @NonNull String str ) { if ( str.equalsIgnoreCase( "MANUAL" ) ) { return IndexType.MANUAL; } else if ( str.equalsIgnoreCase( "AUTOMATIC" ) ) { return IndexType.AUTOMATIC; } - throw new UnknownIndexTypeException( str ); + throw new RuntimeException( "Unknown IndexType with name: " + str ); } } diff --git a/core/src/main/java/org/polypheny/db/catalog/logistic/NamespaceType.java b/core/src/main/java/org/polypheny/db/catalog/logistic/NamespaceType.java index 6c4500bb1d..1d2cfd17ef 100644 --- a/core/src/main/java/org/polypheny/db/catalog/logistic/NamespaceType.java +++ b/core/src/main/java/org/polypheny/db/catalog/logistic/NamespaceType.java @@ -17,8 +17,6 @@ package org.polypheny.db.catalog.logistic; import com.google.gson.annotations.SerializedName; -import org.polypheny.db.catalog.exceptions.UnknownSchemaTypeException; -import org.polypheny.db.catalog.exceptions.UnknownSchemaTypeRuntimeException; import org.polypheny.db.plan.AlgTrait; import org.polypheny.db.schema.ModelTrait; @@ -51,23 +49,23 @@ public static NamespaceType getDefault() { } - public static NamespaceType getById( final int id ) throws UnknownSchemaTypeException { + public static NamespaceType getById( final int id ) { for ( NamespaceType t : values() ) { if ( t.id == id ) { return t; } } - throw new UnknownSchemaTypeRuntimeException( id ); + throw new RuntimeException( "Unknown NamespaceType with id: " + id ); } - public static NamespaceType getByName( final String name ) throws UnknownSchemaTypeException { + public static NamespaceType getByName( final String name ) { for ( NamespaceType t : values() ) { if ( t.name().equalsIgnoreCase( name ) ) { return t; } } - throw new UnknownSchemaTypeException( name ); + throw new RuntimeException( "Unknown NamespaceType with name: " + name ); } diff --git a/core/src/main/java/org/polypheny/db/catalog/logistic/PartitionType.java b/core/src/main/java/org/polypheny/db/catalog/logistic/PartitionType.java index d49c676e7a..d6618d8224 100644 --- a/core/src/main/java/org/polypheny/db/catalog/logistic/PartitionType.java +++ b/core/src/main/java/org/polypheny/db/catalog/logistic/PartitionType.java @@ -16,9 +16,6 @@ package org.polypheny.db.catalog.logistic; -import org.polypheny.db.catalog.exceptions.UnknownPartitionTypeException; -import org.polypheny.db.catalog.exceptions.UnknownPartitionTypeRuntimeException; - public enum PartitionType { NONE( 0 ), RANGE( 1 ), @@ -46,17 +43,17 @@ public static PartitionType getById( final int id ) { return t; } } - throw new UnknownPartitionTypeRuntimeException( id ); + throw new RuntimeException( "Unknown PartitionType with id: " + id ); } - public static PartitionType getByName( final String name ) throws UnknownPartitionTypeException { + public static PartitionType getByName( final String name ) { for ( PartitionType t : values() ) { if ( t.name().equalsIgnoreCase( name ) ) { return t; } } - throw new UnknownPartitionTypeException( name ); + throw new RuntimeException( "Unknown PartitionType with name: " + name ); } } diff --git a/core/src/main/java/org/polypheny/db/catalog/logistic/PlacementType.java b/core/src/main/java/org/polypheny/db/catalog/logistic/PlacementType.java index 014a0bce0f..a920c90513 100644 --- 
a/core/src/main/java/org/polypheny/db/catalog/logistic/PlacementType.java +++ b/core/src/main/java/org/polypheny/db/catalog/logistic/PlacementType.java @@ -17,8 +17,6 @@ package org.polypheny.db.catalog.logistic; import lombok.NonNull; -import org.polypheny.db.catalog.exceptions.UnknownPlacementTypeException; -import org.polypheny.db.catalog.exceptions.UnknownPlacementTypeRuntimeException; public enum PlacementType { MANUAL( 1 ), @@ -44,17 +42,17 @@ public static PlacementType getById( int id ) { return e; } } - throw new UnknownPlacementTypeRuntimeException( id ); + throw new RuntimeException( "Unknown PlacementType with id: " + id ); } - public static PlacementType parse( @NonNull String str ) throws UnknownPlacementTypeException { + public static PlacementType parse( @NonNull String str ) { if ( str.equalsIgnoreCase( "MANUAL" ) ) { return PlacementType.MANUAL; } else if ( str.equalsIgnoreCase( "AUTOMATIC" ) ) { return PlacementType.AUTOMATIC; } - throw new UnknownPlacementTypeException( str ); + throw new RuntimeException( "Unknown PlacementType with name: " + str ); } diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalRelSnapshot.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalRelSnapshot.java index 89b9678242..24195ceaaf 100644 --- a/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalRelSnapshot.java +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalRelSnapshot.java @@ -26,12 +26,6 @@ import org.polypheny.db.catalog.entity.CatalogPrimaryKey; import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalTable; -import org.polypheny.db.catalog.exceptions.UnknownColumnException; -import org.polypheny.db.catalog.exceptions.UnknownConstraintException; -import org.polypheny.db.catalog.exceptions.UnknownForeignKeyException; -import org.polypheny.db.catalog.exceptions.UnknownIndexException; -import org.polypheny.db.catalog.exceptions.UnknownSchemaException; -import org.polypheny.db.catalog.exceptions.UnknownTableException; import org.polypheny.db.catalog.logistic.Pattern; public interface LogicalRelSnapshot { @@ -57,7 +51,6 @@ public interface LogicalRelSnapshot { * * @param tableName The name of the table * @return The table - * @throws UnknownTableException If there is no table with this name in the specified database and schema. */ LogicalTable getTable( long namespaceId, String tableName ); @@ -113,7 +106,6 @@ public interface LogicalRelSnapshot { * @param tableId The id of the table * @param columnName The name of the column * @return A CatalogColumn - * @throws UnknownColumnException If there is no column with this name in the specified table of the database and schema. */ LogicalColumn getColumn( long tableId, String columnName ); @@ -124,7 +116,7 @@ public interface LogicalRelSnapshot { * @param columnName The name of the column * @return A CatalogColumn */ - LogicalColumn getColumn( String tableName, String columnName ) throws UnknownColumnException, UnknownSchemaException, UnknownTableException; + LogicalColumn getColumn( String tableName, String columnName ); /** * Checks if there is a column with the specified name in the specified table. 
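The hunks above all apply one refactoring pattern: the dedicated Unknown*Exception and Unknown*RuntimeException classes are deleted, the lookup methods lose their checked throws clauses, and a failed lookup now surfaces as a plain RuntimeException carrying an inline message. A minimal sketch of the resulting enum-lookup idiom; ExampleType is a placeholder name, not a class in this patch:

    public enum ExampleType {
        A( 1 ),
        B( 2 );

        public final int id;

        ExampleType( int id ) {
            this.id = id;
        }

        public static ExampleType getById( int id ) {
            for ( ExampleType t : values() ) {
                if ( t.id == id ) {
                    return t;
                }
            }
            // Unchecked failure replaces the former checked Unknown*Exception
            throw new RuntimeException( "Unknown ExampleType with id: " + id );
        }
    }

Callers of getById no longer need a try/catch or a throws declaration; an unknown id simply fails fast at runtime.
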
@@ -215,7 +207,7 @@ public interface LogicalRelSnapshot { * @param constraintName The name of the constraint * @return The constraint */ - CatalogConstraint getConstraint( long tableId, String constraintName ) throws UnknownConstraintException; + CatalogConstraint getConstraint( long tableId, String constraintName ); /** * Return the foreign key with the specified name from the specified table @@ -224,7 +216,7 @@ public interface LogicalRelSnapshot { * @param foreignKeyName The name of the foreign key * @return The foreign key */ - CatalogForeignKey getForeignKey( long tableId, String foreignKeyName ) throws UnknownForeignKeyException; + CatalogForeignKey getForeignKey( long tableId, String foreignKeyName ); List getIndexes(); @@ -260,7 +252,7 @@ public interface LogicalRelSnapshot { * @param indexName The name of the index * @return The Index */ - CatalogIndex getIndex( long tableId, String indexName ) throws UnknownIndexException; + CatalogIndex getIndex( long tableId, String indexName ); /** * Checks if there is an index with the specified name in the specified table. diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/Snapshot.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/Snapshot.java index dd9f83347f..7efcec4377 100644 --- a/core/src/main/java/org/polypheny/db/catalog/snapshot/Snapshot.java +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/Snapshot.java @@ -32,8 +32,6 @@ import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalEntity; import org.polypheny.db.catalog.entity.logical.LogicalTable; -import org.polypheny.db.catalog.exceptions.UnknownQueryInterfaceException; -import org.polypheny.db.catalog.exceptions.UnknownUserException; import org.polypheny.db.catalog.logistic.Pattern; import org.polypheny.db.config.RuntimeConfig; import org.polypheny.db.nodes.Identifier; @@ -92,9 +90,8 @@ default Expression getSnapshotExpression( long id ) { * * @param name The name of the user * @return The user - * @throws UnknownUserException If there is no user with the specified name */ - CatalogUser getUser( String name ) throws UnknownUserException; + CatalogUser getUser( String name ); /** * Get the user with the specified id. 
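Apart from the removed throws clauses these getter signatures stay unchanged, but the snapshot implementations further down resolve misses with Map.get(...) or findFirst().orElse( null ), so a missing entry now yields null rather than a checked exception. A hedged sketch of defensive caller code under that contract; the snapshot variable and the message text are assumptions for illustration, not part of this patch:

    // getUser( String ) used to declare UnknownUserException; callers must now null-check
    CatalogUser user = snapshot.getUser( "alice" );
    if ( user == null ) {
        throw new RuntimeException( "There is no user with the username 'alice'." );
    }
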
@@ -147,7 +144,7 @@ default Expression getSnapshotExpression( long id ) { * @param uniqueName The unique name of the query interface * @return The CatalogQueryInterface */ - CatalogQueryInterface getQueryInterface( String uniqueName ) throws UnknownQueryInterfaceException; + CatalogQueryInterface getQueryInterface( String uniqueName ); /** * Get a query interface by its id @@ -191,4 +188,5 @@ default List getOperatorList() { AllocSnapshot alloc(); PhysicalSnapshot physical(); + } diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalRelSnapshotImpl.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalRelSnapshotImpl.java index b8eb0df56b..d03cec8831 100644 --- a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalRelSnapshotImpl.java +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalRelSnapshotImpl.java @@ -34,11 +34,6 @@ import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalTable; -import org.polypheny.db.catalog.exceptions.UnknownColumnException; -import org.polypheny.db.catalog.exceptions.UnknownConstraintException; -import org.polypheny.db.catalog.exceptions.UnknownForeignKeyException; -import org.polypheny.db.catalog.exceptions.UnknownSchemaException; -import org.polypheny.db.catalog.exceptions.UnknownTableException; import org.polypheny.db.catalog.logistic.Pattern; import org.polypheny.db.catalog.snapshot.LogicalRelSnapshot; import org.polypheny.db.util.Pair; @@ -228,7 +223,7 @@ public LogicalColumn getColumn( long tableId, String columnName ) { @Override - public LogicalColumn getColumn( String tableName, String columnName ) throws UnknownColumnException, UnknownSchemaException, UnknownTableException { + public LogicalColumn getColumn( String tableName, String columnName ) { return tableIdColumnNameColumn.get( Pair.of( tableName, columnName ) ); } @@ -295,13 +290,13 @@ public List getConstraints( CatalogKey key ) { @Override - public CatalogConstraint getConstraint( long tableId, String constraintName ) throws UnknownConstraintException { + public CatalogConstraint getConstraint( long tableId, String constraintName ) { return tableConstraints.get( tableId ).stream().filter( c -> c.name.equals( constraintName ) ).findFirst().orElse( null ); } @Override - public CatalogForeignKey getForeignKey( long tableId, String foreignKeyName ) throws UnknownForeignKeyException { + public CatalogForeignKey getForeignKey( long tableId, String foreignKeyName ) { return tableForeignKeys.get( tableId ).stream().filter( e -> e.name.equals( foreignKeyName ) ).findFirst().orElse( null ); } diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/SnapshotImpl.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/SnapshotImpl.java index b98af6a6c1..d001210029 100644 --- a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/SnapshotImpl.java +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/SnapshotImpl.java @@ -33,8 +33,6 @@ import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalEntity; import org.polypheny.db.catalog.entity.logical.LogicalTable; -import org.polypheny.db.catalog.exceptions.UnknownQueryInterfaceException; -import org.polypheny.db.catalog.exceptions.UnknownUserException; import org.polypheny.db.catalog.logistic.Pattern; import org.polypheny.db.catalog.snapshot.AllocSnapshot; import 
org.polypheny.db.catalog.snapshot.LogicalDocSnapshot; @@ -120,7 +118,7 @@ public boolean checkIfExistsNamespace( String name ) { @Override - public CatalogUser getUser( String name ) throws UnknownUserException { + public CatalogUser getUser( String name ) { return userNames.get( name ); } @@ -162,7 +160,7 @@ public List getQueryInterfaces() { @Override - public CatalogQueryInterface getQueryInterface( String uniqueName ) throws UnknownQueryInterfaceException { + public CatalogQueryInterface getQueryInterface( String uniqueName ) { return interfaceNames.get( uniqueName ); } diff --git a/core/src/main/java/org/polypheny/db/ddl/DdlManager.java b/core/src/main/java/org/polypheny/db/ddl/DdlManager.java index b2e13fee53..497caab9cf 100644 --- a/core/src/main/java/org/polypheny/db/ddl/DdlManager.java +++ b/core/src/main/java/org/polypheny/db/ddl/DdlManager.java @@ -29,36 +29,11 @@ import org.polypheny.db.catalog.entity.MaterializedCriteria; import org.polypheny.db.catalog.entity.logical.LogicalCollection; import org.polypheny.db.catalog.entity.logical.LogicalTable; -import org.polypheny.db.catalog.exceptions.ColumnAlreadyExistsException; -import org.polypheny.db.catalog.exceptions.EntityAlreadyExistsException; -import org.polypheny.db.catalog.exceptions.GenericCatalogException; -import org.polypheny.db.catalog.exceptions.NamespaceAlreadyExistsException; -import org.polypheny.db.catalog.exceptions.UnknownAdapterException; -import org.polypheny.db.catalog.exceptions.UnknownColumnException; -import org.polypheny.db.catalog.exceptions.UnknownKeyException; -import org.polypheny.db.catalog.exceptions.UnknownPartitionTypeException; -import org.polypheny.db.catalog.exceptions.UnknownSchemaException; -import org.polypheny.db.catalog.exceptions.UnknownTableException; -import org.polypheny.db.catalog.exceptions.UnknownUserException; import org.polypheny.db.catalog.logistic.Collation; import org.polypheny.db.catalog.logistic.ConstraintType; import org.polypheny.db.catalog.logistic.ForeignKeyOption; import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.catalog.logistic.PlacementType; -import org.polypheny.db.ddl.exception.AlterSourceException; -import org.polypheny.db.ddl.exception.ColumnNotExistsException; -import org.polypheny.db.ddl.exception.DdlOnSourceException; -import org.polypheny.db.ddl.exception.IndexExistsException; -import org.polypheny.db.ddl.exception.IndexPreventsRemovalException; -import org.polypheny.db.ddl.exception.LastPlacementException; -import org.polypheny.db.ddl.exception.MissingColumnPlacementException; -import org.polypheny.db.ddl.exception.NotNullAndDefaultValueException; -import org.polypheny.db.ddl.exception.PartitionGroupNamesNotUniqueException; -import org.polypheny.db.ddl.exception.PlacementAlreadyExistsException; -import org.polypheny.db.ddl.exception.PlacementIsPrimaryException; -import org.polypheny.db.ddl.exception.PlacementNotExistsException; -import org.polypheny.db.ddl.exception.SchemaNotExistException; -import org.polypheny.db.ddl.exception.UnknownIndexMethodException; import org.polypheny.db.languages.QueryLanguage; import org.polypheny.db.nodes.DataTypeSpec; import org.polypheny.db.nodes.Identifier; @@ -115,7 +90,7 @@ public static DdlManager getInstance() { * @param ifNotExists whether to silently ignore if the schema already exists * @param replace whether to replace an existing schema */ - public abstract long createNamespace( String name, NamespaceType type, boolean ifNotExists, boolean replace ) throws NamespaceAlreadyExistsException; + public abstract long createNamespace( String name, NamespaceType type, boolean ifNotExists, boolean replace ); /** * Adds a new adapter (data store or data source)
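With the throws clause gone, a CREATE NAMESPACE call site shrinks to a single line; a duplicate name or the unsupported REPLACE path is expected to surface as an unchecked exception rather than a NamespaceAlreadyExistsException. A hypothetical call, with the name and flags purely illustrative:

```java
// ifNotExists = true: an existing namespace of the same name is returned
// rather than treated as an error; replace = false.
long namespaceId = DdlManager.getInstance()
        .createNamespace( "sales", NamespaceType.RELATIONAL, true, false );
```

The deleted import block above is the other half of the story: fourteen dedicated ddl.exception types become dead weight once the interface stops declaring them.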
@@ -133,7 +108,7 @@ public static DdlManager getInstance() { * @param name name of the adapter to be dropped * @param statement the query statement */ - public abstract void dropAdapter( String name, Statement statement ) throws UnknownAdapterException; + public abstract void dropAdapter( String name, Statement statement ); /** * Change the name of a schema @@ -141,7 +116,7 @@ public static DdlManager getInstance() { * @param newName the new name for the schema * @param oldName the current name of the schema */ - public abstract void renameSchema( String newName, String oldName ) throws NamespaceAlreadyExistsException, UnknownSchemaException; + public abstract void renameNamespace( String newName, String oldName ); /** * Adds a column to an existing source table @@ -153,7 +128,7 @@ public static DdlManager getInstance() { * @param afterColumnName the name of the column after which the new column is inserted; can be null * @param defaultValue the default value of the inserted column */ - public abstract void addColumnToSourceTable( LogicalTable catalogTable, String columnPhysicalName, String columnLogicalName, String beforeColumnName, String afterColumnName, String defaultValue, Statement statement ) throws ColumnAlreadyExistsException, DdlOnSourceException, ColumnNotExistsException; + public abstract void addColumnToSourceTable( LogicalTable catalogTable, String columnPhysicalName, String columnLogicalName, String beforeColumnName, String afterColumnName, String defaultValue, Statement statement ); /** * Add a column to an existing table @@ -167,7 +142,7 @@ public static DdlManager getInstance() { * @param defaultValue a default value for the column; can be null * @param statement the query statement */ - public abstract void addColumn( String columnName, LogicalTable catalogTable, String beforeColumnName, String afterColumnName, ColumnTypeInformation type, boolean nullable, String defaultValue, Statement statement ) throws NotNullAndDefaultValueException, ColumnAlreadyExistsException, ColumnNotExistsException; + public abstract void addColumn( String columnName, LogicalTable catalogTable, String beforeColumnName, String afterColumnName, ColumnTypeInformation type, boolean nullable, String defaultValue, Statement statement ); /** * Add a foreign key to a table @@ -180,7 +155,7 @@ public static DdlManager getInstance() { * @param onUpdate how to enforce the constraint on update * @param onDelete how to enforce the constraint on delete */ - public abstract void addForeignKey( LogicalTable catalogTable, LogicalTable refTable, List columnNames, List refColumnNames, String constraintName, ForeignKeyOption onUpdate, ForeignKeyOption onDelete ) throws UnknownColumnException, GenericCatalogException; + public abstract void addForeignKey( LogicalTable catalogTable, LogicalTable refTable, List columnNames, List refColumnNames, String constraintName, ForeignKeyOption onUpdate, ForeignKeyOption onDelete ); /** * Adds an index to a table @@ -193,7 +168,7 @@ public static DdlManager getInstance() { * @param location instance of the data store on which to create the index; if null, default strategy is being used * @param statement the initial query statement */ - public abstract void addIndex( LogicalTable catalogTable, String indexMethodName, List columnNames, String indexName, boolean isUnique, DataStore location, Statement statement ) throws UnknownColumnException, UnknownIndexMethodException, GenericCatalogException, UnknownTableException, UnknownUserException, UnknownSchemaException, UnknownKeyException, TransactionException, AlterSourceException, IndexExistsException, MissingColumnPlacementException; + public abstract void addIndex( LogicalTable catalogTable, String indexMethodName, List columnNames, String indexName, boolean isUnique, DataStore location, Statement statement ) throws TransactionException; /** * Adds an index located in Polypheny to a table
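addIndex and addPolyphenyIndex are the outliers in this interface: of the eleven exception types addIndex used to declare, only the checked TransactionException survives. A sketch of what a call site might look like under the new signature; the helper and the index parameters are assumptions for illustration:

```java
import java.util.List;

// Hypothetical helper; 'table', 'store' and 'statement' would come from the caller.
static void createEmpidIndex( LogicalTable table, DataStore store, Statement statement ) {
    try {
        DdlManager.getInstance().addIndex( table, "btree", List.of( "empid" ), "idx_empid", true, store, statement );
    } catch ( TransactionException e ) {
        // the one remaining checked path; all other failures (unknown column,
        // existing index, missing placement, ...) now arrive unchecked
        throw new GenericRuntimeException( "Could not create index", e );
    }
}
```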
@@ -205,7 +180,7 @@ public static DdlManager getInstance() { * @param isUnique whether the index is unique * @param statement the initial query statement */ - public abstract void addPolyphenyIndex( LogicalTable catalogTable, String indexMethodName, List columnNames, String indexName, boolean isUnique, Statement statement ) throws UnknownColumnException, UnknownIndexMethodException, GenericCatalogException, UnknownTableException, UnknownUserException, UnknownSchemaException, UnknownKeyException, TransactionException, AlterSourceException, IndexExistsException, MissingColumnPlacementException; + public abstract void addPolyphenyIndex( LogicalTable catalogTable, String indexMethodName, List columnNames, String indexName, boolean isUnique, Statement statement ) throws TransactionException; /** * Adds new column placements to a table @@ -217,7 +192,7 @@ public static DdlManager getInstance() { * @param dataStore the data store on which to create the placement * @param statement the query statement */ - public abstract void addDataPlacement( LogicalTable catalogTable, List columnIds, List partitionGroupIds, List partitionGroupNames, DataStore dataStore, Statement statement ) throws PlacementAlreadyExistsException; + public abstract void addDataPlacement( LogicalTable catalogTable, List columnIds, List partitionGroupIds, List partitionGroupNames, DataStore dataStore, Statement statement ); /** * Adds a new primary key to a table @@ -226,7 +201,7 @@ public static DdlManager getInstance() { * @param columnNames the names of all columns in the primary key * @param statement the query statement */ - public abstract void addPrimaryKey( LogicalTable catalogTable, List columnNames, Statement statement ) throws DdlOnSourceException; + public abstract void addPrimaryKey( LogicalTable catalogTable, List columnNames, Statement statement ); /** * Adds a unique constraint to a table @@ -235,7 +210,7 @@ public static DdlManager getInstance() { * @param columnNames the names of the columns which are part of the constraint * @param constraintName the name of the unique constraint */ - public abstract void addUniqueConstraint( LogicalTable catalogTable, List columnNames, String constraintName ) throws DdlOnSourceException; + public abstract void addUniqueConstraint( LogicalTable catalogTable, List columnNames, String constraintName ); /** * Drop a specific column in a table @@ -244,7 +219,7 @@ public static DdlManager getInstance() { * @param columnName the name of the column which is dropped * @param statement the query statement */ - public abstract void dropColumn( LogicalTable catalogTable, String columnName, Statement statement ) throws ColumnNotExistsException; + public abstract void dropColumn( LogicalTable catalogTable, String columnName, Statement statement ); /** * Drop a specific constraint from a table @@ -252,7 +227,7 @@ public static DdlManager getInstance() { * @param catalogTable the table * @param constraintName the name of the
constraint to be dropped */ - public abstract void dropConstraint( LogicalTable catalogTable, String constraintName ) throws DdlOnSourceException; + public abstract void dropConstraint( LogicalTable catalogTable, String constraintName ); /** * Drop a foreign key of a table @@ -260,7 +235,7 @@ public static DdlManager getInstance() { * @param catalogTable the table the foreign key belongs to * @param foreignKeyName the name of the foreign key to drop */ - public abstract void dropForeignKey( LogicalTable catalogTable, String foreignKeyName ) throws DdlOnSourceException; + public abstract void dropForeignKey( LogicalTable catalogTable, String foreignKeyName ); /** * Drop an index @@ -269,7 +244,7 @@ public static DdlManager getInstance() { * @param indexName the name of the index to drop * @param statement the query statement */ - public abstract void dropIndex( LogicalTable catalogTable, String indexName, Statement statement ) throws DdlOnSourceException; + public abstract void dropIndex( LogicalTable catalogTable, String indexName, Statement statement ); /** * Drop the data placement of a table on a specified data store @@ -278,14 +253,14 @@ public static DdlManager getInstance() { * @param storeInstance the data store from which to drop the placement * @param statement the query statement */ - public abstract void dropDataPlacement( LogicalTable catalogTable, DataStore storeInstance, Statement statement ) throws PlacementNotExistsException, LastPlacementException; + public abstract void dropTableAllocation( LogicalTable catalogTable, DataStore storeInstance, Statement statement ); /** * Drop the primary key of a table * * @param catalogTable the table */ - public abstract void dropPrimaryKey( LogicalTable catalogTable ) throws DdlOnSourceException; + public abstract void dropPrimaryKey( LogicalTable catalogTable ); /** * Set the type of the column @@ -295,7 +270,7 @@ public static DdlManager getInstance() { * @param columnName the name of the column to be modified * @param typeInformation the new type of the column * @param statement the used statement */ - public abstract void setColumnType( LogicalTable catalogTable, String columnName, ColumnTypeInformation typeInformation, Statement statement ) throws DdlOnSourceException, ColumnNotExistsException, GenericCatalogException; + public abstract void setColumnType( LogicalTable catalogTable, String columnName, ColumnTypeInformation typeInformation, Statement statement ); /** * Set if the column can hold the value NULL or not @@ -305,7 +280,7 @@ public static DdlManager getInstance() { * @param nullable if the column should be nullable * @param statement the used statement */ - public abstract void setColumnNullable( LogicalTable catalogTable, String columnName, boolean nullable, Statement statement ) throws ColumnNotExistsException, DdlOnSourceException, GenericCatalogException; + public abstract void setColumnNullable( LogicalTable catalogTable, String columnName, boolean nullable, Statement statement ); /** * Changes the position of the column and places it before or after the provided columns @@ -316,7 +291,7 @@ public static DdlManager getInstance() { * @param beforeColumnName change position of the column and place it before this column; nullable * @param afterColumnName change position of the column and place it after this column; nullable * @param statement the used statement */ - public abstract void setColumnPosition( LogicalTable catalogTable, String columnName, String beforeColumnName, String afterColumnName, Statement statement ) throws ColumnNotExistsException; + public abstract void setColumnPosition( LogicalTable catalogTable, String columnName, String beforeColumnName, String afterColumnName, Statement statement ); /** * Set the collation to the column
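Taken together, the column modifiers now read like a compact ALTER TABLE vocabulary with no checked noise left. A hypothetical helper wiring a few of them up; every referenced object is an assumption for illustration, not part of this patch:

```java
// Sketch only: 'ddl', 'table', 'typeInfo' and 'statement' are supplied by the caller.
static void tightenSalaryColumn( DdlManager ddl, LogicalTable table, ColumnTypeInformation typeInfo, Statement statement ) {
    ddl.setColumnType( table, "salary", typeInfo, statement );         // ALTER COLUMN salary TYPE ...
    ddl.setColumnNullable( table, "salary", false, statement );        // ... SET NOT NULL
    ddl.setColumnPosition( table, "salary", null, "name", statement ); // move salary after "name"
    ddl.dropDefaultValue( table, "salary", statement );                // ... DROP DEFAULT
}
```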
@@ -326,7 +301,7 @@ public static DdlManager getInstance() { * @param columnName the name of the column to be modified * @param collation the new collation of the column * @param statement the used statement */ - public abstract void setColumnCollation( LogicalTable catalogTable, String columnName, Collation collation, Statement statement ) throws ColumnNotExistsException, DdlOnSourceException; + public abstract void setColumnCollation( LogicalTable catalogTable, String columnName, Collation collation, Statement statement ); /** * Set the default value of the column @@ -336,7 +311,7 @@ public static DdlManager getInstance() { * @param columnName the name of the column to be modified * @param defaultValue the new default value of the column * @param statement the used statement */ - public abstract void setDefaultValue( LogicalTable catalogTable, String columnName, String defaultValue, Statement statement ) throws ColumnNotExistsException; + public abstract void setDefaultValue( LogicalTable catalogTable, String columnName, String defaultValue, Statement statement ); /** * Drop the default value of the column @@ -345,7 +320,7 @@ public static DdlManager getInstance() { * @param columnName the name of the column to be modified * @param statement the used statement */ - public abstract void dropDefaultValue( LogicalTable catalogTable, String columnName, Statement statement ) throws ColumnNotExistsException; + public abstract void dropDefaultValue( LogicalTable catalogTable, String columnName, Statement statement ); /** * Modify the placement of a table on a specified data store. This method compares the specified list of column ids with @@ -362,7 +337,7 @@ public static DdlManager getInstance() { * @param storeInstance the data store * @param statement the used statement */ - public abstract void modifyDataPlacement( LogicalTable catalogTable, List columnIds, List partitionGroupIds, List partitionGroupNames, DataStore storeInstance, Statement statement ) throws PlacementNotExistsException, IndexPreventsRemovalException, LastPlacementException; + public abstract void modifyDataPlacement( LogicalTable catalogTable, List columnIds, List partitionGroupIds, List partitionGroupNames, DataStore storeInstance, Statement statement ); /** * Modifies the partition distribution on the selected store. Can be used to add or remove partitions on a store. @@ -373,7 +348,7 @@ public static DdlManager getInstance() { * @param storeInstance the data store on which the partition placements should be altered * @param statement the used statement */ - public abstract void modifyPartitionPlacement( LogicalTable catalogTable, List partitionGroupIds, DataStore storeInstance, Statement statement ) throws LastPlacementException; + public abstract void modifyPartitionPlacement( LogicalTable catalogTable, List partitionGroupIds, DataStore storeInstance, Statement statement ); /** * Add a column placement for a specified column on a specified data store.
If the store already contains a placement of @@ -384,7 +359,7 @@ public static DdlManager getInstance() { * @param storeInstance the data store on which the column should be placed * @param statement the used statement */ - public abstract void addColumnPlacement( LogicalTable catalogTable, String columnName, DataStore storeInstance, Statement statement ) throws UnknownAdapterException, PlacementNotExistsException, PlacementAlreadyExistsException, ColumnNotExistsException; + public abstract void addColumnPlacement( LogicalTable catalogTable, String columnName, DataStore storeInstance, Statement statement ); /** * Drop a specified column from a specified data store. If the column is part of the primary key, the column placement typ @@ -395,7 +370,7 @@ public static DdlManager getInstance() { * @param storeInstance the data store from which to remove the placement * @param statement the used statement */ - public abstract void dropColumnPlacement( LogicalTable catalogTable, String columnName, DataStore storeInstance, Statement statement ) throws UnknownAdapterException, PlacementNotExistsException, IndexPreventsRemovalException, LastPlacementException, PlacementIsPrimaryException, ColumnNotExistsException; + public abstract void dropColumnPlacement( LogicalTable catalogTable, String columnName, DataStore storeInstance, Statement statement ); /** * Change the owner of a table @@ -403,7 +378,7 @@ public static DdlManager getInstance() { * @param catalogTable the table * @param newOwnerName the name of the new owner */ - public abstract void alterTableOwner( LogicalTable catalogTable, String newOwnerName ) throws UnknownUserException; + public abstract void alterTableOwner( LogicalTable catalogTable, String newOwnerName ); /** * Rename a table (changing the logical name of the table) @@ -412,7 +387,7 @@ public static DdlManager getInstance() { * @param newTableName the new name for the table * @param statement the used statement */ - public abstract void renameTable( LogicalTable catalogTable, String newTableName, Statement statement ) throws EntityAlreadyExistsException; + public abstract void renameTable( LogicalTable catalogTable, String newTableName, Statement statement ); /** * Rename a column of a table (changing the logical name of the column) @@ -422,7 +397,7 @@ public static DdlManager getInstance() { * @param newColumnName the new name for the column * @param statement the used statement */ - public abstract void renameColumn( LogicalTable catalogTable, String columnName, String newColumnName, Statement statement ) throws ColumnAlreadyExistsException, ColumnNotExistsException; + public abstract void renameColumn( LogicalTable catalogTable, String columnName, String newColumnName, Statement statement ); public abstract void removeGraph( long graphId, boolean ifExists, Statement statement ); @@ -438,7 +413,7 @@ public static DdlManager getInstance() { * @param placementType which placement type should be used for the initial placements * @param statement the used statement */ - public abstract void createTable( long namespaceId, String tableName, List columns, List constraints, boolean ifNotExists, List stores, PlacementType placementType, Statement statement ) throws EntityAlreadyExistsException, ColumnNotExistsException, UnknownPartitionTypeException, UnknownColumnException, PartitionGroupNamesNotUniqueException; + public abstract void createTable( long namespaceId, String tableName, List columns, List constraints, boolean ifNotExists, List stores, PlacementType placementType, 
Statement statement ); /** * Create a new view @@ -448,7 +423,7 @@ public static DdlManager getInstance() { * @param algNode the algNode which was built from the Select part of the view * @param statement the used Statement */ - public abstract void createView( String viewName, long namespaceId, AlgNode algNode, AlgCollation algCollation, boolean replace, Statement statement, PlacementType placementType, List projectedColumns, String query, QueryLanguage language ) throws EntityAlreadyExistsException, GenericCatalogException, UnknownColumnException; + public abstract void createView( String viewName, long namespaceId, AlgNode algNode, AlgCollation algCollation, boolean replace, Statement statement, PlacementType placementType, List projectedColumns, String query, QueryLanguage language ); /** @@ -459,9 +434,9 @@ public static DdlManager getInstance() { * @param algRoot the relNode which was built from the Select part of the view * @param statement the used Statement */ - public abstract void createMaterializedView( String viewName, long namespaceId, AlgRoot algRoot, boolean replace, Statement statement, List stores, PlacementType placementType, List projectedColumns, MaterializedCriteria materializedCriteria, String query, QueryLanguage language, boolean ifNotExists, boolean ordered ) throws EntityAlreadyExistsException, GenericCatalogException, UnknownColumnException, ColumnNotExistsException, ColumnAlreadyExistsException; + public abstract void createMaterializedView( String viewName, long namespaceId, AlgRoot algRoot, boolean replace, Statement statement, List stores, PlacementType placementType, List projectedColumns, MaterializedCriteria materializedCriteria, String query, QueryLanguage language, boolean ifNotExists, boolean ordered ); - public abstract void createCollection( long namespaceId, String name, boolean ifNotExists, List stores, PlacementType placementType, Statement statement ) throws EntityAlreadyExistsException; + public abstract void createCollection( long namespaceId, String name, boolean ifNotExists, List stores, PlacementType placementType, Statement statement ); public abstract void addCollectionPlacement( long namespaceId, String name, List stores, Statement statement ); @@ -470,7 +445,7 @@ public static DdlManager getInstance() { * * @param partitionInfo the information concerning the partition */ - public abstract void addPartitioning( PartitionInformation partitionInfo, List stores, Statement statement ) throws GenericCatalogException, UnknownPartitionTypeException, UnknownColumnException, PartitionGroupNamesNotUniqueException, UnknownTableException, TransactionException, UnknownSchemaException, UnknownUserException, UnknownKeyException; + public abstract void addPartitioning( PartitionInformation partitionInfo, List stores, Statement statement ) throws TransactionException; /** * Removes partitioning from a table @@ -478,7 +453,7 @@ public static DdlManager getInstance() { * @param catalogTable the table to be merged * @param statement the used Statement */ - public abstract void removePartitioning( LogicalTable catalogTable, Statement statement ) throws GenericCatalogException, UnknownTableException, TransactionException, UnknownSchemaException, UnknownUserException, UnknownKeyException; + public abstract void removePartitioning( LogicalTable catalogTable, Statement statement ) throws TransactionException; /** * Adds a new constraint to a table @@ -489,7 +464,7 @@ public static DdlManager getInstance() { * @param columnIds the ids of the columns for which to create the constraint * @param tableId the id of the table */ - public abstract void addConstraint( long namespaceId, String constraintName, ConstraintType constraintType, List columnIds, long tableId ) throws UnknownColumnException, GenericCatalogException; + public abstract void addConstraint( long namespaceId, String constraintName, ConstraintType constraintType, List columnIds, long tableId ); /** * Drop a schema
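Since nearly the whole DDL surface is unchecked now, error handling can collapse into one funnel at the statement-processing boundary instead of a multi-catch per method. A hedged sketch; the handler shape and all identifiers are hypothetical:

```java
import java.util.List;

static boolean tryAddUniqueConstraint( DdlManager ddl, long namespaceId, long tableId, long nameColumnId ) {
    try {
        ddl.addConstraint( namespaceId, "u_name", ConstraintType.UNIQUE, List.of( nameColumnId ), tableId );
        return true;
    } catch ( GenericRuntimeException e ) {
        // one handler at the edge replaces the UnknownColumnException /
        // GenericCatalogException pair the old signature forced on callers
        return false;
    }
}
```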
@@ -498,7 +473,7 @@ public static DdlManager getInstance() { * @param ifExists whether to silently ignore if the schema does not exist * @param statement the used statement */ - public abstract void dropNamespace( String schemaName, boolean ifExists, Statement statement ) throws SchemaNotExistException, DdlOnSourceException; + public abstract void dropNamespace( String schemaName, boolean ifExists, Statement statement ); /** * Drop a table @@ -506,19 +481,19 @@ public static DdlManager getInstance() { * @param catalogTable the table to be dropped * @param statement the used statement */ - public abstract void dropTable( LogicalTable catalogTable, Statement statement ) throws DdlOnSourceException; + public abstract void dropTable( LogicalTable catalogTable, Statement statement ); /** * Drop View */ - public abstract void dropView( LogicalTable catalogTable, Statement statement ) throws DdlOnSourceException; + public abstract void dropView( LogicalTable catalogTable, Statement statement ); /** * @param materializedView to be dropped * @param statement the used statement */ - public abstract void dropMaterializedView( LogicalTable materializedView, Statement statement ) throws DdlOnSourceException; + public abstract void dropMaterializedView( LogicalTable materializedView, Statement statement ); /** * Truncate a table diff --git a/core/src/main/java/org/polypheny/db/ddl/exception/AlterSourceException.java b/core/src/main/java/org/polypheny/db/ddl/exception/AlterSourceException.java deleted file mode 100644 index 0ca8b8539f..0000000000 --- a/core/src/main/java/org/polypheny/db/ddl/exception/AlterSourceException.java +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright 2019-2022 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.ddl.exception; - -public class AlterSourceException extends Exception { - -} diff --git a/core/src/main/java/org/polypheny/db/ddl/exception/ColumnNotExistsException.java b/core/src/main/java/org/polypheny/db/ddl/exception/ColumnNotExistsException.java deleted file mode 100644 index 6625409480..0000000000 --- a/core/src/main/java/org/polypheny/db/ddl/exception/ColumnNotExistsException.java +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Copyright 2019-2022 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.ddl.exception; - -import org.apache.commons.lang.NotImplementedException; -import org.polypheny.db.catalog.Catalog; - -public class ColumnNotExistsException extends Exception { - - public final String tableName; - public final String columnName; - - - public ColumnNotExistsException( String tableName, String columnName ) { - this.tableName = tableName; - this.columnName = columnName; - } - - - public ColumnNotExistsException( long tableId, String columnName ) { - //this.tableName = Catalog.getInstance().getTable( tableId ).name; - this.columnName = columnName; - throw new NotImplementedException(); - } - -} diff --git a/core/src/main/java/org/polypheny/db/ddl/exception/DdlOnSourceException.java b/core/src/main/java/org/polypheny/db/ddl/exception/DdlOnSourceException.java deleted file mode 100644 index 173d72b8e4..0000000000 --- a/core/src/main/java/org/polypheny/db/ddl/exception/DdlOnSourceException.java +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright 2019-2022 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.ddl.exception; - -public class DdlOnSourceException extends Exception { - -} diff --git a/core/src/main/java/org/polypheny/db/ddl/exception/IndexExistsException.java b/core/src/main/java/org/polypheny/db/ddl/exception/IndexExistsException.java deleted file mode 100644 index 833462737c..0000000000 --- a/core/src/main/java/org/polypheny/db/ddl/exception/IndexExistsException.java +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright 2019-2022 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.polypheny.db.ddl.exception; - -public class IndexExistsException extends Exception { - -} diff --git a/core/src/main/java/org/polypheny/db/ddl/exception/IndexPreventsRemovalException.java b/core/src/main/java/org/polypheny/db/ddl/exception/IndexPreventsRemovalException.java deleted file mode 100644 index a15e6be8a6..0000000000 --- a/core/src/main/java/org/polypheny/db/ddl/exception/IndexPreventsRemovalException.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Copyright 2019-2022 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.ddl.exception; - -import lombok.Getter; - -public class IndexPreventsRemovalException extends Exception { - - @Getter - private final String columnName; - @Getter - private final String indexName; - - - public IndexPreventsRemovalException( String indexName, String columnName ) { - this.columnName = columnName; - this.indexName = indexName; - } - -} diff --git a/core/src/main/java/org/polypheny/db/ddl/exception/LastPlacementException.java b/core/src/main/java/org/polypheny/db/ddl/exception/LastPlacementException.java deleted file mode 100644 index 864ffe95b2..0000000000 --- a/core/src/main/java/org/polypheny/db/ddl/exception/LastPlacementException.java +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright 2019-2022 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.ddl.exception; - -public class LastPlacementException extends Exception { - -} diff --git a/core/src/main/java/org/polypheny/db/ddl/exception/MissingColumnPlacementException.java b/core/src/main/java/org/polypheny/db/ddl/exception/MissingColumnPlacementException.java deleted file mode 100644 index 0cbaeb11c9..0000000000 --- a/core/src/main/java/org/polypheny/db/ddl/exception/MissingColumnPlacementException.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Copyright 2019-2022 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.polypheny.db.ddl.exception; - -import lombok.Getter; - -public class MissingColumnPlacementException extends Exception { - - @Getter - private final String columnName; - - - public MissingColumnPlacementException( String columnName ) { - this.columnName = columnName; - } - -} diff --git a/core/src/main/java/org/polypheny/db/ddl/exception/NotMaterializedViewException.java b/core/src/main/java/org/polypheny/db/ddl/exception/NotMaterializedViewException.java deleted file mode 100644 index 3ce09a19f6..0000000000 --- a/core/src/main/java/org/polypheny/db/ddl/exception/NotMaterializedViewException.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Copyright 2019-2022 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.ddl.exception; - - -public class NotMaterializedViewException extends RuntimeException { - - public NotMaterializedViewException() { - super( "The provided entity is not a view." ); - } - -} diff --git a/core/src/main/java/org/polypheny/db/ddl/exception/NotNullAndDefaultValueException.java b/core/src/main/java/org/polypheny/db/ddl/exception/NotNullAndDefaultValueException.java deleted file mode 100644 index 82e835db3b..0000000000 --- a/core/src/main/java/org/polypheny/db/ddl/exception/NotNullAndDefaultValueException.java +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright 2019-2022 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.ddl.exception; - -public class NotNullAndDefaultValueException extends Exception { - -} diff --git a/core/src/main/java/org/polypheny/db/ddl/exception/NotViewException.java b/core/src/main/java/org/polypheny/db/ddl/exception/NotViewException.java deleted file mode 100644 index 1c0b3658e3..0000000000 --- a/core/src/main/java/org/polypheny/db/ddl/exception/NotViewException.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Copyright 2019-2022 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.polypheny.db.ddl.exception; - - -public class NotViewException extends RuntimeException { - - public NotViewException() { - super( "The provided entity is not a view." ); - } - -} diff --git a/core/src/main/java/org/polypheny/db/ddl/exception/PartitionGroupNamesNotUniqueException.java b/core/src/main/java/org/polypheny/db/ddl/exception/PartitionGroupNamesNotUniqueException.java deleted file mode 100644 index 3563cf5f9a..0000000000 --- a/core/src/main/java/org/polypheny/db/ddl/exception/PartitionGroupNamesNotUniqueException.java +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright 2019-2022 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.ddl.exception; - -public class PartitionGroupNamesNotUniqueException extends Exception { - -} diff --git a/core/src/main/java/org/polypheny/db/ddl/exception/PlacementAlreadyExistsException.java b/core/src/main/java/org/polypheny/db/ddl/exception/PlacementAlreadyExistsException.java deleted file mode 100644 index 33d810f20f..0000000000 --- a/core/src/main/java/org/polypheny/db/ddl/exception/PlacementAlreadyExistsException.java +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright 2019-2022 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.ddl.exception; - -public class PlacementAlreadyExistsException extends Exception { - -} diff --git a/core/src/main/java/org/polypheny/db/ddl/exception/PlacementIsPrimaryException.java b/core/src/main/java/org/polypheny/db/ddl/exception/PlacementIsPrimaryException.java deleted file mode 100644 index 54609051dd..0000000000 --- a/core/src/main/java/org/polypheny/db/ddl/exception/PlacementIsPrimaryException.java +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright 2019-2022 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.polypheny.db.ddl.exception; - -public class PlacementIsPrimaryException extends Exception { - -} diff --git a/core/src/main/java/org/polypheny/db/ddl/exception/PlacementNotExistsException.java b/core/src/main/java/org/polypheny/db/ddl/exception/PlacementNotExistsException.java deleted file mode 100644 index 1c8e4eabe3..0000000000 --- a/core/src/main/java/org/polypheny/db/ddl/exception/PlacementNotExistsException.java +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright 2019-2022 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.ddl.exception; - -public class PlacementNotExistsException extends Exception { - -} diff --git a/core/src/main/java/org/polypheny/db/ddl/exception/SchemaNotExistException.java b/core/src/main/java/org/polypheny/db/ddl/exception/SchemaNotExistException.java deleted file mode 100644 index d6e5f96595..0000000000 --- a/core/src/main/java/org/polypheny/db/ddl/exception/SchemaNotExistException.java +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright 2019-2022 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.ddl.exception; - -public class SchemaNotExistException extends Exception { - -} diff --git a/core/src/main/java/org/polypheny/db/ddl/exception/UnknownIndexMethodException.java b/core/src/main/java/org/polypheny/db/ddl/exception/UnknownIndexMethodException.java deleted file mode 100644 index 7825d25cb6..0000000000 --- a/core/src/main/java/org/polypheny/db/ddl/exception/UnknownIndexMethodException.java +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright 2019-2022 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.polypheny.db.ddl.exception; - -public class UnknownIndexMethodException extends Exception { - -} diff --git a/core/src/main/java/org/polypheny/db/iface/QueryInterfaceManager.java b/core/src/main/java/org/polypheny/db/iface/QueryInterfaceManager.java index 09aa0bf1aa..8680084cc5 100644 --- a/core/src/main/java/org/polypheny/db/iface/QueryInterfaceManager.java +++ b/core/src/main/java/org/polypheny/db/iface/QueryInterfaceManager.java @@ -36,7 +36,6 @@ import lombok.extern.slf4j.Slf4j; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogQueryInterface; -import org.polypheny.db.catalog.exceptions.UnknownQueryInterfaceException; import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.iface.QueryInterface.QueryInterfaceSetting; import org.polypheny.db.transaction.TransactionManager; @@ -201,7 +200,7 @@ public QueryInterface addQueryInterface( Catalog catalog, String clazzName, Stri } - public void removeQueryInterface( Catalog catalog, String uniqueName ) throws UnknownQueryInterfaceException { + public void removeQueryInterface( Catalog catalog, String uniqueName ) { uniqueName = uniqueName.toLowerCase(); if ( !interfaceByName.containsKey( uniqueName ) ) { throw new RuntimeException( "Unknown query interface: " + uniqueName ); diff --git a/core/src/main/java/org/polypheny/db/processing/Processor.java b/core/src/main/java/org/polypheny/db/processing/Processor.java index 18f9018358..c3569ea8ad 100644 --- a/core/src/main/java/org/polypheny/db/processing/Processor.java +++ b/core/src/main/java/org/polypheny/db/processing/Processor.java @@ -23,7 +23,6 @@ import org.polypheny.db.algebra.constant.Kind; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.exceptions.NoTablePrimaryKeyException; import org.polypheny.db.languages.QueryParameters; import org.polypheny.db.nodes.ExecutableStatement; import org.polypheny.db.nodes.Node; @@ -53,7 +52,7 @@ public PolyImplementation prepareDdl( Statement statement, Node parsed, QueryPar return getResult( statement, parsed, parameters ); } catch ( DeadlockException e ) { throw new RuntimeException( "Exception while acquiring global schema lock", e ); - } catch ( TransactionException | NoTablePrimaryKeyException e ) { + } catch ( TransactionException e ) { throw new RuntimeException( e ); } finally { // Release lock @@ -65,7 +64,7 @@ public PolyImplementation prepareDdl( Statement statement, Node parsed, QueryPar } - PolyImplementation getResult( Statement statement, Node parsed, QueryParameters parameters ) throws TransactionException, NoTablePrimaryKeyException { + PolyImplementation getResult( Statement statement, Node parsed, QueryParameters parameters ) throws TransactionException { ((ExecutableStatement) parsed).execute( statement.getPrepareContext(), statement, parameters ); statement.getTransaction().commit(); Catalog.getInstance().commit(); diff --git a/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java b/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java index 1611c3b09d..c0e7fcb6b9 100644 --- a/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java +++ b/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java @@ -103,10 +103,10 @@ import org.polypheny.db.algebra.type.AlgDataTypeFieldImpl; import org.polypheny.db.algebra.type.StructKind; import org.polypheny.db.catalog.entity.CatalogEntity; -import org.polypheny.db.catalog.entity.LogicalNamespace; import 
org.polypheny.db.catalog.entity.logical.LogicalCollection; import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.catalog.entity.physical.PhysicalEntity; import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.languages.OperatorRegistry; import org.polypheny.db.languages.QueryLanguage; @@ -1348,6 +1348,14 @@ public AlgBuilder scan( @Nonnull CatalogEntity entity ) { } + public AlgBuilder scan( @Nonnull PhysicalEntity entity ) { + final AlgNode scan = scanFactory.createScan( cluster, entity ); + push( scan ); + rename( entity.getLogicalRowType().getFieldNames() ); + return this; + } + + /** * Creates a {@link RelScan} of the table with a given name. * diff --git a/core/src/main/java/org/polypheny/db/transaction/TransactionManager.java b/core/src/main/java/org/polypheny/db/transaction/TransactionManager.java index b67b6e56bb..3756d268ae 100644 --- a/core/src/main/java/org/polypheny/db/transaction/TransactionManager.java +++ b/core/src/main/java/org/polypheny/db/transaction/TransactionManager.java @@ -19,9 +19,6 @@ import org.polypheny.db.catalog.entity.CatalogUser; import org.polypheny.db.catalog.entity.LogicalNamespace; -import org.polypheny.db.catalog.exceptions.GenericCatalogException; -import org.polypheny.db.catalog.exceptions.UnknownSchemaException; -import org.polypheny.db.catalog.exceptions.UnknownUserException; import org.polypheny.db.transaction.Transaction.MultimediaFlavor; @@ -31,9 +28,9 @@ public interface TransactionManager { Transaction startTransaction( CatalogUser user, LogicalNamespace defaultSchema, boolean analyze, String origin, MultimediaFlavor flavor ); - Transaction startTransaction( long userId, boolean analyze, String origin ) throws GenericCatalogException, UnknownUserException, UnknownSchemaException; + Transaction startTransaction( long userId, boolean analyze, String origin ); - Transaction startTransaction( long userId, boolean analyze, String origin, MultimediaFlavor flavor ) throws UnknownUserException, UnknownSchemaException; + Transaction startTransaction( long userId, boolean analyze, String origin, MultimediaFlavor flavor ); void removeTransaction( PolyXid xid ); diff --git a/core/src/test/java/org/polypheny/db/catalog/MockCatalog.java b/core/src/test/java/org/polypheny/db/catalog/MockCatalog.java index 43a2e4da82..b8522bd3c1 100644 --- a/core/src/test/java/org/polypheny/db/catalog/MockCatalog.java +++ b/core/src/test/java/org/polypheny/db/catalog/MockCatalog.java @@ -34,16 +34,8 @@ import org.polypheny.db.catalog.entity.CatalogQueryInterface; import org.polypheny.db.catalog.entity.CatalogUser; import org.polypheny.db.catalog.entity.LogicalNamespace; -import org.polypheny.db.catalog.entity.logical.LogicalEntity; -import org.polypheny.db.catalog.exceptions.NoTablePrimaryKeyException; -import org.polypheny.db.catalog.exceptions.UnknownSchemaException; import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.catalog.logistic.Pattern; -import org.polypheny.db.catalog.snapshot.AllocSnapshot; -import org.polypheny.db.catalog.snapshot.LogicalDocSnapshot; -import org.polypheny.db.catalog.snapshot.LogicalGraphSnapshot; -import org.polypheny.db.catalog.snapshot.LogicalRelSnapshot; -import org.polypheny.db.catalog.snapshot.PhysicalSnapshot; import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.transaction.Transaction; @@ -85,6 +77,7 @@ public Map getInterfaces() { throw new NotImplementedException(); } + @Override public LogicalRelationalCatalog getLogicalRel( long namespaceId ) { throw new NotImplementedException();
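Returning to the AlgBuilder change a few hunks up: the new scan( PhysicalEntity ) overload pushes the physical scan and immediately renames the row type back to the logical field names, so downstream operators keep addressing logical columns. A hypothetical use inside a routing rule; the builder, the resolved physical entity, and the field names are assumptions:

```java
// Sketch only: 'builder' is an AlgBuilder and 'physical' a PhysicalEntity
// obtained from the physical snapshot; neither lookup is shown here.
static AlgNode scanPhysical( AlgBuilder builder, PhysicalEntity physical ) {
    return builder
            .scan( physical )   // physical scan, renamed to the logical row type
            .project( builder.field( "empid" ), builder.field( "name" ) )
            .build();
}
```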
@@ -121,7 +114,6 @@ public AllocationGraphCatalog getAllocGraph( long namespaceId ) { } - @Override public PhysicalCatalog getPhysical( long namespaceId ) { throw new NotImplementedException(); @@ -146,9 +138,8 @@ public Snapshot getSnapshot() { } - @Override - public void commit() throws NoTablePrimaryKeyException { + public void commit() { throw new NotImplementedException(); } @@ -204,13 +195,12 @@ private CatalogDatabase getDatabase( long databaseId ) { } - private List getSchemas( long databaseId, Pattern schemaNamePattern ) { throw new NotImplementedException(); } - private LogicalNamespace getNamespace( long databaseId, String schemaName ) throws UnknownSchemaException { + private LogicalNamespace getNamespace( long databaseId, String schemaName ) { throw new NotImplementedException(); } @@ -221,7 +211,6 @@ public long addNamespace( String name, NamespaceType namespaceType, boolean case } - @Override public void renameNamespace( long schemaId, String name ) { throw new NotImplementedException(); diff --git a/dbms/src/main/java/org/polypheny/db/PolyphenyDb.java b/dbms/src/main/java/org/polypheny/db/PolyphenyDb.java index 5b64190e1b..e79e181dac 100644 --- a/dbms/src/main/java/org/polypheny/db/PolyphenyDb.java +++ b/dbms/src/main/java/org/polypheny/db/PolyphenyDb.java @@ -35,11 +35,6 @@ import org.polypheny.db.catalog.Adapter; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogAdapter.AdapterType; -import org.polypheny.db.catalog.exceptions.GenericCatalogException; -import org.polypheny.db.catalog.exceptions.UnknownKeyException; -import org.polypheny.db.catalog.exceptions.UnknownSchemaException; -import org.polypheny.db.catalog.exceptions.UnknownTableException; -import org.polypheny.db.catalog.exceptions.UnknownUserException; import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.config.ConfigManager; import org.polypheny.db.config.RuntimeConfig; @@ -166,7 +161,7 @@ public static void main( final String[] args ) { } - public void runPolyphenyDb() throws GenericCatalogException { + public void runPolyphenyDb() { if ( resetDocker ) { log.warn( "[-resetDocker] option is set, this option is only for development."
); } @@ -397,7 +392,7 @@ public void join( final long millis ) throws InterruptedException { try { IndexManager.getInstance().initialize( transactionManager ); IndexManager.getInstance().restoreIndexes(); - } catch ( UnknownUserException | UnknownSchemaException | UnknownTableException | TransactionException | UnknownKeyException e ) { + } catch ( TransactionException e ) { throw new RuntimeException( "Something went wrong while initializing index manager.", e ); } diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java index a689675253..641f1da6b7 100644 --- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java @@ -30,6 +30,7 @@ import java.util.Set; import java.util.stream.Collectors; import javax.annotation.Nullable; +import lombok.NonNull; import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.StringUtils; import org.polypheny.db.StatisticsManager; @@ -75,22 +76,7 @@ import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.entity.physical.PhysicalEntity; -import org.polypheny.db.catalog.exceptions.ColumnAlreadyExistsException; -import org.polypheny.db.catalog.exceptions.EntityAlreadyExistsException; -import org.polypheny.db.catalog.exceptions.GenericCatalogException; -import org.polypheny.db.catalog.exceptions.NamespaceAlreadyExistsException; -import org.polypheny.db.catalog.exceptions.UnknownAdapterException; -import org.polypheny.db.catalog.exceptions.UnknownCollationException; -import org.polypheny.db.catalog.exceptions.UnknownColumnException; -import org.polypheny.db.catalog.exceptions.UnknownConstraintException; -import org.polypheny.db.catalog.exceptions.UnknownForeignKeyException; -import org.polypheny.db.catalog.exceptions.UnknownGraphException; -import org.polypheny.db.catalog.exceptions.UnknownIndexException; -import org.polypheny.db.catalog.exceptions.UnknownKeyException; -import org.polypheny.db.catalog.exceptions.UnknownPartitionTypeException; -import org.polypheny.db.catalog.exceptions.UnknownSchemaException; -import org.polypheny.db.catalog.exceptions.UnknownTableException; -import org.polypheny.db.catalog.exceptions.UnknownUserException; +import org.polypheny.db.catalog.exceptions.GenericRuntimeException; import org.polypheny.db.catalog.logistic.Collation; import org.polypheny.db.catalog.logistic.ConstraintType; import org.polypheny.db.catalog.logistic.DataPlacementRole; @@ -106,22 +92,15 @@ import org.polypheny.db.catalog.snapshot.LogicalRelSnapshot; import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.config.RuntimeConfig; -import org.polypheny.db.ddl.exception.AlterSourceException; -import org.polypheny.db.ddl.exception.ColumnNotExistsException; import org.polypheny.db.ddl.exception.DdlOnSourceException; -import org.polypheny.db.ddl.exception.IndexExistsException; import org.polypheny.db.ddl.exception.IndexPreventsRemovalException; import org.polypheny.db.ddl.exception.LastPlacementException; -import org.polypheny.db.ddl.exception.MissingColumnPlacementException; import org.polypheny.db.ddl.exception.NotMaterializedViewException; -import org.polypheny.db.ddl.exception.NotNullAndDefaultValueException; import org.polypheny.db.ddl.exception.NotViewException; import org.polypheny.db.ddl.exception.PartitionGroupNamesNotUniqueException; -import 
org.polypheny.db.ddl.exception.PlacementAlreadyExistsException; import org.polypheny.db.ddl.exception.PlacementIsPrimaryException; import org.polypheny.db.ddl.exception.PlacementNotExistsException; import org.polypheny.db.ddl.exception.SchemaNotExistException; -import org.polypheny.db.ddl.exception.UnknownIndexMethodException; import org.polypheny.db.languages.QueryLanguage; import org.polypheny.db.monitoring.events.DdlEvent; import org.polypheny.db.monitoring.events.StatementEvent; @@ -133,7 +112,6 @@ import org.polypheny.db.partition.raw.RawTemperaturePartitionInformation; import org.polypheny.db.processing.DataMigrator; import org.polypheny.db.routing.RoutingManager; -import org.polypheny.db.runtime.PolyphenyDbContextException; import org.polypheny.db.runtime.PolyphenyDbException; import org.polypheny.db.transaction.Statement; import org.polypheny.db.transaction.TransactionException; @@ -153,9 +131,9 @@ public DdlManagerImpl( Catalog catalog ) { } - private void checkIfDdlPossible( EntityType entityType ) throws DdlOnSourceException { + private void checkIfDdlPossible( EntityType entityType ) { if ( entityType == EntityType.SOURCE ) { - throw new DdlOnSourceException(); + throw new GenericRuntimeException( "Cannot use DDLs on sources" ); } } @@ -183,29 +161,29 @@ private void addDefaultValue( long namespaceId, String defaultValue, long addedC } - protected DataStore getDataStoreInstance( long storeId ) throws DdlOnSourceException { + protected DataStore getDataStoreInstance( long storeId ) { Adapter adapterInstance = AdapterManager.getInstance().getAdapter( storeId ); if ( adapterInstance == null ) { - throw new RuntimeException( "Unknown store id: " + storeId ); + throw new GenericRuntimeException( "Unknown store id: %d", storeId ); } // Make sure it is a data store instance if ( adapterInstance instanceof DataStore ) { return (DataStore) adapterInstance; } else if ( adapterInstance instanceof DataSource ) { - throw new DdlOnSourceException(); + throw new GenericRuntimeException( "Cannot use DDLs on sources" ); } else { - throw new RuntimeException( "Unknown kind of adapter: " + adapterInstance.getClass().getName() ); + throw new GenericRuntimeException( "Unknown kind of adapter: %s", adapterInstance.getClass().getName() ); } } - private LogicalColumn getCatalogColumn( long namespaceId, long tableId, String columnName ) throws ColumnNotExistsException { + private LogicalColumn getCatalogColumn( long namespaceId, long tableId, String columnName ) { return catalog.getSnapshot().rel().getColumn( tableId, columnName ); } @Override - public long createNamespace( String name, NamespaceType type, boolean ifNotExists, boolean replace ) throws NamespaceAlreadyExistsException { + public long createNamespace( String name, NamespaceType type, boolean ifNotExists, boolean replace ) { name = name.toLowerCase(); // Check if there is already a schema with this name if ( catalog.getSnapshot().checkIfExistsNamespace( name ) ) { @@ -213,13 +191,11 @@ public long createNamespace( String name, NamespaceType type, boolean ifNotExist // It is ok that there is already a schema with this name because "IF NOT EXISTS" was specified return catalog.getSnapshot().getNamespace( name ).id; } else if ( replace ) { - throw new RuntimeException( "Replacing namespace is not yet supported." ); - } else { - throw new NamespaceAlreadyExistsException(); + throw new GenericRuntimeException( "Replacing namespace is not yet supported."
); } - } else { - return catalog.addNamespace( name, type, false ); } + return catalog.addNamespace( name, type, false ); + } @@ -240,7 +216,7 @@ private void handleSource( DataSource adapter ) { exportedColumns = adapter.getExportedColumns(); } catch ( Exception e ) { AdapterManager.getInstance().removeAdapter( adapter.getAdapterId() ); - throw new RuntimeException( "Could not deploy adapter", e ); + throw new GenericRuntimeException( "Could not deploy adapter", e ); } // Create table, columns etc. for ( Map.Entry> entry : exportedColumns.entrySet() ) { @@ -289,29 +265,27 @@ private void handleSource( DataSource adapter ) { physicalTableName = exportedColumn.physicalTableName; } } - try { - catalog.getLogicalRel( defaultNamespaceId ).addPrimaryKey( table.id, primaryKeyColIds ); - LogicalTable catalogTable = catalog.getSnapshot().rel().getTable( table.id ); - CatalogDataPlacement placement = catalog.getSnapshot().alloc().getDataPlacements( catalogTable.id ).get( 0 ); - catalog.getAllocRel( defaultNamespaceId ) - .addPartitionPlacement( - catalogTable.namespaceId, - adapter.getAdapterId(), - catalogTable.id, - placement.getAdapterId(), - PlacementType.AUTOMATIC, - DataPlacementRole.UPTODATE ); - } catch ( GenericCatalogException e ) { - throw new RuntimeException( "Exception while adding primary key" ); - } + catalog.getLogicalRel( defaultNamespaceId ).addPrimaryKey( table.id, primaryKeyColIds ); + LogicalTable catalogTable = catalog.getSnapshot().rel().getTable( table.id ); + + CatalogDataPlacement placement = catalog.getSnapshot().alloc().getDataPlacements( catalogTable.id ).get( 0 ); + catalog.getAllocRel( defaultNamespaceId ) + .addPartitionPlacement( + catalogTable.namespaceId, + adapter.getAdapterId(), + catalogTable.id, + placement.getAdapterId(), + PlacementType.AUTOMATIC, + DataPlacementRole.UPTODATE ); + } } @Override - public void dropAdapter( String name, Statement statement ) throws UnknownAdapterException { + public void dropAdapter( String name, Statement statement ) { long defaultNamespaceId = 1; if ( name.startsWith( "'" ) ) { name = name.substring( 1 ); @@ -333,7 +307,7 @@ public void dropAdapter( String name, Statement statement ) throws UnknownAdapte // Make sure that there is only one adapter if ( catalog.getSnapshot().alloc().getDataPlacements( collection.id ).size() != 1 ) { - throw new RuntimeException( "The data source contains collections with more than one placement. This should not happen!" ); + throw new GenericRuntimeException( "The data source contains collections with more than one placement. This should not happen!" ); } dropCollection( collection, statement ); @@ -355,11 +329,7 @@ public void dropAdapter( String name, Statement statement ) throws UnknownAdapte // Remove foreign keys for ( Long tableId : tablesToDrop ) { for ( CatalogForeignKey fk : catalog.getSnapshot().rel().getForeignKeys( tableId ) ) { - try { - catalog.getLogicalRel( defaultNamespaceId ).deleteForeignKey( fk.id ); - } catch ( GenericCatalogException e ) { - throw new PolyphenyDbContextException( "Exception while dropping foreign key", e ); - } + catalog.getLogicalRel( defaultNamespaceId ).deleteForeignKey( fk.id ); } } // Drop tables @@ -368,12 +338,12 @@ public void dropAdapter( String name, Statement statement ) throws UnknownAdapte // Make sure that there is only one adapter if ( catalog.getSnapshot().alloc().getDataPlacements( tableId ).size() != 1 ) { - throw new RuntimeException( "The data source contains tables with more than one placement. This should not happen!" 
); + throw new GenericRuntimeException( "The data source contains tables with more than one placement. This should not happen!" ); } // Make sure table is of type source if ( table.entityType != EntityType.SOURCE ) { - throw new RuntimeException( "Trying to drop a table located on a data source which is not of table type SOURCE. This should not happen!" ); + throw new GenericRuntimeException( "Trying to drop a table located on a data source which is not of table type SOURCE. This should not happen!" ); } AllocationEntity entity = catalog.getSnapshot().alloc().getAllocation( catalogAdapter.id, tableId ); // Delete column placement in catalog @@ -384,11 +354,7 @@ public void dropAdapter( String name, Statement statement ) throws UnknownAdapte } // Remove primary keys - try { - catalog.getLogicalRel( defaultNamespaceId ).deletePrimaryKey( table.id ); - } catch ( GenericCatalogException e ) { - throw new PolyphenyDbContextException( "Exception while dropping primary key", e ); - } + catalog.getLogicalRel( defaultNamespaceId ).deletePrimaryKey( table.id ); // Delete columns for ( LogicalColumn column : catalog.getSnapshot().rel().getColumns( tableId ) ) { @@ -407,10 +373,10 @@ public void dropAdapter( String name, Statement statement ) throws UnknownAdapte @Override - public void renameSchema( String newName, String oldName ) throws NamespaceAlreadyExistsException, UnknownSchemaException { + public void renameNamespace( String newName, String oldName ) { newName = newName.toLowerCase(); if ( catalog.getSnapshot().checkIfExistsNamespace( newName ) ) { - throw new NamespaceAlreadyExistsException(); + throw new GenericRuntimeException( "The name of the namespace is already in use" ); } LogicalNamespace logicalNamespace = catalog.getSnapshot().getNamespace( oldName ); catalog.renameNamespace( logicalNamespace.id, newName ); @@ -421,10 +387,10 @@ public void renameSchema( String newName, String oldName ) throws NamespaceAlrea @Override - public void addColumnToSourceTable( LogicalTable catalogTable, String columnPhysicalName, String columnLogicalName, String beforeColumnName, String afterColumnName, String defaultValue, Statement statement ) throws ColumnAlreadyExistsException, DdlOnSourceException, ColumnNotExistsException { + public void addColumnToSourceTable( LogicalTable catalogTable, String columnPhysicalName, String columnLogicalName, String beforeColumnName, String afterColumnName, String defaultValue, Statement statement ) { if ( catalog.getSnapshot().rel().checkIfExistsColumn( catalogTable.id, columnLogicalName ) ) { - throw new ColumnAlreadyExistsException( columnLogicalName, catalogTable.name ); + throw new GenericRuntimeException( "There already exists a column with name %s on table %s", columnLogicalName, catalogTable.name ); } LogicalColumn beforeColumn = beforeColumnName == null ? null : getCatalogColumn( catalogTable.namespaceId, catalogTable.id, beforeColumnName ); @@ -432,12 +398,12 @@ public void addColumnToSourceTable( LogicalTable catalogPhys // Make sure that the table is of table type SOURCE if ( catalogTable.entityType != EntityType.SOURCE ) { - throw new RuntimeException( "Illegal operation on table of type " + catalogTable.entityType ); + throw new GenericRuntimeException( "Illegal operation on table of type %s", catalogTable.entityType ); } // Make sure there is only one adapter if ( catalog.getSnapshot().alloc().getDataPlacements( catalogTable.id ).size() != 1 ) { - throw new RuntimeException( "The table has an unexpected number of placements!"
); + throw new GenericRuntimeException( "The table has an unexpected number of placements!" ); } long adapterId = catalog.getSnapshot().alloc().getAllocationsFromLogical( catalogTable.id ).get( 0 ).adapterId; @@ -454,7 +420,7 @@ public void addColumnToSourceTable( LogicalTable catalogTable, String columnPhys } } if ( exportedColumn == null ) { - throw new RuntimeException( "Invalid physical column name '" + columnPhysicalName + "'!" ); + throw new GenericRuntimeException( "Invalid physical column name '%s'", columnPhysicalName ); } // Make sure this physical column has not already been added to this table @@ -519,15 +485,15 @@ private int updateAdjacentPositions( LogicalTable catalogTable, LogicalColumn be @Override - public void addColumn( String columnName, LogicalTable catalogTable, String beforeColumnName, String afterColumnName, ColumnTypeInformation type, boolean nullable, String defaultValue, Statement statement ) throws NotNullAndDefaultValueException, ColumnAlreadyExistsException, ColumnNotExistsException { + public void addColumn( String columnName, LogicalTable catalogTable, String beforeColumnName, String afterColumnName, ColumnTypeInformation type, boolean nullable, String defaultValue, Statement statement ) { columnName = adjustNameIfNeeded( columnName, catalogTable.namespaceId ); // Check if the column either allows null values or has a default value defined. if ( defaultValue == null && !nullable ) { - throw new NotNullAndDefaultValueException(); + throw new GenericRuntimeException( "Column is not nullable and does not have a default value defined." ); } if ( catalog.getSnapshot().rel().checkIfExistsColumn( catalogTable.id, columnName ) ) { - throw new ColumnAlreadyExistsException( columnName, catalogTable.name ); + throw new GenericRuntimeException( "There already exists a column with name %s on table %s", columnName, catalogTable.name ); } // LogicalColumn beforeColumn = beforeColumnName == null ? 
null : getCatalogColumn( catalogTable.namespaceId, catalogTable.id, beforeColumnName ); @@ -573,7 +539,7 @@ public void addColumn( String columnName, LogicalTable catalogTable, String befo @Override - public void addForeignKey( LogicalTable catalogTable, LogicalTable refTable, List columnNames, List refColumnNames, String constraintName, ForeignKeyOption onUpdate, ForeignKeyOption onDelete ) throws UnknownColumnException, GenericCatalogException { + public void addForeignKey( LogicalTable catalogTable, LogicalTable refTable, List columnNames, List refColumnNames, String constraintName, ForeignKeyOption onUpdate, ForeignKeyOption onDelete ) { List columnIds = new LinkedList<>(); for ( String columnName : columnNames ) { LogicalColumn logicalColumn = catalog.getSnapshot().rel().getColumn( catalogTable.id, columnName ); @@ -589,7 +555,7 @@ public void addForeignKey( LogicalTable catalogTable, LogicalTable refTable, Lis @Override - public void addIndex( LogicalTable catalogTable, String indexMethodName, List columnNames, String indexName, boolean isUnique, DataStore location, Statement statement ) throws UnknownColumnException, UnknownIndexMethodException, GenericCatalogException, UnknownTableException, UnknownUserException, UnknownSchemaException, UnknownKeyException, TransactionException, AlterSourceException, IndexExistsException, MissingColumnPlacementException { + public void addIndex( LogicalTable catalogTable, String indexMethodName, List columnNames, String indexName, boolean isUnique, DataStore location, Statement statement ) throws TransactionException { List columnIds = new LinkedList<>(); for ( String columnName : columnNames ) { LogicalColumn logicalColumn = catalog.getSnapshot().rel().getColumn( catalogTable.id, columnName ); @@ -600,12 +566,12 @@ public void addIndex( LogicalTable catalogTable, String indexMethodName, List - private void addDataStoreIndex( LogicalTable catalogTable, String indexMethodName, String indexName, boolean isUnique, DataStore location, Statement statement, List columnIds, IndexType type ) throws MissingColumnPlacementException, UnknownIndexMethodException, GenericCatalogException { + private void addDataStoreIndex( LogicalTable catalogTable, String indexMethodName, String indexName, boolean isUnique, DataStore location, Statement statement, List columnIds, IndexType type ) { // Check if all required columns are present on this store for ( long columnId : columnIds ) { if ( !catalog.getSnapshot().alloc().checkIfExistsColumnPlacement( location.getAdapterId(), columnId ) ) { - throw new MissingColumnPlacementException( catalog.getSnapshot().rel().getColumn( columnId ).name ); + throw new GenericRuntimeException( "Not all required columns for this index are placed on this store. e.g. %s", catalog.getSnapshot().rel().getColumn( columnId ).name ); } } @@ -688,7 +654,7 @@ private void addDataStoreIndex( LogicalTable catalogTable, String indexMethodNam } } if ( aim == null ) { - throw new UnknownIndexMethodException(); + throw new GenericRuntimeException( "The used index method is not known."
); } method = aim.name; methodDisplayName = aim.displayName; @@ -714,7 +680,7 @@ private void addDataStoreIndex( LogicalTable catalogTable, String indexMethodNam } - public void addPolyphenyIndex( LogicalTable catalogTable, String indexMethodName, List columnNames, String indexName, boolean isUnique, Statement statement ) throws UnknownColumnException, UnknownIndexMethodException, GenericCatalogException, UnknownTableException, UnknownUserException, UnknownSchemaException, UnknownKeyException, TransactionException, AlterSourceException, IndexExistsException, MissingColumnPlacementException { + public void addPolyphenyIndex( LogicalTable catalogTable, String indexMethodName, List columnNames, String indexName, boolean isUnique, Statement statement ) throws TransactionException { indexName = indexName.toLowerCase(); List columnIds = new LinkedList<>(); for ( String columnName : columnNames ) { @@ -726,12 +692,12 @@ public void addPolyphenyIndexName // Make sure that this is a table of type TABLE (and not SOURCE) if ( catalogTable.entityType != EntityType.ENTITY && catalogTable.entityType != EntityType.MATERIALIZED_VIEW ) { - throw new RuntimeException( "It is only possible to add an index to a " + catalogTable.entityType.name() ); + throw new GenericRuntimeException( "It is only possible to add an index to a %s", catalogTable.entityType.name() ); } // Check if there is already an index with this name for this table if ( catalog.getSnapshot().rel().checkIfExistsIndex( catalogTable.id, indexName ) ) { - throw new IndexExistsException(); + throw new GenericRuntimeException( "There already exists an index with this name %s", indexName ); } String method; @@ -744,7 +710,7 @@ public void addPolyphenyIndexName } } if ( aim == null ) { - throw new UnknownIndexMethodException(); + throw new GenericRuntimeException( "The index method is not known" ); } method = aim.name; methodDisplayName = aim.displayName; @@ -768,15 +734,15 @@ public void addPolyphenyIndexName @Override - public void addDataPlacement( LogicalTable catalogTable, List columnIds, List partitionGroupIds, List partitionGroupNames, DataStore dataStore, Statement statement ) throws PlacementAlreadyExistsException { + public void addDataPlacement( LogicalTable catalogTable, List columnIds, List partitionGroupIds, List partitionGroupNames, DataStore dataStore, Statement statement ) { List addedColumns = new LinkedList<>(); List tempPartitionGroupList = new ArrayList<>(); if ( catalog.getSnapshot().alloc().getDataPlacement( catalogTable.id, dataStore.getAdapterId() ) == null ) { - throw new PlacementAlreadyExistsException(); + throw new GenericRuntimeException( "The placement already exists" ); } else { - catalog.getAllocRel( catalogTable.namespaceId ).createAlloctionTable( dataStore.getAdapterId(), catalogTable.id ); + catalog.getAllocRel( catalogTable.namespaceId ).createAllocationTable( dataStore.getAdapterId(), catalogTable.id ); } // Check whether the list is empty (this is a shorthand for a full placement) @@ -800,8 +766,8 @@ public void addDataPlacement( LogicalTable catalogTable, List columnIds, L // Abort if a manual partitionList has been specified even though the data placement has already been partitioned if ( isDataPlacementPartitioned ) { - throw new RuntimeException( "WARNING: The Data Placement for table: '" + catalogTable.name + "' on store: '" - + dataStore.getUniqueName() + "'
already contains manually specified partitions: " + currentPartList + ". Use 'ALTER TABLE ... MODIFY PARTITIONS...' instead" ); + throw new GenericRuntimeException( "WARNING: The Data Placement for table: '%s' on store: " + + "'%s' already contains manually specified partitions: %s. Use 'ALTER TABLE ... MODIFY PARTITIONS...' instead", catalogTable.name, dataStore.getUniqueName(), currentPartList ); } log.debug( "Table is partitioned and concrete partitionList has been specified " ); @@ -811,15 +777,15 @@ public void addDataPlacement( LogicalTable catalogTable, List columnIds, L try { tempPartitionGroupList.add( property.partitionGroupIds.get( partitionGroupId ) ); } catch ( IndexOutOfBoundsException e ) { - throw new RuntimeException( "Specified Partition-Index: '" + partitionGroupId + "' is not part of table '" - + catalogTable.name + "', has only " + property.numPartitionGroups + " partitions" ); + throw new GenericRuntimeException( "Specified Partition-Index: '%s' is not part of table " + + "'%s', has only %s partitions", partitionGroupId, catalogTable.name, property.numPartitionGroups ); } } } else if ( !partitionGroupNames.isEmpty() && partitionGroupIds.isEmpty() ) { if ( isDataPlacementPartitioned ) { - throw new RuntimeException( "WARNING: The Data Placement for table: '" + catalogTable.name + "' on store: '" - + dataStore.getUniqueName() + "' already contains manually specified partitions: " + currentPartList + ". Use 'ALTER TABLE ... MODIFY PARTITIONS...' instead" ); + throw new GenericRuntimeException( "WARNING: The Data Placement for table: '%s' on store: " + + "'%s' already contains manually specified partitions: %s. Use 'ALTER TABLE ... MODIFY PARTITIONS...' instead", catalogTable.name, dataStore.getUniqueName(), currentPartList ); } List catalogPartitionGroups = catalog.getSnapshot().alloc().getPartitionGroups( tableId ); @@ -833,8 +799,8 @@ public void addDataPlacement( LogicalTable catalogTable, List columnIds, L } } if ( !isPartOfTable ) { - throw new RuntimeException( "Specified Partition-Name: '" + partitionName + "' is not part of table '" - + catalogTable.name + "'. Available partitions: " + String.join( ",", catalog.getSnapshot().alloc().getPartitionGroupNames( tableId ) ) ); + throw new GenericRuntimeException( "Specified Partition-Name: '%s' is not part of table " + + "'%s'. 
Available partitions: %s", partitionName, catalogTable.name, String.join( ",", catalog.getSnapshot().alloc().getPartitionGroupNames( tableId ) ) ); } } @@ -906,7 +872,7 @@ else if ( partitionGroupIds.isEmpty() && partitionGroupNames.isEmpty() ) { @Override - public void addPrimaryKey( LogicalTable catalogTable, List columnNames, Statement statement ) throws DdlOnSourceException { + public void addPrimaryKey( LogicalTable catalogTable, List columnNames, Statement statement ) { // Make sure that this is a table of type TABLE (and not SOURCE) checkIfDdlPossible( catalogTable.entityType ); @@ -945,30 +911,27 @@ public void addPrimaryKey( LogicalTable catalogTable, List columnNames, @Override - public void addUniqueConstraint( LogicalTable catalogTable, List columnNames, String constraintName ) throws DdlOnSourceException { + public void addUniqueConstraint( LogicalTable catalogTable, List columnNames, String constraintName ) { // Make sure that this is a table of type TABLE (and not SOURCE) checkIfDdlPossible( catalogTable.entityType ); checkModelLogic( catalogTable, null ); - try { - List columnIds = new LinkedList<>(); - for ( String columnName : columnNames ) { - LogicalColumn logicalColumn = catalog.getSnapshot().rel().getColumn( catalogTable.id, columnName ); - columnIds.add( logicalColumn.id ); - } - catalog.getLogicalRel( catalogTable.namespaceId ).addUniqueConstraint( catalogTable.id, constraintName, columnIds ); - } catch ( GenericCatalogException | UnknownColumnException e ) { - throw new RuntimeException( e ); + List columnIds = new LinkedList<>(); + for ( String columnName : columnNames ) { + LogicalColumn logicalColumn = catalog.getSnapshot().rel().getColumn( catalogTable.id, columnName ); + columnIds.add( logicalColumn.id ); } + catalog.getLogicalRel( catalogTable.namespaceId ).addUniqueConstraint( catalogTable.id, constraintName, columnIds ); + } @Override - public void dropColumn( LogicalTable catalogTable, String columnName, Statement statement ) throws ColumnNotExistsException { + public void dropColumn( LogicalTable catalogTable, String columnName, Statement statement ) { List columns = catalog.getSnapshot().rel().getColumns( catalogTable.id ); if ( columns.size() < 2 ) { - throw new RuntimeException( "Cannot drop sole column of table " + catalogTable.name ); + throw new GenericRuntimeException( "Cannot drop sole column of table %s", catalogTable.name ); } // check if model permits operation @@ -1033,7 +996,7 @@ public void dropColumn( LogicalTable catalogTable, String columnName, Statement private void checkModelLogic( LogicalTable catalogTable ) { if ( catalogTable.namespaceType == NamespaceType.DOCUMENT ) { - throw new RuntimeException( "Modification operation is not allowed by schema type DOCUMENT" ); + throw new GenericRuntimeException( "Modification operation is not allowed by schema type DOCUMENT" ); } } @@ -1041,73 +1004,62 @@ private void checkModelLogic( LogicalTable catalogTable ) { private void checkModelLogic( LogicalTable catalogTable, String columnName ) { if ( catalogTable.namespaceType == NamespaceType.DOCUMENT && (columnName.equals( "_data" ) || columnName.equals( "_id" )) ) { - throw new RuntimeException( "Modification operation is not allowed by schema type DOCUMENT" ); + throw new GenericRuntimeException( "Modification operation is not allowed by schema type DOCUMENT" ); } } @Override - public void dropConstraint( LogicalTable catalogTable, String constraintName ) throws DdlOnSourceException { + public void dropConstraint( LogicalTable catalogTable, 
String constraintName ) { // Make sure that this is a table of type TABLE (and not SOURCE) checkIfDdlPossible( catalogTable.entityType ); - try { - CatalogConstraint constraint = catalog.getSnapshot().rel().getConstraint( catalogTable.id, constraintName ); - catalog.getLogicalRel( catalogTable.namespaceId ).deleteConstraint( constraint.id ); - } catch ( GenericCatalogException | UnknownConstraintException e ) { - throw new RuntimeException( e ); - } + CatalogConstraint constraint = catalog.getSnapshot().rel().getConstraint( catalogTable.id, constraintName ); + catalog.getLogicalRel( catalogTable.namespaceId ).deleteConstraint( constraint.id ); } @Override - public void dropForeignKey( LogicalTable catalogTable, String foreignKeyName ) throws DdlOnSourceException { + public void dropForeignKey( LogicalTable catalogTable, String foreignKeyName ) { // Make sure that this is a table of type TABLE (and not SOURCE) checkIfDdlPossible( catalogTable.entityType ); - try { - CatalogForeignKey foreignKey = catalog.getSnapshot().rel().getForeignKey( catalogTable.id, foreignKeyName ); - catalog.getLogicalRel( catalogTable.namespaceId ).deleteForeignKey( foreignKey.id ); - } catch ( GenericCatalogException | UnknownForeignKeyException e ) { - throw new RuntimeException( e ); - } + CatalogForeignKey foreignKey = catalog.getSnapshot().rel().getForeignKey( catalogTable.id, foreignKeyName ); + catalog.getLogicalRel( catalogTable.namespaceId ).deleteForeignKey( foreignKey.id ); } @Override - public void dropIndex( LogicalTable catalogTable, String indexName, Statement statement ) throws DdlOnSourceException { + public void dropIndex( LogicalTable catalogTable, String indexName, Statement statement ) { // Make sure that this is a table of type TABLE (and not SOURCE) checkIfDdlPossible( catalogTable.entityType ); - try { - CatalogIndex index = catalog.getSnapshot().rel().getIndex( catalogTable.id, indexName ); + CatalogIndex index = catalog.getSnapshot().rel().getIndex( catalogTable.id, indexName ); - if ( index.location == 0 ) { - IndexManager.getInstance().deleteIndex( index ); - } else { - DataStore storeInstance = AdapterManager.getInstance().getStore( index.location ); - storeInstance.dropIndex( statement.getPrepareContext(), index, catalog.getSnapshot().alloc().getPartitionsOnDataPlacement( index.location, catalogTable.id ) ); - } - - catalog.getLogicalRel( catalogTable.namespaceId ).deleteIndex( index.id ); - } catch ( UnknownIndexException e ) { - throw new RuntimeException( e ); + if ( index.location == 0 ) { + IndexManager.getInstance().deleteIndex( index ); + } else { + DataStore storeInstance = AdapterManager.getInstance().getStore( index.location ); + storeInstance.dropIndex( statement.getPrepareContext(), index, catalog.getSnapshot().alloc().getPartitionsOnDataPlacement( index.location, catalogTable.id ) ); } + + catalog.getLogicalRel( catalogTable.namespaceId ).deleteIndex( index.id ); } @Override - public void dropDataPlacement( LogicalTable catalogTable, DataStore storeInstance, Statement statement ) throws PlacementNotExistsException, LastPlacementException { + public void dropTableAllocation( LogicalTable catalogTable, DataStore storeInstance, Statement statement ) { + AllocationEntity entity = catalog.getSnapshot().alloc().getAllocation( storeInstance.getAdapterId(), catalogTable.id ); // Check whether this placement exists - if ( catalog.getSnapshot().alloc().getDataPlacement( storeInstance.getAdapterId(), catalogTable.id ) == null ) { - throw new PlacementNotExistsException(); + if ( 
entity == null ) { + throw new GenericRuntimeException( "The requested placement does not exist" ); } CatalogDataPlacement dataPlacement = catalog.getSnapshot().alloc().getDataPlacement( storeInstance.getAdapterId(), catalogTable.id ); if ( !catalog.getAllocRel( catalogTable.namespaceId ).validateDataPlacementsConstraints( catalogTable.id, storeInstance.getAdapterId(), dataPlacement.columnPlacementsOnAdapter, dataPlacement.getAllPartitionIds() ) ) { - throw new LastPlacementException(); + throw new GenericRuntimeException( "The last placement cannot be deleted" ); } // Drop all indexes on this store @@ -1130,8 +1082,8 @@ public void dropDataPlacement( LogicalTable catalogTable, DataStore storeInstanc // Physically delete the data from the store storeInstance.dropTable( statement.getPrepareContext(), catalogTable, catalog.getSnapshot().alloc().getPartitionsOnDataPlacement( storeInstance.getAdapterId(), catalogTable.id ) ); - // Remove physical stores afterwards - catalog.getAllocRel( catalogTable.namespaceId ).deleteAllocation( storeInstance.getAdapterId(), catalogTable.id ); + // Remove allocations; the physical entities will be removed on the next rebuild + catalog.getAllocRel( catalogTable.namespaceId ).deleteAllocation( entity.id ); // Reset query plan cache, implementation cache & routing cache statement.getQueryProcessor().resetCaches(); @@ -1139,19 +1091,15 @@ public void dropDataPlacement( LogicalTable catalogTable, DataStore storeInstanc @Override - public void dropPrimaryKey( LogicalTable catalogTable ) throws DdlOnSourceException { - try { - // Make sure that this is a table of type TABLE (and not SOURCE) - checkIfDdlPossible( catalogTable.entityType ); - catalog.getLogicalRel( catalogTable.namespaceId ).deletePrimaryKey( catalogTable.id ); - } catch ( GenericCatalogException e ) { - throw new RuntimeException( e ); - } + public void dropPrimaryKey( LogicalTable catalogTable ) { + // Make sure that this is a table of type TABLE (and not SOURCE) + checkIfDdlPossible( catalogTable.entityType ); + catalog.getLogicalRel( catalogTable.namespaceId ).deletePrimaryKey( catalogTable.id ); } @Override - public void setColumnType( LogicalTable catalogTable, String columnName, ColumnTypeInformation type, Statement statement ) throws DdlOnSourceException, ColumnNotExistsException, GenericCatalogException { + public void setColumnType( LogicalTable catalogTable, String columnName, ColumnTypeInformation type, Statement statement ) { // Make sure that this is a table of type TABLE (and not SOURCE) checkIfDdlPossible( catalogTable.entityType ); @@ -1182,7 +1130,7 @@ public void setColumnType( LogicalTable catalogTable, String columnName, ColumnT @Override - public void setColumnNullable( LogicalTable catalogTable, String columnName, boolean nullable, Statement statement ) throws ColumnNotExistsException, DdlOnSourceException, GenericCatalogException { + public void setColumnNullable( LogicalTable catalogTable, String columnName, boolean nullable, Statement statement ) { LogicalColumn logicalColumn = getCatalogColumn( catalogTable.namespaceId, catalogTable.id, columnName ); // Make sure that this is a table of type TABLE (and not SOURCE) @@ -1199,7 +1147,7 @@ public void setColumnNullable( LogicalTable catalogTable, String columnName, boo @Override - public void setColumnPosition( LogicalTable catalogTable, String columnName, String beforeColumnName, String afterColumnName, Statement statement ) throws ColumnNotExistsException { + public void setColumnPosition( LogicalTable catalogTable, String columnName, String
beforeColumnName, String afterColumnName, Statement statement ) { // Check if model permits operation checkModelLogic( catalogTable, columnName ); @@ -1215,7 +1163,7 @@ public void setColumnPosition( LogicalTable catalogTable, String columnName, Str targetPosition = refColumn.position + 1; } if ( logicalColumn.id == refColumn.id ) { - throw new RuntimeException( "Same column!" ); + throw new GenericRuntimeException( "Same column!" ); } List columns = catalog.getSnapshot().rel().getColumns( catalogTable.id ); if ( targetPosition < logicalColumn.position ) { // Walk from last column to first column @@ -1250,7 +1198,7 @@ public void setColumnPosition( LogicalTable catalogTable, String columnName, Str @Override - public void setColumnCollation( LogicalTable catalogTable, String columnName, Collation collation, Statement statement ) throws ColumnNotExistsException, DdlOnSourceException { + public void setColumnCollation( LogicalTable catalogTable, String columnName, Collation collation, Statement statement ) { LogicalColumn logicalColumn = getCatalogColumn( catalogTable.namespaceId, catalogTable.id, columnName ); // Check if model permits operation @@ -1267,7 +1215,7 @@ public void setColumnCollation( LogicalTable catalogTable, String columnName, Co @Override - public void setDefaultValue( LogicalTable catalogTable, String columnName, String defaultValue, Statement statement ) throws ColumnNotExistsException { + public void setDefaultValue( LogicalTable catalogTable, String columnName, String defaultValue, Statement statement ) { LogicalColumn logicalColumn = getCatalogColumn( catalogTable.namespaceId, catalogTable.id, columnName ); // Check if model permits operation @@ -1281,7 +1229,7 @@ public void setDefaultValue( LogicalTable catalogTable, String columnName, Strin @Override - public void dropDefaultValue( LogicalTable catalogTable, String columnName, Statement statement ) throws ColumnNotExistsException { + public void dropDefaultValue( LogicalTable catalogTable, String columnName, Statement statement ) { LogicalColumn logicalColumn = getCatalogColumn( catalogTable.namespaceId, catalogTable.id, columnName ); // check if model permits operation @@ -1295,12 +1243,11 @@ public void dropDefaultValue( LogicalTable catalogTable, String columnName, Stat @Override - public void modifyDataPlacement( LogicalTable catalogTable, List columnIds, List partitionGroupIds, List partitionGroupNames, DataStore storeInstance, Statement statement ) - throws PlacementNotExistsException, IndexPreventsRemovalException, LastPlacementException { + public void modifyDataPlacement( LogicalTable catalogTable, List columnIds, List partitionGroupIds, List partitionGroupNames, DataStore storeInstance, Statement statement ) { CatalogDataPlacement placements = statement.getDataContext().getSnapshot().alloc().getDataPlacement( storeInstance.getAdapterId(), catalogTable.id ); // Check whether this placement already exists if ( placements == null ) { - throw new PlacementNotExistsException(); + throw new GenericRuntimeException( "The requested placement does not exist" ); } // Check if views are dependent from this view @@ -1317,7 +1264,7 @@ public void modifyDataPlacement( LogicalTable catalogTable, List columnIds // Check whether there are any indexes located on the store requiring this column for ( CatalogIndex index : snapshot.getIndexes( catalogTable.id, false ) ) { if ( index.location == storeInstance.getAdapterId() && index.key.columnIds.contains( placement.columnId ) ) { - throw new IndexPreventsRemovalException(
index.name, snapshot.getColumn( placement.columnId ).name ); + throw new GenericRuntimeException( "The index with name %s prevents the removal of the placement %s", index.name, snapshot.getColumn( placement.columnId ).name ); } } // Check whether the column is a primary key column @@ -1339,7 +1286,7 @@ public void modifyDataPlacement( LogicalTable catalogTable, List columnIds } if ( !catalog.getAllocRel( catalogTable.namespaceId ).validateDataPlacementsConstraints( catalogTable.id, storeInstance.getAdapterId(), columnsToRemove, new ArrayList<>() ) ) { - throw new LastPlacementException(); + throw new GenericRuntimeException( "Cannot remove the placement as it is the last one" ); } boolean adjustPartitions = true; @@ -1368,8 +1315,8 @@ public void modifyDataPlacement( LogicalTable catalogTable, List columnIds int index = partition.partitionGroupIds.indexOf( partitionGroupId ); tempPartitionGroupList.add( partition.partitionGroupIds.get( index ) ); } catch ( IndexOutOfBoundsException e ) { - throw new RuntimeException( "Specified Partition-Index: '" + partitionGroupId + "' is not part of table '" - + catalogTable.name + "', has only " + partition.partitionGroupIds.size() + " partitions" ); + throw new GenericRuntimeException( "Specified Partition-Index: '%s' is not part of table " + "'%s', has only %s partitions", partitionGroupId, catalogTable.name, partition.partitionGroupIds.size() ); } } } @@ -1386,8 +1333,8 @@ else if ( !partitionGroupNames.isEmpty() && partitionGroupIds.isEmpty() ) { } } if ( !isPartOfTable ) { - throw new RuntimeException( "Specified partition name: '" + partitionName + "' is not part of table '" - + catalogTable.name + "'. Available partitions: " + String.join( ",", catalog.getSnapshot().alloc().getPartitionGroupNames( tableId ) ) ); + throw new GenericRuntimeException( "Specified partition name: '%s' is not part of table " + "'%s'.
Available partitions: %s", partitionName, catalogTable.name, String.join( ",", catalog.getSnapshot().alloc().getPartitionGroupNames( tableId ) ) ); } } } else if ( partitionGroupNames.isEmpty() && partitionGroupIds.isEmpty() ) { @@ -1475,7 +1422,7 @@ else if ( !partitionGroupNames.isEmpty() && partitionGroupIds.isEmpty() ) { @Override - public void modifyPartitionPlacement( LogicalTable catalogTable, List partitionGroupIds, DataStore storeInstance, Statement statement ) throws LastPlacementException { + public void modifyPartitionPlacement( LogicalTable catalogTable, List partitionGroupIds, DataStore storeInstance, Statement statement ) { long storeId = storeInstance.getAdapterId(); List newPartitions = new ArrayList<>(); List removedPartitions = new ArrayList<>(); @@ -1490,7 +1437,7 @@ public void modifyPartitionPlacement( LogicalTable catalogTable, List part } if ( !catalog.getAllocRel( catalogTable.namespaceId ).validateDataPlacementsConstraints( catalogTable.id, storeInstance.getAdapterId(), new ArrayList<>(), removedPartitions ) ) { - throw new LastPlacementException(); + throw new GenericRuntimeException( "Cannot remove the placement as it is the last" ); } // Get PartitionGroups that have been newly added @@ -1544,15 +1491,12 @@ public void modifyPartitionPlacement( LogicalTable catalogTable, List part @Override - public void addColumnPlacement( LogicalTable catalogTable, String columnName, DataStore storeInstance, Statement statement ) throws UnknownAdapterException, PlacementNotExistsException, PlacementAlreadyExistsException, ColumnNotExistsException { + public void addColumnPlacement( LogicalTable catalogTable, String columnName, @NonNull DataStore storeInstance, Statement statement ) { columnName = adjustNameIfNeeded( columnName, catalogTable.namespaceId ); Snapshot snapshot = statement.getTransaction().getSnapshot(); - if ( storeInstance == null ) { - throw new UnknownAdapterException( "" ); - } // Check whether this placement already exists if ( !snapshot.alloc().adapterHasPlacement( storeInstance.getAdapterId(), catalogTable.id ) ) { - throw new PlacementNotExistsException(); + throw new GenericRuntimeException( "The requested placement does not exist" ); } LogicalColumn logicalColumn = getCatalogColumn( catalogTable.namespaceId, catalogTable.id, columnName ); @@ -1567,7 +1511,7 @@ public void addColumnPlacement( LogicalTable catalogTable, String columnName, Da logicalColumn.id, PlacementType.MANUAL ); } else { - throw new PlacementAlreadyExistsException(); + throw new GenericRuntimeException( "There already exist a placement" ); } } else { AllocationEntity allocation = catalog.getSnapshot().alloc().getAllocation( storeInstance.getAdapterId(), catalogTable.id ); @@ -1591,15 +1535,12 @@ public void addColumnPlacement( LogicalTable catalogTable, String columnName, Da @Override - public void dropColumnPlacement( LogicalTable catalogTable, String columnName, DataStore storeInstance, Statement statement ) throws UnknownAdapterException, PlacementNotExistsException, IndexPreventsRemovalException, LastPlacementException, PlacementIsPrimaryException, ColumnNotExistsException { - if ( storeInstance == null ) { - throw new UnknownAdapterException( "" ); - } + public void dropColumnPlacement( LogicalTable catalogTable, String columnName, @NonNull DataStore storeInstance, Statement statement ) { Snapshot snapshot = statement.getTransaction().getSnapshot(); // Check whether this placement already exists if ( !snapshot.alloc().adapterHasPlacement( storeInstance.getAdapterId(), 
catalogTable.id ) ) { - throw new PlacementNotExistsException(); + throw new GenericRuntimeException( "The requested placement does not exist" ); } LogicalColumn logicalColumn = getCatalogColumn( catalogTable.namespaceId, catalogTable.id, columnName ); @@ -1636,15 +1577,15 @@ public void dropColumnPlacement( LogicalTable catalogTable, String columnName, D @Override - public void alterTableOwner( LogicalTable catalogTable, String newOwnerName ) throws UnknownUserException { + public void alterTableOwner( LogicalTable catalogTable, String newOwnerName ) { throw new UnsupportedOperationException(); } @Override - public void renameTable( LogicalTable catalogTable, String newTableName, Statement statement ) throws EntityAlreadyExistsException { + public void renameTable( LogicalTable catalogTable, String newTableName, Statement statement ) { if ( catalog.getSnapshot().rel().checkIfExistsEntity( newTableName ) ) { - throw new EntityAlreadyExistsException(); + throw new GenericRuntimeException( "An entity with name %s already exists", newTableName ); } // Check if views are dependent from this view checkViewDependencies( catalogTable ); @@ -1664,11 +1605,11 @@ public void renameTable( LogicalTable catalogTable, String newTableName, Stateme @Override - public void renameColumn( LogicalTable catalogTable, String columnName, String newColumnName, Statement statement ) throws ColumnAlreadyExistsException, ColumnNotExistsException { + public void renameColumn( LogicalTable catalogTable, String columnName, String newColumnName, Statement statement ) { LogicalColumn logicalColumn = getCatalogColumn( catalogTable.namespaceId, catalogTable.id, columnName ); if ( catalog.getSnapshot().rel().checkIfExistsColumn( logicalColumn.tableId, newColumnName ) ) { - throw new ColumnAlreadyExistsException( newColumnName, logicalColumn.getTableName() ); + throw new GenericRuntimeException( "There already exists a column with name %s on table %s", newColumnName, logicalColumn.getTableName() ); } // Check if views are dependent from this view checkViewDependencies( catalogTable ); @@ -1684,7 +1625,7 @@ public void renameColumn( LogicalTable catalogTable, String columnName, String n @Override - public void createView( String viewName, long namespaceId, AlgNode algNode, AlgCollation algCollation, boolean replace, Statement statement, PlacementType placementType, List projectedColumns, String query, QueryLanguage language ) throws EntityAlreadyExistsException { + public void createView( String viewName, long namespaceId, AlgNode algNode, AlgCollation algCollation, boolean replace, Statement statement, PlacementType placementType, List projectedColumns, String query, QueryLanguage language ) { viewName = adjustNameIfNeeded( viewName, namespaceId ); if ( catalog.getSnapshot().rel().checkIfExistsEntity( viewName ) ) { @@ -1692,10 +1633,10 @@ try { dropView( catalog.getSnapshot().rel().getTable( namespaceId, viewName ), statement ); } catch ( DdlOnSourceException e ) { - throw new RuntimeException( "Unable tp drop the existing View with this name." ); + throw new GenericRuntimeException( "Unable to drop the existing view with this name."
); } } else { - throw new EntityAlreadyExistsException(); + throw new GenericRuntimeException( "There already exists a view with the name %s", viewName ); } } @@ -1749,7 +1690,7 @@ private String adjustNameIfNeeded( String name, long namespaceId ) { @Override - public void createMaterializedView( String viewName, long namespaceId, AlgRoot algRoot, boolean replace, Statement statement, List stores, PlacementType placementType, List projectedColumns, MaterializedCriteria materializedCriteria, String query, QueryLanguage language, boolean ifNotExists, boolean ordered ) throws EntityAlreadyExistsException, GenericCatalogException { + public void createMaterializedView( String viewName, long namespaceId, AlgRoot algRoot, boolean replace, Statement statement, List stores, PlacementType placementType, List projectedColumns, MaterializedCriteria materializedCriteria, String query, QueryLanguage language, boolean ifNotExists, boolean ordered ) { viewName = adjustNameIfNeeded( viewName, namespaceId ); // Check if there is already a table with this name if ( assertEntityExists( namespaceId, viewName, ifNotExists ) ) { @@ -1776,7 +1717,7 @@ public void createMaterializedView( String viewName, long namespaceId, AlgRoot a List entityTypes = new ArrayList<>(); underlying.keySet().forEach( t -> entityTypes.add( relSnapshot.getTable( t ).entityType ) ); if ( !(entityTypes.contains( EntityType.ENTITY )) ) { - throw new GenericCatalogException( "Not possible to use Materialized View with Update Freshness if underlying table does not include a modifiable table." ); + throw new GenericRuntimeException( "Not possible to use Materialized View with Update Freshness if underlying table does not include a modifiable table." ); } } @@ -1867,10 +1808,9 @@ public void createMaterializedView( String viewName, long namespaceId, AlgRoot a private void checkModelLangCompatibility( QueryLanguage language, long namespaceId, Long tableId ) { LogicalTable catalogTable = catalog.getSnapshot().rel().getTable( tableId ); if ( catalogTable.getNamespaceType() != language.getNamespaceType() ) { - throw new RuntimeException( - String.format( - "The used language cannot execute schema changing queries on this entity with the data model %s.", - catalogTable.getNamespaceType() ) ); + throw new GenericRuntimeException( + "The used language cannot execute schema changing queries on this entity with the data model %s.", + catalogTable.getNamespaceType() ); } } @@ -1906,11 +1846,8 @@ public long createGraph( String graphName, boolean modifiable, @Nullable List stores, boolean onlyPlacement, Statement statement ) { - try { - catalog.getLogicalGraph( graphId ).addGraphLogistics( graphId, stores, onlyPlacement ); - } catch ( GenericCatalogException | UnknownTableException | UnknownColumnException e ) { - throw new RuntimeException(); - } + + //catalog.getLogicalGraph( graphId ).addGraphLogistics( graphId, stores, onlyPlacement ); LogicalGraph graph = catalog.getSnapshot().graph().getGraph( graphId ); Snapshot snapshot = statement.getTransaction().getSnapshot(); @@ -1974,10 +1911,10 @@ private void afterGraphPlacementAddLogistics( DataStore store, long graphId ) { LogicalTable edges = catalog.getLogicalRel( catalogTable.namespaceId ).getTable( mapping.edgesId ); LogicalTable edgeProperty = catalog.getLogicalRel( catalogTable.namespaceId ).getTable( mapping.edgesPropertyId ); - catalog.getLogicalRel( catalogTable.namespaceId ).createAlloctionTable( store.getAdapterId(), mapping.nodesId ); - catalog.getLogicalRel( catalogTable.namespaceId 
).createAlloctionTable( store.getAdapterId(), mapping.nodesPropertyId ); - catalog.getLogicalRel( catalogTable.namespaceId ).createAlloctionTable( store.getAdapterId(), mapping.edgesId ); - catalog.getLogicalRel( catalogTable.namespaceId ).createAlloctionTable( store.getAdapterId(), mapping.edgesPropertyId ); + catalog.getLogicalRel( catalogTable.namespaceId ).createAllocationTable( store.getAdapterId(), mapping.nodesId ); + catalog.getLogicalRel( catalogTable.namespaceId ).createAllocationTable( store.getAdapterId(), mapping.nodesPropertyId ); + catalog.getLogicalRel( catalogTable.namespaceId ).createAllocationTable( store.getAdapterId(), mapping.edgesId ); + catalog.getLogicalRel( catalogTable.namespaceId ).createAllocationTable( store.getAdapterId(), mapping.edgesPropertyId ); catalog.getLogicalRel( catalogTable.namespaceId ).addPartitionPlacement( nodes.namespaceId, @@ -2055,7 +1992,7 @@ public void removeGraph( long graphId, boolean ifExists, Statement statement ) { if ( graph == null ) { if ( !ifExists ) { - throw new UnknownGraphException( graphId ); + throw new GenericRuntimeException( "There exists no graph with id %s", graphId ); } return; } @@ -2160,78 +2097,75 @@ private List getUnderlyingColumns( AlgNode algNode, AlgDataType fieldList } - public void createTableOld( long namespaceId, String name, List fields, List constraints, boolean ifNotExists, List stores, PlacementType placementType, Statement statement ) throws EntityAlreadyExistsException { - name = adjustNameIfNeeded( name, namespaceId ); + public void createTableOld( long namespaceId, String name, List fields, List constraints, boolean ifNotExists, List stores, PlacementType placementType, Statement statement ) { + String adjustedName = adjustNameIfNeeded( name, namespaceId ); - try { - // Check if there is already an entity with this name - if ( assertEntityExists( namespaceId, name, ifNotExists ) ) { - return; - } + // Check if there is already an entity with this name + if ( assertEntityExists( namespaceId, adjustedName, ifNotExists ) ) { + return; + } - fields = new ArrayList<>( fields ); - constraints = new ArrayList<>( constraints ); + fields = new ArrayList<>( fields ); + constraints = new ArrayList<>( constraints ); - checkDocumentModel( namespaceId, fields, constraints ); + checkDocumentModel( namespaceId, fields, constraints ); - boolean foundPk = false; - for ( ConstraintInformation constraintInformation : constraints ) { - if ( constraintInformation.type == ConstraintType.PRIMARY ) { - if ( foundPk ) { - throw new RuntimeException( "More than one primary key has been provided!" ); - } else { - foundPk = true; - } + boolean foundPk = false; + for ( ConstraintInformation constraintInformation : constraints ) { + if ( constraintInformation.type == ConstraintType.PRIMARY ) { + if ( foundPk ) { + throw new GenericRuntimeException( "More than one primary key has been provided!" ); + } else { + foundPk = true; } } - if ( !foundPk ) { - throw new RuntimeException( "No primary key has been provided!" ); - } + } + if ( !foundPk ) { + throw new GenericRuntimeException( "No primary key has been provided!" 
); + } - if ( stores == null ) { - // Ask router on which store(s) the table should be placed - stores = RoutingManager.getInstance().getCreatePlacementStrategy().getDataStoresForNewEntity(); - } + if ( stores == null ) { + // Ask router on which store(s) the table should be placed + stores = RoutingManager.getInstance().getCreatePlacementStrategy().getDataStoresForNewEntity(); + } - LogicalTable table = catalog.getLogicalRel( namespaceId ).addTable( - name, - EntityType.ENTITY, - true ); + LogicalTable table = catalog.getLogicalRel( namespaceId ).addTable( + name, + EntityType.ENTITY, + true ); - // Initially create DataPlacement containers on every store the table should be placed. - // stores.forEach( store -> catalog.getAllocRel( namespaceId ).createAlloctionTable( store.getAdapterId(), tableId ) ); + // Initially create DataPlacement containers on every store the table should be placed. + // stores.forEach( store -> catalog.getAllocRel( namespaceId ).createAllocationTable( store.getAdapterId(), tableId ) ); - // catalog.updateSnapshot(); + // catalog.updateSnapshot(); - for ( FieldInformation information : fields ) { - addColumn( namespaceId, information.name, information.typeInformation, information.collation, information.defaultValue, table.id, information.position, stores, placementType ); - } + for ( FieldInformation information : fields ) { + addColumn( namespaceId, information.name, information.typeInformation, information.collation, information.defaultValue, table.id, information.position, stores, placementType ); + } - for ( ConstraintInformation constraint : constraints ) { - addConstraint( namespaceId, constraint.name, constraint.type, null, table.id ); - } - Snapshot snapshot = statement.getTransaction().getSnapshot(); - LogicalTable catalogTable = snapshot.rel().getTable( table.id ); + for ( ConstraintInformation constraint : constraints ) { + addConstraint( namespaceId, constraint.name, constraint.type, null, table.id ); + } + Snapshot snapshot = statement.getTransaction().getSnapshot(); + LogicalTable catalogTable = snapshot.rel().getTable( table.id ); - // Trigger rebuild of schema; triggers schema creation on adapters - catalog.updateSnapshot(); + // Trigger rebuild of schema; triggers schema creation on adapters + catalog.updateSnapshot(); - PartitionProperty property = snapshot.alloc().getPartitionProperty( catalogTable.id ); + PartitionProperty property = snapshot.alloc().getPartitionProperty( catalogTable.id ); - for ( DataStore store : stores ) { - catalog.getAllocRel( catalogTable.namespaceId ).addPartitionPlacement( - catalogTable.namespaceId, - store.getAdapterId(), - catalogTable.id, - property.partitionIds.get( 0 ), - PlacementType.AUTOMATIC, - DataPlacementRole.UPTODATE ); - - } + for ( DataStore store : stores ) { + catalog.getAllocRel( catalogTable.namespaceId ).addPartitionPlacement( + catalogTable.namespaceId, + store.getAdapterId(), + catalogTable.id, + property.partitionIds.get( 0 ), + PlacementType.AUTOMATIC, + DataPlacementRole.UPTODATE ); - } catch ( GenericCatalogException | UnknownColumnException | UnknownCollationException e ) { - throw new RuntimeException( e ); } + + } @@ -2264,7 +2198,7 @@ public void createTable( long namespaceId, String name, List f // addATable for ( DataStore store : stores ) { - AllocationTable alloc = catalog.getAllocRel( namespaceId ).createAlloctionTable( store.getAdapterId(), logical.id ); + AllocationTable alloc = catalog.getAllocRel( namespaceId ).createAllocationTable( store.getAdapterId(), logical.id ); int i = 0; 
for ( Long id : ids.values() ) { alloc = catalog.getAllocRel( namespaceId ).addColumnPlacement( alloc.id, id, PlacementType.AUTOMATIC, i ); @@ -2283,7 +2217,7 @@ public void createTable( long namespaceId, String name, List f @Override - public void createCollection( long namespaceId, String name, boolean ifNotExists, List stores, PlacementType placementType, Statement statement ) throws EntityAlreadyExistsException { + public void createCollection( long namespaceId, String name, boolean ifNotExists, List stores, PlacementType placementType, Statement statement ) { name = adjustNameIfNeeded( name, namespaceId ); if ( assertEntityExists( namespaceId, name, ifNotExists ) ) { @@ -2295,12 +2229,8 @@ public void createCollection( long namespaceId, String name, boolean ifNotExists stores = RoutingManager.getInstance().getCreatePlacementStrategy().getDataStoresForNewEntity(); } - long collectionId; - try { - collectionId = catalog.getAllocDoc( namespaceId ).addCollectionLogistics( namespaceId, name, stores, false ); - } catch ( GenericCatalogException e ) { - throw new RuntimeException( e ); - } + long collectionId = 0; + //collectionId = catalog.getAllocDoc( namespaceId ).addCollectionLogistics( namespaceId, name, stores, false ); catalog.getLogicalDoc( namespaceId ).addCollection( collectionId, @@ -2329,14 +2259,14 @@ public void createCollection( long namespaceId, String name, boolean ifNotExists } - private boolean assertEntityExists( long namespaceId, String name, boolean ifNotExists ) throws EntityAlreadyExistsException { + private boolean assertEntityExists( long namespaceId, String name, boolean ifNotExists ) { // Check if there is already an entity with this name if ( catalog.getSnapshot().rel().checkIfExistsEntity( name ) ) { if ( ifNotExists ) { // It is ok that there is already a table with this name because "IF NOT EXISTS" was specified return true; } else { - throw new EntityAlreadyExistsException(); + throw new GenericRuntimeException( "There already exists an entity with the name %s", name ); } } return false; @@ -2366,8 +2296,8 @@ public void removeDocumentLogistics( LogicalCollection catalogCollection, Statem @Override public void addCollectionPlacement( long namespaceId, String name, List stores, Statement statement ) { - long collectionId; - collectionId = catalog.getLogicalDoc( namespaceId ).addCollectionLogistics( name, stores, true ); + long collectionId = 0; + //collectionId = catalog.getLogicalDoc( namespaceId ).addCollectionLogistics( name, stores, true ); // Initially create DataPlacement containers on every store the table should be placed.
LogicalCollection catalogCollection = catalog.getSnapshot().doc().getCollection( collectionId ); @@ -2408,7 +2338,7 @@ private void removeDocumentPlacementLogistics( LogicalCollection collection, Dat /*CatalogCollectionMapping mapping = catalog.getAllocDoc( collection.namespaceId ).getCollectionMapping( collection.id ); LogicalTable table = catalog.getLogicalRel( catalogTable.namespaceId ).getTable( mapping.collectionId ); try { - dropDataPlacement( table, store, statement ); + dropTableAllocation( table, store, statement ); } catch ( PlacementNotExistsException | LastPlacementException e ) { throw new RuntimeException( e ); }*/ @@ -2419,7 +2349,7 @@ private void afterDocumentLogistics( DataStore store, long collectionId ) { /*CatalogCollectionMapping mapping = catalog.getLogicalRel( catalogTable.namespaceId ).getCollectionMapping( collectionId ); LogicalTable table = catalog.getLogicalRel( catalogTable.namespaceId ).getTable( mapping.collectionId ); - catalog.getLogicalRel( catalogTable.namespaceId ).createAlloctionTable( store.getAdapterId(), collectionId ); + catalog.getLogicalRel( catalogTable.namespaceId ).createAllocationTable( store.getAdapterId(), collectionId ); catalog.getLogicalRel( catalogTable.namespaceId ).addPartitionPlacement( table.namespaceId, @@ -2478,7 +2408,7 @@ private void checkDocumentModel( long namespaceId, List column @Override - public void addPartitioning( PartitionInformation partitionInfo, List stores, Statement statement ) throws GenericCatalogException, UnknownPartitionTypeException, UnknownColumnException, PartitionGroupNamesNotUniqueException, UnknownTableException, TransactionException, UnknownSchemaException, UnknownUserException, UnknownKeyException { + public void addPartitioning( PartitionInformation partitionInfo, List stores, Statement statement ) throws PartitionGroupNamesNotUniqueException, TransactionException { Snapshot snapshot = statement.getTransaction().getSnapshot(); LogicalColumn logicalColumn = snapshot.rel().getColumn( partitionInfo.table.id, partitionInfo.columnName ); @@ -2506,7 +2436,7 @@ public void addPartitioning( PartitionInformation partitionInfo, List // Check whether partition function supports type of partition column if ( !partitionManager.supportsColumnOfType( logicalColumn.type ) ) { - throw new RuntimeException( "The partition function " + actualPartitionType + " does not support columns of type " + logicalColumn.type ); + throw new GenericRuntimeException( "The partition function %s does not support columns of type %s", actualPartitionType, logicalColumn.type ); } int numberOfPartitionGroups = partitionInfo.numberOfPartitionGroups; @@ -2527,7 +2457,7 @@ public void addPartitioning( PartitionInformation partitionInfo, List // Validate partition setup if ( !partitionManager.validatePartitionGroupSetup( partitionInfo.qualifiers, numberOfPartitionGroups, partitionInfo.partitionGroupNames, logicalColumn ) ) { - throw new RuntimeException( "Partitioning failed for table: " + partitionInfo.table.name ); + throw new GenericRuntimeException( "Partitioning failed for table: %s", partitionInfo.table.name ); } // Loop over value to create those partitions with partitionKey to uniquelyIdentify partition @@ -2763,7 +2693,7 @@ public void addPartitioning( PartitionInformation partitionInfo, List @Override - public void removePartitioning( LogicalTable partitionedTable, Statement statement ) throws GenericCatalogException, UnknownTableException, TransactionException, UnknownSchemaException, UnknownUserException, UnknownKeyException { + 
public void removePartitioning( LogicalTable partitionedTable, Statement statement ) throws TransactionException { long tableId = partitionedTable.id; Snapshot snapshot = statement.getTransaction().getSnapshot(); @@ -2895,7 +2825,7 @@ public void removePartitioning( LogicalTable partitionedTable, Statement stateme } - private long addColumn( long namespaceId, String columnName, ColumnTypeInformation typeInformation, Collation collation, String defaultValue, long tableId, int position, List stores, PlacementType placementType ) throws GenericCatalogException, UnknownCollationException, UnknownColumnException { + private long addColumn( long namespaceId, String columnName, ColumnTypeInformation typeInformation, Collation collation, String defaultValue, long tableId, int position, List stores, PlacementType placementType ) { columnName = adjustNameIfNeeded( columnName, namespaceId ); long addedColumnId = catalog.getLogicalRel( namespaceId ).addColumn( columnName, @@ -2928,7 +2858,7 @@ private long addColumn( long namespaceId, String columnName, ColumnTypeInformati @Override - public void addConstraint( long namespaceId, String constraintName, ConstraintType constraintType, List columnIds, long tableId ) throws UnknownColumnException, GenericCatalogException { + public void addConstraint( long namespaceId, String constraintName, ConstraintType constraintType, List columnIds, long tableId ) { if ( constraintType == ConstraintType.PRIMARY ) { catalog.getLogicalRel( namespaceId ).addPrimaryKey( tableId, columnIds ); } else if ( constraintType == ConstraintType.UNIQUE ) { @@ -3089,13 +3019,9 @@ public void dropTableOld( LogicalTable catalogTable, Statement statement ) throw } // Delete the self-referencing foreign keys - try { - for ( CatalogForeignKey foreignKey : selfRefsToDelete ) { - catalog.getLogicalRel( catalogTable.namespaceId ).deleteForeignKey( foreignKey.id ); - } - } catch ( GenericCatalogException e ) { - catalog.getLogicalRel( catalogTable.namespaceId ).flagTableForDeletion( catalogTable.id, true ); - throw new PolyphenyDbContextException( "Exception while deleting self-referencing foreign key constraints.", e ); + + for ( CatalogForeignKey foreignKey : selfRefsToDelete ) { + catalog.getLogicalRel( catalogTable.namespaceId ).deleteForeignKey( foreignKey.id ); } // Delete indexes of this table @@ -3106,21 +3032,17 @@ public void dropTableOld( LogicalTable catalogTable, Statement statement ) throw } // Delete keys and constraints - try { - // Remove primary key - catalog.getLogicalRel( catalogTable.namespaceId ).deletePrimaryKey( catalogTable.id ); - // Delete all foreign keys of the table - List foreignKeys = relSnapshot.getForeignKeys( catalogTable.id ); - for ( CatalogForeignKey foreignKey : foreignKeys ) { - catalog.getLogicalRel( catalogTable.namespaceId ).deleteForeignKey( foreignKey.id ); - } - // Delete all constraints of the table - for ( CatalogConstraint constraint : relSnapshot.getConstraints( catalogTable.id ) ) { - catalog.getLogicalRel( catalogTable.namespaceId ).deleteConstraint( constraint.id ); - } - } catch ( GenericCatalogException e ) { - catalog.getLogicalRel( catalogTable.namespaceId ).flagTableForDeletion( catalogTable.id, true ); - throw new PolyphenyDbContextException( "Exception while dropping keys.", e ); + + // Remove primary key + catalog.getLogicalRel( catalogTable.namespaceId ).deletePrimaryKey( catalogTable.id ); + // Delete all foreign keys of the table + List foreignKeys = relSnapshot.getForeignKeys( catalogTable.id ); + for ( CatalogForeignKey 
foreignKey : foreignKeys ) { + catalog.getLogicalRel( catalogTable.namespaceId ).deleteForeignKey( foreignKey.id ); + } + // Delete all constraints of the table + for ( CatalogConstraint constraint : relSnapshot.getConstraints( catalogTable.id ) ) { + catalog.getLogicalRel( catalogTable.namespaceId ).deleteConstraint( constraint.id ); } // Delete columns @@ -3170,7 +3092,7 @@ public void dropTable( LogicalTable catalogTable, Statement statement ) throws D public void truncate( LogicalTable catalogTable, Statement statement ) { // Make sure that the table can be modified if ( !catalogTable.modifiable ) { - throw new RuntimeException( "Unable to modify a read-only table!" ); + throw new GenericRuntimeException( "Unable to modify a read-only table." ); } // Monitor truncate for rowCount @@ -3206,25 +3128,25 @@ private void prepareMonitoring( Statement statement, Kind kind, LogicalTable cat @Override public void dropFunction() { - throw new RuntimeException( "Not supported yet" ); + throw new GenericRuntimeException( "Not supported yet" ); } @Override public void setOption() { - throw new RuntimeException( "Not supported yet" ); + throw new GenericRuntimeException( "Not supported yet" ); } @Override public void createType() { - throw new RuntimeException( "Not supported yet" ); + throw new GenericRuntimeException( "Not supported yet" ); } @Override public void dropType() { - throw new RuntimeException( "Not supported yet" ); + throw new GenericRuntimeException( "Not supported yet" ); } } diff --git a/dbms/src/main/java/org/polypheny/db/partition/FrequencyMapImpl.java b/dbms/src/main/java/org/polypheny/db/partition/FrequencyMapImpl.java index f5d3d5a6d3..34cdf09b1f 100644 --- a/dbms/src/main/java/org/polypheny/db/partition/FrequencyMapImpl.java +++ b/dbms/src/main/java/org/polypheny/db/partition/FrequencyMapImpl.java @@ -35,9 +35,6 @@ import org.polypheny.db.catalog.entity.CatalogPartition; import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalTable; -import org.polypheny.db.catalog.exceptions.GenericCatalogException; -import org.polypheny.db.catalog.exceptions.UnknownSchemaException; -import org.polypheny.db.catalog.exceptions.UnknownUserException; import org.polypheny.db.catalog.logistic.DataPlacementRole; import org.polypheny.db.catalog.logistic.PartitionType; import org.polypheny.db.catalog.logistic.PlacementType; @@ -313,7 +310,7 @@ private void redistributePartitions( LogicalTable table, List partitionsFr } transaction.commit(); - } catch ( GenericCatalogException | UnknownUserException | UnknownSchemaException | TransactionException e ) { + } catch ( TransactionException e ) { log.error( "Error while reassigning new location for temperature-based partitions", e ); if ( transaction != null ) { try { diff --git a/dbms/src/main/java/org/polypheny/db/processing/AuthenticatorImpl.java b/dbms/src/main/java/org/polypheny/db/processing/AuthenticatorImpl.java index 697ee13473..d77532fbe4 100644 --- a/dbms/src/main/java/org/polypheny/db/processing/AuthenticatorImpl.java +++ b/dbms/src/main/java/org/polypheny/db/processing/AuthenticatorImpl.java @@ -19,7 +19,6 @@ import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogUser; -import org.polypheny.db.catalog.exceptions.UnknownUserException; import org.polypheny.db.iface.AuthenticationException; import org.polypheny.db.iface.Authenticator; @@ -31,15 +30,12 @@ public class AuthenticatorImpl implements Authenticator { @Override public CatalogUser authenticate( 
final String username, final String password ) throws AuthenticationException { - try { - CatalogUser catalogUser = Catalog.getInstance().getSnapshot().getUser( username ); - if ( catalogUser.password.equals( password ) ) { - return catalogUser; - } else { - throw new AuthenticationException( "Wrong password for user '" + username + "'!" ); - } - } catch ( UnknownUserException e ) { - throw new AuthenticationException( e ); + CatalogUser catalogUser = Catalog.getInstance().getSnapshot().getUser( username ); + if ( catalogUser.password.equals( password ) ) { + return catalogUser; + } else { + throw new AuthenticationException( "Wrong password for user '" + username + "'!" ); } } + } diff --git a/dbms/src/main/java/org/polypheny/db/processing/ConstraintEnforceAttacher.java b/dbms/src/main/java/org/polypheny/db/processing/ConstraintEnforceAttacher.java index 382a2255f9..3891665047 100644 --- a/dbms/src/main/java/org/polypheny/db/processing/ConstraintEnforceAttacher.java +++ b/dbms/src/main/java/org/polypheny/db/processing/ConstraintEnforceAttacher.java @@ -57,9 +57,6 @@ import org.polypheny.db.catalog.entity.CatalogPrimaryKey; import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalTable; -import org.polypheny.db.catalog.exceptions.GenericCatalogException; -import org.polypheny.db.catalog.exceptions.UnknownSchemaException; -import org.polypheny.db.catalog.exceptions.UnknownUserException; import org.polypheny.db.catalog.logistic.ConstraintType; import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.logistic.NamespaceType; @@ -678,7 +675,7 @@ private boolean testConstraintsValid() { } - } catch ( UnknownSchemaException | UnknownUserException | TransactionException | GenericCatalogException e ) { + } catch ( TransactionException e ) { return false; } } diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/AbstractDqlRouter.java b/dbms/src/main/java/org/polypheny/db/routing/routers/AbstractDqlRouter.java index bab3ff0f54..4990ddf334 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/AbstractDqlRouter.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/AbstractDqlRouter.java @@ -167,7 +167,7 @@ public AlgNode routeDocument( RoutedAlgBuilder } return alg; } else if ( alg.getDocType() == DocType.SCAN ) { - builder.push( handleDocumentScan( (DocumentScan) alg, statement, builder, null ).build() ); + builder.push( handleDocumentScan( (DocumentScan) alg, statement, builder, null ).build() ); return alg; } else if ( alg.getDocType() == DocType.VALUES ) { return alg; @@ -216,7 +216,7 @@ protected List buildSelect( AlgNode node, List handleGeneric( AlgNode node, List builders ) { if ( node.getInputs().size() == 1 ) { - builders.forEach( + log.warn( "why?" 
); + /*builders.forEach( builder -> builder.replaceTop( node.copy( node.getTraitSet(), ImmutableList.of( builder.peek( 0 ) ) ) ) - ); + );*/ } else if ( node.getInputs().size() == 2 ) { // Joins, SetOperations builders.forEach( builder -> builder.replaceTop( node.copy( node.getTraitSet(), ImmutableList.of( builder.peek( 1 ), builder.peek( 0 ) ) ), 2 ) diff --git a/dbms/src/main/java/org/polypheny/db/transaction/TransactionManagerImpl.java b/dbms/src/main/java/org/polypheny/db/transaction/TransactionManagerImpl.java index 7eb0cb45cf..9e69a25001 100644 --- a/dbms/src/main/java/org/polypheny/db/transaction/TransactionManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/transaction/TransactionManagerImpl.java @@ -24,9 +24,6 @@ import org.polypheny.db.adapter.Adapter; import org.polypheny.db.catalog.entity.CatalogUser; import org.polypheny.db.catalog.entity.LogicalNamespace; -import org.polypheny.db.catalog.exceptions.GenericCatalogException; -import org.polypheny.db.catalog.exceptions.UnknownSchemaException; -import org.polypheny.db.catalog.exceptions.UnknownUserException; import org.polypheny.db.information.InformationGroup; import org.polypheny.db.information.InformationManager; import org.polypheny.db.information.InformationPage; @@ -98,7 +95,7 @@ public Transaction startTransaction( CatalogUser user, LogicalNamespace defaultS @Override - public Transaction startTransaction( long userId, boolean analyze, String origin, MultimediaFlavor flavor ) throws UnknownUserException, UnknownSchemaException { + public Transaction startTransaction( long userId, boolean analyze, String origin, MultimediaFlavor flavor ) { /*Catalog catalog = Catalog.getInstance(); CatalogUser catalogUser = catalog.getUser( (int) userId ); LogicalNamespace logicalNamespace = catalog.getNamespace( catalogDatabase.defaultNamespaceName ); @@ -108,7 +105,7 @@ public Transaction startTransaction( long userId, boolean analyze, String origin @Override - public Transaction startTransaction( long userId, boolean analyze, String origin ) throws GenericCatalogException, UnknownUserException, UnknownSchemaException { + public Transaction startTransaction( long userId, boolean analyze, String origin ) { throw new RuntimeException(); // return startTransaction( userId, databaseId, analyze, origin, MultimediaFlavor.DEFAULT ); } diff --git a/dbms/src/main/java/org/polypheny/db/view/MaterializedViewManagerImpl.java b/dbms/src/main/java/org/polypheny/db/view/MaterializedViewManagerImpl.java index c308c21118..ec59ccf56f 100644 --- a/dbms/src/main/java/org/polypheny/db/view/MaterializedViewManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/view/MaterializedViewManagerImpl.java @@ -51,9 +51,6 @@ import org.polypheny.db.catalog.entity.MaterializedCriteria.CriteriaType; import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalTable; -import org.polypheny.db.catalog.exceptions.GenericCatalogException; -import org.polypheny.db.catalog.exceptions.UnknownSchemaException; -import org.polypheny.db.catalog.exceptions.UnknownUserException; import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.config.RuntimeConfig; @@ -276,29 +273,26 @@ public void prepareToUpdate( Long materializedId ) { Catalog catalog = Catalog.getInstance(); LogicalTable catalogTable = catalog.getSnapshot().getLogicalEntity( materializedId ).unwrap( LogicalTable.class ); - try { - Transaction transaction = 
getTransactionManager().startTransaction( - Catalog.defaultUserId, - false, - "Materialized View" ); + Transaction transaction = getTransactionManager().startTransaction( + Catalog.defaultUserId, + false, + "Materialized View" ); - try { - Statement statement = transaction.createStatement(); - Collection> idAccessMap = new ArrayList<>(); - // Get a shared global schema lock (only DDLs acquire an exclusive global schema lock) - idAccessMap.add( Pair.of( LockManager.GLOBAL_LOCK, LockMode.SHARED ) ); - // Get locks for individual tables - EntityAccessMap accessMap = new EntityAccessMap( ((CatalogMaterializedView) catalogTable).getDefinition(), new HashMap<>() ); - idAccessMap.addAll( accessMap.getAccessedEntityPair() ); - LockManager.INSTANCE.lock( idAccessMap, (TransactionImpl) statement.getTransaction() ); - } catch ( DeadlockException e ) { - throw new RuntimeException( "DeadLock while locking for materialized view update", e ); - } - updateData( transaction, materializedId ); - commitTransaction( transaction ); - } catch ( GenericCatalogException | UnknownUserException | UnknownSchemaException e ) { - throw new RuntimeException( "Not possible to create Transaction for Materialized View update", e ); + try { + Statement statement = transaction.createStatement(); + Collection> idAccessMap = new ArrayList<>(); + // Get a shared global schema lock (only DDLs acquire an exclusive global schema lock) + idAccessMap.add( Pair.of( LockManager.GLOBAL_LOCK, LockMode.SHARED ) ); + // Get locks for individual tables + EntityAccessMap accessMap = new EntityAccessMap( ((CatalogMaterializedView) catalogTable).getDefinition(), new HashMap<>() ); + idAccessMap.addAll( accessMap.getAccessedEntityPair() ); + LockManager.INSTANCE.lock( idAccessMap, (TransactionImpl) statement.getTransaction() ); + } catch ( DeadlockException e ) { + throw new RuntimeException( "DeadLock while locking for materialized view update", e ); } + updateData( transaction, materializedId ); + commitTransaction( transaction ); + updateMaterializedTime( materializedId ); } diff --git a/dbms/src/test/java/org/polypheny/db/TestHelper.java b/dbms/src/test/java/org/polypheny/db/TestHelper.java index 17c9a727b7..06e956e9f4 100644 --- a/dbms/src/test/java/org/polypheny/db/TestHelper.java +++ b/dbms/src/test/java/org/polypheny/db/TestHelper.java @@ -40,7 +40,6 @@ import java.util.Properties; import java.util.concurrent.TimeUnit; import java.util.function.BiConsumer; -import java.util.function.Consumer; import java.util.stream.Collectors; import kong.unirest.HttpRequest; import kong.unirest.HttpResponse; @@ -51,9 +50,6 @@ import org.junit.AfterClass; import org.junit.Assert; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.exceptions.GenericCatalogException; -import org.polypheny.db.catalog.exceptions.UnknownSchemaException; -import org.polypheny.db.catalog.exceptions.UnknownUserException; import org.polypheny.db.runtime.functions.Functions; import org.polypheny.db.transaction.Transaction; import org.polypheny.db.transaction.TransactionManager; @@ -82,17 +78,13 @@ private TestHelper() { log.info( "Starting Polypheny-DB..." 
); Runnable runnable = () -> { - try { - polyphenyDb.testMode = true; - String defaultStoreName = System.getProperty( "store.default" ); - if ( defaultStoreName != null ) { - polyphenyDb.defaultStoreName = defaultStoreName; - } - // polyphenyDb.resetCatalog = true; - polyphenyDb.runPolyphenyDb(); - } catch ( GenericCatalogException e ) { - log.error( "Exception while starting Polypheny-DB", e ); + polyphenyDb.testMode = true; + String defaultStoreName = System.getProperty( "store.default" ); + if ( defaultStoreName != null ) { + polyphenyDb.defaultStoreName = defaultStoreName; } + // polyphenyDb.resetCatalog = true; + polyphenyDb.runPolyphenyDb(); }; Thread thread = new Thread( runnable ); thread.start(); @@ -128,11 +120,7 @@ private TestHelper() { public Transaction getTransaction() { - try { - return transactionManager.startTransaction( Catalog.defaultUserId, true, "Test Helper" ); - } catch ( GenericCatalogException | UnknownUserException | UnknownSchemaException e ) { - throw new RuntimeException( "Error while starting transaction", e ); - } + return transactionManager.startTransaction( Catalog.defaultUserId, true, "Test Helper" ); } diff --git a/dbms/src/test/java/org/polypheny/db/mql/DdlTest.java b/dbms/src/test/java/org/polypheny/db/mql/DdlTest.java index ab87956d3f..5570ee246a 100644 --- a/dbms/src/test/java/org/polypheny/db/mql/DdlTest.java +++ b/dbms/src/test/java/org/polypheny/db/mql/DdlTest.java @@ -34,7 +34,6 @@ import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalCollection; -import org.polypheny.db.catalog.exceptions.UnknownSchemaException; import org.polypheny.db.catalog.logistic.Pattern; import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.excluded.CassandraExcluded; @@ -54,7 +53,7 @@ public void removeCollection() { @Test - public void addCollectionTest() throws UnknownSchemaException { + public void addCollectionTest() { Snapshot snapshot = Catalog.snapshot(); String name = "testCollection"; @@ -79,7 +78,7 @@ public void addCollectionTest() throws UnknownSchemaException { @Test - public void addPlacementTest() throws UnknownSchemaException, SQLException { + public void addPlacementTest() throws SQLException { Snapshot snapshot = Catalog.snapshot(); String placement = "store1"; @@ -112,7 +111,7 @@ public void addPlacementTest() throws UnknownSchemaException, SQLException { @Test - public void deletePlacementTest() throws UnknownSchemaException, SQLException { + public void deletePlacementTest() throws SQLException { Snapshot snapshot = Catalog.snapshot(); String placement = "store1"; diff --git a/dbms/src/test/java/org/polypheny/db/statistics/StatisticsTest.java b/dbms/src/test/java/org/polypheny/db/statistics/StatisticsTest.java index 74ca539d17..4889966de6 100644 --- a/dbms/src/test/java/org/polypheny/db/statistics/StatisticsTest.java +++ b/dbms/src/test/java/org/polypheny/db/statistics/StatisticsTest.java @@ -31,10 +31,7 @@ import org.polypheny.db.TestHelper; import org.polypheny.db.TestHelper.JdbcConnection; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalTable; -import org.polypheny.db.catalog.exceptions.UnknownSchemaException; -import org.polypheny.db.catalog.exceptions.UnknownTableException; import org.polypheny.db.catalog.snapshot.Snapshot; @@ -253,19 +250,16 @@ public void testSimpleRowCount() throws SQLException { REGION_TEST_DATA ); 
waiter.await( 20, TimeUnit.SECONDS ); - try { - Snapshot snapshot = Catalog.getInstance().getSnapshot(); - LogicalTable catalogTableNation = snapshot.rel().getTable( "statisticschema", "nation" ); - LogicalTable catalogTableRegion = snapshot.rel().getTable( "statisticschema", "region" ); - - Integer rowCountNation = StatisticsManager.getInstance().rowCountPerTable( catalogTableNation.id ); - Integer rowCountRegion = StatisticsManager.getInstance().rowCountPerTable( catalogTableRegion.id ); - - Assert.assertEquals( Integer.valueOf( 3 ), rowCountNation ); - Assert.assertEquals( Integer.valueOf( 2 ), rowCountRegion ); - } catch ( UnknownTableException | UnknownSchemaException e ) { - log.error( "Caught exception test", e ); - } + Snapshot snapshot = Catalog.getInstance().getSnapshot(); + LogicalTable catalogTableNation = snapshot.rel().getTable( "statisticschema", "nation" ); + LogicalTable catalogTableRegion = snapshot.rel().getTable( "statisticschema", "region" ); + + Integer rowCountNation = StatisticsManager.getInstance().rowCountPerTable( catalogTableNation.id ); + Integer rowCountRegion = StatisticsManager.getInstance().rowCountPerTable( catalogTableRegion.id ); + + Assert.assertEquals( Integer.valueOf( 3 ), rowCountNation ); + Assert.assertEquals( Integer.valueOf( 2 ), rowCountRegion ); + connection.commit(); } catch ( InterruptedException e ) { log.error( "Caught exception test", e ); @@ -335,7 +329,7 @@ private void assertStatisticsConvertTo( int maxSeconds, int target ) { log.warn( "Collection was already removed from the catalog, therefore the count will be null, which is correct" ); } - } catch ( UnknownTableException | UnknownSchemaException | InterruptedException e ) { + } catch ( InterruptedException e ) { log.error( "Caught exception test", e ); } } diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticQueryProcessor.java b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticQueryProcessor.java index 8798a80567..62a4831074 100644 --- a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticQueryProcessor.java +++ b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticQueryProcessor.java @@ -22,7 +22,6 @@ import java.util.stream.Collectors; import lombok.Getter; import lombok.extern.slf4j.Slf4j; -import org.apache.calcite.avatica.Meta.Pat; import org.polypheny.db.PolyImplementation; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.AlgRoot; @@ -31,8 +30,6 @@ import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalTable; -import org.polypheny.db.catalog.exceptions.UnknownSchemaException; -import org.polypheny.db.catalog.exceptions.UnknownUserException; import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.catalog.logistic.Pattern; @@ -44,7 +41,6 @@ import org.polypheny.db.transaction.Transaction.MultimediaFlavor; import org.polypheny.db.transaction.TransactionException; import org.polypheny.db.transaction.TransactionManager; -import org.polypheny.db.util.Pair; @Slf4j @@ -182,11 +178,7 @@ private StatisticResult executeColStat( AlgNode node, Transaction transaction, S private Transaction getTransaction() { - try { - return transactionManager.startTransaction( userId, false, "Statistics", MultimediaFlavor.FILE ); - } catch ( UnknownUserException | UnknownSchemaException e ) { 
- throw new RuntimeException( "Error while starting transaction", e ); - } + return transactionManager.startTransaction( userId, false, "Statistics", MultimediaFlavor.FILE ); } // ----------------------------------------------------------------------- diff --git a/plugins/cql-language/src/main/java/org/polypheny/db/cql/ColumnIndex.java b/plugins/cql-language/src/main/java/org/polypheny/db/cql/ColumnIndex.java index 39efd63ce2..0571628034 100644 --- a/plugins/cql-language/src/main/java/org/polypheny/db/cql/ColumnIndex.java +++ b/plugins/cql-language/src/main/java/org/polypheny/db/cql/ColumnIndex.java @@ -20,10 +20,6 @@ import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalColumn; -import org.polypheny.db.catalog.exceptions.UnknownColumnException; -import org.polypheny.db.catalog.exceptions.UnknownSchemaException; -import org.polypheny.db.catalog.exceptions.UnknownTableException; -import org.polypheny.db.cql.exception.UnknownIndexException; /** @@ -52,17 +48,12 @@ public ColumnIndex( } - public static ColumnIndex createIndex( String inDatabase, String schemaName, String tableName, String columnName ) throws UnknownIndexException { - try { - log.debug( "Creating ColumnIndex." ); - Catalog catalog = Catalog.getInstance(); - LogicalNamespace namespace = catalog.getSnapshot().getNamespace( schemaName ); - LogicalColumn column = catalog.getSnapshot().rel().getColumn( tableName, columnName ); - return new ColumnIndex( column, schemaName, tableName, columnName ); - } catch ( UnknownTableException | UnknownSchemaException | UnknownColumnException e ) { - log.error( "Cannot find a underlying column for the specified column name: {}.{}.{}.", schemaName, tableName, columnName, e ); - throw new UnknownIndexException( "Cannot find a underlying column for the specified column name: " + schemaName + "." + tableName + "." + columnName + "." ); - } + public static ColumnIndex createIndex( String inDatabase, String schemaName, String tableName, String columnName ) { + log.debug( "Creating ColumnIndex." 
); + Catalog catalog = Catalog.getInstance(); + LogicalNamespace namespace = catalog.getSnapshot().getNamespace( schemaName ); + LogicalColumn column = catalog.getSnapshot().rel().getColumn( tableName, columnName ); + return new ColumnIndex( column, schemaName, tableName, columnName ); } diff --git a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvTable.java b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvTable.java index 1cb1dd91b6..df45ef56e4 100644 --- a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvTable.java +++ b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvTable.java @@ -62,6 +62,7 @@ public abstract class CsvTable extends PhysicalTable { allocationTable.name, allocationTable.getNamespaceName(), allocationTable.getColumnNames(), + allocationTable.getColumnNames(), allocationTable.getColumns().values().stream().collect( Collectors.toMap( c -> c.id, c -> AlgDataTypeFactory.DEFAULT.createPolyType( c.type ) ) ), allocationTable.getColumnOrder() ); this.source = source; diff --git a/plugins/explore-by-example/src/main/java/org/polypheny/db/exploreByExample/ExploreQueryProcessor.java b/plugins/explore-by-example/src/main/java/org/polypheny/db/exploreByExample/ExploreQueryProcessor.java index 786e9a96db..0d0ccc9017 100644 --- a/plugins/explore-by-example/src/main/java/org/polypheny/db/exploreByExample/ExploreQueryProcessor.java +++ b/plugins/explore-by-example/src/main/java/org/polypheny/db/exploreByExample/ExploreQueryProcessor.java @@ -27,8 +27,6 @@ import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.exceptions.UnknownSchemaException; -import org.polypheny.db.catalog.exceptions.UnknownUserException; import org.polypheny.db.config.RuntimeConfig; import org.polypheny.db.iface.Authenticator; import org.polypheny.db.languages.QueryLanguage; @@ -64,11 +62,7 @@ public ExploreQueryProcessor( final TransactionManager transactionManager, Authe private Transaction getTransaction() { - try { - return transactionManager.startTransaction( userId, false, "Explore-by-Example", MultimediaFlavor.FILE ); - } catch ( UnknownUserException | UnknownSchemaException e ) { - throw new RuntimeException( "Error while starting transaction", e ); - } + return transactionManager.startTransaction( userId, false, "Explore-by-Example", MultimediaFlavor.FILE ); } diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcEntity.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcEntity.java index 84e56ec413..17cd4e35cb 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcEntity.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcEntity.java @@ -114,6 +114,7 @@ public JdbcEntity( getPhysicalTableName( jdbcSchema.adapter, logicalTable, allocationTable ), getPhysicalSchemaName( jdbcSchema.adapter ), getPhysicalColumnNames( jdbcSchema.adapter, allocationTable ), + allocationTable.getColumnNames(), allocationTable.getColumnTypes(), allocationTable.getColumnOrder() ); this.logical = logicalTable; diff --git a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlCreateCollection.java b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlCreateCollection.java index 001ff15d8e..68df25d64e 100644 --- 
a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlCreateCollection.java +++ b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlCreateCollection.java @@ -21,7 +21,6 @@ import org.bson.BsonDocument; import org.polypheny.db.adapter.AdapterManager; import org.polypheny.db.adapter.DataStore; -import org.polypheny.db.catalog.exceptions.EntityAlreadyExistsException; import org.polypheny.db.catalog.logistic.PlacementType; import org.polypheny.db.ddl.DdlManager; import org.polypheny.db.languages.ParserPos; @@ -67,21 +66,17 @@ public void execute( Context context, Statement statement, QueryParameters param PlacementType placementType = PlacementType.AUTOMATIC; - try { - List dataStores = stores - .stream() - .map( store -> (DataStore) adapterManager.getAdapter( store ) ) - .collect( Collectors.toList() ); - DdlManager.getInstance().createCollection( - schemaId, - name, - true, - dataStores.size() == 0 ? null : dataStores, - placementType, - statement ); - } catch ( EntityAlreadyExistsException e ) { - throw new RuntimeException( "The generation of the collection was not possible, due to: " + e.getMessage() ); - } + List dataStores = stores + .stream() + .map( store -> (DataStore) adapterManager.getAdapter( store ) ) + .collect( Collectors.toList() ); + DdlManager.getInstance().createCollection( + schemaId, + name, + true, + dataStores.size() == 0 ? null : dataStores, + placementType, + statement ); } } diff --git a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlCreateView.java b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlCreateView.java index aa63140e92..16967eeb27 100644 --- a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlCreateView.java +++ b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlCreateView.java @@ -21,9 +21,6 @@ import org.polypheny.db.algebra.AlgCollation; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.AlgRoot; -import org.polypheny.db.catalog.exceptions.EntityAlreadyExistsException; -import org.polypheny.db.catalog.exceptions.GenericCatalogException; -import org.polypheny.db.catalog.exceptions.UnknownColumnException; import org.polypheny.db.catalog.logistic.PlacementType; import org.polypheny.db.ddl.DdlManager; import org.polypheny.db.languages.ParserPos; @@ -70,21 +67,18 @@ public void execute( Context context, Statement statement, QueryParameters param AlgNode algNode = algRoot.alg; AlgCollation algCollation = algRoot.collation; - try { - DdlManager.getInstance().createView( - name, - schemaId, - algNode, - algCollation, - true, - statement, - placementType, - algRoot.alg.getRowType().getFieldNames(), - buildQuery(), - QueryLanguage.from( "mongo" ) ); - } catch ( EntityAlreadyExistsException | GenericCatalogException | UnknownColumnException e ) { - throw new RuntimeException( e ); - } // we just added the table/column, so it has to exist, or we have an internal problem + DdlManager.getInstance().createView( + name, + schemaId, + algNode, + algCollation, + true, + statement, + placementType, + algRoot.alg.getRowType().getFieldNames(), + buildQuery(), + QueryLanguage.from( "mongo" ) ); + } diff --git a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlRenameCollection.java b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlRenameCollection.java index 18108eba22..076d36ae4a 100644 --- a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlRenameCollection.java +++ 
b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlRenameCollection.java @@ -18,9 +18,7 @@ import java.util.List; import java.util.Optional; -import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalTable; -import org.polypheny.db.catalog.exceptions.EntityAlreadyExistsException; import org.polypheny.db.ddl.DdlManager; import org.polypheny.db.ddl.exception.DdlOnSourceException; import org.polypheny.db.languages.ParserPos; @@ -76,7 +74,7 @@ public void execute( Context context, Statement statement, QueryParameters param } DdlManager.getInstance().renameTable( table.get(), newName, statement ); - } catch ( DdlOnSourceException | EntityAlreadyExistsException e ) { + } catch ( DdlOnSourceException e ) { throw new RuntimeException( "The rename was not successful, due to an error: " + e.getMessage() ); } } diff --git a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlUseDatabase.java b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlUseDatabase.java index 948377dabd..4809322fa6 100644 --- a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlUseDatabase.java +++ b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlUseDatabase.java @@ -18,7 +18,6 @@ import lombok.Getter; import org.polypheny.db.catalog.logistic.NamespaceType; -import org.polypheny.db.catalog.exceptions.NamespaceAlreadyExistsException; import org.polypheny.db.ddl.DdlManager; import org.polypheny.db.languages.ParserPos; import org.polypheny.db.languages.QueryParameters; @@ -42,11 +41,7 @@ public MqlUseDatabase( ParserPos pos, String database ) { @Override public void execute( Context context, Statement statement, QueryParameters parameters ) { - try { - DdlManager.getInstance().createNamespace( this.database, NamespaceType.DOCUMENT, true, false ); - } catch ( NamespaceAlreadyExistsException e ) { - throw new RuntimeException( "The schema creation failed" ); - } + DdlManager.getInstance().createNamespace( this.database, NamespaceType.DOCUMENT, true, false ); } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java index aecaf15041..da2f954d89 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java @@ -25,6 +25,7 @@ import java.util.Collections; import java.util.List; import java.util.Map; +import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import lombok.Getter; import lombok.extern.slf4j.Slf4j; @@ -51,7 +52,6 @@ import org.polypheny.db.catalog.entity.allocation.AllocationEntity; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.entity.physical.PhysicalEntity; -import org.polypheny.db.catalog.exceptions.UnknownAdapterException; import org.polypheny.db.catalog.logical.DocumentCatalog; import org.polypheny.db.catalog.logical.GraphCatalog; import org.polypheny.db.catalog.logical.RelationalCatalog; @@ -149,7 +149,7 @@ public PolyCatalog( /** * Fills the catalog database with default data, skips if data is already inserted */ - private void insertDefaultData() throws UnknownAdapterException { + private void insertDefaultData() { ////////////// // init users @@ -270,7 +270,7 @@ private void addDefaultCsvColumn( CatalogAdapter csv, LogicalTable table, String updateSnapshot(); AllocationEntity alloc; if ( 
!getSnapshot().alloc().adapterHasPlacement( csv.id, table.id ) ) { - alloc = getAllocRel( table.namespaceId ).createAlloctionTable( csv.id, table.id ); + alloc = getAllocRel( table.namespaceId ).createAllocationTable( csv.id, table.id ); } else { alloc = getSnapshot().alloc().getAllocation( csv.id, table.id ); } @@ -301,8 +301,8 @@ private void addDefaultColumn( CatalogAdapter adapter, LogicalTable table, Strin @Override public void updateSnapshot() { // reset physical catalogs - // Set keys = this.physicalCatalogs.keySet(); - // keys.forEach( k -> this.physicalCatalogs.replace( k, new PolyPhysicalCatalog() ) ); + Set keys = this.physicalCatalogs.keySet(); + keys.forEach( k -> this.physicalCatalogs.replace( k, new PolyPhysicalCatalog() ) ); // generate new physical entities, atm only relational this.allocationCatalogs.forEach( ( k, v ) -> { @@ -312,9 +312,11 @@ public void updateSnapshot() { if ( adapter.getCurrentSchema() == null || adapter.getCurrentSchema().getId() != v2.namespaceId ) { adapter.createNewSchema( getSnapshot(), v2.name, v2.namespaceId ); - getPhysical( v2.namespaceId ).addNamespace( adapter.getAdapterId(), adapter.getCurrentSchema() ); } + // Re-add the physical namespace; we could check first, but it is not necessary + getPhysical( v2.namespaceId ).addNamespace( adapter.getAdapterId(), adapter.getCurrentSchema() ); + LogicalTable table = getSnapshot().getLogicalEntity( v2.logicalId ).unwrap( LogicalTable.class ); List physicals = AdapterManager.getInstance().getAdapter( v2.adapterId ).createAdapterTable( idBuilder, table, v2 ); getPhysical( table.namespaceId ).addEntities( physicals ); diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocDocCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocDocCatalog.java index 6f5c4ad8b2..5518adf299 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocDocCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocDocCatalog.java @@ -17,16 +17,12 @@ package org.polypheny.db.catalog.allocation; import io.activej.serializer.BinarySerializer; -import java.util.List; import java.util.concurrent.ConcurrentHashMap; import lombok.Getter; -import org.polypheny.db.adapter.DataStore; -import org.polypheny.db.catalog.PolyCatalog; import org.polypheny.db.catalog.Serializable; import org.polypheny.db.catalog.catalogs.AllocationDocumentCatalog; import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.allocation.AllocationCollection; -import org.polypheny.db.catalog.exceptions.GenericCatalogException; import org.polypheny.db.catalog.logistic.PlacementType; public class PolyAllocDocCatalog implements Serializable, AllocationDocumentCatalog { @@ -53,12 +49,6 @@ public PolyAllocDocCatalog copy() { } - @Override - public long addCollectionLogistics( long schemaId, String name, List stores, boolean onlyPlacement ) throws GenericCatalogException { - return 0; - } - - @Override public void addCollectionPlacement( long namespaceId, long adapterId, long id, PlacementType placementType ) { diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocRelCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocRelCatalog.java index 09c8af6705..e0fef8b775 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocRelCatalog.java +++
b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocRelCatalog.java @@ -27,10 +27,8 @@ import org.polypheny.db.catalog.IdBuilder; import org.polypheny.db.catalog.Serializable; import org.polypheny.db.catalog.catalogs.AllocationRelationalCatalog; -import org.polypheny.db.catalog.entity.CatalogDataPlacement; import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.allocation.AllocationTable; -import org.polypheny.db.catalog.exceptions.GenericCatalogException; import org.polypheny.db.catalog.logistic.DataPlacementRole; import org.polypheny.db.catalog.logistic.PartitionType; import org.polypheny.db.catalog.logistic.PlacementType; @@ -82,14 +80,12 @@ public AllocationTable addColumnPlacement( long allocationId, long columnId, Pla } - @Override public void deleteColumnPlacement( long allocationId, long columnId, boolean columnOnly ) { tables.put( allocationId, tables.get( allocationId ).withRemovedColumn( columnId ) ); } - @Override public void updateColumnPlacementType( long adapterId, long columnId, PlacementType placementType ) { @@ -115,7 +111,7 @@ public void updateColumnPlacementPhysicalNames( long adapterId, long columnId, S @Override - public long addPartitionGroup( long tableId, String partitionGroupName, long schemaId, PartitionType partitionType, long numberOfInternalPartitions, List effectivePartitionGroupQualifier, boolean isUnbound ) throws GenericCatalogException { + public long addPartitionGroup( long tableId, String partitionGroupName, long schemaId, PartitionType partitionType, long numberOfInternalPartitions, List effectivePartitionGroupQualifier, boolean isUnbound ) { return 0; } @@ -127,7 +123,7 @@ public void deletePartitionGroup( long tableId, long schemaId, long partitionGro @Override - public long addPartition( long tableId, long schemaId, long partitionGroupId, List effectivePartitionGroupQualifier, boolean isUnbound ) throws GenericCatalogException { + public long addPartition( long tableId, long schemaId, long partitionGroupId, List effectivePartitionGroupQualifier, boolean isUnbound ) { return 0; } @@ -150,30 +146,12 @@ public void mergeTable( long tableId ) { } - @Override - public void updateTablePartitionProperties( long tableId, PartitionProperty partitionProperty ) { - - } - - @Override public void updatePartitionGroup( long partitionGroupId, List partitionIds ) { } - @Override - public void addPartitionToGroup( long partitionGroupId, Long partitionId ) { - - } - - - @Override - public void removePartitionFromGroup( long partitionGroupId, Long partitionId ) { - - } - - @Override public void updatePartition( long partitionId, Long partitionGroupId ) { @@ -193,7 +171,7 @@ public void addPartitionPlacement( long namespaceId, long adapterId, long tableI @Override - public AllocationTable createAlloctionTable( long adapterId, long tableId ) { + public AllocationTable createAllocationTable( long adapterId, long tableId ) { long id = idBuilder.getNewAllocId(); AllocationTable table = new AllocationTable( id, tableId, namespace.id, adapterId, List.of() ); tables.put( id, table ); @@ -201,18 +179,6 @@ public AllocationTable createAlloctionTable( long adapterId, long tableId ) { } - @Override - public CatalogDataPlacement addDataPlacementIfNotExists( long adapterId, long tableId ) { - return null; - } - - - @Override - public void modifyDataPlacement( long adapterId, long tableId, CatalogDataPlacement catalogDataPlacement ) { - - } - - @Override public void deleteAllocation( long adapterId, long tableId 
) { @@ -225,48 +191,6 @@ public void deleteAllocation( long allocId ) { } - @Override - public void addSingleDataPlacementToTable( long adapterId, long tableId ) { - - } - - - @Override - public void removeSingleDataPlacementFromTable( long adapterId, long tableId ) { - - } - - - @Override - public void updateDataPlacementsOnTable( long tableId, List newDataPlacements ) { - - } - - - @Override - public void addColumnsToDataPlacement( long adapterId, long tableId, List columnIds ) { - - } - - - @Override - public void removeColumnsFromDataPlacement( long adapterId, long tableId, List columnIds ) { - - } - - - @Override - public void addPartitionsToDataPlacement( long adapterId, long tableId, List partitionIds ) { - - } - - - @Override - public void removePartitionsFromDataPlacement( long adapterId, long tableId, List partitionIds ) { - - } - - @Override public void updateDataPlacement( long adapterId, long tableId, List columnIds, List partitionIds ) { @@ -278,16 +202,4 @@ public void deletePartitionPlacement( long adapterId, long partitionId ) { } - - @Override - public void addTableToPeriodicProcessing( long tableId ) { - - } - - - @Override - public void removeTableFromPeriodicProcessing( long tableId ) { - - } - } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/DocumentCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/DocumentCatalog.java index c7ec47bc76..5270b4ff6a 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/DocumentCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/DocumentCatalog.java @@ -19,7 +19,6 @@ import io.activej.serializer.BinarySerializer; import io.activej.serializer.annotations.Deserialize; import io.activej.serializer.annotations.Serialize; -import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import lombok.Builder; @@ -27,9 +26,7 @@ import lombok.Value; import lombok.experimental.NonFinal; import lombok.experimental.SuperBuilder; -import org.polypheny.db.adapter.DataStore; import org.polypheny.db.catalog.IdBuilder; -import org.polypheny.db.catalog.PolyCatalog; import org.polypheny.db.catalog.PusherMap; import org.polypheny.db.catalog.Serializable; import org.polypheny.db.catalog.catalogs.LogicalCatalog; @@ -91,11 +88,6 @@ public void deleteCollection( long id ) { } - @Override - public long addCollectionLogistics( String name, List stores, boolean placementOnly ) { - return 0; - } - @Override public LogicalCatalog withLogicalNamespace( LogicalNamespace namespace ) { diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/GraphCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/GraphCatalog.java index b4d7cd3f41..ab1e8ae987 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/GraphCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/GraphCatalog.java @@ -27,15 +27,11 @@ import lombok.experimental.SuperBuilder; import org.polypheny.db.adapter.DataStore; import org.polypheny.db.catalog.IdBuilder; -import org.polypheny.db.catalog.PolyCatalog; import org.polypheny.db.catalog.Serializable; import org.polypheny.db.catalog.catalogs.LogicalCatalog; import org.polypheny.db.catalog.catalogs.LogicalGraphCatalog; import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalGraph; -import 
org.polypheny.db.catalog.exceptions.GenericCatalogException; -import org.polypheny.db.catalog.exceptions.UnknownColumnException; -import org.polypheny.db.catalog.exceptions.UnknownTableException; @Value @SuperBuilder(toBuilder = true) @@ -102,10 +98,4 @@ public void deleteGraph( long id ) { } - - @Override - public void addGraphLogistics( long id, List stores, boolean onlyPlacement ) throws GenericCatalogException, UnknownTableException, UnknownColumnException { - - } - } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/RelationalCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/RelationalCatalog.java index 842a8ac190..8f889a7c2e 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/RelationalCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/RelationalCatalog.java @@ -55,7 +55,7 @@ import org.polypheny.db.catalog.entity.MaterializedCriteria; import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalTable; -import org.polypheny.db.catalog.exceptions.GenericCatalogException; +import org.polypheny.db.catalog.exceptions.GenericRuntimeException; import org.polypheny.db.catalog.logistic.Collation; import org.polypheny.db.catalog.logistic.ConstraintType; import org.polypheny.db.catalog.logistic.EntityType; @@ -169,7 +169,7 @@ public long addView( String name, long namespaceId, EntityType entityType, boole @Override - public long addMaterializedView( String name, long namespaceId, EntityType entityType, boolean modifiable, AlgNode definition, AlgCollation algCollation, Map> underlyingTables, AlgDataType fieldList, MaterializedCriteria materializedCriteria, String query, QueryLanguage language, boolean ordered ) throws GenericCatalogException { + public long addMaterializedView( String name, long namespaceId, EntityType entityType, boolean modifiable, AlgNode definition, AlgCollation algCollation, Map> underlyingTables, AlgDataType fieldList, MaterializedCriteria materializedCriteria, String query, QueryLanguage language, boolean ordered ) { throw new NotImplementedException(); } @@ -198,7 +198,7 @@ public void setPrimaryKey( long tableId, Long keyId ) { @Override - public long addIndex( long tableId, List columnIds, boolean unique, String method, String methodDisplayName, long adapterId, IndexType type, String indexName ) throws GenericCatalogException { + public long addIndex( long tableId, List columnIds, boolean unique, String method, String methodDisplayName, long adapterId, IndexType type, String indexName ) { long keyId = getOrAddKey( tableId, columnIds, EnforcementTime.ON_QUERY ); if ( unique ) { // TODO: Check if the current values are unique @@ -227,28 +227,20 @@ private long getOrAddKey( long tableId, List columnIds, EnforcementTime en if ( keyId != null ) { return keyId; } - try { - return addKey( tableId, columnIds, enforcementTime ); - } catch ( GenericCatalogException e ) { - throw new RuntimeException( e ); - } + return addKey( tableId, columnIds, enforcementTime ); } - private long addKey( long tableId, List columnIds, EnforcementTime enforcementTime ) throws GenericCatalogException { - try { - LogicalTable table = Objects.requireNonNull( tables.get( tableId ) ); - long id = idBuilder.getNewKeyId(); - CatalogKey key = new CatalogKey( id, table.id, table.namespaceId, columnIds, enforcementTime ); - synchronized ( this ) { - keys.put( id, key ); - keyColumns.put( columnIds.stream().mapToLong( 
Long::longValue ).toArray(), id ); - } - listeners.firePropertyChange( "key", null, key ); - return id; - } catch ( NullPointerException e ) { - throw new GenericCatalogException( e ); + private long addKey( long tableId, List columnIds, EnforcementTime enforcementTime ) { + LogicalTable table = Objects.requireNonNull( tables.get( tableId ) ); + long id = idBuilder.getNewKeyId(); + CatalogKey key = new CatalogKey( id, table.id, table.namespaceId, columnIds, enforcementTime ); + synchronized ( this ) { + keys.put( id, key ); + keyColumns.put( columnIds.stream().mapToLong( Long::longValue ).toArray(), id ); } + listeners.firePropertyChange( "key", null, key ); + return id; } @@ -286,7 +278,7 @@ public void setColumnPosition( long columnId, int position ) { @Override - public void setColumnType( long columnId, PolyType type, PolyType collectionsType, Integer length, Integer scale, Integer dimension, Integer cardinality ) throws GenericCatalogException { + public void setColumnType( long columnId, PolyType type, PolyType collectionsType, Integer length, Integer scale, Integer dimension, Integer cardinality ) { if ( scale != null && scale > length ) { throw new RuntimeException( "Invalid scale! Scale can not be larger than length." ); } @@ -296,7 +288,7 @@ public void setColumnType( long columnId, PolyType type, PolyType collectionsTyp @Override - public void setNullable( long columnId, boolean nullable ) throws GenericCatalogException { + public void setNullable( long columnId, boolean nullable ) { columns.put( columnId, columns.get( columnId ).toBuilder().nullable( nullable ).build() ); } @@ -326,9 +318,9 @@ public void deleteDefaultValue( long columnId ) { @Override - public void addPrimaryKey( long tableId, List columnIds ) throws GenericCatalogException { + public void addPrimaryKey( long tableId, List columnIds ) { if ( columnIds.stream().anyMatch( id -> columns.get( id ).nullable ) ) { - throw new GenericCatalogException( "Primary key is not allowed to use nullable columns." ); + throw new GenericRuntimeException( "Primary key is not allowed to use nullable columns." ); } // TODO: Check if the current values are unique @@ -340,7 +332,7 @@ public void addPrimaryKey( long tableId, List columnIds ) throws GenericCa // CatalogCombinedKey combinedKey = getCombinedKey( table.primaryKey ); if ( getKeyUniqueCount( table.primaryKey ) == 1 && isForeignKey( table.primaryKey ) ) { // This primary key is the only constraint for the uniqueness of this key. - throw new GenericCatalogException( "This key is referenced by at least one foreign key which requires this key to be unique. To drop this primary key, first drop the foreign keys or create a unique constraint." ); + throw new GenericRuntimeException( "This key is referenced by at least one foreign key which requires this key to be unique. To drop this primary key, first drop the foreign keys or create a unique constraint." 
); } synchronized ( this ) { setPrimaryKey( tableId, null ); @@ -415,7 +407,7 @@ private int getKeyUniqueCount( long keyId ) { @Override - public void addForeignKey( long tableId, List columnIds, long referencesTableId, List referencesIds, String constraintName, ForeignKeyOption onUpdate, ForeignKeyOption onDelete ) throws GenericCatalogException { + public void addForeignKey( long tableId, List columnIds, long referencesTableId, List referencesIds, String constraintName, ForeignKeyOption onUpdate, ForeignKeyOption onDelete ) { LogicalTable table = tables.get( tableId ); List childKeys = keys.values().stream().filter( k -> k.tableId == referencesTableId ).collect( Collectors.toList() ); @@ -427,7 +419,7 @@ public void addForeignKey( long tableId, List columnIds, long referencesTa LogicalColumn referencingColumn = columns.get( columnIds.get( i++ ) ); LogicalColumn referencedColumn = columns.get( referencedColumnId ); if ( referencedColumn.type != referencingColumn.type ) { - throw new GenericCatalogException( "The data type of the referenced columns does not match the data type of the referencing column: " + referencingColumn.type.name() + " != " + referencedColumn.type ); + throw new GenericRuntimeException( "The data type of the referenced columns does not match the data type of the referencing column: %s != %s", referencingColumn.type.name(), referencedColumn.type ); } } // TODO same keys for key and foreign key @@ -457,14 +449,14 @@ public void addForeignKey( long tableId, List columnIds, long referencesTa @Override - public void addUniqueConstraint( long tableId, String constraintName, List columnIds ) throws GenericCatalogException { + public void addUniqueConstraint( long tableId, String constraintName, List columnIds ) { long keyId = getOrAddKey( tableId, columnIds, EnforcementTime.ON_QUERY ); // Check if there is already a unique constraint List catalogConstraints = constraints.values().stream() .filter( c -> c.keyId == keyId && c.type == ConstraintType.UNIQUE ) .collect( Collectors.toList() ); if ( catalogConstraints.size() > 0 ) { - throw new GenericCatalogException( "There is already a unique constraint!" ); + throw new GenericRuntimeException( "There is already a unique constraint!" ); } long id = idBuilder.getNewConstraintId(); synchronized ( this ) { @@ -474,7 +466,7 @@ public void addUniqueConstraint( long tableId, String constraintName, List @Override - public void deletePrimaryKey( long tableId ) throws GenericCatalogException { + public void deletePrimaryKey( long tableId ) { LogicalTable table = tables.get( tableId ); // TODO: Check if the currently stored values are unique @@ -483,7 +475,7 @@ public void deletePrimaryKey( long tableId ) throws GenericCatalogException { // CatalogCombinedKey key = getCombinedKey( table.primaryKey ); if ( isForeignKey( table.primaryKey ) ) { if ( getKeyUniqueCount( table.primaryKey ) < 2 ) { - throw new GenericCatalogException( "This key is referenced by at least one foreign key which requires this key to be unique. To drop this primary key either drop the foreign key or create a unique constraint." ); + throw new GenericRuntimeException( "This key is referenced by at least one foreign key which requires this key to be unique. To drop this primary key either drop the foreign key or create a unique constraint." 
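// The GenericRuntimeException calls introduced in this file pass a format string plus arguments
// ("... %s != %s", a, b) rather than pre-concatenated text. The exact constructor of the project's
// GenericRuntimeException is not shown in this patch; a minimal sketch of an exception type with
// the String.format-based varargs constructor this usage implies (illustrative name, assumed behavior):

class FormattedRuntimeException extends RuntimeException {

    FormattedRuntimeException( String format, Object... args ) {
        super( args.length == 0 ? format : String.format( format, args ) );
    }

}

// Usage mirroring the hunks above:
// throw new FormattedRuntimeException( "The data type of the referenced columns does not match the data type of the referencing column: %s != %s", referencingType, referencedType );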
); } } @@ -494,7 +486,7 @@ public void deletePrimaryKey( long tableId ) throws GenericCatalogException { @Override - public void deleteForeignKey( long foreignKeyId ) throws GenericCatalogException { + public void deleteForeignKey( long foreignKeyId ) { CatalogForeignKey catalogForeignKey = (CatalogForeignKey) keys.get( foreignKeyId ); synchronized ( this ) { keys.remove( catalogForeignKey.id ); @@ -504,12 +496,12 @@ public void deleteForeignKey( long foreignKeyId ) throws GenericCatalogException @Override - public void deleteConstraint( long constraintId ) throws GenericCatalogException { + public void deleteConstraint( long constraintId ) { CatalogConstraint catalogConstraint = Objects.requireNonNull( constraints.get( constraintId ) ); //CatalogCombinedKey key = getCombinedKey( catalogConstraint.keyId ); if ( catalogConstraint.type == ConstraintType.UNIQUE && isForeignKey( catalogConstraint.keyId ) ) { if ( getKeyUniqueCount( catalogConstraint.keyId ) < 2 ) { - throw new GenericCatalogException( "This key is referenced by at least one foreign key which requires this key to be unique. Unable to drop unique constraint." ); + throw new GenericRuntimeException( "This key is referenced by at least one foreign key which requires this key to be unique. Unable to drop unique constraint." ); } } synchronized ( this ) { diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/physical/PolyPhysicalCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/physical/PolyPhysicalCatalog.java index 86dded6330..1dfe8f66d0 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/physical/PolyPhysicalCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/physical/PolyPhysicalCatalog.java @@ -50,12 +50,6 @@ PhysicalEntity getPhysicalEntity( long id ) { } - @Override - public void addPhysicalEntity( PhysicalEntity physicalEntity ) { - physicals.put( physicalEntity.id, physicalEntity ); - } - - @Override public void addEntities( List physicals ) { physicals.forEach( p -> this.physicals.put( p.id, p ) ); diff --git a/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/RequestParser.java b/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/RequestParser.java index 3280f24da2..47c8a47710 100644 --- a/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/RequestParser.java +++ b/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/RequestParser.java @@ -47,9 +47,6 @@ import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalTable; -import org.polypheny.db.catalog.exceptions.UnknownColumnException; -import org.polypheny.db.catalog.exceptions.UnknownSchemaException; -import org.polypheny.db.catalog.exceptions.UnknownTableException; import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.iface.AuthenticationException; import org.polypheny.db.iface.Authenticator; @@ -328,15 +325,8 @@ List generateRequestColumnsWithProject( String projectionString, Matcher matcher = PROJECTION_ENTRY_PATTERN.matcher( projectionToParse ); if ( matcher.find() ) { String columnName = matcher.group( "column" ); - LogicalColumn logicalColumn; - - try { - logicalColumn = this.getCatalogColumnFromString( columnName ); - log.debug( "Fetched catalog column for projection key: {}.", columnName ); - } catch ( UnknownColumnException | UnknownSchemaException | UnknownTableException e ) { - log.warn( "Unable 
to fetch column: {}.", columnName, e ); - throw new ParserException( ParserErrorCode.PROJECTION_MALFORMED, columnName ); - } + LogicalColumn logicalColumn = this.getCatalogColumnFromString( columnName ); + log.debug( "Fetched catalog column for projection key: {}.", columnName ); if ( !validColumns.contains( logicalColumn.id ) ) { log.warn( "Column isn't valid. Column: {}.", columnName ); @@ -408,7 +398,7 @@ AggFunction decodeAggregateFunction( String function ) { } - private LogicalColumn getCatalogColumnFromString( String name ) throws ParserException, UnknownColumnException, UnknownSchemaException, UnknownTableException { + private LogicalColumn getCatalogColumnFromString( String name ) throws ParserException { String[] splitString = name.split( "\\." ); if ( splitString.length != 3 ) { log.warn( "Column name is not 3 fields long. Got: {}", name ); diff --git a/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/Rest.java b/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/Rest.java index 927150bdf2..4766874347 100644 --- a/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/Rest.java +++ b/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/Rest.java @@ -47,10 +47,7 @@ import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalTable; -import org.polypheny.db.catalog.exceptions.UnknownSchemaException; -import org.polypheny.db.catalog.exceptions.UnknownUserException; import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.languages.OperatorRegistry; import org.polypheny.db.nodes.Operator; @@ -549,11 +546,7 @@ AlgBuilder sort( AlgBuilder algBuilder, RexBuilder rexBuilder, List getCatalogColumn( catalogTable.namespaceId, catalogTable.id, (SqlIdentifier) c ).id ) - .collect( Collectors.toList() ), - partitionGroupList, - partitionGroupNamesList.stream() - .map( SqlIdentifier::toString ) - .collect( Collectors.toList() ), - storeInstance, - statement ); - } catch ( PlacementNotExistsException e ) { - throw CoreUtil.newContextException( - storeName.getPos(), - RESOURCE.placementDoesNotExist( storeName.getSimple(), catalogTable.name ) ); - } catch ( IndexPreventsRemovalException e ) { - throw CoreUtil.newContextException( - storeName.getPos(), - RESOURCE.indexPreventsRemovalOfPlacement( e.getIndexName(), e.getColumnName() ) ); - } catch ( LastPlacementException e ) { - throw CoreUtil.newContextException( - storeName.getPos(), - RESOURCE.onlyOnePlacementLeft() ); - } + DdlManager.getInstance().modifyDataPlacement( + catalogTable, + columnList.getList().stream() + .map( c -> getCatalogColumn( catalogTable.namespaceId, catalogTable.id, (SqlIdentifier) c ).id ) + .collect( Collectors.toList() ), + partitionGroupList, + partitionGroupNamesList.stream() + .map( SqlIdentifier::toString ) + .collect( Collectors.toList() ), + storeInstance, + statement ); } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyPlacementAddColumn.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyPlacementAddColumn.java index ad86afa64f..1b969f2fec 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyPlacementAddColumn.java +++ 
b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyPlacementAddColumn.java @@ -17,18 +17,12 @@ package org.polypheny.db.sql.language.ddl.altertable; -import static org.polypheny.db.util.Static.RESOURCE; - import java.util.List; import java.util.Objects; import org.polypheny.db.adapter.DataStore; -import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.entity.logical.LogicalTable; -import org.polypheny.db.catalog.exceptions.UnknownAdapterException; +import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.ddl.DdlManager; -import org.polypheny.db.ddl.exception.ColumnNotExistsException; -import org.polypheny.db.ddl.exception.PlacementAlreadyExistsException; -import org.polypheny.db.ddl.exception.PlacementNotExistsException; import org.polypheny.db.languages.ParserPos; import org.polypheny.db.languages.QueryParameters; import org.polypheny.db.nodes.Node; @@ -38,7 +32,6 @@ import org.polypheny.db.sql.language.SqlWriter; import org.polypheny.db.sql.language.ddl.SqlAlterTable; import org.polypheny.db.transaction.Statement; -import org.polypheny.db.util.CoreUtil; import org.polypheny.db.util.ImmutableNullableList; @@ -101,29 +94,11 @@ public void execute( Context context, Statement statement, QueryParameters param throw new RuntimeException( "Not possible to use ALTER TABLE because " + catalogTable.name + " is not a table." ); } - try { - DdlManager.getInstance().addColumnPlacement( - catalogTable, - columnName.getSimple(), - storeInstance, - statement ); - } catch ( UnknownAdapterException e ) { - throw CoreUtil.newContextException( - storeName.getPos(), - RESOURCE.unknownAdapter( storeName.getSimple() ) ); - } catch ( PlacementNotExistsException e ) { - throw CoreUtil.newContextException( - storeName.getPos(), - RESOURCE.placementDoesNotExist( storeName.getSimple(), catalogTable.name ) ); - } catch ( PlacementAlreadyExistsException e ) { - throw CoreUtil.newContextException( - storeName.getPos(), - RESOURCE.placementAlreadyExists( catalogTable.name, storeName.getSimple() ) ); - } catch ( ColumnNotExistsException e ) { - throw CoreUtil.newContextException( - columnName.getPos(), - RESOURCE.columnNotFoundInTable( e.columnName, e.tableName ) ); - } + DdlManager.getInstance().addColumnPlacement( + catalogTable, + columnName.getSimple(), + storeInstance, + statement ); } } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyPlacementDropColumn.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyPlacementDropColumn.java index 9477a13d83..e5b7a735a7 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyPlacementDropColumn.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyPlacementDropColumn.java @@ -17,20 +17,12 @@ package org.polypheny.db.sql.language.ddl.altertable; -import static org.polypheny.db.util.Static.RESOURCE; - import java.util.List; import java.util.Objects; import org.polypheny.db.adapter.DataStore; -import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.entity.logical.LogicalTable; -import org.polypheny.db.catalog.exceptions.UnknownAdapterException; +import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.ddl.DdlManager; -import org.polypheny.db.ddl.exception.ColumnNotExistsException; -import 
org.polypheny.db.ddl.exception.IndexPreventsRemovalException; -import org.polypheny.db.ddl.exception.LastPlacementException; -import org.polypheny.db.ddl.exception.PlacementIsPrimaryException; -import org.polypheny.db.ddl.exception.PlacementNotExistsException; import org.polypheny.db.languages.ParserPos; import org.polypheny.db.languages.QueryParameters; import org.polypheny.db.nodes.Node; @@ -40,7 +32,6 @@ import org.polypheny.db.sql.language.SqlWriter; import org.polypheny.db.sql.language.ddl.SqlAlterTable; import org.polypheny.db.transaction.Statement; -import org.polypheny.db.util.CoreUtil; import org.polypheny.db.util.ImmutableNullableList; @@ -99,38 +90,11 @@ public void execute( Context context, Statement statement, QueryParameters param throw new RuntimeException( "Not possible to use ALTER TABLE because " + catalogTable.name + " is not a table." ); } - try { - DdlManager.getInstance().dropColumnPlacement( - catalogTable, - columnName.getSimple(), - storeInstance, - statement ); - } catch ( UnknownAdapterException e ) { - throw CoreUtil.newContextException( - storeName.getPos(), - RESOURCE.unknownAdapter( storeName.getSimple() ) ); - } catch ( PlacementNotExistsException e ) { - throw CoreUtil.newContextException( - storeName.getPos(), - RESOURCE.placementDoesNotExist( storeName.getSimple(), catalogTable.name ) ); - } catch ( IndexPreventsRemovalException e ) { - throw CoreUtil.newContextException( - storeName.getPos(), - RESOURCE.indexPreventsRemovalOfPlacement( e.getIndexName(), columnName.getSimple() ) ); - } catch ( LastPlacementException e ) { - throw CoreUtil.newContextException( - storeName.getPos(), - RESOURCE.onlyOnePlacementLeft() ); - } catch ( PlacementIsPrimaryException e ) { - throw CoreUtil.newContextException( - storeName.getPos(), - RESOURCE.placementIsPrimaryKey( columnName.getSimple() ) ); - } catch ( ColumnNotExistsException e ) { - throw CoreUtil.newContextException( - columnName.getPos(), - RESOURCE.columnNotFoundInTable( e.columnName, e.tableName ) - ); - } + DdlManager.getInstance().dropColumnPlacement( + catalogTable, + columnName.getSimple(), + storeInstance, + statement ); } } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableOwner.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableOwner.java index 1b91a26760..6f85e3d1dd 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableOwner.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableOwner.java @@ -17,13 +17,10 @@ package org.polypheny.db.sql.language.ddl.altertable; -import static org.polypheny.db.util.Static.RESOURCE; - import java.util.List; import java.util.Objects; -import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.entity.logical.LogicalTable; -import org.polypheny.db.catalog.exceptions.UnknownUserException; +import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.ddl.DdlManager; import org.polypheny.db.languages.ParserPos; import org.polypheny.db.languages.QueryParameters; @@ -34,7 +31,6 @@ import org.polypheny.db.sql.language.SqlWriter; import org.polypheny.db.sql.language.ddl.SqlAlterTable; import org.polypheny.db.transaction.Statement; -import org.polypheny.db.util.CoreUtil; import org.polypheny.db.util.ImmutableNullableList; @@ -89,11 +85,8 @@ public void execute( Context context, Statement statement, QueryParameters param throw new 
RuntimeException( "No FQDN allowed here: " + owner.toString() ); } - try { - DdlManager.getInstance().alterTableOwner( catalogTable, owner.getSimple() ); - } catch ( UnknownUserException e ) { - throw CoreUtil.newContextException( owner.getPos(), RESOURCE.userNotFound( owner.getSimple() ) ); - } + DdlManager.getInstance().alterTableOwner( catalogTable, owner.getSimple() ); + } } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableRename.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableRename.java index 4a7bcf6832..90a65e944b 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableRename.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableRename.java @@ -17,12 +17,9 @@ package org.polypheny.db.sql.language.ddl.altertable; -import static org.polypheny.db.util.Static.RESOURCE; - import java.util.List; import java.util.Objects; import org.polypheny.db.catalog.entity.logical.LogicalTable; -import org.polypheny.db.catalog.exceptions.EntityAlreadyExistsException; import org.polypheny.db.ddl.DdlManager; import org.polypheny.db.languages.ParserPos; import org.polypheny.db.languages.QueryParameters; @@ -33,7 +30,6 @@ import org.polypheny.db.sql.language.SqlWriter; import org.polypheny.db.sql.language.ddl.SqlAlterTable; import org.polypheny.db.transaction.Statement; -import org.polypheny.db.util.CoreUtil; import org.polypheny.db.util.ImmutableNullableList; @@ -81,14 +77,10 @@ public void execute( Context context, Statement statement, QueryParameters param LogicalTable table = getCatalogTable( context, oldName ); if ( newName.names.size() != 1 ) { - throw new RuntimeException( "No FQDN allowed here: " + newName.toString() ); + throw new RuntimeException( "No FQDN allowed here: " + newName ); } - try { - DdlManager.getInstance().renameTable( table, newName.getSimple(), statement ); - } catch ( EntityAlreadyExistsException e ) { - throw CoreUtil.newContextException( newName.getPos(), RESOURCE.tableExists( newName.getSimple() ) ); - } + DdlManager.getInstance().renameTable( table, newName.getSimple(), statement ); } } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableRenameColumn.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableRenameColumn.java index 5a715c6c9c..01e3dfd149 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableRenameColumn.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableRenameColumn.java @@ -17,14 +17,10 @@ package org.polypheny.db.sql.language.ddl.altertable; -import static org.polypheny.db.util.Static.RESOURCE; - import java.util.List; import java.util.Objects; import org.polypheny.db.catalog.entity.logical.LogicalTable; -import org.polypheny.db.catalog.exceptions.ColumnAlreadyExistsException; import org.polypheny.db.ddl.DdlManager; -import org.polypheny.db.ddl.exception.ColumnNotExistsException; import org.polypheny.db.languages.ParserPos; import org.polypheny.db.languages.QueryParameters; import org.polypheny.db.nodes.Node; @@ -34,7 +30,6 @@ import org.polypheny.db.sql.language.SqlWriter; import org.polypheny.db.sql.language.ddl.SqlAlterTable; import org.polypheny.db.transaction.Statement; -import org.polypheny.db.util.CoreUtil; import 
org.polypheny.db.util.ImmutableNullableList; @@ -85,13 +80,7 @@ public void unparse( SqlWriter writer, int leftPrec, int rightPrec ) { public void execute( Context context, Statement statement, QueryParameters parameters ) { LogicalTable catalogTable = getCatalogTable( context, table ); - try { - DdlManager.getInstance().renameColumn( catalogTable, columnOldName.getSimple(), columnNewName.getSimple(), statement ); - } catch ( ColumnAlreadyExistsException e ) { - throw CoreUtil.newContextException( columnNewName.getPos(), RESOURCE.columnExists( columnNewName.getSimple() ) ); - } catch ( ColumnNotExistsException e ) { - throw CoreUtil.newContextException( columnOldName.getPos(), RESOURCE.columnNotFoundInTable( e.columnName, e.tableName ) ); - } + DdlManager.getInstance().renameColumn( catalogTable, columnOldName.getSimple(), columnNewName.getSimple(), statement ); } } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/alterview/SqlAlterViewRename.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/alterview/SqlAlterViewRename.java index 17ab88d7ca..1788eadbd3 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/alterview/SqlAlterViewRename.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/alterview/SqlAlterViewRename.java @@ -16,13 +16,11 @@ package org.polypheny.db.sql.language.ddl.alterview; -import static org.polypheny.db.util.Static.RESOURCE; - import java.util.List; import java.util.Objects; -import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.entity.logical.LogicalTable; -import org.polypheny.db.catalog.exceptions.EntityAlreadyExistsException; +import org.polypheny.db.catalog.exceptions.GenericRuntimeException; +import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.ddl.DdlManager; import org.polypheny.db.languages.ParserPos; import org.polypheny.db.languages.QueryParameters; @@ -33,7 +31,6 @@ import org.polypheny.db.sql.language.SqlWriter; import org.polypheny.db.sql.language.ddl.SqlAlterView; import org.polypheny.db.transaction.Statement; -import org.polypheny.db.util.CoreUtil; import org.polypheny.db.util.ImmutableNullableList; /** @@ -83,17 +80,14 @@ public void execute( Context context, Statement statement, QueryParameters param LogicalTable catalogTable = getCatalogTable( context, oldName ); if ( catalogTable.entityType != EntityType.VIEW ) { - throw new RuntimeException( "Not Possible to use ALTER VIEW because " + catalogTable.name + " is not a View." 
); + throw new GenericRuntimeException( "Not Possible to use ALTER VIEW because %s is not a View.", catalogTable.name ); } if ( newName.names.size() != 1 ) { - throw new RuntimeException( "No FQDN allowed here: " + newName.toString() ); - } - try { - DdlManager.getInstance().renameTable( catalogTable, newName.getSimple(), statement ); - } catch ( EntityAlreadyExistsException e ) { - throw CoreUtil.newContextException( oldName.getPos(), RESOURCE.schemaExists( newName.getSimple() ) ); + throw new GenericRuntimeException( "No FQDN allowed here: %s", newName ); } + + DdlManager.getInstance().renameTable( catalogTable, newName.getSimple(), statement ); } } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/alterview/SqlAlterViewRenameColumn.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/alterview/SqlAlterViewRenameColumn.java index 7ee7dd3cf2..13b4f21b6a 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/alterview/SqlAlterViewRenameColumn.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/alterview/SqlAlterViewRenameColumn.java @@ -16,15 +16,11 @@ package org.polypheny.db.sql.language.ddl.alterview; -import static org.polypheny.db.util.Static.RESOURCE; - import java.util.List; import java.util.Objects; -import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.entity.logical.LogicalTable; -import org.polypheny.db.catalog.exceptions.ColumnAlreadyExistsException; +import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.ddl.DdlManager; -import org.polypheny.db.ddl.exception.ColumnNotExistsException; import org.polypheny.db.languages.ParserPos; import org.polypheny.db.languages.QueryParameters; import org.polypheny.db.nodes.Node; @@ -34,7 +30,6 @@ import org.polypheny.db.sql.language.SqlWriter; import org.polypheny.db.sql.language.ddl.SqlAlterView; import org.polypheny.db.transaction.Statement; -import org.polypheny.db.util.CoreUtil; import org.polypheny.db.util.ImmutableNullableList; public class SqlAlterViewRenameColumn extends SqlAlterView { @@ -85,13 +80,7 @@ public void execute( Context context, Statement statement, QueryParameters param throw new RuntimeException( "Not Possible to use ALTER VIEW because " + catalogTable.name + " is not a View." 
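// The execute() changes in these ALTER statements all follow one shape: the try/catch blocks that
// translated checked DDL exceptions into parser-position errors via CoreUtil.newContextException
// are removed, and the DdlManager call stands alone, with failures surfacing as unchecked
// exceptions. Schematically, taken from the SqlAlterTableRename hunk above:
//
//     // before
//     try {
//         DdlManager.getInstance().renameTable( table, newName.getSimple(), statement );
//     } catch ( EntityAlreadyExistsException e ) {
//         throw CoreUtil.newContextException( newName.getPos(), RESOURCE.tableExists( newName.getSimple() ) );
//     }
//
//     // after
//     DdlManager.getInstance().renameTable( table, newName.getSimple(), statement );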
); } - try { - DdlManager.getInstance().renameColumn( catalogTable, columnOldName.getSimple(), columnNewName.getSimple(), statement ); - } catch ( ColumnAlreadyExistsException e ) { - throw CoreUtil.newContextException( columnNewName.getPos(), RESOURCE.columnExists( columnNewName.getSimple() ) ); - } catch ( ColumnNotExistsException e ) { - throw CoreUtil.newContextException( columnOldName.getPos(), RESOURCE.columnNotFoundInTable( e.columnName, e.tableName ) ); - } + DdlManager.getInstance().renameColumn( catalogTable, columnOldName.getSimple(), columnNewName.getSimple(), statement ); } } diff --git a/webui/src/main/java/org/polypheny/db/webui/Crud.java b/webui/src/main/java/org/polypheny/db/webui/Crud.java index 1069d3a341..37b19523b5 100644 --- a/webui/src/main/java/org/polypheny/db/webui/Crud.java +++ b/webui/src/main/java/org/polypheny/db/webui/Crud.java @@ -115,14 +115,6 @@ import org.polypheny.db.catalog.entity.allocation.AllocationEntity; import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalTable; -import org.polypheny.db.catalog.exceptions.ColumnAlreadyExistsException; -import org.polypheny.db.catalog.exceptions.EntityAlreadyExistsException; -import org.polypheny.db.catalog.exceptions.GenericCatalogException; -import org.polypheny.db.catalog.exceptions.UnknownColumnException; -import org.polypheny.db.catalog.exceptions.UnknownPartitionTypeException; -import org.polypheny.db.catalog.exceptions.UnknownSchemaException; -import org.polypheny.db.catalog.exceptions.UnknownTableException; -import org.polypheny.db.catalog.exceptions.UnknownUserException; import org.polypheny.db.catalog.logistic.ConstraintType; import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.logistic.ForeignKeyOption; @@ -134,7 +126,6 @@ import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.config.RuntimeConfig; import org.polypheny.db.ddl.DdlManager; -import org.polypheny.db.ddl.exception.ColumnNotExistsException; import org.polypheny.db.docker.DockerManager; import org.polypheny.db.iface.QueryInterface; import org.polypheny.db.iface.QueryInterfaceManager; @@ -1159,7 +1150,7 @@ void getColumns( final Context ctx ) { } - void getDataSourceColumns( final Context ctx ) throws UnknownTableException, UnknownSchemaException { + void getDataSourceColumns( final Context ctx ) { UIRequest request = ctx.bodyAsClass( UIRequest.class ); LogicalTable catalogTable = catalog.getSnapshot().rel().getTable( request.getSchemaName(), request.getTableName() ); @@ -1217,7 +1208,7 @@ void getDataSourceColumns( final Context ctx ) throws UnknownTableException, Unk /** * Get additional columns of the DataSource that are not mapped to the table. 
*/ - void getAvailableSourceColumns( final Context ctx ) throws UnknownTableException { + void getAvailableSourceColumns( final Context ctx ) { UIRequest request = ctx.bodyAsClass( UIRequest.class ); LogicalTable table = catalog.getSnapshot().rel().getTable( request.getSchemaName(), request.getTableName() ); @@ -1260,7 +1251,7 @@ void getAvailableSourceColumns( final Context ctx ) throws UnknownTableException } - void getMaterializedInfo( final Context ctx ) throws UnknownTableException, UnknownSchemaException { + void getMaterializedInfo( final Context ctx ) { EditTableRequest request = ctx.bodyAsClass( EditTableRequest.class ); LogicalTable catalogTable = getLogicalTable( request.schema, request.table ); @@ -1291,7 +1282,7 @@ void getMaterializedInfo( final Context ctx ) throws UnknownTableException, Unkn } - private LogicalTable getLogicalTable( String schema, String table ) throws UnknownTableException { + private LogicalTable getLogicalTable( String schema, String table ) { return catalog.getSnapshot().rel().getTable( schema, table ); } @@ -1591,46 +1582,41 @@ void getConstraints( final Context ctx ) { ArrayList resultList = new ArrayList<>(); Map> temp = new HashMap<>(); - try { - LogicalTable catalogTable = getLogicalTable( t[0], t[1] ); - - // get primary key - if ( catalogTable.primaryKey != null ) { - CatalogPrimaryKey primaryKey = catalog.getSnapshot().rel().getPrimaryKey( catalogTable.primaryKey ); - for ( String columnName : primaryKey.getColumnNames() ) { - if ( !temp.containsKey( "" ) ) { - temp.put( "", new ArrayList<>() ); - } - temp.get( "" ).add( columnName ); - } - for ( Map.Entry> entry : temp.entrySet() ) { - resultList.add( new TableConstraint( entry.getKey(), "PRIMARY KEY", entry.getValue() ) ); - } - } + LogicalTable catalogTable = getLogicalTable( t[0], t[1] ); - // get unique constraints. - temp.clear(); - List constraints = catalog.getSnapshot().rel().getConstraints( catalogTable.id ); - for ( CatalogConstraint catalogConstraint : constraints ) { - if ( catalogConstraint.type == ConstraintType.UNIQUE ) { - temp.put( catalogConstraint.name, new ArrayList<>( catalogConstraint.key.getColumnNames() ) ); + // get primary key + if ( catalogTable.primaryKey != null ) { + CatalogPrimaryKey primaryKey = catalog.getSnapshot().rel().getPrimaryKey( catalogTable.primaryKey ); + for ( String columnName : primaryKey.getColumnNames() ) { + if ( !temp.containsKey( "" ) ) { + temp.put( "", new ArrayList<>() ); } + temp.get( "" ).add( columnName ); } for ( Map.Entry> entry : temp.entrySet() ) { - resultList.add( new TableConstraint( entry.getKey(), "UNIQUE", entry.getValue() ) ); + resultList.add( new TableConstraint( entry.getKey(), "PRIMARY KEY", entry.getValue() ) ); } + } - // the foreign keys are listed separately + // get unique constraints. 
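// getConstraints (and getIndexes below) no longer wrap the table lookup in try/catch: the
// snapshot-based getLogicalTable cannot throw the removed checked UnknownTableException, so a
// missing table now surfaces as an unchecked exception instead of being converted into an error
// Result. A minimal sketch of that lookup contract (the Map stands in for the relational
// snapshot; SnapshotSketch and its members are illustrative names, not the project's API):

import java.util.Map;

class SnapshotSketch {

    private final Map<String, String> tables = Map.of( "public.emps", "emps" );

    String getTable( String schema, String table ) {
        String entity = tables.get( schema + "." + table );
        if ( entity == null ) {
            // Unchecked, so callers such as getConstraints need no try/catch.
            throw new RuntimeException( "There is no table with the name: " + schema + "." + table );
        }
        return entity;
    }

}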
+ temp.clear(); + List constraints = catalog.getSnapshot().rel().getConstraints( catalogTable.id ); + for ( CatalogConstraint catalogConstraint : constraints ) { + if ( catalogConstraint.type == ConstraintType.UNIQUE ) { + temp.put( catalogConstraint.name, new ArrayList<>( catalogConstraint.key.getColumnNames() ) ); + } + } + for ( Map.Entry> entry : temp.entrySet() ) { + resultList.add( new TableConstraint( entry.getKey(), "UNIQUE", entry.getValue() ) ); + } - DbColumn[] header = { new DbColumn( "Name" ), new DbColumn( "Type" ), new DbColumn( "Columns" ) }; - ArrayList data = new ArrayList<>(); - resultList.forEach( c -> data.add( c.asRow() ) ); + // the foreign keys are listed separately - result = new Result( header, data.toArray( new String[0][2] ) ); - } catch ( UnknownTableException e ) { - log.error( "Caught exception while fetching constraints", e ); - result = new Result( e ); - } + DbColumn[] header = { new DbColumn( "Name" ), new DbColumn( "Type" ), new DbColumn( "Columns" ) }; + ArrayList data = new ArrayList<>(); + resultList.forEach( c -> data.add( c.asRow() ) ); + + result = new Result( header, data.toArray( new String[0][2] ) ); ctx.json( result ); } @@ -1749,70 +1735,68 @@ void addUniqueConstraint( final Context ctx ) { void getIndexes( final Context ctx ) { EditTableRequest request = ctx.bodyAsClass( EditTableRequest.class ); Result result; - try { - LogicalTable catalogTable = getLogicalTable( request.schema, request.table ); - List catalogIndexes = catalog.getSnapshot().rel().getIndexes( catalogTable.id, false ); - DbColumn[] header = { - new DbColumn( "Name" ), - new DbColumn( "Columns" ), - new DbColumn( "Location" ), - new DbColumn( "Method" ), - new DbColumn( "Type" ) }; + LogicalTable catalogTable = getLogicalTable( request.schema, request.table ); + List catalogIndexes = catalog.getSnapshot().rel().getIndexes( catalogTable.id, false ); - ArrayList data = new ArrayList<>(); + DbColumn[] header = { + new DbColumn( "Name" ), + new DbColumn( "Columns" ), + new DbColumn( "Location" ), + new DbColumn( "Method" ), + new DbColumn( "Type" ) }; - // Get explicit indexes - for ( CatalogIndex catalogIndex : catalogIndexes ) { - String[] arr = new String[5]; - String storeUniqueName; - if ( catalogIndex.location == 0 ) { - // a polystore index - storeUniqueName = "Polypheny-DB"; - } else { - storeUniqueName = catalog.getSnapshot().getAdapter( catalogIndex.location ).uniqueName; - } - arr[0] = catalogIndex.name; - arr[1] = String.join( ", ", catalogIndex.key.getColumnNames() ); - arr[2] = storeUniqueName; - arr[3] = catalogIndex.methodDisplayName; - arr[4] = catalogIndex.type.name(); - data.add( arr ); + ArrayList data = new ArrayList<>(); + + // Get explicit indexes + for ( CatalogIndex catalogIndex : catalogIndexes ) { + String[] arr = new String[5]; + String storeUniqueName; + if ( catalogIndex.location == 0 ) { + // a polystore index + storeUniqueName = "Polypheny-DB"; + } else { + storeUniqueName = catalog.getSnapshot().getAdapter( catalogIndex.location ).uniqueName; } + arr[0] = catalogIndex.name; + arr[1] = String.join( ", ", catalogIndex.key.getColumnNames() ); + arr[2] = storeUniqueName; + arr[3] = catalogIndex.methodDisplayName; + arr[4] = catalogIndex.type.name(); + data.add( arr ); + } - // Get functional indexes - List placements = catalog.getSnapshot().alloc().getDataPlacements( catalogTable.id ); - for ( CatalogDataPlacement placement : placements ) { - Adapter adapter = AdapterManager.getInstance().getAdapter( placement.adapterId ); - DataStore store; - if ( 
adapter instanceof DataStore ) { - store = (DataStore) adapter; - } else { - break; - } - for ( FunctionalIndexInfo fif : store.getFunctionalIndexes( catalogTable ) ) { - String[] arr = new String[5]; - arr[0] = ""; - arr[1] = String.join( ", ", fif.getColumnNames() ); - arr[2] = store.getUniqueName(); - arr[3] = fif.methodDisplayName; - arr[4] = "FUNCTIONAL"; - data.add( arr ); - } + // Get functional indexes + List placements = catalog.getSnapshot().alloc().getDataPlacements( catalogTable.id ); + for ( CatalogDataPlacement placement : placements ) { + Adapter adapter = AdapterManager.getInstance().getAdapter( placement.adapterId ); + DataStore store; + if ( adapter instanceof DataStore ) { + store = (DataStore) adapter; + } else { + break; } + for ( FunctionalIndexInfo fif : store.getFunctionalIndexes( catalogTable ) ) { + String[] arr = new String[5]; + arr[0] = ""; + arr[1] = String.join( ", ", fif.getColumnNames() ); + arr[2] = store.getUniqueName(); + arr[3] = fif.methodDisplayName; + arr[4] = "FUNCTIONAL"; + data.add( arr ); + } + } - result = new Result( header, data.toArray( new String[0][2] ) ); + result = new Result( header, data.toArray( new String[0][2] ) ); - } catch ( UnknownTableException e ) { - log.error( "Caught exception while fetching indexes", e ); - result = new Result( e ); - } ctx.json( result ); } /** * Drop an index of a table + * + * @param ctx */ void dropIndex( final Context ctx ) { Index index = ctx.bodyAsClass( Index.class ); @@ -1875,7 +1859,7 @@ void createIndex( final Context ctx ) { } - void getUnderlyingTable( final Context ctx ) throws UnknownTableException { + void getUnderlyingTable( final Context ctx ) { UIRequest request = ctx.bodyAsClass( UIRequest.class ); @@ -1911,33 +1895,29 @@ private Placement getPlacements( final Index index ) { String schemaName = index.getSchema(); String tableName = index.getTable(); Snapshot snapshot = Catalog.getInstance().getSnapshot(); - try { - LogicalTable table = getLogicalTable( schemaName, tableName ); - Placement p = new Placement( snapshot.alloc().isPartitioned( table.id ), snapshot.alloc().getPartitionGroupNames( table.id ), table.entityType ); - if ( table.entityType == EntityType.VIEW ) { - return p; - } else { - long pkid = table.primaryKey; - List pkColumnIds = snapshot.rel().getPrimaryKey( pkid ).columnIds; - LogicalColumn pkColumn = snapshot.rel().getColumn( pkColumnIds.get( 0 ) ); - List pkPlacements = snapshot.alloc().getColumnPlacements( pkColumn.id ); - for ( CatalogColumnPlacement placement : pkPlacements ) { - Adapter adapter = AdapterManager.getInstance().getAdapter( placement.adapterId ); - PartitionProperty property = snapshot.alloc().getPartitionProperty( table.id ); - p.addAdapter( new RelationalStore( - adapter.getUniqueName(), - adapter.getUniqueName(), - snapshot.alloc().getColumnPlacementsOnAdapterPerTable( adapter.getAdapterId(), table.id ), - snapshot.alloc().getPartitionGroupsIndexOnDataPlacement( placement.adapterId, placement.tableId ), - property.numPartitionGroups, - property.partitionType ) ); - } - return p; + LogicalTable table = getLogicalTable( schemaName, tableName ); + Placement p = new Placement( snapshot.alloc().isPartitioned( table.id ), snapshot.alloc().getPartitionGroupNames( table.id ), table.entityType ); + if ( table.entityType == EntityType.VIEW ) { + + return p; + } else { + long pkid = table.primaryKey; + List pkColumnIds = snapshot.rel().getPrimaryKey( pkid ).columnIds; + LogicalColumn pkColumn = snapshot.rel().getColumn( pkColumnIds.get( 0 ) ); + List 
pkPlacements = snapshot.alloc().getColumnPlacements( pkColumn.id ); + for ( CatalogColumnPlacement placement : pkPlacements ) { + Adapter adapter = AdapterManager.getInstance().getAdapter( placement.adapterId ); + PartitionProperty property = snapshot.alloc().getPartitionProperty( table.id ); + p.addAdapter( new RelationalStore( + adapter.getUniqueName(), + adapter.getUniqueName(), + snapshot.alloc().getColumnPlacementsOnAdapterPerTable( adapter.getAdapterId(), table.id ), + snapshot.alloc().getPartitionGroupsIndexOnDataPlacement( placement.adapterId, placement.tableId ), + property.numPartitionGroups, + property.partitionType ) ); } - } catch ( UnknownTableException e ) { - log.error( "Caught exception while getting placements", e ); - return new Placement( e ); + return p; } } @@ -2044,7 +2024,7 @@ private List buildPartitionFunctionRow( PartitioningReq } - void getPartitionFunctionModel( final Context ctx ) throws UnknownColumnException, UnknownTableException, UnknownSchemaException { + void getPartitionFunctionModel( final Context ctx ) { PartitioningRequest request = ctx.bodyAsClass( PartitioningRequest.class ); // Get correct partition function @@ -2107,12 +2087,7 @@ void partitionTable( final Context ctx ) { // Get correct partition function PartitionManagerFactory partitionManagerFactory = PartitionManagerFactory.getInstance(); - PartitionManager partitionManager = null; - try { - partitionManager = partitionManagerFactory.getPartitionManager( PartitionType.getByName( request.functionName ) ); - } catch ( UnknownPartitionTypeException e ) { - throw new RuntimeException( e ); - } + PartitionManager partitionManager = partitionManagerFactory.getPartitionManager( PartitionType.getByName( request.functionName ) ); PartitionFunctionInfo functionInfo = partitionManager.getPartitionFunctionInfo(); @@ -2708,33 +2683,22 @@ Result executeRelAlg( final RelAlgRequest request, Session session ) { Gson gson = new Gson(); - try { - DdlManager.getInstance().createMaterializedView( - viewName, - schemaId, - root, - replace, - statement, - stores, - placementType, - columns, - materializedCriteria, - gson.toJson( request.topNode ), - QueryLanguage.from( "rel" ), - false, - false - ); - } catch ( EntityAlreadyExistsException | GenericCatalogException | UnknownColumnException e ) { - log.error( "Not possible to create Materialized View because the name is already used", e ); - Result finalResult = new Result( e ); - finalResult.setGeneratedQuery( "Execute logical query plan" ); - return finalResult; - } catch ( ColumnNotExistsException | ColumnAlreadyExistsException e ) { - log.error( "Error while creating materialized view", e ); - Result finalResult = new Result( e ); - finalResult.setGeneratedQuery( "Execute logical query plan" ); - return finalResult; - } + DdlManager.getInstance().createMaterializedView( + viewName, + schemaId, + root, + replace, + statement, + stores, + placementType, + columns, + materializedCriteria, + gson.toJson( request.topNode ), + QueryLanguage.from( "rel" ), + false, + false + ); + } else { @@ -2750,25 +2714,19 @@ Result executeRelAlg( final RelAlgRequest request, Session session ) { Gson gson = new Gson(); - try { - DdlManager.getInstance().createView( - viewName, - schemaId, - root.alg, - root.collation, - replace, - statement, - placementType, - columns, - gson.toJson( request.topNode ), - QueryLanguage.from( "rel" ) - ); - } catch ( EntityAlreadyExistsException | GenericCatalogException | UnknownColumnException e ) { - log.error( "Not possible to create View because 
the Name is already used", e ); - Result finalResult = new Result( e ); - finalResult.setGeneratedQuery( "Execute logical query plan" ); - return finalResult; - } + DdlManager.getInstance().createView( + viewName, + schemaId, + root.alg, + root.collation, + replace, + statement, + placementType, + columns, + gson.toJson( request.topNode ), + QueryLanguage.from( "rel" ) + ); + } try { @@ -3564,14 +3522,10 @@ public static Transaction getTransaction( boolean analyze, boolean useCache, Tra public static Transaction getTransaction( boolean analyze, boolean useCache, TransactionManager transactionManager, long userId, long databaseId, String origin ) { - try { - Snapshot snapshot = Catalog.getInstance().getSnapshot(); - Transaction transaction = transactionManager.startTransaction( snapshot.getUser( Catalog.defaultUserId ), snapshot.getNamespace( Catalog.defaultNamespaceId ), analyze, origin, MultimediaFlavor.FILE ); - transaction.setUseCache( useCache ); - return transaction; - } catch ( UnknownUserException | UnknownSchemaException e ) { - throw new RuntimeException( "Error while starting transaction", e ); - } + Snapshot snapshot = Catalog.getInstance().getSnapshot(); + Transaction transaction = transactionManager.startTransaction( snapshot.getUser( Catalog.defaultUserId ), snapshot.getNamespace( Catalog.defaultNamespaceId ), analyze, origin, MultimediaFlavor.FILE ); + transaction.setUseCache( useCache ); + return transaction; } @@ -3589,15 +3543,13 @@ public static Transaction getTransaction( boolean analyze, boolean useCache, Cru */ private Map getCatalogColumns( String schemaName, String tableName ) { Map dataTypes = new HashMap<>(); - try { - LogicalTable table = getLogicalTable( schemaName, tableName ); - List logicalColumns = catalog.getSnapshot().rel().getColumns( table.id ); - for ( LogicalColumn logicalColumn : logicalColumns ) { - dataTypes.put( logicalColumn.name, logicalColumn ); - } - } catch ( UnknownTableException e ) { - log.error( "Caught exception", e ); + + LogicalTable table = getLogicalTable( schemaName, tableName ); + List logicalColumns = catalog.getSnapshot().rel().getColumns( table.id ); + for ( LogicalColumn logicalColumn : logicalColumns ) { + dataTypes.put( logicalColumn.name, logicalColumn ); } + return dataTypes; } diff --git a/webui/src/main/java/org/polypheny/db/webui/HttpServer.java b/webui/src/main/java/org/polypheny/db/webui/HttpServer.java index 44841274af..07f4019eac 100644 --- a/webui/src/main/java/org/polypheny/db/webui/HttpServer.java +++ b/webui/src/main/java/org/polypheny/db/webui/HttpServer.java @@ -45,9 +45,6 @@ import org.polypheny.db.adapter.DataSource; import org.polypheny.db.adapter.DataStore; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.exceptions.UnknownColumnException; -import org.polypheny.db.catalog.exceptions.UnknownSchemaException; -import org.polypheny.db.catalog.exceptions.UnknownTableException; import org.polypheny.db.config.RuntimeConfig; import org.polypheny.db.iface.Authenticator; import org.polypheny.db.information.InformationDuration; @@ -200,9 +197,6 @@ private void attachExceptions( Javalin server ) { defaultException( IOException.class, server ); defaultException( ServletException.class, server ); - defaultException( UnknownSchemaException.class, server ); - defaultException( UnknownTableException.class, server ); - defaultException( UnknownColumnException.class, server ); } diff --git a/webui/src/main/java/org/polypheny/db/webui/crud/LanguageCrud.java 
b/webui/src/main/java/org/polypheny/db/webui/crud/LanguageCrud.java index c44259c3c2..b31cbd7f5e 100644 --- a/webui/src/main/java/org/polypheny/db/webui/crud/LanguageCrud.java +++ b/webui/src/main/java/org/polypheny/db/webui/crud/LanguageCrud.java @@ -41,7 +41,6 @@ import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.catalog.entity.logical.LogicalTable; -import org.polypheny.db.catalog.exceptions.UnknownCollectionException; import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.catalog.logistic.Pattern; @@ -365,7 +364,7 @@ public void getCollectionPlacements( Context context ) { List collections = catalog.getSnapshot().doc().getCollections( namespaceId, new Pattern( collectionName ) ); if ( collections.size() != 1 ) { - context.json( new Placement( new UnknownCollectionException( 0 ) ) ); + context.json( new Placement( new RuntimeException( "The collection is not known" ) ) ); return; } From dfaaeb872d46d0ef29c7c807b38fb7e26dd4f98b Mon Sep 17 00:00:00 2001 From: datomo Date: Wed, 12 Apr 2023 18:15:26 +0200 Subject: [PATCH 058/436] adjusted missing exceptions --- .../org/polypheny/db/ddl/DdlManagerImpl.java | 50 +++++++------------ .../db/languages/mql/MqlDropDatabase.java | 10 +--- .../db/languages/mql/MqlRenameCollection.java | 36 ++++++------- .../ddl/SqlCreateMaterializedView.java | 34 ++++++------- .../language/ddl/SqlDropMaterializedView.java | 13 ++--- .../db/sql/language/ddl/SqlDropSchema.java | 13 +---- .../db/sql/language/ddl/SqlDropTable.java | 10 +--- .../db/sql/language/ddl/SqlDropView.java | 13 ++--- .../SqlAlterTableAddPartitions.java | 6 --- 9 files changed, 55 insertions(+), 130 deletions(-) diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java index 641f1da6b7..6cb7b94d4d 100644 --- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java @@ -19,7 +19,6 @@ import com.google.common.collect.ImmutableList; import java.util.ArrayList; -import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; @@ -92,15 +91,6 @@ import org.polypheny.db.catalog.snapshot.LogicalRelSnapshot; import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.config.RuntimeConfig; -import org.polypheny.db.ddl.exception.DdlOnSourceException; -import org.polypheny.db.ddl.exception.IndexPreventsRemovalException; -import org.polypheny.db.ddl.exception.LastPlacementException; -import org.polypheny.db.ddl.exception.NotMaterializedViewException; -import org.polypheny.db.ddl.exception.NotViewException; -import org.polypheny.db.ddl.exception.PartitionGroupNamesNotUniqueException; -import org.polypheny.db.ddl.exception.PlacementIsPrimaryException; -import org.polypheny.db.ddl.exception.PlacementNotExistsException; -import org.polypheny.db.ddl.exception.SchemaNotExistException; import org.polypheny.db.languages.QueryLanguage; import org.polypheny.db.monitoring.events.DdlEvent; import org.polypheny.db.monitoring.events.StatementEvent; @@ -1547,23 +1537,23 @@ public void dropColumnPlacement( LogicalTable catalogTable, String columnName, @ // Check whether this store actually contains a placement of this column if ( !catalog.getSnapshot().alloc().checkIfExistsColumnPlacement( storeInstance.getAdapterId(), logicalColumn.id ) ) { - throw new 
PlacementNotExistsException(); + throw new GenericRuntimeException( "The placement does not exist on the store" ); } // Check whether there are any indexes located on the store requiring this column for ( CatalogIndex index : catalog.getSnapshot().rel().getIndexes( catalogTable.id, false ) ) { if ( index.location == storeInstance.getAdapterId() && index.key.columnIds.contains( logicalColumn.id ) ) { - throw new IndexPreventsRemovalException( index.name, columnName ); + throw new GenericRuntimeException( "Cannot remove the column %s, as there is an index %s using it", columnName, index.name ); } } - if ( !catalog.getAllocRel( catalogTable.namespaceId ).validateDataPlacementsConstraints( logicalColumn.tableId, storeInstance.getAdapterId(), Arrays.asList( logicalColumn.id ), new ArrayList<>() ) ) { - throw new LastPlacementException(); + if ( !catalog.getAllocRel( catalogTable.namespaceId ).validateDataPlacementsConstraints( logicalColumn.tableId, storeInstance.getAdapterId(), List.of( logicalColumn.id ), new ArrayList<>() ) ) { + throw new GenericRuntimeException( "Cannot drop the placement as it is the last" ); } // Check whether the column to drop is a primary key CatalogPrimaryKey primaryKey = catalog.getSnapshot().rel().getPrimaryKey( catalogTable.primaryKey ); if ( primaryKey.columnIds.contains( logicalColumn.id ) ) { - throw new PlacementIsPrimaryException(); + throw new GenericRuntimeException( "Cannot drop primary key" ); } // Drop Column on store storeInstance.dropColumn( statement.getPrepareContext(), catalog.getSnapshot().alloc().getColumnPlacement( storeInstance.getAdapterId(), logicalColumn.id ) ); @@ -1630,11 +1620,7 @@ public void createView( String viewName, long namespaceId, AlgNode algNode, AlgC if ( catalog.getSnapshot().rel().checkIfExistsEntity( viewName ) ) { if ( replace ) { - try { - dropView( catalog.getSnapshot().rel().getTable( namespaceId, viewName ), statement ); - } catch ( DdlOnSourceException e ) { - throw new GenericRuntimeException( "Unable tp drop the existing View with this name." 
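// Commit 058 applies the same migration inside DdlManagerImpl itself: single-purpose marker
// exceptions (PlacementNotExistsException, IndexPreventsRemovalException, LastPlacementException,
// PlacementIsPrimaryException) become GenericRuntimeException instances that carry the
// explanation directly, e.g.
//
//     throw new GenericRuntimeException( "Cannot remove the column %s, as there is an index %s using it", columnName, index.name );
//
// so callers such as the SQL DDL statements no longer need one catch clause per failure mode.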
); - } + dropView( catalog.getSnapshot().rel().getTable( namespaceId, viewName ), statement ); } else { throw new GenericRuntimeException( "There already exists a view with the name %s", viewName ); } @@ -2408,7 +2394,7 @@ private void checkDocumentModel( long namespaceId, List column @Override - public void addPartitioning( PartitionInformation partitionInfo, List stores, Statement statement ) throws PartitionGroupNamesNotUniqueException, TransactionException { + public void addPartitioning( PartitionInformation partitionInfo, List stores, Statement statement ) throws TransactionException { Snapshot snapshot = statement.getTransaction().getSnapshot(); LogicalColumn logicalColumn = snapshot.rel().getColumn( partitionInfo.table.id, partitionInfo.columnName ); @@ -2420,7 +2406,7 @@ public void addPartitioning( PartitionInformation partitionInfo, List .map( name -> name.trim().toLowerCase() ) .collect( Collectors.toList() ); if ( sanitizedPartitionGroupNames.size() != new HashSet<>( sanitizedPartitionGroupNames ).size() ) { - throw new PartitionGroupNamesNotUniqueException(); + throw new GenericRuntimeException( "The partition group names are not unique" ); } // Check if specified partitionColumn is even part of the table @@ -2871,7 +2857,7 @@ public void addConstraint( long namespaceId, String constraintName, ConstraintTy @Override - public void dropNamespace( String namespaceName, boolean ifExists, Statement statement ) throws SchemaNotExistException, DdlOnSourceException { + public void dropNamespace( String namespaceName, boolean ifExists, Statement statement ) { namespaceName = namespaceName.toLowerCase(); // Check if there is a schema with this name @@ -2897,18 +2883,18 @@ public void dropNamespace( String namespaceName, boolean ifExists, Statement sta // This is ok because "IF EXISTS" was specified return; } else { - throw new SchemaNotExistException(); + throw new GenericRuntimeException( "The namespace does not exist" ); } } } @Override - public void dropView( LogicalTable catalogView, Statement statement ) throws DdlOnSourceException { + public void dropView( LogicalTable catalogView, Statement statement ) { Snapshot snapshot = statement.getTransaction().getSnapshot(); // Make sure that this is a table of type VIEW if ( catalogView.entityType != EntityType.VIEW ) { - throw new NotViewException(); + throw new GenericRuntimeException( "Can only drop views with this method" ); } // Check if views are dependent from this view @@ -2932,12 +2918,10 @@ public void dropView( LogicalTable catalogView, Statement statement ) throws Ddl @Override - public void dropMaterializedView( LogicalTable materializedView, Statement statement ) throws DdlOnSourceException { + public void dropMaterializedView( LogicalTable materializedView, Statement statement ) { // Make sure that this is a table of type Materialized View - if ( materializedView.entityType == EntityType.MATERIALIZED_VIEW ) { - // Empty on purpose - } else { - throw new NotMaterializedViewException(); + if ( materializedView.entityType != EntityType.MATERIALIZED_VIEW ) { + throw new GenericRuntimeException( "Only materialized views can be dropped with this method" ); } // Check if views are dependent from this view checkViewDependencies( materializedView ); @@ -2953,7 +2937,7 @@ public void dropMaterializedView( LogicalTable materializedView, Statement state } - public void dropTableOld( LogicalTable catalogTable, Statement statement ) throws DdlOnSourceException { + public void dropTableOld( LogicalTable catalogTable, Statement statement ) { Snapshot snapshot 
= catalog.getSnapshot(); // Make sure that this is a table of type TABLE (and not SOURCE) //checkIfDdlPossible( catalogEntity.tableType ); @@ -3066,7 +3050,7 @@ public void dropTableOld( LogicalTable catalogTable, Statement statement ) throw @Override - public void dropTable( LogicalTable catalogTable, Statement statement ) throws DdlOnSourceException { + public void dropTable( LogicalTable catalogTable, Statement statement ) { // Make sure that all adapters are of type store (and not source) Snapshot snapshot = catalog.getSnapshot(); diff --git a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlDropDatabase.java b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlDropDatabase.java index 61e767d9fa..ea6dde9b41 100644 --- a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlDropDatabase.java +++ b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlDropDatabase.java @@ -16,10 +16,7 @@ package org.polypheny.db.languages.mql; -import org.polypheny.db.catalog.Catalog; import org.polypheny.db.ddl.DdlManager; -import org.polypheny.db.ddl.exception.DdlOnSourceException; -import org.polypheny.db.ddl.exception.SchemaNotExistException; import org.polypheny.db.languages.ParserPos; import org.polypheny.db.languages.QueryParameters; import org.polypheny.db.languages.mql.Mql.Type; @@ -37,14 +34,9 @@ public MqlDropDatabase( ParserPos pos ) { @Override public void execute( Context context, Statement statement, QueryParameters parameters ) { - Catalog catalog = Catalog.getInstance(); String database = ((MqlQueryParameters) parameters).getDatabase(); - try { - DdlManager.getInstance().dropNamespace( database, true, statement ); - } catch ( SchemaNotExistException | DdlOnSourceException e ) { - throw new RuntimeException( e ); - } + DdlManager.getInstance().dropNamespace( database, true, statement ); } diff --git a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlRenameCollection.java b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlRenameCollection.java index 076d36ae4a..41f155bf99 100644 --- a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlRenameCollection.java +++ b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlRenameCollection.java @@ -20,7 +20,6 @@ import java.util.Optional; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.ddl.DdlManager; -import org.polypheny.db.ddl.exception.DdlOnSourceException; import org.polypheny.db.languages.ParserPos; import org.polypheny.db.languages.QueryParameters; import org.polypheny.db.languages.mql.Mql.Type; @@ -52,31 +51,26 @@ public Type getMqlKind() { public void execute( Context context, Statement statement, QueryParameters parameters ) { String database = ((MqlQueryParameters) parameters).getDatabase(); - try { - List tables = context.getSnapshot().rel().getTables( database, null ); + List tables = context.getSnapshot().rel().getTables( database, null ); - if ( dropTarget ) { - Optional newTable = tables.stream() - .filter( t -> t.name.equals( newName ) ) - .findAny(); - - if ( newTable.isPresent() ) { - DdlManager.getInstance().dropTable( newTable.get(), statement ); - } - } - - Optional table = tables.stream() - .filter( t -> t.name.equals( getCollection() ) ) + if ( dropTarget ) { + Optional newTable = tables.stream() + .filter( t -> t.name.equals( newName ) ) .findAny(); - if ( table.isEmpty() ) { - throw new RuntimeException( "The target for the rename is not valid." 
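// MqlRenameCollection now resolves the source and target tables through Optional: the optional
// target is dropped via ifPresent, and an empty source optional aborts the rename. A compact,
// self-contained sketch of that flow (List<String> stands in for the snapshot's tables; the
// printlns stand in for DdlManager#dropTable and DdlManager#renameTable; names are illustrative):

import java.util.List;
import java.util.Optional;

class RenameSketch {

    void rename( List<String> tables, String oldName, String newName, boolean dropTarget ) {
        if ( dropTarget ) {
            tables.stream().filter( t -> t.equals( newName ) ).findAny()
                    .ifPresent( t -> System.out.println( "drop " + t ) );
        }
        Optional<String> table = tables.stream().filter( t -> t.equals( oldName ) ).findAny();
        if ( table.isEmpty() ) {
            throw new RuntimeException( "The target for the rename is not valid." );
        }
        System.out.println( "rename " + table.get() + " -> " + newName );
    }

}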
); - } + newTable.ifPresent( logicalTable -> DdlManager.getInstance().dropTable( logicalTable, statement ) ); + } + + Optional table = tables.stream() + .filter( t -> t.name.equals( getCollection() ) ) + .findAny(); - DdlManager.getInstance().renameTable( table.get(), newName, statement ); - } catch ( DdlOnSourceException e ) { - throw new RuntimeException( "The rename was not successful, due to an error: " + e.getMessage() ); + if ( table.isEmpty() ) { + throw new RuntimeException( "The target for the rename is not valid." ); } + + DdlManager.getInstance().renameTable( table.get(), newName, statement ); + } } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateMaterializedView.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateMaterializedView.java index 851389054b..d6d386b862 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateMaterializedView.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateMaterializedView.java @@ -33,7 +33,6 @@ import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.config.RuntimeConfig; import org.polypheny.db.ddl.DdlManager; -import org.polypheny.db.ddl.exception.ColumnNotExistsException; import org.polypheny.db.languages.ParserPos; import org.polypheny.db.languages.QueryLanguage; import org.polypheny.db.languages.QueryParameters; @@ -171,25 +170,20 @@ public void execute( Context context, Statement statement, QueryParameters param boolean ordered = query.getKind().belongsTo( Kind.ORDER ); - try { - DdlManager.getInstance().createMaterializedView( - viewName.replaceAll( "[^A-Za-z0-9]", "_" ), - schemaId, - algRoot, - replace, - statement, - stores, - placementType, - columns, - materializedCriteria, - String.valueOf( query.toSqlString( PolyphenyDbSqlDialect.DEFAULT ) ), - QueryLanguage.from( "sql" ), - ifNotExists, - ordered ); - } catch ( ColumnNotExistsException e ) { - // we just added the table/column, so it has to exist, or we have an internal problem - throw new RuntimeException( e ); - } + DdlManager.getInstance().createMaterializedView( + viewName.replaceAll( "[^A-Za-z0-9]", "_" ), + schemaId, + algRoot, + replace, + statement, + stores, + placementType, + columns, + materializedCriteria, + String.valueOf( query.toSqlString( PolyphenyDbSqlDialect.DEFAULT ) ), + QueryLanguage.from( "sql" ), + ifNotExists, + ordered ); MaterializedViewManager.getInstance().isCreatingMaterialized = false; } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlDropMaterializedView.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlDropMaterializedView.java index a200758f67..5a8d7e7949 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlDropMaterializedView.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlDropMaterializedView.java @@ -16,13 +16,10 @@ package org.polypheny.db.sql.language.ddl; -import static org.polypheny.db.util.Static.RESOURCE; - import org.polypheny.db.algebra.constant.Kind; -import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.ddl.DdlManager; -import org.polypheny.db.ddl.exception.DdlOnSourceException; import org.polypheny.db.languages.ParserPos; import org.polypheny.db.languages.QueryParameters; import 
org.polypheny.db.prepare.Context; @@ -31,7 +28,6 @@ import org.polypheny.db.sql.language.SqlOperator; import org.polypheny.db.sql.language.SqlSpecialOperator; import org.polypheny.db.transaction.Statement; -import org.polypheny.db.util.CoreUtil; import org.polypheny.db.view.MaterializedViewManager; public class SqlDropMaterializedView extends SqlDropObject { @@ -70,11 +66,8 @@ public void execute( Context context, Statement statement, QueryParameters param materializedManager.isDroppingMaterialized = true; materializedManager.deleteMaterializedViewFromInfo( catalogTable.id ); - try { - DdlManager.getInstance().dropMaterializedView( catalogTable, statement ); - } catch ( DdlOnSourceException e ) { - throw CoreUtil.newContextException( name.getPos(), RESOURCE.ddlOnSourceTable() ); - } + DdlManager.getInstance().dropMaterializedView( catalogTable, statement ); + materializedManager.isDroppingMaterialized = false; } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlDropSchema.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlDropSchema.java index 2d745ce4da..679808f60f 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlDropSchema.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlDropSchema.java @@ -17,14 +17,10 @@ package org.polypheny.db.sql.language.ddl; -import static org.polypheny.db.util.Static.RESOURCE; - import com.google.common.collect.ImmutableList; import java.util.List; import org.polypheny.db.algebra.constant.Kind; import org.polypheny.db.ddl.DdlManager; -import org.polypheny.db.ddl.exception.DdlOnSourceException; -import org.polypheny.db.ddl.exception.SchemaNotExistException; import org.polypheny.db.languages.ParserPos; import org.polypheny.db.languages.QueryParameters; import org.polypheny.db.nodes.ExecutableStatement; @@ -37,7 +33,6 @@ import org.polypheny.db.sql.language.SqlSpecialOperator; import org.polypheny.db.sql.language.SqlWriter; import org.polypheny.db.transaction.Statement; -import org.polypheny.db.util.CoreUtil; /** @@ -83,13 +78,7 @@ public void unparse( SqlWriter writer, int leftPrec, int rightPrec ) { @Override public void execute( Context context, Statement statement, QueryParameters parameters ) { - try { - DdlManager.getInstance().dropNamespace( name.getSimple(), ifExists, statement ); - } catch ( SchemaNotExistException e ) { - throw CoreUtil.newContextException( name.getPos(), RESOURCE.schemaNotFound( name.getSimple() ) ); - } catch ( DdlOnSourceException e ) { - throw CoreUtil.newContextException( name.getPos(), RESOURCE.ddlOnSourceTable() ); - } + DdlManager.getInstance().dropNamespace( name.getSimple(), ifExists, statement ); } } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlDropTable.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlDropTable.java index 450df2bbef..ccb5e8bae4 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlDropTable.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlDropTable.java @@ -17,12 +17,9 @@ package org.polypheny.db.sql.language.ddl; -import static org.polypheny.db.util.Static.RESOURCE; - import org.polypheny.db.algebra.constant.Kind; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.ddl.DdlManager; -import org.polypheny.db.ddl.exception.DdlOnSourceException; import org.polypheny.db.languages.ParserPos; import 
org.polypheny.db.languages.QueryParameters; import org.polypheny.db.prepare.Context; @@ -31,7 +28,6 @@ import org.polypheny.db.sql.language.SqlOperator; import org.polypheny.db.sql.language.SqlSpecialOperator; import org.polypheny.db.transaction.Statement; -import org.polypheny.db.util.CoreUtil; /** @@ -65,11 +61,7 @@ public void execute( Context context, Statement statement, QueryParameters param } } - try { - DdlManager.getInstance().dropTable( table, statement ); - } catch ( DdlOnSourceException e ) { - throw CoreUtil.newContextException( name.getPos(), RESOURCE.ddlOnSourceTable() ); - } + DdlManager.getInstance().dropTable( table, statement ); } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlDropView.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlDropView.java index 733a98cf55..93f0e8dc06 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlDropView.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlDropView.java @@ -17,13 +17,10 @@ package org.polypheny.db.sql.language.ddl; -import static org.polypheny.db.util.Static.RESOURCE; - import org.polypheny.db.algebra.constant.Kind; -import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.ddl.DdlManager; -import org.polypheny.db.ddl.exception.DdlOnSourceException; import org.polypheny.db.languages.ParserPos; import org.polypheny.db.languages.QueryParameters; import org.polypheny.db.prepare.Context; @@ -32,7 +29,6 @@ import org.polypheny.db.sql.language.SqlOperator; import org.polypheny.db.sql.language.SqlSpecialOperator; import org.polypheny.db.transaction.Statement; -import org.polypheny.db.util.CoreUtil; /** @@ -70,11 +66,8 @@ public void execute( Context context, Statement statement, QueryParameters param throw new RuntimeException( "Not Possible to use DROP VIEW because " + catalogTable.name + " is not a View." 
); } - try { - DdlManager.getInstance().dropView( catalogTable, statement ); - } catch ( DdlOnSourceException e ) { - throw CoreUtil.newContextException( name.getPos(), RESOURCE.ddlOnSourceTable() ); - } + DdlManager.getInstance().dropView( catalogTable, statement ); + } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddPartitions.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddPartitions.java index e31d2394a5..5ce3be656d 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddPartitions.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddPartitions.java @@ -17,8 +17,6 @@ package org.polypheny.db.sql.language.ddl.altertable; -import static org.polypheny.db.util.Static.RESOURCE; - import java.util.List; import java.util.Objects; import java.util.stream.Collectors; @@ -28,7 +26,6 @@ import org.polypheny.db.catalog.logistic.PartitionType; import org.polypheny.db.ddl.DdlManager; import org.polypheny.db.ddl.DdlManager.PartitionInformation; -import org.polypheny.db.ddl.exception.PartitionGroupNamesNotUniqueException; import org.polypheny.db.languages.ParserPos; import org.polypheny.db.languages.QueryParameters; import org.polypheny.db.nodes.Identifier; @@ -41,7 +38,6 @@ import org.polypheny.db.sql.language.ddl.SqlAlterTable; import org.polypheny.db.transaction.Statement; import org.polypheny.db.transaction.TransactionException; -import org.polypheny.db.util.CoreUtil; import org.polypheny.db.util.ImmutableNullableList; @@ -165,8 +161,6 @@ public void execute( Context context, Statement statement, QueryParameters param } } catch ( TransactionException e ) { throw new RuntimeException( e ); - } catch ( PartitionGroupNamesNotUniqueException e ) { - throw CoreUtil.newContextException( partitionColumn.getPos(), RESOURCE.partitionNamesNotUnique() ); } } From 996ea2bb9af4b00c83bd115e143495874b4ede9f Mon Sep 17 00:00:00 2001 From: datomo Date: Wed, 12 Apr 2023 23:55:11 +0200 Subject: [PATCH 059/436] Separation of dataplacement and allocationTable --- .../org/polypheny/db/adapter/DataStore.java | 11 +- .../catalogs/AllocationRelationalCatalog.java | 27 +-- .../catalogs/LogicalRelationalCatalog.java | 2 +- ...mnPlacement.java => AllocationColumn.java} | 22 ++- .../entity/allocation/AllocationTable.java | 37 +--- .../db/catalog/snapshot/AllocSnapshot.java | 18 +- .../snapshot/impl/AllocSnapshotImpl.java | 58 +++--- .../snapshot/impl/LogicalRelSnapshotImpl.java | 7 +- .../db/partition/PartitionManager.java | 6 +- .../polypheny/db/processing/DataMigrator.java | 10 +- .../polypheny/db/tools/RoutedAlgBuilder.java | 6 +- .../org/polypheny/db/ddl/DdlManagerImpl.java | 169 +++++++++--------- .../partition/AbstractPartitionManager.java | 20 +-- .../db/partition/FrequencyMapImpl.java | 2 +- .../TemperatureAwarePartitionManager.java | 6 +- .../db/processing/DataMigratorImpl.java | 40 ++--- .../db/routing/UiRoutingPageUtil.java | 6 +- .../db/routing/routers/BaseRouter.java | 8 +- .../db/routing/routers/CachedPlanRouter.java | 6 +- .../db/routing/routers/DmlRouterImpl.java | 26 +-- .../routers/FullPlacementQueryRouter.java | 18 +- .../db/routing/routers/IcarusRouter.java | 12 +- .../db/routing/routers/SimpleRouter.java | 4 +- .../CreateSinglePlacementStrategy.java | 8 +- .../db/view/MaterializedViewManagerImpl.java | 6 +- .../polypheny/db/adapter/csv/CsvSchema.java | 4 +- 
.../db/hsqldb/stores/HsqldbStore.java | 4 +- .../jdbc/stores/AbstractJdbcStore.java | 40 +++-- .../org/polypheny/db/catalog/PolyCatalog.java | 18 +- .../allocation/PolyAllocRelCatalog.java | 38 ++-- .../db/catalog/logical/RelationalCatalog.java | 4 +- .../altertable/SqlAlterTableAddColumn.java | 6 +- .../java/org/polypheny/db/webui/Crud.java | 8 +- .../polypheny/db/webui/models/Placement.java | 18 +- 34 files changed, 335 insertions(+), 340 deletions(-) rename core/src/main/java/org/polypheny/db/catalog/entity/{CatalogColumnPlacement.java => AllocationColumn.java} (83%) diff --git a/core/src/main/java/org/polypheny/db/adapter/DataStore.java b/core/src/main/java/org/polypheny/db/adapter/DataStore.java index 9b65df1f5d..e83239f7b7 100644 --- a/core/src/main/java/org/polypheny/db/adapter/DataStore.java +++ b/core/src/main/java/org/polypheny/db/adapter/DataStore.java @@ -28,8 +28,8 @@ import lombok.extern.slf4j.Slf4j; import org.pf4j.ExtensionPoint; import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.entity.AllocationColumn; import org.polypheny.db.catalog.entity.CatalogAdapter.AdapterType; -import org.polypheny.db.catalog.entity.CatalogColumnPlacement; import org.polypheny.db.catalog.entity.CatalogGraphPlacement; import org.polypheny.db.catalog.entity.CatalogIndex; import org.polypheny.db.catalog.entity.allocation.AllocationTable; @@ -38,7 +38,6 @@ import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.entity.physical.PhysicalEntity; -import org.polypheny.db.catalog.entity.physical.PhysicalTable; import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.prepare.Context; import org.polypheny.db.type.PolyType; @@ -66,19 +65,19 @@ public List getSupportedSchemaType() { } - public abstract List createPhysicalTable( Context context, LogicalTable combinedTable, AllocationTable allocationTable ); + public abstract List createPhysicalTable( Context context, LogicalTable combinedTable, AllocationTable alloc, List allocationTable ); public abstract void dropTable( Context context, LogicalTable combinedTable, List partitionIds ); - public abstract void addColumn( Context context, LogicalTable catalogTable, LogicalColumn logicalColumn ); + public abstract void addColumn( Context context, AllocationTable catalogTable, LogicalColumn logicalColumn ); - public abstract void dropColumn( Context context, CatalogColumnPlacement columnPlacement ); + public abstract void dropColumn( Context context, AllocationColumn columnPlacement ); public abstract void addIndex( Context context, CatalogIndex catalogIndex, List partitionIds ); public abstract void dropIndex( Context context, CatalogIndex catalogIndex, List partitionIds ); - public abstract void updateColumnType( Context context, CatalogColumnPlacement columnPlacement, LogicalColumn logicalColumn, PolyType oldType ); + public abstract void updateColumnType( Context context, AllocationColumn columnPlacement, LogicalColumn logicalColumn, PolyType oldType ); public abstract List getAvailableIndexMethods(); diff --git a/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationRelationalCatalog.java b/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationRelationalCatalog.java index 46fd204b68..0fd024f820 100644 --- a/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationRelationalCatalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationRelationalCatalog.java @@ -18,6 +18,7 @@ 
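A note on how to read the DataStore hunk above: the adapter SPI now consumes allocation-level entities directly, so createPhysicalTable receives the AllocationTable plus the exact columns to materialize, and addColumn/dropColumn/updateColumnType operate on AllocationTable and AllocationColumn instead of the logical table and CatalogColumnPlacement. The following is a compilable toy model of that hand-off; every type and method in it is a simplified stand-in for illustration, not Polypheny code.

    import java.util.Comparator;
    import java.util.List;
    import java.util.stream.Collectors;

    // Simplified stand-ins for the patch's LogicalTable, AllocationTable
    // and AllocationColumn; illustrative types only, not Polypheny classes.
    record Logical( long id, String name ) {}
    record Alloc( long id, long logicalId, long adapterId ) {}
    record AllocColumn( long columnId, long position ) {}

    class ToyStore {

        // Mirrors the shape of the revised createPhysicalTable: the store is
        // handed the allocation and the columns it must materialize, ordered
        // by placement position, instead of deriving placements itself.
        List<Long> createPhysicalTable( Logical logical, Alloc alloc, List<AllocColumn> columns ) {
            return columns.stream()
                    .sorted( Comparator.comparingLong( AllocColumn::position ) )
                    .map( AllocColumn::columnId )
                    .collect( Collectors.toList() );
        }

    }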
import java.util.List; import java.util.Map; +import org.polypheny.db.catalog.entity.AllocationColumn; import org.polypheny.db.catalog.entity.allocation.AllocationTable; import org.polypheny.db.catalog.logistic.DataPlacementRole; import org.polypheny.db.catalog.logistic.PartitionType; @@ -36,7 +37,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param position * @return */ - AllocationTable addColumnPlacement( long allocationId, long columnId, PlacementType placementType, int position ); + AllocationColumn addColumn( long allocationId, long columnId, PlacementType placementType, int position ); /** * Deletes all dependent column placements @@ -45,7 +46,7 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * @param columnId The id of the column * @param columnOnly columnOnly If delete originates from a dropColumn */ - void deleteColumnPlacement( long allocationId, long columnId, boolean columnOnly ); + void deleteColumn( long allocationId, long columnId, boolean columnOnly ); /** @@ -66,26 +67,6 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { */ void updateColumnPlacementPhysicalPosition( long allocId, long columnId, long position ); - /** - * Update physical position of a column placement on a specified adapter. Uses auto-increment to get the globally increasing number. - * - * @param adapterId The id of the adapter - * @param columnId The id of the column - */ - void updateColumnPlacementPhysicalPosition( long adapterId, long columnId ); - - /** - * Change physical names of all column placements. - * - * @param adapterId The id of the adapter - * @param columnId The id of the column - * @param physicalSchemaName The physical schema name - * @param physicalColumnName The physical column name - * @param updatePhysicalColumnPosition Whether to reset the column position (the highest number in the table; represents that the column is now at the last position) - */ - void updateColumnPlacementPhysicalNames( long adapterId, long columnId, String physicalSchemaName, String physicalColumnName, boolean updatePhysicalColumnPosition ); - - /** * Adds a partition to the catalog * @@ -227,4 +208,6 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { Map getTables(); + Map getColumns(); + } diff --git a/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalRelationalCatalog.java b/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalRelationalCatalog.java index 7208c33320..bbfc5ba30a 100644 --- a/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalRelationalCatalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalRelationalCatalog.java @@ -118,7 +118,7 @@ public interface LogicalRelationalCatalog extends LogicalCatalog { * @param collation The collation of the field (if applicable, else null) * @return The id of the inserted column */ - long addColumn( String name, long tableId, int position, PolyType type, PolyType collectionsType, Integer length, Integer scale, Integer dimension, Integer cardinality, boolean nullable, Collation collation ); + LogicalColumn addColumn( String name, long tableId, int position, PolyType type, PolyType collectionsType, Integer length, Integer scale, Integer dimension, Integer cardinality, boolean nullable, Collation collation ); /** diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogColumnPlacement.java b/core/src/main/java/org/polypheny/db/catalog/entity/AllocationColumn.java similarity index 83% rename 
from core/src/main/java/org/polypheny/db/catalog/entity/CatalogColumnPlacement.java rename to core/src/main/java/org/polypheny/db/catalog/entity/AllocationColumn.java index bb5099e712..c44836dab4 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogColumnPlacement.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/AllocationColumn.java @@ -23,12 +23,15 @@ import lombok.NonNull; import lombok.SneakyThrows; import lombok.Value; +import org.polypheny.db.algebra.type.AlgDataType; +import org.polypheny.db.algebra.type.AlgDataTypeFactory; +import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.logistic.PlacementType; @EqualsAndHashCode @Value -public class CatalogColumnPlacement implements CatalogObject { +public class AllocationColumn implements CatalogObject { private static final long serialVersionUID = -1909757888176291095L; @@ -39,26 +42,26 @@ public class CatalogColumnPlacement implements CatalogObject { @Serialize public long columnId; @Serialize - public long adapterId; - @Serialize public PlacementType placementType; @Serialize public long position; + @Serialize + public long adapterId; - public CatalogColumnPlacement( + public AllocationColumn( @Deserialize("namespaceId") final long namespaceId, @Deserialize("tableId") final long tableId, @Deserialize("columnId") final long columnId, - @Deserialize("adapterId") final long adapterId, @Deserialize("placementType") @NonNull final PlacementType placementType, - @Deserialize("position") final long position ) { + @Deserialize("position") final long position, + @Deserialize("adapterId") final long adapterId ) { this.namespaceId = namespaceId; this.tableId = tableId; this.columnId = columnId; - this.adapterId = adapterId; this.placementType = placementType; this.position = position; + this.adapterId = adapterId; } @@ -91,4 +94,9 @@ public Serializable[] getParameterArray() { placementType.name() }; } + + public AlgDataType getAlgDataType() { + return Catalog.snapshot().rel().getColumn( columnId ).getAlgDataType( AlgDataTypeFactory.DEFAULT ); + } + } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationTable.java b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationTable.java index 55c92b0089..525f10889f 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationTable.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationTable.java @@ -17,7 +17,6 @@ package org.polypheny.db.catalog.entity.allocation; import io.activej.serializer.annotations.Deserialize; -import io.activej.serializer.annotations.Serialize; import java.io.Serializable; import java.util.ArrayList; import java.util.List; @@ -29,29 +28,21 @@ import org.apache.calcite.linq4j.tree.Expression; import org.apache.calcite.linq4j.tree.Expressions; import org.polypheny.db.algebra.type.AlgDataType; -import org.polypheny.db.algebra.type.AlgDataTypeFactory; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.entity.CatalogColumnPlacement; -import org.polypheny.db.catalog.entity.logical.LogicalColumn; +import org.polypheny.db.catalog.entity.AllocationColumn; import org.polypheny.db.catalog.logistic.NamespaceType; -import org.polypheny.db.catalog.logistic.PlacementType; @EqualsAndHashCode(callSuper = true) @Value public class AllocationTable extends AllocationEntity { - @Serialize - public List placements; - public AllocationTable( @Deserialize("id") long id, @Deserialize("logicalId") long logicalId, 
@Deserialize("namespaceId") long namespaceId, - @Deserialize("adapterId") long adapterId, - @Deserialize("placements") List placements ) { + @Deserialize("adapterId") long adapterId ) { super( id, logicalId, namespaceId, adapterId, NamespaceType.RELATIONAL ); - this.placements = placements; } @@ -68,12 +59,12 @@ public Expression asExpression() { public Map getColumnNames() { - return getColumns().values().stream().collect( Collectors.toMap( c -> c.id, c -> c.name ) ); + return getColumns().values().stream().collect( Collectors.toMap( c -> c.columnId, AllocationColumn::getLogicalColumnName ) ); } - public Map getColumns() { - return Catalog.getInstance().getSnapshot().rel().getColumns( logicalId ).stream().collect( Collectors.toMap( c -> c.id, c -> c ) ); + public Map getColumns() { + return Catalog.snapshot().alloc().getColumns( id ).stream().collect( Collectors.toMap( c -> c.columnId, c -> c ) ); } @@ -82,22 +73,8 @@ public String getNamespaceName() { } - public AllocationTable withAddedColumn( long columnId, PlacementType placementType, int position ) { - List placements = new ArrayList<>( this.placements ); - placements.add( new CatalogColumnPlacement( namespaceId, id, columnId, adapterId, placementType, position ) ); - - return new AllocationTable( id, logicalId, namespaceId, adapterId, placements ); - } - - - public AllocationTable withRemovedColumn( long columnId ) { - List placements = new ArrayList<>( this.placements ); - return new AllocationTable( id, logicalId, namespaceId, adapterId, placements.stream().filter( p -> p.columnId != columnId ).collect( Collectors.toList() ) ); - } - - public Map getColumnTypes() { - return placements.stream().collect( Collectors.toMap( p -> p.columnId, p -> Catalog.snapshot().rel().getColumn( p.columnId ).getAlgDataType( AlgDataTypeFactory.DEFAULT ) ) ); + return getColumns().values().stream().collect( Collectors.toMap( c -> c.columnId, AllocationColumn::getAlgDataType ) ); } @@ -107,7 +84,7 @@ public Map getColumnNamesId() { public List getColumnOrder() { - List columns = new ArrayList<>( placements ); + List columns = new ArrayList<>( getColumns().values() ); columns.sort( ( a, b ) -> Math.toIntExact( a.position - b.position ) ); return columns.stream().map( c -> c.columnId ).collect( Collectors.toList() ); diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/AllocSnapshot.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/AllocSnapshot.java index 6b7d914ead..221c37ffc0 100644 --- a/core/src/main/java/org/polypheny/db/catalog/snapshot/AllocSnapshot.java +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/AllocSnapshot.java @@ -18,10 +18,10 @@ import java.util.List; import java.util.Map; +import org.polypheny.db.catalog.entity.AllocationColumn; import org.polypheny.db.catalog.entity.CatalogAdapter; import org.polypheny.db.catalog.entity.CatalogCollectionMapping; import org.polypheny.db.catalog.entity.CatalogCollectionPlacement; -import org.polypheny.db.catalog.entity.CatalogColumnPlacement; import org.polypheny.db.catalog.entity.CatalogDataPlacement; import org.polypheny.db.catalog.entity.CatalogGraphPlacement; import org.polypheny.db.catalog.entity.CatalogPartition; @@ -54,7 +54,7 @@ public interface AllocSnapshot { * @param columnId The id of the column * @return The specific column placement */ - CatalogColumnPlacement getColumnPlacement( long adapterId, long columnId ); + AllocationColumn getColumnPlacement( long adapterId, long columnId ); /** * Checks if there is a column with the specified name in the 
specified table. @@ -71,7 +71,7 @@ public interface AllocSnapshot { * @param columnId The id of the specific column * @return List of column placements of a specific column */ - List getColumnPlacements( long columnId ); /** * Get column placements of a specific table on a specific adapter, on column detail level. @@ -80,7 +80,7 @@ public interface AllocSnapshot { * @param adapterId The id of the adapter * @return List of column placements of the table on the specified adapter */ - List getColumnPlacementsOnAdapterPerTable( long adapterId, long tableId ); /** * Get column placements on an adapter, on column detail level @@ -89,7 +89,7 @@ public interface AllocSnapshot { * @param adapterId The id of the adapter * @return List of column placements on the specified adapter */ - List getColumnPlacementsOnAdapter( long adapterId ); /** * Gets a collection of column placements for a given column. @@ -97,7 +97,7 @@ public interface AllocSnapshot { * @param columnId The id of the column of requested column placements * @return The collection of placements sorted */ - List getColumnPlacementsByColumn( long columnId ); /** * Gets all column placements of a table structured by the id of the adapters. @@ -122,7 +122,7 @@ public interface AllocSnapshot { * @param schemaId The id of the schema * @return List of column placements on this adapter and schema */ - List getColumnPlacementsOnAdapterAndSchema( long adapterId, long schemaId ); /** * Get a partition object by its unique id @@ -202,7 +202,7 @@ public interface AllocSnapshot { * @param columnId The id of the column * @return List of CatalogColumnPlacements */ - List getColumnPlacementsByPartitionGroup( long tableId, long partitionGroupId, long columnId ); /** * Get adapters by partition.
Identify the location of partitions/replicas @@ -409,4 +409,6 @@ public interface AllocSnapshot { AllocationEntity getAllocation( long adapterId, long entityId ); + List getColumns( long allocId ); + } diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/AllocSnapshotImpl.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/AllocSnapshotImpl.java index 248e60b4cc..25dc1217d2 100644 --- a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/AllocSnapshotImpl.java +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/AllocSnapshotImpl.java @@ -28,10 +28,10 @@ import org.polypheny.db.catalog.catalogs.AllocationDocumentCatalog; import org.polypheny.db.catalog.catalogs.AllocationGraphCatalog; import org.polypheny.db.catalog.catalogs.AllocationRelationalCatalog; +import org.polypheny.db.catalog.entity.AllocationColumn; import org.polypheny.db.catalog.entity.CatalogAdapter; import org.polypheny.db.catalog.entity.CatalogCollectionMapping; import org.polypheny.db.catalog.entity.CatalogCollectionPlacement; -import org.polypheny.db.catalog.entity.CatalogColumnPlacement; import org.polypheny.db.catalog.entity.CatalogDataPlacement; import org.polypheny.db.catalog.entity.CatalogGraphPlacement; import org.polypheny.db.catalog.entity.CatalogPartition; @@ -54,17 +54,18 @@ public class AllocSnapshotImpl implements AllocSnapshot { ImmutableMap tables; + ImmutableMap, AllocationColumn> columns; ImmutableMap collections; ImmutableMap graphs; - ImmutableMap, CatalogColumnPlacement> adapterColumnPlacement; + ImmutableMap, AllocationColumn> adapterColumnPlacement; ImmutableMap allocs; ImmutableMap> allocsOnAdapters; - ImmutableMap> columPlacements; + ImmutableMap> columPlacements; - ImmutableMap> tablePlacements; - ImmutableMap, List> adapterLogicalTablePlacements; + ImmutableMap> tablePlacements; + ImmutableMap, List> adapterLogicalTablePlacements; ImmutableMap, AllocationEntity> adapterLogicalTableAlloc; ImmutableMap> logicalAllocs; ImmutableMap>> tableAdapterColumns; @@ -90,6 +91,14 @@ public AllocSnapshotImpl( Map allocationCatalogs ) { .map( c -> (AllocationGraphCatalog) c ) .collect( Collectors.toList() ) ); + this.columns = allocationCatalogs.values() + .stream() + .filter( a -> a.getNamespace().namespaceType == NamespaceType.RELATIONAL ) + .map( c -> (AllocationRelationalCatalog) c ) + .map( c -> c.getColumns() ) + .collect( Collectors.toList() ); + this.tableAdapterColumns = buildTableAdapterColumns(); + this.allocs = mergeAllocs(); this.allocsOnAdapters = buildAllocsOnAdapters(); this.adapterColumnPlacement = buildAdapterColumnPlacement(); @@ -98,7 +107,6 @@ public AllocSnapshotImpl( Map allocationCatalogs ) { this.adapterLogicalTableAlloc = buildAdapterLogicalTableAlloc(); this.tablePlacements = buildTablePlacements(); this.logicalAllocs = buildLogicalAllocs(); - this.tableAdapterColumns = buildTableAdapterColumns(); } @@ -130,9 +138,9 @@ private ImmutableMap> buildLogicalAllocs() { } - private ImmutableMap> buildTablePlacements() { - Map> map = new HashMap<>(); - this.tables.forEach( ( k, v ) -> v.placements.forEach( p -> { + private ImmutableMap> buildTablePlacements() { + Map> map = new HashMap<>(); + this.columns.forEach( ( k, v ) -> v.placements.forEach( p -> { if ( !map.containsKey( v.id ) ) { map.put( v.id, new ArrayList<>() ); } @@ -150,8 +158,8 @@ private ImmutableMap, AllocationEntity> buildAdapterLogicalTabl } - private ImmutableMap, List> buildAdapterLogicalTablePlacements() { - Map, List> map = new HashMap<>(); + private ImmutableMap, List> 
buildAdapterLogicalTablePlacements() { + Map, List> map = new HashMap<>(); this.tables.forEach( ( k, v ) -> v.placements.forEach( p -> { if ( !map.containsKey( Pair.of( p.adapterId, p.tableId ) ) ) { map.put( Pair.of( p.adapterId, p.tableId ), new ArrayList<>() ); @@ -163,8 +171,8 @@ private ImmutableMap, List> buildAdapte } - private ImmutableMap> buildColumnPlacements() { - Map> map = new HashMap<>(); + private ImmutableMap> buildColumnPlacements() { + Map> map = new HashMap<>(); this.tables.forEach( ( k, v ) -> v.placements.forEach( p -> { if ( !map.containsKey( p.columnId ) ) { map.put( p.columnId, new ArrayList<>() ); @@ -176,8 +184,8 @@ private ImmutableMap> buildColumnPlacements() } - private ImmutableMap, CatalogColumnPlacement> buildAdapterColumnPlacement() { - Map, CatalogColumnPlacement> map = new HashMap<>(); + private ImmutableMap, AllocationColumn> buildAdapterColumnPlacement() { + Map, AllocationColumn> map = new HashMap<>(); this.tables.forEach( ( k, v ) -> v.placements.forEach( p -> map.put( Pair.of( v.adapterId, p.columnId ), p ) ) ); return ImmutableMap.copyOf( map ); } @@ -243,7 +251,7 @@ public AllocationEntity getAllocEntity( long id ) { @Override - public CatalogColumnPlacement getColumnPlacement( long adapterId, long columnId ) { + public AllocationColumn getColumnPlacement( long adapterId, long columnId ) { return adapterColumnPlacement.get( Pair.of( adapterId, columnId ) ); } @@ -255,25 +263,25 @@ public boolean checkIfExistsColumnPlacement( long adapterId, long columnId ) { @Override - public List getColumnPlacements( long columnId ) { + public List getColumnPlacements( long columnId ) { return columPlacements.get( columnId ); } @Override - public List getColumnPlacementsOnAdapterPerTable( long adapterId, long tableId ) { + public List getColumnPlacementsOnAdapterPerTable( long adapterId, long tableId ) { return adapterLogicalTablePlacements.get( Pair.of( adapterId, tableId ) ); } @Override - public List getColumnPlacementsOnAdapter( long adapterId ) { + public List getColumnPlacementsOnAdapter( long adapterId ) { return null; } @Override - public List getColumnPlacementsByColumn( long columnId ) { + public List getColumnPlacementsByColumn( long columnId ) { return null; } @@ -291,7 +299,7 @@ public long getPartitionGroupByPartition( long partitionId ) { @Override - public List getColumnPlacementsOnAdapterAndSchema( long adapterId, long schemaId ) { + public List getColumnPlacementsOnAdapterAndSchema( long adapterId, long schemaId ) { return null; } @@ -345,7 +353,7 @@ public List getPartitionGroupNames( long tableId ) { @Override - public List getColumnPlacementsByPartitionGroup( long tableId, long partitionGroupId, long columnId ) { + public List getColumnPlacementsByPartitionGroup( long tableId, long partitionGroupId, long columnId ) { return null; } @@ -536,4 +544,10 @@ public AllocationEntity getAllocation( long adapterId, long entityId ) { return adapterLogicalTableAlloc.get( Pair.of( adapterId, entityId ) ); } + + @Override + public List getColumns( long allocId ) { + + } + } diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalRelSnapshotImpl.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalRelSnapshotImpl.java index d03cec8831..9d23b71fd4 100644 --- a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalRelSnapshotImpl.java +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalRelSnapshotImpl.java @@ -81,7 +81,7 @@ public LogicalRelSnapshotImpl( Map catalogs ) { namespaceNames = 
ImmutableMap.copyOf( namespaces.values().stream().collect( Collectors.toMap( n -> n.name, n -> n ) ) ); tables = ImmutableMap.copyOf( catalogs.values().stream().flatMap( c -> c.getTables().entrySet().stream() ).collect( Collectors.toMap( Entry::getKey, Entry::getValue ) ) ); - tableNames = ImmutableMap.copyOf( tables.entrySet().stream().collect( Collectors.toMap( e -> Pair.of( e.getValue().namespaceId, namespaces.get( e.getValue().namespaceId ).caseSensitive ? e.getValue().name : e.getValue().name.toLowerCase() ), Entry::getValue ) ) ); + tableNames = ImmutableMap.copyOf( tables.entrySet().stream().collect( Collectors.toMap( e -> Pair.of( e.getValue().namespaceId, getAdjustedName( e.getValue().namespaceId, e.getValue().name ) ), Entry::getValue ) ) ); columns = ImmutableMap.copyOf( catalogs.values().stream().flatMap( c -> c.getColumns().entrySet().stream() ).collect( Collectors.toMap( Entry::getKey, Entry::getValue ) ) ); columnNames = ImmutableMap.copyOf( columns.entrySet().stream().collect( Collectors.toMap( e -> namespaces.get( e.getValue().namespaceId ).caseSensitive ? Pair.of( e.getValue().tableId, e.getValue().name ) : Pair.of( e.getValue().tableId, e.getValue().name.toLowerCase() ), Entry::getValue ) ) ); @@ -154,6 +154,11 @@ public LogicalRelSnapshotImpl( Map catalogs ) { } + public String getAdjustedName( long namespaceId, String entityName ) { + return namespaces.get( namespaceId ).caseSensitive ? entityName : entityName.toLowerCase(); + } + + @Override public List getTables( @javax.annotation.Nullable Pattern namespace, Pattern name ) { if ( name == null ) { diff --git a/core/src/main/java/org/polypheny/db/partition/PartitionManager.java b/core/src/main/java/org/polypheny/db/partition/PartitionManager.java index 91d2d394e8..ccb6849c31 100644 --- a/core/src/main/java/org/polypheny/db/partition/PartitionManager.java +++ b/core/src/main/java/org/polypheny/db/partition/PartitionManager.java @@ -18,7 +18,7 @@ import java.util.List; import java.util.Map; -import org.polypheny.db.catalog.entity.CatalogColumnPlacement; +import org.polypheny.db.catalog.entity.AllocationColumn; import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.type.PolyType; @@ -33,7 +33,7 @@ public interface PartitionManager { boolean probePartitionGroupDistributionChange( LogicalTable catalogTable, int storeId, long columnId, int threshold ); - Map> getRelevantPlacements( LogicalTable catalogTable, List partitionIds, List excludedAdapters ); + Map> getRelevantPlacements( LogicalTable catalogTable, List partitionIds, List excludedAdapters ); boolean validatePartitionGroupSetup( List> partitionGroupQualifiers, long numPartitionGroups, List partitionGroupNames, LogicalColumn partitionColumn ); @@ -44,7 +44,7 @@ public interface PartitionManager { * @param partitionIds List of all requested partitions ids * @return Returns map of AdapterId {@code ->} [Map PartitionsId {@code ->}needed Columns Placements] */ - Map>> getAllPlacements( LogicalTable catalogTable, List partitionIds ); + Map>> getAllPlacements( LogicalTable catalogTable, List partitionIds ); int getNumberOfPartitionsPerGroup( int numberOfPartitions ); diff --git a/core/src/main/java/org/polypheny/db/processing/DataMigrator.java b/core/src/main/java/org/polypheny/db/processing/DataMigrator.java index 48a716a758..a0b54ec3ff 100644 --- a/core/src/main/java/org/polypheny/db/processing/DataMigrator.java +++ b/core/src/main/java/org/polypheny/db/processing/DataMigrator.java 
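The getAdjustedName helper introduced in the LogicalRelSnapshotImpl hunk above centralizes the case-sensitivity rule that was previously inlined into the tableNames map construction: names in case-insensitive namespaces are normalized to lower case on both write and lookup. A minimal standalone illustration of that rule, assuming nothing beyond plain Java (the class and names here are invented for the example):

    import java.util.Map;

    class NameAdjuster {

        // Mirrors LogicalRelSnapshotImpl.getAdjustedName: entity names in a
        // case-insensitive namespace are stored and looked up in lower case.
        static String adjust( boolean caseSensitive, String entityName ) {
            return caseSensitive ? entityName : entityName.toLowerCase();
        }

        public static void main( String[] args ) {
            Map<String, Long> tableNames = Map.of( adjust( false, "Customers" ), 42L );
            // Both spellings resolve to the same entry in a case-insensitive namespace.
            System.out.println( tableNames.get( adjust( false, "CUSTOMERS" ) ) ); // prints 42
        }

    }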
@@ -19,8 +19,8 @@ import java.util.List; import java.util.Map; import org.polypheny.db.algebra.AlgRoot; +import org.polypheny.db.catalog.entity.AllocationColumn; import org.polypheny.db.catalog.entity.CatalogAdapter; -import org.polypheny.db.catalog.entity.CatalogColumnPlacement; import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.catalog.entity.logical.LogicalTable; @@ -52,7 +52,7 @@ void copySelectiveData( Transaction transaction, CatalogAdapter store, LogicalTable sourceTable, LogicalTable targetTable, List columns, - Map> placementDistribution, + Map> placementDistribution, List targetPartitionIds ); /** @@ -76,14 +76,14 @@ void copyPartitionData( List sourcePartitionIds, List targetPartitionIds ); - AlgRoot buildInsertStatement( Statement statement, List to, long partitionId ); + AlgRoot buildInsertStatement( Statement statement, List to, long partitionId ); //is used within copyData void executeQuery( List columns, AlgRoot sourceRel, Statement sourceStatement, Statement targetStatement, AlgRoot targetRel, boolean isMaterializedView, boolean doesSubstituteOrderBy ); - AlgRoot buildDeleteStatement( Statement statement, List to, long partitionId ); + AlgRoot buildDeleteStatement( Statement statement, List to, long partitionId ); - AlgRoot getSourceIterator( Statement statement, Map> placementDistribution ); + AlgRoot getSourceIterator( Statement statement, Map> placementDistribution ); void copyGraphData( LogicalGraph graph, Transaction transaction, Long existingAdapterId, CatalogAdapter adapter ); diff --git a/core/src/main/java/org/polypheny/db/tools/RoutedAlgBuilder.java b/core/src/main/java/org/polypheny/db/tools/RoutedAlgBuilder.java index a360054a9f..73d978fde1 100644 --- a/core/src/main/java/org/polypheny/db/tools/RoutedAlgBuilder.java +++ b/core/src/main/java/org/polypheny/db/tools/RoutedAlgBuilder.java @@ -27,7 +27,7 @@ import org.bson.BsonValue; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.type.AlgDataType; -import org.polypheny.db.catalog.entity.CatalogColumnPlacement; +import org.polypheny.db.catalog.entity.AllocationColumn; import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.Context; @@ -98,14 +98,14 @@ public RoutedAlgBuilder documents( ImmutableList tuples, AlgDataType } - public void addPhysicalInfo( Map> physicalPlacements ) { + public void addPhysicalInfo( Map> physicalPlacements ) { final Map>> map = physicalPlacements.entrySet().stream() .collect( Collectors.toMap( Entry::getKey, entry -> map( entry.getValue() ) ) ); physicalPlacementsOfPartitions.putAll( map ); } - private List> map( List catalogCols ) { + private List> map( List catalogCols ) { return catalogCols.stream().map( col -> new Pair<>( col.adapterId, col.columnId ) ).collect( Collectors.toList() ); } diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java index 6cb7b94d4d..20532a2b12 100644 --- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java @@ -51,10 +51,10 @@ import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.entity.AllocationColumn; import org.polypheny.db.catalog.entity.CatalogAdapter; import 
org.polypheny.db.catalog.entity.CatalogAdapter.AdapterType; import org.polypheny.db.catalog.entity.CatalogCollectionPlacement; -import org.polypheny.db.catalog.entity.CatalogColumnPlacement; import org.polypheny.db.catalog.entity.CatalogConstraint; import org.polypheny.db.catalog.entity.CatalogDataPlacement; import org.polypheny.db.catalog.entity.CatalogForeignKey; @@ -226,7 +226,7 @@ private void handleSource( DataSource adapter ) { String physicalSchemaName = null; String physicalTableName = null; for ( ExportedColumn exportedColumn : entry.getValue() ) { - long columnId = catalog.getLogicalRel( defaultNamespaceId ).addColumn( + LogicalColumn column = catalog.getLogicalRel( defaultNamespaceId ).addColumn( exportedColumn.name, table.id, colPos++, @@ -239,14 +239,14 @@ private void handleSource( DataSource adapter ) { exportedColumn.nullable, Collation.getDefaultCollation() ); AllocationEntity allocation = catalog.getSnapshot().alloc().getAllocation( adapter.getAdapterId(), table.id ); - catalog.getAllocRel( defaultNamespaceId ).addColumnPlacement( + catalog.getAllocRel( defaultNamespaceId ).addColumn( allocation.id, - columnId, + column.id, PlacementType.STATIC, exportedColumn.physicalPosition ); // Not a valid partitionGroupID --> placeholder - catalog.getAllocRel( defaultNamespaceId ).updateColumnPlacementPhysicalPosition( adapter.getAdapterId(), columnId, exportedColumn.physicalPosition ); + catalog.getAllocRel( defaultNamespaceId ).updateColumnPlacementPhysicalPosition( adapter.getAdapterId(), column.id, exportedColumn.physicalPosition ); if ( exportedColumn.primary ) { - primaryKeyColIds.add( columnId ); + primaryKeyColIds.add( column.id ); } if ( physicalSchemaName == null ) { physicalSchemaName = exportedColumn.physicalSchemaName; @@ -306,7 +306,7 @@ public void dropAdapter( String name, Statement statement ) { // Remove table Set tablesToDrop = new HashSet<>(); - for ( CatalogColumnPlacement ccp : catalog.getSnapshot().alloc().getColumnPlacementsOnAdapter( catalogAdapter.id ) ) { + for ( AllocationColumn ccp : catalog.getSnapshot().alloc().getColumnPlacementsOnAdapter( catalogAdapter.id ) ) { tablesToDrop.add( ccp.tableId ); } @@ -339,7 +339,7 @@ public void dropAdapter( String name, Statement statement ) { // Delete column placement in catalog for ( LogicalColumn column : catalog.getSnapshot().rel().getColumns( tableId ) ) { if ( catalog.getSnapshot().alloc().checkIfExistsColumnPlacement( catalogAdapter.id, column.id ) ) { - catalog.getAllocRel( defaultNamespaceId ).deleteColumnPlacement( entity.id, column.id, false ); + catalog.getAllocRel( defaultNamespaceId ).deleteColumn( entity.id, column.id, false ); } } @@ -414,7 +414,7 @@ public void addColumnToSourceTable( LogicalTable catalogTable, String columnPhys } // Make sure this physical column has not already been added to this table - for ( CatalogColumnPlacement ccp : catalog.getSnapshot().alloc().getColumnPlacementsOnAdapterPerTable( adapterId, catalogTable.id ) ) { + for ( AllocationColumn ccp : catalog.getSnapshot().alloc().getColumnPlacementsOnAdapterPerTable( adapterId, catalogTable.id ) ) { /*if ( ccp.physicalColumnName.equalsIgnoreCase( columnPhysicalName ) ) { throw new RuntimeException( "The physical column '" + columnPhysicalName + "' has already been added to this table!" 
); }*/ @@ -422,7 +422,7 @@ public void addColumnToSourceTable( LogicalTable catalogTable, String columnPhys int position = updateAdjacentPositions( catalogTable, beforeColumn, afterColumn ); - long columnId = catalog.getLogicalRel( catalogTable.namespaceId ).addColumn( + LogicalColumn addedColumn = catalog.getLogicalRel( catalogTable.namespaceId ).addColumn( columnLogicalName, catalogTable.id, position, @@ -437,19 +437,18 @@ public void addColumnToSourceTable( LogicalTable catalogTable, String columnPhys ); // Add default value - addDefaultValue( catalogTable.namespaceId, defaultValue, columnId ); - LogicalColumn addedColumn = catalog.getSnapshot().rel().getColumn( columnId ); + addDefaultValue( catalogTable.namespaceId, defaultValue, addedColumn.id ); AllocationEntity allocation = catalog.getSnapshot().alloc().getAllocation( adapterId, catalogTable.id ); // Add column placement - catalog.getAllocRel( catalogTable.namespaceId ).addColumnPlacement( + catalog.getAllocRel( catalogTable.namespaceId ).addColumn( allocation.id, addedColumn.id, PlacementType.STATIC, position );//Not a valid partitionID --> placeholder // Set column position - catalog.getAllocRel( catalogTable.namespaceId ).updateColumnPlacementPhysicalPosition( adapterId, columnId, exportedColumn.physicalPosition ); + catalog.getAllocRel( catalogTable.namespaceId ).updateColumnPlacementPhysicalPosition( adapterId, addedColumn.id, exportedColumn.physicalPosition ); // Reset plan cache implementation cache & routing cache statement.getQueryProcessor().resetCaches(); @@ -467,13 +466,21 @@ private int updateAdjacentPositions( LogicalTable catalogTable, LogicalColumn be } // Update position of the other columns for ( int i = columns.size(); i >= position; i-- ) { - catalog.getLogicalRel( catalogTable.namespaceId ).setColumnPosition( columns.get( i - 1 ).id, i + 1 ); + updateColumnPosition( catalogTable, columns, i ); } } return position; } + private void updateColumnPosition( LogicalTable catalogTable, List columns, int i ) { + catalog.getLogicalRel( catalogTable.namespaceId ).setColumnPosition( columns.get( i - 1 ).id, i + 1 ); + for ( AllocationEntity allocation : catalog.getSnapshot().alloc().getAllocationsFromLogical( catalogTable.id ) ) { + catalog.getAllocRel( catalogTable.namespaceId ).updateColumnPlacementPhysicalPosition( allocation.id, columns.get( i - 1 ).id, i + 1 ); + } + } + + @Override public void addColumn( String columnName, LogicalTable catalogTable, String beforeColumnName, String afterColumnName, ColumnTypeInformation type, boolean nullable, String defaultValue, Statement statement ) { columnName = adjustNameIfNeeded( columnName, catalogTable.namespaceId ); @@ -491,7 +498,7 @@ public void addColumn( String columnName, LogicalTable catalogTable, String befo int position = updateAdjacentPositions( catalogTable, beforeColumn, afterColumn ); - long columnId = catalog.getLogicalRel( catalogTable.namespaceId ).addColumn( + LogicalColumn addedColumn = catalog.getLogicalRel( catalogTable.namespaceId ).addColumn( columnName, catalogTable.id, position, @@ -506,8 +513,7 @@ public void addColumn( String columnName, LogicalTable catalogTable, String befo ); // Add default value - addDefaultValue( catalogTable.namespaceId, defaultValue, columnId ); - LogicalColumn addedColumn = catalog.getSnapshot().rel().getColumn( columnId ); + addDefaultValue( catalogTable.namespaceId, defaultValue, addedColumn.id ); // Ask router on which stores this column shall be placed List stores = 
RoutingManager.getInstance().getCreatePlacementStrategy().getDataStoresForNewColumn( addedColumn ); @@ -515,14 +521,15 @@ public void addColumn( String columnName, LogicalTable catalogTable, String befo // Add column on underlying data stores and insert default value for ( DataStore store : stores ) { AllocationEntity allocation = catalog.getSnapshot().alloc().getAllocation( store.getAdapterId(), catalogTable.id ); - catalog.getAllocRel( catalogTable.namespaceId ).addColumnPlacement( + catalog.getAllocRel( catalogTable.namespaceId ).addColumn( allocation.id, addedColumn.id, // Will be set later PlacementType.AUTOMATIC, // Will be set later position );//Not a valid partitionID --> placeholder - AdapterManager.getInstance().getStore( store.getAdapterId() ).addColumn( statement.getPrepareContext(), catalogTable, addedColumn ); + AdapterManager.getInstance().getStore( store.getAdapterId() ).addColumn( statement.getPrepareContext(), allocation.unwrap( AllocationTable.class ), addedColumn ); } + catalog.updateSnapshot(); // Reset plan cache implementation cache & routing cache statement.getQueryProcessor().resetCaches(); } @@ -817,7 +824,7 @@ else if ( partitionGroupIds.isEmpty() && partitionGroupNames.isEmpty() ) { AllocationEntity allocation = catalog.getSnapshot().alloc().getAllocation( dataStore.getAdapterId(), catalogTable.id ); // Create column placements for ( long cid : columnIds ) { - catalog.getAllocRel( catalogTable.namespaceId ).addColumnPlacement( + catalog.getAllocRel( catalogTable.namespaceId ).addColumn( allocation.id, cid, PlacementType.MANUAL, @@ -828,7 +835,7 @@ else if ( partitionGroupIds.isEmpty() && partitionGroupNames.isEmpty() ) { CatalogPrimaryKey primaryKey = catalog.getSnapshot().rel().getPrimaryKey( catalogTable.primaryKey ); for ( long cid : primaryKey.columnIds ) { if ( !columnIds.contains( cid ) ) { - catalog.getAllocRel( catalogTable.namespaceId ).addColumnPlacement( + catalog.getAllocRel( catalogTable.namespaceId ).addColumn( allocation.id, cid, PlacementType.AUTOMATIC, @@ -851,7 +858,7 @@ else if ( partitionGroupIds.isEmpty() && partitionGroupNames.isEmpty() ) { Catalog.getInstance().getSnapshot(); // Create table on store - dataStore.createPhysicalTable( statement.getPrepareContext(), catalogTable, null ); + dataStore.createPhysicalTable( statement.getPrepareContext(), catalogTable, null, null ); // Copy data to the newly added placements DataMigrator dataMigrator = statement.getTransaction().getDataMigrator(); dataMigrator.copyData( statement.getTransaction(), catalog.getSnapshot().getAdapter( dataStore.getAdapterId() ), addedColumns, partitionIds ); @@ -878,20 +885,20 @@ public void addPrimaryKey( LogicalTable catalogTable, List columnNames, catalog.getLogicalRel( catalogTable.namespaceId ).addPrimaryKey( catalogTable.id, columnIds ); // Add new column placements - long pkColumnId = oldPk.columnIds.get( 0 ); // It is sufficient to check for one because all get replicated on all stores - List oldPkPlacements = catalog.getSnapshot().alloc().getColumnPlacements( pkColumnId ); - for ( CatalogColumnPlacement ccp : oldPkPlacements ) { + // long pkColumnId = oldPk.columnIds.get( 0 ); // It is sufficient to check for one because all get replicated on all stores + List allocations = catalog.getSnapshot().alloc().getAllocationsFromLogical( catalogTable.id ); + for ( AllocationEntity allocation : allocations ) { + Map allocColumns = allocation.unwrap( AllocationTable.class ).getColumns(); for ( long columnId : columnIds ) { - if ( 
!catalog.getSnapshot().alloc().checkIfExistsColumnPlacement( ccp.adapterId, columnId ) ) { - AllocationEntity allocation = catalog.getSnapshot().alloc().getAllocation( ccp.adapterId, catalogTable.id ); - catalog.getAllocRel( catalogTable.namespaceId ).addColumnPlacement( + if ( !allocColumns.containsKey( columnId ) ) { + catalog.getAllocRel( catalogTable.namespaceId ).addColumn( allocation.id, columnId, // Will be set later PlacementType.AUTOMATIC, 0 ); - AdapterManager.getInstance().getStore( ccp.adapterId ).addColumn( + AdapterManager.getInstance().getStore( allocation.adapterId ).addColumn( statement.getPrepareContext(), - catalog.getSnapshot().rel().getTable( ccp.tableId ), + allocation.unwrap( AllocationTable.class ), catalog.getSnapshot().rel().getColumn( columnId ) ); } } @@ -955,15 +962,15 @@ public void dropColumn( LogicalTable catalogTable, String columnName, Statement if ( catalogTable.entityType == EntityType.ENTITY ) { AdapterManager.getInstance().getStore( dp.adapterId ).dropColumn( statement.getPrepareContext(), dp ); } - catalog.getAllocRel( catalogTable.namespaceId ).deleteColumnPlacement( dp.adapterId, dp.columnId, true ); + catalog.getAllocRel( catalogTable.namespaceId ).deleteColumn( dp.adapterId, dp.columnId, true ); }*/ for ( AllocationEntity table : catalog.getSnapshot().alloc().getAllocationsFromLogical( catalogTable.id ) ) { - for ( CatalogColumnPlacement placement : table.unwrap( AllocationTable.class ).placements ) { + for ( AllocationColumn placement : catalog.getSnapshot().alloc().getColumns( table.id ) ) { if ( catalogTable.entityType == EntityType.ENTITY ) { AdapterManager.getInstance().getStore( table.adapterId ).dropColumn( statement.getPrepareContext(), placement ); } - AllocationEntity allocation = catalog.getSnapshot().alloc().getAllocation( placement.adapterId, catalogTable.id ); - catalog.getAllocRel( catalogTable.namespaceId ).deleteColumnPlacement( allocation.id, placement.columnId, true ); + AllocationEntity allocation = catalog.getSnapshot().alloc().getAllocation( table.adapterId, catalogTable.id ); + catalog.getAllocRel( catalogTable.namespaceId ).deleteColumn( allocation.id, placement.columnId, true ); } } @@ -1106,7 +1113,7 @@ public void setColumnType( LogicalTable catalogTable, String columnName, ColumnT type.scale, type.dimension, type.cardinality ); - for ( CatalogColumnPlacement placement : catalog.getSnapshot().alloc().getColumnPlacements( logicalColumn.id ) ) { + for ( AllocationColumn placement : catalog.getSnapshot().alloc().getColumnPlacements( logicalColumn.id ) ) { AdapterManager.getInstance().getStore( placement.adapterId ).updateColumnType( statement.getPrepareContext(), placement, @@ -1249,7 +1256,7 @@ public void modifyDataPlacement( LogicalTable catalogTable, List columnIds // Checks before physically removing of placement that the partition distribution is still valid and sufficient // Identifies which columns need to be removed - for ( CatalogColumnPlacement placement : catalog.getSnapshot().alloc().getColumnPlacementsOnAdapterPerTable( storeInstance.getAdapterId(), catalogTable.id ) ) { + for ( AllocationColumn placement : catalog.getSnapshot().alloc().getColumnPlacementsOnAdapterPerTable( storeInstance.getAdapterId(), catalogTable.id ) ) { if ( !columnIds.contains( placement.columnId ) ) { // Check whether there are any indexes located on the store requiring this column for ( CatalogIndex index : snapshot.getIndexes( catalogTable.id, false ) ) { @@ -1286,7 +1293,7 @@ public void modifyDataPlacement( LogicalTable 
catalogTable, List columnIds storeInstance.dropColumn( statement.getPrepareContext(), catalog.getSnapshot().alloc().getColumnPlacement( storeInstance.getAdapterId(), columnId ) ); // Drop column placement AllocationEntity allocation = catalog.getSnapshot().alloc().getAllocation( storeInstance.getAdapterId(), catalogTable.id ); - catalog.getAllocRel( catalogTable.namespaceId ).deleteColumnPlacement( allocation.id, columnId, true ); + catalog.getAllocRel( catalogTable.namespaceId ).deleteColumn( allocation.id, columnId, true ); } List tempPartitionGroupList = new ArrayList<>(); @@ -1348,7 +1355,7 @@ else if ( !partitionGroupNames.isEmpty() && partitionGroupIds.isEmpty() ) { for ( long cid : columnIds ) { if ( catalog.getSnapshot().alloc().checkIfExistsColumnPlacement( storeInstance.getAdapterId(), cid ) ) { - CatalogColumnPlacement placement = catalog.getSnapshot().alloc().getColumnPlacement( storeInstance.getAdapterId(), cid ); + AllocationColumn placement = catalog.getSnapshot().alloc().getColumnPlacement( storeInstance.getAdapterId(), cid ); if ( placement.placementType == PlacementType.AUTOMATIC ) { // Make placement manual catalog.getAllocRel( catalogTable.namespaceId ).updateColumnPlacementType( storeInstance.getAdapterId(), cid, PlacementType.MANUAL ); @@ -1356,13 +1363,13 @@ else if ( !partitionGroupNames.isEmpty() && partitionGroupIds.isEmpty() ) { } else { AllocationEntity allocation = catalog.getSnapshot().alloc().getAllocation( storeInstance.getAdapterId(), catalogTable.id ); // Create column placement - catalog.getAllocRel( catalogTable.namespaceId ).addColumnPlacement( + catalog.getAllocRel( catalogTable.namespaceId ).addColumn( allocation.id, cid, PlacementType.MANUAL, 0 ); // Add column on store - storeInstance.addColumn( statement.getPrepareContext(), catalogTable, snapshot.getColumn( cid ) ); + storeInstance.addColumn( statement.getPrepareContext(), allocation.unwrap( AllocationTable.class ), snapshot.getColumn( cid ) ); // Add to list of columns for which we need to copy data addedColumns.add( snapshot.getColumn( cid ) ); } @@ -1397,7 +1404,7 @@ else if ( !partitionGroupNames.isEmpty() && partitionGroupIds.isEmpty() ) { PlacementType.MANUAL, DataPlacementRole.UPTODATE ) ); - storeInstance.createPhysicalTable( statement.getPrepareContext(), catalogTable, null ); + storeInstance.createPhysicalTable( statement.getPrepareContext(), catalogTable, null, null ); } // Copy the data to the newly added column placements @@ -1450,7 +1457,7 @@ public void modifyPartitionPlacement( LogicalTable catalogTable, List part DataPlacementRole.UPTODATE ); } - storeInstance.createPhysicalTable( statement.getPrepareContext(), catalogTable, null ); + storeInstance.createPhysicalTable( statement.getPrepareContext(), catalogTable, null, null ); // Get only columns that are actually on that store List necessaryColumns = new LinkedList<>(); @@ -1493,7 +1500,7 @@ public void addColumnPlacement( LogicalTable catalogTable, String columnName, @N // Make sure that this store does not contain a placement of this column if ( catalog.getSnapshot().alloc().checkIfExistsColumnPlacement( storeInstance.getAdapterId(), logicalColumn.id ) ) { - CatalogColumnPlacement placement = catalog.getSnapshot().alloc().getColumnPlacement( storeInstance.getAdapterId(), logicalColumn.id ); + AllocationColumn placement = catalog.getSnapshot().alloc().getColumnPlacement( storeInstance.getAdapterId(), logicalColumn.id ); if ( placement.placementType == PlacementType.AUTOMATIC ) { // Make placement manual catalog.getAllocRel( 
catalogTable.namespaceId ).updateColumnPlacementType( @@ -1506,13 +1513,13 @@ public void addColumnPlacement( LogicalTable catalogTable, String columnName, @N } else { AllocationEntity allocation = catalog.getSnapshot().alloc().getAllocation( storeInstance.getAdapterId(), catalogTable.id ); // Create column placement - catalog.getAllocRel( catalogTable.namespaceId ).addColumnPlacement( + catalog.getAllocRel( catalogTable.namespaceId ).addColumn( allocation.id, logicalColumn.id, PlacementType.MANUAL, 0 ); // Add column on store - storeInstance.addColumn( statement.getPrepareContext(), catalogTable, logicalColumn ); + storeInstance.addColumn( statement.getPrepareContext(), allocation.unwrap( AllocationTable.class ), logicalColumn ); // Copy the data to the newly added column placements DataMigrator dataMigrator = statement.getTransaction().getDataMigrator(); dataMigrator.copyData( statement.getTransaction(), catalog.getSnapshot().getAdapter( storeInstance.getAdapterId() ), @@ -1559,7 +1566,7 @@ public void dropColumnPlacement( LogicalTable catalogTable, String columnName, @ storeInstance.dropColumn( statement.getPrepareContext(), catalog.getSnapshot().alloc().getColumnPlacement( storeInstance.getAdapterId(), logicalColumn.id ) ); AllocationEntity allocation = catalog.getSnapshot().alloc().getAllocation( storeInstance.getAdapterId(), catalogTable.id ); // Drop column placement - catalog.getAllocRel( catalogTable.namespaceId ).deleteColumnPlacement( allocation.id, logicalColumn.id, false ); + catalog.getAllocRel( catalogTable.namespaceId ).deleteColumn( allocation.id, logicalColumn.id, false ); // Reset query plan cache, implementation cache & routing cache statement.getQueryProcessor().resetCaches(); @@ -1728,31 +1735,31 @@ public void createMaterializedView( String viewName, long namespaceId, AlgRoot a List columnIds = new ArrayList<>(); - for ( FieldInformation column : columns ) { - long columnId = catalog.getLogicalRel( namespaceId ).addColumn( - column.name, + for ( FieldInformation field : columns ) { + LogicalColumn column = catalog.getLogicalRel( namespaceId ).addColumn( + field.name, tableId, - column.position, - column.typeInformation.type, - column.typeInformation.collectionType, - column.typeInformation.precision, - column.typeInformation.scale, - column.typeInformation.dimension, - column.typeInformation.cardinality, - column.typeInformation.nullable, - column.collation ); + field.position, + field.typeInformation.type, + field.typeInformation.collectionType, + field.typeInformation.precision, + field.typeInformation.scale, + field.typeInformation.dimension, + field.typeInformation.cardinality, + field.typeInformation.nullable, + field.collation ); // Created primary key is added to list - if ( column.name.startsWith( "_matid_" ) ) { - columnIds.add( columnId ); + if ( field.name.startsWith( "_matid_" ) ) { + columnIds.add( column.id ); } for ( DataStore s : stores ) { long adapterId = s.getAdapterId(); AllocationEntity allocation = catalog.getSnapshot().alloc().getAllocation( adapterId, tableId ); - catalog.getAllocRel( namespaceId ).addColumnPlacement( + catalog.getAllocRel( namespaceId ).addColumn( allocation.id, - columnId, + column.id, placementType, 0 ); @@ -1762,7 +1769,7 @@ public void createMaterializedView( String viewName, long namespaceId, AlgRoot a } else { logicalColumns = new ArrayList<>(); } - logicalColumns.add( relSnapshot.getColumn( columnId ) ); + logicalColumns.add( relSnapshot.getColumn( column.id ) ); addedColumns.put( adapterId, logicalColumns ); } @@ 
-1782,7 +1789,7 @@ public void createMaterializedView( String viewName, long namespaceId, AlgRoot a
                 PlacementType.AUTOMATIC,
                 DataPlacementRole.UPTODATE );
-        store.createPhysicalTable( statement.getPrepareContext(), catalogMaterializedView, null );
+        store.createPhysicalTable( statement.getPrepareContext(), catalogMaterializedView, null, null );
     }
     // Selected data from tables is added into the newly created materialized view
@@ -2185,13 +2192,15 @@ public void createTable( long namespaceId, String name, List f
         // addATable
         for ( DataStore store : stores ) {
             AllocationTable alloc = catalog.getAllocRel( namespaceId ).createAllocationTable( store.getAdapterId(), logical.id );
+            List<AllocationColumn> columns = new ArrayList<>();
+            int i = 0;
-            for ( Long id : ids.values() ) {
-                alloc = catalog.getAllocRel( namespaceId ).addColumnPlacement( alloc.id, id, PlacementType.AUTOMATIC, i );
+            for ( long id : ids.values() ) {
+                columns.add( catalog.getAllocRel( namespaceId ).addColumn( alloc.id, id, PlacementType.AUTOMATIC, i ) );
                 i++;
             }
-            catalog.getPhysical( namespaceId ).addEntities( store.createPhysicalTable( statement.getPrepareContext(), logical, alloc ) );
+            catalog.getPhysical( namespaceId ).addEntities( store.createPhysicalTable( statement.getPrepareContext(), logical, alloc, columns ) );
         }
         catalog.updateSnapshot();
@@ -2597,8 +2606,8 @@ public void addPartitioning( PartitionInformation partitionInfo, List
             stores = new ArrayList<>();
             fillStores = true;
         }
-        List<CatalogColumnPlacement> catalogColumnPlacements = snapshot.alloc().getColumnPlacements( pkColumn.id );
-        for ( CatalogColumnPlacement ccp : catalogColumnPlacements ) {
+        List<AllocationColumn> allocationColumns = snapshot.alloc().getColumnPlacements( pkColumn.id );
+        for ( AllocationColumn ccp : allocationColumns ) {
             if ( fillStores ) {
                 // Ask router on which store(s) the table should be placed
                 Adapter adapter = AdapterManager.getInstance().getAdapter( ccp.adapterId );
@@ -2623,7 +2632,7 @@ public void addPartitioning( PartitionInformation partitionInfo, List
             }
             // First create new tables
-            store.createPhysicalTable( statement.getPrepareContext(), partitionedTable, null );
+            store.createPhysicalTable( statement.getPrepareContext(), partitionedTable, null, null );
             // Copy data from unpartitioned to partitioned
             // Get only columns that are actually on that store
@@ -2694,7 +2703,7 @@ public void removePartitioning( LogicalTable partitionedTable, Statement stateme
         // Need to gather the partitionDistribution before actually merging
         // We need a columnPlacement for every partition
-        Map<Long, List<CatalogColumnPlacement>> placementDistribution = new HashMap<>();
+        Map<Long, List<AllocationColumn>> placementDistribution = new HashMap<>();
         PartitionManagerFactory partitionManagerFactory = PartitionManagerFactory.getInstance();
         PartitionManager partitionManager = partitionManagerFactory.getPartitionManager( partition.partitionType );
         placementDistribution = partitionManager.getRelevantPlacements( partitionedTable, partition.partitionIds, new ArrayList<>( List.of( -1L ) ) );
@@ -2713,8 +2722,8 @@ public void removePartitioning( LogicalTable partitionedTable, Statement stateme
         LogicalColumn pkColumn = relSnapshot.getColumn( pkColumnIds.get( 0 ) );
         // This gets us only one ccp per store (first part of PK)
-        List<CatalogColumnPlacement> catalogColumnPlacements = catalog.getSnapshot().alloc().getColumnPlacements( pkColumn.id );
-        for ( CatalogColumnPlacement ccp : catalogColumnPlacements ) {
+        List<AllocationColumn> allocationColumns = catalog.getSnapshot().alloc().getColumnPlacements( pkColumn.id );
+        for ( AllocationColumn ccp : allocationColumns ) {
            // Ask router on which store(s) the table should be placed
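The createTable hunk earlier in this file is the heart of the new calling convention: the AllocationColumn objects created for a store are collected and handed to createPhysicalTable together with the allocation table. A minimal sketch, with catalog, namespaceId, logical, ids, stores and statement assumed from the surrounding method:

    for ( DataStore store : stores ) {
        AllocationTable alloc = catalog.getAllocRel( namespaceId ).createAllocationTable( store.getAdapterId(), logical.id );
        List<AllocationColumn> columns = new ArrayList<>();
        int i = 0;
        for ( long id : ids.values() ) {
            // addColumn now returns the created AllocationColumn instead of a rebuilt AllocationTable
            columns.add( catalog.getAllocRel( namespaceId ).addColumn( alloc.id, id, PlacementType.AUTOMATIC, i++ ) );
        }
        // createPhysicalTable gained a fourth parameter carrying the allocation columns
        catalog.getPhysical( namespaceId ).addEntities( store.createPhysicalTable( statement.getPrepareContext(), logical, alloc, columns ) );
    }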
Adapter adapter = AdapterManager.getInstance().getAdapter( ccp.adapterId ); if ( adapter instanceof DataStore ) { @@ -2737,7 +2746,7 @@ public void removePartitioning( LogicalTable partitionedTable, Statement stateme DataPlacementRole.UPTODATE ); // First create new tables - store.createPhysicalTable( statement.getPrepareContext(), mergedTable, null ); + store.createPhysicalTable( statement.getPrepareContext(), mergedTable, null, null ); // Get only columns that are actually on that store List necessaryColumns = new LinkedList<>(); @@ -2813,7 +2822,7 @@ public void removePartitioning( LogicalTable partitionedTable, Statement stateme private long addColumn( long namespaceId, String columnName, ColumnTypeInformation typeInformation, Collation collation, String defaultValue, long tableId, int position, List stores, PlacementType placementType ) { columnName = adjustNameIfNeeded( columnName, namespaceId ); - long addedColumnId = catalog.getLogicalRel( namespaceId ).addColumn( + LogicalColumn addedColumn = catalog.getLogicalRel( namespaceId ).addColumn( columnName, tableId, position, @@ -2828,18 +2837,18 @@ private long addColumn( long namespaceId, String columnName, ColumnTypeInformati ); // Add default value - addDefaultValue( namespaceId, defaultValue, addedColumnId ); + addDefaultValue( namespaceId, defaultValue, addedColumn.id ); /*for ( DataStore s : stores ) { AllocationEntity allocation = catalog.getSnapshot().alloc().getAllocation( s.getAdapterId(), tableId ); - catalog.getAllocRel( namespaceId ).addColumnPlacement( + catalog.getAllocRel( namespaceId ).addColumn( allocation.id, addedColumnId, placementType, null, null, null, position ); }*/ - return addedColumnId; + return addedColumn.id; } @@ -2997,7 +3006,7 @@ public void dropTableOld( LogicalTable catalogTable, Statement statement ) { for ( LogicalColumn column : columns ) { if ( catalog.getSnapshot().alloc().checkIfExistsColumnPlacement( placement.adapterId, column.id ) ) { AllocationEntity allocation = catalog.getSnapshot().alloc().getAllocation( placement.getAdapterId(), catalogTable.id ); - catalog.getAllocRel( catalogTable.namespaceId ).deleteColumnPlacement( allocation.id, column.id, false ); + catalog.getAllocRel( catalogTable.namespaceId ).deleteColumn( allocation.id, column.id, false ); } } } diff --git a/dbms/src/main/java/org/polypheny/db/partition/AbstractPartitionManager.java b/dbms/src/main/java/org/polypheny/db/partition/AbstractPartitionManager.java index 6fc8aa4176..77824851db 100644 --- a/dbms/src/main/java/org/polypheny/db/partition/AbstractPartitionManager.java +++ b/dbms/src/main/java/org/polypheny/db/partition/AbstractPartitionManager.java @@ -22,8 +22,8 @@ import java.util.Map; import lombok.extern.slf4j.Slf4j; import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.entity.AllocationColumn; import org.polypheny.db.catalog.entity.CatalogAdapter; -import org.polypheny.db.catalog.entity.CatalogColumnPlacement; import org.polypheny.db.catalog.entity.CatalogPartition; import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalTable; @@ -45,9 +45,9 @@ public abstract class AbstractPartitionManager implements PartitionManager { public boolean probePartitionGroupDistributionChange( LogicalTable catalogTable, int storeId, long columnId, int threshold ) { // Check for the specified columnId if we still have a ColumnPlacement for every partitionGroup for ( Long partitionGroupId : Catalog.getInstance().getSnapshot().alloc().getPartitionProperty( 
catalogTable.id ).partitionGroupIds ) { - List ccps = catalog.getSnapshot().alloc().getColumnPlacementsByPartitionGroup( catalogTable.id, partitionGroupId, columnId ); + List ccps = catalog.getSnapshot().alloc().getColumnPlacementsByPartitionGroup( catalogTable.id, partitionGroupId, columnId ); if ( ccps.size() <= threshold ) { - for ( CatalogColumnPlacement placement : ccps ) { + for ( AllocationColumn placement : ccps ) { if ( placement.adapterId == storeId ) { return false; } @@ -59,18 +59,18 @@ public boolean probePartitionGroupDistributionChange( LogicalTable catalogTable, @Override - public Map> getRelevantPlacements( LogicalTable catalogTable, List partitionIds, List excludedAdapters ) { + public Map> getRelevantPlacements( LogicalTable catalogTable, List partitionIds, List excludedAdapters ) { Catalog catalog = Catalog.getInstance(); - Map> placementDistribution = new HashMap<>(); + Map> placementDistribution = new HashMap<>(); if ( partitionIds != null ) { for ( long partitionId : partitionIds ) { CatalogPartition catalogPartition = catalog.getSnapshot().alloc().getPartition( partitionId ); - List relevantCcps = new ArrayList<>(); + List relevantCcps = new ArrayList<>(); for ( LogicalColumn column : catalog.getSnapshot().rel().getColumns( catalogTable.id ) ) { - List ccps = catalog.getSnapshot().alloc().getColumnPlacementsByPartitionGroup( catalogTable.id, catalogPartition.partitionGroupId, column.id ); + List ccps = catalog.getSnapshot().alloc().getColumnPlacementsByPartitionGroup( catalogTable.id, catalogPartition.partitionGroupId, column.id ); ccps.removeIf( ccp -> excludedAdapters.contains( ccp.adapterId ) ); if ( !ccps.isEmpty() ) { // Get first column placement which contains partition @@ -127,8 +127,8 @@ public String getUnifiedNullValue() { @Override - public Map>> getAllPlacements( LogicalTable catalogTable, List partitionIds ) { - Map>> adapterPlacements = new HashMap<>(); // adapterId -> partitionId ; placements + public Map>> getAllPlacements( LogicalTable catalogTable, List partitionIds ) { + Map>> adapterPlacements = new HashMap<>(); // adapterId -> partitionId ; placements if ( partitionIds != null ) { for ( long partitionId : partitionIds ) { List adapters = catalog.getSnapshot().alloc().getAdaptersByPartitionGroup( catalogTable.id, partitionId ); @@ -137,7 +137,7 @@ public Map>> getAllPlacements( Logi if ( !adapterPlacements.containsKey( adapter.id ) ) { adapterPlacements.put( adapter.id, new HashMap<>() ); } - List placements = catalog.getSnapshot().alloc().getColumnPlacementsOnAdapterPerTable( adapter.id, catalogTable.id ); + List placements = catalog.getSnapshot().alloc().getColumnPlacementsOnAdapterPerTable( adapter.id, catalogTable.id ); adapterPlacements.get( adapter.id ).put( partitionId, placements ); } } diff --git a/dbms/src/main/java/org/polypheny/db/partition/FrequencyMapImpl.java b/dbms/src/main/java/org/polypheny/db/partition/FrequencyMapImpl.java index 34cdf09b1f..0c637e5730 100644 --- a/dbms/src/main/java/org/polypheny/db/partition/FrequencyMapImpl.java +++ b/dbms/src/main/java/org/polypheny/db/partition/FrequencyMapImpl.java @@ -345,7 +345,7 @@ private void createHotTables( LogicalTable table, List partitionsFromColdT DataPlacementRole.UPTODATE ); } - store.createPhysicalTable( statement.getPrepareContext(), table, null ); + store.createPhysicalTable( statement.getPrepareContext(), table, alloc, null ); List logicalColumns = new ArrayList<>(); catalog.getSnapshot().alloc().getColumnPlacementsOnAdapterPerTable( store.getAdapterId(), table.id 
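For orientation, the partition-manager interface above now returns AllocationColumn throughout; the nested map of getAllPlacements keeps its shape and only the leaf type changes. Roughly, with the generic types inferred from the diff:

    // adapterId -> ( partitionId -> column placements of the table on that adapter )
    Map<Long, Map<Long, List<AllocationColumn>>> adapterPlacements = new HashMap<>();
    for ( long partitionId : partitionIds ) {
        for ( CatalogAdapter adapter : catalog.getSnapshot().alloc().getAdaptersByPartitionGroup( catalogTable.id, partitionId ) ) {
            List<AllocationColumn> placements = catalog.getSnapshot().alloc().getColumnPlacementsOnAdapterPerTable( adapter.id, catalogTable.id );
            adapterPlacements.computeIfAbsent( adapter.id, k -> new HashMap<>() ).put( partitionId, placements );
        }
    }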
).forEach( cp -> logicalColumns.add( catalog.getSnapshot().rel().getColumn( cp.columnId ) ) ); diff --git a/dbms/src/main/java/org/polypheny/db/partition/TemperatureAwarePartitionManager.java b/dbms/src/main/java/org/polypheny/db/partition/TemperatureAwarePartitionManager.java index 704d67315d..61c3112c39 100644 --- a/dbms/src/main/java/org/polypheny/db/partition/TemperatureAwarePartitionManager.java +++ b/dbms/src/main/java/org/polypheny/db/partition/TemperatureAwarePartitionManager.java @@ -22,7 +22,7 @@ import java.util.List; import java.util.Map; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.entity.CatalogColumnPlacement; +import org.polypheny.db.catalog.entity.AllocationColumn; import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.partition.PartitionFunctionInfo.PartitionFunctionInfoColumn; @@ -53,7 +53,7 @@ public long getTargetPartitionId( LogicalTable catalogTable, String columnValue @Override - public Map> getRelevantPlacements( LogicalTable catalogTable, List partitionIds, List excludedAdapters ) { + public Map> getRelevantPlacements( LogicalTable catalogTable, List partitionIds, List excludedAdapters ) { // Get partition manager PartitionManagerFactory partitionManagerFactory = PartitionManagerFactory.getInstance(); PartitionProperty property = Catalog.getInstance().getSnapshot().alloc().getPartitionProperty( catalogTable.id ); @@ -66,7 +66,7 @@ public Map> getRelevantPlacements( LogicalTab @Override - public Map>> getAllPlacements( LogicalTable catalogTable, List partitionIds ) { + public Map>> getAllPlacements( LogicalTable catalogTable, List partitionIds ) { // Get partition manager PartitionManagerFactory partitionManagerFactory = PartitionManagerFactory.getInstance(); PartitionProperty property = Catalog.getInstance().getSnapshot().alloc().getPartitionProperty( catalogTable.id ); diff --git a/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java b/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java index 338c02003d..cf1787e37b 100644 --- a/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java +++ b/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java @@ -48,8 +48,8 @@ import org.polypheny.db.algebra.type.AlgDataTypeSystem; import org.polypheny.db.algebra.type.AlgRecordType; import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.entity.AllocationColumn; import org.polypheny.db.catalog.entity.CatalogAdapter; -import org.polypheny.db.catalog.entity.CatalogColumnPlacement; import org.polypheny.db.catalog.entity.CatalogPrimaryKey; import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalGraph; @@ -173,7 +173,7 @@ public void copyData( Transaction transaction, CatalogAdapter store, List targetColumnPlacements = new LinkedList<>(); + List targetColumnPlacements = new LinkedList<>(); for ( LogicalColumn logicalColumn : columns ) { targetColumnPlacements.add( Catalog.getInstance().getSnapshot().alloc().getColumnPlacement( store.id, logicalColumn.id ) ); } @@ -189,7 +189,7 @@ public void copyData( Transaction transaction, CatalogAdapter store, List> placementDistribution = new HashMap<>(); + Map> placementDistribution = new HashMap<>(); PartitionProperty property = snapshot.alloc().getPartitionProperty( table.id ); if ( property.isPartitioned ) { PartitionManagerFactory partitionManagerFactory = 
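The TemperatureAwarePartitionManager changes above are purely mechanical: the manager still looks up the table's actual partition type and forwards the call; only the element type of the returned map moves to AllocationColumn. A sketch of the delegation:

    @Override
    public Map<Long, List<AllocationColumn>> getRelevantPlacements( LogicalTable catalogTable, List<Long> partitionIds, List<Long> excludedAdapters ) {
        PartitionProperty property = Catalog.getInstance().getSnapshot().alloc().getPartitionProperty( catalogTable.id );
        PartitionManager partitionManager = PartitionManagerFactory.getInstance().getPartitionManager( property.partitionType );
        // delegate to the manager that matches the table's partition type
        return partitionManager.getRelevantPlacements( catalogTable, partitionIds, excludedAdapters );
    }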
PartitionManagerFactory.getInstance(); @@ -205,7 +205,7 @@ public void copyData( Transaction transaction, CatalogAdapter store, List> subDistribution = new HashMap<>( placementDistribution ); + Map> subDistribution = new HashMap<>( placementDistribution ); subDistribution.keySet().retainAll( List.of( partitionId ) ); AlgRoot sourceAlg = getSourceIterator( sourceStatement, subDistribution ); AlgRoot targetAlg; @@ -320,7 +320,7 @@ public void executeQuery( List selectColumnList, AlgRoot sourceAl @Override - public AlgRoot buildDeleteStatement( Statement statement, List to, long partitionId ) { + public AlgRoot buildDeleteStatement( Statement statement, List to, long partitionId ) { PhysicalTable physical = statement.getTransaction().getSnapshot().physical().getPhysicalTable( partitionId ); ModifiableEntity modifiableTable = physical.unwrap( ModifiableEntity.class ); @@ -331,7 +331,7 @@ public AlgRoot buildDeleteStatement( Statement statement, List columnNames = new LinkedList<>(); List values = new LinkedList<>(); - for ( CatalogColumnPlacement ccp : to ) { + for ( AllocationColumn ccp : to ) { LogicalColumn logicalColumn = Catalog.getInstance().getSnapshot().rel().getColumn( ccp.columnId ); columnNames.add( ccp.getLogicalColumnName() ); values.add( new RexDynamicParam( logicalColumn.getAlgDataType( typeFactory ), (int) logicalColumn.id ) ); @@ -355,7 +355,7 @@ public AlgRoot buildDeleteStatement( Statement statement, List to, long partitionId ) { + public AlgRoot buildInsertStatement( Statement statement, List to, long partitionId ) { PhysicalTable physical = statement.getTransaction().getSnapshot().physical().getPhysicalTable( partitionId ); ModifiableEntity modifiableTable = physical.unwrap( ModifiableEntity.class ); @@ -366,11 +366,11 @@ public AlgRoot buildInsertStatement( Statement statement, List placements = to.stream().sorted( Comparator.comparingLong( p -> p.columnId ) ).collect( Collectors.toList() ); + List placements = to.stream().sorted( Comparator.comparingLong( p -> p.columnId ) ).collect( Collectors.toList() ); List columnNames = new LinkedList<>(); List values = new LinkedList<>(); - for ( CatalogColumnPlacement ccp : placements ) { + for ( AllocationColumn ccp : placements ) { LogicalColumn logicalColumn = Catalog.getInstance().getSnapshot().rel().getColumn( ccp.columnId ); columnNames.add( ccp.getLogicalColumnName() ); values.add( new RexDynamicParam( logicalColumn.getAlgDataType( typeFactory ), (int) logicalColumn.id ) ); @@ -392,7 +392,7 @@ public AlgRoot buildInsertStatement( Statement statement, List to, long partitionId ) { + private AlgRoot buildUpdateStatement( Statement statement, List to, long partitionId ) { PhysicalTable physical = statement.getTransaction().getSnapshot().physical().getPhysicalTable( partitionId ); ModifiableEntity modifiableTable = physical.unwrap( ModifiableEntity.class ); @@ -410,7 +410,7 @@ private AlgRoot buildUpdateStatement( Statement statement, List columnNames = new LinkedList<>(); List values = new LinkedList<>(); - for ( CatalogColumnPlacement ccp : to ) { + for ( AllocationColumn ccp : to ) { LogicalColumn logicalColumn = snapshot.getColumn( ccp.columnId ); columnNames.add( ccp.getLogicalColumnName() ); values.add( new RexDynamicParam( logicalColumn.getAlgDataType( typeFactory ), (int) logicalColumn.id ) ); @@ -454,7 +454,7 @@ private AlgRoot buildUpdateStatement( Statement statement, List> placementDistribution ) { + public AlgRoot getSourceIterator( Statement statement, Map> placementDistribution ) { // Build Query AlgOptCluster 
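One detail of buildInsertStatement above worth spelling out: the target placements are sorted by column id before the dynamic parameters are created, so parameter indices stay aligned with the logical columns. Condensed, with to, typeFactory and the catalog snapshot assumed in scope:

    List<AllocationColumn> placements = to.stream()
            .sorted( Comparator.comparingLong( p -> p.columnId ) )
            .collect( Collectors.toList() );
    List<String> columnNames = new LinkedList<>();
    List<RexNode> values = new LinkedList<>();
    for ( AllocationColumn ccp : placements ) {
        LogicalColumn logicalColumn = Catalog.getInstance().getSnapshot().rel().getColumn( ccp.columnId );
        columnNames.add( ccp.getLogicalColumnName() );
        // one dynamic parameter per target column, indexed by the logical column id
        values.add( new RexDynamicParam( logicalColumn.getAlgDataType( typeFactory ), (int) logicalColumn.id ) );
    }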
cluster = AlgOptCluster.create( @@ -466,7 +466,7 @@ public AlgRoot getSourceIterator( Statement statement, Map selectSourcePlacements( LogicalTable table, List columns, long excludingAdapterId ) { + public static List selectSourcePlacements( LogicalTable table, List columns, long excludingAdapterId ) { // Find the adapter with the most column placements Catalog catalog = Catalog.getInstance(); Snapshot snapshot = catalog.getSnapshot(); @@ -485,13 +485,13 @@ public static List selectSourcePlacements( LogicalTable } // Take the adapter with most placements as base and add missing column placements - List placementList = new LinkedList<>(); + List placementList = new LinkedList<>(); for ( LogicalColumn column : snapshot.rel().getColumns( table.id ) ) { if ( columnIds.contains( column.id ) ) { if ( snapshot.alloc().getDataPlacement( adapterIdWithMostPlacements, table.id ).columnPlacementsOnAdapter.contains( column.id ) ) { placementList.add( snapshot.alloc().getColumnPlacement( adapterIdWithMostPlacements, column.id ) ); } else { - for ( CatalogColumnPlacement placement : snapshot.alloc().getColumnPlacements( column.id ) ) { + for ( AllocationColumn placement : snapshot.alloc().getColumnPlacements( column.id ) ) { if ( placement.adapterId != excludingAdapterId ) { placementList.add( placement ); break; @@ -518,12 +518,12 @@ public static List selectSourcePlacements( LogicalTable * @param targetPartitionIds Target Partitions where data should be inserted */ @Override - public void copySelectiveData( Transaction transaction, CatalogAdapter store, LogicalTable sourceTable, LogicalTable targetTable, List columns, Map> placementDistribution, List targetPartitionIds ) { + public void copySelectiveData( Transaction transaction, CatalogAdapter store, LogicalTable sourceTable, LogicalTable targetTable, List columns, Map> placementDistribution, List targetPartitionIds ) { CatalogPrimaryKey sourcePrimaryKey = Catalog.getInstance().getSnapshot().rel().getPrimaryKey( sourceTable.primaryKey ); AllocSnapshot snapshot = Catalog.getInstance().getSnapshot().alloc(); // Check Lists - List targetColumnPlacements = new LinkedList<>(); + List targetColumnPlacements = new LinkedList<>(); for ( LogicalColumn logicalColumn : columns ) { targetColumnPlacements.add( snapshot.getColumnPlacement( store.id, logicalColumn.id ) ); } @@ -625,7 +625,7 @@ public void copyPartitionData( Transaction transaction, CatalogAdapter store, Lo CatalogPrimaryKey primaryKey = snapshot.rel().getPrimaryKey( sourceTable.primaryKey ); // Check Lists - List targetColumnPlacements = new LinkedList<>(); + List targetColumnPlacements = new LinkedList<>(); for ( LogicalColumn logicalColumn : columns ) { targetColumnPlacements.add( snapshot.alloc().getColumnPlacement( store.id, logicalColumn.id ) ); } @@ -652,7 +652,7 @@ public void copyPartitionData( Transaction transaction, CatalogAdapter store, Lo PartitionManager partitionManager = partitionManagerFactory.getPartitionManager( targetProperty.partitionType ); //We need a columnPlacement for every partition - Map> placementDistribution = new HashMap<>(); + Map> placementDistribution = new HashMap<>(); PartitionProperty sourceProperty = snapshot.alloc().getPartitionProperty( sourceTable.id ); placementDistribution.put( sourceProperty.partitionIds.get( 0 ), selectSourcePlacements( sourceTable, selectColumnList, -1 ) ); diff --git a/dbms/src/main/java/org/polypheny/db/routing/UiRoutingPageUtil.java b/dbms/src/main/java/org/polypheny/db/routing/UiRoutingPageUtil.java index c25961fd0e..74e3097a0f 
100644 --- a/dbms/src/main/java/org/polypheny/db/routing/UiRoutingPageUtil.java +++ b/dbms/src/main/java/org/polypheny/db/routing/UiRoutingPageUtil.java @@ -23,7 +23,7 @@ import org.polypheny.db.algebra.constant.ExplainFormat; import org.polypheny.db.algebra.constant.ExplainLevel; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.entity.CatalogColumnPlacement; +import org.polypheny.db.catalog.entity.AllocationColumn; import org.polypheny.db.catalog.entity.CatalogPartition; import org.polypheny.db.catalog.entity.CatalogPartitionGroup; import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; @@ -103,9 +103,9 @@ private static void addSelectedAdapterTable( InformationManager queryAnalyzer, P CatalogPartitionGroup catalogPartitionGroup = snapshot.alloc().getPartitionGroup( catalogPartition.partitionGroupId ); v.forEach( p -> { - CatalogColumnPlacement catalogColumnPlacement = snapshot.alloc().getColumnPlacement( p.left, p.right ); + AllocationColumn allocationColumn = snapshot.alloc().getColumnPlacement( p.left, p.right ); CatalogPartitionPlacement catalogPartitionPlacement = snapshot.alloc().getPartitionPlacement( p.left, k ); - LogicalColumn logicalColumn = snapshot.rel().getColumn( catalogColumnPlacement.columnId ); + LogicalColumn logicalColumn = snapshot.rel().getColumn( allocationColumn.columnId ); table.addRow( snapshot.getNamespace( catalogTable.namespaceId ) + "." + catalogTable.name, logicalColumn.name, diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java b/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java index 6a56d07d5b..e5b27e3382 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java @@ -52,8 +52,8 @@ import org.polypheny.db.algebra.type.AlgDataTypeSystem; import org.polypheny.db.algebra.type.AlgRecordType; import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.entity.AllocationColumn; import org.polypheny.db.catalog.entity.CatalogAdapter; -import org.polypheny.db.catalog.entity.CatalogColumnPlacement; import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.catalog.entity.CatalogNamespace; import org.polypheny.db.catalog.entity.LogicalNamespace; @@ -108,7 +108,7 @@ public abstract class BaseRouter implements Router { /** * Execute the table scan on the first placement of a table */ - protected static Map> selectPlacement( LogicalTable table ) { + protected static Map> selectPlacement( LogicalTable table ) { // Find the adapter with the most column placements long adapterIdWithMostPlacements = -1; int numOfPlacements = 0; @@ -120,7 +120,7 @@ protected static Map> selectPlacement( Logica } // Take the adapter with most placements as base and add missing column placements - List placementList = new LinkedList<>(); + List placementList = new LinkedList<>(); for ( LogicalColumn column : Catalog.snapshot().rel().getColumns( table.id ) ) { placementList.add( Catalog.snapshot().alloc().getColumnPlacements( column.id ).get( 0 ) ); } @@ -365,7 +365,7 @@ public AlgNode buildJoinedScan( Statement statement, AlgOptCluster cluster, List joinedScanCache.put( allocationEntities.hashCode(), node ); } - CatalogColumnPlacement placement = allocationEntities.get( 0 ).unwrap( AllocationTable.class ).placements.get( 0 ); + AllocationColumn placement = allocationEntities.get( 0 ).unwrap( AllocationTable.class ).placements.get( 0 ); // todo dl: remove after RowType refactor if ( 
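The BaseRouter change above keeps the old heuristic of scanning the first available placement of every column; only the element type becomes AllocationColumn. Roughly:

    // take the first placement of every column of the table as the scan target
    List<AllocationColumn> placementList = new LinkedList<>();
    for ( LogicalColumn column : Catalog.snapshot().rel().getColumns( table.id ) ) {
        placementList.add( Catalog.snapshot().alloc().getColumnPlacements( column.id ).get( 0 ) );
    }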
Catalog.snapshot().getNamespace( placement.namespaceId ).namespaceType == NamespaceType.DOCUMENT ) { AlgDataType rowType = new AlgRecordType( List.of( new AlgDataTypeFieldImpl( "d", 0, cluster.getTypeFactory().createPolyType( PolyType.DOCUMENT ) ) ) ); diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/CachedPlanRouter.java b/dbms/src/main/java/org/polypheny/db/routing/routers/CachedPlanRouter.java index 5eb3b26f32..0f2dc9a1ee 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/CachedPlanRouter.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/CachedPlanRouter.java @@ -27,7 +27,7 @@ import org.polypheny.db.algebra.logical.relational.LogicalRelScan; import org.polypheny.db.algebra.logical.relational.LogicalValues; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.entity.CatalogColumnPlacement; +import org.polypheny.db.catalog.entity.AllocationColumn; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.partition.properties.PartitionProperty; import org.polypheny.db.plan.AlgOptCluster; @@ -65,10 +65,10 @@ private RoutedAlgBuilder buildCachedSelect( AlgNode node, RoutedAlgBuilder build LogicalTable catalogTable = node.getEntity().unwrap( LogicalTable.class ); PartitionProperty property = Catalog.snapshot().alloc().getPartitionProperty( catalogTable.id ); List partitionIds = property.partitionIds; - Map> placement = new HashMap<>(); + Map> placement = new HashMap<>(); for ( long partition : partitionIds ) { if ( cachedPlan.physicalPlacementsOfPartitions.get( partition ) != null ) { - List colPlacements = cachedPlan.physicalPlacementsOfPartitions.get( partition ).stream() + List colPlacements = cachedPlan.physicalPlacementsOfPartitions.get( partition ).stream() .map( placementInfo -> catalog.getSnapshot().alloc().getColumnPlacement( placementInfo.left, placementInfo.right ) ) .collect( Collectors.toList() ); placement.put( partition, colPlacements ); diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java b/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java index d675736614..d92cae4288 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java @@ -74,9 +74,9 @@ import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.algebra.type.AlgDataTypeFieldImpl; import org.polypheny.db.algebra.type.AlgRecordType; +import org.polypheny.db.catalog.entity.AllocationColumn; import org.polypheny.db.catalog.entity.CatalogAdapter; import org.polypheny.db.catalog.entity.CatalogCollectionPlacement; -import org.polypheny.db.catalog.entity.CatalogColumnPlacement; import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.catalog.entity.CatalogGraphPlacement; import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; @@ -209,11 +209,11 @@ public AlgNode routeDmlOld( LogicalRelModify modify, Statement statement ) { LogicalColumn pkColumn = snapshot.rel().getColumn( pkColumnIds.get( 0 ) ); // Essentially gets a list of all stores where this table resides - List pkPlacements = snapshot.alloc().getColumnPlacements( pkColumn.id ); + List pkPlacements = snapshot.alloc().getColumnPlacements( pkColumn.id ); List allocs = snapshot.alloc().getAllocationsFromLogical( catalogTable.id );//.getPartitionProperty( catalogTable.id ); if ( !allocs.isEmpty() && log.isDebugEnabled() ) { log.debug( "\nListing all relevant stores for table: '{}' and 
all partitions: {}", catalogTable.name, -1 );//property.partitionGroupIds ); - for ( CatalogColumnPlacement dataPlacement : pkPlacements ) { + for ( AllocationColumn dataPlacement : pkPlacements ) { log.debug( "\t\t -> '{}' {}\t{}", dataPlacement.adapterId, @@ -234,10 +234,10 @@ public AlgNode routeDmlOld( LogicalRelModify modify, Statement statement ) { List> allValues = statement.getDataContext().getParameterValues(); Map newParameterValues = new HashMap<>(); - for ( CatalogColumnPlacement pkPlacement : pkPlacements ) { + for ( AllocationColumn pkPlacement : pkPlacements ) { // Get placements on store - List placementsOnAdapter = snapshot.alloc().getColumnPlacementsOnAdapterPerTable( pkPlacement.adapterId, catalogTable.id ); + List placementsOnAdapter = snapshot.alloc().getColumnPlacementsOnAdapterPerTable( pkPlacement.adapterId, catalogTable.id ); // If this is an update, check whether we need to execute on this store at all List updateColumnList = modify.getUpdateColumnList(); @@ -1241,7 +1241,7 @@ private AlgBuilder buildDml( AlgNode node, RoutedAlgBuilder builder, LogicalTable catalogTable, - List placements, + List placements, CatalogPartitionPlacement partitionPlacement, Statement statement, AlgOptCluster cluster, @@ -1253,7 +1253,7 @@ private AlgBuilder buildDml( if ( log.isDebugEnabled() ) { log.debug( "List of Store specific ColumnPlacements: " ); - for ( CatalogColumnPlacement ccp : placements ) { + for ( AllocationColumn ccp : placements ) { log.debug( "{}.{}", ccp.adapterId, ccp.getLogicalColumnName() ); } } @@ -1300,7 +1300,7 @@ private AlgBuilder buildDml( return builder; } else { // partitioned, add additional project ArrayList rexNodes = new ArrayList<>(); - for ( CatalogColumnPlacement ccp : placements ) { + for ( AllocationColumn ccp : placements ) { rexNodes.add( builder.field( ccp.getLogicalColumnName() ) ); } return builder.project( rexNodes ); @@ -1321,13 +1321,13 @@ private AlgBuilder buildDml( } builder.push( node.copy( node.getTraitSet(), ImmutableList.of( builder.peek( 0 ) ) ) ); ArrayList rexNodes = new ArrayList<>(); - for ( CatalogColumnPlacement ccp : placements ) { + for ( AllocationColumn ccp : placements ) { rexNodes.add( builder.field( ccp.getLogicalColumnName() ) ); } return builder.project( rexNodes ); } else { ArrayList rexNodes = new ArrayList<>(); - for ( CatalogColumnPlacement ccp : placements ) { + for ( AllocationColumn ccp : placements ) { rexNodes.add( builder.field( ccp.getLogicalColumnName() ) ); } for ( RexNode rexNode : ((LogicalProject) node).getProjects() ) { @@ -1365,10 +1365,10 @@ private AlgBuilder handleSelectFromOtherTable( RoutedAlgBuilder builder, Logical long pkid = fromTable.primaryKey; List pkColumnIds = snapshot.rel().getPrimaryKey( pkid ).columnIds; LogicalColumn pkColumn = snapshot.rel().getColumn( pkColumnIds.get( 0 ) ); - List pkPlacements = snapshot.alloc().getColumnPlacements( pkColumn.id ); + List pkPlacements = snapshot.alloc().getColumnPlacements( pkColumn.id ); List nodes = new ArrayList<>(); - for ( CatalogColumnPlacement pkPlacement : pkPlacements ) { + for ( AllocationColumn pkPlacement : pkPlacements ) { snapshot.alloc().getColumnPlacementsOnAdapterPerTable( pkPlacement.adapterId, fromTable.id ); @@ -1392,7 +1392,7 @@ private AlgBuilder handleSelectFromOtherTable( RoutedAlgBuilder builder, Logical } - private void dmlConditionCheck( LogicalFilter node, LogicalTable catalogTable, List placements, RexNode operand ) { + private void dmlConditionCheck( LogicalFilter node, LogicalTable catalogTable, List placements, 
RexNode operand ) { if ( operand instanceof RexInputRef ) { int index = ((RexInputRef) operand).getIndex(); AlgDataTypeField field = node.getInput().getRowType().getFieldList().get( index ); diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/FullPlacementQueryRouter.java b/dbms/src/main/java/org/polypheny/db/routing/routers/FullPlacementQueryRouter.java index d76950a1ad..4e1e013e4a 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/FullPlacementQueryRouter.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/FullPlacementQueryRouter.java @@ -27,7 +27,7 @@ import lombok.extern.slf4j.Slf4j; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.entity.CatalogColumnPlacement; +import org.polypheny.db.catalog.entity.AllocationColumn; import org.polypheny.db.catalog.entity.allocation.AllocationEntity; import org.polypheny.db.catalog.entity.logical.LogicalEntity; import org.polypheny.db.catalog.entity.logical.LogicalTable; @@ -59,10 +59,10 @@ protected List handleHorizontalPartitioning( log.debug( "{} is horizontally partitioned", catalogTable.name ); } - Collection>> placements = selectPlacementHorizontalPartitioning( node, catalogTable, queryInformation ); + Collection>> placements = selectPlacementHorizontalPartitioning( node, catalogTable, queryInformation ); List newBuilders = new ArrayList<>(); - for ( Map> placementCombination : placements ) { + for ( Map> placementCombination : placements ) { for ( RoutedAlgBuilder builder : builders ) { RoutedAlgBuilder newBuilder = RoutedAlgBuilder.createCopy( statement, cluster, builder ); newBuilder.addPhysicalInfo( placementCombination ); @@ -105,7 +105,7 @@ protected List handleNonePartitioning( log.debug( "{} is NOT partitioned - Routing will be easy", catalogTable.name ); } - final Set> placements = selectPlacement( catalogTable, queryInformation ); + final Set> placements = selectPlacement( catalogTable, queryInformation ); List newBuilders = new ArrayList<>(); /*for ( List placementCombination : placements ) { @@ -130,7 +130,7 @@ protected List handleNonePartitioning( } - protected Collection>> selectPlacementHorizontalPartitioning( AlgNode node, LogicalTable catalogTable, LogicalQueryInformation queryInformation ) { + protected Collection>> selectPlacementHorizontalPartitioning( AlgNode node, LogicalTable catalogTable, LogicalQueryInformation queryInformation ) { PartitionManagerFactory partitionManagerFactory = PartitionManagerFactory.getInstance(); PartitionProperty property = Catalog.snapshot().alloc().getPartitionProperty( catalogTable.id ); PartitionManager partitionManager = partitionManagerFactory.getPartitionManager( property.partitionType ); @@ -138,13 +138,13 @@ protected Collection>> selectPlacementHor // Utilize scanId to retrieve Partitions being accessed List partitionIds = queryInformation.getAccessedPartitions().get( node.getId() ); - Map>> allPlacements = partitionManager.getAllPlacements( catalogTable, partitionIds ); + Map>> allPlacements = partitionManager.getAllPlacements( catalogTable, partitionIds ); return allPlacements.values(); } - protected Set> selectPlacement( LogicalTable catalogTable, LogicalQueryInformation queryInformation ) { + protected Set> selectPlacement( LogicalTable catalogTable, LogicalQueryInformation queryInformation ) { // Get used columns from analyze List usedColumns = queryInformation.getAllColumnsPerTable( catalogTable.id ); @@ -155,9 +155,9 @@ protected Set> selectPlacement( LogicalTable catalo 
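In handleHorizontalPartitioning above, every placement combination returned by the partition manager fans out into a copy of each existing builder, and the combination is recorded on the copy for later physical routing. A sketch, assuming statement, cluster and builders from the surrounding method:

    List<RoutedAlgBuilder> newBuilders = new ArrayList<>();
    for ( Map<Long, List<AllocationColumn>> placementCombination : placements ) {
        for ( RoutedAlgBuilder builder : builders ) {
            RoutedAlgBuilder newBuilder = RoutedAlgBuilder.createCopy( statement, cluster, builder );
            // remember which physical placements this plan variant relies on
            newBuilder.addPhysicalInfo( placementCombination );
            newBuilders.add( newBuilder );
        }
    }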
.map( Entry::getKey ) .collect( Collectors.toList() ); - final Set> result = new HashSet<>(); + final Set> result = new HashSet<>(); for ( long adapterId : adapters ) { - List placements = usedColumns.stream() + List placements = usedColumns.stream() .map( colId -> Catalog.snapshot().alloc().getColumnPlacement( adapterId, colId ) ) .collect( Collectors.toList() ); diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/IcarusRouter.java b/dbms/src/main/java/org/polypheny/db/routing/routers/IcarusRouter.java index 981832dfd8..3aa0720308 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/IcarusRouter.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/IcarusRouter.java @@ -28,7 +28,7 @@ import lombok.extern.slf4j.Slf4j; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.entity.CatalogColumnPlacement; +import org.polypheny.db.catalog.entity.AllocationColumn; import org.polypheny.db.catalog.entity.logical.LogicalEntity; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.partition.properties.PartitionProperty; @@ -62,7 +62,7 @@ protected List handleNonePartitioning( AlgNode node, LogicalTa log.debug( "{} is NOT partitioned - Routing will be easy", catalogTable.name ); } - final Set> placements = selectPlacement( catalogTable, queryInformation ); + final Set> placements = selectPlacement( catalogTable, queryInformation ); List newBuilders = new ArrayList<>(); if ( placements.isEmpty() ) { this.cancelQuery = true; @@ -71,8 +71,8 @@ protected List handleNonePartitioning( AlgNode node, LogicalTa // Initial case with empty single builder if ( builders.size() == 1 && builders.get( 0 ).getPhysicalPlacementsOfPartitions().isEmpty() ) { - for ( List currentPlacement : placements ) { - final Map> currentPlacementDistribution = new HashMap<>(); + for ( List currentPlacement : placements ) { + final Map> currentPlacementDistribution = new HashMap<>(); PartitionProperty property = Catalog.snapshot().alloc().getPartitionProperty( catalogTable.id ); currentPlacementDistribution.put( property.partitionIds.get( 0 ), currentPlacement ); @@ -89,8 +89,8 @@ protected List handleNonePartitioning( AlgNode node, LogicalTa throw new RuntimeException( "Not allowed! 
With Icarus, this should not happen" ); } - for ( List currentPlacement : placements ) { - final Map> currentPlacementDistribution = new HashMap<>(); + for ( List currentPlacement : placements ) { + final Map> currentPlacementDistribution = new HashMap<>(); PartitionProperty property = Catalog.snapshot().alloc().getPartitionProperty( catalogTable.id ); currentPlacementDistribution.put( property.partitionIds.get( 0 ), currentPlacement ); diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/SimpleRouter.java b/dbms/src/main/java/org/polypheny/db/routing/routers/SimpleRouter.java index 3a4814e9cc..9086f0d6ea 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/SimpleRouter.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/SimpleRouter.java @@ -23,7 +23,7 @@ import lombok.extern.slf4j.Slf4j; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.entity.CatalogColumnPlacement; +import org.polypheny.db.catalog.entity.AllocationColumn; import org.polypheny.db.catalog.entity.allocation.AllocationEntity; import org.polypheny.db.catalog.entity.logical.LogicalEntity; import org.polypheny.db.catalog.entity.logical.LogicalTable; @@ -76,7 +76,7 @@ protected List handleHorizontalPartitioning( AlgNode node, Log // Utilize scanId to retrieve Partitions being accessed List partitionIds = queryInformation.getAccessedPartitions().get( node.getId() ); - Map> placementDistribution = partitionIds != null + Map> placementDistribution = partitionIds != null ? partitionManager.getRelevantPlacements( catalogTable, partitionIds, Collections.emptyList() ) : partitionManager.getRelevantPlacements( catalogTable, property.partitionIds, Collections.emptyList() ); diff --git a/dbms/src/main/java/org/polypheny/db/routing/strategies/CreateSinglePlacementStrategy.java b/dbms/src/main/java/org/polypheny/db/routing/strategies/CreateSinglePlacementStrategy.java index bdc256f949..07c1a6c526 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/strategies/CreateSinglePlacementStrategy.java +++ b/dbms/src/main/java/org/polypheny/db/routing/strategies/CreateSinglePlacementStrategy.java @@ -22,9 +22,8 @@ import org.polypheny.db.adapter.AdapterManager; import org.polypheny.db.adapter.DataStore; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.entity.CatalogDataPlacement; +import org.polypheny.db.catalog.entity.allocation.AllocationEntity; import org.polypheny.db.catalog.entity.logical.LogicalColumn; -import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.snapshot.Snapshot; @@ -33,9 +32,8 @@ public class CreateSinglePlacementStrategy implements CreatePlacementStrategy { @Override public List getDataStoresForNewColumn( LogicalColumn addedColumn ) { Snapshot snapshot = Catalog.getInstance().getSnapshot(); - LogicalTable catalogTable = snapshot.rel().getTable( addedColumn.tableId ); - List dataPlacement = snapshot.alloc().getDataPlacements( catalogTable.id ); - return ImmutableList.of( AdapterManager.getInstance().getStore( dataPlacement.get( 0 ).adapterId ) ); + List allocations = snapshot.alloc().getAllocationsFromLogical( addedColumn.tableId ); + return ImmutableList.of( AdapterManager.getInstance().getStore( allocations.get( 0 ).adapterId ) ); } diff --git a/dbms/src/main/java/org/polypheny/db/view/MaterializedViewManagerImpl.java b/dbms/src/main/java/org/polypheny/db/view/MaterializedViewManagerImpl.java index ec59ccf56f..2139be343a 100644 --- 
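The CreateSinglePlacementStrategy rewrite above drops the detour through LogicalTable and CatalogDataPlacement: the store for a new column is derived directly from the first allocation of the owning table. As the diff shows, essentially:

    @Override
    public List<DataStore> getDataStoresForNewColumn( LogicalColumn addedColumn ) {
        Snapshot snapshot = Catalog.getInstance().getSnapshot();
        // any allocation of the owning table identifies an adapter that must receive the column
        List<AllocationEntity> allocations = snapshot.alloc().getAllocationsFromLogical( addedColumn.tableId );
        return ImmutableList.of( AdapterManager.getInstance().getStore( allocations.get( 0 ).adapterId ) );
    }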
a/dbms/src/main/java/org/polypheny/db/view/MaterializedViewManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/view/MaterializedViewManagerImpl.java @@ -43,7 +43,7 @@ import org.polypheny.db.algebra.constant.Kind; import org.polypheny.db.algebra.logical.relational.LogicalRelViewScan; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.entity.CatalogColumnPlacement; +import org.polypheny.db.catalog.entity.AllocationColumn; import org.polypheny.db.catalog.entity.CatalogDataPlacement; import org.polypheny.db.catalog.entity.CatalogMaterializedView; import org.polypheny.db.catalog.entity.LogicalNamespace; @@ -304,7 +304,7 @@ public void prepareToUpdate( Long materializedId ) { public void addData( Transaction transaction, List stores, Map> columns, AlgRoot algRoot, CatalogMaterializedView materializedView ) { addMaterializedInfo( materializedView.id, materializedView.getMaterializedCriteria() ); - List columnPlacements = new LinkedList<>(); + List columnPlacements = new LinkedList<>(); DataMigrator dataMigrator = transaction.getDataMigrator(); List dataPlacements = transaction.getSnapshot().alloc().getDataPlacements( materializedView.id ); for ( CatalogDataPlacement placement : dataPlacements ) { @@ -333,7 +333,7 @@ public void updateData( Transaction transaction, Long materializedId ) { DataMigrator dataMigrator = transaction.getDataMigrator(); - List columnPlacements = new LinkedList<>(); + List columnPlacements = new LinkedList<>(); Map> columns = new HashMap<>(); List ids = new ArrayList<>(); diff --git a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSchema.java b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSchema.java index a7b46842f8..58d1d783c2 100644 --- a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSchema.java +++ b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSchema.java @@ -46,7 +46,7 @@ import org.polypheny.db.algebra.type.AlgDataTypeFactory; import org.polypheny.db.algebra.type.AlgDataTypeSystem; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.entity.CatalogColumnPlacement; +import org.polypheny.db.catalog.entity.AllocationColumn; import org.polypheny.db.catalog.entity.allocation.AllocationTable; import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalTable; @@ -92,7 +92,7 @@ public PhysicalTable createCsvTable( long id, LogicalTable catalogTable, Allocat List columns = csvSource.getExportedColumns().get( catalogTable.name ); - for ( CatalogColumnPlacement placement : allocationTable.placements ) { + for ( AllocationColumn placement : allocationTable.placements ) { LogicalColumn logicalColumn = Catalog.getInstance().getSnapshot().rel().getColumn( placement.columnId ); AlgDataType sqlType = sqlType( typeFactory, logicalColumn.type, logicalColumn.length, logicalColumn.scale, null ); fieldInfo.add( logicalColumn.name, columns.get( (int) placement.position ).physicalColumnName, sqlType ).nullable( logicalColumn.nullable ); diff --git a/plugins/hsqldb-adapter/src/main/java/org/polypheny/db/hsqldb/stores/HsqldbStore.java b/plugins/hsqldb-adapter/src/main/java/org/polypheny/db/hsqldb/stores/HsqldbStore.java index 51cce5bfcd..0bab4957ec 100644 --- a/plugins/hsqldb-adapter/src/main/java/org/polypheny/db/hsqldb/stores/HsqldbStore.java +++ b/plugins/hsqldb-adapter/src/main/java/org/polypheny/db/hsqldb/stores/HsqldbStore.java @@ -36,7 +36,7 @@ import 
org.polypheny.db.adapter.jdbc.stores.AbstractJdbcStore; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.IdBuilder; -import org.polypheny.db.catalog.entity.CatalogColumnPlacement; +import org.polypheny.db.catalog.entity.AllocationColumn; import org.polypheny.db.catalog.entity.CatalogIndex; import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; import org.polypheny.db.catalog.entity.allocation.AllocationTable; @@ -120,7 +120,7 @@ public Namespace getCurrentSchema() { @Override public void addIndex( Context context, CatalogIndex catalogIndex, List partitionIds ) { - List ccps = context.getSnapshot().alloc().getColumnPlacementsOnAdapterPerTable( getAdapterId(), catalogIndex.key.tableId ); + List ccps = context.getSnapshot().alloc().getColumnPlacementsOnAdapterPerTable( getAdapterId(), catalogIndex.key.tableId ); List partitionPlacements = new ArrayList<>(); partitionIds.forEach( id -> partitionPlacements.add( context.getSnapshot().alloc().getPartitionPlacement( getAdapterId(), id ) ) ); diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/stores/AbstractJdbcStore.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/stores/AbstractJdbcStore.java index d86d7fb691..165072c3b0 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/stores/AbstractJdbcStore.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/stores/AbstractJdbcStore.java @@ -32,7 +32,7 @@ import org.polypheny.db.adapter.jdbc.connection.ConnectionFactory; import org.polypheny.db.adapter.jdbc.connection.ConnectionHandlerException; import org.polypheny.db.catalog.IdBuilder; -import org.polypheny.db.catalog.entity.CatalogColumnPlacement; +import org.polypheny.db.catalog.entity.AllocationColumn; import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; import org.polypheny.db.catalog.entity.allocation.AllocationTable; import org.polypheny.db.catalog.entity.logical.LogicalColumn; @@ -126,7 +126,7 @@ public void createUdfs() { @Override - public List createPhysicalTable( Context context, LogicalTable logicalTable, AllocationTable allocationTable ) { + public List createPhysicalTable( Context context, LogicalTable logicalTable, AllocationTable allocationTable, List columns ) { String physicalTableName = getPhysicalTableName( logicalTable.id, allocationTable.id ); if ( log.isDebugEnabled() ) { @@ -151,13 +151,13 @@ protected StringBuilder buildCreateTableQuery( String schemaName, String physica .append( dialect.quoteIdentifier( physicalTableName ) ) .append( " ( " ); boolean first = true; - for ( CatalogColumnPlacement placement : allocationTable.placements ) { - LogicalColumn logicalColumn = catalog.getSnapshot().rel().getColumn( placement.columnId ); + for ( AllocationColumn column : catalog.getSnapshot().alloc().getColumns( allocationTable.id ) ) { + LogicalColumn logicalColumn = catalog.getSnapshot().rel().getColumn( column.columnId ); if ( !first ) { builder.append( ", " ); } first = false; - builder.append( dialect.quoteIdentifier( getPhysicalColumnName( placement.columnId ) ) ).append( " " ); + builder.append( dialect.quoteIdentifier( getPhysicalColumnName( column.columnId ) ) ).append( " " ); createColumnDefinition( logicalColumn, builder ); builder.append( " NULL" ); } @@ -167,18 +167,20 @@ protected StringBuilder buildCreateTableQuery( String schemaName, String physica @Override - public void addColumn( Context context, LogicalTable catalogTable, LogicalColumn 
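In buildCreateTableQuery above, the source of the column list changes: rather than reading placements off the AllocationTable, the store now asks the allocation snapshot for the columns of the allocation. A condensed sketch of the loop:

    boolean first = true;
    for ( AllocationColumn column : catalog.getSnapshot().alloc().getColumns( allocationTable.id ) ) {
        LogicalColumn logicalColumn = catalog.getSnapshot().rel().getColumn( column.columnId );
        if ( !first ) {
            builder.append( ", " );
        }
        first = false;
        // physical column name is derived from the column id, the definition from the logical column
        builder.append( dialect.quoteIdentifier( getPhysicalColumnName( column.columnId ) ) ).append( " " );
        createColumnDefinition( logicalColumn, builder );
        builder.append( " NULL" );
    }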
logicalColumn ) { + public void addColumn( Context context, AllocationTable catalogTable, LogicalColumn logicalColumn ) { String physicalColumnName = getPhysicalColumnName( logicalColumn.id ); - for ( CatalogPartitionPlacement partitionPlacement : context.getSnapshot().alloc().getPartitionPlacementsByTableOnAdapter( this.getAdapterId(), catalogTable.id ) ) { - String physicalTableName = partitionPlacement.physicalTableName; - String physicalSchemaName = partitionPlacement.physicalSchemaName; - StringBuilder query = buildAddColumnQuery( physicalSchemaName, physicalTableName, physicalColumnName, catalogTable, logicalColumn ); + PhysicalTable physicalTable = context.getSnapshot().physical().getPhysicalTable( this.getAdapterId(), catalogTable.id ); + + String physicalTableName = physicalTable.name; + String physicalSchemaName = physicalTable.namespaceName; + StringBuilder query = buildAddColumnQuery( physicalSchemaName, physicalTableName, physicalColumnName, logicalColumn ); + executeUpdate( query, context ); + // Insert default value + if ( logicalColumn.defaultValue != null ) { + query = buildInsertDefaultValueQuery( physicalSchemaName, physicalTableName, physicalColumnName, logicalColumn ); executeUpdate( query, context ); - // Insert default value - if ( logicalColumn.defaultValue != null ) { - query = buildInsertDefaultValueQuery( physicalSchemaName, physicalTableName, physicalColumnName, logicalColumn ); - executeUpdate( query, context ); - } + } + /*for ( String column : physicalTable.columns.values() ) { // Add physical name to placement catalog.getAllocRel( catalogTable.namespaceId ).updateColumnPlacementPhysicalNames( getAdapterId(), @@ -186,11 +188,11 @@ public void addColumn( Context context, LogicalTable catalogTable, LogicalColumn physicalSchemaName, physicalColumnName, false ); - } + }*/ } - protected StringBuilder buildAddColumnQuery( String physicalSchemaName, String physicalTableName, String physicalColumnName, LogicalTable catalogTable, LogicalColumn logicalColumn ) { + protected StringBuilder buildAddColumnQuery( String physicalSchemaName, String physicalTableName, String physicalColumnName, LogicalColumn logicalColumn ) { StringBuilder builder = new StringBuilder(); builder.append( "ALTER TABLE " ) .append( dialect.quoteIdentifier( physicalSchemaName ) ) @@ -264,7 +266,7 @@ protected StringBuilder buildInsertDefaultValueQuery( String physicalSchemaName, // Make sure to update overridden methods as well @Override - public void updateColumnType( Context context, CatalogColumnPlacement columnPlacement, LogicalColumn logicalColumn, PolyType oldType ) { + public void updateColumnType( Context context, AllocationColumn columnPlacement, LogicalColumn logicalColumn, PolyType oldType ) { if ( !this.dialect.supportsNestedArrays() && logicalColumn.collectionsType != null ) { return; } @@ -321,7 +323,7 @@ public void dropTable( Context context, LogicalTable catalogTable, List pa @Override - public void dropColumn( Context context, CatalogColumnPlacement columnPlacement ) { + public void dropColumn( Context context, AllocationColumn columnPlacement ) { for ( CatalogPartitionPlacement partitionPlacement : context.getSnapshot().alloc().getPartitionPlacementsByTableOnAdapter( columnPlacement.adapterId, columnPlacement.tableId ) ) { StringBuilder builder = new StringBuilder(); builder.append( "ALTER TABLE " ) diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java index 
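The rewritten addColumn above no longer iterates partition placements; it resolves the single physical table of the allocation and issues the ALTER TABLE once, followed by the default-value backfill. Sketch, following the diff:

    PhysicalTable physicalTable = context.getSnapshot().physical().getPhysicalTable( this.getAdapterId(), catalogTable.id );
    StringBuilder query = buildAddColumnQuery( physicalTable.namespaceName, physicalTable.name, physicalColumnName, logicalColumn );
    executeUpdate( query, context );
    if ( logicalColumn.defaultValue != null ) {
        // backfill existing rows with the declared default value
        query = buildInsertDefaultValueQuery( physicalTable.namespaceName, physicalTable.name, physicalColumnName, logicalColumn );
        executeUpdate( query, context );
    }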
da2f954d89..3abcdd0ee1 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java @@ -50,6 +50,7 @@ import org.polypheny.db.catalog.entity.CatalogUser; import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.allocation.AllocationEntity; +import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.entity.physical.PhysicalEntity; import org.polypheny.db.catalog.logical.DocumentCatalog; @@ -261,7 +262,7 @@ private void addDefaultCsvColumns( CatalogAdapter csv, long namespaceId ) { private void addDefaultCsvColumn( CatalogAdapter csv, LogicalTable table, String name, PolyType type, Collation collation, int position, Integer length ) { if ( !getSnapshot().rel().checkIfExistsColumn( table.id, name ) ) { - long colId = getLogicalRel( table.namespaceId ).addColumn( name, table.id, position, type, null, length, null, null, null, false, collation ); + LogicalColumn column = getLogicalRel( table.namespaceId ).addColumn( name, table.id, position, type, null, length, null, null, null, false, collation ); String filename = table.name + ".csv"; if ( table.name.equals( "emp" ) || table.name.equals( "work" ) ) { filename += ".gz"; @@ -275,8 +276,8 @@ private void addDefaultCsvColumn( CatalogAdapter csv, LogicalTable table, String alloc = getSnapshot().alloc().getAllocation( csv.id, table.id ); } - getAllocRel( table.namespaceId ).addColumnPlacement( alloc.id, colId, PlacementType.AUTOMATIC, position ); - //getAllocRel( table.namespaceId ).addColumnPlacement( alloc.id, colId, PlacementType.AUTOMATIC, filename, table.name, name, position ); + getAllocRel( table.namespaceId ).addColumn( alloc.id, column.id, PlacementType.AUTOMATIC, position ); + //getAllocRel( table.namespaceId ).addColumn( alloc.id, colId, PlacementType.AUTOMATIC, filename, table.name, name, position ); //getAllocRel( table.namespaceId ).updateColumnPlacementPhysicalPosition( allocId, colId, position ); updateSnapshot(); @@ -289,11 +290,11 @@ private void addDefaultCsvColumn( CatalogAdapter csv, LogicalTable table, String private void addDefaultColumn( CatalogAdapter adapter, LogicalTable table, String name, PolyType type, Collation collation, int position, Integer length ) { if ( !getSnapshot().rel().checkIfExistsColumn( table.id, name ) ) { - long colId = getLogicalRel( table.namespaceId ).addColumn( name, table.id, position, type, null, length, null, null, null, false, collation ); + LogicalColumn column = getLogicalRel( table.namespaceId ).addColumn( name, table.id, position, type, null, length, null, null, null, false, collation ); AllocationEntity entity = getSnapshot().alloc().getAllocation( adapter.id, table.id ); - getAllocRel( table.namespaceId ).addColumnPlacement( entity.id, colId, PlacementType.AUTOMATIC, position ); - //getAllocRel( table.namespaceId ).addColumnPlacement( entity.id, colId, PlacementType.AUTOMATIC, "col" + colId, table.name, name, position ); - getAllocRel( table.namespaceId ).updateColumnPlacementPhysicalPosition( adapter.id, colId, position ); + getAllocRel( table.namespaceId ).addColumn( entity.id, column.id, PlacementType.AUTOMATIC, position ); + //getAllocRel( table.namespaceId ).addColumn( entity.id, colId, PlacementType.AUTOMATIC, "col" + colId, table.name, name, position ); + getAllocRel( table.namespaceId ).updateColumnPlacementPhysicalPosition( adapter.id, 
column.id, position ); } } @@ -304,6 +305,8 @@ public void updateSnapshot() { Set keys = this.physicalCatalogs.keySet(); keys.forEach( k -> this.physicalCatalogs.replace( k, new PolyPhysicalCatalog() ) ); + // update all except physicals, so information can be accessed + this.snapshot = SnapshotBuilder.createSnapshot( idBuilder.getNewSnapshotId(), this, logicalCatalogs, allocationCatalogs, physicalCatalogs ); // generate new physical entities, atm only relational this.allocationCatalogs.forEach( ( k, v ) -> { if ( v.getNamespace().namespaceType == NamespaceType.RELATIONAL ) { @@ -324,6 +327,7 @@ public void updateSnapshot() { } } ); + // update with newly generated physical entities this.snapshot = SnapshotBuilder.createSnapshot( idBuilder.getNewSnapshotId(), this, logicalCatalogs, allocationCatalogs, physicalCatalogs ); } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocRelCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocRelCatalog.java index e0fef8b775..d714241c07 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocRelCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocRelCatalog.java @@ -27,6 +27,7 @@ import org.polypheny.db.catalog.IdBuilder; import org.polypheny.db.catalog.Serializable; import org.polypheny.db.catalog.catalogs.AllocationRelationalCatalog; +import org.polypheny.db.catalog.entity.AllocationColumn; import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.allocation.AllocationTable; import org.polypheny.db.catalog.logistic.DataPlacementRole; @@ -50,17 +51,23 @@ public class PolyAllocRelCatalog implements AllocationRelationalCatalog, Seriali @Getter public final ConcurrentHashMap tables; + @Serialize + @Getter + public final ConcurrentHashMap columns; + public PolyAllocRelCatalog( LogicalNamespace namespace ) { - this( namespace, new ConcurrentHashMap<>() ); + this( namespace, new ConcurrentHashMap<>(), new ConcurrentHashMap<>() ); } public PolyAllocRelCatalog( @Deserialize("namespace") LogicalNamespace namespace, - @Deserialize("tables") Map tables ) { + @Deserialize("tables") Map tables, + @Deserialize("columns") Map columns ) { this.tables = new ConcurrentHashMap<>( tables ); this.namespace = namespace; + this.columns = new ConcurrentHashMap<>( columns ); } @@ -73,16 +80,16 @@ public PolyAllocRelCatalog copy() { @Override - public AllocationTable addColumnPlacement( long allocationId, long columnId, PlacementType placementType, int position ) { - AllocationTable alloc = tables.get( allocationId ).withAddedColumn( columnId, placementType, position ); - tables.put( allocationId, alloc ); - return alloc; + public AllocationColumn addColumn( long allocationId, long columnId, PlacementType placementType, int position ) { + AllocationColumn column = new AllocationColumn( namespace.id, allocationId, columnId, placementType, position, tables.get( allocationId ).adapterId ); + columns.put( columnId, column ); + return column; } @Override - public void deleteColumnPlacement( long allocationId, long columnId, boolean columnOnly ) { - tables.put( allocationId, tables.get( allocationId ).withRemovedColumn( columnId ) ); + public void deleteColumn( long allocationId, long columnId, boolean columnOnly ) { + columns.remove( columnId ); } @@ -94,19 +101,6 @@ public void updateColumnPlacementType( long adapterId, long columnId, PlacementT @Override public void 
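
Keying the new placement map by columnId alone, as addColumn above does, silently drops entries as soon as the same logical column is placed on a second adapter; patch 061 further down switches the key to Pair.of( allocationId, columnId ) for exactly this reason. A minimal sketch of the collision (all ids and variables made up):

    Map<Long, AllocationColumn> columns = new ConcurrentHashMap<>();
    columns.put( 7L, placementOnStoreA );  // column 7 placed on the first store
    columns.put( 7L, placementOnStoreB );  // overwrites the first placement

    Map<Pair<Long, Long>, AllocationColumn> allocColumns = new ConcurrentHashMap<>();
    allocColumns.put( Pair.of( allocA.id, 7L ), placementOnStoreA );
    allocColumns.put( Pair.of( allocB.id, 7L ), placementOnStoreB );  // both placements survive
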
updateColumnPlacementPhysicalPosition( long allocId, long columnId, long position ) { - - } - - - @Override - public void updateColumnPlacementPhysicalPosition( long adapterId, long columnId ) { - - } - - - @Override - public void updateColumnPlacementPhysicalNames( long adapterId, long columnId, String physicalSchemaName, String physicalColumnName, boolean updatePhysicalColumnPosition ) { - } @@ -173,7 +167,7 @@ public void addPartitionPlacement( long namespaceId, long adapterId, long tableI @Override public AllocationTable createAllocationTable( long adapterId, long tableId ) { long id = idBuilder.getNewAllocId(); - AllocationTable table = new AllocationTable( id, tableId, namespace.id, adapterId, List.of() ); + AllocationTable table = new AllocationTable( id, tableId, namespace.id, adapterId ); tables.put( id, table ); return table; } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/RelationalCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/RelationalCatalog.java index 8f889a7c2e..a454cd4145 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/RelationalCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/RelationalCatalog.java @@ -257,11 +257,11 @@ public void deleteIndex( long indexId ) { @Override - public long addColumn( String name, long tableId, int position, PolyType type, PolyType collectionsType, Integer length, Integer scale, Integer dimension, Integer cardinality, boolean nullable, Collation collation ) { + public LogicalColumn addColumn( String name, long tableId, int position, PolyType type, PolyType collectionsType, Integer length, Integer scale, Integer dimension, Integer cardinality, boolean nullable, Collation collation ) { long id = idBuilder.getNewFieldId(); LogicalColumn column = new LogicalColumn( id, name, tableId, logicalNamespace.id, position, type, collectionsType, length, scale, dimension, cardinality, nullable, collation, null ); columns.put( id, column ); - return id; + return column; } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddColumn.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddColumn.java index 806f77f1cf..f7c2b163fd 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddColumn.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddColumn.java @@ -20,7 +20,7 @@ import java.util.List; import java.util.Objects; import lombok.extern.slf4j.Slf4j; -import org.polypheny.db.catalog.entity.CatalogDataPlacement; +import org.polypheny.db.catalog.entity.allocation.AllocationEntity; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.ddl.DdlManager; @@ -126,8 +126,8 @@ public void execute( Context context, Statement statement, QueryParameters param } // Make sure that all adapters are of type store (and not source) - for ( CatalogDataPlacement placement : statement.getTransaction().getSnapshot().alloc().getDataPlacements( catalogTable.id ) ) { - getDataStoreInstance( placement.adapterId ); + for ( AllocationEntity allocation : statement.getTransaction().getSnapshot().alloc().getAllocationsFromLogical( catalogTable.id ) ) { + getDataStoreInstance( allocation.adapterId ); } String defaultValue = this.defaultValue == null ? 
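
Having addColumn hand back the created LogicalColumn instead of a bare id, as in the RelationalCatalog hunk above, spares callers a follow-up snapshot lookup. A sketch of the calling pattern before and after (hypothetical call site, argument values made up):

    // before: only the id came back, resolving the entity needed a second round-trip
    long colId = catalog.getLogicalRel( nsId ).addColumn( "name", tableId, 0, PolyType.VARCHAR, null, 24, null, null, null, false, collation );
    LogicalColumn resolved = catalog.getSnapshot().rel().getColumn( colId );

    // after: the entity itself is returned
    LogicalColumn column = catalog.getLogicalRel( nsId ).addColumn( "name", tableId, 0, PolyType.VARCHAR, null, 24, null, null, null, false, collation );
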
null : this.defaultValue.toString(); diff --git a/webui/src/main/java/org/polypheny/db/webui/Crud.java b/webui/src/main/java/org/polypheny/db/webui/Crud.java index 37b19523b5..9731e792e1 100644 --- a/webui/src/main/java/org/polypheny/db/webui/Crud.java +++ b/webui/src/main/java/org/polypheny/db/webui/Crud.java @@ -100,8 +100,8 @@ import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.entity.AllocationColumn; import org.polypheny.db.catalog.entity.CatalogAdapter.AdapterType; -import org.polypheny.db.catalog.entity.CatalogColumnPlacement; import org.polypheny.db.catalog.entity.CatalogConstraint; import org.polypheny.db.catalog.entity.CatalogDataPlacement; import org.polypheny.db.catalog.entity.CatalogForeignKey; @@ -1185,7 +1185,7 @@ void getDataSourceColumns( final Context ctx ) { CatalogPrimaryKey primaryKey = catalog.getSnapshot().rel().getPrimaryKey( catalogTable.primaryKey ); List pkColumnNames = primaryKey.getColumnNames(); List columns = new ArrayList<>(); - for ( CatalogColumnPlacement ccp : catalog.getSnapshot().alloc().getColumnPlacementsOnAdapterPerTable( adapterId, catalogTable.id ) ) { + for ( AllocationColumn ccp : catalog.getSnapshot().alloc().getColumnPlacementsOnAdapterPerTable( adapterId, catalogTable.id ) ) { LogicalColumn col = catalog.getSnapshot().rel().getColumn( ccp.columnId ); columns.add( new DbColumn( col.name, @@ -1905,8 +1905,8 @@ private Placement getPlacements( final Index index ) { long pkid = table.primaryKey; List pkColumnIds = snapshot.rel().getPrimaryKey( pkid ).columnIds; LogicalColumn pkColumn = snapshot.rel().getColumn( pkColumnIds.get( 0 ) ); - List pkPlacements = snapshot.alloc().getColumnPlacements( pkColumn.id ); - for ( CatalogColumnPlacement placement : pkPlacements ) { + List pkPlacements = snapshot.alloc().getColumnPlacements( pkColumn.id ); + for ( AllocationColumn placement : pkPlacements ) { Adapter adapter = AdapterManager.getInstance().getAdapter( placement.adapterId ); PartitionProperty property = snapshot.alloc().getPartitionProperty( table.id ); p.addAdapter( new RelationalStore( diff --git a/webui/src/main/java/org/polypheny/db/webui/models/Placement.java b/webui/src/main/java/org/polypheny/db/webui/models/Placement.java index 5afc5883ba..69078c329d 100644 --- a/webui/src/main/java/org/polypheny/db/webui/models/Placement.java +++ b/webui/src/main/java/org/polypheny/db/webui/models/Placement.java @@ -21,8 +21,8 @@ import java.util.List; import java.util.stream.Collectors; import lombok.Value; +import org.polypheny.db.catalog.entity.AllocationColumn; import org.polypheny.db.catalog.entity.CatalogCollectionPlacement; -import org.polypheny.db.catalog.entity.CatalogColumnPlacement; import org.polypheny.db.catalog.entity.CatalogGraphPlacement; import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.logistic.PartitionType; @@ -101,7 +101,7 @@ public static class RelationalStore extends Store { public RelationalStore( String uniqueName, String adapterName, - List columnPlacements, + List columnPlacements, final List partitionKeys, final long numPartitions, final PartitionType partitionType ) { @@ -190,13 +190,13 @@ private static class ColumnPlacement { private final PlacementType placementType; - public ColumnPlacement( CatalogColumnPlacement catalogColumnPlacement ) { - this.tableId = catalogColumnPlacement.tableId; - this.tableName = catalogColumnPlacement.getLogicalTableName(); - 
this.columnId = catalogColumnPlacement.columnId; - this.columnName = catalogColumnPlacement.getLogicalColumnName(); - this.storeId = (int) catalogColumnPlacement.adapterId; - this.placementType = catalogColumnPlacement.placementType; + public ColumnPlacement( AllocationColumn allocationColumn ) { + this.tableId = allocationColumn.tableId; + this.tableName = allocationColumn.getLogicalTableName(); + this.columnId = allocationColumn.columnId; + this.columnName = allocationColumn.getLogicalColumnName(); + this.storeId = (int) allocationColumn.adapterId; + this.placementType = allocationColumn.placementType; } } From babe85ba31aad491b535b7e98d64123dd23ed292 Mon Sep 17 00:00:00 2001 From: datomo Date: Thu, 13 Apr 2023 13:32:44 +0200 Subject: [PATCH 060/436] fixed new allocationColumn methods in allocation snapshot --- .../db/catalog/entity/AllocationColumn.java | 8 +- .../db/catalog/snapshot/AllocSnapshot.java | 1 + .../snapshot/impl/AllocSnapshotImpl.java | 73 ++++++++++--------- .../org/polypheny/db/ddl/DdlManagerImpl.java | 2 +- .../db/partition/FrequencyMapImpl.java | 2 +- .../db/routing/routers/BaseRouter.java | 2 +- .../polypheny/db/adapter/csv/CsvSchema.java | 4 +- .../polypheny/db/adapter/csv/CsvTable.java | 4 +- .../polypheny/db/adapter/jdbc/JdbcEntity.java | 2 +- 9 files changed, 49 insertions(+), 49 deletions(-) diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/AllocationColumn.java b/core/src/main/java/org/polypheny/db/catalog/entity/AllocationColumn.java index c44836dab4..db9594fbf9 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/AllocationColumn.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/AllocationColumn.java @@ -68,21 +68,19 @@ public AllocationColumn( @SneakyThrows public String getLogicalTableName() { - throw new org.apache.commons.lang3.NotImplementedException(); + return Catalog.snapshot().rel().getTable( tableId ).name; } @SneakyThrows public String getLogicalColumnName() { - //return Catalog.getInstance().getLogicalRel( namespaceId ).getColumn( columnId ).name; - throw new org.apache.commons.lang3.NotImplementedException(); + return Catalog.snapshot().rel().getColumn( columnId ).name; } @SneakyThrows public String getAdapterUniqueName() { - // return Catalog.getInstance().getAdapter( adapterId ).uniqueName; - throw new org.apache.commons.lang3.NotImplementedException(); + return Catalog.snapshot().getAdapter( adapterId ).uniqueName; } diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/AllocSnapshot.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/AllocSnapshot.java index 221c37ffc0..4ca9baf909 100644 --- a/core/src/main/java/org/polypheny/db/catalog/snapshot/AllocSnapshot.java +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/AllocSnapshot.java @@ -411,4 +411,5 @@ public interface AllocSnapshot { List getColumns( long allocId ); + } diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/AllocSnapshotImpl.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/AllocSnapshotImpl.java index 25dc1217d2..71d7d728cb 100644 --- a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/AllocSnapshotImpl.java +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/AllocSnapshotImpl.java @@ -62,9 +62,9 @@ public class AllocSnapshotImpl implements AllocSnapshot { ImmutableMap allocs; ImmutableMap> allocsOnAdapters; - ImmutableMap> columPlacements; + ImmutableMap> logicalColumnToAlloc; - ImmutableMap> tablePlacements; + ImmutableMap> allocColumns; ImmutableMap, List> 
adapterLogicalTablePlacements; ImmutableMap, AllocationEntity> adapterLogicalTableAlloc; ImmutableMap> logicalAllocs; @@ -90,37 +90,44 @@ public AllocSnapshotImpl( Map allocationCatalogs ) { .filter( a -> a.getNamespace().namespaceType == NamespaceType.GRAPH ) .map( c -> (AllocationGraphCatalog) c ) .collect( Collectors.toList() ) ); - - this.columns = allocationCatalogs.values() + this.columns = buildColumns( allocationCatalogs.values() .stream() .filter( a -> a.getNamespace().namespaceType == NamespaceType.RELATIONAL ) .map( c -> (AllocationRelationalCatalog) c ) - .map( c -> c.getColumns() ) - .collect( Collectors.toList() ); - this.tableAdapterColumns = buildTableAdapterColumns(); + .map( AllocationRelationalCatalog::getColumns ) + .flatMap( c -> c.values().stream() ) + .collect( Collectors.toList() ) ); this.allocs = mergeAllocs(); this.allocsOnAdapters = buildAllocsOnAdapters(); this.adapterColumnPlacement = buildAdapterColumnPlacement(); - this.columPlacements = buildColumnPlacements(); + this.logicalColumnToAlloc = buildColumnPlacements(); this.adapterLogicalTablePlacements = buildAdapterLogicalTablePlacements(); this.adapterLogicalTableAlloc = buildAdapterLogicalTableAlloc(); - this.tablePlacements = buildTablePlacements(); + this.allocColumns = buildAllocColumns(); this.logicalAllocs = buildLogicalAllocs(); + + this.tableAdapterColumns = buildTableAdapterColumns(); + } + + + private ImmutableMap, AllocationColumn> buildColumns( List columns ) { + return ImmutableMap.copyOf( columns.stream().collect( Collectors.toMap( c -> Pair.of( c.columnId, c.adapterId ), c -> c ) ) ); + } private ImmutableMap>> buildTableAdapterColumns() { Map>> map = new HashMap<>(); - this.tables.forEach( ( k, v ) -> v.placements.forEach( p -> { - if ( !map.containsKey( v.logicalId ) ) { - map.put( v.logicalId, new HashMap<>() ); + for ( AllocationColumn column : this.columns.values() ) { + if ( !map.containsKey( column.tableId ) ) { + map.put( column.tableId, new HashMap<>() ); } - if ( !map.get( v.logicalId ).containsKey( v.adapterId ) ) { - map.get( v.logicalId ).put( v.logicalId, new ArrayList<>() ); + if ( !map.get( column.tableId ).containsKey( column.adapterId ) ) { + map.get( column.tableId ).put( column.adapterId, new ArrayList<>() ); } - map.get( v.logicalId ).get( v.logicalId ).add( p.columnId ); - } ) ); + map.get( column.tableId ).get( column.adapterId ).add( column.columnId ); + } return ImmutableMap.copyOf( map ); } @@ -138,14 +145,14 @@ private ImmutableMap> buildLogicalAllocs() { } - private ImmutableMap> buildTablePlacements() { + private ImmutableMap> buildAllocColumns() { Map> map = new HashMap<>(); - this.columns.forEach( ( k, v ) -> v.placements.forEach( p -> { - if ( !map.containsKey( v.id ) ) { - map.put( v.id, new ArrayList<>() ); + for ( AllocationColumn value : columns.values() ) { + if ( !map.containsKey( value.tableId ) ) { + map.put( value.tableId, new ArrayList<>() ); } - map.get( v.id ).add( p ); - } ) ); + map.get( value.tableId ).add( value ); + } return ImmutableMap.copyOf( map ); } @@ -160,12 +167,6 @@ private ImmutableMap, AllocationEntity> buildAdapterLogicalTabl private ImmutableMap, List> buildAdapterLogicalTablePlacements() { Map, List> map = new HashMap<>(); - this.tables.forEach( ( k, v ) -> v.placements.forEach( p -> { - if ( !map.containsKey( Pair.of( p.adapterId, p.tableId ) ) ) { - map.put( Pair.of( p.adapterId, p.tableId ), new ArrayList<>() ); - } - map.get( Pair.of( p.adapterId, p.tableId ) ).add( p ); - } ) ); return ImmutableMap.copyOf( map ); } @@ -173,12 
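
The rebuild loops above, such as buildTableAdapterColumns, are plain groupings over the flat set of AllocationColumns. As a sketch (not the code used here), the table-to-adapter-to-columns index could equivalently be written with collectors:

    Map<Long, Map<Long, List<Long>>> tableAdapterColumns = columns.values().stream()
            .collect( Collectors.groupingBy(
                    c -> c.tableId,
                    Collectors.groupingBy(
                            c -> c.adapterId,
                            Collectors.mapping( c -> c.columnId, Collectors.toList() ) ) ) );
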
+174,12 @@ private ImmutableMap, List> buildAdapterLogic private ImmutableMap> buildColumnPlacements() { Map> map = new HashMap<>(); - this.tables.forEach( ( k, v ) -> v.placements.forEach( p -> { - if ( !map.containsKey( p.columnId ) ) { - map.put( p.columnId, new ArrayList<>() ); + for ( AllocationColumn column : columns.values() ) { + if ( !map.containsKey( column.columnId ) ) { + map.put( column.columnId, new ArrayList<>() ); } - map.get( p.columnId ).add( p ); - } ) ); + map.get( column.columnId ).add( column ); + } return ImmutableMap.copyOf( map ); } @@ -186,7 +187,7 @@ private ImmutableMap> buildColumnPlacements() { private ImmutableMap, AllocationColumn> buildAdapterColumnPlacement() { Map, AllocationColumn> map = new HashMap<>(); - this.tables.forEach( ( k, v ) -> v.placements.forEach( p -> map.put( Pair.of( v.adapterId, p.columnId ), p ) ) ); + //this.tables.forEach( ( k, v ) -> v.placements.forEach( p -> map.put( Pair.of( v.adapterId, p.columnId ), p ) ) ); return ImmutableMap.copyOf( map ); } @@ -264,7 +265,7 @@ public boolean checkIfExistsColumnPlacement( long adapterId, long columnId ) { @Override public List getColumnPlacements( long columnId ) { - return columPlacements.get( columnId ); + return logicalColumnToAlloc.get( columnId ); } @@ -547,7 +548,7 @@ public AllocationEntity getAllocation( long adapterId, long entityId ) { @Override public List getColumns( long allocId ) { - + return allocColumns.get( allocId ); } } diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java index 20532a2b12..bf989a31c8 100644 --- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java @@ -2199,7 +2199,7 @@ public void createTable( long namespaceId, String name, List f columns.add( catalog.getAllocRel( namespaceId ).addColumn( alloc.id, id, PlacementType.AUTOMATIC, i ) ); i++; } - + catalog.updateSnapshot(); catalog.getPhysical( namespaceId ).addEntities( store.createPhysicalTable( statement.getPrepareContext(), logical, alloc, columns ) ); } diff --git a/dbms/src/main/java/org/polypheny/db/partition/FrequencyMapImpl.java b/dbms/src/main/java/org/polypheny/db/partition/FrequencyMapImpl.java index 0c637e5730..6aed1af0ba 100644 --- a/dbms/src/main/java/org/polypheny/db/partition/FrequencyMapImpl.java +++ b/dbms/src/main/java/org/polypheny/db/partition/FrequencyMapImpl.java @@ -345,7 +345,7 @@ private void createHotTables( LogicalTable table, List partitionsFromColdT DataPlacementRole.UPTODATE ); } - store.createPhysicalTable( statement.getPrepareContext(), table, alloc, null ); + store.createPhysicalTable( statement.getPrepareContext(), table, null, null ); List logicalColumns = new ArrayList<>(); catalog.getSnapshot().alloc().getColumnPlacementsOnAdapterPerTable( store.getAdapterId(), table.id ).forEach( cp -> logicalColumns.add( catalog.getSnapshot().rel().getColumn( cp.columnId ) ) ); diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java b/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java index e5b27e3382..8ed4abbe09 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java @@ -365,7 +365,7 @@ public AlgNode buildJoinedScan( Statement statement, AlgOptCluster cluster, List joinedScanCache.put( allocationEntities.hashCode(), node ); } - AllocationColumn placement = allocationEntities.get( 0 ).unwrap( 
AllocationTable.class ).placements.get( 0 ); + AllocationColumn placement = catalog.getSnapshot().alloc().getColumns( allocationEntities.get( 0 ).id ).get( 0 ); // todo dl: remove after RowType refactor if ( Catalog.snapshot().getNamespace( placement.namespaceId ).namespaceType == NamespaceType.DOCUMENT ) { AlgDataType rowType = new AlgRecordType( List.of( new AlgDataTypeFieldImpl( "d", 0, cluster.getTypeFactory().createPolyType( PolyType.DOCUMENT ) ) ) ); diff --git a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSchema.java b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSchema.java index 58d1d783c2..4404b578be 100644 --- a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSchema.java +++ b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSchema.java @@ -88,11 +88,11 @@ public PhysicalTable createCsvTable( long id, LogicalTable catalogTable, Allocat final AlgDataTypeFactory typeFactory = new PolyTypeFactoryImpl( AlgDataTypeSystem.DEFAULT ); final AlgDataTypeFactory.Builder fieldInfo = typeFactory.builder(); List fieldTypes = new LinkedList<>(); - List fieldIds = new ArrayList<>( allocationTable.placements.size() ); + List fieldIds = new ArrayList<>(); List columns = csvSource.getExportedColumns().get( catalogTable.name ); - for ( AllocationColumn placement : allocationTable.placements ) { + for ( AllocationColumn placement : allocationTable.getColumns().values() ) { LogicalColumn logicalColumn = Catalog.getInstance().getSnapshot().rel().getColumn( placement.columnId ); AlgDataType sqlType = sqlType( typeFactory, logicalColumn.type, logicalColumn.length, logicalColumn.scale, null ); fieldInfo.add( logicalColumn.name, columns.get( (int) placement.position ).physicalColumnName, sqlType ).nullable( logicalColumn.nullable ); diff --git a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvTable.java b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvTable.java index df45ef56e4..8eade2af0a 100644 --- a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvTable.java +++ b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvTable.java @@ -35,7 +35,7 @@ import java.util.List; import java.util.stream.Collectors; -import org.polypheny.db.algebra.type.AlgDataTypeFactory; +import org.polypheny.db.catalog.entity.AllocationColumn; import org.polypheny.db.catalog.entity.allocation.AllocationTable; import org.polypheny.db.catalog.entity.physical.PhysicalTable; import org.polypheny.db.util.Source; @@ -63,7 +63,7 @@ public abstract class CsvTable extends PhysicalTable { allocationTable.getNamespaceName(), allocationTable.getColumnNames(), allocationTable.getColumnNames(), - allocationTable.getColumns().values().stream().collect( Collectors.toMap( c -> c.id, c -> AlgDataTypeFactory.DEFAULT.createPolyType( c.type ) ) ), + allocationTable.getColumns().values().stream().collect( Collectors.toMap( c -> c.columnId, AllocationColumn::getAlgDataType ) ), allocationTable.getColumnOrder() ); this.source = source; this.fieldTypes = fieldTypes; diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcEntity.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcEntity.java index 17cd4e35cb..387ddecdfc 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcEntity.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcEntity.java @@ -126,7 +126,7 @@ public 
JdbcEntity( private static Map getPhysicalColumnNames( Adapter adapter, AllocationTable allocationTable ) { AbstractJdbcStore store = (AbstractJdbcStore) adapter; - return allocationTable.getColumns().values().stream().collect( Collectors.toMap( c -> c.id, c -> store.getPhysicalColumnName( c.id ) ) ); + return allocationTable.getColumns().values().stream().collect( Collectors.toMap( c -> c.columnId, c -> store.getPhysicalColumnName( c.columnId ) ) ); } From 1e8ca117db0e40b41fc92c7b03c61ca76d2ed9b4 Mon Sep 17 00:00:00 2001 From: datomo Date: Fri, 14 Apr 2023 00:09:12 +0200 Subject: [PATCH 061/436] fixing default value and add column --- .../catalogs/AllocationRelationalCatalog.java | 3 ++- .../catalogs/LogicalRelationalCatalog.java | 3 ++- .../db/catalog/entity/AllocationColumn.java | 2 ++ .../catalog/entity/logical/LogicalTable.java | 3 ++- .../entity/physical/PhysicalEntity.java | 1 + .../db/catalog/snapshot/AllocSnapshot.java | 4 ++-- .../catalog/snapshot/LogicalRelSnapshot.java | 2 +- .../snapshot/impl/AllocSnapshotImpl.java | 2 +- .../org/polypheny/db/tools/AlgBuilder.java | 24 +++++++++++++++++++ .../main/java/org/polypheny/db/util/Pair.java | 16 ++++++++++--- .../org/polypheny/db/util/Permutation.java | 7 ++++++ .../polypheny/db/util/mapping/Mappings.java | 6 ++--- .../org/polypheny/db/ddl/DdlManagerImpl.java | 24 ++++++++++--------- .../db/routing/routers/BaseRouter.java | 2 +- .../statistics/StatisticResult.java | 4 ++-- .../org/polypheny/db/avatica/DbmsMeta.java | 2 +- .../jdbc/stores/AbstractJdbcStore.java | 2 +- .../polypheny/db/catalog/Serializable.java | 2 +- .../allocation/PolyAllocRelCatalog.java | 13 ++++++---- .../db/catalog/logical/RelationalCatalog.java | 6 +++-- .../org/polypheny/db/webui/HttpServer.java | 2 +- 21 files changed, 92 insertions(+), 38 deletions(-) diff --git a/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationRelationalCatalog.java b/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationRelationalCatalog.java index 0fd024f820..d7da90c632 100644 --- a/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationRelationalCatalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationRelationalCatalog.java @@ -24,6 +24,7 @@ import org.polypheny.db.catalog.logistic.PartitionType; import org.polypheny.db.catalog.logistic.PlacementType; import org.polypheny.db.partition.properties.PartitionProperty; +import org.polypheny.db.util.Pair; public interface AllocationRelationalCatalog extends AllocationCatalog { @@ -208,6 +209,6 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { Map getTables(); - Map getColumns(); + Map, AllocationColumn> getAllocColumns(); } diff --git a/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalRelationalCatalog.java b/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalRelationalCatalog.java index bbfc5ba30a..61aa38af88 100644 --- a/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalRelationalCatalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalRelationalCatalog.java @@ -176,8 +176,9 @@ public interface LogicalRelationalCatalog extends LogicalCatalog { * @param columnId The id of the column * @param type The type of the default value * @param defaultValue True if the column should allow null values, false if not. 
+ * @return */ - void setDefaultValue( long columnId, PolyType type, String defaultValue ); + LogicalColumn setDefaultValue( long columnId, PolyType type, String defaultValue ); /** * Deletes an existing default value of a column. NoOp if there is no default value defined. diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/AllocationColumn.java b/core/src/main/java/org/polypheny/db/catalog/entity/AllocationColumn.java index db9594fbf9..a871a9c1a4 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/AllocationColumn.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/AllocationColumn.java @@ -23,6 +23,7 @@ import lombok.NonNull; import lombok.SneakyThrows; import lombok.Value; +import lombok.With; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; import org.polypheny.db.catalog.Catalog; @@ -31,6 +32,7 @@ @EqualsAndHashCode @Value +@With public class AllocationColumn implements CatalogObject { private static final long serialVersionUID = -1909757888176291095L; diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalTable.java b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalTable.java index 48d2b5c434..8faa538537 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalTable.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalTable.java @@ -21,6 +21,7 @@ import io.activej.serializer.annotations.Deserialize; import io.activej.serializer.annotations.Serialize; import java.io.Serializable; +import java.util.Comparator; import java.util.List; import java.util.stream.Collectors; import lombok.EqualsAndHashCode; @@ -139,7 +140,7 @@ public List getColumns() { public List getColumnIds() { - return getColumns().stream().map( c -> c.id ).collect( Collectors.toList() ); + return getColumns().stream().sorted( Comparator.comparingInt( a -> a.position ) ).map( c -> c.id ).collect( Collectors.toList() ); } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalEntity.java b/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalEntity.java index 32478a2939..b38dc1e847 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalEntity.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/physical/PhysicalEntity.java @@ -52,4 +52,5 @@ public State getCatalogType() { public abstract AlgDataType getLogicalRowType(); + } diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/AllocSnapshot.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/AllocSnapshot.java index 4ca9baf909..05be4cce56 100644 --- a/core/src/main/java/org/polypheny/db/catalog/snapshot/AllocSnapshot.java +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/AllocSnapshot.java @@ -158,7 +158,7 @@ public interface AllocSnapshot { /** * Get all partitions of the specified database which fit to the specified filter patterns. - * getColumns(xid, databaseName, null, null, null) returns all partitions of the database. + * getAllocColumns(xid, databaseName, null, null, null) returns all partitions of the database. * * @param schemaNamePattern Pattern for the schema name. null returns all. * @param tableNamePattern Pattern for the table name. null returns catalog/src/test/java/org/polypheny/db/test/CatalogTest.javaall. @@ -177,7 +177,7 @@ public interface AllocSnapshot { /** * Get all partitions of the specified database which fit to the specified filter patterns. 
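
Sorting by position before mapping to ids, as the getColumnIds change above does, is what keeps the exposed column order deterministic even though the backing structure is an unordered map; patch 062 below applies the same comparator in getRowType. The idiom in isolation:

    List<Long> ids = table.getColumns().stream()
            .sorted( Comparator.comparingInt( c -> c.position ) )
            .map( c -> c.id )
            .collect( Collectors.toList() );
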
- * getColumns(xid, databaseName, null, null, null) returns all partitions of the database. + * getAllocColumns(xid, databaseName, null, null, null) returns all partitions of the database. * * @param schemaNamePattern Pattern for the schema name. null returns all. * @param tableNamePattern Pattern for the table name. null returns catalog/src/test/java/org/polypheny/db/test/CatalogTest.javaall. diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalRelSnapshot.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalRelSnapshot.java index 24195ceaaf..7557013053 100644 --- a/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalRelSnapshot.java +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalRelSnapshot.java @@ -84,7 +84,7 @@ public interface LogicalRelSnapshot { /** * Get all columns of the specified database which fit to the specified filter patterns. - * getColumns(xid, databaseName, null, null, null) returns all columns of the database. + * getAllocColumns(xid, databaseName, null, null, null) returns all columns of the database. * * @param tableName Pattern for the table name. null returns all. * @param columnName Pattern for the column name. null returns all. diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/AllocSnapshotImpl.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/AllocSnapshotImpl.java index 71d7d728cb..d6e65e1442 100644 --- a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/AllocSnapshotImpl.java +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/AllocSnapshotImpl.java @@ -94,7 +94,7 @@ public AllocSnapshotImpl( Map allocationCatalogs ) { .stream() .filter( a -> a.getNamespace().namespaceType == NamespaceType.RELATIONAL ) .map( c -> (AllocationRelationalCatalog) c ) - .map( AllocationRelationalCatalog::getColumns ) + .map( AllocationRelationalCatalog::getAllocColumns ) .flatMap( c -> c.values().stream() ) .collect( Collectors.toList() ) ); diff --git a/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java b/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java index c0e7fcb6b9..6c0045e08b 100644 --- a/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java +++ b/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java @@ -53,6 +53,7 @@ import java.util.List; import java.util.Locale; import java.util.Map; +import java.util.Map.Entry; import java.util.Objects; import java.util.Set; import java.util.SortedSet; @@ -102,11 +103,13 @@ import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.algebra.type.AlgDataTypeFieldImpl; import org.polypheny.db.algebra.type.StructKind; +import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.catalog.entity.logical.LogicalCollection; import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.entity.physical.PhysicalEntity; +import org.polypheny.db.catalog.entity.physical.PhysicalTable; import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.languages.OperatorRegistry; import org.polypheny.db.languages.QueryLanguage; @@ -141,6 +144,7 @@ import org.polypheny.db.util.Litmus; import org.polypheny.db.util.NlsString; import org.polypheny.db.util.Pair; +import org.polypheny.db.util.Permutation; import org.polypheny.db.util.TimeString; import org.polypheny.db.util.TimestampString; import org.polypheny.db.util.Util; @@ -1348,14 +1352,34 
@@ public AlgBuilder scan( @Nonnull CatalogEntity entity ) { } + private void reorder( List order, Map namesIdMapping ) { + List names = peek().getRowType().getFieldNames(); + List mapping = new ArrayList<>(); + for ( String name : names ) { + mapping.add( order.indexOf( namesIdMapping.get( name ) ) ); + } + permute( new Permutation( mapping ) ); + } + + public AlgBuilder scan( @Nonnull PhysicalEntity entity ) { + final AlgNode scan = scanFactory.createScan( cluster, entity ); push( scan ); rename( entity.getLogicalRowType().getFieldNames() ); + if ( entity.unwrap( PhysicalTable.class ) != null ) { + List order = Catalog.snapshot().rel().getTable( entity.logicalId ).unwrap( LogicalTable.class ).getColumnIds(); + reorder( order, flip( entity.unwrap( PhysicalTable.class ).logicalColumns ) ); + } return this; } + private Map flip( Map map ) { + return map.entrySet().stream().collect( Collectors.toMap( Entry::getValue, Entry::getKey ) ); + } + + /** * Creates a {@link RelScan} of the table with a given name. * diff --git a/core/src/main/java/org/polypheny/db/util/Pair.java b/core/src/main/java/org/polypheny/db/util/Pair.java index 09dee6f01a..97e640e87f 100644 --- a/core/src/main/java/org/polypheny/db/util/Pair.java +++ b/core/src/main/java/org/polypheny/db/util/Pair.java @@ -34,6 +34,8 @@ package org.polypheny.db.util; +import io.activej.serializer.annotations.Deserialize; +import io.activej.serializer.annotations.Serialize; import java.io.Serializable; import java.util.AbstractList; import java.util.Collections; @@ -43,6 +45,8 @@ import java.util.Map; import java.util.Objects; import javax.annotation.Nonnull; +import lombok.Value; +import lombok.experimental.NonFinal; /** @@ -53,10 +57,14 @@ * @param Left-hand type * @param Right-hand type */ +@Value +@NonFinal public class Pair implements Comparable>, Map.Entry, Serializable { - public final T1 left; - public final T2 right; + @Serialize + public T1 left; + @Serialize + public T2 right; /** @@ -65,7 +73,9 @@ public class Pair implements Comparable>, Map.Entry * @param left left value * @param right right value */ - public Pair( T1 left, T2 right ) { + public Pair( + @Deserialize("left") T1 left, + @Deserialize("right") T2 right ) { this.left = left; this.right = right; } diff --git a/core/src/main/java/org/polypheny/db/util/Permutation.java b/core/src/main/java/org/polypheny/db/util/Permutation.java index 4cd8e87adb..27d2216d05 100644 --- a/core/src/main/java/org/polypheny/db/util/Permutation.java +++ b/core/src/main/java/org/polypheny/db/util/Permutation.java @@ -36,6 +36,7 @@ import java.util.Arrays; import java.util.Iterator; +import java.util.List; import org.polypheny.db.util.mapping.IntPair; import org.polypheny.db.util.mapping.Mapping; import org.polypheny.db.util.mapping.MappingType; @@ -68,6 +69,11 @@ public Permutation( int size ) { } + public Permutation( List targets ) { + this( targets.stream().mapToInt( i -> i ).toArray() ); + } + + /** * Creates a permutation from an array. 
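
To see what the new reorder step in AlgBuilder.scan computes: each physical field name is translated to its column id, and that id's index in the logical column order becomes the permutation target. A self-contained sketch with made-up values:

    List<Long> order = List.of( 10L, 11L, 12L );  // logical column order of the table
    Map<String, Long> namesIdMapping = Map.of( "col11", 11L, "col10", 10L, "col12", 12L );
    List<Integer> mapping = new ArrayList<>();
    for ( String name : List.of( "col11", "col10", "col12" ) ) {  // physical field order
        mapping.add( order.indexOf( namesIdMapping.get( name ) ) );
    }
    // mapping == [1, 0, 2]; new Permutation( mapping ) is then fed into permute( ... )
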
* @@ -577,5 +583,6 @@ public Permutation product( Permutation permutation ) { } return product; } + } diff --git a/core/src/main/java/org/polypheny/db/util/mapping/Mappings.java b/core/src/main/java/org/polypheny/db/util/mapping/Mappings.java index e23decdfb1..bcce4d18f9 100644 --- a/core/src/main/java/org/polypheny/db/util/mapping/Mappings.java +++ b/core/src/main/java/org/polypheny/db/util/mapping/Mappings.java @@ -252,7 +252,7 @@ public static List apply( final Mapping mapping, final List list ) { public static List apply2( final Mapping mapping, final List list ) { - return new AbstractList() { + return new AbstractList<>() { @Override public Integer get( int index ) { final int source = list.get( index ); @@ -277,7 +277,7 @@ public int size() { * @return Permuted view of list */ public static List apply3( final Mapping mapping, final List list ) { - return new AbstractList() { + return new AbstractList<>() { @Override public T get( int index ) { return list.get( mapping.getSource( index ) ); @@ -322,7 +322,7 @@ public int size() { * Converse of {@link #target(List, int)} */ public static List asList( final TargetMapping mapping ) { - return new AbstractList() { + return new AbstractList<>() { @Override public Integer get( int source ) { int target = mapping.getTargetOpt( source ); diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java index bf989a31c8..010b0e678c 100644 --- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java @@ -20,6 +20,7 @@ import com.google.common.collect.ImmutableList; import java.util.ArrayList; import java.util.Collections; +import java.util.Comparator; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashMap; @@ -139,15 +140,16 @@ private void checkViewDependencies( LogicalTable catalogTable ) { } - private void addDefaultValue( long namespaceId, String defaultValue, long addedColumnId ) { + private LogicalColumn addDefaultValue( long namespaceId, String defaultValue, LogicalColumn column ) { if ( defaultValue != null ) { // TODO: String is only a temporal solution for default values String v = defaultValue; if ( v.startsWith( "'" ) ) { v = v.substring( 1, v.length() - 1 ); } - catalog.getLogicalRel( namespaceId ).setDefaultValue( addedColumnId, PolyType.VARCHAR, v ); + return catalog.getLogicalRel( namespaceId ).setDefaultValue( column.id, PolyType.VARCHAR, v ); } + return column; } @@ -437,7 +439,7 @@ public void addColumnToSourceTable( LogicalTable catalogTable, String columnPhys ); // Add default value - addDefaultValue( catalogTable.namespaceId, defaultValue, addedColumn.id ); + addDefaultValue( catalogTable.namespaceId, defaultValue, addedColumn ); AllocationEntity allocation = catalog.getSnapshot().alloc().getAllocation( adapterId, catalogTable.id ); // Add column placement @@ -445,7 +447,7 @@ public void addColumnToSourceTable( LogicalTable catalogTable, String columnPhys allocation.id, addedColumn.id, PlacementType.STATIC, - position );//Not a valid partitionID --> placeholder + catalog.getSnapshot().alloc().getColumns( allocation.id ).size() );//Not a valid partitionID --> placeholder // Set column position catalog.getAllocRel( catalogTable.namespaceId ).updateColumnPlacementPhysicalPosition( adapterId, addedColumn.id, exportedColumn.physicalPosition ); @@ -456,7 +458,7 @@ public void addColumnToSourceTable( LogicalTable catalogTable, String columnPhys private int 
updateAdjacentPositions( LogicalTable catalogTable, LogicalColumn beforeColumn, LogicalColumn afterColumn ) { - List columns = catalog.getSnapshot().rel().getColumns( catalogTable.id ); + List columns = catalog.getSnapshot().rel().getColumns( catalogTable.id ).stream().sorted( Comparator.comparingInt( a -> a.position ) ).collect( Collectors.toList() ); int position = columns.size() + 1; if ( beforeColumn != null || afterColumn != null ) { if ( beforeColumn != null ) { @@ -475,9 +477,9 @@ private int updateAdjacentPositions( LogicalTable catalogTable, LogicalColumn be private void updateColumnPosition( LogicalTable catalogTable, List columns, int i ) { catalog.getLogicalRel( catalogTable.namespaceId ).setColumnPosition( columns.get( i - 1 ).id, i + 1 ); - for ( AllocationEntity allocation : catalog.getSnapshot().alloc().getAllocationsFromLogical( catalogTable.id ) ) { + /*for ( AllocationEntity allocation : catalog.getSnapshot().alloc().getAllocationsFromLogical( catalogTable.id ) ) { catalog.getAllocRel( catalogTable.namespaceId ).updateColumnPlacementPhysicalPosition( allocation.id, columns.get( i - 1 ).id, i + 1 ); - } + }*/ } @@ -513,7 +515,7 @@ public void addColumn( String columnName, LogicalTable catalogTable, String befo ); // Add default value - addDefaultValue( catalogTable.namespaceId, defaultValue, addedColumn.id ); + addedColumn = addDefaultValue( catalogTable.namespaceId, defaultValue, addedColumn ); // Ask router on which stores this column shall be placed List stores = RoutingManager.getInstance().getCreatePlacementStrategy().getDataStoresForNewColumn( addedColumn ); @@ -525,7 +527,7 @@ public void addColumn( String columnName, LogicalTable catalogTable, String befo allocation.id, addedColumn.id, // Will be set later PlacementType.AUTOMATIC, // Will be set later - position );//Not a valid partitionID --> placeholder + catalog.getSnapshot().alloc().getColumns( allocation.id ).size() );//Not a valid partitionID --> placeholder AdapterManager.getInstance().getStore( store.getAdapterId() ).addColumn( statement.getPrepareContext(), allocation.unwrap( AllocationTable.class ), addedColumn ); } @@ -1218,7 +1220,7 @@ public void setDefaultValue( LogicalTable catalogTable, String columnName, Strin // Check if model permits operation checkModelLogic( catalogTable, columnName ); - addDefaultValue( catalogTable.namespaceId, defaultValue, logicalColumn.id ); + addDefaultValue( catalogTable.namespaceId, defaultValue, logicalColumn ); // Reset plan cache implementation cache & routing cache statement.getQueryProcessor().resetCaches(); @@ -2837,7 +2839,7 @@ private long addColumn( long namespaceId, String columnName, ColumnTypeInformati ); // Add default value - addDefaultValue( namespaceId, defaultValue, addedColumn.id ); + addedColumn = addDefaultValue( namespaceId, defaultValue, addedColumn ); /*for ( DataStore s : stores ) { AllocationEntity allocation = catalog.getSnapshot().alloc().getAllocation( s.getAdapterId(), tableId ); diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java b/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java index 8ed4abbe09..1d0330d2c2 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java @@ -263,7 +263,7 @@ public AlgNode buildJoinedScan( Statement statement, AlgOptCluster cluster, List statement, allocationEntities.get( 0 ).id ); // Final project - buildFinalProject( builder, allocationEntities.get( 0 ).unwrap( 
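
Rather than threading a precomputed position through the DDL path, the placement position is now derived from how many columns the allocation already carries, as seen twice in the DdlManagerImpl hunks above. Isolated, the pattern is:

    int position = catalog.getSnapshot().alloc().getColumns( allocation.id ).size();
    catalog.getAllocRel( catalogTable.namespaceId )
            .addColumn( allocation.id, addedColumn.id, PlacementType.AUTOMATIC, position );
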
AllocationTable.class ) ); + //buildFinalProject( builder, allocationEntities.get( 0 ).unwrap( AllocationTable.class ) ); } diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticResult.java b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticResult.java index abc4054227..70ea198d3c 100644 --- a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticResult.java +++ b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticResult.java @@ -82,8 +82,8 @@ private Comparable[][] rotate2dArray( Comparable[][] data ) { // */ // public static Map toOccurrenceMap( StatisticResult stats ) { // HashMap map = new HashMap<>(); -// String[] values = stats.getColumns()[0].getData(); -// String[] occurrences = stats.getColumns()[1].getData(); +// String[] values = stats.getAllocColumns()[0].getData(); +// String[] occurrences = stats.getAllocColumns()[1].getData(); // //TODO: handle mismatch // for ( int i = 0; i < values.length; i++ ) { // map.put( (E) values[i], Integer.parseInt( occurrences[i] ) ); diff --git a/plugins/avatica-interface/src/main/java/org/polypheny/db/avatica/DbmsMeta.java b/plugins/avatica-interface/src/main/java/org/polypheny/db/avatica/DbmsMeta.java index 63fc9f7217..927ac65617 100644 --- a/plugins/avatica-interface/src/main/java/org/polypheny/db/avatica/DbmsMeta.java +++ b/plugins/avatica-interface/src/main/java/org/polypheny/db/avatica/DbmsMeta.java @@ -317,7 +317,7 @@ public MetaResultSet getColumns( final ConnectionHandle ch, final String databas final PolyphenyDbConnectionHandle connection = getPolyphenyDbConnectionHandle( ch.id ); synchronized ( connection ) { if ( log.isTraceEnabled() ) { - log.trace( "getColumns( ConnectionHandle {}, String {}, Pat {}, Pat {}, Pat {} )", ch, database, schemaPattern, tablePattern, columnPattern ); + log.trace( "getAllocColumns( ConnectionHandle {}, String {}, Pat {}, Pat {}, Pat {} )", ch, database, schemaPattern, tablePattern, columnPattern ); } final List columns = getLogicalTables( schemaPattern, tablePattern ).stream().flatMap( t -> catalog.getSnapshot().rel().getColumns( (tablePattern == null || tablePattern.s == null) ? 
null : new Pattern( tablePattern.s ), diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/stores/AbstractJdbcStore.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/stores/AbstractJdbcStore.java index 165072c3b0..96280cd9b1 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/stores/AbstractJdbcStore.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/stores/AbstractJdbcStore.java @@ -139,7 +139,7 @@ public List createPhysicalTable( Context context, LogicalTable lo executeUpdate( query, context ); return Collections.singletonList( JdbcSchema.create( logicalTable.id, catalog.getSnapshot(), logicalTable.getNamespaceName(), connectionFactory, dialect, this ).createJdbcTable( IdBuilder.getInstance().getNewPhysicalId(), logicalTable, allocationTable ) ); - //return new PhysicalTable( allocationTable, getDefaultPhysicalSchemaName(), physicalTableName, allocationTable.getColumns().values().stream().map( c -> getPhysicalColumnName( c.id ) ).collect( Collectors.toList() ) ); + //return new PhysicalTable( allocationTable, getDefaultPhysicalSchemaName(), physicalTableName, allocationTable.getAllocColumns().values().stream().map( c -> getPhysicalColumnName( c.id ) ).collect( Collectors.toList() ) ); } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/Serializable.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/Serializable.java index d282ddee7b..c2acaf7b2c 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/Serializable.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/Serializable.java @@ -28,7 +28,7 @@ public interface Serializable { BinarySerializer getSerializer(); - default byte[] serialize() { + default byte[] serialize() { byte[] buffer = new byte[1000]; getSerializer().encode( buffer, 0, this ); return buffer; diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocRelCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocRelCatalog.java index d714241c07..f9d0e9ec55 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocRelCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocRelCatalog.java @@ -34,6 +34,7 @@ import org.polypheny.db.catalog.logistic.PartitionType; import org.polypheny.db.catalog.logistic.PlacementType; import org.polypheny.db.partition.properties.PartitionProperty; +import org.polypheny.db.util.Pair; @Slf4j public class PolyAllocRelCatalog implements AllocationRelationalCatalog, Serializable { @@ -53,7 +54,7 @@ public class PolyAllocRelCatalog implements AllocationRelationalCatalog, Seriali @Serialize @Getter - public final ConcurrentHashMap columns; + public final ConcurrentHashMap, AllocationColumn> allocColumns; public PolyAllocRelCatalog( LogicalNamespace namespace ) { @@ -64,10 +65,10 @@ public PolyAllocRelCatalog( LogicalNamespace namespace ) { public PolyAllocRelCatalog( @Deserialize("namespace") LogicalNamespace namespace, @Deserialize("tables") Map tables, - @Deserialize("columns") Map columns ) { + @Deserialize("allocColumns") Map, AllocationColumn> allocColumns ) { this.tables = new ConcurrentHashMap<>( tables ); this.namespace = namespace; - this.columns = new ConcurrentHashMap<>( columns ); + this.allocColumns = new ConcurrentHashMap<>( allocColumns ); } @@ -82,14 +83,14 @@ public 
PolyAllocRelCatalog copy() { @Override public AllocationColumn addColumn( long allocationId, long columnId, PlacementType placementType, int position ) { AllocationColumn column = new AllocationColumn( namespace.id, allocationId, columnId, placementType, position, tables.get( allocationId ).adapterId ); - columns.put( columnId, column ); + allocColumns.put( Pair.of( allocationId, columnId ), column ); return column; } @Override public void deleteColumn( long allocationId, long columnId, boolean columnOnly ) { - columns.remove( columnId ); + allocColumns.remove( Pair.of( allocationId, columnId ) ); } @@ -101,6 +102,8 @@ public void updateColumnPlacementType( long adapterId, long columnId, PlacementT @Override public void updateColumnPlacementPhysicalPosition( long allocId, long columnId, long position ) { + // Pair key = Pair.of( allocId, columnId ); + // allocColumns.put( key, allocColumns.get( key ).withPosition( position ) ); } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/RelationalCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/RelationalCatalog.java index a454cd4145..7158df20ca 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/RelationalCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/RelationalCatalog.java @@ -306,8 +306,10 @@ public void deleteColumn( long columnId ) { @Override - public void setDefaultValue( long columnId, PolyType type, String defaultValue ) { - columns.put( columnId, columns.get( columnId ).toBuilder().type( type ).defaultValue( new CatalogDefaultValue( columnId, type, defaultValue, "defaultValue" ) ).build() ); + public LogicalColumn setDefaultValue( long columnId, PolyType type, String defaultValue ) { + LogicalColumn column = columns.get( columnId ).toBuilder().defaultValue( new CatalogDefaultValue( columnId, type, defaultValue, "defaultValue" ) ).build(); + columns.put( columnId, column ); + return column; } diff --git a/webui/src/main/java/org/polypheny/db/webui/HttpServer.java b/webui/src/main/java/org/polypheny/db/webui/HttpServer.java index 07f4019eac..34cae28c92 100644 --- a/webui/src/main/java/org/polypheny/db/webui/HttpServer.java +++ b/webui/src/main/java/org/polypheny/db/webui/HttpServer.java @@ -231,7 +231,7 @@ private void crudRoutes( Javalin webuiServer, Crud crud ) { webuiServer.post( "/getDashboardDiagram", crud.statisticCrud::getDashboardDiagram ); - webuiServer.post( "/getColumns", crud::getColumns ); + webuiServer.post( "/getAllocColumns", crud::getColumns ); webuiServer.post( "/getDataSourceColumns", crud::getDataSourceColumns ); From fdf1032a9f82ed7a1ff55df41bee4f728eeed4fb Mon Sep 17 00:00:00 2001 From: datomo Date: Sat, 15 Apr 2023 00:59:02 +0200 Subject: [PATCH 062/436] removed persistent monitoring for now, adjusted getColumns method in table --- .../db/catalog/entity/logical/LogicalColumn.java | 9 +++++---- .../db/catalog/entity/logical/LogicalTable.java | 2 +- .../org/polypheny/db/routing/routers/BaseRouter.java | 5 ++--- .../monitoring/statistics/StatisticsManagerImpl.java | 4 ++-- .../main/java/org/polypheny/db/avatica/DbmsMeta.java | 8 ++++---- .../org/polypheny/db/monitoring/MapDbRepository.java | 12 +++++------- .../db/sql/language/validate/SqlValidatorImpl.java | 6 ++---- 7 files changed, 21 insertions(+), 25 deletions(-) diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalColumn.java b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalColumn.java 
index bc715757ca..b02e3063f9 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalColumn.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalColumn.java @@ -135,13 +135,14 @@ public AlgDataType getAlgDataType( final AlgDataTypeFactory typeFactory ) { assert this.type.allowsNoPrecNoScale(); elementType = typeFactory.createPolyType( this.type ); } + if ( collectionsType == PolyType.ARRAY ) { - return typeFactory.createArrayType( elementType, cardinality != null ? cardinality : -1, dimension != null ? dimension : -1 ); + elementType = typeFactory.createArrayType( elementType, cardinality != null ? cardinality : -1, dimension != null ? dimension : -1 ); } else if ( collectionsType == PolyType.MAP ) { - return typeFactory.createMapType( typeFactory.createPolyType( PolyType.ANY ), elementType ); - } else { - return elementType; + elementType = typeFactory.createMapType( typeFactory.createPolyType( PolyType.ANY ), elementType ); } + + return typeFactory.createTypeWithNullability( elementType, nullable ); } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalTable.java b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalTable.java index 8faa538537..fbd762806b 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalTable.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalTable.java @@ -114,7 +114,7 @@ public int compareTo( LogicalTable o ) { public AlgDataType getRowType() { final AlgDataTypeFactory.Builder fieldInfo = AlgDataTypeFactory.DEFAULT.builder(); - for ( LogicalColumn column : Catalog.getInstance().getSnapshot().rel().getColumns( id ) ) { + for ( LogicalColumn column : Catalog.getInstance().getSnapshot().rel().getColumns( id ).stream().sorted( Comparator.comparingInt( a -> a.position ) ).collect( Collectors.toList() ) ) { AlgDataType sqlType = column.getAlgDataType( AlgDataTypeFactory.DEFAULT ); fieldInfo.add( column.name, null, sqlType ).nullable( column.nullable ); } diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java b/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java index 1d0330d2c2..2d41f70e15 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java @@ -233,10 +233,9 @@ public RoutedAlgBuilder handleGeneric( AlgNode node, RoutedAlgBuilder builder ) protected List handleGeneric( AlgNode node, List builders ) { if ( node.getInputs().size() == 1 ) { - log.warn( "why?" 
); - /*builders.forEach( + builders.forEach( builder -> builder.replaceTop( node.copy( node.getTraitSet(), ImmutableList.of( builder.peek( 0 ) ) ) ) - );*/ + ); } else if ( node.getInputs().size() == 2 ) { // Joins, SetOperations builders.forEach( builder -> builder.replaceTop( node.copy( node.getTraitSet(), ImmutableList.of( builder.peek( 1 ), builder.peek( 0 ) ) ), 2 ) diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticsManagerImpl.java b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticsManagerImpl.java index e03aa540fa..283ba5222e 100644 --- a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticsManagerImpl.java +++ b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticsManagerImpl.java @@ -962,8 +962,8 @@ private void addNewColumnStatistics( Map> changedValues, long i, P } - private void handleInsertColumn( long tableId, Map> changedValues, long schemaId, List columns, int i, QueryResult queryResult ) { - StatisticColumn statisticColumn = this.statisticSchemaMap.get( schemaId ).get( tableId ).get( columns.get( i ) ); + private void handleInsertColumn( long tableId, Map> changedValues, long namespaceId, List columns, int i, QueryResult queryResult ) { + StatisticColumn statisticColumn = this.statisticSchemaMap.get( namespaceId ).get( tableId ).get( columns.get( i ) ); statisticColumn.insert( (List) changedValues.get( (long) i ) ); put( queryResult, statisticColumn ); } diff --git a/plugins/avatica-interface/src/main/java/org/polypheny/db/avatica/DbmsMeta.java b/plugins/avatica-interface/src/main/java/org/polypheny/db/avatica/DbmsMeta.java index 927ac65617..76be0dd967 100644 --- a/plugins/avatica-interface/src/main/java/org/polypheny/db/avatica/DbmsMeta.java +++ b/plugins/avatica-interface/src/main/java/org/polypheny/db/avatica/DbmsMeta.java @@ -182,7 +182,7 @@ private MetaResultSet createMetaResultSet( CursorFactory cursorFactory, final Frame firstFrame ) { final PolyphenyDbSignature signature = - new PolyphenyDbSignature( + new PolyphenyDbSignature<>( "", ImmutableList.of(), internalParameters, @@ -1113,7 +1113,7 @@ public Frame fetch( final StatementHandle h, final long offset, final int fetchM final PolyphenyDbStatementHandle statementHandle = getPolyphenyDbStatementHandle( h ); - final PolyphenyDbSignature signature = statementHandle.getSignature(); + final PolyphenyDbSignature signature = statementHandle.getSignature(); final Iterator iterator; if ( statementHandle.getOpenResultSet() == null ) { final Iterable iterable = createIterable( statementHandle.getStatement().getDataContext(), signature ); @@ -1124,7 +1124,7 @@ public Frame fetch( final StatementHandle h, final long offset, final int fetchM iterator = statementHandle.getOpenResultSet(); statementHandle.getExecutionStopWatch().resume(); } - final List rows = MetaImpl.collect( signature.cursorFactory, LimitIterator.of( iterator, fetchMaxRowCount ), new ArrayList<>() ); + final List rows = MetaImpl.collect( signature.cursorFactory, LimitIterator.of( iterator, fetchMaxRowCount ), new ArrayList<>() ); statementHandle.getExecutionStopWatch().suspend(); boolean done = fetchMaxRowCount == 0 || rows.size() < fetchMaxRowCount; @SuppressWarnings("unchecked") @@ -1301,7 +1301,7 @@ private List execute( StatementHandle h, PolyphenyDbConnectionHan // TODO MV: Due to the performance benefits of sending data together with the first frame, this issue should be addressed // Remember that fetch is synchronized maxRowsInFirstFrame != 0 && 
SEND_FIRST_FRAME_WITH_RESPONSE - ? fetch( h, 0, (int) Math.min( Math.max( statementHandle.getMaxRowCount(), maxRowsInFirstFrame ), Integer.MAX_VALUE ) ) + ? fetch( h, 0, Math.min( Math.max( statementHandle.getMaxRowCount(), maxRowsInFirstFrame ), Integer.MAX_VALUE ) ) : null //Frame.MORE // Send first frame together with the response to save a fetch call ) ); } catch ( NoSuchStatementException e ) { diff --git a/plugins/mapdb-monitoring/src/main/java/org/polypheny/db/monitoring/MapDbRepository.java b/plugins/mapdb-monitoring/src/main/java/org/polypheny/db/monitoring/MapDbRepository.java index 1300bd632b..37939ac9b2 100644 --- a/plugins/mapdb-monitoring/src/main/java/org/polypheny/db/monitoring/MapDbRepository.java +++ b/plugins/mapdb-monitoring/src/main/java/org/polypheny/db/monitoring/MapDbRepository.java @@ -28,11 +28,9 @@ import java.util.stream.Collectors; import lombok.NonNull; import lombok.extern.slf4j.Slf4j; -import org.mapdb.BTreeMap; import org.mapdb.DB; import org.mapdb.DBException; import org.mapdb.DBMaker; -import org.mapdb.Serializer; import org.polypheny.db.StatusService; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.monitoring.events.MonitoringDataPoint; @@ -46,9 +44,9 @@ public class MapDbRepository implements PersistentMonitoringRepository { private static final String FILE_PATH = "simpleBackendDb"; private static final String FOLDER_NAME = "monitoring"; - protected final HashMap, BTreeMap> data = new HashMap<>(); + protected final HashMap, HashMap> data = new HashMap<>(); protected DB simpleBackendDb; - protected BTreeMap queryPostCosts; + protected HashMap queryPostCosts; @Override @@ -59,7 +57,7 @@ public void initialize( boolean resetRepository ) { @Override public void dataPoint( @NonNull MonitoringDataPoint dataPoint ) { - BTreeMap table = this.data.get( dataPoint.getClass() ); + HashMap table = this.data.get( dataPoint.getClass() ); if ( table == null ) { this.createPersistentTable( dataPoint.getClass() ); table = this.data.get( dataPoint.getClass() ); @@ -265,13 +263,13 @@ protected void initialize( String filePath, String folderName, boolean resetRepo private void initializePostCosts() { - queryPostCosts = simpleBackendDb.treeMap( QueryPostCost.class.getName(), Serializer.STRING, Serializer.JAVA ).createOrOpen(); + queryPostCosts = new HashMap<>();//simpleBackendDb.treeMap( QueryPostCost.class.getName(), Serializer.STRING, Serializer.JAVA ).createOrOpen(); } private void createPersistentTable( Class classPersistentData ) { if ( classPersistentData != null ) { - final BTreeMap treeMap = simpleBackendDb.treeMap( classPersistentData.getName(), Serializer.UUID, Serializer.JAVA ).createOrOpen(); + final HashMap treeMap = new HashMap<>();//simpleBackendDb.treeMap( classPersistentData.getName(), Serializer.UUID, Serializer.JAVA ).createOrOpen(); data.put( classPersistentData, treeMap ); } } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorImpl.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorImpl.java index b24c273f0a..d29418d187 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorImpl.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorImpl.java @@ -889,10 +889,8 @@ public void validateQuery( SqlNode node, SqlValidatorScope scope, AlgDataType ta } validateNamespace( ns, targetRowType ); - switch ( node.getKind() ) { - case EXTEND: - // Until we have a dedicated
namespace for EXTEND - deriveType( scope, node ); + if ( Objects.requireNonNull( node.getKind() ) == Kind.EXTEND ) {// Until we have a dedicated namespace for EXTEND + deriveType( scope, node ); } if ( node == top ) { validateModality( node ); From bcb391c7c4d813449d608cb29e55d54a9cda392a Mon Sep 17 00:00:00 2001 From: datomo Date: Sat, 15 Apr 2023 10:02:25 +0200 Subject: [PATCH 063/436] started readding materialized views and normal views --- .../relational/LogicalRelViewScan.java | 10 +-- .../catalogs/LogicalRelationalCatalog.java | 8 +- ...View.java => LogicalMaterializedView.java} | 56 +++++++----- .../{CatalogView.java => LogicalView.java} | 39 ++++---- .../db/view/MaterializedViewManager.java | 4 +- .../org/polypheny/db/view/ViewManager.java | 4 +- .../org/polypheny/db/ddl/DdlManagerImpl.java | 88 ++++++------------- .../db/view/MaterializedViewManagerImpl.java | 8 +- .../statistics/StatisticsManagerImpl.java | 2 +- .../db/catalog/logical/RelationalCatalog.java | 57 +++++++++--- .../java/org/polypheny/db/webui/Crud.java | 12 +-- 11 files changed, 154 insertions(+), 134 deletions(-) rename core/src/main/java/org/polypheny/db/catalog/entity/{CatalogMaterializedView.java => LogicalMaterializedView.java} (57%) rename core/src/main/java/org/polypheny/db/catalog/entity/{CatalogView.java => LogicalView.java} (77%) diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelViewScan.java b/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelViewScan.java index af78f0b967..de966d3213 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelViewScan.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelViewScan.java @@ -26,14 +26,12 @@ import org.polypheny.db.algebra.core.relational.RelScan; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.catalog.entity.CatalogEntity; -import org.polypheny.db.catalog.entity.CatalogView; +import org.polypheny.db.catalog.entity.LogicalView; import org.polypheny.db.plan.AlgOptCluster; -import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.plan.Convention; import org.polypheny.db.rex.RexBuilder; import org.polypheny.db.rex.RexNode; -import org.polypheny.db.schema.Entity; public class LogicalRelViewScan extends RelScan { @@ -64,10 +62,10 @@ public static AlgNode create( AlgOptCluster cluster, final CatalogEntity entity return ImmutableList.of(); } ); - CatalogView catalogView = entity.unwrap( CatalogView.class ); - AlgCollation algCollation = catalogView.getAlgCollation(); + LogicalView logicalView = entity.unwrap( LogicalView.class ); + AlgCollation algCollation = logicalView.getAlgCollation(); - return new LogicalRelViewScan( cluster, traitSet, entity, catalogView.prepareView( cluster ), algCollation ); + return new LogicalRelViewScan( cluster, traitSet, entity, logicalView.prepareView( cluster ), algCollation ); } diff --git a/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalRelationalCatalog.java b/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalRelationalCatalog.java index 61aa38af88..937e84d93a 100644 --- a/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalRelationalCatalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalRelationalCatalog.java @@ -24,8 +24,9 @@ import org.polypheny.db.catalog.entity.CatalogConstraint; import org.polypheny.db.catalog.entity.CatalogIndex; import org.polypheny.db.catalog.entity.CatalogKey; 
-import org.polypheny.db.catalog.entity.CatalogView; +import org.polypheny.db.catalog.entity.LogicalMaterializedView; import org.polypheny.db.catalog.entity.LogicalNamespace; +import org.polypheny.db.catalog.entity.LogicalView; import org.polypheny.db.catalog.entity.MaterializedCriteria; import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalTable; @@ -69,7 +70,6 @@ public interface LogicalRelationalCatalog extends LogicalCatalog { * @param name of the view to add * @param namespaceId id of the schema * @param entityType type of table - * @param modifiable Whether the content of the table can be modified * @param definition {@link AlgNode} used to create Views * @param algCollation relCollation used for materialized view * @param underlyingTables all tables and columns used within the view @@ -80,7 +80,7 @@ public interface LogicalRelationalCatalog extends LogicalCatalog { * @param ordered if materialized view is ordered or not * @return id of the inserted materialized view */ - long addMaterializedView( String name, long namespaceId, EntityType entityType, boolean modifiable, AlgNode definition, AlgCollation algCollation, Map> underlyingTables, AlgDataType fieldList, MaterializedCriteria materializedCriteria, String query, QueryLanguage language, boolean ordered ); + LogicalMaterializedView addMaterializedView( String name, long namespaceId, EntityType entityType, AlgNode definition, AlgCollation algCollation, Map> underlyingTables, AlgDataType fieldList, MaterializedCriteria materializedCriteria, String query, QueryLanguage language, boolean ordered ); /** * Renames a table @@ -247,7 +247,7 @@ public interface LogicalRelationalCatalog extends LogicalCatalog { * * @param catalogView view for which to delete its dependencies */ - void deleteViewDependencies( CatalogView catalogView ); + void deleteViewDependencies( LogicalView catalogView ); /** * Updates the last time a materialized view has been refreshed. 
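
A minimal caller sketch for the reworked addMaterializedView above — an illustration, not part of the diff. It mirrors the DdlManagerImpl call site later in this patch; catalog, namespaceId, algRoot, underlying, fieldList, materializedCriteria, query and language are assumed to be in scope, and the view name is hypothetical:

    // The catalog now hands the created entity back directly, so the caller
    // no longer needs a second snapshot lookup by id.
    LogicalMaterializedView view = catalog.getLogicalRel( namespaceId ).addMaterializedView(
            "sales_summary",                // hypothetical view name
            namespaceId,
            EntityType.MATERIALIZED_VIEW,   // still part of the signature in this patch
            algRoot.alg,                    // view definition
            algRoot.collation,
            underlying,                     // underlying tables and the columns they use
            fieldList,
            materializedCriteria,
            query,
            language,
            false );                        // ordered
    long viewId = view.id;                  // the previously returned id is still available
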
diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogMaterializedView.java b/core/src/main/java/org/polypheny/db/catalog/entity/LogicalMaterializedView.java similarity index 57% rename from core/src/main/java/org/polypheny/db/catalog/entity/CatalogMaterializedView.java rename to core/src/main/java/org/polypheny/db/catalog/entity/LogicalMaterializedView.java index 62b2245c82..f4cf5a0703 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogMaterializedView.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/LogicalMaterializedView.java @@ -18,6 +18,12 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; +import io.activej.serializer.annotations.Deserialize; +import io.activej.serializer.annotations.Serialize; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; import lombok.EqualsAndHashCode; import lombok.Value; import lombok.experimental.SuperBuilder; @@ -30,35 +36,35 @@ @SuperBuilder(toBuilder = true) @EqualsAndHashCode(callSuper = true) @Value -public class CatalogMaterializedView extends CatalogView { +public class LogicalMaterializedView extends LogicalView { private static final long serialVersionUID = 4728996184367206274L; + @Serialize public String language; - + @Serialize public AlgCollation algCollation; - + @Serialize public String query; - + @Serialize public MaterializedCriteria materializedCriteria; - + @Serialize public boolean ordered; - public CatalogMaterializedView( - long id, - String name, - long namespaceId, - EntityType entityType, - String query, - Long primaryKey, - boolean modifiable, - AlgCollation algCollation, - ImmutableList connectedViews, - ImmutableMap> underlyingTables, - String language, - MaterializedCriteria materializedCriteria, - boolean ordered + public LogicalMaterializedView( + @Deserialize("id") long id, + @Deserialize("name") String name, + @Deserialize("namespaceId") long namespaceId, + @Deserialize("id") EntityType entityType, + @Deserialize("entityType") String query, + @Deserialize("primaryKey") Long primaryKey, + @Deserialize("algCollation") AlgCollation algCollation, + @Deserialize("connectedViews") List connectedViews, + @Deserialize("underlyingTables") Map> underlyingTables, + @Deserialize("language") String language, + @Deserialize("materializedCriteria") MaterializedCriteria materializedCriteria, + @Deserialize("ordered") boolean ordered ) { super( id, @@ -67,11 +73,17 @@ public CatalogMaterializedView( entityType, query, primaryKey, - modifiable, algCollation, - underlyingTables, - connectedViews, + ImmutableMap.copyOf( underlyingTables ), + ImmutableList.copyOf( connectedViews ), language ); + + Map> map = new HashMap<>(); + for ( Entry> e : underlyingTables.entrySet() ) { + if ( map.put( e.getKey(), ImmutableList.copyOf( e.getValue() ) ) != null ) { + throw new IllegalStateException( "Duplicate key" ); + } + } this.query = query; this.algCollation = algCollation; this.language = language; diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogView.java b/core/src/main/java/org/polypheny/db/catalog/entity/LogicalView.java similarity index 77% rename from core/src/main/java/org/polypheny/db/catalog/entity/CatalogView.java rename to core/src/main/java/org/polypheny/db/catalog/entity/LogicalView.java index 99ca24aa42..32ecb92695 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogView.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/LogicalView.java @@ 
-19,8 +19,10 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; +import io.activej.serializer.annotations.Deserialize; +import io.activej.serializer.annotations.Serialize; +import java.util.List; import lombok.EqualsAndHashCode; -import lombok.Getter; import lombok.Value; import lombok.experimental.NonFinal; import lombok.experimental.SuperBuilder; @@ -41,41 +43,42 @@ @SuperBuilder(toBuilder = true) @Value @NonFinal -public class CatalogView extends LogicalTable { +public class LogicalView extends LogicalTable { private static final long serialVersionUID = -4771308114962700515L; - @Getter - protected ImmutableMap> underlyingTables; + @Serialize + public ImmutableMap> underlyingTables; + @Serialize public String language; + @Serialize public AlgCollation algCollation; + @Serialize public String query; - public CatalogView( - long id, - String name, - long namespaceId, - EntityType entityType, - String query, - Long primaryKey, - boolean modifiable, - AlgCollation algCollation, - ImmutableMap> underlyingTables, - ImmutableList connectedViews, - String language ) { + public LogicalView( + @Deserialize("id") long id, + @Deserialize("name") String name, + @Deserialize("namespaceId") long namespaceId, + @Deserialize("entityType") EntityType entityType, + @Deserialize("query") String query, + @Deserialize("primaryKey") Long primaryKey, + @Deserialize("algCollation") AlgCollation algCollation, + @Deserialize("underlyingTables") ImmutableMap> underlyingTables, + @Deserialize("connectedViews") ImmutableList connectedViews, + @Deserialize("language") String language ) { super( id, name, namespaceId, entityType, primaryKey, - modifiable, + false, connectedViews ); this.query = query; this.algCollation = algCollation; this.underlyingTables = underlyingTables; - // mapdb cannot handle the class QueryLanguage, therefore we use the String here this.language = language; } diff --git a/core/src/main/java/org/polypheny/db/view/MaterializedViewManager.java b/core/src/main/java/org/polypheny/db/view/MaterializedViewManager.java index 1cfb46c918..22a5892b19 100644 --- a/core/src/main/java/org/polypheny/db/view/MaterializedViewManager.java +++ b/core/src/main/java/org/polypheny/db/view/MaterializedViewManager.java @@ -26,7 +26,7 @@ import org.polypheny.db.algebra.AlgShuttleImpl; import org.polypheny.db.algebra.core.common.Modify; import org.polypheny.db.algebra.logical.relational.LogicalRelModify; -import org.polypheny.db.catalog.entity.CatalogMaterializedView; +import org.polypheny.db.catalog.entity.LogicalMaterializedView; import org.polypheny.db.catalog.entity.MaterializedCriteria; import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.transaction.PolyXid; @@ -65,7 +65,7 @@ public abstract void addData( List stores, Map> addedColumns, AlgRoot algRoot, - CatalogMaterializedView materializedView ); + LogicalMaterializedView materializedView ); public abstract void addTables( Transaction transaction, List ids ); diff --git a/core/src/main/java/org/polypheny/db/view/ViewManager.java b/core/src/main/java/org/polypheny/db/view/ViewManager.java index f7f47ed8d3..bbd39eb3aa 100644 --- a/core/src/main/java/org/polypheny/db/view/ViewManager.java +++ b/core/src/main/java/org/polypheny/db/view/ViewManager.java @@ -46,7 +46,7 @@ import org.polypheny.db.algebra.logical.relational.LogicalUnion; import org.polypheny.db.algebra.logical.relational.LogicalValues; import org.polypheny.db.algebra.type.AlgDataType; -import 
org.polypheny.db.catalog.entity.CatalogMaterializedView; +import org.polypheny.db.catalog.entity.LogicalMaterializedView; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.rex.RexBuilder; @@ -252,7 +252,7 @@ public AlgNode checkNode( AlgNode other ) { return expandViewNode( other ); } else if ( doesSubstituteOrderBy && other instanceof LogicalRelScan ) { LogicalTable catalogTable = other.getEntity().unwrap( LogicalTable.class ); - if ( catalogTable.entityType == EntityType.MATERIALIZED_VIEW && ((CatalogMaterializedView) catalogTable).isOrdered() ) { + if ( catalogTable.entityType == EntityType.MATERIALIZED_VIEW && ((LogicalMaterializedView) catalogTable).isOrdered() ) { return orderMaterialized( other ); } } diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java index 010b0e678c..81a6443159 100644 --- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java @@ -62,11 +62,11 @@ import org.polypheny.db.catalog.entity.CatalogGraphPlacement; import org.polypheny.db.catalog.entity.CatalogIndex; import org.polypheny.db.catalog.entity.CatalogKey; -import org.polypheny.db.catalog.entity.CatalogMaterializedView; import org.polypheny.db.catalog.entity.CatalogPartitionGroup; import org.polypheny.db.catalog.entity.CatalogPrimaryKey; -import org.polypheny.db.catalog.entity.CatalogView; +import org.polypheny.db.catalog.entity.LogicalMaterializedView; import org.polypheny.db.catalog.entity.LogicalNamespace; +import org.polypheny.db.catalog.entity.LogicalView; import org.polypheny.db.catalog.entity.MaterializedCriteria; import org.polypheny.db.catalog.entity.MaterializedCriteria.CriteriaType; import org.polypheny.db.catalog.entity.allocation.AllocationEntity; @@ -1687,6 +1687,7 @@ private String adjustNameIfNeeded( String name, long namespaceId ) { @Override public void createMaterializedView( String viewName, long namespaceId, AlgRoot algRoot, boolean replace, Statement statement, List stores, PlacementType placementType, List projectedColumns, MaterializedCriteria materializedCriteria, String query, QueryLanguage language, boolean ifNotExists, boolean ordered ) { viewName = adjustNameIfNeeded( viewName, namespaceId ); + // Check if there is already a table with this name if ( assertEntityExists( namespaceId, viewName, ifNotExists ) ) { return; @@ -1716,11 +1717,10 @@ public void createMaterializedView( String viewName, long namespaceId, AlgRoot a } } - long tableId = catalog.getLogicalRel( namespaceId ).addMaterializedView( + LogicalMaterializedView view = catalog.getLogicalRel( namespaceId ).addMaterializedView( viewName, namespaceId, EntityType.MATERIALIZED_VIEW, - false, algRoot.alg, algRoot.collation, underlying, @@ -1732,71 +1732,36 @@ public void createMaterializedView( String viewName, long namespaceId, AlgRoot a ); // Creates a list with all columns, tableId is needed to create the primary key - List columns = getColumnInformation( projectedColumns, fieldList, true, tableId ); - Map> addedColumns = new HashMap<>(); - - List columnIds = new ArrayList<>(); - - for ( FieldInformation field : columns ) { - LogicalColumn column = catalog.getLogicalRel( namespaceId ).addColumn( - field.name, - tableId, - field.position, - field.typeInformation.type, - field.typeInformation.collectionType, - field.typeInformation.precision, - field.typeInformation.scale, - 
field.typeInformation.dimension, - field.typeInformation.cardinality, - field.typeInformation.nullable, - field.collation ); - - // Created primary key is added to list - if ( field.name.startsWith( "_matid_" ) ) { - columnIds.add( column.id ); - } - - for ( DataStore s : stores ) { - long adapterId = s.getAdapterId(); - AllocationEntity allocation = catalog.getSnapshot().alloc().getAllocation( adapterId, tableId ); - catalog.getAllocRel( namespaceId ).addColumn( - allocation.id, - column.id, - placementType, - 0 ); + List fields = getColumnInformation( projectedColumns, fieldList, true, view.id ); - List logicalColumns; - if ( addedColumns.containsKey( adapterId ) ) { - logicalColumns = addedColumns.get( adapterId ); - } else { - logicalColumns = new ArrayList<>(); - } - logicalColumns.add( relSnapshot.getColumn( column.id ) ); - addedColumns.put( adapterId, logicalColumns ); - } + Map ids = new LinkedHashMap<>(); + for ( FieldInformation field : fields ) { + ids.put( field.name, addColumn( namespaceId, field.name, field.typeInformation, field.collation, field.defaultValue, view.id, field.position, stores, placementType ) ); } // Sets previously created primary key - catalog.getLogicalRel( namespaceId ).addPrimaryKey( tableId, columnIds ); + //catalog.getLogicalRel( namespaceId ).addPrimaryKey( view.id, columnIds ); - CatalogMaterializedView catalogMaterializedView = catalog.getSnapshot().rel().getTable( tableId ).unwrap( CatalogMaterializedView.class ); - Catalog.getInstance().getSnapshot(); + catalog.updateSnapshot(); for ( DataStore store : stores ) { - catalog.getAllocRel( namespaceId ).addPartitionPlacement( - catalogMaterializedView.namespaceId, - store.getAdapterId(), - tableId, - snapshot.alloc().getPartitionProperty( catalogMaterializedView.id ).partitionIds.get( 0 ), - PlacementType.AUTOMATIC, - DataPlacementRole.UPTODATE ); + AllocationTable alloc = catalog.getAllocRel( namespaceId ).createAllocationTable( store.getAdapterId(), view.id ); + List columns = new ArrayList<>(); - store.createPhysicalTable( statement.getPrepareContext(), catalogMaterializedView, null, null ); + int i = 0; + for ( long id : ids.values() ) { + columns.add( catalog.getAllocRel( namespaceId ).addColumn( alloc.id, id, PlacementType.AUTOMATIC, i ) ); + i++; + } + catalog.updateSnapshot(); + catalog.getPhysical( namespaceId ).addEntities( store.createPhysicalTable( statement.getPrepareContext(), view, alloc, columns ) ); } + catalog.updateSnapshot(); // Selected data from tables is added into the newly created materialized view MaterializedViewManager materializedManager = MaterializedViewManager.getInstance(); - materializedManager.addData( statement.getTransaction(), stores, addedColumns, algRoot, catalogMaterializedView ); + log.warn( "add" ); + materializedManager.addData( statement.getTransaction(), stores, null, algRoot, view ); } @@ -2168,6 +2133,11 @@ public void createTableOld( long namespaceId, String name, List fields, List constraints, boolean ifNotExists, List stores, PlacementType placementType, Statement statement ) { String adjustedName = adjustNameIfNeeded( name, namespaceId ); + // Check if there is already a table with this name + if ( assertEntityExists( namespaceId, adjustedName, ifNotExists ) ) { + return; + } + if ( stores == null ) { // Ask router on which store(s) the table should be placed stores = RoutingManager.getInstance().getCreatePlacementStrategy().getDataStoresForNewEntity(); @@ -2912,7 +2882,7 @@ public void dropView( LogicalTable catalogView, Statement statement ) {
checkViewDependencies( catalogView ); catalog.getLogicalRel( catalogView.namespaceId ).flagTableForDeletion( catalogView.id, true ); - catalog.getLogicalRel( catalogView.namespaceId ).deleteViewDependencies( (CatalogView) catalogView ); + catalog.getLogicalRel( catalogView.namespaceId ).deleteViewDependencies( (LogicalView) catalogView ); // Delete columns @@ -2939,7 +2909,7 @@ public void dropMaterializedView( LogicalTable materializedView, Statement state catalog.getLogicalRel( materializedView.namespaceId ).flagTableForDeletion( materializedView.id, true ); - catalog.getLogicalRel( materializedView.namespaceId ).deleteViewDependencies( (CatalogView) materializedView ); + catalog.getLogicalRel( materializedView.namespaceId ).deleteViewDependencies( (LogicalView) materializedView ); dropTable( materializedView, statement ); diff --git a/dbms/src/main/java/org/polypheny/db/view/MaterializedViewManagerImpl.java b/dbms/src/main/java/org/polypheny/db/view/MaterializedViewManagerImpl.java index 2139be343a..b4910fefaa 100644 --- a/dbms/src/main/java/org/polypheny/db/view/MaterializedViewManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/view/MaterializedViewManagerImpl.java @@ -45,7 +45,7 @@ import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.AllocationColumn; import org.polypheny.db.catalog.entity.CatalogDataPlacement; -import org.polypheny.db.catalog.entity.CatalogMaterializedView; +import org.polypheny.db.catalog.entity.LogicalMaterializedView; import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.MaterializedCriteria; import org.polypheny.db.catalog.entity.MaterializedCriteria.CriteriaType; @@ -284,7 +284,7 @@ public void prepareToUpdate( Long materializedId ) { // Get a shared global schema lock (only DDLs acquire an exclusive global schema lock) idAccessMap.add( Pair.of( LockManager.GLOBAL_LOCK, LockMode.SHARED ) ); // Get locks for individual tables - EntityAccessMap accessMap = new EntityAccessMap( ((CatalogMaterializedView) catalogTable).getDefinition(), new HashMap<>() ); + EntityAccessMap accessMap = new EntityAccessMap( ((LogicalMaterializedView) catalogTable).getDefinition(), new HashMap<>() ); idAccessMap.addAll( accessMap.getAccessedEntityPair() ); LockManager.INSTANCE.lock( idAccessMap, (TransactionImpl) statement.getTransaction() ); } catch ( DeadlockException e ) { @@ -301,7 +301,7 @@ public void prepareToUpdate( Long materializedId ) { * Is used if a materialized view is created in order to add the data from the underlying tables to the materialized view */ @Override - public void addData( Transaction transaction, List stores, Map> columns, AlgRoot algRoot, CatalogMaterializedView materializedView ) { + public void addData( Transaction transaction, List stores, Map> columns, AlgRoot algRoot, LogicalMaterializedView materializedView ) { addMaterializedInfo( materializedView.id, materializedView.getMaterializedCriteria() ); List columnPlacements = new LinkedList<>(); @@ -338,7 +338,7 @@ public void updateData( Transaction transaction, Long materializedId ) { List ids = new ArrayList<>(); if ( snapshot.getLogicalEntity( materializedId ) != null && materializedInfo.containsKey( materializedId ) ) { - CatalogMaterializedView catalogMaterializedView = snapshot.getLogicalEntity( materializedId ).unwrap( CatalogMaterializedView.class ); + LogicalMaterializedView catalogMaterializedView = snapshot.getLogicalEntity( materializedId ).unwrap( LogicalMaterializedView.class ); List dataPlacements = 
snapshot.alloc().getDataPlacements( catalogMaterializedView.id ); for ( CatalogDataPlacement placement : dataPlacements ) { ids.add( placement.adapterId ); diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticsManagerImpl.java b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticsManagerImpl.java index 283ba5222e..cfa1d89f96 100644 --- a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticsManagerImpl.java +++ b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticsManagerImpl.java @@ -242,7 +242,7 @@ private void resetAllIsFull() { * Reset all statistics and reevaluate them. */ private void reevaluateAllStatistics() { - if ( true || statisticQueryInterface == null ) { + if ( statisticQueryInterface == null ) { return; } log.debug( "Resetting StatisticManager." ); diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/RelationalCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/RelationalCatalog.java index 7158df20ca..4bc6df71dd 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/RelationalCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/RelationalCatalog.java @@ -48,10 +48,10 @@ import org.polypheny.db.catalog.entity.CatalogIndex; import org.polypheny.db.catalog.entity.CatalogKey; import org.polypheny.db.catalog.entity.CatalogKey.EnforcementTime; -import org.polypheny.db.catalog.entity.CatalogMaterializedView; import org.polypheny.db.catalog.entity.CatalogPrimaryKey; -import org.polypheny.db.catalog.entity.CatalogView; +import org.polypheny.db.catalog.entity.LogicalMaterializedView; import org.polypheny.db.catalog.entity.LogicalNamespace; +import org.polypheny.db.catalog.entity.LogicalView; import org.polypheny.db.catalog.entity.MaterializedCriteria; import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalTable; @@ -80,6 +80,10 @@ public class RelationalCatalog implements Serializable, LogicalRelationalCatalog @Getter public Map columns; + @Serialize + @Getter + public Map nodes; + @Serialize @Getter public LogicalNamespace logicalNamespace; @@ -119,7 +123,8 @@ public RelationalCatalog( @Deserialize("indexes") Map indexes, @Deserialize("keys") Map keys, @Deserialize("keyColumns") Map keyColumns, - @Deserialize("constraints") Map constraints ) { + @Deserialize("constraints") Map constraints, + @Deserialize("nodes") Map nodes ) { this.logicalNamespace = logicalNamespace; this.tables = tables; @@ -128,11 +133,12 @@ public RelationalCatalog( this.keys = keys; this.keyColumns = keyColumns; this.constraints = constraints; + this.nodes = nodes; } public RelationalCatalog( LogicalNamespace namespace ) { - this( namespace, new HashMap<>(), new HashMap<>(), new HashMap<>(), new HashMap<>(), new HashMap<>(), new HashMap<>() ); + this( namespace, new HashMap<>(), new HashMap<>(), new HashMap<>(), new HashMap<>(), new HashMap<>(), new HashMap<>(), new HashMap<>() ); } @@ -169,8 +175,39 @@ public long addView( String name, long namespaceId, EntityType entityType, boole @Override - public long addMaterializedView( String name, long namespaceId, EntityType entityType, boolean modifiable, AlgNode definition, AlgCollation algCollation, Map> underlyingTables, AlgDataType fieldList, MaterializedCriteria materializedCriteria, String query, QueryLanguage language, boolean ordered ) { - throw new NotImplementedException(); + public 
LogicalMaterializedView addMaterializedView( final String name, long namespaceId, EntityType entityType, AlgNode definition, AlgCollation algCollation, Map> underlyingTables, AlgDataType fieldList, MaterializedCriteria materializedCriteria, String query, QueryLanguage language, boolean ordered ) { + long id = idBuilder.getNewEntityId(); + + String adjustedName = name; + + if ( !logicalNamespace.caseSensitive ) { + adjustedName = name.toLowerCase(); + } + + if ( entityType != EntityType.MATERIALIZED_VIEW ) { + // Should not happen, addMaterializedView is only called with EntityType.MATERIALIZED_VIEW + throw new RuntimeException( "addMaterializedViewTable is only possible with EntityType = MATERIALIZED_VIEW" ); + } + + LogicalMaterializedView materializedViewTable = new LogicalMaterializedView( + id, + adjustedName, + namespaceId, + entityType, + query, + null, + algCollation, + ImmutableList.of(), + underlyingTables, + language.getSerializedName(), + materializedCriteria, + ordered + ); + + tables.put( id, materializedViewTable ); + nodes.put( id, definition ); + + return materializedViewTable; } @@ -515,10 +552,10 @@ public void deleteConstraint( long constraintId ) { @Override - public void deleteViewDependencies( CatalogView catalogView ) { - for ( long id : catalogView.getUnderlyingTables().keySet() ) { + public void deleteViewDependencies( LogicalView logicalView ) { + for ( long id : logicalView.underlyingTables.keySet() ) { LogicalTable old = tables.get( id ); - List connectedViews = old.connectedViews.stream().filter( e -> e != catalogView.id ).collect( Collectors.toList() ); + List connectedViews = old.connectedViews.stream().filter( e -> e != logicalView.id ).collect( Collectors.toList() ); LogicalTable table = old.toBuilder().connectedViews( ImmutableList.copyOf( connectedViews ) ).build(); @@ -533,7 +570,7 @@ public void deleteViewDependencies( CatalogView catalogView ) { @Override public void updateMaterializedViewRefreshTime( long materializedViewId ) { - CatalogMaterializedView old = (CatalogMaterializedView) tables.get( materializedViewId ); + LogicalMaterializedView old = (LogicalMaterializedView) tables.get( materializedViewId ); MaterializedCriteria materializedCriteria = old.getMaterializedCriteria(); materializedCriteria.setLastUpdate( new Timestamp( System.currentTimeMillis() ) ); diff --git a/webui/src/main/java/org/polypheny/db/webui/Crud.java b/webui/src/main/java/org/polypheny/db/webui/Crud.java index 9731e792e1..5be26c031c 100644 --- a/webui/src/main/java/org/polypheny/db/webui/Crud.java +++ b/webui/src/main/java/org/polypheny/db/webui/Crud.java @@ -106,10 +106,10 @@ import org.polypheny.db.catalog.entity.CatalogDataPlacement; import org.polypheny.db.catalog.entity.CatalogForeignKey; import org.polypheny.db.catalog.entity.CatalogIndex; -import org.polypheny.db.catalog.entity.CatalogMaterializedView; import org.polypheny.db.catalog.entity.CatalogPrimaryKey; -import org.polypheny.db.catalog.entity.CatalogView; +import org.polypheny.db.catalog.entity.LogicalMaterializedView; import org.polypheny.db.catalog.entity.LogicalNamespace; +import org.polypheny.db.catalog.entity.LogicalView; import org.polypheny.db.catalog.entity.MaterializedCriteria; import org.polypheny.db.catalog.entity.MaterializedCriteria.CriteriaType; import org.polypheny.db.catalog.entity.allocation.AllocationEntity; @@ -1257,9 +1257,9 @@ void getMaterializedInfo( final Context ctx ) { LogicalTable catalogTable = getLogicalTable( request.schema, request.table ); if ( catalogTable.entityType == EntityType.MATERIALIZED_VIEW
) { - CatalogMaterializedView catalogMaterializedView = (CatalogMaterializedView) catalogTable; + LogicalMaterializedView logicalMaterializedView = (LogicalMaterializedView) catalogTable; - MaterializedCriteria materializedCriteria = catalogMaterializedView.getMaterializedCriteria(); + MaterializedCriteria materializedCriteria = logicalMaterializedView.getMaterializedCriteria(); ArrayList materializedInfo = new ArrayList<>(); materializedInfo.add( materializedCriteria.getCriteriaType().toString() ); @@ -1866,9 +1866,9 @@ void getUnderlyingTable( final Context ctx ) { LogicalTable catalogTable = getLogicalTable( request.getSchemaName(), request.getTableName() ); if ( catalogTable.entityType == EntityType.VIEW ) { - ImmutableMap> underlyingTableOriginal = ((CatalogView) catalogTable).getUnderlyingTables(); + ImmutableMap> underlyingTableOriginal = catalogTable.unwrap( LogicalView.class ).underlyingTables; Map> underlyingTable = new HashMap<>(); - for ( Entry> entry : underlyingTableOriginal.entrySet() ) { + for ( Entry> entry : underlyingTableOriginal.entrySet() ) { List columns = new ArrayList<>(); for ( Long ids : entry.getValue() ) { columns.add( catalog.getSnapshot().rel().getColumn( ids ).name ); From 2c9e307eda608ce1e39cbbfba1559e57d9befd1b Mon Sep 17 00:00:00 2001 From: datomo Date: Sat, 15 Apr 2023 22:10:08 +0200 Subject: [PATCH 064/436] fixing views, refactor statistics --- .../org/polypheny/db/StatisticsManager.java | 18 +- .../catalogs/LogicalRelationalCatalog.java | 6 +- .../entity/LogicalMaterializedView.java | 24 +- .../db/catalog/entity/LogicalView.java | 21 +- .../db/catalog/snapshot/AllocSnapshot.java | 6 +- .../snapshot/impl/AllocSnapshotImpl.java | 6 +- .../db/monitoring/events/MonitoringType.java | 25 +++ .../db/monitoring/events/StatementEvent.java | 2 +- .../polypheny/db/processing/DataMigrator.java | 4 +- .../db/view/MaterializedViewManager.java | 15 +- .../org/polypheny/db/ddl/DdlManagerImpl.java | 42 ++-- .../db/processing/DataMigratorImpl.java | 46 ++-- .../db/routing/UiRoutingPageUtil.java | 2 +- .../db/routing/routers/AbstractDqlRouter.java | 2 +- .../db/routing/routers/BaseRouter.java | 2 +- .../db/routing/routers/CachedPlanRouter.java | 2 +- .../db/routing/routers/DmlRouterImpl.java | 4 +- .../routers/FullPlacementQueryRouter.java | 4 +- .../db/routing/routers/SimpleRouter.java | 2 +- .../CreateSinglePlacementStrategy.java | 2 +- .../db/transaction/EntityAccessMap.java | 2 +- .../db/view/MaterializedViewManagerImpl.java | 81 +++---- .../events/metrics/DdlDataPoint.java | 25 +-- .../events/metrics/DmlDataPoint.java | 3 +- .../events/metrics/QueryDataPointImpl.java | 3 +- .../statistics/NumericalStatisticColumn.java | 2 +- .../statistics/StatisticColumn.java | 68 +----- .../statistics/StatisticRepository.java | 63 +++--- .../statistics/StatisticsManagerImpl.java | 205 +++++++----------- .../db/catalog/logical/RelationalCatalog.java | 30 +-- .../altertable/SqlAlterTableAddColumn.java | 2 +- .../java/org/polypheny/db/webui/Crud.java | 4 +- 32 files changed, 297 insertions(+), 426 deletions(-) create mode 100644 core/src/main/java/org/polypheny/db/monitoring/events/MonitoringType.java diff --git a/core/src/main/java/org/polypheny/db/StatisticsManager.java b/core/src/main/java/org/polypheny/db/StatisticsManager.java index bf557ffb9d..5b4cba09e0 100644 --- a/core/src/main/java/org/polypheny/db/StatisticsManager.java +++ b/core/src/main/java/org/polypheny/db/StatisticsManager.java @@ -19,9 +19,7 @@ import java.beans.PropertyChangeListener; import java.util.List; import 
java.util.Map; -import org.polypheny.db.catalog.entity.LogicalNamespace; -import org.polypheny.db.catalog.entity.logical.LogicalColumn; -import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.monitoring.events.MonitoringType; public abstract class StatisticsManager implements PropertyChangeListener { @@ -50,7 +48,7 @@ public static StatisticsManager getInstance() { public abstract void tablesToUpdate( long tableId ); // Use cache if possible - public abstract void tablesToUpdate( long tableId, Map> changedValues, String type, long schemaId ); + public abstract void tablesToUpdate( long tableId, Map> changedValues, MonitoringType type, long schemaId ); protected abstract void reevaluateTable( long tableId ); @@ -60,17 +58,17 @@ public static StatisticsManager getInstance() { public abstract void deleteTableToUpdate( long tableId, long schemaId ); - public abstract void updateRowCountPerTable( long tableId, int number, String source ); + public abstract void updateRowCountPerTable( long tableId, int number, MonitoringType type ); public abstract void setIndexSize( long tableId, int indexSize ); - public abstract void setTableCalls( long tableId, String kind ); + public abstract void setTableCalls( long tableId, MonitoringType type ); public abstract String getRevalId(); public abstract void setRevalId( String revalId ); - public abstract Map getStatisticSchemaMap(); + public abstract Map getStatisticFields(); public abstract Map getQualifiedStatisticMap(); @@ -84,10 +82,4 @@ public static StatisticsManager getInstance() { public abstract void initializeStatisticSettings(); - public abstract void updateColumnName( LogicalColumn logicalColumn, String newName ); - - public abstract void updateTableName( LogicalTable catalogTable, String newName ); - - public abstract void updateSchemaName( LogicalNamespace logicalNamespace, String newName ); - } diff --git a/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalRelationalCatalog.java b/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalRelationalCatalog.java index 937e84d93a..859b6eb246 100644 --- a/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalRelationalCatalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalRelationalCatalog.java @@ -55,21 +55,19 @@ public interface LogicalRelationalCatalog extends LogicalCatalog { * * @param name The name of the view to add * @param namespaceId The id of the schema - * @param entityType The table type * @param modifiable Whether the content of the table can be modified * @param definition {@link AlgNode} used to create Views * @param underlyingTables all tables and columns used within the view * @param fieldList all columns used within the View * @return The id of the inserted table */ - long addView( String name, long namespaceId, EntityType entityType, boolean modifiable, AlgNode definition, AlgCollation algCollation, Map> underlyingTables, AlgDataType fieldList, String query, QueryLanguage language ); + LogicalView addView( String name, long namespaceId, boolean modifiable, AlgNode definition, AlgCollation algCollation, Map> underlyingTables, List connectedViews, AlgDataType fieldList, String query, QueryLanguage language ); /** * Adds a materialized view to a specified schema. 
* * @param name of the view to add * @param namespaceId id of the schema - * @param entityType type of table * @param definition {@link AlgNode} used to create Views * @param algCollation relCollation used for materialized view * @param underlyingTables all tables and columns used within the view @@ -80,7 +78,7 @@ public interface LogicalRelationalCatalog extends LogicalCatalog { * @param ordered if materialized view is ordered or not * @return id of the inserted materialized view */ - LogicalMaterializedView addMaterializedView( String name, long namespaceId, EntityType entityType, AlgNode definition, AlgCollation algCollation, Map> underlyingTables, AlgDataType fieldList, MaterializedCriteria materializedCriteria, String query, QueryLanguage language, boolean ordered ); + LogicalMaterializedView addMaterializedView( String name, long namespaceId, AlgNode definition, AlgCollation algCollation, Map> underlyingTables, AlgDataType fieldList, MaterializedCriteria materializedCriteria, String query, QueryLanguage language, boolean ordered ); /** * Renames a table diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/LogicalMaterializedView.java b/core/src/main/java/org/polypheny/db/catalog/entity/LogicalMaterializedView.java index f4cf5a0703..2cedf1ac29 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/LogicalMaterializedView.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/LogicalMaterializedView.java @@ -17,7 +17,6 @@ package org.polypheny.db.catalog.entity; import com.google.common.collect.ImmutableList; -import com.google.common.collect.ImmutableMap; import io.activej.serializer.annotations.Deserialize; import io.activej.serializer.annotations.Serialize; import java.util.HashMap; @@ -40,12 +39,6 @@ public class LogicalMaterializedView extends LogicalView { private static final long serialVersionUID = 4728996184367206274L; - @Serialize - public String language; - @Serialize - public AlgCollation algCollation; - @Serialize - public String query; @Serialize public MaterializedCriteria materializedCriteria; @Serialize @@ -56,13 +49,12 @@ public LogicalMaterializedView( @Deserialize("id") long id, @Deserialize("name") String name, @Deserialize("namespaceId") long namespaceId, - @Deserialize("id") EntityType entityType, @Deserialize("entityType") String query, @Deserialize("primaryKey") Long primaryKey, @Deserialize("algCollation") AlgCollation algCollation, @Deserialize("connectedViews") List connectedViews, @Deserialize("underlyingTables") Map> underlyingTables, - @Deserialize("language") String language, + @Deserialize("language") QueryLanguage language, @Deserialize("materializedCriteria") MaterializedCriteria materializedCriteria, @Deserialize("ordered") boolean ordered ) { @@ -70,12 +62,11 @@ public LogicalMaterializedView( id, name, namespaceId, - entityType, + EntityType.MATERIALIZED_VIEW, query, - primaryKey, algCollation, - ImmutableMap.copyOf( underlyingTables ), - ImmutableList.copyOf( connectedViews ), + underlyingTables, + connectedViews, language ); Map> map = new HashMap<>(); @@ -84,9 +75,6 @@ public LogicalMaterializedView( throw new IllegalStateException( "Duplicate key" ); } } - this.query = query; - this.algCollation = algCollation; - this.language = language; this.materializedCriteria = materializedCriteria; this.ordered = ordered; } @@ -98,8 +86,4 @@ public AlgNode getDefinition() { } - public QueryLanguage getLanguage() { - return QueryLanguage.from( language ); - } - } diff --git 
a/core/src/main/java/org/polypheny/db/catalog/entity/LogicalView.java b/core/src/main/java/org/polypheny/db/catalog/entity/LogicalView.java index 32ecb92695..031cc4e8cc 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/LogicalView.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/LogicalView.java @@ -22,6 +22,7 @@ import io.activej.serializer.annotations.Deserialize; import io.activej.serializer.annotations.Serialize; import java.util.List; +import java.util.Map; import lombok.EqualsAndHashCode; import lombok.Value; import lombok.experimental.NonFinal; @@ -50,7 +51,7 @@ public class LogicalView extends LogicalTable { @Serialize public ImmutableMap> underlyingTables; @Serialize - public String language; + public QueryLanguage language; @Serialize public AlgCollation algCollation; @Serialize @@ -63,31 +64,25 @@ public LogicalView( @Deserialize("namespaceId") long namespaceId, @Deserialize("entityType") EntityType entityType, @Deserialize("query") String query, - @Deserialize("primaryKey") Long primaryKey, @Deserialize("algCollation") AlgCollation algCollation, - @Deserialize("underlyingTables") ImmutableMap> underlyingTables, - @Deserialize("connectedViews") ImmutableList connectedViews, - @Deserialize("language") String language ) { + @Deserialize("underlyingTables") Map> underlyingTables, + @Deserialize("connectedViews") List connectedViews, + @Deserialize("language") QueryLanguage language ) { super( id, name, namespaceId, entityType, - primaryKey, + null, false, - connectedViews ); + ImmutableList.copyOf( connectedViews ) ); this.query = query; this.algCollation = algCollation; - this.underlyingTables = underlyingTables; + this.underlyingTables = ImmutableMap.copyOf( underlyingTables ); this.language = language; } - public QueryLanguage getLanguage() { - return QueryLanguage.from( language ); - } - - public AlgNode prepareView( AlgOptCluster cluster ) { AlgNode viewLogicalRoot = getDefinition(); prepareView( viewLogicalRoot, cluster ); diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/AllocSnapshot.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/AllocSnapshot.java index 05be4cce56..5ee2f780ce 100644 --- a/core/src/main/java/org/polypheny/db/catalog/snapshot/AllocSnapshot.java +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/AllocSnapshot.java @@ -44,7 +44,7 @@ public interface AllocSnapshot { List getAllocationsOnAdapter( long id ); - AllocationEntity getAllocEntity( long id ); + AllocationEntity getAllocation( long id ); /** * Gets a collective list of column placements per column on an adapter. @@ -54,7 +54,7 @@ public interface AllocSnapshot { * @param columnId The id of the column * @return The specific column placement */ - AllocationColumn getColumnPlacement( long adapterId, long columnId ); + AllocationColumn getColumn( long adapterId, long columnId ); /** * Checks if there is a column with the specified name in the specified table. 
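
A minimal before/after sketch of the two accessor renames above (getAllocEntity to getAllocation, getColumnPlacement to getColumn) — an illustration, not part of the diff; catalog, allocationId, adapterId and columnId are assumed to be in scope, as at the DdlManagerImpl call sites later in this patch:

    // Before this patch:
    // AllocationEntity entity = catalog.getSnapshot().alloc().getAllocEntity( allocationId );
    // AllocationColumn placement = catalog.getSnapshot().alloc().getColumnPlacement( adapterId, columnId );

    // After this patch (same semantics, shorter names):
    AllocationEntity entity = catalog.getSnapshot().alloc().getAllocation( allocationId );
    AllocationColumn placement = catalog.getSnapshot().alloc().getColumn( adapterId, columnId );
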
@@ -371,7 +371,7 @@ public interface AllocSnapshot { boolean checkIfExistsPartitionPlacement( long adapterId, long partitionId ); - List getAllocationsFromLogical( long logicalId ); + List getFromLogical( long logicalId ); boolean isPartitioned( long id ); diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/AllocSnapshotImpl.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/AllocSnapshotImpl.java index d6e65e1442..e62e4a9c45 100644 --- a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/AllocSnapshotImpl.java +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/AllocSnapshotImpl.java @@ -246,13 +246,13 @@ public List getAllocationsOnAdapter( long id ) { @Override - public AllocationEntity getAllocEntity( long id ) { + public AllocationEntity getAllocation( long id ) { return allocs.get( id ); } @Override - public AllocationColumn getColumnPlacement( long adapterId, long columnId ) { + public AllocationColumn getColumn( long adapterId, long columnId ) { return adapterColumnPlacement.get( Pair.of( adapterId, columnId ) ); } @@ -480,7 +480,7 @@ public boolean checkIfExistsPartitionPlacement( long adapterId, long partitionId @Override - public List getAllocationsFromLogical( long logicalId ) { + public List getFromLogical( long logicalId ) { return logicalAllocs.get( logicalId ); } diff --git a/core/src/main/java/org/polypheny/db/monitoring/events/MonitoringType.java b/core/src/main/java/org/polypheny/db/monitoring/events/MonitoringType.java new file mode 100644 index 0000000000..932fc04b8a --- /dev/null +++ b/core/src/main/java/org/polypheny/db/monitoring/events/MonitoringType.java @@ -0,0 +1,25 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.db.monitoring.events; + +public enum MonitoringType { + INSERT, + TRUNCATE, + DROP_COLUMN, + DROP_TABLE, + DELETE; +} diff --git a/core/src/main/java/org/polypheny/db/monitoring/events/StatementEvent.java b/core/src/main/java/org/polypheny/db/monitoring/events/StatementEvent.java index 142b3babdd..3f6ab0871c 100644 --- a/core/src/main/java/org/polypheny/db/monitoring/events/StatementEvent.java +++ b/core/src/main/java/org/polypheny/db/monitoring/events/StatementEvent.java @@ -41,7 +41,7 @@ @Getter public abstract class StatementEvent extends BaseEvent { - protected String monitoringType; + protected MonitoringType monitoringType; protected AlgRoot routed; protected PolyImplementation result; protected Statement statement; diff --git a/core/src/main/java/org/polypheny/db/processing/DataMigrator.java b/core/src/main/java/org/polypheny/db/processing/DataMigrator.java index a0b54ec3ff..c4f4927ec7 100644 --- a/core/src/main/java/org/polypheny/db/processing/DataMigrator.java +++ b/core/src/main/java/org/polypheny/db/processing/DataMigrator.java @@ -76,10 +76,10 @@ void copyPartitionData( List sourcePartitionIds, List targetPartitionIds ); - AlgRoot buildInsertStatement( Statement statement, List to, long partitionId ); + AlgRoot buildInsertStatement( Statement statement, List to, long allocId ); //is used within copyData - void executeQuery( List columns, AlgRoot sourceRel, Statement sourceStatement, Statement targetStatement, AlgRoot targetRel, boolean isMaterializedView, boolean doesSubstituteOrderBy ); + void executeQuery( List columns, AlgRoot sourceRel, Statement sourceStatement, Statement targetStatement, AlgRoot targetRel, boolean isMaterializedView, boolean doesSubstituteOrderBy ); AlgRoot buildDeleteStatement( Statement statement, List to, long partitionId ); diff --git a/core/src/main/java/org/polypheny/db/view/MaterializedViewManager.java b/core/src/main/java/org/polypheny/db/view/MaterializedViewManager.java index 22a5892b19..fce0ad185b 100644 --- a/core/src/main/java/org/polypheny/db/view/MaterializedViewManager.java +++ b/core/src/main/java/org/polypheny/db/view/MaterializedViewManager.java @@ -18,7 +18,6 @@ import java.util.ArrayList; import java.util.List; -import java.util.Map; import lombok.Getter; import org.polypheny.db.adapter.DataStore; import org.polypheny.db.algebra.AlgNode; @@ -28,7 +27,8 @@ import org.polypheny.db.algebra.logical.relational.LogicalRelModify; import org.polypheny.db.catalog.entity.LogicalMaterializedView; import org.polypheny.db.catalog.entity.MaterializedCriteria; -import org.polypheny.db.catalog.entity.logical.LogicalColumn; +import org.polypheny.db.catalog.entity.allocation.AllocationEntity; +import org.polypheny.db.catalog.entity.physical.PhysicalEntity; import org.polypheny.db.transaction.PolyXid; import org.polypheny.db.transaction.Transaction; @@ -63,13 +63,12 @@ public static MaterializedViewManager getInstance() { public abstract void addData( Transaction transaction, List stores, - Map> addedColumns, AlgRoot algRoot, LogicalMaterializedView materializedView ); public abstract void addTables( Transaction transaction, List ids ); - public abstract void updateData( Transaction transaction, Long viewId ); + public abstract void updateData( Transaction transaction, long viewId ); public abstract void updateCommittedXid( PolyXid xid ); @@ -91,7 +90,13 @@ public static class TableUpdateVisitor extends AlgShuttleImpl { public AlgNode visit( LogicalRelModify modify ) { if ( modify.getOperation() != Modify.Operation.MERGE ) 
{ if ( modify.getEntity() != null ) { - ids.add( modify.getEntity().id ); + if ( modify.getEntity().unwrap( PhysicalEntity.class ) != null ) { + ids.add( modify.getEntity().unwrap( PhysicalEntity.class ).getLogicalId() ); + } else if ( modify.getEntity().unwrap( AllocationEntity.class ) != null ) { + ids.add( modify.getEntity().unwrap( AllocationEntity.class ).getLogicalId() ); + } else { + ids.add( modify.getEntity().id ); + } } } return super.visit( modify ); diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java index 81a6443159..5b02de9849 100644 --- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java @@ -33,7 +33,6 @@ import lombok.NonNull; import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.StringUtils; -import org.polypheny.db.StatisticsManager; import org.polypheny.db.adapter.Adapter; import org.polypheny.db.adapter.AdapterManager; import org.polypheny.db.adapter.DataSource; @@ -135,7 +134,7 @@ private void checkViewDependencies( LogicalTable catalogTable ) { for ( Long id : catalogTable.connectedViews ) { views.add( catalog.getSnapshot().rel().getTable( id ).name ); } - throw new PolyphenyDbException( "Cannot alter table because of underlying View " + views.stream().map( String::valueOf ).collect( Collectors.joining( (", ") ) ) ); + throw new GenericRuntimeException( "Cannot alter table because of underlying views: %s", views.stream().map( String::valueOf ).collect( Collectors.joining( ", " ) ) ); } } @@ -372,9 +371,6 @@ public void renameNamespace( String newName, String oldName ) { } LogicalNamespace logicalNamespace = catalog.getSnapshot().getNamespace( oldName ); catalog.renameNamespace( logicalNamespace.id, newName ); - - // Update Name in statistics - StatisticsManager.getInstance().updateSchemaName( logicalNamespace, newName ); } @@ -398,7 +394,7 @@ public void addColumnToSourceTable( LogicalTable catalogTable, String columnPhys throw new GenericRuntimeException( "The table has an unexpected number of placements!"
); } - long adapterId = catalog.getSnapshot().alloc().getAllocationsFromLogical( catalogTable.id ).get( 0 ).adapterId; + long adapterId = catalog.getSnapshot().alloc().getFromLogical( catalogTable.id ).get( 0 ).adapterId; DataSource dataSource = (DataSource) AdapterManager.getInstance().getAdapter( adapterId ); String physicalTableName = catalog.getSnapshot().physical().getPhysicalTable( catalogTable.id, adapterId ).name; @@ -888,7 +884,7 @@ public void addPrimaryKey( LogicalTable catalogTable, List columnNames, // Add new column placements // long pkColumnId = oldPk.columnIds.get( 0 ); // It is sufficient to check for one because all get replicated on all stores - List allocations = catalog.getSnapshot().alloc().getAllocationsFromLogical( catalogTable.id ); + List allocations = catalog.getSnapshot().alloc().getFromLogical( catalogTable.id ); for ( AllocationEntity allocation : allocations ) { Map allocColumns = allocation.unwrap( AllocationTable.class ).getColumns(); for ( long columnId : columnIds ) { @@ -966,7 +962,7 @@ public void dropColumn( LogicalTable catalogTable, String columnName, Statement } catalog.getAllocRel( catalogTable.namespaceId ).deleteColumn( dp.adapterId, dp.columnId, true ); }*/ - for ( AllocationEntity table : catalog.getSnapshot().alloc().getAllocationsFromLogical( catalogTable.id ) ) { + for ( AllocationEntity table : catalog.getSnapshot().alloc().getFromLogical( catalogTable.id ) ) { for ( AllocationColumn placement : catalog.getSnapshot().alloc().getColumns( table.id ) ) { if ( catalogTable.entityType == EntityType.ENTITY ) { AdapterManager.getInstance().getStore( table.adapterId ).dropColumn( statement.getPrepareContext(), placement ); @@ -1292,7 +1288,7 @@ public void modifyDataPlacement( LogicalTable catalogTable, List columnIds // Remove columns physically for ( long columnId : columnsToRemove ) { // Drop Column on store - storeInstance.dropColumn( statement.getPrepareContext(), catalog.getSnapshot().alloc().getColumnPlacement( storeInstance.getAdapterId(), columnId ) ); + storeInstance.dropColumn( statement.getPrepareContext(), catalog.getSnapshot().alloc().getColumn( storeInstance.getAdapterId(), columnId ) ); // Drop column placement AllocationEntity allocation = catalog.getSnapshot().alloc().getAllocation( storeInstance.getAdapterId(), catalogTable.id ); catalog.getAllocRel( catalogTable.namespaceId ).deleteColumn( allocation.id, columnId, true ); @@ -1357,7 +1353,7 @@ else if ( !partitionGroupNames.isEmpty() && partitionGroupIds.isEmpty() ) { for ( long cid : columnIds ) { if ( catalog.getSnapshot().alloc().checkIfExistsColumnPlacement( storeInstance.getAdapterId(), cid ) ) { - AllocationColumn placement = catalog.getSnapshot().alloc().getColumnPlacement( storeInstance.getAdapterId(), cid ); + AllocationColumn placement = catalog.getSnapshot().alloc().getColumn( storeInstance.getAdapterId(), cid ); if ( placement.placementType == PlacementType.AUTOMATIC ) { // Make placement manual catalog.getAllocRel( catalogTable.namespaceId ).updateColumnPlacementType( storeInstance.getAdapterId(), cid, PlacementType.MANUAL ); @@ -1502,7 +1498,7 @@ public void addColumnPlacement( LogicalTable catalogTable, String columnName, @N // Make sure that this store does not contain a placement of this column if ( catalog.getSnapshot().alloc().checkIfExistsColumnPlacement( storeInstance.getAdapterId(), logicalColumn.id ) ) { - AllocationColumn placement = catalog.getSnapshot().alloc().getColumnPlacement( storeInstance.getAdapterId(), logicalColumn.id ); + AllocationColumn 
placement = catalog.getSnapshot().alloc().getColumn( storeInstance.getAdapterId(), logicalColumn.id ); if ( placement.placementType == PlacementType.AUTOMATIC ) { // Make placement manual catalog.getAllocRel( catalogTable.namespaceId ).updateColumnPlacementType( @@ -1565,7 +1561,7 @@ public void dropColumnPlacement( LogicalTable catalogTable, String columnName, @ throw new GenericRuntimeException( "Cannot drop primary key" ); } // Drop Column on store - storeInstance.dropColumn( statement.getPrepareContext(), catalog.getSnapshot().alloc().getColumnPlacement( storeInstance.getAdapterId(), logicalColumn.id ) ); + storeInstance.dropColumn( statement.getPrepareContext(), catalog.getSnapshot().alloc().getColumn( storeInstance.getAdapterId(), logicalColumn.id ) ); AllocationEntity allocation = catalog.getSnapshot().alloc().getAllocation( storeInstance.getAdapterId(), catalogTable.id ); // Drop column placement catalog.getAllocRel( catalogTable.namespaceId ).deleteColumn( allocation.id, logicalColumn.id, false ); @@ -1595,8 +1591,6 @@ public void renameTable( LogicalTable catalogTable, String newTableName, Stateme catalog.getLogicalRel( catalogTable.namespaceId ).renameTable( catalogTable.id, newTableName ); - - // Update Name in statistics - StatisticsManager.getInstance().updateTableName( catalogTable, newTableName ); // Reset plan cache implementation cache & routing cache statement.getQueryProcessor().resetCaches(); @@ -1615,9 +1609,6 @@ public void renameColumn( LogicalTable catalogTable, String columnName, String n catalog.getLogicalRel( catalogTable.namespaceId ).renameColumn( logicalColumn.id, newColumnName ); - - // Update Name in statistics - StatisticsManager.getInstance().updateColumnName( logicalColumn, newColumnName ); - // Reset plan cache implementation cache & routing cache statement.getQueryProcessor().resetCaches(); } @@ -1646,14 +1637,14 @@ public void createView( String viewName, long namespaceId, AlgNode algNode, AlgC // Check that the underlying tables match the model of the query language (document -> mql, relational -> sql) underlyingTables.keySet().forEach( tableId -> checkModelLangCompatibility( language, namespaceId, tableId ) ); - long tableId = catalog.getLogicalRel( namespaceId ).addView( + LogicalView view = catalog.getLogicalRel( namespaceId ).addView( viewName, namespaceId, - EntityType.VIEW, false, algNode, algCollation, underlyingTables, + null, fieldList, query, language @@ -1662,7 +1653,7 @@ public void createView( String viewName, long namespaceId, AlgNode algNode, AlgC for ( FieldInformation column : columns ) { catalog.getLogicalRel( namespaceId ).addColumn( column.name, - tableId, + view.id, column.position, column.typeInformation.type, column.typeInformation.collectionType, @@ -1720,7 +1711,6 @@ public void createMaterializedView( String viewName, long namespaceId, AlgRoot a LogicalMaterializedView view = catalog.getLogicalRel( namespaceId ).addMaterializedView( viewName, namespaceId, - EntityType.MATERIALIZED_VIEW, algRoot.alg, algRoot.collation, underlying, @@ -1760,8 +1750,7 @@ public void createMaterializedView( String viewName, long namespaceId, AlgRoot a // Selected data from tables is added into the newly created materialized view MaterializedViewManager materializedManager = MaterializedViewManager.getInstance(); - log.warn( "add" ); - materializedManager.addData( statement.getTransaction(), stores, null, algRoot, view ); + materializedManager.addData( statement.getTransaction(), stores, algRoot, view ); } @@ -2882,7 +2871,7 @@ public void dropView( LogicalTable catalogView,
Statement statement ) { checkViewDependencies( catalogView ); catalog.getLogicalRel( catalogView.namespaceId ).flagTableForDeletion( catalogView.id, true ); - catalog.getLogicalRel( catalogView.namespaceId ).deleteViewDependencies( (LogicalView) catalogView ); + // catalog.getLogicalRel( catalogView.namespaceId ).deleteViewDependencies( (LogicalView) catalogView ); // Delete columns @@ -2908,8 +2897,7 @@ public void dropMaterializedView( LogicalTable materializedView, Statement state checkViewDependencies( materializedView ); catalog.getLogicalRel( materializedView.namespaceId ).flagTableForDeletion( materializedView.id, true ); - - catalog.getLogicalRel( materializedView.namespaceId ).deleteViewDependencies( (LogicalView) materializedView ); + // catalog.getLogicalRel( materializedView.namespaceId ).deleteViewDependencies( (LogicalView) materializedView ); dropTable( materializedView, statement ); @@ -3036,7 +3024,7 @@ public void dropTable( LogicalTable catalogTable, Statement statement ) { Snapshot snapshot = catalog.getSnapshot(); // delete all allocs and physicals - List allocations = snapshot.alloc().getAllocationsFromLogical( catalogTable.id ); + List allocations = snapshot.alloc().getFromLogical( catalogTable.id ); for ( AllocationEntity allocation : allocations ) { for ( PhysicalEntity physical : snapshot.physical().fromAlloc( allocation.id ) ) { catalog.getPhysical( catalogTable.namespaceId ).deleteEntity( physical.id ); diff --git a/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java b/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java index cf1787e37b..2a91758672 100644 --- a/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java +++ b/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java @@ -54,6 +54,7 @@ import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.catalog.entity.physical.PhysicalEntity; import org.polypheny.db.catalog.entity.physical.PhysicalTable; import org.polypheny.db.catalog.refactor.ModifiableEntity; import org.polypheny.db.catalog.snapshot.AllocSnapshot; @@ -175,16 +176,16 @@ public void copyData( Transaction transaction, CatalogAdapter store, List targetColumnPlacements = new LinkedList<>(); for ( LogicalColumn logicalColumn : columns ) { - targetColumnPlacements.add( Catalog.getInstance().getSnapshot().alloc().getColumnPlacement( store.id, logicalColumn.id ) ); + targetColumnPlacements.add( Catalog.getInstance().getSnapshot().alloc().getColumn( store.id, logicalColumn.id ) ); } - List selectColumnList = new LinkedList<>( columns ); + List selectedColumns = new LinkedList<>( columns ); // Add primary keys to select column list for ( long cid : primaryKey.columnIds ) { LogicalColumn logicalColumn = relSnapshot.getColumn( cid ); - if ( !selectColumnList.contains( logicalColumn ) ) { - selectColumnList.add( logicalColumn ); + if ( !selectedColumns.contains( logicalColumn ) ) { + selectedColumns.add( logicalColumn ); } } @@ -198,7 +199,7 @@ public void copyData( Transaction transaction, CatalogAdapter store, List selectColumnList, AlgRoot sourceAlg, Statement sourceStatement, Statement targetStatement, AlgRoot targetAlg, boolean isMaterializedView, boolean doesSubstituteOrderBy ) { + public void executeQuery( List selectedColumns, AlgRoot sourceAlg, Statement sourceStatement, Statement targetStatement, AlgRoot targetAlg, boolean 
isMaterializedView, boolean doesSubstituteOrderBy ) { try { PolyImplementation result; if ( isMaterializedView ) { @@ -247,20 +248,20 @@ public void executeQuery( List selectColumnList, AlgRoot sourceAl Iterator sourceIterator = enumerable.iterator(); Map resultColMapping = new HashMap<>(); - for ( LogicalColumn logicalColumn : selectColumnList ) { + for ( AllocationColumn column : selectedColumns ) { int i = 0; for ( AlgDataTypeField metaData : result.getRowType().getFieldList() ) { - if ( metaData.getName().equalsIgnoreCase( logicalColumn.name ) ) { - resultColMapping.put( logicalColumn.id, i ); + if ( metaData.getName().equalsIgnoreCase( column.getLogicalColumnName() ) ) { + resultColMapping.put( column.getColumnId(), i ); } i++; } } if ( isMaterializedView ) { - for ( LogicalColumn logicalColumn : selectColumnList ) { - if ( !resultColMapping.containsKey( logicalColumn.id ) ) { + for ( AllocationColumn column : selectedColumns ) { + if ( !resultColMapping.containsKey( column.getColumnId() ) ) { int i = resultColMapping.values().stream().mapToInt( v -> v ).max().orElseThrow( NoSuchElementException::new ); - resultColMapping.put( logicalColumn.id, i + 1 ); + resultColMapping.put( column.getColumnId(), i + 1 ); } } } @@ -294,13 +295,12 @@ public void executeQuery( List selectColumnList, AlgRoot sourceAl } else { fields = sourceAlg.validatedRowType.getFieldList(); } - int pos = 0; + for ( Map.Entry> v : values.entrySet() ) { targetStatement.getDataContext().addParameterValues( v.getKey(), fields.get( resultColMapping.get( v.getKey() ) ).getType(), v.getValue() ); - pos++; } Iterator iterator = targetStatement.getQueryProcessor() @@ -355,9 +355,9 @@ public AlgRoot buildDeleteStatement( Statement statement, List @Override - public AlgRoot buildInsertStatement( Statement statement, List to, long partitionId ) { - PhysicalTable physical = statement.getTransaction().getSnapshot().physical().getPhysicalTable( partitionId ); - ModifiableEntity modifiableTable = physical.unwrap( ModifiableEntity.class ); + public AlgRoot buildInsertStatement( Statement statement, List to, long allocId ) { + List physicals = statement.getTransaction().getSnapshot().physical().fromAlloc( allocId ); + ModifiableEntity modifiableTable = physicals.get( 0 ).unwrap( ModifiableEntity.class ); AlgOptCluster cluster = AlgOptCluster.create( statement.getQueryProcessor().getPlanner(), @@ -382,7 +382,7 @@ public AlgRoot buildInsertStatement( Statement statement, List AlgNode node = modifiableTable.toModificationAlg( cluster, cluster.traitSet(), - physical, + physicals.get( 0 ), builder.build(), Modify.Operation.INSERT, null, @@ -410,7 +410,7 @@ private AlgRoot buildUpdateStatement( Statement statement, List selectSourcePlacements( LogicalTable table, for ( LogicalColumn column : snapshot.rel().getColumns( table.id ) ) { if ( columnIds.contains( column.id ) ) { if ( snapshot.alloc().getDataPlacement( adapterIdWithMostPlacements, table.id ).columnPlacementsOnAdapter.contains( column.id ) ) { - placementList.add( snapshot.alloc().getColumnPlacement( adapterIdWithMostPlacements, column.id ) ); + placementList.add( snapshot.alloc().getColumn( adapterIdWithMostPlacements, column.id ) ); } else { for ( AllocationColumn placement : snapshot.alloc().getColumnPlacements( column.id ) ) { if ( placement.adapterId != excludingAdapterId ) { @@ -525,7 +525,7 @@ public void copySelectiveData( Transaction transaction, CatalogAdapter store, Lo // Check Lists List targetColumnPlacements = new LinkedList<>(); for ( LogicalColumn logicalColumn : 
columns ) { - targetColumnPlacements.add( snapshot.getColumnPlacement( store.id, logicalColumn.id ) ); + targetColumnPlacements.add( snapshot.getColumn( store.id, logicalColumn.id ) ); } List selectColumnList = new LinkedList<>( columns ); @@ -627,7 +627,7 @@ public void copyPartitionData( Transaction transaction, CatalogAdapter store, Lo // Check Lists List targetColumnPlacements = new LinkedList<>(); for ( LogicalColumn logicalColumn : columns ) { - targetColumnPlacements.add( snapshot.alloc().getColumnPlacement( store.id, logicalColumn.id ) ); + targetColumnPlacements.add( snapshot.alloc().getColumn( store.id, logicalColumn.id ) ); } List selectColumnList = new LinkedList<>( columns ); diff --git a/dbms/src/main/java/org/polypheny/db/routing/UiRoutingPageUtil.java b/dbms/src/main/java/org/polypheny/db/routing/UiRoutingPageUtil.java index 74e3097a0f..851cd1614e 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/UiRoutingPageUtil.java +++ b/dbms/src/main/java/org/polypheny/db/routing/UiRoutingPageUtil.java @@ -103,7 +103,7 @@ private static void addSelectedAdapterTable( InformationManager queryAnalyzer, P CatalogPartitionGroup catalogPartitionGroup = snapshot.alloc().getPartitionGroup( catalogPartition.partitionGroupId ); v.forEach( p -> { - AllocationColumn allocationColumn = snapshot.alloc().getColumnPlacement( p.left, p.right ); + AllocationColumn allocationColumn = snapshot.alloc().getColumn( p.left, p.right ); CatalogPartitionPlacement catalogPartitionPlacement = snapshot.alloc().getPartitionPlacement( p.left, k ); LogicalColumn logicalColumn = snapshot.rel().getColumn( allocationColumn.columnId ); table.addRow( diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/AbstractDqlRouter.java b/dbms/src/main/java/org/polypheny/db/routing/routers/AbstractDqlRouter.java index 4990ddf334..c99ae15057 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/AbstractDqlRouter.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/AbstractDqlRouter.java @@ -229,7 +229,7 @@ protected List buildSelect( AlgNode node, List 1 ) { + if ( Catalog.snapshot().alloc().getFromLogical( catalogTable.id ).size() > 1 ) { return handleVerticalPartitioningOrReplication( node, catalogTable, statement, logicalTable, builders, cluster, queryInformation ); } return handleNonePartitioning( node, catalogTable, statement, builders, cluster, queryInformation ); diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java b/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java index 2d41f70e15..a2b2b44849 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java @@ -126,7 +126,7 @@ protected static Map> selectPlacement( LogicalTable } return new HashMap<>() {{ - List allocs = Catalog.snapshot().alloc().getAllocationsFromLogical( table.id ); + List allocs = Catalog.snapshot().alloc().getFromLogical( table.id ); put( allocs.get( 0 ).id, placementList ); }}; } diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/CachedPlanRouter.java b/dbms/src/main/java/org/polypheny/db/routing/routers/CachedPlanRouter.java index 0f2dc9a1ee..efe9db5b36 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/CachedPlanRouter.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/CachedPlanRouter.java @@ -69,7 +69,7 @@ private RoutedAlgBuilder buildCachedSelect( AlgNode node, RoutedAlgBuilder build for ( long partition : partitionIds ) { if ( 
cachedPlan.physicalPlacementsOfPartitions.get( partition ) != null ) { List colPlacements = cachedPlan.physicalPlacementsOfPartitions.get( partition ).stream() - .map( placementInfo -> catalog.getSnapshot().alloc().getColumnPlacement( placementInfo.left, placementInfo.right ) ) + .map( placementInfo -> catalog.getSnapshot().alloc().getColumn( placementInfo.left, placementInfo.right ) ) .collect( Collectors.toList() ); placement.put( partition, colPlacements ); } diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java b/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java index d92cae4288..78275076b6 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java @@ -150,7 +150,7 @@ public AlgNode routeDml( LogicalRelModify modify, Statement statement ) { List pkColumnIds = snapshot.rel().getPrimaryKey( pkid ).columnIds; LogicalColumn pkColumn = snapshot.rel().getColumn( pkColumnIds.get( 0 ) ); - List allocs = snapshot.alloc().getAllocationsFromLogical( catalogTable.id ); + List allocs = snapshot.alloc().getFromLogical( catalogTable.id ); PhysicalTable physical = snapshot.physical().fromAlloc( allocs.get( 0 ).id ).get( 0 ).unwrap( PhysicalTable.class ); ModifiableEntity modifiableTable = physical.unwrap( ModifiableEntity.class ); @@ -210,7 +210,7 @@ public AlgNode routeDmlOld( LogicalRelModify modify, Statement statement ) { // Essentially gets a list of all stores where this table resides List pkPlacements = snapshot.alloc().getColumnPlacements( pkColumn.id ); - List allocs = snapshot.alloc().getAllocationsFromLogical( catalogTable.id );//.getPartitionProperty( catalogTable.id ); + List allocs = snapshot.alloc().getFromLogical( catalogTable.id );//.getPartitionProperty( catalogTable.id ); if ( !allocs.isEmpty() && log.isDebugEnabled() ) { log.debug( "\nListing all relevant stores for table: '{}' and all partitions: {}", catalogTable.name, -1 );//property.partitionGroupIds ); for ( AllocationColumn dataPlacement : pkPlacements ) { diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/FullPlacementQueryRouter.java b/dbms/src/main/java/org/polypheny/db/routing/routers/FullPlacementQueryRouter.java index 4e1e013e4a..67ebd6794c 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/FullPlacementQueryRouter.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/FullPlacementQueryRouter.java @@ -113,7 +113,7 @@ protected List handleNonePartitioning( PartitionProperty property = snapshot.alloc().getPartitionProperty( catalogTable.id );*/ //currentPlacementDistribution.put( property.partitionIds.get( 0 ), placementCombination ); - List allocationEntities = Catalog.snapshot().alloc().getAllocationsFromLogical( catalogTable.id ); + List allocationEntities = Catalog.snapshot().alloc().getFromLogical( catalogTable.id ); for ( RoutedAlgBuilder builder : builders ) { RoutedAlgBuilder newBuilder = RoutedAlgBuilder.createCopy( statement, cluster, builder ); @@ -158,7 +158,7 @@ protected Set> selectPlacement( LogicalTable catalogTable final Set> result = new HashSet<>(); for ( long adapterId : adapters ) { List placements = usedColumns.stream() - .map( colId -> Catalog.snapshot().alloc().getColumnPlacement( adapterId, colId ) ) + .map( colId -> Catalog.snapshot().alloc().getColumn( adapterId, colId ) ) .collect( Collectors.toList() ); if ( !placements.isEmpty() ) { diff --git 
a/dbms/src/main/java/org/polypheny/db/routing/routers/SimpleRouter.java b/dbms/src/main/java/org/polypheny/db/routing/routers/SimpleRouter.java index 9086f0d6ea..a6da7227e7 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/SimpleRouter.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/SimpleRouter.java @@ -57,7 +57,7 @@ protected List handleVerticalPartitioningOrReplication( AlgNod protected List handleNonePartitioning( AlgNode node, LogicalTable catalogTable, Statement statement, List builders, AlgOptCluster cluster, LogicalQueryInformation queryInformation ) { // Get placements and convert into placement distribution // final Map> placements = selectPlacement( catalogTable ); - List entities = Catalog.snapshot().alloc().getAllocationsFromLogical( catalogTable.id ); + List entities = Catalog.snapshot().alloc().getFromLogical( catalogTable.id ); // Only one builder available // builders.get( 0 ).addPhysicalInfo( placements ); diff --git a/dbms/src/main/java/org/polypheny/db/routing/strategies/CreateSinglePlacementStrategy.java b/dbms/src/main/java/org/polypheny/db/routing/strategies/CreateSinglePlacementStrategy.java index 07c1a6c526..e76a4d4fc9 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/strategies/CreateSinglePlacementStrategy.java +++ b/dbms/src/main/java/org/polypheny/db/routing/strategies/CreateSinglePlacementStrategy.java @@ -32,7 +32,7 @@ public class CreateSinglePlacementStrategy implements CreatePlacementStrategy { @Override public List getDataStoresForNewColumn( LogicalColumn addedColumn ) { Snapshot snapshot = Catalog.getInstance().getSnapshot(); - List allocations = snapshot.alloc().getAllocationsFromLogical( addedColumn.tableId ); + List allocations = snapshot.alloc().getFromLogical( addedColumn.tableId ); return ImmutableList.of( AdapterManager.getInstance().getStore( allocations.get( 0 ).adapterId ) ); } diff --git a/dbms/src/main/java/org/polypheny/db/transaction/EntityAccessMap.java b/dbms/src/main/java/org/polypheny/db/transaction/EntityAccessMap.java index 64167164d3..a560eab312 100644 --- a/dbms/src/main/java/org/polypheny/db/transaction/EntityAccessMap.java +++ b/dbms/src/main/java/org/polypheny/db/transaction/EntityAccessMap.java @@ -269,7 +269,7 @@ public void visit( AlgNode p, int ordinal, AlgNode parent ) { relevantPartitions = accessedPartitions.get( p.getId() ); } else if ( table != null ) { if ( table.namespaceType == NamespaceType.RELATIONAL ) { - List allocations = Catalog.getInstance().getSnapshot().alloc().getAllocationsFromLogical( table.id ); + List allocations = Catalog.getInstance().getSnapshot().alloc().getFromLogical( table.id ); relevantPartitions = allocations.stream().map( a -> a.id ).collect( Collectors.toList() ); } else { relevantPartitions = List.of(); diff --git a/dbms/src/main/java/org/polypheny/db/view/MaterializedViewManagerImpl.java b/dbms/src/main/java/org/polypheny/db/view/MaterializedViewManagerImpl.java index b4910fefaa..521485a54b 100644 --- a/dbms/src/main/java/org/polypheny/db/view/MaterializedViewManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/view/MaterializedViewManagerImpl.java @@ -29,7 +29,6 @@ import java.util.Map.Entry; import java.util.Objects; import java.util.concurrent.ConcurrentHashMap; -import java.util.stream.Collectors; import lombok.Getter; import lombok.extern.slf4j.Slf4j; import org.polypheny.db.adapter.DataStore; @@ -44,13 +43,14 @@ import org.polypheny.db.algebra.logical.relational.LogicalRelViewScan; import org.polypheny.db.catalog.Catalog; import 
org.polypheny.db.catalog.entity.AllocationColumn; -import org.polypheny.db.catalog.entity.CatalogDataPlacement; import org.polypheny.db.catalog.entity.LogicalMaterializedView; -import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.MaterializedCriteria; import org.polypheny.db.catalog.entity.MaterializedCriteria.CriteriaType; +import org.polypheny.db.catalog.entity.allocation.AllocationEntity; +import org.polypheny.db.catalog.entity.allocation.AllocationTable; import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.catalog.exceptions.GenericRuntimeException; import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.config.RuntimeConfig; @@ -90,7 +90,6 @@ public class MaterializedViewManagerImpl extends MaterializedViewManager { private final List intervalToUpdate; final Map updateCandidates; - private Snapshot snapshot; public MaterializedViewManagerImpl( TransactionManager transactionManager ) { @@ -177,15 +176,16 @@ public synchronized void addMaterializedInfo( Long materializedId, MaterializedC */ @Override public void addTables( Transaction transaction, List tableIds ) { - if ( tableIds.size() > 1 ) { - snapshot = Catalog.getInstance().getSnapshot(); - LogicalNamespace namespace = snapshot.getNamespace( tableIds.get( 0 ) ); - LogicalTable catalogTable = snapshot.rel().getTable( tableIds.get( 1 ) ); - long id = catalogTable.id; - if ( !catalogTable.getConnectedViews().isEmpty() ) { - updateCandidates.put( transaction.getXid(), id ); - } + if ( tableIds.isEmpty() ) { + return; } + + LogicalTable catalogTable = Catalog.snapshot().rel().getTable( tableIds.get( 0 ) ); + long id = catalogTable.id; + /*if ( !catalogTable.getConnectedViews().isEmpty() ) { + updateCandidates.put( transaction.getXid(), id ); + }*/ + } @@ -208,7 +208,7 @@ public void updateCommittedXid( PolyXid xid ) { * * @param potentialInteresting id of underlying table that was updated */ - public void materializedUpdate( Long potentialInteresting ) { + public void materializedUpdate( long potentialInteresting ) { Snapshot snapshot = Catalog.getInstance().getSnapshot(); LogicalTable catalogTable = snapshot.getNamespaces( null ).stream().map( n -> snapshot.rel().getTable( potentialInteresting ) ).filter( Objects::nonNull ).findFirst().orElse( null ); List connectedViews = catalogTable.getConnectedViews(); @@ -301,23 +301,26 @@ public void prepareToUpdate( Long materializedId ) { * Is used if a materialized view is created in order to add the data from the underlying tables to the materialized view */ @Override - public void addData( Transaction transaction, List stores, Map> columns, AlgRoot algRoot, LogicalMaterializedView materializedView ) { + public void addData( Transaction transaction, List stores, AlgRoot algRoot, LogicalMaterializedView materializedView ) { addMaterializedInfo( materializedView.id, materializedView.getMaterializedCriteria() ); - List columnPlacements = new LinkedList<>(); DataMigrator dataMigrator = transaction.getDataMigrator(); - List dataPlacements = transaction.getSnapshot().alloc().getDataPlacements( materializedView.id ); - for ( CatalogDataPlacement placement : dataPlacements ) { + for ( AllocationEntity allocation : transaction.getSnapshot().alloc().getFromLogical( materializedView.id ) ) { Statement sourceStatement = transaction.createStatement(); prepareSourceRel( sourceStatement, 
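/* the remaining arguments are the view's collation and its defining plan */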
materializedView.getAlgCollation(), algRoot.alg ); Statement targetStatement = transaction.createStatement(); - columnPlacements.clear(); - columns.get( placement.adapterId ).forEach( column -> columnPlacements.add( snapshot.alloc().getColumnPlacement( placement.adapterId, column.id ) ) ); - // If partitions should be allowed for materialized views this needs to be changed that all partitions are considered - AlgRoot targetRel = dataMigrator.buildInsertStatement( targetStatement, columnPlacements, snapshot.alloc().getPartitionsOnDataPlacement( placement.adapterId, materializedView.id ).get( 0 ) ); + if ( allocation.unwrap( AllocationTable.class ) != null ) { + List allocColumns = Catalog.snapshot().alloc().getColumns( allocation.id ); - dataMigrator.executeQuery( columns.get( placement.adapterId ), algRoot, sourceStatement, targetStatement, targetRel, true, materializedView.isOrdered() ); + //columns.get( placement.adapterId ).forEach( column -> columnPlacements.add( snapshot.alloc().getColumnPlacement( placement.adapterId, column.id ) ) ); + // If partitions should be allowed for materialized views, this needs to be changed so that all partitions are considered + AlgRoot targetRel = dataMigrator.buildInsertStatement( targetStatement, allocColumns, allocation.id ); + + dataMigrator.executeQuery( allocColumns, algRoot, sourceStatement, targetStatement, targetRel, true, materializedView.isOrdered() ); + } else { + throw new GenericRuntimeException( "MaterializedViews are only supported for relational entities at the moment." ); + } } } @@ -329,19 +332,19 @@ public void addData( Transaction transaction, List stores, Map columnPlacements = new LinkedList<>(); Map> columns = new HashMap<>(); - List ids = new ArrayList<>(); - if ( snapshot.getLogicalEntity( materializedId ) != null && materializedInfo.containsKey( materializedId ) ) { - LogicalMaterializedView catalogMaterializedView = snapshot.getLogicalEntity( materializedId ).unwrap( LogicalMaterializedView.class ); - List dataPlacements = snapshot.alloc().getDataPlacements( catalogMaterializedView.id ); - for ( CatalogDataPlacement placement : dataPlacements ) { - ids.add( placement.adapterId ); + //List ids = new ArrayList<>(); + if ( transaction.getSnapshot().getLogicalEntity( materializedId ) != null && materializedInfo.containsKey( materializedId ) ) { + LogicalMaterializedView catalogMaterializedView = transaction.getSnapshot().getLogicalEntity( materializedId ).unwrap( LogicalMaterializedView.class ); + /*List dataPlacements = snapshot.alloc().getDataPlacements( catalogMaterializedView.id ); + for ( AllocationEntity allocation : ) { + ids.add( allocation.adapterId ); List logicalColumns = new ArrayList<>(); int localAdapterIndex = dataPlacements.indexOf( placement ); @@ -349,19 +352,17 @@ public void updateData( Transaction transaction, Long materializedId ) { .columnPlacementsOnAdapter.forEach( col -> logicalColumns.add( snapshot.rel().getColumn( col ) ) ); columns.put( placement.adapterId, logicalColumns ); - } - - AlgRoot targetRel; + }*/ - for ( long id : ids ) { + for ( AllocationEntity entity : transaction.getSnapshot().alloc().getFromLogical( materializedId ) ) { Statement sourceStatement = transaction.createStatement(); Statement deleteStatement = transaction.createStatement(); Statement insertStatement = transaction.createStatement(); prepareSourceRel( sourceStatement, catalogMaterializedView.getAlgCollation(), catalogMaterializedView.getDefinition() ); - columnPlacements.clear(); + // columnPlacements.clear(); - columns.get( id
).forEach( column -> columnPlacements.add( snapshot.alloc().getColumnPlacement( id, column.id ) ) ); + //columns.get( id ).forEach( column -> columnPlacements.add( snapshot.alloc().getColumn( id, column.id ) ) ); // Build {@link AlgNode} to build delete Statement from materialized view AlgBuilder deleteAlgBuilder = AlgBuilder.create( deleteStatement ); @@ -373,12 +374,12 @@ public void updateData( Transaction transaction, Long materializedId ) { Statement targetStatementDelete = transaction.createStatement(); // Delete all data - targetRel = dataMigrator.buildDeleteStatement( + AlgRoot targetRel = dataMigrator.buildDeleteStatement( targetStatementDelete, columnPlacements, - snapshot.alloc().getPartitionsOnDataPlacement( id, catalogMaterializedView.id ).get( 0 ) ); + entity.id ); dataMigrator.executeQuery( - columns.get( id ), + transaction.getSnapshot().alloc().getColumns( entity.id ), AlgRoot.of( deleteRel, Kind.SELECT ), deleteStatement, targetStatementDelete, @@ -392,9 +393,9 @@ public void updateData( Transaction transaction, Long materializedId ) { targetRel = dataMigrator.buildInsertStatement( targetStatementInsert, columnPlacements, - snapshot.alloc().getPartitionsOnDataPlacement( id, catalogMaterializedView.id ).get( 0 ) ); + entity.id ); dataMigrator.executeQuery( - columns.get( id ), + transaction.getSnapshot().alloc().getColumns( entity.id ), AlgRoot.of( insertRel, Kind.SELECT ), sourceStatement, targetStatementInsert, diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/events/metrics/DdlDataPoint.java b/monitoring/src/main/java/org/polypheny/db/monitoring/events/metrics/DdlDataPoint.java index 3eeb741e92..67a8457f85 100644 --- a/monitoring/src/main/java/org/polypheny/db/monitoring/events/metrics/DdlDataPoint.java +++ b/monitoring/src/main/java/org/polypheny/db/monitoring/events/metrics/DdlDataPoint.java @@ -19,30 +19,24 @@ import java.io.Serializable; import java.sql.Timestamp; import java.util.UUID; -import lombok.AccessLevel; -import lombok.AllArgsConstructor; import lombok.Builder; -import lombok.Getter; -import lombok.NoArgsConstructor; -import lombok.Setter; +import lombok.Value; import org.polypheny.db.monitoring.events.MonitoringDataPoint; +import org.polypheny.db.monitoring.events.MonitoringType; -@Getter -@Setter @Builder -@NoArgsConstructor(access = AccessLevel.PUBLIC) -@AllArgsConstructor(access = AccessLevel.MODULE) +@Value public class DdlDataPoint implements MonitoringDataPoint, Serializable { private static final long serialVersionUID = 268576586444646401L; - private UUID Id; - private Timestamp recordedTimestamp; + UUID Id; + Timestamp recordedTimestamp; protected boolean isCommitted; - private long tableId; - private String monitoringType; - private long schemaId; - private long columnId; + long tableId; + MonitoringType monitoringType; + long schemaId; + long columnId; @Override @@ -62,4 +56,5 @@ public DataPointType getDataPointType() { return DataPointType.DDL; } + } diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/events/metrics/DmlDataPoint.java b/monitoring/src/main/java/org/polypheny/db/monitoring/events/metrics/DmlDataPoint.java index ec9ed34272..192b684401 100644 --- a/monitoring/src/main/java/org/polypheny/db/monitoring/events/metrics/DmlDataPoint.java +++ b/monitoring/src/main/java/org/polypheny/db/monitoring/events/metrics/DmlDataPoint.java @@ -31,6 +31,7 @@ import lombok.NoArgsConstructor; import lombok.Setter; import org.polypheny.db.monitoring.events.MonitoringDataPoint; +import 
org.polypheny.db.monitoring.events.MonitoringType; @Getter @@ -47,7 +48,7 @@ public class DmlDataPoint implements MonitoringDataPoint, Serializable { private final Map dataElements = new HashMap<>(); private UUID Id; private Timestamp recordedTimestamp; - private String monitoringType; + private MonitoringType monitoringType; private String description; private long executionTime; private boolean isSubQuery; diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/events/metrics/QueryDataPointImpl.java b/monitoring/src/main/java/org/polypheny/db/monitoring/events/metrics/QueryDataPointImpl.java index 2712e39b17..849c4eec58 100644 --- a/monitoring/src/main/java/org/polypheny/db/monitoring/events/metrics/QueryDataPointImpl.java +++ b/monitoring/src/main/java/org/polypheny/db/monitoring/events/metrics/QueryDataPointImpl.java @@ -30,6 +30,7 @@ import lombok.Getter; import lombok.NoArgsConstructor; import lombok.Setter; +import org.polypheny.db.monitoring.events.MonitoringType; import org.polypheny.db.monitoring.events.QueryDataPoint; @@ -47,7 +48,7 @@ public class QueryDataPointImpl implements QueryDataPoint, Serializable { private final HashMap dataElements = new HashMap<>(); private UUID Id; private Timestamp recordedTimestamp; - private String monitoringType; + private MonitoringType monitoringType; private String description; private long executionTime; private boolean isSubQuery; diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/NumericalStatisticColumn.java b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/NumericalStatisticColumn.java index 08adff0bca..fe21ee4652 100644 --- a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/NumericalStatisticColumn.java +++ b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/NumericalStatisticColumn.java @@ -51,7 +51,7 @@ public class NumericalStatisticColumn extends StatisticColumn { public NumericalStatisticColumn( QueryResult column ) { - super( column.getColumn().namespaceId, column.getEntity().id, column.getColumn().id, column.getColumn().type, StatisticType.NUMERICAL ); + super( column.getColumn().id, column.getColumn().type ); } diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticColumn.java b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticColumn.java index 28fe5b6eb0..cb6432ed10 100644 --- a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticColumn.java +++ b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticColumn.java @@ -23,45 +23,20 @@ import java.util.List; import lombok.Getter; import lombok.Setter; -import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.snapshot.LogicalRelSnapshot; import org.polypheny.db.type.PolyType; /** * Stores the available statistic data of a specific column */ -public abstract class StatisticColumn { - - @Expose - @Getter - private String schema; - - @Expose - @Getter - private String table; - - @Expose - @Getter - private String column; - - @Getter - private final long schemaId; - @Getter - private final long tableId; +public abstract class StatisticColumn { - @Getter - private final long columnId; - @Expose - private final String qualifiedColumnName; + public final long columnId; - @Getter - private final PolyType type; + public final PolyType type; - @Expose - private final StatisticType columnType; @Expose @Setter @@ -79,32 +54,12 @@ public abstract class StatisticColumn { protected Integer count; - public 
StatisticColumn( long schemaId, long tableId, long columnId, PolyType type, StatisticType columnType ) { - this.schemaId = schemaId; - this.tableId = tableId; + public StatisticColumn( long columnId, PolyType type ) { this.columnId = columnId; this.type = type; - this.columnType = columnType; - - LogicalRelSnapshot snapshot = Catalog.getInstance().getSnapshot().rel(); - if ( snapshot.getTable( tableId ) != null ) { - this.schema = Catalog.getInstance().getSnapshot().getNamespace( schemaId ).name; - this.table = snapshot.getTable( tableId ).name; - this.column = snapshot.getColumn( columnId ).name; - } - this.qualifiedColumnName = String.format( "%s.%s.%s", this.schema, this.table, this.column ); - } - - - public String getQualifiedColumnName() { - return this.schema + "." + this.table + "." + this.column; } - public String getQualifiedTableName() { - return this.schema + "." + this.table; - } - public abstract void insert( T val ); @@ -113,21 +68,6 @@ public String getQualifiedTableName() { public abstract String toString(); - public void updateColumnName( String columnName ) { - this.column = columnName; - } - - - public void updateTableName( String tableName ) { - this.table = tableName; - } - - - public void updateSchemaName( String schemaName ) { - this.schema = schemaName; - } - - public enum StatisticType { @SerializedName("temporal") TEMPORAL, diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticRepository.java b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticRepository.java index dc048eb3b1..e9e7e5d495 100644 --- a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticRepository.java +++ b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticRepository.java @@ -24,6 +24,7 @@ import org.polypheny.db.catalog.Catalog; import org.polypheny.db.monitoring.events.MonitoringDataPoint; import org.polypheny.db.monitoring.events.MonitoringDataPoint.DataPointType; +import org.polypheny.db.monitoring.events.MonitoringType; import org.polypheny.db.monitoring.events.metrics.DdlDataPoint; import org.polypheny.db.monitoring.events.metrics.DmlDataPoint; import org.polypheny.db.monitoring.events.metrics.QueryDataPointImpl; @@ -54,7 +55,7 @@ public void dataPoint( MonitoringDataPoint dataPoint ) { private void updateDdlStatistics( DdlDataPoint dataPoint, StatisticsManager statisticsManager ) { - if ( dataPoint.getMonitoringType().equals( "TRUNCATE" ) ) { + if ( dataPoint.getMonitoringType() == MonitoringType.TRUNCATE ) { statisticsManager.updateRowCountPerTable( dataPoint.getTableId(), 0, @@ -110,40 +111,44 @@ private void updateQueryStatistics( QueryDataPointImpl dataPoint, StatisticsMana private void updateDmlStatistics( DmlDataPoint dataPoint, StatisticsManager statisticsManager ) { - if ( dataPoint.getChangedValues() != null ) { - Set values = new HashSet<>( dataPoint.getAvailableColumnsWithTable().values() ); - boolean isOneTable = values.size() == 1; + if ( dataPoint.getChangedValues() == null ) { + return; + } - Catalog catalog = Catalog.getInstance(); - if ( isOneTable ) { - long tableId = values.stream().findFirst().get(); - statisticsManager.setTableCalls( tableId, dataPoint.getMonitoringType() ); + Set values = new HashSet<>( dataPoint.getAvailableColumnsWithTable().values() ); + boolean isOneTable = values.size() == 1; - if ( catalog.getSnapshot().getLogicalEntity( tableId ) != null ) { - if ( dataPoint.getMonitoringType().equals( "INSERT" ) ) { - int added = dataPoint.getRowCount(); - 
statisticsManager.tablesToUpdate( - tableId, - dataPoint.getChangedValues(), - dataPoint.getMonitoringType(), - catalog.getSnapshot().getLogicalEntity( tableId ).namespaceId ); - statisticsManager.updateRowCountPerTable( tableId, added, dataPoint.getMonitoringType() ); - } else if ( dataPoint.getMonitoringType().equals( "DELETE" ) ) { - int deleted = dataPoint.getRowCount(); - statisticsManager.updateRowCountPerTable( tableId, deleted, dataPoint.getMonitoringType() ); - // After a delete, it is not clear what exactly was deleted, so the statistics of the table are updated - statisticsManager.tablesToUpdate( tableId ); - } - } - } else { - for ( long id : values ) { - if ( catalog.getSnapshot().getLogicalEntity( id ) != null ) { - statisticsManager.setTableCalls( id, dataPoint.getMonitoringType() ); - } + Catalog catalog = Catalog.getInstance(); + if ( isOneTable ) { + long tableId = values.stream().findFirst().get(); + statisticsManager.setTableCalls( tableId, dataPoint.getMonitoringType() ); + if ( catalog.getSnapshot().getLogicalEntity( tableId ) == null ) { + return; + } + if ( dataPoint.getMonitoringType() == MonitoringType.INSERT ) { + int added = dataPoint.getRowCount(); + statisticsManager.tablesToUpdate( + tableId, + dataPoint.getChangedValues(), + dataPoint.getMonitoringType(), + catalog.getSnapshot().getLogicalEntity( tableId ).namespaceId ); + statisticsManager.updateRowCountPerTable( tableId, added, dataPoint.getMonitoringType() ); + } else if ( dataPoint.getMonitoringType() == MonitoringType.DELETE ) { + int deleted = dataPoint.getRowCount(); + statisticsManager.updateRowCountPerTable( tableId, deleted, dataPoint.getMonitoringType() ); + // After a delete, it is not clear what exactly was deleted, so the statistics of the table are updated + statisticsManager.tablesToUpdate( tableId ); + } + } else { + for ( long id : values ) { + if ( catalog.getSnapshot().getLogicalEntity( id ) != null ) { + statisticsManager.setTableCalls( id, dataPoint.getMonitoringType() ); } + } } + } } diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticsManagerImpl.java b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticsManagerImpl.java index cfa1d89f96..c1d49cd9c4 100644 --- a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticsManagerImpl.java +++ b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticsManagerImpl.java @@ -53,7 +53,6 @@ import org.polypheny.db.algebra.operators.OperatorName; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalEntity; import org.polypheny.db.catalog.entity.logical.LogicalTable; @@ -69,6 +68,7 @@ import org.polypheny.db.information.InformationPage; import org.polypheny.db.information.InformationTable; import org.polypheny.db.languages.OperatorRegistry; +import org.polypheny.db.monitoring.events.MonitoringType; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.rex.RexBuilder; import org.polypheny.db.rex.RexLiteral; @@ -111,7 +111,7 @@ public class StatisticsManagerImpl extends StatisticsManager { private final Map> tableStatistic; @Getter - private volatile Map>>> statisticSchemaMap; + private volatile Map> statisticFields; private final Queue tablesToUpdate = new ConcurrentLinkedQueue<>(); @@ -121,7 +121,7 @@ public class StatisticsManagerImpl 
extends StatisticsManager { public StatisticsManagerImpl( StatisticQueryProcessor statisticQueryProcessor ) { this.setQueryInterface( statisticQueryProcessor ); - this.statisticSchemaMap = new ConcurrentHashMap<>(); + this.statisticFields = new ConcurrentHashMap<>(); this.tableStatistic = new ConcurrentHashMap<>(); this.listeners.addPropertyChangeListener( this ); @@ -141,52 +141,6 @@ public void initializeStatisticSettings() { } - @Override - public void updateColumnName( LogicalColumn logicalColumn, String newName ) { - if ( statisticSchemaMap.containsKey( logicalColumn.namespaceId ) - && statisticSchemaMap.get( logicalColumn.namespaceId ).containsKey( logicalColumn.tableId ) - && statisticSchemaMap.get( logicalColumn.namespaceId ).get( logicalColumn.tableId ).containsKey( logicalColumn.id ) ) { - StatisticColumn statisticColumn = statisticSchemaMap.get( logicalColumn.namespaceId ).get( logicalColumn.tableId ).get( logicalColumn.id ); - statisticColumn.updateColumnName( newName ); - statisticSchemaMap.get( logicalColumn.namespaceId ).get( logicalColumn.tableId ).put( logicalColumn.id, statisticColumn ); - } - } - - - @Override - public void updateTableName( LogicalTable catalogTable, String newName ) { - if ( statisticSchemaMap.containsKey( catalogTable.namespaceId ) && statisticSchemaMap.get( catalogTable.namespaceId ).containsKey( catalogTable.id ) ) { - Map> columnsInformation = statisticSchemaMap.get( catalogTable.namespaceId ).get( catalogTable.id ); - for ( Entry> columnInfo : columnsInformation.entrySet() ) { - StatisticColumn statisticColumn = columnInfo.getValue(); - statisticColumn.updateTableName( newName ); - statisticSchemaMap.get( catalogTable.namespaceId ).get( catalogTable.id ).put( columnInfo.getKey(), statisticColumn ); - } - } - if ( tableStatistic.containsKey( catalogTable.id ) ) { - StatisticTable tableStatistics = tableStatistic.get( catalogTable.id ); - tableStatistics.updateTableName( newName ); - tableStatistic.put( catalogTable.id, tableStatistics ); - } - } - - - @Override - public void updateSchemaName( LogicalNamespace logicalNamespace, String newName ) { - if ( statisticSchemaMap.containsKey( logicalNamespace.id ) ) { - Map>> tableInformation = statisticSchemaMap.get( logicalNamespace.id ); - for ( long tableId : tableInformation.keySet() ) { - Map> columnsInformation = statisticSchemaMap.get( logicalNamespace.id ).remove( tableId ); - for ( Entry> columnInfo : columnsInformation.entrySet() ) { - StatisticColumn statisticColumn = columnInfo.getValue(); - statisticColumn.updateSchemaName( newName ); - statisticSchemaMap.get( logicalNamespace.id ).get( tableId ).put( columnInfo.getKey(), statisticColumn ); - } - } - } - } - - private Transaction getTransaction() { Transaction transaction; transaction = statisticQueryInterface.getTransactionManager().startTransaction( Catalog.getInstance().getSnapshot().getUser( Catalog.defaultUserId ), Catalog.getInstance().getSnapshot().getNamespace( 0 ), false, "Statistic Manager" ); @@ -232,9 +186,9 @@ public void restart( Config c ) { private void resetAllIsFull() { - this.statisticSchemaMap.values().forEach( s -> s.values().forEach( t -> t.values().forEach( c -> { - assignUnique( c, this.prepareNode( QueryResult.fromCatalogColumn( Catalog.getInstance().getSnapshot().rel().getColumn( c.getColumnId() ) ), NodeType.UNIQUE_VALUE ) ); - } ) ) ); + this.statisticFields.values().forEach( c -> { + assignUnique( c, this.prepareNode( QueryResult.fromCatalogColumn( Catalog.getInstance().getSnapshot().rel().getColumn( c.columnId ) ), 
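+ // the node type selects which statistic query is generated; here the unique values are re-collected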
NodeType.UNIQUE_VALUE ) ); + } ); } @@ -246,7 +200,7 @@ private void reevaluateAllStatistics() { return; } log.debug( "Resetting StatisticManager." ); - Map>>> statisticSchemaMapCopy = new ConcurrentHashMap<>(); + Map> statisticCopy = new ConcurrentHashMap<>(); transaction = getTransaction(); statement = transaction.createStatement(); statement.getQueryProcessor().lock( statement ); @@ -254,11 +208,11 @@ private void reevaluateAllStatistics() { for ( QueryResult column : statisticQueryInterface.getAllColumns() ) { StatisticColumn col = reevaluateColumn( column ); if ( col != null ) { - put( statisticSchemaMapCopy, column, col ); + put( statisticCopy, column, col ); } } reevaluateRowCount(); - replaceStatistics( statisticSchemaMapCopy ); + replaceStatistics( statisticCopy ); log.debug( "Finished resetting StatisticManager." ); statisticQueryInterface.commitTransaction( transaction, statement ); } @@ -295,8 +249,8 @@ public void reevaluateTable( long tableId ) { return; } LogicalEntity entity = Catalog.getInstance().getSnapshot().getLogicalEntity( tableId ); - if ( entity != null ) { - deleteTable( entity.namespaceId, tableId ); + if ( entity != null && entity.unwrap( LogicalTable.class ) != null ) { + deleteTable( entity.unwrap( LogicalTable.class ) ); List res = statisticQueryInterface.getAllColumns( tableId ); @@ -311,9 +265,9 @@ public void reevaluateTable( long tableId ) { } - private void deleteTable( long schemaId, long tableId ) { - if ( this.statisticSchemaMap.get( schemaId ) != null ) { - this.statisticSchemaMap.get( schemaId ).remove( tableId ); + private void deleteTable( LogicalTable table ) { + for ( long columnId : table.getColumnIds() ) { + this.statisticFields.remove( columnId ); } } @@ -321,8 +275,8 @@ private void deleteTable( long schemaId, long tableId ) { /** * Replace the tracked statistics with new statistics. */ - private synchronized void replaceStatistics( Map>>> map ) { - this.statisticSchemaMap = new ConcurrentHashMap<>( map ); + private synchronized void replaceStatistics( Map> statistics ) { + this.statisticFields = new ConcurrentHashMap<>( statistics ); } @@ -463,18 +417,18 @@ private Integer getNumberColumnCount( StatisticQueryResult countColumn ) { private void put( QueryResult queryResult, StatisticColumn statisticColumn ) { put( - this.statisticSchemaMap, + this.statisticFields, queryResult, statisticColumn ); } private void put( - Map>>> statisticSchemaMapCopy, + Map> statisticsCopy, QueryResult queryResult, StatisticColumn statisticColumn ) { put( - statisticSchemaMapCopy, + statisticsCopy, queryResult.getColumn().namespaceId, queryResult.getColumn().tableId, queryResult.getColumn().id, @@ -486,19 +440,13 @@ private void put( * Places a column at the correct position in the schemaMap. 
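* With this refactoring the statistics live in a single map keyed by column id; the table id is only
* used to maintain the matching entry in tableStatistic, and the schema id is kept for the callers.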
*/ private void put( - Map>>> map, + Map> map, long schemaId, long tableId, long columnId, StatisticColumn statisticColumn ) { - if ( !map.containsKey( schemaId ) ) { - map.put( schemaId, new HashMap<>() ); - } - Map>> mapMap = map.get( schemaId ); - if ( !mapMap.containsKey( tableId ) ) { - mapMap.put( tableId, new HashMap<>() ); - } - mapMap.get( tableId ).put( columnId, statisticColumn ); + + map.put( columnId, statisticColumn ); if ( !tableStatistic.containsKey( tableId ) ) { tableStatistic.put( tableId, new StatisticTable<>( tableId ) ); @@ -759,37 +707,37 @@ public void displayInformation() { tableSelectInformation.reset(); tableInformation.reset(); statisticsInformation.reset(); - statisticSchemaMap.values().forEach( schema -> schema.values().forEach( table -> table.forEach( ( k, v ) -> { + statisticFields.forEach( ( k, v ) -> { if ( v instanceof NumericalStatisticColumn ) { if ( ((NumericalStatisticColumn) v).getMin() != null && ((NumericalStatisticColumn) v).getMax() != null ) { numericalInformation.addRow( - v.getQualifiedColumnName(), + v.columnId, ((NumericalStatisticColumn) v).getMin().toString(), ((NumericalStatisticColumn) v).getMax().toString() ); } else { - numericalInformation.addRow( v.getQualifiedColumnName(), "❌", "❌" ); + numericalInformation.addRow( v.columnId, "❌", "❌" ); } } if ( v instanceof TemporalStatisticColumn ) { if ( ((TemporalStatisticColumn) v).getMin() != null && ((TemporalStatisticColumn) v).getMax() != null ) { temporalInformation.addRow( - v.getQualifiedColumnName(), + v.columnId, ((TemporalStatisticColumn) v).getMin().toString(), ((TemporalStatisticColumn) v).getMax().toString() ); } else { - temporalInformation.addRow( v.getQualifiedColumnName(), "❌", "❌" ); + temporalInformation.addRow( v.columnId, "❌", "❌" ); } } else { String values = v.getUniqueValues().toString(); if ( !v.isFull() ) { - alphabeticalInformation.addRow( v.getQualifiedColumnName(), values ); + alphabeticalInformation.addRow( v.columnId, values ); } else { - alphabeticalInformation.addRow( v.getQualifiedColumnName(), "is Full" ); + alphabeticalInformation.addRow( v.columnId, "is Full" ); } } - statisticsInformation.addRow( v.getQualifiedColumnName(), v.getType().name() ); + statisticsInformation.addRow( v.columnId, v.type.getName() ); - } ) ) ); + } ); tableStatistic.forEach( ( k, v ) -> { tableInformation.addRow( v.getTable(), v.getNamespaceType(), v.getNumberOfRows() ); @@ -863,41 +811,37 @@ public void tablesToUpdate( long tableId ) { * @param schemaId of the table */ @Override - public void tablesToUpdate( long tableId, Map> changedValues, String type, long schemaId ) { - Catalog catalog = Catalog.getInstance(); - if ( catalog.getSnapshot().getLogicalEntity( tableId ) != null ) { - switch ( type ) { - case "INSERT": - handleInsert( tableId, changedValues, schemaId, catalog ); - break; - case "TRUNCATE": - handleTruncate( tableId, schemaId, catalog ); - break; - case "DROP_COLUMN": - handleDrop( tableId, changedValues, schemaId ); - break; - } + public void tablesToUpdate( long tableId, Map> changedValues, MonitoringType type, long schemaId ) { + if ( Catalog.snapshot().getLogicalEntity( tableId ) == null ) { + return; + } + + switch ( type ) { + case INSERT: + handleInsert( tableId, changedValues, schemaId, Catalog.snapshot() ); + break; + case TRUNCATE: + handleTruncate( tableId, schemaId, Catalog.snapshot() ); + break; + case DROP_COLUMN: + handleDrop( tableId, changedValues, schemaId ); + break; + } } + } private void handleDrop( long tableId, Map> changedValues, long
schemaId ) { - Map>> schema = this.statisticSchemaMap.get( schemaId ); - if ( schema != null ) { - Map> table = this.statisticSchemaMap.get( schemaId ).get( tableId ); - if ( table != null ) { - table.remove( changedValues.keySet().stream().findFirst().get() ); - } - } + changedValues.keySet().stream().findFirst().ifPresent( id -> statisticFields.remove( id ) ); } - private void handleTruncate( long tableId, long schemaId, Catalog catalog ) { - LogicalTable catalogTable = catalog.getSnapshot().getLogicalEntity( tableId ).unwrap( LogicalTable.class ); - for ( LogicalColumn column : catalog.getSnapshot().rel().getColumns( catalogTable.id ) ) { + private void handleTruncate( long tableId, long schemaId, Snapshot snapshot ) { + LogicalTable catalogTable = snapshot.getLogicalEntity( tableId ).unwrap( LogicalTable.class ); + for ( LogicalColumn column : snapshot.rel().getColumns( catalogTable.id ) ) { PolyType polyType = column.type; QueryResult queryResult = new QueryResult( catalogTable, column ); - if ( this.statisticSchemaMap.get( schemaId ).get( tableId ).get( column.id ) != null ) { + if ( this.statisticFields.get( schemaId ).get( tableId ).get( column.id ) != null ) { StatisticColumn statisticColumn = createNewStatisticColumns( polyType, queryResult ); if ( statisticColumn != null ) { put( queryResult, statisticColumn ); @@ -920,24 +864,24 @@ private > StatisticColumn createNewStatisticColumns( } - private void handleInsert( long tableId, Map> changedValues, long schemaId, Catalog catalog ) { - LogicalTable catalogTable = catalog.getSnapshot().getLogicalEntity( tableId ).unwrap( LogicalTable.class ); - if ( this.statisticSchemaMap.get( schemaId ) != null ) { - if ( this.statisticSchemaMap.get( schemaId ).get( tableId ) != null ) { - for ( LogicalColumn column : catalog.getSnapshot().rel().getColumns( tableId ) ) { + private void handleInsert( long tableId, Map> changedValues, long schemaId, Snapshot snapshot ) { + LogicalTable catalogTable = snapshot.getLogicalEntity( tableId ).unwrap( LogicalTable.class ); + if ( this.statisticFields.get( schemaId ) != null ) { + if ( this.statisticFields.get( schemaId ).get( tableId ) != null ) { + for ( LogicalColumn column : snapshot.rel().getColumns( tableId ) ) { PolyType polyType = column.type; QueryResult queryResult = new QueryResult( catalogTable, column ); - if ( this.statisticSchemaMap.get( schemaId ).get( tableId ).get( column.id ) != null && changedValues.get( (long) column.position ) != null ) { - handleInsertColumn( tableId, changedValues, schemaId, catalog.getSnapshot().rel().getColumns( tableId ).stream().map( c -> c.id ).collect( Collectors.toList() ), column.position, queryResult ); + if ( this.statisticFields.get( schemaId ).get( tableId ).get( column.id ) != null && changedValues.get( (long) column.position ) != null ) { + handleInsertColumn( tableId, changedValues, schemaId, snapshot.rel().getColumns( tableId ).stream().map( c -> c.id ).collect( Collectors.toList() ), column.position, queryResult ); } else { addNewColumnStatistics( changedValues, column.position, polyType, queryResult ); } } } else { - addInserts( changedValues, catalogTable, catalog.getSnapshot().rel().getColumns( tableId ) ); + addInserts( changedValues, catalogTable, snapshot.rel().getColumns( tableId ) ); } } else { - addInserts( changedValues, catalogTable, catalog.getSnapshot().rel().getColumns( tableId ) ); + addInserts( changedValues, catalogTable, snapshot.rel().getColumns( tableId ) ); } } @@ -963,7 +907,10 @@ private void addNewColumnStatistics( Map> 
changedValues, long i, P private void handleInsertColumn( long tableId, Map> changedValues, long namespaceId, List columns, int i, QueryResult queryResult ) { - StatisticColumn statisticColumn = this.statisticSchemaMap.get( namespaceId ).get( tableId ).get( columns.get( i ) ); + StatisticColumn statisticColumn = this.statisticFields.get( columns.get( i ) ); + if ( statisticColumn == null ) { + log.warn( "why" ); + } statisticColumn.insert( (List) changedValues.get( (long) i ) ); put( queryResult, statisticColumn ); } @@ -974,8 +921,8 @@ private void handleInsertColumn( long tableId, Map> changedValues, */ @Override public void deleteTableToUpdate( long tableId, long schemaId ) { - if ( statisticSchemaMap.containsKey( schemaId ) ) { - statisticSchemaMap.get( schemaId ).remove( tableId ); + if ( statisticFields.containsKey( schemaId ) ) { + statisticFields.get( schemaId ).remove( tableId ); } tableStatistic.remove( tableId ); if ( tablesToUpdate.contains( tableId ) ) { @@ -992,12 +939,12 @@ public void deleteTableToUpdate( long tableId, long schemaId ) { * * @param tableId of the table * @param number of changed rows or explicit number for the rowCount - * @param source of the rowCount information + * @param type of the rowCount information */ @Override - public void updateRowCountPerTable( long tableId, int number, String source ) { + public void updateRowCountPerTable( long tableId, int number, MonitoringType type ) { StatisticTable statisticTable; - switch ( source ) { + switch ( type ) { case "INSERT": if ( tableStatistic.containsKey( tableId ) ) { statisticTable = tableStatistic.get( tableId ); @@ -1029,7 +976,7 @@ public void updateRowCountPerTable( long tableId, int number, String source ) { statisticTable.setNumberOfRows( number ); break; default: - throw new RuntimeException( "updateRowCountPerTable is not implemented for: " + source ); + throw new RuntimeException( "updateRowCountPerTable is not implemented for: " + type ); } tableStatistic.put( tableId, statisticTable ); @@ -1066,10 +1013,10 @@ public void setIndexSize( long tableId, int indexSize ) { * and if not creates a new TableCall. 
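Reviewer note: updateRowCountPerTable above dispatches on the kind of change — INSERT adds to the running count, DELETE subtracts, and TRUNCATE or an explicit SET-ROW-COUNT overwrite it. A self-contained sketch of that bookkeeping, with a hypothetical ChangeType enum standing in for the real MonitoringType and a bare map in place of StatisticTable:

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

class RowCountBookkeeping {

    enum ChangeType { INSERT, DELETE, SET_ROW_COUNT, TRUNCATE }

    private final Map<Long, Integer> rowCounts = new ConcurrentHashMap<>();

    void updateRowCountPerTable( long tableId, int number, ChangeType type ) {
        switch ( type ) {
            case INSERT:
                rowCounts.merge( tableId, number, Integer::sum );   // add to the running count
                break;
            case DELETE:
                rowCounts.merge( tableId, -number, Integer::sum );  // subtract the deleted rows
                break;
            case SET_ROW_COUNT:
            case TRUNCATE:
                rowCounts.put( tableId, number );                   // overwrite with the explicit count
                break;
        }
    }
}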
* * @param tableId of the table - * @param kind of DML + * @param type of DML */ @Override - public void setTableCalls( long tableId, String kind ) { + public void setTableCalls( long tableId, MonitoringType type ) { TableCalls calls; if ( tableStatistic.containsKey( tableId ) ) { if ( tableStatistic.get( tableId ).getCalls() != null ) { @@ -1080,7 +1027,7 @@ public void setTableCalls( long tableId, String kind ) { } else { calls = new TableCalls( tableId, 0, 0, 0, 0 ); } - updateCalls( tableId, kind, calls ); + updateCalls( tableId, type, calls ); } @@ -1178,7 +1125,7 @@ public > Object getTableStatistic( long schemaId, long t statisticTable.setNumericalColumn( numericInfo ); statisticTable.setAlphabeticColumn( alphabeticInfo ); statisticTable.setTemporalColumn( temporalInfo ); - statisticSchemaMap.get( schemaId ).get( tableId ).forEach( ( k, v ) -> { + statisticFields.get( schemaId ).get( tableId ).forEach( ( k, v ) -> { if ( v.getType().getFamily() == PolyTypeFamily.NUMERIC ) { numericInfo.add( (NumericalStatisticColumn) v ); statisticTable.setNumericalColumn( numericInfo ); @@ -1259,7 +1206,7 @@ private enum NodeType { @Override public Map>>> getQualifiedStatisticMap() { Map>>> map = new HashMap<>(); - for ( Entry>>> namespace : statisticSchemaMap.entrySet() ) { + for ( Entry>>> namespace : statisticFields.entrySet() ) { for ( Entry>> entity : namespace.getValue().entrySet() ) { for ( Entry> field : entity.getValue().entrySet() ) { StatisticColumn val = field.getValue(); diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/RelationalCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/RelationalCatalog.java index 4bc6df71dd..f7a5f32355 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/RelationalCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/RelationalCatalog.java @@ -34,7 +34,6 @@ import lombok.Value; import lombok.experimental.NonFinal; import lombok.experimental.SuperBuilder; -import org.apache.commons.lang3.NotImplementedException; import org.polypheny.db.algebra.AlgCollation; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.type.AlgDataType; @@ -169,37 +168,32 @@ public LogicalTable addTable( String name, EntityType entityType, boolean modifi @Override - public long addView( String name, long namespaceId, EntityType entityType, boolean modifiable, AlgNode definition, AlgCollation algCollation, Map> underlyingTables, AlgDataType fieldList, String query, QueryLanguage language ) { - throw new NotImplementedException(); - } + public LogicalView addView( String name, long namespaceId, boolean modifiable, AlgNode definition, AlgCollation algCollation, Map> underlyingTables, List connectedViews, AlgDataType fieldList, String query, QueryLanguage language ) { + long id = idBuilder.getNewEntityId(); + LogicalView view = new LogicalView( id, name, namespaceId, EntityType.VIEW, query, algCollation, underlyingTables, connectedViews, language ); - @Override - public LogicalMaterializedView addMaterializedView( final String name, long namespaceId, EntityType entityType, AlgNode definition, AlgCollation algCollation, Map> underlyingTables, AlgDataType fieldList, MaterializedCriteria materializedCriteria, String query, QueryLanguage language, boolean ordered ) { - long id = idBuilder.getNewEntityId(); + tables.put( id, view ); + nodes.put( id, definition ); - String adjustedName = name; + return view; + } - if ( !logicalNamespace.caseSensitive ) 
{ - adjustedName = name.toLowerCase(); - } - if ( entityType != EntityType.MATERIALIZED_VIEW ) { - // Should not happen, addViewTable is only called with EntityType.View - throw new RuntimeException( "addMaterializedViewTable is only possible with EntityType = MATERIALIZED_VIEW" ); - } + @Override + public LogicalMaterializedView addMaterializedView( final String name, long namespaceId, AlgNode definition, AlgCollation algCollation, Map> underlyingTables, AlgDataType fieldList, MaterializedCriteria materializedCriteria, String query, QueryLanguage language, boolean ordered ) { + long id = idBuilder.getNewEntityId(); LogicalMaterializedView materializedViewTable = new LogicalMaterializedView( id, - adjustedName, + name, namespaceId, - entityType, query, null, algCollation, ImmutableList.of(), underlyingTables, - language.getSerializedName(), + language, materializedCriteria, ordered ); diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddColumn.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddColumn.java index f7c2b163fd..9f3e220b7d 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddColumn.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddColumn.java @@ -126,7 +126,7 @@ public void execute( Context context, Statement statement, QueryParameters param } // Make sure that all adapters are of type store (and not source) - for ( AllocationEntity allocation : statement.getTransaction().getSnapshot().alloc().getAllocationsFromLogical( catalogTable.id ) ) { + for ( AllocationEntity allocation : statement.getTransaction().getSnapshot().alloc().getFromLogical( catalogTable.id ) ) { getDataStoreInstance( allocation.adapterId ); } diff --git a/webui/src/main/java/org/polypheny/db/webui/Crud.java b/webui/src/main/java/org/polypheny/db/webui/Crud.java index 5be26c031c..3e6ab447b6 100644 --- a/webui/src/main/java/org/polypheny/db/webui/Crud.java +++ b/webui/src/main/java/org/polypheny/db/webui/Crud.java @@ -1176,8 +1176,8 @@ void getDataSourceColumns( final Context ctx ) { } ctx.json( new Result( columns.toArray( new DbColumn[0] ), null ).setType( ResultType.VIEW ) ); } else { - List allocs = catalog.getSnapshot().alloc().getAllocationsFromLogical( catalogTable.id ); - if ( catalog.getSnapshot().alloc().getAllocationsFromLogical( catalogTable.id ).size() != 1 ) { + List allocs = catalog.getSnapshot().alloc().getFromLogical( catalogTable.id ); + if ( catalog.getSnapshot().alloc().getFromLogical( catalogTable.id ).size() != 1 ) { throw new RuntimeException( "The table has an unexpected number of placements!" 
); } From 7b174ab0c73a5e339d4bda480fa4fcd0caf1f016 Mon Sep 17 00:00:00 2001 From: datomo Date: Sun, 16 Apr 2023 00:09:55 +0200 Subject: [PATCH 065/436] adjusting statistics and monitoring, fixing alloc, call via adapter and logical --- .../org/polypheny/db/PolyImplementation.java | 11 +- .../org/polypheny/db/StatisticsManager.java | 2 +- .../catalogs/LogicalRelationalCatalog.java | 2 + .../entity/LogicalMaterializedView.java | 2 +- .../db/catalog/entity/LogicalView.java | 2 +- .../catalog/snapshot/LogicalRelSnapshot.java | 3 + .../snapshot/impl/AllocSnapshotImpl.java | 12 +- .../snapshot/impl/LogicalRelSnapshotImpl.java | 21 ++- .../db/monitoring/events/MonitoringType.java | 15 +- .../org/polypheny/db/ddl/DdlManagerImpl.java | 24 +-- .../db/processing/AbstractQueryProcessor.java | 3 +- .../events/analyzer/DdlEventAnalyzer.java | 5 +- .../events/analyzer/QueryEventAnalyzer.java | 11 +- .../events/metrics/DdlDataPoint.java | 2 +- .../events/metrics/QueryDataPoint.java | 37 ++--- .../events/metrics/QueryDataPointImpl.java | 45 +++--- .../statistics/AlphabeticStatisticColumn.java | 20 +-- .../statistics/StatisticRepository.java | 8 +- .../monitoring/statistics/StatisticTable.java | 2 +- .../statistics/StatisticsManagerImpl.java | 152 ++++++++---------- .../statistics/TemporalStatisticColumn.java | 2 +- .../jdbc/rel2sql/AlgToSqlConverter.java | 4 +- .../jdbc/stores/AbstractJdbcStore.java | 2 +- .../db/monitoring/MapDbRepository.java | 23 ++- 24 files changed, 205 insertions(+), 205 deletions(-) diff --git a/core/src/main/java/org/polypheny/db/PolyImplementation.java b/core/src/main/java/org/polypheny/db/PolyImplementation.java index b56b107ba6..18b9a2c9aa 100644 --- a/core/src/main/java/org/polypheny/db/PolyImplementation.java +++ b/core/src/main/java/org/polypheny/db/PolyImplementation.java @@ -44,6 +44,7 @@ import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.interpreter.BindableConvention; +import org.polypheny.db.monitoring.events.MonitoringType; import org.polypheny.db.monitoring.events.StatementEvent; import org.polypheny.db.plan.AlgOptUtil; import org.polypheny.db.plan.Convention; @@ -289,7 +290,7 @@ public int getRowsChanged( Statement statement ) throws Exception { int rowsChanged; try { Iterator iterator = enumerable( statement.getDataContext() ).iterator(); - rowsChanged = getRowsChanged( statement, iterator, getKind().name() ); + rowsChanged = getRowsChanged( statement, iterator, MonitoringType.from( getKind() ) ); } catch ( RuntimeException e ) { if ( e.getCause() != null ) { throw new Exception( e.getCause().getMessage(), e ); @@ -304,7 +305,7 @@ public int getRowsChanged( Statement statement ) throws Exception { } - public static int getRowsChanged( Statement statement, Iterator iterator, String kind ) throws Exception { + public static int getRowsChanged( Statement statement, Iterator iterator, MonitoringType kind ) throws Exception { int rowsChanged = -1; Object object; while ( iterator.hasNext() ) { @@ -336,12 +337,12 @@ public static int getRowsChanged( Statement statement, Iterator iterator, Str } - public static void addMonitoringInformation( Statement statement, String kind, int rowsChanged ) { + public static void addMonitoringInformation( Statement statement, MonitoringType kind, int rowsChanged ) { StatementEvent eventData = statement.getMonitoringEvent(); if ( rowsChanged > 0 ) { eventData.setRowCount( rowsChanged ); } - if ( Kind.INSERT.name().equals( kind ) || Kind.DELETE.name().equals( kind 
) ) {
+        if ( MonitoringType.INSERT == kind || MonitoringType.DELETE == kind ) {
             HashMap> ordered = new HashMap<>();
@@ -359,7 +360,7 @@ public static void addMonitoringInformation( Statement statement, String kind, i
         }
         eventData.getChangedValues().putAll( ordered );
-        if ( Kind.INSERT.name().equals( kind ) ) {
+        if ( MonitoringType.INSERT == kind ) {
             if ( rowsChanged >= 0 ) {
                 eventData.setRowCount( statement.getDataContext().getParameterValues().size() );
             }
diff --git a/core/src/main/java/org/polypheny/db/StatisticsManager.java b/core/src/main/java/org/polypheny/db/StatisticsManager.java
index 5b4cba09e0..fbbe027c14 100644
--- a/core/src/main/java/org/polypheny/db/StatisticsManager.java
+++ b/core/src/main/java/org/polypheny/db/StatisticsManager.java
@@ -56,7 +56,7 @@ public static StatisticsManager getInstance() {
     public abstract void asyncReevaluateAllStatistics();
-    public abstract void deleteTableToUpdate( long tableId, long schemaId );
+    public abstract void deleteTableToUpdate( long tableId );
     public abstract void updateRowCountPerTable( long tableId, int number, MonitoringType type );
diff --git a/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalRelationalCatalog.java b/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalRelationalCatalog.java
index 859b6eb246..0ebfe5ad19 100644
--- a/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalRelationalCatalog.java
+++ b/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalRelationalCatalog.java
@@ -316,4 +316,6 @@ public interface LogicalRelationalCatalog extends LogicalCatalog {
     Map getConstraints();
+    Map getNodes();
+
 }
diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/LogicalMaterializedView.java b/core/src/main/java/org/polypheny/db/catalog/entity/LogicalMaterializedView.java
index 2cedf1ac29..c96efa3dd2 100644
--- a/core/src/main/java/org/polypheny/db/catalog/entity/LogicalMaterializedView.java
+++ b/core/src/main/java/org/polypheny/db/catalog/entity/LogicalMaterializedView.java
@@ -82,7 +82,7 @@ public LogicalMaterializedView(
     @Override
     public AlgNode getDefinition() {
-        return Catalog.getInstance().getNodeInfo().get( id );
+        return Catalog.snapshot().rel().getNodeInfo( id );
     }
diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/LogicalView.java b/core/src/main/java/org/polypheny/db/catalog/entity/LogicalView.java
index 031cc4e8cc..ac591bf232 100644
--- a/core/src/main/java/org/polypheny/db/catalog/entity/LogicalView.java
+++ b/core/src/main/java/org/polypheny/db/catalog/entity/LogicalView.java
@@ -111,7 +111,7 @@ public void prepareView( AlgNode viewLogicalRoot, AlgOptCluster algOptCluster )
     public AlgNode getDefinition() {
-        return Catalog.getInstance().getNodeInfo().get( id );
+        return Catalog.snapshot().rel().getNodeInfo( id );
     }
 }
diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalRelSnapshot.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalRelSnapshot.java
index 7557013053..b873089ed3 100644
--- a/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalRelSnapshot.java
+++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalRelSnapshot.java
@@ -18,6 +18,7 @@
 import java.util.List;
 import javax.annotation.Nullable;
+import org.polypheny.db.algebra.AlgNode;
 import org.polypheny.db.catalog.Catalog;
 import org.polypheny.db.catalog.entity.CatalogConstraint;
 import org.polypheny.db.catalog.entity.CatalogForeignKey;
@@ -278,4 +279,6 @@ public interface LogicalRelSnapshot {
     boolean checkIfExistsEntity( String name );
+
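Reviewer note: the PolyImplementation and StatisticsManager hunks above swap stringly-typed Kind names for the MonitoringType enum; its from( Kind ) factory (added in the MonitoringType.java hunk further below) resolves by name and fails fast for kinds that monitoring does not track. A hedged usage sketch, assuming the enum as defined in this patch:

import org.polypheny.db.algebra.constant.Kind;
import org.polypheny.db.monitoring.events.MonitoringType;

class MonitoringTypeUsage {

    public static void main( String[] args ) {
        // DML kinds map one-to-one by name
        MonitoringType insert = MonitoringType.from( Kind.INSERT );  // -> MonitoringType.INSERT
        MonitoringType delete = MonitoringType.from( Kind.DELETE );  // -> MonitoringType.DELETE

        // kinds without a monitoring counterpart (e.g. DDL kinds) throw
        // NotImplementedException instead of silently returning null
        System.out.println( insert + " " + delete );
    }
}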
AlgNode getNodeInfo( long id ); + } diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/AllocSnapshotImpl.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/AllocSnapshotImpl.java index e62e4a9c45..7aa98e599e 100644 --- a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/AllocSnapshotImpl.java +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/AllocSnapshotImpl.java @@ -18,9 +18,11 @@ import com.google.common.collect.ImmutableMap; import java.util.ArrayList; +import java.util.Comparator; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.TreeSet; import java.util.stream.Collectors; import lombok.Value; import lombok.extern.slf4j.Slf4j; @@ -64,7 +66,7 @@ public class AllocSnapshotImpl implements AllocSnapshot { ImmutableMap> allocsOnAdapters; ImmutableMap> logicalColumnToAlloc; - ImmutableMap> allocColumns; + ImmutableMap> allocColumns; ImmutableMap, List> adapterLogicalTablePlacements; ImmutableMap, AllocationEntity> adapterLogicalTableAlloc; ImmutableMap> logicalAllocs; @@ -145,11 +147,11 @@ private ImmutableMap> buildLogicalAllocs() { } - private ImmutableMap> buildAllocColumns() { - Map> map = new HashMap<>(); + private ImmutableMap> buildAllocColumns() { + Map> map = new HashMap<>(); for ( AllocationColumn value : columns.values() ) { if ( !map.containsKey( value.tableId ) ) { - map.put( value.tableId, new ArrayList<>() ); + map.put( value.tableId, new TreeSet<>( Comparator.comparingLong( c -> c.position ) ) ); } map.get( value.tableId ).add( value ); } @@ -548,7 +550,7 @@ public AllocationEntity getAllocation( long adapterId, long entityId ) { @Override public List getColumns( long allocId ) { - return allocColumns.get( allocId ); + return List.copyOf( allocColumns.get( allocId ) ); } } diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalRelSnapshotImpl.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalRelSnapshotImpl.java index 9d23b71fd4..2ece2fa74e 100644 --- a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalRelSnapshotImpl.java +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalRelSnapshotImpl.java @@ -18,13 +18,16 @@ import com.google.common.collect.ImmutableMap; import java.util.ArrayList; +import java.util.Comparator; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; +import java.util.TreeSet; import java.util.stream.Collectors; import lombok.Value; import org.jetbrains.annotations.Nullable; +import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.catalog.catalogs.LogicalRelationalCatalog; import org.polypheny.db.catalog.entity.CatalogConstraint; import org.polypheny.db.catalog.entity.CatalogForeignKey; @@ -49,7 +52,7 @@ public class LogicalRelSnapshotImpl implements LogicalRelSnapshot { ImmutableMap, LogicalTable> tableNames; - ImmutableMap> tableColumns; + ImmutableMap> tableColumns; ImmutableMap columns; ImmutableMap, LogicalColumn> columnNames; @@ -74,6 +77,7 @@ public class LogicalRelSnapshotImpl implements LogicalRelSnapshot { ImmutableMap, LogicalColumn> tableIdColumnNameColumn; ImmutableMap> tableConstraints; ImmutableMap> tableForeignKeys; + ImmutableMap nodes; public LogicalRelSnapshotImpl( Map catalogs ) { @@ -88,10 +92,10 @@ public LogicalRelSnapshotImpl( Map catalogs ) { //// tables - Map> tableChildren = new HashMap<>(); + Map> tableChildren = new HashMap<>(); columns.forEach( ( k, v ) -> { if ( !tableChildren.containsKey( v.tableId 
) ) { - tableChildren.put( v.tableId, new ArrayList<>() ); + tableChildren.put( v.tableId, new TreeSet<>( Comparator.comparingInt( a -> a.position ) ) ); } tableChildren.get( v.tableId ).add( v ); } ); @@ -151,6 +155,9 @@ public LogicalRelSnapshotImpl( Map catalogs ) { tableConstraints.get( v.key.tableId ).add( v ); } ); this.tableConstraints = ImmutableMap.copyOf( tableConstraints ); + + /// ALGNODES e.g. views and materializedViews + this.nodes = ImmutableMap.copyOf( catalogs.values().stream().flatMap( c -> c.getNodes().entrySet().stream() ).collect( Collectors.toMap( Entry::getKey, Entry::getValue ) ) ); } @@ -200,7 +207,7 @@ public List getTableKeys( long tableId ) { @Override public List getColumns( long tableId ) { - return tableColumns.get( tableId ); + return List.copyOf( tableColumns.get( tableId ) ); } @@ -376,4 +383,10 @@ public boolean checkIfExistsEntity( String newName ) { return tableNames.containsKey( newName ); } + + @Override + public AlgNode getNodeInfo( long id ) { + return nodes.get( id ); + } + } diff --git a/core/src/main/java/org/polypheny/db/monitoring/events/MonitoringType.java b/core/src/main/java/org/polypheny/db/monitoring/events/MonitoringType.java index 932fc04b8a..ce46661ce7 100644 --- a/core/src/main/java/org/polypheny/db/monitoring/events/MonitoringType.java +++ b/core/src/main/java/org/polypheny/db/monitoring/events/MonitoringType.java @@ -16,10 +16,23 @@ package org.polypheny.db.monitoring.events; +import org.apache.commons.lang3.EnumUtils; +import org.apache.commons.lang3.NotImplementedException; +import org.polypheny.db.algebra.constant.Kind; + public enum MonitoringType { INSERT, TRUNCATE, DROP_COLUMN, DROP_TABLE, - DELETE; + SET_ROW_COUNT, + DELETE, SELECT, UPDATE; + + + public static MonitoringType from( Kind kind ) { + if ( EnumUtils.isValidEnum( MonitoringType.class, kind.name().toUpperCase() ) ) { + return valueOf( kind.name().toUpperCase() ); + } + throw new NotImplementedException(); + } } diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java index 5b02de9849..d8c835ccd4 100644 --- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java @@ -93,6 +93,7 @@ import org.polypheny.db.config.RuntimeConfig; import org.polypheny.db.languages.QueryLanguage; import org.polypheny.db.monitoring.events.DdlEvent; +import org.polypheny.db.monitoring.events.MonitoringType; import org.polypheny.db.monitoring.events.StatementEvent; import org.polypheny.db.partition.PartitionManager; import org.polypheny.db.partition.PartitionManagerFactory; @@ -1591,7 +1592,6 @@ public void renameTable( LogicalTable catalogTable, String newTableName, Stateme catalog.getLogicalRel( catalogTable.namespaceId ).renameTable( catalogTable.id, newTableName ); - // Reset plan cache implementation cache & routing cache statement.getQueryProcessor().resetCaches(); } @@ -1644,7 +1644,7 @@ public void createView( String viewName, long namespaceId, AlgNode algNode, AlgC algNode, algCollation, underlyingTables, - null, + List.of(), fieldList, query, language @@ -3066,16 +3066,18 @@ private void prepareMonitoring( Statement statement, Kind kind, LogicalTable cat private void prepareMonitoring( Statement statement, Kind kind, LogicalTable catalogTable, LogicalColumn logicalColumn ) { // Initialize Monitoring - if ( statement.getMonitoringEvent() == null ) { - StatementEvent event = new DdlEvent(); - event.setMonitoringType( kind.name() ); - 
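Reviewer note: the AllocSnapshotImpl and LogicalRelSnapshotImpl hunks above collect columns into TreeSets ordered by position, so getColumns() can hand out an immutable, position-ordered list instead of relying on insertion order. A compact sketch of the pattern; Column stands in for the real catalog types, and positions are assumed unique per table, otherwise the comparator-based set would swallow duplicates:

import java.util.Comparator;
import java.util.List;
import java.util.TreeSet;

class OrderedColumns {

    static class Column {
        final long id;
        final int position;

        Column( long id, int position ) {
            this.id = id;
            this.position = position;
        }
    }

    // ordered by declared position, mirroring the TreeSets built in the snapshots
    private final TreeSet<Column> columns =
            new TreeSet<>( Comparator.comparingInt( ( Column c ) -> c.position ) );

    void add( Column column ) {
        columns.add( column );
    }

    List<Column> getColumns() {
        // immutable, position-ordered copy, as in List.copyOf( tableColumns.get( id ) )
        return List.copyOf( columns );
    }
}

Returning List.copyOf keeps the interface type unchanged for callers while protecting the backing set from outside mutation.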
event.setTableId( catalogTable.id ); - event.setSchemaId( catalogTable.namespaceId ); - if ( kind == Kind.DROP_COLUMN ) { - event.setColumnId( logicalColumn.id ); - } - statement.setMonitoringEvent( event ); + if ( statement.getMonitoringEvent() != null ) { + return; + } + StatementEvent event = new DdlEvent(); + event.setMonitoringType( MonitoringType.from( kind ) ); + event.setTableId( catalogTable.id ); + event.setSchemaId( catalogTable.namespaceId ); + if ( kind == Kind.DROP_COLUMN ) { + event.setColumnId( logicalColumn.id ); } + statement.setMonitoringEvent( event ); + } diff --git a/dbms/src/main/java/org/polypheny/db/processing/AbstractQueryProcessor.java b/dbms/src/main/java/org/polypheny/db/processing/AbstractQueryProcessor.java index 4eeb00b650..52fc4a3935 100644 --- a/dbms/src/main/java/org/polypheny/db/processing/AbstractQueryProcessor.java +++ b/dbms/src/main/java/org/polypheny/db/processing/AbstractQueryProcessor.java @@ -91,6 +91,7 @@ import org.polypheny.db.interpreter.BindableConvention; import org.polypheny.db.interpreter.Interpreters; import org.polypheny.db.monitoring.events.DmlEvent; +import org.polypheny.db.monitoring.events.MonitoringType; import org.polypheny.db.monitoring.events.QueryEvent; import org.polypheny.db.monitoring.events.StatementEvent; import org.polypheny.db.plan.AlgOptCost; @@ -1392,7 +1393,7 @@ private void prepareMonitoring( Statement statement, AlgRoot logicalRoot, boolea event.setAnalyze( isAnalyze ); event.setSubQuery( isSubquery ); event.setLogicalQueryInformation( queryInformation ); - event.setMonitoringType( logicalRoot.kind.name() ); + event.setMonitoringType( MonitoringType.from( logicalRoot.kind ) ); statement.setMonitoringEvent( event ); } } diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/events/analyzer/DdlEventAnalyzer.java b/monitoring/src/main/java/org/polypheny/db/monitoring/events/analyzer/DdlEventAnalyzer.java index e07fc00360..afa37f4287 100644 --- a/monitoring/src/main/java/org/polypheny/db/monitoring/events/analyzer/DdlEventAnalyzer.java +++ b/monitoring/src/main/java/org/polypheny/db/monitoring/events/analyzer/DdlEventAnalyzer.java @@ -22,7 +22,8 @@ public class DdlEventAnalyzer { public static DdlDataPoint analyze( DdlEvent ddlEvent ) { - DdlDataPoint metric = DdlDataPoint + + return DdlDataPoint .builder() .Id( ddlEvent.getId() ) .recordedTimestamp( ddlEvent.getRecordedTimestamp() ) @@ -32,8 +33,6 @@ public static DdlDataPoint analyze( DdlEvent ddlEvent ) { .schemaId( ddlEvent.getSchemaId() ) .columnId( ddlEvent.getColumnId() ) .build(); - - return metric; } } diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/events/analyzer/QueryEventAnalyzer.java b/monitoring/src/main/java/org/polypheny/db/monitoring/events/analyzer/QueryEventAnalyzer.java index c6d620f063..a010b60824 100644 --- a/monitoring/src/main/java/org/polypheny/db/monitoring/events/analyzer/QueryEventAnalyzer.java +++ b/monitoring/src/main/java/org/polypheny/db/monitoring/events/analyzer/QueryEventAnalyzer.java @@ -29,7 +29,8 @@ public class QueryEventAnalyzer { public static QueryDataPointImpl analyze( QueryEvent queryEvent ) { - QueryDataPointImpl metric = QueryDataPointImpl + + return QueryDataPointImpl .builder() .Id( queryEvent.getId() ) .tables( queryEvent.getLogicalQueryInformation().getTablesIds() ) @@ -45,14 +46,8 @@ public static QueryDataPointImpl analyze( QueryEvent queryEvent ) { .physicalQueryClass( queryEvent.getPhysicalQueryClass() ) .availableColumnsWithTable( 
queryEvent.getLogicalQueryInformation().getAvailableColumnsWithTable() ) .indexSize( queryEvent.getIndexSize() ) + .accessedPartitions( queryEvent.getAccessedPartitions() != null ? queryEvent.getAccessedPartitions().values().stream().flatMap( Set::stream ).collect( Collectors.toList() ) : Collections.emptyList() ) .build(); - if ( queryEvent.getAccessedPartitions() != null ) { - metric.setAccessedPartitions( queryEvent.getAccessedPartitions().values().stream().flatMap( Set::stream ).collect( Collectors.toList() ) ); - } else { - metric.setAccessedPartitions( Collections.emptyList() ); - } - - return metric; } } diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/events/metrics/DdlDataPoint.java b/monitoring/src/main/java/org/polypheny/db/monitoring/events/metrics/DdlDataPoint.java index 67a8457f85..6713a92d6c 100644 --- a/monitoring/src/main/java/org/polypheny/db/monitoring/events/metrics/DdlDataPoint.java +++ b/monitoring/src/main/java/org/polypheny/db/monitoring/events/metrics/DdlDataPoint.java @@ -32,7 +32,7 @@ public class DdlDataPoint implements MonitoringDataPoint, Serializable { private static final long serialVersionUID = 268576586444646401L; UUID Id; Timestamp recordedTimestamp; - protected boolean isCommitted; + boolean isCommitted; long tableId; MonitoringType monitoringType; long schemaId; diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/events/metrics/QueryDataPoint.java b/monitoring/src/main/java/org/polypheny/db/monitoring/events/metrics/QueryDataPoint.java index 0549e441bc..981d8cbde4 100644 --- a/monitoring/src/main/java/org/polypheny/db/monitoring/events/metrics/QueryDataPoint.java +++ b/monitoring/src/main/java/org/polypheny/db/monitoring/events/metrics/QueryDataPoint.java @@ -23,37 +23,30 @@ import java.util.List; import java.util.Map; import java.util.UUID; -import lombok.AccessLevel; -import lombok.AllArgsConstructor; import lombok.Builder; -import lombok.Getter; -import lombok.NoArgsConstructor; -import lombok.Setter; +import lombok.Value; import org.polypheny.db.monitoring.events.MonitoringDataPoint; -@Getter -@Setter @Builder -@NoArgsConstructor(access = AccessLevel.PUBLIC) -@AllArgsConstructor(access = AccessLevel.MODULE) +@Value public class QueryDataPoint implements MonitoringDataPoint, Serializable { private static final long serialVersionUID = 9063080671064507812L; - private final List tables = new ArrayList<>(); - private final HashMap dataElements = new HashMap<>(); - private UUID Id; - private Timestamp recordedTimestamp; - private String monitoringType; - private String description; - private long executionTime; - private boolean isSubQuery; - protected boolean isCommitted; - private int rowCount; - private List fieldNames; - protected Integer indexSize; - protected final Map availableColumnsWithTable = new HashMap<>(); + List tables = new ArrayList<>(); + HashMap dataElements = new HashMap<>(); + UUID Id; + Timestamp recordedTimestamp; + String monitoringType; + String description; + long executionTime; + boolean isSubQuery; + boolean isCommitted; + int rowCount; + List fieldNames; + Integer indexSize; + Map availableColumnsWithTable = new HashMap<>(); @Override diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/events/metrics/QueryDataPointImpl.java b/monitoring/src/main/java/org/polypheny/db/monitoring/events/metrics/QueryDataPointImpl.java index 849c4eec58..a06da4c4f3 100644 --- a/monitoring/src/main/java/org/polypheny/db/monitoring/events/metrics/QueryDataPointImpl.java +++ 
b/monitoring/src/main/java/org/polypheny/db/monitoring/events/metrics/QueryDataPointImpl.java @@ -24,44 +24,37 @@ import java.util.List; import java.util.Map; import java.util.UUID; -import lombok.AccessLevel; -import lombok.AllArgsConstructor; import lombok.Builder; -import lombok.Getter; -import lombok.NoArgsConstructor; -import lombok.Setter; +import lombok.Value; import org.polypheny.db.monitoring.events.MonitoringType; import org.polypheny.db.monitoring.events.QueryDataPoint; -@Getter -@Setter @Builder -@NoArgsConstructor(access = AccessLevel.PUBLIC) -@AllArgsConstructor(access = AccessLevel.MODULE) +@Value public class QueryDataPointImpl implements QueryDataPoint, Serializable { private static final long serialVersionUID = 4389301720141941770L; @Builder.Default - private final List tables = new ArrayList<>(); - private final HashMap dataElements = new HashMap<>(); - private UUID Id; - private Timestamp recordedTimestamp; - private MonitoringType monitoringType; - private String description; - private long executionTime; - private boolean isSubQuery; - protected boolean isCommitted; - private int rowCount; - private List fieldNames; - private List accessedPartitions; - private String algCompareString; - private String queryClass; - private String physicalQueryClass; - private Integer indexSize; + List tables = new ArrayList<>(); + Map dataElements = new HashMap<>(); + UUID Id; + Timestamp recordedTimestamp; + MonitoringType monitoringType; + String description; + long executionTime; + boolean isSubQuery; + boolean isCommitted; + int rowCount; + List fieldNames; + List accessedPartitions; + String algCompareString; + String queryClass; + String physicalQueryClass; + Integer indexSize; @Builder.Default - private final Map availableColumnsWithTable = new HashMap<>(); + Map availableColumnsWithTable = new HashMap<>(); @Override diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/AlphabeticStatisticColumn.java b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/AlphabeticStatisticColumn.java index 77bb2af312..a5b01ec8c6 100644 --- a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/AlphabeticStatisticColumn.java +++ b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/AlphabeticStatisticColumn.java @@ -29,20 +29,20 @@ * Responsible to validate if data should be changed */ @Slf4j -public class AlphabeticStatisticColumn> extends StatisticColumn { +public class AlphabeticStatisticColumn extends StatisticColumn { @Getter - public List uniqueValuesCache = new ArrayList<>(); + public List uniqueValuesCache = new ArrayList<>(); boolean cacheFull; public AlphabeticStatisticColumn( QueryResult column ) { - super( column.getColumn().namespaceId, column.getColumn().tableId, column.getColumn().id, column.getColumn().type, StatisticType.ALPHABETICAL ); + super( column.getColumn().id, column.getColumn().type ); } @Override - public void insert( T val ) { + public void insert( String val ) { if ( uniqueValues.size() < RuntimeConfig.STATISTIC_BUFFER.getInteger() ) { if ( !uniqueValues.contains( val ) ) { uniqueValues.add( val ); @@ -59,11 +59,13 @@ public void insert( T val ) { @Override - public void insert( List values ) { - if ( values != null && !(values.get( 0 ) instanceof ArrayList) ) { - for ( T val : values ) { - insert( val ); - } + public void insert( List values ) { + if ( values == null ) { + return; + } + + for ( String val : values ) { + insert( val ); } } diff --git 
a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticRepository.java b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticRepository.java index e9e7e5d495..c5eb0cf066 100644 --- a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticRepository.java +++ b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticRepository.java @@ -66,10 +66,10 @@ private void updateDdlStatistics( DdlDataPoint dataPoint, StatisticsManager stat dataPoint.getMonitoringType(), dataPoint.getSchemaId() ); } - if ( dataPoint.getMonitoringType().equals( "DROP_TABLE" ) ) { - statisticsManager.deleteTableToUpdate( dataPoint.getTableId(), dataPoint.getSchemaId() ); + if ( dataPoint.getMonitoringType() == MonitoringType.DROP_TABLE ) { + statisticsManager.deleteTableToUpdate( dataPoint.getTableId() ); } - if ( dataPoint.getMonitoringType().equals( "DROP_COLUMN" ) ) { + if ( dataPoint.getMonitoringType() == MonitoringType.DROP_COLUMN ) { statisticsManager.tablesToUpdate( dataPoint.getTableId(), Collections.singletonMap( dataPoint.getColumnId(), null ), @@ -92,7 +92,7 @@ private void updateQueryStatistics( QueryDataPointImpl dataPoint, StatisticsMana // RowCount from UI is only used if there is no other possibility if ( statisticsManager.rowCountPerTable( tableId ) == null || statisticsManager.rowCountPerTable( tableId ) == 0 ) { - statisticsManager.updateRowCountPerTable( tableId, dataPoint.getRowCount(), "SET-ROW-COUNT" ); + statisticsManager.updateRowCountPerTable( tableId, dataPoint.getRowCount(), MonitoringType.SET_ROW_COUNT ); } if ( dataPoint.getIndexSize() != null ) { diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticTable.java b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticTable.java index dd8c413744..a3da3bb570 100644 --- a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticTable.java +++ b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticTable.java @@ -59,7 +59,7 @@ public class StatisticTable> { @Getter @Setter - private List> alphabeticColumn; + private List alphabeticColumn; @Getter @Setter diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticsManagerImpl.java b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticsManagerImpl.java index c1d49cd9c4..33da7470f7 100644 --- a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticsManagerImpl.java +++ b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticsManagerImpl.java @@ -28,13 +28,11 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.Map.Entry; import java.util.Queue; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentLinkedQueue; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; -import java.util.stream.Collectors; import javax.annotation.Nonnull; import javax.annotation.Nullable; import lombok.Getter; @@ -229,7 +227,7 @@ private void reevaluateRowCount() { statisticQueryInterface.getAllTable().forEach( table -> { int rowCount = getNumberColumnCount( this.prepareNode( new QueryResult( Catalog.getInstance().getSnapshot().getLogicalEntity( table.id ), null ), NodeType.ROW_COUNT_TABLE ) ); - updateRowCountPerTable( table.id, rowCount, "SET-ROW-COUNT" ); + updateRowCountPerTable( table.id, rowCount, MonitoringType.SET_ROW_COUNT ); } ); } @@ -384,11 +382,11 @@ private void 
assignUnique( StatisticColumn column, StatisticQueryResult u /** * Reevaluates an alphabetical column, with the configured statistics */ - private > StatisticColumn reevaluateAlphabeticalColumn( QueryResult column ) { + private AlphabeticStatisticColumn reevaluateAlphabeticalColumn( QueryResult column ) { StatisticQueryResult unique = this.prepareNode( column, NodeType.UNIQUE_VALUE ); Integer count = getNumberColumnCount( this.prepareNode( column, NodeType.ROW_COUNT_COLUMN ) ); - AlphabeticStatisticColumn statisticColumn = new AlphabeticStatisticColumn<>( column ); + AlphabeticStatisticColumn statisticColumn = new AlphabeticStatisticColumn( column ); assignUnique( statisticColumn, unique ); statisticColumn.setCount( count ); @@ -817,35 +815,36 @@ public void tablesToUpdate( long tableId, Map> changedValues, Moni } switch ( type ) { - case "INSERT": - handleInsert( tableId, changedValues, schemaId, Catalog.snapshot() ); + case INSERT: + handleInsert( tableId, changedValues, Catalog.snapshot() ); break; - case "TRUNCATE": - handleTruncate( tableId, schemaId, Catalog.snapshot() ); + case TRUNCATE: + handleTruncate( tableId, Catalog.snapshot() ); break; - case "DROP_COLUMN": - handleDrop( tableId, changedValues, schemaId ); + case DROP_COLUMN: + handleDrop( changedValues ); break; } } - private void handleDrop( long tableId, Map> changedValues, long schemaId ) { + private void handleDrop( Map> changedValues ) { changedValues.keySet().stream().findFirst().ifPresent( id -> statisticFields.remove( id ) ); } - private void handleTruncate( long tableId, long schemaId, Snapshot snapshot ) { + private void handleTruncate( long tableId, Snapshot snapshot ) { LogicalTable catalogTable = snapshot.getLogicalEntity( tableId ).unwrap( LogicalTable.class ); - for ( LogicalColumn column : snapshot.rel().getColumns( catalogTable.id ) ) { + for ( LogicalColumn column : catalogTable.getColumns() ) { PolyType polyType = column.type; QueryResult queryResult = new QueryResult( catalogTable, column ); - if ( this.statisticFields.get( schemaId ).get( tableId ).get( column.id ) != null ) { - StatisticColumn statisticColumn = createNewStatisticColumns( polyType, queryResult ); - if ( statisticColumn != null ) { - put( queryResult, statisticColumn ); - } + if ( statisticFields.get( column.id ) == null ) { + continue; + } + StatisticColumn statisticColumn = createNewStatisticColumns( polyType, queryResult ); + if ( statisticColumn != null ) { + put( queryResult, statisticColumn ); } } } @@ -856,7 +855,7 @@ private > StatisticColumn createNewStatisticColumns( if ( polyType.getFamily() == PolyTypeFamily.NUMERIC ) { statisticColumn = new NumericalStatisticColumn( queryResult ); } else if ( polyType.getFamily() == PolyTypeFamily.CHARACTER ) { - statisticColumn = new AlphabeticStatisticColumn<>( queryResult ); + statisticColumn = new AlphabeticStatisticColumn( queryResult ); } else if ( PolyType.DATETIME_TYPES.contains( polyType ) ) { statisticColumn = new TemporalStatisticColumn<>( queryResult ); } @@ -864,25 +863,24 @@ private > StatisticColumn createNewStatisticColumns( } - private void handleInsert( long tableId, Map> changedValues, long schemaId, Snapshot snapshot ) { + private void handleInsert( long tableId, Map> changedValues, Snapshot snapshot ) { LogicalTable catalogTable = snapshot.getLogicalEntity( tableId ).unwrap( LogicalTable.class ); - if ( this.statisticFields.get( schemaId ) != null ) { - if ( this.statisticFields.get( schemaId ).get( tableId ) != null ) { - for ( LogicalColumn column : 
snapshot.rel().getColumns( tableId ) ) { - PolyType polyType = column.type; - QueryResult queryResult = new QueryResult( catalogTable, column ); - if ( this.statisticFields.get( schemaId ).get( tableId ).get( column.id ) != null && changedValues.get( (long) column.position ) != null ) { - handleInsertColumn( tableId, changedValues, schemaId, snapshot.rel().getColumns( tableId ).stream().map( c -> c.id ).collect( Collectors.toList() ), column.position, queryResult ); - } else { - addNewColumnStatistics( changedValues, column.position, polyType, queryResult ); - } - } + List columns = catalogTable.getColumns(); + if ( changedValues.size() != columns.size() ) { + log.warn( "non-matching statistics length" ); + return; + } + + for ( LogicalColumn column : catalogTable.getColumns() ) { + PolyType polyType = column.type; + QueryResult queryResult = new QueryResult( catalogTable, column ); + if ( this.statisticFields.containsKey( column.id ) && changedValues.get( (long) column.position ) != null ) { + handleInsertColumn( changedValues.get( (long) column.position ), column, queryResult ); } else { - addInserts( changedValues, catalogTable, snapshot.rel().getColumns( tableId ) ); + addNewColumnStatistics( changedValues, column.position, polyType, queryResult ); } - } else { - addInserts( changedValues, catalogTable, snapshot.rel().getColumns( tableId ) ); } + } @@ -906,12 +904,9 @@ private void addNewColumnStatistics( Map> changedValues, long i, P } - private void handleInsertColumn( long tableId, Map> changedValues, long namespaceId, List columns, int i, QueryResult queryResult ) { - StatisticColumn statisticColumn = this.statisticFields.get( columns.get( i ) ); - if ( statisticColumn == null ) { - log.warn( "why" ); - } - statisticColumn.insert( (List) changedValues.get( (long) i ) ); + private void handleInsertColumn( List changedValues, LogicalColumn column, QueryResult queryResult ) { + StatisticColumn statisticColumn = this.statisticFields.get( column.id ); + statisticColumn.insert( (List) changedValues ); put( queryResult, statisticColumn ); } @@ -920,9 +915,9 @@ private void handleInsertColumn( long tableId, Map> changedValues, * Removes statistics from a given table. 
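Reviewer note on the deleteTableToUpdate rework below: the new loop resolves the table's columns via Catalog.snapshot().rel().getColumns( tableId ), but the body only calls statisticFields.get( column.id ), which has no effect — statisticFields.remove( column.id ) is presumably intended here, mirroring the column-wise eviction already used by handleDrop and deleteTable above. As written, the per-column statistics would survive the table drop.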
*/ @Override - public void deleteTableToUpdate( long tableId, long schemaId ) { - if ( statisticFields.containsKey( schemaId ) ) { - statisticFields.get( schemaId ).remove( tableId ); + public void deleteTableToUpdate( long tableId ) { + for ( LogicalColumn column : Catalog.snapshot().rel().getColumns( tableId ) ) { + statisticFields.get( column.id ); } tableStatistic.remove( tableId ); if ( tablesToUpdate.contains( tableId ) ) { @@ -945,7 +940,7 @@ public void deleteTableToUpdate( long tableId, long schemaId ) { public void updateRowCountPerTable( long tableId, int number, MonitoringType type ) { StatisticTable statisticTable; switch ( type ) { - case "INSERT": + case INSERT: if ( tableStatistic.containsKey( tableId ) ) { statisticTable = tableStatistic.get( tableId ); int totalRows = statisticTable.getNumberOfRows() + number; @@ -956,7 +951,7 @@ public void updateRowCountPerTable( long tableId, int number, MonitoringType typ statisticTable.setNumberOfRows( number ); } break; - case "DELETE": + case DELETE: if ( tableStatistic.containsKey( tableId ) ) { statisticTable = tableStatistic.get( tableId ); int totalRows = statisticTable.getNumberOfRows() - number; @@ -966,8 +961,8 @@ public void updateRowCountPerTable( long tableId, int number, MonitoringType typ statisticTable = new StatisticTable<>( tableId ); } break; - case "SET-ROW-COUNT": - case "TRUNCATE": + case SET_ROW_COUNT: + case TRUNCATE: if ( tableStatistic.containsKey( tableId ) ) { statisticTable = tableStatistic.get( tableId ); } else { @@ -1034,7 +1029,7 @@ public void setTableCalls( long tableId, MonitoringType type ) { /** * Updates the TableCalls. */ - private synchronized void updateCalls( long tableId, String kind, TableCalls calls ) { + private synchronized void updateCalls( long tableId, MonitoringType kind, TableCalls calls ) { StatisticTable statisticTable; if ( tableStatistic.containsKey( tableId ) ) { statisticTable = tableStatistic.remove( tableId ); @@ -1043,7 +1038,7 @@ private synchronized void updateCalls( long tableId, String kind, TableCalls cal } switch ( kind ) { - case "SELECT": + case SELECT: statisticTable.setCalls( new TableCalls( calls.getTableId(), calls.getNumberOfSelects() + 1, @@ -1052,7 +1047,7 @@ private synchronized void updateCalls( long tableId, String kind, TableCalls cal calls.getNumberOfUpdates() ) ); tableStatistic.put( tableId, statisticTable ); break; - case "INSERT": + case INSERT: statisticTable.setCalls( new TableCalls( calls.getTableId(), calls.getNumberOfSelects(), @@ -1061,7 +1056,7 @@ private synchronized void updateCalls( long tableId, String kind, TableCalls cal calls.getNumberOfUpdates() ) ); tableStatistic.put( tableId, statisticTable ); break; - case "DELETE": + case DELETE: statisticTable.setCalls( new TableCalls( calls.getTableId(), calls.getNumberOfSelects(), @@ -1070,7 +1065,7 @@ private synchronized void updateCalls( long tableId, String kind, TableCalls cal calls.getNumberOfUpdates() ) ); tableStatistic.put( tableId, statisticTable ); break; - case "UPDATE": + case UPDATE: statisticTable.setCalls( new TableCalls( calls.getTableId(), calls.getNumberOfSelects() + 1, @@ -1117,21 +1112,21 @@ public Object getDashboardInformation() { * @return an Objet with all available table statistics */ @Override - public > Object getTableStatistic( long schemaId, long tableId ) { - StatisticTable statisticTable = (StatisticTable) tableStatistic.get( tableId ); - List numericInfo = new ArrayList<>(); - List> alphabeticInfo = new ArrayList<>(); - List> temporalInfo = new ArrayList<>(); - 
statisticTable.setNumericalColumn( numericInfo ); + public Object getTableStatistic( long schemaId, long tableId ) { + StatisticTable statisticTable = tableStatistic.get( tableId ); + List> numericInfo = new ArrayList<>(); + List alphabeticInfo = new ArrayList<>(); + List> temporalInfo = new ArrayList<>(); + statisticTable.setNumericalColumn( (List) numericInfo ); statisticTable.setAlphabeticColumn( alphabeticInfo ); - statisticTable.setTemporalColumn( temporalInfo ); - statisticFields.get( schemaId ).get( tableId ).forEach( ( k, v ) -> { - if ( v.getType().getFamily() == PolyTypeFamily.NUMERIC ) { - numericInfo.add( (NumericalStatisticColumn) v ); - statisticTable.setNumericalColumn( numericInfo ); - } else if ( v.getType().getFamily() == PolyTypeFamily.CHARACTER ) { - alphabeticInfo.add( (AlphabeticStatisticColumn) v ); - statisticTable.setAlphabeticColumn( (List) alphabeticInfo ); + statisticTable.setTemporalColumn( (List) temporalInfo ); + statisticFields.forEach( ( k, v ) -> { + if ( v.type.getFamily() == PolyTypeFamily.NUMERIC ) { + numericInfo.add( (NumericalStatisticColumn) v ); + statisticTable.setNumericalColumn( (List) numericInfo ); + } else if ( v.type.getFamily() == PolyTypeFamily.CHARACTER ) { + alphabeticInfo.add( (AlphabeticStatisticColumn) v ); + statisticTable.setAlphabeticColumn( alphabeticInfo ); } else if ( PolyType.DATETIME_TYPES.contains( Catalog.getInstance().getSnapshot().rel().getColumn( k ).type ) ) { temporalInfo.add( (TemporalStatisticColumn) v ); statisticTable.setTemporalColumn( (List) temporalInfo ); @@ -1204,24 +1199,13 @@ private enum NodeType { @Override - public Map>>> getQualifiedStatisticMap() { - Map>>> map = new HashMap<>(); - for ( Entry>>> namespace : statisticFields.entrySet() ) { - for ( Entry>> entity : namespace.getValue().entrySet() ) { - for ( Entry> field : entity.getValue().entrySet() ) { - StatisticColumn val = field.getValue(); - if ( !map.containsKey( val.getSchema() ) ) { - map.put( val.getSchema(), new HashMap<>() ); - } - Map>> nVal = map.get( val.getSchema() ); - if ( !nVal.containsKey( val.getTable() ) ) { - nVal.put( val.getTable(), new HashMap<>() ); - } - Map> eVal = nVal.get( val.getTable() ); - eVal.put( val.getColumn(), val ); - } - } + public Map> getQualifiedStatisticMap() { + Map> map = new HashMap<>(); + + for ( StatisticColumn val : statisticFields.values() ) { + map.put( String.valueOf( val.columnId ), val ); } + return map; } diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/TemporalStatisticColumn.java b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/TemporalStatisticColumn.java index 698d5305f0..72cb02bc10 100644 --- a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/TemporalStatisticColumn.java +++ b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/TemporalStatisticColumn.java @@ -74,7 +74,7 @@ public void setMax( T max ) { public TemporalStatisticColumn( QueryResult column ) { - super( column.getColumn().namespaceId, column.getEntity().id, column.getColumn().id, column.getColumn().type, StatisticType.TEMPORAL ); + super( column.getColumn().namespaceId, column.getColumn().type ); temporalType = column.getColumn().type.getFamily().name(); } diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/rel2sql/AlgToSqlConverter.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/rel2sql/AlgToSqlConverter.java index 8ca010a80d..406b2ea042 100644 --- 
a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/rel2sql/AlgToSqlConverter.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/rel2sql/AlgToSqlConverter.java @@ -518,8 +518,8 @@ private SqlNodeList identifierList( List names ) { /** * Converts a list of names expressions to a list of single-part {@link SqlIdentifier}s. */ - private SqlNodeList physicalIdentifierList( JdbcEntity partitionPlacement, List columnNames ) { - return new SqlNodeList( partitionPlacement.columns.values().stream().map( c -> new SqlIdentifier( c, ParserPos.ZERO ) ).collect( Collectors.toList() ), POS ); + private SqlNodeList physicalIdentifierList( JdbcEntity entity, List columnNames ) { + return new SqlNodeList( entity.order.stream().map( entity.columns::get ).map( c -> new SqlIdentifier( c, ParserPos.ZERO ) ).collect( Collectors.toList() ), POS ); //return new SqlNodeList( columnNames.stream().map( columnName -> getPhysicalColumnName( partitionPlacement, columnName ) ).collect( Collectors.toList() ), POS ); } diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/stores/AbstractJdbcStore.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/stores/AbstractJdbcStore.java index 96280cd9b1..811aba1a67 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/stores/AbstractJdbcStore.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/stores/AbstractJdbcStore.java @@ -169,7 +169,7 @@ protected StringBuilder buildCreateTableQuery( String schemaName, String physica @Override public void addColumn( Context context, AllocationTable catalogTable, LogicalColumn logicalColumn ) { String physicalColumnName = getPhysicalColumnName( logicalColumn.id ); - PhysicalTable physicalTable = context.getSnapshot().physical().getPhysicalTable( this.getAdapterId(), catalogTable.id ); + PhysicalTable physicalTable = context.getSnapshot().physical().getPhysicalTable( catalogTable.id, this.getAdapterId() ); String physicalTableName = physicalTable.name; String physicalSchemaName = physicalTable.namespaceName; diff --git a/plugins/mapdb-monitoring/src/main/java/org/polypheny/db/monitoring/MapDbRepository.java b/plugins/mapdb-monitoring/src/main/java/org/polypheny/db/monitoring/MapDbRepository.java index 37939ac9b2..a85a611311 100644 --- a/plugins/mapdb-monitoring/src/main/java/org/polypheny/db/monitoring/MapDbRepository.java +++ b/plugins/mapdb-monitoring/src/main/java/org/polypheny/db/monitoring/MapDbRepository.java @@ -28,9 +28,6 @@ import java.util.stream.Collectors; import lombok.NonNull; import lombok.extern.slf4j.Slf4j; -import org.mapdb.DB; -import org.mapdb.DBException; -import org.mapdb.DBMaker; import org.polypheny.db.StatusService; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.monitoring.events.MonitoringDataPoint; @@ -45,7 +42,7 @@ public class MapDbRepository implements PersistentMonitoringRepository { private static final String FILE_PATH = "simpleBackendDb"; private static final String FOLDER_NAME = "monitoring"; protected final HashMap, HashMap> data = new HashMap<>(); - protected DB simpleBackendDb; + //protected DB simpleBackendDb; protected HashMap queryPostCosts; @@ -63,9 +60,9 @@ public void dataPoint( @NonNull MonitoringDataPoint dataPoint ) { table = this.data.get( dataPoint.getClass() ); } - if ( table != null && dataPoint != null ) { + if ( table != null ) { table.put( dataPoint.id(), dataPoint ); - 
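Reviewer note: in the MapDbRepository hunks around here, every MapDB call (the DBMaker setup, the commit() calls, the file-lock handling) is commented out rather than removed, turning the repository into a purely in-memory store for the time being. In effect the data-point sink degenerates to roughly the shape below (illustrative names, not the actual fields):

import java.util.HashMap;
import java.util.Map;
import java.util.UUID;

class InMemoryDataPointSink {

    private final Map<Class<?>, Map<UUID, Object>> data = new HashMap<>();

    void dataPoint( UUID id, Object point ) {
        data.computeIfAbsent( point.getClass(), k -> new HashMap<>() )
                .put( id, point );
        // previously followed by simpleBackendDb.commit(); persistence is disabled for now
    }
}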
this.simpleBackendDb.commit(); + //this.simpleBackendDb.commit(); } } @@ -149,7 +146,7 @@ public void resetAllDataPoints() { return; } data.clear(); - this.simpleBackendDb.commit(); + //this.simpleBackendDb.commit(); } @@ -197,7 +194,7 @@ public void updateQueryPostCosts( @NonNull String physicalQueryClass, long execu queryPostCosts.replace( physicalQueryClass, new QueryPostCostImpl( physicalQueryClass, newTime, samples ) ); } - this.simpleBackendDb.commit(); + // this.simpleBackendDb.commit(); } @@ -207,14 +204,14 @@ public void resetQueryPostCosts() { return; } queryPostCosts.clear(); - this.simpleBackendDb.commit(); + // this.simpleBackendDb.commit(); } protected void initialize( String filePath, String folderName, boolean resetRepository ) { - if ( simpleBackendDb != null ) { + /* if ( simpleBackendDb != null ) { simpleBackendDb.close(); - } + }*/ synchronized ( this ) { File folder = PolyphenyHomeDirManager.getInstance().registerNewFolder( folderName ); @@ -235,7 +232,7 @@ protected void initialize( String filePath, String folderName, boolean resetRepo long start = System.currentTimeMillis(); long finish = System.currentTimeMillis(); - while ( fileLocked && ((finish - start) < timeThreshold) ) { + /*while ( fileLocked && ((finish - start) < timeThreshold) ) { try { simpleBackendDb = DBMaker .fileDB( new File( folder, filePath ) ) @@ -257,7 +254,7 @@ protected void initialize( String filePath, String folderName, boolean resetRepo + "Wait a few seconds or stop the locking process and try again. " ); } - simpleBackendDb.getStore().fileLoad(); + simpleBackendDb.getStore().fileLoad();*/ } } From 8bc82f352b89d532bb57c856405668a58c5cb0a1 Mon Sep 17 00:00:00 2001 From: datomo Date: Sun, 16 Apr 2023 11:24:48 +0200 Subject: [PATCH 066/436] fixing validation missing return, rebuilding of changed plugins during testing --- build.gradle | 6 +- .../org/polypheny/db/adapter/Adapter.java | 3 +- .../db/catalog/snapshot/AllocSnapshot.java | 2 +- .../snapshot/impl/AllocSnapshotImpl.java | 2 +- .../org/polypheny/db/ddl/DdlManagerImpl.java | 12 +- .../db/processing/DataMigratorImpl.java | 2 +- .../db/routing/routers/BaseRouter.java | 6 +- .../db/routing/routers/DmlRouterImpl.java | 4 +- .../routers/FullPlacementQueryRouter.java | 2 +- .../polypheny/db/adapter/csv/CsvSource.java | 3 +- plugins/jdbc-adapter-framework/build.gradle | 1 + .../jdbc/sources/AbstractJdbcSource.java | 4 +- .../jdbc/stores/AbstractJdbcStore.java | 54 +++--- .../db/sql/language/validate/ListScope.java | 1 + .../language/validate/SqlValidatorImpl.java | 176 +++++++++--------- .../java/org/polypheny/db/webui/Crud.java | 2 +- 16 files changed, 143 insertions(+), 137 deletions(-) diff --git a/build.gradle b/build.gradle index b586dfcf4a..28f9afbbcf 100644 --- a/build.gradle +++ b/build.gradle @@ -145,7 +145,6 @@ allprojects { } } - idea { module { downloadJavadoc = true @@ -165,10 +164,15 @@ allprojects { // plugin location ext.pluginsDir = rootProject.buildDir.path + '/plugins' + build { dependsOn(":plugins:build") } +allprojects { + test.dependsOn(":plugins:assemblePlugins") +} + idea { project { diff --git a/core/src/main/java/org/polypheny/db/adapter/Adapter.java b/core/src/main/java/org/polypheny/db/adapter/Adapter.java index d452ab27ad..f921b15b06 100644 --- a/core/src/main/java/org/polypheny/db/adapter/Adapter.java +++ b/core/src/main/java/org/polypheny/db/adapter/Adapter.java @@ -49,6 +49,7 @@ import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.IdBuilder; import 
org.polypheny.db.catalog.entity.CatalogCollectionPlacement; +import org.polypheny.db.catalog.entity.allocation.AllocationEntity; import org.polypheny.db.catalog.entity.allocation.AllocationTable; import org.polypheny.db.catalog.entity.logical.LogicalCollection; import org.polypheny.db.catalog.entity.logical.LogicalTable; @@ -342,7 +343,7 @@ public Entity createDocumentSchema( LogicalCollection catalogEntity, CatalogColl } - public abstract void truncate( Context context, LogicalTable table ); + public abstract void truncate( Context context, AllocationEntity table ); public abstract boolean prepare( PolyXid xid ); diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/AllocSnapshot.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/AllocSnapshot.java index 5ee2f780ce..f59d67fd5c 100644 --- a/core/src/main/java/org/polypheny/db/catalog/snapshot/AllocSnapshot.java +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/AllocSnapshot.java @@ -71,7 +71,7 @@ public interface AllocSnapshot { * @param columnId The id of the specific column * @return List of column placements of specific column */ - List getColumnPlacements( long columnId ); + List getColumnFromLogical( long columnId ); /** * Get column placements of a specific table on a specific adapter on column detail level. diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/AllocSnapshotImpl.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/AllocSnapshotImpl.java index 7aa98e599e..dcdddda666 100644 --- a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/AllocSnapshotImpl.java +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/AllocSnapshotImpl.java @@ -266,7 +266,7 @@ public boolean checkIfExistsColumnPlacement( long adapterId, long columnId ) { @Override - public List getColumnPlacements( long columnId ) { + public List getColumnFromLogical( long columnId ) { return logicalColumnToAlloc.get( columnId ); } diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java index d8c835ccd4..75bbd406f2 100644 --- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java @@ -1112,7 +1112,7 @@ public void setColumnType( LogicalTable catalogTable, String columnName, ColumnT type.scale, type.dimension, type.cardinality ); - for ( AllocationColumn placement : catalog.getSnapshot().alloc().getColumnPlacements( logicalColumn.id ) ) { + for ( AllocationColumn placement : catalog.getSnapshot().alloc().getColumnFromLogical( logicalColumn.id ) ) { AdapterManager.getInstance().getStore( placement.adapterId ).updateColumnType( statement.getPrepareContext(), placement, @@ -2567,7 +2567,7 @@ public void addPartitioning( PartitionInformation partitionInfo, List stores = new ArrayList<>(); fillStores = true; } - List allocationColumns = snapshot.alloc().getColumnPlacements( pkColumn.id ); + List allocationColumns = snapshot.alloc().getColumnFromLogical( pkColumn.id ); for ( AllocationColumn ccp : allocationColumns ) { if ( fillStores ) { // Ask router on which store(s) the table should be placed @@ -2683,7 +2683,7 @@ public void removePartitioning( LogicalTable partitionedTable, Statement stateme LogicalColumn pkColumn = relSnapshot.getColumn( pkColumnIds.get( 0 ) ); // This gets us only one ccp per store (first part of PK) - List allocationColumns = catalog.getSnapshot().alloc().getColumnPlacements( pkColumn.id ); + List allocationColumns = 
catalog.getSnapshot().alloc().getColumnFromLogical( pkColumn.id ); for ( AllocationColumn ccp : allocationColumns ) { // Ask router on which store(s) the table should be placed Adapter adapter = AdapterManager.getInstance().getAdapter( ccp.adapterId ); @@ -3052,10 +3052,8 @@ public void truncate( LogicalTable catalogTable, Statement statement ) { prepareMonitoring( statement, Kind.TRUNCATE, catalogTable ); // Execute truncate on all placements - List placements = statement.getTransaction().getSnapshot().alloc().getDataPlacements( catalogTable.id ); - placements.forEach( placement -> { - AdapterManager.getInstance().getAdapter( placement.adapterId ).truncate( statement.getPrepareContext(), catalogTable ); - } ); + List allocations = statement.getTransaction().getSnapshot().alloc().getFromLogical( catalogTable.id ); + allocations.forEach( a -> AdapterManager.getInstance().getAdapter( a.adapterId ).truncate( statement.getPrepareContext(), a ) ); } diff --git a/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java b/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java index 2a91758672..1417a5c768 100644 --- a/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java +++ b/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java @@ -491,7 +491,7 @@ public static List selectSourcePlacements( LogicalTable table, if ( snapshot.alloc().getDataPlacement( adapterIdWithMostPlacements, table.id ).columnPlacementsOnAdapter.contains( column.id ) ) { placementList.add( snapshot.alloc().getColumn( adapterIdWithMostPlacements, column.id ) ); } else { - for ( AllocationColumn placement : snapshot.alloc().getColumnPlacements( column.id ) ) { + for ( AllocationColumn placement : snapshot.alloc().getColumnFromLogical( column.id ) ) { if ( placement.adapterId != excludingAdapterId ) { placementList.add( placement ); break; diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java b/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java index a2b2b44849..0d4b808355 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java @@ -122,7 +122,7 @@ protected static Map> selectPlacement( LogicalTable // Take the adapter with most placements as base and add missing column placements List placementList = new LinkedList<>(); for ( LogicalColumn column : Catalog.snapshot().rel().getColumns( table.id ) ) { - placementList.add( Catalog.snapshot().alloc().getColumnPlacements( column.id ).get( 0 ) ); + placementList.add( Catalog.snapshot().alloc().getColumnFromLogical( column.id ).get( 0 ) ); } return new HashMap<>() {{ @@ -307,7 +307,7 @@ public AlgNode buildJoinedScan( Statement statement, AlgOptCluster cluster, List // Add primary key for ( Entry> entry : placementsByAdapter.entrySet() ) { for ( LogicalColumn pkColumn : pkColumns ) { - CatalogColumnPlacement pkPlacement = Catalog.getInstance().getSnapshot().alloc().getColumnPlacements( pkColumn.id ).get( 0 ); + CatalogColumnPlacement pkPlacement = Catalog.getInstance().getSnapshot().alloc().getColumnFromLogical( pkColumn.id ).get( 0 ); if ( !entry.getValue().contains( pkPlacement ) ) { entry.getValue().add( pkPlacement ); } @@ -496,7 +496,7 @@ public AlgNode getRelationalScan( LogicalLpgScan alg, long adapterId, Statement protected CatalogEntity getSubstitutionTable( Statement statement, long tableId, long columnId, long adapterId ) { /*LogicalTable nodes = Catalog.getInstance().getTable( 
tableId ); - CatalogColumnPlacement placement = Catalog.getInstance().getColumnPlacements( adapterId, columnId ); + CatalogColumnPlacement placement = Catalog.getInstance().getColumnFromLogical( adapterId, columnId ); List qualifiedTableName = ImmutableList.of( PolySchemaBuilder.buildAdapterSchemaName( placement.adapterUniqueName, diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java b/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java index 78275076b6..43ad457d2a 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java @@ -209,7 +209,7 @@ public AlgNode routeDmlOld( LogicalRelModify modify, Statement statement ) { LogicalColumn pkColumn = snapshot.rel().getColumn( pkColumnIds.get( 0 ) ); // Essentially gets a list of all stores where this table resides - List pkPlacements = snapshot.alloc().getColumnPlacements( pkColumn.id ); + List pkPlacements = snapshot.alloc().getColumnFromLogical( pkColumn.id ); List allocs = snapshot.alloc().getFromLogical( catalogTable.id );//.getPartitionProperty( catalogTable.id ); if ( !allocs.isEmpty() && log.isDebugEnabled() ) { log.debug( "\nListing all relevant stores for table: '{}' and all partitions: {}", catalogTable.name, -1 );//property.partitionGroupIds ); @@ -1365,7 +1365,7 @@ private AlgBuilder handleSelectFromOtherTable( RoutedAlgBuilder builder, Logical long pkid = fromTable.primaryKey; List pkColumnIds = snapshot.rel().getPrimaryKey( pkid ).columnIds; LogicalColumn pkColumn = snapshot.rel().getColumn( pkColumnIds.get( 0 ) ); - List pkPlacements = snapshot.alloc().getColumnPlacements( pkColumn.id ); + List pkPlacements = snapshot.alloc().getColumnFromLogical( pkColumn.id ); List nodes = new ArrayList<>(); for ( AllocationColumn pkPlacement : pkPlacements ) { diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/FullPlacementQueryRouter.java b/dbms/src/main/java/org/polypheny/db/routing/routers/FullPlacementQueryRouter.java index 67ebd6794c..9a4ffa2daf 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/FullPlacementQueryRouter.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/FullPlacementQueryRouter.java @@ -151,7 +151,7 @@ protected Set> selectPlacement( LogicalTable catalogTable // Filter for placements by adapters List adapters = Catalog.snapshot().alloc().getColumnPlacementsByAdapter( catalogTable.id ).entrySet() .stream() - .filter( elem -> elem.getValue().containsAll( usedColumns ) ) + .filter( elem -> new HashSet<>( elem.getValue() ).containsAll( usedColumns ) ) .map( Entry::getKey ) .collect( Collectors.toList() ); diff --git a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java index b8c58c4c27..49222df3ca 100644 --- a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java +++ b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java @@ -42,6 +42,7 @@ import org.polypheny.db.adapter.csv.CsvTable.Flavor; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.IdBuilder; +import org.polypheny.db.catalog.entity.allocation.AllocationEntity; import org.polypheny.db.catalog.entity.allocation.AllocationTable; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.entity.physical.PhysicalEntity; @@ -145,7 +146,7 @@ public Namespace getCurrentSchema() { 
@Override - public void truncate( Context context, LogicalTable table ) { + public void truncate( Context context, AllocationEntity table ) { throw new RuntimeException( "CSV adapter does not support truncate" ); } diff --git a/plugins/jdbc-adapter-framework/build.gradle b/plugins/jdbc-adapter-framework/build.gradle index 0dfd5943f3..71a39f6cd4 100644 --- a/plugins/jdbc-adapter-framework/build.gradle +++ b/plugins/jdbc-adapter-framework/build.gradle @@ -38,6 +38,7 @@ sourceSets { } } + compileJava { dependsOn(":core:processResources") dependsOn(":plugins:sql-language:processResources") diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java index 0f4fad98fd..33b735aac5 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java @@ -36,7 +36,7 @@ import org.polypheny.db.adapter.jdbc.connection.ConnectionHandler; import org.polypheny.db.adapter.jdbc.connection.ConnectionHandlerException; import org.polypheny.db.adapter.jdbc.connection.TransactionalConnectionFactory; -import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.catalog.entity.allocation.AllocationEntity; import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.plugins.PolyPluginManager; import org.polypheny.db.prepare.Context; @@ -118,7 +118,7 @@ public void createNewSchema( Snapshot snapshot, String name, long id ) { @Override - public void truncate( Context context, LogicalTable catalogTable ) { + public void truncate( Context context, AllocationEntity catalogTable ) { // We get the physical schema / table name by checking existing column placements of the same logical table placed on this store. // This works because there is only one physical table for each logical table on JDBC stores. The reason for choosing this // approach rather than using the default physical schema / table names is that this approach allows truncating linked tables. 
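The signature change above moves truncate from the logical to the allocation level: the DDL layer now invokes each adapter once per allocation instead of once per logical table. A minimal self-contained sketch of that calling side, assuming the snapshot accessors used elsewhere in this patch (getFromLogical, getPrepareContext); the class and method names, and the import paths for AdapterManager and Statement, are illustrative assumptions rather than part of the patch:

import java.util.List;
import org.polypheny.db.adapter.AdapterManager;
import org.polypheny.db.catalog.entity.allocation.AllocationEntity;
import org.polypheny.db.catalog.entity.logical.LogicalTable;
import org.polypheny.db.transaction.Statement;

// Hypothetical helper sketching the per-allocation truncate dispatch.
final class TruncateSketch {

    static void truncateAllAllocations( LogicalTable table, Statement statement ) {
        // Resolve every allocation of the logical table and let each adapter
        // clear only the physical entity backing that single allocation.
        List<AllocationEntity> allocations = statement.getTransaction()
                .getSnapshot().alloc().getFromLogical( table.id );
        for ( AllocationEntity allocation : allocations ) {
            AdapterManager.getInstance()
                    .getAdapter( allocation.adapterId )
                    .truncate( statement.getPrepareContext(), allocation );
        }
    }
}

This keeps placement and partition bookkeeping in the DDL layer; an adapter such as AbstractJdbcStore below only has to map one allocation to its physical table before issuing TRUNCATE.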
diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/stores/AbstractJdbcStore.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/stores/AbstractJdbcStore.java index 811aba1a67..fc1b9cbe1e 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/stores/AbstractJdbcStore.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/stores/AbstractJdbcStore.java @@ -34,6 +34,7 @@ import org.polypheny.db.catalog.IdBuilder; import org.polypheny.db.catalog.entity.AllocationColumn; import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; +import org.polypheny.db.catalog.entity.allocation.AllocationEntity; import org.polypheny.db.catalog.entity.allocation.AllocationTable; import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalTable; @@ -270,24 +271,25 @@ public void updateColumnType( Context context, AllocationColumn columnPlacement, if ( !this.dialect.supportsNestedArrays() && logicalColumn.collectionsType != null ) { return; } - for ( CatalogPartitionPlacement partitionPlacement : context.getSnapshot().alloc().getPartitionPlacementsByTableOnAdapter( columnPlacement.adapterId, columnPlacement.tableId ) ) { - StringBuilder builder = new StringBuilder(); - builder.append( "ALTER TABLE " ) - .append( dialect.quoteIdentifier( partitionPlacement.physicalSchemaName ) ) - .append( "." ) - .append( dialect.quoteIdentifier( partitionPlacement.physicalTableName ) ); - //builder.append( " ALTER COLUMN " ).append( dialect.quoteIdentifier( columnPlacement.physicalColumnName ) ); - builder.append( " " ).append( getTypeString( logicalColumn.type ) ); - if ( logicalColumn.length != null ) { - builder.append( "(" ); - builder.append( logicalColumn.length ); - if ( logicalColumn.scale != null ) { - builder.append( "," ).append( logicalColumn.scale ); - } - builder.append( ")" ); + PhysicalTable physicalTable = context.getSnapshot().physical().fromAlloc( columnPlacement.tableId ).get( 0 ).unwrap( PhysicalTable.class ); + + StringBuilder builder = new StringBuilder(); + builder.append( "ALTER TABLE " ) + .append( dialect.quoteIdentifier( physicalTable.namespaceName ) ) + .append( "." ) + .append( dialect.quoteIdentifier( physicalTable.name ) ); + builder.append( " ALTER COLUMN " ).append( dialect.quoteIdentifier( physicalTable.columns.get( columnPlacement.columnId ) ) ); + builder.append( " " ).append( getTypeString( logicalColumn.type ) ); + if ( logicalColumn.length != null ) { + builder.append( "(" ); + builder.append( logicalColumn.length ); + if ( logicalColumn.scale != null ) { + builder.append( "," ).append( logicalColumn.scale ); } - executeUpdate( builder, context ); + builder.append( ")" ); } + executeUpdate( builder, context ); + } @@ -337,20 +339,18 @@ public void dropColumn( Context context, AllocationColumn columnPlacement ) { @Override - public void truncate( Context context, LogicalTable catalogTable ) { + public void truncate( Context context, AllocationEntity allocation ) { // We get the physical schema / table name by checking existing column placements of the same logical table placed on this store. // This works because there is only one physical table for each logical table on JDBC stores. The reason for choosing this // approach rather than using the default physical schema / table names is that this approach allows truncating linked tables. 
- for ( CatalogPartitionPlacement partitionPlacement : catalog.getSnapshot().alloc().getPartitionPlacementsByTableOnAdapter( getAdapterId(), catalogTable.id ) ) { - String physicalTableName = partitionPlacement.physicalTableName; - String physicalSchemaName = partitionPlacement.physicalSchemaName; - StringBuilder builder = new StringBuilder(); - builder.append( "TRUNCATE TABLE " ) - .append( dialect.quoteIdentifier( physicalSchemaName ) ) - .append( "." ) - .append( dialect.quoteIdentifier( physicalTableName ) ); - executeUpdate( builder, context ); - } + PhysicalTable physical = context.getSnapshot().physical().fromAlloc( allocation.id ).get( 0 ).unwrap( PhysicalTable.class ); + + StringBuilder builder = new StringBuilder(); + builder.append( "TRUNCATE TABLE " ) + .append( dialect.quoteIdentifier( physical.namespaceName ) ) + .append( "." ) + .append( dialect.quoteIdentifier( physical.name ) ); + executeUpdate( builder, context ); } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/ListScope.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/ListScope.java index 77b45b776b..e4f4a034e2 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/ListScope.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/ListScope.java @@ -182,6 +182,7 @@ public void resolve( List names, NameMatcher nameMatcher, boolean deep, this, path, null ); + return; } // Recursively look deeper into the record-valued fields of the namespace, if it allows skipping fields. diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorImpl.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorImpl.java index d29418d187..b193933154 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorImpl.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorImpl.java @@ -41,6 +41,7 @@ import java.util.List; import java.util.Locale; import java.util.Map; +import java.util.Map.Entry; import java.util.Objects; import java.util.Set; import java.util.function.Supplier; @@ -476,108 +477,107 @@ private boolean expandStar( List selectItems, Set aliases, List return false; } final ParserPos startPosition = identifier.getPos(); - switch ( identifier.names.size() ) { - case 1: - boolean hasDynamicStruct = false; - for ( ScopeChild child : scope.children ) { - final int before = fields.size(); - if ( child.namespace.getRowType().isDynamicStruct() ) { - hasDynamicStruct = true; - // don't expand star if the underneath table is dynamic. - // Treat this star as a special field in validation/conversion and wait until execution time to expand this star. 
- final SqlNode exp = - new SqlIdentifier( - ImmutableList.of( - child.name, - DynamicRecordType.DYNAMIC_STAR_PREFIX ), - startPosition ); - addToSelectList( - selectItems, - aliases, - fields, - exp, - scope, - includeSystemVars ); - } else { - final SqlNode from = child.namespace.getNode(); - final SqlValidatorNamespace fromNs = getNamespace( from, scope ); - assert fromNs != null; - final AlgDataType rowType = fromNs.getRowType(); - for ( AlgDataTypeField field : rowType.getFieldList() ) { - String columnName = field.getName(); - - // TODO: do real implicit collation here - final SqlIdentifier exp = - new SqlIdentifier( - ImmutableList.of( child.name, columnName ), - startPosition ); - // Don't add expanded rolled up columns - if ( !isRolledUpColumn( exp, scope ) ) { - addOrExpandField( - selectItems, - aliases, - fields, - includeSystemVars, - scope, - exp, - field ); - } - } - } - if ( child.nullable ) { - for ( int i = before; i < fields.size(); i++ ) { - final Map.Entry entry = fields.get( i ); - final AlgDataType type = entry.getValue(); - if ( !type.isNullable() ) { - fields.set( i, Pair.of( entry.getKey(), typeFactory.createTypeWithNullability( type, true ) ) ); - } - } - } - } - // If NATURAL JOIN or USING is present, move key fields to the front of the list, per standard SQL. Disabled if there are dynamic fields. - if ( !hasDynamicStruct || Bug.CALCITE_2400_FIXED ) { - new Permute( scope.getNode().getSqlFrom(), 0 ).permute( selectItems, fields ); - } - return true; - - default: - final SqlIdentifier prefixId = identifier.skipLast( 1 ); - final SqlValidatorScope.ResolvedImpl resolved = new SqlValidatorScope.ResolvedImpl(); - final NameMatcher nameMatcher = scope.validator.nameMatcher; - scope.resolve( prefixId.names, nameMatcher, true, resolved ); - if ( resolved.count() == 0 ) { - // e.g. "select s.t.* from e" or "select r.* from e" - throw newValidationError( prefixId, RESOURCE.unknownIdentifier( prefixId.toString() ) ); - } - final AlgDataType rowType = resolved.only().rowType(); - if ( rowType.isDynamicStruct() ) { + if ( identifier.names.size() == 1 ) { + boolean hasDynamicStruct = false; + for ( ScopeChild child : scope.children ) { + final int before = fields.size(); + if ( child.namespace.getRowType().isDynamicStruct() ) { + hasDynamicStruct = true; // don't expand star if the underneath table is dynamic. + // Treat this star as a special field in validation/conversion and wait until execution time to expand this star. 
+ final SqlNode exp = + new SqlIdentifier( + ImmutableList.of( + child.name, + DynamicRecordType.DYNAMIC_STAR_PREFIX ), + startPosition ); addToSelectList( selectItems, aliases, fields, - prefixId.plus( DynamicRecordType.DYNAMIC_STAR_PREFIX, startPosition ), + exp, scope, includeSystemVars ); - } else if ( rowType.isStruct() ) { + } else { + final SqlNode from = child.namespace.getNode(); + final SqlValidatorNamespace fromNs = getNamespace( from, scope ); + assert fromNs != null; + final AlgDataType rowType = fromNs.getRowType(); for ( AlgDataTypeField field : rowType.getFieldList() ) { String columnName = field.getName(); // TODO: do real implicit collation here - addOrExpandField( - selectItems, - aliases, - fields, - includeSystemVars, - scope, - prefixId.plus( columnName, startPosition ), - field ); + final SqlIdentifier exp = + new SqlIdentifier( + ImmutableList.of( child.name, columnName ), + startPosition ); + // Don't add expanded rolled up columns + if ( !isRolledUpColumn( exp, scope ) ) { + addOrExpandField( + selectItems, + aliases, + fields, + includeSystemVars, + scope, + exp, + field ); + } } - } else { - throw newValidationError( prefixId, RESOURCE.starRequiresRecordType() ); } - return true; + if ( child.nullable ) { + for ( int i = before; i < fields.size(); i++ ) { + final Entry entry = fields.get( i ); + final AlgDataType type = entry.getValue(); + if ( !type.isNullable() ) { + fields.set( i, Pair.of( entry.getKey(), typeFactory.createTypeWithNullability( type, true ) ) ); + } + } + } + } + // If NATURAL JOIN or USING is present, move key fields to the front of the list, per standard SQL. Disabled if there are dynamic fields. + if ( !hasDynamicStruct || Bug.CALCITE_2400_FIXED ) { + new Permute( scope.getNode().getSqlFrom(), 0 ).permute( selectItems, fields ); + } + return true; + } + + final SqlIdentifier prefixId = identifier.skipLast( 1 ); + final SqlValidatorScope.ResolvedImpl resolved = new SqlValidatorScope.ResolvedImpl(); + final NameMatcher nameMatcher = scope.validator.nameMatcher; + scope.resolve( prefixId.names, nameMatcher, true, resolved ); + if ( resolved.count() == 0 ) { + // e.g. "select s.t.* from e" or "select r.* from e" + throw newValidationError( prefixId, RESOURCE.unknownIdentifier( prefixId.toString() ) ); + } + final AlgDataType rowType = resolved.only().rowType(); + if ( rowType.isDynamicStruct() ) { + // don't expand star if the underneath table is dynamic. 
+ addToSelectList( + selectItems, + aliases, + fields, + prefixId.plus( DynamicRecordType.DYNAMIC_STAR_PREFIX, startPosition ), + scope, + includeSystemVars ); + } else if ( rowType.isStruct() ) { + for ( AlgDataTypeField field : rowType.getFieldList() ) { + String columnName = field.getName(); + + // TODO: do real implicit collation here + addOrExpandField( + selectItems, + aliases, + fields, + includeSystemVars, + scope, + prefixId.plus( columnName, startPosition ), + field ); + } + } else { + throw newValidationError( prefixId, RESOURCE.starRequiresRecordType() ); } + return true; + } diff --git a/webui/src/main/java/org/polypheny/db/webui/Crud.java b/webui/src/main/java/org/polypheny/db/webui/Crud.java index 3e6ab447b6..de6e788e53 100644 --- a/webui/src/main/java/org/polypheny/db/webui/Crud.java +++ b/webui/src/main/java/org/polypheny/db/webui/Crud.java @@ -1905,7 +1905,7 @@ private Placement getPlacements( final Index index ) { long pkid = table.primaryKey; List pkColumnIds = snapshot.rel().getPrimaryKey( pkid ).columnIds; LogicalColumn pkColumn = snapshot.rel().getColumn( pkColumnIds.get( 0 ) ); - List pkPlacements = snapshot.alloc().getColumnPlacements( pkColumn.id ); + List pkPlacements = snapshot.alloc().getColumnFromLogical( pkColumn.id ); for ( AllocationColumn placement : pkPlacements ) { Adapter adapter = AdapterManager.getInstance().getAdapter( placement.adapterId ); PartitionProperty property = snapshot.alloc().getPartitionProperty( table.id ); From 15aecc363c2e586ffb5e3a0a6ef2ff4146a867fd Mon Sep 17 00:00:00 2001 From: datomo Date: Sun, 16 Apr 2023 14:56:43 +0200 Subject: [PATCH 067/436] fix add and drop column for ddl tests --- .../db/adapter/java/ReflectiveSchema.java | 3 +- .../catalogs/AllocationRelationalCatalog.java | 3 +- .../catalogs/LogicalRelationalCatalog.java | 6 --- .../entity/LogicalMaterializedView.java | 3 -- .../db/catalog/entity/LogicalView.java | 5 +-- .../catalog/entity/logical/LogicalTable.java | 9 +--- .../catalog/snapshot/LogicalRelSnapshot.java | 3 ++ .../snapshot/impl/AllocSnapshotImpl.java | 17 +++---- .../snapshot/impl/LogicalRelSnapshotImpl.java | 42 +++++++++++++++++ .../db/catalog/MockCatalogReader.java | 6 +-- .../db/schemas/HrClusteredSchema.java | 2 +- .../org/polypheny/db/ddl/DdlManagerImpl.java | 45 +++++++++---------- .../routers/FullPlacementQueryRouter.java | 15 ++++--- .../db/view/MaterializedViewManagerImpl.java | 9 ++-- .../jdbc/stores/AbstractJdbcStore.java | 19 ++++---- .../allocation/PolyAllocRelCatalog.java | 3 +- .../db/catalog/logical/RelationalCatalog.java | 23 +--------- .../validate/IdentifierNamespace.java | 4 +- 18 files changed, 113 insertions(+), 104 deletions(-) diff --git a/core/src/main/java/org/polypheny/db/adapter/java/ReflectiveSchema.java b/core/src/main/java/org/polypheny/db/adapter/java/ReflectiveSchema.java index ab74d566c8..21873c8d88 100644 --- a/core/src/main/java/org/polypheny/db/adapter/java/ReflectiveSchema.java +++ b/core/src/main/java/org/polypheny/db/adapter/java/ReflectiveSchema.java @@ -34,7 +34,6 @@ package org.polypheny.db.adapter.java; -import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableMultimap; import com.google.common.collect.Multimap; @@ -247,7 +246,7 @@ private static class ReflectiveEntity extends LogicalTable implements ScannableE ReflectiveEntity( Type elementType, Enumerable enumerable, Long id, Long partitionId, Long adapterId ) { - super( id, "test", -1, EntityType.ENTITY, null, false,
ImmutableList.of() ); + super( id, "test", -1, EntityType.ENTITY, null, false ); this.elementType = elementType; this.enumerable = enumerable; throw new NotImplementedException(); diff --git a/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationRelationalCatalog.java b/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationRelationalCatalog.java index d7da90c632..6c68ed7429 100644 --- a/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationRelationalCatalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationRelationalCatalog.java @@ -45,9 +45,8 @@ public interface AllocationRelationalCatalog extends AllocationCatalog { * * @param allocationId The id of the adapter * @param columnId The id of the column - * @param columnOnly columnOnly If delete originates from a dropColumn */ - void deleteColumn( long allocationId, long columnId, boolean columnOnly ); + void deleteColumn( long allocationId, long columnId ); /** diff --git a/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalRelationalCatalog.java b/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalRelationalCatalog.java index 0ebfe5ad19..f750dc5cd8 100644 --- a/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalRelationalCatalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalRelationalCatalog.java @@ -240,12 +240,6 @@ public interface LogicalRelationalCatalog extends LogicalCatalog { void deleteConstraint( long constraintId ); - /** - * Deletes all the dependencies of a view. This is used when deleting a view. - * - * @param catalogView view for which to delete its dependencies - */ - void deleteViewDependencies( LogicalView catalogView ); /** * Updates the last time a materialized view has been refreshed. 
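Removing the stored connectedViews list works because the dependency information is fully recoverable from each view's underlyingTables map; the LogicalRelSnapshotImpl hunk later in this patch builds exactly that reverse index. A minimal caller-side sketch of the replacement pattern follows, assuming the shown import path of GenericRuntimeException; the class and method names are illustrative only, not part of the patch:

import java.util.List;
import java.util.stream.Collectors;
import org.polypheny.db.catalog.Catalog;
import org.polypheny.db.catalog.entity.LogicalView;
import org.polypheny.db.catalog.entity.logical.LogicalTable;
import org.polypheny.db.catalog.exceptions.GenericRuntimeException;

// Hypothetical guard showing how dependent views are now looked up on demand.
final class ViewDependencySketch {

    static void assertNoDependentViews( LogicalTable table ) {
        // getConnectedViews() returns an empty list for tables without views,
        // so no per-table bookkeeping on LogicalTable is needed anymore.
        List<LogicalView> views = Catalog.snapshot().rel().getConnectedViews( table.id );
        if ( !views.isEmpty() ) {
            throw new GenericRuntimeException( "Cannot alter table because of underlying views: %s",
                    views.stream().map( v -> v.name ).collect( Collectors.joining( ", " ) ) );
        }
    }
}

The checkViewDependencies change in DdlManagerImpl below applies the same snapshot lookup.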
diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/LogicalMaterializedView.java b/core/src/main/java/org/polypheny/db/catalog/entity/LogicalMaterializedView.java index c96efa3dd2..687647bada 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/LogicalMaterializedView.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/LogicalMaterializedView.java @@ -50,9 +50,7 @@ public LogicalMaterializedView( @Deserialize("name") String name, @Deserialize("namespaceId") long namespaceId, @Deserialize("entityType") String query, - @Deserialize("primaryKey") Long primaryKey, @Deserialize("algCollation") AlgCollation algCollation, - @Deserialize("connectedViews") List connectedViews, @Deserialize("underlyingTables") Map> underlyingTables, @Deserialize("language") QueryLanguage language, @Deserialize("materializedCriteria") MaterializedCriteria materializedCriteria, @@ -66,7 +64,6 @@ public LogicalMaterializedView( query, algCollation, underlyingTables, - connectedViews, language ); Map> map = new HashMap<>(); diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/LogicalView.java b/core/src/main/java/org/polypheny/db/catalog/entity/LogicalView.java index ac591bf232..e12a8d1e81 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/LogicalView.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/LogicalView.java @@ -17,7 +17,6 @@ package org.polypheny.db.catalog.entity; -import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import io.activej.serializer.annotations.Deserialize; import io.activej.serializer.annotations.Serialize; @@ -66,7 +65,6 @@ public LogicalView( @Deserialize("query") String query, @Deserialize("algCollation") AlgCollation algCollation, @Deserialize("underlyingTables") Map> underlyingTables, - @Deserialize("connectedViews") List connectedViews, @Deserialize("language") QueryLanguage language ) { super( id, @@ -74,8 +72,7 @@ public LogicalView( namespaceId, entityType, null, - false, - ImmutableList.copyOf( connectedViews ) ); + false ); this.query = query; this.algCollation = algCollation; this.underlyingTables = ImmutableMap.copyOf( underlyingTables ); diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalTable.java b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalTable.java index fbd762806b..701cd5093d 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalTable.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalTable.java @@ -17,7 +17,6 @@ package org.polypheny.db.catalog.entity.logical; -import com.google.common.collect.ImmutableList; import io.activej.serializer.annotations.Deserialize; import io.activej.serializer.annotations.Serialize; import java.io.Serializable; @@ -25,7 +24,6 @@ import java.util.List; import java.util.stream.Collectors; import lombok.EqualsAndHashCode; -import lombok.Getter; import lombok.NonNull; import lombok.RequiredArgsConstructor; import lombok.experimental.NonFinal; @@ -55,9 +53,6 @@ public class LogicalTable extends LogicalEntity implements Comparable connectedViews; public LogicalTable( @@ -66,13 +61,11 @@ public LogicalTable( @Deserialize("namespaceId") final long namespaceId, @Deserialize("entityType") @NonNull final EntityType type, @Deserialize("primaryKey") final Long primaryKey, - @Deserialize("modifiable") boolean modifiable, - @Deserialize("connectedViews") ImmutableList connectedViews ) { + @Deserialize("modifiable") boolean modifiable ) { super( id, 
name, namespaceId, type, NamespaceType.RELATIONAL ); this.primaryKey = primaryKey; this.modifiable = modifiable; - this.connectedViews = connectedViews; if ( type == EntityType.ENTITY && !modifiable ) { throw new RuntimeException( "Tables of table type TABLE must be modifiable!" ); } diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalRelSnapshot.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalRelSnapshot.java index b873089ed3..96b2fbfd6c 100644 --- a/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalRelSnapshot.java +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalRelSnapshot.java @@ -25,6 +25,7 @@ import org.polypheny.db.catalog.entity.CatalogIndex; import org.polypheny.db.catalog.entity.CatalogKey; import org.polypheny.db.catalog.entity.CatalogPrimaryKey; +import org.polypheny.db.catalog.entity.LogicalView; import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.logistic.Pattern; @@ -281,4 +282,6 @@ public interface LogicalRelSnapshot { AlgNode getNodeInfo( long id ); + List getConnectedViews( long id ); + } diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/AllocSnapshotImpl.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/AllocSnapshotImpl.java index dcdddda666..409afffa68 100644 --- a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/AllocSnapshotImpl.java +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/AllocSnapshotImpl.java @@ -70,7 +70,7 @@ public class AllocSnapshotImpl implements AllocSnapshot { ImmutableMap, List> adapterLogicalTablePlacements; ImmutableMap, AllocationEntity> adapterLogicalTableAlloc; ImmutableMap> logicalAllocs; - ImmutableMap>> tableAdapterColumns; + ImmutableMap>> logicalTableAdapterColumns; public AllocSnapshotImpl( Map allocationCatalogs ) { @@ -109,7 +109,7 @@ public AllocSnapshotImpl( Map allocationCatalogs ) { this.allocColumns = buildAllocColumns(); this.logicalAllocs = buildLogicalAllocs(); - this.tableAdapterColumns = buildTableAdapterColumns(); + this.logicalTableAdapterColumns = buildTableAdapterColumns(); } @@ -122,13 +122,14 @@ private ImmutableMap, AllocationColumn> buildColumns( List>> buildTableAdapterColumns() { Map>> map = new HashMap<>(); for ( AllocationColumn column : this.columns.values() ) { - if ( !map.containsKey( column.tableId ) ) { - map.put( column.tableId, new HashMap<>() ); + AllocationTable table = tables.get( column.tableId ); + if ( !map.containsKey( table.logicalId ) ) { + map.put( table.logicalId, new HashMap<>() ); } - if ( !map.get( column.tableId ).containsKey( column.adapterId ) ) { - map.get( column.tableId ).put( column.adapterId, new ArrayList<>() ); + if ( !map.get( table.logicalId ).containsKey( column.adapterId ) ) { + map.get( table.logicalId ).put( column.adapterId, new ArrayList<>() ); } - map.get( column.tableId ).get( column.adapterId ).add( column.columnId ); + map.get( table.logicalId ).get( column.adapterId ).add( column.columnId ); } return ImmutableMap.copyOf( map ); @@ -291,7 +292,7 @@ public List getColumnPlacementsByColumn( long columnId ) { @Override public Map> getColumnPlacementsByAdapter( long tableId ) { - return tableAdapterColumns.get( tableId ); + return logicalTableAdapterColumns.get( tableId ); } diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalRelSnapshotImpl.java 
b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalRelSnapshotImpl.java index 2ece2fa74e..fd56f31ca6 100644 --- a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalRelSnapshotImpl.java +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalRelSnapshotImpl.java @@ -35,6 +35,7 @@ import org.polypheny.db.catalog.entity.CatalogKey; import org.polypheny.db.catalog.entity.CatalogPrimaryKey; import org.polypheny.db.catalog.entity.LogicalNamespace; +import org.polypheny.db.catalog.entity.LogicalView; import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.logistic.Pattern; @@ -50,6 +51,8 @@ public class LogicalRelSnapshotImpl implements LogicalRelSnapshot { ImmutableMap tables; + ImmutableMap views; + ImmutableMap, LogicalTable> tableNames; ImmutableMap> tableColumns; @@ -78,6 +81,7 @@ public class LogicalRelSnapshotImpl implements LogicalRelSnapshot { ImmutableMap> tableConstraints; ImmutableMap> tableForeignKeys; ImmutableMap nodes; + ImmutableMap> connectedViews; public LogicalRelSnapshotImpl( Map catalogs ) { @@ -158,6 +162,38 @@ public LogicalRelSnapshotImpl( Map catalogs ) { /// ALGNODES e.g. views and materializedViews this.nodes = ImmutableMap.copyOf( catalogs.values().stream().flatMap( c -> c.getNodes().entrySet().stream() ).collect( Collectors.toMap( Entry::getKey, Entry::getValue ) ) ); + + this.views = ImmutableMap.copyOf( tables + .values() + .stream() + .filter( t -> t.unwrap( LogicalView.class ) != null ) + .map( t -> t.unwrap( LogicalView.class ) ) + .collect( Collectors.toMap( e -> e.id, e -> e ) ) ); + + this.connectedViews = buildConnectedViews(); + + } + + + private ImmutableMap> buildConnectedViews() { + Map> map = new HashMap<>(); + + for ( LogicalView view : this.views.values() ) { + for ( long entityId : view.underlyingTables.keySet() ) { + if ( !map.containsKey( entityId ) ) { + map.put( entityId, new ArrayList<>() ); + } + map.get( entityId ).add( view ); + } + } + // add tables which are not connected + for ( long id : this.tables.keySet() ) { + if ( !map.containsKey( id ) ) { + map.put( id, new ArrayList<>() ); + } + } + + return ImmutableMap.copyOf( map ); } @@ -389,4 +425,10 @@ public AlgNode getNodeInfo( long id ) { return nodes.get( id ); } + + @Override + public List getConnectedViews( long id ) { + return connectedViews.get( id ); + } + } diff --git a/core/src/test/java/org/polypheny/db/catalog/MockCatalogReader.java b/core/src/test/java/org/polypheny/db/catalog/MockCatalogReader.java index 01796caeca..2f21440bc0 100644 --- a/core/src/test/java/org/polypheny/db/catalog/MockCatalogReader.java +++ b/core/src/test/java/org/polypheny/db/catalog/MockCatalogReader.java @@ -280,7 +280,7 @@ public void registerRolledUpColumn( String columnName ) { private MockEntity( MockCatalogReader catalogReader, List names, boolean stream, double rowCount, ColumnResolver resolver, InitializerExpressionFactory initializerFactory ) { - super( -1, Util.last( names ), -1, EntityType.ENTITY, null, true, ImmutableList.of() ); + super( -1, Util.last( names ), -1, EntityType.ENTITY, null, true ); this.catalogReader = catalogReader; this.stream = stream; this.rowCount = rowCount; @@ -296,7 +296,7 @@ private MockEntity( MockCatalogReader catalogReader, List names, boolean protected MockEntity( MockCatalogReader catalogReader, boolean stream, double rowCount, List> columnList, List keyList, AlgDataType rowType, List collationList, List names, Set 
monotonicColumnSet, StructKind kind, ColumnResolver resolver, InitializerExpressionFactory initializerFactory ) { - super( -1, Util.last( names ), -1, EntityType.ENTITY, null, true, ImmutableList.of() ); + super( -1, Util.last( names ), -1, EntityType.ENTITY, null, true ); this.catalogReader = catalogReader; this.stream = stream; this.rowCount = rowCount; @@ -318,7 +318,7 @@ protected MockEntity( private class ModifiableEntity extends LogicalTable implements Wrapper { protected ModifiableEntity( String tableName ) { - super( -1, Util.last( names ), -1, EntityType.ENTITY, null, true, ImmutableList.of() ); + super( -1, Util.last( names ), -1, EntityType.ENTITY, null, true ); } diff --git a/core/src/test/java/org/polypheny/db/schemas/HrClusteredSchema.java b/core/src/test/java/org/polypheny/db/schemas/HrClusteredSchema.java index 6317ddbcbc..faa1330a87 100644 --- a/core/src/test/java/org/polypheny/db/schemas/HrClusteredSchema.java +++ b/core/src/test/java/org/polypheny/db/schemas/HrClusteredSchema.java @@ -122,7 +122,7 @@ private static class PkClusteredEntity extends LogicalTable implements Scannable PkClusteredEntity( Function dataTypeBuilder, ImmutableBitSet pkColumns, List data ) { - super( -1, "", -1, EntityType.ENTITY, null, false, ImmutableList.of() ); + super( -1, "", -1, EntityType.ENTITY, null, false ); this.data = data; this.typeBuilder = dataTypeBuilder; this.pkColumns = pkColumns; diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java index 75bbd406f2..4cb0c5fb77 100644 --- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java @@ -130,13 +130,16 @@ private void checkIfDdlPossible( EntityType entityType ) { private void checkViewDependencies( LogicalTable catalogTable ) { - if ( catalogTable.connectedViews.size() > 0 ) { - List views = new ArrayList<>(); - for ( Long id : catalogTable.connectedViews ) { - views.add( catalog.getSnapshot().rel().getTable( id ).name ); - } - throw new GenericRuntimeException( "Cannot alter table because of underlying views: %s ", views.stream().map( String::valueOf ).collect( Collectors.joining( (", ") ) ) ); + List entities = catalog.getSnapshot().rel().getConnectedViews( catalogTable.id ); + if ( entities.isEmpty() ) { + return; } + List views = new ArrayList<>(); + for ( LogicalView view : entities ) { + views.add( view.name ); + } + throw new GenericRuntimeException( "Cannot alter table because of underlying views: %s ", views.stream().map( String::valueOf ).collect( Collectors.joining( (", ") ) ) ); } @@ -341,7 +344,7 @@ public void dropAdapter( String name, Statement statement ) { // Delete column placement in catalog for ( LogicalColumn column : catalog.getSnapshot().rel().getColumns( tableId ) ) { if ( catalog.getSnapshot().alloc().checkIfExistsColumnPlacement( catalogAdapter.id, column.id ) ) { - catalog.getAllocRel( defaultNamespaceId ).deleteColumn( entity.id, column.id, false ); + catalog.getAllocRel( defaultNamespaceId ).deleteColumn( entity.id, column.id ); } } @@ -933,7 +936,7 @@ public void dropColumn( LogicalTable catalogTable, String columnName, Statement // check if model permits operation checkModelLogic( catalogTable, columnName ); - //check if views are dependent from this view + //check if views are dependent on this table checkViewDependencies( catalogTable ); LogicalColumn column = getCatalogColumn( catalogTable.namespaceId, catalogTable.id, columnName ); @@ -956,21
+959,12 @@ public void dropColumn( LogicalTable catalogTable, String columnName, Statement } } - // Delete column from underlying data stores - /*for ( CatalogColumnPlacement dp : catalog.getAllocRel( catalogTable.namespaceId ).getColumnPlacementsByColumn( column.id ) ) { + for ( AllocationColumn allocationColumn : catalog.getSnapshot().alloc().getColumnFromLogical( column.id ) ) { if ( catalogTable.entityType == EntityType.ENTITY ) { - AdapterManager.getInstance().getStore( dp.adapterId ).dropColumn( statement.getPrepareContext(), dp ); - } - catalog.getAllocRel( catalogTable.namespaceId ).deleteColumn( dp.adapterId, dp.columnId, true ); - }*/ - for ( AllocationEntity table : catalog.getSnapshot().alloc().getFromLogical( catalogTable.id ) ) { - for ( AllocationColumn placement : catalog.getSnapshot().alloc().getColumns( table.id ) ) { - if ( catalogTable.entityType == EntityType.ENTITY ) { - AdapterManager.getInstance().getStore( table.adapterId ).dropColumn( statement.getPrepareContext(), placement ); - } - AllocationEntity allocation = catalog.getSnapshot().alloc().getAllocation( table.adapterId, catalogTable.id ); - catalog.getAllocRel( catalogTable.namespaceId ).deleteColumn( allocation.id, placement.columnId, true ); + AdapterManager.getInstance().getStore( allocationColumn.adapterId ).dropColumn( statement.getPrepareContext(), allocationColumn ); } + AllocationEntity allocation = catalog.getSnapshot().alloc().getAllocation( allocationColumn.adapterId, catalogTable.id ); + catalog.getAllocRel( catalogTable.namespaceId ).deleteColumn( allocation.id, allocationColumn.columnId ); } // Delete from catalog @@ -1292,7 +1286,7 @@ public void modifyDataPlacement( LogicalTable catalogTable, List columnIds storeInstance.dropColumn( statement.getPrepareContext(), catalog.getSnapshot().alloc().getColumn( storeInstance.getAdapterId(), columnId ) ); // Drop column placement AllocationEntity allocation = catalog.getSnapshot().alloc().getAllocation( storeInstance.getAdapterId(), catalogTable.id ); - catalog.getAllocRel( catalogTable.namespaceId ).deleteColumn( allocation.id, columnId, true ); + catalog.getAllocRel( catalogTable.namespaceId ).deleteColumn( allocation.id, columnId ); } List tempPartitionGroupList = new ArrayList<>(); @@ -1565,7 +1559,7 @@ public void dropColumnPlacement( LogicalTable catalogTable, String columnName, @ storeInstance.dropColumn( statement.getPrepareContext(), catalog.getSnapshot().alloc().getColumn( storeInstance.getAdapterId(), logicalColumn.id ) ); AllocationEntity allocation = catalog.getSnapshot().alloc().getAllocation( storeInstance.getAdapterId(), catalogTable.id ); // Drop column placement - catalog.getAllocRel( catalogTable.namespaceId ).deleteColumn( allocation.id, logicalColumn.id, false ); + catalog.getAllocRel( catalogTable.namespaceId ).deleteColumn( allocation.id, logicalColumn.id ); // Reset query plan cache, implementation cache & routing cache statement.getQueryProcessor().resetCaches(); @@ -2966,7 +2960,7 @@ public void dropTableOld( LogicalTable catalogTable, Statement statement ) { for ( LogicalColumn column : columns ) { if ( catalog.getSnapshot().alloc().checkIfExistsColumnPlacement( placement.adapterId, column.id ) ) { AllocationEntity allocation = catalog.getSnapshot().alloc().getAllocation( placement.getAdapterId(), catalogTable.id ); - catalog.getAllocRel( catalogTable.namespaceId ).deleteColumn( allocation.id, column.id, false ); + catalog.getAllocRel( catalogTable.namespaceId ).deleteColumn( allocation.id, column.id ); } } } @@ -3029,6 
+3023,9 @@ public void dropTable( LogicalTable catalogTable, Statement statement ) { for ( PhysicalEntity physical : snapshot.physical().fromAlloc( allocation.id ) ) { catalog.getPhysical( catalogTable.namespaceId ).deleteEntity( physical.id ); } + for ( long columnId : allocation.unwrap( AllocationTable.class ).getColumns().keySet() ) { + catalog.getAllocRel( allocation.namespaceId ).deleteColumn( allocation.id, columnId ); + } catalog.getAllocRel( allocation.namespaceId ).deleteAllocation( allocation.id ); } diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/FullPlacementQueryRouter.java b/dbms/src/main/java/org/polypheny/db/routing/routers/FullPlacementQueryRouter.java index 9a4ffa2daf..2277ebfedf 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/FullPlacementQueryRouter.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/FullPlacementQueryRouter.java @@ -21,7 +21,6 @@ import java.util.HashSet; import java.util.List; import java.util.Map; -import java.util.Map.Entry; import java.util.Set; import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; @@ -29,6 +28,7 @@ import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.AllocationColumn; import org.polypheny.db.catalog.entity.allocation.AllocationEntity; +import org.polypheny.db.catalog.entity.allocation.AllocationTable; import org.polypheny.db.catalog.entity.logical.LogicalEntity; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.partition.PartitionManager; @@ -149,16 +149,21 @@ protected Set> selectPlacement( LogicalTable catalogTable List usedColumns = queryInformation.getAllColumnsPerTable( catalogTable.id ); // Filter for placements by adapters - List adapters = Catalog.snapshot().alloc().getColumnPlacementsByAdapter( catalogTable.id ).entrySet() + List allocs = Catalog.snapshot().alloc().getFromLogical( catalogTable.id ).stream() + .map( a -> a.unwrap( AllocationTable.class ) ) + .filter( a -> new HashSet<>( a.getColumns().values().stream().map( AllocationColumn::getColumnId ).collect( Collectors.toList() ) ).containsAll( usedColumns ) ) + .collect( Collectors.toList() ); + + /*List adapters = Catalog.snapshot().alloc().getColumnPlacementsByAdapter( catalogTable.id ).entrySet() .stream() .filter( elem -> new HashSet<>( elem.getValue() ).containsAll( usedColumns ) ) .map( Entry::getKey ) - .collect( Collectors.toList() ); + .collect( Collectors.toList() );*/ final Set> result = new HashSet<>(); - for ( long adapterId : adapters ) { + for ( AllocationEntity alloc : allocs ) { List placements = usedColumns.stream() - .map( colId -> Catalog.snapshot().alloc().getColumn( adapterId, colId ) ) + .map( colId -> alloc.unwrap( AllocationTable.class ).getColumns().get( colId ) ) .collect( Collectors.toList() ); if ( !placements.isEmpty() ) { diff --git a/dbms/src/main/java/org/polypheny/db/view/MaterializedViewManagerImpl.java b/dbms/src/main/java/org/polypheny/db/view/MaterializedViewManagerImpl.java index 521485a54b..8f7b7a628b 100644 --- a/dbms/src/main/java/org/polypheny/db/view/MaterializedViewManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/view/MaterializedViewManagerImpl.java @@ -27,7 +27,6 @@ import java.util.List; import java.util.Map; import java.util.Map.Entry; -import java.util.Objects; import java.util.concurrent.ConcurrentHashMap; import lombok.Getter; import lombok.extern.slf4j.Slf4j; @@ -44,6 +43,7 @@ import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.AllocationColumn; import 
org.polypheny.db.catalog.entity.LogicalMaterializedView; +import org.polypheny.db.catalog.entity.LogicalView; import org.polypheny.db.catalog.entity.MaterializedCriteria; import org.polypheny.db.catalog.entity.MaterializedCriteria.CriteriaType; import org.polypheny.db.catalog.entity.allocation.AllocationEntity; @@ -210,11 +210,10 @@ public void updateCommittedXid( PolyXid xid ) { */ public void materializedUpdate( long potentialInteresting ) { Snapshot snapshot = Catalog.getInstance().getSnapshot(); - LogicalTable catalogTable = snapshot.getNamespaces( null ).stream().map( n -> snapshot.rel().getTable( potentialInteresting ) ).filter( Objects::nonNull ).findFirst().orElse( null ); - List connectedViews = catalogTable.getConnectedViews(); + //LogicalTable catalogTable = snapshot.getNamespaces( null ).stream().map( n -> snapshot.rel().getTable( potentialInteresting ) ).filter( Objects::nonNull ).findFirst().orElse( null ); + List connectedViews = snapshot.rel().getConnectedViews( potentialInteresting ); - for ( long id : connectedViews ) { - LogicalTable view = snapshot.rel().getTable( id ); + for ( LogicalView view : connectedViews ) { if ( view.entityType == EntityType.MATERIALIZED_VIEW ) { MaterializedCriteria materializedCriteria = materializedInfo.get( view.id ); if ( materializedCriteria.getCriteriaType() == CriteriaType.UPDATE ) { diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/stores/AbstractJdbcStore.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/stores/AbstractJdbcStore.java index fc1b9cbe1e..ba41a8c6f4 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/stores/AbstractJdbcStore.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/stores/AbstractJdbcStore.java @@ -326,15 +326,16 @@ public void dropTable( Context context, LogicalTable catalogTable, List pa @Override public void dropColumn( Context context, AllocationColumn columnPlacement ) { - for ( CatalogPartitionPlacement partitionPlacement : context.getSnapshot().alloc().getPartitionPlacementsByTableOnAdapter( columnPlacement.adapterId, columnPlacement.tableId ) ) { - StringBuilder builder = new StringBuilder(); - builder.append( "ALTER TABLE " ) - .append( dialect.quoteIdentifier( partitionPlacement.physicalSchemaName ) ) - .append( "." ) - .append( dialect.quoteIdentifier( partitionPlacement.physicalTableName ) ); - //builder.append( " DROP " ).append( dialect.quoteIdentifier( columnPlacement.physicalColumnName ) ); - executeUpdate( builder, context ); - } + //for ( CatalogPartitionPlacement partitionPlacement : context.getSnapshot().alloc().getAllocation( columnPlacement.tableId ) ) { + PhysicalTable physical = context.getSnapshot().physical().fromAlloc( columnPlacement.tableId ).get( 0 ).unwrap( PhysicalTable.class ); + StringBuilder builder = new StringBuilder(); + builder.append( "ALTER TABLE " ) + .append( dialect.quoteIdentifier( physical.namespaceName ) ) + .append( "." 
) + .append( dialect.quoteIdentifier( physical.name ) ); + builder.append( " DROP " ).append( dialect.quoteIdentifier( physical.columns.get( columnPlacement.columnId ) ) ); + executeUpdate( builder, context ); + //} } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocRelCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocRelCatalog.java index f9d0e9ec55..275c155737 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocRelCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocRelCatalog.java @@ -89,7 +89,7 @@ public AllocationColumn addColumn( long allocationId, long columnId, PlacementTy @Override - public void deleteColumn( long allocationId, long columnId, boolean columnOnly ) { + public void deleteColumn( long allocationId, long columnId ) { allocColumns.remove( Pair.of( allocationId, columnId ) ); } @@ -185,6 +185,7 @@ public void deleteAllocation( long adapterId, long tableId ) { @Override public void deleteAllocation( long allocId ) { tables.remove( allocId ); + } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/RelationalCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/RelationalCatalog.java index f7a5f32355..abf79810b6 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/RelationalCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/RelationalCatalog.java @@ -16,7 +16,6 @@ package org.polypheny.db.catalog.logical; -import com.google.common.collect.ImmutableList; import io.activej.serializer.BinarySerializer; import io.activej.serializer.annotations.Deserialize; import io.activej.serializer.annotations.Serialize; @@ -161,7 +160,7 @@ public LogicalCatalog withLogicalNamespace( LogicalNamespace namespace ) { @Override public LogicalTable addTable( String name, EntityType entityType, boolean modifiable ) { long id = idBuilder.getNewEntityId(); - LogicalTable table = new LogicalTable( id, name, logicalNamespace.id, entityType, null, modifiable, null ); + LogicalTable table = new LogicalTable( id, name, logicalNamespace.id, entityType, null, modifiable ); tables.put( id, table ); return table; } @@ -171,7 +170,7 @@ public LogicalTable addTable( String name, EntityType entityType, boolean modifi public LogicalView addView( String name, long namespaceId, boolean modifiable, AlgNode definition, AlgCollation algCollation, Map> underlyingTables, List connectedViews, AlgDataType fieldList, String query, QueryLanguage language ) { long id = idBuilder.getNewEntityId(); - LogicalView view = new LogicalView( id, name, namespaceId, EntityType.VIEW, query, algCollation, underlyingTables, connectedViews, language ); + LogicalView view = new LogicalView( id, name, namespaceId, EntityType.VIEW, query, algCollation, underlyingTables, language ); tables.put( id, view ); nodes.put( id, definition ); @@ -189,9 +188,7 @@ public LogicalMaterializedView addMaterializedView( final String name, long name name, namespaceId, query, - null, algCollation, - ImmutableList.of(), underlyingTables, language, materializedCriteria, @@ -545,22 +542,6 @@ public void deleteConstraint( long constraintId ) { } - @Override - public void deleteViewDependencies( LogicalView logicalView ) { - for ( long id : logicalView.underlyingTables.keySet() ) { - LogicalTable old = tables.get( id ); - List connectedViews = 
old.connectedViews.stream().filter( e -> e != logicalView.id ).collect( Collectors.toList() ); - - LogicalTable table = old.toBuilder().connectedViews( ImmutableList.copyOf( connectedViews ) ).build(); - - synchronized ( this ) { - tables.put( id, table ); - } - listeners.firePropertyChange( "table", old, table ); - } - } - - @Override public void updateMaterializedViewRefreshTime( long materializedViewId ) { diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/IdentifierNamespace.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/IdentifierNamespace.java index 8cf36c35c4..1717431cc3 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/IdentifierNamespace.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/IdentifierNamespace.java @@ -152,9 +152,9 @@ private SqlValidatorNamespace resolveImpl( SqlIdentifier id ) { } List ns = id.names; if ( ns.size() == 1 ) { - return new EntityNamespace( validator, Catalog.getInstance().getSnapshot().rel().getTable( Catalog.defaultNamespaceId, ns.get( 0 ) ) ); + return new EntityNamespace( validator, validator.snapshot.rel().getTable( Catalog.defaultNamespaceId, ns.get( 0 ) ) ); } else if ( ns.size() == 2 ) { - return new EntityNamespace( validator, Catalog.getInstance().getSnapshot().rel().getTable( ns.get( 0 ), ns.get( 1 ) ) ); + return new EntityNamespace( validator, validator.snapshot.rel().getTable( ns.get( 0 ), ns.get( 1 ) ) ); } else { throw new RuntimeException(); } From 282e8b56c7b1f8d4b6c69f320e3ee59ee5065968 Mon Sep 17 00:00:00 2001 From: datomo Date: Sun, 16 Apr 2023 17:31:14 +0200 Subject: [PATCH 068/436] fixed ddl test --- .../db/catalog/snapshot/LogicalDocSnapshot.java | 5 ++--- .../db/catalog/snapshot/LogicalGraphSnapshot.java | 6 ++---- .../snapshot/impl/LogicalDocSnapshotImpl.java | 5 ++++- .../snapshot/impl/LogicalGraphSnapshotImpl.java | 4 ---- .../snapshot/impl/PhysicalSnapshotImpl.java | 5 ++++- .../db/catalog/snapshot/impl/SnapshotImpl.java | 2 +- .../java/org/polypheny/db/ddl/DdlManagerImpl.java | 11 ++++++++--- .../db/adapter/jdbc/stores/AbstractJdbcStore.java | 2 +- .../org/polypheny/db/sql/language/SqlDdl.java | 15 ++++++--------- .../db/sql/language/ddl/SqlDropTable.java | 10 ++++------ .../ddl/altertable/SqlAlterTableAddColumn.java | 5 +++-- .../ddl/altertable/SqlAlterTableAddPlacement.java | 2 +- .../altertable/SqlAlterTableModifyPlacement.java | 4 ++-- 13 files changed, 38 insertions(+), 38 deletions(-) diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalDocSnapshot.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalDocSnapshot.java index 224b56c1a3..360d60478f 100644 --- a/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalDocSnapshot.java +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalDocSnapshot.java @@ -17,7 +17,6 @@ package org.polypheny.db.catalog.snapshot; import java.util.List; -import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.catalog.entity.logical.LogicalCollection; import org.polypheny.db.catalog.logistic.Pattern; @@ -30,7 +29,7 @@ public interface LogicalDocSnapshot { * @param collectionId The id of the collection * @return The requested collection */ - public abstract LogicalCollection getCollection( long collectionId ); + LogicalCollection getCollection( long collectionId ); /** * Get a collection of collections which match the given naming pattern. 
@@ -39,7 +38,7 @@ public interface LogicalDocSnapshot {
     * @param namePattern The naming pattern of the collection itself, null if all are matched
     * @return collection of collections matching conditions
     */
-    public abstract List<LogicalCollection> getCollections( long namespaceId, Pattern namePattern );
+    List<LogicalCollection> getCollections( long namespaceId, Pattern namePattern );
 
 
     @Deprecated
diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalGraphSnapshot.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalGraphSnapshot.java
index a993e8d637..24f0525e92 100644
--- a/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalGraphSnapshot.java
+++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalGraphSnapshot.java
@@ -30,8 +30,7 @@ public interface LogicalGraphSnapshot {
     * @param id The id of the graph to return
     * @return The graph entity with the provided id
     */
-    public abstract LogicalGraph getGraph( long id );
-
+    LogicalGraph getGraph( long id );
 
 
     /**
@@ -40,13 +39,12 @@ public interface LogicalGraphSnapshot {
     * @param graphName The pattern to which the name has to match, null if every name is matched
     * @return A collection of all graphs matching
     */
-    public abstract List<LogicalGraph> getGraphs( Pattern graphName );
+    List<LogicalGraph> getGraphs( Pattern graphName );
 
     @Deprecated
     LogicalGraph getLogicalGraph( List<String> names );
 
 
-    LogicalGraph getLogicalGraph( long id );
 
     LogicalGraph getLogicalGraph( long namespaceId, String name );
 
diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalDocSnapshotImpl.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalDocSnapshotImpl.java
index de9f89134d..8d7181fce5 100644
--- a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalDocSnapshotImpl.java
+++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalDocSnapshotImpl.java
@@ -18,11 +18,13 @@
 import java.util.List;
 import java.util.Map;
+import lombok.extern.slf4j.Slf4j;
 import org.polypheny.db.catalog.catalogs.LogicalDocumentCatalog;
 import org.polypheny.db.catalog.entity.logical.LogicalCollection;
 import org.polypheny.db.catalog.logistic.Pattern;
 import org.polypheny.db.catalog.snapshot.LogicalDocSnapshot;
 
 
+@Slf4j
 public class LogicalDocSnapshotImpl implements LogicalDocSnapshot {
 
     public LogicalDocSnapshotImpl( Map<Long, LogicalDocumentCatalog> value ) {
@@ -38,7 +40,8 @@ public LogicalCollection getCollection( long collectionId ) {
 
     @Override
     public List<LogicalCollection> getCollections( long namespaceId, Pattern namePattern ) {
-        return null;
+        log.warn( "fix" );
+        return List.of();
     }
 
 
diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalGraphSnapshotImpl.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalGraphSnapshotImpl.java
index d3f4ecd147..b2dab02218 100644
--- a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalGraphSnapshotImpl.java
+++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalGraphSnapshotImpl.java
@@ -47,10 +47,6 @@ public LogicalGraph getLogicalGraph( List<String> names ) {
     }
 
 
-    @Override
-    public LogicalGraph getLogicalGraph( long id ) {
-        return null;
-    }
 
 
     @Override
diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/PhysicalSnapshotImpl.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/PhysicalSnapshotImpl.java
index 24ce67202f..5d5ccfab94 100644
--- a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/PhysicalSnapshotImpl.java
+++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/PhysicalSnapshotImpl.java
@@ -115,7 +115,10 @@ public PhysicalTable
getPhysicalTable( long id ) { @Override public PhysicalTable getPhysicalTable( long logicalId, long adapterId ) { - return adapterLogicalEntity.get( Pair.of( adapterId, logicalId ) ).unwrap( PhysicalTable.class ); + if ( adapterLogicalEntity.get( Pair.of( adapterId, logicalId ) ) != null ) { + return adapterLogicalEntity.get( Pair.of( adapterId, logicalId ) ).unwrap( PhysicalTable.class ); + } + return null; } diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/SnapshotImpl.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/SnapshotImpl.java index d001210029..5d693528e5 100644 --- a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/SnapshotImpl.java +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/SnapshotImpl.java @@ -205,7 +205,7 @@ public LogicalEntity getLogicalEntity( long id ) { return entity; } - return graph.getLogicalGraph( id ); + return graph.getGraph( id ); } diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java index 4cb0c5fb77..ee1de0c7c9 100644 --- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java @@ -2210,8 +2210,11 @@ public void createCollection( long namespaceId, String name, boolean ifNotExists private boolean assertEntityExists( long namespaceId, String name, boolean ifNotExists ) { + Snapshot snapshot = catalog.getSnapshot(); // Check if there is already an entity with this name - if ( catalog.getSnapshot().rel().checkIfExistsEntity( name ) ) { + if ( snapshot.rel().getTable( namespaceId, name ) != null + || snapshot.doc().getCollection( namespaceId, name ) != null + || snapshot.graph().getGraph( namespaceId ) != null ) { if ( ifNotExists ) { // It is ok that there is already a table with this name because "IF NOT EXISTS" was specified return true; @@ -2829,7 +2832,7 @@ public void dropNamespace( String namespaceName, boolean ifExists, Statement sta LogicalNamespace logicalNamespace = catalog.getSnapshot().getNamespace( namespaceName ); // Drop all collections in this namespace - List collections = catalog.getSnapshot().doc().getLogicalCollections( logicalNamespace.id, null ); + List collections = catalog.getSnapshot().doc().getCollections( logicalNamespace.id, null ); for ( LogicalCollection collection : collections ) { dropCollection( collection, statement ); } @@ -3030,7 +3033,9 @@ public void dropTable( LogicalTable catalogTable, Statement statement ) { } // delete logical - + for ( long columnId : catalogTable.getColumnIds() ) { + catalog.getLogicalRel( catalogTable.namespaceId ).deleteColumn( columnId ); + } catalog.getLogicalRel( catalogTable.namespaceId ).deleteTable( catalogTable.id ); catalog.updateSnapshot(); diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/stores/AbstractJdbcStore.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/stores/AbstractJdbcStore.java index ba41a8c6f4..0cb1dd9f9e 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/stores/AbstractJdbcStore.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/stores/AbstractJdbcStore.java @@ -170,7 +170,7 @@ protected StringBuilder buildCreateTableQuery( String schemaName, String physica @Override public void addColumn( Context context, AllocationTable catalogTable, LogicalColumn logicalColumn ) { String physicalColumnName = getPhysicalColumnName( 
logicalColumn.id ); - PhysicalTable physicalTable = context.getSnapshot().physical().getPhysicalTable( catalogTable.id, this.getAdapterId() ); + PhysicalTable physicalTable = context.getSnapshot().physical().fromAlloc( catalogTable.id ).get( 0 ).unwrap( PhysicalTable.class ); String physicalTableName = physicalTable.name; String physicalSchemaName = physicalTable.namespaceName; diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlDdl.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlDdl.java index 36458188ac..6a4c8c7727 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlDdl.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlDdl.java @@ -25,10 +25,8 @@ import org.polypheny.db.adapter.DataSource; import org.polypheny.db.adapter.DataStore; import org.polypheny.db.algebra.constant.Kind; -import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalTable; -import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.languages.ParserPos; import org.polypheny.db.nodes.Operator; import org.polypheny.db.prepare.Context; @@ -46,7 +44,6 @@ public abstract class SqlDdl extends SqlCall { protected static final SqlOperator DDL_OPERATOR = new SqlSpecialOperator( "DDL", Kind.OTHER_DDL ); private final SqlOperator operator; - private final Snapshot snapshot = Catalog.getInstance().getSnapshot(); /** @@ -68,21 +65,21 @@ protected LogicalTable getCatalogTable( Context context, SqlIdentifier tableName long schemaId; String tableOldName; if ( tableName.names.size() == 3 ) { // DatabaseName.SchemaName.TableName - schemaId = snapshot.getNamespace( tableName.names.get( 1 ) ).id; + schemaId = context.getSnapshot().getNamespace( tableName.names.get( 1 ) ).id; tableOldName = tableName.names.get( 2 ); } else if ( tableName.names.size() == 2 ) { // SchemaName.TableName - schemaId = snapshot.getNamespace( tableName.names.get( 0 ) ).id; + schemaId = context.getSnapshot().getNamespace( tableName.names.get( 0 ) ).id; tableOldName = tableName.names.get( 1 ); } else { // TableName - schemaId = snapshot.getNamespace( context.getDefaultSchemaName() ).id; + schemaId = context.getSnapshot().getNamespace( context.getDefaultSchemaName() ).id; tableOldName = tableName.names.get( 0 ); } - return snapshot.rel().getTable( schemaId, tableOldName ); + return context.getSnapshot().rel().getTable( schemaId, tableOldName ); } - protected LogicalColumn getCatalogColumn( long namespaceId, long tableId, SqlIdentifier columnName ) { - return snapshot.rel().getColumn( tableId, columnName.getSimple() ); + protected LogicalColumn getCatalogColumn( Context context, long tableId, SqlIdentifier columnName ) { + return context.getSnapshot().rel().getColumn( tableId, columnName.getSimple() ); } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlDropTable.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlDropTable.java index ccb5e8bae4..afb93fe758 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlDropTable.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlDropTable.java @@ -19,11 +19,11 @@ import org.polypheny.db.algebra.constant.Kind; import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.catalog.exceptions.GenericRuntimeException; import org.polypheny.db.ddl.DdlManager; 
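The SqlDdl hunk above removes the Snapshot field that was captured once when the statement object was created and instead resolves the current snapshot through the prepare Context on every call. A minimal sketch of the failure mode this avoids, assuming snapshots are immutable and replaced whenever the catalog changes; the class and method names below are hypothetical and only illustrate the pattern:

    // Hypothetical sketch, not part of this patch.
    import org.polypheny.db.catalog.Catalog;
    import org.polypheny.db.catalog.entity.logical.LogicalTable;
    import org.polypheny.db.catalog.snapshot.Snapshot;
    import org.polypheny.db.prepare.Context;

    class SnapshotResolution {

        // Anti-pattern: captured once, this snapshot never sees tables created by later DDL.
        private final Snapshot cached = Catalog.getInstance().getSnapshot();

        LogicalTable resolveStale( String name ) {
            return cached.rel().getTable( Catalog.defaultNamespaceId, name );
        }

        // Pattern used by this patch: ask the context for the current snapshot on each call.
        LogicalTable resolveFresh( Context context, String name ) {
            return context.getSnapshot().rel().getTable( Catalog.defaultNamespaceId, name );
        }
    }
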
import org.polypheny.db.languages.ParserPos; import org.polypheny.db.languages.QueryParameters; import org.polypheny.db.prepare.Context; -import org.polypheny.db.runtime.PolyphenyDbContextException; import org.polypheny.db.sql.language.SqlIdentifier; import org.polypheny.db.sql.language.SqlOperator; import org.polypheny.db.sql.language.SqlSpecialOperator; @@ -48,16 +48,14 @@ public class SqlDropTable extends SqlDropObject { @Override public void execute( Context context, Statement statement, QueryParameters parameters ) { - final LogicalTable table; + final LogicalTable table = getCatalogTable( context, name ); - try { - table = getCatalogTable( context, name ); - } catch ( PolyphenyDbContextException e ) { + if ( table == null ) { if ( ifExists ) { // It is ok that there is no database / schema / table with this name because "IF EXISTS" was specified return; } else { - throw e; + throw new GenericRuntimeException( "There exists no table with the name %s and 'IF EXISTS' was not used", name ); } } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddColumn.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddColumn.java index 9f3e220b7d..249e04a3b3 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddColumn.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddColumn.java @@ -22,6 +22,7 @@ import lombok.extern.slf4j.Slf4j; import org.polypheny.db.catalog.entity.allocation.AllocationEntity; import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.catalog.exceptions.GenericRuntimeException; import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.ddl.DdlManager; import org.polypheny.db.ddl.DdlManager.ColumnTypeInformation; @@ -118,11 +119,11 @@ public void execute( Context context, Statement statement, QueryParameters param LogicalTable catalogTable = getCatalogTable( context, table ); if ( catalogTable.entityType != EntityType.ENTITY ) { - throw new RuntimeException( "Not possible to use ALTER TABLE because " + catalogTable.name + " is not a table." 
); + throw new GenericRuntimeException( "Not possible to use ALTER TABLE because %s is not a table.", catalogTable.name ); } if ( column.names.size() != 1 ) { - throw new RuntimeException( "No FQDN allowed here: " + column.toString() ); + throw new GenericRuntimeException( "No FQDN allowed here: %s", column ); } // Make sure that all adapters are of type store (and not source) diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddPlacement.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddPlacement.java index 924358f696..33168f2ed2 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddPlacement.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddPlacement.java @@ -127,7 +127,7 @@ public void execute( Context context, Statement statement, QueryParameters param List columnIds = new LinkedList<>(); for ( SqlNode node : columnList.getSqlList() ) { - LogicalColumn logicalColumn = getCatalogColumn( catalogTable.namespaceId, catalogTable.id, (SqlIdentifier) node ); + LogicalColumn logicalColumn = getCatalogColumn( context, catalogTable.id, (SqlIdentifier) node ); columnIds.add( logicalColumn.id ); } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyPlacement.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyPlacement.java index 0fef9378f4..fb3ec7018d 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyPlacement.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyPlacement.java @@ -123,14 +123,14 @@ public void execute( Context context, Statement statement, QueryParameters param // Check if all columns exist for ( SqlNode node : columnList.getSqlList() ) { - getCatalogColumn( catalogTable.namespaceId, catalogTable.id, (SqlIdentifier) node ); + getCatalogColumn( context, catalogTable.id, (SqlIdentifier) node ); } DataStore storeInstance = getDataStoreInstance( storeName ); DdlManager.getInstance().modifyDataPlacement( catalogTable, columnList.getList().stream() - .map( c -> getCatalogColumn( catalogTable.namespaceId, catalogTable.id, (SqlIdentifier) c ).id ) + .map( c -> getCatalogColumn( context, catalogTable.id, (SqlIdentifier) c ).id ) .collect( Collectors.toList() ), partitionGroupList, partitionGroupNamesList.stream() From fb3ebc9d90fa649d6408a83c568cb713803fce3b Mon Sep 17 00:00:00 2001 From: datomo Date: Sun, 16 Apr 2023 22:50:24 +0200 Subject: [PATCH 069/436] started fixing and adapting key and index logic --- .../org/polypheny/db/adapter/DataStore.java | 6 +- .../db/adapter/index/IndexManager.java | 18 +-- .../common/LogicalConstraintEnforcer.java | 22 ++-- .../org/polypheny/db/catalog/IdBuilder.java | 2 +- .../catalogs/LogicalRelationalCatalog.java | 10 +- .../db/catalog/entity/CatalogConstraint.java | 4 +- ...ForeignKey.java => LogicalForeignKey.java} | 4 +- .../{CatalogIndex.java => LogicalIndex.java} | 10 +- .../{CatalogKey.java => LogicalKey.java} | 6 +- ...PrimaryKey.java => LogicalPrimaryKey.java} | 6 +- .../allocation/AllocationCollection.java | 10 +- .../entity/logical/LogicalCollection.java | 26 +--- .../catalog/entity/logical/LogicalTable.java | 5 +- .../catalog/snapshot/LogicalRelSnapshot.java | 36 +++--- 
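This commit renames the relational key and index entities from the Catalog* to the Logical* naming scheme (CatalogKey becomes LogicalKey, CatalogIndex becomes LogicalIndex, and so on), as the file renames in this diffstat show. A hedged before/after sketch of a typical call site affected by the rename, using only snapshot accessors that appear elsewhere in this commit; tableId, indexName, and table are placeholders:

    LogicalRelSnapshot rel = Catalog.getInstance().getSnapshot().rel();

    // Before this commit the same lookups returned CatalogIndex and CatalogPrimaryKey.
    LogicalIndex index = rel.getIndex( tableId, indexName );
    LogicalPrimaryKey pk = rel.getPrimaryKey( table.primaryKey );
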
.../snapshot/impl/LogicalRelSnapshotImpl.java | 118 ++++++++++++------ .../org/polypheny/db/ddl/DdlManagerImpl.java | 113 +++++++++-------- .../processing/ConstraintEnforceAttacher.java | 20 +-- .../db/processing/DataMigratorImpl.java | 10 +- .../db/transaction/TransactionImpl.java | 2 +- .../org/polypheny/db/avatica/DbmsMeta.java | 35 +++--- .../db/hsqldb/stores/HsqldbStore.java | 75 +++++------ .../allocation/PolyAllocDocCatalog.java | 16 ++- .../db/catalog/logical/RelationalCatalog.java | 90 +++++++------ .../org/polypheny/db/sql/language/SqlDdl.java | 2 +- .../db/sql/language/ddl/SqlCreateTable.java | 2 +- .../language/ddl/SqlDropMaterializedView.java | 2 +- .../db/sql/language/ddl/SqlDropTable.java | 2 +- .../db/sql/language/ddl/SqlDropView.java | 2 +- .../db/sql/language/ddl/SqlTruncate.java | 2 +- .../SqlAlterMaterializedViewAddIndex.java | 2 +- .../SqlAlterMaterializedViewDropIndex.java | 2 +- ...lAlterMaterializedViewFreshnessManual.java | 4 +- .../SqlAlterMaterializedViewRename.java | 2 +- .../SqlAlterMaterializedViewRenameColumn.java | 2 +- .../SqlAlterSourceTableAddColumn.java | 2 +- .../altertable/SqlAlterTableAddColumn.java | 2 +- .../SqlAlterTableAddForeignKey.java | 16 +-- .../ddl/altertable/SqlAlterTableAddIndex.java | 2 +- .../SqlAlterTableAddPartitions.java | 2 +- .../altertable/SqlAlterTableAddPlacement.java | 2 +- .../SqlAlterTableAddPrimaryKey.java | 2 +- .../SqlAlterTableAddUniqueConstraint.java | 2 +- .../altertable/SqlAlterTableDropColumn.java | 2 +- .../SqlAlterTableDropConstraint.java | 2 +- .../SqlAlterTableDropForeignKey.java | 7 +- .../altertable/SqlAlterTableDropIndex.java | 2 +- .../SqlAlterTableDropPlacement.java | 2 +- .../SqlAlterTableDropPrimaryKey.java | 5 +- .../SqlAlterTableMergePartitions.java | 2 +- .../altertable/SqlAlterTableModifyColumn.java | 2 +- .../SqlAlterTableModifyPartitions.java | 2 +- .../SqlAlterTableModifyPlacement.java | 2 +- ...SqlAlterTableModifyPlacementAddColumn.java | 2 +- ...qlAlterTableModifyPlacementDropColumn.java | 2 +- .../ddl/altertable/SqlAlterTableOwner.java | 2 +- .../ddl/altertable/SqlAlterTableRename.java | 2 +- .../altertable/SqlAlterTableRenameColumn.java | 2 +- .../ddl/alterview/SqlAlterViewRename.java | 2 +- .../alterview/SqlAlterViewRenameColumn.java | 2 +- .../db/sql/web/SchemaToJsonMapper.java | 8 +- .../db/sql/map/NamespaceToJsonMapperTest.java | 8 +- .../java/org/polypheny/db/webui/Crud.java | 70 +++++------ 62 files changed, 429 insertions(+), 397 deletions(-) rename core/src/main/java/org/polypheny/db/catalog/entity/{CatalogForeignKey.java => LogicalForeignKey.java} (98%) rename core/src/main/java/org/polypheny/db/catalog/entity/{CatalogIndex.java => LogicalIndex.java} (95%) rename core/src/main/java/org/polypheny/db/catalog/entity/{CatalogKey.java => LogicalKey.java} (95%) rename core/src/main/java/org/polypheny/db/catalog/entity/{CatalogPrimaryKey.java => LogicalPrimaryKey.java} (93%) diff --git a/core/src/main/java/org/polypheny/db/adapter/DataStore.java b/core/src/main/java/org/polypheny/db/adapter/DataStore.java index e83239f7b7..4f893055bc 100644 --- a/core/src/main/java/org/polypheny/db/adapter/DataStore.java +++ b/core/src/main/java/org/polypheny/db/adapter/DataStore.java @@ -31,7 +31,7 @@ import org.polypheny.db.catalog.entity.AllocationColumn; import org.polypheny.db.catalog.entity.CatalogAdapter.AdapterType; import org.polypheny.db.catalog.entity.CatalogGraphPlacement; -import org.polypheny.db.catalog.entity.CatalogIndex; +import org.polypheny.db.catalog.entity.LogicalIndex; import 
org.polypheny.db.catalog.entity.allocation.AllocationTable; import org.polypheny.db.catalog.entity.logical.LogicalCollection; import org.polypheny.db.catalog.entity.logical.LogicalColumn; @@ -73,9 +73,9 @@ public List getSupportedSchemaType() { public abstract void dropColumn( Context context, AllocationColumn columnPlacement ); - public abstract void addIndex( Context context, CatalogIndex catalogIndex, List partitionIds ); + public abstract String addIndex( Context context, LogicalIndex logicalIndex, AllocationTable allocation ); - public abstract void dropIndex( Context context, CatalogIndex catalogIndex, List partitionIds ); + public abstract void dropIndex( Context context, LogicalIndex logicalIndex, List partitionIds ); public abstract void updateColumnType( Context context, AllocationColumn columnPlacement, LogicalColumn logicalColumn, PolyType oldType ); diff --git a/core/src/main/java/org/polypheny/db/adapter/index/IndexManager.java b/core/src/main/java/org/polypheny/db/adapter/index/IndexManager.java index bc465254ee..266c0b2bca 100644 --- a/core/src/main/java/org/polypheny/db/adapter/index/IndexManager.java +++ b/core/src/main/java/org/polypheny/db/adapter/index/IndexManager.java @@ -30,10 +30,10 @@ import org.polypheny.db.adapter.DataStore.AvailableIndexMethod; import org.polypheny.db.adapter.index.Index.IndexFactory; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.entity.CatalogIndex; -import org.polypheny.db.catalog.entity.CatalogKey; -import org.polypheny.db.catalog.entity.CatalogPrimaryKey; +import org.polypheny.db.catalog.entity.LogicalIndex; +import org.polypheny.db.catalog.entity.LogicalKey; import org.polypheny.db.catalog.entity.LogicalNamespace; +import org.polypheny.db.catalog.entity.LogicalPrimaryKey; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.config.RuntimeConfig; import org.polypheny.db.information.InformationAction; @@ -144,7 +144,7 @@ public void initialize( final TransactionManager transactionManager ) { public void restoreIndexes() throws TransactionException { - for ( final CatalogIndex index : Catalog.getInstance().getSnapshot().rel().getIndexes() ) { + for ( final LogicalIndex index : Catalog.getInstance().getSnapshot().rel().getIndexes() ) { if ( index.location == 0 ) { addIndex( index ); } @@ -152,24 +152,24 @@ public void restoreIndexes() throws TransactionException { } - public void addIndex( final CatalogIndex index ) throws TransactionException { + public void addIndex( final LogicalIndex index ) throws TransactionException { addIndex( index, null ); } - public void addIndex( final CatalogIndex index, final Statement statement ) throws TransactionException { + public void addIndex( final LogicalIndex index, final Statement statement ) throws TransactionException { // TODO(s3lph): persistent addIndex( index.id, index.name, index.key, index.method, index.unique, null, statement ); } - protected void addIndex( final long id, final String name, final CatalogKey key, final String method, final Boolean unique, final Boolean persistent, final Statement statement ) throws TransactionException { + protected void addIndex( final long id, final String name, final LogicalKey key, final String method, final Boolean unique, final Boolean persistent, final Statement statement ) throws TransactionException { final IndexFactory factory = INDEX_FACTORIES.stream() .filter( it -> it.canProvide( method, unique, persistent ) ) .findFirst() .orElseThrow( IllegalArgumentException::new ); final LogicalTable 
table = statement.getTransaction().getSnapshot().rel().getTable( key.tableId ); - final CatalogPrimaryKey pk = statement.getTransaction().getSnapshot().rel().getPrimaryKey( table.primaryKey ); + final LogicalPrimaryKey pk = statement.getTransaction().getSnapshot().rel().getPrimaryKey( table.primaryKey ); final Index index = factory.create( id, name, @@ -197,7 +197,7 @@ protected void addIndex( final long id, final String name, final CatalogKey key, } - public void deleteIndex( final CatalogIndex index ) { + public void deleteIndex( final LogicalIndex index ) { deleteIndex( index.id ); } diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalConstraintEnforcer.java b/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalConstraintEnforcer.java index aa28c6cdad..d3ecdd99a8 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalConstraintEnforcer.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalConstraintEnforcer.java @@ -39,9 +39,9 @@ import org.polypheny.db.algebra.operators.OperatorName; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogConstraint; -import org.polypheny.db.catalog.entity.CatalogForeignKey; -import org.polypheny.db.catalog.entity.CatalogKey.EnforcementTime; -import org.polypheny.db.catalog.entity.CatalogPrimaryKey; +import org.polypheny.db.catalog.entity.LogicalForeignKey; +import org.polypheny.db.catalog.entity.LogicalKey.EnforcementTime; +import org.polypheny.db.catalog.entity.LogicalPrimaryKey; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.logistic.ConstraintType; import org.polypheny.db.catalog.snapshot.LogicalRelSnapshot; @@ -101,19 +101,19 @@ private static EnforcementInformation getControl( AlgNode node, Statement statem .stream() .filter( f -> f.key.enforcementTime == enforcementTime ) .collect( Collectors.toCollection( ArrayList::new ) ); - final List foreignKeys = snapshot + final List foreignKeys = snapshot .getForeignKeys( table.id ) .stream() .filter( f -> f.enforcementTime == enforcementTime ) .collect( Collectors.toList() ); - final List exportedKeys = snapshot + final List exportedKeys = snapshot .getExportedKeys( table.id ) .stream() .filter( f -> f.enforcementTime == enforcementTime ) .collect( Collectors.toList() ); // Turn primary key into an artificial unique constraint - CatalogPrimaryKey pk = snapshot.getPrimaryKey( table.primaryKey ); + LogicalPrimaryKey pk = snapshot.getPrimaryKey( table.primaryKey ); if ( pk.enforcementTime == enforcementTime ) { final CatalogConstraint pkc = new CatalogConstraint( 0L, pk.id, ConstraintType.UNIQUE, "PRIMARY KEY", pk ); constraints.add( pkc ); @@ -161,7 +161,7 @@ private static EnforcementInformation getControl( AlgNode node, Statement statem // Enforce FOREIGN KEY constraints in INSERT operations // if ( RuntimeConfig.FOREIGN_KEY_ENFORCEMENT.getBoolean() ) { - for ( final CatalogForeignKey foreignKey : Stream.concat( foreignKeys.stream(), exportedKeys.stream() ).collect( Collectors.toList() ) ) { + for ( final LogicalForeignKey foreignKey : Stream.concat( foreignKeys.stream(), exportedKeys.stream() ).collect( Collectors.toList() ) ) { builder.clear(); final LogicalTable scanOptTable = snapshot.getTable( foreignKey.tableId ); final LogicalTable refOptTable = snapshot.getTable( foreignKey.referencedKeyTableId ); @@ -230,17 +230,17 @@ public static EnforcementInformation getControl( LogicalTable table, Statement s .stream() .filter( c -> 
c.key.enforcementTime == enforcementTime ) .collect( Collectors.toCollection( ArrayList::new ) ); - final List foreignKeys = snapshot.getForeignKeys( table.id ) + final List foreignKeys = snapshot.getForeignKeys( table.id ) .stream() .filter( c -> c.enforcementTime == enforcementTime ) .collect( Collectors.toCollection( ArrayList::new ) ); - final List exportedKeys = snapshot.getExportedKeys( table.id ) + final List exportedKeys = snapshot.getExportedKeys( table.id ) .stream() .filter( c -> c.enforcementTime == enforcementTime ) .collect( Collectors.toCollection( ArrayList::new ) ); // Turn primary key into an artificial unique constraint - CatalogPrimaryKey pk = snapshot.getPrimaryKey( table.primaryKey ); + LogicalPrimaryKey pk = snapshot.getPrimaryKey( table.primaryKey ); if ( pk.enforcementTime == enforcementTime ) { final CatalogConstraint pkc = new CatalogConstraint( 0L, pk.id, ConstraintType.UNIQUE, "PRIMARY KEY", pk ); constraints.add( pkc ); @@ -286,7 +286,7 @@ public static EnforcementInformation getControl( LogicalTable table, Statement s // Enforce FOREIGN KEY constraints in INSERT operations // if ( RuntimeConfig.FOREIGN_KEY_ENFORCEMENT.getBoolean() ) { - for ( final CatalogForeignKey foreignKey : Stream.concat( foreignKeys.stream(), exportedKeys.stream() ).collect( Collectors.toList() ) ) { + for ( final LogicalForeignKey foreignKey : Stream.concat( foreignKeys.stream(), exportedKeys.stream() ).collect( Collectors.toList() ) ) { builder.clear(); //final AlgOptSchema algOptSchema = modify.getCatalogReader(); //final AlgOptTable scanOptTable = algOptSchema.getTableForMember( Collections.singletonList( foreignKey.getTableName() ) ); diff --git a/core/src/main/java/org/polypheny/db/catalog/IdBuilder.java b/core/src/main/java/org/polypheny/db/catalog/IdBuilder.java index a7e4de94ec..576a5a7c4b 100644 --- a/core/src/main/java/org/polypheny/db/catalog/IdBuilder.java +++ b/core/src/main/java/org/polypheny/db/catalog/IdBuilder.java @@ -51,7 +51,7 @@ public static IdBuilder getInstance() { if ( INSTANCE == null ) { INSTANCE = new IdBuilder(); } - return new IdBuilder(); + return INSTANCE; } diff --git a/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalRelationalCatalog.java b/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalRelationalCatalog.java index f750dc5cd8..3416b121fb 100644 --- a/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalRelationalCatalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalRelationalCatalog.java @@ -22,8 +22,8 @@ import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.catalog.entity.CatalogConstraint; -import org.polypheny.db.catalog.entity.CatalogIndex; -import org.polypheny.db.catalog.entity.CatalogKey; +import org.polypheny.db.catalog.entity.LogicalIndex; +import org.polypheny.db.catalog.entity.LogicalKey; import org.polypheny.db.catalog.entity.LogicalMaterializedView; import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.LogicalView; @@ -281,7 +281,7 @@ public interface LogicalRelationalCatalog extends LogicalCatalog { * @param indexName The name of the index * @return The id of the created index */ - long addIndex( long tableId, List columnIds, boolean unique, String method, String methodDisplayName, long adapterId, IndexType type, String indexName ); + LogicalIndex addIndex( long tableId, List columnIds, boolean unique, String method, String methodDisplayName, long adapterId, IndexType type, String 
indexName ); /** * Set physical index name. @@ -304,9 +304,9 @@ public interface LogicalRelationalCatalog extends LogicalCatalog { LogicalNamespace getLogicalNamespace(); - Map getIndexes(); + Map getIndexes(); - Map getKeys(); + Map getKeys(); Map getConstraints(); diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogConstraint.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogConstraint.java index 4387a4cb66..5c1a103fec 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogConstraint.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogConstraint.java @@ -39,7 +39,7 @@ public class CatalogConstraint implements Serializable { @Serialize public String name; @Serialize - public CatalogKey key; + public LogicalKey key; public CatalogConstraint( @@ -47,7 +47,7 @@ public CatalogConstraint( @Deserialize("keyId") final long keyId, @Deserialize("type") @NonNull final ConstraintType constraintType, @Deserialize("name") final String name, - @Deserialize("key") final CatalogKey key ) { + @Deserialize("key") final LogicalKey key ) { this.id = id; this.keyId = keyId; this.type = constraintType; diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogForeignKey.java b/core/src/main/java/org/polypheny/db/catalog/entity/LogicalForeignKey.java similarity index 98% rename from core/src/main/java/org/polypheny/db/catalog/entity/CatalogForeignKey.java rename to core/src/main/java/org/polypheny/db/catalog/entity/LogicalForeignKey.java index a6e2b8979e..47b511e639 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogForeignKey.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/LogicalForeignKey.java @@ -30,7 +30,7 @@ @EqualsAndHashCode(callSuper = true) -public final class CatalogForeignKey extends CatalogKey { +public final class LogicalForeignKey extends LogicalKey { public final String name; public final long referencedKeyId; @@ -41,7 +41,7 @@ public final class CatalogForeignKey extends CatalogKey { public final ImmutableList referencedKeyColumnIds; - public CatalogForeignKey( + public LogicalForeignKey( final long id, @NonNull final String name, final long tableId, diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogIndex.java b/core/src/main/java/org/polypheny/db/catalog/entity/LogicalIndex.java similarity index 95% rename from core/src/main/java/org/polypheny/db/catalog/entity/CatalogIndex.java rename to core/src/main/java/org/polypheny/db/catalog/entity/LogicalIndex.java index 081a668f05..f16b56d35d 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogIndex.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/LogicalIndex.java @@ -33,7 +33,7 @@ @EqualsAndHashCode(callSuper = false) @Value @SuperBuilder(toBuilder = true) -public class CatalogIndex implements Serializable { +public class LogicalIndex implements Serializable { private static final long serialVersionUID = -318228681682792406L; @@ -54,12 +54,12 @@ public class CatalogIndex implements Serializable { @Serialize public String methodDisplayName; @Serialize - public CatalogKey key; + public LogicalKey key; @Serialize public long keyId; - public CatalogIndex( + public LogicalIndex( @Deserialize("id") final long id, @Deserialize("name") @NonNull final String name, @Deserialize("unique") final boolean unique, @@ -68,7 +68,7 @@ public CatalogIndex( @Deserialize("type") final IndexType type, @Deserialize("location") final Long location, @Deserialize("keyId") final long keyId, - @Deserialize("key") 
final CatalogKey key, + @Deserialize("key") final LogicalKey key, @Deserialize("physicalName") final String physicalName ) { this.id = id; this.name = name; @@ -125,7 +125,7 @@ public static class CatalogIndexColumn implements CatalogObject { public String columnName; - public CatalogIndex index; + public LogicalIndex index; @Override diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogKey.java b/core/src/main/java/org/polypheny/db/catalog/entity/LogicalKey.java similarity index 95% rename from core/src/main/java/org/polypheny/db/catalog/entity/CatalogKey.java rename to core/src/main/java/org/polypheny/db/catalog/entity/LogicalKey.java index c58e1f22f0..bddfbac25d 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogKey.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/LogicalKey.java @@ -31,7 +31,7 @@ @EqualsAndHashCode @Value @NonFinal -public class CatalogKey implements CatalogObject, Comparable { +public class LogicalKey implements CatalogObject, Comparable { private static final long serialVersionUID = -5803762884192662540L; @@ -47,7 +47,7 @@ public class CatalogKey implements CatalogObject, Comparable { public EnforcementTime enforcementTime; - public CatalogKey( + public LogicalKey( @Deserialize("id") final long id, @Deserialize("tableId") final long tableId, @Deserialize("namespaceId") final long namespaceId, @@ -94,7 +94,7 @@ public Serializable[] getParameterArray() { @Override - public int compareTo( CatalogKey o ) { + public int compareTo( LogicalKey o ) { if ( o != null ) { return (int) (this.id - o.id); } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogPrimaryKey.java b/core/src/main/java/org/polypheny/db/catalog/entity/LogicalPrimaryKey.java similarity index 93% rename from core/src/main/java/org/polypheny/db/catalog/entity/CatalogPrimaryKey.java rename to core/src/main/java/org/polypheny/db/catalog/entity/LogicalPrimaryKey.java index 31424c4d7b..0a38e75497 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogPrimaryKey.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/LogicalPrimaryKey.java @@ -29,13 +29,13 @@ @EqualsAndHashCode(callSuper = true) -public final class CatalogPrimaryKey extends CatalogKey { +public final class LogicalPrimaryKey extends LogicalKey { @Serialize - private final CatalogKey key; + private final LogicalKey key; - public CatalogPrimaryKey( @Deserialize("key") @NonNull final CatalogKey key ) { + public LogicalPrimaryKey( @Deserialize("key") @NonNull final LogicalKey key ) { super( key.id, key.tableId, diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationCollection.java b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationCollection.java index 1eca455527..f6ed0b9f65 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationCollection.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationCollection.java @@ -16,6 +16,7 @@ package org.polypheny.db.catalog.entity.allocation; +import io.activej.serializer.annotations.Deserialize; import java.io.Serializable; import lombok.EqualsAndHashCode; import lombok.Value; @@ -23,7 +24,6 @@ import org.apache.calcite.linq4j.tree.Expression; import org.apache.calcite.linq4j.tree.Expressions; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.entity.logical.LogicalCollection; import org.polypheny.db.catalog.logistic.NamespaceType; @EqualsAndHashCode(callSuper = true) @@ -31,8 +31,12 @@ 
@NonFinal public class AllocationCollection extends AllocationEntity { - public AllocationCollection( LogicalCollection collection, long id, long adapterId ) { - super( id, collection.id, collection.namespaceId, adapterId, NamespaceType.DOCUMENT ); + public AllocationCollection( + @Deserialize("id") Long id, + @Deserialize("logicalId") long logicalId, + @Deserialize("namespaceId") long namespaceId, + @Deserialize("adapterId") long adapterId ) { + super( id, logicalId, namespaceId, adapterId, NamespaceType.DOCUMENT ); } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalCollection.java b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalCollection.java index 91c80025bd..5594c2b5b7 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalCollection.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalCollection.java @@ -16,9 +16,9 @@ package org.polypheny.db.catalog.entity.logical; +import io.activej.serializer.annotations.Deserialize; import java.io.Serializable; import lombok.EqualsAndHashCode; -import lombok.Getter; import lombok.Value; import org.apache.calcite.linq4j.tree.Expression; import org.apache.calcite.linq4j.tree.Expressions; @@ -33,26 +33,13 @@ public class LogicalCollection extends LogicalEntity implements CatalogObject { private static final long serialVersionUID = -6490762948368178584L; - @Getter - public long id; - public String name; - public long namespaceId; - public EntityType entityType; - public String physicalName; - public LogicalCollection( - long namespaceId, - long id, - String name, - EntityType type, - String physicalName ) { - super( id, name, namespaceId, EntityType.ENTITY, NamespaceType.DOCUMENT ); - this.id = id; - this.namespaceId = namespaceId; - this.name = name; - this.entityType = type; - this.physicalName = physicalName; + @Deserialize("namespaceId") long namespaceId, + @Deserialize("id") long id, + @Deserialize("name") String name, + @Deserialize("entityType") EntityType entityType ) { + super( id, name, namespaceId, entityType, NamespaceType.DOCUMENT ); } @@ -62,7 +49,6 @@ public Serializable[] getParameterArray() { } - @Override public Expression asExpression() { return Expressions.call( Catalog.CATALOG_EXPRESSION, "getCollection", Expressions.constant( id ) ); diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalTable.java b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalTable.java index 701cd5093d..cb39c6912e 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalTable.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalTable.java @@ -76,6 +76,8 @@ public LogicalTable( @Override public Serializable[] getParameterArray() { return new Serializable[]{ + "APP", + getNamespaceName(), name, entityType.name(), "", @@ -83,7 +85,8 @@ public Serializable[] getParameterArray() { null, null, null, - null + null, + "pa" }; } diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalRelSnapshot.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalRelSnapshot.java index 96b2fbfd6c..89b7967f81 100644 --- a/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalRelSnapshot.java +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalRelSnapshot.java @@ -21,10 +21,10 @@ import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogConstraint; -import 
org.polypheny.db.catalog.entity.CatalogForeignKey; -import org.polypheny.db.catalog.entity.CatalogIndex; -import org.polypheny.db.catalog.entity.CatalogKey; -import org.polypheny.db.catalog.entity.CatalogPrimaryKey; +import org.polypheny.db.catalog.entity.LogicalForeignKey; +import org.polypheny.db.catalog.entity.LogicalIndex; +import org.polypheny.db.catalog.entity.LogicalKey; +import org.polypheny.db.catalog.entity.LogicalPrimaryKey; import org.polypheny.db.catalog.entity.LogicalView; import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalTable; @@ -64,7 +64,7 @@ public interface LogicalRelSnapshot { * * @return The keys */ - List getKeys(); + List getKeys(); /** @@ -73,7 +73,7 @@ public interface LogicalRelSnapshot { * @param tableId The id of the table for which the keys are returned * @return The collection of keys */ - List getTableKeys( long tableId ); + List getTableKeys( long tableId ); /** @@ -135,7 +135,7 @@ public interface LogicalRelSnapshot { * @param key The id of the primary key * @return The primary key */ - CatalogPrimaryKey getPrimaryKey( long key ); + LogicalPrimaryKey getPrimaryKey( long key ); /** * Check whether a key is a primary key @@ -175,7 +175,7 @@ public interface LogicalRelSnapshot { * @param tableId The id of the table * @return List of foreign keys */ - List getForeignKeys( long tableId ); + List getForeignKeys( long tableId ); /** * Returns all foreign keys that reference the specified table (exported keys). @@ -183,7 +183,7 @@ public interface LogicalRelSnapshot { * @param tableId The id of the table * @return List of foreign keys */ - List getExportedKeys( long tableId ); + List getExportedKeys( long tableId ); /** * Get all constraints of the specified table @@ -200,7 +200,7 @@ public interface LogicalRelSnapshot { * @param key The key for which the collection is returned * @return The collection of constraints */ - List getConstraints( CatalogKey key ); + List getConstraints( LogicalKey key ); /** * Returns the constraint with the specified name in the specified table. @@ -218,9 +218,9 @@ public interface LogicalRelSnapshot { * @param foreignKeyName The name of the foreign key * @return The foreign key */ - CatalogForeignKey getForeignKey( long tableId, String foreignKeyName ); + LogicalForeignKey getForeignKey( long tableId, String foreignKeyName ); - List getIndexes(); + List getIndexes(); /** * Gets a collection of index for the given key. @@ -228,7 +228,7 @@ public interface LogicalRelSnapshot { * @param key The key for which the collection is returned * @return The collection of indexes */ - List getIndexes( CatalogKey key ); + List getIndexes( LogicalKey key ); /** * Gets a collection of foreign keys for a given {@link Catalog Key}. @@ -236,7 +236,7 @@ public interface LogicalRelSnapshot { * @param key The key for which the collection is returned * @return The collection foreign keys */ - List getForeignKeys( CatalogKey key ); + List getForeignKeys( LogicalKey key ); /** * Returns all indexes of a table @@ -245,7 +245,7 @@ public interface LogicalRelSnapshot { * @param onlyUnique true if only indexes for unique values are returned. false if all indexes are returned. 
* @return List of indexes */ - List getIndexes( long tableId, boolean onlyUnique ); + List getIndexes( long tableId, boolean onlyUnique ); /** * Returns the index with the specified name in the specified table @@ -254,7 +254,7 @@ public interface LogicalRelSnapshot { * @param indexName The name of the index * @return The Index */ - CatalogIndex getIndex( long tableId, String indexName ); + LogicalIndex getIndex( long tableId, String indexName ); /** * Checks if there is an index with the specified name in the specified table. @@ -271,7 +271,7 @@ public interface LogicalRelSnapshot { * @param indexId The id of the index * @return The Index */ - CatalogIndex getIndex( long indexId ); + LogicalIndex getIndex( long indexId ); LogicalTable getTable( long id ); @@ -284,4 +284,6 @@ public interface LogicalRelSnapshot { List getConnectedViews( long id ); + LogicalKey getKeys( long[] columnIds ); + } diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalRelSnapshotImpl.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalRelSnapshotImpl.java index fd56f31ca6..ce03940459 100644 --- a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalRelSnapshotImpl.java +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalRelSnapshotImpl.java @@ -30,11 +30,11 @@ import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.catalog.catalogs.LogicalRelationalCatalog; import org.polypheny.db.catalog.entity.CatalogConstraint; -import org.polypheny.db.catalog.entity.CatalogForeignKey; -import org.polypheny.db.catalog.entity.CatalogIndex; -import org.polypheny.db.catalog.entity.CatalogKey; -import org.polypheny.db.catalog.entity.CatalogPrimaryKey; +import org.polypheny.db.catalog.entity.LogicalForeignKey; +import org.polypheny.db.catalog.entity.LogicalIndex; +import org.polypheny.db.catalog.entity.LogicalKey; import org.polypheny.db.catalog.entity.LogicalNamespace; +import org.polypheny.db.catalog.entity.LogicalPrimaryKey; import org.polypheny.db.catalog.entity.LogicalView; import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalTable; @@ -60,18 +60,18 @@ public class LogicalRelSnapshotImpl implements LogicalRelSnapshot { ImmutableMap, LogicalColumn> columnNames; - ImmutableMap keys; + ImmutableMap keys; - ImmutableMap> tableKeys; + ImmutableMap> tableKeys; - ImmutableMap index; + ImmutableMap index; ImmutableMap constraints; - ImmutableMap foreignKeys; - ImmutableMap primaryKeys; + ImmutableMap foreignKeys; + ImmutableMap primaryKeys; - ImmutableMap> keyToIndexes; + ImmutableMap> keyToIndexes; ImmutableMap, LogicalColumn> tableColumnIdColumn; @@ -79,9 +79,10 @@ public class LogicalRelSnapshotImpl implements LogicalRelSnapshot { ImmutableMap, LogicalColumn> tableIdColumnNameColumn; ImmutableMap> tableConstraints; - ImmutableMap> tableForeignKeys; + ImmutableMap> tableForeignKeys; ImmutableMap nodes; ImmutableMap> connectedViews; + ImmutableMap columnsKey; public LogicalRelSnapshotImpl( Map catalogs ) { @@ -113,19 +114,13 @@ public LogicalRelSnapshotImpl( Map catalogs ) { keys = ImmutableMap.copyOf( catalogs.values().stream().flatMap( c -> c.getKeys().entrySet().stream() ).collect( Collectors.toMap( Entry::getKey, Entry::getValue ) ) ); - Map> tableKeys = new HashMap<>(); - keys.forEach( ( k, v ) -> { - if ( !tableKeys.containsKey( v.tableId ) ) { - tableKeys.put( v.tableId, new ArrayList<>() ); - } - tableKeys.get( v.tableId ).add( v ); - } ); + this.tableKeys = buildTableKeys(); - 
this.tableKeys = ImmutableMap.copyOf( tableKeys ); + this.columnsKey = buildColumnsKey(); this.index = ImmutableMap.copyOf( catalogs.values().stream().flatMap( c -> c.getIndexes().entrySet().stream() ).collect( Collectors.toMap( Entry::getKey, Entry::getValue ) ) ); - Map> keyToIndexes = new HashMap<>(); + Map> keyToIndexes = new HashMap<>(); this.index.forEach( ( k, v ) -> { if ( !keyToIndexes.containsKey( v.keyId ) ) { keyToIndexes.put( v.keyId, new ArrayList<>() ); @@ -134,9 +129,9 @@ public LogicalRelSnapshotImpl( Map catalogs ) { } ); this.keyToIndexes = ImmutableMap.copyOf( keyToIndexes ); - this.foreignKeys = ImmutableMap.copyOf( keys.entrySet().stream().filter( f -> f.getValue() instanceof CatalogForeignKey ).collect( Collectors.toMap( Entry::getKey, e -> (CatalogForeignKey) e.getValue() ) ) ); + this.foreignKeys = ImmutableMap.copyOf( keys.entrySet().stream().filter( f -> f.getValue() instanceof LogicalForeignKey ).collect( Collectors.toMap( Entry::getKey, e -> (LogicalForeignKey) e.getValue() ) ) ); - HashMap> tableForeignKeys = new HashMap<>(); + HashMap> tableForeignKeys = new HashMap<>(); foreignKeys.forEach( ( k, v ) -> { if ( !tableForeignKeys.containsKey( v.tableId ) ) { tableForeignKeys.put( v.tableId, new ArrayList<>() ); @@ -145,7 +140,7 @@ public LogicalRelSnapshotImpl( Map catalogs ) { } ); this.tableForeignKeys = ImmutableMap.copyOf( tableForeignKeys ); - this.primaryKeys = ImmutableMap.copyOf( keys.entrySet().stream().filter( f -> f.getValue() instanceof CatalogPrimaryKey ).collect( Collectors.toMap( Entry::getKey, e -> (CatalogPrimaryKey) e.getValue() ) ) ); + this.primaryKeys = ImmutableMap.copyOf( keys.entrySet().stream().filter( f -> f.getValue() instanceof LogicalPrimaryKey ).collect( Collectors.toMap( Entry::getKey, e -> (LogicalPrimaryKey) e.getValue() ) ) ); //// CONSTRAINTS @@ -175,6 +170,25 @@ public LogicalRelSnapshotImpl( Map catalogs ) { } + private ImmutableMap buildColumnsKey() { + Map map = keys.entrySet().stream().collect( Collectors.toMap( e -> e.getValue().columnIds.stream().mapToLong( c -> c ).toArray(), Entry::getValue ) ); + + return ImmutableMap.copyOf( map ); + } + + + private ImmutableMap> buildTableKeys() { + Map> tableKeys = new HashMap<>(); + keys.forEach( ( k, v ) -> { + if ( !tableKeys.containsKey( v.tableId ) ) { + tableKeys.put( v.tableId, new ArrayList<>() ); + } + tableKeys.get( v.tableId ).add( v ); + } ); + return ImmutableMap.copyOf( tableKeys ); + } + + private ImmutableMap> buildConnectedViews() { Map> map = new HashMap<>(); @@ -203,11 +217,27 @@ public String getAdjustedName( long namespaceId, String entityName ) { @Override - public List getTables( @javax.annotation.Nullable Pattern namespace, Pattern name ) { - if ( name == null ) { - return tables.values().asList(); + public List getTables( @Nullable Pattern namespaceName, Pattern name ) { + List namespaceIds = getNamespaces( namespaceName ).stream().map( n -> n.id ).collect( Collectors.toList() ); + + List tables = this.tables.values().asList(); + if ( name != null ) { + tables = tables.stream() + .filter( t -> + this.namespaces.get( t.namespaceId ).caseSensitive + ? t.name.matches( name.toRegex() ) + : t.name.matches( name.toRegex().toLowerCase() ) ).collect( Collectors.toList() ); } - return tables.values().stream().filter( t -> namespaces.get( t.namespaceId ).caseSensitive ? 
t.name.matches( name.toRegex() ) : t.name.toLowerCase().matches( (name.toRegex().toLowerCase()) ) ).collect( Collectors.toList() ); + return tables.stream().filter( t -> namespaceIds.contains( t.namespaceId ) ).collect( Collectors.toList() ); + } + + + private List getNamespaces( @Nullable Pattern namespaceName ) { + if ( namespaceName == null ) { + return this.namespaces.values().asList(); + } + return this.namespaces.values().stream().filter( n -> n.caseSensitive ? n.name.matches( namespaceName.toRegex() ) : n.name.matches( namespaceName.toRegex().toLowerCase() ) ).collect( Collectors.toList() ); + } @@ -230,13 +260,13 @@ public LogicalTable getTable( long tableId ) { @Override - public List getKeys() { + public List getKeys() { return keys.values().asList(); } @Override - public List getTableKeys( long tableId ) { + public List getTableKeys( long tableId ) { return tableKeys.get( tableId ); } @@ -283,7 +313,7 @@ public boolean checkIfExistsColumn( long tableId, String columnName ) { @Override - public CatalogPrimaryKey getPrimaryKey( long key ) { + public LogicalPrimaryKey getPrimaryKey( long key ) { return primaryKeys.get( key ); } @@ -313,13 +343,13 @@ public boolean isConstraint( long keyId ) { @Override - public List getForeignKeys( long tableId ) { - return tableKeys.get( tableId ).stream().filter( k -> isForeignKey( k.id ) ).map( f -> (CatalogForeignKey) f ).collect( Collectors.toList() ); + public List getForeignKeys( long tableId ) { + return tableKeys.get( tableId ).stream().filter( k -> isForeignKey( k.id ) ).map( f -> (LogicalForeignKey) f ).collect( Collectors.toList() ); } @Override - public List getExportedKeys( long tableId ) { + public List getExportedKeys( long tableId ) { return foreignKeys.values().stream().filter( k -> k.referencedKeyTableId == tableId ).collect( Collectors.toList() ); } @@ -332,7 +362,7 @@ public List getConstraints( long tableId ) { @Override - public List getConstraints( CatalogKey key ) { + public List getConstraints( LogicalKey key ) { return constraints.values().stream().filter( c -> c.keyId == key.id ).collect( Collectors.toList() ); } @@ -344,37 +374,37 @@ public CatalogConstraint getConstraint( long tableId, String constraintName ) { @Override - public CatalogForeignKey getForeignKey( long tableId, String foreignKeyName ) { + public LogicalForeignKey getForeignKey( long tableId, String foreignKeyName ) { return tableForeignKeys.get( tableId ).stream().filter( e -> e.name.equals( foreignKeyName ) ).findFirst().orElse( null ); } @Override - public List getIndexes() { + public List getIndexes() { return index.values().asList(); } @Override - public List getIndexes( CatalogKey key ) { + public List getIndexes( LogicalKey key ) { return keyToIndexes.get( key.id ); } @Override - public List getForeignKeys( CatalogKey key ) { + public List getForeignKeys( LogicalKey key ) { return keyToIndexes.get( key.id ); } @Override - public List getIndexes( long tableId, boolean onlyUnique ) { + public List getIndexes( long tableId, boolean onlyUnique ) { return tableKeys.get( tableId ).stream().flatMap( k -> getIndexes( k ).stream() ).collect( Collectors.toList() ); } @Override - public CatalogIndex getIndex( long tableId, String indexName ) { + public LogicalIndex getIndex( long tableId, String indexName ) { return getIndex().values().stream().filter( i -> i.getKey().tableId == tableId && i.name.equals( indexName ) ).findFirst().orElse( null ); } @@ -386,7 +416,7 @@ public boolean checkIfExistsIndex( long tableId, String indexName ) { @Override - public 
CatalogIndex getIndex( long indexId ) { + public LogicalIndex getIndex( long indexId ) { return index.get( indexId ); } @@ -431,4 +461,10 @@ public List getConnectedViews( long id ) { return connectedViews.get( id ); } + + @Override + public LogicalKey getKeys( long[] columnIds ) { + return columnsKey.get( columnIds ); + } + } diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java index ee1de0c7c9..4581e446f9 100644 --- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java @@ -57,14 +57,14 @@ import org.polypheny.db.catalog.entity.CatalogCollectionPlacement; import org.polypheny.db.catalog.entity.CatalogConstraint; import org.polypheny.db.catalog.entity.CatalogDataPlacement; -import org.polypheny.db.catalog.entity.CatalogForeignKey; import org.polypheny.db.catalog.entity.CatalogGraphPlacement; -import org.polypheny.db.catalog.entity.CatalogIndex; -import org.polypheny.db.catalog.entity.CatalogKey; import org.polypheny.db.catalog.entity.CatalogPartitionGroup; -import org.polypheny.db.catalog.entity.CatalogPrimaryKey; +import org.polypheny.db.catalog.entity.LogicalForeignKey; +import org.polypheny.db.catalog.entity.LogicalIndex; +import org.polypheny.db.catalog.entity.LogicalKey; import org.polypheny.db.catalog.entity.LogicalMaterializedView; import org.polypheny.db.catalog.entity.LogicalNamespace; +import org.polypheny.db.catalog.entity.LogicalPrimaryKey; import org.polypheny.db.catalog.entity.LogicalView; import org.polypheny.db.catalog.entity.MaterializedCriteria; import org.polypheny.db.catalog.entity.MaterializedCriteria.CriteriaType; @@ -323,7 +323,7 @@ public void dropAdapter( String name, Statement statement ) { // Remove foreign keys for ( Long tableId : tablesToDrop ) { - for ( CatalogForeignKey fk : catalog.getSnapshot().rel().getForeignKeys( tableId ) ) { + for ( LogicalForeignKey fk : catalog.getSnapshot().rel().getForeignKeys( tableId ) ) { catalog.getLogicalRel( defaultNamespaceId ).deleteForeignKey( fk.id ); } } @@ -539,6 +539,10 @@ public void addColumn( String columnName, LogicalTable catalogTable, String befo @Override public void addForeignKey( LogicalTable catalogTable, LogicalTable refTable, List columnNames, List refColumnNames, String constraintName, ForeignKeyOption onUpdate, ForeignKeyOption onDelete ) { + // Make sure that this is a table of type TABLE (and not SOURCE) + checkIfDdlPossible( catalogTable.entityType ); + checkIfDdlPossible( refTable.entityType ); + List columnIds = new LinkedList<>(); for ( String columnName : columnNames ) { LogicalColumn logicalColumn = catalog.getSnapshot().rel().getColumn( catalogTable.id, columnName ); @@ -637,10 +641,10 @@ public void addIndex( LogicalTable catalogTable, String indexMethodName, List columnIds, IndexType type ) { // Check if all required columns are present on this store - for ( long columnId : columnIds ) { - if ( !catalog.getSnapshot().alloc().checkIfExistsColumnPlacement( location.getAdapterId(), columnId ) ) { - throw new GenericRuntimeException( "Not all required columns for this index are placed on this store. 
e.g %s ", catalog.getSnapshot().rel().getColumn( columnId ).name ); - } + AllocationTable alloc = catalog.getSnapshot().alloc().getAllocation( location.getAdapterId(), catalogTable.id ).unwrap( AllocationTable.class ); + + if ( !alloc.getColumns().keySet().containsAll( columnIds ) ) { + throw new GenericRuntimeException( "Not all required columns for this index are placed on this store." ); } String method; @@ -662,7 +666,7 @@ private void addDataStoreIndex( LogicalTable catalogTable, String indexMethodNam methodDisplayName = location.getDefaultIndexMethod().displayName; } - long indexId = catalog.getLogicalRel( catalogTable.namespaceId ).addIndex( + LogicalIndex index = catalog.getLogicalRel( catalogTable.namespaceId ).addIndex( catalogTable.id, columnIds, isUnique, @@ -672,10 +676,12 @@ private void addDataStoreIndex( LogicalTable catalogTable, String indexMethodNam type, indexName ); - location.addIndex( + String physicalName = location.addIndex( statement.getPrepareContext(), - catalog.getSnapshot().rel().getIndex( indexId ), - catalog.getSnapshot().alloc().getPartitionsOnDataPlacement( location.getAdapterId(), catalogTable.id ) ); + index, + alloc ); + catalog.getLogicalRel( catalogTable.namespaceId ).setIndexPhysicalName( index.id, physicalName ); + //catalog.getSnapshot().alloc().getPartitionsOnDataPlacement( location.getAdapterId(), catalogTable.id ) ); } @@ -718,7 +724,7 @@ public void addPolyphenyIndex( LogicalTable catalogTable, String indexMethodName methodDisplayName = IndexManager.getDefaultIndexMethod().displayName; } - long indexId = catalog.getLogicalRel( catalogTable.namespaceId ).addIndex( + LogicalIndex index = catalog.getLogicalRel( catalogTable.namespaceId ).addIndex( catalogTable.id, columnIds, isUnique, @@ -728,7 +734,7 @@ public void addPolyphenyIndex( LogicalTable catalogTable, String indexMethodName type, indexName ); - IndexManager.getInstance().addIndex( catalog.getSnapshot().rel().getIndex( indexId ), statement ); + IndexManager.getInstance().addIndex( index, statement ); } @@ -834,7 +840,7 @@ else if ( partitionGroupIds.isEmpty() && partitionGroupNames.isEmpty() ) { addedColumns.add( catalog.getSnapshot().rel().getColumn( cid ) ); } // Check if placement includes primary key columns - CatalogPrimaryKey primaryKey = catalog.getSnapshot().rel().getPrimaryKey( catalogTable.primaryKey ); + LogicalPrimaryKey primaryKey = catalog.getSnapshot().rel().getPrimaryKey( catalogTable.primaryKey ); for ( long cid : primaryKey.columnIds ) { if ( !columnIds.contains( cid ) ) { catalog.getAllocRel( catalogTable.namespaceId ).addColumn( @@ -877,7 +883,7 @@ public void addPrimaryKey( LogicalTable catalogTable, List columnNames, checkModelLogic( catalogTable ); - CatalogPrimaryKey oldPk = catalog.getSnapshot().rel().getPrimaryKey( catalogTable.primaryKey ); + LogicalPrimaryKey oldPk = catalog.getSnapshot().rel().getPrimaryKey( catalogTable.primaryKey ); List columnIds = new LinkedList<>(); for ( String columnName : columnNames ) { @@ -944,7 +950,7 @@ public void dropColumn( LogicalTable catalogTable, String columnName, Statement LogicalRelSnapshot snapshot = catalog.getSnapshot().rel(); // Check if column is part of a key - for ( CatalogKey key : snapshot.getTableKeys( catalogTable.id ) ) { + for ( LogicalKey key : snapshot.getTableKeys( catalogTable.id ) ) { if ( key.columnIds.contains( column.id ) ) { if ( snapshot.isPrimaryKey( key.id ) ) { throw new PolyphenyDbException( "Cannot drop column '" + column.name + "' because it is part of the primary key." 
);
@@ -1014,7 +1020,11 @@ public void dropForeignKey( LogicalTable catalogTable, String foreignKeyName ) {
         // Make sure that this is a table of type TABLE (and not SOURCE)
         checkIfDdlPossible( catalogTable.entityType );

-        CatalogForeignKey foreignKey = catalog.getSnapshot().rel().getForeignKey( catalogTable.id, foreignKeyName );
+        if ( !catalogTable.modifiable ) {
+            throw new GenericRuntimeException( "Not possible to use ALTER TABLE because %s is not modifiable.", catalogTable.name );
+        }
+
+        LogicalForeignKey foreignKey = catalog.getSnapshot().rel().getForeignKey( catalogTable.id, foreignKeyName );
         catalog.getLogicalRel( catalogTable.namespaceId ).deleteForeignKey( foreignKey.id );
     }

@@ -1024,7 +1034,7 @@ public void dropIndex( LogicalTable catalogTable, String indexName, Statement st
         // Make sure that this is a table of type TABLE (and not SOURCE)
         checkIfDdlPossible( catalogTable.entityType );

-        CatalogIndex index = catalog.getSnapshot().rel().getIndex( catalogTable.id, indexName );
+        LogicalIndex index = catalog.getSnapshot().rel().getIndex( catalogTable.id, indexName );

         if ( index.location == 0 ) {
             IndexManager.getInstance().deleteIndex( index );
@@ -1053,7 +1063,7 @@ public void dropTableAllocation( LogicalTable catalogTable, DataStore storeInsta
         }

         // Drop all indexes on this store
-        for ( CatalogIndex index : catalog.getSnapshot().rel().getIndexes( catalogTable.id, false ) ) {
+        for ( LogicalIndex index : catalog.getSnapshot().rel().getIndexes( catalogTable.id, false ) ) {
             if ( index.location == storeInstance.getAdapterId() ) {
                 if ( index.location == 0 ) {
                     // Delete polystore index
@@ -1252,13 +1262,13 @@ public void modifyDataPlacement( LogicalTable catalogTable, List columnIds
         for ( AllocationColumn placement : catalog.getSnapshot().alloc().getColumnPlacementsOnAdapterPerTable( storeInstance.getAdapterId(), catalogTable.id ) ) {
             if ( !columnIds.contains( placement.columnId ) ) {
                 // Check whether there are any indexes located on the store requiring this column
-                for ( CatalogIndex index : snapshot.getIndexes( catalogTable.id, false ) ) {
+                for ( LogicalIndex index : snapshot.getIndexes( catalogTable.id, false ) ) {
                     if ( index.location == storeInstance.getAdapterId() && index.key.columnIds.contains( placement.columnId ) ) {
                         throw new GenericRuntimeException( "The index with name %s prevents the removal of the placement %s", index.name, snapshot.getColumn( placement.columnId ).name );
                     }
                 }
                 // Check whether the column is a primary key column
-                CatalogPrimaryKey primaryKey = snapshot.getPrimaryKey( catalogTable.primaryKey );
+                LogicalPrimaryKey primaryKey = snapshot.getPrimaryKey( catalogTable.primaryKey );
                 if ( primaryKey.columnIds.contains( placement.columnId ) ) {
                     // Check if the placement type is manual. 
If so, change to automatic if ( placement.placementType == PlacementType.MANUAL ) { @@ -1458,16 +1468,16 @@ public void modifyPartitionPlacement( LogicalTable catalogTable, List part dataMigrator.copyData( statement.getTransaction(), catalog.getSnapshot().getAdapter( storeId ), necessaryColumns, newPartitions ); // Add indexes on this new Partition Placement if there is already an index - for ( CatalogIndex currentIndex : catalog.getSnapshot().rel().getIndexes( catalogTable.id, false ) ) { + for ( LogicalIndex currentIndex : catalog.getSnapshot().rel().getIndexes( catalogTable.id, false ) ) { if ( currentIndex.location == storeId ) { - storeInstance.addIndex( statement.getPrepareContext(), currentIndex, newPartitions ); + storeInstance.addIndex( statement.getPrepareContext(), currentIndex, null ); } } } if ( removedPartitions.size() > 0 ) { // Remove indexes - for ( CatalogIndex currentIndex : catalog.getSnapshot().rel().getIndexes( catalogTable.id, false ) ) { + for ( LogicalIndex currentIndex : catalog.getSnapshot().rel().getIndexes( catalogTable.id, false ) ) { if ( currentIndex.location == storeId ) { storeInstance.dropIndex( statement.getPrepareContext(), currentIndex, removedPartitions ); } @@ -1540,7 +1550,7 @@ public void dropColumnPlacement( LogicalTable catalogTable, String columnName, @ throw new GenericRuntimeException( "The placement does not exist on the store" ); } // Check whether there are any indexes located on the store requiring this column - for ( CatalogIndex index : catalog.getSnapshot().rel().getIndexes( catalogTable.id, false ) ) { + for ( LogicalIndex index : catalog.getSnapshot().rel().getIndexes( catalogTable.id, false ) ) { if ( index.location == storeInstance.getAdapterId() && index.key.columnIds.contains( logicalColumn.id ) ) { throw new GenericRuntimeException( "Cannot remove the column %s, as there is a index %s using it", columnName, index.name ); } @@ -1551,7 +1561,7 @@ public void dropColumnPlacement( LogicalTable catalogTable, String columnName, @ } // Check whether the column to drop is a primary key - CatalogPrimaryKey primaryKey = catalog.getSnapshot().rel().getPrimaryKey( catalogTable.primaryKey ); + LogicalPrimaryKey primaryKey = catalog.getSnapshot().rel().getPrimaryKey( catalogTable.primaryKey ); if ( primaryKey.columnIds.contains( logicalColumn.id ) ) { throw new GenericRuntimeException( "Cannot drop primary key" ); } @@ -2610,14 +2620,14 @@ public void addPartitioning( PartitionInformation partitionInfo, List } // Adjust indexes - List indexes = relSnapshot.getIndexes( unPartitionedTable.id, false ); - for ( CatalogIndex index : indexes ) { + List indexes = relSnapshot.getIndexes( unPartitionedTable.id, false ); + for ( LogicalIndex index : indexes ) { // Remove old index DataStore ds = ((DataStore) AdapterManager.getInstance().getAdapter( index.location )); ds.dropIndex( statement.getPrepareContext(), index, snapshot.alloc().getPartitionProperty( unPartitionedTable.id ).partitionIds ); catalog.getLogicalRel( partitionInfo.table.namespaceId ).deleteIndex( index.id ); // Add new index - long newIndexId = catalog.getLogicalRel( partitionInfo.table.namespaceId ).addIndex( + LogicalIndex newIndex = catalog.getLogicalRel( partitionInfo.table.namespaceId ).addIndex( partitionedTable.id, index.key.columnIds, index.unique, @@ -2627,12 +2637,13 @@ public void addPartitioning( PartitionInformation partitionInfo, List index.type, index.name ); if ( index.location == 0 ) { - IndexManager.getInstance().addIndex( relSnapshot.getIndex( newIndexId ), statement 
);
+                IndexManager.getInstance().addIndex( index, statement );
             } else {
-                ds.addIndex(
+                String physicalName = ds.addIndex(
                         statement.getPrepareContext(),
-                        relSnapshot.getIndex( newIndexId ),
-                        catalog.getSnapshot().alloc().getPartitionsOnDataPlacement( ds.getAdapterId(), unPartitionedTable.id ) );
+                        index,
+                        null );//catalog.getSnapshot().alloc().getPartitionsOnDataPlacement( ds.getAdapterId(), unPartitionedTable.id ) );
+                catalog.getLogicalRel( partitionInfo.table.namespaceId ).setIndexPhysicalName( index.id, physicalName );
             }
         }

@@ -2729,15 +2740,15 @@ public void removePartitioning( LogicalTable partitionedTable, Statement stateme
         }

         // Adjust indexes
-        List<CatalogIndex> indexes = relSnapshot.getIndexes( partitionedTable.id, false );
-        for ( CatalogIndex index : indexes ) {
+        List<LogicalIndex> indexes = relSnapshot.getIndexes( partitionedTable.id, false );
+        for ( LogicalIndex index : indexes ) {
             // Remove old index
             DataStore ds = (DataStore) AdapterManager.getInstance().getAdapter( index.location );
             PartitionProperty property = snapshot.alloc().getPartitionProperty( partitionedTable.id );
             ds.dropIndex( statement.getPrepareContext(), index, property.partitionIds );
             catalog.getLogicalRel( partitionedTable.namespaceId ).deleteIndex( index.id );
             // Add new index
-            long newIndexId = catalog.getLogicalRel( partitionedTable.namespaceId ).addIndex(
+            LogicalIndex newIndex = catalog.getLogicalRel( partitionedTable.namespaceId ).addIndex(
                     mergedTable.id,
                     index.key.columnIds,
                     index.unique,
@@ -2747,16 +2758,16 @@ public void removePartitioning( LogicalTable partitionedTable, Statement stateme
                     index.type,
                     index.name );
             if ( index.location == 0 ) {
-                IndexManager.getInstance().addIndex( relSnapshot.getIndex( newIndexId ), statement );
+                IndexManager.getInstance().addIndex( newIndex, statement );
             } else {
                 ds.addIndex(
                         statement.getPrepareContext(),
-                        relSnapshot.getIndex( newIndexId ),
-                        catalog.getSnapshot().alloc().getPartitionsOnDataPlacement( ds.getAdapterId(), mergedTable.id ) );
+                        newIndex,
+                        null );//catalog.getSnapshot().alloc().getPartitionsOnDataPlacement( ds.getAdapterId(), mergedTable.id ) );
             }
         }

-        // Needs to be separated from loop above. Otherwise we loose data
+        // Needs to be separated from loop above. Otherwise, we lose data
         for ( DataStore store : stores ) {
             List<Long> partitionIdsOnStore = new ArrayList<>();
             PartitionProperty property = snapshot.alloc().getPartitionProperty( mergedTable.id );
@@ -2912,11 +2923,11 @@ public void dropTableOld( LogicalTable catalogTable, Statement statement ) {
         checkViewDependencies( catalogTable );

         // Check if there are foreign keys referencing this table
-        List<CatalogForeignKey> selfRefsToDelete = new LinkedList<>();
+        List<LogicalForeignKey> selfRefsToDelete = new LinkedList<>();
         LogicalRelSnapshot relSnapshot = snapshot.rel();
-        List<CatalogForeignKey> exportedKeys = relSnapshot.getExportedKeys( catalogTable.id );
+        List<LogicalForeignKey> exportedKeys = relSnapshot.getExportedKeys( catalogTable.id );
         if ( exportedKeys.size() > 0 ) {
-            for ( CatalogForeignKey foreignKey : exportedKeys ) {
+            for ( LogicalForeignKey foreignKey : exportedKeys ) {
                 if ( foreignKey.tableId == catalogTable.id ) {
                     // If this is a self-reference, drop it later. 
selfRefsToDelete.add( foreignKey ); @@ -2933,7 +2944,7 @@ public void dropTableOld( LogicalTable catalogTable, Statement statement ) { } // Delete all indexes - for ( CatalogIndex index : relSnapshot.getIndexes( catalogTable.id, false ) ) { + for ( LogicalIndex index : relSnapshot.getIndexes( catalogTable.id, false ) ) { if ( index.location == 0 ) { // Delete polystore index IndexManager.getInstance().deleteIndex( index ); @@ -2970,13 +2981,13 @@ public void dropTableOld( LogicalTable catalogTable, Statement statement ) { // Delete the self-referencing foreign keys - for ( CatalogForeignKey foreignKey : selfRefsToDelete ) { + for ( LogicalForeignKey foreignKey : selfRefsToDelete ) { catalog.getLogicalRel( catalogTable.namespaceId ).deleteForeignKey( foreignKey.id ); } // Delete indexes of this table - List indexes = relSnapshot.getIndexes( catalogTable.id, false ); - for ( CatalogIndex index : indexes ) { + List indexes = relSnapshot.getIndexes( catalogTable.id, false ); + for ( LogicalIndex index : indexes ) { catalog.getLogicalRel( catalogTable.namespaceId ).deleteIndex( index.id ); IndexManager.getInstance().deleteIndex( index ); } @@ -2986,8 +2997,8 @@ public void dropTableOld( LogicalTable catalogTable, Statement statement ) { // Remove primary key catalog.getLogicalRel( catalogTable.namespaceId ).deletePrimaryKey( catalogTable.id ); // Delete all foreign keys of the table - List foreignKeys = relSnapshot.getForeignKeys( catalogTable.id ); - for ( CatalogForeignKey foreignKey : foreignKeys ) { + List foreignKeys = relSnapshot.getForeignKeys( catalogTable.id ); + for ( LogicalForeignKey foreignKey : foreignKeys ) { catalog.getLogicalRel( catalogTable.namespaceId ).deleteForeignKey( foreignKey.id ); } // Delete all constraints of the table diff --git a/dbms/src/main/java/org/polypheny/db/processing/ConstraintEnforceAttacher.java b/dbms/src/main/java/org/polypheny/db/processing/ConstraintEnforceAttacher.java index 3891665047..0d5343730c 100644 --- a/dbms/src/main/java/org/polypheny/db/processing/ConstraintEnforceAttacher.java +++ b/dbms/src/main/java/org/polypheny/db/processing/ConstraintEnforceAttacher.java @@ -52,9 +52,9 @@ import org.polypheny.db.algebra.operators.OperatorName; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogConstraint; -import org.polypheny.db.catalog.entity.CatalogForeignKey; -import org.polypheny.db.catalog.entity.CatalogKey.EnforcementTime; -import org.polypheny.db.catalog.entity.CatalogPrimaryKey; +import org.polypheny.db.catalog.entity.LogicalForeignKey; +import org.polypheny.db.catalog.entity.LogicalKey.EnforcementTime; +import org.polypheny.db.catalog.entity.LogicalPrimaryKey; import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.logistic.ConstraintType; @@ -194,10 +194,10 @@ public static AlgRoot enforceConstraintBeforeQuery( AlgRoot logicalRoot, Stateme final Catalog catalog = Catalog.getInstance(); final LogicalTable table; - final CatalogPrimaryKey primaryKey; + final LogicalPrimaryKey primaryKey; final List constraints; - final List foreignKeys; - final List exportedKeys; + final List foreignKeys; + final List exportedKeys; table = root.getEntity().unwrap( LogicalTable.class ); LogicalRelSnapshot snapshot = statement.getTransaction().getSnapshot().rel(); primaryKey = snapshot.getPrimaryKey( table.primaryKey ); @@ -205,7 +205,7 @@ public static AlgRoot enforceConstraintBeforeQuery( AlgRoot logicalRoot, Stateme 
foreignKeys = snapshot.getForeignKeys( table.id ); exportedKeys = snapshot.getExportedKeys( table.id ); // Turn primary key into an artificial unique constraint - CatalogPrimaryKey pk = snapshot.getPrimaryKey( table.primaryKey ); + LogicalPrimaryKey pk = snapshot.getPrimaryKey( table.primaryKey ); final CatalogConstraint pkc = new CatalogConstraint( 0L, pk.id, ConstraintType.UNIQUE, "PRIMARY KEY", pk ); constraints.add( pkc ); @@ -325,7 +325,7 @@ public static AlgRoot enforceConstraintBeforeQuery( AlgRoot logicalRoot, Stateme AlgBuilder builder = AlgBuilder.create( statement ); final AlgNode input = root.getInput().accept( new DeepCopyShuttle() ); final RexBuilder rexBuilder = root.getCluster().getRexBuilder(); - for ( final CatalogForeignKey foreignKey : foreignKeys ) { + for ( final LogicalForeignKey foreignKey : foreignKeys ) { final LogicalTable entity = statement.getDataContext().getSnapshot().rel().getTable( foreignKey.referencedKeyTableId ); final LogicalRelScan scan = LogicalRelScan.create( root.getCluster(), entity ); @@ -474,7 +474,7 @@ public static AlgRoot enforceConstraintBeforeQuery( AlgRoot logicalRoot, Stateme if ( (root.isUpdate() || root.isMerge()) && RuntimeConfig.FOREIGN_KEY_ENFORCEMENT.getBoolean() ) { AlgBuilder builder = AlgBuilder.create( statement ); final RexBuilder rexBuilder = builder.getRexBuilder(); - for ( final CatalogForeignKey foreignKey : foreignKeys ) { + for ( final LogicalForeignKey foreignKey : foreignKeys ) { final String constraintRule = "ON UPDATE " + foreignKey.updateRule; AlgNode input = root.getInput().accept( new DeepCopyShuttle() ); final List projects = new ArrayList<>( foreignKey.columnIds.size() ); @@ -535,7 +535,7 @@ public RexNode visitFieldAccess( RexFieldAccess fieldAccess ) { if ( (root.isDelete() || root.isUpdate() || root.isMerge()) && RuntimeConfig.FOREIGN_KEY_ENFORCEMENT.getBoolean() ) { AlgBuilder builder = AlgBuilder.create( statement ); final RexBuilder rexBuilder = builder.getRexBuilder(); - for ( final CatalogForeignKey foreignKey : exportedKeys ) { + for ( final LogicalForeignKey foreignKey : exportedKeys ) { final String constraintRule = root.isDelete() ? "ON DELETE " + foreignKey.deleteRule : "ON UPDATE " + foreignKey.updateRule; switch ( root.isDelete() ? 
foreignKey.deleteRule : foreignKey.updateRule ) { case RESTRICT: diff --git a/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java b/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java index 1417a5c768..a5767ed25a 100644 --- a/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java +++ b/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java @@ -50,7 +50,7 @@ import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.AllocationColumn; import org.polypheny.db.catalog.entity.CatalogAdapter; -import org.polypheny.db.catalog.entity.CatalogPrimaryKey; +import org.polypheny.db.catalog.entity.LogicalPrimaryKey; import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.catalog.entity.logical.LogicalTable; @@ -171,7 +171,7 @@ public void copyData( Transaction transaction, CatalogAdapter store, List targetColumnPlacements = new LinkedList<>(); @@ -408,7 +408,7 @@ private AlgRoot buildUpdateStatement( Statement statement, List selectSourcePlacements( LogicalTable table, */ @Override public void copySelectiveData( Transaction transaction, CatalogAdapter store, LogicalTable sourceTable, LogicalTable targetTable, List columns, Map> placementDistribution, List targetPartitionIds ) { - CatalogPrimaryKey sourcePrimaryKey = Catalog.getInstance().getSnapshot().rel().getPrimaryKey( sourceTable.primaryKey ); + LogicalPrimaryKey sourcePrimaryKey = Catalog.getInstance().getSnapshot().rel().getPrimaryKey( sourceTable.primaryKey ); AllocSnapshot snapshot = Catalog.getInstance().getSnapshot().alloc(); // Check Lists @@ -622,7 +622,7 @@ public void copyPartitionData( Transaction transaction, CatalogAdapter store, Lo throw new RuntimeException( "Unsupported migration scenario. 
Table ID mismatch" ); } Snapshot snapshot = Catalog.getInstance().getSnapshot(); - CatalogPrimaryKey primaryKey = snapshot.rel().getPrimaryKey( sourceTable.primaryKey ); + LogicalPrimaryKey primaryKey = snapshot.rel().getPrimaryKey( sourceTable.primaryKey ); // Check Lists List targetColumnPlacements = new LinkedList<>(); diff --git a/dbms/src/main/java/org/polypheny/db/transaction/TransactionImpl.java b/dbms/src/main/java/org/polypheny/db/transaction/TransactionImpl.java index c311893f4b..87566436c0 100644 --- a/dbms/src/main/java/org/polypheny/db/transaction/TransactionImpl.java +++ b/dbms/src/main/java/org/polypheny/db/transaction/TransactionImpl.java @@ -39,8 +39,8 @@ import org.polypheny.db.algebra.constant.Kind; import org.polypheny.db.algebra.logical.common.LogicalConstraintEnforcer.EnforcementInformation; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.entity.CatalogKey.EnforcementTime; import org.polypheny.db.catalog.entity.CatalogUser; +import org.polypheny.db.catalog.entity.LogicalKey.EnforcementTime; import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.snapshot.Snapshot; diff --git a/plugins/avatica-interface/src/main/java/org/polypheny/db/avatica/DbmsMeta.java b/plugins/avatica-interface/src/main/java/org/polypheny/db/avatica/DbmsMeta.java index 76be0dd967..1111abcf1e 100644 --- a/plugins/avatica-interface/src/main/java/org/polypheny/db/avatica/DbmsMeta.java +++ b/plugins/avatica-interface/src/main/java/org/polypheny/db/avatica/DbmsMeta.java @@ -71,19 +71,19 @@ import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogDatabase; import org.polypheny.db.catalog.entity.CatalogDatabase.PrimitiveCatalogDatabase; -import org.polypheny.db.catalog.entity.CatalogForeignKey; -import org.polypheny.db.catalog.entity.CatalogForeignKey.CatalogForeignKeyColumn; -import org.polypheny.db.catalog.entity.CatalogForeignKey.CatalogForeignKeyColumn.PrimitiveCatalogForeignKeyColumn; -import org.polypheny.db.catalog.entity.CatalogIndex; -import org.polypheny.db.catalog.entity.CatalogIndex.CatalogIndexColumn; -import org.polypheny.db.catalog.entity.CatalogIndex.CatalogIndexColumn.PrimitiveCatalogIndexColumn; import org.polypheny.db.catalog.entity.CatalogObject; -import org.polypheny.db.catalog.entity.CatalogPrimaryKey; -import org.polypheny.db.catalog.entity.CatalogPrimaryKey.CatalogPrimaryKeyColumn; -import org.polypheny.db.catalog.entity.CatalogPrimaryKey.CatalogPrimaryKeyColumn.PrimitiveCatalogPrimaryKeyColumn; import org.polypheny.db.catalog.entity.CatalogUser; +import org.polypheny.db.catalog.entity.LogicalForeignKey; +import org.polypheny.db.catalog.entity.LogicalForeignKey.CatalogForeignKeyColumn; +import org.polypheny.db.catalog.entity.LogicalForeignKey.CatalogForeignKeyColumn.PrimitiveCatalogForeignKeyColumn; +import org.polypheny.db.catalog.entity.LogicalIndex; +import org.polypheny.db.catalog.entity.LogicalIndex.CatalogIndexColumn; +import org.polypheny.db.catalog.entity.LogicalIndex.CatalogIndexColumn.PrimitiveCatalogIndexColumn; import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.LogicalNamespace.PrimitiveCatalogSchema; +import org.polypheny.db.catalog.entity.LogicalPrimaryKey; +import org.polypheny.db.catalog.entity.LogicalPrimaryKey.CatalogPrimaryKeyColumn; +import org.polypheny.db.catalog.entity.LogicalPrimaryKey.CatalogPrimaryKeyColumn.PrimitiveCatalogPrimaryKeyColumn; import 
org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalColumn.PrimitiveCatalogColumn; import org.polypheny.db.catalog.entity.logical.LogicalTable; @@ -303,12 +303,9 @@ private List getLogicalTables( Pat schemaPattern, Pat tablePattern @NotNull private List getLogicalTables( Pattern schemaPattern, Pattern tablePattern ) { - List namespaces = catalog.getSnapshot().getNamespaces( schemaPattern ); + //List namespaces = catalog.getSnapshot().getNamespaces( schemaPattern ); - return namespaces - .stream() - .flatMap( - n -> catalog.getSnapshot().rel().getTables( Pattern.of( n.name ), tablePattern ).stream() ).collect( Collectors.toList() ); + return catalog.getSnapshot().rel().getTables( schemaPattern, tablePattern ); } @@ -532,7 +529,7 @@ public MetaResultSet getPrimaryKeys( final ConnectionHandle ch, final String dat List primaryKeyColumns = new LinkedList<>(); for ( LogicalTable catalogTable : catalogEntities ) { if ( catalogTable.primaryKey != null ) { - final CatalogPrimaryKey primaryKey = catalog.getSnapshot().rel().getPrimaryKey( catalogTable.primaryKey ); + final LogicalPrimaryKey primaryKey = catalog.getSnapshot().rel().getPrimaryKey( catalogTable.primaryKey ); primaryKeyColumns.addAll( primaryKey.getCatalogPrimaryKeyColumns() ); } } @@ -568,7 +565,7 @@ public MetaResultSet getImportedKeys( final ConnectionHandle ch, final String da final List catalogEntities = getLogicalTables( schemaPattern, tablePattern ); List foreignKeyColumns = new LinkedList<>(); for ( LogicalTable catalogTable : catalogEntities ) { - List importedKeys = catalog.getSnapshot().rel().getForeignKeys( catalogTable.id ); + List importedKeys = catalog.getSnapshot().rel().getForeignKeys( catalogTable.id ); importedKeys.forEach( catalogForeignKey -> foreignKeyColumns.addAll( catalogForeignKey.getCatalogForeignKeyColumns() ) ); } StatementHandle statementHandle = createStatement( ch ); @@ -611,7 +608,7 @@ public MetaResultSet getExportedKeys( final ConnectionHandle ch, final String da final List catalogEntities = getLogicalTables( schemaPattern, tablePattern ); List foreignKeyColumns = new LinkedList<>(); for ( LogicalTable catalogTable : catalogEntities ) { - List exportedKeys = catalog.getSnapshot().rel().getExportedKeys( catalogTable.id ); + List exportedKeys = catalog.getSnapshot().rel().getExportedKeys( catalogTable.id ); exportedKeys.forEach( catalogForeignKey -> foreignKeyColumns.addAll( catalogForeignKey.getCatalogForeignKeyColumns() ) ); } StatementHandle statementHandle = createStatement( ch ); @@ -727,8 +724,8 @@ public MetaResultSet getIndexInfo( final ConnectionHandle ch, final String datab final List catalogEntities = getLogicalTables( schemaPattern, tablePattern ); List catalogIndexColumns = new LinkedList<>(); for ( LogicalTable catalogTable : catalogEntities ) { - List catalogIndexInfos = catalog.getSnapshot().rel().getIndexes( catalogTable.id, unique ); - catalogIndexInfos.forEach( info -> catalogIndexColumns.addAll( info.getCatalogIndexColumns() ) ); + List logicalIndexInfos = catalog.getSnapshot().rel().getIndexes( catalogTable.id, unique ); + logicalIndexInfos.forEach( info -> catalogIndexColumns.addAll( info.getCatalogIndexColumns() ) ); } StatementHandle statementHandle = createStatement( ch ); return createMetaResultSet( diff --git a/plugins/hsqldb-adapter/src/main/java/org/polypheny/db/hsqldb/stores/HsqldbStore.java b/plugins/hsqldb-adapter/src/main/java/org/polypheny/db/hsqldb/stores/HsqldbStore.java index 0bab4957ec..c128a55b45 100644 --- 
a/plugins/hsqldb-adapter/src/main/java/org/polypheny/db/hsqldb/stores/HsqldbStore.java +++ b/plugins/hsqldb-adapter/src/main/java/org/polypheny/db/hsqldb/stores/HsqldbStore.java @@ -34,14 +34,13 @@ import org.polypheny.db.adapter.jdbc.connection.ConnectionHandlerException; import org.polypheny.db.adapter.jdbc.connection.TransactionalConnectionFactory; import org.polypheny.db.adapter.jdbc.stores.AbstractJdbcStore; -import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.IdBuilder; -import org.polypheny.db.catalog.entity.AllocationColumn; -import org.polypheny.db.catalog.entity.CatalogIndex; import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; +import org.polypheny.db.catalog.entity.LogicalIndex; import org.polypheny.db.catalog.entity.allocation.AllocationTable; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.entity.physical.PhysicalEntity; +import org.polypheny.db.catalog.entity.physical.PhysicalTable; import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.config.RuntimeConfig; import org.polypheny.db.plugins.PolyPluginManager; @@ -119,53 +118,55 @@ public Namespace getCurrentSchema() { @Override - public void addIndex( Context context, CatalogIndex catalogIndex, List partitionIds ) { - List ccps = context.getSnapshot().alloc().getColumnPlacementsOnAdapterPerTable( getAdapterId(), catalogIndex.key.tableId ); - List partitionPlacements = new ArrayList<>(); - partitionIds.forEach( id -> partitionPlacements.add( context.getSnapshot().alloc().getPartitionPlacement( getAdapterId(), id ) ) ); - - String physicalIndexName = getPhysicalIndexName( catalogIndex.key.tableId, catalogIndex.id ); - for ( CatalogPartitionPlacement partitionPlacement : partitionPlacements ) { - - StringBuilder builder = new StringBuilder(); - builder.append( "CREATE " ); - if ( catalogIndex.unique ) { - builder.append( "UNIQUE INDEX " ); - } else { - builder.append( "INDEX " ); - } + public String addIndex( Context context, LogicalIndex logicalIndex, AllocationTable allocation ) { + // List ccps = context.getSnapshot().alloc().getColumnPlacementsOnAdapterPerTable( getAdapterId(), catalogIndex.key.tableId ); + // List partitionPlacements = new ArrayList<>(); + //partitionIds.forEach( id -> partitionPlacements.add( context.getSnapshot().alloc().getPartitionPlacement( getAdapterId(), id ) ) ); + + String physicalIndexName = getPhysicalIndexName( logicalIndex.key.tableId, logicalIndex.id ); + PhysicalTable physical = catalog.getSnapshot().physical().fromAlloc( allocation.id ).get( 0 ).unwrap( PhysicalTable.class ); + // for ( CatalogPartitionPlacement partitionPlacement : partitionPlacements ) { + + StringBuilder builder = new StringBuilder(); + builder.append( "CREATE " ); + if ( logicalIndex.unique ) { + builder.append( "UNIQUE INDEX " ); + } else { + builder.append( "INDEX " ); + } - builder.append( dialect.quoteIdentifier( physicalIndexName + "_" + partitionPlacement.partitionId ) ); - builder.append( " ON " ) - .append( dialect.quoteIdentifier( partitionPlacement.physicalSchemaName ) ) - .append( "." 
) - .append( dialect.quoteIdentifier( partitionPlacement.physicalTableName ) ); - - builder.append( "(" ); - boolean first = true; - for ( long columnId : catalogIndex.key.columnIds ) { - if ( !first ) { - builder.append( ", " ); - } - first = false; - builder.append( dialect.quoteIdentifier( getPhysicalColumnName( columnId ) ) ).append( " " ); + builder.append( dialect.quoteIdentifier( physicalIndexName ) );//+ "_" + partitionPlacement.partitionId ) ); + builder.append( " ON " ) + .append( dialect.quoteIdentifier( physical.namespaceName ) ) + .append( "." ) + .append( dialect.quoteIdentifier( physical.name ) ); + + builder.append( "(" ); + boolean first = true; + for ( long columnId : logicalIndex.key.columnIds ) { + if ( !first ) { + builder.append( ", " ); } - builder.append( ")" ); - executeUpdate( builder, context ); + first = false; + builder.append( dialect.quoteIdentifier( getPhysicalColumnName( columnId ) ) ).append( " " ); } - Catalog.getInstance().getLogicalRel( catalogIndex.key.namespaceId ).setIndexPhysicalName( catalogIndex.id, physicalIndexName ); + builder.append( ")" ); + executeUpdate( builder, context ); + //} + return physicalIndexName; + // Catalog.getInstance().getLogicalRel( catalogIndex.key.namespaceId ).setIndexPhysicalName( catalogIndex.id, physicalIndexName ); } @Override - public void dropIndex( Context context, CatalogIndex catalogIndex, List partitionIds ) { + public void dropIndex( Context context, LogicalIndex logicalIndex, List partitionIds ) { List partitionPlacements = new ArrayList<>(); partitionIds.forEach( id -> partitionPlacements.add( catalog.getSnapshot().alloc().getPartitionPlacement( getAdapterId(), id ) ) ); for ( CatalogPartitionPlacement partitionPlacement : partitionPlacements ) { StringBuilder builder = new StringBuilder(); builder.append( "DROP INDEX " ); - builder.append( dialect.quoteIdentifier( catalogIndex.physicalName + "_" + partitionPlacement.partitionId ) ); + builder.append( dialect.quoteIdentifier( logicalIndex.physicalName + "_" + partitionPlacement.partitionId ) ); executeUpdate( builder, context ); } } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocDocCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocDocCatalog.java index 5518adf299..80c0e8b00c 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocDocCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocDocCatalog.java @@ -17,6 +17,10 @@ package org.polypheny.db.catalog.allocation; import io.activej.serializer.BinarySerializer; +import io.activej.serializer.annotations.Deserialize; +import io.activej.serializer.annotations.Serialize; +import java.util.HashMap; +import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import lombok.Getter; import org.polypheny.db.catalog.Serializable; @@ -28,14 +32,24 @@ public class PolyAllocDocCatalog implements Serializable, AllocationDocumentCatalog { @Getter + @Serialize public final LogicalNamespace namespace; @Getter - public ConcurrentHashMap collections; + @Serialize + public final ConcurrentHashMap collections; public PolyAllocDocCatalog( LogicalNamespace namespace ) { + this( namespace, new HashMap<>() ); + } + + + public PolyAllocDocCatalog( + @Deserialize("namespace") LogicalNamespace namespace, + @Deserialize("collections") Map collections ) { this.namespace = namespace; + this.collections = new ConcurrentHashMap<>( collections ); } diff --git 
a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/RelationalCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/RelationalCatalog.java index abf79810b6..63cdf360a0 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/RelationalCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/RelationalCatalog.java @@ -36,19 +36,20 @@ import org.polypheny.db.algebra.AlgCollation; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.type.AlgDataType; +import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.IdBuilder; import org.polypheny.db.catalog.Serializable; import org.polypheny.db.catalog.catalogs.LogicalCatalog; import org.polypheny.db.catalog.catalogs.LogicalRelationalCatalog; import org.polypheny.db.catalog.entity.CatalogConstraint; import org.polypheny.db.catalog.entity.CatalogDefaultValue; -import org.polypheny.db.catalog.entity.CatalogForeignKey; -import org.polypheny.db.catalog.entity.CatalogIndex; -import org.polypheny.db.catalog.entity.CatalogKey; -import org.polypheny.db.catalog.entity.CatalogKey.EnforcementTime; -import org.polypheny.db.catalog.entity.CatalogPrimaryKey; +import org.polypheny.db.catalog.entity.LogicalForeignKey; +import org.polypheny.db.catalog.entity.LogicalIndex; +import org.polypheny.db.catalog.entity.LogicalKey; +import org.polypheny.db.catalog.entity.LogicalKey.EnforcementTime; import org.polypheny.db.catalog.entity.LogicalMaterializedView; import org.polypheny.db.catalog.entity.LogicalNamespace; +import org.polypheny.db.catalog.entity.LogicalPrimaryKey; import org.polypheny.db.catalog.entity.LogicalView; import org.polypheny.db.catalog.entity.MaterializedCriteria; import org.polypheny.db.catalog.entity.logical.LogicalColumn; @@ -88,17 +89,13 @@ public class RelationalCatalog implements Serializable, LogicalRelationalCatalog @Serialize @Getter - public Map indexes; + public Map indexes; @Serialize @Getter - public Map keys; + public Map keys; - @Serialize - @Getter - public Map keyColumns; - @Serialize @Getter public Map constraints; @@ -118,9 +115,8 @@ public RelationalCatalog( @Deserialize("logicalNamespace") LogicalNamespace logicalNamespace, @Deserialize("tables") Map tables, @Deserialize("columns") Map columns, - @Deserialize("indexes") Map indexes, - @Deserialize("keys") Map keys, - @Deserialize("keyColumns") Map keyColumns, + @Deserialize("indexes") Map indexes, + @Deserialize("keys") Map keys, @Deserialize("constraints") Map constraints, @Deserialize("nodes") Map nodes ) { this.logicalNamespace = logicalNamespace; @@ -129,14 +125,13 @@ public RelationalCatalog( this.columns = columns; this.indexes = indexes; this.keys = keys; - this.keyColumns = keyColumns; this.constraints = constraints; this.nodes = nodes; } public RelationalCatalog( LogicalNamespace namespace ) { - this( namespace, new HashMap<>(), new HashMap<>(), new HashMap<>(), new HashMap<>(), new HashMap<>(), new HashMap<>(), new HashMap<>() ); + this( namespace, new HashMap<>(), new HashMap<>(), new HashMap<>(), new HashMap<>(), new HashMap<>(), new HashMap<>() ); } @@ -221,39 +216,40 @@ public void deleteTable( long tableId ) { public void setPrimaryKey( long tableId, Long keyId ) { tables.put( tableId, tables.get( tableId ).toBuilder().primaryKey( keyId ).build() ); - keys.put( keyId, new CatalogPrimaryKey( keys.get( keyId ) ) ); + keys.put( keyId, new LogicalPrimaryKey( keys.get( keyId ) ) ); } @Override - public long addIndex( long tableId, List 
columnIds, boolean unique, String method, String methodDisplayName, long adapterId, IndexType type, String indexName ) { + public LogicalIndex addIndex( long tableId, List columnIds, boolean unique, String method, String methodDisplayName, long adapterId, IndexType type, String indexName ) { long keyId = getOrAddKey( tableId, columnIds, EnforcementTime.ON_QUERY ); if ( unique ) { // TODO: Check if the current values are unique } long id = idBuilder.getNewIndexId(); + LogicalIndex index = new LogicalIndex( + id, + indexName, + unique, + method, + methodDisplayName, + type, + adapterId, + keyId, + Objects.requireNonNull( keys.get( keyId ) ), + null ); synchronized ( this ) { - indexes.put( id, new CatalogIndex( - id, - indexName, - unique, - method, - methodDisplayName, - type, - adapterId, - keyId, - Objects.requireNonNull( keys.get( keyId ) ), - null ) ); + indexes.put( id, index ); } listeners.firePropertyChange( "index", null, keyId ); - return id; + return index; } private long getOrAddKey( long tableId, List columnIds, EnforcementTime enforcementTime ) { - Long keyId = keyColumns.get( columnIds.stream().mapToLong( Long::longValue ).toArray() ); - if ( keyId != null ) { - return keyId; + LogicalKey key = Catalog.snapshot().rel().getKeys( columnIds.stream().mapToLong( Long::longValue ).toArray() ); + if ( key != null ) { + return key.id; } return addKey( tableId, columnIds, enforcementTime ); } @@ -262,10 +258,9 @@ private long getOrAddKey( long tableId, List columnIds, EnforcementTime en private long addKey( long tableId, List columnIds, EnforcementTime enforcementTime ) { LogicalTable table = Objects.requireNonNull( tables.get( tableId ) ); long id = idBuilder.getNewKeyId(); - CatalogKey key = new CatalogKey( id, table.id, table.namespaceId, columnIds, enforcementTime ); + LogicalKey key = new LogicalKey( id, table.id, table.namespaceId, columnIds, enforcementTime ); synchronized ( this ) { keys.put( id, key ); - keyColumns.put( columnIds.stream().mapToLong( Long::longValue ).toArray(), id ); } listeners.firePropertyChange( "key", null, key ); return id; @@ -375,12 +370,12 @@ public void addPrimaryKey( long tableId, List columnIds ) { private boolean isForeignKey( long key ) { - return keys.values().stream().filter( k -> k instanceof CatalogForeignKey ).map( k -> (CatalogForeignKey) k ).anyMatch( k -> k.referencedKeyId == key ); + return keys.values().stream().filter( k -> k instanceof LogicalForeignKey ).map( k -> (LogicalForeignKey) k ).anyMatch( k -> k.referencedKeyId == key ); } private boolean isPrimaryKey( long key ) { - return keys.values().stream().filter( k -> k instanceof CatalogPrimaryKey ).map( k -> (CatalogPrimaryKey) k ).anyMatch( k -> k.id == key ); + return keys.values().stream().filter( k -> k instanceof LogicalPrimaryKey ).map( k -> (LogicalPrimaryKey) k ).anyMatch( k -> k.id == key ); } @@ -391,7 +386,7 @@ private void deleteKeyIfNoLongerUsed( Long keyId ) { if ( keyId == null ) { return; } - CatalogKey key = keys.get( keyId ); + LogicalKey key = keys.get( keyId ); LogicalTable table = tables.get( key.tableId ); if ( table.primaryKey != null && table.primaryKey.equals( keyId ) ) { return; @@ -399,7 +394,7 @@ private void deleteKeyIfNoLongerUsed( Long keyId ) { if ( constraints.values().stream().anyMatch( c -> c.keyId == keyId ) ) { return; } - if ( keys.values().stream().filter( k -> k instanceof CatalogForeignKey ).anyMatch( f -> f.id == keyId ) ) { + if ( keys.values().stream().filter( k -> k instanceof LogicalForeignKey ).anyMatch( f -> f.id == keyId ) ) { return; 
} if ( indexes.values().stream().anyMatch( i -> i.keyId == keyId ) ) { @@ -407,14 +402,13 @@ private void deleteKeyIfNoLongerUsed( Long keyId ) { } synchronized ( this ) { keys.remove( keyId ); - keyColumns.remove( key.columnIds.stream().mapToLong( Long::longValue ).toArray() ); } listeners.firePropertyChange( "key", key, null ); } private int getKeyUniqueCount( long keyId ) { - CatalogKey key = keys.get( keyId ); + LogicalKey key = keys.get( keyId ); int count = 0; if ( isPrimaryKey( keyId ) ) { count++; @@ -426,7 +420,7 @@ private int getKeyUniqueCount( long keyId ) { } } - for ( CatalogIndex index : indexes.values().stream().filter( i -> i.keyId == keyId ).collect( Collectors.toList() ) ) { + for ( LogicalIndex index : indexes.values().stream().filter( i -> i.keyId == keyId ).collect( Collectors.toList() ) ) { if ( index.unique ) { count++; } @@ -439,9 +433,9 @@ private int getKeyUniqueCount( long keyId ) { @Override public void addForeignKey( long tableId, List columnIds, long referencesTableId, List referencesIds, String constraintName, ForeignKeyOption onUpdate, ForeignKeyOption onDelete ) { LogicalTable table = tables.get( tableId ); - List childKeys = keys.values().stream().filter( k -> k.tableId == referencesTableId ).collect( Collectors.toList() ); + List childKeys = keys.values().stream().filter( k -> k.tableId == referencesTableId ).collect( Collectors.toList() ); - for ( CatalogKey refKey : childKeys ) { + for ( LogicalKey refKey : childKeys ) { if ( refKey.columnIds.size() == referencesIds.size() && refKey.columnIds.containsAll( referencesIds ) && new HashSet<>( referencesIds ).containsAll( refKey.columnIds ) ) { int i = 0; @@ -455,7 +449,7 @@ public void addForeignKey( long tableId, List columnIds, long referencesTa // TODO same keys for key and foreign key if ( getKeyUniqueCount( refKey.id ) > 0 ) { long keyId = getOrAddKey( tableId, columnIds, EnforcementTime.ON_COMMIT ); - CatalogForeignKey key = new CatalogForeignKey( + LogicalForeignKey key = new LogicalForeignKey( keyId, constraintName, tableId, @@ -517,10 +511,10 @@ public void deletePrimaryKey( long tableId ) { @Override public void deleteForeignKey( long foreignKeyId ) { - CatalogForeignKey catalogForeignKey = (CatalogForeignKey) keys.get( foreignKeyId ); + LogicalForeignKey logicalForeignKey = (LogicalForeignKey) keys.get( foreignKeyId ); synchronized ( this ) { - keys.remove( catalogForeignKey.id ); - deleteKeyIfNoLongerUsed( catalogForeignKey.id ); + keys.remove( logicalForeignKey.id ); + deleteKeyIfNoLongerUsed( logicalForeignKey.id ); } } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlDdl.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlDdl.java index 6a4c8c7727..107fbe030a 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlDdl.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlDdl.java @@ -61,7 +61,7 @@ public Operator getOperator() { } - protected LogicalTable getCatalogTable( Context context, SqlIdentifier tableName ) { + protected LogicalTable getFromCatalog( Context context, SqlIdentifier tableName ) { long schemaId; String tableOldName; if ( tableName.names.size() == 3 ) { // DatabaseName.SchemaName.TableName diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateTable.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateTable.java index 56a290940f..ef6f2c1e1b 100644 --- 
a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateTable.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateTable.java @@ -232,7 +232,7 @@ public void execute( Context context, Statement statement, QueryParameters param if ( partitionType != null ) { DdlManager.getInstance().addPartitioning( PartitionInformation.fromNodeLists( - getCatalogTable( context, new SqlIdentifier( tableName, ParserPos.ZERO ) ), + getFromCatalog( context, new SqlIdentifier( tableName, ParserPos.ZERO ) ), partitionType.getSimple(), partitionColumn.getSimple(), partitionGroupNamesList.stream().map( n -> (Identifier) n ).collect( Collectors.toList() ), diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlDropMaterializedView.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlDropMaterializedView.java index 5a8d7e7949..882ce9447e 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlDropMaterializedView.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlDropMaterializedView.java @@ -48,7 +48,7 @@ public void execute( Context context, Statement statement, QueryParameters param final LogicalTable catalogTable; try { - catalogTable = getCatalogTable( context, name ); + catalogTable = getFromCatalog( context, name ); } catch ( PolyphenyDbContextException e ) { if ( ifExists ) { // It is ok that there is no database / schema / table with this name because "IF EXISTS" was specified diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlDropTable.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlDropTable.java index afb93fe758..2ecb0ac70f 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlDropTable.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlDropTable.java @@ -48,7 +48,7 @@ public class SqlDropTable extends SqlDropObject { @Override public void execute( Context context, Statement statement, QueryParameters parameters ) { - final LogicalTable table = getCatalogTable( context, name ); + final LogicalTable table = getFromCatalog( context, name ); if ( table == null ) { if ( ifExists ) { diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlDropView.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlDropView.java index 93f0e8dc06..690b662b07 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlDropView.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlDropView.java @@ -52,7 +52,7 @@ public void execute( Context context, Statement statement, QueryParameters param final LogicalTable catalogTable; try { - catalogTable = getCatalogTable( context, name ); + catalogTable = getFromCatalog( context, name ); } catch ( PolyphenyDbContextException e ) { if ( ifExists ) { // It is ok that there is no database / schema / table with this name because "IF EXISTS" was specified diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlTruncate.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlTruncate.java index 08818d927a..e3f8da8ddc 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlTruncate.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlTruncate.java @@ -77,7 +77,7 @@ public void 
unparse( SqlWriter writer, int leftPrec, int rightPrec ) { @Override public void execute( Context context, Statement statement, QueryParameters parameters ) { - LogicalTable table = getCatalogTable( context, name ); + LogicalTable table = getFromCatalog( context, name ); DdlManager.getInstance().truncate( table, statement ); } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altermaterializedview/SqlAlterMaterializedViewAddIndex.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altermaterializedview/SqlAlterMaterializedViewAddIndex.java index e4e12f84fc..de14e233ec 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altermaterializedview/SqlAlterMaterializedViewAddIndex.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altermaterializedview/SqlAlterMaterializedViewAddIndex.java @@ -108,7 +108,7 @@ public void unparse( SqlWriter writer, int leftPrec, int rightPrec ) { @Override public void execute( Context context, Statement statement, QueryParameters parameters ) { - LogicalTable catalogTable = getCatalogTable( context, table ); + LogicalTable catalogTable = getFromCatalog( context, table ); if ( catalogTable.entityType != EntityType.MATERIALIZED_VIEW ) { throw new RuntimeException( "Not Possible to use ALTER MATERIALIZED VIEW because " + catalogTable.name + " is not a Materialized View." ); } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altermaterializedview/SqlAlterMaterializedViewDropIndex.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altermaterializedview/SqlAlterMaterializedViewDropIndex.java index 71a55efb16..5fbceadefd 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altermaterializedview/SqlAlterMaterializedViewDropIndex.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altermaterializedview/SqlAlterMaterializedViewDropIndex.java @@ -71,7 +71,7 @@ public void unparse( SqlWriter writer, int leftPrec, int rightPrec ) { @Override public void execute( Context context, Statement statement, QueryParameters parameters ) { - LogicalTable catalogTable = getCatalogTable( context, table ); + LogicalTable catalogTable = getFromCatalog( context, table ); if ( catalogTable.entityType != EntityType.MATERIALIZED_VIEW ) { throw new RuntimeException( "Not Possible to use ALTER MATERIALIZED VIEW because " + catalogTable.name + " is not a Materialized View." 
); diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altermaterializedview/SqlAlterMaterializedViewFreshnessManual.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altermaterializedview/SqlAlterMaterializedViewFreshnessManual.java index c91eb99762..4c370d2f9a 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altermaterializedview/SqlAlterMaterializedViewFreshnessManual.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altermaterializedview/SqlAlterMaterializedViewFreshnessManual.java @@ -17,8 +17,8 @@ package org.polypheny.db.sql.language.ddl.altermaterializedview; import java.util.List; -import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.ddl.DdlManager; import org.polypheny.db.languages.ParserPos; import org.polypheny.db.languages.QueryParameters; @@ -66,7 +66,7 @@ public void unparse( SqlWriter writer, int leftPrec, int rightPrec ) { @Override public void execute( Context context, Statement statement, QueryParameters parameters ) { - LogicalTable catalogTable = getCatalogTable( context, name ); + LogicalTable catalogTable = getFromCatalog( context, name ); if ( catalogTable.entityType != EntityType.MATERIALIZED_VIEW ) { throw new RuntimeException( "Not Possible to use ALTER MATERIALIZED VIEW because " + catalogTable.name + " is not a Materialized View." ); diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altermaterializedview/SqlAlterMaterializedViewRename.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altermaterializedview/SqlAlterMaterializedViewRename.java index f75c894bcc..608166713e 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altermaterializedview/SqlAlterMaterializedViewRename.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altermaterializedview/SqlAlterMaterializedViewRename.java @@ -74,7 +74,7 @@ public void unparse( SqlWriter writer, int leftPrec, int rightPrec ) { @Override public void execute( Context context, Statement statement, QueryParameters parameters ) { - LogicalTable catalogTable = getCatalogTable( context, oldName ); + LogicalTable catalogTable = getFromCatalog( context, oldName ); if ( catalogTable.entityType != EntityType.MATERIALIZED_VIEW ) { throw new RuntimeException( "Not Possible to use ALTER MATERIALIZED VIEW because " + catalogTable.name + " is not a Materialized View." 
); diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altermaterializedview/SqlAlterMaterializedViewRenameColumn.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altermaterializedview/SqlAlterMaterializedViewRenameColumn.java index c7cfcb49c1..6c036c42d0 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altermaterializedview/SqlAlterMaterializedViewRenameColumn.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altermaterializedview/SqlAlterMaterializedViewRenameColumn.java @@ -75,7 +75,7 @@ public void unparse( SqlWriter writer, int leftPrec, int rightPrec ) { @Override public void execute( Context context, Statement statement, QueryParameters parameters ) { - LogicalTable catalogTable = getCatalogTable( context, materializedView ); + LogicalTable catalogTable = getFromCatalog( context, materializedView ); if ( catalogTable.entityType != EntityType.MATERIALIZED_VIEW ) { throw new RuntimeException( "Not Possible to use ALTER MATERIALIZED VIEW because " + catalogTable.name + " is not a Materialized View." ); diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterSourceTableAddColumn.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterSourceTableAddColumn.java index b1be5a5bc3..ff3e20cfff 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterSourceTableAddColumn.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterSourceTableAddColumn.java @@ -105,7 +105,7 @@ public void unparse( SqlWriter writer, int leftPrec, int rightPrec ) { @Override public void execute( Context context, Statement statement, QueryParameters parameters ) { - LogicalTable catalogTable = getCatalogTable( context, table ); + LogicalTable catalogTable = getFromCatalog( context, table ); if ( catalogTable.entityType != EntityType.SOURCE ) { throw new RuntimeException( "Not possible to use ALTER TABLE because " + catalogTable.name + " is not a source table." 
); diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddColumn.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddColumn.java index 249e04a3b3..80a243ed4d 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddColumn.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddColumn.java @@ -116,7 +116,7 @@ public void unparse( SqlWriter writer, int leftPrec, int rightPrec ) { @Override public void execute( Context context, Statement statement, QueryParameters parameters ) { - LogicalTable catalogTable = getCatalogTable( context, table ); + LogicalTable catalogTable = getFromCatalog( context, table ); if ( catalogTable.entityType != EntityType.ENTITY ) { throw new GenericRuntimeException( "Not possible to use ALTER TABLE because %s is not a table.", catalogTable.name ); diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddForeignKey.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddForeignKey.java index e90b485125..9115acde21 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddForeignKey.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddForeignKey.java @@ -17,13 +17,10 @@ package org.polypheny.db.sql.language.ddl.altertable; -import static org.polypheny.db.util.Static.RESOURCE; - import java.util.List; import java.util.Objects; import java.util.stream.Collectors; import org.polypheny.db.catalog.entity.logical.LogicalTable; -import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.logistic.ForeignKeyOption; import org.polypheny.db.ddl.DdlManager; import org.polypheny.db.languages.ParserPos; @@ -36,7 +33,6 @@ import org.polypheny.db.sql.language.SqlWriter; import org.polypheny.db.sql.language.ddl.SqlAlterTable; import org.polypheny.db.transaction.Statement; -import org.polypheny.db.util.CoreUtil; import org.polypheny.db.util.ImmutableNullableList; @@ -103,16 +99,8 @@ public void unparse( SqlWriter writer, int leftPrec, int rightPrec ) { @Override public void execute( Context context, Statement statement, QueryParameters parameters ) { - LogicalTable catalogTable = getCatalogTable( context, table ); - LogicalTable refTable = getCatalogTable( context, referencesTable ); - - // Make sure that this is a table of type TABLE (and not SOURCE) - if ( catalogTable.entityType != EntityType.ENTITY ) { - throw CoreUtil.newContextException( table.getPos(), RESOURCE.ddlOnSourceTable() ); - } - if ( refTable.entityType != EntityType.ENTITY ) { - throw CoreUtil.newContextException( referencesTable.getPos(), RESOURCE.ddlOnSourceTable() ); - } + LogicalTable catalogTable = getFromCatalog( context, table ); + LogicalTable refTable = getFromCatalog( context, referencesTable ); DdlManager.getInstance().addForeignKey( catalogTable, diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddIndex.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddIndex.java index 123cb8b141..caa25aec23 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddIndex.java +++ 
b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddIndex.java @@ -111,7 +111,7 @@ public void unparse( SqlWriter writer, int leftPrec, int rightPrec ) { @Override public void execute( Context context, Statement statement, QueryParameters parameters ) { - LogicalTable catalogTable = getCatalogTable( context, table ); + LogicalTable catalogTable = getFromCatalog( context, table ); if ( catalogTable.entityType != EntityType.ENTITY && catalogTable.entityType != EntityType.MATERIALIZED_VIEW ) { throw new RuntimeException( "Not possible to use ALTER TABLE ADD INDEX because " + catalogTable.name + " is not a table or materialized view." ); diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddPartitions.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddPartitions.java index 5ce3be656d..401e189296 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddPartitions.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddPartitions.java @@ -134,7 +134,7 @@ public void unparse( SqlWriter writer, int leftPrec, int rightPrec ) { @Override public void execute( Context context, Statement statement, QueryParameters parameters ) { - LogicalTable catalogTable = getCatalogTable( context, table ); + LogicalTable catalogTable = getFromCatalog( context, table ); if ( catalogTable.entityType != EntityType.ENTITY ) { throw new RuntimeException( "Not possible to use ALTER TABLE because " + catalogTable.name + " is not a table." ); diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddPlacement.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddPlacement.java index 33168f2ed2..fe9eb81c61 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddPlacement.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddPlacement.java @@ -113,7 +113,7 @@ public void unparse( SqlWriter writer, int leftPrec, int rightPrec ) { @Override public void execute( Context context, Statement statement, QueryParameters parameters ) { - LogicalTable catalogTable = getCatalogTable( context, table ); + LogicalTable catalogTable = getFromCatalog( context, table ); DataStore storeInstance = getDataStoreInstance( storeName ); if ( catalogTable.entityType != EntityType.ENTITY ) { diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddPrimaryKey.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddPrimaryKey.java index 75a56c9d45..f353ef5121 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddPrimaryKey.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddPrimaryKey.java @@ -78,7 +78,7 @@ public void unparse( SqlWriter writer, int leftPrec, int rightPrec ) { @Override public void execute( Context context, Statement statement, QueryParameters parameters ) { - LogicalTable catalogTable = getCatalogTable( context, table ); + LogicalTable catalogTable = getFromCatalog( context, table ); if ( catalogTable.entityType != EntityType.ENTITY ) { throw new RuntimeException( "Not possible 
to use ALTER TABLE because " + catalogTable.name + " is not a table." ); diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddUniqueConstraint.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddUniqueConstraint.java index 1551830f36..119d348091 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddUniqueConstraint.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddUniqueConstraint.java @@ -85,7 +85,7 @@ public void unparse( SqlWriter writer, int leftPrec, int rightPrec ) { @Override public void execute( Context context, Statement statement, QueryParameters parameters ) { - LogicalTable catalogTable = getCatalogTable( context, table ); + LogicalTable catalogTable = getFromCatalog( context, table ); if ( catalogTable.entityType != EntityType.ENTITY ) { throw new RuntimeException( "Not possible to use ALTER TABLE because " + catalogTable.name + " is not a table." ); diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableDropColumn.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableDropColumn.java index d606a2d350..ba7dd60a8d 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableDropColumn.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableDropColumn.java @@ -75,7 +75,7 @@ public void unparse( SqlWriter writer, int leftPrec, int rightPrec ) { @Override public void execute( Context context, Statement statement, QueryParameters parameters ) { - LogicalTable catalogTable = getCatalogTable( context, table ); + LogicalTable catalogTable = getFromCatalog( context, table ); if ( catalogTable.entityType != EntityType.ENTITY && catalogTable.entityType != EntityType.SOURCE ) { throw new RuntimeException( "Not possible to use ALTER TABLE because " + catalogTable.name + " is not a table." ); diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableDropConstraint.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableDropConstraint.java index 8a79d0f69c..35490e3214 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableDropConstraint.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableDropConstraint.java @@ -75,7 +75,7 @@ public void unparse( SqlWriter writer, int leftPrec, int rightPrec ) { @Override public void execute( Context context, Statement statement, QueryParameters parameters ) { - LogicalTable catalogTable = getCatalogTable( context, table ); + LogicalTable catalogTable = getFromCatalog( context, table ); if ( catalogTable.entityType != EntityType.ENTITY ) { throw new RuntimeException( "Not possible to use ALTER TABLE because " + catalogTable.name + " is not a table." 
); diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableDropForeignKey.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableDropForeignKey.java index 21a77f2263..aa91b113f8 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableDropForeignKey.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableDropForeignKey.java @@ -20,7 +20,6 @@ import java.util.List; import java.util.Objects; import org.polypheny.db.catalog.entity.logical.LogicalTable; -import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.ddl.DdlManager; import org.polypheny.db.languages.ParserPos; import org.polypheny.db.languages.QueryParameters; @@ -76,11 +75,7 @@ public void unparse( SqlWriter writer, int leftPrec, int rightPrec ) { @Override public void execute( Context context, Statement statement, QueryParameters parameters ) { - LogicalTable catalogTable = getCatalogTable( context, table ); - - if ( catalogTable.entityType != EntityType.ENTITY ) { - throw new RuntimeException( "Not possible to use ALTER TABLE because " + catalogTable.name + " is not a table." ); - } + LogicalTable catalogTable = getFromCatalog( context, table ); DdlManager.getInstance().dropForeignKey( catalogTable, foreignKeyName.getSimple() ); } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableDropIndex.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableDropIndex.java index 68c0917d60..334461fc14 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableDropIndex.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableDropIndex.java @@ -75,7 +75,7 @@ public void unparse( SqlWriter writer, int leftPrec, int rightPrec ) { @Override public void execute( Context context, Statement statement, QueryParameters parameters ) { - LogicalTable catalogTable = getCatalogTable( context, table ); + LogicalTable catalogTable = getFromCatalog( context, table ); if ( catalogTable.entityType != EntityType.ENTITY && catalogTable.entityType != EntityType.MATERIALIZED_VIEW ) { throw new RuntimeException( "Not possible to use ALTER TABLE DROP INDEX because " + catalogTable.name + " is not a table or materialized view." 
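The index statements (ADD INDEX above, DROP INDEX here) accept both tables and materialized views, so their guard is a conjunction of two entity-type checks. A set-based variant keeps that readable if the allow-list ever grows; this is a sketch reusing the patch's type names, not code from the patch, and it assumes java.util.EnumSet and java.util.Set are imported:

    // Sketch: allow-list guard for statements that accept several entity types.
    private static final Set<EntityType> INDEXABLE = EnumSet.of( EntityType.ENTITY, EntityType.MATERIALIZED_VIEW );

    if ( !INDEXABLE.contains( catalogTable.entityType ) ) {
        throw new RuntimeException( "Not possible to use ALTER TABLE DROP INDEX because " + catalogTable.name + " is not a table or materialized view." );
    }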
); diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableDropPlacement.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableDropPlacement.java index 99a7defc7a..b1e4ae4d03 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableDropPlacement.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableDropPlacement.java @@ -78,7 +78,7 @@ public void unparse( SqlWriter writer, int leftPrec, int rightPrec ) { @Override public void execute( Context context, Statement statement, QueryParameters parameters ) { - LogicalTable catalogTable = getCatalogTable( context, table ); + LogicalTable catalogTable = getFromCatalog( context, table ); DataStore storeInstance = getDataStoreInstance( storeName ); if ( catalogTable.entityType != EntityType.ENTITY ) { diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableDropPrimaryKey.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableDropPrimaryKey.java index 83a81fff8e..b0dfe9e2e9 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableDropPrimaryKey.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableDropPrimaryKey.java @@ -20,6 +20,7 @@ import java.util.List; import java.util.Objects; import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.catalog.exceptions.GenericRuntimeException; import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.ddl.DdlManager; import org.polypheny.db.languages.ParserPos; @@ -73,10 +74,10 @@ public void unparse( SqlWriter writer, int leftPrec, int rightPrec ) { @Override public void execute( Context context, Statement statement, QueryParameters parameters ) { - LogicalTable catalogTable = getCatalogTable( context, table ); + LogicalTable catalogTable = getFromCatalog( context, table ); if ( catalogTable.entityType != EntityType.ENTITY ) { - throw new RuntimeException( "Not possible to use ALTER TABLE because " + catalogTable.name + " is not a table." ); + throw new GenericRuntimeException( "Not possible to use ALTER TABLE because %s is not a table.", catalogTable.name ); } DdlManager.getInstance().dropPrimaryKey( catalogTable ); diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableMergePartitions.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableMergePartitions.java index d55804e895..c4e7eae5da 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableMergePartitions.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableMergePartitions.java @@ -77,7 +77,7 @@ public void unparse( SqlWriter writer, int leftPrec, int rightPrec ) { @Override public void execute( Context context, Statement statement, QueryParameters parameters ) { - LogicalTable catalogTable = getCatalogTable( context, table ); + LogicalTable catalogTable = getFromCatalog( context, table ); if ( catalogTable.entityType != EntityType.ENTITY ) { throw new RuntimeException( "Not possible to use ALTER TABLE because " + catalogTable.name + " is not a table." 
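The DropPrimaryKey hunk above swaps string concatenation for a format-style exception. Assuming GenericRuntimeException applies String.format to its arguments (its definition is not part of this excerpt), the pattern is roughly:

    // Minimal sketch, assuming the constructor formats its message like String.format.
    public class GenericRuntimeException extends RuntimeException {

        public GenericRuntimeException( String format, Object... args ) {
            super( String.format( format, args ) );
        }

    }

This lets call sites pass the template and the values separately, as in throw new GenericRuntimeException( "Not possible to use ALTER TABLE because %s is not a table.", catalogTable.name ).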
); diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyColumn.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyColumn.java index d5a9b01e60..8411ae834d 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyColumn.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyColumn.java @@ -138,7 +138,7 @@ public void unparse( SqlWriter writer, int leftPrec, int rightPrec ) { @Override public void execute( Context context, Statement statement, QueryParameters parameters ) { - LogicalTable catalogTable = getCatalogTable( context, tableName ); + LogicalTable catalogTable = getFromCatalog( context, tableName ); if ( catalogTable.entityType != EntityType.ENTITY ) { throw new RuntimeException( "Not possible to use ALTER TABLE because " + catalogTable.name + " is not a table." ); diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyPartitions.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyPartitions.java index 1dabe62c78..923c34a81e 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyPartitions.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyPartitions.java @@ -98,7 +98,7 @@ public void unparse( SqlWriter writer, int leftPrec, int rightPrec ) { @Override public void execute( Context context, Statement statement, QueryParameters parameters ) { Catalog catalog = Catalog.getInstance(); - LogicalTable catalogTable = getCatalogTable( context, table ); + LogicalTable catalogTable = getFromCatalog( context, table ); if ( catalogTable.entityType != EntityType.ENTITY ) { throw new RuntimeException( "Not possible to use ALTER TABLE because " + catalogTable.name + " is not a table." ); diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyPlacement.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyPlacement.java index fb3ec7018d..6a152e2cb9 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyPlacement.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyPlacement.java @@ -110,7 +110,7 @@ public void unparse( SqlWriter writer, int leftPrec, int rightPrec ) { @Override public void execute( Context context, Statement statement, QueryParameters parameters ) { - LogicalTable catalogTable = getCatalogTable( context, table ); + LogicalTable catalogTable = getFromCatalog( context, table ); if ( catalogTable.entityType != EntityType.ENTITY ) { throw new RuntimeException( "Not possible to use ALTER TABLE because " + catalogTable.name + " is not a table." 
); diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyPlacementAddColumn.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyPlacementAddColumn.java index 1b969f2fec..13b06e5a4d 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyPlacementAddColumn.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyPlacementAddColumn.java @@ -87,7 +87,7 @@ public void unparse( SqlWriter writer, int leftPrec, int rightPrec ) { @Override public void execute( Context context, Statement statement, QueryParameters parameters ) { - LogicalTable catalogTable = getCatalogTable( context, table ); + LogicalTable catalogTable = getFromCatalog( context, table ); DataStore storeInstance = getDataStoreInstance( storeName ); if ( catalogTable.entityType != EntityType.ENTITY ) { diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyPlacementDropColumn.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyPlacementDropColumn.java index e5b7a735a7..88ceb32d7d 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyPlacementDropColumn.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableModifyPlacementDropColumn.java @@ -83,7 +83,7 @@ public void unparse( SqlWriter writer, int leftPrec, int rightPrec ) { @Override public void execute( Context context, Statement statement, QueryParameters parameters ) { - LogicalTable catalogTable = getCatalogTable( context, table ); + LogicalTable catalogTable = getFromCatalog( context, table ); DataStore storeInstance = getDataStoreInstance( storeName ); if ( catalogTable.entityType != EntityType.ENTITY ) { diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableOwner.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableOwner.java index 6f85e3d1dd..4cd5a65c7c 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableOwner.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableOwner.java @@ -75,7 +75,7 @@ public void unparse( SqlWriter writer, int leftPrec, int rightPrec ) { @Override public void execute( Context context, Statement statement, QueryParameters parameters ) { - LogicalTable catalogTable = getCatalogTable( context, table ); + LogicalTable catalogTable = getFromCatalog( context, table ); if ( catalogTable.entityType != EntityType.ENTITY ) { throw new RuntimeException( "Not possible to use ALTER TABLE because " + catalogTable.name + " is not a table." 
); diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableRename.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableRename.java index 90a65e944b..99d612df12 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableRename.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableRename.java @@ -74,7 +74,7 @@ public void unparse( SqlWriter writer, int leftPrec, int rightPrec ) { @Override public void execute( Context context, Statement statement, QueryParameters parameters ) { - LogicalTable table = getCatalogTable( context, oldName ); + LogicalTable table = getFromCatalog( context, oldName ); if ( newName.names.size() != 1 ) { throw new RuntimeException( "No FQDN allowed here: " + newName ); diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableRenameColumn.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableRenameColumn.java index 01e3dfd149..07792b74a0 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableRenameColumn.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableRenameColumn.java @@ -78,7 +78,7 @@ public void unparse( SqlWriter writer, int leftPrec, int rightPrec ) { @Override public void execute( Context context, Statement statement, QueryParameters parameters ) { - LogicalTable catalogTable = getCatalogTable( context, table ); + LogicalTable catalogTable = getFromCatalog( context, table ); DdlManager.getInstance().renameColumn( catalogTable, columnOldName.getSimple(), columnNewName.getSimple(), statement ); } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/alterview/SqlAlterViewRename.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/alterview/SqlAlterViewRename.java index 1788eadbd3..ee4e94c8e0 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/alterview/SqlAlterViewRename.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/alterview/SqlAlterViewRename.java @@ -77,7 +77,7 @@ public void unparse( SqlWriter writer, int leftPrec, int rightPrec ) { @Override public void execute( Context context, Statement statement, QueryParameters parameters ) { - LogicalTable catalogTable = getCatalogTable( context, oldName ); + LogicalTable catalogTable = getFromCatalog( context, oldName ); if ( catalogTable.entityType != EntityType.VIEW ) { throw new GenericRuntimeException( "Not Possible to use ALTER VIEW because %s is not a View.", catalogTable.name ); diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/alterview/SqlAlterViewRenameColumn.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/alterview/SqlAlterViewRenameColumn.java index 13b4f21b6a..28e242fd17 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/alterview/SqlAlterViewRenameColumn.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/alterview/SqlAlterViewRenameColumn.java @@ -74,7 +74,7 @@ public void unparse( SqlWriter writer, int leftPrec, int rightPrec ) { @Override public void execute( Context context, Statement statement, QueryParameters parameters ) { - LogicalTable catalogTable = getCatalogTable( context, 
view ); + LogicalTable catalogTable = getFromCatalog( context, view ); if ( catalogTable.entityType != EntityType.VIEW ) { throw new RuntimeException( "Not Possible to use ALTER VIEW because " + catalogTable.name + " is not a View." ); diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/web/SchemaToJsonMapper.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/web/SchemaToJsonMapper.java index 21154faed8..81052b7a7f 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/web/SchemaToJsonMapper.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/web/SchemaToJsonMapper.java @@ -25,7 +25,7 @@ import lombok.Getter; import lombok.NonNull; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.entity.CatalogKey; +import org.polypheny.db.catalog.entity.LogicalKey; import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.sql.language.SqlWriter; @@ -61,9 +61,9 @@ public static String exportTableDefinitionAsJson( @NonNull LogicalTable catalogT } List primaryKeyColumnNames = null; if ( exportPrimaryKey ) { - for ( CatalogKey catalogKey : Catalog.getInstance().getSnapshot().rel().getTableKeys( catalogTable.id ) ) { - if ( catalogKey.id == catalogTable.primaryKey ) { - primaryKeyColumnNames = catalogKey.getColumnNames(); + for ( LogicalKey logicalKey : Catalog.getInstance().getSnapshot().rel().getTableKeys( catalogTable.id ) ) { + if ( logicalKey.id == catalogTable.primaryKey ) { + primaryKeyColumnNames = logicalKey.getColumnNames(); break; } } diff --git a/plugins/sql-language/src/test/java/org/polypheny/db/sql/map/NamespaceToJsonMapperTest.java b/plugins/sql-language/src/test/java/org/polypheny/db/sql/map/NamespaceToJsonMapperTest.java index ded8df2bb7..281f6fb25e 100644 --- a/plugins/sql-language/src/test/java/org/polypheny/db/sql/map/NamespaceToJsonMapperTest.java +++ b/plugins/sql-language/src/test/java/org/polypheny/db/sql/map/NamespaceToJsonMapperTest.java @@ -27,9 +27,9 @@ import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogDatabase; import org.polypheny.db.catalog.entity.CatalogDefaultValue; -import org.polypheny.db.catalog.entity.CatalogKey; -import org.polypheny.db.catalog.entity.CatalogKey.EnforcementTime; import org.polypheny.db.catalog.entity.CatalogUser; +import org.polypheny.db.catalog.entity.LogicalKey; +import org.polypheny.db.catalog.entity.LogicalKey.EnforcementTime; import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalTable; @@ -78,8 +78,8 @@ public void exportTest() { new HashMap<>(); new HashMap<>(); Arrays.asList( - new CatalogKey( 23L, 4, 1, 1, Arrays.asList( 5L, 6L ), EnforcementTime.ON_COMMIT ), - new CatalogKey( 24L, 4, 1, 1, List.of( 6L ), EnforcementTime.ON_COMMIT ) + new LogicalKey( 23L, 4, 1, 1, Arrays.asList( 5L, 6L ), EnforcementTime.ON_COMMIT ), + new LogicalKey( 24L, 4, 1, 1, List.of( 6L ), EnforcementTime.ON_COMMIT ) ); String json = SchemaToJsonMapper.exportTableDefinitionAsJson( catalogTable, true, true ); Assert.assertEquals( json, mockJson ); diff --git a/webui/src/main/java/org/polypheny/db/webui/Crud.java b/webui/src/main/java/org/polypheny/db/webui/Crud.java index de6e788e53..e49153cd5c 100644 --- a/webui/src/main/java/org/polypheny/db/webui/Crud.java +++ b/webui/src/main/java/org/polypheny/db/webui/Crud.java @@ -104,11 +104,11 @@ import 
org.polypheny.db.catalog.entity.CatalogAdapter.AdapterType; import org.polypheny.db.catalog.entity.CatalogConstraint; import org.polypheny.db.catalog.entity.CatalogDataPlacement; -import org.polypheny.db.catalog.entity.CatalogForeignKey; -import org.polypheny.db.catalog.entity.CatalogIndex; -import org.polypheny.db.catalog.entity.CatalogPrimaryKey; +import org.polypheny.db.catalog.entity.LogicalForeignKey; +import org.polypheny.db.catalog.entity.LogicalIndex; import org.polypheny.db.catalog.entity.LogicalMaterializedView; import org.polypheny.db.catalog.entity.LogicalNamespace; +import org.polypheny.db.catalog.entity.LogicalPrimaryKey; import org.polypheny.db.catalog.entity.LogicalView; import org.polypheny.db.catalog.entity.MaterializedCriteria; import org.polypheny.db.catalog.entity.MaterializedCriteria.CriteriaType; @@ -307,7 +307,7 @@ Result getTable( final UIRequest request ) { ArrayList cols = new ArrayList<>(); ArrayList primaryColumns; if ( catalogTable.primaryKey != null ) { - CatalogPrimaryKey primaryKey = catalog.getSnapshot().rel().getPrimaryKey( catalogTable.primaryKey ); + LogicalPrimaryKey primaryKey = catalog.getSnapshot().rel().getPrimaryKey( catalogTable.primaryKey ); primaryColumns = new ArrayList<>( primaryKey.getColumnNames() ); } else { primaryColumns = new ArrayList<>(); @@ -942,7 +942,7 @@ private String computeWherePK( final String tableName, final String columnName, } LogicalTable catalogTable = catalog.getSnapshot().rel().getTable( catalogColumns.values().iterator().next().namespaceId, tableName ); - CatalogPrimaryKey pk = catalog.getSnapshot().rel().getPrimaryKey( catalogTable.primaryKey ); + LogicalPrimaryKey pk = catalog.getSnapshot().rel().getPrimaryKey( catalogTable.primaryKey ); for ( long colId : pk.columnIds ) { String colName = catalog.getSnapshot().rel().getColumn( colId ).name; String condition; @@ -1116,7 +1116,7 @@ void getColumns( final Context ctx ) { LogicalTable catalogTable = catalog.getSnapshot().rel().getTable( t[0], t[1] ); ArrayList primaryColumns; if ( catalogTable.primaryKey != null ) { - CatalogPrimaryKey primaryKey = catalog.getSnapshot().rel().getPrimaryKey( catalogTable.primaryKey ); + LogicalPrimaryKey primaryKey = catalog.getSnapshot().rel().getPrimaryKey( catalogTable.primaryKey ); primaryColumns = new ArrayList<>( primaryKey.getColumnNames() ); } else { primaryColumns = new ArrayList<>(); @@ -1182,7 +1182,7 @@ void getDataSourceColumns( final Context ctx ) { } long adapterId = allocs.get( 0 ).adapterId; - CatalogPrimaryKey primaryKey = catalog.getSnapshot().rel().getPrimaryKey( catalogTable.primaryKey ); + LogicalPrimaryKey primaryKey = catalog.getSnapshot().rel().getPrimaryKey( catalogTable.primaryKey ); List pkColumnNames = primaryKey.getColumnNames(); List columns = new ArrayList<>(); for ( AllocationColumn ccp : catalog.getSnapshot().alloc().getColumnPlacementsOnAdapterPerTable( adapterId, catalogTable.id ) ) { @@ -1586,7 +1586,7 @@ void getConstraints( final Context ctx ) { // get primary key if ( catalogTable.primaryKey != null ) { - CatalogPrimaryKey primaryKey = catalog.getSnapshot().rel().getPrimaryKey( catalogTable.primaryKey ); + LogicalPrimaryKey primaryKey = catalog.getSnapshot().rel().getPrimaryKey( catalogTable.primaryKey ); for ( String columnName : primaryKey.getColumnNames() ) { if ( !temp.containsKey( "" ) ) { temp.put( "", new ArrayList<>() ); @@ -1737,7 +1737,7 @@ void getIndexes( final Context ctx ) { Result result; LogicalTable catalogTable = getLogicalTable( request.schema, request.table ); - List 
catalogIndexes = catalog.getSnapshot().rel().getIndexes( catalogTable.id, false ); + List logicalIndices = catalog.getSnapshot().rel().getIndexes( catalogTable.id, false ); DbColumn[] header = { new DbColumn( "Name" ), @@ -1749,20 +1749,20 @@ void getIndexes( final Context ctx ) { ArrayList data = new ArrayList<>(); // Get explicit indexes - for ( CatalogIndex catalogIndex : catalogIndexes ) { + for ( LogicalIndex logicalIndex : logicalIndices ) { String[] arr = new String[5]; String storeUniqueName; - if ( catalogIndex.location == 0 ) { + if ( logicalIndex.location == 0 ) { // a polystore index storeUniqueName = "Polypheny-DB"; } else { - storeUniqueName = catalog.getSnapshot().getAdapter( catalogIndex.location ).uniqueName; + storeUniqueName = catalog.getSnapshot().getAdapter( logicalIndex.location ).uniqueName; } - arr[0] = catalogIndex.name; - arr[1] = String.join( ", ", catalogIndex.key.getColumnNames() ); + arr[0] = logicalIndex.name; + arr[1] = String.join( ", ", logicalIndex.key.getColumnNames() ); arr[2] = storeUniqueName; - arr[3] = catalogIndex.methodDisplayName; - arr[4] = catalogIndex.type.name(); + arr[3] = logicalIndex.methodDisplayName; + arr[4] = logicalIndex.type.name(); data.add( arr ); } @@ -2487,19 +2487,19 @@ void getUml( final Context ctx ) { for ( LogicalTable catalogTable : catalogEntities ) { if ( catalogTable.entityType == EntityType.ENTITY || catalogTable.entityType == EntityType.SOURCE ) { // get foreign keys - List foreignKeys = catalog.getSnapshot().rel().getForeignKeys( catalogTable.id ); - for ( CatalogForeignKey catalogForeignKey : foreignKeys ) { - for ( int i = 0; i < catalogForeignKey.getReferencedKeyColumnNames().size(); i++ ) { + List foreignKeys = catalog.getSnapshot().rel().getForeignKeys( catalogTable.id ); + for ( LogicalForeignKey logicalForeignKey : foreignKeys ) { + for ( int i = 0; i < logicalForeignKey.getReferencedKeyColumnNames().size(); i++ ) { fKeys.add( ForeignKey.builder() - .targetSchema( catalogForeignKey.getReferencedKeySchemaName() ) - .targetTable( catalogForeignKey.getReferencedKeyTableName() ) - .targetColumn( catalogForeignKey.getReferencedKeyColumnNames().get( i ) ) - .sourceSchema( catalogForeignKey.getSchemaName() ) - .sourceTable( catalogForeignKey.getTableName() ) - .sourceColumn( catalogForeignKey.getColumnNames().get( i ) ) - .fkName( catalogForeignKey.name ) - .onUpdate( catalogForeignKey.updateRule.toString() ) - .onDelete( catalogForeignKey.deleteRule.toString() ) + .targetSchema( logicalForeignKey.getReferencedKeySchemaName() ) + .targetTable( logicalForeignKey.getReferencedKeyTableName() ) + .targetColumn( logicalForeignKey.getReferencedKeyColumnNames().get( i ) ) + .sourceSchema( logicalForeignKey.getSchemaName() ) + .sourceTable( logicalForeignKey.getTableName() ) + .sourceColumn( logicalForeignKey.getColumnNames().get( i ) ) + .fkName( logicalForeignKey.name ) + .onUpdate( logicalForeignKey.updateRule.toString() ) + .onDelete( logicalForeignKey.deleteRule.toString() ) .build() ); } } @@ -2513,7 +2513,7 @@ void getUml( final Context ctx ) { // get primary key with its columns if ( catalogTable.primaryKey != null ) { - CatalogPrimaryKey catalogPrimaryKey = catalog.getSnapshot().rel().getPrimaryKey( catalogTable.primaryKey ); + LogicalPrimaryKey catalogPrimaryKey = catalog.getSnapshot().rel().getPrimaryKey( catalogTable.primaryKey ); for ( String columnName : catalogPrimaryKey.getColumnNames() ) { table.addPrimaryKeyField( columnName ); } @@ -2534,13 +2534,13 @@ void getUml( final Context ctx ) { } // get unique 
indexes - List catalogIndexes = catalog.getSnapshot().rel().getIndexes( catalogTable.id, true ); - for ( CatalogIndex catalogIndex : catalogIndexes ) { + List logicalIndices = catalog.getSnapshot().rel().getIndexes( catalogTable.id, true ); + for ( LogicalIndex logicalIndex : logicalIndices ) { // TODO: unique indexes can be over multiple columns. - if ( catalogIndex.key.getColumnNames().size() == 1 && - catalogIndex.key.getSchemaName().equals( table.getSchema() ) && - catalogIndex.key.getTableName().equals( table.getTableName() ) ) { - table.addUniqueColumn( catalogIndex.key.getColumnNames().get( 0 ) ); + if ( logicalIndex.key.getColumnNames().size() == 1 && + logicalIndex.key.getSchemaName().equals( table.getSchema() ) && + logicalIndex.key.getTableName().equals( table.getTableName() ) ) { + table.addUniqueColumn( logicalIndex.key.getColumnNames().get( 0 ) ); } // table.addUnique( new ArrayList<>( catalogIndex.key.columnNames )); } From 92eebfe09b0d3ffe3e813a05f839e7d30a5d2c89 Mon Sep 17 00:00:00 2001 From: datomo Date: Mon, 17 Apr 2023 13:00:15 +0200 Subject: [PATCH 070/436] adjusting key logic in catalog --- .../common/LogicalConstraintEnforcer.java | 4 +- .../org/polypheny/db/catalog/Catalog.java | 3 + .../db/catalog/entity/AllocationColumn.java | 8 -- .../db/catalog/entity/CatalogObject.java | 2 +- .../db/catalog/entity/LogicalNamespace.java | 4 +- .../catalog/entity/logical/LogicalColumn.java | 13 +- .../catalog/snapshot/LogicalRelSnapshot.java | 2 + .../snapshot/impl/LogicalRelSnapshotImpl.java | 120 ++++++++++++------ .../org/polypheny/db/avatica/DbmsMeta.java | 4 +- .../avatica/PolyphenyDbConnectionHandle.java | 3 +- .../polypheny/db/cql/CqlLanguagePlugin.java | 3 +- .../db/catalog/logical/RelationalCatalog.java | 66 +++++----- .../polypheny/db/restapi/RequestColumn.java | 2 +- .../polypheny/db/restapi/RequestParser.java | 2 +- .../db/restapi/RestInterfacePlugin.java | 2 +- 15 files changed, 138 insertions(+), 100 deletions(-) diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalConstraintEnforcer.java b/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalConstraintEnforcer.java index d3ecdd99a8..c44267e7cd 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalConstraintEnforcer.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalConstraintEnforcer.java @@ -129,7 +129,7 @@ private static EnforcementInformation getControl( AlgNode node, Statement statem List errorMessages = new ArrayList<>(); List> errorClasses = new ArrayList<>(); if ( (modify.isInsert() || modify.isMerge() || modify.isUpdate()) && RuntimeConfig.UNIQUE_CONSTRAINT_ENFORCEMENT.getBoolean() ) { - //builder.scan( table.getSchemaName(), table.name ); + //builder.scan( table.getNamespaceName(), table.name ); for ( CatalogConstraint constraint : constraints ) { builder.clear(); final AlgNode scan = LogicalRelScan.create( modify.getCluster(), modify.getEntity() ); @@ -256,7 +256,7 @@ public static EnforcementInformation getControl( LogicalTable table, Statement s List errorMessages = new ArrayList<>(); List> errorClasses = new ArrayList<>(); if ( RuntimeConfig.UNIQUE_CONSTRAINT_ENFORCEMENT.getBoolean() ) { - //builder.scan( table.getSchemaName(), table.name ); + //builder.scan( table.getNamespaceName(), table.name ); for ( CatalogConstraint constraint : constraints ) { builder.clear(); builder.scan( table );//LogicalTableScan.create( modify.getCluster(), modify.getTable() ); diff --git 
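The comment edits in LogicalConstraintEnforcer above track the schema-to-namespace rename; LogicalColumn gains the matching getNamespaceName() accessor further down, and RequestColumn at the end of this patch builds fully qualified names from it. Usage, as it appears there:

    // Fully qualified column name built from the renamed accessors.
    String fqn = column.getNamespaceName() + "." + column.getTableName() + "." + column.name;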
a/core/src/main/java/org/polypheny/db/catalog/Catalog.java b/core/src/main/java/org/polypheny/db/catalog/Catalog.java index ed10ee1474..c40fd7d123 100644 --- a/core/src/main/java/org/polypheny/db/catalog/Catalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/Catalog.java @@ -41,6 +41,9 @@ public abstract class Catalog implements ExtensionPoint { + public static String DATABASE_NAME = "APP"; + public static String USER_NAME = "pa"; // change with user management + public static Adapter defaultStore; public static Adapter defaultSource; public static long defaultUserId = 0; diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/AllocationColumn.java b/core/src/main/java/org/polypheny/db/catalog/entity/AllocationColumn.java index a871a9c1a4..8039a4eb47 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/AllocationColumn.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/AllocationColumn.java @@ -21,7 +21,6 @@ import java.io.Serializable; import lombok.EqualsAndHashCode; import lombok.NonNull; -import lombok.SneakyThrows; import lombok.Value; import lombok.With; import org.polypheny.db.algebra.type.AlgDataType; @@ -68,23 +67,16 @@ public AllocationColumn( - @SneakyThrows public String getLogicalTableName() { return Catalog.snapshot().rel().getTable( tableId ).name; } - @SneakyThrows public String getLogicalColumnName() { return Catalog.snapshot().rel().getColumn( columnId ).name; } - @SneakyThrows - public String getAdapterUniqueName() { - return Catalog.snapshot().getAdapter( adapterId ).uniqueName; - } - // Used for creating ResultSets @Override diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogObject.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogObject.java index e2ad386a26..e8ae65e7fd 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogObject.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogObject.java @@ -28,7 +28,7 @@ public interface CatalogObject extends Serializable { Serializable[] getParameterArray(); - static String getEnumNameOrNull( Enum theEnum ) { + static String getEnumNameOrNull( Enum theEnum ) { if ( theEnum == null ) { return null; } else { diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/LogicalNamespace.java b/core/src/main/java/org/polypheny/db/catalog/entity/LogicalNamespace.java index 7c38ff66cd..18efdbc352 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/LogicalNamespace.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/LogicalNamespace.java @@ -26,6 +26,7 @@ import lombok.RequiredArgsConstructor; import lombok.Value; import lombok.With; +import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.logistic.NamespaceType; @@ -65,7 +66,7 @@ public LogicalNamespace( // Used for creating ResultSets @Override public Serializable[] getParameterArray() { - return new Serializable[]{ name, CatalogObject.getEnumNameOrNull( namespaceType ) }; + return new Serializable[]{ name, Catalog.DATABASE_NAME, Catalog.USER_NAME, CatalogObject.getEnumNameOrNull( namespaceType ) }; } @@ -78,6 +79,7 @@ public int compareTo( LogicalNamespace o ) { return -1; } + @RequiredArgsConstructor public static class PrimitiveCatalogSchema { diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalColumn.java b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalColumn.java index b02e3063f9..774f197bae 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalColumn.java +++ 
b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalColumn.java @@ -22,13 +22,12 @@ import lombok.EqualsAndHashCode; import lombok.NonNull; import lombok.RequiredArgsConstructor; -import lombok.SneakyThrows; import lombok.Value; import lombok.experimental.NonFinal; import lombok.experimental.SuperBuilder; -import org.apache.commons.lang3.NotImplementedException; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; +import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogDefaultValue; import org.polypheny.db.catalog.entity.CatalogObject; import org.polypheny.db.catalog.logistic.Collation; @@ -146,22 +145,20 @@ public AlgDataType getAlgDataType( final AlgDataTypeFactory typeFactory ) { } - @SneakyThrows - public String getSchemaName() { - throw new NotImplementedException(); + public String getNamespaceName() { + return Catalog.snapshot().getNamespace( namespaceId ).name; } - @SneakyThrows public String getTableName() { - throw new NotImplementedException(); + return Catalog.snapshot().rel().getTable( tableId ).name; } @Override public Serializable[] getParameterArray() { return new Serializable[]{ - getSchemaName(), + getNamespaceName(), getTableName(), name, type.getJdbcOrdinal(), diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalRelSnapshot.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalRelSnapshot.java index 89b7967f81..bb12befe80 100644 --- a/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalRelSnapshot.java +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalRelSnapshot.java @@ -286,4 +286,6 @@ public interface LogicalRelSnapshot { LogicalKey getKeys( long[] columnIds ); + LogicalKey getKey( long id ); + } diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalRelSnapshotImpl.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalRelSnapshotImpl.java index ce03940459..ee9e85c084 100644 --- a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalRelSnapshotImpl.java +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalRelSnapshotImpl.java @@ -26,6 +26,7 @@ import java.util.TreeSet; import java.util.stream.Collectors; import lombok.Value; +import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.catalog.catalogs.LogicalRelationalCatalog; @@ -63,6 +64,7 @@ public class LogicalRelSnapshotImpl implements LogicalRelSnapshot { ImmutableMap keys; ImmutableMap> tableKeys; + ImmutableMap columnsKey; ImmutableMap index; @@ -82,29 +84,21 @@ public class LogicalRelSnapshotImpl implements LogicalRelSnapshot { ImmutableMap> tableForeignKeys; ImmutableMap nodes; ImmutableMap> connectedViews; - ImmutableMap columnsKey; public LogicalRelSnapshotImpl( Map catalogs ) { - namespaces = ImmutableMap.copyOf( catalogs.values().stream().map( LogicalRelationalCatalog::getLogicalNamespace ).collect( Collectors.toMap( n -> n.id, n -> n ) ) ); - namespaceNames = ImmutableMap.copyOf( namespaces.values().stream().collect( Collectors.toMap( n -> n.name, n -> n ) ) ); + this.namespaces = ImmutableMap.copyOf( catalogs.values().stream().map( LogicalRelationalCatalog::getLogicalNamespace ).collect( Collectors.toMap( n -> n.id, n -> n ) ) ); + this.namespaceNames = ImmutableMap.copyOf( namespaces.values().stream().collect( Collectors.toMap( n -> n.name, n -> n ) ) ); - tables = ImmutableMap.copyOf( 
catalogs.values().stream().flatMap( c -> c.getTables().entrySet().stream() ).collect( Collectors.toMap( Entry::getKey, Entry::getValue ) ) ); - tableNames = ImmutableMap.copyOf( tables.entrySet().stream().collect( Collectors.toMap( e -> Pair.of( e.getValue().namespaceId, getAdjustedName( e.getValue().namespaceId, e.getValue().name ) ), Entry::getValue ) ) ); + this.tables = ImmutableMap.copyOf( catalogs.values().stream().flatMap( c -> c.getTables().entrySet().stream() ).collect( Collectors.toMap( Entry::getKey, Entry::getValue ) ) ); + this.tableNames = ImmutableMap.copyOf( tables.entrySet().stream().collect( Collectors.toMap( e -> Pair.of( e.getValue().namespaceId, getAdjustedName( e.getValue().namespaceId, e.getValue().name ) ), Entry::getValue ) ) ); - columns = ImmutableMap.copyOf( catalogs.values().stream().flatMap( c -> c.getColumns().entrySet().stream() ).collect( Collectors.toMap( Entry::getKey, Entry::getValue ) ) ); - columnNames = ImmutableMap.copyOf( columns.entrySet().stream().collect( Collectors.toMap( e -> namespaces.get( e.getValue().namespaceId ).caseSensitive ? Pair.of( e.getValue().tableId, e.getValue().name ) : Pair.of( e.getValue().tableId, e.getValue().name.toLowerCase() ), Entry::getValue ) ) ); + this.columns = ImmutableMap.copyOf( catalogs.values().stream().flatMap( c -> c.getColumns().entrySet().stream() ).collect( Collectors.toMap( Entry::getKey, Entry::getValue ) ) ); + this.columnNames = ImmutableMap.copyOf( columns.entrySet().stream().collect( Collectors.toMap( e -> namespaces.get( e.getValue().namespaceId ).caseSensitive ? Pair.of( e.getValue().tableId, e.getValue().name ) : Pair.of( e.getValue().tableId, e.getValue().name.toLowerCase() ), Entry::getValue ) ) ); //// tables - Map> tableChildren = new HashMap<>(); - columns.forEach( ( k, v ) -> { - if ( !tableChildren.containsKey( v.tableId ) ) { - tableChildren.put( v.tableId, new TreeSet<>( Comparator.comparingInt( a -> a.position ) ) ); - } - tableChildren.get( v.tableId ).add( v ); - } ); - this.tableColumns = ImmutableMap.copyOf( tableChildren ); + this.tableColumns = buildTableColumns(); this.tableColumnIdColumn = ImmutableMap.copyOf( columns.entrySet().stream().collect( Collectors.toMap( c -> Pair.of( c.getValue().tableId, c.getValue().id ), Entry::getValue ) ) ); this.tableColumnNameColumn = ImmutableMap.copyOf( columns.entrySet().stream().collect( Collectors.toMap( c -> Pair.of( tables.get( c.getValue().tableId ).name, c.getValue().name ), Entry::getValue ) ) ); @@ -112,7 +106,7 @@ public LogicalRelSnapshotImpl( Map catalogs ) { //// KEYS - keys = ImmutableMap.copyOf( catalogs.values().stream().flatMap( c -> c.getKeys().entrySet().stream() ).collect( Collectors.toMap( Entry::getKey, Entry::getValue ) ) ); + this.keys = ImmutableMap.copyOf( catalogs.values().stream().flatMap( c -> c.getKeys().entrySet().stream() ).collect( Collectors.toMap( Entry::getKey, Entry::getValue ) ) ); this.tableKeys = buildTableKeys(); @@ -120,25 +114,11 @@ public LogicalRelSnapshotImpl( Map catalogs ) { this.index = ImmutableMap.copyOf( catalogs.values().stream().flatMap( c -> c.getIndexes().entrySet().stream() ).collect( Collectors.toMap( Entry::getKey, Entry::getValue ) ) ); - Map> keyToIndexes = new HashMap<>(); - this.index.forEach( ( k, v ) -> { - if ( !keyToIndexes.containsKey( v.keyId ) ) { - keyToIndexes.put( v.keyId, new ArrayList<>() ); - } - keyToIndexes.get( v.keyId ).add( v ); - } ); - this.keyToIndexes = ImmutableMap.copyOf( keyToIndexes ); + this.keyToIndexes = buildKeyToIndexes(); this.foreignKeys = 
ImmutableMap.copyOf( keys.entrySet().stream().filter( f -> f.getValue() instanceof LogicalForeignKey ).collect( Collectors.toMap( Entry::getKey, e -> (LogicalForeignKey) e.getValue() ) ) ); - HashMap> tableForeignKeys = new HashMap<>(); - foreignKeys.forEach( ( k, v ) -> { - if ( !tableForeignKeys.containsKey( v.tableId ) ) { - tableForeignKeys.put( v.tableId, new ArrayList<>() ); - } - tableForeignKeys.get( v.tableId ).add( v ); - } ); - this.tableForeignKeys = ImmutableMap.copyOf( tableForeignKeys ); + this.tableForeignKeys = buildTableForeignKeys(); this.primaryKeys = ImmutableMap.copyOf( keys.entrySet().stream().filter( f -> f.getValue() instanceof LogicalPrimaryKey ).collect( Collectors.toMap( Entry::getKey, e -> (LogicalPrimaryKey) e.getValue() ) ) ); @@ -146,27 +126,76 @@ public LogicalRelSnapshotImpl( Map catalogs ) { this.constraints = ImmutableMap.copyOf( catalogs.values().stream().flatMap( c -> c.getConstraints().entrySet().stream() ).collect( Collectors.toMap( Entry::getKey, Entry::getValue ) ) ); - HashMap> tableConstraints = new HashMap<>(); - constraints.forEach( ( k, v ) -> { - if ( !tableConstraints.containsKey( v.key.tableId ) ) { - tableConstraints.put( v.key.tableId, new ArrayList<>() ); - } - tableConstraints.get( v.key.tableId ).add( v ); - } ); - this.tableConstraints = ImmutableMap.copyOf( tableConstraints ); + this.tableConstraints = buildTableConstraints(); /// ALGNODES e.g. views and materializedViews this.nodes = ImmutableMap.copyOf( catalogs.values().stream().flatMap( c -> c.getNodes().entrySet().stream() ).collect( Collectors.toMap( Entry::getKey, Entry::getValue ) ) ); - this.views = ImmutableMap.copyOf( tables + this.views = buildViews(); + + this.connectedViews = buildConnectedViews(); + + } + + + private ImmutableMap buildViews() { + return ImmutableMap.copyOf( tables .values() .stream() .filter( t -> t.unwrap( LogicalView.class ) != null ) .map( t -> t.unwrap( LogicalView.class ) ) .collect( Collectors.toMap( e -> e.id, e -> e ) ) ); + } + + + private ImmutableMap> buildTableColumns() { + Map> map = new HashMap<>(); + columns.forEach( ( k, v ) -> { + if ( !map.containsKey( v.tableId ) ) { + map.put( v.tableId, new TreeSet<>( Comparator.comparingInt( a -> a.position ) ) ); + } + map.get( v.tableId ).add( v ); + } ); + return ImmutableMap.copyOf( map ); + } + + + @NotNull + private ImmutableMap> buildKeyToIndexes() { + Map> map = new HashMap<>(); + this.index.forEach( ( k, v ) -> { + if ( !map.containsKey( v.keyId ) ) { + map.put( v.keyId, new ArrayList<>() ); + } + map.get( v.keyId ).add( v ); + } ); + return ImmutableMap.copyOf( map ); + } + + + @NotNull + private ImmutableMap> buildTableConstraints() { + Map> map = new HashMap<>(); + constraints.forEach( ( k, v ) -> { + if ( !map.containsKey( v.key.tableId ) ) { + map.put( v.key.tableId, new ArrayList<>() ); + } + map.get( v.key.tableId ).add( v ); + } ); + return ImmutableMap.copyOf( map ); + } - this.connectedViews = buildConnectedViews(); + @NotNull + private ImmutableMap> buildTableForeignKeys() { + Map> map = new HashMap<>(); + foreignKeys.forEach( ( k, v ) -> { + if ( !map.containsKey( v.tableId ) ) { + map.put( v.tableId, new ArrayList<>() ); + } + map.get( v.tableId ).add( v ); + } ); + return ImmutableMap.copyOf( map ); } @@ -467,4 +496,11 @@ public LogicalKey getKeys( long[] columnIds ) { return columnsKey.get( columnIds ); } + + @Override + public LogicalKey getKey( long id ) { + return keys.get( id ); + } + + } diff --git 
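The snapshot constructor now delegates each derived index to a dedicated build* helper, and the helpers share one shape: group map values by a key and freeze the result. A generic sketch of that shape (hypothetical, not part of the patch; buildTableColumns would additionally need a sorted downstream collector to keep its TreeSet ordering):

    // Sketch: generic grouping helper covering buildKeyToIndexes,
    // buildTableConstraints and buildTableForeignKeys. Assumes
    // com.google.common.collect.ImmutableMap, java.util.function.Function
    // and java.util.stream.Collectors are imported.
    static <K, V> ImmutableMap<K, List<V>> groupBy( Map<?, V> source, Function<V, K> key ) {
        return ImmutableMap.copyOf( source.values().stream().collect( Collectors.groupingBy( key ) ) );
    }

With it, buildTableForeignKeys() reduces to groupBy( foreignKeys, k -> k.tableId ). Note also that columnsKey is keyed by long[]: Java arrays compare by identity, not by content, so getKeys( long[] ) only hits when the caller passes the exact array instance that was stored; a List<Long> key would give the value semantics the lookup appears to expect.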
a/plugins/avatica-interface/src/main/java/org/polypheny/db/avatica/DbmsMeta.java b/plugins/avatica-interface/src/main/java/org/polypheny/db/avatica/DbmsMeta.java index 1111abcf1e..5dc75d773a 100644 --- a/plugins/avatica-interface/src/main/java/org/polypheny/db/avatica/DbmsMeta.java +++ b/plugins/avatica-interface/src/main/java/org/polypheny/db/avatica/DbmsMeta.java @@ -1403,11 +1403,11 @@ public void openConnection( final ConnectionHandle ch, final Map String databaseName = connectionParameters.getOrDefault( "database", connectionParameters.get( "db" ) ); if ( databaseName == null || databaseName.isEmpty() ) { - databaseName = "APP"; + databaseName = Catalog.DATABASE_NAME; } String defaultSchemaName = connectionParameters.get( "schema" ); if ( defaultSchemaName == null || defaultSchemaName.isEmpty() ) { - defaultSchemaName = "public"; + defaultSchemaName = Catalog.defaultNamespaceName; } // Create transaction diff --git a/plugins/avatica-interface/src/main/java/org/polypheny/db/avatica/PolyphenyDbConnectionHandle.java b/plugins/avatica-interface/src/main/java/org/polypheny/db/avatica/PolyphenyDbConnectionHandle.java index 1acbe0b9ba..8851c9e3b0 100644 --- a/plugins/avatica-interface/src/main/java/org/polypheny/db/avatica/PolyphenyDbConnectionHandle.java +++ b/plugins/avatica-interface/src/main/java/org/polypheny/db/avatica/PolyphenyDbConnectionHandle.java @@ -23,6 +23,7 @@ import org.apache.calcite.avatica.Meta; import org.apache.calcite.avatica.Meta.ConnectionHandle; import org.apache.calcite.avatica.Meta.ConnectionProperties; +import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogDatabase; import org.polypheny.db.catalog.entity.CatalogUser; import org.polypheny.db.catalog.entity.LogicalNamespace; @@ -52,7 +53,7 @@ public class PolyphenyDbConnectionHandle { private final TransactionManager transactionManager; - private final ConnectionProperties connectionProperties = new ConnectionPropertiesImpl( true, false, java.sql.Connection.TRANSACTION_SERIALIZABLE, "APP", "public" ); + private final ConnectionProperties connectionProperties = new ConnectionPropertiesImpl( true, false, java.sql.Connection.TRANSACTION_SERIALIZABLE, Catalog.DATABASE_NAME, Catalog.defaultNamespaceName ); public PolyphenyDbConnectionHandle( final Meta.ConnectionHandle handle, final CatalogUser catalogUser, final ConnectionId connectionId, final CatalogDatabase database, final LogicalNamespace schema, final TransactionManager transactionManager ) { diff --git a/plugins/cql-language/src/main/java/org/polypheny/db/cql/CqlLanguagePlugin.java b/plugins/cql-language/src/main/java/org/polypheny/db/cql/CqlLanguagePlugin.java index 35e78e81b9..c5526712eb 100644 --- a/plugins/cql-language/src/main/java/org/polypheny/db/cql/CqlLanguagePlugin.java +++ b/plugins/cql-language/src/main/java/org/polypheny/db/cql/CqlLanguagePlugin.java @@ -25,6 +25,7 @@ import org.polypheny.db.PolyImplementation; import org.polypheny.db.adapter.java.JavaTypeFactory; import org.polypheny.db.algebra.AlgRoot; +import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.cql.parser.CqlParser; import org.polypheny.db.information.InformationManager; @@ -112,7 +113,7 @@ public static List processCqlRequest( if ( transaction.isAnalyze() ) { statement.getOverviewDuration().start( "Parsing" ); } - CqlParser cqlParser = new CqlParser( query, "APP" ); + CqlParser cqlParser = new CqlParser( query, Catalog.DATABASE_NAME ); CqlQuery cqlQuery = cqlParser.parse(); if ( 
transaction.isAnalyze() ) { statement.getOverviewDuration().start( "Parsing" ); diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/RelationalCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/RelationalCatalog.java index 63cdf360a0..7d9faaf0a1 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/RelationalCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/RelationalCatalog.java @@ -60,6 +60,7 @@ import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.logistic.ForeignKeyOption; import org.polypheny.db.catalog.logistic.IndexType; +import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.languages.QueryLanguage; import org.polypheny.db.type.PolyType; @@ -91,6 +92,7 @@ public class RelationalCatalog implements Serializable, LogicalRelationalCatalog @Getter public Map<Long, LogicalIndex> indexes; + // while keys "belong" to a specific table, they can reference other namespaces; at the moment they are placed here, but this might change later @Serialize @Getter public Map<Long, LogicalKey> keys; @@ -408,9 +410,9 @@ private void deleteKeyIfNoLongerUsed( Long keyId ) { private int getKeyUniqueCount( long keyId ) { - LogicalKey key = keys.get( keyId ); + LogicalKey key = Catalog.snapshot().rel().getKey( keyId ); int count = 0; - if ( isPrimaryKey( keyId ) ) { + if ( Catalog.snapshot().rel().getPrimaryKey( keyId ) != null ) { count++; } @@ -433,40 +435,42 @@ private int getKeyUniqueCount( long keyId ) { @Override public void addForeignKey( long tableId, List<Long> columnIds, long referencesTableId, List<Long> referencesIds, String constraintName, ForeignKeyOption onUpdate, ForeignKeyOption onDelete ) { LogicalTable table = tables.get( tableId ); - List<LogicalKey> childKeys = keys.values().stream().filter( k -> k.tableId == referencesTableId ).collect( Collectors.toList() ); + Snapshot snapshot = Catalog.getInstance().getSnapshot(); + List<LogicalKey> childKeys = snapshot.rel().getTableKeys( referencesTableId ); for ( LogicalKey refKey : childKeys ) { - if ( refKey.columnIds.size() == referencesIds.size() && refKey.columnIds.containsAll( referencesIds ) && new HashSet<>( referencesIds ).containsAll( refKey.columnIds ) ) { - - int i = 0; - for ( long referencedColumnId : refKey.columnIds ) { - LogicalColumn referencingColumn = columns.get( columnIds.get( i++ ) ); - LogicalColumn referencedColumn = columns.get( referencedColumnId ); - if ( referencedColumn.type != referencingColumn.type ) { - throw new GenericRuntimeException( "The data type of the referenced columns does not match the data type of the referencing column: %s != %s", referencingColumn.type.name(), referencedColumn.type ); - } + if ( refKey.columnIds.size() != referencesIds.size() || !refKey.columnIds.containsAll( referencesIds ) || !new HashSet<>( referencesIds ).containsAll( refKey.columnIds ) ) { + continue; + } + int i = 0; + for ( long referencedColumnId : refKey.columnIds ) { + LogicalColumn referencingColumn = snapshot.rel().getColumn( columnIds.get( i++ ) ); + LogicalColumn referencedColumn = snapshot.rel().getColumn( referencedColumnId ); + if ( referencedColumn.type != referencingColumn.type ) { + throw new GenericRuntimeException( "The data type of the referenced columns does not match the data type of the referencing column: %s != %s", referencingColumn.type.name(), referencedColumn.type ); } - // TODO same keys for key and foreign key - if ( getKeyUniqueCount( refKey.id ) > 0 ) { - long keyId = getOrAddKey( tableId, columnIds, 
EnforcementTime.ON_COMMIT ); - LogicalForeignKey key = new LogicalForeignKey( - keyId, - constraintName, - tableId, - table.namespaceId, - refKey.id, - refKey.tableId, - refKey.namespaceId, - columnIds, - referencesIds, - onUpdate, - onDelete ); - synchronized ( this ) { - keys.put( keyId, key ); - } - return; + } + // TODO same keys for key and foreign key + if ( getKeyUniqueCount( refKey.id ) > 0 ) { + long keyId = getOrAddKey( tableId, columnIds, EnforcementTime.ON_COMMIT ); + LogicalForeignKey key = new LogicalForeignKey( + keyId, + constraintName, + tableId, + table.namespaceId, + refKey.id, + refKey.tableId, + refKey.namespaceId, + columnIds, + referencesIds, + onUpdate, + onDelete ); + synchronized ( this ) { + keys.put( keyId, key ); } + return; } + } } diff --git a/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/RequestColumn.java b/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/RequestColumn.java index 1da84e6c19..f30a17082c 100644 --- a/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/RequestColumn.java +++ b/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/RequestColumn.java @@ -37,7 +37,7 @@ public class RequestColumn { this.column = Objects.requireNonNull( column ); this.tableScanIndex = tableScanIndex; this.logicalIndex = logicalIndex; - this.fullyQualifiedName = column.getSchemaName() + "." + column.getTableName() + "." + column.name; + this.fullyQualifiedName = column.getNamespaceName() + "." + column.getTableName() + "." + column.name; if ( alias == null ) { this.alias = this.fullyQualifiedName; } else { diff --git a/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/RequestParser.java b/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/RequestParser.java index 47c8a47710..33beeba99c 100644 --- a/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/RequestParser.java +++ b/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/RequestParser.java @@ -736,7 +736,7 @@ public Map generateNameMapping( List tables Map nameMapping = new HashMap<>(); for ( LogicalTable table : tables ) { for ( LogicalColumn column : snapshop.rel().getColumns( table.id ) ) { - nameMapping.put( column.getSchemaName() + "." + column.getTableName() + "." + column.name, column ); + nameMapping.put( column.getNamespaceName() + "." + column.getTableName() + "." 
+ column.name, column ); } } diff --git a/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/RestInterfacePlugin.java b/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/RestInterfacePlugin.java index 5ad2ab7942..71d522f506 100644 --- a/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/RestInterfacePlugin.java +++ b/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/RestInterfacePlugin.java @@ -132,7 +132,7 @@ public static class HttpRestServer extends QueryInterface { public HttpRestServer( TransactionManager transactionManager, Authenticator authenticator, long ifaceId, String uniqueName, Map<String, String> settings ) { super( transactionManager, authenticator, ifaceId, uniqueName, settings, true, false ); - this.requestParser = new RequestParser( transactionManager, authenticator, "pa", "APP" ); + this.requestParser = new RequestParser( transactionManager, authenticator, Catalog.USER_NAME, Catalog.DATABASE_NAME ); this.uniqueName = uniqueName; this.port = Integer.parseInt( settings.get( "port" ) ); if ( !Util.checkIfPortIsAvailable( port ) ) { From 257cae46eedea3c12fd0ba6cc8dbd45b610d5629 Mon Sep 17 00:00:00 2001 From: datomo Date: Mon, 17 Apr 2023 14:50:21 +0200 Subject: [PATCH 071/436] adjusted foreign key for now --- .../db/catalog/entity/LogicalForeignKey.java | 9 +++-- .../db/catalog/entity/LogicalKey.java | 16 ++++---- .../db/catalog/logical/RelationalCatalog.java | 40 ++++++++++--------- 3 files changed, 34 insertions(+), 31 deletions(-) diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/LogicalForeignKey.java b/core/src/main/java/org/polypheny/db/catalog/entity/LogicalForeignKey.java index 47b511e639..7b59ffd9d7 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/LogicalForeignKey.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/LogicalForeignKey.java @@ -26,7 +26,9 @@ import lombok.RequiredArgsConstructor; import lombok.SneakyThrows; import org.apache.commons.lang.NotImplementedException; +import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.logistic.ForeignKeyOption; +import org.polypheny.db.catalog.snapshot.Snapshot; @@ -80,13 +82,12 @@ public String getReferencedKeyTableName() { @SneakyThrows public List<String> getReferencedKeyColumnNames() { - /*Catalog catalog = Catalog.getInstance(); + Snapshot snapshot = Catalog.snapshot(); List<String> columnNames = new LinkedList<>(); for ( long columnId : referencedKeyColumnIds ) { - columnNames.add( catalog.getColumn( columnId ).name ); + columnNames.add( snapshot.rel().getColumn( columnId ).name ); } - return columnNames;*/ - throw new NotImplementedException(); + return columnNames; } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/LogicalKey.java b/core/src/main/java/org/polypheny/db/catalog/entity/LogicalKey.java index bddfbac25d..65b1967a02 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/LogicalKey.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/LogicalKey.java @@ -20,12 +20,14 @@ import io.activej.serializer.annotations.Deserialize; import io.activej.serializer.annotations.Serialize; import java.io.Serializable; +import java.util.LinkedList; import java.util.List; import lombok.EqualsAndHashCode; import lombok.SneakyThrows; import lombok.Value; import lombok.experimental.NonFinal; -import org.apache.commons.lang.NotImplementedException; +import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.snapshot.Snapshot; @@ -63,27 +65,25 @@ public 
LogicalKey( @SneakyThrows public String getSchemaName() { - // return Catalog.getInstance().getNamespace( namespaceId ).name; - throw new NotImplementedException(); + return Catalog.snapshot().getNamespace( namespaceId ).name; } @SneakyThrows public String getTableName() { // return Catalog.getInstance().getTable( tableId ).name; - throw new NotImplementedException(); + return Catalog.snapshot().rel().getTable( tableId ).name; } @SneakyThrows public List<String> getColumnNames() { - /*Catalog catalog = Catalog.getInstance(); + Snapshot snapshot = Catalog.snapshot(); List<String> columnNames = new LinkedList<>(); for ( long columnId : columnIds ) { - columnNames.add( catalog.getColumn( columnId ).name ); + columnNames.add( snapshot.rel().getColumn( columnId ).name ); } - return columnNames;*/ - throw new NotImplementedException(); + return columnNames; } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/RelationalCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/RelationalCatalog.java index 7d9faaf0a1..f8f977bdde 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/RelationalCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/RelationalCatalog.java @@ -451,25 +451,27 @@ public void addForeignKey( long tableId, List<Long> columnIds, long referencesTa } } // TODO same keys for key and foreign key - if ( getKeyUniqueCount( refKey.id ) > 0 ) { - long keyId = getOrAddKey( tableId, columnIds, EnforcementTime.ON_COMMIT ); - LogicalForeignKey key = new LogicalForeignKey( - keyId, - constraintName, - tableId, - table.namespaceId, - refKey.id, - refKey.tableId, - refKey.namespaceId, - columnIds, - referencesIds, - onUpdate, - onDelete ); - synchronized ( this ) { - keys.put( keyId, key ); - } - return; + /*if ( getKeyUniqueCount( refKey.id ) > 0 ) { + continue; + }*/ + long keyId = getOrAddKey( tableId, columnIds, EnforcementTime.ON_COMMIT ); + LogicalForeignKey key = new LogicalForeignKey( + keyId, + constraintName, + tableId, + table.namespaceId, + refKey.id, + refKey.tableId, + refKey.namespaceId, + columnIds, + referencesIds, + onUpdate, + onDelete ); + synchronized ( this ) { + keys.put( keyId, key ); } + return; + } @@ -517,7 +519,7 @@ public void deletePrimaryKey( long tableId ) { public void deleteForeignKey( long foreignKeyId ) { LogicalForeignKey logicalForeignKey = (LogicalForeignKey) keys.get( foreignKeyId ); synchronized ( this ) { - keys.remove( logicalForeignKey.id ); + //keys.remove( logicalForeignKey.id ); deleteKeyIfNoLongerUsed( logicalForeignKey.id ); } } From 6e1d9c35b4eb6705e1cb06b007a505130765c50c Mon Sep 17 00:00:00 2001 From: datomo Date: Tue, 18 Apr 2023 01:20:51 +0200 Subject: [PATCH 072/436] adjusting default insertion of configuration --- .../org/polypheny/db/adapter/DataStore.java | 2 +- .../db/adapter/index/CoWHashIndex.java | 2 +- .../db/adapter/index/CowMultiHashIndex.java | 2 +- .../org/polypheny/db/adapter/index/Index.java | 2 +- .../db/adapter/index/IndexManager.java | 8 +- .../core/common/ConditionalExecute.java | 2 +- .../common/LogicalConstraintEnforcer.java | 6 +- .../relational/LogicalRelViewScan.java | 2 +- .../catalog/catalogs/AllocationCatalog.java | 2 +- .../db/catalog/catalogs/LogicalCatalog.java | 2 +- .../catalogs/LogicalRelationalCatalog.java | 10 +- .../db/catalog/entity/CatalogConstraint.java | 1 + .../db/catalog/entity/CatalogNamespace.java | 35 --- .../catalog/entity/logical/LogicalColumn.java | 1 + .../{ => logical}/LogicalForeignKey.java | 21 
+- .../entity/{ => logical}/LogicalIndex.java | 7 +- .../entity/{ => logical}/LogicalKey.java | 14 +- .../LogicalMaterializedView.java | 3 +- .../{ => logical}/LogicalNamespace.java | 9 +- .../{ => logical}/LogicalPrimaryKey.java | 12 +- .../entity/{ => logical}/LogicalView.java | 3 +- .../catalog/snapshot/LogicalRelSnapshot.java | 10 +- .../db/catalog/snapshot/Snapshot.java | 2 +- .../snapshot/impl/LogicalRelSnapshotImpl.java | 14 +- .../snapshot/impl/SnapshotBuilder.java | 2 +- .../catalog/snapshot/impl/SnapshotImpl.java | 2 +- .../processing/LogicalAlgAnalyzeShuttle.java | 5 +- .../db/schema/PolyphenyDbSchema.java | 4 - .../polypheny/db/transaction/Transaction.java | 2 +- .../db/transaction/TransactionManager.java | 2 +- .../db/view/MaterializedViewManager.java | 2 +- .../org/polypheny/db/view/ViewManager.java | 2 +- .../org/polypheny/db/catalog/MockCatalog.java | 2 +- .../java/org/polypheny/db/PolyphenyDb.java | 9 +- .../org/polypheny/db/ddl/DdlManagerImpl.java | 42 +-- .../org/polypheny/db/ddl/DefaultInserter.java | 264 ++++++++++++++++++ .../db/processing/AbstractQueryProcessor.java | 2 +- .../processing/ConstraintEnforceAttacher.java | 6 +- .../db/processing/DataMigratorImpl.java | 2 +- .../db/routing/routers/BaseRouter.java | 7 +- .../db/transaction/TransactionImpl.java | 4 +- .../transaction/TransactionManagerImpl.java | 2 +- .../db/view/MaterializedViewManagerImpl.java | 4 +- .../java/org/polypheny/db/cypher/DdlTest.java | 3 +- .../org/polypheny/db/jdbc/JdbcMetaTest.java | 8 +- .../java/org/polypheny/db/mql/DdlTest.java | 2 +- .../statistics/StatisticQueryProcessor.java | 2 +- .../org/polypheny/db/avatica/DbmsMeta.java | 31 +- .../avatica/PolyphenyDbConnectionHandle.java | 2 +- .../org/polypheny/db/cql/ColumnIndex.java | 2 +- .../java/org/polypheny/db/cql/TableIndex.java | 1 - .../db/adapter/csv/CsvEnumerator.java | 23 +- .../admin/CypherAlterDatabaseAlias.java | 2 +- .../admin/CypherCreateDatabaseAlias.java | 2 +- .../db/cypher/admin/CypherDropAlias.java | 2 +- .../db/cypher/admin/CypherDropDatabase.java | 2 +- .../db/cypher/ddl/CypherDropPlacement.java | 2 +- .../db/hsqldb/stores/HsqldbStore.java | 2 +- .../polypheny/db/languages/mql/MqlDrop.java | 2 +- .../languages/mql2alg/MqlToAlgConverter.java | 2 +- .../db/mql/mql2alg/MqlMockCatalog.java | 2 +- .../org/polypheny/db/catalog/PolyCatalog.java | 173 +----------- .../allocation/PolyAllocDocCatalog.java | 2 +- .../allocation/PolyAllocGraphCatalog.java | 3 +- .../allocation/PolyAllocRelCatalog.java | 2 +- .../db/catalog/logical/DocumentCatalog.java | 2 +- .../db/catalog/logical/GraphCatalog.java | 2 +- .../db/catalog/logical/RelationalCatalog.java | 16 +- .../polypheny/db/restapi/RequestParser.java | 2 +- .../polypheny/db/sql/language/SqlUtil.java | 1 - .../language/validate/SqlValidatorUtil.java | 16 +- .../db/sql/language/validate/WithScope.java | 2 +- .../db/sql/web/SchemaToJsonMapper.java | 2 +- .../db/sql/map/NamespaceToJsonMapperTest.java | 6 +- .../java/org/polypheny/db/webui/Crud.java | 12 +- .../org/polypheny/db/webui/WebSocket.java | 2 +- .../polypheny/db/webui/crud/LanguageCrud.java | 2 +- .../models/requests/BatchUpdateRequest.java | 1 - 78 files changed, 467 insertions(+), 408 deletions(-) delete mode 100644 core/src/main/java/org/polypheny/db/catalog/entity/CatalogNamespace.java rename core/src/main/java/org/polypheny/db/catalog/entity/{ => logical}/LogicalForeignKey.java (89%) rename core/src/main/java/org/polypheny/db/catalog/entity/{ => logical}/LogicalIndex.java (95%) rename 
core/src/main/java/org/polypheny/db/catalog/entity/{ => logical}/LogicalKey.java (90%) rename core/src/main/java/org/polypheny/db/catalog/entity/{ => logical}/LogicalMaterializedView.java (96%) rename core/src/main/java/org/polypheny/db/catalog/entity/{ => logical}/LogicalNamespace.java (89%) rename core/src/main/java/org/polypheny/db/catalog/entity/{ => logical}/LogicalPrimaryKey.java (85%) rename core/src/main/java/org/polypheny/db/catalog/entity/{ => logical}/LogicalView.java (97%) create mode 100644 dbms/src/main/java/org/polypheny/db/ddl/DefaultInserter.java diff --git a/core/src/main/java/org/polypheny/db/adapter/DataStore.java b/core/src/main/java/org/polypheny/db/adapter/DataStore.java index 4f893055bc..9930e80a48 100644 --- a/core/src/main/java/org/polypheny/db/adapter/DataStore.java +++ b/core/src/main/java/org/polypheny/db/adapter/DataStore.java @@ -31,11 +31,11 @@ import org.polypheny.db.catalog.entity.AllocationColumn; import org.polypheny.db.catalog.entity.CatalogAdapter.AdapterType; import org.polypheny.db.catalog.entity.CatalogGraphPlacement; -import org.polypheny.db.catalog.entity.LogicalIndex; import org.polypheny.db.catalog.entity.allocation.AllocationTable; import org.polypheny.db.catalog.entity.logical.LogicalCollection; import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalGraph; +import org.polypheny.db.catalog.entity.logical.LogicalIndex; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.entity.physical.PhysicalEntity; import org.polypheny.db.catalog.logistic.NamespaceType; diff --git a/core/src/main/java/org/polypheny/db/adapter/index/CoWHashIndex.java b/core/src/main/java/org/polypheny/db/adapter/index/CoWHashIndex.java index 2f56b07690..1a584b8ded 100644 --- a/core/src/main/java/org/polypheny/db/adapter/index/CoWHashIndex.java +++ b/core/src/main/java/org/polypheny/db/adapter/index/CoWHashIndex.java @@ -29,7 +29,7 @@ import org.polypheny.db.algebra.core.Values; import org.polypheny.db.algebra.exceptions.ConstraintViolationException; import org.polypheny.db.algebra.type.AlgDataType; -import org.polypheny.db.catalog.entity.LogicalNamespace; +import org.polypheny.db.catalog.entity.logical.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.rex.RexBuilder; import org.polypheny.db.rex.RexLiteral; diff --git a/core/src/main/java/org/polypheny/db/adapter/index/CowMultiHashIndex.java b/core/src/main/java/org/polypheny/db/adapter/index/CowMultiHashIndex.java index 969dee07d1..c8182d213a 100644 --- a/core/src/main/java/org/polypheny/db/adapter/index/CowMultiHashIndex.java +++ b/core/src/main/java/org/polypheny/db/adapter/index/CowMultiHashIndex.java @@ -30,7 +30,7 @@ import org.apache.commons.lang3.tuple.Triple; import org.polypheny.db.algebra.core.Values; import org.polypheny.db.algebra.type.AlgDataType; -import org.polypheny.db.catalog.entity.LogicalNamespace; +import org.polypheny.db.catalog.entity.logical.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.rex.RexBuilder; import org.polypheny.db.rex.RexLiteral; diff --git a/core/src/main/java/org/polypheny/db/adapter/index/Index.java b/core/src/main/java/org/polypheny/db/adapter/index/Index.java index 6abd615d45..fc115a46e3 100644 --- a/core/src/main/java/org/polypheny/db/adapter/index/Index.java +++ b/core/src/main/java/org/polypheny/db/adapter/index/Index.java @@ -28,7 +28,7 @@ import 
org.polypheny.db.algebra.constant.Kind; import org.polypheny.db.algebra.core.Values; import org.polypheny.db.algebra.type.AlgDataType; -import org.polypheny.db.catalog.entity.LogicalNamespace; +import org.polypheny.db.catalog.entity.logical.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.processing.QueryProcessor; import org.polypheny.db.rex.RexBuilder; diff --git a/core/src/main/java/org/polypheny/db/adapter/index/IndexManager.java b/core/src/main/java/org/polypheny/db/adapter/index/IndexManager.java index 266c0b2bca..e8a38fd333 100644 --- a/core/src/main/java/org/polypheny/db/adapter/index/IndexManager.java +++ b/core/src/main/java/org/polypheny/db/adapter/index/IndexManager.java @@ -30,10 +30,10 @@ import org.polypheny.db.adapter.DataStore.AvailableIndexMethod; import org.polypheny.db.adapter.index.Index.IndexFactory; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.entity.LogicalIndex; -import org.polypheny.db.catalog.entity.LogicalKey; -import org.polypheny.db.catalog.entity.LogicalNamespace; -import org.polypheny.db.catalog.entity.LogicalPrimaryKey; +import org.polypheny.db.catalog.entity.logical.LogicalIndex; +import org.polypheny.db.catalog.entity.logical.LogicalKey; +import org.polypheny.db.catalog.entity.logical.LogicalNamespace; +import org.polypheny.db.catalog.entity.logical.LogicalPrimaryKey; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.config.RuntimeConfig; import org.polypheny.db.information.InformationAction; diff --git a/core/src/main/java/org/polypheny/db/algebra/core/common/ConditionalExecute.java b/core/src/main/java/org/polypheny/db/algebra/core/common/ConditionalExecute.java index 9f4a7d9e09..cd8b76e834 100644 --- a/core/src/main/java/org/polypheny/db/algebra/core/common/ConditionalExecute.java +++ b/core/src/main/java/org/polypheny/db/algebra/core/common/ConditionalExecute.java @@ -25,7 +25,7 @@ import org.polypheny.db.algebra.AlgWriter; import org.polypheny.db.algebra.BiAlg; import org.polypheny.db.algebra.type.AlgDataType; -import org.polypheny.db.catalog.entity.LogicalNamespace; +import org.polypheny.db.catalog.entity.logical.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgTraitSet; diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalConstraintEnforcer.java b/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalConstraintEnforcer.java index c44267e7cd..b5716fcdcb 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalConstraintEnforcer.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalConstraintEnforcer.java @@ -39,9 +39,9 @@ import org.polypheny.db.algebra.operators.OperatorName; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogConstraint; -import org.polypheny.db.catalog.entity.LogicalForeignKey; -import org.polypheny.db.catalog.entity.LogicalKey.EnforcementTime; -import org.polypheny.db.catalog.entity.LogicalPrimaryKey; +import org.polypheny.db.catalog.entity.logical.LogicalForeignKey; +import org.polypheny.db.catalog.entity.logical.LogicalKey.EnforcementTime; +import org.polypheny.db.catalog.entity.logical.LogicalPrimaryKey; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.logistic.ConstraintType; import org.polypheny.db.catalog.snapshot.LogicalRelSnapshot; diff --git 
a/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelViewScan.java b/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelViewScan.java index de966d3213..8d37dd6c9f 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelViewScan.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelViewScan.java @@ -26,7 +26,7 @@ import org.polypheny.db.algebra.core.relational.RelScan; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.catalog.entity.CatalogEntity; -import org.polypheny.db.catalog.entity.LogicalView; +import org.polypheny.db.catalog.entity.logical.LogicalView; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.plan.Convention; diff --git a/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationCatalog.java b/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationCatalog.java index 3c78906003..910b8ffd0f 100644 --- a/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationCatalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationCatalog.java @@ -16,7 +16,7 @@ package org.polypheny.db.catalog.catalogs; -import org.polypheny.db.catalog.entity.LogicalNamespace; +import org.polypheny.db.catalog.entity.logical.LogicalNamespace; public interface AllocationCatalog { diff --git a/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalCatalog.java b/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalCatalog.java index 682f90664f..51c5928e42 100644 --- a/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalCatalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalCatalog.java @@ -16,7 +16,7 @@ package org.polypheny.db.catalog.catalogs; -import org.polypheny.db.catalog.entity.LogicalNamespace; +import org.polypheny.db.catalog.entity.logical.LogicalNamespace; public interface LogicalCatalog { diff --git a/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalRelationalCatalog.java b/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalRelationalCatalog.java index 3416b121fb..6aefe38f7f 100644 --- a/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalRelationalCatalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/catalogs/LogicalRelationalCatalog.java @@ -22,14 +22,14 @@ import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.catalog.entity.CatalogConstraint; -import org.polypheny.db.catalog.entity.LogicalIndex; -import org.polypheny.db.catalog.entity.LogicalKey; -import org.polypheny.db.catalog.entity.LogicalMaterializedView; -import org.polypheny.db.catalog.entity.LogicalNamespace; -import org.polypheny.db.catalog.entity.LogicalView; import org.polypheny.db.catalog.entity.MaterializedCriteria; import org.polypheny.db.catalog.entity.logical.LogicalColumn; +import org.polypheny.db.catalog.entity.logical.LogicalIndex; +import org.polypheny.db.catalog.entity.logical.LogicalKey; +import org.polypheny.db.catalog.entity.logical.LogicalMaterializedView; +import org.polypheny.db.catalog.entity.logical.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.catalog.entity.logical.LogicalView; import org.polypheny.db.catalog.logistic.Collation; import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.logistic.ForeignKeyOption; diff --git 
a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogConstraint.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogConstraint.java index 5c1a103fec..72b3e2e57a 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogConstraint.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogConstraint.java @@ -23,6 +23,7 @@ import lombok.EqualsAndHashCode; import lombok.NonNull; import lombok.Value; +import org.polypheny.db.catalog.entity.logical.LogicalKey; import org.polypheny.db.catalog.logistic.ConstraintType; diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogNamespace.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogNamespace.java deleted file mode 100644 index f82532991f..0000000000 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogNamespace.java +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Copyright 2019-2023 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.catalog.entity; - -import java.io.Serializable; -import org.polypheny.db.catalog.logistic.NamespaceType; - -public abstract class CatalogNamespace implements CatalogObject, Serializable { - - public final long id; - public final NamespaceType namespaceType; - public final String name; - - - public CatalogNamespace( long id, String name, NamespaceType type ) { - this.id = id; - this.namespaceType = type; - this.name = name; - } - -} diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalColumn.java b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalColumn.java index 774f197bae..c5e5f7bd15 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalColumn.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalColumn.java @@ -158,6 +158,7 @@ public String getTableName() { @Override public Serializable[] getParameterArray() { return new Serializable[]{ + Catalog.DATABASE_NAME, getNamespaceName(), getTableName(), name, diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/LogicalForeignKey.java b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalForeignKey.java similarity index 89% rename from core/src/main/java/org/polypheny/db/catalog/entity/LogicalForeignKey.java rename to core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalForeignKey.java index 7b59ffd9d7..c7767549ad 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/LogicalForeignKey.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalForeignKey.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2022 The Polypheny Project + * Copyright 2019-2023 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -14,7 +14,7 @@ * limitations under the License. 
*/ -package org.polypheny.db.catalog.entity; +package org.polypheny.db.catalog.entity.logical; import com.google.common.collect.ImmutableList; @@ -25,8 +25,8 @@ import lombok.NonNull; import lombok.RequiredArgsConstructor; import lombok.SneakyThrows; -import org.apache.commons.lang.NotImplementedException; import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.entity.CatalogObject; import org.polypheny.db.catalog.logistic.ForeignKeyOption; import org.polypheny.db.catalog.snapshot.Snapshot; @@ -68,15 +68,13 @@ public LogicalForeignKey( @SneakyThrows public String getReferencedKeySchemaName() { - // return Catalog.getInstance().getNamespace( referencedKeySchemaId ).name; - throw new NotImplementedException(); + return Catalog.snapshot().getNamespace( referencedKeySchemaId ).name; } @SneakyThrows public String getReferencedKeyTableName() { - // return Catalog.getInstance().getLogicalRel( referencedKeySchemaId ).getTable( referencedKeyTableId ).name; - throw new NotImplementedException(); + return Catalog.snapshot().rel().getTable( referencedKeyTableId ).name; } @@ -106,9 +104,11 @@ public List getCatalogForeignKeyColumns() { public Serializable[] getParameterArray( String referencedKeyColumnName, String foreignKeyColumnName, int keySeq ) { return new Serializable[]{ + Catalog.DATABASE_NAME, getReferencedKeySchemaName(), getReferencedKeyTableName(), referencedKeyColumnName, + Catalog.DATABASE_NAME, getSchemaName(), getTableName(), foreignKeyColumnName, @@ -138,11 +138,10 @@ public static class CatalogForeignKeyColumn implements CatalogObject { @SneakyThrows @Override public Serializable[] getParameterArray() { - /*return Catalog.getInstance() - .getLogicalRel( ) + return Catalog.snapshot() + .rel() .getForeignKey( tableId, foreignKeyName ) - .getParameterArray( referencedKeyColumnName, foreignKeyColumnName, keySeq );*/ - throw new NotImplementedException(); + .getParameterArray( referencedKeyColumnName, foreignKeyColumnName, keySeq ); } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/LogicalIndex.java b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalIndex.java similarity index 95% rename from core/src/main/java/org/polypheny/db/catalog/entity/LogicalIndex.java rename to core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalIndex.java index f16b56d35d..d342f05768 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/LogicalIndex.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalIndex.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2022 The Polypheny Project + * Copyright 2019-2023 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -14,7 +14,7 @@ * limitations under the License. 
*/ -package org.polypheny.db.catalog.entity; +package org.polypheny.db.catalog.entity.logical; import io.activej.serializer.annotations.Deserialize; @@ -27,6 +27,8 @@ import lombok.RequiredArgsConstructor; import lombok.Value; import lombok.experimental.SuperBuilder; +import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.entity.CatalogObject; import org.polypheny.db.catalog.logistic.IndexType; @@ -96,6 +98,7 @@ public List getCatalogIndexColumns() { public Serializable[] getParameterArray( int ordinalPosition, String columnName ) { return new Serializable[]{ + Catalog.DATABASE_NAME, key.getSchemaName(), key.getTableName(), !unique, diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/LogicalKey.java b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalKey.java similarity index 90% rename from core/src/main/java/org/polypheny/db/catalog/entity/LogicalKey.java rename to core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalKey.java index 65b1967a02..fdb7b0c458 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/LogicalKey.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalKey.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2022 The Polypheny Project + * Copyright 2019-2023 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -14,7 +14,7 @@ * limitations under the License. */ -package org.polypheny.db.catalog.entity; +package org.polypheny.db.catalog.entity.logical; import com.google.common.collect.ImmutableList; import io.activej.serializer.annotations.Deserialize; @@ -26,7 +26,9 @@ import lombok.SneakyThrows; import lombok.Value; import lombok.experimental.NonFinal; +import org.jetbrains.annotations.NotNull; import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.entity.CatalogObject; import org.polypheny.db.catalog.snapshot.Snapshot; @@ -71,7 +73,6 @@ public String getSchemaName() { @SneakyThrows public String getTableName() { - // return Catalog.getInstance().getTable( tableId ).name; return Catalog.snapshot().rel().getTable( tableId ).name; } @@ -94,11 +95,8 @@ public Serializable[] getParameterArray() { @Override - public int compareTo( LogicalKey o ) { - if ( o != null ) { - return (int) (this.id - o.id); - } - return -1; + public int compareTo( @NotNull LogicalKey o ) { + return (int) (this.id - o.id); } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/LogicalMaterializedView.java b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalMaterializedView.java similarity index 96% rename from core/src/main/java/org/polypheny/db/catalog/entity/LogicalMaterializedView.java rename to core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalMaterializedView.java index 687647bada..1dd188f56c 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/LogicalMaterializedView.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalMaterializedView.java @@ -14,7 +14,7 @@ * limitations under the License. 
*/ -package org.polypheny.db.catalog.entity; +package org.polypheny.db.catalog.entity.logical; import com.google.common.collect.ImmutableList; import io.activej.serializer.annotations.Deserialize; @@ -26,7 +26,9 @@ import lombok.SneakyThrows; import lombok.Value; import lombok.experimental.NonFinal; +import org.jetbrains.annotations.NotNull; import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.entity.CatalogObject; import org.polypheny.db.catalog.snapshot.Snapshot; @@ -71,7 +73,6 @@ public String getSchemaName() { @SneakyThrows public String getTableName() { - // return Catalog.getInstance().getTable( tableId ).name; return Catalog.snapshot().rel().getTable( tableId ).name; } @@ -94,11 +95,8 @@ public Serializable[] getParameterArray() { @Override - public int compareTo( LogicalKey o ) { - if ( o != null ) { - return (int) (this.id - o.id); - } - return -1; + public int compareTo( @NotNull LogicalKey o ) { + return Long.compare( this.id, o.id ); } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/LogicalMaterializedView.java b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalMaterializedView.java similarity index 96% rename from core/src/main/java/org/polypheny/db/catalog/entity/LogicalMaterializedView.java rename to core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalMaterializedView.java index 687647bada..1dd188f56c 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/LogicalMaterializedView.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalMaterializedView.java @@ -14,7 +14,7 @@ * limitations under the License. 
*/ -package org.polypheny.db.catalog.entity; +package org.polypheny.db.catalog.entity.logical; import io.activej.serializer.annotations.Deserialize; @@ -25,7 +25,8 @@ import lombok.EqualsAndHashCode; import lombok.NonNull; import lombok.RequiredArgsConstructor; -import org.apache.commons.lang.NotImplementedException; +import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.entity.CatalogObject; @EqualsAndHashCode(callSuper = true) @@ -59,7 +60,7 @@ public List getCatalogPrimaryKeyColumns() { public Serializable[] getParameterArray( String columnName, int keySeq ) { - return new Serializable[]{ getSchemaName(), getTableName(), columnName, keySeq, null }; + return new Serializable[]{ Catalog.DATABASE_NAME, getSchemaName(), getTableName(), columnName, keySeq, null }; } @@ -77,8 +78,7 @@ public static class CatalogPrimaryKeyColumn implements CatalogObject { @Override public Serializable[] getParameterArray() { - throw new NotImplementedException(); - //return Catalog.getInstance().getPrimaryKey( pkId ).getParameterArray( columnName, keySeq ); + return Catalog.snapshot().rel().getPrimaryKey( pkId ).getParameterArray( columnName, keySeq ); } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/LogicalView.java b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalView.java similarity index 97% rename from core/src/main/java/org/polypheny/db/catalog/entity/LogicalView.java rename to core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalView.java index e12a8d1e81..3263443466 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/LogicalView.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalView.java @@ -14,7 +14,7 @@ * limitations under the License. */ -package org.polypheny.db.catalog.entity; +package org.polypheny.db.catalog.entity.logical; import com.google.common.collect.ImmutableMap; @@ -33,7 +33,6 @@ import org.polypheny.db.algebra.SingleAlg; import org.polypheny.db.algebra.logical.relational.LogicalRelViewScan; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.languages.QueryLanguage; import org.polypheny.db.plan.AlgOptCluster; diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalRelSnapshot.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalRelSnapshot.java index bb12befe80..3e6bbfbffa 100644 --- a/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalRelSnapshot.java +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalRelSnapshot.java @@ -21,13 +21,13 @@ import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogConstraint; -import org.polypheny.db.catalog.entity.LogicalForeignKey; -import org.polypheny.db.catalog.entity.LogicalIndex; -import org.polypheny.db.catalog.entity.LogicalKey; -import org.polypheny.db.catalog.entity.LogicalPrimaryKey; -import org.polypheny.db.catalog.entity.LogicalView; import org.polypheny.db.catalog.entity.logical.LogicalColumn; +import org.polypheny.db.catalog.entity.logical.LogicalForeignKey; +import org.polypheny.db.catalog.entity.logical.LogicalIndex; +import org.polypheny.db.catalog.entity.logical.LogicalKey; +import org.polypheny.db.catalog.entity.logical.LogicalPrimaryKey; import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.catalog.entity.logical.LogicalView; import 
org.polypheny.db.catalog.logistic.Pattern; public interface LogicalRelSnapshot { diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/Snapshot.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/Snapshot.java index 7efcec4377..81ff62c6a8 100644 --- a/core/src/main/java/org/polypheny/db/catalog/snapshot/Snapshot.java +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/Snapshot.java @@ -29,8 +29,8 @@ import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.catalog.entity.CatalogQueryInterface; import org.polypheny.db.catalog.entity.CatalogUser; -import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalEntity; +import org.polypheny.db.catalog.entity.logical.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.logistic.Pattern; import org.polypheny.db.config.RuntimeConfig; diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalRelSnapshotImpl.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalRelSnapshotImpl.java index ee9e85c084..ca09e321ff 100644 --- a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalRelSnapshotImpl.java +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalRelSnapshotImpl.java @@ -31,14 +31,14 @@ import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.catalog.catalogs.LogicalRelationalCatalog; import org.polypheny.db.catalog.entity.CatalogConstraint; -import org.polypheny.db.catalog.entity.LogicalForeignKey; -import org.polypheny.db.catalog.entity.LogicalIndex; -import org.polypheny.db.catalog.entity.LogicalKey; -import org.polypheny.db.catalog.entity.LogicalNamespace; -import org.polypheny.db.catalog.entity.LogicalPrimaryKey; -import org.polypheny.db.catalog.entity.LogicalView; import org.polypheny.db.catalog.entity.logical.LogicalColumn; +import org.polypheny.db.catalog.entity.logical.LogicalForeignKey; +import org.polypheny.db.catalog.entity.logical.LogicalIndex; +import org.polypheny.db.catalog.entity.logical.LogicalKey; +import org.polypheny.db.catalog.entity.logical.LogicalNamespace; +import org.polypheny.db.catalog.entity.logical.LogicalPrimaryKey; import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.catalog.entity.logical.LogicalView; import org.polypheny.db.catalog.logistic.Pattern; import org.polypheny.db.catalog.snapshot.LogicalRelSnapshot; import org.polypheny.db.util.Pair; @@ -428,7 +428,7 @@ public List getForeignKeys( LogicalKey key ) { @Override public List getIndexes( long tableId, boolean onlyUnique ) { - return tableKeys.get( tableId ).stream().flatMap( k -> getIndexes( k ).stream() ).collect( Collectors.toList() ); + return index.values().stream().filter( i -> i.key.tableId == tableId && (!onlyUnique || i.unique) ).collect( Collectors.toList() ); } diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/SnapshotBuilder.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/SnapshotBuilder.java index 2e9bb66d03..cb5d0d5df5 100644 --- a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/SnapshotBuilder.java +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/SnapshotBuilder.java @@ -26,7 +26,7 @@ import org.polypheny.db.catalog.catalogs.LogicalGraphCatalog; import org.polypheny.db.catalog.catalogs.LogicalRelationalCatalog; import org.polypheny.db.catalog.catalogs.PhysicalCatalog; -import org.polypheny.db.catalog.entity.LogicalNamespace; 
+import org.polypheny.db.catalog.entity.logical.LogicalNamespace; import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.catalog.snapshot.AllocSnapshot; import org.polypheny.db.catalog.snapshot.LogicalDocSnapshot; diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/SnapshotImpl.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/SnapshotImpl.java index 5d693528e5..594f784d52 100644 --- a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/SnapshotImpl.java +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/SnapshotImpl.java @@ -30,8 +30,8 @@ import org.polypheny.db.catalog.entity.CatalogEntity; import org.polypheny.db.catalog.entity.CatalogQueryInterface; import org.polypheny.db.catalog.entity.CatalogUser; -import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalEntity; +import org.polypheny.db.catalog.entity.logical.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.logistic.Pattern; import org.polypheny.db.catalog.snapshot.AllocSnapshot; diff --git a/core/src/main/java/org/polypheny/db/processing/LogicalAlgAnalyzeShuttle.java b/core/src/main/java/org/polypheny/db/processing/LogicalAlgAnalyzeShuttle.java index 639103fdd4..923dbc0d6c 100644 --- a/core/src/main/java/org/polypheny/db/processing/LogicalAlgAnalyzeShuttle.java +++ b/core/src/main/java/org/polypheny/db/processing/LogicalAlgAnalyzeShuttle.java @@ -26,7 +26,7 @@ import java.util.Set; import java.util.stream.Collectors; import lombok.Getter; -import org.apache.commons.lang3.NotImplementedException; +import lombok.extern.slf4j.Slf4j; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.AlgShuttleImpl; import org.polypheny.db.algebra.core.relational.RelScan; @@ -70,6 +70,7 @@ /** * Universal routing alg shuttle class to extract partition and column information from AlgNode. 
*/ +@Slf4j public class LogicalAlgAnalyzeShuttle extends AlgShuttleImpl { protected final LogicalAlgAnalyzeRexShuttle rexShuttle; @@ -422,7 +423,7 @@ private void getPartitioningInfo( LogicalFilter filter ) { private void handleIfPartitioned( AlgNode node, LogicalTable catalogTable ) { // Only if table is partitioned - throw new NotImplementedException(); + log.warn( "todo" ); /*if ( Catalog.getInstance().getSnapshot().alloc().isPartitioned( catalogTable.id ) ) { WhereClauseVisitor whereClauseVisitor = new WhereClauseVisitor( statement, diff --git a/core/src/main/java/org/polypheny/db/schema/PolyphenyDbSchema.java b/core/src/main/java/org/polypheny/db/schema/PolyphenyDbSchema.java index 019caccfa2..2f083c768b 100644 --- a/core/src/main/java/org/polypheny/db/schema/PolyphenyDbSchema.java +++ b/core/src/main/java/org/polypheny/db/schema/PolyphenyDbSchema.java @@ -17,10 +17,7 @@ package org.polypheny.db.schema; import java.util.List; -import java.util.stream.Collectors; import org.apache.commons.lang.NotImplementedException; -import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.entity.CatalogNamespace; import org.polypheny.db.catalog.entity.allocation.AllocationCollection; import org.polypheny.db.catalog.entity.allocation.AllocationGraph; import org.polypheny.db.catalog.entity.allocation.AllocationTable; @@ -30,7 +27,6 @@ import org.polypheny.db.catalog.entity.physical.PhysicalCollection; import org.polypheny.db.catalog.entity.physical.PhysicalGraph; import org.polypheny.db.catalog.entity.physical.PhysicalTable; -import org.polypheny.db.catalog.logistic.Pattern; public interface PolyphenyDbSchema { diff --git a/core/src/main/java/org/polypheny/db/transaction/Transaction.java b/core/src/main/java/org/polypheny/db/transaction/Transaction.java index 65b51cb8a7..916900c25c 100644 --- a/core/src/main/java/org/polypheny/db/transaction/Transaction.java +++ b/core/src/main/java/org/polypheny/db/transaction/Transaction.java @@ -22,7 +22,7 @@ import java.util.concurrent.atomic.AtomicBoolean; import org.polypheny.db.adapter.Adapter; import org.polypheny.db.adapter.java.JavaTypeFactory; -import org.polypheny.db.catalog.entity.LogicalNamespace; +import org.polypheny.db.catalog.entity.logical.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.information.InformationManager; diff --git a/core/src/main/java/org/polypheny/db/transaction/TransactionManager.java b/core/src/main/java/org/polypheny/db/transaction/TransactionManager.java index 3756d268ae..8a0aa6b4e3 100644 --- a/core/src/main/java/org/polypheny/db/transaction/TransactionManager.java +++ b/core/src/main/java/org/polypheny/db/transaction/TransactionManager.java @@ -18,7 +18,7 @@ import org.polypheny.db.catalog.entity.CatalogUser; -import org.polypheny.db.catalog.entity.LogicalNamespace; +import org.polypheny.db.catalog.entity.logical.LogicalNamespace; import org.polypheny.db.transaction.Transaction.MultimediaFlavor; diff --git a/core/src/main/java/org/polypheny/db/view/MaterializedViewManager.java b/core/src/main/java/org/polypheny/db/view/MaterializedViewManager.java index fce0ad185b..e6850c3a42 100644 --- a/core/src/main/java/org/polypheny/db/view/MaterializedViewManager.java +++ b/core/src/main/java/org/polypheny/db/view/MaterializedViewManager.java @@ -25,9 +25,9 @@ import org.polypheny.db.algebra.AlgShuttleImpl; import org.polypheny.db.algebra.core.common.Modify; import 
org.polypheny.db.algebra.logical.relational.LogicalRelModify; -import org.polypheny.db.catalog.entity.LogicalMaterializedView; import org.polypheny.db.catalog.entity.MaterializedCriteria; import org.polypheny.db.catalog.entity.allocation.AllocationEntity; +import org.polypheny.db.catalog.entity.logical.LogicalMaterializedView; import org.polypheny.db.catalog.entity.physical.PhysicalEntity; import org.polypheny.db.transaction.PolyXid; import org.polypheny.db.transaction.Transaction; diff --git a/core/src/main/java/org/polypheny/db/view/ViewManager.java b/core/src/main/java/org/polypheny/db/view/ViewManager.java index bbd39eb3aa..991bc54617 100644 --- a/core/src/main/java/org/polypheny/db/view/ViewManager.java +++ b/core/src/main/java/org/polypheny/db/view/ViewManager.java @@ -46,7 +46,7 @@ import org.polypheny.db.algebra.logical.relational.LogicalUnion; import org.polypheny.db.algebra.logical.relational.LogicalValues; import org.polypheny.db.algebra.type.AlgDataType; -import org.polypheny.db.catalog.entity.LogicalMaterializedView; +import org.polypheny.db.catalog.entity.logical.LogicalMaterializedView; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.rex.RexBuilder; diff --git a/core/src/test/java/org/polypheny/db/catalog/MockCatalog.java b/core/src/test/java/org/polypheny/db/catalog/MockCatalog.java index b8522bd3c1..2c61b215fa 100644 --- a/core/src/test/java/org/polypheny/db/catalog/MockCatalog.java +++ b/core/src/test/java/org/polypheny/db/catalog/MockCatalog.java @@ -33,7 +33,7 @@ import org.polypheny.db.catalog.entity.CatalogDatabase; import org.polypheny.db.catalog.entity.CatalogQueryInterface; import org.polypheny.db.catalog.entity.CatalogUser; -import org.polypheny.db.catalog.entity.LogicalNamespace; +import org.polypheny.db.catalog.entity.logical.LogicalNamespace; import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.catalog.logistic.Pattern; import org.polypheny.db.catalog.snapshot.Snapshot; diff --git a/dbms/src/main/java/org/polypheny/db/PolyphenyDb.java b/dbms/src/main/java/org/polypheny/db/PolyphenyDb.java index e79e181dac..a8a3dfc9ff 100644 --- a/dbms/src/main/java/org/polypheny/db/PolyphenyDb.java +++ b/dbms/src/main/java/org/polypheny/db/PolyphenyDb.java @@ -40,6 +40,7 @@ import org.polypheny.db.config.RuntimeConfig; import org.polypheny.db.ddl.DdlManager; import org.polypheny.db.ddl.DdlManagerImpl; +import org.polypheny.db.ddl.DefaultInserter; import org.polypheny.db.docker.DockerManager; import org.polypheny.db.gui.GuiUtils; import org.polypheny.db.gui.SplashHelper; @@ -360,15 +361,12 @@ public void join( final long millis ) throws InterruptedException { } // Initialize DDL Manager - DdlManager.setAndGetInstance( new DdlManagerImpl( catalog ) ); + DdlManager.setAndGetInstance( new DdlManagerImpl( catalog, transactionManager ) ); // Initialize PartitionMangerFactory PartitionManagerFactory.setAndGetInstance( new PartitionManagerFactoryImpl() ); FrequencyMap.setAndGetInstance( new FrequencyMapImpl( catalog ) ); - // Initialize statistic settings - StatisticsManager.getInstance().initializeStatisticSettings(); - // Start Polypheny-UI final HttpServer httpServer = new HttpServer( transactionManager, authenticator ); Thread polyphenyUiThread = new Thread( httpServer ); @@ -416,6 +414,9 @@ public void join( final long millis ) throws InterruptedException { } PolyPluginManager.startUp( transactionManager, authenticator ); + new DefaultInserter( 
DdlManager.getInstance(), transactionManager ); + // Initialize statistic settings + StatisticsManager.getInstance().initializeStatisticSettings(); // Add tracker, which rechecks constraints after enabling ConstraintTracker tracker = new ConstraintTracker( transactionManager ); diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java index 4581e446f9..e68bdbedbd 100644 --- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java @@ -59,21 +59,21 @@ import org.polypheny.db.catalog.entity.CatalogDataPlacement; import org.polypheny.db.catalog.entity.CatalogGraphPlacement; import org.polypheny.db.catalog.entity.CatalogPartitionGroup; -import org.polypheny.db.catalog.entity.LogicalForeignKey; -import org.polypheny.db.catalog.entity.LogicalIndex; -import org.polypheny.db.catalog.entity.LogicalKey; -import org.polypheny.db.catalog.entity.LogicalMaterializedView; -import org.polypheny.db.catalog.entity.LogicalNamespace; -import org.polypheny.db.catalog.entity.LogicalPrimaryKey; -import org.polypheny.db.catalog.entity.LogicalView; import org.polypheny.db.catalog.entity.MaterializedCriteria; import org.polypheny.db.catalog.entity.MaterializedCriteria.CriteriaType; import org.polypheny.db.catalog.entity.allocation.AllocationEntity; import org.polypheny.db.catalog.entity.allocation.AllocationTable; import org.polypheny.db.catalog.entity.logical.LogicalCollection; import org.polypheny.db.catalog.entity.logical.LogicalColumn; +import org.polypheny.db.catalog.entity.logical.LogicalForeignKey; import org.polypheny.db.catalog.entity.logical.LogicalGraph; +import org.polypheny.db.catalog.entity.logical.LogicalIndex; +import org.polypheny.db.catalog.entity.logical.LogicalKey; +import org.polypheny.db.catalog.entity.logical.LogicalMaterializedView; +import org.polypheny.db.catalog.entity.logical.LogicalNamespace; +import org.polypheny.db.catalog.entity.logical.LogicalPrimaryKey; import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.catalog.entity.logical.LogicalView; import org.polypheny.db.catalog.entity.physical.PhysicalEntity; import org.polypheny.db.catalog.exceptions.GenericRuntimeException; import org.polypheny.db.catalog.logistic.Collation; @@ -106,6 +106,7 @@ import org.polypheny.db.runtime.PolyphenyDbException; import org.polypheny.db.transaction.Statement; import org.polypheny.db.transaction.TransactionException; +import org.polypheny.db.transaction.TransactionManager; import org.polypheny.db.type.ArrayType; import org.polypheny.db.type.PolyType; import org.polypheny.db.view.MaterializedViewManager; @@ -117,7 +118,7 @@ public class DdlManagerImpl extends DdlManager { private final Catalog catalog; - public DdlManagerImpl( Catalog catalog ) { + public DdlManagerImpl( Catalog catalog, TransactionManager manager ) { this.catalog = catalog; } @@ -205,7 +206,6 @@ public void addAdapter( String uniqueName, String adapterName, AdapterType adapt private void handleSource( DataSource adapter ) { - long defaultNamespaceId = 1; Map> exportedColumns; try { exportedColumns = adapter.getExportedColumns(); @@ -225,13 +225,14 @@ private void handleSource( DataSource adapter ) { tableName += i; } - LogicalTable table = catalog.getLogicalRel( defaultNamespaceId ).addTable( tableName, EntityType.SOURCE, !(adapter).isDataReadOnly() ); + LogicalTable table = catalog.getLogicalRel( Catalog.defaultNamespaceId ).addTable( tableName, 
EntityType.SOURCE, !(adapter).isDataReadOnly() ); + AllocationEntity allocation = catalog.getAllocRel( Catalog.defaultNamespaceId ).createAllocationTable( adapter.getAdapterId(), table.id ); List<Long> primaryKeyColIds = new ArrayList<>(); int colPos = 1; String physicalSchemaName = null; String physicalTableName = null; for ( ExportedColumn exportedColumn : entry.getValue() ) { - LogicalColumn column = catalog.getLogicalRel( defaultNamespaceId ).addColumn( + LogicalColumn column = catalog.getLogicalRel( Catalog.defaultNamespaceId ).addColumn( exportedColumn.name, table.id, colPos++, @@ -243,13 +244,13 @@ private void handleSource( DataSource adapter ) { exportedColumn.cardinality, exportedColumn.nullable, Collation.getDefaultCollation() ); - AllocationEntity allocation = catalog.getSnapshot().alloc().getAllocation( adapter.getAdapterId(), table.id ); - catalog.getAllocRel( defaultNamespaceId ).addColumn( + + catalog.getAllocRel( Catalog.defaultNamespaceId ).addColumn( allocation.id, column.id, PlacementType.STATIC, - exportedColumn.physicalPosition ); // Not a valid partitionGroupID --> placeholder - catalog.getAllocRel( defaultNamespaceId ).updateColumnPlacementPhysicalPosition( adapter.getAdapterId(), column.id, exportedColumn.physicalPosition ); + exportedColumn.physicalPosition - 1 ); // Not a valid partitionGroupID --> placeholder + //catalog.getAllocRel( Catalog.defaultNamespaceId ).updateColumnPlacementPhysicalPosition( adapter.getAdapterId(), column.id, exportedColumn.physicalPosition - 1 ); if ( exportedColumn.primary ) { primaryKeyColIds.add( column.id ); } @@ -261,18 +262,19 @@ private void handleSource( DataSource adapter ) { } } - catalog.getLogicalRel( defaultNamespaceId ).addPrimaryKey( table.id, primaryKeyColIds ); - LogicalTable catalogTable = catalog.getSnapshot().rel().getTable( table.id ); + catalog.getLogicalRel( Catalog.defaultNamespaceId ).addPrimaryKey( table.id, primaryKeyColIds ); + //LogicalTable catalogTable = catalog.getSnapshot().rel().getTable( table.id ); + - CatalogDataPlacement placement = catalog.getSnapshot().alloc().getDataPlacements( catalogTable.id ).get( 0 ); - catalog.getAllocRel( defaultNamespaceId ) + /*CatalogDataPlacement placement = catalog.getSnapshot().alloc().getDataPlacements( catalogTable.id ).get( 0 ); + catalog.getAllocRel( Catalog.defaultNamespaceId ) .addPartitionPlacement( catalogTable.namespaceId, adapter.getAdapterId(), catalogTable.id, placement.getAdapterId(), PlacementType.AUTOMATIC, - DataPlacementRole.UPTODATE ); + DataPlacementRole.UPTODATE );*/ } diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DefaultInserter.java b/dbms/src/main/java/org/polypheny/db/ddl/DefaultInserter.java new file mode 100644 index 0000000000..253ad6f2f4 --- /dev/null +++ b/dbms/src/main/java/org/polypheny/db/ddl/DefaultInserter.java @@ -0,0 +1,264 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ + +package org.polypheny.db.ddl; + +import com.google.common.collect.ImmutableList; +import java.util.Collections; +import java.util.List; +import org.apache.calcite.linq4j.function.Deterministic; +import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.entity.CatalogAdapter; +import org.polypheny.db.catalog.entity.CatalogAdapter.AdapterType; +import org.polypheny.db.catalog.entity.allocation.AllocationEntity; +import org.polypheny.db.catalog.entity.logical.LogicalColumn; +import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.catalog.logistic.Collation; +import org.polypheny.db.catalog.logistic.ConstraintType; +import org.polypheny.db.catalog.logistic.EntityType; +import org.polypheny.db.catalog.logistic.ForeignKeyOption; +import org.polypheny.db.catalog.logistic.NamespaceType; +import org.polypheny.db.catalog.logistic.PlacementType; +import org.polypheny.db.ddl.DdlManager.ColumnTypeInformation; +import org.polypheny.db.ddl.DdlManager.ConstraintInformation; +import org.polypheny.db.ddl.DdlManager.FieldInformation; +import org.polypheny.db.iface.QueryInterfaceManager; +import org.polypheny.db.iface.QueryInterfaceManager.QueryInterfaceType; +import org.polypheny.db.transaction.Transaction; +import org.polypheny.db.transaction.TransactionManager; +import org.polypheny.db.type.PolyType; + +@Deterministic +public class DefaultInserter { + + private final Catalog catalog = Catalog.getInstance(); + private final TransactionManager manager; + private final DdlManager ddlManager; + private Transaction transaction; + + + public DefaultInserter( DdlManager ddlManager, TransactionManager manager ) { + this.manager = manager; + this.ddlManager = ddlManager; + insertDefaultData(); + if ( Catalog.snapshot().getQueryInterface( "avatica" ) == null ) { + QueryInterfaceType avatica = QueryInterfaceManager.getREGISTER().get( "AvaticaInterface" ); + catalog.addQueryInterface( "avatica", avatica.clazz.getName(), avatica.defaultSettings ); + } + } + + + /** + * Fills the catalog database with default data, skips if data is already inserted + */ + private void insertDefaultData() { + + ////////////// + // init users + long systemId = catalog.addUser( "system", "" ); + + catalog.addUser( "pa", "" ); + + Catalog.defaultUserId = systemId; + + ////////////// + // init schema + + long namespaceId = catalog.addNamespace( "public", NamespaceType.getDefault(), false ); + + ////////////// + // init adapters + if ( catalog.getAdapters().size() != 0 ) { + catalog.commit(); + return; + } + + catalog.updateSnapshot(); + this.transaction = manager.startTransaction( catalog.getSnapshot().getUser( Catalog.defaultUserId ), catalog.getSnapshot().getNamespace( namespaceId ), false, "Defaults" ); + + // Deploy default store + ddlManager.addAdapter( "hsqldb", Catalog.defaultStore.getAdapterName(), AdapterType.STORE, Catalog.defaultStore.getDefaultSettings() ); + //AdapterManager.getInstance().addAdapter( Catalog.defaultStore.getAdapterName(), "hsqldb", AdapterType.STORE, Catalog.defaultStore.getDefaultSettings() ); + + // Deploy default CSV view + //Adapter adapter = AdapterManager.getInstance().addAdapter( Catalog.defaultSource.getAdapterName(), "hr", AdapterType.SOURCE, Catalog.defaultSource.getDefaultSettings() ); + ddlManager.addAdapter( "hr", Catalog.defaultSource.getAdapterName(), AdapterType.SOURCE, Catalog.defaultSource.getDefaultSettings() ); + //adapter.createNewSchema( Catalog.snapshot(), "public", namespaceId ); + // init schema + + //catalog.updateSnapshot(); + + 
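// Note: for source adapters, the column metadata is now derived from the adapter's exported columns in DdlManagerImpl.handleSource, so the manual CSV column setup below stays disabled. +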
//CatalogAdapter csv = catalog.getSnapshot().getAdapter( "hr" ); + //addDefaultCsvColumns( csv, namespaceId ); + + catalog.commit(); + + } + + + /** + * Initiates default columns for csv files + */ + private void addDefaultCsvColumns( CatalogAdapter csv, long namespaceId ) { + LogicalTable depts = getDepts( csv, namespaceId ); + + LogicalTable emps = getEmps( csv, namespaceId ); + + LogicalTable emp = getEmp( csv, namespaceId ); + + LogicalTable work = getWork( csv, namespaceId ); + + catalog.updateSnapshot(); + + // set foreign keys + catalog.getLogicalRel( namespaceId ).addForeignKey( + emps.id, + ImmutableList.of( catalog.getSnapshot().rel().getColumn( emps.id, "deptno" ).id ), + depts.id, + ImmutableList.of( catalog.getSnapshot().rel().getColumn( depts.id, "deptno" ).id ), + "fk_emps_depts", + ForeignKeyOption.NONE, + ForeignKeyOption.NONE ); + catalog.getLogicalRel( namespaceId ).addForeignKey( + work.id, + ImmutableList.of( catalog.getSnapshot().rel().getColumn( work.id, "employeeno" ).id ), + emp.id, + ImmutableList.of( catalog.getSnapshot().rel().getColumn( emp.id, "employeeno" ).id ), + "fk_work_emp", + ForeignKeyOption.NONE, + ForeignKeyOption.NONE ); + } + + + private LogicalTable getWork( CatalogAdapter csv, long namespaceId ) { + catalog.getLogicalRel( namespaceId ).addTable( "work", EntityType.SOURCE, false ); + LogicalTable work = Catalog.snapshot().rel().getTable( namespaceId, "work" ); + catalog.getLogicalRel( namespaceId ).addPrimaryKey( work.id, Collections.singletonList( catalog.getSnapshot().rel().getColumn( work.id, "employeeno" ).id ) ); + addDefaultCsvColumn( csv, work, "employeeno", PolyType.INTEGER, null, 1, null ); + addDefaultCsvColumn( csv, work, "educationfield", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 2, 20 ); + addDefaultCsvColumn( csv, work, "jobinvolvement", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 3, 20 ); + addDefaultCsvColumn( csv, work, "joblevel", PolyType.INTEGER, null, 4, null ); + addDefaultCsvColumn( csv, work, "jobrole", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 5, 30 ); + addDefaultCsvColumn( csv, work, "businesstravel", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 6, 20 ); + addDefaultCsvColumn( csv, work, "department", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 7, 25 ); + addDefaultCsvColumn( csv, work, "attrition", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 8, 20 ); + addDefaultCsvColumn( csv, work, "dailyrate", PolyType.INTEGER, null, 9, null ); + return work; + } + + + private LogicalTable getEmp( CatalogAdapter csv, long namespaceId ) { + catalog.getLogicalRel( namespaceId ).addTable( "emp", EntityType.SOURCE, false ); + LogicalTable emp = Catalog.snapshot().rel().getTable( namespaceId, "emp" ); + catalog.getLogicalRel( namespaceId ).addPrimaryKey( emp.id, Collections.singletonList( catalog.getSnapshot().rel().getColumn( emp.id, "employeeno" ).id ) ); + addDefaultCsvColumn( csv, emp, "employeeno", PolyType.INTEGER, null, 1, null ); + addDefaultCsvColumn( csv, emp, "age", PolyType.INTEGER, null, 2, null ); + addDefaultCsvColumn( csv, emp, "gender", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 3, 20 ); + addDefaultCsvColumn( csv, emp, "maritalstatus", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 4, 20 ); + addDefaultCsvColumn( csv, emp, "worklifebalance", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 5, 20 ); + addDefaultCsvColumn( csv, emp, "education", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 6, 20 ); + addDefaultCsvColumn( csv, emp, "monthlyincome", PolyType.INTEGER, null, 7, null ); + addDefaultCsvColumn( 
csv, emp, "relationshipjoy", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 8, 20 ); + addDefaultCsvColumn( csv, emp, "workingyears", PolyType.INTEGER, null, 9, null ); + addDefaultCsvColumn( csv, emp, "yearsatcompany", PolyType.INTEGER, null, 10, null ); + return emp; + } + + + private LogicalTable getEmps( CatalogAdapter csv, long namespaceId ) { + catalog.getLogicalRel( namespaceId ).addTable( "emps", EntityType.SOURCE, false ); + LogicalTable emps = Catalog.snapshot().rel().getTable( namespaceId, "emps" ); + catalog.getLogicalRel( namespaceId ).addPrimaryKey( emps.id, Collections.singletonList( catalog.getSnapshot().rel().getColumn( emps.id, "empid" ).id ) ); + addDefaultCsvColumn( csv, emps, "empid", PolyType.INTEGER, null, 1, null ); + addDefaultCsvColumn( csv, emps, "deptno", PolyType.INTEGER, null, 2, null ); + addDefaultCsvColumn( csv, emps, "name", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 3, 20 ); + addDefaultCsvColumn( csv, emps, "salary", PolyType.INTEGER, null, 4, null ); + addDefaultCsvColumn( csv, emps, "commission", PolyType.INTEGER, null, 5, null ); + return emps; + } + + + private LogicalTable getDepts( CatalogAdapter csv, long namespaceId ) { + List fields = List.of( + new FieldInformation( + "deptno", + new ColumnTypeInformation( PolyType.INTEGER, null, null, null, null, null, false ), + null, + null, + 1 ), + new FieldInformation( + "name", + new ColumnTypeInformation( PolyType.VARCHAR, null, 20, null, null, null, false ), + Collation.CASE_INSENSITIVE, + null, + 2 ) + ); + List constraints = List.of( + new ConstraintInformation( "primary", ConstraintType.PRIMARY, List.of( "deptno" ) ) + ); + + //DdlManager.getInstance().createTable( namespaceId, "depts", fields, constraints, true, List.of( AdapterManager.getInstance().getSource( csv.id ) ), PlacementType.AUTOMATIC, transaction.createStatement() ); + catalog.getLogicalRel( namespaceId ).addTable( "depts", EntityType.SOURCE, false ); + + LogicalTable depts = Catalog.snapshot().rel().getTable( namespaceId, "depts" ); + catalog.getLogicalRel( namespaceId ).addPrimaryKey( depts.id, Collections.singletonList( catalog.getSnapshot().rel().getColumn( depts.id, "deptno" ).id ) ); + addDefaultCsvColumn( csv, depts, "deptno", PolyType.INTEGER, null, 1, null ); + addDefaultCsvColumn( csv, depts, "name", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 2, 20 ); + return depts; + } + + + private void addDefaultCsvColumn( CatalogAdapter csv, LogicalTable table, String name, PolyType type, Collation collation, int position, Integer length ) { + if ( catalog.getSnapshot().rel().checkIfExistsColumn( table.id, name ) ) { + return; + } + LogicalColumn column = catalog.getLogicalRel( table.namespaceId ).addColumn( name, table.id, position, type, null, length, null, null, null, false, collation ); + String filename = table.name + ".csv"; + if ( table.name.equals( "emp" ) || table.name.equals( "work" ) ) { + filename += ".gz"; + } + + catalog.updateSnapshot(); + AllocationEntity alloc; + if ( !catalog.getSnapshot().alloc().adapterHasPlacement( csv.id, table.id ) ) { + alloc = catalog.getAllocRel( table.namespaceId ).createAllocationTable( csv.id, table.id ); + } else { + alloc = catalog.getSnapshot().alloc().getAllocation( csv.id, table.id ); + } + + catalog.getAllocRel( table.namespaceId ).addColumn( alloc.id, column.id, PlacementType.AUTOMATIC, position ); + //getAllocRel( table.namespaceId ).addColumn( alloc.id, colId, PlacementType.AUTOMATIC, filename, table.name, name, position ); + //getAllocRel( table.namespaceId 
).updateColumnPlacementPhysicalPosition( allocId, colId, position ); + + catalog.updateSnapshot(); + + // long partitionId = table.partitionProperty.partitionIds.get( 0 ); + // getAllocRel( table.namespaceId ).addPartitionPlacement( table.namespaceId, csv.id, table.id, partitionId, PlacementType.AUTOMATIC, DataPlacementRole.UPTODATE ); + + } + + + private void addDefaultColumn( CatalogAdapter adapter, LogicalTable table, String name, PolyType type, Collation collation, int position, Integer length ) { + /*if ( !getSnapshot().rel().checkIfExistsColumn( table.id, name ) ) { + LogicalColumn column = getLogicalRel( table.namespaceId ).addColumn( name, table.id, position, type, null, length, null, null, null, false, collation ); + AllocationEntity entity = getSnapshot().alloc().getAllocation( adapter.id, table.id ); + getAllocRel( table.namespaceId ).addColumn( entity.id, column.id, PlacementType.AUTOMATIC, position ); + //getAllocRel( table.namespaceId ).addColumn( entity.id, colId, PlacementType.AUTOMATIC, "col" + colId, table.name, name, position ); + getAllocRel( table.namespaceId ).updateColumnPlacementPhysicalPosition( adapter.id, column.id, position ); + }*/ + } + +} diff --git a/dbms/src/main/java/org/polypheny/db/processing/AbstractQueryProcessor.java b/dbms/src/main/java/org/polypheny/db/processing/AbstractQueryProcessor.java index 52fc4a3935..bd2cf3c0dd 100644 --- a/dbms/src/main/java/org/polypheny/db/processing/AbstractQueryProcessor.java +++ b/dbms/src/main/java/org/polypheny/db/processing/AbstractQueryProcessor.java @@ -79,7 +79,7 @@ import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogEntity; -import org.polypheny.db.catalog.entity.LogicalNamespace; +import org.polypheny.db.catalog.entity.logical.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.config.RuntimeConfig; diff --git a/dbms/src/main/java/org/polypheny/db/processing/ConstraintEnforceAttacher.java b/dbms/src/main/java/org/polypheny/db/processing/ConstraintEnforceAttacher.java index 0d5343730c..56bd4c05fd 100644 --- a/dbms/src/main/java/org/polypheny/db/processing/ConstraintEnforceAttacher.java +++ b/dbms/src/main/java/org/polypheny/db/processing/ConstraintEnforceAttacher.java @@ -52,10 +52,10 @@ import org.polypheny.db.algebra.operators.OperatorName; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogConstraint; -import org.polypheny.db.catalog.entity.LogicalForeignKey; -import org.polypheny.db.catalog.entity.LogicalKey.EnforcementTime; -import org.polypheny.db.catalog.entity.LogicalPrimaryKey; import org.polypheny.db.catalog.entity.logical.LogicalColumn; +import org.polypheny.db.catalog.entity.logical.LogicalForeignKey; +import org.polypheny.db.catalog.entity.logical.LogicalKey.EnforcementTime; +import org.polypheny.db.catalog.entity.logical.LogicalPrimaryKey; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.logistic.ConstraintType; import org.polypheny.db.catalog.logistic.EntityType; diff --git a/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java b/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java index a5767ed25a..706f189069 100644 --- a/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java +++ b/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java @@ -50,9 +50,9 @@ import 
org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.AllocationColumn; import org.polypheny.db.catalog.entity.CatalogAdapter; -import org.polypheny.db.catalog.entity.LogicalPrimaryKey; import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalGraph; +import org.polypheny.db.catalog.entity.logical.LogicalPrimaryKey; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.entity.physical.PhysicalEntity; import org.polypheny.db.catalog.entity.physical.PhysicalTable; diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java b/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java index 0d4b808355..1c539c3993 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java @@ -55,13 +55,12 @@ import org.polypheny.db.catalog.entity.AllocationColumn; import org.polypheny.db.catalog.entity.CatalogAdapter; import org.polypheny.db.catalog.entity.CatalogEntity; -import org.polypheny.db.catalog.entity.CatalogNamespace; -import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.allocation.AllocationEntity; import org.polypheny.db.catalog.entity.allocation.AllocationTable; import org.polypheny.db.catalog.entity.logical.LogicalCollection; import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalGraph; +import org.polypheny.db.catalog.entity.logical.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.entity.physical.PhysicalEntity; import org.polypheny.db.catalog.entity.physical.PhysicalGraph; @@ -443,7 +442,7 @@ public AlgNode handleGraphScan( LogicalLpgScan alg, Statement statement, @Nullab } - private AlgNode handleGraphOnRelational( LogicalLpgScan alg, CatalogNamespace namespace, Statement statement, Long placementId ) { + private AlgNode handleGraphOnRelational( LogicalLpgScan alg, LogicalNamespace namespace, Statement statement, Long placementId ) { AlgOptCluster cluster = alg.getCluster(); List tables = Catalog.snapshot().rel().getTables( namespace.id, null ); List> scans = tables.stream() @@ -457,7 +456,7 @@ private AlgNode handleGraphOnRelational( LogicalLpgScan alg, CatalogNamespace na } - private AlgNode handleGraphOnDocument( LogicalLpgScan alg, CatalogNamespace namespace, Statement statement, Long placementId ) { + private AlgNode handleGraphOnDocument( LogicalLpgScan alg, LogicalNamespace namespace, Statement statement, Long placementId ) { AlgOptCluster cluster = alg.getCluster(); List collections = Catalog.snapshot().doc().getCollections( namespace.id, null ); List> scans = collections.stream() diff --git a/dbms/src/main/java/org/polypheny/db/transaction/TransactionImpl.java b/dbms/src/main/java/org/polypheny/db/transaction/TransactionImpl.java index 87566436c0..058aa651e8 100644 --- a/dbms/src/main/java/org/polypheny/db/transaction/TransactionImpl.java +++ b/dbms/src/main/java/org/polypheny/db/transaction/TransactionImpl.java @@ -40,8 +40,8 @@ import org.polypheny.db.algebra.logical.common.LogicalConstraintEnforcer.EnforcementInformation; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogUser; -import org.polypheny.db.catalog.entity.LogicalKey.EnforcementTime; -import org.polypheny.db.catalog.entity.LogicalNamespace; +import 
org.polypheny.db.catalog.entity.logical.LogicalKey.EnforcementTime; +import org.polypheny.db.catalog.entity.logical.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.config.RuntimeConfig; diff --git a/dbms/src/main/java/org/polypheny/db/transaction/TransactionManagerImpl.java b/dbms/src/main/java/org/polypheny/db/transaction/TransactionManagerImpl.java index 9e69a25001..8ff75c976a 100644 --- a/dbms/src/main/java/org/polypheny/db/transaction/TransactionManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/transaction/TransactionManagerImpl.java @@ -23,7 +23,7 @@ import lombok.extern.slf4j.Slf4j; import org.polypheny.db.adapter.Adapter; import org.polypheny.db.catalog.entity.CatalogUser; -import org.polypheny.db.catalog.entity.LogicalNamespace; +import org.polypheny.db.catalog.entity.logical.LogicalNamespace; import org.polypheny.db.information.InformationGroup; import org.polypheny.db.information.InformationManager; import org.polypheny.db.information.InformationPage; diff --git a/dbms/src/main/java/org/polypheny/db/view/MaterializedViewManagerImpl.java b/dbms/src/main/java/org/polypheny/db/view/MaterializedViewManagerImpl.java index 8f7b7a628b..ff49f06654 100644 --- a/dbms/src/main/java/org/polypheny/db/view/MaterializedViewManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/view/MaterializedViewManagerImpl.java @@ -42,14 +42,14 @@ import org.polypheny.db.algebra.logical.relational.LogicalRelViewScan; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.AllocationColumn; -import org.polypheny.db.catalog.entity.LogicalMaterializedView; -import org.polypheny.db.catalog.entity.LogicalView; import org.polypheny.db.catalog.entity.MaterializedCriteria; import org.polypheny.db.catalog.entity.MaterializedCriteria.CriteriaType; import org.polypheny.db.catalog.entity.allocation.AllocationEntity; import org.polypheny.db.catalog.entity.allocation.AllocationTable; import org.polypheny.db.catalog.entity.logical.LogicalColumn; +import org.polypheny.db.catalog.entity.logical.LogicalMaterializedView; import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.catalog.entity.logical.LogicalView; import org.polypheny.db.catalog.exceptions.GenericRuntimeException; import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.snapshot.Snapshot; diff --git a/dbms/src/test/java/org/polypheny/db/cypher/DdlTest.java b/dbms/src/test/java/org/polypheny/db/cypher/DdlTest.java index 9e3e40ab8f..11c31f0bb6 100644 --- a/dbms/src/test/java/org/polypheny/db/cypher/DdlTest.java +++ b/dbms/src/test/java/org/polypheny/db/cypher/DdlTest.java @@ -28,9 +28,8 @@ import org.polypheny.db.AdapterTestSuite; import org.polypheny.db.TestHelper.JdbcConnection; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalGraph; -import org.polypheny.db.catalog.logistic.Pattern; +import org.polypheny.db.catalog.entity.logical.LogicalNamespace; import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.excluded.CassandraExcluded; import org.polypheny.db.webui.models.Result; diff --git a/dbms/src/test/java/org/polypheny/db/jdbc/JdbcMetaTest.java b/dbms/src/test/java/org/polypheny/db/jdbc/JdbcMetaTest.java index 9f0702e021..09ce08cec0 100644 --- a/dbms/src/test/java/org/polypheny/db/jdbc/JdbcMetaTest.java +++ 
b/dbms/src/test/java/org/polypheny/db/jdbc/JdbcMetaTest.java @@ -196,7 +196,7 @@ public void testMetaGetSchemas() throws SQLException { Assert.assertEquals( "Wrong column name", "SCHEMA_TYPE", rsmd.getColumnName( 4 ) ); // Check data - final Object[] schemaPublic = new Object[]{ "public", "APP", "system", "RELATIONAL" }; + final Object[] schemaPublic = new Object[]{ "public", "APP", "pa", "RELATIONAL" }; final Object[] schemaDoc = new Object[]{ "doc", "APP", "pa", "DOCUMENT" }; final Object[] schemaTest = new Object[]{ "test", "APP", "pa", "RELATIONAL" }; @@ -473,9 +473,9 @@ public void testGetIndexInfo() throws SQLException { Assert.assertEquals( "Wrong column name", "INDEX_TYPE", rsmd.getColumnName( 15 ) ); // Check data - final Object[] index1 = new Object[]{ "APP", "public", "foo", false, null, "i_foo", 0, 1, "id", null, -1, null, null, 1, 1 }; - final Object[] index2a = new Object[]{ "APP", "test", "foo2", true, null, "i_foo2", 0, 1, "name", null, -1, null, null, 1, 1 }; - final Object[] index2b = new Object[]{ "APP", "test", "foo2", true, null, "i_foo2", 0, 2, "foobar", null, -1, null, null, 1, 1 }; + final Object[] index1 = new Object[]{ "APP", "public", "foo", false, null, "i_foo", 0, 1, "id", null, -1, null, null, 0, 1 }; + final Object[] index2a = new Object[]{ "APP", "test", "foo2", true, null, "i_foo2", 0, 1, "name", null, -1, null, null, 0, 1 }; + final Object[] index2b = new Object[]{ "APP", "test", "foo2", true, null, "i_foo2", 0, 2, "foobar", null, -1, null, null, 0, 1 }; TestHelper.checkResultSet( connection.getMetaData().getIndexInfo( "APP", "public", "foo", false, false ), diff --git a/dbms/src/test/java/org/polypheny/db/mql/DdlTest.java b/dbms/src/test/java/org/polypheny/db/mql/DdlTest.java index 5570ee246a..8a19c9410c 100644 --- a/dbms/src/test/java/org/polypheny/db/mql/DdlTest.java +++ b/dbms/src/test/java/org/polypheny/db/mql/DdlTest.java @@ -32,8 +32,8 @@ import org.polypheny.db.TestHelper.JdbcConnection; import org.polypheny.db.TestHelper.MongoConnection; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalCollection; +import org.polypheny.db.catalog.entity.logical.LogicalNamespace; import org.polypheny.db.catalog.logistic.Pattern; import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.excluded.CassandraExcluded; diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticQueryProcessor.java b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticQueryProcessor.java index 62a4831074..541fd2010d 100644 --- a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticQueryProcessor.java +++ b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticQueryProcessor.java @@ -27,8 +27,8 @@ import org.polypheny.db.algebra.AlgRoot; import org.polypheny.db.algebra.constant.Kind; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalColumn; +import org.polypheny.db.catalog.entity.logical.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.logistic.NamespaceType; diff --git a/plugins/avatica-interface/src/main/java/org/polypheny/db/avatica/DbmsMeta.java b/plugins/avatica-interface/src/main/java/org/polypheny/db/avatica/DbmsMeta.java index 5dc75d773a..c43143f757 100644 --- 
a/plugins/avatica-interface/src/main/java/org/polypheny/db/avatica/DbmsMeta.java +++ b/plugins/avatica-interface/src/main/java/org/polypheny/db/avatica/DbmsMeta.java @@ -69,23 +69,22 @@ import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.algebra.type.AlgDataTypeSystem; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.entity.CatalogDatabase; import org.polypheny.db.catalog.entity.CatalogDatabase.PrimitiveCatalogDatabase; import org.polypheny.db.catalog.entity.CatalogObject; import org.polypheny.db.catalog.entity.CatalogUser; -import org.polypheny.db.catalog.entity.LogicalForeignKey; -import org.polypheny.db.catalog.entity.LogicalForeignKey.CatalogForeignKeyColumn; -import org.polypheny.db.catalog.entity.LogicalForeignKey.CatalogForeignKeyColumn.PrimitiveCatalogForeignKeyColumn; -import org.polypheny.db.catalog.entity.LogicalIndex; -import org.polypheny.db.catalog.entity.LogicalIndex.CatalogIndexColumn; -import org.polypheny.db.catalog.entity.LogicalIndex.CatalogIndexColumn.PrimitiveCatalogIndexColumn; -import org.polypheny.db.catalog.entity.LogicalNamespace; -import org.polypheny.db.catalog.entity.LogicalNamespace.PrimitiveCatalogSchema; -import org.polypheny.db.catalog.entity.LogicalPrimaryKey; -import org.polypheny.db.catalog.entity.LogicalPrimaryKey.CatalogPrimaryKeyColumn; -import org.polypheny.db.catalog.entity.LogicalPrimaryKey.CatalogPrimaryKeyColumn.PrimitiveCatalogPrimaryKeyColumn; import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalColumn.PrimitiveCatalogColumn; +import org.polypheny.db.catalog.entity.logical.LogicalForeignKey; +import org.polypheny.db.catalog.entity.logical.LogicalForeignKey.CatalogForeignKeyColumn; +import org.polypheny.db.catalog.entity.logical.LogicalForeignKey.CatalogForeignKeyColumn.PrimitiveCatalogForeignKeyColumn; +import org.polypheny.db.catalog.entity.logical.LogicalIndex; +import org.polypheny.db.catalog.entity.logical.LogicalIndex.CatalogIndexColumn; +import org.polypheny.db.catalog.entity.logical.LogicalIndex.CatalogIndexColumn.PrimitiveCatalogIndexColumn; +import org.polypheny.db.catalog.entity.logical.LogicalNamespace; +import org.polypheny.db.catalog.entity.logical.LogicalNamespace.PrimitiveCatalogSchema; +import org.polypheny.db.catalog.entity.logical.LogicalPrimaryKey; +import org.polypheny.db.catalog.entity.logical.LogicalPrimaryKey.CatalogPrimaryKeyColumn; +import org.polypheny.db.catalog.entity.logical.LogicalPrimaryKey.CatalogPrimaryKeyColumn.PrimitiveCatalogPrimaryKeyColumn; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.entity.logical.LogicalTable.PrimitiveCatalogTable; import org.polypheny.db.catalog.logistic.EntityType; @@ -386,12 +385,13 @@ public MetaResultSet getCatalogs( final ConnectionHandle ch ) { if ( log.isTraceEnabled() ) { log.trace( "getCatalogs( ConnectionHandle {} )", ch ); } - final List databases = List.of(); + //final List databases = Linq4j.asEnumerable( new String[]{ "APP", "system", "public" } ); + List databases = Collections.singletonList( new Serializable[]{ Catalog.DATABASE_NAME, "system", Catalog.defaultNamespaceName } ); StatementHandle statementHandle = createStatement( ch ); return createMetaResultSet( ch, statementHandle, - toEnumerable( databases ), + Linq4j.asEnumerable( databases ), PrimitiveCatalogDatabase.class, // According to JDBC standard: "TABLE_CAT", @@ -980,7 +980,7 @@ public StatementHandle prepare( final ConnectionHandle ch, final String 
sql, fin List avaticaParameters = deriveAvaticaParameters( parameterRowType ); - PolyphenyDbSignature signature = new PolyphenyDbSignature<>( + PolyphenyDbSignature signature = new PolyphenyDbSignature<>( sql, avaticaParameters, ImmutableMap.of(), @@ -1413,7 +1413,6 @@ public void openConnection( final ConnectionHandle ch, final Map // Create transaction Transaction transaction = transactionManager.startTransaction( user, null, false, "AVATICA Interface" ); - // Authorizer.hasAccess( user, database ); // Check schema access diff --git a/plugins/avatica-interface/src/main/java/org/polypheny/db/avatica/PolyphenyDbConnectionHandle.java b/plugins/avatica-interface/src/main/java/org/polypheny/db/avatica/PolyphenyDbConnectionHandle.java index 8851c9e3b0..9424d89336 100644 --- a/plugins/avatica-interface/src/main/java/org/polypheny/db/avatica/PolyphenyDbConnectionHandle.java +++ b/plugins/avatica-interface/src/main/java/org/polypheny/db/avatica/PolyphenyDbConnectionHandle.java @@ -26,7 +26,7 @@ import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogDatabase; import org.polypheny.db.catalog.entity.CatalogUser; -import org.polypheny.db.catalog.entity.LogicalNamespace; +import org.polypheny.db.catalog.entity.logical.LogicalNamespace; import org.polypheny.db.transaction.PUID.ConnectionId; import org.polypheny.db.transaction.PUID.UserId; import org.polypheny.db.transaction.Transaction; diff --git a/plugins/cql-language/src/main/java/org/polypheny/db/cql/ColumnIndex.java b/plugins/cql-language/src/main/java/org/polypheny/db/cql/ColumnIndex.java index 0571628034..8afbfbf7ac 100644 --- a/plugins/cql-language/src/main/java/org/polypheny/db/cql/ColumnIndex.java +++ b/plugins/cql-language/src/main/java/org/polypheny/db/cql/ColumnIndex.java @@ -18,8 +18,8 @@ import lombok.extern.slf4j.Slf4j; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalColumn; +import org.polypheny.db.catalog.entity.logical.LogicalNamespace; /** diff --git a/plugins/cql-language/src/main/java/org/polypheny/db/cql/TableIndex.java b/plugins/cql-language/src/main/java/org/polypheny/db/cql/TableIndex.java index 8f3b211952..6c253b5b07 100644 --- a/plugins/cql-language/src/main/java/org/polypheny/db/cql/TableIndex.java +++ b/plugins/cql-language/src/main/java/org/polypheny/db/cql/TableIndex.java @@ -18,7 +18,6 @@ import lombok.extern.slf4j.Slf4j; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.cql.exception.UnknownIndexException; diff --git a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvEnumerator.java b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvEnumerator.java index e72b085073..8ee83354a1 100644 --- a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvEnumerator.java +++ b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvEnumerator.java @@ -35,15 +35,6 @@ import au.com.bytecode.opencsv.CSVReader; -import org.apache.calcite.avatica.util.DateTimeUtils; -import org.apache.calcite.linq4j.Enumerator; -import org.apache.commons.lang3.time.FastDateFormat; -import org.polypheny.db.adapter.java.JavaTypeFactory; -import org.polypheny.db.algebra.type.AlgDataType; -import org.polypheny.db.type.PolyType; -import org.polypheny.db.util.Pair; -import org.polypheny.db.util.Source; - import java.io.IOException; import 
java.io.Reader; import java.text.ParseException; @@ -52,6 +43,14 @@ import java.util.List; import java.util.TimeZone; import java.util.concurrent.atomic.AtomicBoolean; +import org.apache.calcite.avatica.util.DateTimeUtils; +import org.apache.calcite.linq4j.Enumerator; +import org.apache.commons.lang3.time.FastDateFormat; +import org.polypheny.db.adapter.java.JavaTypeFactory; +import org.polypheny.db.algebra.type.AlgDataType; +import org.polypheny.db.type.PolyType; +import org.polypheny.db.util.Pair; +import org.polypheny.db.util.Source; /** @@ -393,7 +392,7 @@ public Object[] convertNormalRow( String[] strings ) { final Object[] objects = new Object[fields.length]; for ( int i = 0; i < fields.length; i++ ) { int field = fields[i]; - objects[i] = convert( fieldTypes[i], strings[field - 1] ); + objects[i] = convert( fieldTypes[i], strings[field] ); } return objects; } @@ -404,7 +403,7 @@ public Object[] convertStreamRow( String[] strings ) { objects[0] = System.currentTimeMillis(); for ( int i = 0; i < fields.length; i++ ) { int field = fields[i]; - objects[i + 1] = convert( fieldTypes[i], strings[field - 1] ); + objects[i + 1] = convert( fieldTypes[i], strings[field] ); } return objects; } @@ -415,7 +414,7 @@ public Object[] convertStreamRow( String[] strings ) { /** * Single column row converter. */ - private static class SingleColumnRowConverter extends RowConverter { + private static class SingleColumnRowConverter extends RowConverter { private final CsvFieldType fieldType; private final int fieldIndex; diff --git a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherAlterDatabaseAlias.java b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherAlterDatabaseAlias.java index e4225ac268..a23c118509 100644 --- a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherAlterDatabaseAlias.java +++ b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherAlterDatabaseAlias.java @@ -18,7 +18,7 @@ import java.util.List; import lombok.Getter; -import org.polypheny.db.catalog.entity.LogicalNamespace; +import org.polypheny.db.catalog.entity.logical.LogicalNamespace; import org.polypheny.db.catalog.logistic.Pattern; import org.polypheny.db.cypher.CypherParameter; import org.polypheny.db.cypher.CypherSimpleEither; diff --git a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherCreateDatabaseAlias.java b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherCreateDatabaseAlias.java index d4b04de4d7..48db5bd2c9 100644 --- a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherCreateDatabaseAlias.java +++ b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherCreateDatabaseAlias.java @@ -18,7 +18,7 @@ import java.util.List; import lombok.Getter; -import org.polypheny.db.catalog.entity.LogicalNamespace; +import org.polypheny.db.catalog.entity.logical.LogicalNamespace; import org.polypheny.db.catalog.logistic.Pattern; import org.polypheny.db.cypher.CypherParameter; import org.polypheny.db.cypher.CypherSimpleEither; diff --git a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherDropAlias.java b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherDropAlias.java index 630953eb5c..57853d319e 100644 --- a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherDropAlias.java +++ b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherDropAlias.java @@ -18,7 +18,7 @@ 
import java.util.List; import lombok.Getter; -import org.polypheny.db.catalog.entity.LogicalNamespace; +import org.polypheny.db.catalog.entity.logical.LogicalNamespace; import org.polypheny.db.catalog.logistic.Pattern; import org.polypheny.db.cypher.CypherParameter; import org.polypheny.db.cypher.CypherSimpleEither; diff --git a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherDropDatabase.java b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherDropDatabase.java index a179a88c1f..5ebdd26761 100644 --- a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherDropDatabase.java +++ b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/admin/CypherDropDatabase.java @@ -18,7 +18,7 @@ import java.util.List; import java.util.concurrent.TimeUnit; -import org.polypheny.db.catalog.entity.LogicalNamespace; +import org.polypheny.db.catalog.entity.logical.LogicalNamespace; import org.polypheny.db.catalog.logistic.Pattern; import org.polypheny.db.cypher.CypherParameter; import org.polypheny.db.cypher.CypherSimpleEither; diff --git a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/ddl/CypherDropPlacement.java b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/ddl/CypherDropPlacement.java index 58b63b0fb6..4447402635 100644 --- a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/ddl/CypherDropPlacement.java +++ b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/ddl/CypherDropPlacement.java @@ -21,7 +21,7 @@ import java.util.stream.Stream; import org.polypheny.db.adapter.AdapterManager; import org.polypheny.db.adapter.DataStore; -import org.polypheny.db.catalog.entity.LogicalNamespace; +import org.polypheny.db.catalog.entity.logical.LogicalNamespace; import org.polypheny.db.catalog.logistic.Pattern; import org.polypheny.db.cypher.CypherParameter; import org.polypheny.db.cypher.CypherSimpleEither; diff --git a/plugins/hsqldb-adapter/src/main/java/org/polypheny/db/hsqldb/stores/HsqldbStore.java b/plugins/hsqldb-adapter/src/main/java/org/polypheny/db/hsqldb/stores/HsqldbStore.java index c128a55b45..3dd2cf06c7 100644 --- a/plugins/hsqldb-adapter/src/main/java/org/polypheny/db/hsqldb/stores/HsqldbStore.java +++ b/plugins/hsqldb-adapter/src/main/java/org/polypheny/db/hsqldb/stores/HsqldbStore.java @@ -36,8 +36,8 @@ import org.polypheny.db.adapter.jdbc.stores.AbstractJdbcStore; import org.polypheny.db.catalog.IdBuilder; import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; -import org.polypheny.db.catalog.entity.LogicalIndex; import org.polypheny.db.catalog.entity.allocation.AllocationTable; +import org.polypheny.db.catalog.entity.logical.LogicalIndex; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.entity.physical.PhysicalEntity; import org.polypheny.db.catalog.entity.physical.PhysicalTable; diff --git a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlDrop.java b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlDrop.java index 6d0b05d500..1eae267a90 100644 --- a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlDrop.java +++ b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlDrop.java @@ -17,8 +17,8 @@ package org.polypheny.db.languages.mql; import java.util.List; -import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalCollection; +import 
org.polypheny.db.catalog.entity.logical.LogicalNamespace; import org.polypheny.db.catalog.logistic.Pattern; import org.polypheny.db.ddl.DdlManager; import org.polypheny.db.languages.ParserPos; diff --git a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql2alg/MqlToAlgConverter.java b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql2alg/MqlToAlgConverter.java index af98bc0639..277bf4f24d 100644 --- a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql2alg/MqlToAlgConverter.java +++ b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql2alg/MqlToAlgConverter.java @@ -63,7 +63,7 @@ import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.catalog.entity.CatalogEntity; -import org.polypheny.db.catalog.entity.LogicalNamespace; +import org.polypheny.db.catalog.entity.logical.LogicalNamespace; import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.languages.OperatorRegistry; diff --git a/plugins/mql-language/src/test/java/org/polypheny/db/mql/mql2alg/MqlMockCatalog.java b/plugins/mql-language/src/test/java/org/polypheny/db/mql/mql2alg/MqlMockCatalog.java index b6dbc6027d..be4d893a6e 100644 --- a/plugins/mql-language/src/test/java/org/polypheny/db/mql/mql2alg/MqlMockCatalog.java +++ b/plugins/mql-language/src/test/java/org/polypheny/db/mql/mql2alg/MqlMockCatalog.java @@ -18,7 +18,7 @@ import org.polypheny.db.catalog.MockCatalog; import org.polypheny.db.catalog.entity.CatalogUser; -import org.polypheny.db.catalog.entity.LogicalNamespace; +import org.polypheny.db.catalog.entity.logical.LogicalNamespace; import org.polypheny.db.catalog.logistic.NamespaceType; diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java index 3abcdd0ee1..2b27eb6045 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/PolyCatalog.java @@ -16,13 +16,11 @@ package org.polypheny.db.catalog; -import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import io.activej.serializer.BinarySerializer; import io.activej.serializer.annotations.Deserialize; import io.activej.serializer.annotations.Serialize; import java.beans.PropertyChangeSupport; -import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Set; @@ -48,25 +46,17 @@ import org.polypheny.db.catalog.entity.CatalogAdapter.AdapterType; import org.polypheny.db.catalog.entity.CatalogQueryInterface; import org.polypheny.db.catalog.entity.CatalogUser; -import org.polypheny.db.catalog.entity.LogicalNamespace; -import org.polypheny.db.catalog.entity.allocation.AllocationEntity; -import org.polypheny.db.catalog.entity.logical.LogicalColumn; +import org.polypheny.db.catalog.entity.logical.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.entity.physical.PhysicalEntity; import org.polypheny.db.catalog.logical.DocumentCatalog; import org.polypheny.db.catalog.logical.GraphCatalog; import org.polypheny.db.catalog.logical.RelationalCatalog; -import org.polypheny.db.catalog.logistic.Collation; -import org.polypheny.db.catalog.logistic.ForeignKeyOption; import org.polypheny.db.catalog.logistic.NamespaceType; -import 
org.polypheny.db.catalog.logistic.PlacementType; import org.polypheny.db.catalog.physical.PolyPhysicalCatalog; import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.catalog.snapshot.impl.SnapshotBuilder; -import org.polypheny.db.iface.QueryInterfaceManager; -import org.polypheny.db.iface.QueryInterfaceManager.QueryInterfaceType; import org.polypheny.db.transaction.Transaction; -import org.polypheny.db.type.PolyType; /** @@ -119,14 +109,7 @@ public PolyCatalog() { } - @Override - public void init() { - insertDefaultData(); - if ( snapshot.getQueryInterface( "avatica" ) == null ) { - QueryInterfaceType avatica = QueryInterfaceManager.getREGISTER().get( "AvaticaInterface" ); - addQueryInterface( "avatica", avatica.clazz.getName(), avatica.defaultSettings ); - } - } + public PolyCatalog( @@ -147,155 +130,9 @@ public PolyCatalog( } - /** - * Fills the catalog database with default data, skips if data is already inserted - */ - private void insertDefaultData() { - - ////////////// - // init users - long systemId = addUser( "system", "" ); - - addUser( "pa", "" ); - - Catalog.defaultUserId = systemId; - - ////////////// - // init schema - - long namespaceId = addNamespace( "public", NamespaceType.getDefault(), false ); - - ////////////// - // init adapters - if ( adapters.size() == 0 ) { - // Deploy default store - AdapterManager.getInstance().addAdapter( defaultStore.getAdapterName(), "hsqldb", AdapterType.STORE, defaultStore.getDefaultSettings() ); - - // Deploy default CSV view - Adapter adapter = AdapterManager.getInstance().addAdapter( defaultSource.getAdapterName(), "hr", AdapterType.SOURCE, defaultSource.getDefaultSettings() ); - - adapter.createNewSchema( getSnapshot(), "public", namespaceId ); - // init schema - - // getLogicalRel( namespaceId ).addTable( "depts", EntityType.SOURCE, false ); - // getLogicalRel( namespaceId ).addTable( "emps", EntityType.SOURCE, false ); - // getLogicalRel( namespaceId ).addTable( "emp", EntityType.SOURCE, false ); - // getLogicalRel( namespaceId ).addTable( "work", EntityType.SOURCE, false ); - - // updateSnapshot(); - - // CatalogAdapter csv = getSnapshot().getAdapter( "hr" ); - // addDefaultCsvColumns( csv, namespaceId ); - - } - - commit(); - - } - - - /** - * Initiates default columns for csv files - */ - private void addDefaultCsvColumns( CatalogAdapter csv, long namespaceId ) { - LogicalTable depts = getSnapshot().rel().getTable( namespaceId, "depts" ); - addDefaultCsvColumn( csv, depts, "deptno", PolyType.INTEGER, null, 1, null ); - addDefaultCsvColumn( csv, depts, "name", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 2, 20 ); - - LogicalTable emps = getSnapshot().rel().getTable( namespaceId, "emps" ); - addDefaultCsvColumn( csv, emps, "empid", PolyType.INTEGER, null, 1, null ); - addDefaultCsvColumn( csv, emps, "deptno", PolyType.INTEGER, null, 2, null ); - addDefaultCsvColumn( csv, emps, "name", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 3, 20 ); - addDefaultCsvColumn( csv, emps, "salary", PolyType.INTEGER, null, 4, null ); - addDefaultCsvColumn( csv, emps, "commission", PolyType.INTEGER, null, 5, null ); - - LogicalTable emp = getSnapshot().rel().getTable( namespaceId, "emp" ); - addDefaultCsvColumn( csv, emp, "employeeno", PolyType.INTEGER, null, 1, null ); - addDefaultCsvColumn( csv, emp, "age", PolyType.INTEGER, null, 2, null ); - addDefaultCsvColumn( csv, emp, "gender", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 3, 20 ); - addDefaultCsvColumn( csv, emp, "maritalstatus", PolyType.VARCHAR, 
Collation.CASE_INSENSITIVE, 4, 20 ); - addDefaultCsvColumn( csv, emp, "worklifebalance", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 5, 20 ); - addDefaultCsvColumn( csv, emp, "education", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 6, 20 ); - addDefaultCsvColumn( csv, emp, "monthlyincome", PolyType.INTEGER, null, 7, null ); - addDefaultCsvColumn( csv, emp, "relationshipjoy", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 8, 20 ); - addDefaultCsvColumn( csv, emp, "workingyears", PolyType.INTEGER, null, 9, null ); - addDefaultCsvColumn( csv, emp, "yearsatcompany", PolyType.INTEGER, null, 10, null ); - - LogicalTable work = getSnapshot().rel().getTable( namespaceId, "work" ); - addDefaultCsvColumn( csv, work, "employeeno", PolyType.INTEGER, null, 1, null ); - addDefaultCsvColumn( csv, work, "educationfield", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 2, 20 ); - addDefaultCsvColumn( csv, work, "jobinvolvement", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 3, 20 ); - addDefaultCsvColumn( csv, work, "joblevel", PolyType.INTEGER, null, 4, null ); - addDefaultCsvColumn( csv, work, "jobrole", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 5, 30 ); - addDefaultCsvColumn( csv, work, "businesstravel", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 6, 20 ); - addDefaultCsvColumn( csv, work, "department", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 7, 25 ); - addDefaultCsvColumn( csv, work, "attrition", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 8, 20 ); - addDefaultCsvColumn( csv, work, "dailyrate", PolyType.INTEGER, null, 9, null ); - - updateSnapshot(); - - // set all needed primary keys - getLogicalRel( namespaceId ).addPrimaryKey( depts.id, Collections.singletonList( getSnapshot().rel().getColumn( depts.id, "deptno" ).id ) ); - getLogicalRel( namespaceId ).addPrimaryKey( emps.id, Collections.singletonList( getSnapshot().rel().getColumn( emps.id, "empid" ).id ) ); - getLogicalRel( namespaceId ).addPrimaryKey( emp.id, Collections.singletonList( getSnapshot().rel().getColumn( emp.id, "employeeno" ).id ) ); - getLogicalRel( namespaceId ).addPrimaryKey( work.id, Collections.singletonList( getSnapshot().rel().getColumn( work.id, "employeeno" ).id ) ); - - // set foreign keys - getLogicalRel( namespaceId ).addForeignKey( - emps.id, - ImmutableList.of( getSnapshot().rel().getColumn( emps.id, "deptno" ).id ), - depts.id, - ImmutableList.of( getSnapshot().rel().getColumn( depts.id, "deptno" ).id ), - "fk_emps_depts", - ForeignKeyOption.NONE, - ForeignKeyOption.NONE ); - getLogicalRel( namespaceId ).addForeignKey( - work.id, - ImmutableList.of( getSnapshot().rel().getColumn( work.id, "employeeno" ).id ), - emp.id, - ImmutableList.of( getSnapshot().rel().getColumn( emp.id, "employeeno" ).id ), - "fk_work_emp", - ForeignKeyOption.NONE, - ForeignKeyOption.NONE ); - } - - - private void addDefaultCsvColumn( CatalogAdapter csv, LogicalTable table, String name, PolyType type, Collation collation, int position, Integer length ) { - if ( !getSnapshot().rel().checkIfExistsColumn( table.id, name ) ) { - LogicalColumn column = getLogicalRel( table.namespaceId ).addColumn( name, table.id, position, type, null, length, null, null, null, false, collation ); - String filename = table.name + ".csv"; - if ( table.name.equals( "emp" ) || table.name.equals( "work" ) ) { - filename += ".gz"; - } - - updateSnapshot(); - AllocationEntity alloc; - if ( !getSnapshot().alloc().adapterHasPlacement( csv.id, table.id ) ) { - alloc = getAllocRel( table.namespaceId ).createAllocationTable( csv.id, table.id ); - } else { - alloc 
= getSnapshot().alloc().getAllocation( csv.id, table.id ); - } - - getAllocRel( table.namespaceId ).addColumn( alloc.id, column.id, PlacementType.AUTOMATIC, position ); - //getAllocRel( table.namespaceId ).addColumn( alloc.id, colId, PlacementType.AUTOMATIC, filename, table.name, name, position ); - //getAllocRel( table.namespaceId ).updateColumnPlacementPhysicalPosition( allocId, colId, position ); - - updateSnapshot(); - - // long partitionId = table.partitionProperty.partitionIds.get( 0 ); - // getAllocRel( table.namespaceId ).addPartitionPlacement( table.namespaceId, csv.id, table.id, partitionId, PlacementType.AUTOMATIC, DataPlacementRole.UPTODATE ); - } - } - - - private void addDefaultColumn( CatalogAdapter adapter, LogicalTable table, String name, PolyType type, Collation collation, int position, Integer length ) { - if ( !getSnapshot().rel().checkIfExistsColumn( table.id, name ) ) { - LogicalColumn column = getLogicalRel( table.namespaceId ).addColumn( name, table.id, position, type, null, length, null, null, null, false, collation ); - AllocationEntity entity = getSnapshot().alloc().getAllocation( adapter.id, table.id ); - getAllocRel( table.namespaceId ).addColumn( entity.id, column.id, PlacementType.AUTOMATIC, position ); - //getAllocRel( table.namespaceId ).addColumn( entity.id, colId, PlacementType.AUTOMATIC, "col" + colId, table.name, name, position ); - getAllocRel( table.namespaceId ).updateColumnPlacementPhysicalPosition( adapter.id, column.id, position ); - } + @Override + public void init() { + //new DefaultInserter(); } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocDocCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocDocCatalog.java index 80c0e8b00c..3c7f9027d5 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocDocCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocDocCatalog.java @@ -25,8 +25,8 @@ import lombok.Getter; import org.polypheny.db.catalog.Serializable; import org.polypheny.db.catalog.catalogs.AllocationDocumentCatalog; -import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.allocation.AllocationCollection; +import org.polypheny.db.catalog.entity.logical.LogicalNamespace; import org.polypheny.db.catalog.logistic.PlacementType; public class PolyAllocDocCatalog implements Serializable, AllocationDocumentCatalog { diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocGraphCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocGraphCatalog.java index 23194ef712..e04708313c 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocGraphCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocGraphCatalog.java @@ -19,11 +19,10 @@ import io.activej.serializer.BinarySerializer; import java.util.concurrent.ConcurrentHashMap; import lombok.Getter; -import org.polypheny.db.catalog.PolyCatalog; import org.polypheny.db.catalog.Serializable; import org.polypheny.db.catalog.catalogs.AllocationGraphCatalog; -import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.allocation.AllocationGraph; +import org.polypheny.db.catalog.entity.logical.LogicalNamespace; public class PolyAllocGraphCatalog implements Serializable, AllocationGraphCatalog { diff --git 
a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocRelCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocRelCatalog.java index 275c155737..51088e4713 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocRelCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocRelCatalog.java @@ -28,8 +28,8 @@ import org.polypheny.db.catalog.Serializable; import org.polypheny.db.catalog.catalogs.AllocationRelationalCatalog; import org.polypheny.db.catalog.entity.AllocationColumn; -import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.allocation.AllocationTable; +import org.polypheny.db.catalog.entity.logical.LogicalNamespace; import org.polypheny.db.catalog.logistic.DataPlacementRole; import org.polypheny.db.catalog.logistic.PartitionType; import org.polypheny.db.catalog.logistic.PlacementType; diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/DocumentCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/DocumentCatalog.java index 5270b4ff6a..8c0995f7ab 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/DocumentCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/DocumentCatalog.java @@ -31,8 +31,8 @@ import org.polypheny.db.catalog.Serializable; import org.polypheny.db.catalog.catalogs.LogicalCatalog; import org.polypheny.db.catalog.catalogs.LogicalDocumentCatalog; -import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalCollection; +import org.polypheny.db.catalog.entity.logical.LogicalNamespace; import org.polypheny.db.catalog.logistic.EntityType; @Value diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/GraphCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/GraphCatalog.java index ab1e8ae987..6dd6655885 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/GraphCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/GraphCatalog.java @@ -30,8 +30,8 @@ import org.polypheny.db.catalog.Serializable; import org.polypheny.db.catalog.catalogs.LogicalCatalog; import org.polypheny.db.catalog.catalogs.LogicalGraphCatalog; -import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalGraph; +import org.polypheny.db.catalog.entity.logical.LogicalNamespace; @Value @SuperBuilder(toBuilder = true) diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/RelationalCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/RelationalCatalog.java index f8f977bdde..c1631ffc8e 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/RelationalCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/RelationalCatalog.java @@ -43,17 +43,17 @@ import org.polypheny.db.catalog.catalogs.LogicalRelationalCatalog; import org.polypheny.db.catalog.entity.CatalogConstraint; import org.polypheny.db.catalog.entity.CatalogDefaultValue; -import org.polypheny.db.catalog.entity.LogicalForeignKey; -import org.polypheny.db.catalog.entity.LogicalIndex; -import org.polypheny.db.catalog.entity.LogicalKey; -import org.polypheny.db.catalog.entity.LogicalKey.EnforcementTime; -import 
org.polypheny.db.catalog.entity.LogicalMaterializedView; -import org.polypheny.db.catalog.entity.LogicalNamespace; -import org.polypheny.db.catalog.entity.LogicalPrimaryKey; -import org.polypheny.db.catalog.entity.LogicalView; import org.polypheny.db.catalog.entity.MaterializedCriteria; import org.polypheny.db.catalog.entity.logical.LogicalColumn; +import org.polypheny.db.catalog.entity.logical.LogicalForeignKey; +import org.polypheny.db.catalog.entity.logical.LogicalIndex; +import org.polypheny.db.catalog.entity.logical.LogicalKey; +import org.polypheny.db.catalog.entity.logical.LogicalKey.EnforcementTime; +import org.polypheny.db.catalog.entity.logical.LogicalMaterializedView; +import org.polypheny.db.catalog.entity.logical.LogicalNamespace; +import org.polypheny.db.catalog.entity.logical.LogicalPrimaryKey; import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.catalog.entity.logical.LogicalView; import org.polypheny.db.catalog.exceptions.GenericRuntimeException; import org.polypheny.db.catalog.logistic.Collation; import org.polypheny.db.catalog.logistic.ConstraintType; diff --git a/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/RequestParser.java b/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/RequestParser.java index 33beeba99c..1ee5672b55 100644 --- a/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/RequestParser.java +++ b/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/RequestParser.java @@ -44,8 +44,8 @@ import org.polypheny.db.algebra.operators.OperatorName; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogUser; -import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalColumn; +import org.polypheny.db.catalog.entity.logical.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.iface.AuthenticationException; diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlUtil.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlUtil.java index 75751d9ebf..a1c78a254c 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlUtil.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlUtil.java @@ -42,7 +42,6 @@ import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypePrecedenceList; import org.polypheny.db.catalog.entity.CatalogEntity; -import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.languages.OperatorRegistry; diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorUtil.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorUtil.java index 6dee63fd40..ee46077780 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorUtil.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/SqlValidatorUtil.java @@ -38,6 +38,7 @@ import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.algebra.type.AlgDataTypeFieldImpl; import org.polypheny.db.algebra.type.AlgDataTypeSystem; +import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogEntity; import 
org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.logistic.NamespaceType; @@ -617,16 +618,13 @@ public static boolean isTableRelational( SqlValidatorImpl validator ) { return false; } SqlIdentifier id = ((SqlIdentifier) validator.getTableScope().getNode()); - return validator.snapshot.getNamespace( id.names.get( 0 ) ).namespaceType == NamespaceType.RELATIONAL; - /*LogicalGraph graph = validator.snapshot.getGraphSnapshot( namespace.id ).getTable( names.get( 1 ) ); - if ( graph != null ) { - return false; - } - LogicalCollection collection = validator.getSnapshot().getLogicalCollection( id.names ); - if ( collection != null ) { - return false; + + String namespace = id.names.get( 0 ); + if ( id.names.size() == 1 ) { + namespace = Catalog.defaultNamespaceName; } - return true;*/ + + return validator.snapshot.getNamespace( namespace ).namespaceType == NamespaceType.RELATIONAL; } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/WithScope.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/WithScope.java index f3688eac90..479195ad2a 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/WithScope.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/validate/WithScope.java @@ -20,7 +20,7 @@ import java.util.List; import org.polypheny.db.algebra.type.StructKind; import org.polypheny.db.catalog.entity.CatalogEntity; -import org.polypheny.db.catalog.entity.LogicalNamespace; +import org.polypheny.db.catalog.entity.logical.LogicalNamespace; import org.polypheny.db.sql.language.SqlNode; import org.polypheny.db.sql.language.SqlWithItem; import org.polypheny.db.util.NameMatcher; diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/web/SchemaToJsonMapper.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/web/SchemaToJsonMapper.java index 81052b7a7f..81bce67ab8 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/web/SchemaToJsonMapper.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/web/SchemaToJsonMapper.java @@ -25,8 +25,8 @@ import lombok.Getter; import lombok.NonNull; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.entity.LogicalKey; import org.polypheny.db.catalog.entity.logical.LogicalColumn; +import org.polypheny.db.catalog.entity.logical.LogicalKey; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.sql.language.SqlWriter; import org.polypheny.db.sql.language.dialect.PolyphenyDbSqlDialect; diff --git a/plugins/sql-language/src/test/java/org/polypheny/db/sql/map/NamespaceToJsonMapperTest.java b/plugins/sql-language/src/test/java/org/polypheny/db/sql/map/NamespaceToJsonMapperTest.java index 281f6fb25e..91fb59f8d8 100644 --- a/plugins/sql-language/src/test/java/org/polypheny/db/sql/map/NamespaceToJsonMapperTest.java +++ b/plugins/sql-language/src/test/java/org/polypheny/db/sql/map/NamespaceToJsonMapperTest.java @@ -28,10 +28,10 @@ import org.polypheny.db.catalog.entity.CatalogDatabase; import org.polypheny.db.catalog.entity.CatalogDefaultValue; import org.polypheny.db.catalog.entity.CatalogUser; -import org.polypheny.db.catalog.entity.LogicalKey; -import org.polypheny.db.catalog.entity.LogicalKey.EnforcementTime; -import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalColumn; +import org.polypheny.db.catalog.entity.logical.LogicalKey; +import 
org.polypheny.db.catalog.entity.logical.LogicalKey.EnforcementTime; +import org.polypheny.db.catalog.entity.logical.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.logistic.NamespaceType; diff --git a/webui/src/main/java/org/polypheny/db/webui/Crud.java b/webui/src/main/java/org/polypheny/db/webui/Crud.java index e49153cd5c..158964595c 100644 --- a/webui/src/main/java/org/polypheny/db/webui/Crud.java +++ b/webui/src/main/java/org/polypheny/db/webui/Crud.java @@ -104,17 +104,17 @@ import org.polypheny.db.catalog.entity.CatalogAdapter.AdapterType; import org.polypheny.db.catalog.entity.CatalogConstraint; import org.polypheny.db.catalog.entity.CatalogDataPlacement; -import org.polypheny.db.catalog.entity.LogicalForeignKey; -import org.polypheny.db.catalog.entity.LogicalIndex; -import org.polypheny.db.catalog.entity.LogicalMaterializedView; -import org.polypheny.db.catalog.entity.LogicalNamespace; -import org.polypheny.db.catalog.entity.LogicalPrimaryKey; -import org.polypheny.db.catalog.entity.LogicalView; import org.polypheny.db.catalog.entity.MaterializedCriteria; import org.polypheny.db.catalog.entity.MaterializedCriteria.CriteriaType; import org.polypheny.db.catalog.entity.allocation.AllocationEntity; import org.polypheny.db.catalog.entity.logical.LogicalColumn; +import org.polypheny.db.catalog.entity.logical.LogicalForeignKey; +import org.polypheny.db.catalog.entity.logical.LogicalIndex; +import org.polypheny.db.catalog.entity.logical.LogicalMaterializedView; +import org.polypheny.db.catalog.entity.logical.LogicalNamespace; +import org.polypheny.db.catalog.entity.logical.LogicalPrimaryKey; import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.catalog.entity.logical.LogicalView; import org.polypheny.db.catalog.logistic.ConstraintType; import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.logistic.ForeignKeyOption; diff --git a/webui/src/main/java/org/polypheny/db/webui/WebSocket.java b/webui/src/main/java/org/polypheny/db/webui/WebSocket.java index 221e079a83..688b797752 100644 --- a/webui/src/main/java/org/polypheny/db/webui/WebSocket.java +++ b/webui/src/main/java/org/polypheny/db/webui/WebSocket.java @@ -34,7 +34,7 @@ import org.eclipse.jetty.websocket.api.Session; import org.eclipse.jetty.websocket.api.annotations.OnWebSocketMessage; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.entity.LogicalNamespace; +import org.polypheny.db.catalog.entity.logical.LogicalNamespace; import org.polypheny.db.languages.QueryLanguage; import org.polypheny.db.schema.graph.PolyGraph; import org.polypheny.db.webui.crud.LanguageCrud; diff --git a/webui/src/main/java/org/polypheny/db/webui/crud/LanguageCrud.java b/webui/src/main/java/org/polypheny/db/webui/crud/LanguageCrud.java index b31cbd7f5e..d5e1efe625 100644 --- a/webui/src/main/java/org/polypheny/db/webui/crud/LanguageCrud.java +++ b/webui/src/main/java/org/polypheny/db/webui/crud/LanguageCrud.java @@ -36,10 +36,10 @@ import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogCollectionPlacement; import org.polypheny.db.catalog.entity.CatalogDataPlacement; -import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalCollection; import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalGraph; +import 
org.polypheny.db.catalog.entity.logical.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.logistic.NamespaceType; diff --git a/webui/src/main/java/org/polypheny/db/webui/models/requests/BatchUpdateRequest.java b/webui/src/main/java/org/polypheny/db/webui/models/requests/BatchUpdateRequest.java index aeeb29c7e5..98ea1a0188 100644 --- a/webui/src/main/java/org/polypheny/db/webui/models/requests/BatchUpdateRequest.java +++ b/webui/src/main/java/org/polypheny/db/webui/models/requests/BatchUpdateRequest.java @@ -30,7 +30,6 @@ import lombok.extern.slf4j.Slf4j; import org.apache.commons.io.IOUtils; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.entity.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.transaction.Statement; From 97a77360775583e1bc4f96d1592b7196487cd8cf Mon Sep 17 00:00:00 2001 From: datomo Date: Tue, 18 Apr 2023 01:53:43 +0200 Subject: [PATCH 073/436] fixed queries for csv --- .../entity/allocation/AllocationTable.java | 16 +++++++++++----- .../org/polypheny/db/ddl/DdlManagerImpl.java | 16 ++++++++-------- .../routers/FullPlacementQueryRouter.java | 4 ++-- .../polypheny/db/adapter/csv/CsvEnumerator.java | 4 ++-- .../org/polypheny/db/adapter/csv/CsvSchema.java | 4 ++-- .../org/polypheny/db/adapter/csv/CsvTable.java | 2 +- .../polypheny/db/adapter/jdbc/JdbcEntity.java | 2 +- 7 files changed, 27 insertions(+), 21 deletions(-) diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationTable.java b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationTable.java index 525f10889f..2910d22b75 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationTable.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationTable.java @@ -19,6 +19,7 @@ import io.activej.serializer.annotations.Deserialize; import java.io.Serializable; import java.util.ArrayList; +import java.util.Comparator; import java.util.List; import java.util.Map; import java.util.Map.Entry; @@ -59,12 +60,12 @@ public Expression asExpression() { public Map getColumnNames() { - return getColumns().values().stream().collect( Collectors.toMap( c -> c.columnId, AllocationColumn::getLogicalColumnName ) ); + return getColumns().stream().collect( Collectors.toMap( c -> c.columnId, AllocationColumn::getLogicalColumnName ) ); } - public Map getColumns() { - return Catalog.snapshot().alloc().getColumns( id ).stream().collect( Collectors.toMap( c -> c.columnId, c -> c ) ); + public List getColumns() { + return Catalog.snapshot().alloc().getColumns( id ).stream().sorted( Comparator.comparingLong( a -> a.position ) ).collect( Collectors.toList() ); } @@ -74,7 +75,7 @@ public String getNamespaceName() { public Map getColumnTypes() { - return getColumns().values().stream().collect( Collectors.toMap( c -> c.columnId, AllocationColumn::getAlgDataType ) ); + return getColumns().stream().collect( Collectors.toMap( c -> c.columnId, AllocationColumn::getAlgDataType ) ); } @@ -84,10 +85,15 @@ public Map getColumnNamesId() { public List getColumnOrder() { - List columns = new ArrayList<>( getColumns().values() ); + List columns = new ArrayList<>( getColumns() ); columns.sort( ( a, b ) -> Math.toIntExact( a.position - b.position ) ); return columns.stream().map( c -> c.columnId ).collect( 
Collectors.toList() ); } + + public List getColumnIds() { + return getColumns().stream().map( c -> c.columnId ).collect( Collectors.toList() ); + } + } diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java index e68bdbedbd..569a783e59 100644 --- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java @@ -249,9 +249,9 @@ private void handleSource( DataSource adapter ) { allocation.id, column.id, PlacementType.STATIC, - exportedColumn.physicalPosition - 1 ); // Not a valid partitionGroupID --> placeholder + exportedColumn.physicalPosition ); // Not a valid partitionGroupID --> placeholder //catalog.getAllocRel( Catalog.defaultNamespaceId ).updateColumnPlacementPhysicalPosition( adapter.getAdapterId(), column.id, exportedColumn.physicalPosition - 1 ); - if ( exportedColumn.primary ) { + /*if ( exportedColumn.primary ) { primaryKeyColIds.add( column.id ); } if ( physicalSchemaName == null ) { @@ -259,10 +259,10 @@ private void handleSource( DataSource adapter ) { } if ( physicalTableName == null ) { physicalTableName = exportedColumn.physicalTableName; - } + }*/ } - catalog.getLogicalRel( Catalog.defaultNamespaceId ).addPrimaryKey( table.id, primaryKeyColIds ); + // catalog.getLogicalRel( Catalog.defaultNamespaceId ).addPrimaryKey( table.id, primaryKeyColIds ); //LogicalTable catalogTable = catalog.getSnapshot().rel().getTable( table.id ); @@ -645,7 +645,7 @@ private void addDataStoreIndex( LogicalTable catalogTable, String indexMethodNam // Check if all required columns are present on this store AllocationTable alloc = catalog.getSnapshot().alloc().getAllocation( location.getAdapterId(), catalogTable.id ).unwrap( AllocationTable.class ); - if ( !alloc.getColumns().keySet().containsAll( columnIds ) ) { + if ( !new HashSet<>( alloc.getColumns().stream().map( c -> c.columnId ).collect( Collectors.toList() ) ).containsAll( columnIds ) ) { throw new GenericRuntimeException( "Not all required columns for this index are placed on this store." 
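/*
 * AllocationTable#getColumns() now returns a position-sorted List<AllocationColumn>
 * instead of a Map keyed by column id, so membership checks first collect the ids
 * into a Set, as in the hunk above. A minimal sketch of the equivalent check using
 * the new getColumnIds() helper (illustration only, not part of this commit):
 *
 *     Set<Long> placed = new HashSet<>( alloc.getColumnIds() );
 *     boolean allPlaced = placed.containsAll( columnIds );
 */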
); } @@ -898,9 +898,9 @@ public void addPrimaryKey( LogicalTable catalogTable, List columnNames, // long pkColumnId = oldPk.columnIds.get( 0 ); // It is sufficient to check for one because all get replicated on all stores List allocations = catalog.getSnapshot().alloc().getFromLogical( catalogTable.id ); for ( AllocationEntity allocation : allocations ) { - Map allocColumns = allocation.unwrap( AllocationTable.class ).getColumns(); + List allocColumns = allocation.unwrap( AllocationTable.class ).getColumnIds(); for ( long columnId : columnIds ) { - if ( !allocColumns.containsKey( columnId ) ) { + if ( !allocColumns.contains( columnId ) ) { catalog.getAllocRel( catalogTable.namespaceId ).addColumn( allocation.id, columnId, // Will be set later @@ -3039,7 +3039,7 @@ public void dropTable( LogicalTable catalogTable, Statement statement ) { for ( PhysicalEntity physical : snapshot.physical().fromAlloc( allocation.id ) ) { catalog.getPhysical( catalogTable.namespaceId ).deleteEntity( physical.id ); } - for ( long columnId : allocation.unwrap( AllocationTable.class ).getColumns().keySet() ) { + for ( long columnId : allocation.unwrap( AllocationTable.class ).getColumnIds() ) { catalog.getAllocRel( allocation.namespaceId ).deleteColumn( allocation.id, columnId ); } catalog.getAllocRel( allocation.namespaceId ).deleteAllocation( allocation.id ); diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/FullPlacementQueryRouter.java b/dbms/src/main/java/org/polypheny/db/routing/routers/FullPlacementQueryRouter.java index 2277ebfedf..14e0dac653 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/FullPlacementQueryRouter.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/FullPlacementQueryRouter.java @@ -151,7 +151,7 @@ protected Set> selectPlacement( LogicalTable catalogTable // Filter for placements by adapters List allocs = Catalog.snapshot().alloc().getFromLogical( catalogTable.id ).stream() .map( a -> a.unwrap( AllocationTable.class ) ) - .filter( a -> new HashSet<>( a.getColumns().values().stream().map( AllocationColumn::getColumnId ).collect( Collectors.toList() ) ).containsAll( usedColumns ) ) + .filter( a -> new HashSet<>( a.getColumnIds() ).containsAll( usedColumns ) ) .collect( Collectors.toList() ); /*List adapters = Catalog.snapshot().alloc().getColumnPlacementsByAdapter( catalogTable.id ).entrySet() @@ -163,7 +163,7 @@ protected Set> selectPlacement( LogicalTable catalogTable final Set> result = new HashSet<>(); for ( AllocationEntity alloc : allocs ) { List placements = usedColumns.stream() - .map( colId -> alloc.unwrap( AllocationTable.class ).getColumns().get( colId ) ) + .map( colId -> alloc.unwrap( AllocationTable.class ).getColumns().stream().filter( c -> c.columnId == colId ).findFirst().get() ) .collect( Collectors.toList() ); if ( !placements.isEmpty() ) { diff --git a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvEnumerator.java b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvEnumerator.java index 8ee83354a1..ec9eba3be0 100644 --- a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvEnumerator.java +++ b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvEnumerator.java @@ -392,7 +392,7 @@ public Object[] convertNormalRow( String[] strings ) { final Object[] objects = new Object[fields.length]; for ( int i = 0; i < fields.length; i++ ) { int field = fields[i]; - objects[i] = convert( fieldTypes[i], strings[field] ); + objects[i] = convert( fieldTypes[i], strings[field - 1] ); 
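// The stored placement position now appears to keep the adapter's 1-based physical
// position (the DdlManagerImpl hunk above no longer subtracts 1 from
// exportedColumn.physicalPosition), while the parsed CSV row is a 0-based String[];
// "field - 1" translates between the two conventions. A minimal sketch of the
// mapping (illustration only, with made-up values):
//
//     int[] fields = { 1, 3 };           // 1-based catalog positions
//     String[] row = { "a", "b", "c" };  // 0-based parsed CSV line
//     Object first = convert( fieldTypes[0], row[fields[0] - 1] );  // reads "a"
//     Object third = convert( fieldTypes[1], row[fields[1] - 1] );  // reads "c"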
} return objects; } @@ -403,7 +403,7 @@ public Object[] convertStreamRow( String[] strings ) { objects[0] = System.currentTimeMillis(); for ( int i = 0; i < fields.length; i++ ) { int field = fields[i]; - objects[i + 1] = convert( fieldTypes[i], strings[field] ); + objects[i + 1] = convert( fieldTypes[i], strings[field - 1] ); } return objects; } diff --git a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSchema.java b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSchema.java index 4404b578be..8e4f62d20e 100644 --- a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSchema.java +++ b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSchema.java @@ -92,10 +92,10 @@ public PhysicalTable createCsvTable( long id, LogicalTable catalogTable, Allocat List columns = csvSource.getExportedColumns().get( catalogTable.name ); - for ( AllocationColumn placement : allocationTable.getColumns().values() ) { + for ( AllocationColumn placement : allocationTable.getColumns() ) { LogicalColumn logicalColumn = Catalog.getInstance().getSnapshot().rel().getColumn( placement.columnId ); AlgDataType sqlType = sqlType( typeFactory, logicalColumn.type, logicalColumn.length, logicalColumn.scale, null ); - fieldInfo.add( logicalColumn.name, columns.get( (int) placement.position ).physicalColumnName, sqlType ).nullable( logicalColumn.nullable ); + fieldInfo.add( logicalColumn.name, columns.get( (int) placement.position - 1 ).physicalColumnName, sqlType ).nullable( logicalColumn.nullable ); fieldTypes.add( CsvFieldType.getCsvFieldType( logicalColumn.type ) ); fieldIds.add( (int) placement.position ); } diff --git a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvTable.java b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvTable.java index 8eade2af0a..817697413d 100644 --- a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvTable.java +++ b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvTable.java @@ -63,7 +63,7 @@ public abstract class CsvTable extends PhysicalTable { allocationTable.getNamespaceName(), allocationTable.getColumnNames(), allocationTable.getColumnNames(), - allocationTable.getColumns().values().stream().collect( Collectors.toMap( c -> c.columnId, AllocationColumn::getAlgDataType ) ), + allocationTable.getColumns().stream().collect( Collectors.toMap( c -> c.columnId, AllocationColumn::getAlgDataType ) ), allocationTable.getColumnOrder() ); this.source = source; this.fieldTypes = fieldTypes; diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcEntity.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcEntity.java index 387ddecdfc..fabb0eb99a 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcEntity.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcEntity.java @@ -126,7 +126,7 @@ public JdbcEntity( private static Map getPhysicalColumnNames( Adapter adapter, AllocationTable allocationTable ) { AbstractJdbcStore store = (AbstractJdbcStore) adapter; - return allocationTable.getColumns().values().stream().collect( Collectors.toMap( c -> c.columnId, c -> store.getPhysicalColumnName( c.columnId ) ) ); + return allocationTable.getColumns().stream().collect( Collectors.toMap( c -> c.columnId, c -> store.getPhysicalColumnName( c.columnId ) ) ); } From 2d864ba17c2e00093e3fb0e5beb1e54d4700bbca Mon Sep 17 00:00:00 2001 From: 
datomo Date: Tue, 18 Apr 2023 14:40:56 +0200 Subject: [PATCH 074/436] updating dependencies, removing fmpp for now --- core/build.gradle | 56 +- .../catalogs/AllocationGraphCatalog.java | 4 +- .../entity/allocation/AllocationGraph.java | 16 +- .../catalog/entity/logical/LogicalGraph.java | 14 +- .../db/catalog/snapshot/AllocSnapshot.java | 2 +- .../catalog/snapshot/LogicalRelSnapshot.java | 5 +- .../snapshot/impl/LogicalRelSnapshotImpl.java | 34 +- .../polypheny/db/plugins/PluginContext.java | 35 + .../org/polypheny/db/plugins/PolyPlugin.java | 32 + .../db/plugins/PolyPluginManager.java | 25 +- .../java/org/polypheny/db/PolyphenyDb.java | 17 +- .../org/polypheny/db/ddl/DefaultInserter.java | 232 +- gradle.properties | 7 +- .../statistics/StatisticQueryProcessor.java | 35 - .../db/avatica/AvaticaInterfacePlugin.java | 21 +- plugins/cql-language/build.gradle | 3 + .../polypheny/db/cql/CqlLanguagePlugin.java | 10 +- .../polypheny/db/adapter/csv/CsvPlugin.java | 10 +- plugins/cypher-language/build.gradle | 2 +- .../src/main/codegen/CypherParser.jj | 16 +- .../db/cypher/CypherLanguagePlugin.java | 10 +- .../db/cypher/parser/CypherCharStream.java | 308 --- .../ExploreByExamplePlugin.java | 10 +- .../db/hsqldb/stores/HsqldbPlugin.java | 10 +- .../db/http/HttpInterfacePlugin.java | 10 +- .../db/adapter/jdbc/JdbcAdapterFramework.java | 10 +- .../db/monitoring/MapDBMonitoringPlugin.java | 10 +- plugins/mql-language/build.gradle | 5 +- .../db/languages/MongoLanguagePlugin.java | 10 +- .../db/languages/mql/MqlRenameCollection.java | 3 +- .../src/main/codegen/javacc/PigletParser.jj | 2 +- .../org/polypheny/db/PigLanguagePlugin.java | 10 +- .../polypheny/db/catalog/CatalogPlugin.java | 12 +- .../allocation/PolyAllocGraphCatalog.java | 18 +- .../db/catalog/logical/GraphCatalog.java | 10 +- .../db/restapi/RestInterfacePlugin.java | 10 +- plugins/sql-language/build.gradle | 7 +- .../main/codegen/{templates => }/Parser.jj | 2150 +++++++++++++++-- .../main/codegen/{ => includes}/config.fmpp | 0 .../polypheny/db/sql/SqlLanguagePlugin.java | 10 +- .../language/validate/DelegatingScope.java | 452 ++-- .../db/sql/language/validate/EmptyScope.java | 21 +- .../validate/IdentifierNamespace.java | 2 +- .../db/sql/language/validate/ListScope.java | 29 +- .../java/org/polypheny/db/webui/Crud.java | 12 +- 45 files changed, 2499 insertions(+), 1208 deletions(-) create mode 100644 core/src/main/java/org/polypheny/db/plugins/PluginContext.java create mode 100644 core/src/main/java/org/polypheny/db/plugins/PolyPlugin.java delete mode 100644 plugins/cypher-language/src/main/java/org/polypheny/db/cypher/parser/CypherCharStream.java rename plugins/sql-language/src/main/codegen/{templates => }/Parser.jj (75%) rename plugins/sql-language/src/main/codegen/{ => includes}/config.fmpp (100%) diff --git a/core/build.gradle b/core/build.gradle index 6bcd367851..11a9977f3a 100644 --- a/core/build.gradle +++ b/core/build.gradle @@ -1,28 +1,28 @@ -import ca.coglinc.gradle.plugins.javacc.CompileJavaccTask +//import ca.coglinc.gradle.plugins.javacc.CompileJavaccTask group "org.polypheny" apply plugin: "io.freefair.lombok" configurations { - javacc + //javacc tests { extendsFrom testRuntimeOnly } testClasses { - extendsFrom(testImplementation) + extendsFrom testImplementation } } buildscript { - dependencies { + /*dependencies { // JavaCC (https://github.com/johnmartel/javaccPlugin) classpath group: "gradle.plugin.ca.coglinc2", name: "javacc-gradle-plugin", version: javacc_plugin_version // Fmpp classpath group: "net.sourceforge.fmpp", 
name: "fmpp", version: fmpp_plugin_version } - ant.taskdef(name: "fmpp", classname:"fmpp.tools.AntTask", classpath: buildscript.configurations.classpath.asPath) + ant.taskdef(name: "fmpp", classname:"fmpp.tools.AntTask", classpath: buildscript.configurations.classpath.asPath)*/ } @@ -30,7 +30,7 @@ dependencies { api project(":config") api project(":information") - javacc group: "net.java.dev.javacc", name: "javacc", version: javacc_version // BSD 2-clause + //javacc group: "net.java.dev.javacc", name: "javacc", version: javacc_version // BSD 2-clause ////// PF4J implementation(group: 'org.pf4j', name: 'pf4j', version: pf4jVersion) { @@ -103,45 +103,7 @@ sourceSets { } } -/** - * Additional MongoDB query language Compile Pipeline - */ -task generateFmppSourcesDocument { - inputs.dir("src/main/codegen/templates/mql") - outputs.dir(project.buildDir.absolutePath + "/generated-sources/fmpp") - doLast { - ant.fmpp(configuration: "src/main/codegen/config_doc.fmpp", - sourceRoot: "src/main/codegen/templates/mql", - outputRoot: project.buildDir.absolutePath + "/generated-sources/fmpp/mql") - } -} -task generateParserDocument (type: CompileJavaccTask) { - dependsOn("generateFmppSourcesDocument") - getConventionMapping().map("classpath", { configurations.javacc }) - arguments = [static: "false", lookahead: "2"] - inputDirectory = file(project.buildDir.absolutePath + "/generated-sources/fmpp/mql/javacc") - outputDirectory = file(project.buildDir.absolutePath + "/generated-sources/org/polypheny/db/mql/parser/impl") -} -/** - * Compile Pipeline - */ -task generateFmppSources { - inputs.dir("src/main/codegen/templates/sql") - outputs.dir(project.buildDir.absolutePath + "/generated-sources/fmpp") - doLast { - ant.fmpp(configuration: "src/main/codegen/config.fmpp", - sourceRoot: "src/main/codegen/templates/sql", - outputRoot: project.buildDir.absolutePath + "/generated-sources/fmpp/sql") - } -} -task generateParser (type: CompileJavaccTask) { - dependsOn("generateFmppSources") - getConventionMapping().map("classpath", { configurations.javacc }) - arguments = [static: "false", lookahead: "2"] - inputDirectory = file(project.buildDir.absolutePath + "/generated-sources/fmpp/sql/javacc") - outputDirectory = file(project.buildDir.absolutePath + "/generated-sources/org/polypheny/db/sql/parser/impl") -} task generateJdbcVersionProperties(type: Copy) { from "src/main/resources/version" into project.buildDir.absolutePath + "/classes" @@ -168,12 +130,12 @@ javadoc { // Include private fields in JavaDoc options.memberLevel = JavadocMemberLevel.PRIVATE // Use HTML 5 for JavaDoc - if(JavaVersion.current().isJava9Compatible()) { + if (JavaVersion.current().isJava9Compatible()) { options.addBooleanOption("html5", true) } // Exclude build dir - exclude "fmpp/sql/javacc/**" - exclude "fmpp/mql/javacc/**" + // exclude "fmpp/sql/javacc/**" + // exclude "fmpp/mql/javacc/**" } diff --git a/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationGraphCatalog.java b/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationGraphCatalog.java index 67808b1d03..56b204e1f4 100644 --- a/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationGraphCatalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/catalogs/AllocationGraphCatalog.java @@ -29,7 +29,7 @@ public interface AllocationGraphCatalog extends AllocationCatalog { * @param graphId The id of the graph for which a new placement is added * @return The id of the new placement */ - public abstract long addGraphPlacement( long adapterId, long graphId ); + 
long addGraphPlacement( long adapterId, long graphId ); /** * Deletes a specific graph placement for a given graph and adapter. @@ -37,7 +37,7 @@ public interface AllocationGraphCatalog extends AllocationCatalog { * @param adapterId The id of the adapter on which the placement is removed * @param graphId The id of the graph for which the placement is removed */ - public abstract void deleteGraphPlacement( long adapterId, long graphId ); + void deleteGraphPlacement( long adapterId, long graphId ); Map getGraphs(); diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationGraph.java b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationGraph.java index 2556e996d0..8804808d03 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationGraph.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationGraph.java @@ -16,6 +16,7 @@ package org.polypheny.db.catalog.entity.allocation; +import io.activej.serializer.annotations.Deserialize; import java.io.Serializable; import lombok.EqualsAndHashCode; import lombok.Value; @@ -23,7 +24,6 @@ import org.apache.calcite.linq4j.tree.Expression; import org.apache.calcite.linq4j.tree.Expressions; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.catalog.logistic.NamespaceType; @EqualsAndHashCode(callSuper = true) @@ -32,14 +32,12 @@ public class AllocationGraph extends AllocationEntity { - public LogicalGraph logical; - public long id; - - - public AllocationGraph( long id, LogicalGraph graph, long adapterId ) { - super( id, graph.id, graph.namespaceId, adapterId, NamespaceType.GRAPH ); - this.id = id; - this.logical = graph; + public AllocationGraph( + @Deserialize("id") long id, + @Deserialize("logicalId") long logicalId, + @Deserialize("namespaceId") long namespaceId, + @Deserialize("adapterId") long adapterId ) { + super( id, logicalId, namespaceId, adapterId, NamespaceType.GRAPH ); } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalGraph.java b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalGraph.java index 8a38802bff..88fe4c8ee1 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalGraph.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalGraph.java @@ -17,6 +17,8 @@ package org.polypheny.db.catalog.entity.logical; import com.drew.lang.annotations.NotNull; +import io.activej.serializer.annotations.Deserialize; +import io.activej.serializer.annotations.Serialize; import java.io.Serializable; import java.util.List; import lombok.EqualsAndHashCode; @@ -33,22 +35,22 @@ public class LogicalGraph extends LogicalEntity implements Comparable { private static final long serialVersionUID = 7343856827901459672L; - public int ownerId; - public boolean modifiable; + @Serialize + public boolean modifiable; + @Serialize public boolean caseSensitive; - public LogicalGraph( long id, String name, long namespaceId, int ownerId, boolean modifiable, boolean caseSensitive ) { - super( id, name, namespaceId, EntityType.ENTITY, NamespaceType.GRAPH ); - this.ownerId = ownerId; + public LogicalGraph( @Deserialize("id") long id, @Deserialize("name") String name, @Deserialize("modifiable") boolean modifiable, @Deserialize("caseSensitive") boolean caseSensitive ) { + super( id, name, id, EntityType.ENTITY, NamespaceType.GRAPH ); this.modifiable = modifiable; this.caseSensitive = caseSensitive; } public 
LogicalGraph( LogicalGraph graph ) { - this( graph.id, graph.name, graph.namespaceId, graph.ownerId, graph.modifiable, graph.caseSensitive ); + this( graph.id, graph.name, graph.modifiable, graph.caseSensitive ); } diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/AllocSnapshot.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/AllocSnapshot.java index f59d67fd5c..9edb8c4f80 100644 --- a/core/src/main/java/org/polypheny/db/catalog/snapshot/AllocSnapshot.java +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/AllocSnapshot.java @@ -392,7 +392,7 @@ public interface AllocSnapshot { * @param adapterId The id of the adapter on which the placements are placed * @return The collection of graph placements */ - public abstract List getGraphPlacements( long adapterId ); + List getGraphPlacements( long adapterId ); CatalogCollectionPlacement getCollectionPlacement( long id, long placementId ); diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalRelSnapshot.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalRelSnapshot.java index 3e6bbfbffa..54ff5cb977 100644 --- a/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalRelSnapshot.java +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/LogicalRelSnapshot.java @@ -18,6 +18,7 @@ import java.util.List; import javax.annotation.Nullable; +import lombok.NonNull; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogConstraint; @@ -45,7 +46,9 @@ public interface LogicalRelSnapshot { List getTables( long namespaceId, @Nullable Pattern name ); - List getTables( @Nullable String namespace, @Nullable String name ); + LogicalTable getTables( @Nullable String namespace, @NonNull String name ); + + List getTablesFromNamespace( long namespace ); /** diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalRelSnapshotImpl.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalRelSnapshotImpl.java index ca09e321ff..74528936b3 100644 --- a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalRelSnapshotImpl.java +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalRelSnapshotImpl.java @@ -25,6 +25,7 @@ import java.util.Map.Entry; import java.util.TreeSet; import java.util.stream.Collectors; +import lombok.NonNull; import lombok.Value; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; @@ -56,6 +57,8 @@ public class LogicalRelSnapshotImpl implements LogicalRelSnapshot { ImmutableMap, LogicalTable> tableNames; + ImmutableMap> tablesNamespace; + ImmutableMap> tableColumns; ImmutableMap columns; @@ -92,6 +95,7 @@ public LogicalRelSnapshotImpl( Map catalogs ) { this.tables = ImmutableMap.copyOf( catalogs.values().stream().flatMap( c -> c.getTables().entrySet().stream() ).collect( Collectors.toMap( Entry::getKey, Entry::getValue ) ) ); this.tableNames = ImmutableMap.copyOf( tables.entrySet().stream().collect( Collectors.toMap( e -> Pair.of( e.getValue().namespaceId, getAdjustedName( e.getValue().namespaceId, e.getValue().name ) ), Entry::getValue ) ) ); + this.tablesNamespace = buildTablesNamespace(); this.columns = ImmutableMap.copyOf( catalogs.values().stream().flatMap( c -> c.getColumns().entrySet().stream() ).collect( Collectors.toMap( Entry::getKey, Entry::getValue ) ) ); this.columnNames = ImmutableMap.copyOf( columns.entrySet().stream().collect( Collectors.toMap( e -> namespaces.get( e.getValue().namespaceId 
).caseSensitive ? Pair.of( e.getValue().tableId, e.getValue().name ) : Pair.of( e.getValue().tableId, e.getValue().name.toLowerCase() ), Entry::getValue ) ) ); @@ -138,6 +142,19 @@ public LogicalRelSnapshotImpl( Map catalogs ) { } + private ImmutableMap> buildTablesNamespace() { + Map> map = new HashMap<>(); + for ( LogicalTable table : tables.values() ) { + if ( !map.containsKey( table.namespaceId ) ) { + map.put( table.namespaceId, new ArrayList<>() ); + } + map.get( table.namespaceId ).add( table ); + } + + return ImmutableMap.copyOf( map ); + } + + private ImmutableMap buildViews() { return ImmutableMap.copyOf( tables .values() @@ -272,13 +289,22 @@ private List getNamespaces( @Nullable Pattern namespaceName ) @Override public List getTables( long namespaceId, @Nullable Pattern name ) { - return tableNames.values().stream().filter( e -> e.name.matches( name.toRegex() ) || e.namespaceId == namespaceId ).collect( Collectors.toList() ); + boolean caseSensitive = namespaces.get( namespaceId ).caseSensitive; + return tablesNamespace.get( namespaceId ).stream().filter( e -> (name == null || e.name.matches( caseSensitive ? name.toRegex() : name.toRegex().toLowerCase() )) ).collect( Collectors.toList() ); + } + + + @Override + public LogicalTable getTables( @Nullable String namespaceName, @NonNull String name ) { + LogicalNamespace namespace = namespaceNames.get( namespaceName ); + + return tableNames.get( Pair.of( namespace.id, (namespace.caseSensitive ? name : name.toLowerCase()) ) ); } @Override - public List getTables( @Nullable String namespace, @Nullable String name ) { - return null; + public List getTablesFromNamespace( long namespace ) { + return tablesNamespace.get( namespace ); } @@ -373,7 +399,7 @@ public boolean isConstraint( long keyId ) { @Override public List getForeignKeys( long tableId ) { - return tableKeys.get( tableId ).stream().filter( k -> isForeignKey( k.id ) ).map( f -> (LogicalForeignKey) f ).collect( Collectors.toList() ); + return foreignKeys.values().stream().filter( k -> k.tableId == tableId ).collect( Collectors.toList() ); } diff --git a/core/src/main/java/org/polypheny/db/plugins/PluginContext.java b/core/src/main/java/org/polypheny/db/plugins/PluginContext.java new file mode 100644 index 0000000000..ab0eda4867 --- /dev/null +++ b/core/src/main/java/org/polypheny/db/plugins/PluginContext.java @@ -0,0 +1,35 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.db.plugins; + +import org.pf4j.RuntimeMode; + +public class PluginContext { + + private final RuntimeMode runtimeMode; + + + public PluginContext( RuntimeMode runtimeMode ) { + this.runtimeMode = runtimeMode; + } + + + public RuntimeMode getRuntimeMode() { + return runtimeMode; + } + +} diff --git a/core/src/main/java/org/polypheny/db/plugins/PolyPlugin.java b/core/src/main/java/org/polypheny/db/plugins/PolyPlugin.java new file mode 100644 index 0000000000..2435c18a20 --- /dev/null +++ b/core/src/main/java/org/polypheny/db/plugins/PolyPlugin.java @@ -0,0 +1,32 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.plugins; + +import org.pf4j.Plugin; + +public class PolyPlugin extends Plugin { + + protected final PluginContext context; + + + protected PolyPlugin( PluginContext context ) { + super(); + + this.context = context; + } + +} diff --git a/core/src/main/java/org/polypheny/db/plugins/PolyPluginManager.java b/core/src/main/java/org/polypheny/db/plugins/PolyPluginManager.java index 2a0dc52dc7..376a9abc7a 100644 --- a/core/src/main/java/org/polypheny/db/plugins/PolyPluginManager.java +++ b/core/src/main/java/org/polypheny/db/plugins/PolyPluginManager.java @@ -22,6 +22,7 @@ import com.google.gson.stream.JsonWriter; import java.io.File; import java.io.IOException; +import java.lang.reflect.Constructor; import java.nio.file.Path; import java.util.ArrayList; import java.util.Arrays; @@ -44,15 +45,16 @@ import org.pf4j.CompoundPluginDescriptorFinder; import org.pf4j.CompoundPluginLoader; import org.pf4j.DefaultPluginDescriptor; +import org.pf4j.DefaultPluginFactory; import org.pf4j.DefaultPluginLoader; import org.pf4j.DefaultPluginManager; -import org.pf4j.DevelopmentPluginRepository; import org.pf4j.JarPluginLoader; import org.pf4j.ManifestPluginDescriptorFinder; +import org.pf4j.Plugin; import org.pf4j.PluginClassLoader; import org.pf4j.PluginDescriptor; +import org.pf4j.PluginFactory; import org.pf4j.PluginLoader; -import org.pf4j.PluginRepository; import org.pf4j.PluginState; import org.pf4j.PluginWrapper; import org.polypheny.db.catalog.Catalog; @@ -125,6 +127,25 @@ public PolyPluginManager( Path... 
paths ) { } + @Override + protected PluginFactory createPluginFactory() { + return new DefaultPluginFactory() { + @Override + protected Plugin createInstance( Class pluginClass, PluginWrapper pluginWrapper ) { + PluginContext context = new PluginContext( pluginWrapper.getRuntimeMode() ); + try { + Constructor constructor = pluginClass.getConstructor( PluginContext.class ); + return (Plugin) constructor.newInstance( context ); + } catch ( Exception e ) { + log.error( e.getMessage(), e ); + } + + return null; + } + }; + } + + public static void init( boolean resetPluginsOnStartup ) { attachRuntimeToPlugins(); diff --git a/dbms/src/main/java/org/polypheny/db/PolyphenyDb.java b/dbms/src/main/java/org/polypheny/db/PolyphenyDb.java index a8a3dfc9ff..af040b18db 100644 --- a/dbms/src/main/java/org/polypheny/db/PolyphenyDb.java +++ b/dbms/src/main/java/org/polypheny/db/PolyphenyDb.java @@ -304,13 +304,6 @@ public void join( final long millis ) throws InterruptedException { log.error( "Unable to retrieve host information." ); } - /*ThreadManager.getComponent().addShutdownHook( "[ShutdownHook] HttpServerDispatcher.stop()", () -> { - try { - httpServerDispatcher.stop(); - } catch ( Exception e ) { - GLOBAL_LOGGER.warn( "Exception during HttpServerDispatcher shutdown", e ); - } - } );*/ final Authenticator authenticator = new AuthenticatorImpl(); @@ -344,7 +337,7 @@ public void join( final long millis ) throws InterruptedException { trx = transactionManager.startTransaction( Catalog.getInstance().getSnapshot().getUser( Catalog.defaultUserId ), Catalog.getInstance().getSnapshot().getNamespace( 0 ), false, "Catalog Startup" ); AdapterManager.getInstance().restoreAdapters(); - loadDefaults(); + //DefaultInserter.restoreInterfaces(); QueryInterfaceManager.getInstance().restoreInterfaces( catalog.getSnapshot() ); trx.commit(); trx = transactionManager.startTransaction( Catalog.getInstance().getSnapshot().getUser( Catalog.defaultUserId ), Catalog.getInstance().getSnapshot().getNamespace( 0 ), false, "Catalog Startup" ); @@ -414,7 +407,9 @@ public void join( final long millis ) throws InterruptedException { } PolyPluginManager.startUp( transactionManager, authenticator ); - new DefaultInserter( DdlManager.getInstance(), transactionManager ); + DefaultInserter.restoreData( DdlManager.getInstance() ); + DefaultInserter.restoreInterfacesIfNecessary(); + QueryInterfaceManager.getInstance().restoreInterfaces( catalog.getSnapshot() ); // Initialize statistic settings StatisticsManager.getInstance().initializeStatisticSettings(); @@ -453,8 +448,4 @@ public void join( final long millis ) throws InterruptedException { } - public void loadDefaults() { - Catalog.getInstance().restoreInterfacesIfNecessary(); - } - } diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DefaultInserter.java b/dbms/src/main/java/org/polypheny/db/ddl/DefaultInserter.java index 253ad6f2f4..b2a926998d 100644 --- a/dbms/src/main/java/org/polypheny/db/ddl/DefaultInserter.java +++ b/dbms/src/main/java/org/polypheny/db/ddl/DefaultInserter.java @@ -16,249 +16,89 @@ package org.polypheny.db.ddl; -import com.google.common.collect.ImmutableList; -import java.util.Collections; -import java.util.List; import org.apache.calcite.linq4j.function.Deterministic; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.entity.CatalogAdapter; import org.polypheny.db.catalog.entity.CatalogAdapter.AdapterType; -import org.polypheny.db.catalog.entity.allocation.AllocationEntity; -import org.polypheny.db.catalog.entity.logical.LogicalColumn; 
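/*
 * The createPluginFactory() override in PolyPluginManager above reflectively invokes
 * a constructor taking the new PluginContext, where pf4j's DefaultPluginFactory would
 * normally look for a constructor taking a PluginWrapper. A plugin therefore only
 * needs to expose that single constructor; a minimal sketch (hypothetical MyPlugin,
 * mirroring the AvaticaInterfacePlugin migration further below):
 *
 *     public class MyPlugin extends PolyPlugin {
 *         public MyPlugin( PluginContext context ) {
 *             super( context );
 *         }
 *     }
 */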
-import org.polypheny.db.catalog.entity.logical.LogicalTable; -import org.polypheny.db.catalog.logistic.Collation; -import org.polypheny.db.catalog.logistic.ConstraintType; -import org.polypheny.db.catalog.logistic.EntityType; -import org.polypheny.db.catalog.logistic.ForeignKeyOption; import org.polypheny.db.catalog.logistic.NamespaceType; -import org.polypheny.db.catalog.logistic.PlacementType; -import org.polypheny.db.ddl.DdlManager.ColumnTypeInformation; -import org.polypheny.db.ddl.DdlManager.ConstraintInformation; -import org.polypheny.db.ddl.DdlManager.FieldInformation; import org.polypheny.db.iface.QueryInterfaceManager; import org.polypheny.db.iface.QueryInterfaceManager.QueryInterfaceType; -import org.polypheny.db.transaction.Transaction; -import org.polypheny.db.transaction.TransactionManager; -import org.polypheny.db.type.PolyType; @Deterministic public class DefaultInserter { - private final Catalog catalog = Catalog.getInstance(); - private final TransactionManager manager; - private final DdlManager ddlManager; - private Transaction transaction; - - - public DefaultInserter( DdlManager ddlManager, TransactionManager manager ) { - this.manager = manager; - this.ddlManager = ddlManager; - insertDefaultData(); - if ( Catalog.snapshot().getQueryInterface( "avatica" ) == null ) { - QueryInterfaceType avatica = QueryInterfaceManager.getREGISTER().get( "AvaticaInterface" ); - catalog.addQueryInterface( "avatica", avatica.clazz.getName(), avatica.defaultSettings ); - } - } - /** * Fills the catalog database with default data, skips if data is already inserted */ - private void insertDefaultData() { - - ////////////// - // init users - long systemId = catalog.addUser( "system", "" ); - - catalog.addUser( "pa", "" ); - - Catalog.defaultUserId = systemId; + public static void restoreData( DdlManager ddlManager ) { + final Catalog catalog = Catalog.getInstance(); + restoreUsers( catalog ); ////////////// // init schema - long namespaceId = catalog.addNamespace( "public", NamespaceType.getDefault(), false ); + catalog.addNamespace( "public", NamespaceType.getDefault(), false ); ////////////// // init adapters - if ( catalog.getAdapters().size() != 0 ) { - catalog.commit(); - return; - } - - catalog.updateSnapshot(); - this.transaction = manager.startTransaction( catalog.getSnapshot().getUser( Catalog.defaultUserId ), catalog.getSnapshot().getNamespace( namespaceId ), false, "Defaults" ); - - // Deploy default store - ddlManager.addAdapter( "hsqldb", Catalog.defaultStore.getAdapterName(), AdapterType.STORE, Catalog.defaultStore.getDefaultSettings() ); - //AdapterManager.getInstance().addAdapter( Catalog.defaultStore.getAdapterName(), "hsqldb", AdapterType.STORE, Catalog.defaultStore.getDefaultSettings() ); - - // Deploy default CSV view - //Adapter adapter = AdapterManager.getInstance().addAdapter( Catalog.defaultSource.getAdapterName(), "hr", AdapterType.SOURCE, Catalog.defaultSource.getDefaultSettings() ); - ddlManager.addAdapter( "hr", Catalog.defaultSource.getAdapterName(), AdapterType.SOURCE, Catalog.defaultSource.getDefaultSettings() ); - //adapter.createNewSchema( Catalog.snapshot(), "public", namespaceId ); - // init schema - //catalog.updateSnapshot(); - - //CatalogAdapter csv = catalog.getSnapshot().getAdapter( "hr" ); - //addDefaultCsvColumns( csv, namespaceId ); + restoreAdapters( ddlManager, catalog ); catalog.commit(); } - /** - * Initiates default columns for csv files - */ - private void addDefaultCsvColumns( CatalogAdapter csv, long namespaceId ) { - LogicalTable depts = 
getDepts( csv, namespaceId ); - - LogicalTable emps = getEmps( csv, namespaceId ); - - LogicalTable emp = getEmp( csv, namespaceId ); - - LogicalTable work = getWork( csv, namespaceId ); - - catalog.updateSnapshot(); - - // set foreign keys - catalog.getLogicalRel( namespaceId ).addForeignKey( - emps.id, - ImmutableList.of( catalog.getSnapshot().rel().getColumn( emps.id, "deptno" ).id ), - depts.id, - ImmutableList.of( catalog.getSnapshot().rel().getColumn( depts.id, "deptno" ).id ), - "fk_emps_depts", - ForeignKeyOption.NONE, - ForeignKeyOption.NONE ); - catalog.getLogicalRel( namespaceId ).addForeignKey( - work.id, - ImmutableList.of( catalog.getSnapshot().rel().getColumn( work.id, "employeeno" ).id ), - emp.id, - ImmutableList.of( catalog.getSnapshot().rel().getColumn( emp.id, "employeeno" ).id ), - "fk_work_emp", - ForeignKeyOption.NONE, - ForeignKeyOption.NONE ); - } - - - private LogicalTable getWork( CatalogAdapter csv, long namespaceId ) { - catalog.getLogicalRel( namespaceId ).addTable( "work", EntityType.SOURCE, false ); - LogicalTable work = Catalog.snapshot().rel().getTable( namespaceId, "work" ); - catalog.getLogicalRel( namespaceId ).addPrimaryKey( work.id, Collections.singletonList( catalog.getSnapshot().rel().getColumn( work.id, "employeeno" ).id ) ); - addDefaultCsvColumn( csv, work, "employeeno", PolyType.INTEGER, null, 1, null ); - addDefaultCsvColumn( csv, work, "educationfield", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 2, 20 ); - addDefaultCsvColumn( csv, work, "jobinvolvement", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 3, 20 ); - addDefaultCsvColumn( csv, work, "joblevel", PolyType.INTEGER, null, 4, null ); - addDefaultCsvColumn( csv, work, "jobrole", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 5, 30 ); - addDefaultCsvColumn( csv, work, "businesstravel", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 6, 20 ); - addDefaultCsvColumn( csv, work, "department", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 7, 25 ); - addDefaultCsvColumn( csv, work, "attrition", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 8, 20 ); - addDefaultCsvColumn( csv, work, "dailyrate", PolyType.INTEGER, null, 9, null ); - return work; + public static void restoreInterfaces() { + restoreAvatica(); + restoreInterfacesIfNecessary(); } - private LogicalTable getEmp( CatalogAdapter csv, long namespaceId ) { - catalog.getLogicalRel( namespaceId ).addTable( "emp", EntityType.SOURCE, false ); - LogicalTable emp = Catalog.snapshot().rel().getTable( namespaceId, "emp" ); - catalog.getLogicalRel( namespaceId ).addPrimaryKey( emp.id, Collections.singletonList( catalog.getSnapshot().rel().getColumn( emp.id, "employeeno" ).id ) ); - addDefaultCsvColumn( csv, emp, "employeeno", PolyType.INTEGER, null, 1, null ); - addDefaultCsvColumn( csv, emp, "age", PolyType.INTEGER, null, 2, null ); - addDefaultCsvColumn( csv, emp, "gender", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 3, 20 ); - addDefaultCsvColumn( csv, emp, "maritalstatus", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 4, 20 ); - addDefaultCsvColumn( csv, emp, "worklifebalance", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 5, 20 ); - addDefaultCsvColumn( csv, emp, "education", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 6, 20 ); - addDefaultCsvColumn( csv, emp, "monthlyincome", PolyType.INTEGER, null, 7, null ); - addDefaultCsvColumn( csv, emp, "relationshipjoy", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 8, 20 ); - addDefaultCsvColumn( csv, emp, "workingyears", PolyType.INTEGER, null, 9, null ); - addDefaultCsvColumn( csv, emp, 
"yearsatcompany", PolyType.INTEGER, null, 10, null ); - return emp; - } + private static void restoreAdapters( DdlManager ddlManager, Catalog catalog ) { + if ( catalog.getAdapters().size() != 0 ) { + catalog.commit(); + return; + } + catalog.updateSnapshot(); - private LogicalTable getEmps( CatalogAdapter csv, long namespaceId ) { - catalog.getLogicalRel( namespaceId ).addTable( "emps", EntityType.SOURCE, false ); - LogicalTable emps = Catalog.snapshot().rel().getTable( namespaceId, "emps" ); - catalog.getLogicalRel( namespaceId ).addPrimaryKey( emps.id, Collections.singletonList( catalog.getSnapshot().rel().getColumn( emps.id, "empid" ).id ) ); - addDefaultCsvColumn( csv, emps, "empid", PolyType.INTEGER, null, 1, null ); - addDefaultCsvColumn( csv, emps, "deptno", PolyType.INTEGER, null, 2, null ); - addDefaultCsvColumn( csv, emps, "name", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 3, 20 ); - addDefaultCsvColumn( csv, emps, "salary", PolyType.INTEGER, null, 4, null ); - addDefaultCsvColumn( csv, emps, "commission", PolyType.INTEGER, null, 5, null ); - return emps; + // Deploy default store + ddlManager.addAdapter( "hsqldb", Catalog.defaultStore.getAdapterName(), AdapterType.STORE, Catalog.defaultStore.getDefaultSettings() ); + // Deploy default CSV view + ddlManager.addAdapter( "hr", Catalog.defaultSource.getAdapterName(), AdapterType.SOURCE, Catalog.defaultSource.getDefaultSettings() ); } - private LogicalTable getDepts( CatalogAdapter csv, long namespaceId ) { - List fields = List.of( - new FieldInformation( - "deptno", - new ColumnTypeInformation( PolyType.INTEGER, null, null, null, null, null, false ), - null, - null, - 1 ), - new FieldInformation( - "name", - new ColumnTypeInformation( PolyType.VARCHAR, null, 20, null, null, null, false ), - Collation.CASE_INSENSITIVE, - null, - 2 ) - ); - List constraints = List.of( - new ConstraintInformation( "primary", ConstraintType.PRIMARY, List.of( "deptno" ) ) - ); + private static void restoreUsers( Catalog catalog ) { + ////////////// + // init users + long systemId = catalog.addUser( "system", "" ); - //DdlManager.getInstance().createTable( namespaceId, "depts", fields, constraints, true, List.of( AdapterManager.getInstance().getSource( csv.id ) ), PlacementType.AUTOMATIC, transaction.createStatement() ); - catalog.getLogicalRel( namespaceId ).addTable( "depts", EntityType.SOURCE, false ); + catalog.addUser( "pa", "" ); - LogicalTable depts = Catalog.snapshot().rel().getTable( namespaceId, "depts" ); - catalog.getLogicalRel( namespaceId ).addPrimaryKey( depts.id, Collections.singletonList( catalog.getSnapshot().rel().getColumn( depts.id, "deptno" ).id ) ); - addDefaultCsvColumn( csv, depts, "deptno", PolyType.INTEGER, null, 1, null ); - addDefaultCsvColumn( csv, depts, "name", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 2, 20 ); - return depts; + Catalog.defaultUserId = systemId; } - private void addDefaultCsvColumn( CatalogAdapter csv, LogicalTable table, String name, PolyType type, Collation collation, int position, Integer length ) { - if ( catalog.getSnapshot().rel().checkIfExistsColumn( table.id, name ) ) { + public static void restoreInterfacesIfNecessary() { + //////////////////////// + // init query interfaces + if ( Catalog.getInstance().getInterfaces().size() != 0 ) { return; } - LogicalColumn column = catalog.getLogicalRel( table.namespaceId ).addColumn( name, table.id, position, type, null, length, null, null, null, false, collation ); - String filename = table.name + ".csv"; - if ( table.name.equals( "emp" ) || 
table.name.equals( "work" ) ) { - filename += ".gz"; - } - - catalog.updateSnapshot(); - AllocationEntity alloc; - if ( !catalog.getSnapshot().alloc().adapterHasPlacement( csv.id, table.id ) ) { - alloc = catalog.getAllocRel( table.namespaceId ).createAllocationTable( csv.id, table.id ); - } else { - alloc = catalog.getSnapshot().alloc().getAllocation( csv.id, table.id ); - } - - catalog.getAllocRel( table.namespaceId ).addColumn( alloc.id, column.id, PlacementType.AUTOMATIC, position ); - //getAllocRel( table.namespaceId ).addColumn( alloc.id, colId, PlacementType.AUTOMATIC, filename, table.name, name, position ); - //getAllocRel( table.namespaceId ).updateColumnPlacementPhysicalPosition( allocId, colId, position ); - - catalog.updateSnapshot(); - - // long partitionId = table.partitionProperty.partitionIds.get( 0 ); - // getAllocRel( table.namespaceId ).addPartitionPlacement( table.namespaceId, csv.id, table.id, partitionId, PlacementType.AUTOMATIC, DataPlacementRole.UPTODATE ); + QueryInterfaceManager.getREGISTER().values().forEach( i -> Catalog.getInstance().addQueryInterface( i.interfaceName, i.clazz.getName(), i.defaultSettings ) ); + Catalog.getInstance().commit(); } - private void addDefaultColumn( CatalogAdapter adapter, LogicalTable table, String name, PolyType type, Collation collation, int position, Integer length ) { - /*if ( !getSnapshot().rel().checkIfExistsColumn( table.id, name ) ) { - LogicalColumn column = getLogicalRel( table.namespaceId ).addColumn( name, table.id, position, type, null, length, null, null, null, false, collation ); - AllocationEntity entity = getSnapshot().alloc().getAllocation( adapter.id, table.id ); - getAllocRel( table.namespaceId ).addColumn( entity.id, column.id, PlacementType.AUTOMATIC, position ); - //getAllocRel( table.namespaceId ).addColumn( entity.id, colId, PlacementType.AUTOMATIC, "col" + colId, table.name, name, position ); - getAllocRel( table.namespaceId ).updateColumnPlacementPhysicalPosition( adapter.id, column.id, position ); - }*/ + public static void restoreAvatica() { + if ( Catalog.snapshot().getQueryInterface( "avatica" ) != null ) { + return; + } + QueryInterfaceType avatica = QueryInterfaceManager.getREGISTER().get( "AvaticaInterface" ); + Catalog.getInstance().addQueryInterface( "avatica", avatica.clazz.getName(), avatica.defaultSettings ); } } diff --git a/gradle.properties b/gradle.properties index 9550469503..9e73b98920 100644 --- a/gradle.properties +++ b/gradle.properties @@ -80,9 +80,8 @@ janino_version = 3.0.11 java_diff_version = 1.1.2 javalin_version = 4.6.7 javacc_plugin_version = 3.0.0 -javacc_version = 4.0 -javacc_version_cypher = 4.1 -java_docker_version = 3.2.13 +javacc_version = 7.0.12 +java_docker_version = 3.3.0 joda_time_version = 2.8.1 jetty_websocket_api_version = 9.4.48.v20220622 json_path_version = 2.4.0 @@ -115,7 +114,7 @@ poi_ooxml_version = 5.2.3 polypheny_jdbc_driver_version = 1.5.3 polypheny_ui_version = 1.0-SNAPSHOT postgresql_version = 42.2.19 -pf4jVersion = 3.8.0 +pf4jVersion = 3.9.0 quidem_version = 0.9 reflections_version = 0.10.2 scott_data_hsqldb_version = 0.1 diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticQueryProcessor.java b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticQueryProcessor.java index 541fd2010d..4d6181219c 100644 --- a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticQueryProcessor.java +++ b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticQueryProcessor.java @@ 
-27,8 +27,6 @@ import org.polypheny.db.algebra.AlgRoot; import org.polypheny.db.algebra.constant.Kind; import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.entity.logical.LogicalColumn; -import org.polypheny.db.catalog.entity.logical.LogicalNamespace; import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.logistic.EntityType; import org.polypheny.db.catalog.logistic.NamespaceType; @@ -80,33 +78,6 @@ public StatisticQueryResult selectOneColumnStat( AlgNode node, Transaction trans } - /** - * Method to get all schemas, tables, and their columns in a database - */ - public List> getSchemaTree() { - Snapshot snapshot = Catalog.getInstance().getSnapshot(); - List> result = new ArrayList<>(); - List schemaTree = new ArrayList<>(); - List schemas = snapshot.getNamespaces( null ); - for ( LogicalNamespace schema : schemas ) { - List tables = new ArrayList<>(); - List childTables = snapshot.rel().getTables( new Pattern( schema.name ), null ); - for ( LogicalTable childTable : childTables ) { - List table = new ArrayList<>(); - List columns = snapshot.rel().getColumns( childTable.id ); - for ( LogicalColumn logicalColumn : columns ) { - table.add( schema.name + "." + childTable.name + "." + logicalColumn.name ); - } - if ( childTable.entityType == EntityType.ENTITY ) { - tables.addAll( table ); - } - } - schemaTree.addAll( tables ); - result.add( schemaTree ); - } - return result; - } - /** * Gets all columns in the database @@ -226,12 +197,6 @@ private int getPageSize() { return RuntimeConfig.UI_PAGE_SIZE.getInteger(); } - - public static String buildQualifiedName( String... strings ) { - return "\"" + String.join( "\".\"", strings ) + "\""; - } - - static class QueryExecutionException extends Exception { QueryExecutionException( Throwable t ) { diff --git a/plugins/avatica-interface/src/main/java/org/polypheny/db/avatica/AvaticaInterfacePlugin.java b/plugins/avatica-interface/src/main/java/org/polypheny/db/avatica/AvaticaInterfacePlugin.java index c5ea9ce242..51ff163ddc 100644 --- a/plugins/avatica-interface/src/main/java/org/polypheny/db/avatica/AvaticaInterfacePlugin.java +++ b/plugins/avatica-interface/src/main/java/org/polypheny/db/avatica/AvaticaInterfacePlugin.java @@ -18,6 +18,11 @@ import com.google.common.collect.ImmutableList; +import java.beans.PropertyChangeEvent; +import java.beans.PropertyChangeListener; +import java.util.HashMap; +import java.util.List; +import java.util.Map; import lombok.extern.slf4j.Slf4j; import org.apache.calcite.avatica.metrics.MetricsSystem; import org.apache.calcite.avatica.metrics.MetricsSystemConfiguration; @@ -27,30 +32,24 @@ import org.apache.calcite.avatica.server.AvaticaHandler; import org.apache.calcite.avatica.server.HandlerFactory; import org.pf4j.Extension; -import org.pf4j.Plugin; -import org.pf4j.PluginWrapper; import org.polypheny.db.StatusService; import org.polypheny.db.iface.Authenticator; import org.polypheny.db.iface.QueryInterface; import org.polypheny.db.iface.QueryInterfaceManager; +import org.polypheny.db.plugins.PluginContext; +import org.polypheny.db.plugins.PolyPlugin; import org.polypheny.db.transaction.TransactionManager; import org.polypheny.db.util.Util; -import java.beans.PropertyChangeEvent; -import java.beans.PropertyChangeListener; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -public class AvaticaInterfacePlugin extends Plugin { +public class AvaticaInterfacePlugin extends PolyPlugin { /** * Constructor to be used by plugin manager for 
plugin instantiation. * Your plugins have to provide constructor with this exact signature to be successfully loaded by manager. */ - public AvaticaInterfacePlugin( PluginWrapper wrapper ) { - super( wrapper ); + public AvaticaInterfacePlugin( PluginContext context ) { + super( context ); } diff --git a/plugins/cql-language/build.gradle b/plugins/cql-language/build.gradle index 9b138841bd..1fec5e5b1d 100644 --- a/plugins/cql-language/build.gradle +++ b/plugins/cql-language/build.gradle @@ -39,6 +39,9 @@ dependencies { } task generateParser(type: CompileJavaccTask) { + /*mainClass = "org.javacc.parser.Main" + classpath = configurations.runtimeClasspath + args = ['-OUTPUT_DIRECTORY=' + file(project.buildDir.absolutePath + "/generated-sources/org/polypheny/db/cql/parser"), '-STATIC=false', 'src/main/codegen/CqlParser.jj']*/ getConventionMapping().map("classpath", { configurations.javacc }) arguments = [static: "false"] inputDirectory = file("src/main/codegen") diff --git a/plugins/cql-language/src/main/java/org/polypheny/db/cql/CqlLanguagePlugin.java b/plugins/cql-language/src/main/java/org/polypheny/db/cql/CqlLanguagePlugin.java index c5526712eb..c75f20cfeb 100644 --- a/plugins/cql-language/src/main/java/org/polypheny/db/cql/CqlLanguagePlugin.java +++ b/plugins/cql-language/src/main/java/org/polypheny/db/cql/CqlLanguagePlugin.java @@ -20,8 +20,6 @@ import java.util.List; import lombok.extern.slf4j.Slf4j; import org.eclipse.jetty.websocket.api.Session; -import org.pf4j.Plugin; -import org.pf4j.PluginWrapper; import org.polypheny.db.PolyImplementation; import org.polypheny.db.adapter.java.JavaTypeFactory; import org.polypheny.db.algebra.AlgRoot; @@ -32,6 +30,8 @@ import org.polypheny.db.information.InformationObserver; import org.polypheny.db.languages.LanguageManager; import org.polypheny.db.languages.QueryLanguage; +import org.polypheny.db.plugins.PluginContext; +import org.polypheny.db.plugins.PolyPlugin; import org.polypheny.db.plugins.PolyPluginManager; import org.polypheny.db.rex.RexBuilder; import org.polypheny.db.tools.AlgBuilder; @@ -45,7 +45,7 @@ import org.polypheny.db.webui.models.requests.QueryRequest; @Slf4j -public class CqlLanguagePlugin extends Plugin { +public class CqlLanguagePlugin extends PolyPlugin { public static final String NAME = "cql"; @@ -55,8 +55,8 @@ public class CqlLanguagePlugin extends Plugin { * Constructor to be used by plugin manager for plugin instantiation. * Your plugins have to provide constructor with this exact signature to be successfully loaded by manager. */ - public CqlLanguagePlugin( PluginWrapper wrapper ) { - super( wrapper ); + public CqlLanguagePlugin( PluginContext context ) { + super( context ); } diff --git a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvPlugin.java b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvPlugin.java index a514f3c710..94ab5c739f 100644 --- a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvPlugin.java +++ b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvPlugin.java @@ -19,18 +19,18 @@ import com.google.common.collect.ImmutableMap; import java.util.Map; -import org.pf4j.Plugin; -import org.pf4j.PluginWrapper; import org.polypheny.db.catalog.Adapter; +import org.polypheny.db.plugins.PluginContext; +import org.polypheny.db.plugins.PolyPlugin; -public class CsvPlugin extends Plugin { +public class CsvPlugin extends PolyPlugin { /** * Constructor to be used by plugin manager for plugin instantiation. 
* Your plugins have to provide constructor with this exact signature to be successfully loaded by manager. */ - public CsvPlugin( PluginWrapper wrapper ) { - super( wrapper ); + public CsvPlugin( PluginContext context ) { + super( context ); } diff --git a/plugins/cypher-language/build.gradle b/plugins/cypher-language/build.gradle index c186dc7c50..ba232c0a7e 100644 --- a/plugins/cypher-language/build.gradle +++ b/plugins/cypher-language/build.gradle @@ -22,7 +22,7 @@ dependencies { compileOnly project(":webui") compileOnly project(":dbms") - javacc group: "net.java.dev.javacc", name: "javacc", version: javacc_version_cypher // BSD 2-clause + javacc group: "net.java.dev.javacc", name: "javacc", version: javacc_version // BSD 2-clause implementation group: "org.apache.commons", name: "commons-lang3", version: commons_lang3_version // Apache 2.0 implementation group: 'org.eclipse.jetty.websocket', name: 'websocket-api', version: jetty_websocket_api_version diff --git a/plugins/cypher-language/src/main/codegen/CypherParser.jj b/plugins/cypher-language/src/main/codegen/CypherParser.jj index 0adfc2e0fc..496689cd23 100644 --- a/plugins/cypher-language/src/main/codegen/CypherParser.jj +++ b/plugins/cypher-language/src/main/codegen/CypherParser.jj @@ -28,13 +28,13 @@ options { UNICODE_INPUT = false; IGNORE_CASE = false; USER_TOKEN_MANAGER = false; - USER_CHAR_STREAM = true; + USER_CHAR_STREAM = false; BUILD_PARSER = true; BUILD_TOKEN_MANAGER = true; SANITY_CHECK = true; FORCE_LA_CHECK = false; TOKEN_EXTENDS = "WithOffset"; - COMMON_TOKEN_ACTION = true; + COMMON_TOKEN_ACTION = false; } PARSER_BEGIN(CypherParserImpl) @@ -109,12 +109,12 @@ public class CypherParserImpl extends CypherAbstractParserImpl { * {@link ParserFactory} implementation for creating parser. 
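 * A usage sketch (illustrative only): after this change, {@code FACTORY.getParser( "MATCH (n) RETURN n" )}
 * wraps the query in a {@code SourceStringReader} and feeds the generated parser directly,
 * since the custom {@code CypherCharStream} is removed.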
*/ public static final ParserFactory FACTORY = new ParserFactory() { - public CypherAbstractParserImpl getParser( Reader reader ) { - return new CypherParserImpl( new CypherCharStream( ((SourceStringReader) reader).getSourceString() )); + public CypherParserImpl getParser( Reader reader ) { + return new CypherParserImpl( reader ); } - public CypherAbstractParserImpl getParser( String query ) { - return new CypherParserImpl( new CypherCharStream( query )); + public CypherParserImpl getParser( String cypher ) { + return getParser( new SourceStringReader( cypher ) ); } }; @@ -147,7 +147,7 @@ public class CypherParserImpl extends CypherAbstractParserImpl { PARSER_END(CypherParserImpl) -TOKEN_MGR_DECLS : +/*TOKEN_MGR_DECLS : { public void CommonTokenAction( Token t ) { @@ -155,7 +155,7 @@ TOKEN_MGR_DECLS : t.beginOffset = ccStream.getBeginOffset(); t.endOffset = ccStream.getEndOffset(); } -} +}*/ SKIP : { diff --git a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/CypherLanguagePlugin.java b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/CypherLanguagePlugin.java index 8d77388063..d48559ef55 100644 --- a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/CypherLanguagePlugin.java +++ b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/CypherLanguagePlugin.java @@ -19,8 +19,6 @@ import java.util.ArrayList; import java.util.List; import org.eclipse.jetty.websocket.api.Session; -import org.pf4j.Plugin; -import org.pf4j.PluginWrapper; import org.polypheny.db.PolyImplementation; import org.polypheny.db.algebra.AlgRoot; import org.polypheny.db.catalog.logistic.NamespaceType; @@ -29,6 +27,8 @@ import org.polypheny.db.languages.LanguageManager; import org.polypheny.db.languages.QueryLanguage; import org.polypheny.db.nodes.Node; +import org.polypheny.db.plugins.PluginContext; +import org.polypheny.db.plugins.PolyPlugin; import org.polypheny.db.plugins.PolyPluginManager; import org.polypheny.db.processing.AutomaticDdlProcessor; import org.polypheny.db.processing.ExtendedQueryParameters; @@ -40,7 +40,7 @@ import org.polypheny.db.webui.models.Result; import org.polypheny.db.webui.models.requests.QueryRequest; -public class CypherLanguagePlugin extends Plugin { +public class CypherLanguagePlugin extends PolyPlugin { public static final String NAME = "cypher"; @@ -50,8 +50,8 @@ public class CypherLanguagePlugin extends Plugin { * Constructor to be used by plugin manager for plugin instantiation. * Your plugins have to provide constructor with this exact signature to be successfully loaded by manager. */ - public CypherLanguagePlugin( PluginWrapper wrapper ) { - super( wrapper ); + public CypherLanguagePlugin( PluginContext context ) { + super( context ); } diff --git a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/parser/CypherCharStream.java b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/parser/CypherCharStream.java deleted file mode 100644 index 0db4857162..0000000000 --- a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/parser/CypherCharStream.java +++ /dev/null @@ -1,308 +0,0 @@ -/* - * Copyright 2019-2023 The Polypheny Project (Modifications) - * Copyright (c) Neo4j Sweden AB (http://neo4j.com) (Original Version) - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.cypher.parser; - -import java.io.IOException; - -/** - * CharStream operating over an input String. - *

      - * This class unescapes escaped unicode characters, and to do that efficiently - * it keeps an internal incremental copy of the input - *

      - *

      - * Example
      - *      query: "WITH 1 AS x
      - *              RETURN '\\u01FF' AS y"
      - *     result: [W, I, T, H,  , 1,  , A, S,  , x,\n, R, E, T, U, R, N,  , ', ǿ, ',  , A, S,  , y]
      - *      lines: [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2]
      - *    columns: [1, 2, 3, 4, 5, 6, 7, 8, 9,10,11,12, 1, 2, 3, 4, 5, 6, 7, 8, 9,16,17,18,19,20,21]
      - *     offset: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9,10,11,12,13,14,15,16,17,18,19,20,27,28,29,30,31,32]
      - *                                                                          ^
      - *                                                                          un-escaped unicode
      - * 
      - *

      - * As parsing progresses, the {@link CypherCharStream} will convert more and more - * of `query` into `result`, while updating `lines`, `columns` and `offset`. - */ -public class CypherCharStream implements CharStream { - - private static final char BACKSLASH = '\\'; - private static final IOException END_OF_INPUT = new IOException( "End of input" ); - - private final String query; - private int queryCursor = -1; - private int queryCursorColumn; - private int queryCursorLine = 1; - private boolean queryCursorIsCR; - private boolean queryCursorIsLF; - - private char[] result; - private int resultCursor = -1; - private int resultHighMark; - - private final int[] lines; - private final int[] columns; - private final int[] offsets; - - private int beginOffset; - - private int tabSize = 1; - - - public CypherCharStream( String query ) { - this.query = query; - this.result = new char[query.length()]; - this.lines = new int[query.length()]; - this.columns = new int[query.length()]; - this.offsets = new int[query.length()]; - } - - - @Override - public char readChar() throws IOException { - if ( resultCursor + 1 == resultHighMark ) { - convertChar(); - } - resultCursor++; - - return result[resultCursor]; - } - - - private void convertChar() throws IOException { - char c = nextQueryChar(); - - if ( c == BACKSLASH ) { - char c2 = nextQueryChar(); - if ( c2 == 'u' ) { - c = convertUnicode( c2 ); - } else { - appendToResult( c ); - c = c2; - } - } - - appendToResult( c ); - } - - - private void appendToResult( char c ) { - result[resultHighMark] = c; - lines[resultHighMark] = queryCursorLine; - columns[resultHighMark] = queryCursorColumn; - offsets[resultHighMark] = queryCursor; - resultHighMark++; - } - - - private char nextQueryChar() throws IOException { - if ( queryCursor + 1 >= query.length() ) { - throw END_OF_INPUT; - } - queryCursor++; - - char c = query.charAt( queryCursor ); - updateLineColumn( c ); - - return c; - } - - - private void updateLineColumn( char c ) { - queryCursorColumn++; - - if ( queryCursorIsLF ) { - queryCursorIsLF = false; - queryCursorColumn = 1; - queryCursorLine++; - } else if ( queryCursorIsCR ) { - queryCursorIsCR = false; - if ( c == '\n' ) { - queryCursorIsLF = true; - } else { - queryCursorColumn = 1; - queryCursorLine++; - } - } - - switch ( c ) { - case '\r': - queryCursorIsCR = true; - break; - case '\n': - queryCursorIsLF = true; - break; - case '\t': - queryCursorColumn--; - queryCursorColumn += tabSize - (queryCursorColumn % tabSize); - break; - default: - break; - } - } - - - private char convertUnicode( char c ) { - try { - while ( c == 'u' ) { - c = nextQueryChar(); - } - - return (char) (hexval( c ) << 12 | - hexval( nextQueryChar() ) << 8 | - hexval( nextQueryChar() ) << 4 | - hexval( nextQueryChar() )); - } catch ( final IOException e ) { - throw new RuntimeException( e.getMessage() ); - //throw new RuntimeException( e.getMessage(), queryCursor, queryCursorLine, queryCursorColumn ); - } - } - - - @Override - public void backup( int amount ) { - resultCursor -= amount; - } - - - @Override - public int getBeginColumn() { - return columns[beginOffset]; - } - - - @Override - public int getBeginLine() { - return lines[beginOffset]; - } - - - public int getBeginOffset() { - return offsets[beginOffset]; - } - - - @Override - public int getEndColumn() { - return columns[resultCursor]; - } - - - @Override - public int getEndLine() { - return lines[resultCursor]; - } - - - public int getEndOffset() { - return offsets[resultCursor]; - } - - - @Override - 
public char BeginToken() throws IOException { - char c = readChar(); - beginOffset = resultCursor; - return c; - } - - - @Override - public String GetImage() { - return new String( result, beginOffset, nextOffset() - beginOffset ); - } - - - private int nextOffset() { - return resultCursor + 1; - } - - - @Override - public char[] GetSuffix( int len ) { - char[] suffix = new char[len]; - int endOffset = nextOffset(); - System.arraycopy( result, endOffset - len, suffix, 0, len ); - return suffix; - } - - - @Override - public void Done() { - } - - - @Override - public int getColumn() { - return columns[resultCursor]; - } - - - @Override - public int getLine() { - return lines[resultCursor]; - } - - - static int hexval( final char c ) throws IOException { - switch ( c ) { - case '0': - return 0; - case '1': - return 1; - case '2': - return 2; - case '3': - return 3; - case '4': - return 4; - case '5': - return 5; - case '6': - return 6; - case '7': - return 7; - case '8': - return 8; - case '9': - return 9; - case 'a': - case 'A': - return 10; - case 'b': - case 'B': - return 11; - case 'c': - case 'C': - return 12; - case 'd': - case 'D': - return 13; - case 'e': - case 'E': - return 14; - case 'f': - case 'F': - return 15; - default: - throw new IOException( "Invalid input '" + c + "': expected four hexadecimal digits specifying a unicode character" ); - } - } - -} diff --git a/plugins/explore-by-example/src/main/java/org/polypheny/db/exploreByExample/ExploreByExamplePlugin.java b/plugins/explore-by-example/src/main/java/org/polypheny/db/exploreByExample/ExploreByExamplePlugin.java index 1306a66e5c..9e845f35f0 100644 --- a/plugins/explore-by-example/src/main/java/org/polypheny/db/exploreByExample/ExploreByExamplePlugin.java +++ b/plugins/explore-by-example/src/main/java/org/polypheny/db/exploreByExample/ExploreByExamplePlugin.java @@ -18,22 +18,22 @@ import lombok.extern.slf4j.Slf4j; import org.pf4j.Extension; -import org.pf4j.Plugin; -import org.pf4j.PluginWrapper; import org.polypheny.db.iface.Authenticator; +import org.polypheny.db.plugins.PluginContext; +import org.polypheny.db.plugins.PolyPlugin; import org.polypheny.db.processing.TransactionExtension; import org.polypheny.db.transaction.TransactionManager; import org.polypheny.db.webui.HttpServer; import org.polypheny.db.webui.HttpServer.HandlerType; -public class ExploreByExamplePlugin extends Plugin { +public class ExploreByExamplePlugin extends PolyPlugin { /** * Constructor to be used by plugin manager for plugin instantiation. * Your plugins have to provide constructor with this exact signature to be successfully loaded by manager. 
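 *
 * <p>
 * A minimal sketch (illustrative; the class name is hypothetical) of the migration this
 * patch applies to every plugin, swapping pf4j's {@code PluginWrapper} for {@code PluginContext}:
 * <pre>
 * public class MyPlugin extends PolyPlugin {
 *     public MyPlugin( PluginContext context ) {
 *         super( context );
 *     }
 * }
 * </pre>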
*/ - public ExploreByExamplePlugin( PluginWrapper wrapper ) { - super( wrapper ); + public ExploreByExamplePlugin( PluginContext context ) { + super( context ); } diff --git a/plugins/hsqldb-adapter/src/main/java/org/polypheny/db/hsqldb/stores/HsqldbPlugin.java b/plugins/hsqldb-adapter/src/main/java/org/polypheny/db/hsqldb/stores/HsqldbPlugin.java index 72938929f4..8a6eb86fb9 100644 --- a/plugins/hsqldb-adapter/src/main/java/org/polypheny/db/hsqldb/stores/HsqldbPlugin.java +++ b/plugins/hsqldb-adapter/src/main/java/org/polypheny/db/hsqldb/stores/HsqldbPlugin.java @@ -18,11 +18,11 @@ import com.google.common.collect.ImmutableMap; import java.util.Map; -import org.pf4j.Plugin; -import org.pf4j.PluginWrapper; import org.polypheny.db.catalog.Adapter; +import org.polypheny.db.plugins.PluginContext; +import org.polypheny.db.plugins.PolyPlugin; -public class HsqldbPlugin extends Plugin { +public class HsqldbPlugin extends PolyPlugin { public static final String ADAPTER_NAME = "HSQLDB"; @@ -32,8 +32,8 @@ public class HsqldbPlugin extends Plugin { * Constructor to be used by plugin manager for plugin instantiation. * Your plugins have to provide constructor with this exact signature to be successfully loaded by manager. */ - public HsqldbPlugin( PluginWrapper wrapper ) { - super( wrapper ); + public HsqldbPlugin( PluginContext context ) { + super( context ); } diff --git a/plugins/http-interface/src/main/java/org/polypheny/db/http/HttpInterfacePlugin.java b/plugins/http-interface/src/main/java/org/polypheny/db/http/HttpInterfacePlugin.java index 81434c7846..54e5e30217 100644 --- a/plugins/http-interface/src/main/java/org/polypheny/db/http/HttpInterfacePlugin.java +++ b/plugins/http-interface/src/main/java/org/polypheny/db/http/HttpInterfacePlugin.java @@ -36,8 +36,6 @@ import lombok.extern.slf4j.Slf4j; import org.jetbrains.annotations.NotNull; import org.pf4j.Extension; -import org.pf4j.Plugin; -import org.pf4j.PluginWrapper; import org.polypheny.db.StatusService; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.iface.Authenticator; @@ -49,6 +47,8 @@ import org.polypheny.db.information.InformationTable; import org.polypheny.db.languages.LanguageManager; import org.polypheny.db.languages.QueryLanguage; +import org.polypheny.db.plugins.PluginContext; +import org.polypheny.db.plugins.PolyPlugin; import org.polypheny.db.transaction.TransactionManager; import org.polypheny.db.util.Util; import org.polypheny.db.webui.Crud; @@ -57,14 +57,14 @@ import org.polypheny.db.webui.models.Result; import org.polypheny.db.webui.models.requests.QueryRequest; -public class HttpInterfacePlugin extends Plugin { +public class HttpInterfacePlugin extends PolyPlugin { /** * Constructor to be used by plugin manager for plugin instantiation. * Your plugins have to provide constructor with this exact signature to be successfully loaded by manager. 
*/ - public HttpInterfacePlugin( PluginWrapper wrapper ) { - super( wrapper ); + public HttpInterfacePlugin( PluginContext context ) { + super( context ); } diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcAdapterFramework.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcAdapterFramework.java index f76e9046fa..51c2c15b67 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcAdapterFramework.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcAdapterFramework.java @@ -16,17 +16,17 @@ package org.polypheny.db.adapter.jdbc; -import org.pf4j.Plugin; -import org.pf4j.PluginWrapper; +import org.polypheny.db.plugins.PluginContext; +import org.polypheny.db.plugins.PolyPlugin; -public class JdbcAdapterFramework extends Plugin { +public class JdbcAdapterFramework extends PolyPlugin { /** * Constructor to be used by plugin manager for plugin instantiation. * Your plugins have to provide constructor with this exact signature to be successfully loaded by manager. */ - public JdbcAdapterFramework( PluginWrapper wrapper ) { - super( wrapper ); + public JdbcAdapterFramework( PluginContext context ) { + super( context ); } diff --git a/plugins/mapdb-monitoring/src/main/java/org/polypheny/db/monitoring/MapDBMonitoringPlugin.java b/plugins/mapdb-monitoring/src/main/java/org/polypheny/db/monitoring/MapDBMonitoringPlugin.java index 874691bfe4..afdfb79f69 100644 --- a/plugins/mapdb-monitoring/src/main/java/org/polypheny/db/monitoring/MapDBMonitoringPlugin.java +++ b/plugins/mapdb-monitoring/src/main/java/org/polypheny/db/monitoring/MapDBMonitoringPlugin.java @@ -16,18 +16,18 @@ package org.polypheny.db.monitoring; -import org.pf4j.Plugin; -import org.pf4j.PluginWrapper; +import org.polypheny.db.plugins.PluginContext; +import org.polypheny.db.plugins.PolyPlugin; import org.polypheny.db.plugins.PolyPluginManager; -public class MapDBMonitoringPlugin extends Plugin { +public class MapDBMonitoringPlugin extends PolyPlugin { /** * Constructor to be used by plugin manager for plugin instantiation. * Your plugins have to provide constructor with this exact signature to be successfully loaded by manager. 
*/ - public MapDBMonitoringPlugin( PluginWrapper wrapper ) { - super( wrapper ); + public MapDBMonitoringPlugin( PluginContext context ) { + super( context ); } diff --git a/plugins/mql-language/build.gradle b/plugins/mql-language/build.gradle index ddd26153ac..8565614291 100644 --- a/plugins/mql-language/build.gradle +++ b/plugins/mql-language/build.gradle @@ -16,16 +16,15 @@ buildscript { // JavaCC (https://github.com/johnmartel/javaccPlugin) classpath group: "gradle.plugin.ca.coglinc2", name: "javacc-gradle-plugin", version: javacc_plugin_version // Fmpp - classpath group: "net.sourceforge.fmpp", name: "fmpp", version: fmpp_plugin_version + //classpath group: "net.sourceforge.fmpp", name: "fmpp", version: fmpp_plugin_version } - ant.taskdef(name: "fmpp", classname:"fmpp.tools.AntTask", classpath: buildscript.configurations.classpath.asPath) + // ant.taskdef(name: "fmpp", classname:"fmpp.tools.AntTask", classpath: buildscript.configurations.classpath.asPath) } dependencies { compileOnly project(":core") compileOnly project(":monitoring") compileOnly project(":webui") - // implementation project(":sql-language") compileOnly project(":dbms") javacc group: "net.java.dev.javacc", name: "javacc", version: javacc_version // BSD 2-clause diff --git a/plugins/mql-language/src/main/java/org/polypheny/db/languages/MongoLanguagePlugin.java b/plugins/mql-language/src/main/java/org/polypheny/db/languages/MongoLanguagePlugin.java index 44962d7aa4..6b6f64397c 100644 --- a/plugins/mql-language/src/main/java/org/polypheny/db/languages/MongoLanguagePlugin.java +++ b/plugins/mql-language/src/main/java/org/polypheny/db/languages/MongoLanguagePlugin.java @@ -21,8 +21,6 @@ import java.util.List; import lombok.Getter; import org.eclipse.jetty.websocket.api.Session; -import org.pf4j.Plugin; -import org.pf4j.PluginWrapper; import org.polypheny.db.PolyImplementation; import org.polypheny.db.algebra.AlgRoot; import org.polypheny.db.algebra.constant.Kind; @@ -37,6 +35,8 @@ import org.polypheny.db.nodes.DeserializeFunctionOperator; import org.polypheny.db.nodes.LangFunctionOperator; import org.polypheny.db.nodes.Operator; +import org.polypheny.db.plugins.PluginContext; +import org.polypheny.db.plugins.PolyPlugin; import org.polypheny.db.plugins.PolyPluginManager; import org.polypheny.db.processing.AutomaticDdlProcessor; import org.polypheny.db.transaction.Statement; @@ -47,7 +47,7 @@ import org.polypheny.db.webui.models.Result; import org.polypheny.db.webui.models.requests.QueryRequest; -public class MongoLanguagePlugin extends Plugin { +public class MongoLanguagePlugin extends PolyPlugin { @Getter @VisibleForTesting @@ -60,8 +60,8 @@ public class MongoLanguagePlugin extends Plugin { * Constructor to be used by plugin manager for plugin instantiation. * Your plugins have to provide constructor with this exact signature to be successfully loaded by manager. 
*/ - public MongoLanguagePlugin( PluginWrapper wrapper ) { - super( wrapper ); + public MongoLanguagePlugin( PluginContext context ) { + super( context ); } diff --git a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlRenameCollection.java b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlRenameCollection.java index 41f155bf99..a638760ad7 100644 --- a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlRenameCollection.java +++ b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlRenameCollection.java @@ -19,6 +19,7 @@ import java.util.List; import java.util.Optional; import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.catalog.logistic.Pattern; import org.polypheny.db.ddl.DdlManager; import org.polypheny.db.languages.ParserPos; import org.polypheny.db.languages.QueryParameters; @@ -51,7 +52,7 @@ public Type getMqlKind() { public void execute( Context context, Statement statement, QueryParameters parameters ) { String database = ((MqlQueryParameters) parameters).getDatabase(); - List tables = context.getSnapshot().rel().getTables( database, null ); + List tables = context.getSnapshot().rel().getTables( Pattern.of( database ), null ); if ( dropTarget ) { Optional newTable = tables.stream() diff --git a/plugins/pig-language/src/main/codegen/javacc/PigletParser.jj b/plugins/pig-language/src/main/codegen/javacc/PigletParser.jj index 8e56efa1f4..b7b7210492 100644 --- a/plugins/pig-language/src/main/codegen/javacc/PigletParser.jj +++ b/plugins/pig-language/src/main/codegen/javacc/PigletParser.jj @@ -698,7 +698,7 @@ PigNode exp10() : * precedence. */ PigNode atom() : { - final PigNode e; + PigNode e = null; PigNode f; final List list; ImmutableList.Builder builder = null; diff --git a/plugins/pig-language/src/main/java/org/polypheny/db/PigLanguagePlugin.java b/plugins/pig-language/src/main/java/org/polypheny/db/PigLanguagePlugin.java index ba8c3da8a2..4e1225f032 100644 --- a/plugins/pig-language/src/main/java/org/polypheny/db/PigLanguagePlugin.java +++ b/plugins/pig-language/src/main/java/org/polypheny/db/PigLanguagePlugin.java @@ -20,8 +20,6 @@ import java.util.List; import lombok.extern.slf4j.Slf4j; import org.eclipse.jetty.websocket.api.Session; -import org.pf4j.Plugin; -import org.pf4j.PluginWrapper; import org.polypheny.db.algebra.AlgRoot; import org.polypheny.db.catalog.logistic.NamespaceType; import org.polypheny.db.information.InformationManager; @@ -30,6 +28,8 @@ import org.polypheny.db.languages.QueryParameters; import org.polypheny.db.nodes.Node; import org.polypheny.db.piglet.PigProcessorImpl; +import org.polypheny.db.plugins.PluginContext; +import org.polypheny.db.plugins.PolyPlugin; import org.polypheny.db.plugins.PolyPluginManager; import org.polypheny.db.processing.Processor; import org.polypheny.db.transaction.Statement; @@ -42,7 +42,7 @@ import org.polypheny.db.webui.models.requests.QueryRequest; @Slf4j -public class PigLanguagePlugin extends Plugin { +public class PigLanguagePlugin extends PolyPlugin { public static final String NAME = "pig"; @@ -52,8 +52,8 @@ public class PigLanguagePlugin extends Plugin { * Constructor to be used by plugin manager for plugin instantiation. * Your plugins have to provide constructor with this exact signature to be successfully loaded by manager. 
*/ - public PigLanguagePlugin( PluginWrapper wrapper ) { - super( wrapper ); + public PigLanguagePlugin( PluginContext context ) { + super( context ); } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/CatalogPlugin.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/CatalogPlugin.java index db4f20966c..e70884b4d9 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/CatalogPlugin.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/CatalogPlugin.java @@ -16,21 +16,19 @@ package org.polypheny.db.catalog; -import org.pf4j.Plugin; -import org.pf4j.PluginWrapper; +import org.polypheny.db.plugins.PluginContext; +import org.polypheny.db.plugins.PolyPlugin; import org.polypheny.db.plugins.PolyPluginManager; -public class CatalogPlugin extends Plugin { +public class CatalogPlugin extends PolyPlugin { /** * Constructor to be used by plugin manager for plugin instantiation. * Your plugins have to provide constructor with this exact signature to * be successfully loaded by manager. - * - * @param wrapper */ - public CatalogPlugin( PluginWrapper wrapper ) { - super( wrapper ); + public CatalogPlugin( PluginContext context ) { + super( context ); } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocGraphCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocGraphCatalog.java index e04708313c..af0cd509c9 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocGraphCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/allocation/PolyAllocGraphCatalog.java @@ -17,26 +17,42 @@ package org.polypheny.db.catalog.allocation; import io.activej.serializer.BinarySerializer; +import io.activej.serializer.annotations.Deserialize; +import io.activej.serializer.annotations.Serialize; +import java.util.HashMap; +import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import lombok.Getter; +import lombok.Value; import org.polypheny.db.catalog.Serializable; import org.polypheny.db.catalog.catalogs.AllocationGraphCatalog; import org.polypheny.db.catalog.entity.allocation.AllocationGraph; import org.polypheny.db.catalog.entity.logical.LogicalNamespace; +@Value public class PolyAllocGraphCatalog implements Serializable, AllocationGraphCatalog { @Getter - private final LogicalNamespace namespace; + @Serialize + public LogicalNamespace namespace; @Getter public BinarySerializer serializer = Serializable.builder.get().build( PolyAllocGraphCatalog.class ); @Getter + @Serialize public ConcurrentHashMap graphs; public PolyAllocGraphCatalog( LogicalNamespace namespace ) { + this( namespace, new HashMap<>() ); + } + + + public PolyAllocGraphCatalog( + @Deserialize("namespace") LogicalNamespace namespace, + @Deserialize("graphs") Map graphs ) { this.namespace = namespace; + this.graphs = new ConcurrentHashMap<>( graphs ); } diff --git a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/GraphCatalog.java b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/GraphCatalog.java index 6dd6655885..1649efbbcb 100644 --- a/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/GraphCatalog.java +++ b/plugins/poly-catalog/src/main/java/org/polypheny/db/catalog/logical/GraphCatalog.java @@ -17,6 +17,8 @@ package org.polypheny.db.catalog.logical; import io.activej.serializer.BinarySerializer; +import io.activej.serializer.annotations.Deserialize; +import 
io.activej.serializer.annotations.Serialize; import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; @@ -40,11 +42,13 @@ public class GraphCatalog implements Serializable, LogicalGraphCatalog { @Getter public BinarySerializer serializer = Serializable.builder.get().build( GraphCatalog.class ); @Getter + @Serialize public LogicalNamespace logicalNamespace; public IdBuilder idBuilder = IdBuilder.getInstance(); @Getter - ConcurrentHashMap graphs; + @Serialize + public ConcurrentHashMap graphs; @NonFinal @@ -57,7 +61,9 @@ public GraphCatalog( LogicalNamespace logicalNamespace ) { } - public GraphCatalog( LogicalNamespace logicalNamespace, Map graphs ) { + public GraphCatalog( + @Deserialize("logicalNamespace") LogicalNamespace logicalNamespace, + @Deserialize("graphs") Map graphs ) { this.logicalNamespace = logicalNamespace; this.graphs = new ConcurrentHashMap<>( graphs ); } diff --git a/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/RestInterfacePlugin.java b/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/RestInterfacePlugin.java index 71d522f506..f35c8d0096 100644 --- a/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/RestInterfacePlugin.java +++ b/plugins/rest-interface/src/main/java/org/polypheny/db/restapi/RestInterfacePlugin.java @@ -46,8 +46,6 @@ import lombok.extern.slf4j.Slf4j; import org.jetbrains.annotations.NotNull; import org.pf4j.Extension; -import org.pf4j.Plugin; -import org.pf4j.PluginWrapper; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogUser; import org.polypheny.db.iface.Authenticator; @@ -60,6 +58,8 @@ import org.polypheny.db.information.InformationManager; import org.polypheny.db.information.InformationPage; import org.polypheny.db.information.InformationTable; +import org.polypheny.db.plugins.PluginContext; +import org.polypheny.db.plugins.PolyPlugin; import org.polypheny.db.restapi.exception.ParserException; import org.polypheny.db.restapi.exception.RestException; import org.polypheny.db.restapi.exception.UnauthorizedAccessException; @@ -71,15 +71,15 @@ import org.polypheny.db.util.Util; -public class RestInterfacePlugin extends Plugin { +public class RestInterfacePlugin extends PolyPlugin { /** * Constructor to be used by plugin manager for plugin instantiation. * Your plugins have to provide constructor with this exact signature to be successfully loaded by manager. 
*/ - public RestInterfacePlugin( PluginWrapper wrapper ) { - super( wrapper ); + public RestInterfacePlugin( PluginContext context ) { + super( context ); } diff --git a/plugins/sql-language/build.gradle b/plugins/sql-language/build.gradle index 4eac1727c4..43e76897ae 100644 --- a/plugins/sql-language/build.gradle +++ b/plugins/sql-language/build.gradle @@ -16,9 +16,9 @@ buildscript { // JavaCC (https://github.com/johnmartel/javaccPlugin) classpath group: "gradle.plugin.ca.coglinc2", name: "javacc-gradle-plugin", version: javacc_plugin_version // Fmpp - classpath group: "net.sourceforge.fmpp", name: "fmpp", version: fmpp_plugin_version + //classpath group: "net.sourceforge.fmpp", name: "fmpp", version: fmpp_plugin_version } - ant.taskdef(name: "fmpp", classname:"fmpp.tools.AntTask", classpath: buildscript.configurations.classpath.asPath) + //ant.taskdef(name: "fmpp", classname:"fmpp.tools.AntTask", classpath: buildscript.configurations.classpath.asPath) } dependencies { @@ -61,10 +61,9 @@ task generateFmppSources { } } task generateParser (type: CompileJavaccTask) { - dependsOn("generateFmppSources") getConventionMapping().map("classpath", { configurations.javacc }) arguments = [static: "false", lookahead: "2"] - inputDirectory = file(project.buildDir.absolutePath + "/generated-sources/fmpp/sql/javacc") + inputDirectory = file("src/main/codegen") outputDirectory = file(project.buildDir.absolutePath + "/generated-sources/org/polypheny/db/languages/sql/parser/impl") } diff --git a/plugins/sql-language/src/main/codegen/templates/Parser.jj b/plugins/sql-language/src/main/codegen/Parser.jj similarity index 75% rename from plugins/sql-language/src/main/codegen/templates/Parser.jj rename to plugins/sql-language/src/main/codegen/Parser.jj index ae51e67022..fc6fed5520 100644 --- a/plugins/sql-language/src/main/codegen/templates/Parser.jj +++ b/plugins/sql-language/src/main/codegen/Parser.jj @@ -30,9 +30,6 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -<@pp.dropOutputFile /> - -<@pp.changeOutputFile name="javacc/Parser.jj" /> options { STATIC = false; @@ -41,13 +38,111 @@ options { } -PARSER_BEGIN(${parser.class}) - -package ${parser.package}; - -<#list parser.imports as importStr> -import ${importStr}; - +PARSER_BEGIN(SqlParserImpl) + +package org.polypheny.db.languages.sql.parser.impl; + +import org.polypheny.db.schema.ColumnStrategy; +import org.polypheny.db.sql.language.SqlAlter; +import org.polypheny.db.sql.language.SqlBasicCall; +import org.polypheny.db.sql.language.SqlBinaryOperator; +import org.polypheny.db.sql.language.SqlCall; +import org.polypheny.db.sql.language.SqlCharStringLiteral; +import org.polypheny.db.sql.language.SqlCollation; +import org.polypheny.db.sql.language.SqlCreate; +import org.polypheny.db.sql.language.SqlDataTypeSpec; +import org.polypheny.db.sql.language.SqlDelete; +import org.polypheny.db.sql.language.SqlDescribeSchema; +import org.polypheny.db.sql.language.SqlDescribeTable; +import org.polypheny.db.sql.language.SqlDrop; +import org.polypheny.db.sql.language.SqlDynamicParam; +import org.polypheny.db.sql.language.SqlExplain; +import org.polypheny.db.sql.language.SqlIdentifier; +import org.polypheny.db.sql.language.SqlInsert; +import org.polypheny.db.sql.language.SqlInsertKeyword; +import org.polypheny.db.sql.language.SqlIntervalQualifier; +import org.polypheny.db.sql.language.SqlJdbcDataTypeName; +import org.polypheny.db.sql.language.SqlJdbcFunctionCall; +import org.polypheny.db.sql.language.SqlJoin; +import org.polypheny.db.sql.language.SqlLiteral; +import org.polypheny.db.sql.language.SqlMatchRecognize; +import org.polypheny.db.sql.language.SqlMerge; +import org.polypheny.db.sql.language.SqlNode; +import org.polypheny.db.sql.language.SqlNodeList; +import org.polypheny.db.sql.language.SqlNumericLiteral; +import org.polypheny.db.sql.language.SqlOperator; +import org.polypheny.db.sql.language.SqlOrderBy; +import org.polypheny.db.sql.language.SqlPostfixOperator; +import org.polypheny.db.sql.language.SqlPrefixOperator; +import org.polypheny.db.sql.language.SqlSampleSpec; +import org.polypheny.db.sql.language.SqlSelect; +import org.polypheny.db.sql.language.SqlSelectKeyword; +import org.polypheny.db.sql.language.SqlSetOption; +import org.polypheny.db.sql.language.SqlUnnestOperator; +import org.polypheny.db.sql.language.SqlUpdate; +import org.polypheny.db.sql.language.SqlUtil; +import org.polypheny.db.sql.language.SqlWindow; +import org.polypheny.db.sql.language.SqlWith; +import org.polypheny.db.sql.language.SqlWithItem; +import org.polypheny.db.sql.language.ddl.SqlAlterAdaptersAdd; +import org.polypheny.db.sql.language.ddl.SqlAlterAdaptersDrop; +import org.polypheny.db.sql.language.ddl.SqlAlterConfig; +import org.polypheny.db.sql.language.ddl.SqlAlterInterfacesAdd; +import org.polypheny.db.sql.language.ddl.SqlAlterInterfacesDrop; +import org.polypheny.db.sql.language.ddl.SqlAlterMaterializedView; +import org.polypheny.db.sql.language.ddl.SqlAlterSchema; +import org.polypheny.db.sql.language.ddl.SqlAlterTable; +import org.polypheny.db.sql.language.ddl.SqlAlterView; +import org.polypheny.db.sql.language.ddl.SqlDdlNodes; +import org.polypheny.db.sql.language.ddl.SqlTruncate; +import org.polypheny.db.sql.language.ddl.altermaterializedview.SqlAlterMaterializedViewAddIndex; +import org.polypheny.db.sql.language.ddl.altermaterializedview.SqlAlterMaterializedViewDropIndex; +import org.polypheny.db.sql.language.ddl.altermaterializedview.SqlAlterMaterializedViewFreshnessManual; +import 
org.polypheny.db.sql.language.ddl.altermaterializedview.SqlAlterMaterializedViewRename; +import org.polypheny.db.sql.language.ddl.altermaterializedview.SqlAlterMaterializedViewRenameColumn; +import org.polypheny.db.sql.language.ddl.alterschema.SqlAlterSchemaOwner; +import org.polypheny.db.sql.language.ddl.alterschema.SqlAlterSchemaRename; +import org.polypheny.db.sql.language.ddl.altertable.SqlAlterSourceTableAddColumn; +import org.polypheny.db.sql.language.ddl.altertable.SqlAlterTableAddColumn; +import org.polypheny.db.sql.language.ddl.altertable.SqlAlterTableAddForeignKey; +import org.polypheny.db.sql.language.ddl.altertable.SqlAlterTableAddIndex; +import org.polypheny.db.sql.language.ddl.altertable.SqlAlterTableAddPartitions; +import org.polypheny.db.sql.language.ddl.altertable.SqlAlterTableAddPlacement; +import org.polypheny.db.sql.language.ddl.altertable.SqlAlterTableAddPrimaryKey; +import org.polypheny.db.sql.language.ddl.altertable.SqlAlterTableAddUniqueConstraint; +import org.polypheny.db.sql.language.ddl.altertable.SqlAlterTableDropColumn; +import org.polypheny.db.sql.language.ddl.altertable.SqlAlterTableDropConstraint; +import org.polypheny.db.sql.language.ddl.altertable.SqlAlterTableDropForeignKey; +import org.polypheny.db.sql.language.ddl.altertable.SqlAlterTableDropIndex; +import org.polypheny.db.sql.language.ddl.altertable.SqlAlterTableDropPlacement; +import org.polypheny.db.sql.language.ddl.altertable.SqlAlterTableDropPrimaryKey; +import org.polypheny.db.sql.language.ddl.altertable.SqlAlterTableMergePartitions; +import org.polypheny.db.sql.language.ddl.altertable.SqlAlterTableModifyColumn; +import org.polypheny.db.sql.language.ddl.altertable.SqlAlterTableModifyPartitions; +import org.polypheny.db.sql.language.ddl.altertable.SqlAlterTableModifyPlacement; +import org.polypheny.db.sql.language.ddl.altertable.SqlAlterTableModifyPlacementAddColumn; +import org.polypheny.db.sql.language.ddl.altertable.SqlAlterTableModifyPlacementDropColumn; +import org.polypheny.db.sql.language.ddl.altertable.SqlAlterTableOwner; +import org.polypheny.db.sql.language.ddl.altertable.SqlAlterTableRename; +import org.polypheny.db.sql.language.ddl.altertable.SqlAlterTableRenameColumn; +import org.polypheny.db.sql.language.ddl.alterview.SqlAlterViewRename; +import org.polypheny.db.sql.language.ddl.alterview.SqlAlterViewRenameColumn; +import org.polypheny.db.sql.language.fun.OracleSqlOperatorTable; +import org.polypheny.db.sql.language.fun.SqlArrayValueConstructor; +import org.polypheny.db.sql.language.fun.SqlCase; +import org.polypheny.db.sql.language.fun.SqlJsonArrayAggAggFunction; +import org.polypheny.db.sql.language.fun.SqlJsonObjectAggAggFunction; +import org.polypheny.db.sql.language.fun.SqlStdOperatorTable; +import org.polypheny.db.sql.language.fun.SqlTrimFunction; +import org.polypheny.db.sql.language.parser.Span; +import org.polypheny.db.sql.language.parser.SqlAbstractParserImpl; +import org.polypheny.db.sql.language.parser.SqlAbstractParserImpl.ExprContext; +import org.polypheny.db.sql.language.parser.SqlAbstractParserImpl.MetadataImpl; +import org.polypheny.db.sql.language.parser.SqlParser; +import org.polypheny.db.sql.language.parser.SqlParserUtil; +import java.util.Map; +import java.util.HashMap; +import org.polypheny.db.util.CoreUtil; import org.polypheny.db.catalog.logistic.NamespaceType; @@ -108,7 +203,7 @@ import static org.polypheny.db.util.Static.RESOURCE; * * The public wrapper for this parser is {@link SqlParser}. 
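 *
 * <p>
 * A usage sketch (illustrative only), mirroring the {@code FACTORY} contract below:
 * <pre>
 * SqlAbstractParserImpl parser =
 *     SqlParserImpl.FACTORY.getParser( new SourceStringReader( "SELECT 1" ) );
 * </pre>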
*/ -public class ${parser.class} extends SqlAbstractParserImpl +public class SqlParserImpl extends SqlAbstractParserImpl { private static final Logger LOGGER = PolyphenyDbTrace.getParserTracer(); @@ -130,7 +225,7 @@ public class ${parser.class} extends SqlAbstractParserImpl */ public static final ParserFactory FACTORY = new ParserFactory() { public SqlAbstractParserImpl getParser(Reader reader) { - final ${parser.class} parser = new ${parser.class}(reader); + final SqlParserImpl parser = new SqlParserImpl(reader); if (reader instanceof SourceStringReader) { final String sql = ((SourceStringReader) reader).getSourceString(); parser.setOriginalSql(sql); @@ -153,9 +248,9 @@ public class ${parser.class} extends SqlAbstractParserImpl public Metadata getMetadata() { - synchronized (${parser.class}.class) { + synchronized (SqlParserImpl.class) { if (metadata == null) { - metadata = new MetadataImpl(new ${parser.class}(new java.io.StringReader(""))); + metadata = new MetadataImpl(new SqlParserImpl(new java.io.StringReader(""))); } return metadata; } @@ -168,7 +263,7 @@ public class ${parser.class} extends SqlAbstractParserImpl public void switchTo(String stateName) { - int state = Arrays.asList(${parser.class}TokenManager.lexStateNames).indexOf(stateName); + int state = Arrays.asList(SqlParserImplTokenManager.lexStateNames).indexOf(stateName); token_source.SwitchTo(state); } @@ -207,7 +302,7 @@ public class ${parser.class} extends SqlAbstractParserImpl } } -PARSER_END(${parser.class}) +PARSER_END(SqlParserImpl) /*************************************** @@ -769,209 +864,1503 @@ SqlNodeList ParenthesizedQueryOrCommaListWithDefault( checkNonQueryExpression(exprContext); } ( - e = Expression(exprContext) - | - e = Default() + e = Expression(exprContext) + | + e = Default() + ) + { + list.add(e); + } + )* + + { + return new SqlNodeList(list, s.end(this)); + } +} + +/** + * Parses function parameter lists including DISTINCT keyword recognition, + * DEFAULT, and named argument assignment. 
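+ * <p>
+ * For example (illustrative): {@code COUNT(DISTINCT deptno)}, {@code f(DEFAULT)}
+ * and {@code f(name => 1)} are all shapes accepted by this production.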
+ */ +List FunctionParameterList( + ExprContext exprContext) : +{ + SqlNode e = null; + List list = new ArrayList(); +} +{ + + [ + { + e = SqlLiteral.createSymbol(SqlSelectKeyword.DISTINCT, getPos()); + } + | + { + e = SqlLiteral.createSymbol(SqlSelectKeyword.ALL, getPos()); + } + ] + { + list.add(e); + } + Arg0(list, exprContext) + ( + { + // a comma-list can't appear where only a query is expected + checkNonQueryExpression(exprContext); + } + Arg(list, exprContext) + )* + + { + return list; + } +} + +void Arg0(List list, ExprContext exprContext) : +{ + SqlIdentifier name = null; + SqlNode e = null; + final ExprContext firstExprContext; + { + // we've now seen left paren, so queries inside should + // be allowed as sub-queries + switch (exprContext) { + case ACCEPT_SUB_QUERY: + firstExprContext = ExprContext.ACCEPT_NONCURSOR; + break; + case ACCEPT_CURSOR: + firstExprContext = ExprContext.ACCEPT_ALL; + break; + default: + firstExprContext = exprContext; + break; + } + } +} +{ + [ + name = SimpleIdentifier() + ] + ( + e = Default() + | + e = OrderedQueryOrExpr(firstExprContext) + ) + { + if (e != null) { + if (name != null) { + e = (SqlCall)OperatorRegistry.get( OperatorName.ARGUMENT_ASSIGNMENT ).createCall( + Span.of(name, e).pos(), e, name); + } + list.add(e); + } + } +} + +void Arg(List list, ExprContext exprContext) : +{ + SqlIdentifier name = null; + SqlNode e = null; +} +{ + [ + name = SimpleIdentifier() + ] + ( + e = Default() + | + e = Expression(exprContext) + ) + { + if (e != null) { + if (name != null) { + e = (SqlCall) OperatorRegistry.get( OperatorName.ARGUMENT_ASSIGNMENT ).createCall( Span.of(name, e).pos(), e, name ); + } + list.add(e); + } + } +} + +SqlNode Default() : {} +{ + { + return (SqlCall) OperatorRegistry.get( OperatorName.DEFAULT ).createCall(getPos()); + } +} + +/** + * Parses a query (SELECT, UNION, INTERSECT, EXCEPT, VALUES, TABLE) followed by + * the end-of-file symbol. + */ +SqlNode SqlQueryEof() : +{ + SqlNode query; +} +{ + query = OrderedQueryOrExpr(ExprContext.ACCEPT_QUERY) + + { return query; } +} + +/** + * Parses an SQL statement. + */ +SqlNode SqlStmt() : +{ + SqlNode stmt; +} +{ + ( + stmt = SqlTruncateTable() + | + +// stmt = SqlSetOption(Span.of()) +// | + stmt = SqlAlter() + | + stmt = SqlCreate() + | + stmt = SqlDrop() + | + stmt = OrderedQueryOrExpr(ExprContext.ACCEPT_QUERY) + | + stmt = Explain() + | + stmt = SqlDescribe() + | + stmt = SqlInsert() + | + stmt = SqlDelete() + | + stmt = SqlUpdate() + | + stmt = SqlMerge() + | + stmt = SqlProcedureCall() + ) + { + return stmt; + } +} + +/** + * Parses an SQL statement followed by the end-of-file symbol. + */ +SqlNode SqlStmtEof() : +{ + SqlNode stmt; +} +{ + stmt = SqlStmt() + { + return stmt; + } +} + +//////////////////////////////////// +/////////////parserimpl start + +/** +* Parses a TRUNCATE TABLE statement. +*/ +SqlTruncate SqlTruncateTable() : +{ + final Span s; + final SqlIdentifier entity; +} +{ + { s = span(); } +

    entity = CompoundIdentifier() + { + return new SqlTruncate(s.end(this), entity); + } +} + + +/** +* Parses a ALTER SCHEMA statement. +*/ +SqlAlterSchema SqlAlterSchema(Span s) : +{ + final SqlIdentifier namespace; + final SqlIdentifier name; + final SqlIdentifier owner; +} +{ + + namespace = CompoundIdentifier() + ( + + name = CompoundIdentifier() + { + return new SqlAlterSchemaRename(s.end(this), namespace, name); + } + | + + owner = SimpleIdentifier() + { + return new SqlAlterSchemaOwner(s.end(this), namespace, owner); + } + ) +} + +/** +* Parses a ALTER VIEW statement. +**/ +SqlAlterView SqlAlterView(Span s) : +{ + final SqlIdentifier view; + final SqlIdentifier name; + final SqlIdentifier column; +} +{ + + view = CompoundIdentifier() + ( + + name = SimpleIdentifier() + { + return new SqlAlterViewRename(s.end(this), view, name); + } + | + + column = SimpleIdentifier() + + name = SimpleIdentifier() + { + return new SqlAlterViewRenameColumn(s.end(this), view, column, name); + } + ) +} + +/** +* Parses a ALTER MATERIALIZED VIEW statement. +**/ +SqlAlterMaterializedView SqlAlterMaterializedView(Span s) : +{ + final SqlIdentifier materializedview; + final SqlIdentifier name; + final SqlIdentifier column; + final SqlIdentifier store; + final SqlIdentifier indexName; + final SqlNodeList columnList; + final SqlIdentifier indexMethod; + final boolean unique; + final SqlIdentifier storeName; + +} +{ + + materializedview = CompoundIdentifier() + ( + + name = SimpleIdentifier() + { + return new SqlAlterMaterializedViewRename(s.end(this), materializedview, name); + } + | + + column = SimpleIdentifier() + + name = SimpleIdentifier() + { + return new SqlAlterMaterializedViewRenameColumn(s.end(this), materializedview, column, name); + } + | + + { + return new SqlAlterMaterializedViewFreshnessManual(s.end(this), materializedview); + } + | + + ( + { unique = true; } + | + { unique = false; } + ) + + indexName = SimpleIdentifier() + + ( + columnList = ParenthesizedSimpleIdentifierList() + | + column = SimpleIdentifier() + { + columnList = new SqlNodeList(Arrays.asList( new SqlNode[]{ column }), s.end(this)); + } + ) + ( + indexMethod = SimpleIdentifier() + | + { indexMethod = null; } + ) + ( + storeName = SimpleIdentifier() + | + { storeName = null; } + ) + { + return new SqlAlterMaterializedViewAddIndex(s.end(this), materializedview, columnList, unique, indexMethod, indexName, storeName); + } + | + + indexName = SimpleIdentifier() + { + return new SqlAlterMaterializedViewDropIndex(s.end(this), materializedview, indexName); + } + + ) +} + +/** +* Parses a ALTER TABLE statement. 
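+* <p>
+* For example (illustrative): {@code ALTER TABLE emps RENAME TO employees} or
+* {@code ALTER TABLE emps ADD COLUMN age INTEGER NULL}.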
+*/
+SqlAlterTable SqlAlterTable(Span s) :
+{
+    final SqlIdentifier entity;
+    final SqlIdentifier column;
+    final SqlIdentifier name;
+    final SqlIdentifier owner;
+    final SqlDataTypeSpec type;
+    final boolean nullable;
+    final SqlNode defaultValue;
+    final SqlIdentifier beforeColumn;
+    final SqlIdentifier afterColumn;
+    final SqlAlterTable statement;
+    final SqlNodeList columnList;
+    final SqlNodeList referencesList;
+    final SqlIdentifier refColumn;
+    final SqlIdentifier refTable;
+    final SqlIdentifier constraintName;
+    final SqlIdentifier store;
+    final SqlIdentifier indexName;
+    final SqlIdentifier indexMethod;
+    final SqlIdentifier storeName;
+    final String onUpdate;
+    final String onDelete;
+    final boolean unique;
+    final SqlIdentifier physicalName;
+    final SqlIdentifier partitionType;
+    SqlIdentifier partitionColumn; // cannot be final: javacc assigns it in more than one branch
+    List<Integer> partitionList = new ArrayList<Integer>();
+    int partitionIndex = 0;
+    int numPartitionGroups = 0;
+    int numPartitions = 0;
+    List<SqlIdentifier> partitionNamesList = new ArrayList<SqlIdentifier>();
+    SqlIdentifier partitionName = null;
+    List<List<SqlNode>> partitionQualifierList = new ArrayList<List<SqlNode>>();
+    List<SqlNode> partitionQualifiers = new ArrayList<SqlNode>();
+    SqlNode partitionValues = null;
+    SqlIdentifier tmpIdent = null;
+    int tmpInt = 0;
+    RawPartitionInformation rawPartitionInfo;
+}
+{
+    <TABLE>
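+    // The leading <ALTER> token is already consumed by SqlAlter() before this production is reached.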
    + entity = CompoundIdentifier() + ( + + name = SimpleIdentifier() + { + return new SqlAlterTableRename(s.end(this), entity, name); + } + | + + owner = SimpleIdentifier() + { + return new SqlAlterTableOwner(s.end(this), entity, owner); + } + | + + column = SimpleIdentifier() + + name = SimpleIdentifier() + { + return new SqlAlterTableRenameColumn(s.end(this), entity, column, name); + } + | + + name = SimpleIdentifier() + ( + type = DataType() + ( + { nullable = true; } + | + { nullable = false; } + | + { nullable = true; } + ) + ( + + defaultValue = Literal() + | + defaultValue = ArrayConstructor() + | + { defaultValue = null; } + ) + ( + { beforeColumn = SimpleIdentifier(); afterColumn = null; } + | + { afterColumn = SimpleIdentifier(); beforeColumn = null; } + | + { afterColumn = null; beforeColumn = null; } + ) + { + return new SqlAlterTableAddColumn(s.end(this), entity, name, type, nullable, defaultValue, beforeColumn, afterColumn); + } + | + + physicalName = SimpleIdentifier() + ( + + defaultValue = Literal() + | + defaultValue = ArrayConstructor() + | + { defaultValue = null; } + ) + ( + { beforeColumn = SimpleIdentifier(); afterColumn = null; } + | + { afterColumn = SimpleIdentifier(); beforeColumn = null; } + | + { afterColumn = null; beforeColumn = null; } + ) + { + return new SqlAlterSourceTableAddColumn(s.end(this), entity, name, physicalName, defaultValue, beforeColumn, afterColumn); + } + ) + | + + column = SimpleIdentifier() + { + return new SqlAlterTableDropColumn(s.end(this), entity, column); + } + | + + ( + columnList = ParenthesizedSimpleIdentifierList() + | + column = SimpleIdentifier() + { + columnList = new SqlNodeList(Arrays.asList( new SqlNode[]{ column }), s.end(this)); + } + ) + { + return new SqlAlterTableAddPrimaryKey(s.end(this), entity, columnList); + } + | + + { + return new SqlAlterTableDropPrimaryKey(s.end(this), entity); + } + | + + constraintName = SimpleIdentifier() + ( + + ( + columnList = ParenthesizedSimpleIdentifierList() + | + column = SimpleIdentifier() + { + columnList = new SqlNodeList(Arrays.asList( new SqlNode[]{ column }), s.end(this)); + } + ) + { + return new SqlAlterTableAddUniqueConstraint(s.end(this), entity, constraintName, columnList); + } + | + + ( + columnList = ParenthesizedSimpleIdentifierList() + | + column = SimpleIdentifier() + { + columnList = new SqlNodeList(Arrays.asList( new SqlNode[]{ column }), s.end(this)); + } + ) + + refTable = CompoundIdentifier() + referencesList = ParenthesizedSimpleIdentifierList() + ( + + ( + { onUpdate = "CASCADE"; } + | + { onUpdate = "NONE"; } + | + { onUpdate = "RESTRICT"; } + | + { onUpdate = "SET NULL"; } + | + { onUpdate = "SET DEFAULT"; } + ) + | + { onUpdate = null; } + ) + ( + + ( + { onDelete = "CASCADE"; } + | + { onDelete = "NONE"; } + | + { onDelete = "RESTRICT"; } + | + { onDelete = "SET NULL"; } + | + { onDelete = "SET DEFAULT"; } + ) + | + { onDelete = null; } + ) + { + return new SqlAlterTableAddForeignKey(s.end(this), entity, constraintName, columnList, refTable, referencesList, onUpdate, onDelete); + } + ) + | + + constraintName = SimpleIdentifier() + { + return new SqlAlterTableDropConstraint(s.end(this), entity, constraintName); + } + | + + + + constraintName = SimpleIdentifier() + { + return new SqlAlterTableDropForeignKey(s.end(this), entity, constraintName); + } + | + + + ( + ( + columnList = ParenthesizedSimpleIdentifierList() + | + column = SimpleIdentifier() + { + columnList = new SqlNodeList(Arrays.asList( new SqlNode[]{ column }), s.end(this)); + } + ) + | + { + 
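+                // No explicit column list given; the empty list is assumed to be treated
+                // downstream as a placement of all columns of the table.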
columnList = SqlNodeList.EMPTY; + } + ) + + + store = SimpleIdentifier() + [ + + + ( + partitionIndex = UnsignedIntLiteral() { partitionList.add(partitionIndex); } + ( + partitionIndex = UnsignedIntLiteral() { partitionList.add(partitionIndex); } + )* + | + partitionName = SimpleIdentifier() { partitionNamesList.add(partitionName); } + ( + partitionName = SimpleIdentifier() { partitionNamesList.add(partitionName); } + )* + ) + + ] + { + return new SqlAlterTableAddPlacement(s.end(this), entity, columnList, store, partitionList, partitionNamesList); + } + | + + + + + store = SimpleIdentifier() + { + return new SqlAlterTableDropPlacement(s.end(this), entity, store); + } + | + + + ( + + + column = SimpleIdentifier() + + + store = SimpleIdentifier() + { + return new SqlAlterTableModifyPlacementAddColumn(s.end(this), entity, column, store); + } + | + + + column = SimpleIdentifier() + + + store = SimpleIdentifier() + { + return new SqlAlterTableModifyPlacementDropColumn(s.end(this), entity, column, store); + } + | + columnList = ParenthesizedSimpleIdentifierList() + + + store = SimpleIdentifier() + [ + + + ( + partitionIndex = UnsignedIntLiteral() { partitionList.add(partitionIndex); } + ( + partitionIndex = UnsignedIntLiteral() { partitionList.add(partitionIndex); } + )* + | + + partitionName = SimpleIdentifier() { partitionNamesList.add(partitionName); } + ( + partitionName = SimpleIdentifier() { partitionNamesList.add(partitionName); } + )* + ) + + ] + { + return new SqlAlterTableModifyPlacement(s.end(this), entity, columnList, store, partitionList, partitionNamesList); + } + ) + + | + + + + ( + partitionIndex = UnsignedIntLiteral() { partitionList.add(partitionIndex); } + ( + partitionIndex = UnsignedIntLiteral() { partitionList.add(partitionIndex); } + )* + | + + partitionName = SimpleIdentifier() { partitionNamesList.add(partitionName); } + ( + partitionName = SimpleIdentifier() { partitionNamesList.add(partitionName); } + )* + ) + + + store = SimpleIdentifier() + { + return new SqlAlterTableModifyPartitions(s.end(this), entity, store, partitionList, partitionNamesList); + } + + | + + ( + { unique = true; } + | + { unique = false; } + ) + + indexName = SimpleIdentifier() + + ( + columnList = ParenthesizedSimpleIdentifierList() + | + column = SimpleIdentifier() + { + columnList = new SqlNodeList(Arrays.asList( new SqlNode[]{ column }), s.end(this)); + } + ) + ( + indexMethod = SimpleIdentifier() + | + { indexMethod = null; } + ) + ( + storeName = SimpleIdentifier() + | + { storeName = null; } + ) + { + return new SqlAlterTableAddIndex(s.end(this), entity, columnList, unique, indexMethod, indexName, storeName); + } + | + + indexName = SimpleIdentifier() + { + return new SqlAlterTableDropIndex(s.end(this), entity, indexName); + } + | + + column = SimpleIdentifier() + statement = AlterTableModifyColumn(s, entity, column) + { + return statement; + } + + | + + ( + partitionType = SimpleIdentifier() + | + { partitionType = new SqlIdentifier( "RANGE", s.end(this) );} + + | + { partitionType = new SqlIdentifier( "TEMPERATURE", s.end(this) ); + rawPartitionInfo = new RawTemperaturePartitionInformation(); + rawPartitionInfo.setPartitionType( partitionType ); + } + partitionColumn = SimpleIdentifier() { rawPartitionInfo.setPartitionColumn( partitionColumn ); } + + partitionName = SimpleIdentifier() { partitionNamesList.add(partitionName); } + + partitionValues = Literal() + { + partitionQualifiers.add(partitionValues); + ((RawTemperaturePartitionInformation)rawPartitionInfo).setHotAccessPercentageIn( 
partitionValues ); + } + {partitionQualifierList.add(partitionQualifiers); partitionQualifiers = new ArrayList();} + + partitionName = SimpleIdentifier() { partitionNamesList.add(partitionName); } + + partitionValues = Literal() + { + partitionQualifiers.add(partitionValues); + ((RawTemperaturePartitionInformation)rawPartitionInfo).setHotAccessPercentageOut( partitionValues ); + } + {partitionQualifierList.add(partitionQualifiers); partitionQualifiers = new ArrayList();} + + + ( + { ((RawTemperaturePartitionInformation)rawPartitionInfo).setAccessPattern( new SqlIdentifier( "ALL", s.end(this) ) ); tmpIdent = null; } + | + { ((RawTemperaturePartitionInformation)rawPartitionInfo).setAccessPattern( new SqlIdentifier( "WRITE", s.end(this) ) ); tmpIdent = null; } + | + { ((RawTemperaturePartitionInformation)rawPartitionInfo).setAccessPattern( new SqlIdentifier( "READ", s.end(this) ) ); tmpIdent = null;} + ) + + tmpInt = UnsignedIntLiteral() { ((RawTemperaturePartitionInformation)rawPartitionInfo).setInterval( tmpInt ); tmpInt = 0; } + tmpIdent = SimpleIdentifier() { ((RawTemperaturePartitionInformation)rawPartitionInfo).setIntervalUnit( tmpIdent ); tmpIdent = null; } + numPartitions = UnsignedIntLiteral() {rawPartitionInfo.setNumPartitions( numPartitions );} + tmpIdent = SimpleIdentifier() { + ((RawTemperaturePartitionInformation)rawPartitionInfo).setInternalPartitionFunction( tmpIdent ); tmpIdent = null; + } + { + rawPartitionInfo.setPartitionNamesList( CoreUtil.toNodeList( partitionNamesList, Identifier.class ) ); + rawPartitionInfo.setPartitionQualifierList( SqlUtil.toNodeListList( partitionQualifierList ) ); + + return new SqlAlterTableAddPartitions(s.end(this), entity, partitionColumn, partitionType, numPartitionGroups, numPartitions, partitionNamesList, partitionQualifierList, rawPartitionInfo); + } + ) + + partitionColumn = SimpleIdentifier() + [ + ( + numPartitionGroups = UnsignedIntLiteral() + | + + partitionName = SimpleIdentifier() { partitionNamesList.add(partitionName); } + ( + partitionName = SimpleIdentifier() { partitionNamesList.add(partitionName); } + )* + + + | + + partitionName = SimpleIdentifier() { partitionNamesList.add(partitionName); } + + partitionValues = Literal() { partitionQualifiers.add(partitionValues); } + ( + partitionValues = Literal() { partitionQualifiers.add(partitionValues); } + )* + {partitionQualifierList.add(partitionQualifiers); partitionQualifiers = new ArrayList();} + ( + partitionName = SimpleIdentifier() { partitionNamesList.add(partitionName); } + + partitionValues = Literal() { partitionQualifiers.add(partitionValues); } + ( + partitionValues = Literal() { partitionQualifiers.add(partitionValues); } + )* + {partitionQualifierList.add(partitionQualifiers); partitionQualifiers = new ArrayList();} + )* + + ) + ] + { + rawPartitionInfo = new RawPartitionInformation(); + return new SqlAlterTableAddPartitions(s.end(this), entity, partitionColumn, partitionType, numPartitionGroups, numPartitions, partitionNamesList, partitionQualifierList, rawPartitionInfo); + } + + | + + { + return new SqlAlterTableMergePartitions(s.end(this), entity); + } + ) +} + +/** +* Parses the MODIFY COLUMN part of an ALTER TABLE statement. 
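+* Illustrative forms (identifiers are placeholders, not from this patch):
+*   ALTER TABLE emps MODIFY COLUMN age SET NOT NULL
+*   ALTER TABLE emps MODIFY COLUMN age SET DEFAULT 18
+*   ALTER TABLE emps MODIFY COLUMN age DROP DEFAULT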
+*/ +SqlAlterTableModifyColumn AlterTableModifyColumn(Span s, SqlIdentifier entity, SqlIdentifier column) : +{ + SqlDataTypeSpec type = null; + Boolean nullable = null; + SqlIdentifier beforeColumn = null; + SqlIdentifier afterColumn = null; + SqlNode defaultValue = null; + Boolean dropDefault = null; + String collation = null; +} +{ + ( + + { nullable = false; } + | + + { nullable = true; } + | + + type = DataType() + | + + ( + + beforeColumn = SimpleIdentifier() + | + + afterColumn = SimpleIdentifier() + ) + | + + ( + { collation = "CASE SENSITIVE"; } + | + { collation = "CASE INSENSITIVE"; } + ) + | + defaultValue = Expression(ExprContext.ACCEPT_NONCURSOR) + | + { dropDefault = true; } + ) + { + return new SqlAlterTableModifyColumn(s.end(this), entity, column, type, nullable, beforeColumn, afterColumn, collation, defaultValue, dropDefault); + } +} + + +SqlAlterConfig SqlAlterConfig(Span s) : +{ + final SqlNode key; + final SqlNode value; +} +{ + key = Expression(ExprContext.ACCEPT_NONCURSOR) + value = Expression(ExprContext.ACCEPT_NONCURSOR) + { + return new SqlAlterConfig(s.end(this), key, value); + } +} + + +SqlAlterAdaptersAdd SqlAlterAdaptersAdd(Span s) : +{ + final SqlNode uniqueName; + final SqlNode adapterName; + final SqlNode adapterType; + final SqlNode config; +} +{ + uniqueName = Expression(ExprContext.ACCEPT_NONCURSOR) + adapterName = Expression(ExprContext.ACCEPT_NONCURSOR) + adapterType = Expression(ExprContext.ACCEPT_NONCURSOR) + config = Expression(ExprContext.ACCEPT_NONCURSOR) + { + return new SqlAlterAdaptersAdd(s.end(this), uniqueName, adapterName, adapterType, config); + } +} + + +SqlAlterAdaptersDrop SqlAlterAdaptersDrop(Span s) : +{ + final SqlNode uniqueName; +} +{ + uniqueName = Expression(ExprContext.ACCEPT_NONCURSOR) + { + return new SqlAlterAdaptersDrop(s.end(this), uniqueName); + } +} + + +SqlAlterInterfacesAdd SqlAlterInterfacesAdd(Span s) : +{ + final SqlNode uniqueName; + final SqlNode clazzName; + final SqlNode config; +} +{ + uniqueName = Expression(ExprContext.ACCEPT_NONCURSOR) + clazzName = Expression(ExprContext.ACCEPT_NONCURSOR) + config = Expression(ExprContext.ACCEPT_NONCURSOR) + { + return new SqlAlterInterfacesAdd(s.end(this), uniqueName, clazzName, config); + } +} + + +SqlAlterInterfacesDrop SqlAlterInterfacesDrop(Span s) : +{ + final SqlNode uniqueName; +} +{ + uniqueName = Expression(ExprContext.ACCEPT_NONCURSOR) + { + return new SqlAlterInterfacesDrop(s.end(this), uniqueName); + } +} + + +//////////////////////////////////// +/////////////parserimpl end +//////////////////////////////////// +/////////////ddlparser start + +boolean IfNotExistsOpt() : +{ +} +{ + { return true; } + | + { return false; } +} + +boolean IfExistsOpt() : +{ +} +{ + { return true; } + | + { return false; } +} + + +SqlCreate SqlCreateSchema(Span s, boolean replace) : +{ + final boolean ifNotExists; + final SqlIdentifier id; + final NamespaceType namespaceType; +} +{ + ( + { namespaceType = NamespaceType.DOCUMENT; } + | + { namespaceType = NamespaceType.RELATIONAL; } + ) + ifNotExists = IfNotExistsOpt() id = CompoundIdentifier() + { + return SqlDdlNodes.createSchema(s.end(this), replace, ifNotExists, id, namespaceType); + } +} + +SqlNodeList Options() : +{ + final Span s; + final List + list = new ArrayList + (); +} +{ + { s = span(); } + + [ + Option(list) + ( + + Option(list) + )* + ] + { + return new SqlNodeList(list, s.end(this)); + } +} + +void Option(List list) : +{ + final SqlIdentifier id; + final SqlNode value; +} +{ + id = SimpleIdentifier() + value = 
Literal() { + list.add(id); + list.add(value); +} +} + +SqlNodeList TableElementList() : +{ + final Span s; + final List + list = new ArrayList + (); +} +{ + { s = span(); } + TableElement(list) + ( + TableElement(list) + )* + { + return new SqlNodeList(list, s.end(this)); + } +} + +void TableElement(List list) : +{ + final SqlIdentifier id; + final SqlDataTypeSpec type; + final boolean nullable; + final SqlNode e; + final SqlNode constraint; + SqlIdentifier name = null; + final SqlNodeList columnList; + final Span s = Span.of(); + final ColumnStrategy strategy; + final String collation; +} +{ + id = SimpleIdentifier() + ( + type = DataType() + ( + { nullable = true; } + | + { nullable = false; } + | + { nullable = true; } + ) + ( + [ ] + + e = Expression(ExprContext.ACCEPT_SUB_QUERY) + + ( + { strategy = ColumnStrategy.VIRTUAL; } + | + { strategy = ColumnStrategy.STORED; } + | + { strategy = ColumnStrategy.VIRTUAL; } + ) + | + e = Expression(ExprContext.ACCEPT_SUB_QUERY) { + strategy = ColumnStrategy.DEFAULT; + } + | + { + e = null; + strategy = nullable ? ColumnStrategy.NULLABLE : ColumnStrategy.NOT_NULLABLE; + } + ) + ( + + ( + { collation = "CASE SENSITIVE"; } + | + { collation = "CASE INSENSITIVE"; } + ) + | + { + collation = null; + } + ) + { + list.add( SqlDdlNodes.column(s.add(id).end(this), id, type.withNullable(nullable), collation, e, strategy)); + } + | + { list.add(id); } + ) + | + id = SimpleIdentifier() { + list.add(id); + } + | + [ { s.add(this); } name = SimpleIdentifier() ] + ( + { s.add(this); } + + e = Expression(ExprContext.ACCEPT_SUB_QUERY) + { + list.add(SqlDdlNodes.check(s.end(this), name, e)); + } + | + { s.add(this); } + columnList = ParenthesizedSimpleIdentifierList() { + list.add(SqlDdlNodes.unique(s.end(columnList), name, columnList)); + } + | + { s.add(this); } + + columnList = ParenthesizedSimpleIdentifierList() { + list.add(SqlDdlNodes.primary(s.end(columnList), name, columnList)); + } ) - { - list.add(e); - } +} + +SqlNodeList AttributeDefList() : +{ + final Span s; + final List + list = new ArrayList(); +} +{ + { s = span(); } + AttributeDef(list) + ( + AttributeDef(list) )* - - { + { return new SqlNodeList(list, s.end(this)); } } -/** - * Parses function parameter lists including DISTINCT keyword recognition, - * DEFAULT, and named argument assignment. 
- */ -List FunctionParameterList( - ExprContext exprContext) : +void AttributeDef(List list) : { + final SqlIdentifier id; + final SqlDataTypeSpec type; + final boolean nullable; SqlNode e = null; - List list = new ArrayList(); + final Span s = Span.of(); } { - - [ - { - e = SqlLiteral.createSymbol(SqlSelectKeyword.DISTINCT, getPos()); - } - | - { - e = SqlLiteral.createSymbol(SqlSelectKeyword.ALL, getPos()); - } - ] + id = SimpleIdentifier() + ( + type = DataType() + ( + { nullable = true; } + | + { nullable = false; } + | + { nullable = true; } + ) + ) + [ e = Expression(ExprContext.ACCEPT_SUB_QUERY) ] { - list.add(e); + list.add(SqlDdlNodes.attribute(s.add(id).end(this), id, + type.withNullable(nullable), e, null)); } - Arg0(list, exprContext) +} + +SqlCreate SqlCreateType(Span s, boolean replace) : +{ + final SqlIdentifier id; + SqlNodeList attributeDefList = null; + SqlDataTypeSpec type = null; +} +{ + + id = CompoundIdentifier() + ( - { - // a comma-list can't appear where only a query is expected - checkNonQueryExpression(exprContext); - } - Arg(list, exprContext) - )* - + attributeDefList = AttributeDefList() + | + type = DataType() + ) { - return list; + return SqlDdlNodes.createType(s.end(this), replace, id, attributeDefList, type); } } -void Arg0(List list, ExprContext exprContext) : +SqlCreate SqlCreateTable(Span s, boolean replace) : { - SqlIdentifier name = null; - SqlNode e = null; - final ExprContext firstExprContext; + final boolean ifNotExists; + final SqlIdentifier id; + SqlNodeList tableElementList = null; + SqlNode query = null; + SqlIdentifier store = null; + SqlIdentifier partitionColumn = null; + SqlIdentifier partitionType = null; + int numPartitionGroups = 0; + int numPartitions = 0; + List partitionNamesList = new ArrayList(); + SqlIdentifier partitionName = null; + List< List> partitionQualifierList = new ArrayList>(); + List partitionQualifiers = new ArrayList(); + SqlNode partitionValues = null; + SqlIdentifier tmpIdent = null; + int tmpInt = 0; + RawPartitionInformation rawPartitionInfo; +} +{ +
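+    // Illustrative forms (identifiers are placeholders, not from this patch):
+    //   CREATE TABLE IF NOT EXISTS emps (empid INTEGER NOT NULL, name VARCHAR(64))
+    //   CREATE TABLE emps2 AS SELECT * FROM emps ON STORE hsqldb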
    ifNotExists = IfNotExistsOpt() id = CompoundIdentifier() + [ tableElementList = TableElementList() ] + [ query = OrderedQueryOrExpr(ExprContext.ACCEPT_QUERY) ] + [ store = SimpleIdentifier() ] + [ + ( + partitionType = SimpleIdentifier() + | + { partitionType = new SqlIdentifier( "RANGE", s.end(this) );} + | + { partitionType = new SqlIdentifier( "TEMPERATURE", s.end(this) ); + rawPartitionInfo = new RawTemperaturePartitionInformation(); + rawPartitionInfo.setPartitionType( partitionType ); + } + partitionColumn = SimpleIdentifier() { rawPartitionInfo.setPartitionColumn( partitionColumn ); } + + partitionName = SimpleIdentifier() { partitionNamesList.add(partitionName); } + + partitionValues = Literal() + { + partitionQualifiers.add(partitionValues); + ((RawTemperaturePartitionInformation)rawPartitionInfo).setHotAccessPercentageIn( partitionValues ); + } + {partitionQualifierList.add(partitionQualifiers); partitionQualifiers = new ArrayList();} + + partitionName = SimpleIdentifier() { partitionNamesList.add(partitionName); } + + partitionValues = Literal() + { + partitionQualifiers.add(partitionValues); + ((RawTemperaturePartitionInformation)rawPartitionInfo).setHotAccessPercentageOut( partitionValues ); + } + {partitionQualifierList.add(partitionQualifiers); partitionQualifiers = new ArrayList();} + + + + ( + { ((RawTemperaturePartitionInformation)rawPartitionInfo).setAccessPattern( new SqlIdentifier( "ALL", s.end(this) ) ); tmpIdent = null; } + | + { ((RawTemperaturePartitionInformation)rawPartitionInfo).setAccessPattern( new SqlIdentifier( "WRITE", s.end(this) ) ); tmpIdent = null; } + | + { ((RawTemperaturePartitionInformation)rawPartitionInfo).setAccessPattern( new SqlIdentifier( "READ", s.end(this) ) ); tmpIdent = null;} + ) + + tmpInt = UnsignedIntLiteral() { ((RawTemperaturePartitionInformation)rawPartitionInfo).setInterval( tmpInt ); tmpInt = 0; } + tmpIdent = SimpleIdentifier() { ((RawTemperaturePartitionInformation)rawPartitionInfo).setIntervalUnit( tmpIdent ); tmpIdent = null; } + numPartitions = UnsignedIntLiteral() {rawPartitionInfo.setNumPartitions( numPartitions );} + tmpIdent = SimpleIdentifier() { + ((RawTemperaturePartitionInformation)rawPartitionInfo).setInternalPartitionFunction( tmpIdent ); tmpIdent = null; + } + { + rawPartitionInfo.setPartitionNamesList( CoreUtil.toNodeList( partitionNamesList, Identifier.class ) ); + rawPartitionInfo.setPartitionQualifierList( SqlUtil.toNodeListList( partitionQualifierList ) ); + + return SqlDdlNodes.createTable(s.end(this), replace, ifNotExists, id, tableElementList, query, store, partitionType, partitionColumn, numPartitionGroups, numPartitions, partitionNamesList, partitionQualifierList, rawPartitionInfo); + } + ) + partitionColumn = SimpleIdentifier() + [ + ( + numPartitionGroups = UnsignedIntLiteral() + | + + partitionName = SimpleIdentifier() { partitionNamesList.add(partitionName); } + ( + partitionName = SimpleIdentifier() { partitionNamesList.add(partitionName); } + )* + + | + + partitionName = SimpleIdentifier() { partitionNamesList.add(partitionName); } + + partitionValues = Literal() { partitionQualifiers.add(partitionValues); } + ( + partitionValues = Literal() { partitionQualifiers.add(partitionValues); } + )* + {partitionQualifierList.add(partitionQualifiers); partitionQualifiers = new ArrayList();} + ( + partitionName = SimpleIdentifier() { partitionNamesList.add(partitionName); } + + partitionValues = Literal() { partitionQualifiers.add(partitionValues); } + ( + partitionValues = Literal() { 
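+                        // Each Literal() is one qualifier value for the current partition
+                        // group; the collected list is flushed into partitionQualifierList below.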
partitionQualifiers.add(partitionValues); } + )* + {partitionQualifierList.add(partitionQualifiers); partitionQualifiers = new ArrayList();} + )* + + + ) + + ] + ] { - // we've now seen left paren, so queries inside should - // be allowed as sub-queries - switch (exprContext) { - case ACCEPT_SUB_QUERY: - firstExprContext = ExprContext.ACCEPT_NONCURSOR; - break; - case ACCEPT_CURSOR: - firstExprContext = ExprContext.ACCEPT_ALL; - break; - default: - firstExprContext = exprContext; - break; - } + rawPartitionInfo = new RawPartitionInformation(); + return SqlDdlNodes.createTable(s.end(this), replace, ifNotExists, id, tableElementList, query, store, partitionType, partitionColumn, numPartitionGroups, numPartitions, partitionNamesList, partitionQualifierList, rawPartitionInfo); } } + +SqlCreate SqlCreateView(Span s, boolean replace) : +{ + final SqlIdentifier id; + SqlNodeList columnList = null; + final SqlNode query; +} +{ + id = CompoundIdentifier() + [ columnList = ParenthesizedSimpleIdentifierList() ] + query = OrderedQueryOrExpr(ExprContext.ACCEPT_QUERY) { + return SqlDdlNodes.createView(s.end(this), replace, id, columnList, query); + } +} + +SqlCreate SqlCreateMaterializedView(Span s, boolean replace) : +{ + final SqlIdentifier id; + final boolean ifNotExists; + SqlNodeList columnList = null; + final SqlNode query; + SqlIdentifier storeName = null; + List store = new ArrayList(); + String freshnessType = null; + Integer time = null; + SqlIdentifier freshnessId = null; +} +{ + ifNotExists = IfNotExistsOpt() id = CompoundIdentifier() + [ columnList = ParenthesizedSimpleIdentifierList() ] + query = OrderedQueryOrExpr(ExprContext.ACCEPT_QUERY) + [ storeName = SimpleIdentifier() { store.add(storeName); } + ( + storeName = SimpleIdentifier() { store.add(storeName); } + )*] + [ + ( + ( time=UnsignedIntLiteral() freshnessId=CompoundIdentifier()) + { + freshnessType="INTERVAL"; + } + | + ( time=UnsignedIntLiteral()) + { + freshnessType="UPDATE"; + } + |() + { + freshnessType="MANUEL"; + } + ) ]{ + return SqlDdlNodes.createMaterializedView(s.end(this), replace, ifNotExists, id, columnList, query, store, freshnessType, time, freshnessId); + } +} + +private void FunctionJarDef(SqlNodeList usingList) : +{ + final SqlDdlNodes.FileType fileType; + final SqlNode uri; +} { - [ - name = SimpleIdentifier() - ] ( - e = Default() - | - e = OrderedQueryOrExpr(firstExprContext) - ) - { - if (e != null) { - if (name != null) { - e = (SqlCall)OperatorRegistry.get( OperatorName.ARGUMENT_ASSIGNMENT ).createCall( - Span.of(name, e).pos(), e, name); - } - list.add(e); - } + { fileType = SqlDdlNodes.FileType.ARCHIVE; } + | + { fileType = SqlDdlNodes.FileType.FILE; } + | + { fileType = SqlDdlNodes.FileType.JAR; } + ) { + usingList.add(SqlLiteral.createSymbol(fileType, getPos())); + } + uri = StringLiteral() { + usingList.add(uri); } } -void Arg(List list, ExprContext exprContext) : +SqlCreate SqlCreateFunction(Span s, boolean replace) : { - SqlIdentifier name = null; - SqlNode e = null; + final boolean ifNotExists; + final SqlIdentifier id; + final SqlNode className; + SqlNodeList usingList = SqlNodeList.EMPTY; } { + ifNotExists = IfNotExistsOpt() + id = CompoundIdentifier() + + className = StringLiteral() [ - name = SimpleIdentifier() - ] - ( - e = Default() - | - e = Expression(exprContext) - ) - { - if (e != null) { - if (name != null) { - e = (SqlCall) OperatorRegistry.get( OperatorName.ARGUMENT_ASSIGNMENT ).createCall( Span.of(name, e).pos(), e, name ); - } - list.add(e); + { + usingList = new 
SqlNodeList(getPos()); } + FunctionJarDef(usingList) + ( + + FunctionJarDef(usingList) + )* + ] { + return SqlDdlNodes.createFunction(s.end(this), replace, ifNotExists, id, className, usingList); } } -SqlNode Default() : {} +SqlDrop SqlDropSchema(Span s, boolean replace) : { - { - return (SqlCall) OperatorRegistry.get( OperatorName.DEFAULT ).createCall(getPos()); + final boolean ifExists; + final SqlIdentifier id; +} +{ + ifExists = IfExistsOpt() id = CompoundIdentifier() { + return SqlDdlNodes.dropSchema(s.end(this), ifExists, id); } } -/** - * Parses a query (SELECT, UNION, INTERSECT, EXCEPT, VALUES, TABLE) followed by - * the end-of-file symbol. - */ -SqlNode SqlQueryEof() : +SqlDrop SqlDropType(Span s, boolean replace) : { - SqlNode query; + final boolean ifExists; + final SqlIdentifier id; } { - query = OrderedQueryOrExpr(ExprContext.ACCEPT_QUERY) - - { return query; } + ifExists = IfExistsOpt() id = CompoundIdentifier() { + return SqlDdlNodes.dropType(s.end(this), ifExists, id); + } } -/** - * Parses an SQL statement. - */ -SqlNode SqlStmt() : +SqlDrop SqlDropTable(Span s, boolean replace) : { - SqlNode stmt; + final boolean ifExists; + final SqlIdentifier id; } { - ( -<#-- Add methods to parse additional statements here --> -<#list parser.statementParserMethods as method> - stmt = ${method} - | - -// stmt = SqlSetOption(Span.of()) -// | - stmt = SqlAlter() - | -<#if parser.createStatementParserMethods?size != 0> - stmt = SqlCreate() - | - -<#if parser.dropStatementParserMethods?size != 0> - stmt = SqlDrop() - | - - stmt = OrderedQueryOrExpr(ExprContext.ACCEPT_QUERY) - | - stmt = Explain() - | - stmt = SqlDescribe() - | - stmt = SqlInsert() - | - stmt = SqlDelete() - | - stmt = SqlUpdate() - | - stmt = SqlMerge() - | - stmt = SqlProcedureCall() - ) - { - return stmt; +
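+    // Illustrative usage (the table name is a placeholder): DROP TABLE IF EXISTS emps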
    ifExists = IfExistsOpt() id = CompoundIdentifier() { + return SqlDdlNodes.dropTable(s.end(this), ifExists, id); } } -/** - * Parses an SQL statement followed by the end-of-file symbol. - */ -SqlNode SqlStmtEof() : +SqlDrop SqlDropView(Span s, boolean replace) : { - SqlNode stmt; + final boolean ifExists; + final SqlIdentifier id; } { - stmt = SqlStmt() - { - return stmt; + ifExists = IfExistsOpt() id = CompoundIdentifier() { + return SqlDdlNodes.dropView(s.end(this), ifExists, id); + } +} + +SqlDrop SqlDropMaterializedView(Span s, boolean replace) : +{ + final boolean ifExists; + final SqlIdentifier id; +} +{ + ifExists = IfExistsOpt() id = CompoundIdentifier() { + return SqlDdlNodes.dropMaterializedView(s.end(this), ifExists, id); } } -<#-- Add implementations of additional parser statement calls here --> -<#list parser.implementationFiles as file> - <#include "/@includes/"+file /> - +SqlDrop SqlDropFunction(Span s, boolean replace) : +{ + final boolean ifExists; + final SqlIdentifier id; +} +{ + ifExists = IfExistsOpt() + id = CompoundIdentifier() { + return SqlDdlNodes.dropFunction(s.end(this), ifExists, id); + } +} + +//////////////////////////////////// +/////////////ddlparser end + /** * Parses a leaf SELECT expression without ORDER BY. @@ -1568,10 +2957,6 @@ SqlLiteral JoinType() : [ ] { joinType = JoinType.FULL; } | { joinType = JoinType.CROSS; } -<#list parser.joinTypes as method> - | - joinType = ${method}() - ) { return SqlLiteral.createSymbol(joinType, getPos()); @@ -2211,7 +3596,7 @@ SqlNode GroupingElement() : /** * Parses a list of expressions separated by commas. */ -SqlNodeList ExpressionCommaList(final Span s, ExprContext exprContext) : +SqlNodeList ExpressionCommaList( Span s, ExprContext exprContext) : { List list; SqlNode e; @@ -3476,18 +4861,31 @@ SqlAlter SqlAlter() : { { s = span(); } ( -<#-- additional literal parser methods are included here --> -<#list parser.alterStatementParserMethods as method> - alterNode = ${method}(s) - <#sep>| - + alterNode = SqlSetOption(s) + | + alterNode = SqlAlterSchema(s) + | + alterNode = SqlAlterView(s) + | + alterNode = SqlAlterMaterializedView(s) + | + alterNode = SqlAlterTable(s) + | + alterNode = SqlAlterConfig(s) + | + alterNode = SqlAlterAdaptersAdd(s) + | + alterNode = SqlAlterAdaptersDrop(s) + | + alterNode = SqlAlterInterfacesAdd(s) + | + alterNode = SqlAlterInterfacesDrop(s) ) { return alterNode; } } -<#if parser.createStatementParserMethods?size != 0> /** * Parses a CREATE statement. */ @@ -3505,19 +4903,24 @@ SqlCreate SqlCreate() : } ] ( -<#-- additional literal parser methods are included here --> -<#list parser.createStatementParserMethods as method> - create = ${method}(s, replace) - <#sep>| - + create = SqlCreateSchema(s, replace) + | + create = SqlCreateTable(s, replace) + | + create = SqlCreateType(s, replace) + | + create = SqlCreateView(s, replace) + | + create = SqlCreateMaterializedView(s, replace) + | + create = SqlCreateFunction(s, replace) ) { return create; } } - -<#if parser.dropStatementParserMethods?size != 0> + /** * Parses a DROP statement. 
*/ @@ -3530,17 +4933,22 @@ SqlDrop SqlDrop() : { { s = span(); } ( -<#-- additional literal parser methods are included here --> -<#list parser.dropStatementParserMethods as method> - drop = ${method}(s, replace) - <#sep>| - + drop = SqlDropSchema(s, replace) + | + drop = SqlDropTable(s, replace) + | + drop = SqlDropType(s, replace) + | + drop = SqlDropView(s, replace) + | + drop = SqlDropMaterializedView(s, replace) + | + drop = SqlDropFunction(s, replace) ) { return drop; } } - /** * Parses a literal expression, allowing continued string literals. @@ -3563,11 +4971,6 @@ SqlNode Literal() : e = DateTimeLiteral() | e = IntervalLiteral() -<#-- additional literal parser methods are included here --> -<#list parser.literalParserMethods as method> - | - e = ${method} - ) { return e; @@ -4254,7 +5657,6 @@ SqlNodeList ParenthesizedSimpleIdentifierList() : } } -<#if parser.includeCompoundIdentifier > /** * Parses a compound identifier. */ @@ -4323,9 +5725,6 @@ Pair ParenthesizedCompoundIdentifierList() : return Pair.of(new SqlNodeList(list, s.end(this)), new SqlNodeList(extendList, s.end(this))); } } -<#else> - <#include "/@includes/compoundIdentifier.ftl" /> - /** * Parses a NEW UDT(...) expression. @@ -4466,11 +5865,6 @@ SqlIdentifier TypeName() : polyType = PolyType(s) { typeName = new SqlIdentifier(polyType.name(), s.end(this)); } -<#-- additional types are included here --> -<#list parser.dataTypeParserMethods as method> - | - typeName = ${method} - | typeName = CollectionsTypeName() | @@ -6714,10 +8108,12 @@ SqlPostfixOperator PostfixRowOperator() : | < XML: "XML" > | < YEAR: "YEAR" > | < ZONE: "ZONE" > -<#-- additional parser keywords are included here --> -<#list parser.keywords as keyword> -| < ${keyword}: "${keyword}" > - +| < IF: "IF" > +| < STORED: "STORED" > +| < VIRTUAL: "VIRTUAL" > +| < JAR: "JAR" > +| < FILE: "FILE" > +| < ARCHIVE: "ARCHIVE" > } /** @@ -6754,13 +8150,46 @@ void NonReservedKeyWord0of3() : } { ( -<#list parser.nonReservedKeywords as keyword> -<#if keyword?index == 0> - <${keyword}> -<#elseif keyword?index % 3 == 0> - | <${keyword}> - - + + | + | + | + | + | + | + | + | + | + | + | + | + | + | + | + | + | + | + | + | + | + | + | + | + | + | + | + | + | + | + | + | + | + | + | + | + | + | + | ) } @@ -6770,13 +8199,120 @@ void NonReservedKeyWord1of3() : } { ( -<#list parser.nonReservedKeywords as keyword> -<#if keyword?index == 1> - <${keyword}> -<#elseif keyword?index % 3 == 1> - | <${keyword}> - - + + | + | + | + | + | + | + | + | + | + | + | + | + | + | + | + | + | + | + | + | + | + | + | + | + | + | + | + | + | + | + | + | + | + | + | + | + | + | + | + | + | + | + | + | + | + | + | + | + | + | + | + | + | + | + | + | + | + | + | + | + | + | + | + | + | + | + | + | + | + | + | + | + | + | + | + | + |